Dataset schema: code (string, 2–1.05M chars) · repo_name (string, 5–104 chars) · path (string, 4–251 chars) · language (1 class) · license (15 classes) · size (int32). Each record below is a code field followed by its metadata row:
| repo_name | path | language | license | size |
|---|---|---|---|---|
#!/usr/bin/env python
"""
--------------------------------------------------------------------------------
Created: Jackson Lee 11/4/14
This script reads a tab-delimited file of annotations and asynchronously
queries the MGRAST REST API to recover the original ncbi tax_id entry.
It then uses the ncbi tax_id to query NCBI for the taxonomic lineage.
Input file format:
query sequence id | hit m5nr id (md5sum) | percentage identity | alignment length | number of mismatches | number of gap openings | query start | query end | hit start | hit end | e-value | bit score | semicolon separated list of annotations
mgm4581121.3|contig-1350000035_45_1_2592_+ 0000679ceb3fc9c950779468e06329a7 61.03 136 53 654 789 366 501 6.80E-44 175 hyalin repeat protein
mgm4581121.3|contig-18000183_1_1_2226_+ 0000679ceb3fc9c950779468e06329a7 64.44 45 16 525 569 457 501 1.70E-08 57 hyalin repeat protein
['Download complete. 78538 rows retrieved']
MGRAST REST API:
http://api.metagenomics.anl.gov/m5nr/md5/<M5nr MD5 hash>?source=GenBank
e.g. http://api.metagenomics.anl.gov/m5nr/md5/000821a2e2f63df1a3873e4b280002a8?source=GenBank
resources:
http://api.metagenomics.anl.gov/api.html#m5nr
http://angus.readthedocs.org/en/2014/howe-mgrast.html
Returns:
{"next":null,"prev":null,"version":"10","url":"http://api.metagenomics.anl.gov//m5nr/md5/000821a2e2f63df1a3873e4b280002a8?source=GenBank&offset=0","data":[{"source":"GenBank","function":"sulfatase","ncbi_tax_id":399741,"accession":"ABV39241.1","type":"protein","organism":"Serratia proteamaculans 568","md5":"000821a2e2f63df1a3873e4b280002a8","alias":["GI:157320144"]}],"limit":10,"total_count":1,"offset":0}
This output is then buffered, the exact ncbi_tax_id is extracted, and the XML-based lineage is fetched from NCBI eutils:
http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=taxonomy&id=399741
<TaxaSet><Taxon><TaxId>399741</TaxId><ScientificName>Serratia proteamaculans 568</ScientificName><OtherNames><EquivalentName>Serratia proteamaculans str. 568</EquivalentName><EquivalentName>Serratia proteamaculans strain 568</EquivalentName></OtherNames><ParentTaxId>28151</ParentTaxId><Rank>no rank</Rank><Division>Bacteria</Division><GeneticCode><GCId>11</GCId><GCName>Bacterial, Archaeal and Plant Plastid</GCName></GeneticCode><MitoGeneticCode><MGCId>0</MGCId><MGCName>Unspecified</MGCName></MitoGeneticCode><Lineage>cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacteriales; Enterobacteriaceae; Serratia; Serratia proteamaculans</Lineage>
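A minimal synchronous sketch of the two-step lookup (requests only; field
names taken from the sample responses above):
    import requests
    md5 = '000821a2e2f63df1a3873e4b280002a8'
    rec = requests.get('http://api.metagenomics.anl.gov/m5nr/md5/' + md5,
                       params={'source': 'GenBank'}, timeout=10).json()
    tax_id = rec['data'][0]['ncbi_tax_id']
    xml = requests.get('http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi',
                       params={'db': 'taxonomy', 'id': tax_id}, timeout=10).text
    lineage = xml.split('<Lineage>')[1].split('</Lineage>')[0]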
Output file format:
A tab delimited file of
contig-faa-name\tlineage
an error.log of mismatches from both MGRAST and NCBI is also generated
--------------------------------------------------------------------------------
usage: query_ncbi_lineage_from_mgrast_md5.py -i mgrast_organism.txt -o output.file
"""
#-------------------------------------------------------------------------------
#
##http thread pool from: http://stackoverflow.com/questions/2632520/what-is-the-fastest-way-to-send-100-000-http-requests-in-python
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
#Header - Linkers, Libs, Constants
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from threading import Thread
import sys
import time
import requests
import json
from Queue import Queue
#-------------------------------------------------------------------------------
#function declarations
def doWork():
while not exitapp:
id, name, mgrast_urlstring = q.get()
if id % 100 == 0:
print 'Query: HTTP Thread: ' + str(id) + ' started.'
try:
mgrast_response = requests.get(url=mgrast_urlstring, timeout=10)
if mgrast_response.status_code == 200:
json_data = json.loads(mgrast_response.text)
if json_data['data'] != []:
if 'ncbi_tax_id' in json_data['data'][0]:
eutils_urlstring = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=taxonomy&id=' + str(json_data['data'][0]['ncbi_tax_id'])
eutils_response = requests.get(url=eutils_urlstring, timeout=10)
if eutils_response.status_code == 200:
if '<Lineage>' in eutils_response.text:
output_dict[name] = eutils_response.text.split('Lineage>')[1][0:-2]
else:
output_dict[name] = 'No NCBI'
else:
print 'HTTP error, Thread: ' + str(id) + ' in eutils worker with error: ' + eutils_response.reason
logfile.write(str(id) + '\t' + eutils_urlstring + '\t' + eutils_response.reason + '\n')
raise
else:
output_dict[name] = 'No MGRAST tax ID'
else:
output_dict[name] = 'No MGRAST source data'
else:
print 'HTTP error, Thread: ' + str(id) + ' in MG-RAST worker with error: ' + mgrast_response.reason
logfile.write(str(id) + '\t' + mgrast_urlstring + '\t' + mgrast_response.reason + '\n')
raise
except:
print 'Thread: ' + str(id) + '. Error. '
print sys.exc_info()[0]
q.task_done()
#-------------------------------------------------------------------------------
#Body
print "Running..."
if __name__ == '__main__':
parser = ArgumentParser(usage = "query_ncbi_lineage_from_mgrast_md5.py -i \
mgrast_organism.txt -o output.file",
description=__doc__,
formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("-i", "--input_file", action="store",
dest="inputfilename",
help="tab-delimited MGRAST organism file")
parser.add_argument("-o", "--output_filename", action="store",
dest="outputfilename",
help="tab-delimited output file")
options = parser.parse_args()
mandatories = ["outputfilename","inputfilename"]
for m in mandatories:
if not options.__dict__[m]:
print "\nError: Missing Arguments\n"
parser.print_help()
exit(-1)
inputfilename = options.inputfilename
outputfilename = options.outputfilename
infile_list = []
with open(inputfilename,'U') as infile:
infile_list = [line.strip().split('\t') for line in infile]
infile.close()
urlpool = []
name_list = []
for entry in infile_list[1:-1]:
contig_name = entry[0]
md5_hash = entry[1]
urlpool.append([contig_name, 'http://api.metagenomics.anl.gov/m5nr/md5/' + md5_hash + '?source=RefSeq'])
name_list.append(contig_name)
concurrent = 10
exitapp = False
output_dict = {}
print "Querying MGRAST REST API Service..."
with open('./' + outputfilename + '.errorlog.txt','w') as logfile:
q = Queue(concurrent * 2)
for i in range(concurrent):
t = Thread(target=doWork)
t.daemon = True
time.sleep(1)
t.start()
try:
for id, url_load in enumerate(urlpool):
q.put([id] + url_load)
q.join()
except KeyboardInterrupt:
exitapp = True
sys.exit(1)
logfile.close()
print "Matching taxonomies and writing..."
with open(outputfilename, 'w') as outfile:
for name in name_list:
if name in output_dict:
outfile.write(name + '\t' + output_dict[name] + '\n')
else:
outfile.write(name + '\t' + 'None\n')
outfile.close()
print "Done!"
| leejz/meta-omics-scripts | query_ncbi_lineage_from_mgrast_md5.py | Python | mit | 8,172 |
"""Implementation of Pearson collaborative filtering algorithm"""
from math import sqrt, pow
from mlcollection.lib.collaborativefiltering.collaborativefilterbase import \
AbstractCollaborativeFilter
from operator import itemgetter
from scipy import stats, isnan
__author__ = 'Paul Osborne <osbpau@gmail.com>'
class PearsonCollaborativeFilter(AbstractCollaborativeFilter):
"""Filter based on the Pearson correlation score between items/individuals
The Pearson correlation score or coefficient is a measure of how well two
sets of data fit onto a straight line. There is some added complexity
when compared to the Euclidean filtering algorithm, but the Pearson
algorithm does a better job of dealing with data which is not normalized.
An example where data is not normalized might be examining movie reviews.
It may be the case that some reviewers are just more harsh than other
reviewers. In this case we cannot just draw conclusions about shared
interest based on the score, we need to look a bit deeper to see if there
really is a correlation between the preferences of these two people.
The Pearson algorithm, when finding the similarity between two individuals,
first finds the subset of data that the two members have each rated/scored.
Then, the score is calculated (using some statistics) based on the sum of
the squares of the ratings and the sum of the products of the ratings.
The formula for Pearson's product-moment coefficient is as follows:
.. math::
\rho_{X,Y}={\mathrm{cov}(X,Y) \over \sigma_X \sigma_Y} ={E[(X-\mu_X)(Y-\mu_Y)] \over \sigma_X\sigma_Y}
which in the statistics world is often just referred to as `corr(x,y)`.
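For intuition, the coefficient for two raters' shared ratings can be computed
directly (a sketch with made-up ratings, not data from this package):
    >>> from scipy import stats
    >>> round(stats.pearsonr([4.0, 3.0, 5.0], [5.0, 2.0, 5.0])[0], 3)
    0.866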
"""
def __init__(self, ranked_dataset=None):
AbstractCollaborativeFilter.__init__(self, ranked_dataset)
def get_recommendations(self, individual):
"""Find recommendedations for this individual
Given the individual return a dictionary where the keys are the items
and the values are ratings for the movie for the individual. The
higher the rating, the more likely it is that the individual may like
the movie.
"""
dataset = self.ranked_dataset
totals = {}
sim_sums = {}
ind_items = set(dataset[individual].keys())
for other in (x for x in dataset if x != individual):
shared_items = ind_items & set(dataset[other].keys())
# get a measure of how similar individual and other are
if len(shared_items) == 0:
continue
x = [dataset[individual][i] for i in shared_items]
y = [dataset[other][i] for i in shared_items]
coeff = stats.pearsonr(x, y)[0]
if coeff < 0 or isnan(coeff):
continue
for item in set(dataset[other]) - ind_items:
if not item in sim_sums:
sim_sums[item] = 0
totals[item] = 0
sim_sums[item] += coeff * dataset[other][item]
totals[item] += coeff
rankings = {}
for item in totals:
rankings[item] = sim_sums[item] / totals[item]
return rankings
| posborne/mlcollection | mlcollection/lib/collaborativefiltering/pearson.py | Python | mit | 3,340 |
from django.shortcuts import render
from django.utils.translation import activate
def index(request):
# latest_question_list = Question.objects.order_by('-pub_date')[:5]
# context = {'latest_question_list': latest_question_list}
# activate('pt-br')
# print(request.LANGUAGE_CODE)
context = {}
return render(request, 'index.html', context)
| torchmed/torchmed | torchmed/views.py | Python | mit | 364 |
# for accessing babusca library.
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import smatrix
import scattering
import g1
import g2
import generators
| georglind/babusca | examples/context.py | Python | mit | 209 |
from setuptools import setup, find_packages
with open('pyluno/meta.py') as f:
exec(f.read())
setup(
name='pyluno',
version=__version__,
packages=find_packages(exclude=['tests']),
description='A Luno API for Python',
author='Cayle Sharrock/Grant Stephens',
author_email='grant@stephens.co.za',
scripts=['demo.py'],
install_requires=[
'futures>=3.0.3',
'nose>=1.3.7',
'requests>=2.8.1',
'pandas>=0.17.0',
],
license='MIT',
url='https://github.com/grantstephens/pyluno',
download_url='https://github.com/grantstephens/pyluno/tarball/%s'
% (__version__, ),
keywords='Luno Bitcoin exchange API',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business :: Financial',
'Topic :: Utilities',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
test_suite='tests',
extras_require={
'test': ['requests-mock>=0.7.0', 'nose'],
}
)
| grantstephens/pyluno | setup.py | Python | mit | 1,485 |
import re
from django.db.models import fields
from django.template.defaultfilters import slugify
def _unique_slugify(instance, value, slug_field_name='slug', queryset=None, slug_separator='-'):
slug_field = instance._meta.get_field(slug_field_name)
slug_len = slug_field.max_length
# Sort out the initial slug. Chop its length down if we need to.
slug = slugify(value)
if slug_len:
slug = slug[:slug_len]
slug = _slug_strip(slug, slug_separator)
original_slug = slug
# Create a queryset, excluding the current instance.
if queryset is None:
queryset = instance.__class__._default_manager.all()
if instance.pk:
queryset = queryset.exclude(pk=instance.pk)
# Find a unique slug. If one matches, add '-2' to the end and try again
# (then '-3', etc).
next = 2
while not slug or queryset.filter(**{slug_field_name: slug}):
slug = original_slug
end = '-%s' % next
if slug_len and len(slug) + len(end) > slug_len:
slug = slug[:slug_len-len(end)]
slug = _slug_strip(slug, slug_separator)
slug = '%s%s' % (slug, end)
next += 1
setattr(instance, slug_field.attname, slug)
return slug
def _slug_strip(value, separator=None):
"""
Cleans up a slug by removing slug separator characters that occur at the
beginning or end of a slug.
If an alternate separator is used, it will also replace any instances of the
default '-' separator with the new separator.
"""
if separator == '-' or not separator:
re_sep = '-'
else:
re_sep = '(?:-|%s)' % re.escape(separator)
value = re.sub('%s+' % re_sep, separator, value)
return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)
class AutoSlugField(fields.SlugField):
"""Auto slug field, creates unique slug for model."""
def __init__(self, prepopulate_from, *args, **kwargs):
"""Create auto slug field.
If field is unique, the uniqueness of the slug is ensured from existing
slugs by adding extra number at the end of slug.
If field has slug given, it is used instead. If you want to re-generate
the slug, just set it :const:`None` or :const:`""` so it will be re-
generated automatically.
:param prepopulate_from: Must be assigned to list of field names which
are used to prepopulate automatically.
:type prepopulate_from: sequence
"""
self.prepopulate_separator = kwargs.get("prepopulate_separator", u"-")
self.prepopulate_from = prepopulate_from
kwargs["blank"] = True
super(fields.SlugField, self).__init__(*args, **kwargs)
def pre_save(self, model_instance, add): #@UnusedVariable
"""Pre-save event"""
current_slug = getattr(model_instance, self.attname)
# Use current slug instead, if it is given.
# Assumption: There are no empty slugs.
if not (current_slug is None or current_slug == ""):
slug = current_slug
else:
slug = self.prepopulate_separator.\
join(unicode(getattr(model_instance, prepop))
for prepop in self.prepopulate_from)
if self.unique:
return _unique_slugify(model_instance, value=slug,
slug_field_name=self.attname)
else:
return slugify(slug)[:self.max_length]
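# Usage sketch (hypothetical model; the names below are illustrative only,
# not taken from this app):
#
#   class Article(models.Model):
#       title = models.CharField(max_length=100)
#       slug = AutoSlugField(prepopulate_from=('title',), unique=True)
#
# On save, pre_save() joins the prepopulate_from fields, slugifies the result
# and, for a unique field, appends '-2', '-3', ... until no existing row collides.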
| underlost/GamerNews | gamernews/apps/news/fields.py | Python | mit | 3,543 |
import _plotly_utils.basevalidators
class VsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(self, plotly_name="vsrc", parent_name="cone", **kwargs):
super(VsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/cone/_vsrc.py | Python | mit | 426 |
# -*- coding: utf-8 -*-
""" hmacHash.py
Implementation of Request for Comments: 2104
HMAC: Keyed-Hashing for Message Authentication
HMAC is a mechanism for message authentication
using cryptographic hash functions. HMAC can be used with any
iterative cryptographic hash function, e.g., MD5, SHA-1, in
combination with a secret shared key. The cryptographic strength of
HMAC depends on the properties of the underlying hash function.
This implementation of HMAC uses a generic cryptographic 'hashFunction'
(self.H). Hash functions must conform to the crypto.hash method
conventions and are not directly compatible with the Python sha1 or md5 algorithms.
[IETF] RFC 2104 "HMAC: Keyed-Hashing for Message Authentication"
>>> key = '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
>>> keyedHashAlg = HMAC(SHA1, key)
>>> result = keyedHashAlg(data)
"""
from crypto.hash.hash import Hash
class HMAC(Hash):
""" To compute HMAC over the data `text' we perform
H(K XOR opad, H(K XOR ipad, text))
"""
def __init__(self, hashFunction, key = None):
""" initialize HMAC with hashfunction and optionally the key """
# should check for right type of function
self.H = hashFunction() # a new instance for inner hash
self.H_outer = hashFunction() # separate outer context to allow intermediate digests
self.B = self.H.raw_block_size # in bytes, note - hash block size typically 1
# and raw_block_size much larger
# e.g. raw_block_size is 64 bytes for SHA1 and MD5
self.name = 'HMAC_'+self.H.name
self.blocksize = 1 # single octets can be hashed by padding to raw block size
self.raw_block_size = self.H.raw_block_size
self.digest_size = self.H.digest_size
if key != None:
self.setKey(key)
else:
self.keyed = None
def setKey(self,key):
""" setKey(key) ... key is binary string """
if len(key) > self.B: # if key is too long then hash it
key = self.H(key) # humm... this is odd, hash can be smaller than B
else: # should raise error on short key, but breaks tests :-(
key = key + (self.B - len(key)) * chr(0)
self.k_xor_ipad = ''.join([chr(ord(bchar)^0x36) for bchar in key])
self.k_xor_opad = ''.join([chr(ord(bchar)^0x5C) for bchar in key])
self.keyed = 1
self.reset()
def reset(self):
self.H.reset()
if self.keyed == None :
raise ValueError('no key defined')  # string exceptions are invalid in Python 2.6+
self.H.update(self.k_xor_ipad) # start inner hash with key xored with ipad
# outer hash always called as one full pass (no updates)
def update(self,data):
if self.keyed == None :
raise ValueError('no key defined')
self.H.update(data)
def digest(self):
if self.keyed == None :
raise ValueError('no key defined')
return self.H_outer(self.k_xor_opad+self.H.digest())
from crypto.hash.sha1Hash import SHA1
class HMAC_SHA1(HMAC):
""" Predefined HMAC built on SHA1 """
def __init__(self, key = None):
""" optionally initialize with key """
HMAC.__init__(self,SHA1,key)
from crypto.hash.md5Hash import MD5
class HMAC_MD5(HMAC):
""" Predefined HMAC built on SHA1 """
def __init__(self, key = None):
""" optionally initialize with key """
HMAC.__init__(self,MD5,key)
| TheDSCPL/SSRE_2017-2018_group8 | Projeto/Python/cryptopy/crypto/keyedHash/hmacHash.py | Python | mit | 3,613 |
from __future__ import absolute_import
import six
if six.PY3:
from unittest import TestCase, mock
else:
import sys
if sys.version_info < (2, 7, 0):
from unittest2 import TestCase
else:
from unittest import TestCase
import mock
from BacklogPy.base import BacklogBase
class TestBacklogBase(TestCase):
def test_api_url(self):
backlog_base = BacklogBase('space-id', 'api-key')
self.assertEqual(backlog_base._api_url,
'https://space-id.backlog.jp/api/v2')
backlog_base = BacklogBase('space-id', 'api-key', suffix='com')
self.assertEqual(backlog_base._api_url,
'https://space-id.backlog.com/api/v2')
def test_request(self):
with mock.patch('requests.request') as m:
backlog_base = BacklogBase('space-id', 'api-key')
backlog_base._request('/path')
args, kwargs = m.call_args_list[0]
self.assertTupleEqual(args, ('GET',
'https://space-id.backlog.jp/api/v2/path'))
self.assertDictEqual(kwargs,
{'params': {'apiKey': 'api-key'}, 'data': {},
'headers': {}})
with mock.patch('requests.request') as m:
backlog_base._request('/path', method='POST')
args, kwargs = m.call_args_list[0]
self.assertTupleEqual(args,
('POST',
'https://space-id.backlog.jp/api/v2/path'))
self.assertDictEqual(kwargs,
{'params': {'apiKey': 'api-key'}, 'data': {},
'headers': {}})
with mock.patch('requests.request') as m:
backlog_base._request('/path', method='POST',
query_parameters={'id': 123})
args, kwargs = m.call_args_list[0]
self.assertTupleEqual(args,
('POST',
'https://space-id.backlog.jp/api/v2/path'))
self.assertDictEqual(kwargs,
{'params': {'apiKey': 'api-key', 'id': 123},
'data': {},
'headers': {}})
with mock.patch('requests.request') as m:
backlog_base._request('/path', method='POST',
query_parameters={'id': 123},
form_parameters={'name': 'abc'})
args, kwargs = m.call_args_list[0]
self.assertTupleEqual(args,
('POST',
'https://space-id.backlog.jp/api/v2/path'))
self.assertDictEqual(kwargs,
{'params': {'apiKey': 'api-key', 'id': 123},
'data': {'name': 'abc'},
'headers': {
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'}})
| koxudaxi/BacklogPy | tests/backlog/test_base.py | Python | mit | 3,123 |
# http://stackoverflow.com/questions/13202799/python-code-geometric-brownian-motion-whats-wrong
import matplotlib.pyplot as plt
import numpy as np
T = 2
mu = 0.1
sigma = 0.1
S0 = 10
dt = 0.01
alpha = 0.1
N = round(T/dt)
t = np.linspace(0, T, N)
W = np.random.standard_normal(size = N)
W = np.cumsum(W)*np.sqrt(dt) ### standard brownian motion ###
X = ((mu + alpha * W)-0.5*sigma**2)*t + sigma*W
S = S0*np.exp(X) ### geometric brownian motion ###
plt.plot(t, S, '-', t, W, '--')
plt.show()
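# Reference form of the exact GBM solution this discretizes (the extra
# `alpha * W` drift term comes from the linked Stack Overflow question):
#   S_t = S0 * exp((mu - sigma**2 / 2) * t + sigma * W_t)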
| laputian/dml | test/bm_stacko_1_1.py | Python | mit | 489 |
import mock
import lxml.etree as ET
from .utils import make_cobertura
def test_parse_path():
from pycobertura import Cobertura
xml_path = 'foo.xml'
with mock.patch('pycobertura.cobertura.os.path.exists', return_value=True):
with mock.patch('pycobertura.cobertura.ET.parse') as mock_parse:
cobertura = Cobertura(xml_path)
assert cobertura.xml is mock_parse.return_value.getroot.return_value
def test_version():
cobertura = make_cobertura()
assert cobertura.version == '1.9'
def test_line_rate():
cobertura = make_cobertura()
assert cobertura.line_rate() == 0.9
def test_line_rate_by_class():
cobertura = make_cobertura()
expected_line_rates = {
'Main': 1.0,
'search.BinarySearch': 0.9166666666666666,
'search.ISortedArraySearch': 1.0,
'search.LinearSearch': 0.7142857142857143,
}
for class_name in cobertura.classes():
assert cobertura.line_rate(class_name) == \
expected_line_rates[class_name]
def test_branch_rate():
cobertura = make_cobertura()
assert cobertura.branch_rate() == 0.75
def test_branch_rate_by_class():
cobertura = make_cobertura()
expected_branch_rates = {
'Main': 1.0,
'search.BinarySearch': 0.8333333333333334,
'search.ISortedArraySearch': 1.0,
'search.LinearSearch': 0.6666666666666666,
}
for class_name in cobertura.classes():
assert cobertura.branch_rate(class_name) == \
expected_branch_rates[class_name]
def test_total_misses():
cobertura = make_cobertura()
assert cobertura.total_misses() == 3
def test_missed_statements_by_class_name():
cobertura = make_cobertura()
expected_missed_statements = {
'Main': [],
'search.BinarySearch': [24],
'search.ISortedArraySearch': [],
'search.LinearSearch': [19, 24],
}
for class_name in cobertura.classes():
assert cobertura.missed_statements(class_name) == \
expected_missed_statements[class_name]
def test_list_packages():
cobertura = make_cobertura()
packages = cobertura.packages()
assert packages == ['', 'search']
def test_list_classes():
cobertura = make_cobertura()
classes = cobertura.classes()
assert classes == [
'Main',
'search.BinarySearch',
'search.ISortedArraySearch',
'search.LinearSearch'
]
def test_hit_lines__by_iterating_over_classes():
cobertura = make_cobertura()
expected_lines = {
'Main': [10, 16, 17, 18, 19, 23, 25, 26, 28, 29, 30],
'search.BinarySearch': [12, 16, 18, 20, 21, 23, 25, 26, 28, 29, 31],
'search.ISortedArraySearch': [],
'search.LinearSearch': [9, 13, 15, 16, 17],
}
for class_name in cobertura.classes():
assert cobertura.hit_statements(class_name) == expected_lines[class_name]
def test_missed_lines():
cobertura = make_cobertura()
expected_lines = {
'Main': [],
'search.BinarySearch': [24],
'search.ISortedArraySearch': [],
'search.LinearSearch': [19, 20, 21, 22, 23, 24],
}
for class_name in cobertura.classes():
assert cobertura.missed_lines(class_name) == expected_lines[class_name]
def test_total_statements():
cobertura = make_cobertura()
assert cobertura.total_statements() == 30
def test_total_statements_by_class():
cobertura = make_cobertura()
expected_total_statements = {
'Main': 11,
'search.BinarySearch': 12,
'search.ISortedArraySearch': 0,
'search.LinearSearch': 7,
}
for class_name in cobertura.classes():
assert cobertura.total_statements(class_name) == \
expected_total_statements[class_name]
def test_total_misses():
cobertura = make_cobertura()
assert cobertura.total_misses() == 3
def test_total_misses_by_class():
cobertura = make_cobertura()
expected_total_misses = {
'Main': 0,
'search.BinarySearch': 1,
'search.ISortedArraySearch': 0,
'search.LinearSearch': 2,
}
for class_name in cobertura.classes():
assert cobertura.total_misses(class_name) == \
expected_total_misses[class_name]
def test_total_hits():
cobertura = make_cobertura()
assert cobertura.total_hits() == 27
def test_total_hits_by_class():
cobertura = make_cobertura()
expected_total_misses = {
'Main': 11,
'search.BinarySearch': 11,
'search.ISortedArraySearch': 0,
'search.LinearSearch': 5,
}
for class_name in cobertura.classes():
assert cobertura.total_hits(class_name) == \
expected_total_misses[class_name]
def test_filename():
cobertura = make_cobertura()
expected_filenames = {
'Main': 'Main.java',
'search.BinarySearch': 'search/BinarySearch.java',
'search.ISortedArraySearch': 'search/ISortedArraySearch.java',
'search.LinearSearch': 'search/LinearSearch.java',
}
for class_name in cobertura.classes():
assert cobertura.filename(class_name) == \
expected_filenames[class_name]
def test_filepath():
base_path = 'foo/bar/baz'
cobertura = make_cobertura(base_path=base_path)
expected_filepaths = {
'Main': 'foo/bar/baz/Main.java',
'search.BinarySearch': 'foo/bar/baz/search/BinarySearch.java',
'search.ISortedArraySearch': 'foo/bar/baz/search/ISortedArraySearch.java',
'search.LinearSearch': 'foo/bar/baz/search/LinearSearch.java',
}
for class_name in cobertura.classes():
assert cobertura.filepath(class_name) == \
expected_filepaths[class_name]
def test_class_source__sources_not_found():
cobertura = make_cobertura('tests/cobertura.xml')
expected_sources = {
'Main': [(0, 'tests/Main.java not found', None)],
'search.BinarySearch': [(0, 'tests/search/BinarySearch.java not found', None)],
'search.ISortedArraySearch': [(0, 'tests/search/ISortedArraySearch.java not found', None)],
'search.LinearSearch': [(0, 'tests/search/LinearSearch.java not found', None)],
}
for class_name in cobertura.classes():
assert cobertura.class_source(class_name) == expected_sources[class_name]
def test_line_statuses():
cobertura = make_cobertura('tests/dummy.source1/coverage.xml')
expected_line_statuses = {
'dummy/__init__': [],
'dummy/dummy': [
(1, True),
(2, True),
(4, True),
(5, False),
(6, False),
],
'dummy/dummy2': [
(1, True),
(2, True),
],
'dummy/dummy4': [
(1, False),
(2, False),
(4, False),
(5, False),
(6, False)
],
}
for class_name in cobertura.classes():
assert cobertura.line_statuses(class_name) == \
expected_line_statuses[class_name]
def test_class_source__sources_found():
cobertura = make_cobertura('tests/dummy.source1/coverage.xml')
expected_sources = {
'dummy/__init__': [],
'dummy/dummy': [
(1, 'def foo():\n', True),
(2, ' pass\n', True),
(3, '\n', None),
(4, 'def bar():\n', True),
(5, " a = 'a'\n", False),
(6, " b = 'b'\n", False),
],
'dummy/dummy2': [
(1, 'def baz():\n', True),
(2, ' pass\n', True)
],
'dummy/dummy4': [
(1, 'def barbaz():\n', False),
(2, ' pass\n', False),
(3, '\n', None),
(4, 'def foobarbaz():\n', False),
(5, ' a = 1 + 3\n', False),
(6, ' pass\n', False)
],
}
for class_name in cobertura.classes():
assert cobertura.class_source(class_name) == \
expected_sources[class_name]
| msabramo/pycobertura | tests/test_cobertura.py | Python | mit | 7,957 |
import time
from usb_vendor import PIC_USB
import traceback
# Product IDs: Master PIC is 0x0004, Rocket PIC is 0x0005, Barge PIC is 0x0006
comms = PIC_USB(0x0005)
def main():
global comms  # rebind the module-level handle when reconnecting below
print("START")
loop_time = .2 # How often to run the main loop, in seconds
while True:
start_time = time.clock()
# print(chr(27) + "[2J")
# quad_info()
try:
# debug_uart_buffers()
# debug_uart_status()
rocket_info()
endstops()
# debug_oc_status()
except Exception, e:
print "Error occurred. {}".format(e)
traceback.print_exc()
print "Retrying..."
comms = PIC_USB(0x0005)
while (time.clock() - start_time) < loop_time:
pass
def rocket_info():
info = comms.get_rocket_info()
print "Rocket Tilt {} | Rocket Speed {} | Throttle {} | Motor Speed {} | Motor Thrust {} | Stepper Speed {} | Tilt Angle {} | Tilt Direction {} | Rocket State {}".format(
info["tilt"],
info["speed"],
info["throttle"],
info["motor_speed"],
info["motor_thrust"],
info["stepper_speed"],
info["tilt_ang"],
info["tilt_dir"],
info["rocket_state"],
)
def debug_uart_buffers():
info = comms.debug_uart_buffers()
rx = info["rx"]
tx = info["tx"]
print "TX_head {} | TX_tail {} | TX_count {} || RX_head {} | RX_tail {} | RX_count {}".format(
tx["head"],
tx["tail"],
tx["count"],
rx["head"],
rx["tail"],
rx["count"],
)
def debug_uart_status():
info = comms.debug_uart_status()
uart1 = info["uart1"]
uart2 = info["uart2"]
print "[UART1] URXDA: {} | OERR {} | FERR {} || PERR {} | RIDLE {} | ADDEN {}".format(
uart1["URXDA"],
uart1["OERR"],
uart1["FERR"],
uart1["PERR"],
uart1["RIDLE"],
uart1["ADDEN"]
)
print "[UART2] URXDA: {} | OERR {} | FERR {} || PERR {} | RIDLE {} | ADDEN {}".format(
uart2["URXDA"],
uart2["OERR"],
uart2["FERR"],
uart2["PERR"],
uart2["RIDLE"],
uart2["ADDEN"]
)
def debug_oc_status():
info = comms.debug_oc_status()
print "DC_OCM0 {} | DC_OCM1 {} | DC_OCM2 {} | DC_OCTSEL {} | DC_OCFLT {}".format(
info["DC_OCM0"],
info["DC_OCM1"],
info["DC_CM2"],
info["DC_OCTSEL"],
info["DC_OCFLT"],
)
print "ST_OCM0 {} | ST_OCM1 {} | ST_OCM2 {} | ST_OCTSEL {} | ST_OCFLT {}".format(
info["ST_OCM0"],
info["ST_OCM1"],
info["ST_CM2"],
info["ST_OCTSEL"],
info["ST_OCFLT"]
)
def current_state():
info = comms.get_state()
print "Current State {}".format(
info["state"],
)
def quad_info():
info = comms.get_quad_info()
print "Quad Counter {} | Overflow {}".format(
info["counter"],
info["overflow"],
)
def endstops():
"""
Reads the system's endstops.
"""
info = comms.get_limit_sw_info()
print("Y_BOT {} | Y_TOP {} | X_L {} | X_R {} | BARGE {} ".format(
info["Y_BOT"],
info["Y_TOP"],
info["X_L"],
info["X_R"],
info["BARGE"])
)
if __name__ == '__main__':
main()
| RyanEggert/space-lander | rocket_control/printvals.py | Python | mit | 3,261 |
# coding: utf-8
# python 3.5
import Orange
from orangecontrib.associate.fpgrowth import *
import pandas as pd
import numpy as np
import sys
import os
from collections import defaultdict
from itertools import chain
from itertools import combinations
from itertools import compress
from itertools import product
from sklearn.metrics import accuracy_score
from multiprocessing import Pool
from multiprocessing import freeze_support
# Global Setting
DIR_UCI = '/mnt/data/uci'
# ------------------------------------------------------
# Rule Class
# ------------------------------------------------------
class Rule :
def __init__(self):
self.value = list()
self.consequent = list()
self.support = float()
self.conf = float()
def setValue(self, values) :
self.value = values
def setConsequent(self, consequents) :
self.consequent = consequents
def setSupport(self, supports) :
self.support = supports
def setConf(self, confidence) :
self.conf = confidence
def getValue(self) :
return(self.value)
def getConsequent(self) :
return(self.consequent)
def getSupport(self) :
return(self.support)
def getSupportD(self) :
return(self.support * len(self.value))
def getConf(self) :
return(self.conf)
def output(self) :
print("value:" + str(self.value))
print("consequent:" + str(self.consequent))
print("support:" + str(self.support))
print("conf:" + str(self.conf))
# ======================================================
# Given the rules, can the class be identified once p attribute values are known?
# ======================================================
def getPerIdentifiedClass(rules, p) :
attribute_values = [rule.getValue() for rule in rules]
attribute_values = list(chain.from_iterable(attribute_values))
attribute_values = list(set(attribute_values))
combi_attribute_values = combinations(attribute_values,p)
count = 0
bunbo = 0
for combi in combi_attribute_values :
bunbo += 1
rules_target = []
for rule in rules :
matching_count = len(list(set(combi) & set(rule.getValue())))
if matching_count == len(list(combi)) :
rules_target.append(rule)
# if rules_target is empty, exclude this combination from the evaluation
if len(rules_target) == 0:
bunbo -= 1
#
else :
consequents = [rule.getConsequent() for rule in rules_target]
if len(list(set(consequents))) == 1:
count += 1
if bunbo == 0:
ans = 0
else:
ans = (float(count) / float(bunbo))
return(ans)
# ======================================================
# Whether the rule explains (covers) the target object
# ======================================================
def isExplainRule(obj, rule) :
matching_count = len(list(set(obj) & set(rule.getValue())))
if matching_count == len(rule.getValue()) : return(True)
else : return(False)
# ======================================================
# Matching factor of the rule against the target object
# ======================================================
def getMatchingFactor(obj, rule) :
matching_factor = len(list(set(obj) & set(rule.getValue())))
matching_factor = matching_factor / len(rule.getValue())
return(matching_factor)
# ======================================================
# Return the rule's support P
# ======================================================
def getSupportP(obj, rule) :
matching_factor = getMatchingFactor(obj, rule)
return(rule.getSupportD() * matching_factor)
# ======================================================
# Predict the target object's class from the rules
# ======================================================
def estimateClass(obj, rules) :
list_judge = [isExplainRule(obj, r) for r in rules]
# if at least one rule matches
if any(list_judge) :
consequents = [rules[i].getConsequent() for i, judge in enumerate(list_judge) if judge]
# if the matched rules all infer exactly one class
if len(set(consequents)) == 1 :
return(consequents[0])
else :
rules_match = list(compress(rules,list_judge))
supportD = [r.getSupportD() for r in rules_match]
return(rules_match[supportD.index(max(supportD))].getConsequent())
# if no rule fully matches obj, estimate from partially matching rules
else :
supportP = [getSupportP(obj, rule) for rule in rules]
return(rules[supportP.index(max(supportP))].getConsequent())
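# In short, the strategy above: if at least one rule fully matches the object,
# vote among the fully matching rules, breaking ties by supportD
# (support * rule length); otherwise fall back to the best partially
# matching rule by supportP.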
# ======================================================
# Accuracy evaluation with LERS
# ======================================================
def predictByLERS(FILENAME, iter1, iter2, rules) :
# read test data
filepath = DIR_UCI+'/'+FILENAME+'/alpha/'+FILENAME+'-test'+str(iter1)+'-'+str(iter2)+'.txt'
decision_table_test = pd.read_csv(filepath, delimiter=' ', header=None)
decision_table_test = decision_table_test.dropna()
decision_class = decision_table_test[decision_table_test.columns[-1]].values.tolist()
decision_table_test = decision_table_test.drop(decision_table_test.columns[len(decision_table_test.columns)-1], axis=1)
decision_table_test = decision_table_test.values.tolist()
# predict with LERS
predictions = []
for obj in decision_table_test:
estimated_class = estimateClass(obj, rules)
predictions.append(estimated_class)
# compute the accuracy
accuracy = accuracy_score(decision_class, predictions)
print(accuracy)
return(accuracy)
# =====================================
# Main function
# =====================================
def getRulesByFPGrowth(FILENAME, classes, iter1, iter2, minsup, minconf) :
# read data
filepath = DIR_UCI+'/'+FILENAME+'/alpha/'+FILENAME+'-train'+str(iter1)+'-'+str(iter2)+'.txt'
data_pd = pd.read_csv(filepath, delimiter=' ')
pd.DataFrame.to_csv(data_pd, DIR_UCI+'/'+FILENAME+'/alpha/'+FILENAME+'-train'+str(iter1)+'-'+str(iter2)+'.basket', index=False, sep=',')
filepath = DIR_UCI+'/'+FILENAME+'/alpha/'+FILENAME+'-train'+str(iter1)+'-'+str(iter2)+'.basket'
data_table = Orange.data.Table(filepath)
#print len(data_table)
# set parameter
num_lines = sum(1 for line in open(filepath))
minsup = float(minsup) / float(num_lines)
#
#itemsets = frequent_itemsets(data_table, minsup)
#print(itemsets)
#print(list(itemsets))
X, mapping = OneHot.encode(data_table, include_class=True)
#print(X)
itemsets = dict(frequent_itemsets(X, minsup))
#print(itemsets)
#print(len(itemsets))
rules = [(P, Q, supp, conf) for P, Q, supp, conf in association_rules(itemsets, minconf) if len(Q) == 1]
#print(rules)
names = {item: '{}={}'.format(var.name, val) for item, var, val in OneHot.decode(mapping, data_table, mapping)}
for ante, cons, supp, conf in rules:
print(', '.join(names[i] for i in ante), '-->', names[next(iter(cons))], '(supp: {}, conf: {})'.format(supp, conf))
# induce rules
#rules_orange = Orange.associate.AssociationRulesSparseInducer(data_table, support=minsup, confidence=minconf)
#rules_orange = Orange.associate.AssociationRulesSparseInducer(data_table, support = minsup, max_item_sets = 2000)
# convert Rule Class
#rules = []
#for rule_orange in rules_orange :
# consequent = rule_orange.right.get_metas(str).keys()
# if len(consequent) == 1 and consequent[0] in classes and rule_orange.confidence >= minconf :
# rule = Rule()
# rule.setValue(rule_orange.left.get_metas(str).keys())
# rule.setConsequent(consequent[0])
# rule.setSupport(rule_orange.support)
# rule.setConf(rule_orange.confidence)
# rules.append(rule)
# END
#return(rules)
# ======================================================
# Apriori_LERS
# ======================================================
def Apriori_LERS(FILENAME, classes, iter1, iter2, min_sup, min_conf):
# rule extraction
rules = getRulesByApriori(FILENAME, classes, iter1, iter2, min_sup, min_conf)
# predict by LERS
accuracy = predictByLERS(FILENAME, iter1, iter2, rules)
# save
savepath = DIR_UCI+'/'+FILENAME+'/Apriori_LERS.csv'
with open(savepath, "a") as f :
f.writelines('Apriori_LERS,{min_sup},{FILENAME},{iter1},{iter2},{acc}'.format(FILENAME=FILENAME,iter1=iter1,iter2=iter2,acc=accuracy,min_sup=min_sup)+"\n")
# END
return(accuracy)
def wrapper_Apriori_LERS(multi_args):
multi_args[0](multi_args[1],multi_args[2],multi_args[3],multi_args[4],multi_args[5],multi_args[6])
# ========================================
# compute the mean and standard deviation of a list
# ========================================
def getEvalMeanVar(results):
ans = '{mean}±{std}'.format(mean=('%.3f' % round(np.mean(results),3)), std=('%.3f' % round(np.std(results),3)))
return(ans)
# ========================================
# run evaluations in parallel (multiprocessing)
# ========================================
def multi_main(proc, FILENAME, FUN, **kargs):
pool = Pool(proc)
results = []
multiargs = []
classes = kargs['classes']
min_sup_range = kargs['min_sup'] if 'min_sup' in kargs else range(2,11)
min_conf = kargs['min_conf']
# for Apriori_LERS
if FUN == Apriori_LERS :
WRAPPER_FUN = wrapper_Apriori_LERS
for iter1, iter2, min_sup in product(range(1,11), range(1,11), min_sup_range):
multiargs.append((FUN, FILENAME, classes, iter1, iter2, min_sup, min_conf))
#print(multiargs)
results = pool.map(WRAPPER_FUN, multiargs)
else :
print("I dont' know the function.")
return(results)
# ========================================
# main
# ========================================
if __name__ == "__main__":
FILENAME = 'hayes-roth'
FILENAME = 'german_credit_categorical'
# class labels
classes = ['D1', 'D2', 'D3']
classes = ['D1', 'D2',]
iter1 = 10
iter2 = 3
# thresholds for support and confidence
#min_sup_range = range(2,11,1)
#min_sup_range = range(2,20,2)
min_sup = 100
min_conf = 1.0
# rule induction
getRulesByFPGrowth(FILENAME, classes, iter1, iter2, min_sup, min_conf)
#print len(rules)
#for r in rules:
# print(r.output())
# predict by LERS
#print(predictByLERS(FILENAME, iter1, iter2, rules))
# run in parallel and evaluate on the full data
#proc=32
#freeze_support()
#FUN = Apriori_LERS
#results = multi_main(proc, FILENAME, FUN, classes = classes, min_sup = min_sup_range, min_conf = min_conf)
| gingi99/research_dr | python/FPgrowth/orange_fpgrowth.py | Python | mit | 10,802 |
#!/usr/bin/env python
"""
pitchanalysis.py
--
Christopher Kuech
cjkuech@gmail.com
--
Requires:
Python 2.7
Instructions:
python pitchanalysis.py [wav-file-name]
"""
import matplotlib
from math import log
matplotlib.use("TkAgg")
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
import numpy as np
import pyaudio
import sys
from time import time, sleep
import Tkinter as tk
import wavelab
(WIDTH, HEIGHT) = (800, 500)
FNAME = './Bach.wav' if len(sys.argv) != 2 else sys.argv[1]
font = ('Helvetica', 14, 'bold')
CHUNK = 1024
def audioworker():
"""the function run on the audio thread"""
global frame
p = pyaudio.PyAudio()
stream = p.open(format=p.get_format_from_width(2),
channels=1, rate=4*44100, output=True)
# unknown why rate is off by 4x
while True:
stream.write(data[frame:frame+CHUNK].tostring())
frame = (frame + CHUNK) % len(wav)
stream.stop_stream()
stream.close()
p.terminate()
def graphicsworker():
"""the function run on the graphics thread"""
while True:
start = time()
p = ptype.get()
w = wsize.get()
wty = wtype.get()
# compute frequencies from clip
clip = data[frame:frame+w]
if wty == "hanning":
clip *= np.hanning(w)
elif wty == "hamming":
clip *= np.hamming(w)
freqs = wavelab.frequencies(clip)
# update plot
xs = np.sort(freqs.keys())
ys = np.array(map(freqs.get, xs))
axes.cla()
(xmax, ymin, ymax) = (10e4, 0.000001, 10e2)
# (xlim, ylim) = (_, (ymin, ymax)) = ((0, 1e4), (1e-3, 1e7))
axes.set_xscale("log")
axes.set_yscale("linear")
axes.set_xlim((1, xmax))
if p == "square":
# axes.set_yscale("linear")
axes.set_ylim((ymin**2, ymax**2))
ys = ys * ys
elif p == "dB":
# axes.set_yscale("log")
axes.set_ylim((log(ymin), log(ymax)))
ys = np.log(ys)
elif p == "-dB":
# axes.set_yscale("log")
axes.set_ylim((-log(ymax), -log(ymin)))
ys = -np.log(ys)
elif p == "linear":
# axes.set_yscale("linear")
axes.set_ylim((ymin, ymax))
axes.plot(xs, ys, 'r-')
canvas.show()
# pitch tracker
freq = max(freqs, key=lambda f: freqs[f])
pitch.set(wavelab.pitch(freq).replace('/','\n'))
# attempt to achieve 30fps animation (at best)
dt = time() - start
sleep(max(0, 1.0/30.0 - dt))
# read wave file
(framerate, wav) = wavelab.readwav(FNAME)
data = np.concatenate((wav, wav)) # avoid out of bounds
frame = 0
# create a GUI instance (do before any use of Tkinter)
root = tk.Tk()
root.wm_title("Frequency Spectrogram")
# these objects hold the variables from the widgets
wsize = tk.IntVar() # window size (in frames)
wsize.set(2205)
wtype = tk.StringVar() # type of windowing to use
wtype.set("rectangle")
ptype = tk.StringVar() # type of power to use
ptype.set("square")
pitch = tk.StringVar() # the current pitch
pitch.set("")
widgetps = lambda n, v: {'variable': v, 'text': n, 'value': n}
# returns the dict of kwargs that initialize a widget
# create the canvas widget and add it to the GUI
# canvas = tk.Canvas(root, borderwidth=0, width=WIDTH, height=HEIGHT, bg='#000')
# canvas.grid(row=0, column=0, columnspan=4)
# canvas.show()
canvasframe = tk.Frame(root, width=WIDTH, height=HEIGHT)
canvasframe.grid(row=0, column=0, columnspan=4)
figure = Figure()
axes = figure.add_axes( (0.1, 0.1, 0.8, 0.8), frameon=True,
xlabel="Frequency (Hz)", ylabel="Power")
canvas = FigureCanvasTkAgg(figure, canvasframe)
canvas.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=1)
canvas.show()
# create the wtype controller and add it to the GUI
tk.Label(root, font=font, text="Windowing").grid(row=1, column=0, pady=10)
wframe = tk.Frame(root)
wframe.grid(row=2, column=0, pady=10, sticky="n")
tk.Radiobutton(wframe, **widgetps("rectangle", wtype)).grid(sticky="w", row=0)
tk.Radiobutton(wframe, **widgetps("hamming" , wtype)).grid(sticky="w", row=1)
tk.Radiobutton(wframe, **widgetps("hanning" , wtype)).grid(sticky="w", row=2)
# create the wsize controller and add it to the GUI
tk.Label(root, font=font, text="Window Size").grid(row=1, column=1, pady=10)
tk.Scale(root, variable=wsize, orient=tk.HORIZONTAL, from_=10, to=4410).grid(row=2, column=1, sticky="wen")
# create the ptype controller and add it to the GUI
tk.Label(root, font=font, text="Power").grid(row=1, column=2, pady=10)
pframe = tk.Frame(root)
pframe.grid(row=2, column=2, pady=10, sticky="n")
tk.Radiobutton(pframe, **widgetps("square", ptype)).grid(sticky="w", row=0)
tk.Radiobutton(pframe, **widgetps("dB", ptype)).grid(sticky="w", row=1)
tk.Radiobutton(pframe, **widgetps("-dB", ptype)).grid(sticky="w", row=2)
tk.Radiobutton(pframe, **widgetps("linear", ptype)).grid(sticky="w", row=3)
# create the area where the pitchlabel is displayed
tk.Label(root, font=font, text="Pitch").grid(row=1, column=3, pady=10)
(fontfamily, fontsize, fontweight) = font
pitchfont = (fontfamily, 24, fontweight)
pitchlabel = tk.Label(root, font=pitchfont, textvariable=pitch, width=7).grid(row=2, column=3)
# start the other threads
wavelab.thread(audioworker)
wavelab.thread(graphicsworker)
# start the main update loop for the GUI (and block)
tk.mainloop()
| chriskuech/wavelab | pitchanalysis.py | Python | mit | 5,174 |
from distutils.core import setup
setup(
name='Bioloid',
version='0.1.0',
author='Dave Hylands',
author_email='dhylands@gmail.com',
packages=['bioloid'],
scripts=[],
url='http://pypi.python.org/pypi/Bioloid/',
license='LICENSE',
description='Provides access to bioloid devices.',
long_description=open('README.md').read(),
install_requires=[
'pyyaml',
'pyserial'
],
)
| dhylands/Bioloid | setup.py | Python | mit | 432 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'pxmarkerdialog.ui'
#
# Created by: PyQt5 UI code generator 5.8.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_PxMarkerDialog(object):
def setupUi(self, PxMarkerDialog):
PxMarkerDialog.setObjectName("PxMarkerDialog")
PxMarkerDialog.resize(400, 300)
self.btn_confirm_box = QtWidgets.QDialogButtonBox(PxMarkerDialog)
self.btn_confirm_box.setGeometry(QtCore.QRect(290, 20, 81, 241))
self.btn_confirm_box.setOrientation(QtCore.Qt.Vertical)
self.btn_confirm_box.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.btn_confirm_box.setObjectName("btn_confirm_box")
self.pxmarker_table_widget = QtWidgets.QTableWidget(PxMarkerDialog)
self.pxmarker_table_widget.setGeometry(QtCore.QRect(10, 20, 271, 261))
self.pxmarker_table_widget.setEditTriggers(QtWidgets.QAbstractItemView.DoubleClicked|QtWidgets.QAbstractItemView.EditKeyPressed)
self.pxmarker_table_widget.setObjectName("pxmarker_table_widget")
self.pxmarker_table_widget.setColumnCount(3)
self.pxmarker_table_widget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.pxmarker_table_widget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.pxmarker_table_widget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.pxmarker_table_widget.setHorizontalHeaderItem(2, item)
self.pxmarker_table_widget.horizontalHeader().setDefaultSectionSize(50)
self.pxmarker_table_widget.horizontalHeader().setMinimumSectionSize(40)
self.pxmarker_table_widget.horizontalHeader().setStretchLastSection(True)
self.pxmarker_table_widget.verticalHeader().setVisible(False)
self.pxmarker_table_widget.verticalHeader().setHighlightSections(False)
self.retranslateUi(PxMarkerDialog)
self.btn_confirm_box.accepted.connect(PxMarkerDialog.accept)
self.btn_confirm_box.rejected.connect(PxMarkerDialog.reject)
QtCore.QMetaObject.connectSlotsByName(PxMarkerDialog)
def retranslateUi(self, PxMarkerDialog):
_translate = QtCore.QCoreApplication.translate
PxMarkerDialog.setWindowTitle(_translate("PxMarkerDialog", "Edit Pixel Markers"))
item = self.pxmarker_table_widget.horizontalHeaderItem(0)
item.setText(_translate("PxMarkerDialog", "Class"))
item = self.pxmarker_table_widget.horizontalHeaderItem(1)
item.setText(_translate("PxMarkerDialog", "Color"))
item = self.pxmarker_table_widget.horizontalHeaderItem(2)
item.setText(_translate("PxMarkerDialog", "Feature"))
| mohikhsan/px-labeler | px-labeler/pxgui/ui_pxmarkerdialog.py | Python | mit | 2,812 |
import sys
import os
import files.io
from StringIO import StringIO
from constants import *
added_path = os.path.join(os.path.dirname(os.path.abspath(os.path.dirname(__file__))), "script")
sys.path.append(added_path)
import script
import mess
sys.path.pop()
def full_file(pointer):
pointer.seek(0)
return pointer.read()
def send_confirmation_email(recipient, contents):
connection = script.get_mailserver_connection()
email_headers = {
"Subject": CONFIRMATION_EMAIL_SUBJECT,
"To": recipient,
"From": CONFIRMATION_EMAIL_FROM
}
mess.Message(email_headers, contents).send(connection)
connection.quit()
def run_on(key, actually_send=False, confirmation_email=CONFIRMATION_EMAIL_FROM, confirmation_email_on_success=True, confirmation_email_on_fail=True, special_text="", send_default_confirmation=False):
input_files = files.io.get(key)
assert "template" in input_files, "Need to submit non-empty template"
assert "spreadsheet" in input_files, "Need to submit non-empty spreadsheet"
output_location = StringIO()
kwargs = {}
if send_default_confirmation:
pass
else:
kwargs["send_confirmation_to"] = None
if "constants" in input_files:
kwargs["constants"] = StringIO(input_files["constants"])
success = script.run_script(
StringIO(input_files["template"]),
StringIO(input_files["spreadsheet"]),
output=output_location,
actually_send=actually_send,
**kwargs
)
script_results = special_text + full_file(output_location)
if confirmation_email and ((confirmation_email_on_success and success) or (confirmation_email_on_fail and not success)):
send_confirmation_email(confirmation_email, script_results)
return script_results, success
| wnavarre/email-dictator | web/run_on.py | Python | mit | 1,802 |
# -*- coding:utf-8 -*-
# use UTF-8
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
import sys
from heap import Heap
class Rectangle(object):
"""docstring for Rectangle"""
def __init__(self, dimension, entry=None):
super(Rectangle, self).__init__()
self.dimension = dimension
self.min_dim = [None for _ in xrange(dimension)]
self.max_dim = [None for _ in xrange(dimension)]
if entry is not None:
for ipos in xrange(self.dimension):
self.min_dim[ipos] = entry[ipos]
self.max_dim[ipos] = entry[ipos]
def resize(self, rects):
"""
Recompute this Rectangle's MBR (Minimal Bounding Rect)
from the given list of child Rectangles.
"""
for ipos in xrange(self.dimension):
self.min_dim[ipos] = min(map(lambda x: x.min_dim[ipos], rects))
self.max_dim[ipos] = max(map(lambda x: x.max_dim[ipos], rects))
def resize2(self, entry):
"""
Recompute this Rectangle's MBR (Minimal Bounding Rect)
from the given entry.
An entry holds all dimension values of one data record.
"""
for ipos in xrange(self.dimension):
if entry[ipos] < self.min_dim[ipos]:
self.min_dim[ipos] = entry[ipos]
elif entry[ipos] > self.max_dim[ipos]:
self.max_dim[ipos] = entry[ipos]
def expand_area(self, entry):
new_area = 1.0
curr_area = 1.0
for ipos in xrange(self.dimension):
max_value = self.max_dim[ipos]
min_value = self.min_dim[ipos]
try:
curr_area *= (max_value - min_value)
except TypeError as e:
# Rectangle not fully initialized yet
return -1
if entry[ipos] > self.max_dim[ipos]:
max_value = entry[ipos]
elif entry[ipos] < self.min_dim[ipos]:
min_value = entry[ipos]
try:
new_area *= (max_value - min_value)
except TypeError as e:
# Rectangle not fully initialized yet
return -1
return new_area - curr_area
def overlap_area(self, rect):
area = 1.0
for ipos in xrange(self.dimension):
try:
if self.max_dim[ipos] < rect.max_dim[ipos]:
factor = self.max_dim[ipos] - rect.min_dim[ipos]
else:
factor = rect.max_dim[ipos] - self.min_dim[ipos]
except TypeError as e:
# Rectangle not fully initialized yet
return -1
if factor < 0:
return 0.0
area *= factor
return area
def __contains__(self, rect):
for ipos in xrange(self.dimension):
if self.max_dim[ipos] < rect.min_dim[ipos]:
return False
if self.min_dim[ipos] > rect.max_dim[ipos]:
return False
return True
def __str__(self):
return "Min:{0}, Max:{1}".format(
self.min_dim, self.max_dim)
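# Quick sanity sketch for Rectangle (2-D, made-up points):
#
#   r = Rectangle(2, entry=(0, 0))
#   r.resize2((4, 3))        # MBR grows to min=[0, 0], max=[4, 3]
#   r.expand_area((5, 1))    # extra area needed to also cover (5, 1) -> 3.0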
class RNode(object):
def __init__(self, degree, dimension):
super(RNode, self).__init__()
self.num = 0
self.isleaf = True
self.degree = degree
self.dimension = dimension
if dimension < 2:
raise Exception("请使用 B/B+树 代替")
if dimension > 6:
print "WARNING:R树推荐维度为 [2,6]"
self.mbr = Rectangle(self.dimension)
self.threshold = degree*2
self.rects = [None for _ in xrange(self.threshold)]
self.pnodes = [None for _ in xrange(self.threshold)]
def adjust(self):
self.mbr = Rectangle(self.dimension)
self.mbr.resize(self.rects[:self.num])
def involve(self, entry):
self.mbr.resize2(entry)
def pointer(self):
return self
def most_overlap_pos(self, ipos):
"""
Find the position in self.pnodes whose node overlaps self.pnodes[ipos] the most.
"""
child = self.pnodes[ipos]
ichild_pos, max_overlap, max_overlap_pos = 0, -1, 0
while ichild_pos < self.num:
if ipos == ichild_pos:
ichild_pos += 1  # skip self without stalling the loop
continue
overlap = child.mbr.overlap_area(self.pnodes[ichild_pos].mbr)  # compare MBRs, not nodes
if max_overlap < overlap:
max_overlap = overlap
max_overlap_pos = ichild_pos
ichild_pos += 1
return max_overlap_pos
class DataNode(object):
"""docstring for DataNode"""
def __init__(self, dimension, max_length=10):  # dimension added: the original read self.dimension without ever setting it
super(DataNode, self).__init__()
self.num = 0
self.data = None
self.dimension = dimension
self.max_length = max_length
base, mode = divmod(self.max_length, 2)
if mode > 0:
base += 1
self.min_length = base
self.mbr = Rectangle(self.dimension)
class RTree(object):
"""docstring for RTree"""
def __init__(self, degree, dimension):
super(RTree, self).__init__()
self.degree = degree
self.dimension = dimension
self.threshold = degree*2
self.root = self.allocate_namenode()
def allocate_namenode(self):
raise NotImplementedError()
def deallocate_namenode(self, node):
raise NotImplementedError()
def allocate_datanode(self):
raise NotImplementedError()
def deallocate_datanode(self, node):
raise NotImplementedError()
def save_docs(self, metanode):
raise NotImplementedError()
def load_docs(self, metanode, ipos):
raise NotImplementedError()
def search(self, rect, node=None):
if node is None:
node = self.root
indexes = []
ipos = node.num-1
while ipos >= 0:
if rect in node.rects[ipos]:
indexes.append(ipos)
ipos -= 1
if len(indexes) == 0:
return []
if node.isleaf is True:
return map(lambda x: self.load_docs(node.pnodes[x]), indexes)
results = []
for ipos in indexes:
results.extend(self.search(rect, node.pnodes[ipos]))
return results
def split(self, parent, ipos, node):
"""
Entries inside an R-tree node are unordered, so to reduce the cost of
moving data, one of the two nodes produced by a split stays at the old
node's position and the other goes to the end.
Current simple split algorithm:
pick the first rect as the core rect of the old node;
compute the overlap of the old core rect with every other rect;
take the rect with the lowest overlap as the core rect of the new node;
compute the overlap of the new core rect with every other rect;
compare each non-core rect's overlap with the two cores and
move the degree-1 nodes that overlap the new core more into the new node.
"""
if parent.isleaf is False:
new_node = self.allocate_namenode()
new_node.isleaf = node.isleaf
ancor = node.rects[0]
heap = Heap(node.pnodes, reverse=True,
key=lambda x: ancor.overlap_area(x.mbr))
ipos = 0
while ipos < node.degree:
new_node.pnodes[ipos] = heap.pop()
new_node.rects[ipos] = new_node.pnodes[ipos].mbr
ipos += 1
new_node.num = node.degree
new_node.adjust()
ipos = 0
length = len(heap)
while ipos < length:
node.pnodes[ipos] = heap.heap[ipos]
node.pnodes[ipos].adjust()
node.rects[ipos] = heap.heap[ipos].mbr
ipos += 1
node.num = length
node.adjust()
parent.pnodes[parent.num] = new_node.pointer()  # append after the last child; num-1 would clobber it
parent.rects[parent.num] = new_node.mbr
parent.num += 1
return None
new_node = node.split()
parent.pnodes[parent.num] = new_node.pointer()  # append after the last child; num-1 would clobber it
parent.rects[parent.num] = new_node.mbr
parent.num += 1
return None
def insert(self, entry, doc):
"""
entry is an array of length self.dimension;
every dimension of entry must be numeric.
"""
if self.root.num != self.threshold:
return self.insert_nonfull(self.root, entry, doc)
old_root = self.root
new_root = self.allocate_namenode()
new_root.isleaf = False
new_root.pnodes[0] = old_root.pointer()
new_root.rects[0] = old_root.mbr
new_root.num += 1
self.root = new_root
self.split(new_root, 0, old_root)
return self.insert_nonfull(new_root, entry, doc)
def insert_nonfull(self, node, entry, doc):
ipos = 0
min_expand = sys.maxint
min_expand_pos = 0
while ipos < node.num:
expand_area = node.pnodes[ipos].mbr.expand_area(entry)
if min_expand > expand_area:
min_expand = expand_area
min_expand_pos = ipos
ipos += 1
ipos = min_expand_pos
node.involve(entry)
if node.isleaf is True:
datanode = node.pnodes[ipos]
if datanode is None:
datanode = self.allocate_datanode()
node.pnodes[ipos] = datanode
node.num += 1
# no need to link the DataNode list here; this runs only once, at initialization
if datanode.isfull() is True:
self.split(node, ipos, datanode)
if node.pnodes[ipos].mbr.expand_area(entry) < \
node.pnodes[ipos+1].mbr.expand_area(entry):
ipos += 1
datanode = node.pnodes[ipos]
datanode.insert(entry, doc)
node.rects[ipos] = datanode.mbr
return None
child = node.pnodes[ipos]
if child.num == self.threshold:
self.split(node, ipos, child)
if node.pnodes[ipos].mbr.expand_area(entry) < \
node.pnodes[ipos+1].mbr.expand_area(entry):
child = node.pnodes[ipos+1]
return self.insert_nonfull(child, entry, doc)
def merge(self, node, ipos):
"""
Merge the child at position ipos of this node with the sibling that overlaps it the most.
"""
child = node.pnodes[ipos]
# find the sibling of child in node with the largest overlap area
max_overlap_pos = node.most_overlap_pos(ipos)
mchild = node.pnodes[max_overlap_pos]
if node.isleaf is True:
child.merge(mchild)
self.deallocate_datanode(mchild)
else:
impos = 0
while impos < mchild.num:
child.rects[child.num+impos] = mchild.rects[impos]
child.pnodes[child.num+impos] = mchild.pnodes[impos]
impos += 1
child.num += mchild.num
child.adjust()
self.deallocate_namenode(mchild)
node.rects[max_overlap_pos] = node.rects[node.num-1]
node.pnodes[max_overlap_pos] = node.pnodes[node.num-1]
node.num -= 1
        # node's mbr is unchanged, so there is no need to call adjust()
return ipos
def guarantee(self, node, ipos):
"""
确保 node.pnodes[ipos] 拥有至少 t 个孩子
注意: node 一定是非叶子节点
"""
child = node.pnodes[ipos]
if child.num > self.degree:
return ipos
        # Find the sibling of child within node with the largest overlap area
ichild_pos, max_overlap, max_overlap_pos = 0, -1, -1
        while ichild_pos < node.num:
            if ipos == ichild_pos:
                ichild_pos += 1
                continue
            candidate = node.pnodes[ichild_pos]
            if candidate.num <= self.degree:
                ichild_pos += 1
                continue
            overlap = child.overlap_area(candidate.mbr)
            if max_overlap < overlap:
                max_overlap = overlap
                max_overlap_pos = ichild_pos
            ichild_pos += 1
        if max_overlap_pos >= 0:
mchild = node.pnodes[max_overlap_pos]
            # Find the entry in mchild that overlaps child the most and move
            # it into child.pnodes
ichild_pos, max_overlap, max_overlap_pos = 0, -1, 0
while ichild_pos < mchild.num:
overlap = child.overlap_area(mchild.pnodes[ichild_pos].mbr)
if max_overlap < overlap:
max_overlap = overlap
max_overlap_pos = ichild_pos
ichild_pos += 1
child.pnodes[child.num] = mchild.pnodes[max_overlap_pos]
child.rects[child.num] = mchild.rects[max_overlap_pos]
child.num += 1
child.adjust()
impos = max_overlap_pos
while impos < mchild.num-1:
mchild.rects[impos] = mchild.rects[impos+1]
mchild.pnodes[impos] = mchild.pnodes[impos+1]
impos += 1
mchild.num -= 1
mchild.adjust()
return ipos
return self.merge(node, ipos)
    def remove_key(self, node, entry):
        ipos = 0
        indexes = []
        while ipos < node.num:
            expand_area = node.pnodes[ipos].mbr.expand_area(entry)
            if expand_area == 0:
                indexes.append(ipos)
            ipos += 1
        if len(indexes) == 0:
            return None
        if node.isleaf is False:
            # Rebalance the first matching child before descending into it;
            # guarantee() returns the (possibly shifted) child position.
            icpos = self.guarantee(node, indexes[0])
            self.remove_key(node.pnodes[icpos], entry)
            return None
        # TODO: delete the entry from the matching DataNodes; the leaf case
        # is not implemented yet. (The block that used to follow here was a
        # verbatim copy of search() referencing an undefined `rect`.)
        return None
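# Hedged illustration (not part of the original module): a self-contained
# sketch of the split heuristic described in split()'s docstring, using plain
# (x1, y1, x2, y2) tuples instead of this module's Rect/Heap classes.
def _overlap_area(a, b):
    """Overlap area of two axis-aligned rects given as (x1, y1, x2, y2)."""
    width = min(a[2], b[2]) - max(a[0], b[0])
    height = min(a[3], b[3]) - max(a[1], b[1])
    return width * height if width > 0 and height > 0 else 0
def _demo_split_heuristic(rects):
    """Partition rects into (old_group, new_group) as split() describes."""
    old_core = rects[0]
    # The rect overlapping the old core the least becomes the new core.
    new_core = min(rects[1:], key=lambda r: _overlap_area(old_core, r))
    old_group, new_group = [old_core], [new_core]
    for r in rects[1:]:
        if r is new_core:
            continue
        # Every other rect joins whichever core it overlaps more.
        if _overlap_area(new_core, r) > _overlap_area(old_core, r):
            new_group.append(r)
        else:
            old_group.append(r)
    return old_group, new_group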
|
zegra1989/pytree
|
rtree.py
|
Python
|
mit
| 14,094
|
# -*- coding:utf-8 -*-
from ...errors.httpbadrequestexception import HttpBadRequestException
import saklient
# module saklient.cloud.errors.dnsaaaarecordnotfoundexception
class DnsAaaaRecordNotFoundException(HttpBadRequestException):
    ## Invalid request. No matching AAAA record was found.
## @param {int} status
# @param {str} code=None
# @param {str} message=""
def __init__(self, status, code=None, message=""):
super(DnsAaaaRecordNotFoundException, self).__init__(status, code, "不適切な要求です。対応するAAAAレコードが見つかりません。" if message is None or message == "" else message)
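# Hedged usage sketch (not part of the original module): raising and catching
# this error. The constructor signature follows __init__ above; the code
# string passed here is made up for illustration.
def _demo_raise():
    try:
        raise DnsAaaaRecordNotFoundException(400, "dns_aaaa_record_not_found")
    except HttpBadRequestException:
        pass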
|
hnakamur/saklient.python
|
saklient/cloud/errors/dnsaaaarecordnotfoundexception.py
|
Python
|
mit
| 688
|
"""Production settings and globals."""
from base import *
########## HOST CONFIGURATION
# See: https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production
MAIN_HOST = ['openbilanci.staging.deppsviluppo.org',]
# Allowed hosts expansion: needed for the per-municipality services (servizi ai Comuni)
HOSTS_COMUNI = [
'novara.comuni.deppsviluppo.org',
'rapallo.comuni.deppsviluppo.org',
'castiglionedellestiviere.comuni.deppsviluppo.org',
'firenze.comuni.deppsviluppo.org',
'terni.comuni.deppsviluppo.org'
]
ALLOWED_HOSTS += MAIN_HOST + HOSTS_COMUNI
########## END HOST CONFIGURATION
########## EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
########## END EMAIL CONFIGURATION
########## TOOLBAR CONFIGURATION
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INSTALLED_APPS += (
'debug_toolbar',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INTERNAL_IPS = ('176.31.74.29',)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
MIDDLEWARE_CLASSES = (
'debug_toolbar.middleware.DebugToolbarMiddleware',
) + MIDDLEWARE_CLASSES
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
def show_toolbar(request):
print("IP Address for debug-toolbar: " + request.META['REMOTE_ADDR'])
return True
SHOW_TOOLBAR_CALLBACK = show_toolbar
DEBUG_TOOLBAR_PATCH_SETTINGS = False
########## END TOOLBAR CONFIGURATION
BILANCI_PATH = "/home/open_bilanci/dati/bilanci_subset"
OUTPUT_FOLDER = '../scraper_project/scraper/output/'
LISTA_COMUNI = 'listacomuni.csv'
LISTA_COMUNI_PATH = OUTPUT_FOLDER + LISTA_COMUNI
PATH_PREVENTIVI = BILANCI_PATH+"/%s/%s/Preventivo/%s.html"
PATH_CONSUNTIVI = BILANCI_PATH+"/%s/%s/Consuntivo/%s.html"
BILANCI_RAW_DB = 'bilanci_raw'
|
DeppSRL/open_bilanci
|
bilanci_project/bilanci/settings/staging.py
|
Python
|
mit
| 1,991
|
#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "web_sheets_django.settings.local")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
chuck1/web_sheets
|
old/web_sheets_django/manage.py
|
Python
|
mit
| 822
|
import os
from os import path
import sqlite3
from .link import Link
class Links(object):
def __init__(self):
self._links = {}
def add(self, link):
self._links[link.url] = link
def find_by_tag(self, tag):
return [link for link in self._links.values() if tag in link.tags]
def get_all(self):
return self._links.values()
def remove(self, link):
del self._links[link.url]
def find_by_url(self, url):
return self._links[url]
class SqliteLinks(object):
def __init__(self, table_gateways):
self._links_table = table_gateways['links']
self._tags_table = table_gateways['tags']
def add(self, link):
self._links_table.save(link.url, link.date)
self._tags_table.reset_tags(link.url, link.tags)
def find_by_tag(self, tag):
found = []
for url in self._tags_table.get_urls_of_links_with_tag(tag):
date = self._links_table.get_date(url)
tags = self._tags_table.get_tags(url)
found.append(Link(url, tags, date))
return found
def get_all(self):
all_links = []
for url, date in self._links_table.get_all():
tags = self._tags_table.get_tags(url)
all_links.append(Link(url, tags, date))
return all_links
def remove(self, link):
self._tags_table.remove_tags(link.url)
self._links_table.remove_url_and_date(link.url)
def find_by_url(self, url):
date = self._links_table.get_date(url)
tags = self._tags_table.get_tags(url)
return Link(url, tags, date)
class SqliteTable(object):
def __init__(self, sqlite_connection):
self._connection = sqlite_connection
self._set_up()
def _set_up(self):
with self._connection as connection:
connection.execute(self.SQL_COMMAND_FOR_TABLE_CREATION)
class LinksTable(SqliteTable):
SQL_COMMAND_FOR_TABLE_CREATION = '''
create table if not exists links(
url
primary key
not null,
date_saved
not null
)
'''
def get_all(self):
with self._connection as connection:
return connection.execute('select url, date_saved from links').fetchall()
def save(self, url, date):
with self._connection as connection:
connection.execute(
'insert or ignore into links(url, date_saved) values(?, ?)',
(url, date)
)
def get_date(self, url):
with self._connection as connection:
row = connection.execute(
'select date_saved from links where url = ?',
(url,)
).fetchone()
date = row[0]
return date
def remove_url_and_date(self, url):
with self._connection as connection:
connection.execute('delete from links where url = ?', (url,))
class TagsTable(SqliteTable):
SQL_COMMAND_FOR_TABLE_CREATION = '''
create table if not exists tags(
url
not null,
name
not null,
foreign key(url) references links(url)
on delete restrict
on update restrict
)
'''
def get_urls_of_links_with_tag(self, tag):
with self._connection as connection:
list_of_rows = connection.execute(
'select url from tags where name = ?',
(tag,)
).fetchall()
return tuple(url for (url,) in list_of_rows)
def get_tags(self, url):
with self._connection as connection:
list_of_rows = connection.execute(
'select name from tags where url = ?',
(url,)
).fetchall()
return tuple(tag for (tag,) in list_of_rows)
def reset_tags(self, url, tags):
self.remove_tags(url)
self.add_tags(url, tags)
def remove_tags(self, url):
with self._connection as connection:
connection.execute('delete from tags where url = ?', (url,))
def add_tags(self, url, tags):
with self._connection as connection:
connection.executemany(
'insert into tags(url, name) values(?, ?)',
[(url, tag) for tag in tags]
)
class SqliteConnectionFactory(object):
@staticmethod
def create_autoclosing_on_disk():
return AutoclosingSqliteConnection()
@classmethod
def create_in_memory(cls):
connection_to_in_memory_database = sqlite3.connect(':memory:')
cls._enable_enforcement_of_foreign_key_constraints(connection_to_in_memory_database)
return connection_to_in_memory_database
@staticmethod
def _enable_enforcement_of_foreign_key_constraints(sqlite_connection):
sqlite_connection.execute('pragma foreign_keys = on')
@classmethod
def create_on_disk(cls, data_directory):
connection_to_on_disk_database = sqlite3.connect(data_directory.path_to_database_file)
cls._enable_enforcement_of_foreign_key_constraints(connection_to_on_disk_database)
return connection_to_on_disk_database
class AutoclosingSqliteConnection(object):
def __init__(self, provider_of_sqlite_connection=None):
self._provider_of_sqlite_connection = provider_of_sqlite_connection if provider_of_sqlite_connection is not None \
else ProviderOfConnectionToOnDiskSqliteDatabase()
def __enter__(self):
self._current_connection = self._provider_of_sqlite_connection.get()
self._current_connection.__enter__()
return self._current_connection
def __exit__(self, type_, value, traceback):
self._current_connection.__exit__(type_, value, traceback)
self._current_connection.close()
return False
class ProviderOfConnectionToOnDiskSqliteDatabase(object):
def __init__(self):
self._directory = ApplicationDataDirectory()
def get(self):
return SqliteConnectionFactory.create_on_disk(self._directory)
class ApplicationDataDirectory(object):
@property
def path(self):
return path.expanduser('~/.linkstore/')
@property
def name_of_database_file(self):
return 'linkstore.sqlite'
@property
def path_to_database_file(self):
self._ensure_data_directory_exists()
return path.join(self.path, self.name_of_database_file)
def _ensure_data_directory_exists(self):
if path.exists(self.path):
return
os.mkdir(self.path)
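# Hedged usage sketch (not part of the original module): wiring the gateways
# together against an in-memory database. Link(url, tags, date) matches the
# constructor used in find_by_tag() above, and add() relies on its .url, .tags
# and .date attributes; the sample URL and tags are made up.
def _demo_in_memory_usage():
    connection = SqliteConnectionFactory.create_in_memory()
    links = SqliteLinks({
        'links': LinksTable(connection),
        'tags': TagsTable(connection),
    })
    links.add(Link('https://example.com', ('python', 'sqlite'), '2016-01-01'))
    found = links.find_by_tag('python')
    assert [link.url for link in found] == ['https://example.com']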
|
angelsanz/linkstore
|
linkstore/links.py
|
Python
|
mit
| 6,633
|
"""
Django settings for dts_test_project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TENANT_APPS_DIR = os.path.join(BASE_DIR, os.pardir)
sys.path.insert(0, TENANT_APPS_DIR)
sys.path.insert(0, BASE_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'cl1)b#c&xmm36z3e(quna-vb@ab#&gpjtdjtpyzh!qn%bc^xxn'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
SHARED_APPS = (
'django_tenants', # mandatory
'customers', # you must list the app where your tenant model resides in
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
TENANT_APPS = (
'dts_test_app',
)
TENANT_MODEL = "customers.Client" # app.Model
TENANT_DOMAIN_MODEL = "customers.Domain" # app.Model
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
INSTALLED_APPS = list(SHARED_APPS) + [app for app in TENANT_APPS if app not in SHARED_APPS]
ROOT_URLCONF = 'dts_test_project.urls'
WSGI_APPLICATION = 'dts_test_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django_tenants.postgresql_backend',
'NAME': 'dts_test_project',
'USER': 'postgres',
'PASSWORD': os.environ.get('DATABASE_PASSWORD', 'root'),
'HOST': os.environ.get('DATABASE_HOST', 'localhost'),
'PORT': '',
}
}
DATABASE_ROUTERS = (
'django_tenants.routers.TenantSyncRouter',
)
MIDDLEWARE = (
'tenant_tutorial.middleware.TenantTutorialMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.contrib.messages.context_processors.messages',
)
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
|
sigma-geosistemas/django-tenants
|
dts_test_project/dts_test_project/settings.py
|
Python
|
mit
| 3,048
|
#!/usr/bin/env python
"""
Jedi EPC server.
Copyright (C) 2012 Takafumi Arakaki
Author: Takafumi Arakaki <aka.tkf at gmail.com>
This file is NOT part of GNU Emacs.
Jedi EPC server is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Jedi EPC server is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Jedi EPC server.
If not, see <http://www.gnu.org/licenses/>.
"""
import argparse
import glob
import itertools
import logging
import logging.handlers
import os
import re
import site
import sys
from collections import namedtuple
import jedi
import jedi.api
import epc
import epc.server
import sexpdata
logger = logging.getLogger('jediepcserver')
parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter,
description=__doc__)
parser.add_argument(
'--address', default='localhost')
parser.add_argument(
'--port', default=0, type=int)
parser.add_argument(
'--port-file', '-f', default='-', type=argparse.FileType('wt'),
help='file to write port on. default is stdout.')
parser.add_argument(
'--sys-path', '-p', default=[], action='append',
help='paths to be inserted at the top of `sys.path`.')
parser.add_argument(
'--sys-path-append', default=[], action='append',
help='paths to be appended at the end of `sys.path`.')
parser.add_argument(
'--virtual-env', '-v', default=[], action='append',
help='paths to be used as if VIRTUAL_ENV is set to it.')
parser.add_argument(
'--log', help='Save server log to this file.')
parser.add_argument(
'--log-level',
choices=['CRITICAL', 'ERROR', 'WARN', 'INFO', 'DEBUG'],
help='Logging level for log file.')
parser.add_argument(
'--log-rotate-max-size', default=0, type=int,
help='Rotate log file after it reaches this size',
)
parser.add_argument(
'--log-rotate-max-count', default=3, type=int,
help='Max number of log rotations before removal',
)
parser.add_argument(
'--log-traceback', action='store_true', default=False,
help='Include traceback in logging output.')
parser.add_argument(
'--pdb', dest='debugger', const='pdb', action='store_const',
help='start pdb when error occurs.')
parser.add_argument(
'--ipdb', dest='debugger', const='ipdb', action='store_const',
help='start ipdb when error occurs.')
PY3 = (sys.version_info[0] >= 3)
NEED_ENCODE = not PY3
LogSettings = namedtuple(
'LogSettings',
[
'log_file',
'log_level',
'log_rotate_max_size',
'log_rotate_max_count',
],
)
try:
jedi.create_environment
except AttributeError:
jedi_create_environment = None
else:
_cached_jedi_environments = {}
def jedi_create_environment(venv, safe=False):
"""Cache jedi environments to avoid startup cost."""
try:
return _cached_jedi_environments[venv]
except KeyError:
logger.info('Creating jedi environment: %s', venv)
if venv is None:
jedienv = jedi.api.environment.get_default_environment()
else:
jedienv = jedi.create_environment(venv, safe=safe)
_cached_jedi_environments[venv] = jedienv
return jedienv
def get_venv_sys_path(venv):
if jedi_create_environment is not None:
return jedi_create_environment(venv).get_sys_path()
from jedi.evaluate.sys_path import get_venv_path
return get_venv_path(venv)
class JediEPCHandler(object):
def __init__(self, sys_path=(), virtual_envs=(), sys_path_append=()):
self.script_kwargs = self._get_script_path_kwargs(
sys_path=sys_path,
virtual_envs=virtual_envs,
sys_path_append=sys_path_append,
)
def get_sys_path(self):
environment = self.script_kwargs.get('environment')
if environment is not None:
return environment.get_sys_path()
sys_path = self.script_kwargs.get('sys_path')
if sys_path is not None:
return sys_path
return sys.path
@classmethod
def _get_script_path_kwargs(cls, sys_path, virtual_envs, sys_path_append):
result = {}
if jedi_create_environment:
# Need to specify some environment explicitly to workaround
# https://github.com/davidhalter/jedi/issues/1242. Otherwise jedi
# will create a lot of child processes.
if virtual_envs:
primary_env, virtual_envs = virtual_envs[0], virtual_envs[1:]
primary_env = path_expand_vars_and_user(primary_env)
else:
primary_env = None
try:
result['environment'] = jedi_create_environment(primary_env)
except Exception:
logger.warning(
'Cannot create environment for %r', primary_env, exc_info=1
)
if primary_env is not None:
result['environment'] = jedi_create_environment(None)
if not sys_path and not virtual_envs and not sys_path_append:
# No additional path customizations.
return result
# Either multiple environments or custom sys_path extensions are
# specified, or jedi version doesn't support environments.
final_sys_path = []
final_sys_path.extend(path_expand_vars_and_user(p) for p in sys_path)
for p in virtual_envs:
final_sys_path.extend(get_venv_sys_path(path_expand_vars_and_user(p)))
final_sys_path.extend(
path_expand_vars_and_user(p) for p in sys_path_append
)
dupes = set()
def not_seen_yet(val):
if val in dupes:
return False
dupes.add(val)
return True
result['sys_path'] = [p for p in final_sys_path if not_seen_yet(p)]
return result
def jedi_script(self, source, line, column, source_path):
if NEED_ENCODE:
source = source.encode('utf-8')
source_path = source_path and source_path.encode('utf-8')
return jedi.Script(
source, line, column, source_path or '', **self.script_kwargs
)
def complete(self, *args):
def _wrap_completion_result(comp):
try:
docstr = comp.docstring()
except Exception:
logger.warning(
"Cannot get docstring for completion %s", comp, exc_info=1
)
docstr = ""
return dict(
word=comp.name,
doc=docstr,
description=candidates_description(comp),
symbol=candidate_symbol(comp),
)
return [
_wrap_completion_result(comp)
for comp in self.jedi_script(*args).completions()
]
def get_in_function_call(self, *args):
sig = self.jedi_script(*args).call_signatures()
call_def = sig[0] if sig else None
if not call_def:
return []
return dict(
            # p.description should do the job, but jedi-vim uses replace,
            # so follow what jedi-vim does...
params=[PARAM_PREFIX_RE.sub('', p.description).replace('\n', '')
for p in call_def.params],
index=call_def.index,
call_name=call_def.name,
)
def _goto(self, method, *args):
"""
Helper function for `goto_assignments` and `usages`.
:arg method: `jedi.Script.goto_assignments` or `jedi.Script.usages`
:arg args: Arguments to `jedi_script`
"""
        # `definitions` is a list. Each element is an instance of a
        # `jedi.api_classes.BaseOutput` subclass, i.e.,
        # `jedi.api_classes.RelatedName` or `jedi.api_classes.Definition`.
definitions = method(self.jedi_script(*args))
return [dict(
column=d.column,
line_nr=d.line,
module_path=d.module_path if d.module_path != '__builtin__' else [],
module_name=d.module_name,
description=d.description,
) for d in definitions]
def goto(self, *args):
return self._goto(jedi.Script.goto_assignments, *args)
def related_names(self, *args):
return self._goto(jedi.Script.usages, *args)
def get_definition(self, *args):
definitions = self.jedi_script(*args).goto_definitions()
return [definition_to_dict(d) for d in definitions]
def defined_names(self, *args):
        # XXX: there's a bug in Jedi that returns definitions from inside
        # classes or functions even though all_scopes=False is set by
        # default. Hence some additional filtering is in order.
#
# See https://github.com/davidhalter/jedi/issues/1202
top_level_names = [
defn
for defn in jedi.api.names(*args)
if defn.parent().type == 'module'
]
return list(map(get_names_recursively, top_level_names))
def get_jedi_version(self):
return [dict(
name=module.__name__,
file=getattr(module, '__file__', []),
version=get_module_version(module) or [],
) for module in [sys, jedi, epc, sexpdata]]
def candidate_symbol(comp):
"""
Return a character representing completion type.
:type comp: jedi.api.Completion
:arg comp: A completion object returned by `jedi.Script.completions`.
"""
try:
return comp.type[0].lower()
except (AttributeError, TypeError):
return '?'
def candidates_description(comp):
"""
Return `comp.description` in an appropriate format.
    * Avoid returning the string 'None'.
* Strip off all newlines. This is required for using
`comp.description` as candidate summary.
"""
desc = comp.description
return _WHITESPACES_RE.sub(' ', desc) if desc and desc != 'None' else ''
_WHITESPACES_RE = re.compile(r'\s+')
PARAM_PREFIX_RE = re.compile(r'^param\s+')
"""RE to strip unwanted "param " prefix returned by param.description."""
def definition_to_dict(d):
return dict(
doc=d.docstring(),
description=d.description,
desc_with_module=d.desc_with_module,
line_nr=d.line,
column=d.column,
module_path=d.module_path,
name=getattr(d, 'name', []),
full_name=getattr(d, 'full_name', []),
type=getattr(d, 'type', []),
)
def get_names_recursively(definition, parent=None):
"""
Fetch interesting defined names in sub-scopes under `definition`.
:type names: jedi.api_classes.Definition
"""
d = definition_to_dict(definition)
try:
d['local_name'] = parent['local_name'] + '.' + d['name']
except (AttributeError, TypeError):
d['local_name'] = d['name']
if definition.type == 'class':
ds = definition.defined_names()
return [d] + [get_names_recursively(c, d) for c in ds]
else:
return [d]
def get_module_version(module):
notfound = object()
for key in ['__version__', 'version']:
version = getattr(module, key, notfound)
if version is not notfound:
return version
try:
from pkg_resources import get_distribution, DistributionNotFound
try:
return get_distribution(module.__name__).version
except DistributionNotFound:
pass
except ImportError:
pass
def path_expand_vars_and_user(p):
return os.path.expandvars(os.path.expanduser(p))
def configure_logging(log_settings):
"""
:type log_settings: LogSettings
"""
if not log_settings.log_file:
return
fmter = logging.Formatter('%(asctime)s:' + logging.BASIC_FORMAT)
if log_settings.log_rotate_max_size > 0:
handler = logging.handlers.RotatingFileHandler(
filename=log_settings.log_file,
mode='w',
maxBytes=log_settings.log_rotate_max_size,
backupCount=log_settings.log_rotate_max_count,
)
else:
handler = logging.FileHandler(filename=log_settings.log_file, mode='w')
handler.setFormatter(fmter)
if log_settings.log_level:
logging.root.setLevel(log_settings.log_level.upper())
logging.root.addHandler(handler)
def jedi_epc_server(
address='localhost',
port=0,
port_file=sys.stdout,
sys_path=[],
virtual_env=[],
sys_path_append=[],
debugger=None,
log_traceback=None,
):
"""Start EPC server.
:type log_settings: LogSettings
"""
logger.debug(
'jedi_epc_server: sys_path=%r virtual_env=%r sys_path_append=%r',
sys_path, virtual_env, sys_path_append,
)
if not virtual_env and os.getenv('VIRTUAL_ENV'):
logger.debug(
'Taking virtual env from VIRTUAL_ENV: %r',
os.environ['VIRTUAL_ENV'],
)
virtual_env = [os.environ['VIRTUAL_ENV']]
handler = JediEPCHandler(
sys_path=sys_path,
virtual_envs=virtual_env,
sys_path_append=sys_path_append,
)
logger.debug(
'Starting Jedi EPC server with the following sys.path: %r',
handler.get_sys_path(),
)
server = epc.server.EPCServer((address, port))
server.register_function(handler.complete)
server.register_function(handler.get_in_function_call)
server.register_function(handler.goto)
server.register_function(handler.related_names)
server.register_function(handler.get_definition)
server.register_function(handler.defined_names)
server.register_function(handler.get_jedi_version)
@server.register_function
def toggle_log_traceback():
server.log_traceback = not server.log_traceback
return server.log_traceback
port_file.write(str(server.server_address[1])) # needed for Emacs client
port_file.write("\n")
port_file.flush()
if port_file is not sys.stdout:
port_file.close()
# This is not supported Python-EPC API, but I am using this for
# backward compatibility for Python-EPC < 0.0.4. In the future,
# it should be passed to the constructor.
server.log_traceback = bool(log_traceback)
if debugger:
server.set_debugger(debugger)
handler = logging.StreamHandler()
fmter = logging.Formatter('%(asctime)s:' + logging.BASIC_FORMAT)
handler.setFormatter(fmter)
handler.setLevel(logging.DEBUG)
server.logger.addHandler(handler)
server.logger.setLevel(logging.DEBUG)
return server
# def add_virtualenv_path(venv):
# """Add virtualenv's site-packages to `sys.path`."""
# venv = os.path.abspath(venv)
# paths = glob.glob(os.path.join(
# venv, 'lib', 'python*', 'site-packages'))
# if not paths:
# raise ValueError('Invalid venv: no site-packages found: %s' % venv)
# for path in paths:
# site.addsitedir(path)
def main(args=None):
ns = parser.parse_args(args)
ns_vars = vars(ns).copy()
log_settings = LogSettings(
log_file=ns_vars.pop('log'),
log_level=ns_vars.pop('log_level'),
log_rotate_max_size=ns_vars.pop('log_rotate_max_size'),
log_rotate_max_count=ns_vars.pop('log_rotate_max_count'),
)
configure_logging(log_settings)
server = jedi_epc_server(**ns_vars)
server.serve_forever()
server.logger.info('exit')
if __name__ == '__main__':
main()
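# Hedged usage sketch (not part of the original module): calling the handler
# directly, without the EPC transport. This assumes a jedi version that still
# exposes Script.completions(); line is 1-based and column 0-based, following
# jedi.Script conventions.
def _demo_complete():
    handler = JediEPCHandler()
    source = "import os\nos.pa"
    for candidate in handler.complete(source, 2, 5, 'demo.py'):
        print(candidate['word'])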
|
kyon-bll/.dotfiles
|
.emacs.d/elpa/jedi-core-20191011.1750/jediepcserver.py
|
Python
|
mit
| 15,847
|
from django.conf import settings
BACKENDS = getattr(settings, 'FAIREPART_BACKENDS', (
'fairepart.backends.facebook.FacebookBackend',
'fairepart.backends.google.GoogleOAuth2Backend',
))
RELATION_LIST_PAGINATE_BY = getattr(settings, 'FAIREPART_RELATION_LIST_PAGINATE_BY', 5)
GOOGLE_APP_NAME = getattr(settings, 'FAIREPART_GOOGLE_APP_NAME', '')
|
thoas/django-fairepart
|
fairepart/settings.py
|
Python
|
mit
| 354
|
from frasco.ext import *
from frasco.assets import expose_package, register_assets_builder
from frasco.utils import join_url_rule
from flask import render_template
import os
import json
import re
import htmlmin
import codecs
class FrascoAngular(Extension):
name = "frasco_angular"
defaults = {"static_dir": None, # defaults to app.static_folder
"static_url_path": None, # defaults to app.static_url_path
"angular_template": "angular_layout.html",
"app_dir": "app",
"services_module": "services",
"services_name": "%s",
"templates_file": None,
"templates_module": "templatesCache",
"templates_search_paths": [],
"disable_templates_cache": None, # app.debug
"templates_matcher": r".*\.html$",
"add_app_dir_in_babel_extract": True}
def _init_app(self, app, state):
require_extension('frasco_assets', app)
expose_package(app, "frasco_angular", __name__)
if not state.options["static_dir"]:
state.options["static_dir"] = app.static_folder
if not state.options["static_url_path"]:
state.options["static_url_path"] = app.static_url_path
state.options['templates_search_paths'].append(
(os.path.join(state.options["static_dir"], state.options['app_dir']), state.options["static_url_path"] + '/' + state.options['app_dir'])
)
if state.options['templates_file']:
register_assets_builder(self.build_templates)
if has_extension('frasco_babel', app) and state.options['add_app_dir_in_babel_extract']:
app.extensions.frasco_babel.add_extract_dir(os.path.join(state.options['static_dir'], state.options['app_dir']),
'.', ['frasco.angular.babel.AngularCompatExtension'], [('javascript:**.js', {})])
@ext_stateful_method
def add_route(self, state, endpoint, rule, decorators=None, **options):
rules = rule if isinstance(rule, (list, tuple)) else [rule]
def func(*args, **kwargs):
return self.angular_view_response()
if decorators:
for decorator in reversed(decorators):
func = decorator(func)
for rule in rules:
self.get_app().add_url_rule(rule, endpoint, func, **options)
def angular_view_response(self):
return render_template(get_extension_state('frasco_angular').options['angular_template'])
@ext_stateful_method
def register_service_builder(self, state, api_version, filename):
def builder():
module = ("/* This file is auto-generated by frasco-angular. DO NOT MODIFY. */\n'use strict';\n"
"\n(function() {\n\nvar services = angular.module('%s', ['frasco']);\n") % state.options["services_module"]
for service in api_version.iter_services():
endpoints = {}
for rule, endpoint, func, options in service.iter_endpoints():
args = []
if hasattr(func, 'request_params'):
for p in reversed(func.request_params):
args.extend(p.names)
endpoints[endpoint] = [_convert_url_args(join_url_rule(service.url_prefix, rule)), args]
module += ("\nservices.factory('%s', ['frascoServiceFactory', function(frascoServiceFactory) {\n"
"return frascoServiceFactory.make('%s', '%s', [], %s);\n}]);\n") % \
(state.options['services_name'] % service.name, service.name, api_version.url_prefix,
json.dumps(endpoints, indent=2))
module += "\n})();"
_write_file(os.path.join(state.options["static_dir"], state.options["app_dir"], filename), module)
register_assets_builder(builder)
@ext_stateful_method
def build_templates(self, state):
module = [("/* This file is auto-generated by frasco-angular. DO NOT MODIFY. */\n'use strict';\n"
"\nangular.module('%s', []).run(['$templateCache', function($templateCache) {") % state.options["templates_module"]]
matcher = re.compile(state.options["templates_matcher"], re.I)
done = set()
def process_file(filename, path, relpath, url_path):
pathname = os.path.join(path, filename)
relname = "/".join([p for p in [url_path, os.path.relpath(path, relpath), filename] if p])
if pathname not in done and matcher.match(relname):
with codecs.open(pathname, 'r', 'utf-8') as f:
content = f.read()
module.append(" $templateCache.put('%s', %s);" % (relname, json.dumps(htmlmin.minify(content))))
done.add(pathname)
disable = state.options["disable_templates_cache"]
if (disable is None and not self.get_app().debug) or disable is False:
for templates_dir, url_path in state.options['templates_search_paths']:
for path, dirnames, filenames in os.walk(templates_dir):
for filename in filenames:
process_file(filename, path, templates_dir, url_path)
module = "\n".join(module) + "\n}]);"
filename = os.path.join(state.options["static_dir"], state.options["app_dir"], state.options['templates_file'])
_write_file(filename, module)
_url_arg_re = re.compile(r"<([a-z]+:)?([a-z0-9_]+)>")
def _convert_url_args(url):
return _url_arg_re.sub(r":\2", url)
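# Hedged worked example (not part of the original module): Flask-style
# converter prefixes are dropped, keeping only the argument name.
def _demo_convert_url_args():
    assert _convert_url_args('/users/<int:user_id>/posts/<slug>') == \
        '/users/:user_id/posts/:slug'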
def _write_file(filename, source):
if not os.path.exists(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename))
with codecs.open(filename, "w", "utf-8") as f:
f.write(source)
|
frascoweb/frasco
|
frasco/angular/__init__.py
|
Python
|
mit
| 5,809
|
""" Configuration for Flask app """
import os
import urllib
from flask import (Flask, abort, flash, Response)
from playhouse.flask_utils import FlaskDB
ADMIN_PASSWORD = 'secret'
APP_DIR = os.path.dirname(os.path.realpath(__file__))
DATABASE = 'sqliteext:///%s' % os.path.join(APP_DIR, 'blog.db')
DEBUG = False
SECRET_KEY = 'shhh, secret!' # Used by Flask to sign the session cookie.
SITE_WIDTH = 800
app = Flask(__name__)
app.config.from_object(__name__)
flask_db = FlaskDB(app)
database = flask_db.database
from models import Entry, FTSEntry
database.create_tables([Entry, FTSEntry], safe=True)
# Setup routes
import views
app.add_url_rule('/login/', 'login', views.login, methods=['GET', 'POST'])
app.add_url_rule('/logout/', 'logout', views.logout, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', views.index, methods=['GET'])
app.add_url_rule('/create', 'create', views.create, methods=['GET', 'POST'])
app.add_url_rule('/drafts', 'drafts', views.drafts, methods=['GET'])
app.add_url_rule('/<slug>', 'detail', views.detail, methods=['GET'])
app.add_url_rule('/<slug>/edit', 'edit', views.edit, methods=['GET', 'POST'])
@app.template_filter('clean_querystring')
def clean_querystring(request_args, *keys_to_remove, **new_values):
querystring = dict((key, value) for key, value in request_args.items())
for key in keys_to_remove:
querystring.pop(key, None)
querystring.update(new_values)
return urllib.urlencode(querystring)
@app.errorhandler(404)
def not_found(exc):
return Response('<h3>404 Error: Page Not found</h3>'), 404
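# Hedged usage sketch (not part of the original module): what the template
# filter does to query arguments (Python 2, hence urllib.urlencode); output
# key order is not guaranteed because it round-trips through a dict.
def _demo_clean_querystring():
    result = clean_querystring({'page': '2', 'q': 'flask'}, 'page', sort='new')
    return result # e.g. 'q=flask&sort=new'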
|
econne01/flask_blog
|
app/app.py
|
Python
|
mit
| 1,579
|
# Notes on classes
class Sample():
def __init__(self, name, number):
self.name = name
self.number = number
def print_values(self):
print(f"name: {self.name}")
print(f"number: {self.number}")
class SampleWithProperties():
def __init__(self, name, number):
self.name = name
self.number = number
@property
def name(self):
        # The double-underscore prefix signals that this attribute should not
        # be accessed directly (Python name-mangles it)
return self.__name
@property
def double_name(self):
# Can return calculated or other values besides fields
return 2 * self.__name
@property
def number(self):
return self.__number
@name.setter
def name(self, value):
# Often has some sort of validation or transformation code
self.__name = value
@number.setter
def number(self, value):
# Often has some sort of validation or transformation code
self.__number = value % 2
class SuperClass():
def __init__(self, name):
self.name = name
def speak(self):
print(f"Hey, ho {self.name}")
class SubClass(SuperClass):
def __init__(self, name, location):
super().__init__(name)
self.location = location
def shout_out(self):
print(f"{self.location} is where it's at")
def speak(self):
        # Overriding replaces the parent method. The parent version only runs
        # if it is called explicitly here, e.g. super().speak(); since it is
        # not called, only this code runs
print(f"{self.location}, let's go! ")
if __name__ == "__main__":
'''
# Demo Sample()
instance = Sample("fred", 3)
instance.print_values()
print(f"Access name field directly: {instance.name}")
instance.number += 100
print(f"Access number field directly: {instance.number}")
'''
'''
# Demo SampleWithProperties()
instance_with_props = SampleWithProperties("fred", 3)
# Directly accessing values
# Next line fails
# print(f"Access name field, direct: {instance_with_props.__name}")
    # Python rewrites value names with initial __ to protect namespace
# not really a private value, but less likely to be accessed
print(f"Access name field, direct: {instance_with_props._SampleWithProperties__name}")
# Using getter to access values, looks like direct access but isn't
# name field
print(f"Access name field, getter: {instance_with_props.name}")
print(f"Access name field, getter: {instance_with_props.double_name}")
instance_with_props.name = "Barney"
print(f"Access name field, after setter: {instance_with_props.name}")
# number field
print(f"Access number field, before setter: {instance_with_props.number}")
instance_with_props.number = 4
print(f"Access number field, after setter: {instance_with_props.number}")
instance_with_props.number = 3
print(f"Access number field, after setter: {instance_with_props.number}")
'''
# Demo inheritance
# Show super class functions
instance_super = SuperClass("Johnny")
print(f"Name, super: {instance_super.name}")
print("")
# Show sub inherits name, methods
instance_sub = SubClass("Joey", "Lower East Side")
print(f"Name, super: {instance_sub.name}")
print(f"Method from super: ", end="")
instance_sub.super().speak()
print("")
# Show sub can override parent
print(f"Overide from super: ", end="")
instance_sub.speak()
    # Note: super(SubClass, instance_sub).speak() is how to call the super
    # method from an instance rather than from the class definition
|
daveinnyc/various
|
python-practice/class_demos.py
|
Python
|
mit
| 3,833
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Perform basic ELF security checks on a series of executables.
Exit status will be 0 if successful, and the program will be silent.
Otherwise the exit status will be 1 and it will log which executables failed which checks.
Needs `readelf` (for ELF) and `objdump` (for PE).
'''
import subprocess
import sys
import os
READELF_CMD = os.getenv('READELF', '/usr/bin/readelf')
OBJDUMP_CMD = os.getenv('OBJDUMP', '/usr/bin/objdump')
NONFATAL = {} # checks which are non-fatal for now but only generate a warning
def check_ELF_PIE(executable):
'''
Check for position independent executable (PIE), allowing for address space randomization.
'''
p = subprocess.Popen([READELF_CMD, '-h', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.splitlines():
line = line.split()
if len(line)>=2 and line[0] == 'Type:' and line[1] == 'DYN':
ok = True
return ok
def get_ELF_program_headers(executable):
'''Return type and flags for ELF program headers'''
p = subprocess.Popen([READELF_CMD, '-l', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
in_headers = False
count = 0
headers = []
for line in stdout.splitlines():
if line.startswith('Program Headers:'):
in_headers = True
if line == '':
in_headers = False
if in_headers:
if count == 1: # header line
ofs_typ = line.find('Type')
ofs_offset = line.find('Offset')
ofs_flags = line.find('Flg')
ofs_align = line.find('Align')
if ofs_typ == -1 or ofs_offset == -1 or ofs_flags == -1 or ofs_align == -1:
                    raise ValueError('Cannot parse readelf -lW output')
elif count > 1:
typ = line[ofs_typ:ofs_offset].rstrip()
flags = line[ofs_flags:ofs_align].rstrip()
headers.append((typ, flags))
count += 1
return headers
def check_ELF_NX(executable):
'''
Check that no sections are writable and executable (including the stack)
'''
have_wx = False
have_gnu_stack = False
for (typ, flags) in get_ELF_program_headers(executable):
if typ == 'GNU_STACK':
have_gnu_stack = True
if 'W' in flags and 'E' in flags: # section is both writable and executable
have_wx = True
return have_gnu_stack and not have_wx
def check_ELF_RELRO(executable):
'''
Check for read-only relocations.
GNU_RELRO program header must exist
Dynamic section must have BIND_NOW flag
'''
have_gnu_relro = False
for (typ, flags) in get_ELF_program_headers(executable):
# Note: not checking flags == 'R': here as linkers set the permission differently
# This does not affect security: the permission flags of the GNU_RELRO program header are ignored, the PT_LOAD header determines the effective permissions.
        # However, the dynamic linker needs to write to this area so these are RW.
# Glibc itself takes care of mprotecting this area R after relocations are finished.
# See also https://marc.info/?l=binutils&m=1498883354122353
if typ == 'GNU_RELRO':
have_gnu_relro = True
have_bindnow = False
p = subprocess.Popen([READELF_CMD, '-d', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
for line in stdout.splitlines():
tokens = line.split()
if len(tokens)>1 and tokens[1] == '(BIND_NOW)' or (len(tokens)>2 and tokens[1] == '(FLAGS)' and 'BIND_NOW' in tokens[2:]):
have_bindnow = True
return have_gnu_relro and have_bindnow
def check_ELF_Canary(executable):
'''
Check for use of stack canary
'''
p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.splitlines():
if '__stack_chk_fail' in line:
ok = True
return ok
def get_PE_dll_characteristics(executable):
'''
Get PE DllCharacteristics bits.
Returns a tuple (arch,bits) where arch is 'i386:x86-64' or 'i386'
and bits is the DllCharacteristics value.
'''
p = subprocess.Popen([OBJDUMP_CMD, '-x', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
arch = ''
bits = 0
for line in stdout.splitlines():
tokens = line.split()
if len(tokens)>=2 and tokens[0] == 'architecture:':
arch = tokens[1].rstrip(',')
if len(tokens)>=2 and tokens[0] == 'DllCharacteristics':
bits = int(tokens[1],16)
return (arch,bits)
IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA = 0x0020
IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE = 0x0040
IMAGE_DLL_CHARACTERISTICS_NX_COMPAT = 0x0100
def check_PE_DYNAMIC_BASE(executable):
'''PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)'''
(arch,bits) = get_PE_dll_characteristics(executable)
reqbits = IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE
return (bits & reqbits) == reqbits
# On 64 bit, must support high-entropy 64-bit address space layout randomization in addition to DYNAMIC_BASE
# to have secure ASLR.
def check_PE_HIGH_ENTROPY_VA(executable):
'''PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR'''
(arch,bits) = get_PE_dll_characteristics(executable)
if arch == 'i386:x86-64':
reqbits = IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA
else: # Unnecessary on 32-bit
assert(arch == 'i386')
reqbits = 0
return (bits & reqbits) == reqbits
def check_PE_NX(executable):
'''NX: DllCharacteristics bit 0x100 signifies nxcompat (DEP)'''
(arch,bits) = get_PE_dll_characteristics(executable)
return (bits & IMAGE_DLL_CHARACTERISTICS_NX_COMPAT) == IMAGE_DLL_CHARACTERISTICS_NX_COMPAT
CHECKS = {
'ELF': [
('PIE', check_ELF_PIE),
('NX', check_ELF_NX),
('RELRO', check_ELF_RELRO),
('Canary', check_ELF_Canary)
],
'PE': [
('DYNAMIC_BASE', check_PE_DYNAMIC_BASE),
('HIGH_ENTROPY_VA', check_PE_HIGH_ENTROPY_VA),
('NX', check_PE_NX)
]
}
def identify_executable(executable):
    with open(executable, 'rb') as f:
        magic = f.read(4)
if magic.startswith(b'MZ'):
return 'PE'
elif magic.startswith(b'\x7fELF'):
return 'ELF'
return None
if __name__ == '__main__':
retval = 0
for filename in sys.argv[1:]:
try:
etype = identify_executable(filename)
if etype is None:
print('%s: unknown format' % filename)
retval = 1
continue
failed = []
warning = []
for (name, func) in CHECKS[etype]:
if not func(filename):
if name in NONFATAL:
warning.append(name)
else:
failed.append(name)
if failed:
print('%s: failed %s' % (filename, ' '.join(failed)))
retval = 1
if warning:
print('%s: warning %s' % (filename, ' '.join(warning)))
except IOError:
print('%s: cannot open' % filename)
retval = 1
sys.exit(retval)
|
thelazier/dash
|
contrib/devtools/security-check.py
|
Python
|
mit
| 8,242
|
#!/usr/bin/env python
#
# Machine Description Interface C API
#
# This software is delivered under the terms of the MIT License
#
# Copyright (c) 2016 STMicroelectronics
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
#
from __future__ import print_function
import sys
class ENUM:
instructions_list = []
def __init__(self, ID, mnemonic, properties, parsing, encoding, short_desc, execution, description):
self.ID = ID
self.mnemonic = mnemonic
self.properties = properties
self.parsing = parsing
self.encoding = encoding
self.short_desc = short_desc
self.execution = execution
self.description = description
self.instructions_list.append(self)
@staticmethod
def emit_execution(out):
with open(out, "w") as outf:
print("/* BEGIN: Generated executions */", file=outf)
ENUM._emit_executions(outf)
print("/* END: Generated executions */", file=outf)
@staticmethod
def _emit_executions(out):
        idx = 0
print("#define P(idx) EXE_OPS(_operands,idx)", file=out)
print("#define NEXT_PC() (RR(PC,0) + _op_size)", file=out)
print("#define RR(rf,idx) EXE_CPU_RR((*_cpu_prev),rf,idx)", file=out)
print("#define RS(rf,idx) EXE_CPU_RS(_cpu,rf,idx)", file=out)
print("#define MR32(idx) EXE_MEM_FETCH32(_mem,idx)", file=out)
print("#define MS32(idx) EXE_MEM_SLICE32(_mem,idx)", file=out)
for inst in ENUM.instructions_list:
print("", file=out)
print("static int32_t _execution_%i /* %s */ (EXE_CTX_T _context, EXE_OPS_T _operands, size_t _op_size)" %
(idx, inst.ID), file=out)
print("{", file=out)
print(" CPU_T _cpu, *_cpu_prev = EXE_CTX_CPU(_context);", file=out)
print(" MEM_T _mem, *_mem_prev = EXE_CTX_MEM(_context);", file=out)
print(" EXE_CPU_CLONE(_cpu, _cpu_prev);", file=out)
print(" EXE_MEM_CLONE(_mem, _mem_prev);", file=out)
print(" RS(PC,0) = NEXT_PC();", file=out)
print(" %s;" % inst.execution, file=out)
print(" EXE_CPU_UPDATE(*_cpu_prev, &_cpu);", file=out);
print(" EXE_CPU_UPDATE(*_mem_prev, &_mem);", file=out);
print(" return 0;", file=out)
print("}", file=out);
idx += 1
print("#undef RF", file=out)
print("#undef MEM", file=out)
print("typedef int32_t (*EXE_FUNC_T)(EXE_CTX_T _context, EXE_OPS_T _operands, size_t _op_size);", file=out);
print("static const EXE_FUNC_T _executions[] = {", file=out)
idx = 0
for inst in ENUM.instructions_list:
print(" _execution_%i /* %s */," % (idx, inst.ID), file=out)
idx += 1
print("};", file=out)
# execfile() is Python 2 only; exec() keeps the same current-namespace
# behaviour so the description file can call ENUM directly.
with open(sys.argv[1]) as _desc:
    exec(compile(_desc.read(), sys.argv[1], "exec"))
ENUM.emit_execution(sys.argv[2])
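# Hedged illustration (not part of the original script): the rough shape of
# one entry in the machine-description file passed as sys.argv[1]. The
# mnemonic, encoding and register-file name (GPR) are invented; the execution
# string uses the RR/RS macros emitted by _emit_executions above.
# ENUM("ADD", "add", [], "add $rd, $rs1, $rs2", "0x0", "integer add",
#      "RS(GPR,0) = RR(GPR,1) + RR(GPR,2)", "Adds two source registers.")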
|
guillon/mdi
|
examples/mini/scripts/generate_executions.py
|
Python
|
mit
| 3,906
|
# -*- coding: utf-8 -*-
from setuptools import setup
setup(setup_requires=['pbr'],
pbr=True,
test_suite="")
|
CloudNiner/fadds-parser
|
setup.py
|
Python
|
mit
| 122
|
#!/usr/bin/env python
# encoding: utf-8
def run(whatweb, pluginname):
whatweb.recog_from_header(pluginname, "Webluker")
|
cflq3/getcms
|
plugins/Webluker_cdn.py
|
Python
|
mit
| 127
|
# """Django Actions Log settings file."""
#
from __future__ import unicode_literals
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
CREATE = 100
SUCCESS = 110
ACTIVATE = 130
AUTH = 150
VIEW = 180
UPDATE = 200
SUSPEND = 250
UNSUSPEND = 260
DELETE = 300
TERMINATE = 500
FAILED = 999
ERROR = 1000
LOG_ACTION_CHOICES_DEFAULT = [
(CREATE, _("create")),
(SUCCESS, _("success")),
(ACTIVATE, _("activate")),
(AUTH, _("authorize")),
(VIEW, _("view")),
(UPDATE, _("update")),
(SUSPEND, _("suspend")),
(UNSUSPEND, _("unsuspend")),
(DELETE, _("delete")),
(TERMINATE, _("terminate")),
(FAILED, _("failed")),
(ERROR, _("error")),
]
AL_LOG_ACTION_SETTINGS = getattr(
settings, 'AL_LOG_ACTION_CHOICES',
LOG_ACTION_CHOICES_DEFAULT
)
LOG_ACTION_CHOICES = [
(value[0], value[1])
for value in AL_LOG_ACTION_SETTINGS
]
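# Hedged usage sketch (not part of the original module): a project can
# override the choices from its settings.py; the setting name matches the
# getattr() lookup above, and the values here are invented for illustration.
# AL_LOG_ACTION_CHOICES = [
#     (100, _("create")),
#     (999, _("failed")),
# ]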
|
shtalinberg/django-actions-logger
|
actionslog/settings.py
|
Python
|
mit
| 910
|
# -*- coding: utf-8 -*-
"""
This file is part of pyCMBS.
(c) 2012- Alexander Loew
For COPYING and LICENSE details, please refer to the LICENSE file
"""
import unittest
from pycmbs.icon import Icon
class TestPycmbsIcon(unittest.TestCase):
def setUp(self):
# requires local installation of ICON sample files!
        self.gridfile = '../../example_data/icon/r2b4_amip.nc'
self.datafile = '../../example_data/icon/rms0006_atm_phy_DOM01_ML_0001.nc'
def test_DummyTest(self):
pass
def test_IconInit(self):
x = Icon(None, None, 'None')
def test_IconInitMissingFile(self):
x = Icon('no.nc', 'nothing.nc', 'novar')
with self.assertRaises(ValueError):
x.read()
def test_IconInitMissingGridFile(self):
x = Icon(self.datafile, 'nothing.nc', 'novar')
with self.assertRaises(ValueError):
x.read()
#~ def test_IconReadOK(self):
#~ x = Icon(self.datafile, self.datafile, 'rsns')
#~ x.read()
if __name__ == "__main__":
unittest.main()
|
pygeo/pycmbs
|
pycmbs/tests/test_icon.py
|
Python
|
mit
| 1,061
|
#!/usr/bin/env python
"""Docstring."""
import re
from functools import reduce
from collections import Counter
from common import get_input
class SSLTester:
    """Check IPv7 addresses (aba/bab matching) for SSL support."""
    def __init__(self, input_list=None):
        """Initialize, optionally with a list of IP strings."""
        self.input_list = input_list if input_list is not None else []
def find_aba(self, seq):
"""Return list: all 'aba's found in the string <seq>."""
i = 0
abas = []
while i < len(seq) - 2:
if (seq[i] == seq[i + 2] and
seq[i] != seq[i + 1]):
abas.append(seq[i:i + 3])
i += 1
return abas
def batch_find_aba(self, seqs):
"""Return list: all 'aba's found from all strings in <seqs>."""
return reduce(lambda x, y: x + y, [self.find_aba(x) for x in seqs], [])
    def has_corresponding_bab(self, input_key, seqs):
        """Return bool: the 'bab' matching <input_key> occurs in some string in <seqs>."""
        key = input_key[1:] + input_key[1]
return True in [key in x for x in seqs]
def ip_supports_ssl(self, ip_string):
"""Return bool: <ip_string> supports SSL.
True:
some string in <unbracketed> contains 'aba' pattern
AND some string in <bracketed> contains matching 'bab' pattern
"""
all_segments = re.split(r"\[(\w+)\]", ip_string)
bracketed = re.findall(r"\[(\w+)\]", ip_string)
unbracketed = [x for x in all_segments if x not in bracketed]
return True in [self.has_corresponding_bab(x, bracketed) for x in self.batch_find_aba(unbracketed)]
def count_supported_ips(self, input_list=None):
"""Return int: number of items in <input_list> which support SSL."""
input_list = input_list if input_list else self.input_list
return Counter([self.ip_supports_ssl(x) for x in input_list])[True]
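# Hedged sanity check (not part of the original solution): an 'aba' outside
# brackets with the matching 'bab' inside brackets supports SSL.
def _demo_supports_ssl():
    assert SSLTester().ip_supports_ssl('aba[bab]xyz')
    assert not SSLTester().ip_supports_ssl('abc[bab]xyz')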
if __name__ == "__main__":
tester = SSLTester(get_input())
print("Out of {} IPs, {} support SSL.".format(
len(tester.input_list),
tester.count_supported_ips()
))
|
tlake/advent-of-code
|
2016/day07_internet_protocol_version_7/python/src/part2.py
|
Python
|
mit
| 2,010
|
import sys
import EulerPy
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
def requirements():
install_requires = []
with open('requirements.txt') as f:
for line in f:
install_requires.append(line.strip())
# Terminal colors for Windows
if 'win32' in str(sys.platform).lower():
install_requires.append('colorama>=0.2.4')
return install_requires
setup(
name='EulerPy',
version=EulerPy.__version__,
description=EulerPy.__doc__.strip(),
long_description=readme(),
url='https://github.com/iKevinY/EulerPy',
author=EulerPy.__author__,
author_email='me@kevinyap.ca',
license=EulerPy.__license__,
packages=['EulerPy'],
entry_points={'console_scripts': ['euler = EulerPy.__main__:main']},
install_requires=requirements(),
classifiers=[
"License :: OSI Approved :: MIT License",
"Topic :: Utilities",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
],
keywords=['EulerPy', 'euler', 'project-euler', 'projecteuler'],
include_package_data=True,
zip_safe=False,
)
|
rahulg/eulerswift
|
setup.py
|
Python
|
mit
| 1,487
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from django_summernote.widgets import SummernoteInplaceWidget
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from .models import Speaker, Program
class EmailLoginForm(forms.Form):
email = forms.EmailField(
max_length=255,
label='',
widget=forms.TextInput(attrs={
'placeholder': 'Email address',
'class': 'form-control',
})
)
def clean(self):
cleaned_data = super(EmailLoginForm, self).clean()
return cleaned_data
class SpeakerForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(SpeakerForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.add_input(Submit('submit', _('Submit')))
class Meta:
model = Speaker
fields = ('desc', 'info', )
widgets = {
'desc': SummernoteInplaceWidget(),
}
labels = {
'desc': _('Profile'),
'info': _('Additional information'),
}
class ProgramForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(ProgramForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.add_input(Submit('submit', _('Submit')))
class Meta:
model = Program
fields = ('slide_url', 'video_url', 'is_recordable', 'desc', )
widgets = {
'desc': SummernoteInplaceWidget(),
}
labels = {
'slide_url': _('Slide URL'),
'video_url': _('Video URL'),
'is_recordable': _('Photography and recording is allowed'),
'desc': _('Description'),
}
|
pythonkr/pyconkr-2014
|
pyconkr/forms.py
|
Python
|
mit
| 1,760
|
import logging
from logging.handlers import RotatingFileHandler
from flask import Flask, render_template
from flask_login import LoginManager
from flask_restful import Api
from flask_wtf.csrf import CsrfProtect
from itsdangerous import URLSafeTimedSerializer
from sqlalchemy import create_engine
import AppConfig
from RestResources.Resources import PostsList, Posts
from services.Services import UserService
from views import Login, Common, Post, Admin
app = Flask(__name__)
CsrfProtect(app)
login_serializer = URLSafeTimedSerializer(AppConfig.APPSECRETKEY)
@app.errorhandler(404)
def not_found(error):
return render_template('404.html'), 404
# set the secret key. keep this really secret:
app.secret_key = AppConfig.APPSECRETKEY
def register_mods():
app.register_blueprint(Common.mod)
app.register_blueprint(Login.mod)
app.register_blueprint(Post.mod)
app.register_blueprint(Admin.mod)
def create_db_engine():
return create_engine(AppConfig.CONNECTIONSTRING, pool_recycle=3600, echo=True)
def build_db_engine():
AppConfig.DBENGINE = create_db_engine()
def init_login():
login_manager = LoginManager()
login_manager.init_app(app)
AppConfig.LOGINMANAGER = login_manager
# Create user loader function
@login_manager.user_loader
def load_user(user_id):
return UserService().getAll().filter_by(id=user_id).first()
@login_manager.token_loader
def get_user_token(token):
max_age = app.config["REMEMBER_COOKIE_DURATION"].total_seconds()
        #Deserialize the signed security token, data = [username, hashpass]
data = login_serializer.loads(token, max_age=max_age)
userService = UserService()
#Find the User
user = userService.getById(data[0])
#Check Password and return user or None
if user and userService.validate(user.username, user.password):
return user
return None
def init_logger():
handler = RotatingFileHandler('FlaskTest.log', maxBytes=10000, backupCount=1)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
def register_rest_api():
return Api(app)
def register_rest_resources():
api.add_resource(PostsList, '/api/posts')
api.add_resource(Posts, '/api/posts/<string:post_id>')
def set_app_configuration():
app.config['REMEMBER_COOKIE_DURATION'] = AppConfig.REMEMBER_COOKIE_DURATION
register_mods()
api = register_rest_api()
register_rest_resources()
build_db_engine()
init_login()
init_logger()
set_app_configuration()
app.run(AppConfig.APPHOST, AppConfig.APPPORT)
|
mandrive/FlaskTest
|
__init__.py
|
Python
|
mit
| 2,578
|
#
# Copyright IBM Corp. 2014
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Daniel Bolanos
# Date: 2015
# coding=utf-8
import json # json
import threading # multi threading
import os # for listing directories
import Queue # queue used for thread synchronization
import sys # system calls
import argparse # for parsing arguments
import base64 # necessary to encode in base64
# according to the RFC2045 standard
import requests # python HTTP requests library
# WebSockets
from autobahn.twisted.websocket import WebSocketClientProtocol, \
WebSocketClientFactory, connectWS
from twisted.python import log
from twisted.internet import ssl, reactor
with open("path_output.txt", "r") as path_file:  # avoid shadowing the builtin `file`
    path_output = path_file.read()
class Utils:
@staticmethod
def getAuthenticationToken(hostname, serviceName, username, password):
uri = hostname + "/authorization/api/v1/token?url=" + hostname + '/' \
+ serviceName + "/api"
uri = uri.replace("wss://", "https://")
uri = uri.replace("ws://", "https://")
print uri
resp = requests.get(uri, auth=(username, password), verify=False,
headers={'Accept': 'application/json'},
timeout=(30, 30))
print resp.text
jsonObject = resp.json()
return jsonObject['token']
class WSInterfaceFactory(WebSocketClientFactory):
def __init__(self, queue, summary, dirOutput, contentType, model,
url=None, headers=None, debug=None):
WebSocketClientFactory.__init__(self, url=url, headers=headers)
self.queue = queue
self.summary = summary
self.dirOutput = dirOutput
self.contentType = contentType
self.model = model
self.queueProto = Queue.Queue()
self.openHandshakeTimeout = 10
self.closeHandshakeTimeout = 10
# start the thread that takes care of ending the reactor so
# the script can finish automatically (without ctrl+c)
endingThread = threading.Thread(target=self.endReactor, args=())
endingThread.daemon = True
endingThread.start()
def prepareUtterance(self):
try:
utt = self.queue.get_nowait()
self.queueProto.put(utt)
return True
except Queue.Empty:
print "getUtterance: no more utterances to process, queue is empty!"
return False
def endReactor(self):
self.queue.join()
print "about to stop the reactor!"
reactor.stop()
# this function gets called every time connectWS is called (once
# per WebSocket connection/session)
def buildProtocol(self, addr):
try:
utt = self.queueProto.get_nowait()
proto = WSInterfaceProtocol(self, self.queue, self.summary,
self.dirOutput, self.contentType)
proto.setUtterance(utt)
return proto
except Queue.Empty:
print ("queue should not be empty, otherwise this function "
"should not have been called")
return None
# WebSockets interface to the STT service
#
# note: an object of this class is created for each WebSocket
# connection, every time we call connectWS
class WSInterfaceProtocol(WebSocketClientProtocol):
def __init__(self, factory, queue, summary, dirOutput, contentType):
self.factory = factory
self.queue = queue
self.summary = summary
self.dirOutput = dirOutput
self.contentType = contentType
self.packetRate = 20
self.listeningMessages = 0
self.timeFirstInterim = -1
self.bytesSent = 0
self.chunkSize = 2000 # in bytes
        # name the class explicitly; super(self.__class__, ...) recurses if subclassed
        super(WSInterfaceProtocol, self).__init__()
print dirOutput
print "contentType: " + str(self.contentType) + " queueSize: " + \
str(self.queue.qsize())
def setUtterance(self, utt):
self.uttNumber = utt[0]
self.uttFilename = utt[1]
self.summary[self.uttNumber] = {"hypothesis": "",
"status": {"code": "", "reason": ""}}
self.fileJson = self.dirOutput + "/" + str(self.uttNumber) + \
".json.txt"
try:
os.remove(self.fileJson)
except OSError:
pass
    # helper method that sends a chunk of audio if needed (as required
    # by the specified pacing)
def maybeSendChunk(self, data):
def sendChunk(chunk, final=False):
self.bytesSent += len(chunk)
self.sendMessage(chunk, isBinary=True)
if final:
self.sendMessage(b'', isBinary=True)
if (self.bytesSent + self.chunkSize >= len(data)):
if (len(data) > self.bytesSent):
sendChunk(data[self.bytesSent:len(data)], True)
return
sendChunk(data[self.bytesSent:self.bytesSent + self.chunkSize])
self.factory.reactor.callLater(0.01, self.maybeSendChunk, data=data)
return
def onConnect(self, response):
print "onConnect, server connected: {0}".format(response.peer)
def onOpen(self):
print "onOpen"
data = {"action": "start", "content-type": str(self.contentType),
"continuous": True, "interim_results": True,
"inactivity_timeout": 600}
data['word_confidence'] = True
data['timestamps'] = True
data['max_alternatives'] = 3
print "sendMessage(init)"
# send the initialization parameters
self.sendMessage(json.dumps(data).encode('utf8'))
# start sending audio right away (it will get buffered in the
# STT service)
print self.uttFilename
        f = open(str(self.uttFilename), 'rb')
        self.bytesSent = 0
        dataFile = f.read()
        f.close()
        self.maybeSendChunk(dataFile)
print "onOpen ends"
def onMessage(self, payload, isBinary):
if isBinary:
print("Binary message received: {0} bytes".format(len(payload)))
else:
print(u"Text message received: {0}".format(payload.decode('utf8')))
# if uninitialized, receive the initialization response
# from the server
jsonObject = json.loads(payload.decode('utf8'))
if 'state' in jsonObject:
self.listeningMessages += 1
if (self.listeningMessages == 2):
print "sending close 1000"
# close the connection
self.sendClose(1000)
# if in streaming
            elif 'results' in jsonObject:
                # the payload was already parsed above; reuse jsonObject
                hypothesis = ""
                # empty hypothesis
                if (len(jsonObject['results']) == 0):
                    print "empty hypothesis!"
                # regular hypothesis
                else:
                    # dump the message to the output directory
                    f = open(self.fileJson, "a")
f.write(json.dumps(jsonObject, indent=4, sort_keys=True))
f.close()
res = jsonObject['results'][0]
hypothesis = res['alternatives'][0]['transcript']
bFinal = (res['final'] == True)
if bFinal:
print "final hypothesis: \"" + hypothesis + "\""
self.summary[self.uttNumber]['hypothesis'] += hypothesis
else:
print "interim hyp: \"" + hypothesis + "\""
def onClose(self, wasClean, code, reason):
print("onClose")
print("WebSocket connection closed: {0}".format(reason), "code: ",
code, "clean: ", wasClean, "reason: ", reason)
self.summary[self.uttNumber]['status']['code'] = code
self.summary[self.uttNumber]['status']['reason'] = reason
# create a new WebSocket connection if there are still
# utterances in the queue that need to be processed
self.queue.task_done()
if self.factory.prepareUtterance() == False:
return
# SSL client context: default
if self.factory.isSecure:
contextFactory = ssl.ClientContextFactory()
else:
contextFactory = None
connectWS(self.factory, contextFactory)
# function to check that a value is a positive integer
def check_positive_int(value):
ivalue = int(value)
if ivalue < 1:
raise argparse.ArgumentTypeError(
"\"%s\" is an invalid positive int value" % value)
return ivalue
# function to check the credentials format
def check_credentials(credentials):
elements = credentials.split(":")
if (len(elements) == 2):
return elements
else:
        raise argparse.ArgumentTypeError(
            "\"%s\" is not a valid format for the credentials" % credentials)
if __name__ == '__main__':
# parse command line parameters
parser = argparse.ArgumentParser(
description=('client to do speech recognition using the WebSocket '
'interface to the Watson STT service'))
parser.add_argument(
'-credentials', action='store', dest='credentials',
help="Basic Authentication credentials in the form 'username:password'",
required=True, type=check_credentials)
parser.add_argument(
'-in', action='store', dest='fileInput', default='./recordings.txt',
help='text file containing audio files')
parser.add_argument(
'-out', action='store', dest='dirOutput', default='./output',
help='output directory')
parser.add_argument(
'-type', action='store', dest='contentType', default='audio/wav',
help='audio content type, for example: \'audio/l16; rate=44100\'')
parser.add_argument(
'-model', action='store', dest='model', default='en-US_BroadbandModel',
help='STT model that will be used')
parser.add_argument(
'-threads', action='store', dest='threads', default='1',
help='number of simultaneous STT sessions', type=check_positive_int)
parser.add_argument(
'-optout', action='store_true', dest='optOut',
help=('specify opt-out header so user data, such as speech and '
'hypotheses are not logged into the server'))
parser.add_argument(
'-tokenauth', action='store_true', dest='tokenauth',
help='use token based authentication')
args = parser.parse_args()
# create output directory if necessary
if (os.path.isdir(args.dirOutput)):
pass
else:
os.makedirs(args.dirOutput)
# logging
log.startLogging(sys.stdout)
# add audio files to the processing queue
q = Queue.Queue()
lines = [line.rstrip('\n') for line in open(args.fileInput)]
fileNumber = 0
    for fileName in lines:
print fileName
q.put((fileNumber, fileName))
fileNumber += 1
hostname = "stream.watsonplatform.net"
headers = {}
if (args.optOut is True):
headers['X-WDC-PL-OPT-OUT'] = '1'
# authentication header
if args.tokenauth:
headers['X-Watson-Authorization-Token'] = (
Utils.getAuthenticationToken(
"https://" + hostname, 'speech-to-text',
args.credentials[0], args.credentials[1]))
else:
string = args.credentials[0] + ":" + args.credentials[1]
headers["Authorization"] = "Basic " + base64.b64encode(string)
print headers
# create a WS server factory with our protocol
url = "wss://" + hostname + "/speech-to-text/api/v1/recognize?model=" \
+ args.model
summary = {}
factory = WSInterfaceFactory(q, summary, args.dirOutput, args.contentType,
args.model, url, headers, debug=False)
factory.protocol = WSInterfaceProtocol
for i in range(min(int(args.threads), q.qsize())):
factory.prepareUtterance()
# SSL client context: default
if factory.isSecure:
contextFactory = ssl.ClientContextFactory()
else:
contextFactory = None
connectWS(factory, contextFactory)
reactor.run()
# dump the hypotheses to the output file
fileHypotheses = path_output
f = open(fileHypotheses, "w")
counter = 1
successful = 0
emptyHypotheses = 0
    for key, value in sorted(summary.items()):
        if value['status']['code'] == 1000:
            print("{0} {1}".format(value['status']['code'],
                                   value['hypothesis'].encode('utf-8')))
            successful += 1
            # count sessions that completed cleanly but produced no text;
            # indexing [0] on an empty string would raise IndexError here
            if value['hypothesis'] == "":
                emptyHypotheses += 1
        else:
            print("{0} REASON: {1}".format(value['status']['code'],
                                           value['status']['reason']))
        f.write(value['hypothesis'].encode('utf-8') + "\n")
        counter += 1
f.close()
print ("successful sessions: ", successful, " (",
len(summary) - successful, " errors) (" +
str(emptyHypotheses) + " empty hypotheses)")
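# --- Illustrative invocation (hedged; credentials and paths are placeholders) ---
# Using only the flags defined by the argument parser above:
#   python sttClient.py -credentials user:pass -in ./recordings.txt \
#       -out ./output -type audio/wav -model en-US_BroadbandModel -threads 2
# recordings.txt is expected to list one audio file path per line, and
# path_output.txt (read at module import) must contain the hypotheses file path.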
|
vetlehjelmtvedt/TranscriptApp
|
sttClient.py
|
Python
|
mit
| 13,934
|
import uuid
from random import randint
from django.shortcuts import render
from django.http import HttpResponseRedirect
from .models import Url
def index(request):
    if "has_url" in request.session:
url = request.session.get("has_url")
del request.session['has_url']
return render(request, "miudo/index.html", locals())
return render(request, "miudo/index.html", {})
def make_url(request):
if request.method == "POST":
url = None # initial url
url_site = request.POST['url']
url_id = generate_key()
        try:
            # keep generating keys until .get() raises DoesNotExist, i.e.
            # until an unused id is found; .get() never returns None, so
            # the loop can only exit through the exception below
            url = Url.objects.get(url_id = url_id)
            while url:
                url_id = generate_key()
                url = Url.objects.get(url_id = url_id)
        except Url.DoesNotExist:
            create_url(request, url_id, url_site)
            request.session["has_url"] = url_id
return HttpResponseRedirect("/")
def create_url(custom_request, url_id, url_site):
if custom_request.user.is_authenticated():
url = Url.objects.create(url_id = url_id, url_site = url_site,
url_author = custom_request.user)
else:
url = Url.objects.create(url_id = url_id, url_site = url_site)
url.save()
def generate_key():
    to_choose = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
url_id = ""
while len(url_id) != 6:
i = randint(0, len(to_choose) - 1)
url_id += to_choose[i]
return url_id
def redirect_url(request, url_id=None):
try:
url = Url.objects.get(url_id = url_id)
url.url_clicked = url.url_clicked + 1
url.save()
except Url.DoesNotExist:
return render(request, "base/page_not_found.html", {})
return HttpResponseRedirect(url.url_site)
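# --- Hedged wiring sketch (regexes and names are assumptions, not from this project) ---
# Django <2.0 style routing, matching the era of this file; the 6-character
# pattern mirrors the key length produced by generate_key().
from django.conf.urls import url

urlpatterns_example = [
    url(r'^$', index, name='index'),
    url(r'^make/$', make_url, name='make_url'),
    url(r'^(?P<url_id>[0-9a-zA-Z]{6})/$', redirect_url, name='redirect_url'),
]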
|
luisalves05/shortener-url
|
src/apps/miudo/views.py
|
Python
|
mit
| 1,890
|
from .tables import Base, Component, Mixture, Ref, Measurement, Listing, Property
from .utils import get_or_create
__all__ = ['Base', 'Component', 'Mixture', 'Ref', 'Measurement', 'Listing', 'Property', 'get_or_create']
|
Hariri-Institute-SAIL/materials
|
materials/db/__init__.py
|
Python
|
mit
| 221
|
import tornado.web
import tornado.ioloop
import os
from handlers import *
urls = [
(r'/', IndexHandler),
(r'/api/(?P<action>[a-zA-Z0-9-_]+)', ApiServiceHandler),
(r'/about', AboutHandler),
]
settings = {
"static_path" : os.path.join(os.path.dirname(__file__), "static"),
"template_path" : os.path.join(os.path.dirname(__file__), "templates"),
"debug" : True,
"gzip" : True,
"cookie_secret" : "asdf"
}
def main(addr):
application = tornado.web.Application(urls, **settings)
application.listen(8080, addr)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main("127.0.0.1")
|
plusplus7/EasyMemo2
|
server.py
|
Python
|
mit
| 666
|
from __future__ import absolute_import
import logging
try:
from redis import Redis
from rq import Queue
except ImportError:
Redis = None
Queue = None
from kaneda.exceptions import ImproperlyConfigured
from .base import BaseQueue
class RQQueue(BaseQueue):
"""
RQ queue
:param queue: queue instance of RQ class.
:param redis_url: Redis connection url where RQ will attend the async reporting requests.
:param queue_name: name of the queue being used by the RQ worker process.
"""
settings_namespace = 'RQ'
def __init__(self, queue=None, redis_url=None, queue_name='kaneda'):
if not Redis:
raise ImproperlyConfigured('You need to install redis to use the RQ queue.')
if not Queue:
raise ImproperlyConfigured('You need to install rq library to use the RQ queue.')
if queue:
if not isinstance(queue, Queue):
raise ImproperlyConfigured('"queue" parameter is not an instance of RQ queue.')
self.queue = queue
elif redis_url:
self.queue = Queue(queue_name, connection=Redis.from_url(redis_url))
else:
self.queue = Queue(queue_name, connection=Redis())
def report(self, name, metric, value, tags, id_):
try:
return self.queue.enqueue('kaneda.tasks.rq.report', name, metric, value, tags, id_)
except Exception as e:
logger = logging.getLogger(__name__)
logger.exception(e)
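# --- Illustrative usage (a hedged sketch; the Redis URL is a placeholder) ---
def example_report():
    """Enqueue a single metric through the RQ-backed queue."""
    queue = RQQueue(redis_url='redis://localhost:6379/0', queue_name='kaneda')
    return queue.report(name='requests', metric='gauge', value=42,
                        tags={'host': 'web1'}, id_=None)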
|
APSL/kaneda
|
kaneda/queues/rq.py
|
Python
|
mit
| 1,504
|
"""
.. module:: radical.pilot.controller.pilot_launcher_worker
.. moduleauthor:: Ole Weidner <ole.weidner@rutgers.edu>
"""
__copyright__ = "Copyright 2013-2014, http://radical.rutgers.edu"
__license__ = "MIT"
from unit_manager_controller import UnitManagerController
from pilot_manager_controller import PilotManagerController
from input_file_transfer_worker import InputFileTransferWorker
from output_file_transfer_worker import OutputFileTransferWorker
|
JensTimmerman/radical.pilot
|
src/radical/pilot/controller/__init__.py
|
Python
|
mit
| 462
|
class MiddlewareMixin(object):
def __init__(self, get_response=None):
super(MiddlewareMixin, self).__init__()
|
rollbar/pyrollbar
|
rollbar/contrib/django/utils.py
|
Python
|
mit
| 122
|
from json import JSONDecodeError
from flask import Blueprint, Flask, make_response, Response
from flask_cors import CORS
from google.appengine.api import wrap_wsgi_app
from werkzeug.routing import BaseConverter
from backend.api.handlers.district import (
district_events,
district_list_year,
district_rankings,
district_teams,
)
from backend.api.handlers.error import handle_404
from backend.api.handlers.event import (
event,
event_awards,
event_detail,
event_list_all,
event_list_year,
event_matches,
event_playoff_advancement,
event_teams,
event_teams_statuses,
)
from backend.api.handlers.helpers.profiled_jsonify import profiled_jsonify
from backend.api.handlers.match import match, zebra_motionworks
from backend.api.handlers.media import media_tags
from backend.api.handlers.status import status
from backend.api.handlers.team import (
team,
team_awards,
team_event_awards,
team_event_matches,
team_event_status,
team_events,
team_events_statuses_year,
team_history_districts,
team_history_robots,
team_list,
team_list_all,
team_matches,
team_media_tag,
team_media_year,
team_social_media,
team_years_participated,
)
from backend.api.handlers.trusted import (
add_event_media,
add_match_video,
add_match_zebra_motionworks_info,
delete_all_event_matches,
delete_event_matches,
update_event_alliances,
update_event_awards,
update_event_info,
update_event_matches,
update_event_rankings,
update_teams,
)
from backend.common.datafeed_parsers.exceptions import ParserInputException
from backend.common.flask_cache import configure_flask_cache
from backend.common.logging import configure_logging
from backend.common.middleware import install_middleware
from backend.common.url_converters import install_url_converters
class SimpleModelTypeConverter(BaseConverter):
regex = r"simple"
class ModelTypeConverter(BaseConverter):
regex = r"simple|keys"
class EventDetailTypeConverter(BaseConverter):
regex = r"alliances|district_points|insights|oprs|predictions|rankings"
configure_logging()
app = Flask(__name__)
app.wsgi_app = wrap_wsgi_app(app.wsgi_app)
install_middleware(app)
install_url_converters(app)
configure_flask_cache(app)
app.config["JSONIFY_PRETTYPRINT_REGULAR"] = True
app.url_map.converters["simple_model_type"] = SimpleModelTypeConverter
app.url_map.converters["model_type"] = ModelTypeConverter
app.url_map.converters["event_detail_type"] = EventDetailTypeConverter
api_v3 = Blueprint("apiv3", __name__, url_prefix="/api/v3")
CORS(
api_v3,
origins="*",
methods=["OPTIONS", "GET"],
allow_headers=["X-TBA-Auth-Key", "If-None-Match", "If-Modified-Since"],
)
# Overall Status
api_v3.add_url_rule("/status", view_func=status)
# District
api_v3.add_url_rule("/district/<string:district_key>/events", view_func=district_events)
api_v3.add_url_rule(
"/district/<string:district_key>/events/<model_type:model_type>",
view_func=district_events,
)
api_v3.add_url_rule("/district/<string:district_key>/teams", view_func=district_teams)
api_v3.add_url_rule(
"/district/<string:district_key>/teams/<model_type:model_type>",
view_func=district_teams,
)
api_v3.add_url_rule(
"/district/<string:district_key>/rankings", view_func=district_rankings
)
# District List
api_v3.add_url_rule("/districts/<int:year>", view_func=district_list_year)
# Event
api_v3.add_url_rule("/event/<string:event_key>", view_func=event)
api_v3.add_url_rule(
"/event/<string:event_key>/<simple_model_type:model_type>", view_func=event
)
api_v3.add_url_rule(
"/event/<string:event_key>/<event_detail_type:detail_type>",
view_func=event_detail,
)
api_v3.add_url_rule("/event/<string:event_key>/teams", view_func=event_teams)
api_v3.add_url_rule(
"/event/<string:event_key>/teams/<model_type:model_type>",
view_func=event_teams,
)
api_v3.add_url_rule(
"/event/<string:event_key>/teams/statuses", view_func=event_teams_statuses
)
api_v3.add_url_rule("event/<string:event_key>/matches", view_func=event_matches)
# api_v3.add_url_rule("event/<string:event_key>/matches/timeseries", view_func=TODO)
api_v3.add_url_rule(
"/event/<string:event_key>/matches/<model_type:model_type>",
view_func=event_matches,
)
api_v3.add_url_rule("/event/<string:event_key>/awards", view_func=event_awards)
api_v3.add_url_rule(
"/event/<string:event_key>/playoff_advancement", view_func=event_playoff_advancement
)
# Event List
api_v3.add_url_rule("/events/all", view_func=event_list_all)
api_v3.add_url_rule("/events/all/<model_type:model_type>", view_func=event_list_all)
api_v3.add_url_rule("/events/<int:year>", view_func=event_list_year)
api_v3.add_url_rule(
"/events/<int:year>/<model_type:model_type>", view_func=event_list_year
)
# Match
api_v3.add_url_rule("/match/<string:match_key>", view_func=match)
api_v3.add_url_rule(
"/match/<string:match_key>/<simple_model_type:model_type>", view_func=match
)
# api_v3.add_url_rule("/match/<string:match_key>/timeseries", view_func=TODO)
api_v3.add_url_rule(
"/match/<string:match_key>/zebra_motionworks", view_func=zebra_motionworks
)
# Media
api_v3.add_url_rule("/media/tags", view_func=media_tags)
# Team
api_v3.add_url_rule("/team/<string:team_key>", view_func=team)
api_v3.add_url_rule(
"/team/<string:team_key>/<simple_model_type:model_type>", view_func=team
)
# Team History
api_v3.add_url_rule(
"/team/<string:team_key>/years_participated", view_func=team_years_participated
)
api_v3.add_url_rule(
"/team/<string:team_key>/districts", view_func=team_history_districts
)
api_v3.add_url_rule("/team/<string:team_key>/robots", view_func=team_history_robots)
api_v3.add_url_rule("/team/<string:team_key>/social_media", view_func=team_social_media)
# Team Events
api_v3.add_url_rule("/team/<string:team_key>/events", view_func=team_events)
api_v3.add_url_rule(
"/team/<string:team_key>/events/<model_type:model_type>", view_func=team_events
)
api_v3.add_url_rule("/team/<string:team_key>/events/<int:year>", view_func=team_events)
api_v3.add_url_rule(
"/team/<string:team_key>/events/<int:year>/<model_type:model_type>",
view_func=team_events,
)
api_v3.add_url_rule(
"/team/<string:team_key>/events/<int:year>/statuses",
view_func=team_events_statuses_year,
)
# Team @ Event
api_v3.add_url_rule(
"/team/<string:team_key>/event/<string:event_key>/matches",
view_func=team_event_matches,
)
api_v3.add_url_rule(
"/team/<string:team_key>/event/<string:event_key>/matches/<model_type:model_type>",
view_func=team_event_matches,
)
api_v3.add_url_rule(
"/team/<string:team_key>/event/<string:event_key>/awards",
view_func=team_event_awards,
)
api_v3.add_url_rule(
"/team/<string:team_key>/event/<string:event_key>/status",
view_func=team_event_status,
)
# Team Awards
api_v3.add_url_rule("/team/<string:team_key>/awards", view_func=team_awards)
api_v3.add_url_rule("/team/<string:team_key>/awards/<int:year>", view_func=team_awards)
# Team Matches
api_v3.add_url_rule(
"/team/<string:team_key>/matches/<int:year>", view_func=team_matches
)
api_v3.add_url_rule(
"/team/<string:team_key>/matches/<int:year>/<model_type:model_type>",
view_func=team_matches,
)
# Team Media
api_v3.add_url_rule(
"/team/<string:team_key>/media/<int:year>", view_func=team_media_year
)
api_v3.add_url_rule(
"/team/<string:team_key>/media/tag/<string:media_tag>", view_func=team_media_tag
)
api_v3.add_url_rule(
"/team/<string:team_key>/media/tag/<string:media_tag>/<int:year>",
view_func=team_media_tag,
)
# Team List
api_v3.add_url_rule("/teams/all", view_func=team_list_all)
api_v3.add_url_rule("/teams/all/<model_type:model_type>", view_func=team_list_all)
api_v3.add_url_rule("/teams/<int:page_num>", view_func=team_list)
api_v3.add_url_rule(
"/teams/<int:page_num>/<model_type:model_type>", view_func=team_list
)
api_v3.add_url_rule("/teams/<int:year>/<int:page_num>", view_func=team_list)
api_v3.add_url_rule(
"/teams/<int:year>/<int:page_num>/<model_type:model_type>",
view_func=team_list,
)
# Trusted API
trusted_api = Blueprint("trusted_api", __name__, url_prefix="/api/trusted/v1")
CORS(
trusted_api,
origins="*",
methods=["OPTIONS", "POST"],
allow_headers=["Content-Type", "X-TBA-Auth-Id", "X-TBA-Auth-Sig"],
)
trusted_api.add_url_rule(
    "/event/<string:event_key>/alliance_selections/update",
    methods=["POST"],
    view_func=update_event_alliances,
)
trusted_api.add_url_rule(
    "/event/<string:event_key>/awards/update",
    methods=["POST"],
    view_func=update_event_awards,
)
trusted_api.add_url_rule(
    "/event/<string:event_key>/info/update",
    methods=["POST"],
    view_func=update_event_info,
)
trusted_api.add_url_rule(
"/event/<string:event_key>/matches/update",
methods=["POST"],
view_func=update_event_matches,
)
trusted_api.add_url_rule(
"/event/<string:event_key>/matches/delete",
methods=["POST"],
view_func=delete_event_matches,
)
trusted_api.add_url_rule(
"/event/<string:event_key>/matches/delete_all",
methods=["POST"],
view_func=delete_all_event_matches,
)
trusted_api.add_url_rule(
"/event/<string:event_key>/match_videos/add",
methods=["POST"],
view_func=add_match_video,
)
trusted_api.add_url_rule(
"/event/<string:event_key>/media/add",
methods=["POST"],
view_func=add_event_media,
)
trusted_api.add_url_rule(
"/event/<string:event_key>/rankings/update",
methods=["POST"],
view_func=update_event_rankings,
)
trusted_api.add_url_rule(
"/event/<string:event_key>/team_list/update",
methods=["POST"],
view_func=update_teams,
)
trusted_api.add_url_rule(
"/event/<string:event_key>/zebra_motionworks/add",
methods=["POST"],
view_func=add_match_zebra_motionworks_info,
)
@trusted_api.errorhandler(JSONDecodeError)
@trusted_api.errorhandler(ParserInputException)
def handle_bad_input(e: Exception) -> Response:
return make_response(profiled_jsonify({"Error": f"{e}"}), 400)
app.register_blueprint(api_v3)
app.register_blueprint(trusted_api)
app.register_error_handler(404, handle_404)
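# --- Illustrative smoke test (a hedged sketch; the routes exercised are examples) ---
def example_smoke_test():
    """Hit a couple of the routes registered above with Flask's test client."""
    with app.test_client() as client:
        print(client.get("/api/v3/status").status_code)
        print(client.get("/api/v3/teams/0").status_code)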
|
the-blue-alliance/the-blue-alliance
|
src/backend/api/main.py
|
Python
|
mit
| 10,215
|
import _plotly_utils.basevalidators
class LegendgroupValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(self, plotly_name="legendgroup", parent_name="choropleth", **kwargs):
super(LegendgroupValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
**kwargs
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/choropleth/_legendgroup.py
|
Python
|
mit
| 412
|
#!/usr/bin/env python
from distutils.core import setup
execfile('modlunky/version.py')
with open('requirements.txt') as requirements:
required = requirements.read().splitlines()
kwargs = {
"name": "modlunky",
"version": str(__version__),
"packages": ["modlunky"],
"scripts": ["bin/modlunky"],
"description": "Library and Command Line Tool for Spelunky.",
"author": "Gary M. Josack",
"maintainer": "Gary M. Josack",
"author_email": "gary@byoteki.com",
"maintainer_email": "gary@byoteki.com",
"license": "MIT",
"url": "https://github.com/gmjosack/modlunky",
"download_url": "https://github.com/gmjosack/modlunky/archive/master.tar.gz",
"classifiers": [
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Libraries :: Python Modules",
]
}
if required:
kwargs["install_requires"] = required
setup(**kwargs)
|
gmjosack/modlunky
|
setup.py
|
Python
|
mit
| 1,002
|
# -*- coding: utf-8 -*-
import datetime
import nose
from nose.tools import assert_equal
from pyomni.object.task import OmniTask
class TestOmniTask(object):
def test_get_xml(self):
return
if __name__ == '__main__':
nose.main(argv=['nosetests', '-s', '-v'], defaultTest=__file__)
|
taxpon/pyomni
|
tests/object/test_task.py
|
Python
|
mit
| 299
|
#!/usr/bin/env python
# encoding: utf-8
# The MIT License (MIT)
# Copyright (c) 2014 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# AUTHORS
# Hervé BREDIN - http://herve.niderb.fr
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
PYANNOTE_URI = 'uri'
PYANNOTE_MODALITY = 'modality'
PYANNOTE_SEGMENT = 'segment'
PYANNOTE_TRACK = 'track'
PYANNOTE_LABEL = 'label'
PYANNOTE_SCORE = 'score'
PYANNOTE_IDENTITY = 'identity'
from .time import T, TStart, TEnd
from .segment import Segment, SlidingWindow
from .timeline import Timeline
from .annotation import Annotation
from .transcription import Transcription
from .scores import Scores
from .feature import SlidingWindowFeature
try:
from .notebook import notebook
except ImportError:
    pass
|
grantjenks/pyannote-core
|
pyannote/core/__init__.py
|
Python
|
mit
| 1,810
|
from .fcn import FCN
|
ellisdg/3DUnetCNN
|
unet3d/models/pytorch/fcn/__init__.py
|
Python
|
mit
| 21
|
import numpy as np
def zero_mean_normalize_image_data(data, axis=(0, 1, 2)):
return np.divide(data - data.mean(axis=axis), data.std(axis=axis))
def foreground_zero_mean_normalize_image_data(data, channel_dim=4, background_value=0, tolerance=1e-5):
data = np.copy(data)
if data.ndim == channel_dim or data.shape[channel_dim] == 1:
        # only 1 channel, so the std and mean calculations are straightforward
foreground_mask = np.abs(data) > (background_value + tolerance)
foreground = data[foreground_mask]
mean = foreground.mean()
std = foreground.std()
data[foreground_mask] = np.divide(foreground - mean, std)
return data
else:
# std and mean need to be calculated for each channel in the 4th dimension
for channel in range(data.shape[channel_dim]):
channel_data = data[..., channel]
channel_mask = np.abs(channel_data) > (background_value + tolerance)
channel_foreground = channel_data[channel_mask]
channel_mean = channel_foreground.mean()
channel_std = channel_foreground.std()
channel_data[channel_mask] = np.divide(channel_foreground - channel_mean, channel_std)
data[..., channel] = channel_data
return data
def zero_floor_normalize_image_data(data, axis=(0, 1, 2), floor_percentile=1, floor=0):
floor_threshold = np.percentile(data, floor_percentile, axis=axis)
if data.ndim != len(axis):
floor_threshold_shape = np.asarray(floor_threshold.shape * data.ndim)
floor_threshold_shape[np.asarray(axis)] = 1
floor_threshold = floor_threshold.reshape(floor_threshold_shape)
background = data <= floor_threshold
data = np.ma.masked_array(data - floor_threshold, mask=background)
std = data.std(axis=axis)
if data.ndim != len(axis):
std = std.reshape(floor_threshold_shape)
return np.divide(data, std).filled(floor)
def zero_one_window(data, axis=(0, 1, 2), ceiling_percentile=99, floor_percentile=1, floor=0, ceiling=1,
channels_axis=None):
"""
:param data: Numpy ndarray.
:param axis:
:param ceiling_percentile: Percentile value of the foreground to set to the ceiling.
:param floor_percentile: Percentile value of the image to set to the floor.
:param floor: New minimum value.
:param ceiling: New maximum value.
:param channels_axis:
:return:
"""
data = np.copy(data)
if len(axis) != data.ndim:
floor_threshold = np.percentile(data, floor_percentile, axis=axis)
if channels_axis is None:
channels_axis = find_channel_axis(data.ndim, axis=axis)
data = np.moveaxis(data, channels_axis, 0)
for channel in range(data.shape[0]):
channel_data = data[channel]
            # find the background
            bg_mask = channel_data <= floor_threshold[channel]
            # invert the background mask to select the foreground
            fg = channel_data[~bg_mask]
# find threshold based on foreground percentile
ceiling_threshold = np.percentile(fg, ceiling_percentile)
# normalize the data for this channel
data[channel] = window_data(channel_data, floor_threshold=floor_threshold[channel],
ceiling_threshold=ceiling_threshold, floor=floor, ceiling=ceiling)
data = np.moveaxis(data, 0, channels_axis)
else:
floor_threshold = np.percentile(data, floor_percentile)
fg_mask = data > floor_threshold
fg = data[fg_mask]
ceiling_threshold = np.percentile(fg, ceiling_percentile)
data = window_data(data, floor_threshold=floor_threshold, ceiling_threshold=ceiling_threshold, floor=floor,
ceiling=ceiling)
return data
def find_channel_axis(ndim, axis):
    channels_axis = None
    for i in range(ndim):
        if i not in axis and (i - ndim) not in axis:
            # the second check catches axes given as negative indices
            # (e.g. axis 3 of a 4-D array may be written as -1)
            channels_axis = i
    if channels_axis is None:
        raise ValueError("could not find a channel axis for ndim=%s and axis=%s" % (ndim, axis))
    return channels_axis
def static_windows(data, windows, floor=0, ceiling=1):
"""
Normalizes the data according to a set of predefined windows. This is helpful for CT normalization where the
units are static and radiologists often have a set of windowing parameters that the use that allow them to look at
different features in the image.
:param data: 3D numpy array.
:param windows:
:param floor: defaults to 0.
:param ceiling: defaults to 1.
:return: Array with data windows listed in the final dimension
"""
data = np.squeeze(data)
normalized_data = np.ones(data.shape + (len(windows),)) * floor
for i, (l, w) in enumerate(windows):
normalized_data[..., i] = radiology_style_windowing(data, l, w, floor=floor, ceiling=ceiling)
return normalized_data
def radiology_style_windowing(data, l, w, floor=0, ceiling=1):
    # l is the window level (center) and w is the window width
    upper = l + w/2
    lower = l - w/2
    return window_data(data, floor_threshold=lower, ceiling_threshold=upper, floor=floor, ceiling=ceiling)
def window_data(data, floor_threshold, ceiling_threshold, floor, ceiling):
data = (data - floor_threshold) / (ceiling_threshold - floor_threshold)
# set the data below the floor to equal the floor
data[data < floor] = floor
# set the data above the ceiling to equal the ceiling
data[data > ceiling] = ceiling
return data
def hist_match(source, template):
"""
Source: https://stackoverflow.com/a/33047048
Adjust the pixel values of a grayscale image such that its histogram
matches that of a target image
Arguments:
-----------
source: np.ndarray
Image to transform; the histogram is computed over the flattened
array
template: np.ndarray
Template image; can have different dimensions to source
Returns:
-----------
matched: np.ndarray
The transformed output image
"""
oldshape = source.shape
source = source.ravel()
template = template.ravel()
# get the set of unique pixel values and their corresponding indices and
# counts
s_values, bin_idx, s_counts = np.unique(source, return_inverse=True,
return_counts=True)
t_values, t_counts = np.unique(template, return_counts=True)
# take the cumsum of the counts and normalize by the number of pixels to
# get the empirical cumulative distribution functions for the source and
# template images (maps pixel value --> quantile)
s_quantiles = np.cumsum(s_counts).astype(np.float64)
s_quantiles /= s_quantiles[-1]
t_quantiles = np.cumsum(t_counts).astype(np.float64)
t_quantiles /= t_quantiles[-1]
# interpolate linearly to find the pixel values in the template image
# that correspond most closely to the quantiles in the source image
interp_t_values = np.interp(s_quantiles, t_quantiles, t_values)
return interp_t_values[bin_idx].reshape(oldshape)
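# --- Illustrative usage (a hedged sketch; the shapes and window values are examples) ---
def example_normalization():
    """Window a random CT-like volume into [0, 1] and build two static windows."""
    rng = np.random.RandomState(0)
    volume = rng.uniform(-1000, 1000, size=(16, 16, 16))
    windowed = zero_one_window(volume, axis=(0, 1, 2))
    assert 0 <= windowed.min() and windowed.max() <= 1
    # e.g. a (level=40, width=80) soft-tissue window and a (level=600, width=2800) bone window
    stacked = static_windows(volume, windows=[(40, 80), (600, 2800)])
    return windowed, stacked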
|
ellisdg/3DUnetCNN
|
unet3d/utils/normalize.py
|
Python
|
mit
| 7,176
|
from django.template.base import Library
from django.core.urlresolvers import reverse
from projects.models import ProjectBuild
register = Library()
@register.simple_tag()
def build_url(build_id):
"""
Fetches the ProjectBuild for a given build_id, if any.
"""
try:
build = ProjectBuild.objects.get(build_id=build_id)
return reverse(
"project_projectbuild_detail",
kwargs={"project_pk": build.project.pk, "build_pk": build.pk})
except ProjectBuild.DoesNotExist:
return ""
|
timrchavez/capomastro
|
projects/templatetags/projects_tags.py
|
Python
|
mit
| 543
|
from allocine import allocine
from constants import *
from elcinema import elcinema
from imdb import imdby as Imdb
from rotten import rotten
from tmdb import tmdb
def search(query, site):
if site == "imdb":
provider = Imdb()
elif site == "elcinema":
provider = elcinema()
elif site == "rottentomatoes":
provider = rotten(rotten_key)
elif site == "themoviedatabase":
provider = tmdb(tmdb_key)
elif site == "allocine":
provider = allocine()
results = provider.search(query)
return results
def info(movie_id, site):
if site == "imdb":
provider = Imdb()
elif site == "elcinema":
provider = elcinema()
elif site == "rottentomatoes":
provider = rotten(rotten_key)
elif site == "themoviedatabase":
provider = tmdb(tmdb_key)
elif site == "allocine":
provider = allocine()
result = provider.info(movie_id)
return result
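# --- Illustrative usage (a hedged sketch; the query is a placeholder and the
# shape of the search results depends on the provider) ---
def example_lookup():
    """Search a provider, then fetch details for the first hit."""
    results = search('The Matrix', 'imdb')
    return info(results[0], 'imdb') if results else None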
|
walidsa3d/shaman
|
shaman/providers/api.py
|
Python
|
mit
| 951
|
"""Access and control log capturing."""
import logging
import os
import re
import sys
from contextlib import contextmanager
from io import StringIO
from pathlib import Path
from typing import AbstractSet
from typing import Dict
from typing import Generator
from typing import List
from typing import Mapping
from typing import Optional
from typing import Tuple
from typing import TypeVar
from typing import Union
from _pytest import nodes
from _pytest._io import TerminalWriter
from _pytest.capture import CaptureManager
from _pytest.compat import final
from _pytest.compat import nullcontext
from _pytest.config import _strtobool
from _pytest.config import Config
from _pytest.config import create_terminal_writer
from _pytest.config import hookimpl
from _pytest.config import UsageError
from _pytest.config.argparsing import Parser
from _pytest.deprecated import check_ispytest
from _pytest.fixtures import fixture
from _pytest.fixtures import FixtureRequest
from _pytest.main import Session
from _pytest.store import StoreKey
from _pytest.terminal import TerminalReporter
DEFAULT_LOG_FORMAT = "%(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s"
DEFAULT_LOG_DATE_FORMAT = "%H:%M:%S"
_ANSI_ESCAPE_SEQ = re.compile(r"\x1b\[[\d;]+m")
caplog_handler_key = StoreKey["LogCaptureHandler"]()
caplog_records_key = StoreKey[Dict[str, List[logging.LogRecord]]]()
def _remove_ansi_escape_sequences(text: str) -> str:
return _ANSI_ESCAPE_SEQ.sub("", text)
class ColoredLevelFormatter(logging.Formatter):
"""A logging formatter which colorizes the %(levelname)..s part of the
log format passed to __init__."""
LOGLEVEL_COLOROPTS: Mapping[int, AbstractSet[str]] = {
logging.CRITICAL: {"red"},
logging.ERROR: {"red", "bold"},
logging.WARNING: {"yellow"},
logging.WARN: {"yellow"},
logging.INFO: {"green"},
logging.DEBUG: {"purple"},
logging.NOTSET: set(),
}
LEVELNAME_FMT_REGEX = re.compile(r"%\(levelname\)([+-.]?\d*s)")
def __init__(self, terminalwriter: TerminalWriter, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self._original_fmt = self._style._fmt
self._level_to_fmt_mapping: Dict[int, str] = {}
assert self._fmt is not None
levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt)
if not levelname_fmt_match:
return
levelname_fmt = levelname_fmt_match.group()
for level, color_opts in self.LOGLEVEL_COLOROPTS.items():
formatted_levelname = levelname_fmt % {
"levelname": logging.getLevelName(level)
}
# add ANSI escape sequences around the formatted levelname
color_kwargs = {name: True for name in color_opts}
colorized_formatted_levelname = terminalwriter.markup(
formatted_levelname, **color_kwargs
)
self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub(
colorized_formatted_levelname, self._fmt
)
def format(self, record: logging.LogRecord) -> str:
fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt)
self._style._fmt = fmt
return super().format(record)
class PercentStyleMultiline(logging.PercentStyle):
"""A logging style with special support for multiline messages.
If the message of a record consists of multiple lines, this style
formats the message as if each line were logged separately.
"""
def __init__(self, fmt: str, auto_indent: Union[int, str, bool, None]) -> None:
super().__init__(fmt)
self._auto_indent = self._get_auto_indent(auto_indent)
@staticmethod
def _update_message(
record_dict: Dict[str, object], message: str
) -> Dict[str, object]:
tmp = record_dict.copy()
tmp["message"] = message
return tmp
@staticmethod
def _get_auto_indent(auto_indent_option: Union[int, str, bool, None]) -> int:
"""Determine the current auto indentation setting.
Specify auto indent behavior (on/off/fixed) by passing in
extra={"auto_indent": [value]} to the call to logging.log() or
using a --log-auto-indent [value] command line or the
log_auto_indent [value] config option.
Default behavior is auto-indent off.
Using the string "True" or "on" or the boolean True as the value
turns auto indent on, using the string "False" or "off" or the
boolean False or the int 0 turns it off, and specifying a
positive integer fixes the indentation position to the value
specified.
Any other values for the option are invalid, and will silently be
converted to the default.
:param None|bool|int|str auto_indent_option:
User specified option for indentation from command line, config
or extra kwarg. Accepts int, bool or str. str option accepts the
same range of values as boolean config options, as well as
positive integers represented in str form.
:returns:
Indentation value, which can be
-1 (automatically determine indentation) or
0 (auto-indent turned off) or
>0 (explicitly set indentation position).
"""
if auto_indent_option is None:
return 0
elif isinstance(auto_indent_option, bool):
if auto_indent_option:
return -1
else:
return 0
elif isinstance(auto_indent_option, int):
return int(auto_indent_option)
elif isinstance(auto_indent_option, str):
try:
return int(auto_indent_option)
except ValueError:
pass
try:
if _strtobool(auto_indent_option):
return -1
except ValueError:
return 0
return 0
def format(self, record: logging.LogRecord) -> str:
if "\n" in record.message:
if hasattr(record, "auto_indent"):
# Passed in from the "extra={}" kwarg on the call to logging.log().
auto_indent = self._get_auto_indent(record.auto_indent) # type: ignore[attr-defined]
else:
auto_indent = self._auto_indent
if auto_indent:
lines = record.message.splitlines()
formatted = self._fmt % self._update_message(record.__dict__, lines[0])
if auto_indent < 0:
indentation = _remove_ansi_escape_sequences(formatted).find(
lines[0]
)
else:
# Optimizes logging by allowing a fixed indentation.
indentation = auto_indent
lines[0] = formatted
return ("\n" + " " * indentation).join(lines)
return self._fmt % record.__dict__
def get_option_ini(config: Config, *names: str):
for name in names:
ret = config.getoption(name) # 'default' arg won't work as expected
if ret is None:
ret = config.getini(name)
if ret:
return ret
def pytest_addoption(parser: Parser) -> None:
"""Add options to control log capturing."""
group = parser.getgroup("logging")
def add_option_ini(option, dest, default=None, type=None, **kwargs):
parser.addini(
dest, default=default, type=type, help="default value for " + option
)
group.addoption(option, dest=dest, **kwargs)
add_option_ini(
"--log-level",
dest="log_level",
default=None,
metavar="LEVEL",
help=(
"level of messages to catch/display.\n"
"Not set by default, so it depends on the root/parent log handler's"
' effective level, where it is "WARNING" by default.'
),
)
add_option_ini(
"--log-format",
dest="log_format",
default=DEFAULT_LOG_FORMAT,
help="log format as used by the logging module.",
)
add_option_ini(
"--log-date-format",
dest="log_date_format",
default=DEFAULT_LOG_DATE_FORMAT,
help="log date format as used by the logging module.",
)
parser.addini(
"log_cli",
default=False,
type="bool",
help='enable log display during test run (also known as "live logging").',
)
add_option_ini(
"--log-cli-level", dest="log_cli_level", default=None, help="cli logging level."
)
add_option_ini(
"--log-cli-format",
dest="log_cli_format",
default=None,
help="log format as used by the logging module.",
)
add_option_ini(
"--log-cli-date-format",
dest="log_cli_date_format",
default=None,
help="log date format as used by the logging module.",
)
add_option_ini(
"--log-file",
dest="log_file",
default=None,
help="path to a file when logging will be written to.",
)
add_option_ini(
"--log-file-level",
dest="log_file_level",
default=None,
help="log file logging level.",
)
add_option_ini(
"--log-file-format",
dest="log_file_format",
default=DEFAULT_LOG_FORMAT,
help="log format as used by the logging module.",
)
add_option_ini(
"--log-file-date-format",
dest="log_file_date_format",
default=DEFAULT_LOG_DATE_FORMAT,
help="log date format as used by the logging module.",
)
add_option_ini(
"--log-auto-indent",
dest="log_auto_indent",
default=None,
help="Auto-indent multiline messages passed to the logging module. Accepts true|on, false|off or an integer.",
)
_HandlerType = TypeVar("_HandlerType", bound=logging.Handler)
# Not using @contextmanager for performance reasons.
class catching_logs:
"""Context manager that prepares the whole logging machinery properly."""
__slots__ = ("handler", "level", "orig_level")
def __init__(self, handler: _HandlerType, level: Optional[int] = None) -> None:
self.handler = handler
self.level = level
def __enter__(self):
root_logger = logging.getLogger()
if self.level is not None:
self.handler.setLevel(self.level)
root_logger.addHandler(self.handler)
if self.level is not None:
self.orig_level = root_logger.level
root_logger.setLevel(min(self.orig_level, self.level))
return self.handler
def __exit__(self, type, value, traceback):
root_logger = logging.getLogger()
if self.level is not None:
root_logger.setLevel(self.orig_level)
root_logger.removeHandler(self.handler)
class LogCaptureHandler(logging.StreamHandler):
"""A logging handler that stores log records and the log text."""
stream: StringIO
def __init__(self) -> None:
"""Create a new log handler."""
super().__init__(StringIO())
self.records: List[logging.LogRecord] = []
def emit(self, record: logging.LogRecord) -> None:
"""Keep the log records in a list in addition to the log text."""
self.records.append(record)
super().emit(record)
def reset(self) -> None:
self.records = []
self.stream = StringIO()
def handleError(self, record: logging.LogRecord) -> None:
if logging.raiseExceptions:
# Fail the test if the log message is bad (emit failed).
# The default behavior of logging is to print "Logging error"
# to stderr with the call stack and some extra details.
# pytest wants to make such mistakes visible during testing.
raise
@final
class LogCaptureFixture:
"""Provides access and control of log capturing."""
def __init__(self, item: nodes.Node, *, _ispytest: bool = False) -> None:
check_ispytest(_ispytest)
self._item = item
self._initial_handler_level: Optional[int] = None
# Dict of log name -> log level.
self._initial_logger_levels: Dict[Optional[str], int] = {}
def _finalize(self) -> None:
"""Finalize the fixture.
This restores the log levels changed by :meth:`set_level`.
"""
# Restore log levels.
if self._initial_handler_level is not None:
self.handler.setLevel(self._initial_handler_level)
for logger_name, level in self._initial_logger_levels.items():
logger = logging.getLogger(logger_name)
logger.setLevel(level)
@property
def handler(self) -> LogCaptureHandler:
"""Get the logging handler used by the fixture.
:rtype: LogCaptureHandler
"""
return self._item._store[caplog_handler_key]
def get_records(self, when: str) -> List[logging.LogRecord]:
"""Get the logging records for one of the possible test phases.
:param str when:
Which test phase to obtain the records from. Valid values are: "setup", "call" and "teardown".
:returns: The list of captured records at the given stage.
:rtype: List[logging.LogRecord]
.. versionadded:: 3.4
"""
return self._item._store[caplog_records_key].get(when, [])
@property
def text(self) -> str:
"""The formatted log text."""
return _remove_ansi_escape_sequences(self.handler.stream.getvalue())
@property
def records(self) -> List[logging.LogRecord]:
"""The list of log records."""
return self.handler.records
@property
def record_tuples(self) -> List[Tuple[str, int, str]]:
"""A list of a stripped down version of log records intended
for use in assertion comparison.
The format of the tuple is:
(logger_name, log_level, message)
"""
return [(r.name, r.levelno, r.getMessage()) for r in self.records]
@property
def messages(self) -> List[str]:
"""A list of format-interpolated log messages.
Unlike 'records', which contains the format string and parameters for
interpolation, log messages in this list are all interpolated.
Unlike 'text', which contains the output from the handler, log
messages in this list are unadorned with levels, timestamps, etc,
making exact comparisons more reliable.
Note that traceback or stack info (from :func:`logging.exception` or
the `exc_info` or `stack_info` arguments to the logging functions) is
not included, as this is added by the formatter in the handler.
.. versionadded:: 3.7
"""
return [r.getMessage() for r in self.records]
def clear(self) -> None:
"""Reset the list of log records and the captured log text."""
self.handler.reset()
def set_level(self, level: Union[int, str], logger: Optional[str] = None) -> None:
"""Set the level of a logger for the duration of a test.
.. versionchanged:: 3.4
The levels of the loggers changed by this function will be
restored to their initial values at the end of the test.
:param int level: The level.
:param str logger: The logger to update. If not given, the root logger.
"""
logger_obj = logging.getLogger(logger)
# Save the original log-level to restore it during teardown.
self._initial_logger_levels.setdefault(logger, logger_obj.level)
logger_obj.setLevel(level)
if self._initial_handler_level is None:
self._initial_handler_level = self.handler.level
self.handler.setLevel(level)
@contextmanager
def at_level(
self, level: int, logger: Optional[str] = None
) -> Generator[None, None, None]:
"""Context manager that sets the level for capturing of logs. After
the end of the 'with' statement the level is restored to its original
value.
:param int level: The level.
:param str logger: The logger to update. If not given, the root logger.
"""
logger_obj = logging.getLogger(logger)
orig_level = logger_obj.level
logger_obj.setLevel(level)
handler_orig_level = self.handler.level
self.handler.setLevel(level)
try:
yield
finally:
logger_obj.setLevel(orig_level)
self.handler.setLevel(handler_orig_level)
@fixture
def caplog(request: FixtureRequest) -> Generator[LogCaptureFixture, None, None]:
"""Access and control log capturing.
Captured logs are available through the following properties/methods::
* caplog.messages -> list of format-interpolated log messages
* caplog.text -> string containing formatted log output
* caplog.records -> list of logging.LogRecord instances
* caplog.record_tuples -> list of (logger_name, level, message) tuples
* caplog.clear() -> clear captured records and formatted log output string
"""
result = LogCaptureFixture(request.node, _ispytest=True)
yield result
result._finalize()
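# --- Illustrative caplog usage (a hedged sketch; logger name and message are examples) ---
def _example_caplog_usage(caplog: LogCaptureFixture) -> None:
    """Not part of pytest itself: shows the typical assertion pattern."""
    with caplog.at_level(logging.INFO):
        logging.getLogger("myapp").info("hello %s", "world")
    assert caplog.record_tuples == [("myapp", logging.INFO, "hello world")]
    assert "hello world" in caplog.text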
def get_log_level_for_setting(config: Config, *setting_names: str) -> Optional[int]:
for setting_name in setting_names:
log_level = config.getoption(setting_name)
if log_level is None:
log_level = config.getini(setting_name)
if log_level:
break
else:
return None
if isinstance(log_level, str):
log_level = log_level.upper()
try:
return int(getattr(logging, log_level, log_level))
except ValueError as e:
# Python logging does not recognise this as a logging level
raise UsageError(
"'{}' is not recognized as a logging level name for "
"'{}'. Please consider passing the "
"logging level num instead.".format(log_level, setting_name)
) from e
# run after terminalreporter/capturemanager are configured
@hookimpl(trylast=True)
def pytest_configure(config: Config) -> None:
config.pluginmanager.register(LoggingPlugin(config), "logging-plugin")
class LoggingPlugin:
"""Attaches to the logging module and captures log messages for each test."""
def __init__(self, config: Config) -> None:
"""Create a new plugin to capture log messages.
The formatter can be safely shared across all handlers so
create a single one for the entire test session here.
"""
self._config = config
# Report logging.
self.formatter = self._create_formatter(
get_option_ini(config, "log_format"),
get_option_ini(config, "log_date_format"),
get_option_ini(config, "log_auto_indent"),
)
self.log_level = get_log_level_for_setting(config, "log_level")
self.caplog_handler = LogCaptureHandler()
self.caplog_handler.setFormatter(self.formatter)
self.report_handler = LogCaptureHandler()
self.report_handler.setFormatter(self.formatter)
# File logging.
self.log_file_level = get_log_level_for_setting(config, "log_file_level")
log_file = get_option_ini(config, "log_file") or os.devnull
if log_file != os.devnull:
directory = os.path.dirname(os.path.abspath(log_file))
if not os.path.isdir(directory):
os.makedirs(directory)
self.log_file_handler = _FileHandler(log_file, mode="w", encoding="UTF-8")
log_file_format = get_option_ini(config, "log_file_format", "log_format")
log_file_date_format = get_option_ini(
config, "log_file_date_format", "log_date_format"
)
log_file_formatter = logging.Formatter(
log_file_format, datefmt=log_file_date_format
)
self.log_file_handler.setFormatter(log_file_formatter)
# CLI/live logging.
self.log_cli_level = get_log_level_for_setting(
config, "log_cli_level", "log_level"
)
if self._log_cli_enabled():
terminal_reporter = config.pluginmanager.get_plugin("terminalreporter")
capture_manager = config.pluginmanager.get_plugin("capturemanager")
# if capturemanager plugin is disabled, live logging still works.
self.log_cli_handler: Union[
_LiveLoggingStreamHandler, _LiveLoggingNullHandler
] = _LiveLoggingStreamHandler(terminal_reporter, capture_manager)
else:
self.log_cli_handler = _LiveLoggingNullHandler()
log_cli_formatter = self._create_formatter(
get_option_ini(config, "log_cli_format", "log_format"),
get_option_ini(config, "log_cli_date_format", "log_date_format"),
get_option_ini(config, "log_auto_indent"),
)
self.log_cli_handler.setFormatter(log_cli_formatter)
def _create_formatter(self, log_format, log_date_format, auto_indent):
# Color option doesn't exist if terminal plugin is disabled.
color = getattr(self._config.option, "color", "no")
if color != "no" and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search(
log_format
):
formatter: logging.Formatter = ColoredLevelFormatter(
create_terminal_writer(self._config), log_format, log_date_format
)
else:
formatter = logging.Formatter(log_format, log_date_format)
formatter._style = PercentStyleMultiline(
formatter._style._fmt, auto_indent=auto_indent
)
return formatter
def set_log_path(self, fname: str) -> None:
"""Set the filename parameter for Logging.FileHandler().
Creates parent directory if it does not exist.
.. warning::
This is an experimental API.
"""
fpath = Path(fname)
if not fpath.is_absolute():
fpath = self._config.rootpath / fpath
if not fpath.parent.exists():
fpath.parent.mkdir(exist_ok=True, parents=True)
stream = fpath.open(mode="w", encoding="UTF-8")
if sys.version_info >= (3, 7):
old_stream = self.log_file_handler.setStream(stream)
else:
old_stream = self.log_file_handler.stream
self.log_file_handler.acquire()
try:
self.log_file_handler.flush()
self.log_file_handler.stream = stream
finally:
self.log_file_handler.release()
if old_stream:
old_stream.close()
def _log_cli_enabled(self):
"""Return whether live logging is enabled."""
enabled = self._config.getoption(
"--log-cli-level"
) is not None or self._config.getini("log_cli")
if not enabled:
return False
terminal_reporter = self._config.pluginmanager.get_plugin("terminalreporter")
if terminal_reporter is None:
# terminal reporter is disabled e.g. by pytest-xdist.
return False
return True
@hookimpl(hookwrapper=True, tryfirst=True)
def pytest_sessionstart(self) -> Generator[None, None, None]:
self.log_cli_handler.set_when("sessionstart")
with catching_logs(self.log_cli_handler, level=self.log_cli_level):
with catching_logs(self.log_file_handler, level=self.log_file_level):
yield
@hookimpl(hookwrapper=True, tryfirst=True)
def pytest_collection(self) -> Generator[None, None, None]:
self.log_cli_handler.set_when("collection")
with catching_logs(self.log_cli_handler, level=self.log_cli_level):
with catching_logs(self.log_file_handler, level=self.log_file_level):
yield
@hookimpl(hookwrapper=True)
def pytest_runtestloop(self, session: Session) -> Generator[None, None, None]:
if session.config.option.collectonly:
yield
return
if self._log_cli_enabled() and self._config.getoption("verbose") < 1:
# The verbose flag is needed to avoid messy test progress output.
self._config.option.verbose = 1
with catching_logs(self.log_cli_handler, level=self.log_cli_level):
with catching_logs(self.log_file_handler, level=self.log_file_level):
yield # Run all the tests.
@hookimpl
def pytest_runtest_logstart(self) -> None:
self.log_cli_handler.reset()
self.log_cli_handler.set_when("start")
@hookimpl
def pytest_runtest_logreport(self) -> None:
self.log_cli_handler.set_when("logreport")
def _runtest_for(self, item: nodes.Item, when: str) -> Generator[None, None, None]:
"""Implement the internals of the pytest_runtest_xxx() hooks."""
with catching_logs(
self.caplog_handler,
level=self.log_level,
) as caplog_handler, catching_logs(
self.report_handler,
level=self.log_level,
) as report_handler:
caplog_handler.reset()
report_handler.reset()
item._store[caplog_records_key][when] = caplog_handler.records
item._store[caplog_handler_key] = caplog_handler
yield
log = report_handler.stream.getvalue().strip()
item.add_report_section(when, "log", log)
@hookimpl(hookwrapper=True)
def pytest_runtest_setup(self, item: nodes.Item) -> Generator[None, None, None]:
self.log_cli_handler.set_when("setup")
empty: Dict[str, List[logging.LogRecord]] = {}
item._store[caplog_records_key] = empty
yield from self._runtest_for(item, "setup")
@hookimpl(hookwrapper=True)
def pytest_runtest_call(self, item: nodes.Item) -> Generator[None, None, None]:
self.log_cli_handler.set_when("call")
yield from self._runtest_for(item, "call")
@hookimpl(hookwrapper=True)
def pytest_runtest_teardown(self, item: nodes.Item) -> Generator[None, None, None]:
self.log_cli_handler.set_when("teardown")
yield from self._runtest_for(item, "teardown")
del item._store[caplog_records_key]
del item._store[caplog_handler_key]
@hookimpl
def pytest_runtest_logfinish(self) -> None:
self.log_cli_handler.set_when("finish")
@hookimpl(hookwrapper=True, tryfirst=True)
def pytest_sessionfinish(self) -> Generator[None, None, None]:
self.log_cli_handler.set_when("sessionfinish")
with catching_logs(self.log_cli_handler, level=self.log_cli_level):
with catching_logs(self.log_file_handler, level=self.log_file_level):
yield
@hookimpl
def pytest_unconfigure(self) -> None:
# Close the FileHandler explicitly.
# (logging.shutdown might have lost the weakref?!)
self.log_file_handler.close()
class _FileHandler(logging.FileHandler):
"""A logging FileHandler with pytest tweaks."""
def handleError(self, record: logging.LogRecord) -> None:
# Handled by LogCaptureHandler.
pass
class _LiveLoggingStreamHandler(logging.StreamHandler):
"""A logging StreamHandler used by the live logging feature: it will
write a newline before the first log message in each test.
During live logging we must also explicitly disable stdout/stderr
capturing otherwise it will get captured and won't appear in the
terminal.
"""
# Officially stream needs to be a IO[str], but TerminalReporter
# isn't. So force it.
stream: TerminalReporter = None # type: ignore
def __init__(
self,
terminal_reporter: TerminalReporter,
capture_manager: Optional[CaptureManager],
) -> None:
logging.StreamHandler.__init__(self, stream=terminal_reporter) # type: ignore[arg-type]
self.capture_manager = capture_manager
self.reset()
self.set_when(None)
self._test_outcome_written = False
def reset(self) -> None:
"""Reset the handler; should be called before the start of each test."""
self._first_record_emitted = False
def set_when(self, when: Optional[str]) -> None:
"""Prepare for the given test phase (setup/call/teardown)."""
self._when = when
self._section_name_shown = False
if when == "start":
self._test_outcome_written = False
def emit(self, record: logging.LogRecord) -> None:
ctx_manager = (
self.capture_manager.global_and_fixture_disabled()
if self.capture_manager
else nullcontext()
)
with ctx_manager:
if not self._first_record_emitted:
self.stream.write("\n")
self._first_record_emitted = True
elif self._when in ("teardown", "finish"):
if not self._test_outcome_written:
self._test_outcome_written = True
self.stream.write("\n")
if not self._section_name_shown and self._when:
self.stream.section("live log " + self._when, sep="-", bold=True)
self._section_name_shown = True
super().emit(record)
def handleError(self, record: logging.LogRecord) -> None:
# Handled by LogCaptureHandler.
pass
class _LiveLoggingNullHandler(logging.NullHandler):
"""A logging handler used when live logging is disabled."""
def reset(self) -> None:
pass
def set_when(self, when: str) -> None:
pass
def handleError(self, record: logging.LogRecord) -> None:
# Handled by LogCaptureHandler.
pass
|
nicoddemus/pytest
|
src/_pytest/logging.py
|
Python
|
mit
| 29,805
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-18 06:21
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('CareerTinder', '0004_auto_20160918_0152'),
]
operations = [
migrations.RenameField(
model_name='hiree',
old_name='first_name',
new_name='email',
),
migrations.RenameField(
model_name='hiree',
old_name='last_name',
new_name='name',
),
]
|
sarojaerabelli/HVGS
|
CareerTinderServer/CareerTinder/migrations/0005_auto_20160918_0221.py
|
Python
|
mit
| 573
|
# Database
DB_NAME = 'censusreporter_ke'
DB_USER = 'censusreporter_ke'
DB_PASSWORD = 'censusreporter_ke'
|
callmealien/wazimap_zambia
|
censusreporter/api/config.py
|
Python
|
mit
| 106
|
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
import bcrypt
class BCryptPassword(object):
def __init__(self, hash_):
self.hash = str(hash_)
def __eq__(self, value):
if not self.hash or not value:
# For security reasons we never consider an empty password/hash valid
return False
if isinstance(value, unicode):
value = value.encode('utf-8')
return bcrypt.checkpw(value, self.hash)
def __ne__(self, other):
return not (self == other)
def __hash__(self): # pragma: no cover
return hash(self.hash)
def __repr__(self):
return '<BCryptPassword({})>'.format(self.hash)
@staticmethod
def hash(value):
if isinstance(value, unicode):
value = value.encode('utf-8')
return bcrypt.hashpw(value, bcrypt.gensalt())
class PasswordProperty(object):
"""Defines a hashed password property.
When reading this property, it will return an object which will
let you use the ``==`` operator to compare the password against
a plaintext password. When assigning a value to it, it will be
hashed and stored in :attr:`attr` of the containing object.
:param attr: The attribute of the containing object where the
password hash is stored.
:param backend: The password backend that handles hashing/checking
passwords.
"""
def __init__(self, attr, backend=BCryptPassword):
self.attr = attr
self.backend = backend
def __get__(self, instance, owner):
return self.backend(getattr(instance, self.attr, None)) if instance is not None else self
def __set__(self, instance, value):
if not value:
raise ValueError('Password may not be empty')
setattr(instance, self.attr, self.backend.hash(value))
def __delete__(self, instance):
setattr(instance, self.attr, None)
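# Hedged usage sketch (the User class and its password_hash attribute are
# illustrative, not part of this module):
#
# class User(object):
#     password_hash = None
#     password = PasswordProperty('password_hash')
#
# user = User()
# user.password = 'secret'           # bcrypt hash stored in password_hash
# assert user.password == 'secret'   # compares via BCryptPassword.__eq__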
|
mic4ael/indico
|
indico/util/passwords.py
|
Python
|
mit
| 2,145
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_pesapal.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
nickaigi/django-pesapal
|
manage.py
|
Python
|
mit
| 257
|
import os
from appdirs import AppDirs
class Config:
def __init__(self):
self.app_dir = get_app_dir()
def save_geometry(self, window):
path = os.path.join(self.app_dir, 'geometry.conf')
print(path)
with open(path, "w") as conf:
conf.write(window.geometry())
def read_geometry(self):
path = os.path.join(self.app_dir, 'geometry.conf')
try:
with open(path, "r") as conf:  # read mode; "w" would truncate the saved geometry
geometry = conf.read()
# TODO Validate!
except IOError:
geometry = ''
print(geometry)
return geometry
def get_app_dir():
app_dirs = AppDirs('tkdraw', 'tkdraw')
user_config_dir = os.path.normpath(app_dirs.user_config_dir)
if not os.path.isdir(user_config_dir):
os.makedirs(user_config_dir)
return user_config_dir
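# Hedged usage sketch (assumes Python 3 / tkinter; the window wiring is
# illustrative):
#
# import tkinter as tk
# root = tk.Tk()
# config = Config()
# geometry = config.read_geometry()
# if geometry:
#     root.geometry(geometry)
# root.protocol('WM_DELETE_WINDOW',
#               lambda: (config.save_geometry(root), root.destroy()))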
|
bsiegfreid/tkdraw
|
tkdraw/config.py
|
Python
|
mit
| 859
|
import json
from tornado import gen
from tornado.httpclient import AsyncHTTPClient, HTTPError
from .exceptions import SparkPostAPIException
class TornadoTransport(object):
@gen.coroutine
def request(self, method, uri, headers, **kwargs):
if "data" in kwargs:
kwargs["body"] = kwargs.pop("data")
client = AsyncHTTPClient()
try:
response = yield client.fetch(uri, method=method, headers=headers,
**kwargs)
except HTTPError as ex:
raise SparkPostAPIException(ex.response)
if response.code == 204:
raise gen.Return(True)
if response.code == 200:
result = None
try:
result = json.loads(response.body.decode("utf-8"))
except:
pass
if result:
if 'results' in result:
raise gen.Return(result['results'])
raise gen.Return(result)
raise SparkPostAPIException(response)
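# Hedged usage sketch (endpoint and API key are illustrative; assumes a
# running Tornado IOLoop):
#
# @gen.coroutine
# def fetch_templates():
#     transport = TornadoTransport()
#     results = yield transport.request(
#         'GET', 'https://api.sparkpost.com/api/v1/templates',
#         headers={'Authorization': 'MY_API_KEY'})
#     raise gen.Return(results)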
|
thonkify/thonkify
|
src/lib/sparkpost/tornado/base.py
|
Python
|
mit
| 1,048
|
#!C:\Python27\python.exe
# Filename: GenericBytecode.py
# -*- coding: utf-8 -*-
import os
import Settings
'''
Generic Bytecode
Simply add, remove or modify bytecode for use in KHMS
'''
createFrame = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1',
'iconst_1', 'iadd', 'putfield', 'iload_1', 'aload_0', 'getfield', \
'invokevirtual', 'iadd', 'i2b', 'bastore', 'return']
writeDWord = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'bipush', 'ishr', \
'i2b', 'bastore', 'aload_0', 'getfield', 'aload_0', 'dup', \
'getfield', 'dup_x1', 'iconst_1', 'iadd', 'putfield', 'iload_1', \
'bipush', 'ishr', 'i2b', 'bastore', 'aload_0', 'getfield', \
'aload_0', 'dup', 'getfield', 'dup_x1', 'iconst_1', 'iadd', \
'putfield', 'iload_1', 'bipush', 'ishr', 'i2b', 'bastore', \
'aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'i2b', 'bastore', 'return']
# writeWordBigEndian = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', \
# 'dup_x1', 'iconst_1', 'iadd', 'putfield', 'iload_1', \
# 'i2b', 'bastore', 'return', 'aload_0', 'dup', 'getfield', \
# 'iconst_3', 'iadd', 'putfield', 'sipush', 'aload_0', \
# 'getfield', 'aload_0', 'getfield', 'iconst_3', 'isub', \
# 'baload', 'bipush', 'ishl', 'iand', 'sipush', 'aload_0', \
# 'getfield', 'aload_0', 'getfield', 'iconst_2', 'isub', \
# 'baload', 'bipush', 'ishl', 'iand', 'iadd', 'sipush', \
# 'aload_0', 'getfield', 'aload_0', 'getfield', 'iconst_1', \
# 'isub', 'baload', 'iand', 'iadd', 'ireturn']
writeWordBigEndian = ['aload_0',
'getfield Stream/buffer [B',
'aload_0',
'dup',
'getfield Stream/currentOffset I',
'dup_x1',
'iconst_1',
'iadd',
'putfield Stream/currentOffset I',
'iload_1',
'i2b',
'bastore',
'return']
writeWord = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'bipush', 'ishr', \
'i2b', 'bastore', 'aload_0', 'getfield', 'aload_0', 'dup', \
'getfield', 'dup_x1', 'iconst_1', 'iadd', 'putfield', 'iload_1', \
'i2b', 'bastore', 'return']
writeDWordBigEndian = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', \
'dup_x1', 'iconst_1', 'iadd', 'putfield', 'iload_1', \
'bipush', 'ishr', 'i2b', 'bastore', 'aload_0', \
'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'bipush', \
'ishr', 'i2b', 'bastore', 'aload_0', 'getfield', \
'aload_0', 'dup', 'getfield', 'dup_x1', 'iconst_1', \
'iadd', 'putfield', 'iload_1', 'i2b', 'bastore', 'return']
method403 = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'i2b', 'bastore', \
'aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'bipush', 'ishr', \
'i2b', 'bastore', 'aload_0', 'getfield', 'aload_0', 'dup', \
'getfield', 'dup_x1', 'iconst_1', 'iadd', 'putfield', 'iload_1', \
'bipush', 'ishr', 'i2b', 'bastore', 'aload_0', 'getfield', \
'aload_0', 'dup', 'getfield', 'dup_x1', 'iconst_1', 'iadd', \
'putfield', 'iload_1', 'bipush', 'ishr', 'i2b', 'bastore', 'return']
writeQWord = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'lload_1', 'bipush', 'lshr', \
'l2i', 'i2b', 'bastore', 'aload_0', 'getfield', 'aload_0', \
'dup', 'getfield', 'dup_x1', 'iconst_1', 'iadd', 'putfield', \
'lload_1', 'bipush', 'lshr', 'l2i', 'i2b', 'bastore', 'aload_0', \
'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', 'iconst_1', \
'iadd', 'putfield', 'lload_1', 'bipush', 'lshr', 'l2i', 'i2b', \
'bastore', 'aload_0', 'getfield', 'aload_0', 'dup', 'getfield', \
'dup_x1', 'iconst_1', 'iadd', 'putfield', 'lload_1', 'bipush', \
'lshr', 'l2i', 'i2b', 'bastore', 'aload_0', 'getfield', \
'aload_0', 'dup', 'getfield', 'dup_x1', 'iconst_1', 'iadd', \
'putfield', 'lload_1', 'bipush', 'lshr', 'l2i', 'i2b', \
'bastore', 'aload_0', 'getfield', 'aload_0', 'dup', 'getfield', \
'dup_x1', 'iconst_1', 'iadd', 'putfield', 'lload_1', 'bipush', \
'lshr', 'l2i', 'i2b', 'bastore', 'aload_0', 'getfield', \
'aload_0', 'dup', 'getfield', 'dup_x1', 'iconst_1', 'iadd', \
'putfield', 'lload_1', 'bipush', 'lshr', 'l2i', 'i2b', \
'bastore', 'aload_0', 'getfield', 'aload_0', 'dup', 'getfield', \
'dup_x1', 'iconst_1', 'iadd', 'putfield', 'lload_1', 'l2i', \
'i2b', 'bastore', 'goto', 'astore_3', 'new', 'dup', \
'invokespecial', 'ldc', 'invokevirtual', 'lload_1', \
'invokevirtual', 'ldc', 'invokevirtual', 'aload_3', \
'invokevirtual', 'invokevirtual', 'invokevirtual', \
'invokestatic', 'new', 'dup', 'invokespecial', 'athrow', 'return']
writeString = ['aload_1', 'invokevirtual', 'iconst_0', 'aload_0', 'getfield', \
'aload_0', 'getfield', 'aload_1', 'invokevirtual', \
'invokestatic', 'aload_0', 'dup', 'getfield', 'aload_1', \
'invokevirtual', 'iadd', 'putfield', 'aload_0', 'getfield', \
'aload_0', 'dup', 'getfield', 'dup_x1', 'iconst_1', 'iadd', \
'putfield', 'bipush', 'bastore', 'return']
method424 = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'ineg', 'i2b', \
'bastore', 'return']
method425 = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'sipush', 'iload_1', 'isub', \
'i2b', 'bastore', 'return']
method431 = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'i2b', 'bastore', \
'aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'bipush', 'ishr', \
'i2b', 'bastore', 'return']
method432 = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'bipush', 'ishr', \
'i2b', 'bastore', 'aload_0', 'getfield', 'aload_0', 'dup', \
'getfield', 'dup_x1', 'iconst_1', 'iadd', 'putfield', 'iload_1', \
'sipush', 'iadd', 'i2b', 'bastore', 'return']
method433 = ['aload_0', 'getfield', 'aload_0', 'dup', 'getfield', 'dup_x1', \
'iconst_1', 'iadd', 'putfield', 'iload_1', 'sipush', 'iadd', \
'i2b', 'bastore', 'aload_0', 'getfield', 'aload_0', 'dup', \
'getfield', 'dup_x1', 'iconst_1', 'iadd', 'putfield', 'iload_1', \
'bipush', 'ishr', 'i2b', 'bastore', 'return']
getNextKey = ['aload_0', 'dup', 'getfield', 'dup_x1', 'iconst_1', 'isub', \
'putfield', 'ifne', 'aload_0', 'invokespecial', 'aload_0', \
'sipush', 'putfield', 'aload_0', 'getfield', 'aload_0', \
'getfield', 'iaload', 'ireturn']
isaac = ['aload_0', 'dup', 'getfield', 'aload_0', 'dup', 'getfield', \
'iconst_1', 'iadd', 'dup_x1', 'putfield', 'iadd', 'putfield', \
'iconst_0', 'istore_1', 'goto', 'aload_0', 'getfield', 'iload_1', \
'iaload', 'istore_2', 'iload_1', 'iconst_3', 'iand', 'ifne', \
'aload_0', 'dup', 'getfield', 'aload_0', 'getfield', 'bipush', \
'ishl', 'ixor', 'putfield', 'goto', 'iload_1', 'iconst_3', 'iand', \
'iconst_1', 'if_icmpne', 'aload_0', 'dup', 'getfield', 'aload_0', \
'getfield', 'bipush', 'iushr', 'ixor', 'putfield', 'goto', \
'iload_1', 'iconst_3', 'iand', 'iconst_2', 'if_icmpne', 'aload_0', \
'dup', 'getfield', 'aload_0', 'getfield', 'iconst_2', 'ishl', \
'ixor', 'putfield', 'goto', 'iload_1', 'iconst_3', 'iand', \
'iconst_3', 'if_icmpne', 'aload_0', 'dup', 'getfield', 'aload_0', \
'getfield', 'bipush', 'iushr', 'ixor', 'putfield', 'aload_0', \
'dup', 'getfield', 'aload_0', 'getfield', 'iload_1', 'sipush', \
'iadd', 'sipush', 'iand', 'iaload', 'iadd', 'putfield', 'aload_0', \
'getfield', 'iload_1', 'aload_0', 'getfield', 'iload_2', 'sipush', \
'iand', 'iconst_2', 'ishr', 'iaload', 'aload_0', 'getfield', \
'iadd', 'aload_0', 'getfield', 'iadd', 'dup', 'istore_3', \
'iastore', 'aload_0', 'getfield', 'iload_1', 'aload_0', 'aload_0', \
'getfield', 'iload_3', 'bipush', 'ishr', 'sipush', 'iand', \
'iconst_2', 'ishr', 'iaload', 'iload_2', 'iadd', 'dup_x1', \
'putfield', 'iastore', 'iinc', 'iload_1', 'sipush', \
'if_icmplt', 'return']
initializeKeySet2 = ['ldc', 'dup', 'istore', 'dup', 'istore', 'dup', 'istore', \
'dup', 'istore', 'dup', 'istore_3', 'dup', 'istore_2', \
'dup', 'istore_1', 'istore', 'iconst_0', 'istore', \
'iload', 'iconst_4', 'if_icmpge', 'iload', 'iload_1', \
'bipush', 'ishl', 'ixor', 'istore', 'iload_3', 'iload', \
'iadd', 'istore_3', 'iload_1', 'iload_2', 'iadd', \
'istore_1', 'iload_1', 'iload_2', 'iconst_2', 'iushr', \
'ixor', 'istore_1', 'iload', 'iload_1', 'iadd', 'istore', \
'iload_2', 'iload_3', 'iadd', 'istore_2', 'iload_2', \
'iload_3', 'bipush', 'ishl', 'ixor', 'istore_2', \
'iload', 'iload_2', 'iadd', 'istore', 'iload_3', \
'iload', 'iadd', 'istore_3', 'iload_3', 'iload', \
'bipush', 'iushr', 'ixor', 'istore_3', 'iload', \
'iload_3', 'iadd', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'bipush', 'ishl', 'ixor', \
'istore', 'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', \
'iconst_4', 'iushr', 'ixor', 'istore', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'bipush', 'ishl', 'ixor', \
'istore', 'iload_1', 'iload', 'iadd', 'istore_1', \
'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'bipush', 'iushr', 'ixor', 'istore', \
'iload_2', 'iload', 'iadd', 'istore_2', 'iload', \
'iload_1', 'iadd', 'istore', 'iinc', 'goto', \
'iconst_0', 'istore', 'iload', 'sipush', 'if_icmpge', \
'iload', 'aload_0', 'getfield', 'iload', 'iaload', \
'iadd', 'istore', 'iload_1', 'aload_0', 'getfield', \
'iload', 'iconst_1', 'iadd', 'iaload', 'iadd', \
'istore_1', 'iload_2', 'aload_0', 'getfield', \
'iload', 'iconst_2', 'iadd', 'iaload', 'iadd', \
'istore_2', 'iload_3', 'aload_0', 'getfield', \
'iload', 'iconst_3', 'iadd', 'iaload', 'iadd', \
'istore_3', 'iload', 'aload_0', 'getfield', \
'iload', 'iconst_4', 'iadd', 'iaload', 'iadd', \
'istore', 'iload', 'aload_0', 'getfield', 'iload', \
'iconst_5', 'iadd', 'iaload', 'iadd', 'istore', \
'iload', 'aload_0', 'getfield', 'iload', 'bipush', \
'iadd', 'iaload', 'iadd', 'istore', 'iload', 'aload_0', \
'getfield', 'iload', 'bipush', 'iadd', 'iaload', 'iadd', \
'istore', 'iload', 'iload_1', 'bipush', 'ishl', 'ixor', \
'istore', 'iload_3', 'iload', 'iadd', 'istore_3', \
'iload_1', 'iload_2', 'iadd', 'istore_1', 'iload_1', \
'iload_2', 'iconst_2', 'iushr', 'ixor', 'istore_1', \
'iload', 'iload_1', 'iadd', 'istore', 'iload_2', \
'iload_3', 'iadd', 'istore_2', 'iload_2', 'iload_3', \
'bipush', 'ishl', 'ixor', 'istore_2', 'iload', \
'iload_2', 'iadd', 'istore', 'iload_3', 'iload', \
'iadd', 'istore_3', 'iload_3', 'iload', 'bipush', \
'iushr', 'ixor', 'istore_3', 'iload', 'iload_3', \
'iadd', 'istore', 'iload', 'iload', 'iadd', 'istore', \
'iload', 'iload', 'bipush', 'ishl', 'ixor', 'istore', \
'iload', 'iload', 'iadd', 'istore', 'iload', 'iload', \
'iadd', 'istore', 'iload', 'iload', 'iconst_4', \
'iushr', 'ixor', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'iadd', 'istore', \
'iload', 'iload', 'bipush', 'ishl', 'ixor', 'istore', \
'iload_1', 'iload', 'iadd', 'istore_1', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', \
'bipush', 'iushr', 'ixor', 'istore', 'iload_2', \
'iload', 'iadd', 'istore_2', 'iload', 'iload_1', \
'iadd', 'istore', 'aload_0', 'getfield', 'iload', \
'iload', 'iastore', 'aload_0', 'getfield', 'iload', \
'iconst_1', 'iadd', 'iload_1', 'iastore', 'aload_0', \
'getfield', 'iload', 'iconst_2', 'iadd', 'iload_2', \
'iastore', 'aload_0', 'getfield', 'iload', \
'iconst_3', 'iadd', 'iload_3', 'iastore', 'aload_0', \
'getfield', 'iload', 'iconst_4', 'iadd', 'iload', \
'iastore', 'aload_0', 'getfield', 'iload', 'iconst_5', \
'iadd', 'iload', 'iastore', 'aload_0', 'getfield', \
'iload', 'bipush', 'iadd', 'iload', 'iastore', \
'aload_0', 'getfield', 'iload', 'bipush', 'iadd', \
'iload', 'iastore', 'iinc', 'goto', 'iconst_0', \
'istore', 'iload', 'sipush', 'if_icmpge', 'iload', \
'aload_0', 'getfield', 'iload', 'iaload', 'iadd', \
'istore', 'iload_1', 'aload_0', 'getfield', \
'iload', 'iconst_1', 'iadd', 'iaload', 'iadd', \
'istore_1', 'iload_2', 'aload_0', 'getfield', \
'iload', 'iconst_2', 'iadd', 'iaload', 'iadd', \
'istore_2', 'iload_3', 'aload_0', 'getfield', \
'iload', 'iconst_3', 'iadd', 'iaload', 'iadd', \
'istore_3', 'iload', 'aload_0', 'getfield', \
'iload', 'iconst_4', 'iadd', 'iaload', 'iadd', \
'istore', 'iload', 'aload_0', 'getfield', 'iload', \
'iconst_5', 'iadd', 'iaload', 'iadd', 'istore', \
'iload', 'aload_0', 'getfield', 'iload', 'bipush', \
'iadd', 'iaload', 'iadd', 'istore', 'iload', \
'aload_0', 'getfield', 'iload', 'bipush', 'iadd', \
'iaload', 'iadd', 'istore', 'iload', 'iload_1', \
'bipush', 'ishl', 'ixor', 'istore', 'iload_3', \
'iload', 'iadd', 'istore_3', 'iload_1', 'iload_2', \
'iadd', 'istore_1', 'iload_1', 'iload_2', \
'iconst_2', 'iushr', 'ixor', 'istore_1', 'iload', \
'iload_1', 'iadd', 'istore', 'iload_2', 'iload_3', \
'iadd', 'istore_2', 'iload_2', 'iload_3', 'bipush', \
'ishl', 'ixor', 'istore_2', 'iload', 'iload_2', \
'iadd', 'istore', 'iload_3', 'iload', 'iadd', \
'istore_3', 'iload_3', 'iload', 'bipush', 'iushr', \
'ixor', 'istore_3', 'iload', 'iload_3', 'iadd', \
'istore', 'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'bipush', 'ishl', 'ixor', 'istore', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'iconst_4', 'iushr', 'ixor', \
'istore', 'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', 'bipush', \
'ishl', 'ixor', 'istore', 'iload_1', 'iload', 'iadd', \
'istore_1', 'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'bipush', 'iushr', 'ixor', 'istore', 'iload_2', \
'iload', 'iadd', 'istore_2', 'iload', 'iload_1', 'iadd', \
'istore', 'aload_0', 'getfield', 'iload', 'iload', \
'iastore', 'aload_0', 'getfield', 'iload', 'iconst_1', \
'iadd', 'iload_1', 'iastore', 'aload_0', 'getfield', \
'iload', 'iconst_2', 'iadd', 'iload_2', 'iastore', \
'aload_0', 'getfield', 'iload', 'iconst_3', 'iadd', \
'iload_3', 'iastore', 'aload_0', 'getfield', 'iload', \
'iconst_4', 'iadd', 'iload', 'iastore', 'aload_0', \
'getfield', 'iload', 'iconst_5', 'iadd', 'iload', \
'iastore', 'aload_0', 'getfield', 'iload', 'bipush', \
'iadd', 'iload', 'iastore', 'aload_0', 'getfield', \
'iload', 'bipush', 'iadd', 'iload', 'iastore', 'iinc', \
'goto', 'aload_0', 'invokespecial', 'aload_0', \
'sipush', 'putfield', 'return']
initializeKeySet3 = ['ldc', 'dup', 'istore', 'dup', 'istore', 'dup', 'istore', \
'dup', 'istore', 'dup', 'istore_3', 'dup', 'istore_2', \
'dup', 'istore_1', 'istore', 'iconst_0', 'istore', \
'goto', 'iload', 'iload_1', 'bipush', 'ishl', 'ixor', \
'istore', 'iload_3', 'iload', 'iadd', 'istore_3', \
'iload_1', 'iload_2', 'iadd', 'dup', 'istore_1', \
'iload_2', 'iconst_2', 'iushr', 'ixor', 'istore_1', \
'iload', 'iload_1', 'iadd', 'istore', 'iload_2', \
'iload_3', 'iadd', 'dup', 'istore_2', 'iload_3', \
'bipush', 'ishl', 'ixor', 'istore_2', 'iload', \
'iload_2', 'iadd', 'istore', 'iload_3', 'iload', \
'iadd', 'dup', 'istore_3', 'iload', 'bipush', \
'iushr', 'ixor', 'istore_3', 'iload', \
'iload_3', 'iadd', 'istore', 'iload', 'iload', \
'iadd', 'dup', 'istore', 'iload', 'bipush', \
'ishl', 'ixor', 'istore', 'iload', 'iload', \
'iadd', 'istore', 'iload', 'iload', 'iadd', \
'dup', 'istore', 'iload', 'iconst_4', \
'iushr', 'ixor', 'istore', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', \
'iadd', 'dup', 'istore', 'iload', 'bipush', \
'ishl', 'ixor', 'istore', 'iload_1', 'iload', \
'iadd', 'istore_1', 'iload', 'iload', 'iadd', \
'dup', 'istore', 'iload', 'bipush', 'iushr', \
'ixor', 'istore', 'iload_2', 'iload', 'iadd', \
'istore_2', 'iload', 'iload_1', 'iadd', \
'istore', 'iinc', 'iload', 'iconst_4', 'if_icmplt', \
'iconst_0', 'istore', 'goto', 'iload', 'aload_0', \
'getfield', 'iload', 'iaload', 'iadd', 'istore', \
'iload_1', 'aload_0', 'getfield', 'iload', \
'iconst_1', 'iadd', 'iaload', 'iadd', 'istore_1', \
'iload_2', 'aload_0', 'getfield', 'iload', \
'iconst_2', 'iadd', 'iaload', 'iadd', 'istore_2', \
'iload_3', 'aload_0', 'getfield', 'iload', 'iconst_3', \
'iadd', 'iaload', 'iadd', 'istore_3', 'iload', \
'aload_0', 'getfield', 'iload', 'iconst_4', 'iadd', \
'iaload', 'iadd', 'istore', 'iload', 'aload_0', \
'getfield', 'iload', 'iconst_5', 'iadd', 'iaload', \
'iadd', 'istore', 'iload', 'aload_0', 'getfield', \
'iload', 'bipush', 'iadd', 'iaload', 'iadd', \
'istore', 'iload', 'aload_0', 'getfield', 'iload', \
'bipush', 'iadd', 'iaload', 'iadd', 'istore', 'iload', \
'iload_1', 'bipush', 'ishl', 'ixor', 'istore', \
'iload_3', 'iload', 'iadd', 'istore_3', 'iload_1', \
'iload_2', 'iadd', 'dup', 'istore_1', 'iload_2', \
'iconst_2', 'iushr', 'ixor', 'istore_1', 'iload', \
'iload_1', 'iadd', 'istore', 'iload_2', 'iload_3', \
'iadd', 'dup', 'istore_2', 'iload_3', 'bipush', \
'ishl', 'ixor', 'istore_2', 'iload', 'iload_2', \
'iadd', 'istore', 'iload_3', 'iload', 'iadd', 'dup', \
'istore_3', 'iload', 'bipush', 'iushr', 'ixor', \
'istore_3', 'iload', 'iload_3', 'iadd', 'istore', \
'iload', 'iload', 'iadd', 'dup', 'istore', 'iload', \
'bipush', 'ishl', 'ixor', 'istore', 'iload', 'iload', \
'iadd', 'istore', 'iload', 'iload', 'iadd', 'dup', \
'istore', 'iload', 'iconst_4', 'iushr', 'ixor', \
'istore', 'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'iadd', 'dup', 'istore', 'iload', 'bipush', \
'ishl', 'ixor', 'istore', 'iload_1', 'iload', 'iadd', \
'istore_1', 'iload', 'iload', 'iadd', 'dup', 'istore', \
'iload', 'bipush', 'iushr', 'ixor', 'istore', \
'iload_2', 'iload', 'iadd', 'istore_2', 'iload', \
'iload_1', 'iadd', 'istore', 'aload_0', 'getfield', \
'iload', 'iload', 'iastore', 'aload_0', 'getfield', \
'iload', 'iconst_1', 'iadd', 'iload_1', 'iastore', \
'aload_0', 'getfield', 'iload', 'iconst_2', 'iadd', \
'iload_2', 'iastore', 'aload_0', 'getfield', 'iload', \
'iconst_3', 'iadd', 'iload_3', 'iastore', 'aload_0', \
'getfield', 'iload', 'iconst_4', 'iadd', 'iload', \
'iastore', 'aload_0', 'getfield', 'iload', 'iconst_5', \
'iadd', 'iload', 'iastore', 'aload_0', 'getfield', \
'iload', 'bipush', 'iadd', 'iload', 'iastore', \
'aload_0', 'getfield', 'iload', 'bipush', 'iadd', \
'iload', 'iastore', 'iinc', 'iload', 'sipush', \
'if_icmplt', 'iconst_0', 'istore', 'goto', 'iload', \
'aload_0', 'getfield', 'iload', 'iaload', 'iadd', \
'istore', 'iload_1', 'aload_0', 'getfield', 'iload', \
'iconst_1', 'iadd', 'iaload', 'iadd', 'istore_1', \
'iload_2', 'aload_0', 'getfield', 'iload', 'iconst_2', \
'iadd', 'iaload', 'iadd', 'istore_2', 'iload_3', \
'aload_0', 'getfield', 'iload', 'iconst_3', 'iadd', \
'iaload', 'iadd', 'istore_3', 'iload', 'aload_0', \
'getfield', 'iload', 'iconst_4', 'iadd', 'iaload', \
'iadd', 'istore', 'iload', 'aload_0', 'getfield', \
'iload', 'iconst_5', 'iadd', 'iaload', 'iadd', \
'istore', 'iload', 'aload_0', 'getfield', 'iload', \
'bipush', 'iadd', 'iaload', 'iadd', 'istore', 'iload', \
'aload_0', 'getfield', 'iload', 'bipush', 'iadd', \
'iaload', 'iadd', 'istore', 'iload', 'iload_1', \
'bipush', 'ishl', 'ixor', 'istore', 'iload_3', \
'iload', 'iadd', 'istore_3', 'iload_1', 'iload_2', \
'iadd', 'dup', 'istore_1', 'iload_2', 'iconst_2', \
'iushr', 'ixor', 'istore_1', 'iload', 'iload_1', \
'iadd', 'istore', 'iload_2', 'iload_3', 'iadd', 'dup', \
'istore_2', 'iload_3', 'bipush', 'ishl', 'ixor', \
'istore_2', 'iload', 'iload_2', 'iadd', 'istore', \
'iload_3', 'iload', 'iadd', 'dup', 'istore_3', \
'iload', 'bipush', 'iushr', 'ixor', 'istore_3', \
'iload', 'iload_3', 'iadd', 'istore', 'iload', \
'iload', 'iadd', 'dup', 'istore', 'iload', 'bipush', \
'ishl', 'ixor', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'iadd', 'dup', 'istore', \
'iload', 'iconst_4', 'iushr', 'ixor', 'istore', \
'iload', 'iload', 'iadd', 'istore', 'iload', 'iload', \
'iadd', 'dup', 'istore', 'iload', 'bipush', 'ishl', \
'ixor', 'istore', 'iload_1', 'iload', 'iadd', \
'istore_1', 'iload', 'iload', 'iadd', 'dup', 'istore', \
'iload', 'bipush', 'iushr', 'ixor', 'istore', \
'iload_2', 'iload', 'iadd', 'istore_2', 'iload', \
'iload_1', 'iadd', 'istore', 'aload_0', 'getfield', \
'iload', 'iload', 'iastore', 'aload_0', 'getfield', \
'iload', 'iconst_1', 'iadd', 'iload_1', 'iastore', \
'aload_0', 'getfield', 'iload', 'iconst_2', 'iadd', \
'iload_2', 'iastore', 'aload_0', 'getfield', 'iload', \
'iconst_3', 'iadd', 'iload_3', 'iastore', 'aload_0', \
'getfield', 'iload', 'iconst_4', 'iadd', 'iload', \
'iastore', 'aload_0', 'getfield', 'iload', 'iconst_5', \
'iadd', 'iload', 'iastore', 'aload_0', 'getfield', \
'iload', 'bipush', 'iadd', 'iload', 'iastore', \
'aload_0', 'getfield', 'iload', 'bipush', 'iadd', \
'iload', 'iastore', 'iinc', 'iload', 'sipush', \
'if_icmplt', 'aload_0', 'invokespecial', \
'aload_0', 'sipush', 'putfield', 'return']
initializeKeySet4 = ['ldc', 'dup', 'istore', 'dup', 'istore', 'dup', \
'istore', 'dup', 'istore', 'dup', 'istore_3', 'dup', \
'istore_2', 'dup', 'istore_1', 'istore', 'iconst_0', \
'istore', 'iload', 'iconst_4', 'if_icmpge', \
'iload', 'iload_1', 'bipush', 'ishl', 'ixor', \
'istore', 'iload_3', 'iload', 'iadd', 'istore_3', \
'iload_1', 'iload_2', 'iadd', 'istore_1', 'iload_1', \
'iload_2', 'iconst_2', 'iushr', 'ixor', \
'istore_1', 'iload', 'iload_1', 'iadd', 'istore', \
'iload_2', 'iload_3', 'iadd', 'istore_2', 'iload_2', \
'iload_3', 'bipush', 'ishl', 'ixor', 'istore_2', \
'iload', 'iload_2', 'iadd', 'istore', 'iload_3', \
'iload', 'iadd', 'istore_3', 'iload_3', 'iload', \
'bipush', 'iushr', 'ixor', 'istore_3', 'iload', \
'iload_3', 'iadd', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'bipush', 'ishl', 'ixor', \
'istore', 'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', 'iconst_4', \
'iushr', 'ixor', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'bipush', 'ishl', 'ixor', 'istore', 'iload_1', \
'iload', 'iadd', 'istore_1', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'bipush', 'iushr', 'ixor', \
'istore', 'iload_2', 'iload', 'iadd', 'istore_2', \
'iload', 'iload_1', 'iadd', 'istore', 'iinc', 'goto', \
'iconst_0', 'istore', 'iload', 'sipush', 'if_icmpge', \
'iload', 'aload_0', 'getfield', 'iload', 'iaload', \
'iadd', 'istore', 'iload_1', 'aload_0', 'getfield', \
'iload', 'iconst_1', 'iadd', 'iaload', 'iadd', \
'istore_1', 'iload_2', 'aload_0', 'getfield', 'iload', \
'iconst_2', 'iadd', 'iaload', 'iadd', 'istore_2', \
'iload_3', 'aload_0', 'getfield', 'iload', 'iconst_3', \
'iadd', 'iaload', 'iadd', 'istore_3', 'iload', \
'aload_0', 'getfield', 'iload', 'iconst_4', 'iadd', \
'iaload', 'iadd', 'istore', 'iload', 'aload_0', \
'getfield', 'iload', 'iconst_5', 'iadd', 'iaload', \
'iadd', 'istore', 'iload', 'aload_0', 'getfield', \
'iload', 'bipush', 'iadd', 'iaload', 'iadd', 'istore', \
'iload', 'aload_0', 'getfield', 'iload', 'bipush', \
'iadd', 'iaload', 'iadd', 'istore', 'iload', 'iload_1', \
'bipush', 'ishl', 'ixor', 'istore', 'iload_3', 'iload', \
'iadd', 'istore_3', 'iload_1', 'iload_2', 'iadd', \
'istore_1', 'iload_1', 'iload_2', 'iconst_2', 'iushr', \
'ixor', 'istore_1', 'iload', 'iload_1', 'iadd', \
'istore', 'iload_2', 'iload_3', 'iadd', 'istore_2', \
'iload_2', 'iload_3', 'bipush', 'ishl', 'ixor', \
'istore_2', 'iload', 'iload_2', 'iadd', 'istore', \
'iload_3', 'iload', 'iadd', 'istore_3', 'iload_3', \
'iload', 'bipush', 'iushr', 'ixor', 'istore_3', \
'iload', 'iload_3', 'iadd', 'istore', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', \
'bipush', 'ishl', 'ixor', 'istore', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'iconst_4', 'iushr', \
'ixor', 'istore', 'iload', 'iload', 'iadd', 'istore', \
'iload', 'iload', 'iadd', 'istore', 'iload', 'iload', \
'bipush', 'ishl', 'ixor', 'istore', 'iload_1', \
'iload', 'iadd', 'istore_1', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'bipush', 'iushr', 'ixor', \
'istore', 'iload_2', 'iload', 'iadd', 'istore_2', \
'iload', 'iload_1', 'iadd', 'istore', 'aload_0', \
'getfield', 'iload', 'iload', 'iastore', 'aload_0', \
'getfield', 'iload', 'iconst_1', 'iadd', 'iload_1', \
'iastore', 'aload_0', 'getfield', 'iload', \
'iconst_2', 'iadd', 'iload_2', 'iastore', 'aload_0', \
'getfield', 'iload', 'iconst_3', 'iadd', 'iload_3', \
'iastore', 'aload_0', 'getfield', 'iload', 'iconst_4', \
'iadd', 'iload', 'iastore', 'aload_0', 'getfield', \
'iload', 'iconst_5', 'iadd', 'iload', 'iastore', \
'aload_0', 'getfield', 'iload', 'bipush', 'iadd', \
'iload', 'iastore', 'aload_0', 'getfield', 'iload', \
'bipush', 'iadd', 'iload', 'iastore', 'iinc', \
'goto', 'iconst_0', 'istore', 'iload', 'sipush', \
'if_icmpge', 'iload', 'aload_0', 'getfield', 'iload', \
'iaload', 'iadd', 'istore', 'iload_1', 'aload_0', \
'getfield', 'iload', 'iconst_1', 'iadd', 'iaload', \
'iadd', 'istore_1', 'iload_2', 'aload_0', 'getfield', \
'iload', 'iconst_2', 'iadd', 'iaload', 'iadd', \
'istore_2', 'iload_3', 'aload_0', 'getfield', 'iload', \
'iconst_3', 'iadd', 'iaload', 'iadd', 'istore_3', \
'iload', 'aload_0', 'getfield', 'iload', 'iconst_4', \
'iadd', 'iaload', 'iadd', 'istore', 'iload', \
'aload_0', 'getfield', 'iload', 'iconst_5', 'iadd', \
'iaload', 'iadd', 'istore', 'iload', 'aload_0', \
'getfield', 'iload', 'bipush', 'iadd', 'iaload', \
'iadd', 'istore', 'iload', 'aload_0', 'getfield', \
'iload', 'bipush', 'iadd', 'iaload', 'iadd', 'istore', \
'iload', 'iload_1', 'bipush', 'ishl', 'ixor', 'istore', \
'iload_3', 'iload', 'iadd', 'istore_3', 'iload_1', \
'iload_2', 'iadd', 'istore_1', 'iload_1', 'iload_2', \
'iconst_2', 'iushr', 'ixor', 'istore_1', 'iload', \
'iload_1', 'iadd', 'istore', 'iload_2', 'iload_3', \
'iadd', 'istore_2', 'iload_2', 'iload_3', 'bipush', \
'ishl', 'ixor', 'istore_2', 'iload', 'iload_2', \
'iadd', 'istore', 'iload_3', 'iload', 'iadd', \
'istore_3', 'iload_3', 'iload', 'bipush', 'iushr', \
'ixor', 'istore_3', 'iload', 'iload_3', 'iadd', \
'istore', 'iload', 'iload', 'iadd', 'istore', \
'iload', 'iload', 'bipush', 'ishl', 'ixor', 'istore', \
'iload', 'iload', 'iadd', 'istore', 'iload', 'iload', \
'iadd', 'istore', 'iload', 'iload', 'iconst_4', \
'iushr', 'ixor', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'iadd', 'istore', \
'iload', 'iload', 'bipush', 'ishl', 'ixor', \
'istore', 'iload_1', 'iload', 'iadd', 'istore_1', \
'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'bipush', 'iushr', 'ixor', 'istore', \
'iload_2', 'iload', 'iadd', 'istore_2', 'iload', \
'iload_1', 'iadd', 'istore', 'aload_0', 'getfield', \
'iload', 'iload', 'iastore', 'aload_0', 'getfield', \
'iload', 'iconst_1', 'iadd', 'iload_1', 'iastore', \
'aload_0', 'getfield', 'iload', 'iconst_2', 'iadd', \
'iload_2', 'iastore', 'aload_0', 'getfield', 'iload', \
'iconst_3', 'iadd', 'iload_3', 'iastore', 'aload_0', \
'getfield', 'iload', 'iconst_4', 'iadd', 'iload', \
'iastore', 'aload_0', 'getfield', 'iload', \
'iconst_5', 'iadd', 'iload', 'iastore', 'aload_0', \
'getfield', 'iload', 'bipush', 'iadd', 'iload', \
'iastore', 'aload_0', 'getfield', 'iload', 'bipush', \
'iadd', 'iload', 'iastore', 'iinc', 'goto', 'aload_0', \
'invokespecial', 'aload_0', 'sipush', 'putfield', 'return']
initializeKeySet = ['ldc', 'dup', 'istore', 'dup', 'istore', 'dup', \
'istore', 'dup', 'istore', 'dup', 'istore_3', \
'dup', 'istore_2', 'dup', 'istore_1', 'istore', \
'iconst_0', 'istore', 'goto', 'iload', 'iload_1', \
'bipush', 'ishl', 'ixor', 'istore', 'iload_3', \
'iload', 'iadd', 'istore_3', 'iload_1', \
'iload_2', 'iadd', 'istore_1', 'iload_1', \
'iload_2', 'iconst_2', 'iushr', 'ixor', \
'istore_1', 'iload', 'iload_1', 'iadd', 'istore', \
'iload_2', 'iload_3', 'iadd', 'istore_2', \
'iload_2', 'iload_3', 'bipush', 'ishl', 'ixor', \
'istore_2', 'iload', 'iload_2', 'iadd', 'istore', \
'iload_3', 'iload', 'iadd', 'istore_3', \
'iload_3', 'iload', 'bipush', 'iushr', 'ixor', \
'istore_3', 'iload', 'iload_3', 'iadd', \
'istore', 'iload', 'iload', 'iadd', 'istore', \
'iload', 'iload', 'bipush', 'ishl', 'ixor', \
'istore', 'iload', 'iload', 'iadd', 'istore', \
'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'iconst_4', 'iushr', 'ixor', 'istore', \
'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', \
'bipush', 'ishl', 'ixor', 'istore', 'iload_1', \
'iload', 'iadd', 'istore_1', 'iload', 'iload', \
'iadd', 'istore', 'iload', 'iload', 'bipush', \
'iushr', 'ixor', 'istore', 'iload_2', 'iload', \
'iadd', 'istore_2', 'iload', 'iload_1', 'iadd', \
'istore', 'iinc', 'iload', 'iconst_4', \
'if_icmplt', 'iconst_0', 'istore', 'goto', \
'iload', 'aload_0', 'getfield', 'iload', \
'iaload', 'iadd', 'istore', 'iload_1', \
'aload_0', 'getfield', 'iload', 'iconst_1', \
'iadd', 'iaload', 'iadd', 'istore_1', \
'iload_2', 'aload_0', 'getfield', 'iload', \
'iconst_2', 'iadd', 'iaload', 'iadd', \
'istore_2', 'iload_3', 'aload_0', \
'getfield', 'iload', 'iconst_3', 'iadd', \
'iaload', 'iadd', 'istore_3', 'iload', \
'aload_0', 'getfield', 'iload', 'iconst_4', \
'iadd', 'iaload', 'iadd', 'istore', \
'iload', 'aload_0', 'getfield', 'iload', \
'iconst_5', 'iadd', 'iaload', 'iadd', \
'istore', 'iload', 'aload_0', 'getfield', \
'iload', 'bipush', 'iadd', 'iaload', \
'iadd', 'istore', 'iload', 'aload_0', \
'getfield', 'iload', 'bipush', 'iadd', \
'iaload', 'iadd', 'istore', 'iload', \
'iload_1', 'bipush', 'ishl', 'ixor', \
'istore', 'iload_3', 'iload', 'iadd', \
'istore_3', 'iload_1', 'iload_2', 'iadd', \
'istore_1', 'iload_1', 'iload_2', \
'iconst_2', 'iushr', 'ixor', 'istore_1', \
'iload', 'iload_1', 'iadd', 'istore', \
'iload_2', 'iload_3', 'iadd', 'istore_2', \
'iload_2', 'iload_3', 'bipush', 'ishl', \
'ixor', 'istore_2', 'iload', 'iload_2', \
'iadd', 'istore', 'iload_3', 'iload', \
'iadd', 'istore_3', 'iload_3', 'iload', \
'bipush', 'iushr', 'ixor', 'istore_3', \
'iload', 'iload_3', 'iadd', 'istore', \
'iload', 'iload', 'iadd', 'istore', \
'iload', 'iload', 'bipush', 'ishl', \
'ixor', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'iadd', 'istore', \
'iload', 'iload', 'iconst_4', \
'iushr', 'ixor', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'bipush', 'ishl', 'ixor', 'istore', \
'iload_1', 'iload', 'iadd', 'istore_1', 'iload', \
'iload', 'iadd', 'istore', 'iload', 'iload', \
'bipush', 'iushr', 'ixor', 'istore', 'iload_2', \
'iload', 'iadd', 'istore_2', 'iload', 'iload_1', \
'iadd', 'istore', 'aload_0', 'getfield', 'iload', \
'iload', 'iastore', 'aload_0', 'getfield', \
'iload', 'iconst_1', 'iadd', 'iload_1', 'iastore', \
'aload_0', 'getfield', 'iload', 'iconst_2', \
'iadd', 'iload_2', 'iastore', 'aload_0', \
'getfield', 'iload', 'iconst_3', 'iadd', 'iload_3', \
'iastore', 'aload_0', 'getfield', 'iload', \
'iconst_4', 'iadd', 'iload', 'iastore', \
'aload_0', 'getfield', 'iload', 'iconst_5', 'iadd', \
'iload', 'iastore', 'aload_0', 'getfield', 'iload', \
'bipush', 'iadd', 'iload', 'iastore', 'aload_0', \
'getfield', 'iload', 'bipush', 'iadd', 'iload', \
'iastore', 'iinc', 'iload', 'sipush', \
'if_icmplt', 'iconst_0', 'istore', 'goto', \
'iload', 'aload_0', 'getfield', 'iload', 'iaload', \
'iadd', 'istore', 'iload_1', 'aload_0', \
'getfield', 'iload', 'iconst_1', 'iadd', \
'iaload', 'iadd', 'istore_1', 'iload_2', \
'aload_0', 'getfield', 'iload', 'iconst_2', \
'iadd', 'iaload', 'iadd', 'istore_2', \
'iload_3', 'aload_0', 'getfield', 'iload', \
'iconst_3', 'iadd', 'iaload', 'iadd', 'istore_3', \
'iload', 'aload_0', 'getfield', 'iload', \
'iconst_4', 'iadd', 'iaload', 'iadd', 'istore', \
'iload', 'aload_0', 'getfield', 'iload', \
'iconst_5', 'iadd', 'iaload', 'iadd', \
'istore', 'iload', 'aload_0', 'getfield', \
'iload', 'bipush', 'iadd', 'iaload', \
'iadd', 'istore', 'iload', 'aload_0', \
'getfield', 'iload', 'bipush', 'iadd', \
'iaload', 'iadd', 'istore', 'iload', \
'iload_1', 'bipush', 'ishl', 'ixor', 'istore', \
'iload_3', 'iload', 'iadd', 'istore_3', \
'iload_1', 'iload_2', 'iadd', 'istore_1', \
'iload_1', 'iload_2', 'iconst_2', 'iushr', \
'ixor', 'istore_1', 'iload', 'iload_1', \
'iadd', 'istore', 'iload_2', 'iload_3', \
'iadd', 'istore_2', 'iload_2', 'iload_3', \
'bipush', 'ishl', 'ixor', 'istore_2', \
'iload', 'iload_2', 'iadd', 'istore', \
'iload_3', 'iload', 'iadd', 'istore_3', \
'iload_3', 'iload', 'bipush', 'iushr', \
'ixor', 'istore_3', 'iload', 'iload_3', \
'iadd', 'istore', 'iload', 'iload', \
'iadd', 'istore', 'iload', 'iload', 'bipush', \
'ishl', 'ixor', 'istore', 'iload', 'iload', \
'iadd', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'iconst_4', 'iushr', \
'ixor', 'istore', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'iadd', 'istore', 'iload', \
'iload', 'bipush', 'ishl', 'ixor', 'istore', 'iload_1', \
'iload', 'iadd', 'istore_1', 'iload', 'iload', 'iadd', \
'istore', 'iload', 'iload', 'bipush', 'iushr', 'ixor', \
'istore', 'iload_2', 'iload', \
'iadd', 'istore_2', 'iload', 'iload_1', \
'iadd', 'istore', 'aload_0', 'getfield', 'iload', \
'iload', 'iastore', 'aload_0', 'getfield', \
'iload', 'iconst_1', 'iadd', 'iload_1', 'iastore', \
'aload_0', 'getfield', 'iload', 'iconst_2', 'iadd', \
'iload_2', 'iastore', 'aload_0', 'getfield', \
'iload', 'iconst_3', 'iadd', 'iload_3', 'iastore', \
'aload_0', 'getfield', 'iload', 'iconst_4', 'iadd', \
'iload', 'iastore', 'aload_0', 'getfield', 'iload', \
'iconst_5', 'iadd', 'iload', 'iastore', 'aload_0', \
'getfield', 'iload', 'bipush', 'iadd', 'iload', \
'iastore', 'aload_0', 'getfield', 'iload', 'bipush', \
'iadd', 'iload', 'iastore', 'iinc', 'iload', \
'sipush', 'if_icmplt', 'aload_0', 'invokespecial', \
'aload_0', 'sipush', 'putfield', 'return']
Generic_Methods = [
['createFrame', createFrame],
['writeDWord', writeDWord],
['writeWordBigEndian', writeWordBigEndian],
['writeWord', writeWord],
['writeDWordBigEndian', writeDWordBigEndian],
['method403', method403],
['writeQWord', writeQWord],
['writeString', writeString],
['method424', method424],
['method425', method425],
['method431', method431],
['method432', method432],
['method433', method433],
['getNextKey', getNextKey],
['isaac', isaac],
['initializeKeySet', initializeKeySet],
['initializeKeySet3', initializeKeySet3],
['initializeKeySet4', initializeKeySet4],
['initializeKeySet2', initializeKeySet2]
]
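# Illustrative lookup (not part of the original module): Generic_Methods is a
# list of [name, opcodes] pairs, so it converts cleanly to a dict:
#
# lookup = dict((name, ops) for name, ops in Generic_Methods)
# lookup['writeWord']  # -> opcode sequence for the writeWord method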
|
injectnique/KnuckleHeadedMcSpazatron
|
GenericBytecode.py
|
Python
|
mit
| 46,794
|
# -*- coding: utf-8 -*-
import unittest
from activity_feed import ActivityFeed
class UtilityTest(unittest.TestCase):
def test_key(self):
'should return the correct key for the non-aggregate feed'
a = ActivityFeed()
self.assertEqual(a.feed_key('david'), 'activity_feed:david')
def test_key_aggregate(self):
'should return the correct key for an aggregate feed'
a = ActivityFeed()
self.assertEqual(a.feed_key('david', True), 'activity_feed:aggregate:david')
def test_feederboard_for(self):
'should create a leaderboard using an existing Redis connection'
a = ActivityFeed()
feederboard_david = a.feederboard_for('david')
feederboard_person = a.feederboard_for('person')
self.assertEqual(feederboard_david is None, False)
self.assertEqual(feederboard_person is None, False)
|
simonz05/activity-feed
|
tests/test_utility.py
|
Python
|
mit
| 886
|
# Copyright (c) 2016-2017 The OrarioTreniBot Authors (see AUTHORS)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import json
import time
import botogram
import progressbar
import redis
import config
r = redis.StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT, db=config.REDIS_DB, password=config.REDIS_PASSWORD)
bot = botogram.create(config.BOT_TOKEN)
def post(text, parse_mode="HTML", reply_markup=None, disable_web_page_preview=True, message=None):
users = []
for user in r.keys("user:*"):
users.append(int(user[5:]))
print("Sending global message...")
print("> Text", text, sep=": ")
print("> Reply Markup", reply_markup, sep=": ")
print("> Parse mode", parse_mode, sep=": ")
print("> Disable web page preview", disable_web_page_preview, sep=": ")
bar = progressbar.ProgressBar()
for user in bar(users):
if message:
message.edit(
"<b>Sending global message...</b>"
"\n<b>{value}/{max_value}</b> ({percentage}%)"
.format(value=bar.value, max_value=bar.max_value, percentage=round(bar.percentage, 1))
)
time.sleep(0.1)
user_hash = "user:" + str(user)
try:
bot.chat(user)
except botogram.APIError:
r.hset(user_hash, "active", False)
continue
try:
if r.hget(user_hash, "active").decode("utf-8") == "False":
continue
bot.api.call("sendMessage", {
"chat_id": user, "text": text, "parse_mode": parse_mode,
"disable_web_page_preview": disable_web_page_preview,
"reply_markup": json.dumps(reply_markup) if reply_markup else ""
})
except botogram.APIError:
r.hset(user_hash, "active", False)
finally:
time.sleep(0.5)
if message:
message.edit(
"<b>Sending global message...</b>"
"\n<b>{value}/{max_value}</b> ({percentage}%)"
.format(value=bar.value, max_value=bar.max_value, percentage=round(bar.percentage, 1))
)
time.sleep(0.1)
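# Hedged usage sketch (message text is illustrative):
#
# post("<b>Scheduled maintenance tonight!</b>")
# # walks every "user:<id>" hash in Redis, skipping users marked inactive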
|
MarcoBuster/OrarioTreniBot
|
src/updates/global_messages.py
|
Python
|
mit
| 3,165
|
import sys, datetime, os, time
from mcstatus import MinecraftServer
# GLOBALS
# List containing all the mcstatus server objects
serverList = []
# appendLog() polls the server for population data and appends it to the log
# In: The hostname and population of a server, as well as the timestamp for its retrieval time
# Out: Nada.
def appendLog(host, time, pop):
fileName = host.replace(".", "")+ ".csv"
with open(fileName, "a") as logFile:
# If the file is empty add the column names
if not os.stat(fileName).st_size > 0:
logFile.write('"time","pop"')
# Write the data to the file
logFile.write("\n"+ '"'+ time+ '",'+ str(pop))
# getPopulations() Gets all the populations of the servers and calls appendLog() for each
# In: the list of servers
# Out: Nada
def getPopulations():
global serverList
print("Oy")
# Loop through all the servers
for serverObject in serverList:
status = serverObject.status()
pop = status.players.online
time = datetime.datetime.now().isoformat()
appendLog(serverObject.host, time, pop)
# Entry point.
def main():
global serverList
print("[mpl] Minecraft Population Logger")
print("[mpl] Version 0.1.0")
# No server URLs?
if len(sys.argv[1:]) < 1:
print("[mpl] Error: At least one URL must be specified.")
return 0
# Populate server list
for url in sys.argv[1:]:
serverObject = MinecraftServer.lookup(url)
# If the server was found
if serverObject is not None:
print("[mpl]", url, "was found!")
serverList.append(serverObject)
else:
print("[mpl]", url, "not found! It will not be tracked.")
while 1:
getPopulations()
time.sleep(300)
# Run the main.
main()
|
IronJew/mc-pop-log
|
poplog.py
|
Python
|
mit
| 1,647
|
import django_filters
from django.contrib.auth.models import Group
from apps.authentication.models import OnlineUser as User
class PublicProfileFilter(django_filters.FilterSet):
year = django_filters.NumberFilter(field_name="year", method="filter_year")
group = django_filters.CharFilter(method="filter_group")
class Meta:
model = User
fields = ("year", "group")
def filter_year(self, queryset, name, value):
user_ids = [user.id for user in queryset.all() if user.year == value]
return User.objects.filter(pk__in=user_ids)
def filter_group(self, queryset, name, value):
group = Group.objects.filter(name=value)
return queryset.filter(groups__in=group)
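# Hedged usage sketch (filter values are illustrative):
#
# qs = PublicProfileFilter(
#     {'year': 2, 'group': 'somegroup'},
#     queryset=User.objects.all(),
# ).qs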
|
dotKom/onlineweb4
|
apps/profiles/filters.py
|
Python
|
mit
| 728
|
from project_cron.utils import processutil
def open(app_name):
script = '''
if application "%s" is not running then
tell application "%s" to activate
end if
''' % (app_name, app_name)
processutil.call(['/usr/bin/osascript', '-e', script])
def close(app_name):
script = 'tell application "%s" to quit' % app_name
processutil.call(['/usr/bin/osascript', '-e', script])
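# Usage sketch (macOS only; the application name is illustrative):
#
# open('Safari')   # activates Safari only if it is not already running
# close('Safari')  # quits Safari via AppleScript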
|
ecleya/project_cron
|
project_cron/utils/apputil.py
|
Python
|
mit
| 424
|
# Copyright: Luis Pedro Coelho <luis@luispedro.org>, 2012-2018
# License: MIT
import numpy as np
def read_roi(fileobj):
'''
points = read_roi(fileobj)
Read ImageJ's ROI format
Parameters
----------
fileobj: should be a file-like object
Returns
-------
points: a list of points
'''
# This is based on:
# http://rsbweb.nih.gov/ij/developer/source/ij/io/RoiDecoder.java.html
# http://rsbweb.nih.gov/ij/developer/source/ij/io/RoiEncoder.java.html
SPLINE_FIT = 1
DOUBLE_HEADED = 2
OUTLINE = 4
OVERLAY_LABELS = 8
OVERLAY_NAMES = 16
OVERLAY_BACKGROUNDS = 32
OVERLAY_BOLD = 64
SUB_PIXEL_RESOLUTION = 128
DRAW_OFFSET = 256
pos = [4]
def get8():
pos[0] += 1
s = fileobj.read(1)
if not s:
raise IOError('readroi: Unexpected EOF')
return ord(s)
def get16():
b0 = get8()
b1 = get8()
return (b0 << 8) | b1
def get32():
s0 = get16()
s1 = get16()
return (s0 << 16) | s1
def getfloat():
v = np.int32(get32())
return v.view(np.float32)
magic = fileobj.read(4)
if magic != b'Iout':
raise IOError('Magic number not found')
version = get16()
# It seems that the roi type field occupies 2 Bytes, but only one is used
roi_type = get8()
# Discard second Byte:
get8()
if not (0 <= roi_type < 11):
raise ValueError('roireader: ROI type %s not supported' % roi_type)
if roi_type != 7:
raise ValueError('roireader: ROI type %s not supported (!= 7)' % roi_type)
top = get16()
left = get16()
bottom = get16()
right = get16()
n_coordinates = get16()
x1 = getfloat()
y1 = getfloat()
x2 = getfloat()
y2 = getfloat()
stroke_width = get16()
shape_roi_size = get32()
stroke_color = get32()
fill_color = get32()
subtype = get16()
if subtype != 0:
raise ValueError('roireader: ROI subtype {} not supported (!= 0)'.format(subtype))
options = get16()
arrow_style = get8()
arrow_head_size = get8()
rect_arc_size = get16()
position = get32()
header2offset = get32()
if options & SUB_PIXEL_RESOLUTION:
getc = getfloat
points = np.empty((n_coordinates, 2), dtype=np.float32)
else:
getc = get16
points = np.empty((n_coordinates, 2), dtype=np.int16)
points[:,1] = [getc() for i in range(n_coordinates)]
points[:,0] = [getc() for i in range(n_coordinates)]
points[:,1] += left
points[:,0] += top
points -= 1
return points
def read_roi_zip(fname):
'''
Reads all ROIs in a ZIP file
Parameters
----------
fname : str
Input filename
Returns
-------
rois: list of ROIs
Each ROI is a vector of 2D points
See Also
--------
read_roi: function, reads a single ROI
'''
import zipfile
with zipfile.ZipFile(fname) as zf:
return [read_roi(zf.open(n))
for n in zf.namelist()]
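# Hedged usage sketch (file names are illustrative):
#
# with open('cell.roi', 'rb') as f:
#     points = read_roi(f)           # (n, 2) array of (row, column) points
# rois = read_roi_zip('RoiSet.zip')  # list of such arrays, one per ROI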
|
luispedro/imread
|
imread/ijrois.py
|
Python
|
mit
| 3,061
|
import _plotly_utils.basevalidators
class ShowticklabelsValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self, plotly_name="showticklabels", parent_name="heatmap.colorbar", **kwargs
):
super(ShowticklabelsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
**kwargs
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/heatmap/colorbar/_showticklabels.py
|
Python
|
mit
| 446
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'ByStudent'
def zhanzhuanxc(p,q,e):
def egcd(a, b):
x,y, u,v = 0,1, 1,0
while a != 0:
q, r = b//a, b%a
m, n = x-u*q, y-v*q
b,a, x,y, u,v = a,r, u,v, m,n
gcd = b
return gcd, x, y
def modinv(a, m):
gcd, x, y = egcd(a, m)
if gcd != 1:
return None # modular inverse does not exist
else:
return x % m
phi_n = (p - 1) * (q - 1)
d = modinv(e, phi_n)
return int(d)
# print zhanzhuanxc(18443,49891,19)
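# Worked example (classic RSA textbook values): for p=61, q=53, e=17,
# phi(n) = 60*52 = 3120 and 17*2753 = 46801 = 15*3120 + 1, so
# zhanzhuanxc(61, 53, 17) == 2753, i.e. the private exponent d = e^-1 mod phi(n).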
|
ByStudent666/XsCrypto
|
XsCrypto/zhanzhuanxc.py
|
Python
|
mit
| 592
|
def hsd_inc_beh(rxd, txd):
'''|
| Specify the behavior, describe data processing; there is no notion
| of clock. Access the in/out interfaces via get() and append()
| methods. The "hsd_inc_beh" function does not return values.
|________'''
if rxd.hasPacket():
data = rxd.get() + 1
txd.append(data)
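# Minimal stand-in to illustrate one invocation (this list-backed channel is
# hypothetical; the real rxd/txd interface objects come from pihdf):
#
# class _Channel(list):
#     def hasPacket(self):
#         return len(self) > 0
#     def get(self):
#         return self.pop(0)
#
# rx, tx = _Channel([5]), _Channel()
# hsd_inc_beh(rx, tx)  # tx now holds [6]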
|
hnikolov/pihdf
|
examples/hsd_inc/src/hsd_inc_beh.py
|
Python
|
mit
| 340
|
""" Implementation of WaterFrame.plot_bar(key, ax=None, average_time=None)"""
import datetime
def plot_timebar(self, keys, ax=None, time_interval_mean=None):
"""
Make a bar plot of the input keys.
The bars are positioned at x with date/time. Their dimensions are given by height.
Parameters
----------
keys: list of str
keys of self.data to plot.
ax: matplotlib.axes object, optional (ax = None)
It is used to add the plot to an input axes object.
time_interval_mean: str, optional (time_interval_mean = None)
It calculates an average value of a time interval. You can find
all of the resample options here:
https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html
Returns
-------
ax: matplotlib.AxesSubplot
Axes of the plot.
"""
def format_year(x):
return datetime.datetime.\
strptime(x, '%Y-%m-%d %H:%M:%S').strftime('%Y')
def format_day(x):
return datetime.datetime.\
strptime(x, '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%d')
# Extract data
df = self.data[keys].dropna().reset_index().set_index('TIME')
df.index.rename("Date", inplace=True)
# Resample data
if time_interval_mean is None:
pass
else:
df = df.resample(time_interval_mean).mean()
if isinstance(keys, list):
ax = df[keys].plot.bar(ax=ax, legend=True)
else:
ax = df[keys].plot.bar(ax=ax)
# Write axes
try:
ax.set_ylabel(self.vocabulary[keys]['units'])
except KeyError:
print("Warning: We don't know the units of", keys,
"Please, add info into self.meaning[", keys, "['units']")
if time_interval_mean == 'A':
ax.set_xticklabels([format_year(x.get_text())
for x in ax.get_xticklabels()], rotation=60)
elif time_interval_mean == 'D':
ax.set_xticklabels([format_day(x.get_text())
for x in ax.get_xticklabels()], rotation=60)
return ax
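# Hedged usage sketch (assumes this function is bound as a WaterFrame method
# and that the frame carries a 'TEMP' column; both are illustrative):
#
# ax = wf.plot_timebar(['TEMP'], time_interval_mean='D')  # daily means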
|
rbardaji/oceanobs
|
mooda/waterframe/plot/plot_timebar.py
|
Python
|
mit
| 2,204
|
import unittest
from nixie.core import Nixie, KeyError
class NixieErrorsTestCase(unittest.TestCase):
def test_read_missing(self):
nx = Nixie()
self.assertIsNone(nx.read('missing'))
def test_update_missing(self):
nx = Nixie()
with self.assertRaises(KeyError):
nx.update('missing')
def test_update_with_wrong_value(self):
nx = Nixie()
key = nx.create()
with self.assertRaises(ValueError):
nx.update(key, 'a')
def test_delete_missing(self):
nx = Nixie()
with self.assertRaises(KeyError):
nx.delete('missing')
|
eiri/nixie
|
tests/test_nixie_errors.py
|
Python
|
mit
| 579
|
#!/usr/bin/env python3
"""CnbetaApi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^apis/', include('CnbetaApis.urls')),
]
|
kagenZhao/cnBeta
|
CnbetaApi/CnbetaApi/urls.py
|
Python
|
mit
| 847
|
from key_vault_agent import KeyVaultAgent
class SecretsAgent(KeyVaultAgent):
    def get_secret(self):
        self.data_client.restore_secret()
|
schaabs/sandbox
|
net/sandbox.keyvault/python/repl/secrets.py
|
Python
|
mit
| 160
|
from c2cgeoportal_admin.views.layertree import itemtypes_tables
itemtypes_tables.update({
'lu_int_wms': 'lux_layer_internal_wms',
'lu_ext_wms': 'lux_layer_external_wms',
})
|
Geoportail-Luxembourg/geoportailv3
|
geoportal/geoportailv3_geoportal/admin/admin.py
|
Python
|
mit
| 178
|
import socket
import fcntl
import struct
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
# Example usage (doctest-style, shown as comments so the module stays
# importable; addresses will differ on your machine):
# >>> get_ip_address('lo')
# '127.0.0.1'
# >>> get_ip_address('eth0')
# '38.113.228.130'
|
ActiveState/code
|
recipes/Python/439094_get_IP_address_associated_network_interface/recipe-439094.py
|
Python
|
mit
| 357
|
import discord
from discord.ext import commands
import datetime
import pytz
class GameTime(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def time(self, ctx):
"""Displays current game time."""
        locationName = self.bot.db.get_val("ServerInfo", "")
        embed = discord.Embed(title="Current time in {}".format(locationName['CityName']),
                              description=get_gametime())
        await ctx.send(embed=embed)
        await ctx.message.delete()
def suffix(d):
return 'th' if 11<=d<=13 else {1:'st',2:'nd',3:'rd'}.get(d%10, 'th')
def get_rawtime():
return datetime.datetime.now(pytz.timezone('UTC'))
def get_gametime():
months = [
"Hammer",
"Alturiak",
"Ches",
"Tarsakh",
"Mirtul",
"Kythorn",
"Flamerule",
"Eleasis",
"Eleint",
"Marpenoth",
"Uktar",
"Nightal"]
    aDate = datetime.datetime(2020, 10, 18, tzinfo=pytz.timezone('UTC'))
    bDate = datetime.datetime.now(pytz.timezone('UTC'))
    delta = bDate - aDate
    gametime = (datetime.datetime(2020, 10, 18, bDate.hour, bDate.minute, bDate.second)
                + datetime.timedelta(days=delta.days * 3)
                + datetime.timedelta(days=(bDate.hour // 8 - 2)))
if gametime.hour == 0:
gametime_hour = 12
time_decor = "AM"
else:
gametime_hour = gametime.hour-12 if gametime.hour > 12 else gametime.hour
time_decor = "PM" if gametime.hour > 12 else "AM"
gametime_minute = "0{}".format(gametime.minute) if gametime.minute < 10 else gametime.minute
return "{}:{} {} UTC | {}{} of {}".format(gametime_hour, gametime_minute, time_decor, gametime.day, suffix(gametime.day), months[gametime.month-1])
def setup(bot):
bot.add_cog(GameTime(bot))
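# Quick sanity check of the ordinal-suffix helper (pure function, safe to
# run standalone):
#     >>> [str(d) + suffix(d) for d in (1, 2, 3, 4, 11, 13, 21)]
#     ['1st', '2nd', '3rd', '4th', '11th', '13th', '21st']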
|
Eylesis/Botfriend
|
Cogs/GameTime.py
|
Python
|
mit
| 1,883
|
#!/usr/bin/env python
import unittest
import logging
import logging.config
import sys
import argparse
DESCRIPTION="""
Harness for tests in the cloaca/tests/ directory.
Run all tests with '--all' or provide a list dotted names
of specific tests (eg. legionary.TestLegionary.test_legionary).
"""
# Set up logging. See test_logging.json for config
def setup_logging(
default_path='test_logging.json',
default_level=logging.INFO):
"""Setup logging configuration
"""
import sys, os, json
path = default_path
if os.path.exists(path):
with open(path, 'rt') as f:
config = json.load(f)
logging.config.dictConfig(config)
else:
logging.basicConfig(level=default_level)
def main():
parser = argparse.ArgumentParser(
description=DESCRIPTION,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--all', action='store_true',
help='Run all tests instead of matching pattern.')
parser.add_argument('pattern', nargs='*',
help=('pattern(s) to match against, eg. "buildings" or '
'"architect.TestArchitect.test_lead_architect".'))
parser.add_argument('-v', '--verbose', action='store_true',
help='Use verbose test result reporting.')
parser.add_argument('-q', '--quiet', action='store_true',
help=('Suppress individual test result reporting. Still reports '
'summary information. Overrides --verbose.'))
parser.add_argument('--log-level', default='WARNING',
help=('Set app log level during tests. Valid arguments are: '
'DEBUG, INFO, WARNING, ERROR, CRITICAL. See logging module '
'documentation.'))
args = parser.parse_args()
setup_logging()
numeric_level = getattr(logging, args.log_level.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: {0!s}'.format(args.log_level))
# This catches the children loggers like cloaca.game
logging.getLogger('cloaca').setLevel(numeric_level)
loader = unittest.defaultTestLoader
if args.all:
sys.stderr.write('Running all tests.\n')
suites = loader.discover('.', pattern='*.py')
else:
if len(args.pattern) == 0:
sys.stderr.write('ERROR: No tests specified.\n\n')
parser.print_help(file=sys.stderr)
return
sys.stderr.write('Running all tests matching the patterns ('
+ ', '.join(args.pattern) + ')\n')
suites = loader.loadTestsFromNames(args.pattern)
test_suite = unittest.TestSuite(suites)
# TextTestRunner takes verbosity that can be 0 (quiet), 1 (default),
# or 2 (verbose). Quiet overrides verbose.
if args.quiet:
verbosity = 0
elif args.verbose:
verbosity = 2
    else:
        verbosity = 1
    unittest.TextTestRunner(verbosity=verbosity).run(test_suite)
if __name__ == '__main__':
main()
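# Example invocations (mirroring the argparse setup above):
#     python tests.py --all
#     python tests.py legionary.TestLegionary.test_legionary -v
#     python tests.py buildings architect --log-level=DEBUG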
|
mhmurray/cloaca
|
cloaca/test/tests.py
|
Python
|
mit
| 3,034
|
import tkinter as tk
class ExampleApp(tk.Tk):
def __init__(self):
tk.Tk.__init__(self)
t = SimpleTable(self, 10,2)
t.pack(side="top", fill="x")
t.set(0,0,"Hello, world")
class SimpleTable(tk.Frame):
def __init__(self, parent, rows=10, columns=2):
# use black background so it "peeks through" to
# form grid lines
tk.Frame.__init__(self, parent, background="black")
self._widgets = []
for row in range(rows):
current_row = []
for column in range(columns):
                label = tk.Label(self, text="%s/%s" % (row, column),
                                 borderwidth=0, width=10, height=10)
label.grid(row=row, column=column, sticky="nsew", padx=1, pady=1)
current_row.append(label)
self._widgets.append(current_row)
for column in range(columns):
self.grid_columnconfigure(column, weight=1)
def set(self, row, column, value):
widget = self._widgets[row][column]
widget.configure(text=value)
if __name__ == "__main__":
app = ExampleApp()
app.mainloop()
|
rajarahulray/iDetector
|
tests_and_ References/table_view_text_box.py
|
Python
|
mit
| 1,160
|
"""
Franca abstract syntax tree representation.
"""
from abc import ABCMeta
from collections import OrderedDict
class ASTException(Exception):
def __init__(self, message):
super(ASTException, self).__init__()
self.message = message
def __str__(self):
return self.message
class Package(object):
"""
AST representation of a Franca package.
"""
def __init__(self, name, file_name=None, imports=None,
interfaces=None, typecollections=None, comments=None):
"""
Constructs a new Package.
"""
self.name = name
self.files = [file_name] if file_name else []
self.imports = imports if imports else []
self.interfaces = interfaces if interfaces else OrderedDict()
self.typecollections = typecollections if typecollections else \
OrderedDict()
self.comments = comments if comments else OrderedDict()
for item in self.interfaces.values():
item.package = self
for item in self.typecollections.values():
item.package = self
def __contains__(self, namespace):
if not isinstance(namespace, str):
raise TypeError
res = namespace in self.typecollections or namespace in self.interfaces
return res
def __getitem__(self, namespace):
if not isinstance(namespace, str):
raise TypeError
elif namespace in self.typecollections:
return self.typecollections[namespace]
elif namespace in self.interfaces:
return self.interfaces[namespace]
else:
raise KeyError
def __iadd__(self, package):
if not isinstance(package, Package):
raise TypeError
# Ignore the name
self.files += package.files
for item in package.imports:
self.imports.append(item)
for item in package.interfaces.values():
if item.name in self:
raise ASTException("Interface member defined more than"
" once '{}'.".format(item.name))
self.interfaces[item.name] = item
item.package = self
for item in package.typecollections.values():
if item.name in self:
raise ASTException("Type collection member defined more than"
" once '{}'.".format(item.name))
self.typecollections[item.name] = item
item.package = self
return self
class Import(object):
def __init__(self, file_name, namespace=None):
self.file = file_name
self.namespace = namespace # None for "import model"
self.package_reference = None
self.namespace_reference = None
class Namespace(object):
__metaclass__ = ABCMeta
def __init__(self, name, flags=None, members=None, comments=None):
self.package = None
self.name = name
self.flags = flags if flags else [] # Unused
self.version = None
self.typedefs = OrderedDict()
self.enumerations = OrderedDict()
self.structs = OrderedDict()
self.arrays = OrderedDict()
self.maps = OrderedDict()
self.constants = OrderedDict()
self.comments = comments if comments else OrderedDict()
if members:
for member in members:
self._add_member(member)
def __contains__(self, name):
if not isinstance(name, str):
raise TypeError
res = name in self.typedefs or \
name in self.enumerations or \
name in self.structs or \
name in self.arrays or \
name in self.maps or \
name in self.constants
return res
def __getitem__(self, name):
if not isinstance(name, str):
raise TypeError
elif name in self.typedefs:
return self.typedefs[name]
elif name in self.enumerations:
return self.enumerations[name]
elif name in self.structs:
return self.structs[name]
elif name in self.arrays:
return self.arrays[name]
elif name in self.maps:
return self.maps[name]
        elif name in self.constants:
            return self.constants[name]
else:
raise KeyError
def _add_member(self, member):
if isinstance(member, Version):
if not self.version:
self.version = member
else:
raise ASTException("Multiple version definitions.")
elif isinstance(member, Type):
if member.name in self:
raise ASTException(
"Duplicate namespace member '{}'.".format(member.name))
if isinstance(member, Typedef):
self.typedefs[member.name] = member
# Handle anonymous array special case.
if isinstance(member.type, Array):
member.type.namespace = self
elif isinstance(member, Enumeration):
self.enumerations[member.name] = member
elif isinstance(member, Struct):
self.structs[member.name] = member
# Handle anonymous array special case.
for field in member.fields.values():
if isinstance(field.type, Array):
field.type.namespace = self
elif isinstance(member, Array):
self.arrays[member.name] = member
# Handle anonymous array special case.
if isinstance(member.type, Array):
member.type.namespace = self
elif isinstance(member, Map):
self.maps[member.name] = member
# Handle anonymous array special case.
if isinstance(member.key_type, Array):
member.key_type.namespace = self
if isinstance(member.value_type, Array):
member.value_type.namespace = self
elif isinstance(member, Constant):
self.constants[member.name] = member
else:
raise ASTException("Unexpected namespace member type.")
member.namespace = self
else:
raise ValueError("Unexpected namespace member type.")
class TypeCollection(Namespace):
def __init__(self, name, flags=None, members=None, comments=None):
super(TypeCollection, self).__init__(name, flags=flags,
members=members, comments=comments)
class Type(object):
__metaclass__ = ABCMeta
def __init__(self, name=None, comments=None):
self.namespace = None
self.name = name if name else self.__class__.__name__
self.comments = comments if comments else OrderedDict()
class Typedef(Type):
def __init__(self, name, base_type, comments=None):
super(Typedef, self).__init__(name, comments)
self.type = base_type
class PrimitiveType(Type):
__metaclass__ = ABCMeta
def __init__(self):
super(PrimitiveType, self).__init__()
class Int8(PrimitiveType):
def __init__(self):
super(Int8, self).__init__()
class Int16(PrimitiveType):
def __init__(self):
super(Int16, self).__init__()
class Int32(PrimitiveType):
def __init__(self):
super(Int32, self).__init__()
class Int64(PrimitiveType):
def __init__(self):
super(Int64, self).__init__()
class UInt8(PrimitiveType):
def __init__(self):
super(UInt8, self).__init__()
class UInt16(PrimitiveType):
def __init__(self):
super(UInt16, self).__init__()
class UInt32(PrimitiveType):
def __init__(self):
super(UInt32, self).__init__()
class UInt64(PrimitiveType):
def __init__(self):
super(UInt64, self).__init__()
class Boolean(PrimitiveType):
def __init__(self):
super(Boolean, self).__init__()
class Float(PrimitiveType):
def __init__(self):
super(Float, self).__init__()
class Double(PrimitiveType):
def __init__(self):
super(Double, self).__init__()
class String(PrimitiveType):
def __init__(self):
super(String, self).__init__()
class ByteBuffer(PrimitiveType):
def __init__(self):
super(ByteBuffer, self).__init__()
class ComplexType(Type):
__metaclass__ = ABCMeta
def __init__(self, comments=None):
super(ComplexType, self).__init__(comments=comments)
class Value(Type):
    __metaclass__ = ABCMeta
def __init__(self, value, value_type=None):
super(Value, self).__init__(value_type if value_type else self.__class__.__name__)
self.value = value
class IntegerValue(Value):
BINARY = 2
DECIMAL = 10
HEXADECIMAL = 16
def __init__(self, value, base=DECIMAL):
super(IntegerValue, self).__init__(value)
self.base = base
class BooleanValue(Value):
def __init__(self, value):
super(BooleanValue, self).__init__(value)
class FloatValue(Value):
def __init__(self, value):
super(FloatValue, self).__init__(value)
class DoubleValue(Value):
def __init__(self, value):
super(DoubleValue, self).__init__(value)
class StringValue(Value):
def __init__(self, value):
super(StringValue, self).__init__(value)
class Enumeration(ComplexType):
def __init__(self, name, enumerators=None, extends=None, flags=None, comments=None):
super(Enumeration, self).__init__(comments=comments)
self.name = name
self.enumerators = enumerators if enumerators else OrderedDict()
self.extends = extends
self.reference = None
self.flags = flags if flags else [] # Unused
class Enumerator(object):
def __init__(self, name, value=None, comments=None):
self.name = name
self.value = value
self.comments = comments if comments else OrderedDict()
class Struct(ComplexType):
def __init__(self, name, fields=None, extends=None, flags=None, comments=None):
super(Struct, self).__init__(comments=comments)
self.name = name
self.fields = fields if fields else OrderedDict()
self.extends = extends
self.reference = None
self.flags = flags if flags else []
class StructField(object):
def __init__(self, name, field_type, comments=None):
self.name = name
self.type = field_type
self.comments = comments if comments else OrderedDict()
class Array(ComplexType):
def __init__(self, name, element_type, comments=None):
super(Array, self).__init__(comments=comments)
self.name = name # None for implicit arrays.
self.type = element_type
class Map(ComplexType):
def __init__(self, name, key_type, value_type, comments=None):
super(Map, self).__init__(comments=comments)
self.name = name
self.key_type = key_type
self.value_type = value_type
class Constant(ComplexType):
def __init__(self, name, element_type, element_value, comments=None):
super(Constant, self).__init__(comments=comments)
self.name = name
self.type = element_type
self.value = element_value
class Reference(Type):
def __init__(self, name):
super(Reference, self).__init__()
self.name = name
self.reference = None
class Interface(Namespace):
def __init__(self, name, flags=None, members=None, extends=None, comments=None):
super(Interface, self).__init__(name=name, flags=flags, members=None, comments=comments)
self.attributes = OrderedDict()
self.methods = OrderedDict()
self.broadcasts = OrderedDict()
self.extends = extends
self.reference = None
if members:
for member in members:
self._add_member(member)
def __contains__(self, name):
if not isinstance(name, str):
raise TypeError
res = super(Interface, self).__contains__(name) or \
name in self.attributes or \
name in self.methods or \
name in self.broadcasts
return res
def __getitem__(self, name):
if not isinstance(name, str):
raise TypeError
elif name in self.attributes:
return self.attributes[name]
elif name in self.methods:
return self.methods[name]
elif name in self.broadcasts:
return self.broadcasts[name]
else:
return super(Interface, self).__getitem__(name)
def _add_member(self, member):
if isinstance(member, Type):
if member.name in self:
raise ASTException(
"Duplicate namespace member '{}'.".format(member.name))
if isinstance(member, Attribute):
self.attributes[member.name] = member
# Handle anonymous array special case.
if isinstance(member.type, Array):
member.type.namespace = self
elif isinstance(member, Method):
self.methods[member.name] = member
# Handle anonymous array special case.
for arg in member.in_args.values():
if isinstance(arg.type, Array):
arg.type.namespace = self
for arg in member.out_args.values():
if isinstance(arg.type, Array):
arg.type.namespace = self
elif isinstance(member, Broadcast):
self.broadcasts[member.name] = member
# Handle anonymous array special case.
for arg in member.out_args.values():
if isinstance(arg.type, Array):
arg.type.namespace = self
else:
super(Interface, self)._add_member(member)
member.namespace = self
else:
super(Interface, self)._add_member(member)
class Version(object):
def __init__(self, major, minor):
self.major = major
self.minor = minor
def __str__(self):
return "{}.{}".format(self.major, self.minor)
class Attribute(Type):
def __init__(self, name, attr_type, flags=None, comments=None):
super(Attribute, self).__init__(name, comments)
self.type = attr_type
self.flags = flags if flags else []
class Method(Type):
def __init__(self, name, flags=None,
in_args=None, out_args=None, errors=None, comments=None):
super(Method, self).__init__(name, comments)
self.flags = flags if flags else []
self.in_args = in_args if in_args else OrderedDict()
self.out_args = out_args if out_args else OrderedDict()
# Errors can be an OrderedDict() or a Reference to an enumeration.
self.errors = errors if errors else OrderedDict()
class Broadcast(Type):
def __init__(self, name, flags=None, out_args=None, comments=None):
super(Broadcast, self).__init__(name, comments)
self.flags = flags if flags else []
self.out_args = out_args if out_args else OrderedDict()
class Argument(object):
def __init__(self, name, arg_type, comments=None):
self.name = name
self.type = arg_type
self.comments = comments if comments else OrderedDict()
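# A minimal construction sketch (illustrative only; the names below are
# invented for this example and are not part of the pyfranca test suite):
if __name__ == "__main__":
    tc = TypeCollection("MyTypes", members=[
        Version(1, 0),
        Typedef("Celsius", Float()),
    ])
    pkg = Package("org.example", file_name="example.fidl",
                  typecollections=OrderedDict([("MyTypes", tc)]))
    assert "MyTypes" in pkg and "Celsius" in pkg["MyTypes"]
    print(pkg["MyTypes"].version)  # -> 1.0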
|
zayfod/pyfranca
|
pyfranca/ast.py
|
Python
|
mit
| 15,502
|
#!/usr/bin/env python
"""
Test code for blackjack game. Tests can be run with py.test or nosetests
"""
from __future__ import print_function
from unittest import TestCase
from card_games import blackjack
from card_games.blackjack import BlackJack
print(blackjack.__file__)
class TestRule(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_init(self):
mygame = BlackJack()
self.assertEqual(len(mygame.player_hand), 2) # Initial hand for Player
self.assertEqual(len(mygame.dealer_hand), 2) # Initial hand for Dealer
def test_player_bust(self):
mygame = BlackJack()
        for cnt in range(10):  # Draw 10 cards - sure to lose
mygame.draw_card_player()
self.assertEqual(len(mygame.player_hand), 12) # Twelve cards in Player's hand
self.assertEqual(mygame.game_result(), 'bust') # Definitely a bust
|
sampathweb/game_app
|
card_games/test/test_blackjack.py
|
Python
|
mit
| 917
|
"""Automabot bot for Discord."""
from os import path
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), 'r', encoding='utf-8') as f:
long_description = f.read()
setup(
name='automabot',
version='0.0.1.dev20170604', # see PEP-0440
python_requires='>=3.6',
author='Maël Pedretti & Chea Dany',
author_email='mael.pedretti@he-arc.ch & dany.chea@he-arc.ch',
url='https://github.com/73VW/AutomaBot',
license='https://opensource.org/licenses/BSD-3-Clause',
description=__doc__,
long_description=long_description,
packages=find_packages(exclude=('contrib', 'docs', 'tests')),
keywords='discord asyncio bot',
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Education',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Home Automation'
),
install_requires=(
'discord.py>=0.16.8',
'aiohttp>=1.0.0,<1.1.0',
'pyfiglet>=0.7.5',
'toml>=0.9.2'
),
extras_require={
'fast': ('cchardet', 'aiodns'), # making it faster (recommended)
'qa': ('flake8', 'isort', 'pycodestyle', 'pydocstyle', 'rstcheck'),
'docs': ('Sphinx>=1.6.0', 'sphinxcontrib-trio')
},
)
|
WyllVern/AutomaBot
|
setup.py
|
Python
|
mit
| 1,602
|
"""
Base classes for writing management commands (named commands which can
be executed through ``tipi.py``).
"""
import os
import sys
from ConfigParser import ConfigParser
from optparse import make_option, OptionParser
from virtualenv import resolve_interpreter
class CommandError(Exception):
"""
Exception class indicating a problem while executing a management
command.
If this exception is raised during the execution of a management
command, it will be caught and turned into a nicely-printed error
message to the appropriate output stream (i.e., stderr); as a
result, raising this exception (with a sensible description of the
error) is the preferred way to indicate that something has gone
wrong in the execution of a command.
"""
pass
class BaseCommand(object):
"""
The base class from which all management commands ultimately
derive.
Use this class if you want access to all of the mechanisms which
parse the command-line arguments and work out what code to call in
response; if you don't need to change any of that behavior,
consider using one of the subclasses defined in this file.
If you are interested in overriding/customizing various aspects of
the command-parsing and -execution behavior, the normal flow works
as follows:
1. ``tipi.py`` loads the command class
and calls its ``run_from_argv()`` method.
2. The ``run_from_argv()`` method calls ``create_parser()`` to get
an ``OptionParser`` for the arguments, parses them, performs
any environment changes requested by options like
``pythonpath``, and then calls the ``execute()`` method,
passing the parsed arguments.
3. The ``execute()`` method attempts to carry out the command by
calling the ``handle()`` method with the parsed arguments; any
output produced by ``handle()`` will be printed to standard
output.
4. If ``handle()`` raised a ``CommandError``, ``execute()`` will
instead print an error message to ``stderr``.
Thus, the ``handle()`` method is typically the starting point for
subclasses; many built-in commands and command types either place
all of their logic in ``handle()``, or perform some additional
parsing work in ``handle()`` and then delegate from it to more
specialized methods as needed.
Several attributes affect behavior at various steps along the way:
``args``
A string listing the arguments accepted by the command,
suitable for use in help messages; e.g., a command which takes
a list of application names might set this to '<appname
appname ...>'.
``help``
A short description of the command, which will be printed in
help messages.
``option_list``
This is the list of ``optparse`` options which will be fed
into the command's ``OptionParser`` for parsing arguments.
"""
# Metadata about this command.
option_list = (
make_option('-v', '--verbose', action='store', dest='verbose', default='1',
type='choice', choices=['0', '1', '2'],
help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
make_option('-p', '--python',
help='The Python interpreter to use, e.g., --python=python2.5 will use the python2.5 '
'interpreter to create the new environment. The default is the interpreter that '
'virtualenv was installed with (%s)' % sys.executable),
make_option('--traceback', action='store_true',
help='Print traceback on exception'),
)
help = ''
args = ''
#TODO syntax coloring support
#def __init__(self):
# #self.style = color_style()
# try:
# home = os.getenv('USERPROFILE') or os.getenv('HOME')
# config = ConfigParser(open(os.path.join(home, '.tipirc')))
# except IOError:
# pass
# except:
# pass
#
# self._interpreter = resolve_interpreter('python')
#
#@property
#def python_interpreter(self):
# return self._interpreter
def get_version(self):
"""
Return the Django version, which should be correct for all
built-in Django commands. User-supplied commands should
override this method.
"""
#TODO placeholder
return (0, 1, 0,)
def usage(self, subcommand):
"""
Return a brief description of how to use this command, by
default from the attribute ``self.help``.
"""
usage = '%%prog %s [options] %s' % (subcommand, self.args)
if self.help:
return '%s\n\n%s' % (usage, self.help)
else:
return usage
def create_parser(self, prog_name, subcommand):
"""
Create and return the ``OptionParser`` which will be used to
parse the arguments to this command.
"""
return OptionParser(prog=prog_name,
usage=self.usage(subcommand),
version=str(self.get_version()),
option_list=self.option_list)
def print_help(self, prog_name, subcommand):
"""
Print the help message for this command, derived from
``self.usage()``.
"""
parser = self.create_parser(prog_name, subcommand)
parser.print_help()
def run_from_argv(self, argv):
"""
Set up any environment changes requested, then run this command.
"""
parser = self.create_parser(argv[0], argv[1])
options, args = parser.parse_args(argv[2:])
self.execute(*args, **options.__dict__)
def execute(self, *args, **options):
"""
Try to execute this command. If the command raises a
``CommandError``, intercept it and print it sensibly to
stderr.
"""
try:
#output = self.handle(*args, **options)
print self.handle(*args, **options)
#if output:
# print output
except CommandError, e:
#sys.stderr.write(self.style.ERROR(str('Error: %s\n' % e)))
sys.stderr.write(str('Error: %s\n' % e))
sys.exit(1)
def handle(self, *args, **options):
"""
The actual logic of the command. Subclasses must implement
this method.
"""
raise NotImplementedError()
#class AppCommand(BaseCommand):
# """
# A management command which takes one or more installed application
# names as arguments, and does something with each of them.
#
# Rather than implementing ``handle()``, subclasses must implement
# ``handle_app()``, which will be called once for each application.
#
# """
# args = '<appname appname ...>'
#
# def handle(self, *app_labels, **options):
# from django.db import models
# if not app_labels:
# raise CommandError('Enter at least one appname.')
# try:
# app_list = [models.get_app(app_label) for app_label in app_labels]
# except (ImproperlyConfigured, ImportError), e:
# raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
# output = []
# for app in app_list:
# app_output = self.handle_app(app, **options)
# if app_output:
# output.append(app_output)
# return '\n'.join(output)
#
# def handle_app(self, app, **options):
# """
# Perform the command's actions for ``app``, which will be the
# Python module corresponding to an application name given on
# the command line.
#
# """
# raise NotImplementedError()
class LabelCommand(BaseCommand):
"""
A management command which takes one or more arbitrary arguments
(labels) on the command line, and does something with each of
them.
Rather than implementing ``handle()``, subclasses must implement
``handle_label()``, which will be called once for each label.
If the arguments should be names of installed applications, use
``AppCommand`` instead.
"""
args = '<label label ...>'
label = 'label'
def handle(self, *labels, **options):
if not labels:
raise CommandError('Enter at least one %s.' % self.label)
output = []
for label in labels:
label_output = self.handle_label(label, **options)
if label_output:
output.append(label_output)
return '\n'.join(output)
def handle_label(self, label, **options):
"""
Perform the command's actions for ``label``, which will be the
string as given on the command line.
"""
raise NotImplementedError()
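#class EchoCommand(LabelCommand):
#    """
#    Illustrative subclass (not part of tipi): echoes each label back,
#    showing the ``handle_label()`` contract described above.
#
#    """
#    help = 'Echo each label back to stdout.'
#
#    def handle_label(self, label, **options):
#        return 'echo: %s' % label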
#class NoArgsCommand(BaseCommand):
# """
# A command which takes no arguments on the command line.
#
# Rather than implementing ``handle()``, subclasses must implement
# ``handle_noargs()``; ``handle()`` itself is overridden to ensure
# no arguments are passed to the command.
#
# Attempting to pass arguments will raise ``CommandError``.
#
# """
# args = ''
#
# def handle(self, *args, **options):
# if args:
# raise CommandError("Command doesn't accept any arguments")
# return self.handle_noargs(**options)
#
# def handle_noargs(self, **options):
# """
# Perform this command's actions.
#
# """
# raise NotImplementedError()
#def copy_helper(style, app_or_project, name, directory, other_name=''):
# """
# Copies either a Django application layout template or a Django project
# layout template into the specified directory.
#
# """
# # style -- A color style object (see django.core.management.color).
# # app_or_project -- The string 'app' or 'project'.
# # name -- The name of the application or project.
# # directory -- The directory to which the layout template should be copied.
# # other_name -- When copying an application layout, this should be the name
# # of the project.
# import re
# import shutil
# other = {'project': 'app', 'app': 'project'}[app_or_project]
# if not re.search(r'^[_a-zA-Z]\w*$', name): # If it's not a valid directory name.
# # Provide a smart error message, depending on the error.
# if not re.search(r'^[_a-zA-Z]', name):
# message = 'make sure the name begins with a letter or underscore'
# else:
# message = 'use only numbers, letters and underscores'
# raise CommandError("%r is not a valid %s name. Please %s." % (name, app_or_project, message))
# top_dir = os.path.join(directory, name)
# try:
# os.mkdir(top_dir)
# except OSError, e:
# raise CommandError(e)
#
# # Determine where the app or project templates are. Use
# # django.__path__[0] because we don't know into which directory
# # django has been installed.
# template_dir = os.path.join(django.__path__[0], 'conf', '%s_template' % app_or_project)
#
# for d, subdirs, files in os.walk(template_dir):
# relative_dir = d[len(template_dir)+1:].replace('%s_name' % app_or_project, name)
# if relative_dir:
# os.mkdir(os.path.join(top_dir, relative_dir))
# for i, subdir in enumerate(subdirs):
# if subdir.startswith('.'):
# del subdirs[i]
# for f in files:
# if not f.endswith('.py'):
# # Ignore .pyc, .pyo, .py.class etc, as they cause various
# # breakages.
# continue
# path_old = os.path.join(d, f)
# path_new = os.path.join(top_dir, relative_dir, f.replace('%s_name' % app_or_project, name))
# fp_old = open(path_old, 'r')
# fp_new = open(path_new, 'w')
# fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name))
# fp_old.close()
# fp_new.close()
# try:
# shutil.copymode(path_old, path_new)
# _make_writeable(path_new)
# except OSError:
# sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new))
#
#def _make_writeable(filename):
# """
# Make sure that the file is writeable. Useful if our source is
# read-only.
#
# """
# import stat
# if sys.platform.startswith('java'):
# # On Jython there is no os.access()
# return
# if not os.access(filename, os.W_OK):
# st = os.stat(filename)
# new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
# os.chmod(filename, new_permissions)
|
unbracketed/tipi
|
tipi/commands/base.py
|
Python
|
mit
| 12,932
|
from keras.models import Sequential
from keras.layers import LSTM
import numpy as np
model = Sequential()
ly = LSTM(2, activation='tanh', recurrent_activation='relu', implementation=1,
          stateful=False, batch_input_shape=(5, 3, 3))
model.add(ly)
model.compile(optimizer='sgd', loss='mse')
kernel = np.ones((3, 8))
rec_kernel = np.ones((2, 8))
bias = np.array([1, 2, -1, 0, 3, 4, 5, -2])/10
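# Shape note (standard Keras LSTM weight layout): with units=2 and input
# feature size 3, the kernel is (3, 8) and the recurrent kernel is (2, 8)
# because the four gate kernels (input, forget, cell, output) are
# concatenated along the last axis, giving 4 * units = 8 columns; the bias
# has length 8 for the same reason.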
k = 0
for h in range(0, 3):
for w in range(0, 8):
k += 1
kernel[h, w] = (k % 5 - 2)/10
k = 0
for h in range(0, 2):
for w in range(0, 8):
k += 1
rec_kernel[h, w] = (k % 5 - 2)/10
parameters = [kernel, rec_kernel, bias]
model.set_weights(parameters)
data = np.ndarray((5, 3, 3))
l = 0
for b in range(0, 5):
for h in range(0, 3):
for c in range(0, 3):
l += 1
data[b, h, c] = (l % 5 + 1)/10
output = model.predict(data, batch_size=5) # the batch_size has no impact on the result here
print(output)
print(model.summary())
print(model.get_config())
print(model.get_weights())
|
adamtiger/NNSharp
|
PythonUtils/LSTM.py
|
Python
|
mit
| 994
|
"""
[2015-04-01] Challenge #208 [Intermediate] ASCII Gradient Generator
https://www.reddit.com/r/dailyprogrammer/comments/3104wu/20150401_challenge_208_intermediate_ascii/
# [](#IntermediateIcon) _(Intermediate)_: ASCII Gradient Generator
A linear colour gradient is where an image transitions through a range of colours, [like
this](http://i.imgur.com/IPwnI8X.png). A gradient doesn't need to be directly horizontal or vertical - it can be
[diagonal](http://i.imgur.com/D4trkEk.png) too, or only be [longer or shorter](http://i.imgur.com/8CHx95i.png) than
usual. It can also cycle through [as many colours as you like](http://i.imgur.com/Br3xwXM.png).
A radial colour gradient is a similar concept, except the colours move [radially outwards like
this](http://i.imgur.com/C6SE6m3.png), rather than linearly across. Radial gradients can also be in [different
positions or with different colours](http://i.imgur.com/S19EOu3.png).
To describe a gradient, you need two things - the colours in it, and its location. Describing the location of a radial
gradient is easy: for a radial gradient [like this](http://i.imgur.com/dTvfj7f.png), you only need to know the center
of the gradient (the red dot), and the radius from the center at which the gradient finishes (`r`). To locate a linear
gradient [like this](http://i.imgur.com/kyZTQnK.png), you need to know two points - the start (red) and end (green)
location. The gradient colours run perpendicular to the line joining the start and end points.
Today, we won't be dealing with colours. Instead, we'll be dealing with characters on the screen. You'll accept the
parameters of a gradient, and you'll output the displayed gradient.
# Formal Inputs and Outputs
## Input Description
You will first accept the size of the output display, as a width and height in characters, like this:
40 30
This corresponds to a grid 40 across and 30 down, like this:
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
The grid follows **screen space**, so the **top-left** corner is position **(0, 0)**.
Next, you will accept the characters that make up the gradient 'colours', from start to finish (or from inside to
outside, for a radial gradient), like this: (note the space at the start)
.,:;xX&@
Any points outside the gradient will have the first/last character, depending on which side of the gradient they're on.
After this, you will accept the parameters of the gradient. This may take one of two forms:
* For a **radial** gradient, the next line will look like this:
`radial x y r`
Where **(`x`, `y`)** is the center of the gradient, and **`r`** is the radius of the gradient, both in pixels.
* For a **linear** gradient, the next line will look like this:
`linear x1 y1 x2 y2`
Where **(`x1`, `y1`)** is the start point of the gradient, and **(`x2`, `y2`)** is the end point of the gradient,
both in pixel measure.
## Output Description
You are to display the given gradient on a grid with the given size, like this:
@@@@@@@@@@@&&&&&XXXXXXXXX&&&&&@@@@@@@@@@
@@@@@@@@@@&&&&XXXXXXXXXXXXX&&&&@@@@@@@@@
@@@@@@@@&&&&XXXXXXxxxxxXXXXXX&&&&@@@@@@@
@@@@@@@&&&&XXXXxxxxxxxxxxxXXXX&&&&@@@@@@
@@@@@@@&&&XXXxxxxxx;;;xxxxxxXXX&&&@@@@@@
@@@@@@&&&XXXxxxx;;;;;;;;;xxxxXXX&&&@@@@@
@@@@@&&&XXXxxx;;;;;;;;;;;;;xxxXXX&&&@@@@
@@@@@&&XXXxxx;;;;:::::::;;;;xxxXXX&&@@@@
@@@@&&&XXxxx;;;:::::::::::;;;xxxXX&&&@@@
@@@@&&XXXxx;;;::::,,,,,::::;;;xxXXX&&@@@
@@@&&&XXxxx;;:::,,,,,,,,,:::;;xxxXX&&&@@
@@@&&XXXxx;;;::,,,,...,,,,::;;;xxXXX&&@@
@@@&&XXXxx;;:::,,.......,,:::;;xxXXX&&@@
@@@&&XXxxx;;::,,,... ...,,,::;;xxxXX&&@@
@@@&&XXxx;;;::,,... ...,,::;;;xxXX&&@@
@@@&&XXxx;;;::,,.. ..,,::;;;xxXX&&@@
@@@&&XXxx;;;::,,... ...,,::;;;xxXX&&@@
@@@&&XXxxx;;::,,,... ...,,,::;;xxxXX&&@@
@@@&&XXXxx;;:::,,.......,,:::;;xxXXX&&@@
@@@&&XXXxx;;;::,,,,...,,,,::;;;xxXXX&&@@
@@@&&&XXxxx;;:::,,,,,,,,,:::;;xxxXX&&&@@
@@@@&&XXXxx;;;::::,,,,,::::;;;xxXXX&&@@@
@@@@&&&XXxxx;;;:::::::::::;;;xxxXX&&&@@@
@@@@@&&XXXxxx;;;;:::::::;;;;xxxXXX&&@@@@
@@@@@&&&XXXxxx;;;;;;;;;;;;;xxxXXX&&&@@@@
@@@@@@&&&XXXxxxx;;;;;;;;;xxxxXXX&&&@@@@@
@@@@@@@&&&XXXxxxxxx;;;xxxxxxXXX&&&@@@@@@
@@@@@@@&&&&XXXXxxxxxxxxxxxXXXX&&&&@@@@@@
@@@@@@@@&&&&XXXXXXxxxxxXXXXXX&&&&@@@@@@@
@@@@@@@@@@&&&&XXXXXXXXXXXXX&&&&@@@@@@@@@
# Sample Inputs and Outputs
## Gradient 1
### Input
40 30
.,:;xX&@
radial 20 15 20
### Output
(shown above, in **Output Description**)
## Gradient 2
Notice how the colours appear in the reverse order, as the end point is to the *left* of the start point.
### Input
60 30
'"^+$
linear 30 30 0 0
### Output
$$$$$$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$++++++++++^^^^^^^^^^""""""""""'''''''''
$++++++++++^^^^^^^^^^""""""""""'''''''''
++++++++++^^^^^^^^^^""""""""""'''''''''
+++++++++^^^^^^^^^^""""""""""'''''''''
++++++++^^^^^^^^^^""""""""""'''''''''
+++++++^^^^^^^^^^""""""""""'''''''''
++++++^^^^^^^^^^""""""""""'''''''''
+++++^^^^^^^^^^""""""""""'''''''''
++++^^^^^^^^^^""""""""""'''''''''
+++^^^^^^^^^^""""""""""'''''''''
++^^^^^^^^^^""""""""""'''''''''
+^^^^^^^^^^""""""""""'''''''''
^^^^^^^^^^""""""""""'''''''''
^^^^^^^^^""""""""""'''''''''
^^^^^^^^""""""""""'''''''''
^^^^^^^""""""""""'''''''''
^^^^^^""""""""""'''''''''
^^^^^""""""""""'''''''''
^^^^""""""""""'''''''''
^^^""""""""""'''''''''
^^""""""""""'''''''''
## Gradient 3
The gradient start/end/centre points don't have to be inside the grid!
### Input
40 40
aaabcccdeeefggg
radial -10 20 60
### Output
ccccccccccdddddeeeeeeeeeeeeeeeffffgggggg
cccccccccccdddddeeeeeeeeeeeeeefffffggggg
ccccccccccccdddddeeeeeeeeeeeeeeffffggggg
cccccccccccccdddddeeeeeeeeeeeeeffffggggg
cccccccccccccdddddeeeeeeeeeeeeefffffgggg
ccccccccccccccdddddeeeeeeeeeeeeeffffgggg
cccccccccccccccddddeeeeeeeeeeeeeffffgggg
cccccccccccccccdddddeeeeeeeeeeeeeffffggg
bcccccccccccccccddddeeeeeeeeeeeeeffffggg
bbccccccccccccccdddddeeeeeeeeeeeeffffggg
bbbccccccccccccccddddeeeeeeeeeeeeffffggg
bbbbcccccccccccccddddeeeeeeeeeeeeeffffgg
bbbbcccccccccccccddddeeeeeeeeeeeeeffffgg
bbbbbcccccccccccccddddeeeeeeeeeeeeffffgg
abbbbcccccccccccccddddeeeeeeeeeeeeffffgg
abbbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
abbbbbccccccccccccddddeeeeeeeeeeeeffffgg
abbbbcccccccccccccddddeeeeeeeeeeeeffffgg
bbbbbcccccccccccccddddeeeeeeeeeeeeffffgg
bbbbcccccccccccccddddeeeeeeeeeeeeeffffgg
bbbbcccccccccccccddddeeeeeeeeeeeeeffffgg
bbbccccccccccccccddddeeeeeeeeeeeeffffggg
bbccccccccccccccdddddeeeeeeeeeeeeffffggg
bcccccccccccccccddddeeeeeeeeeeeeeffffggg
cccccccccccccccdddddeeeeeeeeeeeeeffffggg
cccccccccccccccddddeeeeeeeeeeeeeffffgggg
ccccccccccccccdddddeeeeeeeeeeeeeffffgggg
cccccccccccccdddddeeeeeeeeeeeeefffffgggg
cccccccccccccdddddeeeeeeeeeeeeeffffggggg
ccccccccccccdddddeeeeeeeeeeeeeeffffggggg
cccccccccccdddddeeeeeeeeeeeeeefffffggggg
# Notes
Got any cool challenge ideas? Submit them to /r/DailyProgrammer_Ideas!
"""
def main():
pass
if __name__ == "__main__":
main()
|
DayGitH/Python-Challenges
|
DailyProgrammer/DP20150401B.py
|
Python
|
mit
| 9,284
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('article', '0019_remove_article_ordering_featured'),
]
operations = [
migrations.RemoveField(
model_name='article',
name='issue',
),
]
|
F483/trainlessmagazine.com
|
article/migrations/0020_remove_article_issue.py
|
Python
|
mit
| 366
|
" tail boom flexibility "
from numpy import pi
from gpkit import Model, parse_variables, SignomialsEnabled
class TailBoomFlexibility(Model):
""" Tail Boom Flexibility Model
Variables
---------
Fne [-] tail boom flexibility factor
deda [-] wing downwash derivative
SMcorr 0.55 [-] corrected static margin
sph1 [-] flexibility helper variable 1
sph2 [-] flexibility helper variable 2
LaTex Strings
-------------
Fne F_{\mathrm{NE}}
deda d\\epsilon/d\\alpha
SMcorr SM_{\\mathrm{corr}}
"""
@parse_variables(__doc__, globals())
def setup(self, htail, hbending, wing):
mh = htail.mh
mw = wing.mw
Vh = htail.Vh
th = hbending.th
CLhmin = htail.CLhmin
CLwmax = wing.planform.CLmax
Sw = wing.planform.S
bw = wing.planform.b
lh = htail.lh
CM = wing.planform.CM
constraints = [
Fne >= 1 + mh*th,
sph1*(mw*Fne/mh/Vh) + deda <= 1,
sph2 <= Vh*CLhmin/CLwmax,
# (sph1 + sph2).mono_lower_bound({"sph1": .48, "sph2": .52}) >= (
# SMcorr + wing["C_M"]/wing["C_{L_{max}}"]),
deda >= mw*Sw/bw/4/pi/lh]
with SignomialsEnabled():
constraints.extend([sph1 + sph2 >= SMcorr + CM/CLwmax])
return constraints
|
convexengineering/gplibrary
|
gpkitmodels/SP/aircraft/tail/tail_boom_flex.py
|
Python
|
mit
| 1,453
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This script can generate automate scripts for open source python project.
Scroll to ``if __name__ == "__main__":`` for more info.
"""
from __future__ import print_function
import sys
import datetime
from os import walk, mkdir
from os.path import join, abspath, dirname, basename
def write(s, path, encoding="utf-8"):
"""Write string to text file.
"""
with open(path, "wb") as f:
f.write(s.encode(encoding))
def read(path, encoding="utf-8"):
"""Read string from text file.
"""
with open(path, "rb") as f:
return f.read().decode(encoding)
def initiate_project(
package_name,
repo_name,
python_version,
github_username,
author_name,
author_email,
maintainer_name,
maintainer_email,
year,
s3_bucket,
):
"""
Generate project start files.
"""
print("Initate '%s-project' from template ..." % package_name)
template_dir = join(dirname(abspath(__file__)), "template")
output_dir = join(dirname(abspath(__file__)), "%s-project" % package_name)
for src_dir, dir_list, file_list in walk(template_dir):
# destination directory
dst_dir = src_dir.replace(template_dir, output_dir, 1)
if basename(dst_dir) == "__package__":
dst_dir = join(dirname(dst_dir), package_name)
# make destination directory
        try:
            print("  Create '%s' ..." % dst_dir)
            mkdir(dst_dir)
        except OSError:
            # The directory may already exist.
            pass
# files
for filename in file_list:
src = join(src_dir, filename)
dst = join(dst_dir, filename)
content = read(src).\
replace("{{ package_name }}", package_name).\
replace("{{ repo_name }}", repo_name).\
replace("{{ python_version }}", python_version).\
replace("{{ github_username }}", github_username).\
replace("{{ author_name }}", author_name).\
replace("{{ author_email }}", author_email).\
replace("{{ maintainer_name }}", maintainer_name).\
replace("{{ maintainer_email }}", maintainer_email).\
replace("{{ year }}", year).\
replace("{{ s3_bucket }}", s3_bucket)
print(" Create '%s' ..." % dst)
write(content, dst)
print(" Complete!")
if __name__ == "__main__":
# --- EDIT THESE VARIABLE based on your own situation ---
package_name = "picage" # IMPORTANT
repo_name = "{package_name}-project".format(package_name=package_name)
python_version = "python%s%s" % (
sys.version_info.major, sys.version_info.minor)
github_username = "MacHu-GWU" # IMPORTANT
author_name = "Sanhe Hu" # IMPORTANT
author_email = "husanhe@gmail.com" # IMPORTANT
maintainer_name = author_name
maintainer_email = author_email
year = str(datetime.datetime.utcnow().year)
s3_bucket = "www.wbh-doc.com" # IMPORTANT
initiate_project(
package_name,
repo_name,
python_version,
github_username,
author_name,
author_email,
maintainer_name,
maintainer_email,
year,
s3_bucket,
)
|
MacHu-GWU/elementary_math-project
|
start-a-project/init_project.py
|
Python
|
mit
| 3,304
|
"""
WSGI config for mozblog project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mozblog.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
renancamm/mozblog
|
mozblog/wsgi.py
|
Python
|
mit
| 483
|
from configparser import ConfigParser
import v20
# Create an object config
config = ConfigParser()
# Read the config
config.read("../API_Connection_Oanda/pyalgo.cfg")
ctx = v20.Context(
    'api-fxpractice.oanda.com',
    443,
    True,
    application='sample_code',
    token=config['oanda_v20']['access_token'],
    datetime_format='RFC3339')
# class oanda_info():
def get_Id_Account():
response = ctx.account.list()
# Ask for the Oanda ID Account
accounts = response.get('accounts')
# Show the ID
    for account in accounts:
        print(account)
def get_instruments():
response = ctx.account.instruments(
config['oanda_v20']['account_id'])
instruments = response.get('instruments')
# instruments[0].dict()
for instrument in instruments:
ins = instrument.dict()
print('%20s | %10s' % (ins['displayName'],
ins['name']))
|
cgomezfandino/Project_PTX
|
API_Connection_Oanda/PTX_oandaInfo.py
|
Python
|
mit
| 972
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Stat',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateTimeField(auto_now_add=True)),
('up', models.BigIntegerField()),
('down', models.BigIntegerField()),
('live_time', models.BigIntegerField()),
],
options={
},
bases=(models.Model,),
),
]
|
mtskelton/huawei-4g-stats
|
stats/migrations/0001_initial.py
|
Python
|
mit
| 718
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# pipe.py
#
# Copyright 2014 Giorgio Gilestro <gg@kozak>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
# Listen from pipefile
# e.g.: echo "TEST COMMAND" > /tmp/pipefile
import os, tempfile
import logging
import threading
class pipe():
def __init__(self, pipefile, queue, actions):
"""
Reads from a pipe
"""
self.pipefile = pipefile
self.queue = queue
actions["pipe"] = {}
self.__makefifo()
self.listening_thread = threading.Thread(target=self.listen_from_pipe)
#self.listening_thread.daemon = True
self.isListening = True
self.listening_thread.start()
def transmit(self, received):
"""
"""
cmd = ("pipe", received)
self.queue.put(cmd)
def __makefifo(self):
"""
"""
        try:
            os.mkfifo(self.pipefile)
            logging.debug("Listening to FIFO Pipe at %s" % self.pipefile)
            return True
        except OSError:
            logging.debug("Error creating FIFO Pipe %s. File already existing?" % self.pipefile)
            return False
def listen_from_pipe(self):
"""
"""
while self.isListening:
logging.debug("Listening from PIPE %s" % self.pipefile)
with open(self.pipefile) as fifo:
self.transmit(fifo.read().strip())
if __name__ == '__main__':
    import Queue
    p = pipe("pipefile", Queue.Queue(), {})
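    # Consumer sketch (illustrative): commands written to the FIFO arrive on
    # the queue as ("pipe", <text>) tuples:
    #     while True:
    #         source, command = p.queue.get()
    #         print "%s -> %s" % (source, command)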
|
ggilestro/majordomo
|
listeners/pipe.py
|
Python
|
mit
| 2,193
|
from jsbuild.attrdict import AttrDict
from time import strftime
class Manifest(AttrDict):
def __init__(self,*args,**kwargs):
super(AttrDict, self).__init__(*args,**kwargs)
self._buffer_ = None
self._parent_ = None
if not self.__contains__('_dict_'):
self['_dict_'] = {}
self['_dict_']['timestamp'] = int(strftime("%Y%m%d%H%M"))
def __getitem__(self,name):
item = super(Manifest,self).__getitem__(name)
if isinstance(item,Manifest) and not item._parent_:
item._parent_ = self
elif isinstance(item,str):
root = self
while root._parent_: root = root._parent_
item = item%root._dict_
return item
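# Usage sketch (illustrative; exact behaviour depends on AttrDict): string
# values are %-interpolated against the root manifest's _dict_, which is
# seeded with a build timestamp, so entries can reference it:
#     m = Manifest({'name': 'app-%(timestamp)s'})
#     m['name']  # -> e.g. 'app-201401011200'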
|
azer/jsbuild
|
jsbuild/manifest.py
|
Python
|
mit
| 673
|
import os
from io import open
from setuptools import setup
about = {}
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'stringsheet', '__init__.py'), encoding='utf-8') as f:
for line in f:
if line.startswith('__'):
            key, value = line.split('=', 1)
about[key.strip()] = value.strip().strip('\'')
with open('README.rst', encoding='utf-8') as f:
readme = f.read()
setup(
name=about['__title__'],
version=about['__version__'],
description=about['__description__'],
long_description=readme,
author=about['__author__'],
author_email=about['__author_email__'],
url=about['__url__'],
license=about['__license__'],
packages=['stringsheet'],
install_requires=[
'httplib2',
'apiclient',
'lxml',
'google-api-python-client'
],
entry_points={
'console_scripts': [
'stringsheet = stringsheet.cli:main'
]
}
)
|
Tunous/StringSheet
|
setup.py
|
Python
|
mit
| 977
|
# Copyright (c) 2012 <Jaume Devesa (jaumedevesa@gmail.com)>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
gists.gists
~~~~~~~~~~~
This single-function module defines the input parameters and the subparsers,
and coordinates the 'handlers'->'actions'->'formatters' execution workflow
"""
import argparse
from actions import (list_gists, show, get, post, delete, update, authorize,
fork, star, unstar)
from handlers import (handle_list, handle_show, handle_update,
handle_authorize, handle_get, handle_post, handle_delete,
handle_fork, handle_star)
from formatters import (format_list, format_post, format_update,
format_get, format_show, format_delete,
format_authorize, format_star)
from version import VERSION
USER_MSG = ("github username. Use this user instead of the defined one in "
"the configuration file. If action demands authentication, a "
"password request will be prompt")
GIST_ID_MSG = ("identifier of the Gist. Execute 'gists list' to know Gists "
"identifiers")
def run(*args, **kwargs):
# Initialize argument's parser
description = 'Manage Github gists from CLI'
parser = argparse.ArgumentParser(description=description,
epilog="Happy Gisting!")
# Define subparsers to handle each action
subparsers = parser.add_subparsers(help="Available commands.")
# Add the subparsers
__add_list_parser(subparsers)
__add_show_parser(subparsers)
__add_get_parser(subparsers)
__add_create_parser(subparsers)
__add_update_parser(subparsers)
__add_delete_parser(subparsers)
__add_authorize_parser(subparsers)
__add_version_parser(subparsers)
__add_fork_parser(subparsers)
__add_star_parser(subparsers)
__add_unstar_parser(subparsers)
# Parse the arguments
args = parser.parse_args()
# Calling the handle_args function defined, parsing the args and return
# and object with the needed values to execute the function
parameters = args.handle_args(args)
# Passing the 'parameters' object as array of parameters
result = args.func(*parameters)
# Parsing the 'result' object to be output formatted.
# (that must be a single object)
result_formatted = args.formatter(result)
# Print the formatted output
print result_formatted
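# Example invocations of the resulting CLI (commands correspond to the
# subparsers defined below):
#     gists list -u octocat
#     gists show <gist_id> -f notes.txt
#     gists create -f notes.txt -d "quick notes" -p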
def __add_list_parser(subparsers):
""" Define the subparser to handle the 'list' functionality.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the list of gists
parser_list = subparsers.add_parser("list", help="list a user's Gists")
parser_list.add_argument("-u", "--user", help=USER_MSG)
group1 = parser_list.add_mutually_exclusive_group()
group1.add_argument("-p", "--private", help="""return the private gists
besides the public ones. Needs authentication""",
action="store_true")
group1.add_argument("-s", "--starred", help="""return ONLY the starred
gists. Needs authentication""", action="store_true")
parser_list.set_defaults(handle_args=handle_list,
func=list_gists, formatter=format_list)
def __add_show_parser(subparsers):
""" Define the subparser to handle with the 'show' functionallity.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'show' action
parser_show = subparsers.add_parser("show", help="""show a Gist. Shows
Gist metadata by default.
With '-f' (--filename) option, shows
the content of one of the Gist files
""")
parser_show.add_argument("gist_id", help=GIST_ID_MSG)
parser_show.add_argument("-f", "--filename", help="gist file to show")
parser_show.set_defaults(handle_args=handle_show, func=show,
formatter=format_show)
def __add_get_parser(subparsers):
""" Define the subparser to handle the 'get' functionality.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'get' action
parser_get = subparsers.add_parser("get", help="""download a single gist
file. If the gist has just a single
file, argument '-f' (--filename) is not
needed""")
parser_get.add_argument("gist_id", help=GIST_ID_MSG)
parser_get.add_argument("-f", "--filename", help="file to download")
parser_get.add_argument("-o", "--output_dir", help="destination directory",
default=".")
parser_get.set_defaults(handle_args=handle_get, func=get,
formatter=format_get)
def __add_create_parser(subparsers):
""" Define the subparser to handle the 'create' functionality.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'create' action
parser_post = subparsers.add_parser("create", help="""create a new gist.
Needs authentication""")
parser_post.add_argument("-u", "--user", help=USER_MSG)
parser_post.add_argument("-f", "--filenames", nargs='+', help="""specify
files to upload with Gist creation""",
required=True)
parser_post.add_argument("-p", "--private", help="""private Gist? ('false'
by default)""", action="store_true")
parser_post.add_argument("-i", "--input_dir", help="""input directory where
the source files are""")
parser_post.add_argument("-d", "--description", help="""description for
the Gist to create""")
parser_post.set_defaults(handle_args=handle_post, func=post,
formatter=format_post)
def __add_update_parser(subparsers):
""" Define the subparser to handle the 'update' functionality.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'update' action
parser_update = subparsers.add_parser("update", help="""update a gist.
Needs authentication""")
parser_update.add_argument("gist_id", help=GIST_ID_MSG)
parser_update.add_argument("-u", "--user", help=USER_MSG)
group1 = parser_update.add_argument_group("file options",
"update Gist files")
group1.add_argument("-f", "--filenames", nargs='+',
help="Gist files to update")
group11 = group1.add_mutually_exclusive_group()
group11.add_argument("-n", "--new", action="store_true", help="""files
supplied are new for the Gist. '-f' (--filenames)
argument needed""",
default=False)
group11.add_argument("-r", "--remove", action="store_true",
help="""files supplied will be removed from the Gist.
'-f' (--filenames) argument needed""", default=False)
group1.add_argument("-i", "--input_dir", help="""directory where the files
are. Current directory by default""")
group2 = parser_update.add_argument_group('metadata options',
"update Gist metadata")
group2.add_argument("-d", "--description", help="update Gist description")
parser_update.set_defaults(handle_args=handle_update, func=update,
formatter=format_update)
def __add_delete_parser(subparsers):
""" Define the subparser to handle the 'delete' functionality.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'delete' action
parser_delete = subparsers.add_parser("delete", help="""delete a Gist.
Needs authentication""")
parser_delete.add_argument("gist_id", help=GIST_ID_MSG)
parser_delete.add_argument("-u", "--user", help=USER_MSG)
parser_delete.set_defaults(handle_args=handle_delete, func=delete,
formatter=format_delete)
def __add_authorize_parser(subparsers):
""" Define the subparser to handle the 'authorize' functionallity.
:param subparsers: the subparser entity
"""
# Add the subparser to handle the 'authorize' action.
parser_authorize = subparsers.add_parser("authorize", help="""authorize
this project in github""")
parser_authorize.add_argument("-u", "--user", help="""your github user
. Needed to generate the auth token. """,
required=True)
parser_authorize.set_defaults(handle_args=handle_authorize, func=authorize,
formatter=format_authorize)
def __add_version_parser(subparsers):
""" Define the subparser to handle 'version' functionallity.
:param subparsers: the subparser entity
"""
parser_version = subparsers.add_parser("version", help="""print the version
of the release""")
parser_version.set_defaults(handle_args=lambda x: (None,),
func=lambda x: None,
formatter=lambda x: VERSION)
def __add_fork_parser(subparsers):
""" Define the subparser to handle 'fork' functionallity.
:param subparsers: the subparser entity
"""
parser_fork = subparsers.add_parser("fork", help="""fork another users'
Gists""")
parser_fork.add_argument("gist_id", help=GIST_ID_MSG)
parser_fork.add_argument("-u", "--user", help=USER_MSG)
parser_fork.set_defaults(handle_args=handle_fork, func=fork,
formatter=format_post)
def __add_star_parser(subparsers):
""" Define the subparser to handle 'star' functionallity.
:param subparsers: the subparser entity
"""
parser_star = subparsers.add_parser("star", help="star a Gist")
parser_star.add_argument("gist_id", help=GIST_ID_MSG)
parser_star.add_argument("-u", "--user", help=USER_MSG)
parser_star.set_defaults(handle_args=handle_star, func=star,
formatter=format_star)
def __add_unstar_parser(subparsers):
""" Define the subparser to handle 'unstar' functionallity.
:param subparsers: the subparser entity
"""
parser_unstar = subparsers.add_parser("unstar", help="unstar a Gist")
parser_unstar.add_argument("gist_id", help=GIST_ID_MSG)
parser_unstar.add_argument("-u", "--user", help=USER_MSG)
parser_unstar.set_defaults(handle_args=handle_star, func=unstar,
formatter=format_star)
|
jdevesa/gists
|
gists/gists.py
|
Python
|
mit
| 12,061
|
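The dispatch pattern in gists.py above binds three callables to every subparser through set_defaults: handle_args (normalizes the parsed arguments into a tuple), func (executes the command), and formatter (renders the single result object), so the entry point stays generic across commands. Below is a minimal, self-contained sketch of that contract; the 'greet' command and its helpers are hypothetical and not part of the gists project.

import argparse

def handle_greet(args):
    # Normalize parsed args into a tuple of positional parameters
    return (args.name,)

def greet(name):
    # The command function: does the work and returns a single result object
    return {"greeting": "Hello, %s!" % name}

def format_greet(result):
    # The formatter: turns the result object into printable text
    return result["greeting"]

parser = argparse.ArgumentParser(description="Dispatch-pattern sketch")
subparsers = parser.add_subparsers(help="Available commands.")
parser_greet = subparsers.add_parser("greet", help="greet someone")
parser_greet.add_argument("name")
parser_greet.set_defaults(handle_args=handle_greet, func=greet,
                          formatter=format_greet)

args = parser.parse_args(["greet", "world"])
print(args.formatter(args.func(*args.handle_args(args))))  # Hello, world!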
# -*- coding: utf-8 -*-
import unittest
from linked_list import (delete_node, list_cycle, remove_elements,
reverse_list)
from public import ListNode
class TestLinkedList(unittest.TestCase):
def test_delete_node(self):
so = delete_node.Solution()
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
head.next.next.next = ListNode(4)
so.deleteNode(head.next)
self.assertEqual(head.next.val, 3)
def test_has_cycle(self):
so = list_cycle.Solution()
self.assertFalse(so.hasCycle(None))
head = ListNode(1)
self.assertFalse(so.hasCycle(head))
head.next = head
self.assertTrue(so.hasCycle(head))
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertFalse(so.hasCycle(head))
head.next.next.next = head
self.assertTrue(so.hasCycle(head))
def test_detect_cycle(self):
so = list_cycle.Solution()
head = ListNode(1)
self.assertFalse(so.detectCycle(head))
self.assertFalse(so.detectCycle(None))
head.next = ListNode(2)
self.assertFalse(so.detectCycle(head))
cross = ListNode(3)
head.next.next = cross
head.next.next.next = ListNode(4)
head.next.next.next.next = ListNode(5)
head.next.next.next.next.next = cross
self.assertEqual(so.detectCycle(head), cross)
def test_remove_elements(self):
so = remove_elements.Solution()
self.assertFalse(so.removeElements(None, 0))
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(2)
head.next.next.next = ListNode(3)
head.next.next.next.next = ListNode(4)
head = so.removeElements(head, 1)
self.assertEqual(head.val, 2)
head = so.removeElements(head, 2)
self.assertEqual(head.val, 3)
head = so.removeElements(head, 4)
self.assertFalse(head.next)
def test_reverse_linked_list(self):
so = reverse_list.Solution()
self.assertFalse(so.reverseList_iteratively(None))
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertEqual(so.reverseList_iteratively(head).val, 3)
self.assertFalse(so.reverseList_recursively(None))
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertEqual(so.reverseList_recursively(head).val, 3)
|
lycheng/leetcode
|
tests/test_linked_list.py
|
Python
|
mit
| 2,549
|
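The test module above imports its solutions from modules not reproduced here. As a rough sketch of what test_has_cycle exercises, Floyd's tortoise-and-hare algorithm detects a cycle in O(1) extra space; the class and method names below mirror the test's expectations, but this is an illustration under that assumption, not the repository's code.

class ListNode(object):
    def __init__(self, x):
        self.val = x
        self.next = None

class Solution(object):
    def hasCycle(self, head):
        # Floyd's tortoise and hare: the fast pointer advances two nodes per
        # step; if a cycle exists, it must eventually meet the slow pointer.
        slow = fast = head
        while fast and fast.next:
            slow = slow.next
            fast = fast.next.next
            if slow is fast:
                return True
        return False

# Mirrors test_has_cycle above: a one-node self-loop is a cycle.
head = ListNode(1)
head.next = head
assert Solution().hasCycle(head)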
import numpy as np
import uncertainties.unumpy as unp
from uncertainties.unumpy import (nominal_values as noms, std_devs as stds)
import matplotlib.pyplot as plt
import matplotlib as mpl
from scipy.optimize import curve_fit
plt.rcParams['figure.figsize'] = (12, 8)
plt.rcParams['font.size'] = 13
plt.rcParams['lines.linewidth'] = 1
csfont = {'fontname': 'Times New Roman'}
# U_mess (measured voltage) = b_n
n, b_n = np.genfromtxt('rechteck.txt', unpack=True, skip_header=2)
x = np.log(n)
y = np.log(b_n)
def f(x, a, b):
return a * x + b
params, covariance = curve_fit(f, x, y)
errors = np.sqrt(np.diag(covariance))
print('a =', params[0], '+-', errors[0])
print('b =', params[1], '+-', errors[1])
# a = -1.19204784746 +- 0.0898910034039
# b = 0.326461420388 +- 0.123423011824
# with odd harmonics only:
# a = -0.909247906044 +- 0.0770070259187
# b = 0.409244475522 +- 0.144772240047
x_plot = np.linspace(min(x), max(x))
plt.plot(x_plot, f(x_plot, *params), 'b-', label='linear fit')
plt.plot(x, y, 'rx', label='measured values')
plt.ylabel(r'$\mathrm{log(b_n)}$')
plt.xlabel(r'$\mathrm{log(n)}$')
# plt.title('Measurements')
plt.grid()
plt.legend()
plt.tight_layout()
plt.savefig('bilder/rechteck.pdf')
plt.show()
|
pascalgutjahr/Praktikum-1
|
Fourier/rechteck.py
|
Python
|
mit
| 1,202
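The script above fits a line in log-log space because a power law b_n = C * n**a becomes linear there: log(b_n) = a*log(n) + log(C). The fitted exponent a (about -1.19, or -0.91 restricted to odd harmonics) is then read against the 1/n decay expected for square-wave Fourier coefficients, b_n = 4/(pi*n) for odd n. A minimal sketch of that reading, using synthetic ideal amplitudes rather than the rechteck.txt data (variable names here are mine):

import numpy as np
from scipy.optimize import curve_fit

# Ideal square-wave amplitudes fall off as 1/n over the odd harmonics
n = np.arange(1, 20, 2, dtype=float)
b_n = 4.0 / (np.pi * n)

def line(x, a, b):
    return a * x + b

params, cov = curve_fit(line, np.log(n), np.log(b_n))
a, b = params
C = np.exp(b)              # log(b_n) = a*log(n) + b  <=>  b_n = e**b * n**a
print('exponent a =', a)   # -1 for an ideal square wave
print('prefactor C =', C)  # 4/pi, about 1.273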
|
# -*- coding: utf-8 -*-
"""
This is part of WebScout software
Docs EN: http://hack4sec.pro/wiki/index.php/WebScout_en
Docs RU: http://hack4sec.pro/wiki/index.php/WebScout
License: MIT
Copyright (c) Anton Kuzmin <http://anton-kuzmin.ru> (ru) <http://anton-kuzmin.pro> (en)
Job class for FormBruter module
"""
from classes.jobs.GeneratorJob import GeneratorJob
class FormBruterJob(GeneratorJob):
""" Job class for FormBruter module """
collection_name = 'form_bruter'
|
hack4sec/ws-cli
|
classes/jobs/FormBruterJob.py
|
Python
|
mit
| 479
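FormBruterJob above only pins a collection name; presumably the GeneratorJob base class reads the collection_name attribute to decide where generated work items are stored. A hypothetical sketch of that pattern follows — the base class below is invented for illustration and is not WebScout's actual GeneratorJob.

class GeneratorJob(object):
    """ Hypothetical base: subclasses only need to set 'collection_name'. """
    collection_name = None

    def storage_key(self, item_id):
        # The base class derives storage locations from the subclass attribute
        return "%s/%s" % (self.collection_name, item_id)

class FormBruterJob(GeneratorJob):
    collection_name = 'form_bruter'

print(FormBruterJob().storage_key(42))  # form_bruter/42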
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class VirtualNetworkGatewayConnectionListEntity(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param authorization_key: The authorizationKey.
:type authorization_key: str
:param virtual_network_gateway1: Required. The reference to virtual
network gateway resource.
:type virtual_network_gateway1:
~azure.mgmt.network.v2017_11_01.models.VirtualNetworkConnectionGatewayReference
:param virtual_network_gateway2: The reference to virtual network gateway
resource.
:type virtual_network_gateway2:
~azure.mgmt.network.v2017_11_01.models.VirtualNetworkConnectionGatewayReference
:param local_network_gateway2: The reference to local network gateway
resource.
:type local_network_gateway2:
~azure.mgmt.network.v2017_11_01.models.VirtualNetworkConnectionGatewayReference
    :param connection_type: Required. Gateway connection type. Possible values
     are: 'IPsec', 'Vnet2Vnet', 'ExpressRoute', and 'VPNClient'. Possible
     values include: 'IPsec', 'Vnet2Vnet', 'ExpressRoute', 'VPNClient'
:type connection_type: str or
~azure.mgmt.network.v2017_11_01.models.VirtualNetworkGatewayConnectionType
:param routing_weight: The routing weight.
:type routing_weight: int
:param shared_key: The IPSec shared key.
:type shared_key: str
:ivar connection_status: Virtual network Gateway connection status.
Possible values are 'Unknown', 'Connecting', 'Connected' and
'NotConnected'. Possible values include: 'Unknown', 'Connecting',
'Connected', 'NotConnected'
:vartype connection_status: str or
~azure.mgmt.network.v2017_11_01.models.VirtualNetworkGatewayConnectionStatus
:ivar tunnel_connection_status: Collection of all tunnels' connection
health status.
:vartype tunnel_connection_status:
list[~azure.mgmt.network.v2017_11_01.models.TunnelConnectionHealth]
:ivar egress_bytes_transferred: The egress bytes transferred in this
connection.
:vartype egress_bytes_transferred: long
:ivar ingress_bytes_transferred: The ingress bytes transferred in this
connection.
:vartype ingress_bytes_transferred: long
:param peer: The reference to peerings resource.
:type peer: ~azure.mgmt.network.v2017_11_01.models.SubResource
:param enable_bgp: EnableBgp flag
:type enable_bgp: bool
:param use_policy_based_traffic_selectors: Enable policy-based traffic
selectors.
:type use_policy_based_traffic_selectors: bool
:param ipsec_policies: The IPSec Policies to be considered by this
connection.
:type ipsec_policies:
list[~azure.mgmt.network.v2017_11_01.models.IpsecPolicy]
:param resource_guid: The resource GUID property of the
VirtualNetworkGatewayConnection resource.
:type resource_guid: str
:ivar provisioning_state: The provisioning state of the
VirtualNetworkGatewayConnection resource. Possible values are: 'Updating',
'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param etag: Gets a unique read-only string that changes whenever the
resource is updated.
:type etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'virtual_network_gateway1': {'required': True},
'connection_type': {'required': True},
'connection_status': {'readonly': True},
'tunnel_connection_status': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'virtual_network_gateway1': {'key': 'properties.virtualNetworkGateway1', 'type': 'VirtualNetworkConnectionGatewayReference'},
'virtual_network_gateway2': {'key': 'properties.virtualNetworkGateway2', 'type': 'VirtualNetworkConnectionGatewayReference'},
'local_network_gateway2': {'key': 'properties.localNetworkGateway2', 'type': 'VirtualNetworkConnectionGatewayReference'},
'connection_type': {'key': 'properties.connectionType', 'type': 'str'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'},
'tunnel_connection_status': {'key': 'properties.tunnelConnectionStatus', 'type': '[TunnelConnectionHealth]'},
'egress_bytes_transferred': {'key': 'properties.egressBytesTransferred', 'type': 'long'},
'ingress_bytes_transferred': {'key': 'properties.ingressBytesTransferred', 'type': 'long'},
'peer': {'key': 'properties.peer', 'type': 'SubResource'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'use_policy_based_traffic_selectors': {'key': 'properties.usePolicyBasedTrafficSelectors', 'type': 'bool'},
'ipsec_policies': {'key': 'properties.ipsecPolicies', 'type': '[IpsecPolicy]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, *, virtual_network_gateway1, connection_type, id: str=None, location: str=None, tags=None, authorization_key: str=None, virtual_network_gateway2=None, local_network_gateway2=None, routing_weight: int=None, shared_key: str=None, peer=None, enable_bgp: bool=None, use_policy_based_traffic_selectors: bool=None, ipsec_policies=None, resource_guid: str=None, etag: str=None, **kwargs) -> None:
super(VirtualNetworkGatewayConnectionListEntity, self).__init__(id=id, location=location, tags=tags, **kwargs)
self.authorization_key = authorization_key
self.virtual_network_gateway1 = virtual_network_gateway1
self.virtual_network_gateway2 = virtual_network_gateway2
self.local_network_gateway2 = local_network_gateway2
self.connection_type = connection_type
self.routing_weight = routing_weight
self.shared_key = shared_key
self.connection_status = None
self.tunnel_connection_status = None
self.egress_bytes_transferred = None
self.ingress_bytes_transferred = None
self.peer = peer
self.enable_bgp = enable_bgp
self.use_policy_based_traffic_selectors = use_policy_based_traffic_selectors
self.ipsec_policies = ipsec_policies
self.resource_guid = resource_guid
self.provisioning_state = None
self.etag = etag
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_11_01/models/virtual_network_gateway_connection_list_entity_py3.py
|
Python
|
mit
| 7,889
|
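The dotted keys in the _attribute_map above (e.g. 'properties.sharedKey') tell the underlying msrest serializer to nest those attributes under a 'properties' object on the wire, while flat keys like 'etag' stay at the top level. A self-contained sketch of that flattening idea, independent of the Azure SDK — the serialize helper below is illustrative, not msrest's implementation:

def serialize(obj, attribute_map):
    # Walk each attribute; dotted keys create nested dictionaries, mimicking
    # how flat model fields are mapped onto nested wire JSON.
    body = {}
    for attr, spec in attribute_map.items():
        value = getattr(obj, attr, None)
        if value is None:
            continue
        node = body
        parts = spec['key'].split('.')
        for part in parts[:-1]:
            node = node.setdefault(part, {})
        node[parts[-1]] = value
    return body

class Conn(object):
    pass

conn = Conn()
conn.shared_key = 'abc'
conn.routing_weight = 10
conn.etag = 'W/"1"'

attribute_map = {
    'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
    'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
    'etag': {'key': 'etag', 'type': 'str'},
}
print(serialize(conn, attribute_map))
# {'properties': {'sharedKey': 'abc', 'routingWeight': 10}, 'etag': 'W/"1"'}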