| code (string, length 2-1.05M) | repo_name (string, length 5-104) | path (string, length 4-251) | language (string, 1 class) | license (string, 15 classes) | size (int32, 2-1.05M) |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('management', '0016_demoresourcedata_validation_result'),
]
operations = [
migrations.AlterField(
model_name='demoresourcedata',
name='ip_address',
field=models.GenericIPAddressField(),
),
]
|
gregoil/rotest
|
src/rotest/management/migrations/0017_auto_20181202_0752.py
|
Python
|
mit
| 434
|
import os
import unittest
from pyfluka.utils import ShellUtils
class TestShellUtils(unittest.TestCase):
def setUp(self):
pass
    def tearDown(self):
        # Guard against tests that fail before the directory is created
        if os.path.exists("testdir"):
            os.removedirs("testdir")
def testMkDir(self):
self.assertFalse(os.path.exists("testdir"))
ShellUtils.mkdir("testdir")
self.assertTrue(os.path.exists("testdir"))
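# A minimal sketch of the helper under test (hypothetical; the real
# implementation lives in pyfluka.utils and may differ):
#
#     class ShellUtils(object):
#         @staticmethod
#         def mkdir(path):
#             if not os.path.exists(path):
#                 os.makedirs(path)
if __name__ == '__main__':
    unittest.main()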
|
morgenst/pyfluka
|
tests/TestShellUtils.py
|
Python
|
mit
| 364
|
"""
Ethiopian Movie Database.
"""
__author__ = "EtMDB Developers (developers@etmdb.com)"
__date__ = "Date: 25/05/2017"
__version__ = "Version: 1.0"
__Copyright__ = "Copyright: @etmdb"
""" etmdbots URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^bot/', include('messengerbot.urls')),
]
|
etmdb/facebookbot
|
etmdbots/urls.py
|
Python
|
mit
| 1,010
|
'''
sentlex.py - Lexicon Management Classes
This module implements the class structure for sentiment lexicons used elsewhere in SentimentAnalysis.
Sentiment lexicons encapsulate information from publicly available Lexicons from research literature in a common Pythonic interface.
What a Lexicon object does:
- for a given term, return positive/negative/neutral sentiment info, based on underlying lexicon.
- terms are divided into parts of speech (Adj/Verb/Noun/Adverb).
- some capability for comparing lexicons.
TODO:
- niceties - inform user of NLTK and corpus requirements
'''
from __future__ import absolute_import
from __future__ import print_function
import os
import nltk
from . import sentlexutil
#
# Lexicon super-class
#
class Lexicon(object):
'''
Lexicon class is a generic class holding in memory databases for a sentiment lexicon.
A lexicon is defined by 4 dictionaries mapping terms to sentiment data in the form of tuples:
A['word'] = [(id, 1, 0), (id, 0.5, 0) ... ]
Where tuple values are (sense_id, positive, negative), extracted from a knowledge source for that particular word/POS
'''
def __init__(self):
# Initialize class vars
self.A = {}
self.V = {}
self.R = {}
self.N = {}
self.LexName = 'Superclass'
self.LexFreq = None
self._is_loaded = False
self._is_compiled = False
# Baseline words used to QA a lexicon
self.baselinewords = ['good', 'bad', 'pretty', 'awful', 'excellent',
'misfortune', 'incompetent', 'tough',
'inadequate', 'terrible', 'blue', 'closed']
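        # Illustrative shape (hypothetical entries), matching the class
        # docstring: each POS dict maps a term to a list of
        # (sense_id, positive, negative) tuples, e.g.
        #   self.A['good'] = [('good#a#1', 0.75, 0.0), ('good#a#2', 0.5, 0.25)]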
@property
def is_loaded(self):
return self._is_loaded
@property
def is_compiled(self):
return self._is_compiled
def _termdistro(self, A):
'''
        Given a dictionary A of terms associated with a part of speech,
        calculates the valence distribution inside the lexicon.
'''
postotal = 0
negtotal = 0
neutral = 0
for itemlist in A:
for item in A[itemlist]:
if (item[1] > item[2]):
postotal = postotal + 1
elif (item[2] > item[1]):
negtotal = negtotal + 1
else:
neutral = neutral + 1
return (postotal, negtotal, neutral)
def print_info(self):
'''
Print information about this lexicon. Does not work for dynamic/composite lexicons.
'''
print("Lexicon Sizes:")
print(" - Adjective (A): " + str(len(self.A)))
print(" - Verb (V) : " + str(len(self.V)))
print(" - Adverb (R) : " + str(len(self.R)))
print(" - Noun (N) : " + str(len(self.N)))
print(" ")
print("Score Distributions:")
(postotal, negtotal, neutral) = self._termdistro(self.A)
print(" - Adjectives (A): POS=" + str(postotal) + " , NEG=" +
str(negtotal) + " , NEUTRAL=" + str(neutral))
(postotal, negtotal, neutral) = self._termdistro(self.V)
print(" - Verbs (V): POS=" + str(postotal) + " , NEG=" +
str(negtotal) + " , NEUTRAL=" + str(neutral))
(postotal, negtotal, neutral) = self._termdistro(self.R)
print(" - Adverbs (R): POS=" + str(postotal) + " , NEG=" +
str(negtotal) + " , NEUTRAL=" + str(neutral))
(postotal, negtotal, neutral) = self._termdistro(self.N)
print(" - Noun (N): POS=" + str(postotal) + " , NEG=" +
str(negtotal) + " , NEUTRAL=" + str(neutral))
def get_info(self):
'''
        Like print_info, but returns a dict for machine processing
'''
infod = {}
infod['a'], infod['v'], infod['r'], infod['n'] = {}, {}, {}, {}
# sizes
infod['a']['size'] = len(self.A)
infod['v']['size'] = len(self.V)
infod['r']['size'] = len(self.R)
infod['n']['size'] = len(self.N)
# distros
infod['a']['pos'], infod['a']['neg'], infod['a']['neutral'] = self._termdistro(self.A)
infod['v']['pos'], infod['v']['neg'], infod['v']['neutral'] = self._termdistro(self.V)
infod['r']['pos'], infod['r']['neg'], infod['r']['neutral'] = self._termdistro(self.R)
infod['n']['pos'], infod['n']['neg'], infod['n']['neutral'] = self._termdistro(self.N)
return infod
def hasnoun(self, term):
'''
Returns True/False to query whether term is present in dict
'''
return term in self.N
def hasverb(self, term):
return term in self.V
def hasadverb(self, term):
return term in self.R
def hasadjective(self, term):
return term in self.A
def get_name(self):
'''
Returns name for this lexicon
'''
return self.LexName
def set_name(self, newname):
'''
Sets lexicon name other than default
'''
self.LexName = newname
def compare(self, L, pos):
'''
Compares current lexicon with "L" on "pos" part of speech ('a','v','n','r')
'''
def getsign(x):
if x > 0.0:
return 1
elif x == 0.0:
return 0
else:
return -1
if pos == 'a':
lComp = self.A
elif pos == 'v':
lComp = self.V
elif pos == 'n':
lComp = self.N
        elif pos == 'r':
            lComp = self.R
        else:
            raise ValueError('Unknown part of speech: %s' % pos)
# Intersection
I = []
for term in lComp:
if (pos == 'a' and L.hasadjective(term))\
or (pos == 'v' and L.hasverb(term))\
or (pos == 'r' and L.hasadverb(term))\
or (pos == 'n' and L.hasnoun(term)):
I.append(term)
print(" POS = " + pos + ". Intersection of " + self.LexName +
" and " + L.get_name() + " -> " + str(len(I)))
# % Agreement between lexicons - we consider terms in agreement if overall valence (positive-negative) is of same sign.
agree = 0.0
for term in I:
if (pos == 'a' and (getsign(L.getadjective(term)[0] - L.getadjective(term)[1]) == getsign(self.getadjective(term)[0] - self.getadjective(term)[1])))\
or (pos == 'v' and (getsign(L.getverb(term)[0] - L.getverb(term)[1]) == getsign(self.getverb(term)[0] - self.getverb(term)[1])))\
or (pos == 'r' and (getsign(L.getadverb(term)[0] - L.getadverb(term)[1]) == getsign(self.getadverb(term)[0] - self.getadverb(term)[1])))\
or (pos == 'n' and (getsign(L.getnoun(term)[0] - L.getnoun(term)[1]) == getsign(self.getnoun(term)[0] - self.getnoun(term)[1]))):
agree = agree + 1
        if not I:
            return {'intersect': 0, 'agree': 0.0}
        print(" POS = " + pos + ". % Agreement on (" + self.LexName +
              ") Intersec. (" + L.get_name() + ") -> " + str(agree / (len(I) + 0.0)))
        return {'intersect': len(I), 'agree': agree / (len(I) + 0.0)}
def getbestvalues(self, key, A):
'''
Returns score from dictionary as a proportion of how many pos/neg scores are found.
This is necessary when terms are associated with more than 1 sense, and no disambiguation is possible.
- Returns tuple (pos,neg)
- Tuple is (0,0) if key not found
'''
if key not in A:
return (0, 0)
posval = 0.0
negval = 0.0
items = 0
foundpos = 0.0
foundneg = 0.0
# loop through all orientation synsets for this key
for T in A[key]:
posval += T[1]
negval += T[2]
items += 1
if T[1] > T[2]:
foundpos += 1.0
if T[2] > T[1]:
foundneg += 1.0
if ((foundpos == 0) and (foundneg == 0)):
return (0, 0)
else:
return ((foundpos / (foundpos + foundneg)) * (posval / max(items, 1)), (foundneg / (foundpos + foundneg)) * (negval / max(items, 1)))
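    # Worked example for getbestvalues (illustrative values): for
    #   A['fine'] = [(id1, 0.8, 0.0), (id2, 0.0, 0.6), (id3, 0.4, 0.0)]
    # we get foundpos=2, foundneg=1, posval=1.2, negval=0.6, items=3, so the
    # result is ((2/3)*(1.2/3), (1/3)*(0.6/3)) ~= (0.267, 0.067).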
def compile_frequency(self):
'''
        Pre-load frequency table data for use with get_freq() calls.
Frequency data comes from the SUBTLEXus study, available from:
http://expsy.ugent.be/subtlexus/
'''
# load frequency list
curpath = os.path.dirname(os.path.abspath(__file__))
datapath = os.path.join(curpath, 'data/SUBTLEXus.txt')
        # build frequency table, ignoring uppercase/lowercase differences
        f = open(datapath, 'r')
        self.LexFreq = {}
        # first we read raw counts, then estimate probability of occurrences
for line in f.readlines():
rec = line.split('\t')
word = rec[0]
wordfreq = rec[1]
if word and wordfreq.isdigit():
if word in self.LexFreq:
self.LexFreq[word] += float(wordfreq)
else:
self.LexFreq[word] = float(wordfreq)
f.close()
# estimate probabilities
CORPUS_SIZE = 51000000.0
for w in self.LexFreq:
self.LexFreq[w] = self.LexFreq[w] / CORPUS_SIZE
self._is_compiled = True
def get_freq(self, term):
'''
Retrieves word frequency based on SUBTLEXus corpus data.
Word frequency is given by count(w)/corpus size
'''
assert self.LexFreq and self.is_compiled, "Please initialize frequency distributions with compile_frequency()"
if term in self.LexFreq:
return self.LexFreq[term]
else:
return 0.0
def printstdterms(self):
'''
        Print scores for a list of standard terms for QA; we use only adjectives for now.
'''
print(self.get_name())
for adj in self.baselinewords:
print(" %s (%.2f,%.2f) " % (adj, self.getadjective(
adj)[0], self.getadjective(adj)[1]), end=' ')
print('\n')
##
#
# Resource Lexicon
#
##
class ResourceLexicon(Lexicon):
'''
Sentiment lexicon based on an existing data source (resource)
    This lexicon obtains sentiment information via the load() method, using a loader function that understands the underlying data format.
    Word sentiment information is retrieved with getbestvalues() - an average of (pos, neg) tuples over all known senses of the word (if more than one exists).
The loader functions take the form:
f(pos, datafile)
And return a dict mapping words to tuples of (pos,neg) pairs:
D[word] = [ (word, p,n), (word, p,n) ... ]
Representing all known (p,n) values for word in the given part of speech.
Note that it is common for a word to map to more than a single sense, thus multiple data points are allowed.
Sample loader functions for various knowledge resources can be found in the sentlexutil module.
'''
def __init__(self, name=None, loader=None):
super(ResourceLexicon, self).__init__()
if name:
self.LexName = name
if loader:
self.f_loader = loader
def load(self, datafile):
'''
Loads lexicon from file into dictionaries
'''
assert self.f_loader, 'This lexicon does not have an associated loader function.'
self.A = self.f_loader('a', datafile)
self.V = self.f_loader('v', datafile)
self.R = self.f_loader('r', datafile)
self.N = self.f_loader('n', datafile)
self.compile_frequency()
self._is_loaded = True
return True
def getadjective(self, term):
'''
Returns tuple (pos,neg) for sentiment scores for adjective. (0,0) if not found.
'''
return self.getbestvalues(term, self.A)
def getadverb(self, term):
'''
Returns tuple (pos,neg) for sentiment scores for adverb. (0,0) if not found.
'''
return self.getbestvalues(term, self.R)
def getverb(self, term):
'''
Returns tuple (pos,neg) for sentiment scores for verb. (0,0) if not found.
Verb must be in canonical form.
'''
return self.getbestvalues(term, self.V)
def getnoun(self, term):
'''
Returns tuple (pos,neg) for sentiment scores for noun. (0,0) if not found.
'''
return self.getbestvalues(term, self.N)
##
#
# Composite Lexicon
#
##
class CompositeLexicon(Lexicon):
'''
CompositeLexicon - a subclass of Lexicon, extracts sentiment from a series of lexicons organised hierarchically.
Lexicons are added via addlexicon() call. To retrieve sentiment of a term, each added lexicon will be consulted
per order of inclusion:
.addLexicon(L1)
.addLexicon(L2)
.addLexicon(L3)
means lexicon will be searched in order:
L1 -> L2 -> L3
    This ensures that if information about a word exists in L1, it will be used first.
Lexicons should be added from most accurate to least accurate.
'''
def __init__(self):
super(CompositeLexicon, self).__init__()
self.LexName = 'Composite'
self.LLIST = []
self.factor = 1.0
self.pos_bias = 1.0
self.neg_bias = 1.0
def add_lexicon(self, L):
self.LLIST.append(L)
self.LexName += ' ' + L.get_name()
def set_factor(self, newval):
'''
updates confidence factor used when looking for values over the lexicon list
'''
self.factor = newval
def set_bias(self, pos, neg):
'''
        Numeric values determining how to adjust positive/negative sentiment.
        These biases are applied to values retrieved from any lexicon in the list.
'''
self.pos_bias = pos
self.neg_bias = neg
def compile_frequency(self):
for L in self.LLIST:
L.compile_frequency()
if self.LLIST:
# frequencies are independent of lexicon content so we use the first one
self.LexFreq = self.LLIST[0].LexFreq
@property
def is_compiled(self):
return (all([L.is_compiled for L in self.LLIST]) and self.LexFreq)
@property
def is_loaded(self):
return all([L.is_loaded for L in self.LLIST])
def _scan_lexlist_val(self, lexlist, term, f_checker, f_getter, notfound_val):
'''
        Generic scanner; iterates the lexicon list for term, using "checker" and "getter"
        functions to check presence and return a specific value as needed.
        notfound_val is returned if no lexicon contains term (as per checker func).
        At each iteration, confidence is multiplied by self.factor.
'''
confidence_val = 1.0
for L in lexlist:
if getattr(L, f_checker)(term):
termval = getattr(L, f_getter)(term)
# return found values for this term, times the lexicon confidence
return (termval[0] * confidence_val * self.pos_bias, termval[1] * confidence_val * self.neg_bias)
confidence_val *= self.factor
return notfound_val
def _scan_lexlist_presence(self, lexlist, term, f_checker):
'''
Generic scanner, iterates lexicon list for term presence
'''
for L in lexlist:
if getattr(L, f_checker)(term):
return True
return False
def getnoun(self, term):
return self._scan_lexlist_val(self.LLIST, term, "hasnoun", "getnoun", (0, 0))
def getverb(self, term):
return self._scan_lexlist_val(self.LLIST, term, "hasverb", "getverb", (0, 0))
def getadverb(self, term):
return self._scan_lexlist_val(self.LLIST, term, "hasadverb", "getadverb", (0, 0))
def getadjective(self, term):
return self._scan_lexlist_val(self.LLIST, term, "hasadjective", "getadjective", (0, 0))
def hasnoun(self, term):
return self._scan_lexlist_presence(self.LLIST, term, "hasnoun")
def hasverb(self, term):
return self._scan_lexlist_presence(self.LLIST, term, "hasverb")
def hasadverb(self, term):
return self._scan_lexlist_presence(self.LLIST, term, "hasadverb")
def hasadjective(self, term):
return self._scan_lexlist_presence(self.LLIST, term, "hasadjective")
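# Usage sketch for CompositeLexicon (illustrative; assumes the sample lexicon
# classes defined below have been instantiated):
#
#     comp = CompositeLexicon()
#     comp.add_lexicon(swn)      # consulted first
#     comp.add_lexicon(moby)     # consulted only if the term is absent above
#     comp.set_factor(0.8)       # each fallback level scales confidence by 0.8
#     comp.compile_frequency()
#     comp.getadjective('good')  # -> (pos, neg) from the first matching lexicon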
##
# Sample Lexicons
#
# - Use the classes below to create lexicons from specific knowledge resources shipped with the package.
#
##
class MobyLexicon(ResourceLexicon):
'''
This lexicon uses the Moby thesaurus to build a sentiment lexicon by expansion of
seeded words.
More on the Moby project: http://en.wikipedia.org/wiki/Moby_Project
    This lexicon was first used as part of the research in:
Ohana, Bruno, Brendan Tierney, and S. Delany. "Domain Independent Sentiment Classification with Many Lexicons."
Advanced Information Networking and Applications (WAINA),
2011 IEEE Workshops of International Conference on. IEEE, 2011.
'''
def __init__(self):
curpath = os.path.dirname(os.path.abspath(__file__))
datapath = os.path.join(curpath, 'data/GB1_S.lex')
super(MobyLexicon, self).__init__('Moby-GB', sentlexutil.readMoby)
self.load(datapath)
class SWN3Lexicon(ResourceLexicon):
'''
Implements SWN3.0 lexicon as a class. SentiWordNet is available from:
http://sentiwordnet.isti.cnr.it
    and is distributed under a CC BY-SA license:
http://creativecommons.org/licenses/by-sa/3.0/
See further details on file SentiWordNet_3.0.0.lex in the data directory.
'''
def __init__(self):
curpath = os.path.dirname(os.path.abspath(__file__))
datapath = os.path.join(curpath, 'data/SentiWordNet_3.0.0.lex')
super(SWN3Lexicon, self).__init__('SWN3', sentlexutil.readSWN3)
self.load(datapath)
class UICLexicon(ResourceLexicon):
'''
    The UIC lexicon is based upon positive and negative word lists from the University of Illinois at Chicago.
    The source word list can be downloaded from:
    http://www.cs.uic.edu/~liub/FBS/sentiment-analysis.html
    and is referenced in:
    Minqing Hu and Bing Liu. "Mining and Summarizing Customer Reviews."
    Proceedings of the ACM SIGKDD International Conference on Knowledge
    Discovery and Data Mining (KDD-2004), Aug 22-25, 2004, Seattle,
    Washington, USA.
'''
def __init__(self):
curpath = os.path.dirname(os.path.abspath(__file__))
datapath = os.path.join(curpath, 'data/uic.lex')
super(UICLexicon, self).__init__('UIC', sentlexutil.readUIC)
self.load(datapath)
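# End-to-end sketch (illustrative; requires the bundled data files):
#
#     swn = SWN3Lexicon()        # loads the lexicon and compiles frequencies
#     swn.printstdterms()        # QA scores for the baseline adjectives
#     swn.getadjective('good')   # -> (pos, neg) tuple
#     swn.get_freq('good')       # SUBTLEXus-based relative frequency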
|
bohana/sentlex
|
sentlex/sentlex.py
|
Python
|
mit
| 18,601
|
import pathlib
import subprocess
import sys
import unittest
import numpy
import pytest
from cupy.cuda import nccl
from cupy import testing
from cupyx.distributed import init_process_group
nccl_available = nccl.available
def _run_test(test_name, dtype=None):
    # A subprocess is required so as not to interfere with the cupy module
    # imported at the top of this file
runner_path = pathlib.Path(__file__).parent / 'comm_runner.py'
args = [sys.executable, runner_path, test_name]
if dtype is not None:
args.append(numpy.dtype(dtype).char)
proc = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdoutdata, stderrdata = proc.communicate()
assert stderrdata.decode() == ''
assert proc.returncode == 0
@pytest.mark.skipif(not nccl_available, reason='nccl is not installed')
@testing.multi_gpu(2)
class TestNCCLBackend:
@testing.for_all_dtypes(no_bool=True)
def test_broadcast(self, dtype):
_run_test('broadcast', dtype)
@testing.for_all_dtypes(no_bool=True)
def test_reduce(self, dtype):
_run_test('reduce', dtype)
@testing.for_all_dtypes(no_bool=True)
def test_all_reduce(self, dtype):
_run_test('all_reduce', dtype)
@testing.for_all_dtypes(no_bool=True)
def test_reduce_scatter(self, dtype):
_run_test('reduce_scatter', dtype)
@testing.for_all_dtypes(no_bool=True)
def test_all_gather(self, dtype):
_run_test('all_gather', dtype)
@testing.for_all_dtypes(no_bool=True)
def test_send_and_recv(self, dtype):
_run_test('send_and_recv', dtype)
@testing.for_all_dtypes(no_bool=True)
def test_send_recv(self, dtype):
_run_test('send_recv', dtype)
@testing.for_all_dtypes(no_bool=True)
def test_scatter(self, dtype):
_run_test('scatter', dtype)
@testing.for_all_dtypes(no_bool=True)
def test_gather(self, dtype):
_run_test('gather', dtype)
@testing.for_all_dtypes(no_bool=True)
def test_all_to_all(self, dtype):
_run_test('all_to_all', dtype)
def test_barrier(self):
_run_test('barrier')
@pytest.mark.skipif(not nccl_available, reason='nccl is not installed')
class TestInitDistributed(unittest.TestCase):
@testing.multi_gpu(2)
def test_init(self):
_run_test('init')
def test_invalid_backend(self):
with pytest.raises(ValueError):
init_process_group(1, 0, backend='mpi')
def test_invalid_n_devices(self):
with pytest.raises(ValueError):
init_process_group(0, 0)
with pytest.raises(ValueError):
init_process_group(-1, 0)
def test_invalid_rank(self):
with pytest.raises(ValueError):
init_process_group(2, -1)
with pytest.raises(ValueError):
init_process_group(2, 3)
|
cupy/cupy
|
tests/cupyx_tests/distributed_tests/test_comm.py
|
Python
|
mit
| 2,852
|
#!/usr/bin/env python
"""Convert text and (start, end type) annotations into HTML."""
__author__ = 'Sampo Pyysalo'
__license__ = 'MIT'
import sys
import json
import re
import unicodedata
import urlparse  # needed by coarse_type() below
from collections import namedtuple
from collections import defaultdict
from itertools import chain
# the tag to use to mark annotated spans
TAG='span'
# vertical space between span boxes at different heights in pixels
# (including border)
VSPACE = 2
# text line height w/o annotations
BASE_LINE_HEIGHT = 24
# "effectively zero" height for formatting tags
EPSILON = 0.0001
class Span(object):
"""Represents a marked span of text.
Spans can represent either annotations or formatting. The former
    are rendered as text highlights, the latter as HTML formatting tags
such as <i> and <p>.
"""
def __init__(self, start, end, type_, formatting=None):
"""Initialize annotation or formatting span.
If formatting is None, determine whether or not this is a
formatting tag heuristically based on type_.
"""
self.start = start
self.end = end
self.type = type_
if formatting is not None:
self.formatting = formatting
else:
self.formatting = is_formatting_type(self.type)
self.nested = set()
self._height = None
self.start_marker = None
# generate link (<a> tag) with given href if not None
self.href = None
def tag(self):
"""Return HTML tag to use to render this marker."""
# Formatting tags render into HTML tags according to a custom
# mapping. Other tags with href render into links (<a>) unless
# they nest other spans (nested links are illegal, see
# http://www.w3.org/TR/html401/struct/links.html#h-12.2.2),
# and the remaining into the generic TAG.
if not self.formatting:
if self.href is not None and self.height() == 0:
return 'a'
else:
return TAG
else:
return type_to_formatting_tag(self.type)
def markup_type(self):
"""Return a coarse variant of the type that can be used as a label in
HTML markup (tag, CSS class name, etc)."""
return html_safe_string(coarse_type(self.type))
def sort_height(self):
"""Relative height of this tag for sorting purposes."""
# For the purposes of sorting, count the height of formatting
# tags similarly to other tags, adding a very small value,
# EPSILON, to give correct sort order.
if not self.formatting:
return self.height()
else:
# This +1 compensates for the lack of own height in height()
return self.height() + 1 + EPSILON
def height(self):
"""Relative height of this tag (except for sorting)."""
# Formatting tags have effectively zero height, i.e. they
# should not affect the height of tags that wrap them. TODO:
# this still leaves a height+1 effect when a formatting tag is
# the only one nested by a regular one; fix this.
ownh = 1 if not self.formatting else 0
if self._height is None:
if not self.nested:
self._height = 0
else:
self._height = max([n.height() for n in self.nested]) + ownh
return self._height
# Span type to HTML tag mapping for formatting spans. Note that these
# are URIs we came up with and not likely to be adopted by many tools
# (see e.g. https://github.com/spyysalo/knowtator2oa/issues/1).
FORMATTING_TYPE_TAG_MAP = {
'http://www.w3.org/TR/html/#b': 'b',
'http://www.w3.org/TR/html/#i': 'i',
'http://www.w3.org/TR/html/#u': 'u',
'http://www.w3.org/TR/html/#sup': 'sup',
'http://www.w3.org/TR/html/#sub': 'sub',
'http://purl.obolibrary.org/obo/IAO_0000314': 'section',
# forms used in initial CRAFT RDFization
'http://craft.ucdenver.edu/iao/bold': 'b',
'http://craft.ucdenver.edu/iao/italic': 'i',
'http://craft.ucdenver.edu/iao/underline': 'u',
'http://craft.ucdenver.edu/iao/sub': 'sub',
'http://craft.ucdenver.edu/iao/sup': 'sup',
}
def is_formatting_type(type_):
"""Return True if the given type can be assumed to identify a
formatting tag such as bold or italic, False otherwise."""
return type_ in FORMATTING_TYPE_TAG_MAP
def type_to_formatting_tag(type_):
"""Return the HTML tag corresponding to the given formatting type."""
tag = FORMATTING_TYPE_TAG_MAP.get(type_, type_)
return html_safe_string(tag) # just in case
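# Examples (illustrative; example.org URI is hypothetical):
#   type_to_formatting_tag('http://www.w3.org/TR/html/#b')  # -> 'b'
#   is_formatting_type('http://example.org/Person')         # -> False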
class Marker(object):
def __init__(self, span, offset, is_end, cont_left=False,
cont_right=False):
self.span = span
self.offset = offset
self.is_end = is_end
self.cont_left = cont_left
self.cont_right = cont_right
self.covered_left = False
self.covered_right = False
# at identical offsets, ending markers sort highest-last,
# starting markers highest-first.
self.sort_idx = self.span.sort_height() * (1 if self.is_end else -1)
# store current start marker in span to allow ending markers
# to affect tag style
if not is_end:
self.span.start_marker = self
# attributes in generated HTML
self._attributes = defaultdict(list)
def add_attribute(self, name, value):
self._attributes[name].append(value)
def get_attributes(self):
return sorted([(k, ' '.join(v)) for k, v in self._attributes.items()])
def attribute_string(self):
return ' '.join('%s="%s"' % (k, v) for k, v in self.get_attributes())
def fill_style_attributes(self):
self.add_attribute('class', 'ann')
self.add_attribute('class', 'ann-h%d' % self.span.height())
self.add_attribute('class', 'ann-t%s' % self.span.markup_type())
        # TODO: this will produce redundant class combinations in
        # some cases (e.g. "contleft openleft")
if self.cont_left:
self.add_attribute('class', 'ann-contleft')
if self.cont_right:
            self.add_attribute('class', 'ann-contright')
if self.covered_left:
self.add_attribute('class', 'ann-openleft')
if self.covered_right:
self.add_attribute('class', 'ann-openright')
def __unicode__(self):
if self.is_end:
return u'</%s>' % self.span.tag()
elif self.span.formatting:
# Formatting tags take no style
return u'<%s>' % self.span.tag()
else:
self.fill_style_attributes()
attributes = self.attribute_string()
return u'<%s %s>' % (self.span.tag(), attributes)
def marker_sort(a, b):
return cmp(a.offset, b.offset) or cmp(a.sort_idx, b.sort_idx)
def leftmost_sort(a, b):
c = cmp(a.start, b.start)
return c if c else cmp(b.end-b.start, a.end-a.start)
def longest_sort(a, b):
c = cmp(b.end-b.start, a.end-a.start)
return c if c else cmp(a.start, b.start)
def resolve_heights(spans):
# algorithm for determining visualized span height:
# 1) define strict total order of spans (i.e. for each pair of
# spans a, b, either a < b or b < a, with standard properties for
# "<")
# 2) traverse spans leftmost-first, keeping list of open spans,
# and for each span, sort open spans in defined order and add
# later spans to "nested" collections of each earlier span (NOTE:
# this step is simple, but highly sub-optimal)
# 3) resolve height as 0 for spans with no nested spans and
# max(height(n)+1) for n in nested for others.
open_span = []
for s in sorted(spans, leftmost_sort):
open_span = [o for o in open_span if o.end > s.start]
open_span.append(s)
# TODO: use a sorted container instead.
open_span.sort(longest_sort)
# WARNING: O(n^3) worst case!
# TODO: I think that only spans just before and just after the
# inserted span can have meaningful changes in their "nested"
# collections. Ignore others.
for i in range(len(open_span)):
for j in range(i+1, len(open_span)):
open_span[i].nested.add(open_span[j])
return max(s.height() for s in spans) if spans else -1
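# Illustrative example: for spans A=(0,10), B=(2,5) and C=(3,4),
# resolve_heights() yields C.height()==0, B.height()==1, A.height()==2
# and returns 2 (the maximum height).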
LEGEND_CSS=""".legend {
float:right;
margin: 20px;
border: 1px solid gray;
font-size: 90%;
background-color: #eee;
padding: 10px;
border-radius: 6px;
-moz-border-radius: 6px;
-webkit-border-radius: 6px;
box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2);
-moz-box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2);
-webkit-box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2);
line-height: normal;
font-family: sans-serif;
}
.legend span {
display: block;
padding: 2px;
margin: 2px;
}
.clearfix { /* from bootstrap, to avoid legend overflow */
*zoom: 1;
}
.clearfix:before,
.clearfix:after {
display: table;
line-height: 0;
content: "";
}
.clearfix:after {
clear: both;
}"""
BASE_CSS=""".ann {
border: 1px solid gray;
background-color: lightgray;
border-radius: 3px;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
}
.ann-openright {
border-right: none;
}
.ann-openleft {
border-left: none;
}
.ann-contright {
border-right: none;
border-top-right-radius: 0;
border-bottom-right-radius: 0;
}
.ann-contleft {
border-left: none;
border-top-left-radius: 0;
border-bottom-left-radius: 0;
}"""
def line_height_css(height):
if height == 0:
return ''
else:
return 'line-height: %dpx;\n' % (BASE_LINE_HEIGHT+2*height*VSPACE)
def generate_css(max_height, color_map, legend):
css = [LEGEND_CSS] if legend else []
css.append(BASE_CSS)
for i in range(max_height+1):
css.append(""".ann-h%d {
padding-top: %dpx;
padding-bottom: %dpx;
%s
}""" % (i, i*VSPACE, i*VSPACE, line_height_css(i)))
for t, c in color_map.items():
css.append(""".ann-t%s {
background-color: %s;
border-color: %s;
}""" % (html_safe_string(t), c, darker_color(c)))
return '\n'.join(css)
def uniq(s):
"""Return unique items in given sequence, preserving order."""
# http://stackoverflow.com/a/480227
seen = set()
return [ i for i in s if i not in seen and not seen.add(i)]
def generate_legend(types, colors):
parts = ['''<div class="legend">Legend<table>''']
for f, c in zip(types, colors):
t = html_safe_string(f)
tagl, tagr = '<%s class="ann ann-t%s">' % (TAG, t), '</%s>' % TAG
parts.append('<tr><td>%s%s%s</td></tr>' % (tagl, f, tagr))
parts.append('</table></div>')
return ''.join(parts)
# Mapping from known ontology ID prefixes to coarse human-readable types.
# (These are mostly CRAFT strings at the moment.)
prefix_to_coarse_type = {
'http://www.ncbi.nlm.nih.gov/taxonomy/': 'taxonomy',
'http://purl.obolibrary.org/obo/GO_': 'Gene Ontology',
'http://purl.obolibrary.org/obo/SO_': 'Sequence Ontology',
'http://purl.obolibrary.org/obo/PR_': 'Protein Ontology',
'http://www.ncbi.nlm.nih.gov/gene/': 'NCBI Gene',
'http://purl.obolibrary.org/obo/CHEBI_': 'ChEBI',
'http://purl.obolibrary.org/obo/NCBITaxon_': 'NCBI Taxon',
'http://purl.obolibrary.org/obo/CL_': 'Cell Ontology',
'http://purl.obolibrary.org/obo/BFO_': 'Basic Formal Ontology',
'http://purl.obolibrary.org/obo/DOID_': 'Disease Ontology',
'http://purl.obolibrary.org/obo/BTO_': 'BRENDA Tissue Ontology',
'http://purl.obolibrary.org/obo/NCBITaxon_taxonomic_rank': 'Rank',
'http://purl.obolibrary.org/obo/NCBITaxon_species': 'Species',
'http://purl.obolibrary.org/obo/NCBITaxon_subspecies': 'Subspecies',
    'http://purl.obolibrary.org/obo/NCBITaxon_phylum': 'Phylum',
'http://purl.obolibrary.org/obo/NCBITaxon_kingdom': 'Kingdom',
'http://purl.obolibrary.org/obo/IAO_0000314': 'section',
# STRING and STITCH DBs
'http://string-db.org/interactions/': 'stringdb',
'http://stitchdb-db.org/interactions/': 'stitchdb',
}
def coarse_type(type_):
"""Return short, coarse, human-readable type for given type.
    For example, for "http://purl.obolibrary.org/obo/SO_0000704" return
e.g. "Sequence Ontology".
"""
# TODO: consider caching
# Known mappings
for prefix, value in prefix_to_coarse_type.iteritems():
if type_.startswith(prefix):
return value
# Not known, apply heuristics. TODO: these are pretty crude and
# probably won't generalize well. Implement more general approach.
# start: body e.g. http://purl.obolibrary.org/obo/BFO_000000
try:
parsed = urlparse.urlparse(type_)
type_str = parsed.path
except Exception, e:
type_str = type_
parts = type_str.strip('/').split('/')
# split path: parts e.g. ['obo', 'SO_0000704'] or ['gene', '15139']
if len(parts) > 1 and parts[-2] == 'obo':
return parts[-1].split('_')[0]
elif parts[0] == 'gene':
return parts[0]
return type_str.strip('/').split('/')[-1]
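# Illustrative behaviour: 'http://purl.obolibrary.org/obo/SO_0000704' hits the
# known-prefix map above ('Sequence Ontology'); an unknown URI such as the
# hypothetical 'http://example.org/obo/XY_123' falls through to the path
# heuristics and yields 'XY'.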
def _add_formatting_spans(spans, text):
"""Add formatting spans based on text."""
# Skip if there are any formatting types in the user-provided data
# on the assumption that users able to do formatting will want
# full control.
if any(s for s in spans if is_formatting_type(s.type)):
return spans
# Add sections based on newlines in the text
offset = 0
section = 'http://purl.obolibrary.org/obo/IAO_0000314'
for s in re.split('(\n)', text):
if s and not s.isspace():
spans.append(Span(offset, offset+len(s), section, formatting=True))
offset += len(s)
return spans
def _filter_empty_spans(spans):
filtered = []
for span in spans:
if span.start == span.end:
            print >> sys.stderr, 'Warning: ignoring empty span'
else:
filtered.append(span)
return filtered
def _standoff_to_html(text, standoffs, legend, tooltips, links):
"""standoff_to_html() implementation, don't invoke directly."""
# Convert standoffs to Span objects.
spans = [Span(so.start, so.end, so.type) for so in standoffs]
# Add formatting such as paragraph breaks if none are provided.
spans = _add_formatting_spans(spans, text)
# Filter out empty spans (not currently supported)
spans = _filter_empty_spans(spans)
# Generate mapping from detailed to coarse types. Coarse types
# group detailed types for purposes of assigning display colors
# etc.
types = uniq(s.type for s in spans if not s.formatting)
type_to_coarse = { t: coarse_type(t) for t in types }
coarse_types = uniq(type_to_coarse.values())
# Pick a color for each coarse type.
types = uniq(s.type for s in spans if not s.formatting)
colors = span_colors(coarse_types)
color_map = dict(zip(coarse_types, colors))
# generate legend if requested
if not legend:
legend_html = ''
else:
# full_forms = uniq(so.type for so in standoffs)
# type_to_full_form = { html_safe_string(f) : f for f in full_forms }
# legend_types = [ type_to_full_form[t] for t in types ]
legend_html = generate_legend(coarse_types, colors)
# resolve height of each span by determining span nesting
max_height = resolve_heights(spans)
# Generate CSS as combination of boilerplate and height-specific
# styles up to the required maximum height.
css = generate_css(max_height, color_map, legend)
# Decompose into separate start and end markers for conversion
# into tags.
markers = []
for s in spans:
markers.append(Marker(s, s.start, False))
markers.append(Marker(s, s.end, True))
markers.sort(marker_sort)
# process markers to generate additional start and end markers for
# instances where naively generated spans would cross.
i, o, out = 0, 0, []
open_span = set()
while i < len(markers):
if o != markers[i].offset:
out.append(text[o:markers[i].offset])
o = markers[i].offset
# collect markers opening or closing at this position and
# determine max opening/closing marker height
to_open, to_close = [], []
max_change_height = -1
last = None
for j in range(i, len(markers)):
if markers[j].offset != o:
break
if markers[j].is_end:
to_close.append(markers[j])
else:
to_open.append(markers[j])
max_change_height = max(max_change_height, markers[j].span.height())
last = j
# open spans of height < max_change_height must close to avoid
# crossing tags; add also to spans to open to re-open and
# make note of lowest "covered" depth.
min_cover_height = float('inf') # TODO
for s in open_span:
if s.height() < max_change_height and s.end != o:
s.start_marker.cont_right = True
to_open.append(Marker(s, o, False, True))
to_close.append(Marker(s, o, True))
min_cover_height = min(min_cover_height, s.height())
# mark any tags behind covering ones so that they will be
# drawn without the crossing border
for m in to_open:
if m.span.height() > min_cover_height:
m.covered_left = True
for m in to_close:
if m.span.height() > min_cover_height:
m.span.start_marker.covered_right = True
        # reorder (note: might be unnecessary in some cases; in particular,
# close tags will typically be identical, so only their number
# matters)
to_open.sort(marker_sort)
to_close.sort(marker_sort)
# add tags to stream
for m in to_close:
out.append(m)
open_span.remove(m.span)
for m in to_open:
out.append(m)
open_span.add(m.span)
i = last+1
out.append(text[o:])
if legend_html:
out = [legend_html] + out
# add in attributes to trigger tooltip display
if tooltips:
for m in (o for o in out if isinstance(o, Marker) and not o.is_end):
m.add_attribute('class', 'hint--top')
            # TODO: useful, not redundant info
m.add_attribute('data-hint', m.span.type)
# add in links for spans with HTML types if requested
if links:
for m in (o for o in out if isinstance(o, Marker) and not o.is_end):
# TODO: better heuristics
if m.span.type.startswith('http://'):
m.span.href = m.span.type
m.add_attribute('href', m.span.href)
m.add_attribute('target', '_blank')
return css, u''.join(unicode(o) for o in out)
def darker_color(c, amount=0.3):
"""Given HTML-style #RRGGBB color string, return variant that is
darker by the given amount."""
import colorsys
if c and c[0] == '#':
c = c[1:]
if len(c) != 6:
raise ValueError
r, g, b = map(lambda h: int(h, 16)/255., [c[0:2],c[2:4],c[4:6]])
h, s, v = colorsys.rgb_to_hsv(r, g, b)
v *= 1.0-amount
    r, g, b = [int(255*x) for x in colorsys.hsv_to_rgb(h, s, v)]
return '#%02x%02x%02x' % (r, g, b)
def random_colors(n, seed=None):
import random
import colorsys
random.seed(seed)
# based on http://stackoverflow.com/a/470747
colors = []
for i in range(n):
hsv = (1.*i/n, 0.9 + random.random()/10, 0.9 + random.random()/10)
        rgb = tuple(int(255*x) for x in colorsys.hsv_to_rgb(*hsv))
colors.append('#%02x%02x%02x' % rgb)
return colors
# Kelly's high-contrast colors [K Kelly, Color Eng., 3 (6) (1965)],
# via http://stackoverflow.com/a/4382138. Changes: black excluded as
# not applicable here, plus some reordering (numbers in comments give
# original order).
kelly_colors = [
# '#000000', # 2 black
'#FFB300', # 3 yellow
'#007D34', # 10 green
'#FF6800', # 5 orange
'#A6BDD7', # 6 light blue
'#C10020', # 7 red
'#CEA262', # 8 buff
'#817066', # 9 gray
# '#FFFFFF', # 1 white
'#803E75', # 4 purple
'#F6768E', # 11 purplish pink
'#00538A', # 12 blue
'#FF7A5C', # 13 yellowish pink
'#53377A', # 14 violet
'#FF8E00', # 15 orange yellow
'#B32851', # 16 purplish red
'#F4C800', # 17 greenish yellow
'#7F180D', # 18 reddish brown
'#93AA00', # 19 yellow green
'#593315', # 20 yellowish brown
'#F13A13', # 21 reddish orange
'#232C16', # 22 olive green
]
# Pre-set colors
type_color_map = {
'Organism_subdivision': '#ddaaaa',
'Anatomical_system': '#ee99cc',
'Organ': '#ff95ee',
'Multi-tissue_structure': '#e999ff',
'Tissue': '#cf9fff',
'Developing_anatomical_structure': '#ff9fff',
'Cell': '#cf9fff',
'Cellular_component': '#bbc3ff',
'Organism_substance': '#ffeee0',
'Immaterial_anatomical_entity': '#fff9f9',
'Pathological_formation': '#aaaaaa',
'Cancer': '#999999',
}
def span_colors(types):
missing = [t for t in types if t not in type_color_map]
if len(missing) <= len(kelly_colors):
fill = kelly_colors[:len(missing)]
else:
fill = random_colors(len(missing), 1)
colors = []
i = 0
for t in types:
if t in type_color_map:
colors.append(type_color_map[t])
else:
colors.append(fill[i])
i += 1
return colors
# exceptions for html_safe_string
_html_safe_map = {
'(': u'LEFT-PAREN',
')': u'RIGHT-PAREN',
}
def html_safe_string(s, encoding='utf-8'):
"""Given a non-empty string, return a variant that can be used as
a label in HTML markup (tag, CSS class name, etc)."""
# TODO: consider caching
if not s or s.isspace():
raise ValueError('empty string "%s"' % s)
if isinstance(s, unicode):
c = s
else:
c = s.decode(encoding)
# specific exceptions
c = _html_safe_map.get(c, c)
# adapted from http://stackoverflow.com/q/5574042
c = unicodedata.normalize('NFKD', c).encode('ascii', 'ignore')
c = re.sub(r'[^_a-zA-Z0-9-]', '-', c)
c = re.sub(r'--+', '-', c)
c = c.strip('-')
if c and c[0].isdigit():
c = '_' + c
if len(c) == 0:
c = 'NON-STRING-TYPE'
# Sanity check from http://stackoverflow.com/a/449000, see also
# http://www.w3.org/TR/CSS21/grammar.html#scanner
assert re.match(r'^-?[_a-zA-Z]+[_a-zA-Z0-9-]*', c), \
'Internal error: failed to normalize "%s"' % s
return c
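# Examples (illustrative):
#   html_safe_string('Gene Ontology')  # -> 'Gene-Ontology'
#   html_safe_string('(')              # -> 'LEFT-PAREN'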
def json_to_standoffs(j):
try:
spans = json.loads(j)
except ValueError, e:
print >> sys.stderr, 'json.loads failed for "%s": %s' % (j, e)
raise
MyStandoff = namedtuple('MyStandoff', 'start end type')
# if any span is missing a type specification, assign a
# simple numeric new one without repeating.
missing_count = len([s for s in spans if len(s) < 3])
standoffs, i = [], 0
for s in spans:
if len(s) < 1:
continue # ignore empties
if len(s) < 2:
s = [s[0], s[0]] # map singletons to zero-width
if len(s) < 3:
s = [s[0], s[1], 'type-%d' % (i+1)]
i += 1
standoffs.append(MyStandoff(*s))
return standoffs
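# Example (illustrative):
#   json_to_standoffs('[[0,3,"Person"],[5,7]]')
#   -> [MyStandoff(start=0, end=3, type='Person'),
#       MyStandoff(start=5, end=7, type='type-1')]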
def _header_html(css, links):
return """<!DOCTYPE html>
<html>
<head>
%s
<style type="text/css">
html {
background-color: #eee;
font-family: sans;
}
body {
background-color: #fff;
border: 1px solid #ddd;
padding: 15px; margin: 15px;
line-height: %dpx
}
section {
padding: 5px;
}
%s
/* This is a hack to correct for hint.css making blocks too high. */
.hint, [data-hint] { display: inline; }
/* Block linking from affecting styling */
a.ann {
text-decoration: none;
color: inherit;
}
</style>
</head>
<body class="clearfix">""" % (links, BASE_LINE_HEIGHT, css)
def _trailer_html():
return """</body>
</html>"""
def standoff_to_html(text, standoffs, legend=True, tooltips=False,
links=False):
"""Create HTML representation of given text and standoff
annotations.
"""
css, body = _standoff_to_html(text, standoffs, legend, tooltips, links)
# Note: tooltips are not generated by default because their use
# depends on the external CSS library hint.css and this script
# aims to be standalone in its basic application.
if not tooltips:
links_string = ''
else:
links_string = '<link rel="stylesheet" href="static/css/hint.css">'
return (_header_html(css, links_string) + body + _trailer_html())
def main(argv=None):
if argv is None:
argv = sys.argv
if len(argv) == 4 and argv[1] == '-n':
argv = argv[:1] + argv[2:]
legend = False
else:
legend = True
if len(argv) != 3:
print >> sys.stderr, 'Usage:', argv[0], '[-n] TEXT SOJSON'
print >> sys.stderr, ' e.g.', argv[0], '\'Bob, UK\' \'[[0,3,"Person"],[5,7,"GPE"]]\''
return 1
text = argv[1]
standoffs = json_to_standoffs(argv[2])
print standoff_to_html(text, standoffs, legend)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
restful-open-annotation/restoa-explorer
|
so2html.py
|
Python
|
mit
| 25,363
|
import wegene
wegene.Configuration.BASE_URI = 'https://api.wegene.com'
wegene.Configuration.o_auth_access_token = '<A Valid Access Token with Proper Scope>'
profile_id = ''
try:
user = wegene.WeGeneUser().get_user()
profile_id = user.profiles[0].id
print('--- Profile ---')
print(profile_id)
print(user.profiles[0].format)
print(user.profiles[0].name)
except Exception as e:
print(e.response_body)
try:
risk = wegene.Risk().get_risk(profile_id, 88)
print('--- Risk ---')
print(risk.caseid)
print(risk.description)
print(risk.risk)
print(risk.genotypes.data[0].rsid)
except Exception as e:
print(e.response_body)
try:
athletigen = wegene.Athletigen().get_athletigen(profile_id, 1487)
print('--- Athletigen ---')
print(athletigen.caseid)
print(athletigen.description)
print(athletigen.rank)
print(athletigen.genotypes.data[0].rsid)
print(athletigen.genotypes.data[0].genotype)
except Exception as e:
print(e.response_body)
try:
skin = wegene.Skin().get_skin(profile_id, 1522)
print('--- Skin ---')
print(skin.caseid)
print(skin.description)
print(skin.rank)
print(skin.genotypes.data[0].rsid)
print(skin.genotypes.data[0].genotype)
except Exception as e:
print(e.response_body)
try:
psychology = wegene.Psychology().get_psychology(profile_id, 1557)
print('--- Psychology ---')
print(psychology.caseid)
print(psychology.description)
print(psychology.rank)
print(psychology.genotypes.data[0].rsid)
print(psychology.genotypes.data[0].genotype)
except Exception as e:
print(e.response_body)
try:
ancestry = wegene.Ancestry().get_ancestry(profile_id)
print('--- Ancestry Composition ---')
print(ancestry.block.chinese_nation)
print(ancestry.area.han_southern)
except Exception as e:
print(e.response_body)
try:
haplogroups = wegene.Haplogroups().get_haplogroups(profile_id)
print('--- Haplogroups ---')
print(haplogroups.y)
print(haplogroups.mt)
except Exception as e:
print(e.response_body)
try:
demographics = wegene.Demographics().get_demographics(profile_id)
print('--- Demographics ---')
print(demographics.surname)
print(demographics.population)
except Exception as e:
print(e.response_body)
try:
drug = wegene.Health().get_drug(profile_id, 1481)
print('--- Drug ---')
print(drug.caseid)
print(drug.description)
print(drug.tsummary)
print(drug.genotypes.data[0].rsid)
print(drug.genotypes.data[0].genotype)
except Exception as e:
print(e.response_body)
try:
metabolism = wegene.Health().get_metabolism(profile_id, 5)
print('--- Metabolism ---')
print(metabolism.caseid)
print(metabolism.description)
print(metabolism.rank)
print(metabolism.genotypes.data[0].rsid)
print(metabolism.genotypes.data[0].genotype)
except Exception as e:
print(e.response_body)
try:
carrier = wegene.Health().get_carrier(profile_id, 184)
print('--- Carrier ---')
print(carrier.caseid)
print(carrier.description)
print(carrier.genotypes.data[0].rsid)
for allele in carrier.genotypes.data:
print(allele.genotype)
print(allele.tsummary)
except Exception as e:
print(e.response_body)
try:
traits = wegene.Health().get_traits(profile_id, 34)
print('--- Traits ---')
print(traits.caseid)
print(traits.description)
print(traits.tsummary)
print(traits.genotypes.data[0].rsid)
print(traits.genotypes.data[0].genotype)
except Exception as e:
print(e.response_body)
try:
allele = wegene.Allele().get_allele(profile_id, ['rs671'])
print('--- Allele ---')
print(allele['RS671'])
except Exception as e:
print(e.response_body)
|
xraywu/wegene-python-sdk
|
example/query/example.py
|
Python
|
mit
| 3,770
|
#! /usr/bin/env python
# coding=utf8
from BotModule import BotModule
import urllib2
import unicodedata
from BeautifulSoup import BeautifulSoup
import os, sys, re, time, datetime
# This seems like somebody did not know what they were doing...
def toFloat(s):
f = 0.0
try:
f = float(s)
except:
pass
return f
def toInt(s):
f = 0
try:
f = int(s)
except:
pass
return f
#TODO: Need a Regex here?
def formatString(str):
while str.count(" ") != 0:
str = str.replace(" ", " ")
    return str
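# A regex-based alternative to the loop above (equivalent behaviour,
# answering the TODO):
#   def formatString(s):
#       return re.sub(" {2,}", " ", s)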
class MensaModule(BotModule):
def __init__(self):
self.lastFetch = 0
self.lastDaily = 0
self.cache = None
self.foo = False
return
def getMeals(self):
if self.cache != None and (self.lastFetch + 60.0*60.0) > time.time():
return self.cache
url = "http://www.studentenwerk-karlsruhe.de/de/essen/?view=ok&STYLE=popup_plain&c=moltke"
page = urllib2.urlopen(url)
soup = BeautifulSoup(page)
soup = BeautifulSoup(str(soup.body.table))
days = soup.findAll("table", {"width" : "700"})
mensa = []
dayNumber = 0
for day in days:
linenames = []
for line in day.findAll("td", {"width" : "20%"}):
s = line.find(text=True)
s = unicodedata.normalize('NFD', s).encode('utf-8','replace')
linenames.append(str(s))
meals = []
for i in range(len(linenames)):
meals.append([])
linenumber = 0
for meal in day.findAll("table"):
for foo in meal.findAll("tr"):
description = foo.findAll(text=True)
s = ''.join(description).replace("\n"," ").replace("€", "Eur")
s = unicodedata.normalize('NFD', s).encode('utf-8','replace')
meals[linenumber].append(formatString(str(s)))
linenumber = linenumber + 1
dayNumber = dayNumber + 1
mensa.append([linenames, meals])
self.cache = mensa
self.lastFetch = time.time()
return mensa
def buildMensaplan(self, dayOffset):
mensaplan = []
#empty = True
meals = self.getMeals()
for i in range(len(meals[dayOffset][0])):
send = False
            line = "[" + meals[dayOffset][0][i] + "]"
for f in range(len(meals[dayOffset][1][i])):
price = 0.0
currentMeal = meals[dayOffset][1][i][f]
                r = re.compile(r"(\d+,\d\d) E")
prices = r.findall(currentMeal)
if(len(prices) == 1):
price = toFloat(prices[0].replace(",","."))
if price > 1.0:
send = True
if f != 0:
line = line + " - "
line = line + currentMeal
if send:
#self.sendPrivateMessage(nick, line)
line = formatString(line)
mensaplan.append(line)
#empty = False
return mensaplan
def tick(self):
now = datetime.datetime.now()
ti = toInt(now.strftime("%H%M"))
offset = (60*60*24) - 60*2
if ti in (1100, 1200) and (self.lastDaily + offset) < time.time() and datetime.date.weekday(datetime.date.today()) <= 4:
mensaplan = self.buildMensaplan(0)
if len(mensaplan) == 0:
return
self.sendPublicMessage("Mensaplan für heute, via http://tinyurl.com/mensa-moltke")
for s in mensaplan:
self.sendPublicMessage(s)
self.lastDaily = time.time()
def command(self, nick, cmd, args, type):
if cmd == "!mensa":
try:
dayOffset = 0
if len(args) == 1:
                    when = args[0].lower()
                    if when == "heute" or when == "today":
                        dayOffset = 0
                    elif when == "morgen" or when == "tomorrow":
                        dayOffset = 1
                    elif when == "übermorgen" or when == "uebermorgen":
                        dayOffset = 2
else:
dayOffset = toInt(args[0])
if dayOffset < 0 or dayOffset >= 5:
self.sendPrivateMessage(nick, "Mensa: Fehler, Tagesoffset ausserhalb der gültigen Reichweite.")
return
mensaplan = self.buildMensaplan(dayOffset)
if len(mensaplan) == 0:
self.sendPrivateMessage(nick, "Keine Gerichte gefunden.")
else:
self.sendPrivateMessage(nick, "Mensaplan via http://tinyurl.com/mensa-moltke")
for s in mensaplan:
self.sendPrivateMessage(nick, s)
except:
self.sendPrivateMessage(nick, "Exception returned. Fixme!")
def help(self, nick):
self.sendPrivateMessage(nick, "!mensa [offset] - Mensaplan der laufenden Woche. offset = [0-5] oder 'heute'/'morgen', default 'heute'")
return
|
fsi-hska/fsiBot
|
modules/MensaModule.py
|
Python
|
mit
| 4,149
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Moises Gautier Gomez
# Final degree project - Computer Engineering
# Universidad de Granada
# Configuration of the Django ORM for use outside the application
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "secproject.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from iptables import Iptables
# We create a Source-type object and operate on it
class Controller(object):
    """
    Controller class for managing the different sources
    """
    # Name of the source
    source = ""
    # Arguments to pass when running the Threads
    args = ()
    # Source information used when creating its associated thread
    source_info = {}
    # PID of the parent process (in this case the Main process)
    parent_pid = str(os.getpid())
    # PID of the child process (in this case that of the corresponding source)
    child_pid = str(os.getpid())
    # Thread name following the naming scheme generated by threading
    name_thread = ""
    def get_source(self):
        """
        Method that returns the source type
        Returns: String with the value of the processed source
        """
        return self.source
    def get_parent_pid(self):
        """
        Method that returns the pid associated with the main thread (Main)
        Returns: String with the PID of the running main thread (Main)
        """
        return self.parent_pid
    def get_child_pid(self):
        """
        Method that returns the pid associated with the child thread (of the processed source)
        Returns: String with the PID of the running child thread (of the processed source)
        """
        return self.child_pid
    def get_configuration(self):
        """
        Method that returns the path of the source's configuration file
        Returns: String with the path of the source's configuration file
        """
        return self.source_info['C']
    def get_type_source(self):
        """
        Method that returns the type of the source to process
        Returns: String with the type of the source to process
        """
        return self.source_info['T']
    def get_model_source(self):
        """
        Method that returns the model of the source to process
        Returns: String with the model of the source to process
        """
        return self.source_info['M']
    def get_log_processing(self):
        """
        Method that returns the path of the log file for processing and obtaining events
        Returns: String with the path of the log file
        """
        return self.source_info['P']
    def get_source_info(self):
        """
        Method that returns all the source's prior information before its execution
        Returns: Dictionary with the source's prior information before its execution
        """
        return self.source_info
    def get_info(self):
        """
        Method that prints to standard output all the information belonging to the source
        Returns: None
        """
        print "--------------------------------------------------"
        print "Source --> ", self.get_source()
        print "Parent pid --> ", self.get_parent_pid()
        print "Child pid --> ", self.get_child_pid()
        print "Type source --> ", self.get_type_source()
        print "Model source --> ", self.get_model_source()
        print "Configuration file --> ", self.get_configuration()
        print "Log processing --> ", self.get_log_processing()
        print "Thread name --> ", self.name_thread
        print "--------------------------------------------------"
class IptablesController(Controller):
    """
    IptablesController class that inherits the behaviour of Controller and is in charge of running
    the Iptables class for the execution of the associated events.
    """
    def set_source_info(source_info):
        """
        Internal method of the IptablesController class used to modify the configuration
        parameters prior to the initialization of the iptables processing thread
        Args:
            source_info: Dictionary with the prior configuration information for running iptables
        Returns: Returns the configuration to apply to the iptables execution
        """
print "--------------------------------------------------"
print "Introduce los parametros de la configuracion de la fuente - iptables"
print "Valores por defecto ----"
print "[1] Ruta procesamiento: \'/var/log/iptables.log\',"
print "[2] Configuración fuente: \'.kernel/conf/iptables-conf.conf\'"
print "[3] Salir de la configuración"
print "Si no quieres modificar el campo introduce Intro en la selección"
print "--------------------------------------------------"
choose = 0
path_source = ""
config_source = ""
set_config = {}
while choose != str(3):
print "--------------------------------------------------"
choose = str(raw_input('Introduce parámetro a modificar ([3] - Saltar este paso, [0] - Ayuda): '))
print "--------------------------------------------------"
if choose == str(0):
print "--------------------------------------------------"
print "Introduce los parametros de la configuracion de la fuente - iptables"
print "Valores por defecto ----"
print "[1] Ruta procesamiento: \'/var/log/iptables.log\',"
print "[2] Configuración fuente: \'.kernel/conf/iptables-conf.conf\'"
print "[3] Salir de la configuración"
print "--------------------------------------------------"
elif choose == str(1):
print "--------------------------------------------------"
path_source = str(raw_input('Ruta de procesamiento de logs Ejemplo(/var/log/iptables.log): '))
print "--------------------------------------------------"
if not source_info['P'] == path_source:
if not path_source:
set_config['P'] = path_source
else:
set_config['P'] = source_info['P']
elif choose == str(2):
print "--------------------------------------------------"
config_source = str(
raw_input('Archivo de configuración fuente Ejemplo(./kernel/conf/iptables-conf.conf): '))
print "--------------------------------------------------"
if not source_info['C'] == config_source:
if not config_source:
set_config['C'] = config_source
else:
set_config['C'] = source_info['C']
elif choose == str(3):
if not path_source and not config_source:
return source_info
else:
set_config = {
'T': source_info['T'],
'M': source_info['M'],
'P': path_source,
'C': config_source
}
elif choose:
pass
return set_config
    # If I use fork it raises the Runtime error and runs another pid equal to this one;
    # if I comment out the fork it works correctly, but when this Main process finishes
    # it ends the whole processing execution
    # Related to the start() method of threading:
#
# Start the thread’s activity.
# It must be called at most once per thread object. It arranges for the object’s run() method to be invoked in a
# separate thread of control.
# This method will raise a RuntimeError if called more than once on the same thread object.
# child_pid = os.fork()
source = "iptables"
source_info = {'T': 'Firewall',
'M': 'iptables',
'P': '/var/log/iptables.log',
'C': './kernel/conf/iptables-conf.conf'}
args = (1,)
settings = set_source_info(source_info)
    # I only change the configuration file and the log file since, for this case, there will always be a
    # fixed value for the source type and for the model it represents within the system.
    # If nothing has been modified this assignment will be useless; otherwise the dictionary is modified
if not settings == source_info:
source_info = settings
try:
thread_iptables = Iptables(
args=args,
source_info=source_info
)
thread_iptables.start()
name_thread = thread_iptables.getName()
except Exception as ex:
print "Exception --> ", ex
class SnortController(Controller):
    """
    Example Snort class
    """
    source = "snort"
class GlancesController(Controller):
    """
    Example Glances class
    """
    source = "glances"
|
MGautier/security-sensor
|
trunk/version-1-0/webapp/secproject/controller.py
|
Python
|
mit
| 9,212
|
import urllib
import json
import numpy as np
import pandas as pd
import multiprocessing
from multiprocessing.pool import ThreadPool
from functools import partial
import time
from yelp.client import Client
from yelp.oauth1_authenticator import Oauth1Authenticator
import config as cf
class Timer:
def __init__(self, fnname):
self.name = fnname
def __enter__(self):
self.start = time.clock()
return self
def __exit__(self, *args):
self.end = time.clock()
self.interval = self.end - self.start
print(self.name + " took " + str(self.interval) + " sec.")
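# Usage sketch: time any block of work, e.g.
#   with Timer("fetch"):
#       _fetch_url(url)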
def _fetch_url(url):
f = urllib.urlopen(url)
response = json.loads(f.read())
return response
def fs_loc_list(lat, lng, query):
print("using four square")
fs_secret=cf.read_api_config('fs_secret')
fs_client=cf.read_api_config('fs_client')
srchquery="https://api.foursquare.com/v2/venues/search?near=calgary,ab&query="
srchquery+=query
srchquery+="&v=20150214&m=foursquare&client_secret=" + fs_secret + "&client_id=" + fs_client
res = _fetch_url(srchquery)
#print res
loc_list = []
name = []
address = []
for i in range(len(res['response']['venues'])):
lat=res['response']['venues'][i]['location']['lat']
lng=res['response']['venues'][i]['location']['lng']
name.append(res['response']['venues'][i]['name'])
loc_list.append([lat, lng])
address.append(res['response']['venues'][i]['location']['formattedAddress'][0])
gps_array = np.array(loc_list)
name = np.array(name)
address = np.array(address)
return gps_array, name, address
def go_loc_list(lat, lng, query):
# lat = 51.135494
# lng = -114.158389
# query = 'japanese restaurant'
print("using google")
query += " Calgary AB"
    loc_p = 'location=' + str(lat) + ',' + str(lng)
qry_list = query.strip().split(' ')
qry_p = 'query=' + qry_list[0]
for i in qry_list[1:]:
qry_p += '+'
qry_p += i
rad_p = 'radius=10000'
api_key = "key=" + cf.read_api_config('google') # yyc Calgary key google places api web service
srch = 'https://maps.googleapis.com/maps/api/place/textsearch/json?'
srch += qry_p + '&'
srch += loc_p + '&'
srch += rad_p + '&'
srch += api_key
res = _fetch_url(srch)
# return res
# print(res)
loc_list = []
name = []
address = []
for loc in res['results']:
lat = loc['geometry']['location']['lat']
lng = loc['geometry']['location']['lng']
loc_list.append([lat, lng])
name.append(loc['name'])
address.append(loc['formatted_address'])
while('next_page_token' in res and len(name)<40):
page_token = "pagetoken=" + res['next_page_token']
srch = 'https://maps.googleapis.com/maps/api/place/textsearch/json?'
# srch += qry_p + '&'
srch += page_token +"&"
srch += api_key
res = _fetch_url(srch)
for loc in res['results']:
lat = loc['geometry']['location']['lat']
lng = loc['geometry']['location']['lng']
loc_list.append([lat, lng])
name.append(loc['name'])
address.append(loc['formatted_address'])
gps_array = np.array(loc_list)
name = np.array(name)
address = np.array(address)
# print name
# print address
return gps_array, name, address
def yelp_batch(lat_lng_pairs, query):
print("using yelp")
with Timer("yelp query"):
        partial_yelp = partial(_yelp_batch_individual, query=query)
workers = multiprocessing.cpu_count()
# p=multiprocessing.Pool(workers)
p=ThreadPool(100)
result = p.map(partial_yelp, lat_lng_pairs)
p.close()
p.join()
df = pd.concat(result, ignore_index=True)
df.drop_duplicates('name', inplace = True)
print("Total no of raw results " + str(len(df)))
return df
def _yelp_batch_individual(lat_lng, query):
return yelp_loc_list(lat_lng[0], lat_lng[1], query)
def _yelp_rec_batch_individual(rec, query):
return yelp_rec_list(rec, query)
def yelp_rec_batch(rec_array, query):
print("using yelp")
with Timer("yelp query"):
        partial_yelp = partial(_yelp_rec_batch_individual, query=query)
# workers = multiprocessing.cpu_count()
# p=multiprocessing.Pool(workers)
p=ThreadPool(100)
result = p.map(partial_yelp, rec_array)
p.close()
p.join()
df = pd.concat(result, ignore_index=True)
df.drop_duplicates('name', inplace = True)
print("Total no of raw results " + str(len(df)))
return df
def yelp_rec_list(rec, query):
"""
return yelp results based on user lat and lng, search query
:param lat:
:param lng:
:param query:
:return: dataframe object, columns=['name', 'address', 'image_url', 'yelp_url', 'review_count', 'ratings_img_url', 'lat','lon']
"""
    def get_yelp(rectangle):
auth = Oauth1Authenticator( consumer_key=cf.read_api_config('yelp_consumer_key'),
consumer_secret=cf.read_api_config('yelp_consumer_secret'),
token=cf.read_api_config('yelp_token'),
token_secret=cf.read_api_config('yelp_token_secret'))
client = Client(auth)
df = pd.DataFrame(columns=['name', 'address', 'image_url', 'yelp_url', 'review_count', 'ratings_img_url', 'lat','lon'])
for i in range(0, 1):
# if(len(df) < 20 and len(df) != 0):
# break
            response = client.search_by_bounding_box(rectangle[0], rectangle[1], rectangle[2], rectangle[3], term=query, limit='20', sort='0')
# response = client.search_by_coordinates( lat, lng, accuracy=None, altitude=None, altitude_accuracy=None, term=query, limit='20', radius_filter=radius_filter, sort='0', offset=str(i*20)) # meter
for loc in response.businesses:
df.loc[len(df)+1]=[loc.name,
' '.join(loc.location.display_address),
loc.image_url, loc.url,
loc.review_count,
loc.rating_img_url,
loc.location.coordinate.latitude,
loc.location.coordinate.longitude]
# df.drop_duplicates('name', inplace = True)
# print("no of raw results " + str(len(df)))
return df
df = get_yelp(rec)
# if(len(df)<20):
# df = get_yelp('20000')
df[['review_count']] = df[['review_count']].astype(int)
print("no of raw results " + str(len(df)))
return df
def yelp_loc_list(lat, lng, query):
"""
return yelp results based on user lat and lng, search query
:param lat:
:param lng:
:param query:
:return: dataframe object, columns=['name', 'address', 'image_url', 'yelp_url', 'review_count', 'ratings_img_url', 'lat','lon']
"""
auth = Oauth1Authenticator( consumer_key=cf.read_api_config('yelp_consumer_key'),
consumer_secret=cf.read_api_config('yelp_consumer_secret'),
token=cf.read_api_config('yelp_token'),
token_secret=cf.read_api_config('yelp_token_secret'))
client = Client(auth)
def get_yelp(radius_filter):
df = pd.DataFrame(columns=['name', 'address', 'image_url', 'yelp_url', 'review_count', 'ratings_img_url', 'lat','lon'])
for i in range(0, 2):
if(len(df) < 20 and len(df) != 0):
break
response = client.search_by_coordinates( lat, lng, accuracy=None, altitude=None, altitude_accuracy=None, term=query, limit='20', radius_filter=radius_filter, sort='0', offset=str(i*20)) # meter
for loc in response.businesses:
df.loc[len(df)+1]=[loc.name,
' '.join(loc.location.display_address),
loc.image_url, loc.url,
loc.review_count,
loc.rating_img_url,
loc.location.coordinate.latitude,
loc.location.coordinate.longitude]
# df.drop_duplicates('name', inplace = True)
# print("no of raw results " + str(len(df)))
return df
df = get_yelp('3000')
# if(len(df)<20):
# df = get_yelp('20000')
df[['review_count']] = df[['review_count']].astype(int)
# print("no of raw results " + str(len(df)))
return df
if __name__=="__main__":
lat = 51.0454027
lng = -114.05651890000001
query = "restaurant"
# print(yelp_loc_list(lat, lng, query))
print(yelp_batch([[51.0454027, -114.05652], [51.0230, -114.123]], query))
|
FiniteElementries/OneBus
|
Database/query_api.py
|
Python
|
mit
| 8,957
|
import unittest
from word2number import w2n
class TestW2N(unittest.TestCase):
def test_positives(self):
self.assertEqual(w2n.word_to_num("two million three thousand nine hundred and eighty four"), 2003984)
self.assertEqual(w2n.word_to_num("nineteen"), 19)
self.assertEqual(w2n.word_to_num("two thousand and nineteen"), 2019)
self.assertEqual(w2n.word_to_num("two million three thousand and nineteen"), 2003019)
self.assertEqual(w2n.word_to_num('three billion'), 3000000000)
self.assertEqual(w2n.word_to_num('three million'), 3000000)
self.assertEqual(w2n.word_to_num('one hundred twenty three million four hundred fifty six thousand seven hundred and eighty nine')
, 123456789)
self.assertEqual(w2n.word_to_num('eleven'), 11)
self.assertEqual(w2n.word_to_num('nineteen billion and nineteen'), 19000000019)
self.assertEqual(w2n.word_to_num('one hundred and forty two'), 142)
self.assertEqual(w2n.word_to_num('112'), 112)
self.assertEqual(w2n.word_to_num('11211234'), 11211234)
self.assertEqual(w2n.word_to_num('five'), 5)
self.assertEqual(w2n.word_to_num('two million twenty three thousand and forty nine'), 2023049)
self.assertEqual(w2n.word_to_num('two point three'), 2.3)
self.assertEqual(w2n.word_to_num('two million twenty three thousand and forty nine point two three six nine'), 2023049.2369)
self.assertEqual(w2n.word_to_num('one billion two million twenty three thousand and forty nine point two three six nine'), 1002023049.2369)
self.assertEqual(w2n.word_to_num('point one'), 0.1)
self.assertEqual(w2n.word_to_num('point'), 0)
self.assertEqual(w2n.word_to_num('point nineteen'), 0)
self.assertEqual(w2n.word_to_num('one hundred thirty-five'), 135)
self.assertEqual(w2n.word_to_num('hundred'), 100)
self.assertEqual(w2n.word_to_num('thousand'), 1000)
self.assertEqual(w2n.word_to_num('million'), 1000000)
self.assertEqual(w2n.word_to_num('billion'), 1000000000)
self.assertEqual(w2n.word_to_num('nine point nine nine nine'), 9.999)
self.assertEqual(w2n.word_to_num('seventh point nineteen'), 0)
def test_negatives(self):
self.assertRaises(ValueError, w2n.word_to_num, '112-')
self.assertRaises(ValueError, w2n.word_to_num, '-')
self.assertRaises(ValueError, w2n.word_to_num, 'on')
self.assertRaises(ValueError, w2n.word_to_num, 'million million')
self.assertRaises(ValueError, w2n.word_to_num, 'three million million')
self.assertRaises(ValueError, w2n.word_to_num, 'million four million')
self.assertRaises(ValueError, w2n.word_to_num, 'thousand million')
self.assertRaises(ValueError, w2n.word_to_num, 'one billion point two million twenty three thousand and forty nine point two three six nine')
self.assertRaises(ValueError, w2n.word_to_num, 112)
if __name__ == '__main__':
unittest.main()
|
akshaynagpal/w2n
|
unit_testing.py
|
Python
|
mit
| 3,007
|
"""
Django settings for project.
Generated by 'django-admin startproject' using Django 1.9.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ne9cqx2k__=1j5(kjrg)9yfjh!%*5tb)5q@fk&u3wsh)*k@dpc'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'channels',
'connect4'
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
#STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
#'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
os.path.join(BASE_DIR, 'connect4', 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Update to Postgres db backend
import dj_database_url
db_from_env = dj_database_url.config()
DATABASES['default'].update(db_from_env)
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-gb'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Channel settings
CHANNEL_LAYERS = {
"default": {
"BACKEND": "asgi_redis.RedisChannelLayer",
"CONFIG": {
"hosts": [os.environ.get('REDIS_URL', 'redis://localhost:6379')],
},
"ROUTING": "app.routing.channel_routing",
},
}
|
chitnguyen169/connect4CaseStudy
|
app/settings.py
|
Python
|
mit
| 4,650
|
import functools
import re
from django.db import connections, connection
from six import text_type
import sqlparse
from . import app_settings
EXPLORER_PARAM_TOKEN = "$$"
# SQL Specific Things
def passes_blacklist(sql):
clean = functools.reduce(lambda sql, term: sql.upper().replace(term, ""), [t.upper() for t in app_settings.EXPLORER_SQL_WHITELIST], sql)
fails = [bl_word for bl_word in app_settings.EXPLORER_SQL_BLACKLIST if bl_word in clean.upper()]
return not any(fails), fails
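# Illustrative behavior (the terms below are assumptions; the real lists live
# in app_settings): if 'DELETE' is blacklisted while the quoted literal
# "'DELETE'" is whitelisted, then
#   passes_blacklist("select * from t where tag = 'delete'")
# strips the whitelisted token before checking, so the query passes.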
def get_connection():
return connections[app_settings.EXPLORER_CONNECTION_NAME] if app_settings.EXPLORER_CONNECTION_NAME else connection
def _format_field(field):
return field.get_attname_column()[1], field.get_internal_type()
def param(name):
return "%s%s%s" % (EXPLORER_PARAM_TOKEN, name, EXPLORER_PARAM_TOKEN)
def swap_params(sql, params):
p = params.items() if params else {}
for k, v in p:
regex = re.compile("\$\$%s(?:\:([^\$]+))?\$\$" % str(k).lower(), re.I)
sql = regex.sub(text_type(v), sql)
return sql
def extract_params(text):
regex = re.compile("\$\$([a-z0-9_]+)(?:\:([^\$]+))?\$\$")
params = re.findall(regex, text.lower())
# We support Python 2.6 so can't use a dict comprehension
return dict(zip([p[0] for p in params], [p[1] if len(p) > 1 else '' for p in params]))
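# Illustrative examples of the $$param$$ token handling above:
#
#   extract_params("select * from t where id = $$id:10$$")   # -> {'id': '10'}
#   swap_params("select * from t where id = $$id$$", {'id': 5})
#   # -> "select * from t where id = 5"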
# Helpers
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.views import login
from django.contrib.auth import REDIRECT_FIELD_NAME
def safe_login_prompt(request):
defaults = {
'template_name': 'admin/login.html',
'authentication_form': AuthenticationForm,
'extra_context': {
'title': 'Log in',
'app_path': request.get_full_path(),
REDIRECT_FIELD_NAME: request.get_full_path(),
},
}
return login(request, **defaults)
def shared_dict_update(target, source):
for k_d1 in target:
if k_d1 in source:
target[k_d1] = source[k_d1]
return target
def safe_cast(val, to_type, default=None):
try:
return to_type(val)
except ValueError:
return default
def get_int_from_request(request, name, default):
val = request.GET.get(name, default)
return safe_cast(val, int, default) if val else None
def get_params_from_request(request):
val = request.GET.get('params', None)
try:
d = {}
tuples = val.split('|')
for t in tuples:
res = t.split(':')
d[res[0]] = res[1]
return d
except Exception:
return None
def get_params_for_url(query):
if query.params:
return '|'.join(['%s:%s' % (p, v) for p, v in query.params.items()])
def url_get_rows(request):
return get_int_from_request(request, 'rows', app_settings.EXPLORER_DEFAULT_ROWS)
def url_get_query_id(request):
return get_int_from_request(request, 'query_id', None)
def url_get_log_id(request):
return get_int_from_request(request, 'querylog_id', None)
def url_get_show(request):
return bool(get_int_from_request(request, 'show', 1))
def url_get_fullscreen(request):
return bool(get_int_from_request(request, 'fullscreen', 0))
def url_get_params(request):
return get_params_from_request(request)
def allowed_query_pks(user_id):
return app_settings.EXPLORER_GET_USER_QUERY_VIEWS().get(user_id, [])
def user_can_see_query(request, **kwargs):
if not request.user.is_anonymous() and 'query_id' in kwargs:
return int(kwargs['query_id']) in allowed_query_pks(request.user.id)
return False
def fmt_sql(sql):
return sqlparse.format(sql, reindent=True, keyword_case='upper')
def noop_decorator(f):
return f
def get_s3_session():
import boto3
return boto3.Session(
aws_access_key_id=app_settings.S3_ACCESS_KEY,
aws_secret_access_key=app_settings.S3_SECRET_KEY
)
def get_s3_bucket():
session = get_s3_session()
return session.resource('s3').Bucket(app_settings.S3_BUCKET)
|
enstrategic/django-sql-explorer
|
explorer/utils.py
|
Python
|
mit
| 4,026
|
import time
import requests
from requests.compat import urljoin
from twitch.conf import backoff_config
from twitch.constants import BASE_URL
DEFAULT_TIMEOUT = 10
class TwitchAPI(object):
"""Twitch API client."""
def __init__(self, client_id, oauth_token=None):
"""Initialize the API."""
super(TwitchAPI, self).__init__()
self._client_id = client_id
self._oauth_token = oauth_token
self._initial_backoff, self._max_retries = backoff_config()
def _get_request_headers(self):
"""Prepare the headers for the requests."""
headers = {
"Accept": "application/vnd.twitchtv.v5+json",
"Client-ID": self._client_id,
}
if self._oauth_token:
headers["Authorization"] = "OAuth {}".format(self._oauth_token)
return headers
def _request_get(self, path, params=None, json=True, url=BASE_URL):
"""Perform a HTTP GET request."""
url = urljoin(url, path)
headers = self._get_request_headers()
        response = requests.get(url, params=params, headers=headers, timeout=DEFAULT_TIMEOUT)
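        # On a 5xx response, retry with exponential backoff: sleep, reissue
        # the request, and double the delay after each attempt, for up to
        # self._max_retries attempts (both knobs come from backoff_config()).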
if response.status_code >= 500:
backoff = self._initial_backoff
for _ in range(self._max_retries):
time.sleep(backoff)
backoff_response = requests.get(
url, params=params, headers=headers, timeout=DEFAULT_TIMEOUT
)
if backoff_response.status_code < 500:
response = backoff_response
break
backoff *= 2
response.raise_for_status()
if json:
return response.json()
else:
return response
def _request_post(self, path, data=None, params=None, url=BASE_URL):
"""Perform a HTTP POST request.."""
url = urljoin(url, path)
headers = self._get_request_headers()
response = requests.post(
url, json=data, params=params, headers=headers, timeout=DEFAULT_TIMEOUT
)
response.raise_for_status()
if response.status_code == 200:
return response.json()
def _request_put(self, path, data=None, params=None, url=BASE_URL):
"""Perform a HTTP PUT request."""
url = urljoin(url, path)
headers = self._get_request_headers()
response = requests.put(
url, json=data, params=params, headers=headers, timeout=DEFAULT_TIMEOUT
)
response.raise_for_status()
if response.status_code == 200:
return response.json()
def _request_delete(self, path, params=None, url=BASE_URL):
"""Perform a HTTP DELETE request."""
url = urljoin(url, path)
headers = self._get_request_headers()
response = requests.delete(
url, params=params, headers=headers, timeout=DEFAULT_TIMEOUT
)
response.raise_for_status()
if response.status_code == 200:
return response.json()
|
tsifrer/python-twitch-client
|
twitch/api/base.py
|
Python
|
mit
| 3,006
|
#!/usr/bin/env python3
'''
Convert debug info for C interpreter debugger.
Usage: tools/gen_debug_info.py src/game/game_debuginfo.h
'''
import sys
import sundog_info
out = sys.stdout
def gen(out):
proclist = sundog_info.load_metadata()
info = []
for k,v in proclist._map.items():
if v.name is not None:
info.append((k, v))
info.sort()
out.write('// clang-format off\n')
for k,v in info:
if v.name is None:
continue
# get name without arguments
name = v.name
if '(' in name:
name = name[0:name.find('(')]
# output
out.write('{ { { { "%s" } }, 0x%x }, "%s" },' % (k[0].decode(), k[1], name))
out.write('\n')
    out.write('// clang-format on\n')
if __name__ == '__main__':
with open(sys.argv[1], 'w') as f:
gen(f)
|
laanwj/sundog
|
tools/gen_debug_info.py
|
Python
|
mit
| 857
|
"""Routines related to PyPI, indexes"""
from __future__ import absolute_import
import cgi
import itertools
import logging
import mimetypes
import os
import posixpath
import re
import sys
from collections import namedtuple
from pip._vendor import html5lib, requests, six
from pip._vendor.distlib.compat import unescape
from pip._vendor.packaging import specifiers
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.requests.exceptions import HTTPError, SSLError
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path
from pip._internal.exceptions import (
BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename,
UnsupportedWheel,
)
from pip._internal.models.candidate import InstallationCandidate
from pip._internal.models.format_control import FormatControl
from pip._internal.models.index import PyPI
from pip._internal.models.link import Link
from pip._internal.pep425tags import get_supported
from pip._internal.utils.compat import ipaddress
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, normalize_path,
remove_auth_from_url,
)
from pip._internal.utils.packaging import check_requires_python
from pip._internal.wheel import Wheel, wheel_ext
__all__ = ['FormatControl', 'PackageFinder']
SECURE_ORIGINS = [
# protocol, hostname, port
# Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
("https", "*", "*"),
("*", "localhost", "*"),
("*", "127.0.0.0/8", "*"),
("*", "::1/128", "*"),
("file", "*", None),
# ssh is always secure.
("ssh", "*", "*"),
]
logger = logging.getLogger(__name__)
def _get_content_type(url, session):
"""Get the Content-Type of the given url, using a HEAD request"""
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
if scheme not in {'http', 'https'}:
# FIXME: some warning or something?
# assertion error?
return ''
resp = session.head(url, allow_redirects=True)
resp.raise_for_status()
return resp.headers.get("Content-Type", "")
def _handle_get_page_fail(link, reason, url, meth=None):
if meth is None:
meth = logger.debug
meth("Could not fetch URL %s: %s - skipping", link, reason)
def _get_html_page(link, session=None):
if session is None:
raise TypeError(
"_get_html_page() missing 1 required keyword argument: 'session'"
)
url = link.url
url = url.split('#', 1)[0]
# Check for VCS schemes that do not support lookup as web pages.
from pip._internal.vcs import VcsSupport
for scheme in VcsSupport.schemes:
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
logger.debug('Cannot look at %s URL %s', scheme, link)
return None
try:
filename = link.filename
for bad_ext in ARCHIVE_EXTENSIONS:
if filename.endswith(bad_ext):
content_type = _get_content_type(url, session=session)
if content_type.lower().startswith('text/html'):
break
else:
logger.debug(
'Skipping page %s because of Content-Type: %s',
link,
content_type,
)
return
logger.debug('Getting page %s', url)
# Tack index.html onto file:// URLs that point to directories
(scheme, netloc, path, params, query, fragment) = \
urllib_parse.urlparse(url)
if (scheme == 'file' and
os.path.isdir(urllib_request.url2pathname(path))):
# add trailing slash if not present so urljoin doesn't trim
# final segment
if not url.endswith('/'):
url += '/'
url = urllib_parse.urljoin(url, 'index.html')
logger.debug(' file: URL is directory, getting %s', url)
resp = session.get(
url,
headers={
"Accept": "text/html",
                # We don't want to blindly return cached data for
                # /simple/, because authors generally expect that
# twine upload && pip install will function, but if
# they've done a pip install in the last ~10 minutes
# it won't. Thus by setting this to zero we will not
# blindly use any cached data, however the benefit of
# using max-age=0 instead of no-cache, is that we will
# still support conditional requests, so we will still
# minimize traffic sent in cases where the page hasn't
# changed at all, we will just always incur the round
# trip for the conditional GET now instead of only
# once per 10 minutes.
# For more information, please see pypa/pip#5670.
"Cache-Control": "max-age=0",
},
)
resp.raise_for_status()
# The check for archives above only works if the url ends with
# something that looks like an archive. However that is not a
        # requirement of a url. Unless we issue a HEAD request on every
# url we cannot know ahead of time for sure if something is HTML
# or not. However we can check after we've downloaded it.
content_type = resp.headers.get('Content-Type', 'unknown')
if not content_type.lower().startswith("text/html"):
logger.debug(
'Skipping page %s because of Content-Type: %s',
link,
content_type,
)
return
inst = HTMLPage(resp.content, resp.url, resp.headers)
except HTTPError as exc:
_handle_get_page_fail(link, exc, url)
except SSLError as exc:
reason = "There was a problem confirming the ssl certificate: "
reason += str(exc)
_handle_get_page_fail(link, reason, url, meth=logger.info)
except requests.ConnectionError as exc:
_handle_get_page_fail(link, "connection error: %s" % exc, url)
except requests.Timeout:
_handle_get_page_fail(link, "timed out", url)
else:
return inst
class PackageFinder(object):
"""This finds packages.
This is meant to match easy_install's technique for looking for
packages, by reading pages and looking for appropriate links.
"""
def __init__(self, find_links, index_urls, allow_all_prereleases=False,
trusted_hosts=None, process_dependency_links=False,
session=None, format_control=None, platform=None,
versions=None, abi=None, implementation=None,
prefer_binary=False):
"""Create a PackageFinder.
:param format_control: A FormatControl object or None. Used to control
the selection of source packages / binary packages when consulting
the index and links.
:param platform: A string or None. If None, searches for packages
that are supported by the current system. Otherwise, will find
packages that can be built on the platform passed in. These
packages will only be downloaded for distribution: they will
not be built locally.
:param versions: A list of strings or None. This is passed directly
to pep425tags.py in the get_supported() method.
:param abi: A string or None. This is passed directly
to pep425tags.py in the get_supported() method.
:param implementation: A string or None. This is passed directly
to pep425tags.py in the get_supported() method.
"""
if session is None:
raise TypeError(
"PackageFinder() missing 1 required keyword argument: "
"'session'"
)
# Build find_links. If an argument starts with ~, it may be
# a local file relative to a home directory. So try normalizing
# it and if it exists, use the normalized version.
# This is deliberately conservative - it might be fine just to
# blindly normalize anything starting with a ~...
self.find_links = []
for link in find_links:
if link.startswith('~'):
new_link = normalize_path(link)
if os.path.exists(new_link):
link = new_link
self.find_links.append(link)
self.index_urls = index_urls
self.dependency_links = []
# These are boring links that have already been logged somehow:
self.logged_links = set()
self.format_control = format_control or FormatControl(set(), set())
# Domains that we won't emit warnings for when not using HTTPS
self.secure_origins = [
("*", host, "*")
for host in (trusted_hosts if trusted_hosts else [])
]
# Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases
# Do we process dependency links?
self.process_dependency_links = process_dependency_links
# The Session we'll use to make requests
self.session = session
# The valid tags to check potential found wheel candidates against
self.valid_tags = get_supported(
versions=versions,
platform=platform,
abi=abi,
impl=implementation,
)
# Do we prefer old, but valid, binary dist over new source dist
self.prefer_binary = prefer_binary
# If we don't have TLS enabled, then WARN if anyplace we're looking
# relies on TLS.
if not HAS_TLS:
for link in itertools.chain(self.index_urls, self.find_links):
parsed = urllib_parse.urlparse(link)
if parsed.scheme == "https":
logger.warning(
"pip is configured with locations that require "
"TLS/SSL, however the ssl module in Python is not "
"available."
)
break
def get_formatted_locations(self):
lines = []
if self.index_urls and self.index_urls != [PyPI.simple_url]:
lines.append(
"Looking in indexes: {}".format(", ".join(
remove_auth_from_url(url) for url in self.index_urls))
)
if self.find_links:
lines.append(
"Looking in links: {}".format(", ".join(self.find_links))
)
return "\n".join(lines)
def add_dependency_links(self, links):
# FIXME: this shouldn't be global list this, it should only
# apply to requirements of the package that specifies the
# dependency_links value
# FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
deprecated(
"Dependency Links processing has been deprecated and will be "
"removed in a future release.",
replacement="PEP 508 URL dependencies",
gone_in="18.2",
issue=4187,
)
self.dependency_links.extend(links)
@staticmethod
def _sort_locations(locations, expand_dir=False):
"""
Sort locations into "files" (archives) and "urls", and return
a pair of lists (files,urls)
"""
files = []
urls = []
# puts the url for the given file path into the appropriate list
def sort_path(path):
url = path_to_url(path)
if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
urls.append(url)
else:
files.append(url)
for url in locations:
is_local_path = os.path.exists(url)
is_file_url = url.startswith('file:')
if is_local_path or is_file_url:
if is_local_path:
path = url
else:
path = url_to_path(url)
if os.path.isdir(path):
if expand_dir:
path = os.path.realpath(path)
for item in os.listdir(path):
sort_path(os.path.join(path, item))
elif is_file_url:
urls.append(url)
elif os.path.isfile(path):
sort_path(path)
else:
logger.warning(
"Url '%s' is ignored: it is neither a file "
"nor a directory.", url,
)
elif is_url(url):
# Only add url with clear scheme
urls.append(url)
else:
logger.warning(
"Url '%s' is ignored. It is either a non-existing "
"path or lacks a specific scheme.", url,
)
return files, urls
def _candidate_sort_key(self, candidate):
"""
Function used to generate link sort key for link tuples.
The greater the return value, the more preferred it is.
If not finding wheels, then sorted by version only.
If finding wheels, then the sort order is by version, then:
1. existing installs
2. wheels ordered via Wheel.support_index_min(self.valid_tags)
3. source archives
If prefer_binary was set, then all wheels are sorted above sources.
Note: it was considered to embed this logic into the Link
comparison operators, but then different sdist links
with the same version, would have to be considered equal
"""
support_num = len(self.valid_tags)
build_tag = tuple()
binary_preference = 0
if candidate.location.is_wheel:
# can raise InvalidWheelFilename
wheel = Wheel(candidate.location.filename)
if not wheel.supported(self.valid_tags):
raise UnsupportedWheel(
"%s is not a supported wheel for this platform. It "
"can't be sorted." % wheel.filename
)
if self.prefer_binary:
binary_preference = 1
pri = -(wheel.support_index_min(self.valid_tags))
if wheel.build_tag is not None:
match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
build_tag_groups = match.groups()
build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
else: # sdist
pri = -(support_num)
return (binary_preference, candidate.version, build_tag, pri)
def _validate_secure_origin(self, logger, location):
# Determine if this url used a secure transport mechanism
parsed = urllib_parse.urlparse(str(location))
origin = (parsed.scheme, parsed.hostname, parsed.port)
# The protocol to use to see if the protocol matches.
# Don't count the repository type as part of the protocol: in
# cases such as "git+ssh", only use "ssh". (I.e., Only verify against
# the last scheme.)
protocol = origin[0].rsplit('+', 1)[-1]
# Determine if our origin is a secure origin by looking through our
# hardcoded list of secure origins, as well as any additional ones
# configured on this PackageFinder instance.
for secure_origin in (SECURE_ORIGINS + self.secure_origins):
if protocol != secure_origin[0] and secure_origin[0] != "*":
continue
try:
# We need to do this decode dance to ensure that we have a
# unicode object, even on Python 2.x.
addr = ipaddress.ip_address(
origin[1]
if (
isinstance(origin[1], six.text_type) or
origin[1] is None
)
else origin[1].decode("utf8")
)
network = ipaddress.ip_network(
secure_origin[1]
if isinstance(secure_origin[1], six.text_type)
else secure_origin[1].decode("utf8")
)
except ValueError:
# We don't have both a valid address or a valid network, so
# we'll check this origin against hostnames.
if (origin[1] and
origin[1].lower() != secure_origin[1].lower() and
secure_origin[1] != "*"):
continue
else:
# We have a valid address and network, so see if the address
# is contained within the network.
if addr not in network:
continue
            # Check to see if the port matches
if (origin[2] != secure_origin[2] and
secure_origin[2] != "*" and
secure_origin[2] is not None):
continue
# If we've gotten here, then this origin matches the current
# secure origin and we should return True
return True
# If we've gotten to this point, then the origin isn't secure and we
# will not accept it as a valid location to search. We will however
# log a warning that we are ignoring it.
logger.warning(
"The repository located at %s is not a trusted or secure host and "
"is being ignored. If this repository is available via HTTPS we "
"recommend you use HTTPS instead, otherwise you may silence "
"this warning and allow it anyway with '--trusted-host %s'.",
parsed.hostname,
parsed.hostname,
)
return False
def _get_index_urls_locations(self, project_name):
"""Returns the locations found via self.index_urls
Checks the url_name on the main (first in the list) index and
use this url_name to produce all locations
"""
def mkurl_pypi_url(url):
loc = posixpath.join(
url,
urllib_parse.quote(canonicalize_name(project_name)))
# For maximum compatibility with easy_install, ensure the path
# ends in a trailing slash. Although this isn't in the spec
# (and PyPI can handle it without the slash) some other index
# implementations might break if they relied on easy_install's
# behavior.
if not loc.endswith('/'):
loc = loc + '/'
return loc
return [mkurl_pypi_url(url) for url in self.index_urls]
def find_all_candidates(self, project_name):
"""Find all available InstallationCandidate for project_name
This checks index_urls, find_links and dependency_links.
All versions found are returned as an InstallationCandidate list.
See _link_package_versions for details on which files are accepted
"""
index_locations = self._get_index_urls_locations(project_name)
index_file_loc, index_url_loc = self._sort_locations(index_locations)
fl_file_loc, fl_url_loc = self._sort_locations(
self.find_links, expand_dir=True,
)
dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)
file_locations = (Link(url) for url in itertools.chain(
index_file_loc, fl_file_loc, dep_file_loc,
))
# We trust every url that the user has given us whether it was given
# via --index-url or --find-links
# We explicitly do not trust links that came from dependency_links
# We want to filter out any thing which does not have a secure origin.
url_locations = [
link for link in itertools.chain(
(Link(url) for url in index_url_loc),
(Link(url) for url in fl_url_loc),
(Link(url) for url in dep_url_loc),
)
if self._validate_secure_origin(logger, link)
]
logger.debug('%d location(s) to search for versions of %s:',
len(url_locations), project_name)
for location in url_locations:
logger.debug('* %s', location)
canonical_name = canonicalize_name(project_name)
formats = self.format_control.get_allowed_formats(canonical_name)
search = Search(project_name, canonical_name, formats)
find_links_versions = self._package_versions(
# We trust every directly linked archive in find_links
(Link(url, '-f') for url in self.find_links),
search
)
page_versions = []
for page in self._get_pages(url_locations, project_name):
logger.debug('Analyzing links from page %s', page.url)
with indent_log():
page_versions.extend(
self._package_versions(page.iter_links(), search)
)
dependency_versions = self._package_versions(
(Link(url) for url in self.dependency_links), search
)
if dependency_versions:
logger.debug(
'dependency_links found: %s',
', '.join([
version.location.url for version in dependency_versions
])
)
file_versions = self._package_versions(file_locations, search)
if file_versions:
file_versions.sort(reverse=True)
logger.debug(
'Local files found: %s',
', '.join([
url_to_path(candidate.location.url)
for candidate in file_versions
])
)
# This is an intentional priority ordering
return (
file_versions + find_links_versions + page_versions +
dependency_versions
)
def find_requirement(self, req, upgrade):
"""Try to find a Link matching req
Expects req, an InstallRequirement and upgrade, a boolean
Returns a Link if found,
Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
"""
all_candidates = self.find_all_candidates(req.name)
# Filter out anything which doesn't match our specifier
compatible_versions = set(
req.specifier.filter(
# We turn the version object into a str here because otherwise
# when we're debundled but setuptools isn't, Python will see
# packaging.version.Version and
# pkg_resources._vendor.packaging.version.Version as different
# types. This way we'll use a str as a common data interchange
# format. If we stop using the pkg_resources provided specifier
# and start using our own, we can drop the cast to str().
[str(c.version) for c in all_candidates],
prereleases=(
self.allow_all_prereleases
if self.allow_all_prereleases else None
),
)
)
applicable_candidates = [
# Again, converting to str to deal with debundling.
c for c in all_candidates if str(c.version) in compatible_versions
]
if applicable_candidates:
best_candidate = max(applicable_candidates,
key=self._candidate_sort_key)
else:
best_candidate = None
if req.satisfied_by is not None:
installed_version = parse_version(req.satisfied_by.version)
else:
installed_version = None
if installed_version is None and best_candidate is None:
logger.critical(
'Could not find a version that satisfies the requirement %s '
'(from versions: %s)',
req,
', '.join(
sorted(
{str(c.version) for c in all_candidates},
key=parse_version,
)
)
)
raise DistributionNotFound(
'No matching distribution found for %s' % req
)
best_installed = False
if installed_version and (
best_candidate is None or
best_candidate.version <= installed_version):
best_installed = True
if not upgrade and installed_version is not None:
if best_installed:
logger.debug(
'Existing installed version (%s) is most up-to-date and '
'satisfies requirement',
installed_version,
)
else:
logger.debug(
'Existing installed version (%s) satisfies requirement '
'(most up-to-date version is %s)',
installed_version,
best_candidate.version,
)
return None
if best_installed:
            # We have an existing version, and it's the best version
logger.debug(
'Installed version (%s) is most up-to-date (past versions: '
'%s)',
installed_version,
', '.join(sorted(compatible_versions, key=parse_version)) or
"none",
)
raise BestVersionAlreadyInstalled
logger.debug(
'Using version %s (newest of versions: %s)',
best_candidate.version,
', '.join(sorted(compatible_versions, key=parse_version))
)
return best_candidate.location
def _get_pages(self, locations, project_name):
"""
        Yields HTMLPage objects from the given locations, skipping
locations that have errors.
"""
seen = set()
for location in locations:
if location in seen:
continue
seen.add(location)
page = self._get_page(location)
if page is None:
continue
yield page
_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
def _sort_links(self, links):
"""
Returns elements of links in order, non-egg links first, egg links
second, while eliminating duplicates
"""
eggs, no_eggs = [], []
seen = set()
for link in links:
if link not in seen:
seen.add(link)
if link.egg_fragment:
eggs.append(link)
else:
no_eggs.append(link)
return no_eggs + eggs
def _package_versions(self, links, search):
result = []
for link in self._sort_links(links):
v = self._link_package_versions(link, search)
if v is not None:
result.append(v)
return result
def _log_skipped_link(self, link, reason):
if link not in self.logged_links:
logger.debug('Skipping link %s; %s', link, reason)
self.logged_links.add(link)
def _link_package_versions(self, link, search):
"""Return an InstallationCandidate or None"""
version = None
if link.egg_fragment:
egg_info = link.egg_fragment
ext = link.ext
else:
egg_info, ext = link.splitext()
if not ext:
self._log_skipped_link(link, 'not a file')
return
if ext not in SUPPORTED_EXTENSIONS:
self._log_skipped_link(
link, 'unsupported archive format: %s' % ext,
)
return
if "binary" not in search.formats and ext == wheel_ext:
self._log_skipped_link(
link, 'No binaries permitted for %s' % search.supplied,
)
return
if "macosx10" in link.path and ext == '.zip':
self._log_skipped_link(link, 'macosx10 one')
return
if ext == wheel_ext:
try:
wheel = Wheel(link.filename)
except InvalidWheelFilename:
self._log_skipped_link(link, 'invalid wheel filename')
return
if canonicalize_name(wheel.name) != search.canonical:
self._log_skipped_link(
link, 'wrong project name (not %s)' % search.supplied)
return
if not wheel.supported(self.valid_tags):
self._log_skipped_link(
link, 'it is not compatible with this Python')
return
version = wheel.version
# This should be up by the search.ok_binary check, but see issue 2700.
if "source" not in search.formats and ext != wheel_ext:
self._log_skipped_link(
link, 'No sources permitted for %s' % search.supplied,
)
return
if not version:
version = egg_info_matches(egg_info, search.supplied, link)
if version is None:
self._log_skipped_link(
link, 'Missing project version for %s' % search.supplied)
return
match = self._py_version_re.search(version)
if match:
version = version[:match.start()]
py_version = match.group(1)
if py_version != sys.version[:3]:
self._log_skipped_link(
link, 'Python version is incorrect')
return
try:
support_this_python = check_requires_python(link.requires_python)
except specifiers.InvalidSpecifier:
logger.debug("Package %s has an invalid Requires-Python entry: %s",
link.filename, link.requires_python)
support_this_python = True
if not support_this_python:
logger.debug("The package %s is incompatible with the python"
"version in use. Acceptable python versions are:%s",
link, link.requires_python)
return
logger.debug('Found link %s, version: %s', link, version)
return InstallationCandidate(search.supplied, version, link)
def _get_page(self, link):
return _get_html_page(link, session=self.session)
def egg_info_matches(
egg_info, search_name, link,
_egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
"""Pull the version part out of a string.
:param egg_info: The string to parse. E.g. foo-2.1
:param search_name: The name of the package this belongs to. None to
infer the name. Note that this cannot unambiguously parse strings
like foo-2-2 which might be foo, 2-2 or foo-2, 2.
:param link: The link the string came from, for logging on failure.
"""
match = _egg_info_re.search(egg_info)
if not match:
logger.debug('Could not parse version from link: %s', link)
return None
if search_name is None:
full_match = match.group(0)
return full_match.split('-', 1)[-1]
name = match.group(0).lower()
# To match the "safe" name that pkg_resources creates:
name = name.replace('_', '-')
# project name and version must be separated by a dash
look_for = search_name.lower() + "-"
if name.startswith(look_for):
return match.group(0)[len(look_for):]
else:
return None
def _determine_base_url(document, page_url):
"""Determine the HTML document's base URL.
This looks for a ``<base>`` tag in the HTML document. If present, its href
attribute denotes the base URL of anchor tags in the document. If there is
no such tag (or if it does not have a valid href attribute), the HTML
file's URL is used as the base URL.
:param document: An HTML document representation. The current
implementation expects the result of ``html5lib.parse()``.
:param page_url: The URL of the HTML document.
"""
for base in document.findall(".//base"):
href = base.get("href")
if href is not None:
return href
return page_url
def _get_encoding_from_headers(headers):
"""Determine if we have any encoding information in our headers.
"""
if headers and "Content-Type" in headers:
content_type, params = cgi.parse_header(headers["Content-Type"])
if "charset" in params:
return params['charset']
return None
_CLEAN_LINK_RE = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
def _clean_link(url):
"""Makes sure a link is fully encoded. That is, if a ' ' shows up in
the link, it will be rewritten to %20 (while not over-quoting
% or other characters)."""
return _CLEAN_LINK_RE.sub(lambda match: '%%%2x' % ord(match.group(0)), url)
class HTMLPage(object):
"""Represents one page, along with its URL"""
def __init__(self, content, url, headers=None):
self.content = content
self.url = url
self.headers = headers
def __str__(self):
return self.url
def iter_links(self):
"""Yields all links in the page"""
document = html5lib.parse(
self.content,
transport_encoding=_get_encoding_from_headers(self.headers),
namespaceHTMLElements=False,
)
base_url = _determine_base_url(document, self.url)
for anchor in document.findall(".//a"):
if anchor.get("href"):
href = anchor.get("href")
url = _clean_link(urllib_parse.urljoin(base_url, href))
pyrequire = anchor.get('data-requires-python')
pyrequire = unescape(pyrequire) if pyrequire else None
yield Link(url, self.url, requires_python=pyrequire)
Search = namedtuple('Search', 'supplied canonical formats')
"""Capture key aspects of a search.
:attribute supplied: The user supplied package.
:attribute canonical: The canonical package name.
:attribute formats: The formats allowed for this package. Should be a set
with 'binary' or 'source' or both in it.
"""
|
TeamSPoon/logicmoo_workspace
|
packs_web/butterfly/lib/python3.7/site-packages/pip/_internal/index.py
|
Python
|
mit
| 34,791
|
import argparse
import os
import requests
API_URL = "https://api.assemblyai.com/v2/"
def get_transcription(transcription_id):
"""Requests the transcription from the API and returns the JSON
response."""
endpoint = "".join([API_URL, "transcript/{}".format(transcription_id)])
headers = {"authorization": os.getenv('ASSEMBLYAI_KEY')}
response = requests.get(endpoint, headers=headers)
return response.json()
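# Example invocation (the transcription id below is a made-up placeholder):
#   export ASSEMBLYAI_KEY=your-api-key
#   python get_transcription.py 5f3a9c2e-example-transcript-id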
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("transcription_id")
args = parser.parse_args()
transcription_id = args.transcription_id
response_json = get_transcription(transcription_id)
print(response_json)
if response_json['status'] == "completed":
for word in response_json['words']:
print(word['text'], end=" ")
else:
print("current status of transcription request: {}".format(
response_json['status']))
|
fullstackpython/blog-code-examples
|
transcribe-speech-text-script/get_transcription.py
|
Python
|
mit
| 953
|
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import OrderNotFound
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class ndax(Exchange):
def describe(self):
return self.deep_extend(super(ndax, self).describe(), {
'id': 'ndax',
'name': 'NDAX',
'countries': ['CA'], # Canada
'rateLimit': 1000,
'pro': True,
'has': {
'CORS': None,
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'addMargin': False,
'cancelAllOrders': True,
'cancelOrder': True,
'createDepositAddress': True,
'createOrder': True,
'createReduceOnlyOrder': False,
'editOrder': True,
'fetchAccounts': True,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchBorrowRatesPerSymbol': False,
'fetchCurrencies': True,
'fetchDepositAddress': True,
'fetchDeposits': True,
'fetchFundingHistory': False,
'fetchFundingRate': False,
'fetchFundingRateHistory': False,
'fetchFundingRates': False,
'fetchIndexOHLCV': False,
'fetchIsolatedPositions': False,
'fetchLedger': True,
'fetchLeverage': False,
'fetchLeverageTiers': False,
'fetchMarkets': True,
'fetchMarkOHLCV': False,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchOrderTrades': True,
'fetchPosition': False,
'fetchPositions': False,
'fetchPositionsRisk': False,
'fetchPremiumIndexOHLCV': False,
'fetchTicker': True,
'fetchTrades': True,
'fetchWithdrawals': True,
'reduceMargin': False,
'setLeverage': False,
'setMarginMode': False,
'setPositionMode': False,
'signIn': True,
'withdraw': True,
},
'timeframes': {
'1m': '60',
'5m': '300',
'15m': '900',
'30m': '1800',
'1h': '3600',
'2h': '7200',
'4h': '14400',
'6h': '21600',
'12h': '43200',
'1d': '86400',
'1w': '604800',
'1M': '2419200',
'4M': '9676800',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/108623144-67a3ef00-744e-11eb-8140-75c6b851e945.jpg',
'test': {
'public': 'https://ndaxmarginstaging.cdnhop.net:8443/AP',
'private': 'https://ndaxmarginstaging.cdnhop.net:8443/AP',
},
'api': {
'public': 'https://api.ndax.io:8443/AP',
'private': 'https://api.ndax.io:8443/AP',
},
'www': 'https://ndax.io',
'doc': [
'https://apidoc.ndax.io/',
],
'fees': 'https://ndax.io/fees',
'referral': 'https://one.ndax.io/bfQiSL',
},
'api': {
'public': {
'get': [
'Activate2FA',
'Authenticate2FA',
'AuthenticateUser',
'GetL2Snapshot',
'GetLevel1',
'GetValidate2FARequiredEndpoints',
'LogOut',
'GetTickerHistory',
'GetProduct',
'GetProducts',
'GetInstrument',
'GetInstruments',
'Ping',
'trades', # undocumented
'GetLastTrades', # undocumented
'SubscribeLevel1',
'SubscribeLevel2',
'SubscribeTicker',
'SubscribeTrades',
'SubscribeBlockTrades',
'UnsubscribeBlockTrades',
'UnsubscribeLevel1',
'UnsubscribeLevel2',
'UnsubscribeTicker',
'UnsubscribeTrades',
'Authenticate', # undocumented
],
},
'private': {
'get': [
'GetUserAccountInfos',
'GetUserAccounts',
'GetUserAffiliateCount',
'GetUserAffiliateTag',
'GetUserConfig',
'GetAllUnredactedUserConfigsForUser',
'GetUnredactedUserConfigByKey',
'GetUserDevices',
'GetUserReportTickets',
'GetUserReportWriterResultRecords',
'GetAccountInfo',
'GetAccountPositions',
'GetAllAccountConfigs',
'GetTreasuryProductsForAccount',
'GetAccountTrades',
'GetAccountTransactions',
'GetOpenTradeReports',
'GetAllOpenTradeReports',
'GetTradesHistory',
'GetOpenOrders',
'GetOpenQuotes',
'GetOrderFee',
'GetOrderHistory',
'GetOrdersHistory',
'GetOrderStatus',
'GetOmsFeeTiers',
'GetAccountDepositTransactions',
'GetAccountWithdrawTransactions',
'GetAllDepositRequestInfoTemplates',
'GetDepositInfo',
'GetDepositRequestInfoTemplate',
'GetDeposits',
'GetDepositTicket',
'GetDepositTickets',
'GetOMSWithdrawFees',
'GetWithdrawFee',
'GetWithdraws',
'GetWithdrawTemplate',
'GetWithdrawTemplateTypes',
'GetWithdrawTicket',
'GetWithdrawTickets',
],
'post': [
'AddUserAffiliateTag',
'CancelUserReport',
'RegisterNewDevice',
'SubscribeAccountEvents',
'UpdateUserAffiliateTag',
'GenerateTradeActivityReport',
'GenerateTransactionActivityReport',
'GenerateTreasuryActivityReport',
'ScheduleTradeActivityReport',
'ScheduleTransactionActivityReport',
'ScheduleTreasuryActivityReport',
'CancelAllOrders',
'CancelOrder',
'CancelQuote',
'CancelReplaceOrder',
'CreateQuote',
'ModifyOrder',
'SendOrder',
'SubmitBlockTrade',
'UpdateQuote',
'CancelWithdraw',
'CreateDepositTicket',
'CreateWithdrawTicket',
'SubmitDepositTicketComment',
'SubmitWithdrawTicketComment',
'GetOrderHistoryByOrderId',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.2 / 100,
'taker': 0.25 / 100,
},
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'uid': True,
# these credentials are required for signIn() and withdraw()
'login': True,
'password': True,
# 'twofa': True,
},
'precisionMode': TICK_SIZE,
'exceptions': {
'exact': {
'Not_Enough_Funds': InsufficientFunds, # {"status":"Rejected","errormsg":"Not_Enough_Funds","errorcode":101}
'Server Error': ExchangeError, # {"result":false,"errormsg":"Server Error","errorcode":102,"detail":null}
'Resource Not Found': OrderNotFound, # {"result":false,"errormsg":"Resource Not Found","errorcode":104,"detail":null}
},
'broad': {
'Invalid InstrumentId': BadSymbol, # {"result":false,"errormsg":"Invalid InstrumentId: 10000","errorcode":100,"detail":null}
'This endpoint requires 2FACode along with the payload': AuthenticationError,
},
},
'options': {
'omsId': 1,
'orderTypes': {
'Market': 1,
'Limit': 2,
'StopMarket': 3,
'StopLimit': 4,
'TrailingStopMarket': 5,
'TrailingStopLimit': 6,
'BlockTrade': 7,
},
},
})
def sign_in(self, params={}):
self.check_required_credentials()
if self.login is None or self.password is None:
raise AuthenticationError(self.id + ' signIn() requires exchange.login, exchange.password')
request = {
'grant_type': 'client_credentials', # the only supported value
}
response = self.publicGetAuthenticate(self.extend(request, params))
#
# {
# "Authenticated":true,
# "Requires2FA":true,
# "AuthType":"Google",
# "AddtlInfo":"",
# "Pending2FaToken": "6f5c4e66-f3ee-493e-9227-31cc0583b55f"
# }
#
sessionToken = self.safe_string(response, 'SessionToken')
if sessionToken is not None:
self.options['sessionToken'] = sessionToken
return response
pending2faToken = self.safe_string(response, 'Pending2FaToken')
if pending2faToken is not None:
if self.twofa is None:
raise AuthenticationError(self.id + ' signIn() requires exchange.twofa credentials')
self.options['pending2faToken'] = pending2faToken
request = {
'Code': self.oath(),
}
response = self.publicGetAuthenticate2FA(self.extend(request, params))
#
# {
# "Authenticated": True,
# "UserId":57765,
# "SessionToken":"4a2a5857-c4e5-4fac-b09e-2c4c30b591a0"
# }
#
sessionToken = self.safe_string(response, 'SessionToken')
self.options['sessionToken'] = sessionToken
return response
return response
def fetch_currencies(self, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
request = {
'omsId': omsId,
}
response = self.publicGetGetProducts(self.extend(request, params))
#
# [
# {
# "OMSId":1,
# "ProductId":1,
# "Product":"BTC",
# "ProductFullName":"Bitcoin",
# "ProductType":"CryptoCurrency",
# "DecimalPlaces":8,
# "TickSize":0.0000000100000000000000000000,
# "NoFees":false,
# "IsDisabled":false,
# "MarginEnabled":false
# },
# ]
#
result = {}
for i in range(0, len(response)):
currency = response[i]
id = self.safe_string(currency, 'ProductId')
name = self.safe_string(currency, 'ProductFullName')
type = self.safe_string(currency, 'ProductType')
code = self.safe_currency_code(self.safe_string(currency, 'Product'))
precision = self.safe_number(currency, 'TickSize')
isDisabled = self.safe_value(currency, 'IsDisabled')
active = not isDisabled
result[code] = {
'id': id,
'name': name,
'code': code,
'type': type,
'precision': precision,
'info': currency,
'active': active,
'deposit': None,
'withdraw': None,
'fee': None,
'limits': {
'amount': {
'min': None,
'max': None,
},
'withdraw': {
'min': None,
'max': None,
},
},
}
return result
def fetch_markets(self, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
request = {
'omsId': omsId,
}
response = self.publicGetGetInstruments(self.extend(request, params))
#
# [
# {
# "OMSId":1,
# "InstrumentId":3,
# "Symbol":"LTCBTC",
# "Product1":3,
# "Product1Symbol":"LTC",
# "Product2":1,
# "Product2Symbol":"BTC",
# "InstrumentType":"Standard",
# "VenueInstrumentId":3,
# "VenueId":1,
# "SortIndex":0,
# "SessionStatus":"Running",
# "PreviousSessionStatus":"Stopped",
# "SessionStatusDateTime":"2020-11-25T19:42:15.245Z",
# "SelfTradePrevention":true,
# "QuantityIncrement":0.0000000100000000000000000000,
# "PriceIncrement":0.0000000100000000000000000000,
# "MinimumQuantity":0.0100000000000000000000000000,
# "MinimumPrice":0.0000010000000000000000000000,
# "VenueSymbol":"LTCBTC",
# "IsDisable":false,
# "MasterDataId":0,
# "PriceCollarThreshold":0.0000000000000000000000000000,
# "PriceCollarPercent":0.0000000000000000000000000000,
# "PriceCollarEnabled":false,
# "PriceFloorLimit":0.0000000000000000000000000000,
# "PriceFloorLimitEnabled":false,
# "PriceCeilingLimit":0.0000000000000000000000000000,
# "PriceCeilingLimitEnabled":false,
# "CreateWithMarketRunning":true,
# "AllowOnlyMarketMakerCounterParty":false,
# "PriceCollarIndexDifference":0.0000000000000000000000000000,
# "PriceCollarConvertToOtcEnabled":false,
# "PriceCollarConvertToOtcClientUserId":0,
# "PriceCollarConvertToOtcAccountId":0,
# "PriceCollarConvertToOtcThreshold":0.0000000000000000000000000000,
# "OtcConvertSizeThreshold":0.0000000000000000000000000000,
# "OtcConvertSizeEnabled":false,
# "OtcTradesPublic":true,
# "PriceTier":0
# },
# ]
#
result = []
for i in range(0, len(response)):
market = response[i]
id = self.safe_string(market, 'InstrumentId')
# lowercaseId = self.safe_string_lower(market, 'symbol')
baseId = self.safe_string(market, 'Product1')
quoteId = self.safe_string(market, 'Product2')
base = self.safe_currency_code(self.safe_string(market, 'Product1Symbol'))
quote = self.safe_currency_code(self.safe_string(market, 'Product2Symbol'))
sessionStatus = self.safe_string(market, 'SessionStatus')
isDisable = self.safe_value(market, 'IsDisable')
sessionRunning = (sessionStatus == 'Running')
result.append({
'id': id,
'symbol': base + '/' + quote,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'active': (sessionRunning and not isDisable),
'contract': False,
'linear': None,
'inverse': None,
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.safe_number(market, 'QuantityIncrement'),
'price': self.safe_number(market, 'PriceIncrement'),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.safe_number(market, 'MinimumQuantity'),
'max': None,
},
'price': {
'min': self.safe_number(market, 'MinimumPrice'),
'max': None,
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
})
return result
def parse_order_book(self, orderbook, symbol, timestamp=None, bidsKey='bids', asksKey='asks', priceKey=6, amountKey=8):
nonce = None
result = {
'symbol': symbol,
'bids': [],
'asks': [],
'timestamp': None,
'datetime': None,
'nonce': None,
}
for i in range(0, len(orderbook)):
level = orderbook[i]
if timestamp is None:
timestamp = self.safe_integer(level, 2)
else:
newTimestamp = self.safe_integer(level, 2)
timestamp = max(timestamp, newTimestamp)
if nonce is None:
nonce = self.safe_integer(level, 0)
else:
newNonce = self.safe_integer(level, 0)
nonce = max(nonce, newNonce)
bidask = self.parse_bid_ask(level, priceKey, amountKey)
levelSide = self.safe_integer(level, 9)
side = asksKey if levelSide else bidsKey
result[side].append(bidask)
result['bids'] = self.sort_by(result['bids'], 0, True)
result['asks'] = self.sort_by(result['asks'], 0)
result['timestamp'] = timestamp
result['datetime'] = self.iso8601(timestamp)
result['nonce'] = nonce
return result
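    # A worked sketch of how one raw level-2 row feeds the parser above, using
    # the default keys (price at index 6, quantity at index 8, side at index 9,
    # timestamp at index 2, MDUpdateId/nonce at index 0):
    #
    #     level = [97244115, 1, 1607456142963, 0, 19069.32, 1, 19069.31, 8, 0.140095, 0]
    #     # index 9 == 0 -> appended to result['bids'] as [19069.31, 0.140095]
    #     # index 9 == 1 -> appended to result['asks'] instead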
def fetch_order_book(self, symbol, limit=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
market = self.market(symbol)
limit = 100 if (limit is None) else limit # default 100
request = {
'omsId': omsId,
'InstrumentId': market['id'],
'Depth': limit, # default 100
}
response = self.publicGetGetL2Snapshot(self.extend(request, params))
#
# [
# [
# 0, # 0 MDUpdateId
# 1, # 1 Number of Unique Accounts
# 123, # 2 ActionDateTime in Posix format X 1000
# 0, # 3 ActionType 0(New), 1(Update), 2(Delete)
# 0.0, # 4 LastTradePrice
# 0, # 5 Number of Orders
# 0.0, # 6 Price
# 0, # 7 ProductPairCode
# 0.0, # 8 Quantity
# 0, # 9 Side
# ],
# [97244115,1,1607456142963,0,19069.32,1,19069.31,8,0.140095,0],
# [97244115,0,1607456142963,0,19069.32,1,19068.64,8,0.0055,0],
# [97244115,0,1607456142963,0,19069.32,1,19068.26,8,0.021291,0],
# [97244115,1,1607456142964,0,19069.32,1,19069.32,8,0.099636,1],
# [97244115,0,1607456142964,0,19069.32,1,19069.98,8,0.1,1],
# [97244115,0,1607456142964,0,19069.32,1,19069.99,8,0.141604,1],
# ]
#
return self.parse_order_book(response, symbol)
def parse_ticker(self, ticker, market=None):
#
# fetchTicker
#
# {
# "OMSId":1,
# "InstrumentId":8,
# "BestBid":19069.31,
# "BestOffer":19069.32,
# "LastTradedPx":19069.32,
# "LastTradedQty":0.0001,
# "LastTradeTime":1607040406424,
# "SessionOpen":19069.32,
# "SessionHigh":19069.32,
# "SessionLow":19069.32,
# "SessionClose":19069.32,
# "Volume":0.0001,
# "CurrentDayVolume":0.0001,
# "CurrentDayNotional":1.906932,
# "CurrentDayNumTrades":1,
# "CurrentDayPxChange":0.00,
# "Rolling24HrVolume":0.000000000000000000000000000,
# "Rolling24HrNotional":0.00000000000000000000000,
# "Rolling24NumTrades":0,
# "Rolling24HrPxChange":0,
# "TimeStamp":"1607040406425",
# "BidQty":0,
# "AskQty":0,
# "BidOrderCt":0,
# "AskOrderCt":0,
# "Rolling24HrPxChangePercent":0,
# }
#
timestamp = self.safe_integer(ticker, 'TimeStamp')
marketId = self.safe_string(ticker, 'InstrumentId')
market = self.safe_market(marketId, market)
symbol = self.safe_symbol(marketId, market)
last = self.safe_number(ticker, 'LastTradedPx')
percentage = self.safe_number(ticker, 'Rolling24HrPxChangePercent')
change = self.safe_number(ticker, 'Rolling24HrPxChange')
open = self.safe_number(ticker, 'SessionOpen')
baseVolume = self.safe_number(ticker, 'Rolling24HrVolume')
quoteVolume = self.safe_number(ticker, 'Rolling24HrNotional')
vwap = self.vwap(baseVolume, quoteVolume)
return self.safe_ticker({
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'SessionHigh'),
'low': self.safe_number(ticker, 'SessionLow'),
'bid': self.safe_number(ticker, 'BestBid'),
'bidVolume': None, # self.safe_number(ticker, 'BidQty'), always shows 0
'ask': self.safe_number(ticker, 'BestOffer'),
'askVolume': None, # self.safe_number(ticker, 'AskQty'), always shows 0
'vwap': vwap,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': percentage,
'average': None,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}, market)
def fetch_ticker(self, symbol, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
market = self.market(symbol)
request = {
'omsId': omsId,
'InstrumentId': market['id'],
}
response = self.publicGetGetLevel1(self.extend(request, params))
#
# {
# "OMSId":1,
# "InstrumentId":8,
# "BestBid":19069.31,
# "BestOffer":19069.32,
# "LastTradedPx":19069.32,
# "LastTradedQty":0.0001,
# "LastTradeTime":1607040406424,
# "SessionOpen":19069.32,
# "SessionHigh":19069.32,
# "SessionLow":19069.32,
# "SessionClose":19069.32,
# "Volume":0.0001,
# "CurrentDayVolume":0.0001,
# "CurrentDayNotional":1.906932,
# "CurrentDayNumTrades":1,
# "CurrentDayPxChange":0.00,
# "Rolling24HrVolume":0.000000000000000000000000000,
# "Rolling24HrNotional":0.00000000000000000000000,
# "Rolling24NumTrades":0,
# "Rolling24HrPxChange":0,
# "TimeStamp":"1607040406425",
# "BidQty":0,
# "AskQty":0,
# "BidOrderCt":0,
# "AskOrderCt":0,
# "Rolling24HrPxChangePercent":0,
# }
#
return self.parse_ticker(response, market)
def parse_ohlcv(self, ohlcv, market=None):
#
# [
# 1501603632000, # 0 DateTime
# 2700.33, # 1 High
# 2687.01, # 2 Low
# 2687.01, # 3 Open
# 2687.01, # 4 Close
# 24.86100992, # 5 Volume
# 0, # 6 Inside Bid Price
# 2870.95, # 7 Inside Ask Price
# 1 # 8 InstrumentId
# ]
#
return [
self.safe_integer(ohlcv, 0),
self.safe_number(ohlcv, 3),
self.safe_number(ohlcv, 1),
self.safe_number(ohlcv, 2),
self.safe_number(ohlcv, 4),
self.safe_number(ohlcv, 5),
]
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
market = self.market(symbol)
request = {
'omsId': omsId,
'InstrumentId': market['id'],
'Interval': self.timeframes[timeframe],
}
duration = self.parse_timeframe(timeframe)
now = self.milliseconds()
if since is None:
if limit is not None:
request['FromDate'] = self.ymdhms(now - duration * limit * 1000)
request['ToDate'] = self.ymdhms(now)
else:
request['FromDate'] = self.ymdhms(since)
if limit is None:
request['ToDate'] = self.ymdhms(now)
else:
request['ToDate'] = self.ymdhms(self.sum(since, duration * limit * 1000))
response = self.publicGetGetTickerHistory(self.extend(request, params))
#
# [
# [1607299260000,19069.32,19069.32,19069.32,19069.32,0,19069.31,19069.32,8,1607299200000],
# [1607299320000,19069.32,19069.32,19069.32,19069.32,0,19069.31,19069.32,8,1607299260000],
# [1607299380000,19069.32,19069.32,19069.32,19069.32,0,19069.31,19069.32,8,1607299320000],
# ]
#
return self.parse_ohlcvs(response, market, timeframe, since, limit)
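    # A worked example of the FromDate/ToDate window arithmetic above, with
    # illustrative values: for timeframe='1m' (duration = 60 seconds),
    # since=1607299260000 and limit=3, the request window is
    #
    #     FromDate = ymdhms(1607299260000)                   # since
    #     ToDate   = ymdhms(1607299260000 + 60 * 3 * 1000)   # since + 3 minutes
    #
    # i.e. exactly three one-minute candles.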
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# [
# 6913253, # 0 TradeId
# 8, # 1 ProductPairCode
# 0.03340802, # 2 Quantity
# 19116.08, # 3 Price
# 2543425077, # 4 Order1
# 2543425482, # 5 Order2
# 1606935922416, # 6 Tradetime
# 0, # 7 Direction
# 1, # 8 TakerSide
# 0, # 9 BlockTrade
# 0, # 10 Either Order1ClientId or Order2ClientId
# ]
#
# fetchMyTrades(private)
#
# {
# "OMSId":1,
# "ExecutionId":16916567,
# "TradeId":14476351,
# "OrderId":2543565231,
# "AccountId":449,
# "AccountName":"igor@ccxt.trade",
# "SubAccountId":0,
# "ClientOrderId":0,
# "InstrumentId":8,
# "Side":"Sell",
# "OrderType":"Market",
# "Quantity":0.1230000000000000000000000000,
# "RemainingQuantity":0.0000000000000000000000000000,
# "Price":19069.310000000000000000000000,
# "Value":2345.5251300000000000000000000,
# "CounterParty":"7",
# "OrderTradeRevision":1,
# "Direction":"NoChange",
# "IsBlockTrade":false,
# "Fee":1.1727625650000000000000000000,
# "FeeProductId":8,
# "OrderOriginator":446,
# "UserName":"igor@ccxt.trade",
# "TradeTimeMS":1607565031569,
# "MakerTaker":"Taker",
# "AdapterTradeId":0,
# "InsideBid":19069.310000000000000000000000,
# "InsideBidSize":0.2400950000000000000000000000,
# "InsideAsk":19069.320000000000000000000000,
# "InsideAskSize":0.0997360000000000000000000000,
# "IsQuote":false,
# "CounterPartyClientUserId":1,
# "NotionalProductId":2,
# "NotionalRate":1.0000000000000000000000000000,
# "NotionalValue":2345.5251300000000000000000000,
# "NotionalHoldAmount":0,
# "TradeTime":637431618315686826
# }
#
# fetchOrderTrades
#
# {
# "Side":"Sell",
# "OrderId":2543565235,
# "Price":18600.000000000000000000000000,
# "Quantity":0.0000000000000000000000000000,
# "DisplayQuantity":0.0000000000000000000000000000,
# "Instrument":8,
# "Account":449,
# "AccountName":"igor@ccxt.trade",
# "OrderType":"Limit",
# "ClientOrderId":0,
# "OrderState":"FullyExecuted",
# "ReceiveTime":1607585844956,
# "ReceiveTimeTicks":637431826449564182,
# "LastUpdatedTime":1607585844959,
# "LastUpdatedTimeTicks":637431826449593893,
# "OrigQuantity":0.1230000000000000000000000000,
# "QuantityExecuted":0.1230000000000000000000000000,
# "GrossValueExecuted":2345.3947500000000000000000000,
# "ExecutableValue":0.0000000000000000000000000000,
# "AvgPrice":19068.250000000000000000000000,
# "CounterPartyId":0,
# "ChangeReason":"Trade",
# "OrigOrderId":2543565235,
# "OrigClOrdId":0,
# "EnteredBy":446,
# "UserName":"igor@ccxt.trade",
# "IsQuote":false,
# "InsideAsk":19069.320000000000000000000000,
# "InsideAskSize":0.0997360000000000000000000000,
# "InsideBid":19068.250000000000000000000000,
# "InsideBidSize":1.3300010000000000000000000000,
# "LastTradePrice":19068.250000000000000000000000,
# "RejectReason":"",
# "IsLockedIn":false,
# "CancelReason":"",
# "OrderFlag":"0",
# "UseMargin":false,
# "StopPrice":0.0000000000000000000000000000,
# "PegPriceType":"Unknown",
# "PegOffset":0.0000000000000000000000000000,
# "PegLimitOffset":0.0000000000000000000000000000,
# "IpAddress":"x.x.x.x",
# "ClientOrderIdUuid":null,
# "OMSId":1
# }
#
priceString = None
amountString = None
cost = None
timestamp = None
id = None
marketId = None
side = None
orderId = None
takerOrMaker = None
fee = None
type = None
if isinstance(trade, list):
priceString = self.safe_string(trade, 3)
amountString = self.safe_string(trade, 2)
timestamp = self.safe_integer(trade, 6)
id = self.safe_string(trade, 0)
marketId = self.safe_string(trade, 1)
takerSide = self.safe_value(trade, 8)
side = 'sell' if takerSide else 'buy'
orderId = self.safe_string(trade, 4)
else:
timestamp = self.safe_integer_2(trade, 'TradeTimeMS', 'ReceiveTime')
id = self.safe_string(trade, 'TradeId')
orderId = self.safe_string_2(trade, 'OrderId', 'OrigOrderId')
marketId = self.safe_string_2(trade, 'InstrumentId', 'Instrument')
priceString = self.safe_string(trade, 'Price')
amountString = self.safe_string(trade, 'Quantity')
cost = self.safe_number_2(trade, 'Value', 'GrossValueExecuted')
takerOrMaker = self.safe_string_lower(trade, 'MakerTaker')
side = self.safe_string_lower(trade, 'Side')
type = self.safe_string_lower(trade, 'OrderType')
feeCost = self.safe_number(trade, 'Fee')
if feeCost is not None:
feeCurrencyId = self.safe_string(trade, 'FeeProductId')
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
}
price = self.parse_number(priceString)
amount = self.parse_number(amountString)
if cost is None:
cost = self.parse_number(Precise.string_mul(priceString, amountString))
symbol = self.safe_symbol(marketId, market)
return {
'info': trade,
'id': id,
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'order': orderId,
'type': type,
'side': side,
'takerOrMaker': takerOrMaker,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
market = self.market(symbol)
request = {
'omsId': omsId,
'InstrumentId': market['id'],
}
if limit is not None:
request['Count'] = limit
response = self.publicGetGetLastTrades(self.extend(request, params))
#
# [
# [6913253,8,0.03340802,19116.08,2543425077,2543425482,1606935922416,0,1,0,0],
# [6913254,8,0.01391671,19117.42,2543427510,2543427811,1606935927998,1,1,0,0],
# [6913255,8,0.000006,19107.81,2543430495,2543430793,1606935933881,2,0,0,0],
# ]
#
return self.parse_trades(response, market, since, limit)
def fetch_accounts(self, params={}):
if not self.login:
raise AuthenticationError(self.id + ' fetchAccounts() requires exchange.login email credential')
omsId = self.safe_integer(self.options, 'omsId', 1)
self.check_required_credentials()
request = {
'omsId': omsId,
'UserId': self.uid,
'UserName': self.login,
}
response = self.privateGetGetUserAccounts(self.extend(request, params))
#
# [449] # comma-separated list of account ids
#
result = []
for i in range(0, len(response)):
accountId = self.safe_string(response, i)
result.append({
'id': accountId,
'type': None,
'currency': None,
'info': accountId,
})
return result
def parse_balance(self, response):
result = {
'info': response,
'timestamp': None,
'datetime': None,
}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'ProductId')
if currencyId in self.currencies_by_id:
code = self.safe_currency_code(currencyId)
account = self.account()
account['total'] = self.safe_string(balance, 'Amount')
account['used'] = self.safe_string(balance, 'Hold')
result[code] = account
return self.safe_balance(result)
def fetch_balance(self, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
request = {
'omsId': omsId,
'AccountId': accountId,
}
response = self.privateGetGetAccountPositions(self.extend(request, params))
#
# [
# {
# "OMSId":1,
# "AccountId":449,
# "ProductSymbol":"BTC",
# "ProductId":1,
# "Amount":10.000000000000000000000000000,
# "Hold":0,
# "PendingDeposits":0.0000000000000000000000000000,
# "PendingWithdraws":0.0000000000000000000000000000,
# "TotalDayDeposits":10.000000000000000000000000000,
# "TotalMonthDeposits":10.000000000000000000000000000,
# "TotalYearDeposits":10.000000000000000000000000000,
# "TotalDayDepositNotional":10.000000000000000000000000000,
# "TotalMonthDepositNotional":10.000000000000000000000000000,
# "TotalYearDepositNotional":10.000000000000000000000000000,
# "TotalDayWithdraws":0,
# "TotalMonthWithdraws":0,
# "TotalYearWithdraws":0,
# "TotalDayWithdrawNotional":0,
# "TotalMonthWithdrawNotional":0,
# "TotalYearWithdrawNotional":0,
# "NotionalProductId":8,
# "NotionalProductSymbol":"USDT",
# "NotionalValue":10.000000000000000000000000000,
# "NotionalHoldAmount":0,
# "NotionalRate":1
# },
# ]
#
return self.parse_balance(response)
def parse_ledger_entry_type(self, type):
types = {
'Trade': 'trade',
'Deposit': 'transaction',
'Withdraw': 'transaction',
'Transfer': 'transfer',
'OrderHold': 'trade',
'WithdrawHold': 'transaction',
'DepositHold': 'transaction',
'MarginHold': 'trade',
'ManualHold': 'trade',
'ManualEntry': 'trade',
'MarginAcquisition': 'trade',
'MarginRelinquish': 'trade',
'MarginQuoteHold': 'trade',
}
return self.safe_string(types, type, type)
def parse_ledger_entry(self, item, currency=None):
#
# {
# "TransactionId":2663709493,
# "ReferenceId":68,
# "OMSId":1,
# "AccountId":449,
# "CR":10.000000000000000000000000000,
# "DR":0.0000000000000000000000000000,
# "Counterparty":3,
# "TransactionType":"Other",
# "ReferenceType":"Deposit",
# "ProductId":1,
# "Balance":10.000000000000000000000000000,
# "TimeStamp":1607532331591
# }
#
id = self.safe_string(item, 'TransactionId')
account = self.safe_string(item, 'AccountId')
referenceId = self.safe_string(item, 'ReferenceId')
referenceAccount = self.safe_string(item, 'Counterparty')
type = self.parse_ledger_entry_type(self.safe_string(item, 'ReferenceType'))
currencyId = self.safe_string(item, 'ProductId')
code = self.safe_currency_code(currencyId, currency)
credit = self.safe_number(item, 'CR')
debit = self.safe_number(item, 'DR')
amount = None
direction = None
if credit > 0:
amount = credit
direction = 'in'
elif debit > 0:
amount = debit
direction = 'out'
timestamp = self.safe_integer(item, 'TimeStamp')
before = None
after = self.safe_number(item, 'Balance')
if direction == 'out':
before = self.sum(after, amount)
elif direction == 'in':
before = max(0, after - amount)
status = 'ok'
return {
'info': item,
'id': id,
'direction': direction,
'account': account,
'referenceId': referenceId,
'referenceAccount': referenceAccount,
'type': type,
'currency': code,
'amount': amount,
'before': before,
'after': after,
'status': status,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': None,
}
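    # The before-balance reconstruction above is simple arithmetic on the
    # reported post-transaction balance. Worked example with the documented
    # deposit entry (CR=10, Balance=10): direction is 'in', so
    # before = max(0, 10 - 10) = 0 and after = 10. For a hypothetical 'out'
    # entry with DR=2 and Balance=8, before = 8 + 2 = 10.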
def fetch_ledger(self, code=None, since=None, limit=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
request = {
'omsId': omsId,
'AccountId': accountId,
}
if limit is not None:
request['Depth'] = limit
response = self.privateGetGetAccountTransactions(self.extend(request, params))
#
# [
# {
# "TransactionId":2663709493,
# "ReferenceId":68,
# "OMSId":1,
# "AccountId":449,
# "CR":10.000000000000000000000000000,
# "DR":0.0000000000000000000000000000,
# "Counterparty":3,
# "TransactionType":"Other",
# "ReferenceType":"Deposit",
# "ProductId":1,
# "Balance":10.000000000000000000000000000,
# "TimeStamp":1607532331591
# },
# ]
#
currency = None
if code is not None:
currency = self.currency(code)
return self.parse_ledger(response, currency, since, limit)
def parse_order_status(self, status):
statuses = {
'Accepted': 'open',
'Rejected': 'rejected',
'Working': 'open',
'Canceled': 'canceled',
'Expired': 'expired',
'FullyExecuted': 'closed',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# createOrder
#
# {
# "status":"Accepted",
# "errormsg":"",
# "OrderId": 2543565231
# }
#
# editOrder
#
# {
# "ReplacementOrderId": 1234,
# "ReplacementClOrdId": 1561,
# "OrigOrderId": 5678,
# "OrigClOrdId": 91011,
# }
#
# fetchOpenOrders, fetchClosedOrders
#
# {
# "Side":"Buy",
# "OrderId":2543565233,
# "Price":19010,
# "Quantity":0.345,
# "DisplayQuantity":0.345,
# "Instrument":8,
# "Account":449,
# "AccountName":"igor@ccxt.trade",
# "OrderType":"Limit",
# "ClientOrderId":0,
# "OrderState":"Working",
# "ReceiveTime":1607579326003,
# "ReceiveTimeTicks":637431761260028981,
# "LastUpdatedTime":1607579326005,
# "LastUpdatedTimeTicks":637431761260054714,
# "OrigQuantity":0.345,
# "QuantityExecuted":0,
# "GrossValueExecuted":0,
# "ExecutableValue":0,
# "AvgPrice":0,
# "CounterPartyId":0,
# "ChangeReason":"NewInputAccepted",
# "OrigOrderId":2543565233,
# "OrigClOrdId":0,
# "EnteredBy":446,
# "UserName":"igor@ccxt.trade",
# "IsQuote":false,
# "InsideAsk":19069.32,
# "InsideAskSize":0.099736,
# "InsideBid":19068.25,
# "InsideBidSize":1.330001,
# "LastTradePrice":19068.25,
# "RejectReason":"",
# "IsLockedIn":false,
# "CancelReason":"",
# "OrderFlag":"AddedToBook",
# "UseMargin":false,
# "StopPrice":0,
# "PegPriceType":"Unknown",
# "PegOffset":0,
# "PegLimitOffset":0,
# "IpAddress":null,
# "ClientOrderIdUuid":null,
# "OMSId":1
# }
#
id = self.safe_string_2(order, 'ReplacementOrderId', 'OrderId')
timestamp = self.safe_integer(order, 'ReceiveTime')
lastTradeTimestamp = self.safe_integer(order, 'LastUpdatedTime')
marketId = self.safe_string(order, 'Instrument')
symbol = self.safe_symbol(marketId, market)
side = self.safe_string_lower(order, 'Side')
type = self.safe_string_lower(order, 'OrderType')
clientOrderId = self.safe_string_2(order, 'ReplacementClOrdId', 'ClientOrderId')
price = self.safe_string(order, 'Price')
amount = self.safe_string(order, 'OrigQuantity')
filled = self.safe_string(order, 'QuantityExecuted')
cost = self.safe_string(order, 'GrossValueExecuted')
average = self.safe_string(order, 'AvgPrice')
stopPrice = self.parse_number(self.omit_zero(self.safe_string(order, 'StopPrice')))
status = self.parse_order_status(self.safe_string(order, 'OrderState'))
return self.safe_order({
'id': id,
'clientOrderId': clientOrderId,
'info': order,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': lastTradeTimestamp,
'status': status,
'symbol': symbol,
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': price,
'stopPrice': stopPrice,
'cost': cost,
'amount': amount,
'filled': filled,
'average': average,
'remaining': None,
'fee': None,
'trades': None,
}, market)
def create_order(self, symbol, type, side, amount, price=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
clientOrderId = self.safe_integer_2(params, 'ClientOrderId', 'clientOrderId')
params = self.omit(params, ['accountId', 'AccountId', 'clientOrderId', 'ClientOrderId'])
market = self.market(symbol)
orderSide = 0 if (side == 'buy') else 1
request = {
'InstrumentId': int(market['id']),
'omsId': omsId,
'AccountId': accountId,
'TimeInForce': 1, # 0 Unknown, 1 GTC by default, 2 OPG execute as close to opening price as possible, 3 IOC immediate or canceled, 4 FOK fill-or-kill, 5 GTX good 'til executed, 6 GTD good 'til date
# 'ClientOrderId': clientOrderId, # defaults to 0
            # If this order is order A, OrderIdOCO refers to the order ID of an order B (which is not the order being created by this call).
            # If order B executes, then order A created by this call is canceled.
            # You can also set up order B to watch order A in the same way, but that may require an update to order B to make it watch this one, which could have implications for priority in the order book.
            # See CancelReplaceOrder and ModifyOrder.
# 'OrderIdOCO': 0, # The order ID if One Cancels the Other.
# 'UseDisplayQuantity': False, # If you enter a Limit order with a reserve, you must set UseDisplayQuantity to True
'Side': orderSide, # 0 Buy, 1 Sell, 2 Short, 3 unknown an error condition
'Quantity': float(self.amount_to_precision(symbol, amount)),
'OrderType': self.safe_integer(self.options['orderTypes'], self.capitalize(type)), # 0 Unknown, 1 Market, 2 Limit, 3 StopMarket, 4 StopLimit, 5 TrailingStopMarket, 6 TrailingStopLimit, 7 BlockTrade
# 'PegPriceType': 3, # 1 Last, 2 Bid, 3 Ask, 4 Midpoint
# 'LimitPrice': float(self.price_to_precision(symbol, price)),
}
# If OrderType=1(Market), Side=0(Buy), and LimitPrice is supplied, the Market order will execute up to the value specified
if price is not None:
request['LimitPrice'] = float(self.price_to_precision(symbol, price))
if clientOrderId is not None:
request['ClientOrderId'] = clientOrderId
response = self.privatePostSendOrder(self.extend(request, params))
#
# {
# "status":"Accepted",
# "errormsg":"",
# "OrderId": 2543565231
# }
#
return self.parse_order(response, market)
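    # A minimal usage sketch for create_order() above (symbol and values are
    # placeholders): a limit buy maps to Side=0 and OrderType=2, and the price
    # argument is sent as LimitPrice.
    #
    #     order = exchange.create_order('BTC/USDT', 'limit', 'buy', 0.01, 19000, {
    #         'accountId': 449,       # optional, defaults to the first account
    #         'clientOrderId': 1234,  # optional, defaults to 0 on the exchange side
    #     })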
def edit_order(self, id, symbol, type, side, amount, price=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
clientOrderId = self.safe_integer_2(params, 'ClientOrderId', 'clientOrderId')
params = self.omit(params, ['accountId', 'AccountId', 'clientOrderId', 'ClientOrderId'])
market = self.market(symbol)
orderSide = 0 if (side == 'buy') else 1
request = {
'OrderIdToReplace': int(id),
'InstrumentId': int(market['id']),
'omsId': omsId,
'AccountId': accountId,
'TimeInForce': 1, # 0 Unknown, 1 GTC by default, 2 OPG execute as close to opening price as possible, 3 IOC immediate or canceled, 4 FOK fill-or-kill, 5 GTX good 'til executed, 6 GTD good 'til date
# 'ClientOrderId': clientOrderId, # defaults to 0
            # If this order is order A, OrderIdOCO refers to the order ID of an order B (which is not the order being created by this call).
            # If order B executes, then order A created by this call is canceled.
            # You can also set up order B to watch order A in the same way, but that may require an update to order B to make it watch this one, which could have implications for priority in the order book.
            # See CancelReplaceOrder and ModifyOrder.
# 'OrderIdOCO': 0, # The order ID if One Cancels the Other.
# 'UseDisplayQuantity': False, # If you enter a Limit order with a reserve, you must set UseDisplayQuantity to True
'Side': orderSide, # 0 Buy, 1 Sell, 2 Short, 3 unknown an error condition
'Quantity': float(self.amount_to_precision(symbol, amount)),
'OrderType': self.safe_integer(self.options['orderTypes'], self.capitalize(type)), # 0 Unknown, 1 Market, 2 Limit, 3 StopMarket, 4 StopLimit, 5 TrailingStopMarket, 6 TrailingStopLimit, 7 BlockTrade
# 'PegPriceType': 3, # 1 Last, 2 Bid, 3 Ask, 4 Midpoint
# 'LimitPrice': float(self.price_to_precision(symbol, price)),
}
# If OrderType=1(Market), Side=0(Buy), and LimitPrice is supplied, the Market order will execute up to the value specified
if price is not None:
request['LimitPrice'] = float(self.price_to_precision(symbol, price))
if clientOrderId is not None:
request['ClientOrderId'] = clientOrderId
response = self.privatePostCancelReplaceOrder(self.extend(request, params))
#
# {
# "replacementOrderId": 1234,
# "replacementClOrdId": 1561,
# "origOrderId": 5678,
# "origClOrdId": 91011,
# }
#
return self.parse_order(response, market)
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
request = {
'omsId': omsId,
'AccountId': accountId,
# 'InstrumentId': market['id'],
# 'TradeId': 123, # If you specify TradeId, GetTradesHistory can return all states for a single trade
# 'OrderId': 456, # If specified, the call returns all trades associated with the order
            # 'UserId': integer. The ID of the logged-in user. If not specified, the call returns trades associated with the users belonging to the default account for the logged-in user of this OMS.
            # 'StartTimeStamp': long integer. The historical date and time at which to begin the trade report, in POSIX format. If not specified, reverts to the start date of this account on the trading venue.
            # 'EndTimeStamp': long integer. Date at which to end the trade report, in POSIX format.
            # 'Depth': integer. In this case, the count of trades to return, counting from the StartIndex. If Depth is not specified, returns all trades between BeginTimeStamp and EndTimeStamp, beginning at StartIndex.
# 'StartIndex': 0 # from the most recent trade 0 and moving backwards in time
# 'ExecutionId': 123, # The ID of the individual buy or sell execution. If not specified, returns all.
}
market = None
if symbol is not None:
market = self.market(symbol)
request['InstrumentId'] = market['id']
if since is not None:
request['StartTimeStamp'] = int(since / 1000)
if limit is not None:
request['Depth'] = limit
response = self.privateGetGetTradesHistory(self.extend(request, params))
#
# [
# {
# "OMSId":1,
# "ExecutionId":16916567,
# "TradeId":14476351,
# "OrderId":2543565231,
# "AccountId":449,
# "AccountName":"igor@ccxt.trade",
# "SubAccountId":0,
# "ClientOrderId":0,
# "InstrumentId":8,
# "Side":"Sell",
# "OrderType":"Market",
# "Quantity":0.1230000000000000000000000000,
# "RemainingQuantity":0.0000000000000000000000000000,
# "Price":19069.310000000000000000000000,
# "Value":2345.5251300000000000000000000,
# "CounterParty":"7",
# "OrderTradeRevision":1,
# "Direction":"NoChange",
# "IsBlockTrade":false,
# "Fee":1.1727625650000000000000000000,
# "FeeProductId":8,
# "OrderOriginator":446,
# "UserName":"igor@ccxt.trade",
# "TradeTimeMS":1607565031569,
# "MakerTaker":"Taker",
# "AdapterTradeId":0,
# "InsideBid":19069.310000000000000000000000,
# "InsideBidSize":0.2400950000000000000000000000,
# "InsideAsk":19069.320000000000000000000000,
# "InsideAskSize":0.0997360000000000000000000000,
# "IsQuote":false,
# "CounterPartyClientUserId":1,
# "NotionalProductId":2,
# "NotionalRate":1.0000000000000000000000000000,
# "NotionalValue":2345.5251300000000000000000000,
# "NotionalHoldAmount":0,
# "TradeTime":637431618315686826
# }
# ]
#
return self.parse_trades(response, market, since, limit)
def cancel_all_orders(self, symbol=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
request = {
'omsId': omsId,
'AccountId': accountId,
}
if symbol is not None:
market = self.market(symbol)
            request['InstrumentId'] = market['id']
response = self.privatePostCancelAllOrders(self.extend(request, params))
#
# {
# "result":true,
# "errormsg":null,
# "errorcode":0,
# "detail":null
# }
#
return response
def cancel_order(self, id, symbol=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
# defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
# accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
# params = self.omit(params, ['accountId', 'AccountId'])
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'omsId': omsId,
# 'AccountId': accountId,
}
clientOrderId = self.safe_integer_2(params, 'clientOrderId', 'ClOrderId')
if clientOrderId is not None:
request['ClOrderId'] = clientOrderId
else:
request['OrderId'] = int(id)
params = self.omit(params, ['clientOrderId', 'ClOrderId'])
response = self.privatePostCancelOrder(self.extend(request, params))
order = self.parse_order(response, market)
return self.extend(order, {
'id': id,
'clientOrderId': clientOrderId,
})
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'omsId': omsId,
'AccountId': accountId,
}
response = self.privateGetGetOpenOrders(self.extend(request, params))
#
# [
# {
# "Side":"Buy",
# "OrderId":2543565233,
# "Price":19010,
# "Quantity":0.345,
# "DisplayQuantity":0.345,
# "Instrument":8,
# "Account":449,
# "AccountName":"igor@ccxt.trade",
# "OrderType":"Limit",
# "ClientOrderId":0,
# "OrderState":"Working",
# "ReceiveTime":1607579326003,
# "ReceiveTimeTicks":637431761260028981,
# "LastUpdatedTime":1607579326005,
# "LastUpdatedTimeTicks":637431761260054714,
# "OrigQuantity":0.345,
# "QuantityExecuted":0,
# "GrossValueExecuted":0,
# "ExecutableValue":0,
# "AvgPrice":0,
# "CounterPartyId":0,
# "ChangeReason":"NewInputAccepted",
# "OrigOrderId":2543565233,
# "OrigClOrdId":0,
# "EnteredBy":446,
# "UserName":"igor@ccxt.trade",
# "IsQuote":false,
# "InsideAsk":19069.32,
# "InsideAskSize":0.099736,
# "InsideBid":19068.25,
# "InsideBidSize":1.330001,
# "LastTradePrice":19068.25,
# "RejectReason":"",
# "IsLockedIn":false,
# "CancelReason":"",
# "OrderFlag":"AddedToBook",
# "UseMargin":false,
# "StopPrice":0,
# "PegPriceType":"Unknown",
# "PegOffset":0,
# "PegLimitOffset":0,
# "IpAddress":null,
# "ClientOrderIdUuid":null,
# "OMSId":1
# }
# ]
#
return self.parse_orders(response, market, since, limit)
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
request = {
'omsId': omsId,
'AccountId': accountId,
# 'ClientOrderId': clientOrderId,
# 'OriginalOrderId': id,
# 'OriginalClientOrderId': long integer,
# 'UserId': integer,
# 'InstrumentId': market['id'],
            # 'StartTimeStamp': since,
            # 'EndTimeStamp': self.milliseconds(),
# 'Depth': limit,
# 'StartIndex': 0,
}
market = None
if symbol is not None:
market = self.market(symbol)
request['InstrumentId'] = market['id']
if since is not None:
request['StartTimeStamp'] = int(since / 1000)
if limit is not None:
request['Depth'] = limit
response = self.privateGetGetOrdersHistory(self.extend(request, params))
#
# [
# {
# "Side":"Buy",
# "OrderId":2543565233,
# "Price":19010.000000000000000000000000,
# "Quantity":0.0000000000000000000000000000,
# "DisplayQuantity":0.3450000000000000000000000000,
# "Instrument":8,
# "Account":449,
# "AccountName":"igor@ccxt.trade",
# "OrderType":"Limit",
# "ClientOrderId":0,
# "OrderState":"Canceled",
# "ReceiveTime":1607579326003,
# "ReceiveTimeTicks":637431761260028981,
# "LastUpdatedTime":1607580965346,
# "LastUpdatedTimeTicks":637431777653463754,
# "OrigQuantity":0.3450000000000000000000000000,
# "QuantityExecuted":0.0000000000000000000000000000,
# "GrossValueExecuted":0.0000000000000000000000000000,
# "ExecutableValue":0.0000000000000000000000000000,
# "AvgPrice":0.0000000000000000000000000000,
# "CounterPartyId":0,
# "ChangeReason":"UserModified",
# "OrigOrderId":2543565233,
# "OrigClOrdId":0,
# "EnteredBy":446,
# "UserName":"igor@ccxt.trade",
# "IsQuote":false,
# "InsideAsk":19069.320000000000000000000000,
# "InsideAskSize":0.0997360000000000000000000000,
# "InsideBid":19068.250000000000000000000000,
# "InsideBidSize":1.3300010000000000000000000000,
# "LastTradePrice":19068.250000000000000000000000,
# "RejectReason":"",
# "IsLockedIn":false,
# "CancelReason":"UserModified",
# "OrderFlag":"AddedToBook, RemovedFromBook",
# "UseMargin":false,
# "StopPrice":0.0000000000000000000000000000,
# "PegPriceType":"Unknown",
# "PegOffset":0.0000000000000000000000000000,
# "PegLimitOffset":0.0000000000000000000000000000,
# "IpAddress":"x.x.x.x",
# "ClientOrderIdUuid":null,
# "OMSId":1
# },
# ]
#
return self.parse_orders(response, market, since, limit)
def fetch_order(self, id, symbol=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'omsId': omsId,
'AccountId': accountId,
'OrderId': int(id),
}
response = self.privateGetGetOrderStatus(self.extend(request, params))
#
# {
# "Side":"Sell",
# "OrderId":2543565232,
# "Price":0.0000000000000000000000000000,
# "Quantity":0.0000000000000000000000000000,
# "DisplayQuantity":0.0000000000000000000000000000,
# "Instrument":8,
# "Account":449,
# "AccountName":"igor@ccxt.trade",
# "OrderType":"Market",
# "ClientOrderId":0,
# "OrderState":"FullyExecuted",
# "ReceiveTime":1607569475591,
# "ReceiveTimeTicks":637431662755912377,
# "LastUpdatedTime":1607569475596,
# "LastUpdatedTimeTicks":637431662755960902,
# "OrigQuantity":1.0000000000000000000000000000,
# "QuantityExecuted":1.0000000000000000000000000000,
# "GrossValueExecuted":19068.270478610000000000000000,
# "ExecutableValue":0.0000000000000000000000000000,
# "AvgPrice":19068.270478610000000000000000,
# "CounterPartyId":0,
# "ChangeReason":"Trade",
# "OrigOrderId":2543565232,
# "OrigClOrdId":0,
# "EnteredBy":446,
# "UserName":"igor@ccxt.trade",
# "IsQuote":false,
# "InsideAsk":19069.320000000000000000000000,
# "InsideAskSize":0.0997360000000000000000000000,
# "InsideBid":19069.310000000000000000000000,
# "InsideBidSize":0.2400950000000000000000000000,
# "LastTradePrice":19069.310000000000000000000000,
# "RejectReason":"",
# "IsLockedIn":false,
# "CancelReason":"",
# "OrderFlag":"0",
# "UseMargin":false,
# "StopPrice":0.0000000000000000000000000000,
# "PegPriceType":"Unknown",
# "PegOffset":0.0000000000000000000000000000,
# "PegLimitOffset":0.0000000000000000000000000000,
# "IpAddress":"x.x.x.x",
# "ClientOrderIdUuid":null,
# "OMSId":1
# }
#
return self.parse_order(response, market)
def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
# defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
# accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
# params = self.omit(params, ['accountId', 'AccountId'])
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'OMSId': int(omsId),
# 'AccountId': accountId,
'OrderId': int(id),
}
response = self.privatePostGetOrderHistoryByOrderId(self.extend(request, params))
#
# [
# {
# "Side":"Sell",
# "OrderId":2543565235,
# "Price":18600.000000000000000000000000,
# "Quantity":0.0000000000000000000000000000,
# "DisplayQuantity":0.0000000000000000000000000000,
# "Instrument":8,
# "Account":449,
# "AccountName":"igor@ccxt.trade",
# "OrderType":"Limit",
# "ClientOrderId":0,
# "OrderState":"FullyExecuted",
# "ReceiveTime":1607585844956,
# "ReceiveTimeTicks":637431826449564182,
# "LastUpdatedTime":1607585844959,
# "LastUpdatedTimeTicks":637431826449593893,
# "OrigQuantity":0.1230000000000000000000000000,
# "QuantityExecuted":0.1230000000000000000000000000,
# "GrossValueExecuted":2345.3947500000000000000000000,
# "ExecutableValue":0.0000000000000000000000000000,
# "AvgPrice":19068.250000000000000000000000,
# "CounterPartyId":0,
# "ChangeReason":"Trade",
# "OrigOrderId":2543565235,
# "OrigClOrdId":0,
# "EnteredBy":446,
# "UserName":"igor@ccxt.trade",
# "IsQuote":false,
# "InsideAsk":19069.320000000000000000000000,
# "InsideAskSize":0.0997360000000000000000000000,
# "InsideBid":19068.250000000000000000000000,
# "InsideBidSize":1.3300010000000000000000000000,
# "LastTradePrice":19068.250000000000000000000000,
# "RejectReason":"",
# "IsLockedIn":false,
# "CancelReason":"",
# "OrderFlag":"0",
# "UseMargin":false,
# "StopPrice":0.0000000000000000000000000000,
# "PegPriceType":"Unknown",
# "PegOffset":0.0000000000000000000000000000,
# "PegLimitOffset":0.0000000000000000000000000000,
# "IpAddress":"x.x.x.x",
# "ClientOrderIdUuid":null,
# "OMSId":1
# },
# ]
#
grouped = self.group_by(response, 'ChangeReason')
trades = self.safe_value(grouped, 'Trade', [])
return self.parse_trades(trades, market, since, limit)
def fetch_deposit_address(self, code, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
currency = self.currency(code)
request = {
'omsId': omsId,
'AccountId': accountId,
'ProductId': currency['id'],
'GenerateNewKey': False,
}
response = self.privateGetGetDepositInfo(self.extend(request, params))
#
# {
# "result":true,
# "errormsg":null,
# "statuscode":0,
# "AssetManagerId":1,
# "AccountId":57922,
# "AssetId":16,
# "ProviderId":23,
# "DepositInfo":"[\"0x8A27564b5c30b91C93B1591821642420F323a210\"]"
# }
#
return self.parse_deposit_address(response, currency)
def parse_deposit_address(self, depositAddress, currency=None):
#
# fetchDepositAddress, createDepositAddress
#
# {
# "result":true,
# "errormsg":null,
# "statuscode":0,
# "AssetManagerId":1,
# "AccountId":449,
# "AssetId":1,
# "ProviderId":1,
# "DepositInfo":"[\"r3e95RwVsLH7yCbnMfyh7SA8FdwUJCB4S2?memo=241452010\"]"
# }
#
depositInfoString = self.safe_string(depositAddress, 'DepositInfo')
depositInfo = json.loads(depositInfoString)
depositInfoLength = len(depositInfo)
lastString = self.safe_string(depositInfo, depositInfoLength - 1)
parts = lastString.split('?memo=')
address = self.safe_string(parts, 0)
tag = self.safe_string(parts, 1)
code = None
if currency is not None:
code = currency['code']
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'network': None,
'info': depositAddress,
}
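    # A worked example of the DepositInfo parsing above, using the documented
    # payload: the JSON-encoded list is decoded, the last entry is taken, and
    # an optional '?memo=' suffix becomes the tag.
    #
    #     '["r3e95RwVsLH7yCbnMfyh7SA8FdwUJCB4S2?memo=241452010"]'
    #     # -> address = 'r3e95RwVsLH7yCbnMfyh7SA8FdwUJCB4S2', tag = '241452010'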
def create_deposit_address(self, code, params={}):
request = {
'GenerateNewKey': True,
}
return self.fetch_deposit_address(code, self.extend(request, params))
def fetch_deposits(self, code=None, since=None, limit=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
currency = None
if code is not None:
currency = self.currency(code)
request = {
'omsId': omsId,
'AccountId': accountId,
}
response = self.privateGetGetDeposits(self.extend(request, params))
#
# [
# {
# "OMSId":1,
# "DepositId":44,
# "AccountId":449,
# "SubAccountId":0,
# "ProductId":4,
# "Amount":200.00000000000000000000000000,
# "LastUpdateTimeStamp":637431291261187806,
# "ProductType":"CryptoCurrency",
# "TicketStatus":"FullyProcessed",
# "DepositInfo":"{}",
# "DepositCode":"ab0e23d5-a9ce-4d94-865f-9ab464fb1de3",
# "TicketNumber":71,
# "NotionalProductId":13,
# "NotionalValue":200.00000000000000000000000000,
# "FeeAmount":0.0000000000000000000000000000,
# },
# ]
#
return self.parse_transactions(response, currency, since, limit)
def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
currency = None
if code is not None:
currency = self.currency(code)
request = {
'omsId': omsId,
'AccountId': accountId,
}
response = self.privateGetGetWithdraws(self.extend(request, params))
#
# [
# {
# "Amount": 0.0,
# "FeeAmount": 0.0,
# "NotionalValue": 0.0,
# "WithdrawId": 0,
# "AssetManagerId": 0,
# "AccountId": 0,
# "AssetId": 0,
# "TemplateForm": "{\"TemplateType\": \"TetherRPCWithdraw\",\"Comment\": \"TestWithdraw\",\"ExternalAddress\": \"ms6C3pKAAr8gRCcnVebs8VRkVrjcvqNYv3\"}",
# "TemplateFormType": "TetherRPCWithdraw",
# "omsId": 0,
# "TicketStatus": 0,
# "TicketNumber": 0,
# "WithdrawTransactionDetails": "",
# "WithdrawType": "",
# "WithdrawCode": "490b4fa3-53fc-44f4-bd29-7e16be86fba3",
# "AssetType": 0,
# "Reaccepted": True,
# "NotionalProductId": 0
# },
# ]
#
return self.parse_transactions(response, currency, since, limit)
def parse_transaction_status_by_type(self, status, type=None):
statusesByType = {
'deposit': {
'New': 'pending', # new ticket awaiting operator review
'AdminProcessing': 'pending', # an admin is looking at the ticket
'Accepted': 'pending', # an admin accepts the ticket
'Rejected': 'rejected', # admin rejects the ticket
'SystemProcessing': 'pending', # automatic processing; an unlikely status for a deposit
'FullyProcessed': 'ok', # the deposit has concluded
'Failed': 'failed', # the deposit has failed for some reason
'Pending': 'pending', # Account Provider has set status to pending
'Confirmed': 'pending', # Account Provider confirms the deposit
'AmlProcessing': 'pending', # anti-money-laundering process underway
'AmlAccepted': 'pending', # anti-money-laundering process successful
'AmlRejected': 'rejected', # deposit did not stand up to anti-money-laundering process
'AmlFailed': 'failed', # anti-money-laundering process failed/did not complete
'LimitsAccepted': 'pending', # deposit meets limits for fiat or crypto asset
'LimitsRejected': 'rejected', # deposit does not meet limits for fiat or crypto asset
},
'withdrawal': {
'New': 'pending', # awaiting operator review
'AdminProcessing': 'pending', # An admin is looking at the ticket
'Accepted': 'pending', # withdrawal will proceed
'Rejected': 'rejected', # admin or automatic rejection
'SystemProcessing': 'pending', # automatic processing underway
'FullyProcessed': 'ok', # the withdrawal has concluded
'Failed': 'failed', # the withdrawal failed for some reason
'Pending': 'pending', # the admin has placed the withdrawal in pending status
'Pending2Fa': 'pending', # user must click 2-factor authentication confirmation link
'AutoAccepted': 'pending', # withdrawal will be automatically processed
'Delayed': 'pending', # waiting for funds to be allocated for the withdrawal
'UserCanceled': 'canceled', # withdraw canceled by user or Superuser
'AdminCanceled': 'canceled', # withdraw canceled by Superuser
'AmlProcessing': 'pending', # anti-money-laundering process underway
'AmlAccepted': 'pending', # anti-money-laundering process complete
'AmlRejected': 'rejected', # withdrawal did not stand up to anti-money-laundering process
'AmlFailed': 'failed', # withdrawal did not complete anti-money-laundering process
'LimitsAccepted': 'pending', # withdrawal meets limits for fiat or crypto asset
'LimitsRejected': 'rejected', # withdrawal does not meet limits for fiat or crypto asset
'Submitted': 'pending', # withdrawal sent to Account Provider; awaiting blockchain confirmation
'Confirmed': 'pending', # Account Provider confirms that withdrawal is on the blockchain
'ManuallyConfirmed': 'pending', # admin has sent withdrawal via wallet or admin function directly; marks ticket as FullyProcessed; debits account
'Confirmed2Fa': 'pending', # user has confirmed withdraw via 2-factor authentication.
},
}
statuses = self.safe_value(statusesByType, type, {})
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# fetchDeposits
#
# {
# "OMSId":1,
# "DepositId":44,
# "AccountId":449,
# "SubAccountId":0,
# "ProductId":4,
# "Amount":200.00000000000000000000000000,
# "LastUpdateTimeStamp":637431291261187806,
# "ProductType":"CryptoCurrency",
# "TicketStatus":"FullyProcessed",
# "DepositInfo":"{}",
# "DepositCode":"ab0e23d5-a9ce-4d94-865f-9ab464fb1de3",
# "TicketNumber":71,
# "NotionalProductId":13,
# "NotionalValue":200.00000000000000000000000000,
# "FeeAmount":0.0000000000000000000000000000,
# }
#
# fetchWithdrawals
#
# {
# "Amount": 0.0,
# "FeeAmount": 0.0,
# "NotionalValue": 0.0,
# "WithdrawId": 0,
# "AssetManagerId": 0,
# "AccountId": 0,
# "AssetId": 0,
# "TemplateForm": "{\"TemplateType\": \"TetherRPCWithdraw\",\"Comment\": \"TestWithdraw\",\"ExternalAddress\": \"ms6C3pKAAr8gRCcnVebs8VRkVrjcvqNYv3\"}",
# "TemplateFormType": "TetherRPCWithdraw",
# "omsId": 0,
# "TicketStatus": 0,
# "TicketNumber": 0,
# "WithdrawTransactionDetails": "",
# "WithdrawType": "",
# "WithdrawCode": "490b4fa3-53fc-44f4-bd29-7e16be86fba3",
# "AssetType": 0,
# "Reaccepted": True,
# "NotionalProductId": 0
# }
#
id = self.safe_string(transaction, 'DepositId')
txid = None
currencyId = self.safe_string(transaction, 'ProductId')
code = self.safe_currency_code(currencyId, currency)
timestamp = None
type = None
if 'DepositId' in transaction:
type = 'deposit'
elif 'WithdrawId' in transaction:
type = 'withdrawal'
templateFormString = self.safe_string(transaction, 'TemplateForm')
address = None
updated = self.safe_integer(transaction, 'LastUpdateTimeStamp')
if templateFormString is not None:
templateForm = json.loads(templateFormString)
address = self.safe_string(templateForm, 'ExternalAddress')
txid = self.safe_string(templateForm, 'TxId')
timestamp = self.safe_integer(templateForm, 'TimeSubmitted')
updated = self.safe_integer(templateForm, 'LastUpdated', updated)
addressTo = address
status = self.parse_transaction_status_by_type(self.safe_string(transaction, 'TicketStatus'), type)
amount = self.safe_number(transaction, 'Amount')
feeCost = self.safe_number(transaction, 'FeeAmount')
fee = None
if feeCost is not None:
fee = {'currency': code, 'cost': feeCost}
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'addressTo': addressTo,
'addressFrom': None,
'tag': None,
'tagTo': None,
'tagFrom': None,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': updated,
'fee': fee,
}
def withdraw(self, code, amount, address, tag=None, params={}):
tag, params = self.handle_withdraw_tag_and_params(tag, params)
        # this method requires login, password and twofa key
sessionToken = self.safe_string(self.options, 'sessionToken')
if sessionToken is None:
raise AuthenticationError(self.id + ' call signIn() method to obtain a session token')
if self.twofa is None:
raise AuthenticationError(self.id + ' withdraw() requires exchange.twofa credentials')
self.check_address(address)
omsId = self.safe_integer(self.options, 'omsId', 1)
self.load_markets()
self.load_accounts()
defaultAccountId = self.safe_integer_2(self.options, 'accountId', 'AccountId', int(self.accounts[0]['id']))
accountId = self.safe_integer_2(params, 'accountId', 'AccountId', defaultAccountId)
params = self.omit(params, ['accountId', 'AccountId'])
currency = self.currency(code)
withdrawTemplateTypesRequest = {
'omsId': omsId,
'AccountId': accountId,
'ProductId': currency['id'],
}
withdrawTemplateTypesResponse = self.privateGetGetWithdrawTemplateTypes(withdrawTemplateTypesRequest)
#
# {
# result: True,
# errormsg: null,
# statuscode: "0",
# TemplateTypes: [
# {AccountProviderId: "14", TemplateName: "ToExternalBitcoinAddress", AccountProviderName: "BitgoRPC-BTC"},
# {AccountProviderId: "20", TemplateName: "ToExternalBitcoinAddress", AccountProviderName: "TrezorBTC"},
# {AccountProviderId: "31", TemplateName: "BTC", AccountProviderName: "BTC Fireblocks 1"}
# ]
# }
#
templateTypes = self.safe_value(withdrawTemplateTypesResponse, 'TemplateTypes', [])
firstTemplateType = self.safe_value(templateTypes, 0)
if firstTemplateType is None:
raise ExchangeError(self.id + ' withdraw() could not find a withdraw template type for ' + currency['code'])
templateName = self.safe_string(firstTemplateType, 'TemplateName')
withdrawTemplateRequest = {
'omsId': omsId,
'AccountId': accountId,
'ProductId': currency['id'],
'TemplateType': templateName,
'AccountProviderId': firstTemplateType['AccountProviderId'],
}
withdrawTemplateResponse = self.privateGetGetWithdrawTemplate(withdrawTemplateRequest)
#
# {
# result: True,
# errormsg: null,
# statuscode: "0",
# Template: "{\"TemplateType\":\"ToExternalBitcoinAddress\",\"Comment\":\"\",\"ExternalAddress\":\"\"}"
# }
#
template = self.safe_string(withdrawTemplateResponse, 'Template')
if template is None:
raise ExchangeError(self.id + ' withdraw() could not find a withdraw template for ' + currency['code'])
withdrawTemplate = json.loads(template)
withdrawTemplate['ExternalAddress'] = address
if tag is not None:
if 'Memo' in withdrawTemplate:
withdrawTemplate['Memo'] = tag
withdrawPayload = {
'omsId': omsId,
'AccountId': accountId,
'ProductId': currency['id'],
'TemplateForm': self.json(withdrawTemplate),
'TemplateType': templateName,
}
withdrawRequest = {
'TfaType': 'Google',
'TFaCode': self.oath(),
'Payload': self.json(withdrawPayload),
}
response = self.privatePostCreateWithdrawTicket(self.deep_extend(withdrawRequest, params))
return {
'info': response,
'id': self.safe_string(response, 'Id'),
}
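    # The withdraw() flow above chains three private calls; a condensed sketch
    # (endpoint names as used in the method, values illustrative):
    #
    #     1. GetWithdrawTemplateTypes(ProductId)  -> pick the first TemplateName
    #     2. GetWithdrawTemplate(TemplateType)    -> JSON template string
    #     3. fill ExternalAddress (and Memo, if the template has one) into the
    #        template, then CreateWithdrawTicket with TfaType='Google' and a
    #        fresh TOTP code from self.oath()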
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api] + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if api == 'public':
if path == 'Authenticate':
auth = self.login + ':' + self.password
auth64 = self.string_to_base64(auth)
headers = {
'Authorization': 'Basic ' + self.decode(auth64),
# 'Content-Type': 'application/json',
}
elif path == 'Authenticate2FA':
pending2faToken = self.safe_string(self.options, 'pending2faToken')
if pending2faToken is not None:
headers = {
'Pending2FaToken': pending2faToken,
# 'Content-Type': 'application/json',
}
query = self.omit(query, 'pending2faToken')
if query:
url += '?' + self.urlencode(query)
elif api == 'private':
self.check_required_credentials()
sessionToken = self.safe_string(self.options, 'sessionToken')
if sessionToken is None:
nonce = str(self.nonce())
auth = nonce + self.uid + self.apiKey
signature = self.hmac(self.encode(auth), self.encode(self.secret))
headers = {
'Nonce': nonce,
'APIKey': self.apiKey,
'Signature': signature,
'UserId': self.uid,
}
else:
headers = {
'APToken': sessionToken,
}
if method == 'POST':
headers['Content-Type'] = 'application/json'
body = self.json(query)
else:
if query:
url += '?' + self.urlencode(query)
return {'url': url, 'method': method, 'body': body, 'headers': headers}
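    # A sketch of the pre-session request signing used above, assuming ccxt's
    # default HMAC-SHA256 hex digest (credential values are placeholders): the
    # signed payload is the concatenation nonce + uid + apiKey.
    #
    #     import hashlib
    #     import hmac
    #     payload = nonce + uid + apiKey
    #     signature = hmac.new(secret.encode(), payload.encode(), hashlib.sha256).hexdigest()
    #     headers = {'Nonce': nonce, 'APIKey': apiKey, 'Signature': signature, 'UserId': uid}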
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if code == 404:
raise AuthenticationError(self.id + ' ' + body)
if response is None:
return
#
# {"status":"Rejected","errormsg":"Not_Enough_Funds","errorcode":101}
# {"result":false,"errormsg":"Server Error","errorcode":102,"detail":null}
#
message = self.safe_string(response, 'errormsg')
if (message is not None) and (message != ''):
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], body, feedback)
raise ExchangeError(feedback)
|
ccxt/ccxt
|
python/ccxt/ndax.py
|
Python
|
mit
| 95,009
|
"""
GitHub methods that are functionally separate from anything Sublime-related.
"""
import re
from collections import namedtuple
from webbrowser import open as open_in_browser
from functools import partial
import sublime
from ..common import interwebs
GitHubRepo = namedtuple("GitHubRepo", ("url", "fqdn", "owner", "repo", "token"))
class FailedGithubRequest(Exception):
pass
def parse_remote(remote):
"""
Given a line of output from `git remote -v`, parse the string and return
an object with original url, FQDN, owner, repo, and the token to use for
this particular FQDN (if available).
"""
if remote.endswith(".git"):
remote = remote[:-4]
if remote.startswith("git@"):
url = remote.replace(":", "/").replace("git@", "http://")
elif remote.startswith("http"):
url = remote
else:
return None
match = re.match(r"https?://([a-zA-Z-\.0-9]+)/([a-zA-Z-\.0-9]+)/([a-zA-Z-\.0-9]+)/?", url)
if not match:
return None
fqdn, owner, repo = match.groups()
savvy_settings = sublime.load_settings("GitSavvy.sublime-settings")
api_tokens = savvy_settings.get("api_tokens")
token = api_tokens and api_tokens.get(fqdn, None) or None
return GitHubRepo(url, fqdn, owner, repo, token)
def open_file_in_browser(rel_path, remote, commit_hash, start_line=None, end_line=None):
"""
Open the URL corresponding to the provided `rel_path` on `remote`.
"""
github_repo = parse_remote(remote)
if not github_repo:
return None
line_numbers = "#L{}-L{}".format(start_line, end_line) if start_line is not None else ""
url = "{repo_url}/blob/{commit_hash}/{path}{lines}".format(
repo_url=github_repo.url,
commit_hash=commit_hash,
path=rel_path,
lines=line_numbers
)
open_in_browser(url)
def get_api_fqdn(github_repo):
"""
Determine if the provided GitHub repo object refers to a GitHub-
    Enterprise instance or to publicly hosted GitHub.com, and
indicate what base FQDN to use for API requests.
"""
if github_repo.fqdn[-10:] == "github.com":
return False, "api.github.com"
return True, github_repo.fqdn
def query_github(api_url_template, github_repo):
"""
    Take a URL template with `owner` and `repo` template variables, plus a
    GitHub repo object. Do a GET for the resulting URL and return the
    response payload if successful; otherwise raise an error.
"""
is_enterprise, fqdn = get_api_fqdn(github_repo)
base_path = "/api/v3" if is_enterprise else ""
path = base_path + api_url_template.format(
owner=github_repo.owner,
repo=github_repo.repo
)
auth = (github_repo.token, "x-oauth-basic") if github_repo.token else None
response = interwebs.get(fqdn, 443, path, https=True, auth=auth)
if response.status < 200 or response.status > 299 or not response.is_json:
raise FailedGithubRequest(response.payload)
return response.payload
get_issues = partial(query_github, "/repos/{owner}/{repo}/issues")
get_contributors = partial(query_github, "/repos/{owner}/{repo}/contributors")
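# Illustrative usage sketch (hypothetical remote; values traced from the
# parsing logic above):
#
#   repo = parse_remote("git@github.com:owner/repo.git")
#   # -> GitHubRepo(url="http://github.com/owner/repo", fqdn="github.com",
#   #               owner="owner", repo="repo", token=None or configured)
#   issues = get_issues(repo)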
|
ypersyntelykos/GitSavvy
|
github/github.py
|
Python
|
mit
| 3,189
|
#This program shows the code used to train a neural network based on the
#Resnet block
#To begin, the required Keras and Numpy libraries are loaded
from keras.models import Model
from keras.models import Sequential
from keras.layers import merge
from keras.layers import normalization
from keras.layers import Flatten
from keras.layers import AveragePooling2D
from keras.layers import Convolution2D
from keras.layers import Input
from keras.layers import Activation
from keras.layers import Dense
from keras.layers import MaxPooling2D
from keras.layers import Dropout
from keras.callbacks import ModelCheckpoint,Callback,ReduceLROnPlateau
import numpy as np
#Since the code is almost the same for all 8 training runs (only the model
#and/or the database changes), a codename is used
codename='mapahsl_resnet'
data_dir='Data/mapahsl/'
#This sets up a log of the learning rate, which is useful to see how many
#times training stalls and whether it is still making progress.
registro_lr=[]
class showlr(Callback):
    def on_train_begin(self, logs={}):
        global registro_lr
        #np.load takes no default-value argument; start a fresh log when no
        #previous record exists on disk
        try:
            registro_lr = np.load(codename + '.npy').tolist()
        except IOError:
            registro_lr = []
    def on_epoch_end(self, batch, logs={}):
        lr = self.model.optimizer.lr.get_value()
        registro_lr.append(lr)
        np.save(codename + '.npy', registro_lr)
        print(lr)
# Load the data needed for training (in this case, the database of HSL maps
#obtained from the images of healthy and diseased cacao) and reshape it
#into a form Keras accepts
#Training data
train1 = np.load(data_dir+'train/negativo/db.npy').tolist()
train=train1.copy()
train2 = np.load(data_dir+'train/positivo/db.npy').tolist()
for i in range(len(train2)):
train.append(train2[i])
train=np.array(train)
train=train.reshape(train.shape[0],1, 45, 64)
train_labels=np.array([0]*len(train1)+[1]*len(train2))
train_labels=train_labels.reshape(train_labels.shape[0],1)
#Validation data
val1 = np.load(data_dir+'validation/negativo/db.npy').tolist()
val=val1.copy()
val2 = np.load(data_dir+'validation/positivo/db.npy').tolist()
for i in range(len(val2)):
val.append(val2[i])
val=np.array(val)
val=val.reshape(val.shape[0],1,45, 64)
val_labels=np.array([0]*len(val1)+[1]*len(val2))
val_labels=val_labels.reshape(val_labels.shape[0],1)
img_height,img_width = train.shape[2],train.shape[3]
#This variable sets the number of target classes; setting it to 1 turns the
#task into binary classification, where 1 marks one class and 0 the other.
#Here, positive (infected) is labeled 1
nb_classes = 1
#Next is the code of the model to be trained, which uses
#the Resnet block
#####################################################################
#Start the model with Sequential (note this object is unused: it is
#overwritten by the functional Model built below)
model = Sequential()
#Prepare the input for a single-channel 45x64-pixel image,
#which is the HSL color map
Input_0=Input(shape=(1,45,64))
#Start with a bank of 64 7x7 filters convolved with the image to extract
#features from the color maps of the cacao image
tronco_conv1=Convolution2D(64, 7, 7,
subsample=(2, 2),
border_mode='same')(Input_0)
#Downsample with 2x2 max pooling to reduce the size of the convolved image
tronco_pool1=MaxPooling2D(pool_size=(2, 2),
strides=[2,2],
border_mode='valid',
dim_ordering='default')(tronco_conv1)
#Now the Resnet block begins
rama1_res_BN1=normalization.BatchNormalization(mode=0,
axis=-1)(tronco_pool1)
rama1_res_activation1=Activation('relu')(rama1_res_BN1)
rama1_dp1=Dropout(0.5)(rama1_res_activation1)
rama1_res_conv1=Convolution2D(64, 3, 3,
subsample=(1, 1),
border_mode='same')(rama1_dp1)
rama1_res_BN2=normalization.BatchNormalization(mode=0,
axis=-1)(rama1_res_conv1)
rama1_res_activation2=Activation('relu')(rama1_res_BN2)
rama1_dp2=Dropout(0.5)(rama1_res_activation2)
rama1_res_conv2=Convolution2D(64, 3, 3,
subsample=(1, 1),
border_mode='same')(rama1_dp2)
#The Resnet block ends by summing the block's input with the
#block's output
reconexion_res_tronco=merge([rama1_res_conv2,tronco_pool1], mode="sum")
#Downsample by averaging over 4x4 windows with stride 4
tronco_pool2=AveragePooling2D(pool_size=(4, 4),
strides=[4,4],
border_mode='valid',
dim_ordering='default')(reconexion_res_tronco)
#*All the dropouts switch neurons off momentarily so the system learns in
#different ways rather than a single one; otherwise it could tend to
#memorize the training samples instead of learning to tell them apart
tronco_dp1=Dropout(0.5)(tronco_pool2)
tronco_flat=Flatten()(tronco_dp1)
#Reduce everything to a single value indicating whether or not the cacao is infected
tronco_fc1=Dense(1,activation="sigmoid")(tronco_flat)
model=Model(input=Input_0,output=tronco_fc1)
#Compile with Keras on top of Theano or Tensorflow; this also defines the
#quantity to minimize during optimization and the metrics to report
#(accuracy, loss, etc.)
model.compile(loss='binary_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
#######################################################################
#Now add training features that are useful in case of crashes and for
#keeping a record of the training run
#This is the path where the model is saved after each epoch, together with
#the callback that saves it
direccion='modelos/'+codename+'/weights.{epoch:04d}-{acc:.4f}-{loss:.4f}-{val_acc:.4f}-{val_loss:.4f}.hdf5'
checkpoint = ModelCheckpoint(direccion,
monitor='val_acc',
verbose=1,
save_best_only=False,
save_weights_only=False,
mode='max')
#This callback reduces the learning rate whenever the loss stops decreasing
reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.9,patience=1, verbose=1, cooldown=0, min_lr=0)
#This callback invokes the class defined above, which stores the learning
#rate value
histlr=showlr()
#This list hands the callbacks above to the training run
callbacks_list = [checkpoint,reduce_lr,histlr]
#Now train the system. batch_size sets the size of the chunk loaded into
#RAM for training; training proceeds chunk by chunk until an epoch is
#complete. nb_epoch sets the number of training epochs.
#callbacks attaches the extra features above.
#verbose shows training progress at the command-line prompt.
#val and train are the data used for validation and training,
#respectively.
model.fit(train, train_labels,
batch_size=8,
nb_epoch=1000,
callbacks=callbacks_list,shuffle=True,
verbose=1, validation_data=(val, val_labels))
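#Illustrative sketch (hypothetical checkpoint name): reloading a saved epoch
#for inference with the same old Keras API used above.
#  from keras.models import load_model
#  m = load_model('modelos/mapahsl_resnet/weights.0099-<...>.hdf5')
#  probs = m.predict(val)               #probability of the 'positive' class
#  labels = (probs > 0.5).astype(int)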
|
a-bacilio/Codigo-de-tesis-descarte
|
mapahsl_resnet.py
|
Python
|
mit
| 7,546
|
from __future__ import absolute_import, unicode_literals
import django
from django import forms
from django.conf import settings
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.widgets import AdminTextareaWidget
from django.template import Context
from django.template.loader import get_template
from django.utils.safestring import mark_safe
__all__ = [
'AdminHStoreWidget'
]
class BaseAdminHStoreWidget(AdminTextareaWidget):
"""
Base admin widget class for default-admin and grappelli-admin widgets
"""
admin_style = 'default'
@property
def media(self):
internal_js = [
"django_hstore/underscore-min.js",
"django_hstore/hstore-widget.js"
]
js = [static("admin/js/%s" % path) for path in internal_js]
return forms.Media(js=js)
def render(self, name, value, attrs=None):
if attrs is None:
attrs = {}
# it's called "original" because it will be replaced by a copy
attrs['class'] = 'hstore-original-textarea'
# get default HTML from AdminTextareaWidget
html = super(BaseAdminHStoreWidget, self).render(name, value, attrs)
# prepare template context
template_context = Context({
'field_name': name,
'STATIC_URL': settings.STATIC_URL,
'use_svg': django.VERSION >= (1, 9), # use svg icons if django >= 1.9
})
# get template object
template = get_template('hstore_%s_widget.html' % self.admin_style)
# render additional html
additional_html = template.render(template_context)
# append additional HTML and mark as safe
html = html + additional_html
html = mark_safe(html)
return html
class DefaultAdminHStoreWidget(BaseAdminHStoreWidget):
"""
Widget that displays the HStore contents
in the default django-admin with a nice interactive UI
"""
admin_style = 'default'
class GrappelliAdminHStoreWidget(BaseAdminHStoreWidget):
"""
Widget that displays the HStore contents
in the django-admin with a nice interactive UI
designed for django-grappelli
"""
admin_style = 'grappelli'
if 'grappelli' in settings.INSTALLED_APPS:
AdminHStoreWidget = GrappelliAdminHStoreWidget
else:
AdminHStoreWidget = DefaultAdminHStoreWidget
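# A minimal usage sketch (assumption: a model storing data in django_hstore's
# DictionaryField); formfield_overrides is standard Django admin machinery.
#
#   from django.contrib import admin
#   from django_hstore import hstore
#
#   class MyModelAdmin(admin.ModelAdmin):
#       formfield_overrides = {
#           hstore.DictionaryField: {'widget': AdminHStoreWidget},
#       }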
|
djangonauts/django-hstore
|
django_hstore/widgets.py
|
Python
|
mit
| 2,389
|
# -*- coding: utf-8 -*-
#
# Guidoc documentation build configuration file, created by
# sphinx-quickstart on Sun Aug 7 16:13:07 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import alabaster
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['alabaster']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Guidoc'
copyright = u'2016, Kevin Thibedeau'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.9.0'
# The full version, including alpha/beta/rc tags.
release = '0.9.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'description': 'Tkinter gui generator',
'show_powered_by': False,
'logo_text_align': 'center',
'font_family': 'Verdana, Geneva, sans-serif',
'github_user': 'kevinpt',
'github_repo': 'guidoc',
'github_button': True
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [alabaster.get_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = 'images/guidoc_icon.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': [
'about.html',
'relations.html',
'localtoc.html',
'projects.html',
'searchbox.html'
],
'index': [
'about.html',
'download.html',
'relations.html',
'localtoc.html',
'projects.html',
'searchbox.html'
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Guidocdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Guidoc.tex', u'Guidoc Documentation',
u'Kevin Thibedeau', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'guidoc', u'Guidoc Documentation',
[u'Kevin Thibedeau'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Guidoc', u'Guidoc Documentation',
u'Kevin Thibedeau', 'Guidoc', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
kevinpt/guidoc
|
doc/conf.py
|
Python
|
mit
| 8,757
|
#!/usr/bin/python3
import os
import sys
import http.server
import socketserver
import socket
import shutil
from base64 import b64encode
from urllib.parse import quote
from os.path import basename, splitext, join, isfile
from collections import defaultdict
from subprocess import run
from distutils.dir_util import copy_tree
from distutils.file_util import copy_file
build_dir = 'build'
source_dir = 'source'
dest_dir = 'built_static'
css_dir = join(build_dir, 'css')
images_dir = join(build_dir, 'images')
class TemporaryTCPServer(socketserver.TCPServer):
def server_bind(self):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(self.server_address)
def serve(port):
os.chdir(dest_dir)
handler = http.server.SimpleHTTPRequestHandler
httpd = TemporaryTCPServer(("", port), handler)
print("[serve] serving on port " + str(port))
httpd.serve_forever()
def clean():
shutil.rmtree(build_dir)
shutil.rmtree(dest_dir)
def build():
copy_tree(source_dir, build_dir, update=1)
make_fallback_images(images_dir)
print('[create] _images.scss ', end='')
save_images_css(images_dir, join(css_dir, '_images.scss'))
print('[ok]')
run_sass(css_dir, join(dest_dir, 'css'))
print('[update] asis ', end='')
copy_tree(join(source_dir, 'asis'), join(dest_dir, 'asis'), update=1)
print('[ok]')
def run_sass(css_source_dir, css_dest_dir):
os.makedirs(css_dest_dir, exist_ok=True)
for (dirpath, dirnames, filenames) in os.walk(css_source_dir):
for f in filenames:
name, ext = splitext(f)
if ext == '.scss' and name[0] != '_':
print("[sass] " + f + ' ', end='')
run([
'sass',
join(css_source_dir, f),
join(css_dest_dir, name + '.css')
], check = True)
print("[ok]")
elif ext == '.css':
print("[copy] " + f + ' ', end='')
copy_file(join(css_source_dir, f), join(css_dest_dir, f), update=1)
print("[ok]")
break
def make_fallback_images(images_dir):
images = find_built_images(images_dir)
for image, files in images.items():
f = files[0]
pngimage = image + '.png'
if pngimage not in files:
print("[create] " + pngimage + ' ', end='')
run([
'convert',
'-background', 'none',
join(images_dir, f),
join(images_dir, pngimage)
], check = True)
print("[ok]")
def images_in_dir(dir):
vectors = []
rasters = []
dumb_rasters = []
lossy = []
for (dirpath, dirnames, filenames) in os.walk(dir):
for f in filenames:
name, ext = splitext(basename(f))
if ext in ['.svg']:
vectors += [f]
if ext in ['.png']:
rasters += [f]
if ext in ['.gif']:
dumb_rasters += [f]
if ext in ['.jpg', '.jpeg']:
lossy += [f]
break
return vectors + rasters + dumb_rasters + lossy
def find_built_images(images_dir):
images = defaultdict(list)
for image in images_in_dir(images_dir):
name, _ = splitext(basename(image))
images[name] += [image]
return dict(images)
def images_to_css(images_dir):
images = find_built_images(images_dir)
csseses = []
for name, files in images.items():
css = '.image-' + name + " {\n"
files_and_extensions = [(f, splitext(f)[1][1:]) for f in files]
for image, ext in [(f, ext) for f, ext in files_and_extensions if ext != 'svg']:
data = raster_data(join(images_dir, image), ext)
css += 'background-image: url(' + data + ");\n"
for svg, ext in [(f, ext) for f, ext in files_and_extensions if ext == 'svg']:
data = xml_data(join(images_dir, svg), ext)
css += 'background-image: url(' + data + "), linear-gradient(transparent, transparent);\n"
css += "}\n"
csseses += [css]
return "\n".join(csseses)
def save_images_css(images_dir, css_file):
with open(css_file, 'w') as f:
f.write(images_to_css(images_dir))
def raster_data(image_filename, ext):
with open(image_filename, 'rb') as f:
data = b64encode(f.read()).decode('utf-8')
return 'data:image/' + ext + ';base64,' + data
def xml_data(image_filename, ext):
with open(image_filename, 'r') as f:
data = quote(f.read())
return 'data:image/' + ext + '+xml;charset=US-ASCII,' + data
def image_data(image_filename):
    _, ext = splitext(image_filename)
    ext = ext[1:]  # splitext keeps the leading dot; drop it for the MIME type
    if ext == 'svg':
        return xml_data(image_filename, ext)
    else:
        return raster_data(image_filename, ext)
if __name__ == '__main__':
try:
arg = sys.argv[1]
except IndexError:
arg = None
if arg == 'build':
build()
elif arg == 'clean':
clean()
elif arg == 'serve':
try:
port = int(sys.argv[2])
except IndexError:
port = 8000
build()
serve(port)
else:
print('please use "build", "clean" or "serve" as a first argument.')
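# Typical invocations, per the dispatch above:
#   python3 generate.py build      # compile sass, inline images, copy assets
#   python3 generate.py serve 8080 # build, then serve built_static/ locally
#   python3 generate.py clean      # remove build/ and built_static/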
|
bitterfly/kuho
|
examples/html_test/static/generate.py
|
Python
|
mit
| 5,311
|
__program_name__ = 'RabbitHole'
__version__ = '1.0.0'
|
LeadPipeSoftware/LeadPipe.RabbitHole
|
RabbitHole/__init__.py
|
Python
|
mit
| 54
|
from utils.testcase import EndpointTestCase
from rest_framework import status
from rest_framework.test import APIClient
from player.models import Room
import sure
class TestRooms(EndpointTestCase):
def test_get(self):
client = APIClient()
response = client.get('/rooms')
response.status_code.should.eql(status.HTTP_200_OK)
response.data.should.have.key('count')
response.data.should.have.key('next')
response.data.should.have.key('previous')
response.data.should.have.key('results')
response.data['results'].should.be.a(list)
response.data['results'].should.have.length_of(1)
self.assertResponseEqualsRoom(response.data['results'][0], Room.objects.all().first(), check_token=False)
|
Amoki/Amoki-Music
|
endpoints/tests/test_rooms.py
|
Python
|
mit
| 772
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import time
import numpy as np
from six.moves import range
import acq4.util.ptime as ptime
from acq4.Manager import logMsg
from acq4.devices.OptomechDevice import OptomechDevice
from acq4.util import Qt
from acq4.util.HelpfulException import HelpfulException
from acq4.util.Mutex import Mutex
from .DeviceGui import ScannerDeviceGui
from .TaskGui import ScannerTaskGui
from ..Device import Device, DeviceTask
from pyqtgraph.debug import Profiler
class Scanner(Device, OptomechDevice):
sigShutterChanged = Qt.Signal()
def __init__(self, dm, config, name):
Device.__init__(self, dm, config, name)
OptomechDevice.__init__(self, dm, config, name)
self.config = config
self.lock = Mutex(Qt.QMutex.Recursive)
self.devGui = None
self.lastRunTime = None
self.calibrationIndex = None
self.targetList = [1.0, {}] ## stores the grids and points used by TaskGui so that they persist
self.currentCommand = [0,0] ## The last requested voltage values (but not necessarily the current voltage applied to the mirrors)
self.currentVoltage = [0, 0]
        self.shutterOpen = True ## whether the virtual shutter is open; when False, the beam is steered to its 'off' position.
if 'offVoltage' in config:
self.setShutterOpen(False)
dm.declareInterface(name, ['scanner'], self)
#def quit(self):
#Device.quit(self)
##if os.path.isfile(self.targetFileName):
##os.delete(self.targetFileName)
def setCommand(self, vals):
"""Requests to set the command output to the mirrors.
If the virtual shutter is closed, then the mirrors are not moved until
the shutter is opened.
"""
with self.lock:
self.currentCommand = vals
if self.getShutterOpen():
self._setVoltage(vals)
else:
logMsg("Virtual shutter closed, not setting mirror position.", msgType='warning')
def setPosition(self, pos, laser):
"""Set the position of the xy mirrors to a point in the image
If the virtual shutter is closed, then the mirrors are not moved until
the shutter is opened.
"""
prof = Profiler('Scanner.setPosition', disabled=True)
with self.lock:
(x, y) = pos
prof.mark()
vals = self.mapToScanner(x, y, laser)
prof.mark()
self.setCommand(vals)
prof.mark()
prof.finish()
def setShutterOpen(self, o):
"""Immediately move mirrors to the 'off' position or back.
This method controls a "virtual" shutter that works by moving the scan
mirrors to a location outside the optical train (defined by the
`offVoltage` option in the scanner's configuration). While the virtual
shutter is closed, all commands to set the mirror voltage are disabled.
When the virtual shutter is opened, the mirrors move to the most recent
position requested.
"""
self.shutterOpen = o
if o:
self._setVoltage(self.getCommand())
else:
shVals = self.getShutterVals()
if shVals is None:
                raise Exception("Scan mirrors are not configured for virtual shuttering; can not close.")
self._setVoltage(shVals)
self.sigShutterChanged.emit()
def getShutterOpen(self):
"""Return whether the virtual shutter is currently open"""
if 'offVoltage' not in self.config:
return True
return self.shutterOpen
def getShutterVals(self):
'''Return the voltage settings required to steer the beam to its 'off' position.'''
return self.config.get('offVoltage', None)
def getCommand(self):
"""Return the last command value that was requested.
This is also the current output voltage to the mirrors, unless:
1) The virtual shutter is closed
2) The current command is outside the allowed limits
3) Someone has called _setVoltage when they should have called setCommand"""
vals = []
with self.lock:
vals = self.currentCommand[:]
return vals
def _setVoltage(self, vals):
'''Immediately sets the voltage value on the mirrors.
        Does not check virtual shutter state; most likely you want to use setCommand instead.'''
with self.lock:
## make sure we have not requested a command outside the allowed limits
(mn, mx) = self.config['commandLimits']
for i in [0,1]:
x = ['XAxis', 'YAxis'][i]
daq = self.config[x]['device']
chan = self.config[x]['channel']
dev = self.dm.getDevice(daq)
clipped = max(mn, min(mx, vals[i]))
dev.setChannelValue(chan, clipped, block=True)
self.currentVoltage = vals
def getVoltage(self):
with self.lock:
return self.currentVoltage
def getDaqName(self):
return self.config['XAxis']['device']
def mapToScanner(self, x, y, laser, opticState=None):
"""Convert global coordinates to voltages required to set scan mirrors
*laser* and *opticState* are used to look up the correct calibration data.
If *opticState* is not given, then the current optical state is used instead.
"""
if opticState is None:
opticState = self.getDeviceStateKey() ## this tells us about objectives, filters, etc
cal = self.getCalibration(laser, opticState)
if cal is None:
raise HelpfulException("The scanner device '%s' is not calibrated for this combination of laser and objective (%s, %s)" % (self.name(), laser, str(opticState)))
## map from global coordinates to parent
parentPos = self.mapGlobalToParent((x,y))
if isinstance(parentPos, Qt.QPointF):
x = parentPos.x()
y = parentPos.y()
else:
x = parentPos[0]
y = parentPos[1]
## map to voltages using calibration
cal = cal['params']
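        ## each axis voltage is modeled as a 2nd-order polynomial of the
        ## parent-frame position: v = c0 + c1*x + c2*y + c3*x**2 + c4*y**2,
        ## with one row of coefficients per axis in the calibration data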
x2 = x**2
y2 = y**2
x1 = cal[0][0] + cal[0][1] * x + cal[0][2] * y + cal[0][3] * x2 + cal[0][4] * y2
y1 = cal[1][0] + cal[1][1] * x + cal[1][2] * y + cal[1][3] * x2 + cal[1][4] * y2
#print "voltage:", x1, y1
return [x1, y1]
def getCalibrationIndex(self):
with self.lock:
if self.calibrationIndex is None:
index = self.readConfigFile('index')
self.calibrationIndex = index
return self.calibrationIndex
def writeCalibrationDefaults(self, state):
with self.lock:
self.writeConfigFile(state, 'defaults')
def loadCalibrationDefaults(self):
with self.lock:
state = self.readConfigFile('defaults')
return state
def writeCalibrationIndex(self, index):
with self.lock:
self.writeConfigFile(index, 'index')
self.calibrationIndex = index
def getCalibration(self, laser, opticState=None):
with self.lock:
index = self.getCalibrationIndex()
if opticState is None:
opticState = self.getDeviceStateKey() ## this tells us about objectives, filters, etc
if laser in index:
index1 = index[laser]
else:
logMsg("Warning: No calibration found for laser %s" % laser, msgType='warning')
return None
if opticState in index1:
index2 = index1[opticState]
else:
logMsg("Warning: No calibration found for state: %s" % opticState, msgType='warning')
return None
return index2.copy()
def storeCameraConfig(self, camera):
"""Store the configuration to be used when calibrating this camera"""
camDev = self.dm.getDevice(camera)
params = camDev.listParams()
params = [p for p in params if params[p][1] and params[p][2]] ## Select only readable and writable parameters
state = camDev.getParams(params)
self.writeConfigFile(state, camera+'Config.cfg')
def getCameraConfig(self, camera):
return self.readConfigFile(camera+'Config.cfg')
def createTask(self, cmd, parentTask):
with self.lock:
return ScannerTask(self, cmd, parentTask)
def taskInterface(self, taskRunner):
with self.lock:
return ScannerTaskGui(self, taskRunner)
def deviceInterface(self, win):
with self.lock:
return ScannerDeviceGui(self, win)
class ScannerTask(DeviceTask):
"""
Options for Scanner task:
position: (x,y) A calibrated position (in real physical coordinates) to set
before starting the task. Requires 'camera' and 'laser' are
also specified.
command: (x,y) Voltage values to set before starting the task.
This option overrides 'position'.
xPosition: Array of x positions. (requires yPosition)
yPosition: Array of y positions. (requires xPosition)
xCommand: Array of x voltages. Overrides x/yPosition.
    yCommand: Array of y voltages. Overrides x/yPosition.
camera: The camera to use for calibrated positions
laser: The laser to use for calibrated positions
simulateShutter: auto-generate position commands such that the mirrors are
in 'off' position except when laser is active
program: A list of high-level directives for generating position commands
"""
def __init__(self, dev, cmd, parentTask):
DeviceTask.__init__(self, dev, cmd, parentTask)
self.cmd = cmd
self.daqTasks = []
self.spotSize = None
# We use this flag to exit from the sleep loop in start() in case the
# task is aborted during that time.
self.aborted = False
self.abortLock = Mutex(recursive=True)
def getConfigOrder(self):
deps = []
if self.cmd.get('simulateShutter', False):
deps.append(self.cmd['laser'])
if self.cmd.get('simulateShutter', False) or 'program' in self.cmd:
deps.append(self.dev.getDaqName())
return ([],deps)
def configure(self):
prof = Profiler('ScannerTask.configure', disabled=True)
with self.dev.lock:
prof.mark('got lock')
## If shuttering is requested, make sure the (virtual) shutter is closed now
if self.cmd.get('simulateShutter', False):
self.dev.setShutterOpen(False)
## Set position of mirrors now
if 'command' in self.cmd:
self.dev.setCommand(self.cmd['command'])
prof.mark('set command')
elif 'position' in self.cmd: ## 'command' overrides 'position'
#print " set position:", self.cmd['position']
self.dev.setPosition(self.cmd['position'], self.cmd['laser'])
prof.mark('set pos')
## record spot size from calibration data
if 'laser' in self.cmd:
cal = self.dev.getCalibration(self.cmd['laser'])
if cal is None:
raise Exception("Scanner is not calibrated for: %s, %s" % (self.cmd['laser'], self.dev.getDeviceStateKey()))
self.spotSize = cal['spot'][1]
prof.mark('getSpotSize')
## If position arrays are given, translate into voltages
if 'xPosition' in self.cmd or 'yPosition' in self.cmd:
if 'xPosition' not in self.cmd or 'yPosition' not in self.cmd:
raise Exception('xPosition and yPosition must be given together or not at all.')
self.cmd['xCommand'], self.cmd['yCommand'] = self.dev.mapToScanner(self.cmd['xPosition'], self.cmd['yPosition'], self.cmd['laser'])
prof.mark('position arrays')
# Deprecated - this should be done by the task creator instead.
# The 'program' key is now ignored as meta-data.
# elif 'program' in self.cmd:
# self.generateProgramArrays(self.cmd)
# prof.mark('program')
## If shuttering is requested, generate proper arrays and shutter the laser now
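            ## NOTE: `tasks` below is not defined in this scope (configure()
            ## no longer receives it), so requesting simulateShutter would
            ## raise a NameError here.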
if self.cmd.get('simulateShutter', False):
self.generateShutterArrays(tasks[self.cmd['laser']], self.cmd['duration'])
prof.mark('shutter')
prof.finish()
def generateShutterArrays(self, laserTask, duration):
"""In the absence of a shutter, use this to direct the beam 'off-screen' when shutter would normally be closed."""
##get waveform from laser
laser = laserTask.cmd['QSwitch']['command']
offPos = self.dev.getShutterVals()
if 'xCommand' not in self.cmd: ## If no command was specified, then we just use the current command values whenever the shutter is open
x, y = self.dev.getCommand()
self.cmd['xCommand'] = np.empty(len(laser), dtype=float)
self.cmd['yCommand'] = np.empty(len(laser), dtype=float)
self.cmd['xCommand'][:] = x
self.cmd['yCommand'][:] = y
## Find all regions where the laser is activated, make sure the shutter opens 10ms before each
shutter = np.zeros(len(laser), dtype=bool)
dif = laser[1:] - laser[:-1]
ons = np.argwhere(dif==1)[:,0]
offs = np.argwhere(dif==-1)[:,0]
dt = duration / len(laser)
npts = int(10e-3 / dt)
ons -= npts
mask = np.zeros(len(laser), dtype=bool)
for i in range(len(ons)):
on = max(0, ons[i])
mask[on:offs[i]] = True
self.cmd['xCommand'][~mask] = offPos[0]
self.cmd['yCommand'][~mask] = offPos[1]
# Deprecated - this should be done by the task creator instead.
# def generateProgramArrays(self, command):
# arr = ScanProgram.generateVoltageArray(self.dev, command)
# self.cmd['xCommand'] = arr[0] ## arrays of voltage values
# self.cmd['yCommand'] = arr[1]
def createChannels(self, daqTask):
## make sure we have not requested a command outside the allowed limits
(mn, mx) = self.dev.config['commandLimits']
self.daqTasks = []
with self.dev.lock:
## If buffered waveforms are requested in the command, configure them here.
for cmdName, channel in [('xCommand', 'XAxis'), ('yCommand', 'YAxis')]:
#cmdName = axis[0]
#channel = axis[1]
if cmdName not in self.cmd:
continue
#print 'adding channel1: ', channel
chConf = self.dev.config[channel]
#if chConf[0] != daqTask.devName():
if chConf['device'] != daqTask.devName():
continue
#print 'adding channel2: ', channel
daqTask.addChannel(chConf['channel'], 'ao')
self.daqTasks.append(daqTask) ## remember task so we can stop it later on
clipped = np.clip(self.cmd[cmdName], mn, mx)
daqTask.setWaveform(chConf['channel'], clipped)
def stop(self, abort=False):
if abort:
with self.abortLock:
print("Abort!")
self.aborted = True
with self.dev.lock:
for t in self.daqTasks:
t.stop(abort=abort)
self.dev.lastRunTime = ptime.time()
def start(self):
#print "start"
with self.dev.lock:
lastRunTime = self.dev.lastRunTime
if lastRunTime is None:
#print " no wait"
return
# Task specifies that we have a minimum wait from the end of the previous
# task to the start of this one. This is used in photostimulation experiments
# that require variable downtime depending on the proximity of subsequent
# stimulations.
if 'minWaitTime' in self.cmd:
while True:
now = ptime.time()
wait = min(0.1, self.cmd['minWaitTime'] - (now - lastRunTime))
if wait <= 0:
break
with self.abortLock:
if self.aborted:
return
time.sleep(wait)
def getResult(self):
#result = {}
#for k in ['position', 'command']:
#if k in self.cmd:
#result[k] = self.cmd[k]
result = self.cmd.copy()
if self.spotSize is not None:
result['spotSize'] = self.spotSize
### These arrays stick around and cause memory errors if we don't get rid of them.
## For some reason, the top-level task command dict is not being collected (refcount=2, but gc.get_referrers=[])
## So until that issue is solved, we need to make sure that extra data is cleaned out.
if 'xCommand' in self.cmd:
self.cmd['xCommand'] = "deleted in ScannerTask.getResult()"
if 'yCommand' in self.cmd:
self.cmd['yCommand'] = "deleted in ScannerTask.getResult()"
return result
def storeResult(self, dirHandle):
result = self.getResult()
dirHandle.setInfo({self.dev.name(): result})
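# Illustrative sketch (hypothetical device names/values) of a command dict
# consumed by ScannerTask, following the options in its class docstring:
#
#   cmd = {
#       'position': (1.2e-3, -0.4e-3),  # calibrated global coordinates
#       'camera': 'Camera',
#       'laser': 'Laser-UV',
#       'minWaitTime': 0.5,             # seconds to wait after previous task
#   }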
|
acq4/acq4
|
acq4/devices/Scanner/Scanner.py
|
Python
|
mit
| 18,048
|
# -*- coding: utf-8 -*-
import os
from pyhammer.tasks.taskbase import TaskBase
from pyhammer.utils import execProg
class VsTestTask(TaskBase):
"""Cs Project Build Step"""
def __init__( self, csProjectPath ):
super(VsTestTask, self).__init__()
self.command = """vstest.console.exe \"%s\"""" % ( csProjectPath )
self.csProjectPath = csProjectPath
def do( self ):
self.reporter.message( self.command )
self.reporter.message( self.csProjectPath )
self.reporter.message( "BUILD CS PROJECT: %s" % self.csProjectPath )
return execProg( self.command, self.reporter, os.path.dirname(self.csProjectPath) ) == 0
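# Illustrative usage sketch (assumes TaskBase supplies a reporter with a
# `message` method, as the calls above imply; the path is hypothetical):
#   task = VsTestTask(r'C:\src\MyTests\MyTests.csproj')
#   ok = task.do()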
|
webbers/pyhammer
|
pyhammer/tasks/helpers/vstesttask.py
|
Python
|
mit
| 680
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('dashboards', '0002_auto_20150615_0916'),
('users', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='owners',
field=models.ManyToManyField(to='users.User'),
preserve_default=True,
),
]
|
alphagov/stagecraft
|
stagecraft/apps/dashboards/migrations/0003_dashboard_owners.py
|
Python
|
mit
| 484
|
import gpioRap as gpioRap
import RPi.GPIO as GPIO
import subprocess
import time
import random
#Create GpioRap class using BCM pin numbers
gpioRapper = gpioRap.GpioRap(GPIO.BCM)
#Create LEDs attached to BCM pins 4 and 17 (white) and 21 and 22 (red)
white1 = gpioRapper.createLED(4)
white2 = gpioRapper.createLED(17)
red1 = gpioRapper.createLED(21)
red2 = gpioRapper.createLED(22)
# Define GPIO to use on Pi
GPIO_PIR = 24
# Set pir pin as input
GPIO.setup(GPIO_PIR,GPIO.IN)
try:
Current_State = 0
Previous_State = 0
# Loop until PIR output is 0
while GPIO.input(GPIO_PIR)==1:
Current_State = 0
redeyecounter = 0
#Loop until exception (ctrl c)
while True:
# Read PIR state
Current_State = GPIO.input(GPIO_PIR)
if Current_State==1 and Previous_State==0:
# PIR is triggered
print " Motion detected!"
# turn on red and white lights
red1.on()
red2.on()
white1.on()
white2.on()
# play random sound
soundno = random.randint(1,6)
subprocess.call(["mplayer","/home/pi/dev/pumpkin/"+str(soundno)+".m4a"], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Record previous state
Previous_State=1
#elif Current_State==1 and Previous_State==1:
elif Current_State==0 and Previous_State==1:
# PIR has returned to ready state
print " Ready"
# turn off red and white lights
red1.off()
red2.off()
white1.off()
white2.off()
Previous_State=0
elif Current_State==0 and Previous_State==0:
      #in steady state, increment the red-eye flash counter
redeyecounter+=1
#every 5 seconds (ish) of steady state, flash red eyes
if redeyecounter == 500:
redeyecounter = 0
for count in range(0,3):
red1.on()
red2.on()
time.sleep(0.1)
red1.off()
red2.off()
time.sleep(0.1)
# Wait for 10 milliseconds
time.sleep(0.01)
except KeyboardInterrupt:
print "Stopped"
finally:
#Cleanup
gpioRapper.cleanup()
|
martinohanlon/pumpkinpi
|
pumpkinpi.py
|
Python
|
mit
| 2,293
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pyvows testing engine
# https://github.com/heynemann/pyvows
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2013 Richard Lupton r.lupton@gmail.com
from pyvows import Vows, expect
from pyvows.reporting import VowsDefaultReporter
from pyvows.runner.abc import VowsTopicError
from StringIO import StringIO
# These tests check that the reporting, which happens after all tests
# have run, correctly shows the errors raised in topic functions.
@Vows.batch
class ErrorReporting(Vows.Context):
class TracebackOfTopicError:
def setup(self):
# The eval_context() method of the result object is called by
# the reporter to decide if a context was successful or
# not. Here we are testing the reporting of errors, so provide
# a mock result which always says it has failed.
class MockResult:
def eval_context(self, context):
return False
self.reporter = VowsDefaultReporter(MockResult(), 0)
# Patch the print_traceback() method to just record its
# arguments.
self.print_traceback_args = None
def print_traceback(*args, **kwargs):
self.print_traceback_args = args
self.reporter.print_traceback = print_traceback
class AContextWithATopicError:
def topic(self):
# Simulate a context whose topic() function raised an error
mock_exc_info = ('type', 'value', 'traceback')
context = {
'contexts': [],
'error': VowsTopicError('topic', mock_exc_info),
'filename': '/path/to/vows.py',
'name': 'TestContext',
'tests': [],
'topic_elapsed': 0
}
return context
def reporter_should_call_print_traceback_with_the_exception(self, context):
self.parent.print_traceback_args = None
self.parent.reporter.print_context('TestContext', context, file=StringIO())
expect(self.parent.print_traceback_args).to_equal(('type', 'value', 'traceback'))
class ASuccessfulContext:
def topic(self):
# Simulate a context whose topic() didn't raise an error
context = {
'contexts': [],
'error': None,
'filename': '/path/to/vows.py',
'name': 'TestContext',
'tests': [],
'topic_elapsed': 0
}
return context
def reporter_should_not_call_print_traceback(self, context):
self.parent.print_traceback_args = None
self.parent.reporter.print_context('TestContext', context, file=StringIO())
expect(self.parent.print_traceback_args).to_equal(None)
|
marcelometal/pyvows
|
tests/reporting/error_reporting_vows.py
|
Python
|
mit
| 3,044
|
# (c) 2017 Gregor Mitscha-Baude
import numpy as np
from nanopores import user_params, user_param
import nanopores.models.pughpore as pugh
from folders import fields
ddata = {2: dict(name="Dpugh", Nmax=1e5, dim=2, r=0.11, h=1.0),
3: dict(name="Dpugh", Nmax=2e6, dim=3, r=0.11, h=2.0)}
physp = dict(
bV = -0.08,
Qmol = 5.,
bulkcon = 1000.,
dnaqsdamp = 0.5882,
)
default = {
2: dict(physp, dim=2, h=.75, Nmax=1e5, diffusivity_data=ddata[2]),
3: dict(physp, dim=3, h=1.25, Nmax=7e5, diffusivity_data=ddata[3],
stokesiter=True)}
dim = user_param(dim=3)
params = user_params(default[dim])
x0 = user_param(x0=[0.,0.,0.])
cache = user_param(cache=True)
result = pugh.F_explicit([x0], name="pugh_vsc_test", cache=cache, **params)
print result
print result["J"]
|
mitschabaude/nanopores
|
scripts/forcefield/ff_one.py
|
Python
|
mit
| 808
|
import rupes
import twitter
import time
import logging
def main():
# Start logging
logging.basicConfig(filename="/homec/organis2/rupes_murdoch/rupes.log", level='DEBUG')
# Connect to API
consumer_key = 'F1aosrucfBbYnJwZLfUrQLxh9'
consumer_secret = open("/homec/organis2/rupes_murdoch/private/consumer-secret.txt").read().strip()
access_key = '3008197577-8GDVTaizZa1k3vP9KfXSybO7FZfyOmUR8TTn0Re'
access_secret = open("/homec/organis2/rupes_murdoch/private/access-token-secret.txt").read().strip()
api = twitter.Api(consumer_key=consumer_key, consumer_secret=consumer_secret,
access_token_key=access_key, access_token_secret=access_secret)
# Get since_id
since_ref = open("/homec/organis2/rupes_murdoch/since_ref.txt", "r")
since_id = since_ref.read().strip()
since_id = long(since_id) if since_id != 'None' else None
since_ref.close()
logging.info("Starting script with since_id=%s" % str(since_id))
returncount = 200
statuses = api.GetUserTimeline(screen_name='rupertmurdoch', count=returncount,
since_id=since_id, trim_user=True, exclude_replies=True)
if len(statuses) == 0:
# logging.debug("No new statuses")
return
# Create a Deruped tweet
deruped = None
tid = None
tweet = None
# Iterate backwards until a valid tweet appears
for s in reversed(statuses):
tweet = s.text
tid = s.id
deruped = rupes.derupe(tweet)
if deruped:
break
# Post tweet
if deruped:
api.PostUpdate(status=deruped, in_reply_to_status_id = tid)
logging.info("Posted \"%s\" from tweet \"%s\"" % (deruped, tweet))
else:
logging.info("Skipping Tweet \"%s\"" % (tweet))
# Save ID of last tweet dealt with
since_ref = open("/homec/organis2/rupes_murdoch/since_ref.txt", "w")
since_ref.write(str(tid))
since_ref.close()
if __name__=='__main__':
main()
|
organisciak/rupe_bot
|
rupe_bot.py
|
Python
|
mit
| 2,010
|
from ..Security import *
from ..Resources import *
from ...Library import *
def getAccessToken(refreshToken, preferredServerName=None):
#Decode and validate refreshToken
credentials = Crypt.getUsernameAndPasswordFromToken(refreshToken)
if not credentials:
return response.makeError(constants.ERROR_USER_REFRESH_TOKEN_INVALID, "Invalid Refresh token")
u = User.getUserByUsername(credentials['username'])
#Lookup server information and check if server exists
if not preferredServerName:
preferredServerName = CloudletServer.getBestCloudletServerCandidate()
else:
#Check if allocation is possible
preferredServer = CloudletServer.getCloudletServerByDomainName(preferredServerName)
if not preferredServer:
return response.makeError(constants.ERRROR_CLOUDLET_SERVER_NO_SUCH_NAME, description="Cloudlet server does not exist")
if not CloudletServer.isAllocationPossible(preferredServer):
return response.makeError(constants.ERRROR_CLOUDLET_SERVER_ALLOCATION_NOT_POSSIBLE, description="Allocation in cloudlet server not possible")
#Check if user has a disk image created
if not u.diskImage:
#Allocate a diskImage
print 'Disk Image had not been created'
appServer = preferredServer.getBestApplicationServerCandidate()
u.addDiskImage(UserDisk.createImage(appServer))
#TODO: Command Application Server to create a disk Image
else:
#Check if disk image resides in the cloudlet server
if not any(any(location==tServer.key() for location in u.diskImage.locations) for tServer in preferredServer.getApplicationServers()):
print "Disk Image not found in cloudlet server"
CloudletServer.shiftDiskImageToCloudletServer(u.diskImage,preferredServer)
#Get server key and return access token
return response.makeResponse({'accessToken':
Crypt.getAccessToken(
credentials['username'],
credentials['password'],
preferredServer.secretKey,
preferredServer.serverIV),
'IPAddress': preferredServer.IPAddress})
|
KJSCE-C12/VDOC
|
Source/Cloud/VDOC/Depends/Models/UserModel/AccessToken.py
|
Python
|
mit
| 1,997
|
#!/usr/bin/env python -B
class binary:
def transform(self,input,params={}):
return ' '.join(format(ord(x), 'b') for x in input)
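# Example: binary().transform('AB') == '1000001 1000010'
# (format(ord(x), 'b') emits no leading zeros, so widths vary per character)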
|
Alevsk/stringTransformer
|
representations/binary.py
|
Python
|
mit
| 131
|
from setuptools import setup
setup(
name="slackerr",
version="0.0.0",
url="https://github.com/olanmatt/slackerr",
author="Matt Olan",
author_email="hello@olanmatt.com",
description="Pipe directly to Slack from your shell",
long_description=open('README.md').read(),
py_modules=['slackerr'],
entry_points={
'console_scripts': ['slackerr = slackerr:slackerr']
},
install_requires=['slackclient']
)
|
olanmatt/slackerr
|
setup.py
|
Python
|
mit
| 449
|
import os
import sys
import re
import os.path
from setuptools import setup, find_packages, Extension
from distutils.errors import CCompilerError, DistutilsExecError, DistutilsPlatformError
# define the version string inside the package; see:
# https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
VERSIONFILE = "abel/_version.py"
verstrline = open(VERSIONFILE, "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
version = mo.group(1)
else:
raise RuntimeError("Unable to find version string in %s." % (VERSIONFILE,))
# try to import numpy and Cython to build Cython extensions:
try:
import numpy as np
from Cython.Distutils import build_ext
import Cython.Compiler.Options
Cython.Compiler.Options.annotate = False
_cython_installed = True
except ImportError:
_cython_installed = False
build_ext = object # avoid a syntax error in TryBuildExt
setup_args = {}
print('='*80)
print('Warning: Cython extensions will not be built as Cython is not installed!\n'\
' This means that the abel.direct C implementation will not be available.')
print('='*80)
if _cython_installed: # if Cython is installed, we will try to build direct-C
if sys.platform == 'win32':
extra_compile_args = ['/Ox', '/fp:fast']
libraries = []
else:
extra_compile_args = ['-Ofast']
libraries = ["m"]
# Optional compilation of Cython modules adapted from
# https://github.com/bsmurphy/PyKrige which was itself
# adapted from a StackOverflow post
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
class TryBuildExt(build_ext):
"""Class to build the direct-C extensions."""
def build_extensions(self):
"""Try to build the direct-C extension."""
try:
build_ext.build_extensions(self)
except ext_errors:
print("**************************************************")
print("WARNING: Cython extensions failed to build (used in abel.direct).\n"
"Typical reasons for this problem are:\n"
" - a C compiler is not installed or not found\n"
" - issues using mingw compiler on Windows 64bit (experimental support for now)\n"
"This only means that the abel.direct C implementation will not be available.\n")
print("**************************************************")
if os.environ.get('CI'):
# running on Travis CI or Appveyor CI
if sys.platform == 'win32' and sys.version_info < (3, 0):
# Cython extensions are not built on Appveyor (Win)
# for PY2.7. See PR #185
pass
else:
raise
else:
# regular install, Cython extensions won't be compiled
pass
except:
raise
ext_modules = [
Extension("abel.lib.direct",
[os.path.join("abel", "lib", "direct.pyx")],
include_dirs=[np.get_include()],
libraries=libraries,
extra_compile_args=extra_compile_args)]
setup_args = {'cmdclass': {'build_ext': TryBuildExt},
'include_dirs': [np.get_include()],
'ext_modules': ext_modules}
with open('README.rst') as file:
long_description = file.read()
setup(name='PyAbel',
version=version,
description='A Python package for forward and inverse Abel transforms',
author='The PyAbel Team',
url='https://github.com/PyAbel/PyAbel',
license='MIT',
packages=find_packages(),
install_requires=["numpy >= 1.16", # last for Python 2
"setuptools >= 44.0", # last for Python 2
"scipy >= 1.2", # oldest tested
"six >= 1.10.0"],
package_data={'abel': ['tests/data/*']},
long_description=long_description,
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
'Topic :: Scientific/Engineering :: Medical Science Apps.',
'Topic :: Software Development :: Libraries :: Python Modules',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
**setup_args
)
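# Typical source builds (standard setuptools workflow, nothing PyAbel-specific):
#   pip install .                        # install, building the direct-C
#                                        # Cython extension when a compiler
#                                        # and Cython are available
#   python setup.py build_ext --inplace  # compile extensions in the tree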
|
PyAbel/PyAbel
|
setup.py
|
Python
|
mit
| 5,505
|
balance = float(raw_input("Enter the outstanding balance on your credit card:"))
annual_interest_rate = float(raw_input("Enter the annual credit card interest rate as a decimal:"))
minimum_monthly_pmt = float(raw_input("Enter the minimum monthly payment as a decimal:"))
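# A minimal sketch of the computation these inputs usually feed (assuming the
# standard month-by-month simulation; names below are illustrative):
#   monthly_rate = annual_interest_rate / 12.0
#   total_paid = 0
#   for month in range(1, 13):
#       payment = minimum_monthly_pmt * balance
#       balance = (balance - payment) * (1 + monthly_rate)
#       total_paid += payment
#   print 'Total paid: %.2f' % total_paid
#   print 'Remaining balance: %.2f' % balance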
|
royshouvik/6.00SC
|
Unit 1/ps3/ps1a.py
|
Python
|
mit
| 269
|
#!/usr/bin/env python
# -*- coding:utf-8 mode:python; tab-width:4; indent-tabs-mode:nil; py-indent-offset:4 -*-
##
"""
test_local
~~~~~~~~~~~~~~
Test data export functions
"""
import sys
import unittest
from src.EMSL_local import EMSL_local
class LocalTestCase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_gamess_us_am_pass(self):
#GAMESS-US angular momentum check passes for max am <= G
el = EMSL_local(fmt="gamess-us", debug=False)
el.get_basis("pCs-3", ["Cl"])
self.assertFalse(el.am_too_large)
        self.assertEqual("G", el.max_am)
def test_gamess_us_am_fail(self):
#GAMESS-US angular momentum check fails for max am <= I
el = EMSL_local(fmt="gamess-us", debug=False)
el.get_basis("cc-pv6z", ["Cl"])
self.assertFalse(el.am_too_large)
self.assertEqual("I", el.max_am)
def test_gamess_us_am_L(self):
#GAMESS-US angular momentum check special case for SP "L" basis
el = EMSL_local(fmt="gamess-us", debug=False)
el.get_basis("6-31G", ["Cl"])
self.assertFalse(el.am_too_large)
self.assertEqual("P", el.max_am)
def test_nwchem_am_pass(self):
#NWChem angular momentum check passes for max am <= I
el = EMSL_local(fmt="nwchem", debug=False)
el.get_basis("cc-pv6z", ["Ne"])
self.assertFalse(el.am_too_large)
self.assertEqual("I", el.max_am)
def test_nwchem_am_fail(self):
#NWchem angular momentum check fails for max am > I
el = EMSL_local(fmt="nwchem", debug=False)
el.get_basis("cc-pv8z", ["Ne"])
self.assertTrue(el.am_too_large)
self.assertEqual("L", el.max_am)
def test_gaussian94_am(self):
#There is no upper am limit for this format! But verify max_am
el = EMSL_local(fmt="g94", debug=False)
el.get_basis("cc-pv8z", ["Ne"])
self.assertFalse(el.am_too_large)
self.assertEqual("L", el.max_am)
def test_cartesian_or_spherical(self):
#most basis sets treated as using spherical (pure) functions, while
#a few older ones are treated as using cartesians
expected_cartesian = ["3-21G", "4-31G", "6-31G", "6-31G*",
"6-31G**", "DZ (Dunning)", "DZP (Dunning)"]
el = EMSL_local(fmt="nwchem", debug=False)
assigned = {}
names = el.get_available_basis_sets()
names.sort()
for name, description in names:
fn_type = el.spherical_or_cartesian(name)
try:
assigned[fn_type].append(name)
except KeyError:
assigned[fn_type] = [name]
self.assertEqual(expected_cartesian, assigned["cartesian"])
def test_get_available_basis_sets_name_filter(self):
#test get_available_basis_sets with basis set name filtering
el = EMSL_local(fmt="nwchem")
basis_names = ["cc-pVTZ", "fakename", "6-31G"]
expected = [("6-31G", "VDZ Valence Double Zeta: 2 Funct.'s/Valence AO"),
("cc-pVTZ", "VTZ2P Valence Triple Zeta + Polarization on All Atoms")]
names = el.get_available_basis_sets(allowed_basis_names=basis_names)
self.assertEqual(expected, names)
def test_get_available_basis_sets_element_filter(self):
#test get_available_basis_sets with element filtering
el = EMSL_local(fmt="nwchem")
elements = ["Hg", "Pb", "Th"]
expected = [("ANO-RCC",
"full ANO-RCC basis, reduce to get MB, VDZP, VTZP and VQZP quality"),
("CRENBL ECP", "N/A"),
("CRENBL ECP-number2", "1D UNCONTR Uncontracted"),
("SARC-DKH", "N/A"),
("SARC-ZORA",
"Segmented all-electron relativistically contracted basis sets for ZORA"),
("Stuttgart RLC ECP", "DZ Double Zeta Basis Set designed for an ECP"),
("Stuttgart RLC ECP-number2", "N/A"),
("UGBS", "UGBS basis by de Castro and Jorge")]
names = el.get_available_basis_sets(elements=elements)
self.assertEqual(expected, names)
def test_get_available_basis_sets_combined_filter(self):
#test get_available_basis_sets with element + basis set name filtering
el = EMSL_local(fmt="nwchem")
elements = ["Hg", "Pb", "Th"]
basis_names = ["SARC-ZORA", "ANO-RCC"]
expected = [("ANO-RCC",
"full ANO-RCC basis, reduce to get MB, VDZP, VTZP and VQZP quality"),
("SARC-ZORA",
"Segmented all-electron relativistically contracted basis sets for ZORA")]
names = el.get_available_basis_sets(elements=elements,
allowed_basis_names=basis_names)
self.assertEqual(expected, names)
def test_get_available_basis_sets_fs_basic(self):
#test that we can get the name of supplemental basis set stored on
#the file system
el = EMSL_local(fmt="nwchem")
expected = [("g3mp2large", "db/nwchem/g3mp2large.nwbas"),
("g3largexp", "db/nwchem/g3largexp.nwbas")]
names = el.get_available_basis_sets_fs("nwchem")
self.assertEqual(sorted(expected), sorted(names))
def test_get_available_elements_fs(self):
#test that we can get elements from supplemental basis set stored on
#the file system
el = EMSL_local(fmt="nwchem")
expected = ['H', 'He', 'Li', 'Be', 'B', 'C', 'N', 'O', 'F', 'Ne',
'Na', 'Mg', 'Al', 'Si', 'P', 'S', 'Cl', 'Ar', 'K', 'Ca',
'Ga', 'Ge', 'As', 'Se', 'Br', 'Kr']
names = el.get_available_elements_fs("nwchem", "g3mp2large")
self.assertEqual(expected, names)
def test_get_available_elements(self):
#verify element listing from standard db-stored basis set
el = EMSL_local(fmt="nwchem")
expected = ['H', 'He', 'Li', 'Be', 'B', 'C', 'N', 'O', 'F', 'Ne',
'Na', 'Mg', 'Al', 'Si', 'P', 'S', 'Cl', 'Ar', 'K', 'Ca',
'Ga', 'Ge', 'As', 'Se', 'Br', 'Kr', "I"]
names = el.get_available_elements("6-311G")
self.assertEqual(expected, names)
def test_get_available_elements_fused(self):
#element data is automatically supplemented via basis sets stored
#on the file system
el = EMSL_local(fmt="nwchem")
expected = ['H', 'He', 'Li', 'Be', 'B', 'C', 'N', 'O', 'F', 'Ne',
'Na', 'Mg', 'Al', 'Si', 'P', 'S', 'Cl', 'Ar', 'K', 'Ca',
'Ga', 'Ge', 'As', 'Se', 'Br', 'Kr']
names = el.get_available_elements("g3mp2large")
self.assertEqual(expected, names)
def test_get_available_basis_sets_fs_name_filter(self):
#test that we can get the name of supplemental basis set stored on
#the file system -- with name filtering
el = EMSL_local(fmt="nwchem")
expected = []
basis_names = ["g3mp2gigantic"]
names = el.get_available_basis_sets_fs("nwchem", allowed_basis_names=basis_names)
self.assertEqual(expected, names)
def test_get_available_basis_sets_fs_element_filter(self):
#test that we can get the name of supplemental basis set stored on
#the file system -- with element filtering
el = EMSL_local(fmt="nwchem")
expected1 = [("g3mp2large", "db/nwchem/g3mp2large.nwbas")]
expected2 = []
elements1 = ["Ar", "Kr"]
elements2 = ["Kr", "Xe"]
#g3mp2large has krypton parameters but not xenon, so second retrieval
#should produce nothing
names1 = el.get_available_basis_sets_fs("nwchem", elements=elements1)
self.assertEqual(expected1, names1)
names2 = el.get_available_basis_sets_fs("nwchem", elements=elements2)
self.assertEqual(expected2, names2)
def test_get_available_basis_sets_supplemented(self):
#test get_available_basis_sets supplemented with basis data from
#the file system
el = EMSL_local(fmt="nwchem")
basis_names = ["cc-pVTZ", "g3mp2large", "6-31G"]
expected = [("6-31G", "VDZ Valence Double Zeta: 2 Funct.'s/Valence AO"),
("cc-pVTZ", "VTZ2P Valence Triple Zeta + Polarization on All Atoms"),
("g3mp2large", "db/nwchem/g3mp2large.nwbas")]
names = el.get_available_basis_sets(allowed_basis_names=basis_names)
self.assertEqual(expected, names)
def test_get_basis_supplemented(self):
#test that basis set data gets auto-translated when there
#is no "native" version available but an .nwbas to convert
el = EMSL_local(fmt="g94")
elements = ["Li", "Cl"]
result = el.get_basis("g3mp2large", elements=elements)
self.assertTrue("BASIS SET reformatted" in result[0])
def runSuite(cls, verbosity=2, name=None):
"""Run a unit test suite and return status code.
@param cls: class that the suite should be constructed from
@type cls : class
@param verbosity: verbosity level to pass to test runner
@type verbosity : int
@param name: name of a specific test in the suite to run
@type name : str
@return: unit test run status code
@rtype : int
"""
try:
if name:
suite = unittest.makeSuite(cls, name)
else:
suite = unittest.makeSuite(cls)
return unittest.TextTestRunner(verbosity=verbosity).run(suite)
except SystemExit:
pass
def runTests():
try:
test_name = sys.argv[1]
except IndexError:
test_name = None
if test_name:
result = runSuite(LocalTestCase, name = test_name)
else:
result = runSuite(LocalTestCase)
return result
if __name__ == '__main__':
runTests()
|
mattbernst/ebsel
|
tests/test_local.py
|
Python
|
mit
| 9,944
|
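runTests() in the test_local.py record above forwards an optional command-line argument as a single test name. A hedged sketch of driving runSuite directly (the import path is an assumption):

# Run one named test, mirroring `python tests/test_local.py test_gamess_us_am_pass`.
from test_local import runSuite, LocalTestCase  # import path is an assumption

result = runSuite(LocalTestCase, verbosity=2, name="test_gamess_us_am_pass")
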
"""
Define forms for Polls application.
"""
from django import forms
from .models import Poll, PollAnswer
__author__ = "pesusieni999"
__copyright__ = "Copyright 2017, MtG website Project"
__credits__ = ["pesusieni999"]
__license__ = "MIT"
__version__ = "0.0.1"
__maintainer__ = "pesusieni999"
__email__ = "pesusieni999@gmail.com"
__status__ = "Development"
class PollForm(forms.ModelForm):
"""
Form for creating and modifying Polls.
"""
class Meta:
model = Poll
POLL_TYPE_CHOICES = ((True, 'Single selection'), (False, 'Multiple selection'))
POLL_PUBLIC_CHOICES = ((True, 'Public'), (False, 'Private'))
fields = ('name', 'question', 'single_selection', 'public')
labels = {
'single_selection': 'Vote type',
'public': 'Vote publicity'
}
widgets = {
'name': forms.TextInput(attrs={'class': 'form-control form-field'}),
'question': forms.Textarea(attrs={'class': 'form-control form-field'}),
'public': forms.Select(
choices=POLL_PUBLIC_CHOICES,
attrs={'class': 'form-control form-field'}
),
'single_selection': forms.Select(
choices=POLL_TYPE_CHOICES,
attrs={'class': 'form-control form-field'}
),
# 'end_time': forms.DateTimeInput(attrs={'class': 'datetimepicker'}),
}
class PollOptionForm(forms.Form):
"""
Form for defining poll options (that can be voted).
"""
text = forms.CharField(
max_length=128,
label="Option",
widget=forms.TextInput(attrs={'class': 'form-control form-field'})
)
class VoteForm(forms.Form):
"""
Form for voting.
"""
def __init__(self, *args, **kwargs):
# TODO: Add initialization with polls options.
# TODO: Need poll options, and if poll allows multiple choices.
super(VoteForm, self).__init__(*args, **kwargs)
|
pesusieni999/mtgwebsite
|
pollapp/forms.py
|
Python
|
mit
| 1,986
|
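A minimal sketch of PollForm from the record above used in a Django view; the URL name and template path are placeholders, not part of the app:

from django.shortcuts import redirect, render
from pollapp.forms import PollForm

def create_poll(request):
    # Bind POST data when present; an unbound form renders empty fields.
    form = PollForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        form.save()  # ModelForm.save() persists a new Poll
        return redirect('poll_list')  # hypothetical URL name
    return render(request, 'pollapp/poll_form.html', {'form': form})  # hypothetical template
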
from unittest import TestCase
from unittest.mock import MagicMock
from project_checker.checker.buildservice import Target
class ServiceStub:
pass
class TargetTest(TestCase):
def test_branches_creation_no_branches(self):
service = MagicMock()
target = Target('name', service)
target.report_result()
service.run.assert_called_with('name')
|
micwypych/github-cmake-project-checker
|
project_checker/tests/targettest.py
|
Python
|
mit
| 381
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class PacketCaptureResult(Model):
"""Information about packet capture session.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar name: Name of the packet capture session.
:vartype name: str
:ivar id: ID of the packet capture operation.
:vartype id: str
:param etag: Default value: "A unique read-only string that changes
whenever the resource is updated." .
:type etag: str
:param target: Required. The ID of the targeted resource, only VM is
currently supported.
:type target: str
:param bytes_to_capture_per_packet: Number of bytes captured per packet,
the remaining bytes are truncated. Default value: 0 .
:type bytes_to_capture_per_packet: int
:param total_bytes_per_session: Maximum size of the capture output.
Default value: 1073741824 .
:type total_bytes_per_session: int
:param time_limit_in_seconds: Maximum duration of the capture session in
seconds. Default value: 18000 .
:type time_limit_in_seconds: int
:param storage_location: Required.
:type storage_location:
~azure.mgmt.network.v2017_10_01.models.PacketCaptureStorageLocation
:param filters:
:type filters:
list[~azure.mgmt.network.v2017_10_01.models.PacketCaptureFilter]
:param provisioning_state: The provisioning state of the packet capture
session. Possible values include: 'Succeeded', 'Updating', 'Deleting',
'Failed'
:type provisioning_state: str or
~azure.mgmt.network.v2017_10_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'target': {'required': True},
'storage_location': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'target': {'key': 'properties.target', 'type': 'str'},
'bytes_to_capture_per_packet': {'key': 'properties.bytesToCapturePerPacket', 'type': 'int'},
'total_bytes_per_session': {'key': 'properties.totalBytesPerSession', 'type': 'int'},
'time_limit_in_seconds': {'key': 'properties.timeLimitInSeconds', 'type': 'int'},
'storage_location': {'key': 'properties.storageLocation', 'type': 'PacketCaptureStorageLocation'},
'filters': {'key': 'properties.filters', 'type': '[PacketCaptureFilter]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(self, *, target: str, storage_location, etag: str="A unique read-only string that changes whenever the resource is updated.", bytes_to_capture_per_packet: int=0, total_bytes_per_session: int=1073741824, time_limit_in_seconds: int=18000, filters=None, provisioning_state=None, **kwargs) -> None:
super(PacketCaptureResult, self).__init__(**kwargs)
self.name = None
self.id = None
self.etag = etag
self.target = target
self.bytes_to_capture_per_packet = bytes_to_capture_per_packet
self.total_bytes_per_session = total_bytes_per_session
self.time_limit_in_seconds = time_limit_in_seconds
self.storage_location = storage_location
self.filters = filters
self.provisioning_state = provisioning_state
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_10_01/models/packet_capture_result_py3.py
|
Python
|
mit
| 3,946
|
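A hedged construction example for the keyword-only model above; the resource ID and file path are placeholders, and PacketCaptureStorageLocation is assumed to be importable from the same models package:

from azure.mgmt.network.v2017_10_01.models import (
    PacketCaptureResult, PacketCaptureStorageLocation)

storage = PacketCaptureStorageLocation(file_path='D:\\captures\\session1.cap')  # placeholder path
capture = PacketCaptureResult(
    target='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
           'Microsoft.Compute/virtualMachines/<vm>',  # placeholder VM resource ID
    storage_location=storage,
    time_limit_in_seconds=600)  # override the 18000-second default
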
#! /usr/bin/python
#Change the value with your raster filename here
raster_file = 'aspect15m.tif'
output_file = 'classified.tiff'
classification_values = [67.5,292.5,360] #The interval values to classify
classification_output_values = [1,0,1] #The value assigned to each interval
from osgeo import gdal
from osgeo.gdalconst import *
import numpy
import time
# Set the timer
startTime = time.time()
#Opening the raster file
dataset = gdal.Open(raster_file, GA_ReadOnly )
#Reading the raster properties
band = dataset.GetRasterBand(1)
projectionfrom = dataset.GetProjection()
geotransform = dataset.GetGeoTransform()
xsize = band.XSize
ysize = band.YSize
datatype = band.DataType
# Get the block size
rows = dataset.RasterYSize
cols = dataset.RasterXSize
blockSizes = band.GetBlockSize()
xBSize = blockSizes[0]
yBSize = blockSizes[1]
originX = geotransform[0]
originY = geotransform[3]
pixelWidth = geotransform[1]
pixelHeight = geotransform[5]
# Setup output
driver = gdal.GetDriverByName('GTiff')
outDataset = driver.Create(output_file, xsize, ysize, 1, GDT_Float32)
outBand = outDataset.GetRasterBand(1)
#Reading the raster values using numpy
data = band.ReadAsArray(0, 0, cols, rows)
data = data.astype(numpy.float)
# use numpy select to reclassify the image; conditions are evaluated in
# order, so the nodata test must come first, and the interval bounds and
# output values come from the configuration variables declared at the top
outData = numpy.select([data == -9999,
                        (data >= 0) & (data <= classification_values[0]),
                        (data > classification_values[0]) & (data <= classification_values[1]),
                        data > classification_values[1]],
                       [-9999] + classification_output_values)
outBand.WriteArray(outData, 0, 0)
outBand.FlushCache()
outBand.SetNoDataValue(-9999)
# # Georefrence the image and set the projection
outDataset.SetGeoTransform(geotransform)
outDataset.SetProjection(projectionfrom)
outBand = None
output_dataset = None
endTime = time.time()
print 'The script took ' + str(endTime - startTime) + ' seconds.'
|
andy3092/Miscellaneous-Scripts
|
reclassify/reclassify.py
|
Python
|
mit
| 1,788
|
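numpy.select takes the first matching condition per element, which is why the nodata test leads the condition list in reclassify.py. A small standalone demonstration with illustrative values:

import numpy

data = numpy.array([-9999.0, 30.0, 100.0, 300.0])
out = numpy.select(
    [data == -9999, (data >= 0) & (data <= 67.5),
     (data > 67.5) & (data <= 292.5), data > 292.5],
    [-9999, 1, 0, 1])
print(out)  # -> [-9999, 1, 0, 1]
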
from proteus import Context
# TODO - add weak/strong and direct/indirect solver options
##########################################################
# The default options for the context are set below.
# nLevels - this must be an integer value >= 1
# name - this must be a string
# numeric_scheme: currently implemented -
# TH - Taylor Hood
# C0P1C0P1 - stabilized continuous linear velocity and pressure
# parallel - this must be a boolean value
##########################################################
opts = Context.Options([
("nLevels", 1, "number of levels of uniform refinement"),
("name", "drivenCavityNSETrial", "output data file name"),
("numeric_scheme", "THQuads", "specify the numerical scheme being used"),
("useWeakBoundaryConditions", True, "Flag: False-Strong boundary conditions, True-Weak boundary conditions"),
("solveIteratively", True, "Flag: False-Direct Solver, True-Iterative Solver"),
("solveInParallel", False,"Flag: False-Serial Solve, True-Parallel Solve"),
("schur_solver", "Qp", "Flag: Options - selfp, Qp, PCD, LSC"),
("RE_through_bdy",True,"Flag: is the reynolds number enforced through boundary condition?")
])
# TODO - add asserts and documentation explaining options
nLevels = opts.nLevels
name = opts.name
numeric_scheme = opts.numeric_scheme
useWeakBoundaryConditions = opts.useWeakBoundaryConditions
solveIteratively = opts.solveIteratively
solveInParallel = opts.solveInParallel
schur_solver = opts.schur_solver
RE_through_bdy = opts.RE_through_bdy
|
erdc/proteus
|
proteus/tests/solver_tests/import_modules/nseDrivenCavity_2d.py
|
Python
|
mit
| 1,561
|
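A sketch of the option checks the TODO in the record above calls for; the accepted value lists merge the documented choices with the defaults used in the file and are otherwise assumptions:

assert isinstance(nLevels, int) and nLevels >= 1, "nLevels must be an integer >= 1"
assert isinstance(name, str), "name must be a string"
assert numeric_scheme in ("TH", "C0P1C0P1", "THQuads"), "unknown numeric scheme"
assert schur_solver in ("selfp", "Qp", "PCD", "LSC"), "unknown Schur complement solver"
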
"""
Read the CSV file and get a distinct count of records for each value in the
parameter_name field, show the results on the console and save them to a
single csv file.
Expected output:
+--------------------+------+
| parameter_name| count|
+--------------------+------+
| Benzene|469375|
|Chromium VI (TSP) LC| 948|
| 12-Dichloropropane|198818|
|Acrolein - Unveri...| 91268|
| Tetrachloroethylene|245750|
| Lead PM10 STP| 57289|
| Nickel PM10 STP| 46572|
| Nickel (TSP) STP|119639|
| Chromium PM10 STP| 45137|
| Ethylene dibromide|195493|
| Mercury PM10 STP| 12528|
| Chromium (TSP) STP|119733|
|cis-13-Dichloropr...|182596|
| Vinyl chloride|222726|
| Trichloroethylene|237081|
| Lead PM2.5 LC|600171|
| Chloroform|245517|
| Manganese PM10 STP| 47263|
| Acetaldehyde|169218|
| Beryllium PM2.5 LC| 1565|
+--------------------+------+
"""
# Import what we need from PySpark
from pyspark.sql import SparkSession
# Create a spark session
spark = SparkSession.builder.appName("Group By Count Distinct").getOrCreate()
df = spark.read.csv("/tmp/data/epa_hap_daily_summary.csv",
header=True,
mode="DROPMALFORMED")
# Get the distinct count of records grouped by parameter_name
df_new = df.groupby("parameter_name").count().distinct()
# Show the results
df_new.show()
# Write the dataframe to a single csv file
# This isn't performant on huge data, but it is a good example for a single node
df_new.coalesce(1).write.format('csv').save("/tmp/data/parameter_names_and_counts")
|
rdempsey/pyspark-for-data-processing
|
scripts/csv_group_by_count_distinct.py
|
Python
|
mit
| 1,607
|
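As an optional follow-up to the script above (not part of it), the grouped counts can be sorted before display with the standard DataFrame orderBy API:

# Continues from the script above: show parameter names by descending count.
df_new.orderBy("count", ascending=False).show(20, truncate=False)
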
import sys
import os
import fudge
import textwrap
from fudge.patcher import patch_object
from mock import patch, ANY
from nose.plugins.attrib import attr
from tests import fixture_path
from virtstrap import constants
from virtstrap.testing import *
from virtstrap.locker import site_packages_dir
from virtstrap_local.commands.install import InstallCommand
PACKAGES_DIR = fixture_path('packages')
def test_initialize_command():
command = InstallCommand()
class SpecialFake(fudge.Fake):
def __iter__(self):
return iter(self.__iter_patch__())
def fake_requirements(names):
requirements = []
for name in names:
fake = fudge.Fake(name)
fake.has_attr(name=name)
fake.provides('to_pip_str').returns(name)
requirements.append(fake)
return requirements
def test_special_fake_works():
fake = SpecialFake()
fake.expects('__iter_patch__').returns(fake_requirements(['test1']))
looped = False
for req in fake:
looped = True
assert req.name == 'test1'
assert looped, 'Did not iterate correctly'
def pip_requirements(project):
from subprocess import Popen, PIPE
proc = Popen([project.bin_path('pip'), 'freeze'], stdout=PIPE)
stdout, stderr = proc.communicate()
return stdout.splitlines()
class TestInstallCommand(object):
def setup(self):
self.command = InstallCommand()
self.pip_index_ctx = ContextUser(temp_pip_index(PACKAGES_DIR))
self.index_url = self.pip_index_ctx.enter()
self.temp_proj_ctx = ContextUser(temp_project())
self.project, self.options, self.temp_dir = self.temp_proj_ctx.enter()
# FIXME need to do something about the temp_proj_ctx
self.options.upgrade = False
self.old_sys_path = sys.path
site_packages = site_packages_dir(base_dir=self.project.env_path())
sys.path.append(site_packages)
def teardown(self):
sys.path = self.old_sys_path
self.temp_proj_ctx.exit()
self.pip_index_ctx.exit()
@attr('slow')
@patch('subprocess.call')
def test_run_pip_install_no_upgrade(self, mock_call):
mock_call.return_value = 0
self.command.run_pip_install(self.project, 'somepath', False)
mock_call.assert_called_with([ANY, 'install', '-r', 'somepath'])
@attr('slow')
@patch('subprocess.call')
def test_run_pip_install_with_upgrade(self, mock_call):
mock_call.return_value = 0
self.command.run_pip_install(self.project, 'somepath', True)
mock_call.assert_called_with([ANY, 'install', '-r', 'somepath', '--upgrade'])
@attr('slow')
@hide_subprocess_stdout
@fudge.test
def test_run_install(self):
# Install should process the requirements
# and create a requirement_set
# The requirement_set is then turned into a
# string and written to a requirements file to be
# used by pip and install the requirements
project = self.project
options = self.options
temp_dir = self.temp_dir
fake_req_set = SpecialFake()
(project.__patch_method__('process_config_section')
.returns(fake_req_set))
fake_req_set_iter = fake_requirements(['test1'])
fake_req_set.expects('__iter_patch__').returns(fake_req_set_iter)
self.command.run(project, options)
requirements_file = open(constants.VE_LOCK_FILENAME)
requirements_data = requirements_file.read()
assert 'test1==0.2' in requirements_data
@attr('slow')
@hide_subprocess_stdout
@fudge.test
def test_run_install_with_upgrade(self):
# Install should process the requirements
# and create a requirement_set
# The requirement_set is then turned into a
# string and written to a requirements file to be
# used by pip and install the requirements
project = self.project
options = self.options
options.upgrade = True
temp_dir = self.temp_dir
fake_req_set = SpecialFake()
(project.__patch_method__('process_config_section')
.returns(fake_req_set))
fake_req_set_iter = fake_requirements(['test1'])
fake_req_set.expects('__iter_patch__').returns(fake_req_set_iter)
self.command.run(project, options)
requirements_file = open(constants.VE_LOCK_FILENAME)
requirements_data = requirements_file.read()
assert 'test1==0.2' in requirements_data
@attr('slow')
@hide_subprocess_stdout
@fudge.test
def test_run_install_multiple_packages(self):
project = self.project
options = self.options
temp_dir = self.temp_dir
fake_req_set = SpecialFake()
(project.__patch_method__('process_config_section')
.returns(fake_req_set))
fake_req_set_iter = fake_requirements(['test1', 'test5'])
fake_req_set.expects('__iter_patch__').returns(fake_req_set_iter)
self.command.run(project, options)
requirements_file = open(constants.VE_LOCK_FILENAME)
requirements_data = requirements_file.read()
expected_packages = ['test1==0.2', 'test2==1.3',
'test3==0.10.1', 'test5==1.4.3']
for package in expected_packages:
assert package in requirements_data
@attr('slow')
@hide_subprocess_stdout
@fudge.test
def test_run_install_using_lock_file(self):
project = self.project
options = self.options
temp_dir = self.temp_dir
fake_req_set = SpecialFake()
(project.__patch_method__('process_config_section')
.returns(fake_req_set))
lock_file_path = project.path(constants.VE_LOCK_FILENAME)
lock_file = open(lock_file_path, 'w')
lock_file.write(textwrap.dedent("""
test1 (test1==0.1)
test2 (test2==1.3)
test3 (test3==0.10.1)
test5 (test5==1.4.3)
"""))
lock_file.close()
fake_req_set_iter = fake_requirements(['test1', 'test5'])
fake_req_set.expects('__iter_patch__').returns(fake_req_set_iter)
self.command.run(project, options)
pip_packages = pip_requirements(project)
expected_packages = ['test1==0.1', 'test2==1.3',
'test3==0.10.1', 'test5==1.4.3']
for package in expected_packages:
assert package in pip_packages
@attr('slow')
@hide_subprocess_stdout
@fudge.test
def test_run_install_using_lock_file_repeated_deps(self):
# This is a regression test
project = self.project
options = self.options
temp_dir = self.temp_dir
fake_req_set = SpecialFake()
(project.__patch_method__('process_config_section')
.returns(fake_req_set))
lock_file_path = project.path(constants.VE_LOCK_FILENAME)
lock_file = open(lock_file_path, 'w')
lock_file.write(textwrap.dedent("""
test1 (test1==0.1)
test5 (test5==1.4.3)
test2 (test2==1.3)
test3 (test3==0.10.1)
test2 (test2==1.3)
test3 (test3==0.10.1)
"""))
lock_file.close()
fake_req_set_iter = fake_requirements(['test1', 'test5',
'test2', 'test3'])
fake_req_set.expects('__iter_patch__').returns(fake_req_set_iter)
self.command.run(project, options)
pip_packages = pip_requirements(project)
expected_packages = ['test1==0.1', 'test2==1.3',
'test3==0.10.1', 'test5==1.4.3']
for package in expected_packages:
assert package in pip_packages
class TestInstallCommandOutsideOfDirectory(object):
def setup(self):
self.command = InstallCommand()
self.pip_index_ctx = ContextUser(temp_pip_index(PACKAGES_DIR))
self.index_url = self.pip_index_ctx.enter()
self.temp_proj_ctx = ContextUser(temp_project(False))
self.project, self.options, self.temp_dir = self.temp_proj_ctx.enter()
self.options.upgrade = False
self.old_sys_path = sys.path
site_packages = site_packages_dir(base_dir=self.project.env_path())
sys.path.append(site_packages)
def teardown(self):
sys.path = self.old_sys_path
self.temp_proj_ctx.exit()
self.pip_index_ctx.exit()
@attr('slow')
@hide_subprocess_stdout
@fudge.test
def test_run_install_using_lock_file_outside(self):
project = self.project
options = self.options
temp_dir = self.temp_dir
fake_req_set = SpecialFake()
(project.__patch_method__('process_config_section')
.returns(fake_req_set))
lock_file_path = project.path(constants.VE_LOCK_FILENAME)
lock_file = open(lock_file_path, 'w')
lock_file.write(textwrap.dedent("""
test1 (test1==0.1)
test2 (test2==1.3)
test3 (test3==0.10.1)
test5 (test5==1.4.3)
"""))
lock_file.close()
fake_req_set_iter = fake_requirements(['test1', 'test5'])
fake_req_set.expects('__iter_patch__').returns(fake_req_set_iter)
self.command.run(project, options)
pip_packages = pip_requirements(project)
expected_packages = ['test1==0.1', 'test2==1.3',
'test3==0.10.1', 'test5==1.4.3']
for package in expected_packages:
assert package in pip_packages
|
ravenac95/virtstrap
|
virtstrap-local/tests/test_install_command.py
|
Python
|
mit
| 9,478
|
"""
__graph_MT_post__SwcToEcuMapping.py___________________________________________________________
Automatically generated graphical appearance ---> MODIFY DIRECTLY WITH CAUTION
_____________________________________________________________________________________
"""
import tkFont
from graphEntity import *
from GraphicalForm import *
from ATOM3Constraint import *
class graph_MT_post__SwcToEcuMapping(graphEntity):
def __init__(self, x, y, semObject = None):
self.semanticObject = semObject
self.sizeX, self.sizeY = 145, 80
graphEntity.__init__(self, x, y)
self.ChangesAtRunTime = 0
self.constraintList = []
if self.semanticObject: atribs = self.semanticObject.attributesToDraw()
else: atribs = None
self.graphForms = []
self.imageDict = self.getImageDict()
def DrawObject(self, drawing, showGG = 0):
self.dc = drawing
if showGG and self.semanticObject: self.drawGGLabel(drawing)
h = drawing.create_oval(self.translate([160.0, 60.0, 160.0, 60.0]), tags = (self.tag, 'connector'), outline = '', fill = '' )
self.connectors.append( h )
h = drawing.create_rectangle(self.translate([21.0, 19.0, 164.0, 97.0]), tags = self.tag, stipple = '', width = 1, outline = 'black', fill = 'skyblue1')
self.gf8 = GraphicalForm(drawing, h, "gf8")
self.graphForms.append(self.gf8)
font = tkFont.Font( family='Arial', size=12, weight='normal', slant='roman', underline=0)
h = drawing.create_text(self.translate([93.0, 35.0, 93.0, 12.0])[:2], tags = self.tag, font=font, fill = 'black', anchor = 'center', text = 'MT_post__SwcToEcuMapping', width = '0', justify= 'left', stipple='' )
self.gf33 = GraphicalForm(drawing, h, 'gf33', fontObject=font)
self.graphForms.append(self.gf33)
helv12 = tkFont.Font ( family="Helvetica", size=12, weight="bold" )
h = drawing.create_text(self.translate([-3, -3]), font=helv12,
tags = (self.tag, self.semanticObject.getClass()),
fill = "black",
text=self.semanticObject.MT_label__.toString())
self.attr_display["MT_label__"] = h
self.gf_label = GraphicalForm(drawing, h, 'gf_label', fontObject=helv12)
self.graphForms.append(self.gf_label)
def postCondition( self, actionID, * params):
return None
def preCondition( self, actionID, * params):
return None
def getImageDict( self ):
imageDict = dict()
return imageDict
new_class = graph_MT_post__SwcToEcuMapping
|
levilucio/SyVOLT
|
GM2AUTOSAR_MM/graph_MT_post__SwcToEcuMapping.py
|
Python
|
mit
| 2,655
|
from django.template.defaultfilters import stringfilter
from django import template
register = template.Library()
@register.filter(name='replace')
@stringfilter
def replace(value, arg):
return value.replace(arg, '')
|
leandromaia/fleet_control
|
resources/templatetags/resources_extras.py
|
Python
|
mit
| 222
|
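A minimal sketch of the replace filter in use from Python; it assumes Django settings are configured and the templatetag module is registered under an installed app:

from django.template import Template, Context

t = Template('{% load resources_extras %}{{ plate|replace:"-" }}')
print(t.render(Context({'plate': 'ABC-1234'})))  # -> ABC1234
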
import sys
from openmc import Filter, Nuclide
from openmc.filter import _FILTER_TYPES
import openmc.checkvalue as cv
if sys.version_info[0] >= 3:
basestring = str
# Acceptable tally arithmetic binary operations
_TALLY_ARITHMETIC_OPS = ['+', '-', '*', '/', '^']
class CrossScore(object):
"""A special-purpose tally score used to encapsulate all combinations of two
tally's scores as an outer product for tally arithmetic.
Parameters
----------
left_score : str or CrossScore
The left score in the outer product
right_score : str or CrossScore
The right score in the outer product
binary_op : str
The tally arithmetic binary operator (e.g., '+', '-', etc.) used to
combine two tally's scores with this CrossNuclide
Attributes
----------
left_score : str or CrossScore
The left score in the outer product
right_score : str or CrossScore
The right score in the outer product
binary_op : str
The tally arithmetic binary operator (e.g., '+', '-', etc.) used to
combine two tally's scores with this CrossScore
"""
def __init__(self, left_score=None, right_score=None, binary_op=None):
self._left_score = None
self._right_score = None
self._binary_op = None
if left_score is not None:
self.left_score = left_score
if right_score is not None:
self.right_score = right_score
if binary_op is not None:
self.binary_op = binary_op
def __hash__(self):
return hash(repr(self))
def __eq__(self, other):
return str(other) == str(self)
def __ne__(self, other):
return not self == other
def __deepcopy__(self, memo):
existing = memo.get(id(self))
# If this is the first time we have tried to copy this object, create a copy
if existing is None:
clone = type(self).__new__(type(self))
clone._left_score = self.left_score
clone._right_score = self.right_score
clone._binary_op = self.binary_op
memo[id(self)] = clone
return clone
# If this object has been copied before, return the first copy made
else:
return existing
def __repr__(self):
string = '({0} {1} {2})'.format(self.left_score,
self.binary_op, self.right_score)
return string
@property
def left_score(self):
return self._left_score
@property
def right_score(self):
return self._right_score
@property
def binary_op(self):
return self._binary_op
@left_score.setter
def left_score(self, left_score):
cv.check_type('left_score', left_score, (basestring, CrossScore))
self._left_score = left_score
@right_score.setter
def right_score(self, right_score):
cv.check_type('right_score', right_score, (basestring, CrossScore))
self._right_score = right_score
@binary_op.setter
def binary_op(self, binary_op):
cv.check_type('binary_op', binary_op, (basestring, CrossScore))
cv.check_value('binary_op', binary_op, _TALLY_ARITHMETIC_OPS)
self._binary_op = binary_op
class CrossNuclide(object):
"""A special-purpose nuclide used to encapsulate all combinations of two
tally's nuclides as an outer product for tally arithmetic.
Parameters
----------
left_nuclide : Nuclide or CrossNuclide
The left nuclide in the outer product
right_nuclide : Nuclide or CrossNuclide
The right nuclide in the outer product
binary_op : str
The tally arithmetic binary operator (e.g., '+', '-', etc.) used to
combine two tally's nuclides with this CrossNuclide
Attributes
----------
left_nuclide : Nuclide or CrossNuclide
The left nuclide in the outer product
right_nuclide : Nuclide or CrossNuclide
The right nuclide in the outer product
binary_op : str
The tally arithmetic binary operator (e.g., '+', '-', etc.) used to
combine two tally's nuclides with this CrossNuclide
"""
def __init__(self, left_nuclide=None, right_nuclide=None, binary_op=None):
self._left_nuclide = None
self._right_nuclide = None
self._binary_op = None
if left_nuclide is not None:
self.left_nuclide = left_nuclide
if right_nuclide is not None:
self.right_nuclide = right_nuclide
if binary_op is not None:
self.binary_op = binary_op
def __hash__(self):
return hash(repr(self))
def __eq__(self, other):
return str(other) == str(self)
def __ne__(self, other):
return not self == other
def __deepcopy__(self, memo):
existing = memo.get(id(self))
# If this is the first time we have tried to copy this object, create a copy
if existing is None:
clone = type(self).__new__(type(self))
clone._left_nuclide = self.left_nuclide
clone._right_nuclide = self.right_nuclide
clone._binary_op = self.binary_op
memo[id(self)] = clone
return clone
# If this object has been copied before, return the first copy made
else:
return existing
def __repr__(self):
string = ''
# If the Summary was linked, the left nuclide is a Nuclide object
if isinstance(self.left_nuclide, Nuclide):
string += '(' + self.left_nuclide.name
# If the Summary was not linked, the left nuclide is the ZAID
else:
string += '(' + str(self.left_nuclide)
string += ' ' + self.binary_op + ' '
# If the Summary was linked, the right nuclide is a Nuclide object
if isinstance(self.right_nuclide, Nuclide):
string += self.right_nuclide.name + ')'
# If the Summary was not linked, the right nuclide is the ZAID
else:
string += str(self.right_nuclide) + ')'
return string
@property
def left_nuclide(self):
return self._left_nuclide
@property
def right_nuclide(self):
return self._right_nuclide
@property
def binary_op(self):
return self._binary_op
@left_nuclide.setter
def left_nuclide(self, left_nuclide):
cv.check_type('left_nuclide', left_nuclide, (Nuclide, CrossNuclide))
self._left_nuclide = left_nuclide
@right_nuclide.setter
def right_nuclide(self, right_nuclide):
cv.check_type('right_nuclide', right_nuclide, (Nuclide, CrossNuclide))
self._right_nuclide = right_nuclide
@binary_op.setter
def binary_op(self, binary_op):
cv.check_type('binary_op', binary_op, basestring)
cv.check_value('binary_op', binary_op, _TALLY_ARITHMETIC_OPS)
self._binary_op = binary_op
class CrossFilter(object):
"""A special-purpose filter used to encapsulate all combinations of two
tally's filter bins as an outer product for tally arithmetic.
Parameters
----------
left_filter : Filter or CrossFilter
The left filter in the outer product
right_filter : Filter or CrossFilter
The right filter in the outer product
binary_op : str
The tally arithmetic binary operator (e.g., '+', '-', etc.) used to
combine two tally's filter bins with this CrossFilter
Attributes
----------
type : str
The type of the crossfilter (e.g., 'energy / energy')
left_filter : Filter or CrossFilter
The left filter in the outer product
right_filter : Filter or CrossFilter
The right filter in the outer product
binary_op : str
The tally arithmetic binary operator (e.g., '+', '-', etc.) used to
combine two tally's filter bins with this CrossFilter
bins : dict of Iterable
A dictionary of the bins from each filter keyed by the types of the
left / right filters
num_bins : Integral
The number of filter bins (always 1 if aggregate_filter is defined)
stride : Integral
The number of filter, nuclide and score bins within each of this
crossfilter's bins.
"""
def __init__(self, left_filter=None, right_filter=None, binary_op=None):
left_type = left_filter.type
right_type = right_filter.type
self._type = '({0} {1} {2})'.format(left_type, binary_op, right_type)
self._bins = {}
self._stride = None
self._left_filter = None
self._right_filter = None
self._binary_op = None
if left_filter is not None:
self.left_filter = left_filter
self._bins['left'] = left_filter.bins
if right_filter is not None:
self.right_filter = right_filter
self._bins['right'] = right_filter.bins
if binary_op is not None:
self.binary_op = binary_op
def __hash__(self):
return hash((self.left_filter, self.right_filter))
def __eq__(self, other):
return str(other) == str(self)
def __ne__(self, other):
return not self == other
def __repr__(self):
string = 'CrossFilter\n'
filter_type = '({0} {1} {2})'.format(self.left_filter.type,
self.binary_op,
self.right_filter.type)
filter_bins = '({0} {1} {2})'.format(self.left_filter.bins,
self.binary_op,
self.right_filter.bins)
string += '{0: <16}{1}{2}\n'.format('\tType', '=\t', filter_type)
string += '{0: <16}{1}{2}\n'.format('\tBins', '=\t', filter_bins)
return string
def __deepcopy__(self, memo):
existing = memo.get(id(self))
# If this is the first time we have tried to copy this object, create a copy
if existing is None:
clone = type(self).__new__(type(self))
clone._left_filter = self.left_filter
clone._right_filter = self.right_filter
clone._binary_op = self.binary_op
clone._type = self.type
clone._bins = self._bins
clone._stride = self.stride
memo[id(self)] = clone
return clone
# If this object has been copied before, return the first copy made
else:
return existing
@property
def left_filter(self):
return self._left_filter
@property
def right_filter(self):
return self._right_filter
@property
def binary_op(self):
return self._binary_op
@property
def type(self):
return self._type
@property
def bins(self):
return self._bins['left'], self._bins['right']
@property
def num_bins(self):
if self.left_filter is not None and self.right_filter is not None:
return self.left_filter.num_bins * self.right_filter.num_bins
else:
return 0
@property
def stride(self):
return self._stride
@type.setter
def type(self, filter_type):
if filter_type not in _FILTER_TYPES.values():
msg = 'Unable to set CrossFilter type to "{0}" since it ' \
'is not one of the supported types'.format(filter_type)
raise ValueError(msg)
self._type = filter_type
@left_filter.setter
def left_filter(self, left_filter):
cv.check_type('left_filter', left_filter, (Filter, CrossFilter))
self._left_filter = left_filter
self._bins['left'] = left_filter.bins
@right_filter.setter
def right_filter(self, right_filter):
cv.check_type('right_filter', right_filter, (Filter, CrossFilter))
self._right_filter = right_filter
self._bins['right'] = right_filter.bins
@binary_op.setter
def binary_op(self, binary_op):
cv.check_type('binary_op', binary_op, basestring)
cv.check_value('binary_op', binary_op, _TALLY_ARITHMETIC_OPS)
self._binary_op = binary_op
@stride.setter
def stride(self, stride):
self._stride = stride
def get_bin_index(self, filter_bin):
"""Returns the index in the CrossFilter for some bin.
Parameters
----------
filter_bin : 2-tuple
A 2-tuple where each value corresponds to the bin of interest
in the left and right filter, respectively. A bin is the integer
ID for 'material', 'surface', 'cell', 'cellborn', and 'universe'
Filters. The bin is an integer for the cell instance ID for
'distribcell' Filters. The bin is a 2-tuple of floats for 'energy'
and 'energyout' filters corresponding to the energy boundaries of
the bin of interest. The bin is a (x,y,z) 3-tuple for 'mesh'
filters corresponding to the mesh cell of interest.
Returns
-------
filter_index : Integral
The index in the Tally data array for this filter bin.
"""
left_index = self.left_filter.get_bin_index(filter_bin[0])
right_index = self.right_filter.get_bin_index(filter_bin[1])
filter_index = left_index * self.right_filter.num_bins + right_index
return filter_index
def get_pandas_dataframe(self, datasize, summary=None):
"""Builds a Pandas DataFrame for the CrossFilter's bins.
This method constructs a Pandas DataFrame object for the CrossFilter
with columns annotated by filter bin information. This is a helper
method for the Tally.get_pandas_dataframe(...) method. This method
recursively builds and concatenates Pandas DataFrames for the left
and right filters and crossfilters.
This capability has been tested for Pandas >=0.13.1. However, it is
recommended to use v0.16 or newer versions of Pandas since this method
uses Pandas' Multi-index functionality.
Parameters
----------
datasize : Integral
The total number of bins in the tally corresponding to this filter
summary : None or Summary
An optional Summary object to be used to construct columns for
distribcell tally filters (default is None). The geometric
information in the Summary object is embedded into a Multi-index
column with a geometric "path" to each distribcell instance.
NOTE: This option requires the OpenCG Python package.
Returns
-------
pandas.DataFrame
A Pandas DataFrame with columns of strings that characterize the
crossfilter's bins. Each entry in the DataFrame will include one
or more binary operations used to construct the crossfilter's bins.
The number of rows in the DataFrame is the same as the total number
of bins in the corresponding tally, with the filter bins
appropriately tiled to map to the corresponding tally bins.
See also
--------
Tally.get_pandas_dataframe(), Filter.get_pandas_dataframe()
"""
# If left and right filters are identical, do not combine bins
if self.left_filter == self.right_filter:
df = self.left_filter.get_pandas_dataframe(datasize, summary)
# If left and right filters are different, combine their bins
else:
left_df = self.left_filter.get_pandas_dataframe(datasize, summary)
right_df = self.right_filter.get_pandas_dataframe(datasize, summary)
left_df = left_df.astype(str)
right_df = right_df.astype(str)
df = '(' + left_df + ' ' + self.binary_op + ' ' + right_df + ')'
return df
|
kellyrowland/openmc
|
openmc/cross.py
|
Python
|
mit
| 15,897
|
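A small sketch of the tally-arithmetic wrappers above; the score names are illustrative, not taken from a specific OpenMC model:

from openmc.cross import CrossScore

combined = CrossScore('flux', 'fission', '+')
print(combined)  # (flux + fission)
# CrossScore accepts nested CrossScore operands, so compound expressions
# stringify recursively.
nested = CrossScore(combined, 'absorption', '*')
print(nested)    # ((flux + fission) * absorption)
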
import json
import socket
import struct
import time
from threading import Thread, Lock, Event
from Queue import Queue, Empty as EmptyQueue
# python struct pack format
# c char string of length 1 1
# B unsigned char integer 1
# H unsigned short integer 2
# I unsigned int integer 4
# Q unsigned long long integer 8
# TODO: assemble (un)pack strings with results from id command
class EOF(Exception):
def __init__(self, inner=None):
Exception.__init__(
self, str(inner) if inner else "EOF"
)
class HandshakeError(Exception):
def __init__(self):
Exception.__init__(
self, 'handshake error, received message did not match'
)
class ProtocolError(Exception):
pass
JDWP_HEADER_SIZE = 11
CMD_PKT = '0'
REPLY_PKT = '1'
REPLY_PACKET_TYPE = 0x80
HANDSHAKE_MSG = 'JDWP-Handshake'
JOIN_TIMEOUT = 0.2
EVENT_BREAKPOINT = 2
EVENT_CLASS_PREPARE = 8
EVENT_METHOD_ENTRY = 40
EVENT_METHOD_EXIT_WITH_RETURN_VALUE = 42
EVENTREQUEST_MODKIND_CLASSMATCH = 5
SUSPEND_NONE = 0
SUSPEND_ALL = 2
LEN_METHOD_ENTRY_AND_EXIT_WITH_RETURN_VALUE_HEADER = 43
LEN_CLASS_PREPARE_HEADER = 27
class JDWPConnection(Thread):
def __init__(self, addr, port, trace=False):
Thread.__init__(self)
self.bindqueue = Queue()
self.reply_pkt_map = {}
self.cmd_pkt_queue = Queue()
self.socket_conn = socket.create_connection((addr, port))
self.next_id = 1
self.trace = trace
self.unplug_flag = Event()
self.lock = Lock()
self.breakpoint_handler = None
self.class_prepare_handler = None
def set_breakpoint_handler(self, handler):
self.breakpoint_handler = handler
def set_class_prepare_handler(self, handler):
self.class_prepare_handler = handler
def do_read(self, amt):
"""
Read data from the socket
"""
req = amt
buf = ''
while req:
pkt = self.socket_conn.recv(req)
if not pkt: raise EOF()
buf += pkt
req -= len(pkt)
if self.trace:
print "===> RX:", repr(buf)
return buf
def do_write(self, data):
"""
Write data to the socket
"""
try:
if self.trace:
print "===> TX:", repr(data)
self.socket_conn.sendall(data)
except Exception as exc:
raise EOF(exc)
def read(self, sz):
"""
Read data with size sz
"""
if sz == 0:
return ''
pkt = self.do_read(sz)
if not len(pkt):
# raise exception if there is nothing to read
raise EOF()
return pkt
def write_id_size(self):
"""
Send the id size cmd to the VM
"""
length = JDWP_HEADER_SIZE
ident = self.acquire_ident()
flags = 0
cmd = 0x0107
header = struct.pack('>IIBH', length, ident, flags, cmd)
return self.do_write(header)
def read_id_size(self):
"""
Parse the read id size result
"""
head = self.read_header()
if head[0] != 20 + JDWP_HEADER_SIZE:
raise ProtocolError('expected size of an idsize response')
if head[2] != REPLY_PACKET_TYPE:
raise ProtocolError('expected first server message to be a response')
if head[1] != 1:
raise ProtocolError('expected first server message to be 1')
body = self.read(20)
data = struct.unpack(">IIIII", body)
self.sizes = list(data)
setattr(self, "fieldIDSize", self.sizes[0])
setattr(self, "methodIDSize", self.sizes[1])
setattr(self, "objectIDSize", self.sizes[2])
setattr(self, "threadIDSize", self.sizes[2])
setattr(self, "referenceTypeIDSize", self.sizes[3])
setattr(self, "frameIDSize", self.sizes[4])
return None
def read_handshake(self):
"""
Read the jdwp handshake
"""
data = self.read(len(HANDSHAKE_MSG))
if data != HANDSHAKE_MSG:
raise HandshakeError()
def write_handshake(self):
"""
Write the jdwp handshake
"""
return self.do_write(HANDSHAKE_MSG)
def read_header(self):
"""
Read the header
"""
header = self.read(JDWP_HEADER_SIZE)
data = struct.unpack(">IIBH", header)
return data
def process_data_from_vm(self):
"""
Handle data from the VM: both replies to requests initiated by the
debugger and command packets initiated by the VM
"""
size, ident, flags, code = self.read_header()
size -= JDWP_HEADER_SIZE
data = self.read(size)
try:
# We process binds after receiving messages to prevent a race
while True:
# Passing False to bindqueue.get makes it non-blocking, raising
# EmptyQueue once every pending binding has been consumed; replies are
# later delivered through the bound queue in process_packet
self.set_bind(*self.bindqueue.get(False))
except EmptyQueue:
pass
self.process_packet(ident, code, data, flags)
def set_bind(self, pkt_type, ident, chan):
"""
Bind the reply queue for REPLY_PKT packets; CMD_PKT packets are not
bound here, since they are buffered or dispatched to handlers instead
"""
if pkt_type == REPLY_PKT:
self.reply_pkt_map[ident] = chan
def process_packet(self, ident, code, data, flags):
"""
Handle packets from VM
"""
# a reply packet for a given request id arrives only once
if flags == REPLY_PACKET_TYPE:
chan = self.reply_pkt_map.get(ident)
if not chan:
return
return chan.put((ident, code, data))
elif not self.unplug_flag.is_set(): # command packets are buffered
if code == 0x4064:
event_kind = struct.unpack(">BIB", data[:6])[2]
if event_kind in [EVENT_METHOD_ENTRY, EVENT_METHOD_EXIT_WITH_RETURN_VALUE]:
self.cmd_pkt_queue.put((ident, code, data))
elif event_kind == EVENT_BREAKPOINT:
Thread(target=self.breakpoint_handler, args=[data]).start()
elif event_kind == EVENT_CLASS_PREPARE:
Thread(target=self.class_prepare_handler, args=[data]).start()
def get_cmd_packets(self):
ret_list = []
while True:
try:
ret_list.append(self.cmd_pkt_queue.get(False))
except EmptyQueue:
break
return ret_list
def acquire_ident(self):
"""
Get a request id
"""
ident = self.next_id
self.next_id += 2
return ident
def write_request_data(self, ident, flags, code, body):
"""
Write the request data to jdwp
"""
size = len(body) + JDWP_HEADER_SIZE
header = struct.pack(">IIcH", size, ident, flags, code)
self.do_write(header)
return self.do_write(body)
def request(self, code, data='', timeout=None):
"""
send a request, then waits for a response; returns response
conn.request returns code and buf
"""
# create a new queue to get the response of this request
queue = Queue()
with self.lock:
ident = self.acquire_ident()
self.bindqueue.put((REPLY_PKT, ident, queue))
self.write_request_data(ident, chr(0x0), code, data)
try:
return queue.get(1, timeout)
except EmptyQueue:
return None, None, None
def run(self):
"""
Thread function for jdwp
"""
try:
while True:
self.process_data_from_vm()
except EOF:
print "EOF"
def start(self):
"""
Start the jdwp connection
"""
self.write_handshake()
self.read_handshake()
self.write_id_size()
self.read_id_size()
self.unplug()
Thread.start(self)
def plug(self):
self.unplug_flag.clear()
def unplug(self):
self.unplug_flag.set()
def stop(self):
"""
close the JDWP connection
"""
try:
self.unplug()
self.join(timeout=JOIN_TIMEOUT)
self.socket_conn.shutdown(socket.SHUT_RDWR)
self.socket_conn.close()
except Exception, e:
pass
class JDWPHelper():
def __init__(self, jdwp_conn):
self.jdwp_conn = jdwp_conn
self.jdwp_conn.set_breakpoint_handler(self.breakpoint_handler)
self.jdwp_conn.set_class_prepare_handler(self.class_prepare_handler)
self.class_id2name = {}
self.method_id2name = {}
def VirtualMachine_ClassesBySignature(self, signature):
cmd = 0x0102
signature_utf8 = unicode(signature).encode("utf-8")
header_data = struct.pack(">I%ds" % len(signature_utf8), len(signature_utf8), signature_utf8)
return self.jdwp_conn.request(cmd, header_data)
def VirtualMachine_Suspend(self):
cmd = 0x0108
return self.jdwp_conn.request(cmd)
def VirtualMachine_Resume(self):
cmd = 0x0109
return self.jdwp_conn.request(cmd)
def ReferenceType_Signature(self, ref_id):
cmd = 0x0201
header_data = struct.pack(">Q", ref_id)
return self.jdwp_conn.request(cmd, header_data)
def ReferenceType_Methods(self, ref_id):
cmd = 0x0205
header_data = struct.pack(">Q", ref_id)
return self.jdwp_conn.request(cmd, header_data)
def StringReference_Value(self, str_id):
cmd = 0x0a01
header_data = struct.pack(">Q", str_id)
return self.jdwp_conn.request(cmd, header_data)
def EventRequest_Set_METHOD_ENTRY(self, class_pattern):
return self.EventRequest_Set_workload_classmatch(class_pattern, EVENT_METHOD_ENTRY, SUSPEND_NONE)
def EventRequest_Set_METHOD_EXIT_WITH_RETURN_VALUE(self, class_pattern):
return self.EventRequest_Set_workload_classmatch(class_pattern, EVENT_METHOD_EXIT_WITH_RETURN_VALUE, SUSPEND_NONE)
def EventRequest_Set_CLASS_PREPARE(self, class_pattern):
return self.EventRequest_Set_workload_classmatch(class_pattern, EVENT_CLASS_PREPARE, SUSPEND_ALL)
def EventRequest_Set_workload_classmatch(self, class_pattern, event_kind, suspend_policy):
cmd = 0x0f01
modifiers = 1
class_pattern_utf8 = unicode(class_pattern).encode("utf-8")
modifier_data = struct.pack(">BI%ds" % len(class_pattern_utf8),
EVENTREQUEST_MODKIND_CLASSMATCH,
len(class_pattern_utf8), class_pattern_utf8)
header_data = struct.pack(">BBI", event_kind, suspend_policy, modifiers)
ret = self.jdwp_conn.request(cmd, header_data + modifier_data)
# return requestID's
return event_kind, struct.unpack(">I", ret[2])[0],
def EventRequest_Clear(self, event_kind, request_id):
cmd = 0x0f02
header_data = struct.pack(">BI", int(event_kind), int(request_id))
return self.jdwp_conn.request(cmd, header_data)
def parse_return_value(self, return_value):
basic_parser = {
"Z": lambda x: ("boolean", struct.unpack(">?", x)[0]),
"B": lambda x: ("byte", chr(struct.unpack(">B", x)[0])),
"C": lambda x: ("char", x.encode("utf8", "ignore")),
"S": lambda x: ("short", struct.unpack(">h", x)[0]),
"I": lambda x: ("int", struct.unpack(">i", x)[0]),
"J": lambda x: ("long", struct.unpack(">q", x)[0]),
"F": lambda x: ("float", struct.unpack(">f", x)[0]),
"D": lambda x: ("double", struct.unpack(">d", x)[0]),
"[": lambda x: ("array", struct.unpack(">Q", x)[0]),
"L": lambda x: ("object", struct.unpack(">Q", x)[0]),
"s": lambda x: ("string", struct.unpack(">Q", x)[0]),
"t": lambda x: ("thread", struct.unpack(">Q", x)[0]),
"g": lambda x: ("thread_group", struct.unpack(">Q", x)[0]),
"l": lambda x: ("class_loader", struct.unpack(">Q", x)[0]),
"c": lambda x: ("class_object", struct.unpack(">Q", x)[0]),
"V": lambda x: ("void", None)
}
if return_value[0] not in basic_parser:
return "unknown", return_value
else:
ret_type, ret_data = basic_parser[return_value[0]](return_value[1:])
if ret_type == "string":
ident, code, str_data = self.StringReference_Value(ret_data)
if not code:
str_len = struct.unpack(">I", str_data[:4])[0]
ret_data = struct.unpack(">%ds" % str_len, str_data[4:])[0].decode("utf8", "ignore").encode("utf8")
else: # string finding error, return null object
ret_type = "object"
ret_data = 0
return ret_type, ret_data
def update_class_method_info_by_class_names(self, class_name_list):
"""
self.class_id2name = {
"classId": "className"
}
self.method_id2name = {
"classId": {
"methodId": {
"name": "methodName"
"signature": "methodSignature"
}
}
}
"""
new_class_id2name = {}
# get class id by class name
for class_name in class_name_list:
class_sig = "L%s;" % class_name.replace(".", "/")
ident, code, data = self.VirtualMachine_ClassesBySignature(class_sig)
classes = struct.unpack(">I", data[:4])[0]
for i in range(classes):
ref_type_tag = struct.unpack(">B", data[4:5])[0]
type_id = struct.unpack(">Q", data[5:5 + self.jdwp_conn.referenceTypeIDSize])[0]
if not type_id in self.class_id2name:
new_class_id2name[type_id] = class_name
self.class_id2name[type_id] = class_name
# for each class get its method id and method name
for class_id, class_name in new_class_id2name.iteritems():
self.method_id2name[class_id] = {}
ident, code, data = self.ReferenceType_Methods(class_id)
declared = struct.unpack(">I", data[:4])[0]
declared_offset = 4
for i in range(declared):
method_id = struct.unpack(">Q", data[declared_offset:declared_offset + self.jdwp_conn.methodIDSize])[0]
declared_offset += self.jdwp_conn.methodIDSize
name_len = struct.unpack(">I", data[declared_offset:declared_offset + 4])[0]
declared_offset += 4
name = struct.unpack(">%ds" % name_len, data[declared_offset:declared_offset + name_len])[0]
declared_offset += name_len
signature_len = struct.unpack(">I", data[declared_offset:declared_offset + 4])[0]
declared_offset += 4
signature = struct.unpack(">%ds" % signature_len, data[declared_offset:declared_offset + signature_len])[0]
# add mod bits as well
declared_offset += signature_len + 4
self.method_id2name[class_id][method_id] = {
"name": name,
"signature": signature
}
def parse_cmd_packets(self, cmd_packets):
ret_list = []
class_id_set = set()
for cmd_packet in cmd_packets:
ident, code, data = cmd_packet
parsed_header = struct.unpack(">BIBIQBQQQ", data[:LEN_METHOD_ENTRY_AND_EXIT_WITH_RETURN_VALUE_HEADER])
parsed_packet = {
#"id": ident,
#"command": code,
#"suspendPolicy": parsed_header[0],
#"events": parsed_header[1],
"eventKind": parsed_header[2],
#"requestID": parsed_header[3],
"thread": parsed_header[4],
#"typeTag": parsed_header[5],
"classID": parsed_header[6],
"methodID": parsed_header[7],
"methodLocation": parsed_header[8],
}
if parsed_packet["eventKind"] == EVENT_METHOD_EXIT_WITH_RETURN_VALUE:
ret_data = self.parse_return_value(data[LEN_METHOD_ENTRY_AND_EXIT_WITH_RETURN_VALUE_HEADER:])
parsed_packet["returnType"] = ret_data[0]
parsed_packet["returnValue"] = ret_data[1]
ret_list.append(parsed_packet)
class_id_set.add(parsed_header[6])
for parsed_packet in ret_list:
class_id = parsed_packet.pop("classID")
method_id = parsed_packet.pop("methodID")
class_name = self.class_id2name[class_id]
method_info = self.method_id2name[class_id][method_id]
parsed_packet["classMethodName"] = ".".join([class_name, method_info["name"]])
parsed_packet["signature"] = method_info["signature"]
return ret_list
def breakpoint_handler(self, data):
pass
def class_prepare_handler(self, data):
data_idx = 0
suspend_policy, events, event_kind, request_id, thread, ref_type_tag, type_id = struct.unpack(">BIBIQBQ", data[data_idx:LEN_CLASS_PREPARE_HEADER])
data_idx = LEN_CLASS_PREPARE_HEADER
signature_len = struct.unpack(">I", data[data_idx:data_idx + 4])[0]
data_idx += 4
signature = struct.unpack(">%ds" % signature_len, data[data_idx:data_idx + signature_len])[0]
class_name = signature[len("L"):-len(";")].replace("/", ".")
self.update_class_method_info_by_class_names([class_name])
self.VirtualMachine_Resume()
|
yzygitzh/ReDroid
|
dsm_patcher/scripts/jdwp.py
|
Python
|
mit
| 17,910
|
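A hedged end-to-end sketch of the JDWP classes above (Python 2, like the module); the host, port, class pattern, and import path are placeholders:

from jdwp import JDWPConnection, JDWPHelper  # import path is an assumption

conn = JDWPConnection('127.0.0.1', 8700)  # placeholder debug endpoint
conn.start()                              # handshake, idSizes, reader thread
helper = JDWPHelper(conn)
helper.update_class_method_info_by_class_names(['com.example.app.MainActivity'])
kind, req_id = helper.EventRequest_Set_METHOD_ENTRY('com.example.app.*')
conn.plug()                               # begin buffering method-entry events
# ... exercise the target app here ...
events = helper.parse_cmd_packets(conn.get_cmd_packets())
helper.EventRequest_Clear(kind, req_id)
conn.stop()
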
#!/bin/env python
# -*- coding: utf-8 -*-
'Django package for modernizr:' \
' JavaScript library that detects HTML5' \
" and CSS3 features in the user's browser"
from setuptools import setup
setup(
name='django-modernizr',
version='2.8.3',
url='http://modernizr.com',
description=globals()['__doc__'],
author='Faruk Ates, Paul Irish, Alex Sexton',
maintainer='Renat Galimov',
maintainer_email='renat2017@gmail.com',
license='MIT License',
keywords=['django', 'modernizr'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=['django_modernizr'],
package_data={'django_modernizr': ['static/django_modernizr/js/*.js']}
)
|
ITrex/django-modernizr
|
setup.py
|
Python
|
mit
| 1,043
|
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from django.contrib.auth.models import User
from tracking.models import Organization, Clinic, ClinicUser, \
ReferringEntity, ReferringReportSetting, ClinicReportSetting
class LoginBaseTest(APITestCase):
''' a base class for rest testcases which need login '''
def setUp(self):
'''initial default user to be login in '''
self.default_pass = 'pass1234'
self.user = User.objects.create_superuser(username='user1',
email='user1@email.com',
password=self.default_pass)
self.clinic = Clinic.objects.create(clinic_name="clinic1")
self.clinic_user = ClinicUser.objects.create(
clinic=self.clinic,
user=self.user)
def _login(self):
''' do login on client '''
return self.client.login(username=self.user.username,
password=self.default_pass)
class OrganizationTest(LoginBaseTest):
''' testcases class for Organization Rest api '''
def test_add(self):
''' add api test '''
self._login()
url = reverse('rest_api:organization-list')
data = {'org_name': 'org1', 'clinic': self.clinic.id}
self.assertEqual(Organization.objects.count(), 0)
response = self.client.post(url, data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Organization.objects.count(), 1)
self.assertEqual(Organization.objects.get().org_name, 'org1')
def test_add_not_authorized(self):
''' call add api while not authorized '''
url = reverse('rest_api:organization-list')
data = {'org_name': 'org1', 'clinic': self.clinic.id}
self.assertEqual(Organization.objects.count(), 0)
response = self.client.post(url, data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(Organization.objects.count(), 0)
def test_get(self):
''' get api test '''
self._login()
org1 = Organization.objects.create(org_name='org1', clinic=self.clinic)
url = reverse('rest_api:organization-detail', args=(org1.id,))
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['org_name'], 'org1')
def test_get_not_authorized(self):
''' call get api while not authorized '''
org1 = Organization.objects.create(org_name='org1', clinic=self.clinic)
url = reverse('rest_api:organization-detail', args=(org1.id,))
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update(self):
''' update api test '''
self._login()
org1 = Organization.objects.create(org_name='org1', clinic=self.clinic)
url = reverse('rest_api:organization-detail', args=(org1.id,))
data = {'org_name': 'org2'}
response = self.client.patch(url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(Organization.objects.count(), 1)
org1 = Organization.objects.get(id=org1.id)
self.assertEqual(org1.org_name, 'org2')
def test_update_not_authorized(self):
''' call update api while not authorized '''
org1 = Organization.objects.create(org_name='org1', clinic=self.clinic)
url = reverse('rest_api:organization-detail', args=(org1.id,))
data = {'org_name': 'org2'}
response = self.client.patch(url, data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_list(self):
''' list api test '''
orgs = [Organization.objects.create(org_name='org{0}'.format(i),
clinic=self.clinic)
for i in range(5)]
self._login()
url = reverse('rest_api:organization-list')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data['results']
self.assertEqual(len(results), len(orgs))
def test_list_not_authorized(self):
''' call list api while not authorized '''
url = reverse('rest_api:organization-list')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class ReferringReportSettingTest(LoginBaseTest):
    ''' testcases class for ReferringReportSetting Rest api '''
def test_add(self):
''' add api test '''
self._login()
url = reverse('rest_api:referringreportsetting-list')
organization = Organization.objects.create(
org_name='org1',
clinic_id=self.clinic.id)
referring_entity = ReferringEntity.objects.create(
entity_name='phys1', organization_id=organization.id)
data = {'enabled': True,
'period': ReferringReportSetting.PERIOD_DAILY,
'report_name': 'thankyou',
'referring_entity': referring_entity.id}
self.assertEqual(ReferringReportSetting.objects.count(), 0)
response = self.client.post(url, data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(ReferringReportSetting.objects.count(), 1)
rs = ReferringReportSetting.objects.get()
self.assertEqual(rs.enabled, True)
self.assertEqual(rs.period, ReferringReportSetting.PERIOD_DAILY)
self.assertEqual(rs.report_name, 'thankyou')
self.assertEqual(rs.referring_entity, referring_entity)
def test_add_invalid(self):
        ''' add api test with an invalid period value '''
self._login()
url = reverse('rest_api:referringreportsetting-list')
organization = Organization.objects.create(
org_name='org1',
clinic_id=self.clinic.id)
referring_entity = ReferringEntity.objects.create(
entity_name='phys1', organization_id=organization.id)
data = {'enabled': True,
'period': 'Invalid',
'report_name': 'thankyou',
'referring_entity': referring_entity.id}
self.assertEqual(ReferringReportSetting.objects.count(), 0)
response = self.client.post(url, data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_add_bulk(self):
        ''' bulk add api test using the "*" wildcard '''
self._login()
url = reverse('rest_api:referringreportsetting-list')
organization = Organization.objects.create(
org_name='org1',
clinic_id=self.clinic.id)
ref1 = ReferringEntity.objects.create(
entity_name='phys1', organization_id=organization.id,
clinic_id=self.clinic.id)
ref2 = ReferringEntity.objects.create(
entity_name='phys2', organization_id=organization.id,
clinic_id=self.clinic.id)
data = {'enabled': True,
'period': ReferringReportSetting.PERIOD_DAILY,
'report_name': 'thankyou',
'referring_entity': '*',
'bulk': True}
self.assertEqual(ReferringReportSetting.objects.count(), 0)
response = self.client.post(url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(ReferringReportSetting.objects.count(), 2)
rs = ReferringReportSetting.objects.all()
self.assertSetEqual({r.referring_entity.id for r in rs},
{ref1.id, ref2.id})
class ClinicReportSettingTest(LoginBaseTest):
    ''' testcases class for ClinicReportSetting Rest api '''
def test_add(self):
''' add api test '''
self._login()
url = reverse('rest_api:clinicreportsetting-list')
data = {'enabled': True,
'period': ClinicReportSetting.PERIOD_DAILY,
'report_name': 'visit_history',
'clinic_user': self.clinic_user.id}
self.assertEqual(ClinicReportSetting.objects.count(), 0)
response = self.client.post(url, data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(ClinicReportSetting.objects.count(), 1)
rs = ClinicReportSetting.objects.get()
self.assertEqual(rs.enabled, True)
self.assertEqual(rs.period, ClinicReportSetting.PERIOD_DAILY)
self.assertEqual(rs.report_name, 'visit_history')
self.assertEqual(rs.clinic_user, self.clinic_user)
def test_add_invalid(self):
        ''' add api test with an invalid period value '''
self._login()
url = reverse('rest_api:clinicreportsetting-list')
data = {'enabled': True,
'period': 'Invalid',
'report_name': 'visit_history',
'clinic_user': self.clinic_user.id}
self.assertEqual(ClinicReportSetting.objects.count(), 0)
response = self.client.post(url, data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_add_bulk(self):
        ''' bulk add api test using the "*" wildcard '''
self._login()
url = reverse('rest_api:clinicreportsetting-list')
user2 = User.objects.create_user(username='user2',
email='user1@email.com',
password=self.default_pass)
clinic_user2 = ClinicUser.objects.create(
clinic=self.clinic,
user=user2)
data = {'enabled': True,
'period': ClinicReportSetting.PERIOD_DAILY,
'report_name': 'visit_history',
'clinic_user': '*',
'bulk': True}
self.assertEqual(ClinicReportSetting.objects.count(), 0)
response = self.client.post(url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(ClinicReportSetting.objects.count(), 2)
rs = ClinicReportSetting.objects.all()
self.assertSetEqual({r.clinic_user.id for r in rs},
{self.clinic_user.id, clinic_user2.id})
|
Heteroskedastic/Dr-referral-tracker
|
tracking/tests/test_rest_api.py
|
Python
|
mit
| 10,291
|
#!/usr/bin/env python
from __future__ import print_function
import subprocess
import shlex
import os
import sys
from setuptools import setup, Command
pypy = False
if 'pypy' in sys.version.lower():
pypy = True
about = {}
with open('__about__.py') as f:
exec(f.read(), about)
class Test(Command):
''' Test application with the following:
pep8 conformance (style)
pyflakes validation (static analysis)
no print statements (breaks wsgi)
nosetests (code tests) [--with-integration] [--run-failed]
'''
description = 'Test {0} source code'.format(about['__title__'])
user_options = [('run-failed', None,
'Run only the previously failed tests.'),
('nose-only', None, 'Run only the nose tests.')]
boolean_options = ['run-failed', 'nose-only']
_files = ['__about__.py', 'wsgisubdomain.py']
_test_requirements = ['flake8', 'nose', 'disabledoc', 'coverage']
@property
def files(self):
return ' '.join(self._files)
def initialize_options(self):
self.run_failed = False
# Disable the flake8 tests in pypy due to bug in pep8 module
self.nose_only = pypy
self.with_integration = False
self.flake8 = 'flake8 {0} tests/'.format(self.files)
def finalize_options(self):
pass
def _no_print_statements(self):
cmd = 'grep -rnw print {0}'.format(self.files)
p = subprocess.Popen(shlex.split(cmd), close_fds=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
err = p.stderr.read().strip()
if err:
msg = 'ERROR: stderr not empty for print statement grep: {0}'
print(msg.format(err))
raise SystemExit(-1)
output = p.stdout.read().strip()
if output:
print('ERROR: Found print statements in source code:')
print(output)
raise SystemExit(-1)
def _get_py_files(self, basepath, subpath=''):
files = []
badchars = ['.', '_', '~']
path = os.path.join(basepath, subpath)
for f in os.listdir(path):
if (not f.endswith('.py') or
any(map(lambda c: f.startswith(c), badchars))):
continue
files.append(os.path.join(subpath, f))
return files
def _get_nose_command(self):
nosecmd = ('nosetests -v -w tests/ --all-modules '
'--with-coverage --disable-docstring '
'--cover-erase --cover-min-percentage=100')
if self.run_failed:
nosecmd += ' --failed'
nose = ' '.join(shlex.split(nosecmd))
return nose
def _check_module(self, module):
cmd = '/usr/bin/env python -c "import {0}"'.format(module)
try:
subprocess.check_call(shlex.split(cmd))
except subprocess.CalledProcessError:
msg = 'Python package "{0}" is required to run the tests'
print(msg.format(module))
raise SystemExit(-1)
def _check_test_packages(self):
for m in self._test_requirements:
self._check_module(m)
def run(self):
print('Checking test packages installed...')
self._check_test_packages()
cmds = [self._get_nose_command()]
if not self.nose_only:
print('Checking no print statements in code...')
self._no_print_statements()
cmds = [self.flake8] + cmds
        cmds = list(filter(bool, cmds))
        if not cmds:
            print('No action taken.')
            raise SystemExit(-2)
for cmd in cmds:
print('Executing command: {0}'.format(cmd))
c = shlex.split(cmd)
try:
subprocess.check_call(c)
except subprocess.CalledProcessError:
print('Command failed: {0}'.format(cmd))
raise SystemExit(-1)
raise SystemExit(0)
setup(name=about['__title__'],
version=about['__version__'],
description=about['__description__'],
long_description=open('README.md').read(),
author='Steve Leonard',
author_email='sleonard76@gmail.com',
url='https://github.com/xsleonard/wsgisubdomain',
platforms='any',
py_modules=['__about__', 'wsgisubdomain'],
install_requires=[],
cmdclass=dict(test=Test),
license='MIT',
keywords='wsgi subdomain',
classifiers=(
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
))
|
xsleonard/wsgisubdomain
|
setup.py
|
Python
|
mit
| 5,133
|
import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
test_requires = []
name='ENML2HTML'
setup(
name=name,
version='0.0.1',
description='This is a python library for converting ENML (Evernote Markup Language, http://dev.evernote.com/start/core/enml.php) to/from HTML.',
long_description=readme,
author='Carl Lee',
author_email='ljbha007@gmail.com',
url='https://github.com/CarlLee/ENML_PY/tree/master',
packages=find_packages(),
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
test_suite='test',
)
|
CarlLee/ENML_PY
|
setup.py
|
Python
|
mit
| 842
|
"""
Created on 2012-12-28
@author: Administrator
"""
import urllib.request
import urllib.error
req = urllib.request.Request('http://www.python.org/fish.html')
try:
urllib.request.urlopen(req)
except urllib.error.HTTPError as e:
print(e.code)
print(e.read())
response = urllib.request.urlopen('http://python.org/')
html = response.read()
|
quchunguang/test
|
testpy3/testurllib.py
|
Python
|
mit
| 333
|
import json
import tensorflow as tf
from data import datasets
from patchy import PatchySan
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('config', None,
"""Path to the configuration json file of the
dataset.""")
def dataset(config):
"""Reads and initializes a dataset specified by a passed configuration.
Args:
config: Configuration object.
Returns:
A dataset.
"""
if config['name'] in datasets:
return datasets[config['name']].create(config)
elif config['name'] == 'patchy_san':
return PatchySan.create(config)
else:
raise ValueError('Dataset not found.')
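# The config is a plain dict parsed from the JSON file; only the 'name'
# key is interpreted here, e.g. {"name": "patchy_san", ...} -- any other
# fields are passed through to the dataset's create() (illustrative only).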
def main(argv=None):
"""Runs the script."""
if not tf.gfile.Exists(FLAGS.config):
raise ValueError('{} does not exist.'.format(FLAGS.config))
with open(FLAGS.config, 'r') as f:
config = json.load(f)
dataset(config)
if __name__ == '__main__':
tf.app.run()
|
rusty1s/graph-based-image-classification
|
dataset.py
|
Python
|
mit
| 995
|
from sys import argv
script, user_name = argv
prompt = '> '
print ("Hi %s, I'm the %s script." % (user_name, script))
print ("I'd like to ask you a few questions.")
print ("Do you like me %s?" % user_name)
likes = input(prompt)
print ("Where do you live %s?" % user_name)
lives = input(prompt)
print ("What kind of computer do you have?")
computer = input(prompt)
print ("""
Alright, so you said %r about liking me.
You live in %r. Not sure where that is.
And you have a %r computer. Nice.
""" % (likes, lives, computer))
|
Paul-Haley/LPTHW_python3
|
ex14.py
|
Python
|
mit
| 527
|
import abc
class Writer(object):
"""
Common interface for all the writer operators.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def write_iterable(self, events_iterable):
"""
Consumes the members of the iterable, writing them one by one.
"""
raise NotImplementedError()
@abc.abstractmethod
def write_event(self, event):
raise NotImplementedError()
@abc.abstractmethod
def write_combo(self, events):
"""
        Writes all the events simultaneously. Useful for example for ctrl+something
combinations.
"""
raise NotImplementedError()
def close(self):
pass
|
GMadorell/panoptes
|
src/operators/writers/writer.py
|
Python
|
mit
| 694
|
#coding:utf-8
import os
pyList = ["excel_to_csv.py",
"csv_to_csharp.py"
]
for py in pyList:
os.system(py)
print("all_do ok.")
input()
|
tjandy/work
|
excelToCode/all_do.py
|
Python
|
mit
| 163
|
import unittest
import json
import os
import shutil
import mock
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from latte.TimeTracker import TimeTracker
from latte.Config import Config
from latte.Log import Log
from latte.Base import Base
class testTimeTracker(unittest.TestCase):
""" A test class for the TimeTracker class """
sleepTime = 0
timetracker = None
def setUp(self):
""" Set up data used in tests """
self.config = mock.Mock()
def get(*args, **kwargs):
if args[0] == 'app_path':
return 'tests/latte/'
elif args[0] == 'sleep_time':
return 5
elif args[0] == 'stats_path':
return 'stats/'
elif args[0] == 'ignore_keywords':
return ['ignore', 'keyw']
self.config.get = get
self.config.getint = get
engine = create_engine('sqlite:///:memory:')
self.session = sessionmaker(bind=engine)
Base.metadata.create_all(engine)
self.timetracker = TimeTracker(self.config, session=self.session())
def tearDown(self):
self.session().rollback()
def testGettingEmptyLog(self):
""" Tests if getWindowTime with empty log returns None """
self.assertEqual(self.timetracker.get_window_time(u'Bogus'), None)
def testAddTimeToNonExistingWindows(self):
""" Test adding time to non existing window titles """
window = u'Non existing window 1'
window_class = u'New class'
window_instance = u'New instance'
self.timetracker.log(window, window_class, window_instance)
self.assertEqual(self.timetracker.get_window_time(window), self.config.get('sleep_time'))
def testAddTimeToExistingWindows(self):
window = u'Testing Window 1'
window_class = u'Window class 1'
window_instance = u'Instance 1'
self.timetracker.log(window, window_class, window_instance)
self.timetracker.log(window, window_class, window_instance)
self.assertEqual(self.timetracker.get_window_time(window), self.config.get('sleep_time') * 2)
def testGetWindowStats(self):
window = u'Some window'
window_class = u'Some class'
window_instance = u'Some instance'
self.timetracker.log(window, window_class, window_instance)
        data = self.timetracker.get_window_stats(window)
        self.assertIs(type(data), Log)
def testContainsIgnoredKeywords(self):
window = u'Some string with ignored keywords'
self.assertTrue(self.timetracker.contains_ignored_keywords(window))
window2 = u'Doesn\'t contain bad words'
self.assertFalse(self.timetracker.contains_ignored_keywords(window2))
def testAddLogWithIgnoredKeywords(self):
window = u'Some string with ignored keywords'
window_class = u'Window class'
window_instance = u'Window instance'
self.assertFalse(self.timetracker.log(window, window_class, window_instance))
|
Vesuvium/Latte
|
tests/timetracker_test.py
|
Python
|
mit
| 3,131
|
# -*- coding: utf-8 -*-
"""
SQLpie License (MIT License)
Copyright (c) 2011-2016 André Lessa, http://sqlpie.com
See LICENSE file.
"""
from flask import g
import sqlpie
class Model(object):
__tablename = "models"
class record(object):
def __init__(self, p):
self.id, self.model, self.subject_bucket, self.predicate, self.model_id = p[0], p[1], p[2], p[3], p[4]
self.bucket_id, self.predicate_id, self.last_observation = p[5], p[6], p[7]
def create(self, model, subject_bucket, predicate, model_id, subject_bucket_id, predicate_id, last_observation=971920500):
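        # Upsert: insert a new model row, or only refresh last_observation
        # when the unique key already exists (ON DUPLICATE KEY UPDATE).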
sql = "INSERT INTO " + self.__tablename + " (model, bucket, predicate, "
sql += "model_id, bucket_id, predicate_id, last_observation) "
sql += "VALUES (%s, %s, %s, UNHEX(%s), UNHEX(%s), UNHEX(%s), FROM_UNIXTIME(%s)) "
sql += " ON DUPLICATE KEY UPDATE last_observation=FROM_UNIXTIME(%s)"
g.cursor.execute(sql, (model, subject_bucket, predicate, model_id, subject_bucket_id, predicate_id, last_observation, last_observation))
if sqlpie.Util.is_debug():
print g.cursor._executed
def set_last_observation(self, model_id, last_observation):
sql = "UPDATE " + self.__tablename + " SET last_observation = %s WHERE model_id = UNHEX(%s)"
g.cursor.execute(sql, (last_observation, model_id))
if sqlpie.Util.is_debug():
print g.cursor._executed
def get(self, model_id):
sql = "SELECT id, model, bucket, predicate, HEX(model_id), HEX(bucket_id), HEX(predicate_id), last_observation FROM "
sql += self.__tablename + " WHERE model_id = UNHEX(%s)"
g.cursor.execute(sql, (model_id,))
if sqlpie.Util.is_debug():
print g.cursor._executed
db_record = g.cursor.fetchone()
response = None
if db_record:
response = Model.record(db_record)
return response
@staticmethod
def reset():
sql = "TRUNCATE " + Model.__tablename
g.cursor.execute(sql)
|
lessaworld/SQLpie
|
sqlpie/models/model.py
|
Python
|
mit
| 2,049
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
##===-----------------------------------------------------------------------------*- Python -*-===##
## _____ _
## / ____| (_)
## | (___ ___ __ _ _ _ ___ _ __ _
## \___ \ / _ \/ _` | | | |/ _ \| |/ _` |
## ____) | __/ (_| | |_| | (_) | | (_| |
## |_____/ \___|\__, |\__,_|\___/|_|\__,_| - Game Engine (2016-2017)
## | |
## |_|
##
## This file is distributed under the MIT License (MIT).
## See LICENSE.txt for details.
##
##===------------------------------------------------------------------------------------------===##
##
## This script updates all the license files.
##
##===------------------------------------------------------------------------------------------===##
license_template = """{0}==={1}-*- {2} -*-==={0}
{0} _____ _
{0} / ____| (_)
{0} | (___ ___ __ _ _ _ ___ _ __ _
{0} \___ \ / _ \/ _` | | | |/ _ \| |/ _` |
{0} ____) | __/ (_| | |_| | (_) | | (_| |
{0} |_____/ \___|\__, |\__,_|\___/|_|\__,_| - Game Engine (2016-2017)
{0} | |
{0} |_|
{0}
{0} This file is distributed under the MIT License (MIT).
{0} See LICENSE.txt for details.
{0}
{0}===------------------------------------------------------------------------------------------==={0}"""
from argparse import ArgumentParser
from sys import exit, stderr, argv
from os import fsencode, listdir, path, walk
import io
def update_license(file, comment, lang):
print("Updating " + file + " ...")
new_data = None
with io.open(file, 'r', newline='\n') as f:
read_data = f.read()
first_line = '{0}==={1}-*- {2} -*-==={0}'.format(comment, (82 - len(lang)) * '-', lang)
last_line = '{0}==={1}==={0}'.format(comment, 90 * '-')
first_idx = read_data.find(first_line)
last_idx = read_data.find(last_line) + len(last_line)
if first_idx == -1 or last_idx == -1:
return
replacement_str = read_data[first_idx:last_idx]
new_data = read_data.replace(replacement_str,
license_template.format(comment, (82 - len(lang)) * '-', lang))
with io.open(file, 'w', newline='\n') as f:
f.write(new_data)
def main():
parser = ArgumentParser("license-update.py",
description="Update the license of all files.")
parser.add_argument("dirs", help="directories to traverse", metavar='dir', nargs='+')
parser.add_argument("-v", "--verbose", dest="verbose", action='store_true',
help="enable verbose logging")
args = parser.parse_args()
for dir in args.dirs:
for root, sub_folders, files in walk(dir):
for filename in files:
if filename.endswith(".py"):
lang = 'Python'
comment = '##'
elif filename.endswith(".cpp") or filename.endswith(".h") or \
filename.endswith(".inc"):
lang = 'C++'
comment = '//'
elif filename.endswith(".cmake") or filename == "CMakeLists.txt":
lang = 'CMake'
comment = '##'
else:
continue
file = path.join(root, filename)
update_license(file, comment, lang)
if __name__ == '__main__':
main()
|
thfabian/sequoia
|
scripts/update-header.py
|
Python
|
mit
| 3,935
|
"""AoC Day 5
Usage:
day5.py <input>
day5.py (-h | --help)
day5.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
from docopt import docopt
import hashlib
def find_8ch_ordered_pw(input):
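    # Part 1: hash "input + counter" with MD5; every digest that starts
    # with five zero hex digits contributes its sixth hex digit to the
    # password, in the order found.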
pw = ""
count = 0
while len(pw) < 8:
m = hashlib.md5()
m.update(input + str(count))
if m.hexdigest().startswith("00000"):
pw += str(m.hexdigest()[5])
count += 1
print pw
def find_8ch_position_pw(input):
pw = list(" ")
added_chars = 0
count = 0
while added_chars < 8:
m = hashlib.md5()
m.update(input + str(count))
if m.hexdigest().startswith("00000"):
pos = int(m.hexdigest()[5], 16)
c = str(m.hexdigest()[6])
if pos < 8 and pw[pos] == " ":
pw[pos] = c
added_chars += 1
count += 1
print ''.join(pw)
if __name__ == '__main__':
arguments = docopt(__doc__, version='1')
find_8ch_ordered_pw(arguments["<input>"])
find_8ch_position_pw(arguments["<input>"])
|
arink/advent-of-code
|
2016/day5/day5.py
|
Python
|
mit
| 1,100
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-21 13:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('loans', '0005_auto_20160721_1629'),
]
operations = [
migrations.AlterField(
model_name='interesttype',
name='type',
field=models.CharField(choices=[('simple', 'Simple'), ('compound', 'Compound')], max_length=255),
),
]
|
lubegamark/senkumba
|
loans/migrations/0006_auto_20160721_1640.py
|
Python
|
mit
| 511
|
# Copyright 2000-2010 Michael Hudson-Doyle <micahel@gmail.com>
# Antonio Cuni
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from pyrepl import commands, reader
from pyrepl.reader import Reader
def prefix(wordlist, j = 0):
d = {}
i = j
try:
while 1:
for word in wordlist:
d[word[i]] = 1
if len(d) > 1:
return wordlist[0][j:i]
i += 1
d = {}
except IndexError:
return wordlist[0][j:i]
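# e.g. prefix(['foobar', 'foobaz']) == 'fooba' -- the longest common
# prefix of the words, starting the comparison at index j.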
import re
def stripcolor(s):
return stripcolor.regexp.sub('', s)
stripcolor.regexp = re.compile(r"\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[m|K]")
def real_len(s):
return len(stripcolor(s))
def left_align(s, maxlen):
stripped = stripcolor(s)
if len(stripped) > maxlen:
# too bad, we remove the color
return stripped[:maxlen]
padding = maxlen - len(stripped)
return s + ' '*padding
def build_menu(cons, wordlist, start, use_brackets, sort_in_column):
if use_brackets:
item = "[ %s ]"
padding = 4
else:
item = "%s "
padding = 2
maxlen = min(max(map(real_len, wordlist)), cons.width - padding)
cols = cons.width // (maxlen + padding)
rows = (len(wordlist) - 1)//cols + 1
if sort_in_column:
# sort_in_column=False (default) sort_in_column=True
# A B C A D G
# D E F B E
# G C F
#
# "fill" the table with empty words, so we always have the same amout
# of rows for each column
missing = cols*rows - len(wordlist)
wordlist = wordlist + ['']*missing
indexes = [(i%cols)*rows + i//cols for i in range(len(wordlist))]
wordlist = [wordlist[i] for i in indexes]
menu = []
i = start
for r in range(rows):
row = []
for col in range(cols):
row.append(item % left_align(wordlist[i], maxlen))
i += 1
if i >= len(wordlist):
break
menu.append( ''.join(row) )
if i >= len(wordlist):
i = 0
break
if r + 5 > cons.height:
menu.append(" %d more... "%(len(wordlist) - i))
break
return menu, i
# this gets somewhat user interface-y, and as a result the logic gets
# very convoluted.
#
# To summarise the summary of the summary:- people are a problem.
# -- The Hitch-Hikers Guide to the Galaxy, Episode 12
#### Desired behaviour of the completions commands.
# the considerations are:
# (1) how many completions are possible
# (2) whether the last command was a completion
# (3) if we can assume that the completer is going to return the same set of
# completions: this is controlled by the ``assume_immutable_completions``
# variable on the reader, which is True by default to match the historical
# behaviour of pyrepl, but e.g. False in the ReadlineAlikeReader to match
# more closely readline's semantics (this is needed e.g. by
# fancycompleter)
#
# if there's no possible completion, beep at the user and point this out.
# this is easy.
#
# if there's only one possible completion, stick it in. if the last thing
# user did was a completion, point out that he isn't getting anywhere, but
# only if the ``assume_immutable_completions`` is True.
#
# now it gets complicated.
#
# for the first press of a completion key:
# if there's a common prefix, stick it in.
# irrespective of whether anything got stuck in, if the word is now
# complete, show the "complete but not unique" message
# if there's no common prefix and if the word is not now complete,
# beep.
# common prefix -> yes no
# word complete \/
# yes "cbnu" "cbnu"
# no - beep
# for the second bang on the completion key
# there will necessarily be no common prefix
# show a menu of the choices.
# for subsequent bangs, rotate the menu around (if there are sufficient
# choices).
class complete(commands.Command):
def do(self):
r = self.reader
stem = r.get_stem()
if r.assume_immutable_completions and \
r.last_command_is(self.__class__):
completions = r.cmpltn_menu_choices
else:
r.cmpltn_menu_choices = completions = \
r.get_completions(stem)
if len(completions) == 0:
r.error("no matches")
elif len(completions) == 1:
if r.assume_immutable_completions and \
len(completions[0]) == len(stem) and \
r.last_command_is(self.__class__):
r.msg = "[ sole completion ]"
r.dirty = 1
r.insert(completions[0][len(stem):])
else:
p = prefix(completions, len(stem))
if p:
r.insert(p)
if r.last_command_is(self.__class__):
if not r.cmpltn_menu_vis:
r.cmpltn_menu_vis = 1
r.cmpltn_menu, r.cmpltn_menu_end = build_menu(
r.console, completions, r.cmpltn_menu_end,
r.use_brackets, r.sort_in_column)
r.dirty = 1
elif stem + p in completions:
r.msg = "[ complete but not unique ]"
r.dirty = 1
else:
r.msg = "[ not unique ]"
r.dirty = 1
class self_insert(commands.self_insert):
def do(self):
commands.self_insert.do(self)
r = self.reader
if r.cmpltn_menu_vis:
stem = r.get_stem()
if len(stem) < 1:
r.cmpltn_reset()
else:
completions = [w for w in r.cmpltn_menu_choices
if w.startswith(stem)]
if completions:
r.cmpltn_menu, r.cmpltn_menu_end = build_menu(
r.console, completions, 0,
r.use_brackets, r.sort_in_column)
else:
r.cmpltn_reset()
class CompletingReader(Reader):
"""Adds completion support
Adds instance variables:
    * cmpltn_menu: the list of menu lines currently displayed
    * cmpltn_menu_vis: whether the completion menu is visible
    * cmpltn_menu_end: index where the menu display stopped, used to
      rotate through long menus
    * cmpltn_menu_choices: the current list of candidate completions
    """
# see the comment for the complete command
assume_immutable_completions = True
use_brackets = True # display completions inside []
sort_in_column = False
def collect_keymap(self):
return super(CompletingReader, self).collect_keymap() + (
(r'\t', 'complete'),)
def __init__(self, console):
super(CompletingReader, self).__init__(console)
self.cmpltn_menu = ["[ menu 1 ]", "[ menu 2 ]"]
self.cmpltn_menu_vis = 0
self.cmpltn_menu_end = 0
for c in [complete, self_insert]:
self.commands[c.__name__] = c
self.commands[c.__name__.replace('_', '-')] = c
def after_command(self, cmd):
super(CompletingReader, self).after_command(cmd)
if not isinstance(cmd, self.commands['complete']) \
and not isinstance(cmd, self.commands['self_insert']):
self.cmpltn_reset()
def calc_screen(self):
screen = super(CompletingReader, self).calc_screen()
if self.cmpltn_menu_vis:
ly = self.lxy[1]
screen[ly:ly] = self.cmpltn_menu
self.screeninfo[ly:ly] = [(0, [])]*len(self.cmpltn_menu)
self.cxy = self.cxy[0], self.cxy[1] + len(self.cmpltn_menu)
return screen
def finish(self):
super(CompletingReader, self).finish()
self.cmpltn_reset()
def cmpltn_reset(self):
self.cmpltn_menu = []
self.cmpltn_menu_vis = 0
self.cmpltn_menu_end = 0
self.cmpltn_menu_choices = []
def get_stem(self):
st = self.syntax_table
SW = reader.SYNTAX_WORD
b = self.buffer
p = self.pos - 1
while p >= 0 and st.get(b[p], SW) == SW:
p -= 1
return ''.join(b[p+1:self.pos])
def get_completions(self, stem):
return []
def test():
class TestReader(CompletingReader):
def get_completions(self, stem):
return [s for l in map(lambda x:x.split(),self.history)
for s in l if s and s.startswith(stem)]
reader = TestReader()
reader.ps1 = "c**> "
reader.ps2 = "c/*> "
reader.ps3 = "c|*> "
reader.ps4 = "c\*> "
while reader.readline():
pass
if __name__=='__main__':
test()
|
timm/timmnix
|
pypy3-v5.5.0-linux64/lib_pypy/pyrepl/completing_reader.py
|
Python
|
mit
| 9,446
|
"""
__graph_MT_pre__Null.py___________________________________________________________
Automatically generated graphical appearance ---> MODIFY DIRECTLY WITH CAUTION
__________________________________________________________________________
"""
import tkFont
from graphEntity import *
from GraphicalForm import *
from ATOM3Constraint import *
class graph_MT_pre__Null(graphEntity):
def __init__(self, x, y, semObject = None):
self.semanticObject = semObject
self.sizeX, self.sizeY = 173, 91
graphEntity.__init__(self, x, y)
self.ChangesAtRunTime = 0
self.constraintList = []
if self.semanticObject: atribs = self.semanticObject.attributesToDraw()
else: atribs = None
self.graphForms = []
self.imageDict = self.getImageDict()
def DrawObject(self, drawing, showGG = 0):
self.dc = drawing
if showGG and self.semanticObject: self.drawGGLabel(drawing)
h = drawing.create_oval(self.translate([209.0, 88.0, 209.0, 88.0]), tags = (self.tag, 'connector'), outline = '', fill = '' )
self.connectors.append( h )
h = drawing.create_rectangle(self.translate([38.0, 38.0, 209.0, 127.0]), tags = self.tag, stipple = '', width = 1, outline = 'black', fill = 'cyan')
self.gf5 = GraphicalForm(drawing, h, "gf5")
self.graphForms.append(self.gf5)
font = tkFont.Font( family='Arial', size=12, weight='normal', slant='roman', underline=0)
h = drawing.create_text(self.translate([107.0, 66.0, 107.0, 12.0])[:2], tags = self.tag, font=font, fill = 'black', anchor = 'center', text = 'MT_pre__Null', width = '0', justify= 'left', stipple='' )
self.gf56 = GraphicalForm(drawing, h, 'gf56', fontObject=font)
self.graphForms.append(self.gf56)
helv12 = tkFont.Font ( family="Helvetica", size=12, weight="bold" )
h = drawing.create_text(self.translate([-3, -3]), font=helv12,
tags = (self.tag, self.semanticObject.getClass()),
fill = "black",
text=self.semanticObject.MT_label__.toString())
self.attr_display["MT_label__"] = h
self.gf_label = GraphicalForm(drawing, h, 'gf_label', fontObject=helv12)
self.graphForms.append(self.gf_label)
def postCondition( self, actionID, * params):
return None
def preCondition( self, actionID, * params):
return None
def getImageDict( self ):
imageDict = dict()
return imageDict
new_class = graph_MT_pre__Null
|
levilucio/SyVOLT
|
UMLRT2Kiltera_MM/graph_MT_pre__Null.py
|
Python
|
mit
| 2,595
|
"""
Name : xmltoJson.py
Author : Jerry M. Reghunadh
Version : 1.0
Comment : XML to Badgerfish JSON converter
Badgerfish convention -> http://badgerfish.ning.com/
"""
import xml.sax
import json
"Class for SAX ContentHandle"
class XMLSAXContentHandler (xml.sax.ContentHandler):
_key = []
data = {}
_currData = {}
_prevData = []
_namespace = {}
def __init__(self,):
xml.sax.ContentHandler.__init__(self)
self._key = []
self.data = {}
self._currData = {}
self._prevData = []
self._namespace = {}
def startElement(self, name, attrs):
if len(self.data) == 0 :
self.data[name] = {}
self._currData = self.data
self._key.append(name)
self._prevData.append(self.data)
else:
if self._key[len(self._key) - 1] not in self._currData.keys():
self._currData[self._key[len(self._key) - 1]] = {}
if name not in self._currData[self._key[len(self._key) - 1]].keys():
self._currData[self._key[len(self._key) - 1]][name] = {}
self._prevData.append(self._currData)
self._currData = self._currData[self._key[len(self._key) - 1]]
if len(self._namespace) > 0:
self._currData[name]["@xmlns"] = self._namespace
self._key.append(name)
if( len(attrs) > 0 ):
for _key in attrs.getNames():
if _key[0:5] == "xmlns":
if "@xmlns" not in self._currData[name].keys():
self._currData[name]["@xmlns"] = {}
if len(_key) == 5:
self._currData[name]["@xmlns"]["$"] = attrs.getValue(_key)
else:
self._currData[name]["@xmlns"][_key[6:len(_key)]] = attrs.getValue(_key)
self._namespace = self._currData[name]["@xmlns"]
else:
self._currData[name]["@"+_key] = attrs.getValue(_key)
def characters(self, content):
if content != " ":
if self._key[len(self._key) - 1] not in self._currData.keys():
self._currData[self._key[len(self._key) - 1]] = {}
if "$" not in self._currData[self._key[len(self._key) - 1]].keys():
self._currData[self._key[len(self._key) - 1]]["$"] = content
else:
if len(self._currData[self._key[len(self._key) - 1]]) == 1:
temp = self._currData[self._key[len(self._key) - 1]]
self._currData[self._key[len(self._key) - 1]] = []
self._currData[self._key[len(self._key) - 1]].append(temp)
temp = {}
temp["$"] = content
self._currData[self._key[len(self._key) - 1]].append(temp)
def endElement(self, name):
self._key.pop()
self._currData = self._prevData.pop()
"Call this function after importing the package."
def to_json(xmlString):
handler = XMLSAXContentHandler()
xml.sax.parseString(xmlString, handler)
return json.dumps(handler.data)
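# Minimal usage sketch (not part of the original module). Bytes input
# keeps this working on Python 3, where xml.sax.parseString expects a
# bytes-like object; element text maps to the "$" key per Badgerfish.
if __name__ == '__main__':
    print(to_json(b'<alice>bob</alice>'))  # {"alice": {"$": "bob"}}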
|
jerrymannel/xml_json_converter
|
xmlToJson.py
|
Python
|
mit
| 2,608
|
#!/usr/bin/python
import re
import csv
names = {
"<" : "gal",
")" : "per",
"|" : "bar",
">" : "gar",
"[" : "sel",
"\\" : "bas",
"#" : "hax",
";" : "sem",
"$" : "buc",
"-" : "hep",
"]" : "ser",
"_" : "cab",
"{" : "kel",
"~" : "sig",
"%" : "cen",
"}" : "ker",
"'" : "soq",
":" : "col",
"^" : "ket",
"*" : "tar",
"," : "com",
"+" : "lus",
"`" : "tec",
"\"" : "doq",
"&" : "pam",
"=" : "tis",
"." : "dot",
"@" : "pat",
"?" : "wut",
"/" : "fas",
"(" : "pel",
"!" : "zap"}
runef = open("runelist")
digraphs = []
phonemictexts = {}
symbols = {}
for line in runef:
if len(line) < 3:
continue
digraph = line.strip()
phonemictext = digraph
for graph,text in names.iteritems():
phonemictext = phonemictext.replace(graph,text)
digraphs.append(digraph)
phonemictexts[digraph] = phonemictext
phonemictexts[phonemictexts[digraph]] = digraph
symbols[digraph] = "%" + phonemictext[0] + phonemictext[2:4] + phonemictext[5]
symbols[symbols[digraph]] = digraph
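# e.g. the digraph "|=" yields phonemic text "bartis" and the symbol
# "%brts" (the first, third, fourth and sixth letters of the name).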
#for i,j,k in zip(digraphs,phonemictexts,symbols):
# print i,k,j
hoonf = open("hoon.hoon")
genemode = False
genes = {}
for line in hoonf:
if not genemode:
if line[0:8] == "++ gene":
genemode = True
continue
if line[0:2] == "++":
break
m = re.match(r'.*\[(%....) (.*)\].*',line)
if not m:
continue
if not m.group(1) in symbols:
continue
genes[symbols[m.group(1)]] = m.group(0).strip()
genes[genes[symbols[m.group(1)]]] = m.group(1)
hoonf.close()
hoonf = open("hoon.hoon")
apmode = False
aps = {}
for line in hoonf:
if not apmode:
if line[0:7] == "++ ap\n":
apmode = True
continue
if line[0:2] == "++":
break
m = re.match(r'.*\[(%....) \*] {1,5}([^ ].*)',line)
if not m:
continue
if not m.group(1) in symbols:
continue
aps[symbols[m.group(1)]] = m.group(0).strip()
aps[aps[symbols[m.group(1)]]] = m.group(1)
# Save information to csv file
csvwriter = csv.writer(open('runes.csv','wb'),delimiter='\t',quoting=csv.QUOTE_MINIMAL,lineterminator='\n')
csvwriter.writerow(['digraph','symbol','name','gene','ap'])
for i in digraphs:
csvwriter.writerow([i,symbols[i],phonemictexts[i], genes[i] if i in genes else '', aps[i] if i in aps else ''])
for i in digraphs:
f = open("runes/" + phonemictexts[i] + ".txt",'w')
f.write(i + " " + symbols[i] + " " + phonemictexts[i] + "\n")
if i in genes:
f.write("\n gene:\n " + genes[i] + "\n")
if i in aps:
f.write("\n ap:\n " + aps[i] + "\n")
|
philipcmonk/hoonrunedocs
|
runegen.py
|
Python
|
mit
| 2,718
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from . import models
class UserAdmin(admin.ModelAdmin):
search_fields = ['username', 'email']
list_display = ['username', 'email']
admin.site.register(models.User, UserAdmin)
|
scailer/picarchive
|
apps/account/admin.py
|
Python
|
mit
| 244
|
# This code is supporting material for the book
# Building Machine Learning Systems with Python
# by Willi Richert and Luis Pedro Coelho
# published by PACKT Publishing
#
# It is made available under the MIT License
import os
import sys
from matplotlib import pylab
import numpy as np
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "data")
CHART_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "charts")
for d in [DATA_DIR, CHART_DIR]:
if not os.path.exists(d):
os.mkdir(d)
# Put your directory to the different music genres here
GENRE_DIR = None
GENRE_LIST = ["classical", "jazz", "country", "pop", "rock", "metal"]
# Put your directory to the test dir here
TEST_DIR = None
if GENRE_DIR is None or TEST_DIR is None:
print("Please set GENRE_DIR and TEST_DIR in utils.py")
sys.exit(1)
def plot_confusion_matrix(cm, genre_list, name, title):
pylab.clf()
pylab.matshow(cm, fignum=False, cmap='Blues', vmin=0, vmax=1.0)
ax = pylab.axes()
ax.set_xticks(range(len(genre_list)))
ax.set_xticklabels(genre_list)
ax.xaxis.set_ticks_position("bottom")
ax.set_yticks(range(len(genre_list)))
ax.set_yticklabels(genre_list)
pylab.title(title)
pylab.colorbar()
pylab.grid(False)
pylab.show()
pylab.xlabel('Predicted class')
pylab.ylabel('True class')
pylab.grid(False)
pylab.savefig(
os.path.join(CHART_DIR, "confusion_matrix_%s.png" % name), bbox_inches="tight")
def plot_pr(auc_score, name, precision, recall, label=None):
pylab.clf()
pylab.figure(num=None, figsize=(5, 4))
pylab.grid(True)
pylab.fill_between(recall, precision, alpha=0.5)
pylab.plot(recall, precision, lw=1)
pylab.xlim([0.0, 1.0])
pylab.ylim([0.0, 1.0])
pylab.xlabel('Recall')
pylab.ylabel('Precision')
pylab.title('P/R curve (AUC = %0.2f) / %s' % (auc_score, label))
filename = name.replace(" ", "_")
pylab.savefig(
os.path.join(CHART_DIR, "pr_" + filename + ".png"), bbox_inches="tight")
def plot_roc(auc_score, name, tpr, fpr, label=None):
pylab.clf()
pylab.figure(num=None, figsize=(5, 4))
pylab.grid(True)
pylab.plot([0, 1], [0, 1], 'k--')
pylab.plot(fpr, tpr)
pylab.fill_between(fpr, tpr, alpha=0.5)
pylab.xlim([0.0, 1.0])
pylab.ylim([0.0, 1.0])
pylab.xlabel('False Positive Rate')
pylab.ylabel('True Positive Rate')
pylab.title('ROC curve (AUC = %0.2f) / %s' %
(auc_score, label), verticalalignment="bottom")
pylab.legend(loc="lower right")
filename = name.replace(" ", "_")
pylab.savefig(
os.path.join(CHART_DIR, "roc_" + filename + ".png"), bbox_inches="tight")
def show_most_informative_features(vectorizer, clf, n=20):
c_f = sorted(zip(clf.coef_[0], vectorizer.get_feature_names()))
top = zip(c_f[:n], c_f[:-(n + 1):-1])
for (c1, f1), (c2, f2) in top:
print "\t%.4f\t%-15s\t\t%.4f\t%-15s" % (c1, f1, c2, f2)
def plot_log():
pylab.clf()
x = np.arange(0.001, 1, 0.001)
y = np.log(x)
pylab.title('Relationship between probabilities and their logarithm')
pylab.plot(x, y)
pylab.grid(True)
pylab.xlabel('P')
pylab.ylabel('log(P)')
filename = 'log_probs.png'
pylab.savefig(os.path.join(CHART_DIR, filename), bbox_inches="tight")
def plot_feat_importance(feature_names, clf, name):
pylab.clf()
coef_ = clf.coef_
important = np.argsort(np.absolute(coef_.ravel()))
f_imp = feature_names[important]
coef = coef_.ravel()[important]
inds = np.argsort(coef)
f_imp = f_imp[inds]
coef = coef[inds]
xpos = np.array(range(len(coef)))
pylab.bar(xpos, coef, width=1)
pylab.title('Feature importance for %s' % (name))
ax = pylab.gca()
ax.set_xticks(np.arange(len(coef)))
labels = ax.set_xticklabels(f_imp)
for label in labels:
label.set_rotation(90)
filename = name.replace(" ", "_")
pylab.savefig(os.path.join(
CHART_DIR, "feat_imp_%s.png" % filename), bbox_inches="tight")
def plot_feat_hist(data_name_list, filename=None):
pylab.clf()
num_rows = 1 + (len(data_name_list) - 1) / 2
num_cols = 1 if len(data_name_list) == 1 else 2
pylab.figure(figsize=(5 * num_cols, 4 * num_rows))
for i in range(num_rows):
for j in range(num_cols):
pylab.subplot(num_rows, num_cols, 1 + i * num_cols + j)
x, name = data_name_list[i * num_cols + j]
pylab.title(name)
pylab.xlabel('Value')
pylab.ylabel('Density')
# the histogram of the data
max_val = np.max(x)
if max_val <= 1.0:
bins = 50
elif max_val > 50:
bins = 50
else:
bins = max_val
n, bins, patches = pylab.hist(
x, bins=bins, normed=1, facecolor='green', alpha=0.75)
pylab.grid(True)
if not filename:
filename = "feat_hist_%s.png" % name
pylab.savefig(os.path.join(CHART_DIR, filename), bbox_inches="tight")
def plot_bias_variance(data_sizes, train_errors, test_errors, name):
pylab.clf()
pylab.ylim([0.0, 1.0])
pylab.xlabel('Data set size')
pylab.ylabel('Error')
pylab.title("Bias-Variance for '%s'" % name)
pylab.plot(
data_sizes, train_errors, "-", data_sizes, test_errors, "--", lw=1)
pylab.legend(["train error", "test error"], loc="upper right")
pylab.grid(True)
pylab.savefig(os.path.join(CHART_DIR, "bv_" + name + ".png"))
|
krahman/BuildingMachineLearningSystemsWithPython
|
ch09/utils.py
|
Python
|
mit
| 5,569
|
from django.conf.urls import url
from . import views
app_name='blog'
urlpatterns=[
url(r'^$',views.index,name='index'),
url(r'^post/(?P<post_id>[0-9]+)/$', views.view_post, name='view_post'),
]
|
sahilrider/DjangoApps
|
blog/urls.py
|
Python
|
mit
| 207
|
"""
File: beam.py
Purpose: Defines the Beam note construct.
"""
from structure.abstract_note_collective import AbstractNoteCollective
from structure.note import Note
from structure.tuplet import Tuplet
from fractions import Fraction
from timemodel.duration import Duration
class Beam(AbstractNoteCollective):
"""
Beam is a grouping operation, having a set scaling ratio of 1/2, but unbounded aggregate duration.
    The basic idea of a beam is that for a stand-alone beam, you can only add Notes of duration 1/4 or less.
That duration is retained under the beam.
However when a beam is added to a beam, it takes an additional reduction factor of 1/2.
Note that these factors aggregate multiplicatively through self.contextual_reduction_factor
"""
FACTOR = Fraction(1, 2)
NOTE_QUALIFIER_DURATION = Duration(1, 4)
def __init__(self, abstract_note_list=list()):
"""
Constructor
Args:
abstract_note_list: list of notes, beams, and tuplets to add consecutively under the beam.
"""
AbstractNoteCollective.__init__(self)
self.append(abstract_note_list)
@property
def duration(self):
"""
This is an override of AbstractNoteCollective.duration.
Tuplet and Beam override this to do a simple summation of linearly laid out notes and sub-notes.
The reason is that the layout algorithm of these subclasses cannot use the relative_position
attribute as the algorithm determines that.
"""
d = Duration(0)
for note in self.sub_notes:
d += note.duration
return d
def append(self, notes):
"""
Append a set of abstract notes to the beam
Args:
notes: either a list of notes or a single note to add to the beam.
"""
if isinstance(notes, list):
for n in notes:
self.append(n)
return
elif isinstance(notes, Note) or isinstance(notes, AbstractNoteCollective):
self.add(notes, len(self.sub_notes))
def add(self, note, index):
"""
Beams can only add less than 1/4 notes, and arbitrary beams and tuplets.
Only added beams incur a reduction factor of 1/2
For collective notes, always apply the factor.
"""
if note.parent is not None:
raise Exception('Cannot add note already assigned a parent')
if index < 0 or index > len(self.sub_notes):
raise Exception('add note, index {0} not in range[0, {1}]'.format(index, len(self.sub_notes)))
if isinstance(note, Note):
'''
For standard notation, the following test should be made.
However, the structure is quite general and can support other durations.
            For that reason, we opt to take out this check, which could be reinstated if only standard
            classical durations are supported.
if note.base_duration >= Duration(1, 4):
raise Exception(
"Attempt to add note with duration {0} greater than or equal to {1}".
format(note.duration, Beam.NOTE_QUALIFIER_DURATION))
'''
new_factor = self.contextual_reduction_factor
elif isinstance(note, Beam):
new_factor = self.contextual_reduction_factor * Beam.FACTOR
elif isinstance(note, Tuplet):
new_factor = self.contextual_reduction_factor
else:
raise Exception('illegal type {0}'.format(type(note)))
self.sub_notes.insert(index, note)
note.parent = self
note.apply_factor(new_factor)
# The following call will adjust layout from this point right upward
self.upward_forward_reloc_layout(note)
# see if prior note is tied, and if so, break the tie.
first_note = note
if not isinstance(note, Note):
first_note = note.get_first_note()
# If empty beam or tuplet is added, there is nothing to look for in terms of ties.
if first_note is None:
return
prior = first_note.prior_note()
if prior is not None and prior.is_tied_to:
prior.untie()
# notify up the tree of what has changed
self.notes_added([note])
def __str__(self):
base = 'Beam(Dur({0})Off({1})f={2})'.format(self.duration, self.relative_position,
self.contextual_reduction_factor)
s = base + '[' + (']' if len(self.sub_notes) == 0 else '\n')
for n in self.sub_notes:
s += ' ' + str(n) + '\n'
s += ']' if len(self.sub_notes) != 0 else ''
return s
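# Illustration (not part of the module): reduction factors aggregate
# multiplicatively, so a note kept at duration 1/4 under a top-level beam
# is scaled by Beam.FACTOR once per extra level of beam nesting:
#     Fraction(1, 4) * Beam.FACTOR == Fraction(1, 8)        # one level
#     Fraction(1, 4) * Beam.FACTOR ** 2 == Fraction(1, 16)  # two levels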
|
dpazel/music_rep
|
structure/beam.py
|
Python
|
mit
| 4,873
|
import unittest
from biokbase.narrative.widgetmanager import WidgetManager
import IPython
import mock
import os
from .util import ConfigTests
from .narrative_mock.mockclients import get_mock_client
"""
Tests for the WidgetManager class
"""
__author__ = "Bill Riehl <wjriehl@lbl.gov>"
class WidgetManagerTestCase(unittest.TestCase):
@classmethod
def setUpClass(self):
config = ConfigTests()
os.environ[
"KB_WORKSPACE_ID"
] = "12345" # That's the same workspace as my luggage!
app_specs_list = config.load_json_file(config.get("specs", "app_specs_file"))
app_specs_dict = dict()
for s in app_specs_list:
app_specs_dict[s["info"]["id"]] = s
self.wm = WidgetManager()
self.good_widget = "kbaseTabTable"
self.bad_widget = "notAWidget"
self.good_tag = "release"
self.bad_tag = "notATag"
self.widget_with_consts = "kbaseContigSetView"
def test_widgetmanager_reload(self):
self.wm.load_widget_info(verbose=True)
def test_widgetmanager_instantiated(self):
self.assertIsInstance(self.wm, WidgetManager)
def test_widget_inputs(self):
self.wm.print_widget_inputs(self.good_widget)
def test_widget_inputs_bad(self):
with self.assertRaises(ValueError):
self.wm.print_widget_inputs(self.bad_widget)
def test_widget_constants(self):
constants = self.wm.get_widget_constants(self.widget_with_consts)
self.assertTrue("ws" in constants)
def test_widget_constants_bad(self):
with self.assertRaises(ValueError):
self.wm.get_widget_constants(self.bad_widget)
def test_show_output_widget(self):
self.assertIsInstance(
self.wm.show_output_widget(
self.good_widget,
{"obj": "TestObject"},
upas={"obj": "1/2/3"},
check_widget=True,
),
IPython.core.display.Javascript,
)
def test_show_output_widget_bad(self):
with self.assertRaises(ValueError):
self.wm.show_output_widget(
self.bad_widget,
{"bad": "inputs"},
upas={"bad": "1/2/3"},
check_widget=True,
)
def test_show_advanced_viewer_widget(self):
title = "Widget Viewer"
cell_id = "abcde"
widget_name = "CustomOutputDemo"
widget_js = self.wm.show_advanced_viewer_widget(
widget_name,
{"param1": "value1", "param2": "value2"},
{"param1": "value1", "param2": "value2"},
title=title,
cell_id=cell_id,
tag="dev",
check_widget=True,
)
self.assertIsInstance(widget_js, IPython.core.display.Javascript)
widget_code = widget_js.data
self.assertIn("widget: '{}'".format(widget_name), widget_code)
self.assertIn('cellId: "{}"'.format(cell_id), widget_code)
self.assertIn("title: '{}'".format(title), widget_code)
def test_show_advanced_viewer_widget_bad(self):
with self.assertRaises(ValueError):
self.wm.show_advanced_viewer_widget(
self.bad_widget, {"bad": "inputs"}, {"bad": "state"}, check_widget=True
)
def test_show_external_widget(self):
widget = self.wm.show_external_widget(
"contigSet", "My ContigSet View", {"objectRef": "6402/3/8"}, {}
)
self.assertIsInstance(widget, IPython.core.display.Javascript)
def test_show_external_widget_list(self):
widget = self.wm.show_external_widget(
["widgets", "0.1.0", "genomeComparison"],
"Genome Comparison Demo",
{"objectRef": "6402/5/2"},
{},
auth_required=True,
)
self.assertIsInstance(widget, IPython.core.display.Javascript)
@mock.patch("biokbase.narrative.widgetmanager.clients.get", get_mock_client)
def test_show_data_cell(self):
"""
Tests - should do the following:
def show_data_widget(self, upa, title=None, cell_id=None, tag="release"):
fail message with no upa
fail message with malformed upa
shouldn't care what title or cell_id are, but should test to make sure they wind up in
output code properly
fail if type spec'd app isn't present for some tag
otherwise, succeed and produce JS code.
test mocks.
"""
js_obj = self.wm.show_data_widget("18836/5/1", "some title", "no_id")
print(js_obj.data)
self.assertIsValidCellCode(
js_obj, {}, "viewer", "kbaseGenomeView", "no_id", "some title"
)
@mock.patch("biokbase.narrative.widgetmanager.clients.get", get_mock_client)
def test_infer_upas(self):
test_result_upa = "18836/5/1"
upas = self.wm.infer_upas(
"testCrazyExample",
{
"obj_id1": 1,
"obj_id2": 2,
"obj_name1": "foo",
"obj_name2": "bar/baz",
"obj_names": ["a", "b", "c"],
"obj_ref1": "1/2/3",
"obj_ref2": "foo/bar",
"obj_refs": ["7/8/9", "0/1/2"],
"ws_name": "some_ws",
"extra_param": "extra_value",
"other_extra_param": 0,
},
)
self.assertEqual(upas["obj_id1"], test_result_upa)
self.assertEqual(upas["obj_id2"], test_result_upa)
self.assertEqual(upas["obj_name1"], test_result_upa)
self.assertEqual(upas["obj_name2"], test_result_upa)
self.assertEqual(upas["obj_names"], [test_result_upa] * 3)
self.assertEqual(upas["obj_ref1"], "1/2/3")
self.assertEqual(upas["obj_ref2"], test_result_upa)
self.assertEqual(upas["obj_refs"], [test_result_upa] * 2)
self.assertEqual(len(upas.keys()), 8)
@mock.patch("biokbase.narrative.widgetmanager.clients.get", get_mock_client)
def test_infer_upas_none(self):
"""
Test infer_upas when no upas are given. Should return an empty dict.
"""
upas = self.wm.infer_upas(
"testCrazyExample",
{"some_param": "some_value", "another_param": "another_value"},
)
self.assertIsInstance(upas, dict)
self.assertFalse(upas)
@mock.patch("biokbase.narrative.widgetmanager.clients.get", get_mock_client)
def test_infer_upas_simple_widget(self):
"""
Test infer_upas against the "default" widget - i.e. params don't matter and UPAs don't matter.
"""
upas = self.wm.infer_upas(
"kbaseDefaultNarrativeOutput",
{
"some_param": "some_value",
"another_param": "another_value",
"obj_ref": "1/2/3",
"ws_name": "some_workspace",
},
)
self.assertIsInstance(upas, dict)
self.assertFalse(upas)
@mock.patch("biokbase.narrative.widgetmanager.clients.get", get_mock_client)
def test_infer_upas_nulls(self):
"""
Test infer_upas when None is passed to it as an object name. Fields with None
as input should not map to an UPA.
"""
test_result_upa = "18836/5/1"
upas = self.wm.infer_upas(
"testCrazyExample",
{
"obj_id1": None,
"obj_id2": None,
"obj_name1": "foo",
"obj_name2": "bar/baz",
"obj_names": ["a", "b", "c"],
"obj_ref1": "1/2/3",
"obj_ref2": "foo/bar",
"obj_refs": ["7/8/9", "0/1/2"],
"ws_name": "some_ws",
"extra_param": "extra_value",
"other_extra_param": 0,
},
)
self.assertIsInstance(upas, dict)
self.assertNotIn("obj_id1", upas)
self.assertNotIn("obj_id2", upas)
self.assertEqual(upas["obj_name1"], test_result_upa)
self.assertEqual(upas["obj_name2"], test_result_upa)
self.assertEqual(upas["obj_names"], [test_result_upa] * 3)
self.assertEqual(upas["obj_ref1"], "1/2/3")
self.assertEqual(upas["obj_ref2"], test_result_upa)
self.assertEqual(upas["obj_refs"], [test_result_upa] * 2)
@mock.patch("biokbase.narrative.widgetmanager.clients.get", get_mock_client)
def test_missing_env_path(self):
backup_dir = os.environ["NARRATIVE_DIR"]
del os.environ["NARRATIVE_DIR"]
test_wm = WidgetManager()
self.assertIsInstance(test_wm.widget_param_map, dict)
self.assertFalse(test_wm.widget_param_map)
os.environ["NARRATIVE_DIR"] = backup_dir
def assertIsValidCellCode(self, js_obj, data, type, widget, cellId, title):
code_lines = js_obj.data.strip().split("\n")
self.assertTrue(
code_lines[0].strip().startswith("element.html(\"<div id='kb-vis")
)
self.assertEqual(
code_lines[1].strip(),
"require(['kbaseNarrativeOutputCell'], function(KBaseNarrativeOutputCell) {",
)
self.assertTrue(
code_lines[2]
.strip()
.startswith(r"var w = new KBaseNarrativeOutputCell($('#kb-vis")
)
if __name__ == "__main__":
unittest.main()
|
kbase/narrative
|
src/biokbase/narrative/tests/test_widgetmanager.py
|
Python
|
mit
| 9,378
|
import math
import unittest
def get_order_price(sub_total):
return math.ceil(sub_total * 1.16 * 100)/100
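# math.ceil rounds the gross price up to the next cent, in the seller's
# favour: 7.57 * 1.16 = 8.7812, which becomes 8.79 rather than 8.78.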
class GetOrderPriceTest(unittest.TestCase):
def test_applies_16percent_vat_for_1(self):
order_price = get_order_price(1.00)
self.assertEqual(1.16, order_price)
def test_applies_16percent_vat_for_odd(self):
order_price = get_order_price(7.57)
self.assertEqual(8.79, order_price)
def test_applies_16percent_vat_for_10(self):
order_price = get_order_price(10.00)
self.assertEqual(11.60, order_price)
if __name__ == '__main__':
unittest.main()
|
mamachanko/tdd-talk
|
order_price.py
|
Python
|
mit
| 622
|
from evennia import Command as BaseCommand
from evennia import utils
from evennia.commands.default.muxcommand import MuxCommand
from world import rules
from world import english_utils
import time
class Command(BaseCommand):
"""
Inherit from this if you want to create your own command styles
from scratch. Note that Evennia's default commands inherits from
MuxCommand instead.
Note that the class's `__doc__` string (this text) is
used by Evennia to create the automatic help entry for
the command, so make sure to document consistently here.
Each Command implements the following methods, called
in this order (only func() is actually required):
- at_pre_cmd(): If this returns True, execution is aborted.
- parse(): Should perform any extra parsing needed on self.args
and store the result on self.
- func(): Performs the actual work.
- at_post_cmd(): Extra actions, often things done after
every command, like prompts.
"""
def at_post_cmd(self):
"""
This hook is called after the command has finished executing
(after self.func()).
"""
caller = self.caller
        if caller.db.health is not None:
if (float(caller.db.health) / float(caller.db.max_health)) > 0.80:
prompt_hp_color = "|g"
elif (float(caller.db.health) / float(caller.db.max_health)) > 0.36:
prompt_hp_color = "|y"
else:
prompt_hp_color = "|r"
if caller.db.stamina > 6:
prompt_stamina_color = "|g"
elif caller.db.stamina > 3:
prompt_stamina_color = "|y"
else:
prompt_stamina_color = "|r"
prompt = "%sHealth|n: %s%s|n - |gMagic|n: Asleep - %sStamina|n: %s%s." % (
prompt_hp_color, prompt_hp_color, caller.db.health, prompt_stamina_color, prompt_stamina_color,
caller.db.stamina)
caller.msg(prompt)
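# (Colour thresholds, inferred from the branches above: health above 80%
# of max shows green, above 36% yellow, otherwise red; stamina above 6 is
# green, above 3 yellow, otherwise red.)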
def show_balance(self):
caller = self.caller
# This happens if someone doesn't have balance back yet: the skill gives a message and aborts.
if time.time() < caller.db.balance_time:
if caller.db.balance_time - time.time() > 3:
caller.msg("You need 3 more seconds!")
elif caller.db.balance_time - time.time() > 2:
caller.msg("You need 2 more seconds!")
elif caller.db.balance_time - time.time() > 1:
caller.msg("You need 1 more second!")
elif caller.db.balance_time - time.time() > 0:
caller.msg("You've almost regained balance!")
return True
class CmdDeposit(BaseCommand):
"""
Deposit some silver sovereigns into the bank.
Usage:
deposit <silver>
Hint: The fruit is fake.
"""
key = "deposit"
#aliases = ["lend", "donate"]
locks = "cmd:all()"
arg_regex = r"\s|$"
help_category = "Business"
def parse(self):
"Very trivial parser"
self.target = self.args.strip()
def func(self):
value = self.target
value_s = str(value)
caller = self.caller
if not value:
caller.msg("Deposit how much?")
return
elif not value_s.isdigit():
caller.msg("You must specify a number (and only a number) that you wish to deposit.")
# caller.search handles error messages
return
elif caller.db.silver_carried < int(value):
caller.msg("That's more silver than you're carrying!")
return
elif caller.db.silver_carried >= int(value):
string = "You deposit {:,} silver sovereigns into your Tower bank account.".format(int(value))
caller.msg(string)
caller.db.silver_carried = caller.db.silver_carried - int(value)
caller.db.tower_bank_account = caller.db.tower_bank_account + int(value)
return
class CmdWithdraw(BaseCommand):
"""
Withdraw some silver sovereigns from the bank.
Usage:
withdraw <silver>
Hint: The fruit is fake.
"""
key = "withdraw"
#aliases = ["lend", "donate"]
locks = "cmd:all()"
arg_regex = r"\s|$"
help_category = "Business"
def parse(self):
"Very trivial parser"
self.target = self.args.strip()
def func(self):
value = self.target
value_s = str(value)
caller = self.caller
if not value:
caller.msg("Withdraw how much?")
return
elif not value_s.isdigit():
caller.msg("You must specify a number (and only a number) that you wish to withdraw.")
# caller.search handles error messages
return
elif caller.db.tower_bank_account < int(value):
caller.msg("That's more silver than you have in your account!")
return
elif caller.db.tower_bank_account >= int(value):
string = "You withdraw {:,} silver sovereigns from your Tower bank account.".format(int(value))
caller.msg(string)
caller.db.silver_carried = caller.db.silver_carried + int(value)
caller.db.tower_bank_account = caller.db.tower_bank_account - int(value)
return
class CmdBalance(BaseCommand):
"""
Check the outstanding balance in your bank account.
"""
key = "balance"
#aliases = ["lend", "donate"]
locks = "cmd:all()"
arg_regex = r"\s|$"
help_category = "Business"
def func(self):
string = "You have {:,} silver sovereigns available for withdrawal from your Tower bank account.".format(self.caller.db.tower_bank_account)
self.caller.msg(string)
class CmdDonate(BaseCommand):
"""
Donate some silver sovereigns to Corinth's city coffers. City coffers are primarily used for law enforcement and military purposes.
Usage:
donate <silver>
"""
key = "donate"
#aliases = ["lend", "donate"]
locks = "cmd:all()"
arg_regex = r"\s|$"
help_category = "Business"
def parse(self):
"Very trivial parser"
self.target = self.args.strip()
def func(self):
value = self.target
value_s = str(value)
caller = self.caller
sphere = caller.search("#609", global_search=True)
if not value:
caller.msg("Donate how much to Corinth?")
return
elif not value_s.isdigit():
caller.msg("You must specify a number (and only a number) that you wish to donate.")
# caller.search handles error messages
return
elif caller.db.silver_carried < int(value):
caller.msg("That's more silver than you're carrying!")
return
elif caller.db.silver_carried >= int(value):
string = "You deposit {:,} silver sovereigns into Corinth's city coffers.".format(int(value))
caller.msg(string)
caller.db.silver_carried = caller.db.silver_carried - int(value)
sphere.db.coffers = sphere.db.coffers + int(value)
return
# class CmdBuy(BaseCommand):
# """
# Buy an item from a shop.
#
# Usage:
# deposit <silver>
#
# Hint: The fruit is fake.
# """
# key = "buy"
# #aliases = ["lend", "donate"]
# locks = "cmd:all()"
# arg_regex = r"\s|$"
#
# def parse(self):
# "Very trivial parser"
# self.target = self.args.strip()
#
# def func(self):
#
# value = self.target
# value_s = str(value)
#
# caller = self.caller
#
# if not value:
# caller.msg("Deposit how much?")
# return
# elif not str.isdigit(value_s):
# caller.msg("You must specify a number (and only a number) that you wish to deposit.")
# # caller.search handles error messages
# return
# elif caller.db.silver_carried < int(value):
# caller.msg("That's more silver than you're carrying!")
# return
# elif caller.db.silver_carried >= int(value):
# string = "You deposit {:,} silver sovereigns into your Tower bank account.".format(int(value))
# caller.msg(string)
# caller.db.silver_carried = caller.db.silver_carried - int(value)
# caller.db.tower_bank_account = caller.db.tower_bank_account + int(value)
# return
|
whitehorse-io/encarnia
|
Encarnia/commands/business.py
|
Python
|
mit
| 8,790
|
"""
gspread.exceptions
~~~~~~~~~~~~~~~~~~
Exceptions used in gspread.
"""
class GSpreadException(Exception):
"""A base class for gspread's exceptions."""
class SpreadsheetNotFound(GSpreadException):
"""Trying to open non-existent or inaccessible spreadsheet."""
class WorksheetNotFound(GSpreadException):
"""Trying to open non-existent or inaccessible worksheet."""
class CellNotFound(GSpreadException):
"""Cell lookup exception."""
class NoValidUrlKeyFound(GSpreadException):
"""No valid key found in URL."""
class IncorrectCellLabel(GSpreadException):
"""The cell label is incorrect."""
class APIError(GSpreadException):
def __init__(self, response):
super().__init__(self._extract_text(response))
self.response = response
def _extract_text(self, response):
return self._text_from_detail(response) or response.text
def _text_from_detail(self, response):
try:
errors = response.json()
return errors["error"]
except (AttributeError, KeyError, ValueError):
return None
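# A minimal usage sketch (assumed, not taken from the gspread sources):
# callers typically catch APIError and inspect the attached HTTP response.
#
#     try:
#         worksheet.update_acell('B1', 'Bingo!')   # `worksheet` is hypothetical
#     except APIError as exc:
#         print(exc.response.status_code, str(exc))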
|
burnash/gspread
|
gspread/exceptions.py
|
Python
|
mit
| 1,099
|
# coding: utf-8
import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_bootstrap import Bootstrap
from flask_pagedown import PageDown
from wechat_sdk import WechatConf, WechatBasic
from wechat_sdk.exceptions import OfficialAPIError
# Application initialization
app = Flask(__name__)
app.debug = True
app.config.from_object('config')
db = SQLAlchemy(app)
login_manager = LoginManager(app)
login_manager.session_protection = 'strong'
login_manager.login_view = 'alibaba'
bootstrap = Bootstrap(app)
pagedown = PageDown(app)
# WeChat-related initialization
def refresh_access_token():
# Refresh the access_token
try:
at = wechat.get_access_token()
app.config['WECHAT_ACCESS_TOKEN'] = at['access_token']
app.config['WECHAT_ACCESS_TOKEN_EXPIRES_AT'] = at['access_token_expires_at']
except OfficialAPIError as e:
print "'errcode:' %s \n errmsg: %s" % (e.errcode, e.errmsg)
conf = WechatConf(
token=app.config['WECHAT_TOKEN'],
appid=app.config['WECHAT_APPID'],
appsecret=app.config['WECHAT_APPSECRET'],
encrypt_mode='compatible',
access_token=app.config['WECHAT_ACCESS_TOKEN'],
access_token_expires_at=app.config['WECHAT_ACCESS_TOKEN_EXPIRES_AT']
)
wechat = WechatBasic(conf=conf)
# refresh_access_token()
from . import views
|
guan080/personal_website
|
app/__init__.py
|
Python
|
mit
| 1,335
|
# from https://ruby-doc.com/docs/ProgrammingRuby/html/tut_modules.html
from inherits_import import MajorScales, PentatonicScales
def majorNum():
pass
def pentaNum():
pass
class FakePentatonicScales():
def pentaNum(self):
if self.numNotes is None:
self.numNotes = 5
return self.numNotes
class ScaleDemo(MajorScales, PentatonicScales):
def __init__(self):
self.numNotes = None
print(self.majorNum())
print(self.pentaNum())
def nothing(self):
pass
class ScaleDemoLimited(MajorScales):
def __init__(self):
self.numNotes = None
print(self.majorNum())
sd = ScaleDemo()
majorNum()
|
scottrogowski/code2flow
|
tests/test_code/py/inherits/inherits.py
|
Python
|
mit
| 688
|
from collections.abc import Mapping
from pathlib import Path
import attr
from configparser import ConfigParser
from .hand import Range
from .constants import Position
@attr.s(slots=True)
class _Situation:
utg = attr.ib()
utg1 = attr.ib()
utg2 = attr.ib()
utg3 = attr.ib()
utg4 = attr.ib()
co = attr.ib()
btn = attr.ib()
sb = attr.ib()
bb = attr.ib()
inaction = attr.ib()
outaction = attr.ib()
comment = attr.ib()
@attr.s(slots=True)
class _Spot:
position = attr.ib()
range = attr.ib()
posindex = attr.ib()
_POSITIONS = {"utg", "utg1", "utg2", "utg3", "utg4", "co", "btn", "sb", "bb"}
class Strategy(Mapping):
def __init__(self, strategy, source="<string>"):
self._config = ConfigParser(default_section="strategy", interpolation=None)
self._config.read_string(strategy, source)
self._situations = dict()
for name in self._config.sections():
# configparser sets non-specified values to ''; we want the default to be None
attr_names = [a.name for a in attr.fields(_Situation)]
values = dict.fromkeys(attr_names, None)
for key, val in self._config[name].items():
# filter out fields not implemented, otherwise it would
# cause TypeError for _Situation constructor
if (not val) or (key not in attr_names):
continue
elif key in _POSITIONS:
values[key] = Range(val)
else:
values[key] = val
self._situations[name] = _Situation(**values)
self._tuple = tuple(self._situations.values())
@classmethod
def from_file(cls, filename):
strategy = Path(filename).read_text()
return cls(strategy, source=filename)
def __getattr__(self, name):
# Strategy uses only _Situation._fields, but this way .strategy files are more flexible,
# because they can contain extra values without breaking anything
return self._config["strategy"][name]
def __iter__(self):
return iter(self._situations)
def items(self):
return self._situations.items()
def keys(self):
return self._situations.keys()
def get(self, key, default=None):
return self._situations.get(key, default)
def __getitem__(self, key):
if isinstance(key, str):
return self._situations.__getitem__(key)
elif isinstance(key, int):
return self._tuple[key]
raise TypeError("You can lookup by int or str")
def values(self):
return self._situations.values()
def __contains__(self, key):
return self._situations.__contains__(key)
def __len__(self):
return len(self._situations)
def get_first_spot(self, situation=0):
situation = self[situation]
for posindex, position in enumerate(Position):
range = getattr(situation, position.name.lower())
if range:
return _Spot(position, range, posindex)
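# A hedged usage sketch (the section name and ranges below are made up,
# not taken from the project's test data):
#
#     strategy = Strategy("[push]\nbtn = 77+ ATs+\ninaction = push\n")
#     spot = strategy.get_first_spot('push')
#     spot.position    # Position.BTN, the first position with a range
#     spot.range       # Range('77+ ATs+')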
|
pokerregion/poker
|
poker/strategy.py
|
Python
|
mit
| 3,076
|
# Copyright (c) 2017 Elias Riedel Gårding
# Licensed under the MIT License
import numpy as np
import itertools as it
class Node:
"""A class of nodes for use in decoding algorithms.
The root of the tree is a node Node(code). The children of a node are
created by node.extend().
Instance variables:
∙ ConvolutionalCode code
∙ Node parent (or None)
∙ depth ∊ ℕ
and for all nodes except the root:
∙ input_block ∊ ℤ2^k
∙ codeword ∊ ℤ2^n"""
def __init__(self, code, parent=None, input_block=None):
self.code = code
self.parent = parent
self.depth = 0 if self.is_root() else self.parent.depth + 1
if input_block is not None:
assert not self.is_root()
self.input_block = input_block
# codeword is set by the call to parent.extend()
def is_root(self):
return self.parent is None
def reversed_input_history(self, stop_at=None):
"""The reversed input history back to, but not including, the node
stop_at. If stop_at is None (the default), return the entire input
history."""
node = self
while not (node.is_root() or node is stop_at):
yield node.input_block
node = node.parent
def input_history(self, stop_at=None):
"""The reversed input history back to, but not including, the node
stop_at. If stop_at is None (the default), return the entire input
history."""
return reversed(list(self.reversed_input_history(stop_at)))
def extend(self, possible_input_blocks=None):
"""Creates and yields the children of this node.
If possible_input_blocks is None (the default), assume that all binary
input vectors of length k are possible."""
# The next codeword is (if the next time is t)
# ct = Gt b1 + G(t-1) b2 + ... + G1 bt.
# Compute everything but the last term (i.e. the above with bt = 0)
# and call it c.
zero = np.zeros([self.code.k, 1], int)
c = self.code.encode_reversed(
it.chain([zero], self.reversed_input_history()))
# Iterate over all possible input blocks
if possible_input_blocks is None:
possible_input_blocks = it.product([0,1], repeat=self.code.k)
for bits in possible_input_blocks:
input_block = np.array([bits]).transpose()
# Create a new node
node = self.__class__(self.code, self, input_block)
# Calculate the expected output at the new node
node.codeword = c ^ (self.code.Gs[0] @ input_block % 2)
yield node
def first_common_ancestor(a, b):
# Let a be the deepest node
if a.depth < b.depth:
a, b = b, a
# Make sure a and b are at the same depth
while a.depth > b.depth:
a = a.parent
# Step upwards until a common ancestor is found
while a is not b:
a = a.parent
b = b.parent
return a
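# A small worked sketch (assuming a convolutional code with k = 1, so
# each node has exactly two children):
#
#     root = Node(code)                      # `code` is a ConvolutionalCode
#     children = list(root.extend())         # two nodes, for input bits 0 and 1
#     a = list(children[0].extend())[1]
#     b = list(children[1].extend())[0]
#     first_common_ancestor(a, b) is root    # True: the paths diverge at the root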
|
eliasrg/SURF2017
|
code/separate/coding/convolutional/node.py
|
Python
|
mit
| 3,065
|
import logging
import os
import shutil
import click
from livereload import Server
from .server import app
logging.basicConfig(format='[%(levelname)s]:%(message)s', level=logging.INFO)
config_text = 'site_name: My Docs\n'
todo_text = """name: todos
description: To-do items
model:
id:
verbose: id
type: int
regex: \d+
title:
verbose: Title
type: string
content:
verbose: Content
type: string
category:
verbose: Category
type: string
tags:
verbose: Tags
type: string
action:
list:
args: [search,limit,offset,ordering]
return: [id,title]
retrieve:
return: [id,title,content,category,tags]
create:
send: [title,content,category,tags]
replace:
send: [id,title,content,category,tags]
destroy:
return: [id]
update:
send: [title,content,category,tags]
"""
@click.group(context_settings={'help_option_names': ['-h', '--help']})
def cli():
"""
XDocs - Documentation-driven development.
"""
@cli.command(name="new")
@click.argument('output_dir')
def new(output_dir):
"""Create a new XDocs project"""
docs_dir = os.path.join(output_dir, 'docs')
config_path = os.path.join(output_dir, 'xdocs.yml')
todo_path = os.path.join(docs_dir, 'Todo.yml')
if os.path.exists(output_dir):
logging.warning('Directory already exists.')
return
logging.info('Creating project directory: %s', output_dir)
os.mkdir(output_dir)
logging.info('Writing config file: %s', config_path)
open(config_path, 'w').write(config_text)
logging.info('Writing initial docs: %s', todo_path)
os.mkdir(docs_dir)
open(todo_path, 'w').write(todo_text)
logging.info('Project %s created!', output_dir)
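# A hedged command-line sketch (assuming the package installs an `xdocs`
# console entry point; the directory name is made up):
#
#     $ xdocs new my-docs          # writes my-docs/xdocs.yml and my-docs/docs/Todo.yml
#     $ cd my-docs && xdocs run    # serves the docs with livereload on port 8888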
@cli.command(name="run")
@click.option('-p', '--port',
help='Port to serve documentation locally (default: 8888)',
metavar='<PORT>')
def run(port):
"""Run the builtin development server"""
script_path = os.path.dirname(os.path.realpath(__file__))
base_path = os.getcwd()
source_path = os.path.join(base_path, 'docs')
config_path = os.path.join(base_path, 'xdocs.yml')
client_path = os.path.join(base_path, 'client')
if not os.path.exists(config_path):
logging.warning('The Directory is not a XDocs project.')
return
# if os.path.exists("client"):
# shutil.rmtree("client")
if not os.path.exists("client"):
logging.info('Copying web pages')
shutil.copytree(script_path + "/client", "client")
with open(client_path + "/resource", 'w') as f:
for file in os.listdir(source_path):
if '.yml' in file:
f.writelines("- " + file + "\n")
if not port:
port = 8888
live_server = Server(app.wsgi)
live_server.watch(source_path)
live_server.serve(restart_delay=0, open_url_delay=True, host="0.0.0.0", port=int(port))
if __name__ == '__main__':
cli()
|
gaojiuli/XDocs
|
xdocs/__main__.py
|
Python
|
mit
| 3,052
|
# -*- coding: utf-8 -*-
import os
from flask import Flask
from .db import db
from .schema_validator import jsonschema
from .sessions import ItsdangerousSessionInterface
class SubdomainDispatcher(object):
"""Dispatch requests to a specific app based on the subdomain.
"""
def __init__(self, domain, config=None, debug=False):
'''
:param domain: The domain the dispatcher is attached to.
:type domain: string
:param config:
:param debug: Force debug
:type debug: boolean
'''
self.config = config
self.factories = {}
self.apps = {}
self.domain = domain
self.debug = debug
def add_app_factory(self, name, factory):
self.factories[name] = factory
def initialize(self):
for name, factory in self.factories.items():
self.apps[name] = factory(self.config)
self.apps[name].debug = self.debug
def by_name(self, name):
return self.apps[name]
def get_application(self, environ):
"""Get the proper application for the given http environment
:param environ: The environment dict
"""
host = environ['HTTP_HOST'].split(':')[0]
parts = host.split('.')
if len(parts) != 3:
return self.apps['public']
if parts[0] == 'admin':
return self.apps['admin']
# Save the parsed subdomain to DOORBOT_ACCOUNT_HOST
environ['DOORBOT_ACCOUNT_HOST'] = parts[0]
# Check for API
path = environ.get('PATH_INFO', '')
if path.startswith('/api'):
# Strip the beginning of the path to match the app routing.
environ['PATH_INFO'] = path.rsplit('/', 1)[1]
return self.apps['api']
return self.apps['dashboard']
def __call__(self, environ, start_response):
app = self.get_application(environ)
return app(environ, start_response)
def create_admin_app(config=None):
app = Flask(__name__)
if config:
app.config.from_pyfile(config)
app.url_map.strict_slashes = False
db.init_app(app)
from .container import container
container.init_app(app)
from .views.admin import (accounts)
app.register_blueprint(accounts)
app.session_interface = ItsdangerousSessionInterface()
return app
def create_dashboard_app(config=None):
app = Flask(__name__)
if config:
app.config.from_pyfile(config)
app.url_map.strict_slashes = False
db.init_app(app)
from .container import container
container.init_app(app)
from .views.dashboard import blueprints
for blueprint in blueprints:
app.register_blueprint(blueprint)
return app
def create_public_app(config=None):
app = Flask(__name__)
if config:
app.config.from_pyfile(config)
app.url_map.strict_slashes = False
db.init_app(app)
from .container import container
container.init_app(app)
from .views.public import (public)
app.register_blueprint(public)
return app
def create_api_app(config=None):
app = Flask(__name__)
if config:
app.config.from_pyfile(config)
app.config['JSONSCHEMA_DIR'] = os.path.abspath('doorbot/views/api/schemas')
app.url_map.strict_slashes = False
jsonschema.init_app(app)
db.init_app(app)
from .container import container
container.init_app(app)
from .views.api import (
account, auth, devices, doors, integrations, notifications, people
)
from .views.api.lib.json_serializer import ApiJsonEncoder
app.json_encoder = ApiJsonEncoder
app.register_blueprint(auth)
app.register_blueprint(account)
app.register_blueprint(devices)
app.register_blueprint(doors)
app.register_blueprint(integrations)
app.register_blueprint(notifications)
app.register_blueprint(people)
return app
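# A minimal wiring sketch for the dispatcher (hypothetical module-level
# usage; the domain and config path are made up):
#
#     dispatcher = SubdomainDispatcher('doorbot.test', config='settings.cfg')
#     dispatcher.add_app_factory('public', create_public_app)
#     dispatcher.add_app_factory('admin', create_admin_app)
#     dispatcher.add_app_factory('dashboard', create_dashboard_app)
#     dispatcher.add_app_factory('api', create_api_app)
#     dispatcher.initialize()
#     # `dispatcher` is now a WSGI callable: admin.doorbot.test -> admin app,
#     # <account>.doorbot.test/api/... -> api app, bare domain -> public app.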
|
masom/doorbot-api-python
|
doorbot/factory.py
|
Python
|
mit
| 3,919
|
"""
GLM connected by a sparse network and Gaussian weights.
"""
import numpy as np
SbmWeightedModel = \
{
# Number of neurons (parametric model!)
'N' : 1,
# Parameters of the nonlinearity
'nonlinearity' :
{
'type' : 'explinear'
},
# Parameters of the bias
'bias' :
{
'type' : 'constant',
'mu' : 20.0,
'sigma' : 0.25
},
# Parameters of the background model
'bkgd' :
{
#'type' : 'basis',
'type' : 'no_stimulus',
'D_stim' : 1, # Dimensionality of the stimulus
'dt_max' : 0.3,
'mu' : 0,
'sigma' : 0.5,
'basis' :
{
'type' : 'cosine',
'n_eye' : 0,
'n_cos' : 3,
'a': 1.0/120,
'b': 0.5,
'orth' : False,
'norm' : True
}
},
# Parameters of the impulse responses
'impulse' :
{
'type' : 'normalized',
'dt_max' : 0.2,
'alpha' : 1,
'basis' :
{
'type' : 'cosine',
'n_eye' : 0,
'n_cos' : 5,
#'type' : 'exp',
#'n_eye' : 0,
#'n_exp' : 5,
'a': 1.0/120,
'b': 0.5,
'orth' : False,
'norm' : True
}
},
#'impulse' :
# {
# 'type' : 'exponential',
# 'dt_max' : 0.2,
# 'tau0' : 0.07,
# 'sigma': 0.1
# },
# Parameters of the network
'network' :
{
'weight' :
{
'type' : 'gaussian',
'mu' : 0.0,
'sigma' : 3.0,
'mu_refractory' : -3.0,
'sigma_refractory' : 0.1
},
'graph' :
{
'type' : 'sbm',
'R' : 2,
'b0' : 1,
'b1' : 1,
'alpha0' : np.ones(3)
}
},
}
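# A hedged access sketch (parameter names taken from the dict above; the
# copy-with-override idiom is an assumption about how callers use it):
#
#     model = dict(SbmWeightedModel, N=10)    # parametric: set the population size
#     model['network']['graph']['R']          # -> 2 SBM blocks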
|
slinderman/theano_pyglm
|
pyglm/models/sbm_weighted_model.py
|
Python
|
mit
| 2,306
|
from .custom_generator import CustomGenerator
__all__ = ['CustomGenerator']
|
maxalbert/tohu
|
tohu/v6/custom_generator/__init__.py
|
Python
|
mit
| 76
|
# -*- coding:utf-8 -*-
# filename:setup
__author__ = 'yibai'
from setuptools import setup, find_packages
setup(
name='yibai-sms-python-sdk',
version='1.0.0',
keywords=('yibai', 'sms', 'sdk'),
description='yibai python sdk',
license='MIT',
install_requires=['requests>=2.9.1'],
author='shomop',
author_email='lanran@shomop.com',
packages=find_packages(),
platforms='any',
)
|
100sms/yibai-python-sdk
|
yibai-sms-python-sdk-1.0.0/setup.py
|
Python
|
mit
| 433
|
# Python 2 Fix
from __future__ import division
import sys
import json
import os.path
import datetime
import storjcore
from flask import make_response, jsonify, request
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from sqlalchemy import desc
from dataserv.run import app, db, cache, manager
from dataserv.Farmer import Farmer
from dataserv.config import logging
logger = logging.getLogger(__name__)
# Helper functions
def secs_to_mins(seconds):
if seconds < 60:
return "{0} second(s)".format(int(seconds))
elif seconds < 3600:
return "{0} minute(s)".format(int(seconds / 60))
else:
return "{0} hour(s)".format(int(seconds / 3600))
def online_farmers():
# maximum number of minutes since the last check in for
# the farmer to be considered an online farmer
online_time = app.config["ONLINE_TIME"]
# find the time object online_time minutes in the past
current_time = datetime.datetime.utcnow()
time_ago = current_time - datetime.timedelta(minutes=online_time)
# give us all farmers that have been around for the past online_time
q = db.session.query(Farmer)
q = q.filter(Farmer.last_seen > time_ago)
q = q.order_by(desc(Farmer.height), Farmer.id)
return q.all()
def disable_caching():
return app.config["DISABLE_CACHING"]
# Routes
@app.route('/')
def index():
return "Hello World."
@app.route('/api/register/<btc_addr>', methods=["GET"])
def register(btc_addr):
logger.info("CALLED /api/register/{0}".format(btc_addr))
return register_with_payout(btc_addr, btc_addr)
@app.route('/api/register/<btc_addr>/<payout_addr>', methods=["GET"])
def register_with_payout(btc_addr, payout_addr):
logger.info("CALLED /api/register/{0}/{1}".format(btc_addr, payout_addr))
error_msg = "Registration Failed: {0}"
try:
user = Farmer(btc_addr)
user.authenticate(dict(request.headers))
user.register(payout_addr)
return make_response(user.to_json(), 200)
except ValueError:
msg = "Invalid Bitcoin address."
logger.warning(msg)
return make_response(error_msg.format(msg), 400)
except LookupError:
msg = "Address already is registered."
logger.warning(msg)
return make_response(error_msg.format(msg), 409)
except storjcore.auth.AuthError:
msg = "Invalid authentication headers."
logger.warning(msg)
return make_response(error_msg.format(msg), 401)
@app.route('/api/ping/<btc_addr>', methods=["GET"])
def ping(btc_addr):
logger.info("CALLED /api/ping/{0}".format(btc_addr))
error_msg = "Ping Failed: {0}"
try:
user = Farmer(btc_addr)
def before_commit(): # lazy authentication
user.authenticate(dict(request.headers))
user.ping(before_commit_callback=before_commit)
return make_response("Ping accepted.", 200)
except ValueError:
msg = "Invalid Bitcoin address."
logger.warning(msg)
return make_response(error_msg.format(msg), 400)
except LookupError:
msg = "Farmer not found."
logger.warning(msg)
return make_response(error_msg.format(msg), 404)
except storjcore.auth.AuthError:
msg = "Invalid authentication headers."
logger.warning(msg)
return make_response(error_msg.format(msg), 401)
@app.route('/api/address', methods=["GET"])
@cache.cached(timeout=app.config["CACHING_TIME"], unless=disable_caching)
def get_address():
logger.info("CALLED /api/address")
return jsonify({"address": app.config["ADDRESS"]})
@app.route('/api/online', methods=["GET"])
@cache.cached(timeout=app.config["CACHING_TIME"], unless=disable_caching)
def online():
"""Display a readable list of online farmers."""
logger.info("CALLED /api/online")
output = ""
current_time = datetime.datetime.utcnow()
text = "{0} | Last Seen: {1} | Height: {2}<br/>"
for farmer in online_farmers():
last_seen = secs_to_mins((current_time - farmer.last_seen).seconds)
output += text.format(farmer.payout_addr, last_seen, farmer.height)
return output
@app.route('/api/online/json', methods=["GET"])
@cache.cached(timeout=app.config["CACHING_TIME"], unless=disable_caching)
def online_json():
"""Display a machine readable list of online farmers."""
logger.info("CALLED /api/online/json")
payload = {
"farmers": [
json.loads(farmer.to_json()) for farmer in online_farmers()
]
}
resp = jsonify(payload)
resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
@app.route('/api/total', methods=["GET"])
@cache.cached(timeout=app.config["CACHING_TIME"], unless=disable_caching)
def total():
logger.info("CALLED /api/total")
# Add up number of shards
all_farmers = online_farmers()
total_shards = sum([farmer.height for farmer in all_farmers])
total_farmers = len(all_farmers)
# BYTE_SIZE / 1 TB
total_size = (total_shards * (app.config["BYTE_SIZE"] / (1024 ** 4)))
# Increment by 1 every TOTAL_UPDATE minutes
epoch = datetime.datetime(1970, 1, 1)
epoch_mins = (datetime.datetime.utcnow() - epoch).total_seconds()/60
id_val = epoch_mins / app.config["TOTAL_UPDATE"]
json_data = {'id': int(id_val),
'total_TB': round(total_size, 2),
'total_farmers': total_farmers}
resp = jsonify(json_data)
resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
@app.route('/api/height/<btc_addr>/<int:height>', methods=["GET"])
def set_height(btc_addr, height):
logger.info("CALLED /api/height/{0}/{1}".format(btc_addr, height))
error_msg = "Set height failed: {0}"
try:
user = Farmer(btc_addr)
user.authenticate(dict(request.headers))
if height <= app.config["HEIGHT_LIMIT"]:
user.set_height(height)
return make_response("Height accepted.", 200)
else:
msg = "Height limit exceeded."
logger.warning(msg)
raise OverflowError(msg)
except OverflowError:
msg = "Height limit exceeded."
logger.warning(msg)
return make_response(msg, 413)
except ValueError:
msg = "Invalid Bitcoin address."
logger.warning(msg)
return make_response(msg, 400)
except LookupError:
msg = "Farmer not found."
logger.warning(msg)
return make_response(msg, 404)
except storjcore.auth.AuthError:
msg = "Invalid authentication headers."
logger.warning(msg)
return make_response(error_msg.format(msg), 401)
if __name__ == '__main__':
app.run(debug=True)
|
littleskunk/dataserv
|
dataserv/app.py
|
Python
|
mit
| 6,697
|
import os
import sys
import datetime as dt
sys.path.insert(0, os.path.abspath(".."))
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.autosectionlabel',
'sphinx_rtd_theme',
]
source_suffix = '.rst'
master_doc = 'index'
project = u'Confuse'
copyright = '2012-{}, Adrian Sampson & contributors'.format(
dt.date.today().year
)
version = '1.7'
release = '1.7.0'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
html_theme = 'sphinx_rtd_theme'
htmlhelp_basename = 'Confusedoc'
|
sampsyo/confit
|
docs/conf.py
|
Python
|
mit
| 641
|
# -*- coding: UTF-8 -*-
from django.contrib.auth.models import User, Group
from patient.models import Patient
import random
import os
import django
import codecs
from django.utils import timezone
from datetime import timedelta
from measurement.models import Measurement
from threshold_value.models import ThresholdValue
from alarm.models import Alarm
"""
Running this file will create a measurement with an alarm for up to 5000 of the patients in the system.
Up to 10 of the alarms will be untreated.
To run this file:
$ make shell
>>> execfile('create_alarm_test_data.py')
"""
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.settings")
django.setup()
class CreateAlarmTestData():
def __init__(self):
self.num_alarms_created = 0
def create_random_measurement(self, patient):
measurement_time = timezone.now()
measurement_type = random.choice(['O', 'P', 'T'])
measurement_value = round(random.uniform(1, 30), 1)
if random.randint(0, 1) and measurement_type != 'O':
measurement_value += 110
measurement = Measurement.objects.create(
patient=patient,
value=measurement_value,
time=measurement_time,
type=measurement_type
)
return measurement
def create_alarm(self, measurement):
alarm = Alarm.objects.create(
measurement=measurement,
is_treated=(self.num_alarms_created > 10),
reason=(measurement.value > 50)
)
self.num_alarms_created += 1
return alarm
def run():
c = CreateAlarmTestData()
print 'Creating alarms and measurements. Watch your database grow.'
patients = list(Patient.objects.all()[:5000])
num_patients = len(patients)
random.shuffle(patients)
i = 0
for patient in patients:
measurement = c.create_random_measurement(patient)
c.create_alarm(measurement)
i += 1
if i % 50 == 0:
print str(float(100 * i) / num_patients) + "% done"
print "Done!"
run()
|
sigurdsa/angelika-api
|
create_alarm_test_data.py
|
Python
|
mit
| 2,056
|
from threading import Thread
import time
def test():
print('----test----')
time.sleep(1)
for i in range(5):
t = Thread(target=test)  # create a thread
t.start()
|
kaideyi/KDYSample
|
kYPython/FluentPython/BasicLearn/MultiProcess/Thread.py
|
Python
|
mit
| 176
|
#!/usr/bin/env python
#------------------------------------------------------------------------------
#
# sensor metadata-extraction profiles - QuickBird products
#
# Project: EO Metadata Handling
# Authors: Martin Paces <martin.paces@eox.at>
#
#-------------------------------------------------------------------------------
# Copyright (C) 2013 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#-------------------------------------------------------------------------------
from .common import (
GDAL_TYPES, OGC_TYPE_DEFS,
check, extract, extattr,
)
import re
from .interfaces import Profile
from lxml import etree
import ns_opt20
import numpy as np
import geom as ig
import datetime as dt
def get_footprint_and_center(xml, n=10):
elm = None
for _elm in xml.find("//IMFFilename"):
if _elm.tag.startswith("BAND_"):
elm = _elm
break
vlist = [
(float(extract(elm, "./ULLon")), float(extract(elm, "./ULLat"))),
(float(extract(elm, "./URLon")), float(extract(elm, "./URLat"))),
(float(extract(elm, "./LRLon")), float(extract(elm, "./LRLat"))),
(float(extract(elm, "./LLLon")), float(extract(elm, "./LLLat"))),
]
lon, lat = [], []
lon_cnt, lat_cnt = 0.0, 0.0
for i in xrange(len(vlist)):
lon0, lat0 = vlist[i]
lon1, lat1 = vlist[(i+1)%len(vlist)]
lon.append(np.linspace(lon0, lon1, n, False))
lat.append(np.linspace(lat0, lat1, n, False))
lon_cnt += lon0
lat_cnt += lat0
lon_cnt *= 0.25
lat_cnt *= 0.25
lon, lat = np.concatenate(lon), np.concatenate(lat)
if hasattr(np, 'nditer'):
wkt0 = ",".join("%.9g %.9g"%(x, y) for x, y in np.nditer([lon, lat]))
else:
wkt0 = ",".join("%.9g %.9g"%(x, y) for x, y in zip(lon, lat))
wkt0 = "EPSG:4326;POLYGON((%s, %.9g %.9g))"%(wkt0, lon[0], lat[0])
wkt1 = "EPSG:4326;POINT(%.9g %.9g)"%(lon_cnt, lat_cnt)
return ig.parseGeom(wkt0), ig.parseGeom(wkt1)
class ProfileQuickBird(Profile):
mode = {"Multi":"MS", "P":"PAN"}
c_types = {8: "uint8", 16: "uint16",}
bands = {
"P": "PAN",
"B": "Blue",
"G": "Green",
"R": "Red",
"N": "NIR",
}
bands_verbose = {
"P": "Panchromatic Band",
"B": "Blue Band",
"G": "Green Band",
"R": "Red Band",
"N": "Near Infra-Red Band",
}
nilval = {
"reason": "http://www.opengis.net/def/nil/OGC/0/inapplicable",
"value": 0,
}
platform = {
"QB02": ("QuickBird", "2")
}
@classmethod
def check_profile(cls, xml):
""" check whether the profile is applicable"""
if xml.getroot().tag != "QuickBird":
return False
cnt = 0
for elm in xml.getroot():
if elm.tag.startswith("PRODUCT_"):
cnt += 1
if cnt == 0:
return False
elif cnt > 1:
raise ValueError("Multi-product QB products not supported!")
#return False
if xml.find("/PRODUCT_1/IMFFilename") is None:
return False
cnt = 0
for elm in xml.find("/PRODUCT_1/IMFFilename"):
if elm.tag.startswith("IMAGE_"):
cnt += 1
if cnt == 0:
return False
elif cnt > 1:
raise ValueError("Multi-product QB products not supported!")
#return False
if xml.find("/PRODUCT_1/IMFFilename/IMAGE_1") is None:
return False
return True
@classmethod
def get_identifier(cls, xml):
""" get dataset's unique identifier """
satid = extract(xml, "//IMAGE_1/satId")
prodid = extattr(xml, "//IMFFilename", "href")[:-4]
return "%s_%s"%(satid, prodid)
@classmethod
def get_parent_id(cls, xml):
""" get collections's unique identifier """
satid = extract(xml, "//IMAGE_1/satId")
mode = cls.mode[extract(xml, "//IMFFilename/bandId")]
geom = "RAW"
return "%s:%s:%s"%(satid, mode, geom)
@classmethod
def extract_range_type(cls, xml):
return cls.extract_range_type_sloppy(xml)
@classmethod
def extract_range_type_sloppy(cls, xml):
""" Extract range definition applicable to all product
of the same type.
"""
base_name = cls.get_parent_id(xml)
dtype = cls.c_types[int(extract(xml, "//IMFFilename/bitsPerPixel"))]
gdal_dtype = check(GDAL_TYPES.get(dtype), 'data_type')
ogc_dtype = check(OGC_TYPE_DEFS.get(dtype), 'data_type')
bands = []
nbands = 0
for elm in xml.find("//IMFFilename"):
if not elm.tag.startswith("BAND_"):
continue
bandid = elm.tag.partition("_")[2]
nbands += 1
bands.append({
"identifier": bandid,
"name": cls.bands[bandid],
"description": cls.bands_verbose[bandid],
"nil_values": [cls.nilval],
"definition": ogc_dtype,
"data_type": gdal_dtype,
"gdal_interpretation": "Undefined",
"uom": "none",
})
return {
"name": "%s:%d:%s"%(base_name, nbands, dtype),
"bands": bands,
}
@classmethod
def extract_eop_metadata(cls, xml, ns_opt=None, **kwarg):
""" Extract range definition applicable to all product
of the same type.
"""
ns_opt = ns_opt or ns_opt20
ns_eop = ns_opt.ns_eop
ns_gml = ns_opt.ns_gml
ns_om = ns_opt.ns_om
OPT = ns_opt.E
EOP = ns_eop.E
OM = ns_om.E
#GML = ns_gml.E
satid = extract(xml, "//IMAGE_1/satId")
platform, pindex = cls.platform[satid]
time_acq_start = extract(xml, "//IMAGE_1/firstLineTime")
nrow = int(extract(xml, "//IMFFilename/numRows"))
trow = float(extract(xml, "//IMAGE_1/exposureDuration"))
tstart = dt.datetime(*(int(v) for v in re.split(r'[^\d]', time_acq_start)[:-1]))
tstop = tstart + dt.timedelta(seconds=(nrow-1)*trow)
time_acq_stop = "%sZ"%tstop.isoformat()
time_prod = extract(xml, "//IMFFilename/generationTime")
eo_equipment = EOP.EarthObservationEquipment(
ns_gml.getRandomId(),
EOP.platform(EOP.Platform(
EOP.shortName(platform),
EOP.serialIdentifier(pindex),
EOP.orbitType("LEO"),
)),
EOP.instrument(EOP.Instrument(
EOP.shortName(satid),
)),
EOP.sensor(EOP.Sensor(
EOP.sensorType("OPTICAL"),
)),
EOP.acquisitionParameters(EOP.Acquisition(
EOP.orbitNumber(extract(xml, "//IMAGE_1/revNumber")),
EOP.lastOrbitNumber(extract(xml, "//IMAGE_1/revNumber")),
EOP.orbitDirection("DESCENDING"),
EOP.illuminationAzimuthAngle(extract(xml, "//IMAGE_1/sunAz"), {"uom": "deg"}),
EOP.illuminationElevationAngle(extract(xml, "//IMAGE_1/sunEl"), {"uom": "deg"}),
EOP.incidenceAngle("%g"%(90.0-float(extract(xml, "//IMAGE_1/satEl"))), {"uom": "deg"}),
)),
)
metadata = EOP.EarthObservationMetaData(
EOP.identifier(cls.get_identifier(xml)),
EOP.parentIdentifier(cls.get_parent_id(xml)),
EOP.acquisitionType("NOMINAL"),
EOP.productType("IMAGE"),
EOP.status("ACQUIRED"),
)
result = OPT.EarthObservationResult(ns_gml.getRandomId())
cloud_cover = float(extract(xml, "//IMAGE_1/cloudCover"))
if 0 <= cloud_cover <= 1:
result.append(OPT.cloudCoverPercentage("%.1f"%(100.0*cloud_cover), {"uom":"%"}))
xml_eop = OPT.EarthObservation(
ns_gml.getRandomId(),
ns_eop.getSchemaLocation("OPT"),
#EOP.parameter(), #optional
OM.phenomenonTime(ns_gml.getTimePeriod(time_acq_start, time_acq_stop)),
#OM.resultQuality(), #optional
OM.resultTime(ns_gml.getTimeInstant(time_prod)),
#OM.validTime(), # optional
OM.procedure(eo_equipment),
OM.observedProperty({"nillReason": "unknown"}),
OM.featureOfInterest(
ns_eop.getFootprint(*get_footprint_and_center(xml))
),
OM.result(result),
EOP.metaDataProperty(metadata),
)
xml_eop = etree.ElementTree(xml_eop)
#xml_eop.getroot().addprevious(ns_eop.getSchematronPI())
return xml_eop
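# A hedged usage sketch (driver code is assumed; profile discovery in the
# real tools may differ, and the file name is made up):
#
#     xml = etree.parse('quickbird_product.XML')
#     if ProfileQuickBird.check_profile(xml):
#         print(ProfileQuickBird.get_identifier(xml))
#         eop = ProfileQuickBird.extract_eop_metadata(xml)
#         print(etree.tostring(eop, pretty_print=True))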
|
DREAM-ODA-OS/tools
|
metadata/profiles/quickbird.py
|
Python
|
mit
| 9,730
|
from datetime import date
import os
import tempfile
from unittest import mock
from PIL import Image
from django.core.files import File
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import TestCase
from django.utils.timezone import now
from events.models import Event, Attachment
class EventModelTests(TestCase):
def test_representation(self):
event = Event(name='Test', date=date(2012, 12, 21), organizer="Organizer")
self.assertEqual(repr(event), '<Event Test at 2012-12-21 by Organizer>')
def test_representation_date_only(self):
event = Event(date=date(2012, 12, 21))
self.assertEqual(repr(event), '<Event unbenannt at 2012-12-21>')
def test_representation_date_and_name(self):
event = Event(name='Test', date=date(2012, 12, 21))
self.assertEqual(repr(event), '<Event Test at 2012-12-21>')
def test_representation_date_and_organizer(self):
event = Event(date=date(2012, 12, 21), organizer="Organizer")
self.assertEqual(repr(event), '<Event unbenannt at 2012-12-21 by Organizer>')
def test_string_representation(self):
event = Event(name='Test', date=date(2012, 12, 21), organizer="Organizer")
self.assertEqual(str(event), 'Test at 2012-12-21 by Organizer')
def test_string_representation_date_only(self):
event = Event(date=date(2012, 12, 21))
self.assertEqual(str(event), 'unbenannt at 2012-12-21')
def test_string_representation_date_and_name(self):
event = Event(name='Test', date=date(2012, 12, 21))
self.assertEqual(str(event), 'Test at 2012-12-21')
def test_string_representation_date_and_organizer(self):
event = Event(date=date(2012, 12, 21), organizer="Organizer")
self.assertEqual(str(event), 'unbenannt at 2012-12-21 by Organizer')
class AttachmentModelTests(TestCase):
fixtures = ['events_views_testdata']
def tearDown(self):
Attachment.objects.all().delete() # not only delete DB entries but also the actual files from disk
def test_repesentation(self):
attachment = Attachment.objects.get(pk=1)
self.assertEqual(repr(attachment), "<Attachment test.pdf for Test Event at 2016-07-20 by Person P>")
def test_string_representation(self):
attachment = Attachment.objects.get(pk=1)
self.assertEqual(str(attachment), "test.pdf")
@mock.patch('random.choice', lambda *args, **kwargs: 'x')
def test_get_path_and_set_filename(self):
attachment = Attachment.objects.get(pk=1)
name = '/foo/bar/ baz .TXT'
path = attachment._get_path_and_set_filename(name)
self.assertEqual(attachment.name, ' baz .TXT')
self.assertEqual(path, 'event_attachments/%s_baz_xxxxx.txt' % now().strftime("%Y/%m/%Y%m%d-%H%M"))
@mock.patch('random.choice', lambda *args, **kwargs: 'x')
def test_build_thumbnail(self):
attachment = Attachment.objects.get(pk=1)
self.assertEqual(attachment.thumbnail, "")
attachment.attachment = SimpleUploadedFile(
name='Lena.jpg',
content=open(os.path.join(os.path.dirname(__file__), 'files', 'Lena.jpg'), 'rb').read(),
content_type='image/jpeg')
attachment.save()
self.assertEqual(attachment.name, 'Lena.jpg')
self.assertEqual(str(attachment.thumbnail),
'event_attachments/%s_Lena_xxxxx.thumbnail.jpg' % now().strftime("%Y/%m/%Y%m%d-%H%M"))
self.assertTrue(os.path.exists(attachment.thumbnail.path))
thumbnail = Image.open(attachment.thumbnail)
self.assertEqual(thumbnail.height, attachment.MAX_HEIGHT)
self.assertLess(thumbnail.width, attachment.MAX_WIDTH)
def test_auto_delete_file_on_delete(self):
event = Event.objects.get(pk=1)
with tempfile.NamedTemporaryFile() as f1:
attachment = Attachment(attachment=File(f1), event=event)
attachment.save()
file_path = attachment.attachment.path
self.assertTrue(os.path.exists(file_path))
attachment.delete()
self.assertFalse(os.path.exists(file_path))
def test_auto_delete_file_on_change_with_changed_attachment(self):
event = Event.objects.get(pk=1)
with tempfile.NamedTemporaryFile() as f1:
attachment = Attachment(attachment=File(f1), event=event)
attachment.save()
old_file_path = attachment.attachment.path
self.assertTrue(os.path.exists(old_file_path))
with tempfile.NamedTemporaryFile() as f2:
attachment.attachment = File(f2)
attachment.save()
new_file_path = attachment.attachment.path
self.assertNotEqual(old_file_path, new_file_path)
self.assertTrue(os.path.exists(new_file_path))
self.assertFalse(os.path.exists(old_file_path))
def test_auto_delete_file_on_change_with_unchanged_attachment(self):
event = Event.objects.get(pk=1)
with tempfile.NamedTemporaryFile() as f1:
attachment = Attachment(attachment=File(f1), event=event)
attachment.save() # create attachment
old_file_path = attachment.attachment.path
self.assertTrue(os.path.exists(old_file_path))
attachment.save() # save unchanged attachment
self.assertTrue(os.path.exists(old_file_path))
|
Strassengezwitscher/Strassengezwitscher
|
crowdgezwitscher/events/tests/test_models.py
|
Python
|
mit
| 5,459
|
# -*- coding: utf-8 -*-
import os
import csv
import unittest
import pytest
from sqlalchemy.orm import Session, session
from skosprovider_sqlalchemy.models import Base, Initialiser
from skosprovider_sqlalchemy.utils import (
import_provider,
VisitationCalculator
)
from tests import DBTestCase
def _get_menu():
from skosprovider.providers import (
SimpleCsvProvider
)
ifile = open(
os.path.join(os.path.dirname(__file__), 'data', 'menu.csv'),
"r"
)
reader = csv.reader(ifile)
csvprovider = SimpleCsvProvider(
{'id': 'MENU'},
reader
)
ifile.close()
return csvprovider
def _get_geo():
from skosprovider.providers import DictionaryProvider
geo = DictionaryProvider(
{'id': 'GEOGRAPHY'},
[
{
'id': '1',
'labels': [
{'type': 'prefLabel', 'language': 'en', 'label': 'World'}
],
'narrower': [2, 3]
}, {
'id': 2,
'labels': [
{'type': 'prefLabel', 'language': 'en', 'label': 'Europe'}
],
'narrower': [4, 5, 10], 'broader': [1]
}, {
'id': 3,
'labels': [
{
'type': 'prefLabel', 'language': 'en',
'label': 'North-America'
}
],
'narrower': [6], 'broader': [1]
}, {
'id': 4,
'labels': [
{'type': 'prefLabel', 'language': 'en', 'label': 'Belgium'}
],
'narrower': [7, 8, 9], 'broader': [2], 'related': [10]
}, {
'id': 5,
'labels': [
{
'type': 'prefLabel', 'language': 'en',
'label': 'United Kingdom'
}
],
'broader': [2]
}, {
'id': 6,
'labels': [
{
'type': 'prefLabel', 'language': 'en',
'label': 'United States of America'
}
],
'broader': [3]
}, {
'id': 7,
'labels': [
{
'type': 'prefLabel',
'language': 'en',
'label': 'Flanders'
}, {
'type': 'prefLabel',
'language': 'nl-BE',
'label': 'Vlaanderen'
}
],
'broader': [4]
}, {
'id': 8,
'labels': [
{
'type': 'prefLabel',
'language': 'en',
'label': 'Brussels'
}
],
'broader': [4]
}, {
'id': 9,
'labels': [
{
'type': 'prefLabel',
'language': 'en',
'label': 'Wallonie'
}
],
'broader': [4]
}, {
'id': 10,
'labels': [
{
'type': 'prefLabel',
'language': 'nl',
'label': 'Nederland'
}
],
'related': [4]
}, {
'id': '333',
'type': 'collection',
'labels': [
{
'type': 'prefLabel', 'language': 'en',
'label': 'Places where dutch is spoken'
}
],
'members': ['4', '7', 8, 10]
}
]
)
return geo
def _get_buildings():
from skosprovider.providers import DictionaryProvider
buildings = DictionaryProvider(
{'id': 'BUILDINGS'},
[
{
'id': '1',
'labels': [
{
'type': 'prefLabel',
'language': 'en',
'label': 'Fortifications'
}
],
'narrower': [2],
'matches': {
'exact': ['http://vocab.getty.edu/aat/300006888']
}
}, {
'id': 2,
'labels': [
{'type': 'prefLabel', 'language': 'en', 'label': 'Castle'}
],
'broader': [1, 3],
'matches': {
'broad': ['http://vocab.getty.edu/aat/300006888']
}
}, {
'id': 3,
'labels': [
{
'type': 'prefLabel',
'language': 'en',
'label': 'Habitations'
}
],
'narrower': [2, 4],
'matches': {
'close': ['http://vocab.getty.edu/aat/300005425']
}
}, {
'id': 4,
'labels': [
{'type': 'prefLabel', 'language': 'en', 'label': 'Huts'},
{'type': 'prefLabel', 'language': None, 'label': 'Hutten'}
],
'broader': [3],
'matches': {
'exact': ['http://vocab.getty.edu/aat/300004824']
}
}
]
)
return buildings
def _get_materials():
from skosprovider.providers import DictionaryProvider
materials = DictionaryProvider(
{'id': 'MATERIALS'},
[
{
'id': '1',
'labels': [
{
'type': 'prefLabel',
'language': 'en',
'label': 'Cardboard'
}
],
'narrower': [2],
'related': [3],
'subordinate_arrays': [56]
}, {
'id': '789',
'type': 'collection',
'labels': [
{
'type': 'prefLabel',
'language': 'en',
'label': 'Wood by Tree'
}
],
'members': [654]
}
]
)
return materials
def _get_heritage_types():
import json
typology_data = json.load(
open(os.path.join(os.path.dirname(__file__), 'data', 'typologie.js')),
)['typologie']
from skosprovider.providers import DictionaryProvider
from skosprovider.uri import UriPatternGenerator
from skosprovider.skos import ConceptScheme
heritage_types = DictionaryProvider(
{'id': 'HERITAGE_TYPES'},
typology_data,
uri_generator=UriPatternGenerator('https://id.erfgoed.net/thesauri/erfgoedtypes/%s'),
concept_scheme=ConceptScheme(
uri='https://id.erfgoed.net/thesauri/erfgoedtypes',
labels=[
{'label': 'Erfgoedtypes', 'type': 'prefLabel', 'language': 'nl-BE'},
{'label': 'Heritagetypes', 'type': 'prefLabel', 'language': 'en'}
],
notes=[
{
'note': 'Different types of heritage.',
'type': 'definition',
'language': 'en'
}, {
'note': 'Verschillende types van erfgoed.',
'type': 'definition',
'language': 'nl'
}
],
languages=['nl', 'en']
)
)
return heritage_types
def _get_event_types():
import json
event_data = json.load(
open(os.path.join(os.path.dirname(__file__), 'data', 'gebeurtenis.js')),
)['gebeurtenis']
from skosprovider.providers import DictionaryProvider
from skosprovider.uri import UriPatternGenerator
heritage_types = DictionaryProvider(
{'id': 'EVENT_TYPES'},
event_data,
uri_generator=UriPatternGenerator('https://id.erfgoed.net/thesauri/gebeurtenistypes/%s')
)
return heritage_types
class TestImportProviderTests(DBTestCase):
def setUp(self):
Base.metadata.create_all(self.engine)
self.session = self.session_maker()
Initialiser(self.session).init_all()
def tearDown(self):
self.session.rollback()
session.close_all_sessions()
Base.metadata.drop_all(self.engine)
def _get_cs(self):
from skosprovider_sqlalchemy.models import (
ConceptScheme as ConceptSchemeModel
)
return ConceptSchemeModel(
id=68,
uri='urn:x-skosprovider:cs:68'
)
def test_empty_provider(self):
from skosprovider_sqlalchemy.models import (
ConceptScheme as ConceptSchemeModel
)
from skosprovider.providers import DictionaryProvider
p = DictionaryProvider({'id': 'EMPTY'}, [])
cs = self._get_cs()
self.session.add(cs)
import_provider(p, cs, self.session)
scheme = self.session.query(ConceptSchemeModel).get(68)
assert scheme == cs
def test_menu(self):
from skosprovider_sqlalchemy.models import (
Concept as ConceptModel
)
csvprovider = _get_menu()
cs = self._get_cs()
self.session.add(cs)
import_provider(csvprovider, cs, self.session)
lobster = self.session.query(ConceptModel) \
.filter(ConceptModel.conceptscheme == cs) \
.filter(ConceptModel.concept_id == 11) \
.one()
assert 11 == lobster.concept_id
assert 'urn:x-skosprovider:menu:11' == lobster.uri
assert 'Lobster Thermidor' == str(lobster.label())
assert 1 == len(lobster.notes)
def test_geo(self):
from skosprovider_sqlalchemy.models import (
Concept as ConceptModel,
Collection as CollectionModel
)
geoprovider = _get_geo()
cs = self._get_cs()
self.session.add(cs)
import_provider(geoprovider, cs, self.session)
world = self.session.query(ConceptModel) \
.filter(ConceptModel.conceptscheme == cs) \
.filter(ConceptModel.concept_id == 1) \
.one()
assert world.concept_id == 1
assert 'urn:x-skosprovider:geography:1' == world.uri
assert 'World' == str(world.label('en'))
assert 1 == len(world.labels)
assert 2 == len(world.narrower_concepts)
dutch = self.session.query(CollectionModel) \
.filter(CollectionModel.conceptscheme == cs) \
.filter(CollectionModel.concept_id == 333) \
.one()
assert 333 == dutch.concept_id
assert 'urn:x-skosprovider:geography:333' == dutch.uri
assert 'collection' == dutch.type
assert 1 == len(dutch.labels)
assert 4 == len(dutch.members)
netherlands = self.session.query(ConceptModel) \
.filter(ConceptModel.conceptscheme == cs) \
.filter(ConceptModel.concept_id == 10) \
.one()
assert 10 == netherlands.concept_id
assert 'concept' == netherlands.type
assert 1 == len(netherlands.labels)
assert 2 == netherlands.broader_concepts.pop().concept_id
assert 1 == len(netherlands.related_concepts)
def test_buildings(self):
from skosprovider_sqlalchemy.models import (
Concept as ConceptModel
)
buildingprovider = _get_buildings()
cs = self._get_cs()
self.session.add(cs)
import_provider(buildingprovider, cs, self.session)
castle = self.session.query(ConceptModel) \
.filter(ConceptModel.conceptscheme == cs) \
.filter(ConceptModel.concept_id == 2) \
.one()
assert 2 == len(castle.broader_concepts)
hut = self.session.query(ConceptModel) \
.filter(ConceptModel.conceptscheme == cs) \
.filter(ConceptModel.concept_id == 4) \
.one()
assert 1 == len(hut.broader_concepts)
assert 1 == len(hut.matches)
assert 'exactMatch' == hut.matches[0].matchtype_id
assert 'http://vocab.getty.edu/aat/300004824' == hut.matches[0].uri
def test_heritage_types(self):
from skosprovider_sqlalchemy.models import (
Concept as ConceptModel,
)
heritagetypesprovider = _get_heritage_types()
cs = self._get_cs()
self.session.add(cs)
import_provider(heritagetypesprovider, cs, self.session)
bomen = self.session.query(ConceptModel) \
.filter(ConceptModel.conceptscheme == cs) \
.filter(ConceptModel.concept_id == 72) \
.one()
assert 2 == len(bomen.narrower_collections)
assert 2 == len(cs.labels)
assert 'Erfgoedtypes' == cs.label('nl').label
assert 2 == len(cs.notes)
assert 2 == len(cs.languages)
def test_event_types(self):
from skosprovider_sqlalchemy.models import (
Concept as ConceptModel,
)
eventtypesprovider = _get_event_types()
cs = self._get_cs()
self.session.add(cs)
import_provider(eventtypesprovider, cs, self.session)
archeologische_opgravingen = self.session.query(ConceptModel) \
.filter(ConceptModel.conceptscheme == cs) \
.filter(ConceptModel.concept_id == 38) \
.one()
assert 3 == len(archeologische_opgravingen.narrower_collections)
def test_materials(self):
from skosprovider_sqlalchemy.models import (
Thing as ThingModel,
)
materialsprovider = _get_materials()
cs = self._get_cs()
self.session.add(cs)
import_provider(materialsprovider, cs, self.session)
materials = self.session.query(ThingModel) \
.filter(ThingModel.conceptscheme == cs) \
.all()
assert 2 == len(materials)
class TestVisitationCalculator(DBTestCase):
def setUp(self):
Base.metadata.create_all(self.engine)
self.session = self.session_maker()
Initialiser(self.session).init_all()
def tearDown(self):
self.session.rollback()
session.close_all_sessions()
Base.metadata.drop_all(self.engine)
def _get_cs(self):
from skosprovider_sqlalchemy.models import (
ConceptScheme as ConceptSchemeModel
)
return ConceptSchemeModel(
id=1,
uri='urn:x-skosprovider:cs:1'
)
def test_empty_provider(self):
from skosprovider.providers import DictionaryProvider
p = DictionaryProvider({'id': 'EMPTY'}, [])
cs = self._get_cs()
self.session.add(cs)
import_provider(p, cs, self.session)
vc = VisitationCalculator(self.session)
v = vc.visit(cs)
assert 0 == len(v)
def test_provider_invalid_language(self):
from skosprovider.providers import DictionaryProvider
with self.assertRaises(ValueError):
p = DictionaryProvider({'id': 'EMPTY'}, [
{
'id': '1',
'labels': [
{
'type': 'prefLabel',
'language': 'nederlands',
'label': 'Versterkingen'
}
]
}
])
cs = self._get_cs()
self.session.add(cs)
import_provider(p, cs, self.session)
def test_menu(self):
csvprovider = _get_menu()
cs = self._get_cs()
self.session.add(cs)
import_provider(csvprovider, cs, self.session)
vc = VisitationCalculator(self.session)
visit = vc.visit(cs)
assert 11 == len(visit)
for v in visit:
assert v['lft'] + 1 == v['rght']
assert 1 == v['depth']
def test_menu_sorted(self):
csvprovider = _get_menu()
cs = self._get_cs()
self.session.add(cs)
import_provider(csvprovider, cs, self.session)
vc = VisitationCalculator(self.session)
visit = vc.visit(cs)
assert 11 == len(visit)
left = 1
for v in visit:
assert v['lft'] == left
left += 2
def test_geo(self):
from skosprovider_sqlalchemy.models import (
Concept as ConceptModel
)
geoprovider = _get_geo()
cs = self._get_cs()
self.session.add(cs)
import_provider(geoprovider, cs, self.session)
vc = VisitationCalculator(self.session)
visit = vc.visit(cs)
assert 10 == len(visit)
world = visit[0]
assert self.session.query(ConceptModel).get(world['id']).concept_id == 1
assert 1 == world['lft']
assert 20 == world['rght']
assert 1 == world['depth']
for v in visit:
if v['id'] == 3:
assert v['lft'] + 3 == v['rght']
assert 2 == v['depth']
if v['id'] == 6:
assert v['lft'] + 1 == v['rght']
assert 3 == v['depth']
def test_buildings(self):
from skosprovider_sqlalchemy.models import (
Concept as ConceptModel
)
buildingprovider = _get_buildings()
cs = self._get_cs()
self.session.add(cs)
import_provider(buildingprovider, cs, self.session)
vc = VisitationCalculator(self.session)
visit = vc.visit(cs)
assert len(visit) == 5
# Check that castle is present twice
ids = [self.session.query(ConceptModel).get(v['id']).concept_id for v in visit]
assert ids.count(2) == 2
for v in visit:
# Check that fortification has one child
if v['id'] == 1:
assert v['lft'] + 3 == v['rght']
assert 1 == v['depth']
# Check that habitations has two children
if v['id'] == 3:
assert v['lft'] + 5 == v['rght']
assert 1 == v['depth']
# Check that castle has no children
if v['id'] == 2:
assert v['lft'] + 1 == v['rght']
assert 2 == v['depth']
|
koenedaele/skosprovider_sqlalchemy
|
tests/test_utils.py
|
Python
|
mit
| 18,760
|
#!/usr/bin/env python
# Paint fixed resonator-level marker colors onto each LED strip (no animation).
import opc, time
from ledlib.colordefs import *
numLEDs = 512
numChase = 4
chase_size = 4
gap_size = 3
frame_delay = 0.04
# strip0 = pixels 0-63
# strip1 = 64-127
# strip2 = 128-191
# etc.
StripSize = 30
Bases = [ 0, 64, 128, 192, 256, 320, 384, 448 ]
client = opc.Client('127.0.0.1:7890')
pixels = [ colortable["MUTED_GRAY"] ] * numLEDs
for base in Bases:
for resolevel in range(9):
print ("Reso = ", resolevel)
pixels [ base + gap_size + (resolevel*2) ] = RESO_COLORS[resolevel]
pixels [ base + gap_size + (resolevel*2) +1 ] = RESO_COLORS[resolevel]
client.put_pixels(pixels)
client.put_pixels(pixels)
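# Note: put_pixels is intentionally called twice -- a common Open Pixel
# Control / fadecandy idiom (an assumption here) so the server's frame
# interpolation does not fade in the very first frame.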
|
bbulkow/MagnusFlora
|
led/colorsample.py
|
Python
|
mit
| 687
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
.. Licence MIT
.. codeauthor:: Jan Lipovský <janlipovsky@gmail.com>, janlipovsky.cz
"""
import pytest
@pytest.mark.parametrize("text, expected", [
("xx[http://httpbin.org/status/200](http://httpbin.org/status/210)trololo",
['http://httpbin.org/status/200', 'http://httpbin.org/status/210']),
("This is text with markdown URL[http://httpbin.org/status/200]("
"http://httpbin.org/status/210)",
['http://httpbin.org/status/200', 'http://httpbin.org/status/210']),
("[http://httpbin.org/status/200](http://httpbin.org/status/210)",
['http://httpbin.org/status/200', 'http://httpbin.org/status/210']),
])
def test_find_urls(urlextract, text, expected):
"""
Testing find_urls returning all URLs
:param fixture urlextract: fixture holding URLExtract object
:param str text: text in which we should find links
    :param list(str) expected: list of URLs that have to be found in the text
"""
assert expected == urlextract.find_urls(text)
|
lipoja/URLFinder
|
tests/unit/test_markdown.py
|
Python
|
mit
| 1,039
|
import asyncio
import sys
import time
import unittest
from unittest import mock
import pytest
from engineio import asyncio_socket
from engineio import exceptions
from engineio import packet
from engineio import payload
def AsyncMock(*args, **kwargs):
"""Return a mock asynchronous function."""
m = mock.MagicMock(*args, **kwargs)
async def mock_coro(*args, **kwargs):
return m(*args, **kwargs)
mock_coro.mock = m
return mock_coro
def _run(coro):
"""Run the given coroutine."""
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(sys.version_info < (3, 5), 'only for Python 3.5+')
class TestSocket(unittest.TestCase):
def _get_read_mock_coro(self, payload):
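        """Return a mock request body whose read() coroutine yields the given payload."""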
mock_input = mock.MagicMock()
mock_input.read = AsyncMock()
mock_input.read.mock.return_value = payload
return mock_input
def _get_mock_server(self):
mock_server = mock.Mock()
mock_server.ping_timeout = 0.2
mock_server.ping_interval = 0.2
mock_server.ping_interval_grace_period = 0.001
mock_server.async_handlers = False
mock_server.max_http_buffer_size = 128
mock_server._async = {
'asyncio': True,
'create_route': mock.MagicMock(),
'translate_request': mock.MagicMock(),
'make_response': mock.MagicMock(),
'websocket': 'w',
}
mock_server._async['translate_request'].return_value = 'request'
mock_server._async['make_response'].return_value = 'response'
mock_server._trigger_event = AsyncMock()
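        # Stand-ins for the server's background-task and queue factories,
        # built on plain asyncio primitives.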
def bg_task(target, *args, **kwargs):
return asyncio.ensure_future(target(*args, **kwargs))
def create_queue(*args, **kwargs):
queue = asyncio.Queue(*args, **kwargs)
queue.Empty = asyncio.QueueEmpty
return queue
mock_server.start_background_task = bg_task
mock_server.create_queue = create_queue
return mock_server
def test_create(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
assert s.server == mock_server
assert s.sid == 'sid'
assert not s.upgraded
assert not s.closed
assert hasattr(s.queue, 'get')
assert hasattr(s.queue, 'put')
assert hasattr(s.queue, 'task_done')
assert hasattr(s.queue, 'join')
def test_empty_poll(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
with pytest.raises(exceptions.QueueEmpty):
_run(s.poll())
def test_poll(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
pkt1 = packet.Packet(packet.MESSAGE, data='hello')
pkt2 = packet.Packet(packet.MESSAGE, data='bye')
_run(s.send(pkt1))
_run(s.send(pkt2))
assert _run(s.poll()) == [pkt1, pkt2]
def test_poll_none(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
_run(s.queue.put(None))
assert _run(s.poll()) == []
def test_poll_none_after_packet(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
pkt = packet.Packet(packet.MESSAGE, data='hello')
_run(s.send(pkt))
_run(s.queue.put(None))
assert _run(s.poll()) == [pkt]
assert _run(s.poll()) == []
def test_schedule_ping(self):
mock_server = self._get_mock_server()
mock_server.ping_interval = 0.01
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.send = AsyncMock()
async def schedule_ping():
s.schedule_ping()
await asyncio.sleep(0.05)
_run(schedule_ping())
assert s.last_ping is not None
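        # '2' is the encoded PING packet.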
assert s.send.mock.call_args_list[0][0][0].encode() == '2'
def test_schedule_ping_closed_socket(self):
mock_server = self._get_mock_server()
mock_server.ping_interval = 0.01
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.send = AsyncMock()
s.closed = True
async def schedule_ping():
s.schedule_ping()
await asyncio.sleep(0.05)
_run(schedule_ping())
assert s.last_ping is None
s.send.mock.assert_not_called()
def test_pong(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.schedule_ping = mock.MagicMock()
_run(s.receive(packet.Packet(packet.PONG, data='abc')))
s.schedule_ping.assert_called_once_with()
def test_message_sync_handler(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
_run(s.receive(packet.Packet(packet.MESSAGE, data='foo')))
mock_server._trigger_event.mock.assert_called_once_with(
'message', 'sid', 'foo', run_async=False
)
def test_message_async_handler(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
mock_server.async_handlers = True
_run(s.receive(packet.Packet(packet.MESSAGE, data='foo')))
mock_server._trigger_event.mock.assert_called_once_with(
'message', 'sid', 'foo', run_async=True
)
def test_invalid_packet(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
with pytest.raises(exceptions.UnknownPacketError):
_run(s.receive(packet.Packet(packet.OPEN)))
def test_timeout(self):
mock_server = self._get_mock_server()
mock_server.ping_interval = 6
mock_server.ping_interval_grace_period = 2
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
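        # Pretend the last ping happened beyond ping_interval + grace period,
        # so the next send() must close the socket.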
s.last_ping = time.time() - 9
s.close = AsyncMock()
_run(s.send('packet'))
s.close.mock.assert_called_once_with(wait=False, abort=False)
def test_polling_read(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'foo')
pkt1 = packet.Packet(packet.MESSAGE, data='hello')
pkt2 = packet.Packet(packet.MESSAGE, data='bye')
_run(s.send(pkt1))
_run(s.send(pkt2))
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'sid=foo'}
packets = _run(s.handle_get_request(environ))
assert packets == [pkt1, pkt2]
def test_polling_read_error(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'foo')
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'sid=foo'}
with pytest.raises(exceptions.QueueEmpty):
_run(s.handle_get_request(environ))
def test_polling_write(self):
mock_server = self._get_mock_server()
mock_server.max_http_buffer_size = 1000
pkt1 = packet.Packet(packet.MESSAGE, data='hello')
pkt2 = packet.Packet(packet.MESSAGE, data='bye')
p = payload.Payload(packets=[pkt1, pkt2]).encode().encode('utf-8')
s = asyncio_socket.AsyncSocket(mock_server, 'foo')
s.receive = AsyncMock()
environ = {
'REQUEST_METHOD': 'POST',
'QUERY_STRING': 'sid=foo',
'CONTENT_LENGTH': len(p),
'wsgi.input': self._get_read_mock_coro(p),
}
_run(s.handle_post_request(environ))
assert s.receive.mock.call_count == 2
def test_polling_write_too_large(self):
mock_server = self._get_mock_server()
pkt1 = packet.Packet(packet.MESSAGE, data='hello')
pkt2 = packet.Packet(packet.MESSAGE, data='bye')
p = payload.Payload(packets=[pkt1, pkt2]).encode().encode('utf-8')
mock_server.max_http_buffer_size = len(p) - 1
s = asyncio_socket.AsyncSocket(mock_server, 'foo')
s.receive = AsyncMock()
environ = {
'REQUEST_METHOD': 'POST',
'QUERY_STRING': 'sid=foo',
'CONTENT_LENGTH': len(p),
'wsgi.input': self._get_read_mock_coro(p),
}
with pytest.raises(exceptions.ContentTooLongError):
_run(s.handle_post_request(environ))
def test_upgrade_handshake(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'foo')
s._upgrade_websocket = AsyncMock()
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'sid=foo',
'HTTP_CONNECTION': 'Foo,Upgrade,Bar',
'HTTP_UPGRADE': 'websocket',
}
_run(s.handle_get_request(environ))
s._upgrade_websocket.mock.assert_called_once_with(environ)
def test_upgrade(self):
mock_server = self._get_mock_server()
mock_server._async['websocket'] = mock.MagicMock()
mock_ws = AsyncMock()
mock_server._async['websocket'].return_value = mock_ws
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
environ = "foo"
_run(s._upgrade_websocket(environ))
mock_server._async['websocket'].assert_called_once_with(
s._websocket_handler
)
mock_ws.mock.assert_called_once_with(environ)
def test_upgrade_twice(self):
mock_server = self._get_mock_server()
mock_server._async['websocket'] = mock.MagicMock()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
s.upgraded = True
environ = "foo"
with pytest.raises(IOError):
_run(s._upgrade_websocket(environ))
def test_upgrade_packet(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
_run(s.receive(packet.Packet(packet.UPGRADE)))
r = _run(s.poll())
assert len(r) == 1
assert r[0].encode() == packet.Packet(packet.NOOP).encode()
def test_upgrade_no_probe(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
ws = mock.MagicMock()
ws.wait = AsyncMock()
ws.wait.mock.return_value = packet.Packet(packet.NOOP).encode()
_run(s._websocket_handler(ws))
assert not s.upgraded
def test_upgrade_no_upgrade_packet(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
s.queue.join = AsyncMock(return_value=None)
ws = mock.MagicMock()
ws.send = AsyncMock()
ws.wait = AsyncMock()
probe = 'probe'
ws.wait.mock.side_effect = [
packet.Packet(packet.PING, data=probe).encode(),
packet.Packet(packet.NOOP).encode(),
]
_run(s._websocket_handler(ws))
ws.send.mock.assert_called_once_with(
packet.Packet(packet.PONG, data=probe).encode()
)
assert _run(s.queue.get()).packet_type == packet.NOOP
assert not s.upgraded
def test_upgrade_not_supported(self):
mock_server = self._get_mock_server()
mock_server._async['websocket'] = None
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
environ = "foo"
_run(s._upgrade_websocket(environ))
mock_server._bad_request.assert_called_once_with()
def test_close_packet(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
s.close = AsyncMock()
_run(s.receive(packet.Packet(packet.CLOSE)))
s.close.mock.assert_called_once_with(wait=False, abort=True)
def test_websocket_read_write(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = False
s.queue.join = AsyncMock(return_value=None)
foo = 'foo'
bar = 'bar'
s.poll = AsyncMock(
side_effect=[[packet.Packet(packet.MESSAGE, data=bar)], None]
)
ws = mock.MagicMock()
ws.send = AsyncMock()
ws.wait = AsyncMock()
ws.wait.mock.side_effect = [
packet.Packet(packet.MESSAGE, data=foo).encode(),
None,
]
_run(s._websocket_handler(ws))
assert s.connected
assert s.upgraded
assert mock_server._trigger_event.mock.call_count == 2
mock_server._trigger_event.mock.assert_has_calls(
[
mock.call('message', 'sid', 'foo', run_async=False),
mock.call('disconnect', 'sid'),
]
)
ws.send.mock.assert_called_with('4bar')
def test_websocket_upgrade_read_write(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
s.queue.join = AsyncMock(return_value=None)
foo = 'foo'
bar = 'bar'
probe = 'probe'
s.poll = AsyncMock(
side_effect=[
[packet.Packet(packet.MESSAGE, data=bar)],
exceptions.QueueEmpty,
]
)
ws = mock.MagicMock()
ws.send = AsyncMock()
ws.wait = AsyncMock()
ws.wait.mock.side_effect = [
packet.Packet(packet.PING, data=probe).encode(),
packet.Packet(packet.UPGRADE).encode(),
packet.Packet(packet.MESSAGE, data=foo).encode(),
None,
]
_run(s._websocket_handler(ws))
assert s.upgraded
assert mock_server._trigger_event.mock.call_count == 2
mock_server._trigger_event.mock.assert_has_calls(
[
mock.call('message', 'sid', 'foo', run_async=False),
mock.call('disconnect', 'sid'),
]
)
ws.send.mock.assert_called_with('4bar')
def test_websocket_upgrade_with_payload(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
s.queue.join = AsyncMock(return_value=None)
probe = 'probe'
ws = mock.MagicMock()
ws.send = AsyncMock()
ws.wait = AsyncMock()
ws.wait.mock.side_effect = [
packet.Packet(packet.PING, data=probe).encode(),
packet.Packet(packet.UPGRADE, data='2').encode(),
]
_run(s._websocket_handler(ws))
assert s.upgraded
def test_websocket_upgrade_with_backlog(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
s.queue.join = AsyncMock(return_value=None)
probe = 'probe'
foo = 'foo'
ws = mock.MagicMock()
ws.send = AsyncMock()
ws.wait = AsyncMock()
ws.wait.mock.side_effect = [
packet.Packet(packet.PING, data=probe).encode(),
packet.Packet(packet.UPGRADE, data='2').encode(),
]
s.upgrading = True
_run(s.send(packet.Packet(packet.MESSAGE, data=foo)))
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'sid=sid'}
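        # While the upgrade is in progress, polling GETs are answered with a
        # NOOP packet ('6'); queued messages wait for the websocket.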
packets = _run(s.handle_get_request(environ))
assert len(packets) == 1
assert packets[0].encode() == '6'
packets = _run(s.poll())
assert len(packets) == 1
assert packets[0].encode() == '4foo'
_run(s._websocket_handler(ws))
assert s.upgraded
assert not s.upgrading
packets = _run(s.handle_get_request(environ))
assert len(packets) == 1
assert packets[0].encode() == '6'
def test_websocket_read_write_wait_fail(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = False
s.queue.join = AsyncMock(return_value=None)
foo = 'foo'
bar = 'bar'
s.poll = AsyncMock(
side_effect=[
[packet.Packet(packet.MESSAGE, data=bar)],
[packet.Packet(packet.MESSAGE, data=bar)],
exceptions.QueueEmpty,
]
)
ws = mock.MagicMock()
ws.send = AsyncMock()
ws.wait = AsyncMock()
ws.wait.mock.side_effect = [
packet.Packet(packet.MESSAGE, data=foo).encode(),
RuntimeError,
]
ws.send.mock.side_effect = [None, RuntimeError]
_run(s._websocket_handler(ws))
assert s.closed
def test_websocket_upgrade_with_large_packet(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = True
s.queue.join = AsyncMock(return_value=None)
probe = 'probe'
ws = mock.MagicMock()
ws.send = AsyncMock()
ws.wait = AsyncMock()
ws.wait.mock.side_effect = [
packet.Packet(packet.PING, data=probe).encode(),
packet.Packet(packet.UPGRADE, data='2' * 128).encode(),
]
with pytest.raises(ValueError):
_run(s._websocket_handler(ws))
assert not s.upgraded
def test_websocket_ignore_invalid_packet(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.connected = False
s.queue.join = AsyncMock(return_value=None)
foo = 'foo'
bar = 'bar'
s.poll = AsyncMock(
side_effect=[
[packet.Packet(packet.MESSAGE, data=bar)],
exceptions.QueueEmpty,
]
)
ws = mock.MagicMock()
ws.send = AsyncMock()
ws.wait = AsyncMock()
ws.wait.mock.side_effect = [
packet.Packet(packet.OPEN).encode(),
packet.Packet(packet.MESSAGE, data=foo).encode(),
None,
]
_run(s._websocket_handler(ws))
assert s.connected
assert mock_server._trigger_event.mock.call_count == 2
mock_server._trigger_event.mock.assert_has_calls(
[
mock.call('message', 'sid', foo, run_async=False),
mock.call('disconnect', 'sid'),
]
)
ws.send.mock.assert_called_with('4bar')
def test_send_after_close(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
_run(s.close(wait=False))
with pytest.raises(exceptions.SocketIsClosedError):
_run(s.send(packet.Packet(packet.NOOP)))
def test_close_after_close(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
_run(s.close(wait=False))
assert s.closed
assert mock_server._trigger_event.mock.call_count == 1
mock_server._trigger_event.mock.assert_called_once_with(
'disconnect', 'sid'
)
_run(s.close())
assert mock_server._trigger_event.mock.call_count == 1
def test_close_and_wait(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.queue = mock.MagicMock()
s.queue.put = AsyncMock()
s.queue.join = AsyncMock()
_run(s.close(wait=True))
s.queue.join.mock.assert_called_once_with()
def test_close_without_wait(self):
mock_server = self._get_mock_server()
s = asyncio_socket.AsyncSocket(mock_server, 'sid')
s.queue = mock.MagicMock()
s.queue.put = AsyncMock()
s.queue.join = AsyncMock()
_run(s.close(wait=False))
assert s.queue.join.mock.call_count == 0
|
miguelgrinberg/python-engineio
|
tests/asyncio/test_asyncio_socket.py
|
Python
|
mit
| 19,848
|
from __future__ import division
import unittest
import numpy as np
from ..features.f0_contour_features import ContourFeatures
__author__ = 'Jakob Abesser'
class TestFeatures(unittest.TestCase):
""" Unit tests for ContourFeatures class
"""
def setUp(self, show_plot=False):
""" Generate test vibrato contour
"""
# hopsize = 512, sample rate = 44.1 kHz
dt = 512/44100
# contour length = 100 frames
N = 100
n = np.arange(N)
# fundamental frequency = 440 Hz
self.f_0_hz = 440
# modulation frequency = 3 Hz
self.f_mod_hz = 3
        # modulation depth = 4 Hz (peak frequency deviation)
self.freq_hz = self.f_0_hz + 4.*np.sin(2*np.pi*n*dt*self.f_mod_hz)
# time frames
self.time_sec = np.arange(N)*dt
# frequency deviation from f0 in cent
self.freq_rel_cent = 1200*np.log2(self.freq_hz/self.f_0_hz)
if show_plot:
import matplotlib.pyplot as pl
pl.figure()
pl.plot(self.time_sec, self.freq_hz)
pl.show(block=False)
def test_contour_features(self):
""" Unit test for f0_contour_features() """
features, feature_labels = ContourFeatures().process(self.time_sec,
self.freq_hz,
self.freq_rel_cent)
assert len(features) == len(feature_labels)
        for label, value in zip(feature_labels, features):
            print("{} : {}".format(label, value))
# test some features
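        # The extracted modulation frequency should recover the synthesized 3 Hz
        # vibrato, with several periods present and a near-flat linear f0 slope.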
self.assertAlmostEqual(features[feature_labels.index("mod_freq_hz")], self.f_mod_hz, places=1)
self.assertGreater(features[feature_labels.index("mod_num_periods")], 5)
self.assertGreater(features[feature_labels.index("mod_dom")], .5)
self.assertLess(features[feature_labels.index("lin_f0_slope")], .01)
if __name__ == "__main__":
unittest.main()
|
jakobabesser/pymus
|
pymus/test/test_features.py
|
Python
|
mit
| 1,992
|
"""Export Value and Reward Map.
Get a model and export all the path and score
for grid 28x28
Author: Yuhuang Hu
Email : duguyue100@gmail.com
"""
from __future__ import print_function
import os
import cPickle as pickle
import numpy as np
import keras.backend as K
import matplotlib.pyplot as plt
import rlvision
from rlvision import utils
from rlvision.vin import vin_model, get_layer_output
from rlvision.utils import process_map_data
from rlvision.grid import GridSampler
def get_action(a):
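    """Map a discrete action index (0-7) to a (dx, dy) step on the 8-connected grid."""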
if a == 0:
return -1, -1
if a == 1:
return 0, -1
if a == 2:
return 1, -1
if a == 3:
return -1, 0
if a == 4:
return 1, 0
if a == 5:
return -1, 1
if a == 6:
return 0, 1
if a == 7:
return 1, 1
return None
def find_goal(m):
return np.argwhere(m.max() == m)[0][::-1]
def predict(im, pos, model, k):
im_ary = np.array([im]).transpose((0, 2, 3, 1)) \
if K.image_data_format() == 'channels_last' else np.array([im])
res = model.predict([im_ary,
np.array([pos])])
action = np.argmax(res)
reward = get_layer_output(model, 'reward', im_ary)
value = get_layer_output(model, 'value{}'.format(k), im_ary)
reward = np.reshape(reward, im.shape[1:])
value = np.reshape(value, im.shape[1:])
return action, reward, value
file_name = os.path.join(rlvision.RLVISION_DATA,
"chain_data", "grid28_with_idx.pkl")
model_file = os.path.join(
rlvision.RLVISION_MODEL, "grid28-po",
"vin-model-po-28-77-0.89.h5")
im_data, state_data, label_data, sample_idx = process_map_data(
file_name, return_full=True)
model = vin_model(l_s=im_data.shape[2], k=20)
model.load_weights(model_file)
sampler = GridSampler(im_data, state_data, label_data, sample_idx, (28, 28))
gt_collector = []
po_collector = []
diff_collector = []
grid, state, label, goal = sampler.get_grid(77)
gt_collector.append(state)
step_map = np.zeros((2, 28, 28))
step_map[0] = np.ones((28, 28))
step_map[1] = grid[1]
pos = [state[0, 0], state[0, 1]]
path = [(pos[0], pos[1])]
start = (pos[0], pos[1])
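# Roll out the policy for at most 32 steps; each step unmasks the map around
# the agent's position before re-planning (partially observable setting).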
for step in xrange(32):
masked_img = utils.mask_grid((pos[1], pos[0]),
grid[0], 3, one_is_free=False)
step_map[0] = utils.accumulate_map(step_map[0], masked_img,
one_is_free=False)
action, reward, value = predict(step_map, pos, model, 20)
dx, dy = get_action(action)
pos[0] = pos[0] + dx
pos[1] = pos[1] + dy
path.append((pos[0], pos[1]))
plt.figure()
plt.imshow(value, cmap="jet")
plt.colorbar()
plt.scatter(x=[start[0]], y=[start[1]], marker="*", c="orange", s=50)
plt.scatter(x=[pos[0]], y=[pos[1]], marker=".", c="purple", s=50)
plt.scatter(x=[goal[0]], y=[goal[1]], marker="*", c="black", s=50)
plt.savefig("grid28-77-%d.png" % (step), dpi=300)
if pos[0] == goal[0] and pos[1] == goal[1]:
print ("[MESSAGE] Found the path!")
break
|
ToniRV/Learning-to-navigate-without-a-map
|
rlvision/tests/vin_po_export_value_reward_28.py
|
Python
|
mit
| 3,042
|
from mojo.events import addObserver
from mojo.drawingTools import *
class PointCount(object):
def __init__(self):
addObserver(self, "drawPointCount", "draw")
def drawPointCount(self, info):
glyph = info["glyph"]
if glyph is None:
return
scale = info["scale"]
pointCount = 0
for contour in glyph:
pointCount += len(contour)
selectedCount = len(glyph.selection)
fill(1, 0, 0)
stroke(None)
fontSize(10/scale)
text("points: %s | selected: %s" %(pointCount, selectedCount), (glyph.width+10, 10))
PointCount()
|
typemytype/RoboFontExamples
|
pointCounter/pointCounter.py
|
Python
|
mit
| 702
|
import os, sys, numpy as np, tensorflow as tf
from pathlib import Path
import time
sys.path.append(str(Path(__file__).resolve().parents[1]))
import convnet_10_hidden
__package__ = 'convnet_10_hidden'
from . import network
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
BATCH_SIZE = 250
FILENAME = os.path.basename(__file__)
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SUMMARIES_DIR = SCRIPT_DIR
SAVE_PATH = SCRIPT_DIR + "/network.ckpt"
### configure devices for this eval script.
USE_DEVICE = '/gpu:0'
session_config = tf.ConfigProto(log_device_placement=True)
session_config.gpu_options.allow_growth = True
# this is required if want to use GPU as device.
# see: https://github.com/tensorflow/tensorflow/issues/2292
session_config.allow_soft_placement = True
if __name__ == "__main__":
with tf.Graph().as_default() as g:
# inference()
input, logits = network.inference()
labels, loss_op = network.loss(logits)
train = network.training(loss_op, 1e-1)
        eval_op = network.evaluation(logits, labels)  # avoid shadowing the built-in eval
init = tf.initialize_all_variables()
with tf.Session(config=session_config) as sess:
# Merge all the summaries and write them out to /tmp/mnist_logs (by default)
# to see the tensor graph, fire up the tensorboard with --logdir="./train"
merged = tf.merge_all_summaries()
train_writer = tf.train.SummaryWriter(SUMMARIES_DIR + '/summaries/train', sess.graph)
test_writer = tf.train.SummaryWriter(SUMMARIES_DIR + '/summaries/test')
saver = tf.train.Saver()
sess.run(init)
saver.restore(sess, SAVE_PATH)
# now let's test!
TEST_BATCH_SIZE = np.shape(mnist.test.labels)[0]
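            # evaluation() is assumed to return the number of correct predictions,
            # so dividing by the test-set size gives the accuracy.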
while True:
                output, loss_value, accuracy = sess.run([logits, loss_op, eval_op], feed_dict={
input: mnist.test.images,
labels: mnist.test.labels
})
print("- MNIST Test accuracy is ", accuracy / TEST_BATCH_SIZE)
time.sleep(5.0)
|
kinshuk4/MoocX
|
misc/deep_learning_notes/Proj_Centroid_Loss_LeNet/convnet_10_hidden/MNIST_eval.py
|
Python
|
mit
| 2,180
|
from __future__ import print_function
import sys
from datetime import datetime, timedelta
from time import time, sleep
from redis import Redis
import requests
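# Note: on Python 3, use Redis(decode_responses=True) so hgetall() returns str
# keys that compare equal to the urls below.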
redis = Redis()
URLS = (
"www.spokesman.com",
"www.google.com",
"www.yahoo.com",
"www.example.com",
"www.gibberish.com",
"www.a.com",
"www.b.com",
"www.c.com",
"www.d.com",
"www.forza.com",
"www.bbc.co.uk",
"www.canada.ca",
"www.twitter.com",
"www.amazon.com"
)
def sharecount(url):
"""Get social share count for given url."""
queue_url(url)
process_urls()
print(url, get_share_count(url))
def queue_url(url):
"""Add url to request queue and/or increase score."""
return url, redis.zincrby("url_queue",1,url)
def push_to_requested_queue(url):
"""Flush request queue and """
time_period = timedelta(seconds=30).seconds
requests_per_period = 15 # Twitter API
print("Processing: {}".format(url))
timenow = time()
requested_queue = redis.hgetall('requested_queue')
for rurl, timestamp in requested_queue.items():
time_diff = timenow - float(timestamp)
if time_diff > time_period:
# Flush old requests
            redis.hdel('requested_queue', rurl)
            requested_queue.pop(rurl, None)
    count = redis.hlen('requested_queue')
    if url not in requested_queue and count < requests_per_period:
        redis.hset('requested_queue', url, timenow)
twitter(url)
return True
def twitter(url):
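    # Twitter removed the public count endpoint; the request below is kept for
    # reference but is unreachable.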
raise DeprecationWarning('This url has been removed by Twitter, '
'with no replacement available')
r = requests.get('http://urls.api.twitter.com/1/urls/count.json?url={}&callback=?'.format(url))
if r.status_code == 200:
count = r.json().get('count',0)
redis.hset(url,"twitter_count",count)
return count
def process_urls():
"""Process url with highest score."""
processing = redis.zrevrange("url_queue",0,0)
if push_to_requested_queue(processing[0]):
return redis.zrem("url_queue", processing[0])
def get_share_count(url):
return redis.hgetall(url)
if __name__ == '__main__':
for url in URLS:
sharecount(url)
sleep(10)
|
dangayle/sharecounts
|
sharecounts/sharecounts.py
|
Python
|
mit
| 2,316
|
import time
from app.validation.abstract_validator import AbstractValidator
from app.validation.validation_result import ValidationResult
class DateTypeCheck(AbstractValidator):
def validate(self, user_answer):
"""
Validate that the users answer is a valid date
:param user_answer: The answer the user provided for the response
:return: ValidationResult(): An object containing the result of the validation
"""
result = ValidationResult(False)
try:
time.strptime(user_answer, "%d/%m/%Y")
return ValidationResult(True)
        except (ValueError, TypeError):
            result.errors.append(AbstractValidator.INVALID_DATE)
return result
|
qateam123/eq
|
app/validation/date_type_check.py
|
Python
|
mit
| 811
|