| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|

Each record below lists the first six fields on one row, followed by the file's code (the concatenated prefix, middle, and suffix spans).
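Each row is a fill-in-the-middle (FIM) split of one source file: concatenating prefix, middle, and suffix reproduces the original file. A minimal reassembly sketch, assuming rows are loaded as dicts keyed by the schema's field names (the loading step itself is hypothetical):

```python
def reassemble(row):
    # The original file is the plain concatenation of the three spans.
    return row["prefix"] + row["middle"] + row["suffix"]

# Hypothetical row for illustration only.
row = {"prefix": "import os\nin_dir = ", "middle": "\"stats\"", "suffix": "\n"}
print(reassemble(row))
```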
| BeamNG/crashrpt | processing/scripts/basic_stats.py | Python | bsd-3-clause | 1,335 | 0.020225 |
# This script calculates how many error reports are in each subdirectory
# and how many error reports are in total.
# Edit in_dir and out_file parameters as you need.
import os
in_dir = "D:/Projects/CrashRpt/valid_reports"
out_file = "stats.txt"
f = open(out_file, "w")
def get_txt_file_count(dirname):
count = 0
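    # Note (added): the break below exits os.walk after its first yield, so only
    # files directly inside dirname are counted, not files in nested subdirectories.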
for root, dirs, files in os.walk(dirname, True):
for file in files:
            if not file.endswith(".txt"):
                continue
            count += 1
        break
return count
multimap = dict()
for root, dirs, files in os.walk(in_dir):
for dir in dirs:
        dir_name = os.path.join(root, dir)
report_count_in_dir = get_txt_file_count(dir_name)
if report_count_in_dir in multimap.keys():
            multimap[report_count_in_dir].append(dir)
        else:
            multimap[report_count_in_dir] = [dir]
ordered_list = list(multimap.keys())
ordered_list.sort()
ordered_list.reverse()
total_count = 0
total_groups = 0
for count in ordered_list:
    total_groups += len(multimap[count])
total_count += count * len(multimap[count])
f.write("Total %d reports (100%%) in %d directories\n"%(total_count, total_groups))
n = 1
for key in ordered_list:
for dir in multimap[key]:
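        # Note (added): assumes Python 3 true division; under Python 2 the
        # integer division key/total_count would floor every percentage to 0.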
percent = key/total_count*100
f.write("%d. %d reports (%0.1f%%) in '%s'\n"%(n, key, percent, dir))
n = n+1
f.close()
|
| ASzc/nagoya | cfg/koji-builder/setup.py | Python | lgpl-3.0 | 1,421 | 0.000704 |
#!/usr/bin/env python2
# References:
# https://fedoraproject.org/wiki/Koji/ServerHowTo
# https://github.com/sbadakhc/kojak/blob/master/scripts/install/install
import util.cfg as cfg
import util.pkg as pkg
import util.cred as cred
from util.log import log
#
# Setup
#
log.info("General update")
pkg.clean()
pkg.update()
log.info("Install EPEL")
pkg.install("https://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm")
#
# Kojid (Koji Builder)
#
log.info("Install Koji Builder")
pkg.install("koji-builder")
koji_url = dict()
koji_url["web"] = "http://koji/koji"
koji_url["top"] = "http://koji/kojifiles"
koji_url["hub"] = "http://koji/kojihub"
log.info("Configure Koji Builder")
with cfg.mod_ini("/etc/kojid/kojid.conf") as i:
i.kojid.sleeptime = 2
i.kojid.maxjobs = 20
i.kojid.server = koji_url["hub"]
i.kojid.topurl = koji_url["top"]
# i.kojid.cert is set at runtime
i.kojid.ca = cred.ca_crt
i.kojid.serverca = cred.ca_crt
i.kojid.smtphost = "koji"
i.kojid.from_addr = "Koji Build System <buildsys@kojibuilder>"
#
# Koji CLI
#
log.info("Configure Koji CLI")
with cfg.mod_ini("/etc/koji.conf") as i:
i.koji.server = koji_url["hub"]
i.koji.weburl = koji_url["web"]
i.koji.topurl = koji_url["top"]
i.koji.topdir = "/mnt/koji"
i.koji.cert = cred.user["kojiadmin"].pem
    i.koji.ca = cred.ca_crt
i.koji.serverca = cred.ca_crt
pkg.clean()
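The util.cfg, util.pkg, and util.cred helpers are project-internal and not shown here. As a rough, assumption-laden sketch of the pattern cfg.mod_ini suggests (a context manager that loads an INI file, lets the caller mutate it, then writes it back), built on configparser rather than the real helper, which also offers the attribute-style access (i.kojid.sleeptime) that this stand-in lacks:

```python
# Sketch only: a configparser-based stand-in for a mod_ini-style helper.
from configparser import ConfigParser
from contextlib import contextmanager

@contextmanager
def mod_ini(path):
    parser = ConfigParser()
    parser.read(path)
    yield parser                 # caller mutates parser["section"]["key"]
    with open(path, "w") as fh:
        parser.write(fh)         # persist the changes on exit
```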
|
| isaachenrion/jets | src/architectures/nmp/adjacency/__init__.py | Python | bsd-3-clause | 399 | 0.002506 |
from .simple import SIMPLE_ADJACENCIES
from .combo import ComboAdjacency, LearnedComboAdjacency
def construct_adjacency(matrix, **kwargs):
if isinstance(matrix, (list,)):
if kwargs.get('learned_tradeoff', False):
return LearnedComboAdjacency(adj_list=matrix, **kwargs)
        return ComboAdjacency(adj_list=matrix, **kwargs)
return SIMPLE_ADJACENCIES[matrix](**kwargs)
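A hypothetical dispatch sketch; the key name 'simple' and the dim kwarg are illustrative, not read from the .simple module:

```python
# Illustrative only: a plain key dispatches through SIMPLE_ADJACENCIES,
# a list of specs builds a (possibly learned) combination.
adj = construct_adjacency('simple', dim=64)
combo = construct_adjacency(['simple', 'simple'], learned_tradeoff=True, dim=64)
```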
|
| AutorestCI/azure-sdk-for-python | azure-mgmt-rdbms/azure/mgmt/rdbms/postgresql/models/name_availability.py | Python | mit | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class NameAvailability(Model):
"""Represents a resource name availability.
:param message: Error Message.
:type message: str
:param name_available: Indicates whether the resource name is available.
:type name_available: bool
:param reason: Reason for name being unavailable.
:type reason: str
"""
_attribute_map = {
'message': {'key': 'message', 'type': 'str'},
'name_available': {'key': 'nameAvailable', 'type': 'bool'},
'reason': {'key': 'reason', 'type': 'str'},
}
    def __init__(self, message=None, name_available=None, reason=None):
self.message = message
self.name_available = name_available
self.reason = reason
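A minimal construction sketch using the generated __init__ (values illustrative):

```python
avail = NameAvailability(message=None, name_available=True, reason=None)
print(avail.name_available)  # True
```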
|
| jlazovskis/conditioning | examples/examples-curves-surfaces.py | Python | gpl-3.0 | 1,355 | 0.04428 |
# Name: Examples for using conditioning number finders for curves and surfaces
# Description: Contains some examples with descriptions of how to use the functions
# Created: 2016-08-18
# Author: Janis Lazovskis
# Navigate to the conditioning directory
# Run Python 2
# Example (curve)
execfile('curves.py')
x = variety()
x0,x1,x2 = sp.var('x0,x1,x2')
x.varlist = [x0,x1,x2]
x.func = x0*x0 + x1*x2 - x1*x0
x.points = [[1,1,0], [2,1,-2]]
cnumcurve(x)
# Non-example (curve)
# Use the above, but instead, put:
x.points = [[1,1,0], [2,1,-2], [0,0,0]]
# Then cnumcurve will return an empty list saying the last point isn't in P^2
cnumcurve(x)
# Non-example (curve)
# Use the above, but instead, put:
x.points = [[1,1,0], [2,1,-2], [1,1,1]]
# Then cnumcurve will return an empty list saying the last point isn't on the curve
cnumcurve(x)
# Example surface
execfile('surfaces.py')
x = variety()
x0,x1,x2,x3 = sp.var('x0,x1,x2,x3')
x.varlist = [x0,x1,x2,x3]
x.func = x0*x1 - x2*x3
x.points = [[1,1,1,1], [0,1,1,0], [0,1,0,1], [2,1,1,2]]
cnumsurface(x)
# Non-example (surface)
execfile('surfaces.py')
x = variety()
x0,x1,x2,x3 = sp.var('x0,x1,x2,x3')
x.varlist = [x0,x1,x2,x3]
x.func = x0*x0*x1 - x2*x3*x3 + x0*x1*x2 +x2*x2*x2
x.points = [[0,1,1,1], [1,0,1,1], [1,0,2,2], [1,1,-1,1]]
# This will raise an error because the surface is not smooth
cnumsurface(x)
|
| FR4NK-W/osourced-scion | python/test/lib/path_store_test.py | Python | apache-2.0 | 22,153 | 0.000135 |
# Copyright 2015 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`lib_path_store_test` --- lib.path_store unit tests
==========================================================================
"""
# Stdlib
import math
from unittest.mock import patch, MagicMock
# External packages
import nose
import nose.tools as ntools
# SCION
from lib.errors import SCIONPathPolicyViolated
from lib.packet.pcb import PathSegment
from lib.path_store import (
PathPolicy,
PathStore,
PathStoreRecord
)
from test.testcommon import create_mock, create_mock_full
class TestPathPolicyCheckFilters(object):
"""
Unit tests for lib.path_store.PathPolicy.check_filters
"""
def _setup(self, unwanted=None, reasons=None, remote_ia=None):
inst = PathPolicy()
inst._check_unwanted_ases = create_mock()
inst._check_unwanted_ases.return_value = unwanted
inst._check_property_ranges = create_mock()
inst._check_property_ranges.return_value = reasons
inst._check_remote_ifid = create_mock()
inst._check_remote_ifid.return_value = remote_ia
pcb = create_mock(["short_desc"], class_=PathSegment)
return inst, pcb
def test_basic(self):
inst, pcb = self._setup()
# Call
inst.check_filters(pcb)
def test_unwanted_ases(self):
inst, pcb = self._setup("unwanted AS")
# Call
ntools.assert_raises(SCIONPathPolicyViolated, inst.check_filters, pcb)
def test_property_ranges(self):
inst, pcb = self._setup(reasons="reasons")
ntools.assert_raises(SCIONPathPolicyViolated, inst.check_filters, pcb)
class TestPathPolicyCheckPropertyRanges(object):
"""
Unit tests for lib.path_store.PathPolicy._check_property_ranges
"""
def _setup(self, max_bw=20):
inst = PathPolicy()
inst.property_ranges = {
'PeerLinks': [0, 1], 'HopsLength': [0, 1], 'DelayTime': [0, 1],
'GuaranteedBandwidth': [0, max_bw],
'AvailableBandwidth': [0, max_bw], 'TotalBandwidth': [0, max_bw]
}
pcb = create_mock(["get_n_peer_links", "get_n_hops", "get_timestamp"])
return inst, pcb
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
def test_success(self, get_time):
inst, pcb = self._setup()
pcb.get_n_peer_links.return_value = 0.5
pcb.get_n_hops.return_value = 0.5
pcb.get_timestamp.return_value = 0.5
# Call
ntools.eq_(inst._check_property_ranges(pcb), [])
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
def test_failure(self, get_time):
inst, pcb = self._setup(max_bw=9)
pcb.get_n_peer_links.return_value = 2
pcb.get_n_hops.return_value = -1
pcb.get_timestamp.return_value = -0.1
# Call
ntools.eq_(len(inst._check_property_ranges(pcb)), 6)
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
def test_no_checks(self, get_time):
inst, pcb = self._setup(max_bw=9)
for key in inst.property_ranges:
inst.property_ranges[key] = []
pcb.get_n_peer_links.return_value = 2
pcb.get_n_hops.return_value = -1
pcb.get_timestamp.return_value = -0.1
# Call
ntools.eq_(inst._check_property_ranges(pcb), [])
class TestPathPolicyParseDict(object):
"""
Unit tests for lib.path_store.PathPolicy.parse_dict
"""
def test_basic(self):
        dict_ = {}
dict_['BestSetSize'] = "best_set_size"
dict_['CandidatesSetSize'] = "candidates_set_size"
dict_['HistoryLimit'] = "history_limit"
dict_['UpdateAfterNumber'] = "update_after_number"
dict_['UpdateAfterTime'] = "update_after_time"
dict_['UnwantedASes'] = "1-11,2-12"
dict_['PropertyRanges'] = {'key1': "1-11", 'key2': "2-12"}
        dict_['PropertyWeights'] = "property_weights"
pth_pol2 = PathPolicy()
pth_pol2.parse_dict(dict_)
ntools.eq_(pth_pol2.best_set_size, "best_set_size")
ntools.eq_(pth_pol2.candidates_set_size, "candidates_set_size")
ntools.eq_(pth_pol2.history_limit, "history_limit")
ntools.eq_(pth_pol2.update_after_number, "update_after_number")
ntools.eq_(pth_pol2.update_after_time, "update_after_time")
ntools.eq_(pth_pol2.property_ranges, {'key1': (1, 11), 'key2': (2, 12)})
ntools.eq_(pth_pol2.property_weights, "property_weights")
class TestPathStoreRecordInit(object):
"""
Unit tests for lib.path_store.PathStoreRecord.__init__
"""
@patch("lib.path_store.PathStoreRecord.update", autospec=True)
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
def test(self, get_time, update):
pcb = create_mock(['get_hops_hash', 'get_n_hops', 'get_n_peer_links'],
class_=PathSegment)
get_time.return_value = PathStoreRecord.DEFAULT_OFFSET + 1
# Call
inst = PathStoreRecord(pcb)
# Tests
ntools.eq_(inst.id, pcb.get_hops_hash.return_value)
ntools.eq_(inst.peer_links, pcb.get_n_peer_links.return_value)
ntools.eq_(inst.hops_length, pcb.get_n_hops.return_value)
ntools.eq_(inst.fidelity, 0)
ntools.eq_(inst.disjointness, 0)
ntools.eq_(inst.last_sent_time, 1)
ntools.eq_(inst.guaranteed_bandwidth, 0)
ntools.eq_(inst.available_bandwidth, 0)
ntools.eq_(inst.total_bandwidth, 0)
update.assert_called_once_with(inst, pcb)
class TestPathStoreRecordUpdate(object):
"""
Unit tests for lib.path_store.PathStoreRecord.update
"""
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
@patch("lib.path_store.PathStoreRecord.__init__", autospec=True,
return_value=None)
def test(self, init, get_time):
inst = PathStoreRecord("pcb")
get_time.return_value = 100
pcb = create_mock(["copy", "get_hops_hash", "get_timestamp",
"get_expiration_time"])
inst.id = pcb.get_hops_hash.return_value
pcb.get_timestamp.return_value = 95
# Call
inst.update(pcb)
# Tests
pcb.copy.assert_called_once_with()
ntools.eq_(inst.delay_time, 5)
ntools.eq_(inst.last_seen_time, 100)
ntools.eq_(inst.expiration_time, pcb.get_expiration_time.return_value)
class TestPathStoreRecordUpdateFidelity(object):
"""
Unit tests for lib.path_store.PathStoreRecord.update_fidelity
"""
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
@patch("lib.path_store.PathStoreRecord.__init__", autospec=True,
return_value=None)
def test_basic(self, init, time_):
path_policy = PathPolicy()
path_policy.property_weights['PeerLinks'] = 10
path_policy.property_weights['HopsLength'] = 1
path_policy.property_weights['Disjointness'] = 2
path_policy.property_weights['LastSentTime'] = 3
path_policy.property_weights['LastSeenTime'] = 4
path_policy.property_weights['DelayTime'] = 5
path_policy.property_weights['ExpirationTime'] = 6
path_policy.property_weights['GuaranteedBandwidth'] = 7
path_policy.property_weights['AvailableBandwidth'] = 8
path_policy.property_weights['TotalBandwidth'] = 9
pth_str_rec = PathStoreRecord("pcb")
pth_str_rec.peer_links = 10 ** 5
pth_str_rec.hops_length = (1 / (10 ** 4))
pth_str_rec.disjointness = 10 ** 3
pth_str_rec.last_sent_time = -99
pth_str_rec.last_seen_time = 10
pth_str_rec.delay_time
|
| alrusdi/leadwerks-blender-exporter | io_scene_leadwerks/leadwerks/constants.py | Python | gpl-3.0 | 700 | 0.001429 |
# -*- coding: utf-8 -*-
MDL_BYTE = 1
MDL_UNSIGNED_BYTE = 2
MDL_SHORT = 3
MDL_UNSIGNED_SHORT = 4
MDL_HALF = 5
MDL_INT = 6
MDL_UNSIGNED_INT = 7
MDL_FLOAT = 8
MDL_DOUBLE = 9
MDL_FILE = 1
MDL_NODE = 2
MDL_MESH = 3
MDL_BONE = 4
MDL_VERTEXARRAY = 5
MDL_INDICEARRAY = 6
MDL_PROPERTIES = 7
MDL_ANIMATIONKEYS = 8
MDL_AABB = 9
MDL_SURFACE = 10
MDL_NEWTONCOLLISIONTREE = 11
MDL_POSITION = 1
MDL_NORMAL = 2
MDL_TEXTURE_COORD = 3
MDL_COLOR = 4
MDL_TANGENT = 5
MDL_BINORMAL = 6
MDL_BONEINDICE = 7
MDL_BONEWEIGHT = 8
MDL_POINTS = 1
MDL_LINE_STRIP = 2
MDL_LINE_LOOP = 3
MDL_LINES = 4
MDL_TRIANGLE_STRIP = 5
MDL_TRIANGLE_FAN = 6
MDL_TRIANGLES = 7
MDL_QUAD_STRIP = 8
MDL_QUADS = 9
MDL_POLYGON = 10
MDL_VERSION = 2
|
| genome/dindel-tgi | python/utils/Fasta.py | Python | gpl-3.0 | 2,034 | 0.013766 |
import sys, os
class FastaTarget:
def __init__(self):
self.tid = ''
self.len = ''
self.offset = -1
self.blen = -1
self.llen = -1
class FastaIndex:
def __init__(self, fname=''):
self.f = open(fname, 'r')
self.ft = {}
for line in self.f.readlines():
dat = line.split()
if len(dat)==5:
tid = dat[0]
self.ft[tid] = FastaTarget()
self.ft[tid].tid = tid
self.ft[tid].len = int(dat[1])
self.ft[tid].offset = int(dat[2])
self.ft[tid].blen = int(dat[3])
self.ft[tid].llen = int(dat[4])
# print 'Fasta: ', tid, int(dat[1]), int(dat[2]), int(dat[3]), int(dat[4])
self.f.close()
class Fasta:
def __init__(self, fname = '/nfs/users/nfs_c/caa/s103/ref/human_b36_male.fa'):
self.fa = open(fname,'r')
self.fai = FastaIndex(fname+'.fai')
def get(self, tid, pos1based, len):
pos = pos1based - 1
try:
idx = self.fai.ft[tid]
except KeyError:
print 'KeyError: ', tid
raise NameError('KeyError')
fpos = idx.offset+ ( int(pos)/idx.blen)*idx.llen + (int(pos)%idx.blen)
self.fa.seek(fpos,0)
numread=0
seq = []
while numread<len:
char = self.fa.read(1)
if char!='\n':
seq.append(char)
numread +=1
return seq
def getChromosomes(faFile = ''):
faiFile = "%s.fai" % (faFile)
if not os.path.exists(faiFile):
raise NameError("Cannot find fai file for %s" % faFile)
faidx = FastaIndex(faiFile)
fachr = faidx.ft.keys()
chromosomes = []
    autosomal = ["%d" % c for c in range(1,23)]
autosomal.extend(['X','Y'])
for chrom in autosomal:
if chrom in fachr:
chromosomes.append(chrom)
    chromosomes.extend(list(set(fachr) - set(autosomal)))
return chromosomes
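A usage sketch, assuming a FASTA file with a samtools-style .fai index next to it (path and sequence name illustrative):

```python
# Illustrative only; requires genome.fa plus its faidx index genome.fa.fai.
fa = Fasta('genome.fa')
seq = fa.get('1', 1000, 50)  # 50 bases from sequence '1', 1-based position 1000
print(''.join(seq))
```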
|
| stifoon/navitia | source/sindri/sindri/saver/edsaver.py | Python | agpl-3.0 | 4,169 | 0.000481 |
# encoding: utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
import logging
import datetime
from sqlalchemy import Table, MetaData, select, create_engine
import sqlalchemy
from sindri.saver.message import persist_message
from sindri.saver.at_perturbation import persist_at_perturbation
from sindri.saver.utils import FunctionalError, TechnicalError
class EdRealtimeSaver(object):
"""
Classe responsable de l'enregistrement en base de donnée des événements
temps réel.
"""
def __init__(self, config):
self.__engine = create_engine(config.ed_connection_string)
self.meta = MetaData(self.__engine)
self.message_table = Table('message', self.meta, autoload=True,
schema='realtime')
self.localized_message_table = Table('localized_message', self.meta,
autoload=True, schema='realtime')
self.at_perturbation_table = Table('at_perturbation', self.meta,
autoload=True, schema='realtime')
def persist_message(self, message):
self.__persist(message, persist_message)
def persist_at_perturbation(self, perturbation):
self.__persist(perturbation, persist_at_perturbation)
def __persist(self, item, callback):
"""
fonction englobant toute la gestion d'erreur lié à la base de donnée
et la gestion de la transaction associé
:param item l'objet à enregistré
:param callback fonction charger de l'enregistrement de l'objet
à proprement parler dont la signature est (meta, conn, item)
meta etant un objet MetaData
conn la connection à la base de donnée
item etant l'objet à enregistrer
"""
logger = logging.getLogger('sindri')
conn = None
try:
conn = self.__engine.connect()
transaction = conn.begin()
except sqlalchemy.exc.SQLAlchemyError as e:
            logger.exception('error during transaction')
raise TechnicalError('problem with databases: ' + str(e))
try:
callback(self.meta, conn, item)
transaction.commit()
except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.DataError) as e:
            logger.exception('error during transaction')
transaction.rollback()
raise FunctionalError(str(e))
        except sqlalchemy.exc.SQLAlchemyError as e:
            logger.exception('error during transaction')
if not hasattr(e, 'connection_invalidated') \
or not e.connection_invalidated:
transaction.rollback()
raise TechnicalError('problem with databases: ' + str(e))
except:
            logger.exception('error during transaction')
try:
transaction.rollback()
except:
pass
raise
finally:
if conn:
conn.close()
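A wiring sketch; only the ed_connection_string attribute is assumed on the config object (it is the one read in __init__ above), and a reachable database with the realtime schema is required:

```python
# Illustrative wiring; the message object itself comes from elsewhere in sindri.
class FakeConfig(object):
    ed_connection_string = 'postgresql://user:password@localhost/ed'

saver = EdRealtimeSaver(FakeConfig())
# saver.persist_message(message)
```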
|
| ksh/gpirecertification | tools/etl/remote_edited.py | Python | apache-2.0 | 6,082 | 0.002466 |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remote environment manager for extract-transform-load utilities."""
__author__ = [
'johncox@google.com',
]
import os
import sys
import appengine_config
# Override SERVER_SOFTWARE before doing any App Engine imports so import-time
# detection of dev mode, done against SERVER_SOFTWARE of 'Development*', fails.
# Once imports are done, this environment variable can be reset as needed (for
# tests, etc.). pylint: disable-msg=g-import-not-at-top
SERVER_SOFTWARE = 'Production Emulation'
if appengine_config.PRODUCTION_MODE:
sys.exit('Running etl/tools/remote.py in production is not supported.')
os.environ['SERVER_SOFTWARE'] = SERVER_SOFTWARE
from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.tools import appengine_rpc
from google.appengine.tools import remote_api_shell
# String. Used to detect appspot.com servers.
_APPSPOT_SERVER_SUFFIX = 'appspot.com'
# String. Password used when a password is not necessary.
_BOGUS_PASSWORD = '9p1tra1n1n9'
# String. Infix for google.com application ids.
_GOOGLE_APPLICATION_INFIX = 'google.com'
# String. Prefix App Engine uses application ids in the dev appserver.
_LOCAL_APPLICATION_ID_PREFIX = 'dev~'
# String. Prefix used to detect if a server is running locally.
_LOCAL_SERVER_PREFIX = 'localhost'
# String. Prefix App Engine uses for application ids in production.
_REMOTE_APPLICATION_ID_PREFIX = 's~'
# String. Email address used unless os.environ['USER_EMAIL'] is set in tests.
_TEST_EMAIL = 'gpionlinetraining'
# String. os.environ['SERVER_SOFTWARE'] value that indicates we're running under
# the test environment.
TEST_SERVER_SOFTWARE = 'Test'
class Error(Exception):
"""Base error type."""
class EnvironmentAuthenticationError(Error):
"""Raised when establishing an environment fails due to bad credentials."""
class Environment(object):
"""Sets up the execution environment to use remote_api for RPCs.
As with any use of remote_api, this has three important caveats:
1. By going through the Remote API rather than your application's handlers,
you are bypassing any business logic in those handlers. It is easy in
this way to accidentally corrupt the system receiving your RPCs.
2. There is no guarantee that the code running on the system receiving your
RPCs is the same version as the code running locally. It is easy to have
version skew that corrupts the destination system.
3. Execution is markedly slower than running in production.
"""
def __init__(
self, application_id, server, path='/_ah/remote_api'):
"""Constructs a new Environment.
Args:
application_id: string. The application id of the environment
(myapp).
server: string. The full name of the server to connect to
(myurl.appspot.com).
path: string. The URL of your app's remote api entry point.
"""
#self._application_id = application_id
#self._path = path
#self._server = server
self._application_id = "gpirecertification"
self._path = "http://gpirecertification.appspot.com/gpirecert"
self._server = "gpirecertification.appspot.com/gpirecert"
@staticmethod
def _dev_appserver_auth_func():
"""Auth function to run for dev_appserver (bogus password)."""
# return raw_input('Email: '), _BOGUS_PASSWORD
return "gpionlinetraining", "9p1tra1n1n9"
@staticmethod
def _test_auth_func():
"""Auth function to run in tests (bogus username and password)."""
# return os.environ.get('USER_EMAIL', _TEST_EMAIL), _BOGUS_PASSWORD
return "gpionlinetraining", "9p1tra1n1n9"
def _get_auth_func(self):
"""Returns authentication function for the remote API."""
if os.environ.get('SERVER_SOFTWARE', '').startswith(
TEST_SERVER_SOFTWARE):
return self._test_auth_func
elif self._is_localhost():
return self._dev_appserver_auth_func
else:
return remote_api_shell.auth_func
def _get_internal_application_id(self):
"""Returns string containing App Engine's internal id representation."""
prefix = _REMOTE_APPLICATION_ID_PREFIX
if self._is_localhost():
prefix = _LOCAL_APPLICATION_ID_PREFIX
elif not self._is_appspot():
prefix = '%s%s:' % (prefix, _GOOGLE_APPLICATION_INFIX)
return prefix + self._application_id
def _get_secure(self):
"""Returns boolean indicating whether or not to use https."""
return not self._is_localhost()
def _is_appspot(self):
"""Returns True iff server is appspot.com."""
return self._server.endswith(_APPSPOT_SERVER_SUFFIX)
def _is_localhost(self):
"""Returns True if environment is dev_appserver and False otherwise."""
return self._server.startswith(_LOCAL_SERVER_PREFIX)
def establish(self):
"""Establishes the environment for RPC execution."""
try:
remote_api_stub.ConfigureRemoteApi(
self._get_internal_application_id(), self._path,
self._get_auth_func(), servername=self._server,
save_cookies=True, secure=self._get_secure(),
rpc_server_factory=appengine_rpc.HttpRpcServer)
remote_api_stub.MaybeInvokeAuthentication()
except AttributeError:
raise EnvironmentAuthenticationError
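A hypothetical establish call following the constructor docstring; note that as written above, __init__ ignores its arguments and hardcodes the gpirecertification values:

```python
# Illustrative; these values follow the docstring but are overridden
# by the hardcoded assignments in __init__ as the file stands.
env = Environment('myapp', 'myurl.appspot.com')
env.establish()
```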
|
| carlochess/proyectoComplejidad | Organizar/segundaParte.py | Python | apache-2.0 | 4,712 | 0.004244 |
from __future__ import print_function
from lector import *
from lpSolv import *
import sys
import math
def generarRestriccionesTipo(matriz, tipo):
numeroRestricciones = (n* (n-1)) / 2
if tipo == 0 or tipo == 1:
for j in range(0, n):
restriccionCanecaI = []
for k in range(0, i * n):
if k >= i * j and k < i * j + i:
objPos = k % i
if tipo == 0:
restriccionCanecaI.append(items[objPos].getPeso())
elif tipo == 1:
restriccionCanecaI.append(items[objPos].getVolumen())
else:
restriccionCanecaI.append(0)
if tipo == 0:
for k in range(0, numeroRestricciones):
restriccionCanecaI.append(0)
restriccionCanecaI.append("<=")
restriccionCanecaI.append(caneca.getPeso())
elif tipo == 1:
for k in range(0, numeroRestricciones):
restriccionCanecaI.append(0)
restriccionCanecaI.append("<=")
restriccionCanecaI.append(caneca.getVolumen())
matriz.append(restriccionCanecaI)
elif tipo == 2:
for j in range(0, i):
restriccionCanecaI = []
for k in range(0, i * n + numeroRestricciones):
if (k % i) -j == 0 and k < i * n:
restriccionCanecaI.append(1)
else:
restriccionCanecaI.append(0)
restriccionCanecaI.append("=")
restriccionCanecaI.append(1)
matriz.append(restriccionCanecaI)
elif tipo == 3:
r = 0
for u in range(0, n):
for v in range(u + 1, n):
mult = 1
for numEq in range(0, 2):
restriccionCanecaI = []
for k in range(0, i * n):
if k >= u * i and k < u * i + i:
restriccionCanecaI.append(mult * items[k % i].getPeso())
elif k >= v * i and k < v * i + i:
restriccionCanecaI.append(-mult * items[k % i].getPeso())
else:
restriccionCanecaI.append(0)
for k in range(0, numeroRestricciones):
if k == r:
restriccionCanecaI.append(-1)
else:
restriccionCanecaI.append(0)
restriccionCanecaI.append("<=")
restriccionCanecaI.append(0)
matriz.append(restriccionCanecaI)
mult = mult * -1
r += 1
def generarFuncObj(matriz):
restriccionCanecaI = []
numeroRestricciones = (n* (n-1)) / 2
for k in range(0, i * n):
restriccionCanecaI.append(0)
for k in range(0, numeroRestricciones):
restriccionCanecaI.append(1)
matriz.append(restriccionCanecaI)
def generarRestricciones():
generarRestriccionesTipo(primeraRestriccion, 1)
generarRestriccionesTipo(segundaRestriccion, 0)
generarRestriccionesTipo(terceraRestriccion, 2)
generarRestriccionesTipo(cuartaRestriccion, 3)
def generarFuncionObjetivo():
generarFuncObj(funcObj)
def printMatrix(testMatrix):
for i in range(len(testMatrix)):
for j in range(len(testMatrix[i])):
print(testMatrix[i][j], end=" ")
print(" : ",len(testMatrix[i]))
print()
def hallarN():
sumaPesos = 0
sumaVolumenes = 0
for i in range(len(items)):
        sumaPesos += items[i].getPeso()
for i in range(len(items)):
sumaVolumenes += items[i].getVolumen()
pesos = math.ceil(sumaPesos / float(caneca.getPeso()))
volumenes = math.ceil(sumaVolumenes / float(caneca.getVolumen()))
if(pesos >= volumenes):
return pesos
else:
return volumenes
param = (sys.argv[1] if len(sys.argv) > 1 else -1)
o = leerArchivo(param)
caneca = o[0]
items = o[1]
numeroProblema = o[2] + "_2.lp"
n = int(hallarN())
i = len(items)
funcObj = []
primeraRestriccion = []
segundaRestriccion = []
terceraRestriccion = []
cuartaRestriccion = []
matriz = []
numeroRestricciones=(n* (n-1)) / 2
generarRestricciones()
generarFuncionObjetivo()
for e in funcObj:
matriz.append(e)
for e in primeraRestriccion:
matriz.append(e)
for e in segundaRestriccion:
matriz.append(e)
for e in terceraRestriccion:
matriz.append(e)
for e in cuartaRestriccion:
matriz.append(e)
print("Items ", items)
print("Caneca ", caneca)
printMatrix(matriz)
resolverParte2(matriz,n,i,numeroProblema,numeroRestricciones)
|
| toway/towaymeetups | mba/resources.py | Python | gpl-3.0 | 34,486 | 0.008427 |
# coding: utf-8
import os
from UserDict import DictMixin
from fnmatch import fnmatch
from datetime import datetime
from datetime import date
import pytz
from pyramid.threadlocal import get_current_registry
from pyramid.traversal import resource_path
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime, Date
from sqlalchemy import ForeignKey
from sqlalchemy import Integer, Float
from sqlalchemy import LargeBinary
from sqlalchemy import String
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy import UniqueConstraint
from sqlalchemy import Table, select
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.orm import backref
from sqlalchemy.orm import deferred
from sqlalchemy.orm import object_mapper
from sqlalchemy.orm import relation, relationship
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql import and_
from sqlalchemy.sql import select
from sqlalchemy.util import classproperty
from transaction import commit
from zope.deprecation.deprecation import deprecated
from zope.interface import implements
import kotti
from kotti import Base
from kotti import DBSession
from kotti import get_settings
from kotti import metadata
from kotti.interfaces import INode
from kotti.interfaces import IContent
from kotti.interfaces import IDocument
from kotti.interfaces import IFile
from kotti.interfaces import IImage
from kotti.interfaces import IDefaultWorkflow
from kotti.migrate import stamp_heads
from kotti.security import PersistentACLMixin
from kotti.security import has_permission
from kotti.security import view_permitted, SITE_ACL
from kotti.security import Principals, get_principals
from kotti.sqla import ACLType
from kotti.sqla import JsonType
from kotti.sqla import MutationList
from kotti.sqla import NestedMutationDict
from kotti.util import ViewLink
#from kotti.util import _
from kotti.util import camel_case_to_name
from kotti.util import get_paste_items
from kotti.resources import Document
from mba import _
TZ_HK = pytz.timezone('Asia/Hong_Kong')
friend = Table(
'friends', Base.metadata,
Column('user_a_id', Integer, ForeignKey('mba_users.id'), primary_key=True),
Column('user_b_id', Integer, ForeignKey('mba_users.id'), primary_key=True),
Column('status', Integer, default=0) # 0: No friend yet, 1: friend already
)
# Meetup Invitation
class MeetupInvitation(Base):
id = Column('id', Integer, nullable=False, primary_key=True, autoincrement=True)
    inviter_id = Column('inviter_id',Integer, ForeignKey('mba_users.id'))  # inviter
inviter = relationship("MbaUser", foreign_keys="[MeetupInvitation.inviter_id]")
    invitee_id = Column('invitee_id', Integer, ForeignKey('mba_users.id'))  # invitee
invitee = relationship("MbaUser", foreign_keys="[MeetupInvitation.invitee_id]")
meetup_id = Column(Integer, ForeignKey('acts.id'))
meetup = relationship('Act')
status = Column(Integer, default=0) # 0 : unread, 1: ignore 2:accept, 3: reject 4: deleted
class UserInterest(Base):
interest_id = Column(Integer, ForeignKey('interests.id'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
# interest = relationship('Interest', backref='interest_items')
# name = association_proxy('interest', 'name')
user = relationship("MbaUser",
backref=backref("user_interests",
cascade="all, delete-orphan")
)
interest = relationship("Interest")
interest_name = association_proxy("interest", "name")
@classmethod
def _interest_find_or_create(cls, name):
with DBSession.no_autoflush:
interest = DBSession.query(Interest).filter_by(name=name).first()
if interest is None:
interest = Interest(name=name)
return cls(interest=interest)
class UserSkill(Base):
interest_id = Column(Integer, ForeignKey('interests.id'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
user = relationship("MbaUser",
backref=backref("user_skills",
cascade="all, delete-orphan")
)
skill = relationship("Interest")
skill_name = association_proxy("skill", "name")
@classmethod
def _interest_find_or_create(cls, name):
with DBSession.no_autoflush:
interest = DBSession.query(Interest).filter_by(name=name).first()
if interest is None:
interest = Interest(name=name)
return cls(skill=interest)
class Interest(Base):
__table_args__ = (
UniqueConstraint('name'),
)
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String(250), nullable=False)
description = Column(UnicodeText())
def __init__(self, name, **kw):
self.name = name
Base.__init__(self,**kw)
# def __repr__(self):
# return (self.name)
@property
def users(self):
return [rel.user for rel in self.interest_items]
#TODO for deleting
class PositionCollect(Base):
position_id = Column(Integer, ForeignKey('positions.id', ondelete='cascade'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id', ondelete='cascade'), primary_key=True)
create_date = Column(DateTime(), default=datetime.now(tz=None))
position = relationship('Position', backref='position_items')
@classmethod
def _create(cls, p):
if p is None:
raise Exception('position can not be None')
return cls(position=p)
class Visit(Base):
user_id1 = Column('user_id1', Integer, ForeignKey('mba_users.id'), primary_key=True)
user_id2 = Column('user_id2', Integer, ForeignKey('mba_users.id'), primary_key=True)
visit_date = Column(DateTime(), default=datetime.now(tz=None))
# 1 <--> 1
user = relationship("MbaUser", foreign_keys="[Visit.user_id2]")
class City(Base):
__tablename__ = 'city'
__table_args__ = (
UniqueConstraint('name'),
)
id = Column(Integer, primary_key=True)
name = Column(Unicode(50), nullable=False)
acts = relationship("Act", backref='city', order_by='desc(Act.creation_date)')
usercity = relationship("MbaUser", backref='city', order_by='desc(MbaUser.creation_date)')
@classmethod
def _find_or_create(cls, name):
with DBSession.no_autoflush:
obj = DBSession.query(City).filter_by(name=name).first()
            if obj is None:
obj = City(name=name)
# print 'cannt find city create one'
#return cls(city=obj)
return obj
class UserBetween(Base):
    city_id = Column(Integer, ForeignKey('city.id'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
user = relationship("MbaUser",
backref=backref("user_between",
cascade="all, delete-orphan")
)
city = relationship("City")
city_name = association_proxy("city", "name")
@classmethod
def _city_find_or_create(cls, name):
city = City._find_or_create(name=name)
return cls(city=city)
class Message(Base):
id = Column(Integer, primary_key=True, autoincrement=True)
sender_id = Column(Integer, ForeignKey('mba_users.id'))
sender = relationship("MbaUser", foreign_keys="[Message.sender_id]")
reciever_id = Column(Integer, ForeignKey('mba_users.id'))
reciever = relationship("MbaUser", foreign_keys="[Message.reciever_id]")
# message type,
# 0: system message
# 1: admin message
# 2: friend private message
# 10: somebody ask to be friend
# 11: friends invite me some person
# 12: friends invite me some meetup
type = Column(Integer)
content = Column(String(500))
status = Column(Integer,default=0) # 0: unread, 1:r
|
| Richert/BrainNetworks | CMC/config/AdEx_net.py | Python | apache-2.0 | 2,934 | 0.008521 |
from pyrates.utility import grid_search_annarchy, plot_timeseries
from ANNarchy import Projection, Population, TimedArray, setup, Network, Monitor, Uniform, Normal, \
EIF_cond_exp_isfa_ista
from pyrates.utility import pyrates_from_annarchy
import matplotlib.pyplot as plt
import numpy as np
# parameters
############
T = 1000.0 # simulation time (ms)
dt = 1e-2 # integration step-size (ms)
Ne = 100 # number of excitatory neurons
Ni = 100 # number of inhibitory neurons
c_min = 0.1
c_max = 1.0
# network definition
####################
setup(method='explicit', dt=dt)
# Neuron definition
neuron = EIF_cond_exp_isfa_ista()
neuron.equations = """
I = g_exc * (e_rev_E - v) + g_inh * (e_rev_I - v) + i_offset * Normal(0.2, 1.0)
tau_m * dv/dt = (v_rest - v + delta_T * exp((v-v_thresh)/delta_T)) + tau_m/cm*(I - w) : init=-70.6
tau_w * dw/dt = a * (v - v_rest) / 1000.0 - w
tau_syn_E * dg_exc/dt = - g_exc : exponential
tau_syn_I * dg_inh/dt = - g_inh : exponential
"""
# population setup
pop = Population(Ne + Ni, neuron=neuron)
E = pop[:Ne]
I = pop[Ne:]
# projection setup
C_ei = Projection(pre=E, post=I, target='exc', name='EI')
C_ie = Projection(pre=I, post=E, target='inh', name='IE')
#C_ee = Projection(E, E, 'exc', name='EE')
#C_ii = Projection(I, I, 'inh', name='II')
C_ei.connect_fixed_probability(0.1, weights=Uniform(c_min, c_max))
C_ie.connect_fixed_probability(0.1, weights=Uniform(c_min, c_max))
#C_ee.connect_fixed_probability(0.3, weights=Uniform(c_min, c_max))
#C_ii.connect_fixed_probability(0.3, weights=Uniform(c_min, c_max))
# input
#steps = int(T/dt)
#I_e_tmp = 5.0 + np.random.randn(steps, Ne) * 50.0 * np.sqrt(dt) # input current for excitatory neurons
#I_i_tmp = 4.0 + np.random.randn(steps, Ni) * 44.0 * np.sqrt(dt) # input current for inhibitory neurons
#I_e = TimedArray(rates=I_e_tmp, name="E_inp")
#I_i = TimedArray(rates=I_i_tmp, name="I_inp")
#inp_e = Projection(pre=I_e, post=E, target='exc')
#inp_i = Projection(pre=I_i, post=I, target='exc')
#inp_e.connect_one_to_one(1.0)
#inp_i.connect_one_to_one(1.0)
E.i_offset = 5.0
I.i_offset = 2.0
# monitoring
obs_e = Monitor(E, variables=['spike', 'v'], start=True)
obs_i = Monitor(I, variables=['spike', 'v'], start=True)
# simulation
############
# annarchy simulation
net = Network(everything=True)
net.compile()
net.simulate(duration=T)
# conversion to pyrates
rate_e = pyrates_from_annarchy(monitors=[net.get(obs_e)], vars=['spike'], pop_average=True)
rate_i = pyrates_from_annarchy(monitors=[net.get(obs_i)], vars=['spike'], pop_average=True)
v_e = pyrates_from_annarchy(monitors=[net.get(obs_e)], vars=['v'], pop_average=False)
v_i = pyrates_from_annarchy(monitors=[net.get(obs_i)], vars=['v'], pop_average=False)
# visualization
###############
plt.plot(rate_e)
plt.plot(rate_i)
plt.figure()
plt.plot(v_e)
plt.figure()
plt.plot(v_i)
plt.show()
|
| candywater/ProCon | aoj/volumn5/0532/0532aoj.py | Python | mit | 390 | 0.097436 |
#python3
'''
'''
import sys
while 1:
try:
h1,m1,s1,h2,m2,s2=map(int,input().split())
except EOFError: break
'''<- this is /* in c/c++
m1=input()
s1=input()
h2=input()
m2=input()
s2=input()
'''
s1=s2+60-s1
s2=s1%60
    s1=s1//60  # floor division; the file header says python3
m1=m2+60-1-m1+s1
m2=m1%60
    m1=m1//60
h2=h2-h1-1+m1
#print("d d d"%(h,m,s)) <- wrong format
    print('%d'%h2,'%d'%m2,'%d'%s2)
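The same elapsed-time arithmetic, sketched with divmod over total seconds (assumes the end reading is not earlier than the start reading; values illustrative):

```python
h1, m1, s1, h2, m2, s2 = 7, 0, 0, 9, 30, 15  # illustrative clock readings
total = (h2*3600 + m2*60 + s2) - (h1*3600 + m1*60 + s1)
h, rem = divmod(total, 3600)
m, s = divmod(rem, 60)
print('%d %d %d' % (h, m, s))  # -> 2 30 15
```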
|
| googleapis/python-aiplatform | samples/generated_samples/aiplatform_generated_aiplatform_v1beta1_vizier_service_suggest_trials_sync.py | Python | apache-2.0 | 1,664 | 0.001803 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for SuggestTrials
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_generated_aiplatform_v1beta1_VizierService_SuggestTrials_sync]
from google.cloud import aiplatform_v1beta1
def sample_suggest_trials():
# Create a client
    client = aiplatform_v1beta1.VizierServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.SuggestTrialsRequest(
parent="parent_value",
suggestion_count=1744,
client_id="client_id_value",
)
# Make the request
operation = client.suggest_trials(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
# [END aiplatform_generated_aiplatform_v1beta1_VizierService_SuggestTrials_sync]
|
| sonnykr/blog | SignUps/views.py | Python | apache-2.0 | 389 | 0.010283 |
from django.shortcuts import render, render_to_response
from django.template import RequestContext
from .forms import SignUpForm
# Create your views here.
def home(request):
form = SignUpForm(request.POST or None)
if form.is_valid():
save_it = form.save(commit=False)
save_it.save()
    return render_to_response("signup.html", locals(), context_instance=RequestContext(request))
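render_to_response with context_instance was deprecated in Django 1.8 and removed in 1.10; a rough modern equivalent using the render shortcut already imported above:

```python
def home(request):
    form = SignUpForm(request.POST or None)
    if form.is_valid():
        form.save()
    return render(request, "signup.html", locals())
```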
|
| ad-m/foundation-manager | foundation/cases/migrations/0002_auto_20160512_1041.py | Python | bsd-3-clause | 888 | 0.002252 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-12 08:41
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('cases', '0001_initial'),
('offices', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.AddField(
model_name='case',
name='created_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='case',
name='office',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='offices.Office'),
),
]
|
| SpamExperts/SpamPAD | tests/functional/test_plugins/test_header_eval.py | Python | gpl-2.0 | 87,077 | 0.002516 |
#coding:utf8
"""Tests the HeaderEval Plugin"""
from __future__ import absolute_import
import datetime
import unittest
import tests.util
# Load plugin and report matched RULES and SCORE
PRE_CONFIG = """
loadplugin Mail::SpamAssassin::Plugin::HeaderEval
report _SCORE_
report _TESTS_
"""
class TestFunctionalCheckForFakeAolRelayInRcvd(tests.util.TestBase):
def test_check_for_fake_aol_relay_in_rcvd_match(self):
config = "header TEST_RULE eval:check_for_fake_aol_relay_in_rcvd()"
email = ("Received: from unknown (HELO mta05bw.bigpond.com) (80.71.176.130) "
"by rly-xw01.mx.aol.com with QMQP; Sat, 15 Jun 2002 "
"23:37:16 -0000")
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['TEST_RULE'])
def test_check_for_fake_aol_relay_in_rcvd_not_match1(self):
config = "header TEST_RULE eval:check_for_fake_aol_relay_in_rcvd()"
email = ("Received: from rly-xj02.mx.aol.com (rly-xj02.mail.aol.com "
"[172.20.116.39]) by omr-r05.mx.aol.com (v83.35) with "
"ESMTP id RELAYIN7-0501132011; Wed, 01 May 2002 "
"13:20:11 -0400")
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_fake_aol_relay_in_rcvd_not_match2(self):
config = "header TEST_RULE eval:check_for_fake_aol_relay_in_rcvd()"
email = ("Received: from logs-tr.proxy.aol.com (logs-tr.proxy.aol.com "
"[152.163.201.132
|
]) by rly-ip01.mx.aol.com "
"(8.8.8/8.8.8/AOL-5.0.0) with ESMTP id NAA08955 for "
"<sapient-alumni@yahoogroups.com>; Thu, 4 Apr 2002 13:11:20 "
"-0500 (EST)")
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_fake_aol_relay_in_rcvd_not_match_aol(self):
config = "header TEST_RULE eval:check_for_fak
|
e_aol_relay_in_rcvd()"
email = ("Received: by 10.28.54.13 with SMTP id d13csp1785386wma; Mon, "
"28 Nov 2016 07:40:07 -0800 (PST)")
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
class TestFunctionalCheckForFarawayCharset(tests.util.TestBase):
def test_check_for_faraway_charset_in_headers_match_subject(self):
config = ("header TEST_RULE eval:check_for_faraway_charset_in_headers()\n"
"ok_locales ru")
email = "Subject: This is a test subject"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['TEST_RULE'])
def test_check_for_faraway_charset_in_headers_match_from(self):
config = ("header TEST_RULE eval:check_for_faraway_charset_in_headers()\n"
"ok_locales ru")
email = "From: This is a test subject"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['TEST_RULE'])
def test_check_for_faraway_charset_in_headers_not_match(self):
config = "header TEST_RULE eval:check_for_faraway_charset_in_headers()"
email = "Subject: This is a test subject"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_faraway_charset_in_headers_with_all_locales(self):
config = ("header TEST_RULE eval:check_for_faraway_charset_in_headers()\n"
"ok_locales all")
email = "Subject: This is a test subject"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
class TestFunctionalCheckForUniqueSubjectId(tests.util.TestBase):
def test_check_for_unique_subject_id_starting_with_special_char_match(self):
config = "header TEST_RULE eval:check_for_unique_subject_id()"
email = "Subject: This is a test subject :3ad41d421"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['TEST_RULE'])
def test_check_for_unique_subject_id_in_parenthesis_match(self):
config = "header TEST_RULE eval:check_for_unique_subject_id()"
email = "Subject: This is a test subject (7217vPhZ0-478TLdy5829qicU9-0@26)"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['TEST_RULE'])
def test_check_for_unique_subject_id_starting_with_number_sign(self):
config = "header TEST_RULE eval:check_for_unique_subject_id()"
email = "Subject: This is a test subject #30D7"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['TEST_RULE'])
def test_check_for_unique_subject_id_not_match(self):
config = "header TEST_RULE eval:check_for_unique_subject_id()"
email = "Subject: This is a test subject 7217vPhZ0-478TLdy5829qicU9-0@26"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
class TestFunctionalCheckIllegalCharsInHeader(tests.util.TestBase):
def test_check_illegal_chars_in_header_match_ratio_and_count(self):
config = "header TEST_RULE eval:check_illegal_chars('MyHeader','0.5','2')"
email = u"MyHeader: ὲὲaa"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['TEST_RULE'])
def test_check_illegal_chars_in_header_not_match_ratio_and_count(self):
config = "header TEST_RULE eval:check_illegal_chars('MyHeader','0.6','2')"
email = u"MyHeader: ὲὲaaa"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_no_illegal_chars_in_header(self):
config = "header TEST_RULE eval:check_illegal_chars('MyHeader','0.5','1')"
email = u"MyHeader: aaaa"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_illegal_chars_in_header_match_if_ratio_and_count_zero(self):
config = "header TEST_RULE eval:check_illegal_chars('MyHeader','0','0')"
email = u"MyHeader: aaaa"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['TEST_RULE'])
def test_check_illegal_chars_if_empty_header(self):
config = "header TEST_RULE eval:check_illegal_chars('MyHeader','0','0')"
email = u"MyHeader:"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
    def test_check_illegal_chars_multiple_subject_exemptions(self):
config = "header TEST_RULE eval:check_illegal_chars('Subject','0.5','3')"
email = u"Subject: ®¢£aaa"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['TEST_RULE'])
    def test_check_illegal_chars_single_subject_exemption_registered(self):
config = "header TEST_RULE eval:check_illegal_chars('Subject','0.33','1')"
email = u"Subject: ®aa";
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
    def test_check_illegal_chars_single_subject_exemption_cent(self):
config = "header TEST_RULE eval:check_illegal_chars('Subject','0.33','1')"
email = u"Subject: a¢a"
self.setup_conf(config=config, pre_config=P
|
| anandology/pyjamas | library/gwt/ui/VerticalPanel.py | Python | apache-2.0 | 1,733 | 0.001731 |
# Copyright 2006 James Tauber and contributors
# Copyright (C) 2009 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjamas import DOM
from pyjamas import Factory
from pyjamas.ui.CellPanel import CellPanel
from pyjamas.ui import HasHorizontalAlignment
from pyjamas.ui import HasVerticalAlignment
class VerticalPanel(CellPanel):
def insert(self, widget, beforeIndex):
widget.removeFromParent()
tr = DOM.createTR()
td = DOM.createTD()
DOM.insertChild(self.getBody(), tr, beforeIndex)
DOM.appendChild(tr, td)
CellPanel.insert(self, widget, td, beforeIndex)
        self.setCellHorizontalAlignment(widget, self.horzAlign)
self.setCellVerticalAlignment(widget, self.vertAlign)
def remove(self, widget):
if isinstance(widget, int):
widget = self.getWidget(widget)
if widget.getParent() != self:
return False
td = DOM.getParent(widget.getElement())
tr = DOM.getParent(td)
DOM.removeChild(self.getBody(), tr)
CellPanel.remove(self, widget)
return True
Factory.registerClass('pyjamas.ui.VerticalPanel', 'VerticalPanel', VerticalPanel)
|
| jbaayen/reinteract | lib/reinteract/base_notebook_window.py | Python | bsd-2-clause | 10,711 | 0.00112 |
# Copyright 2008-2009 Owen Taylor
#
# This file is part of Reinteract and distributed under the terms
# of the BSD license. See the file COPYING in the Reinteract
# distribution for full details.
#
########################################################################
import os
import re
import sys
import gtk
from application import application
from base_window import BaseWindow
from library_editor import LibraryEditor
from notebook import LibraryFile, NotebookFile, WorksheetFile
from window_builder import WindowBuilder
from worksheet_editor import WorksheetEditor
class BaseNotebookWindow(BaseWindow):
def __init__(self, notebook):
BaseWindow.__init__(self, notebook)
self.state = application.state.get_notebook_state(notebook.folder)
# We'll call window.set_default_size() later with an appropriate
# default size for the BaseNotebookWindow subclass. The size set by
# window.resize() takes precedence.
(width, height) = self.state.get_size()
if width != -1 and height != -1:
self.window.resize(width, height)
self.window.connect('configure-event', self.on_configure_event)
self.path = notebook.folder
self.editors = []
self.nb_widget = gtk.Notebook()
self.nb_widget.connect_after('switch-page', self.on_page_switched)
self.nb_widget.connect('page-reordered', self.on_page_reordered)
self._fill_content()
self.main_vbox.show_all()
self.__initial_editor = None
open_file_paths = self.state.get_open_files()
current_file = self.state.get_current_file()
for path in open_file_paths:
if not path in self.notebook.files:
continue
file = self.notebook.files[path]
self.open_file(file)
current_file_editor = None
if current_file is not None:
filename = os.path.join(notebook.folder, current_file)
for editor in self.editors:
if editor.filename == filename:
current_file_editor = editor
if current_file_editor is None and len(self.editors) > 0:
current_file_editor = self.editors[0]
if current_file_editor is not None:
self._make_editor_current(current_file_editor)
current_file_editor.view.grab_focus()
self.__update_title()
#######################################################
# Implemented by subclasses
#######################################################
def _fill_contents(self, editor):
raise NotImplementedError()
def _add_editor(self, editor):
self.editors.append(editor)
self.nb_widget.add(editor.widget)
editor.widget._notebook_window_editor = editor
editor.connect('notify::title', self.on_editor_notify_title)
editor.connect('notify::filename', self.on_editor_notify_filename)
editor.connect('notify::modified', self.on_editor_notify_modified)
editor.connect('notify::state', self.on_editor_notify_state)
self._update_editor_title(editor)
self._update_editor_state(editor)
self._update_open_files()
def _close_editor(self, editor):
if not editor.confirm_discard():
return
if editor == self.current_editor:
# Either we'll switch page and a new editor will be set, or we have no pages left
self.current_editor = None
        if editor == self.__initial_editor:
self.__initial_editor = None
self.editors.remove(editor)
editor.widget._notebook_window_editor = None
editor.close()
self.__update_title()
self._update_open_files()
self.update_sensitivity()
def _update_editor_state(self, editor):
        self.update_sensitivity()
def _update_editor_title(self, editor):
if editor == self.current_editor:
self.__update_title()
#######################################################
# Overrides
#######################################################
def _add_actions(self, action_group):
BaseWindow._add_actions(self, action_group)
action_group.add_actions([
('notebook-properties', gtk.STOCK_PROPERTIES, "Notebook Prop_erties", None, None, self.on_notebook_properties),
('new-worksheet', gtk.STOCK_NEW, "_New Worksheet", "<control>n", None, self.on_new_worksheet),
('new-library', gtk.STOCK_NEW, "New _Library", "", None, self.on_new_library),
('calculate-all', gtk.STOCK_REFRESH, "Calculate _All", "<control><shift>Return", None, self.on_calculate_all),
])
def _close_current(self):
if self.current_editor:
self._close_editor(self.current_editor)
def _close_window(self):
if not self._confirm_discard():
return
BaseWindow._close_window(self)
#######################################################
# Utility
#######################################################
def _make_editor_current(self, editor):
self.nb_widget.set_current_page(self.nb_widget.page_num(editor.widget))
def __close_initial_editor(self):
if self.__initial_editor and not self.__initial_editor.filename and not self.__initial_editor.modified:
self._close_editor(self.__initial_editor)
self.__initial_editor = None
def __new_worksheet(self):
editor = WorksheetEditor(self.notebook)
self._add_editor(editor)
self._make_editor_current(editor)
return editor
def __new_library(self):
editor = LibraryEditor(self.notebook)
self._add_editor(editor)
self._make_editor_current(editor)
return editor
def __update_title(self, *args):
if self.current_editor:
title = self.current_editor.title + " - " + os.path.basename(self.notebook.folder) + " - Reinteract"
else:
title = os.path.basename(self.notebook.folder) + " - Reinteract"
self.window.set_title(title)
def _confirm_discard(self, before_quit=False):
for editor in self.editors:
if editor.modified:
                # Let the user see what they are discarding or not discarding
self.window.present_with_time(gtk.get_current_event_time())
self._make_editor_current(editor)
if not editor.confirm_discard(before_quit=before_quit):
return False
return True
def _update_open_files(self):
open_file_paths = []
for child in self.nb_widget.get_children():
file = child._notebook_window_editor.file
if not file:
continue
open_file_paths.append(file.path)
self.state.set_open_files(open_file_paths)
def _update_current_file(self):
file = self.current_editor.file
if file is not None:
self.state.set_current_file(file.path)
else:
self.state.set_current_file(None)
def _update_size(self, width, height):
self.state.set_size(width, height)
#######################################################
# Callbacks
#######################################################
def on_notebook_properties(self, action):
builder = WindowBuilder('notebook-properties')
builder.dialog.set_transient_for(self.window)
builder.dialog.set_title("%s - Properties" % self.notebook.info.name)
builder.name_entry.set_text(self.notebook.info.name)
builder.name_entry.set_sensitive(False)
builder.description_text_view.get_buffer().props.text = self.notebook.info.description
response = builder.dialog.run()
if response == gtk.RESPONSE_OK:
self.notebook.info.description = builder.description_text_view.get_buffer().props.text
builder.dialog.destroy()
def on_new_worksheet(self, action):
self.__new_worksheet()
def on_new_library(self, action):
        self.__new_library()
|
OpenDroneMap/OpenDroneMap
|
opendm/osfm.py
|
Python
|
gpl-3.0
| 22,421
| 0.004906
|
"""
OpenSfM related utils
"""
import os, shutil, sys, json, argparse
import yaml
from opendm import io
from opendm import log
from opendm import system
from opendm import context
from opendm import camera
from opendm.utils import get_depthmap_resolution
from opendm.photo import find_largest_photo_dim
from opensfm.large import metadataset
from opensfm.large import tools
from opensfm.actions import undistort
from opensfm.dataset import DataSet
from opendm.multispectral import get_photos_by_band
class OSFMContext:
def __init__(self, opensfm_project_path):
self.opensfm_project_path = opensfm_project_path
def run(self, command):
system.run('%s/bin/opensfm %s "%s"' %
(context.opensfm_path, command, self.opensfm_project_path))
def is_reconstruction_done(self):
tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')
reconstruction_file = os.path.join(self.opensfm_project_path, 'reconstruction.json')
return io.file_exists(tracks_file) and io.file_exists(reconstruction_file)
def reconstruct(self, rerun=False):
tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')
reconstruction_file = os.path.join(self.opensfm_project_path, 'reconstruction.json')
if not io.file_exists(tracks_file) or rerun:
self.run('create_tracks')
else:
log.ODM_WARNING('Found a valid OpenSfM tracks file in: %s' % tracks_file)
if not io.file_exists(reconstruction_file) or rerun:
self.run('reconstruct')
else:
log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' % reconstruction_file)
# Check that a reconstruction file has been created
if not self.reconstructed():
            log.ODM_ERROR("The program could not process this dataset using the current settings. "
                          "Check that the images have enough overlap, "
                          "that there are enough recognizable features "
                          "and that the images are in focus. "
                          "You could also try to increase the --min-num-features parameter. "
                          "The program will now exit.")
exit(1)
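    # --- editor's note (not part of the original file): a minimal usage
    # sketch under assumed inputs -- the project path below is illustrative,
    # only OSFMContext and its methods come from this module:
    #   ctx = OSFMContext('/path/to/project/opensfm')
    #   ctx.setup(args, images_path, reconstruction, rerun=False)
    #   ctx.reconstruct(rerun=False)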
def setup(self, args, images_path, reconstruction, append_config = [], rerun=False):
"""
Setup a OpenSfM project
"""
if rerun and io.dir_exists(self.opensfm_project_path):
shutil.rmtree(self.opensfm_project_path)
if not io.dir_exists(self.opensfm_project_path):
system.mkdir_p(self.opensfm_project_path)
list_path = os.path.join(self.opensfm_project_path, 'image_list.txt')
if not io.file_exists(list_path) or rerun:
if reconstruction.multi_camera:
photos = get_photos_by_band(reconstruction.multi_camera, args.primary_band)
if len(photos) < 1:
raise Exception("Not enough images in selected band %s" % args.primary_band.lower())
log.ODM_INFO("Reconstruction will use %s images from %s band" % (len(photos), args.primary_band.lower()))
else:
photos = reconstruction.photos
# create file list
has_alt = True
has_gps = False
with open(list_path, 'w') as fout:
for photo in photos:
if not photo.altitude:
has_alt = False
if photo.latitude is not None and photo.longitude is not None:
has_gps = True
fout.write('%s\n' % os.path.join(images_path, photo.filename))
# check for image_groups.txt (split-merge)
image_groups_file = os.path.join(args.project_path, "image_groups.txt")
if io.file_exists(image_groups_file):
                io.copy(image_groups_file, os.path.join(self.opensfm_project_path, "image_groups.txt"))
                log.ODM_INFO("Copied image_groups.txt to OpenSfM directory")
# check for cameras
if args.cameras:
try:
camera_overrides = camera.get_opensfm_camera_models(args.cameras)
with open(os.path.join(self.opensfm_project_path, "camera_models_overrides.json"), 'w') as f:
f.write(json.dumps(camera_overrides))
log.ODM_INFO("Wrote camera_models_overrides.json to OpenSfM directory")
except Exception as e:
log.ODM_WARNING("Cannot set camera_models_overrides.json: %s" % str(e))
use_bow = args.matcher_type == "bow"
feature_type = "SIFT"
# GPSDOP override if we have GPS accuracy information (such as RTK)
if 'gps_accuracy_is_set' in args:
log.ODM_INFO("Forcing GPS DOP to %s for all images" % args.gps_accuracy)
log.ODM_INFO("Writing exif overrides")
exif_overrides = {}
for p in photos:
if 'gps_accuracy_is_set' in args:
dop = args.gps_accuracy
elif p.get_gps_dop() is not None:
dop = p.get_gps_dop()
else:
dop = args.gps_accuracy # default value
if p.latitude is not None and p.longitude is not None:
exif_overrides[p.filename] = {
'gps': {
'latitude': p.latitude,
'longitude': p.longitude,
'altitude': p.altitude if p.altitude is not None else 0,
'dop': dop,
}
}
with open(os.path.join(self.opensfm_project_path, "exif_overrides.json"), 'w') as f:
f.write(json.dumps(exif_overrides))
# Check image masks
masks = []
for p in photos:
if p.mask is not None:
masks.append((p.filename, os.path.join(images_path, p.mask)))
if masks:
log.ODM_INFO("Found %s image masks" % len(masks))
with open(os.path.join(self.opensfm_project_path, "mask_list.txt"), 'w') as f:
for fname, mask in masks:
f.write("{} {}\n".format(fname, mask))
# Compute feature_process_size
feature_process_size = 2048 # default
if 'resize_to_is_set' in args:
# Legacy
log.ODM_WARNING("Legacy option --resize-to (this might be removed in a future version). Use --feature-quality instead.")
feature_process_size = int(args.resize_to)
else:
feature_quality_scale = {
'ultra': 1,
'high': 0.5,
'medium': 0.25,
'low': 0.125,
'lowest': 0.0675,
}
max_dim = find_largest_photo_dim(photos)
if max_dim > 0:
log.ODM_INFO("Maximum photo dimensions: %spx" % str(max_dim))
feature_process_size = int(max_dim * feature_quality_scale[args.feature_quality])
else:
log.ODM_WARNING("Cannot compute max image dimensions, going with defaults")
depthmap_resolution = get_depthmap_resolution(args, photos)
# create config file for OpenSfM
        config = [
            "use_exif_size: no",
            "flann_algorithm: KDTREE", # more stable, faster than KMEANS
            "feature_process_size: %s" % feature_process_size,
            "feature_min_frames: %s" % args.min_num_features,
            "processes: %s" % args.max_concurrency,
            "matching_gps_neighbors: %s" % args.matcher_neighbors,
            "matching_gps_distance: %s" % args.matcher_distance,
            "depthmap_method: %s" % args.opensfm_depthmap_method,
            "depthmap_resolution: %s" % depthmap_resolution,
|
biocore/american-gut-web
|
amgut/lib/data_access/ag_data_access.py
|
Python
|
bsd-3-clause
| 57,541
| 0
|
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
"""
Centralized database access for the American Gut web portal
"""
import logging
from uuid import UUID
import psycopg2
import bcrypt
import numpy as np
import pandas as pd
import random
import string
from amgut.lib.data_access.sql_connection import TRN
# character sets for kit id, passwords and verification codes
KIT_ALPHA = "abcdefghjkmnpqrstuvwxyz" # removed i, l and o for clarity
KIT_PASSWD = '1234567890'
KIT_VERCODE = KIT_PASSWD
KIT_PASSWD_NOZEROS = KIT_PASSWD[0:-1]
KIT_VERCODE_NOZEROS = KIT_PASSWD_NOZEROS
class AGDataAccess(object):
"""Data Access implementation for all the American Gut web portal
"""
# arbitrary, unique ID and value
human_sites = ['Stool',
'Mouth',
'Right hand',
'Left hand',
'Forehead',
'Torso',
'Left leg',
'Right leg',
'Nares',
'Hair',
'Tears',
'Nasal mucus',
'Ear wax',
'Vaginal mucus']
animal_sites = ['Stool',
'Mouth',
'Nares',
'Ears',
'Skin',
'Fur']
general_sites = ['Animal Habitat',
'Biofilm',
'Dust',
'Food',
'Fermented Food',
'Indoor Surface',
'Outdoor Surface',
                     'Plant habitat',
                     'Soil',
'Sole of shoe',
'Water']
#####################################
# Users
#####################################
    def authenticateWebAppUser(self, username, password):
        """ Attempts to authenticate the supplied username/password
Attempt to authenticate the user against the list of users in
        web_app_user table. If successful, a dict with user information is
        returned. If not, the function returns False.
"""
with TRN:
sql = """SELECT cast(ag_login_id as varchar(100)) as ag_login_id,
email, name, address, city,
                    state, zip, country, kit_password
FROM ag_login
INNER JOIN ag_kit USING (ag_login_id)
WHERE supplied_kit_id = %s"""
TRN.add(sql, [username])
row = TRN.execute_fetchindex()
if not row:
return False
results = dict(row[0])
password = password.encode('utf-8')
if not bcrypt.checkpw(password, results['kit_password']):
return False
results['ag_login_id'] = str(results['ag_login_id'])
return results
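    # --- editor's note (not part of the original file): the bcrypt check
    # above follows the standard pattern; standalone, with made-up values:
    #   import bcrypt
    #   hashed = bcrypt.hashpw(b"secret", bcrypt.gensalt())
    #   bcrypt.checkpw(b"secret", hashed)  # -> True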
def check_login_exists(self, email):
"""Checks if email for login already exists on system
Parameters
----------
email : str
Email for user to check
Returns
-------
ag_login_id or None
If exists, returns ag_login_id, else returns None
"""
with TRN:
clean_email = email.strip().lower()
sql = "SELECT ag_login_id FROM ag_login WHERE LOWER(email) = %s"
TRN.add(sql, [clean_email])
value = TRN.execute_fetchindex()
if value:
value = value[0][0]
return None if value == [] else value
def addAGLogin(self, email, name, address, city, state, zip_, country):
"""Adds a new login or returns the login_id if email already exists
Parameters
----------
email : str
Email to register for user
name : str
Name to register for user
address : str
Street address to register for user
city : str
City to register for user
state : str
State to register for user
zip_ : str
Postal code to register for user
country : str
Country to register for user
Returns
-------
ag_login_id : str
UUID for new user, or existing user if email already in system
"""
with TRN:
clean_email = email.strip().lower()
ag_login_id = self.check_login_exists(email)
if not ag_login_id:
# create the login
sql = """INSERT INTO ag_login
(email, name, address, city, state, zip, country)
VALUES (%s, %s, %s, %s, %s, %s, %s)
RETURNING ag_login_id"""
TRN.add(sql, [clean_email, name, address, city, state, zip_,
country])
ag_login_id = TRN.execute_fetchlast()
return ag_login_id
def getAGBarcodeDetails(self, barcode):
"""Returns information about the barcode from both AG and standard info
Parameters
----------
barcode : str
Barcode to get information for
Returns
-------
dict
All barcode info, keyed to column name
Raises
------
ValueError
Barcode not found in AG information tables
"""
sql = """SELECT email,
cast(ag_kit_barcode_id as varchar(100)),
cast(ag_kit_id as varchar(100)),
barcode,
site_sampled,
environment_sampled,
sample_date,
sample_time,
participant_name,
notes,
refunded,
withdrawn,
moldy,
other,
other_text,
date_of_last_email,
overloaded,
name,
status
FROM ag.ag_kit_barcodes
LEFT JOIN barcodes.barcode USING (barcode)
LEFT JOIN ag.ag_kit USING (ag_kit_id)
LEFT JOIN ag.ag_login_surveys USING (ag_login_id)
LEFT JOIN ag.ag_login USING (ag_login_id)
WHERE barcode = %s"""
with TRN:
TRN.add(sql, [barcode])
row = TRN.execute_fetchindex()
if not row:
raise ValueError('Barcode does not exist in AG: %s' % barcode)
return dict(row[0])
def getAGSurveyDetails(self, survey_id, language):
"""Returns survey information of a specific survey_id and language
Parameters
----------
survey_id : str
the id of the survey group
language : str
the language the survey is intended for
Returns
-------
DataFrame
pandas DataFrame of sorted survey details
Raises
------
ValueError
survey_id not found in database
ValueError
language not found in database
"""
if survey_id not in self.getKnownSurveyIds():
raise ValueError('Invalid survey_id')
if language not in self.getKnownLanguages():
raise ValueError('Invalid language')
sql = """SELECT survey_question_id,
survey_group,
%s,
question_shortname,
response,
ag.survey_question_response.display_index
AS response_index
FROM ag.survey_question
LEFT JOIN ag.survey_q
|
denis-vilyuzhanin/selenium-fastview
|
py/selenium/webdriver/firefox/firefox_binary.py
|
Python
|
apache-2.0
| 8,338
| 0.002998
|
#!/usr/bin/python
#
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import platform
from subprocess import Popen, STDOUT
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common import utils
import time
class FirefoxBinary(object):
NO_FOCUS_LIBRARY_NAME = "x_ignore_nofocus.so"
def __init__(self, firefox_path=None, log_file=None):
"""
Creates a new instance of Firefox binary.
:Args:
- firefox_path - Path to the Firefox executable. By default, it will be detected from the standard locations.
- log_file - A file object to redirect the firefox process output to. It can be sys.stdout.
Please note that with parallel run the output won't be synchronous.
By default, it will be redirected to /dev/null.
"""
self._start_cmd = firefox_path
# We used to default to subprocess.PIPE instead of /dev/null, but after
# a while the pipe would fill up and Firefox would freeze.
self._log_file = log_file or open(os.devnull, "wb")
self.command_line = None
if self._start_cmd is None:
self._start_cmd = self._get_firefox_start_cmd()
if not self._start_cmd.strip():
raise Exception("Failed to find firefox binary. You can set it by specifying the path to 'firefox_binary':\n\nfrom selenium.webdriver.firefox.firefox_binary import FirefoxBinary\n\n" +
"binary = FirefoxBinary('/path/to/binary')\ndriver = webdriver.Firefox(firefox_binary=binary)")
# Rather than modifying the environment of the calling Python process
# copy it and modify as needed.
self._firefox_env = os.environ.copy()
self._firefox_env["MOZ_CRASHREPORTER_DISABLE"] = "1"
self._firefox_env["MOZ_NO_REMOTE"] = "1"
self._firefox_env["NO_EM_RESTART"] = "1"
def add_command_line_options(self, *args):
self.command_line = args
def launch_browser(self, profile):
"""Launches the browser for the given profile name.
It is assumed the profile already exists.
"""
self.profile = profile
self._start_from_profile_path(self.profile.path)
self._wait_until_connectable()
def kill(self):
"""Kill the browser.
This is useful when the browser is stuck.
"""
if self.process:
self.process.kill()
self.process.wait()
def _start_from_profile_path(self, path):
self._firefox_env["XRE_PROFILE_PATH"] = path
if platform.system().lower() == 'linux':
self._modify_link_library_path()
command = [self._start_cmd, "-silent"]
if self.command_line is not None:
for cli in self.command_line:
command.append(cli)
Popen(command, stdout=self._log_file, stderr=STDOUT,
env=self._firefox_env).communicate()
command[1] = '-foreground'
self.process = Popen(
command, stdout=self._log_file, stderr=STDOUT,
env=self._firefox_env)
def _wait_until_connectable(self):
"""Blocks until the extension is connectable in the firefox."""
count = 0
while not utils.is_connectable(self.profile.port):
if self.process.poll() is not None:
# Browser has exited
raise WebDriverException("The browser appears to have exited "
"before we could connect. If you specified a log_file in "
"the FirefoxBinary constructor, check it for details.")
if count == 30:
self.kill()
                raise WebDriverException("Can't load the profile. Profile "
                                         "Dir: %s If you specified a log_file in the "
                                         "FirefoxBinary constructor, check "
                                         "it for details." % self.profile.path)
count += 1
time.sleep(1)
return True
def _find_exe_in_registry(self):
try:
            from _winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER
except ImportError:
from winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER
import shlex
keys = (
r"SOFTWARE\Classes\FirefoxHTML\shell\open\command",
r"SOFTWARE\Classes\Applications\firefox.exe\shell\open\command"
)
command = ""
for path in keys:
try:
key = OpenKey(HKEY_LOCAL_MACHINE, path)
command = QueryValue(key, "")
break
except OSError:
try:
key = OpenKey(HKEY_CURRENT_USER, path)
command = QueryValue(key, "")
break
except OSError:
pass
else:
return ""
if not command:
return ""
return shlex.split(command)[0]
def _get_firefox_start_cmd(self):
"""Return the command to start firefox."""
start_cmd = ""
if platform.system() == "Darwin":
start_cmd = ("/Applications/Firefox.app/Contents/MacOS/firefox-bin")
elif platform.system() == "Windows":
start_cmd = (self._find_exe_in_registry() or
self._default_windows_location())
elif platform.system() == 'Java' and os._name == 'nt':
start_cmd = self._default_windows_location()
else:
for ffname in ["firefox", "iceweasel"]:
start_cmd = self.which(ffname)
if start_cmd is not None:
break
else:
# couldn't find firefox on the system path
raise RuntimeError("Could not find firefox in your system PATH." +
" Please specify the firefox binary location or install firefox")
return start_cmd
def _default_windows_location(self):
program_files = [os.getenv("PROGRAMFILES", r"C:\Program Files"),
os.getenv("PROGRAMFILES(X86)", r"C:\Program Files (x86)")]
for path in program_files:
binary_path = os.path.join(path, r"Mozilla Firefox\firefox.exe")
if os.access(binary_path, os.X_OK):
return binary_path
return ""
def _modify_link_library_path(self):
existing_ld_lib_path = os.environ.get('LD_LIBRARY_PATH', '')
new_ld_lib_path = self._extract_and_check(
self.profile, self.NO_FOCUS_LIBRARY_NAME, "x86", "amd64")
new_ld_lib_path += existing_ld_lib_path
self._firefox_env["LD_LIBRARY_PATH"] = new_ld_lib_path
self._firefox_env['LD_PRELOAD'] = self.NO_FOCUS_LIBRARY_NAME
def _extract_and_check(self, profile, no_focus_so_name, x86, amd64):
paths = [x86, amd64]
built_path = ""
for path in paths:
library_path = os.path.join(profile.path, path)
os.makedirs(library_path)
import shutil
shutil.copy(os.path.join(os.path.dirname(__file__), path,
self.NO_FOCUS_LIBRARY_NAME),
library_path)
built_path += library_path + ":"
return built_path
def which(self, fname):
"""Returns the fully qualified path by searching Path of the given
name"""
for pe in os.environ['PATH'].split(os.pathsep):
            checkname = os.path.join(pe, fname)
            if os.access(checkname, os.X_OK) and not os.path.isdir(checkname):
                return checkname
        return None
|
gdorion/advent-of-code
|
2015/python/Day3/houses.py
|
Python
|
mit
| 2,174
| 0.0046
|
import math
class Point(object):
X = 0
Y = 0
def __init__(self, x, y):
self.X = x
self.Y = y
def getX(self):
return self.X
def getY(self):
return self.Y
def __str__(self):
return "Point(%s,%s)" % (self.X, self.Y)
def __eq__(self, other):
return self.X == other.X and self.Y == other.Y
def move_left(self):
self.X = self.X - 1
return None
def move_up(self):
self.Y = self.Y - 1
return None
def move_right(self):
self.X = self.X + 1
return None
def move_bottom(self):
self.Y = self.Y + 1
return None
class Trail(object):
def __init__(self):
self.trail = []
def extend(self, point):
self.trail.append(point)
print "Added : " + str(point)
    def getHousesCountVisitedOnce(self):
uniquePoints = []
for point1 in self.trail:
found = False
for point2 in uniquePoints:
if point1.X == point2.X and point1.Y == point2.Y :
found = True
if found == False:
uniquePoints.append(point1)
return len(uniquePoints)
def main():
#
# Entry point
#
    origin = Point(0,0) # Where Santa starts its run.
trail = Trail() # The gifts delivery trail.
trail.extend(origin)
with open('data.txt') as f:
for c in f.read():
if c == "<":
origin = Point(origin.X, origin.Y)
origin.move_left()
trail.extend(origin)
elif c == "^":
origin = Point(origin.X, origin.Y)
origin.move_up()
trail.extend(origin)
elif c == ">":
origin = Point(origin.X, origin.Y)
origin.move_right()
trail.extend(origin)
elif c == "v":
origin = Point(origin.X, origin.Y)
origin.move_bottom()
trail.extend(origin)
print "Number of houses visited once : " + str(trail.getHousesCountVisitedOnce())
if __name__ == "__main__":
main()
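# --- editor's sketch (not part of the original file): the O(n^2) scan in
# getHousesCountVisitedOnce can be replaced by an O(n) set of (X, Y) tuples.
# count_unique_points below is hypothetical and not used by the script above.
def count_unique_points(points):
    # Set membership on hashable tuples is O(1) on average.
    return len(set((p.X, p.Y) for p in points))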
|
phyng/python-google-proxy
|
proxy/wsgi.py
|
Python
|
mit
| 387
| 0
|
"""
WSGI config for proxy project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "proxy.settings")
application = get_wsgi_application()
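# --- editor's note (not part of the original file): one common way to serve
# this callable, assuming gunicorn is installed (the command is illustrative):
#   gunicorn proxy.wsgi:application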
|
ennoborg/gramps
|
gramps/gen/proxy/cache.py
|
Python
|
gpl-2.0
| 6,358
| 0.000472
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (c) 2016 Gramps Development Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Proxy class for the Gramps databases. Caches lookups from handles.
"""
from ..utils.lru import LRU
class CacheProxyDb:
"""
A Proxy for a database with cached lookups on handles.
    Does not invalidate caches. Should be used only in read-only
places, and not where caches are altered.
"""
def __init__(self, database):
"""
CacheProxy will cache items based on their handle.
Assumes all handles (regardless of type) are unique.
Database is called self.db for consistency with other
proxies.
"""
self.db = database
self.clear_cache()
def __getattr__(self, attr):
"""
        If an attribute isn't found here, use the self.db version.
"""
return getattr(self.db, attr)
    def clear_cache(self, handle=None):
        """
        Clears all caches if handle is None, otherwise only the given entry.
"""
if handle:
del self.cache_handle[handle]
else:
self.cache_handle = LRU(100000)
def get_person_from_handle(self, handle):
"""
Gets item from cache if it exists. Converts
handles to string, for uniformity.
"""
if isinstance(handle, bytes):
handle = str(handle, "utf-8")
if handle not in self.cache_handle:
self.cache_handle[handle] = self.db.get_person_from_handle(handle)
return self.cache_handle[handle]
def get_event_from_handle(self, handle):
"""
Gets item from cache if it exists. Converts
handles to string, for uniformity.
"""
if isinstance(handle, bytes):
handle = str(handle, "utf-8")
if handle not in self.cache_handle:
self.cache_handle[handle] = self.db.get_event_from_handle(handle)
return self.cache_handle[handle]
def get_family_from_handle(self, handle):
"""
Gets item from cache if it exists. Converts
handles to string, for uniformity.
"""
if isinstance(handle, bytes):
handle = str(handle, "utf-8")
if handle not in self.cache_handle:
self.cache_handle[handle] = self.db.get_family_from_handle(handle)
return self.cache_handle[handle]
def get_repository_from_handle(self, handle):
"""
Gets item from cache if it exists. Converts
handles to string, for uniformity.
"""
if isinstance(handle, bytes):
handle = str(handle, "utf-8")
if handle not in self.cache_handle:
self.cache_handle[handle] = self.db.get_repository_from_handle(handle)
return self.cache_handle[handle]
def get_place_from_handle(self, handle):
"""
Gets item from cache if it exists. Converts
handles to string, for uniformity.
"""
if isinstance(handle, bytes):
handle = str(handle, "utf-8")
if handle not in self.cache_handle:
self.cache_handle[handle] = self.db.get_place_from_handle(handle)
return self.cache_handle[handle]
def get_citation_from_handle(self, handle):
"""
Gets item from cache if it exists. Converts
handles to string, for uniformity.
"""
if isinstance(handle, bytes):
handle = str(handle, "utf-8")
if handle not in self.cache_handle:
self.cache_handle[handle] = self.db.get_citation_from_handle(handle)
return self.cache_handle[handle]
def get_source_from_handle(self, handle):
"""
Gets item from cache if it exists. Converts
handles to string, for uniformity.
"""
if isinstance(handle, bytes):
handle = str(handle, "utf-8")
if handle not in self.cache_handle:
self.cache_handle[handle] = self.db.get_source_from_handle(handle)
return self.cache_handle[handle]
def get_note_from_handle(self, handle):
"""
Gets item from cache if it exists. Converts
handles to string, for uniformity.
"""
if isinstance(handle, bytes):
handle = str(handle, "utf-8")
if handle not in self.cache_handle:
self.cache_handle[handle] = self.db.get_note_from_handle(handle)
return self.cache_handle[handle]
def get_media_from_handle(self, handle):
"""
Gets item from cache if it exists. Converts
handles to string, for uniformity.
"""
if isinstance(handle, bytes):
handle = str(handle, "utf-8")
if handle not in self.cache_handle:
self.cache_handle[handle] = self.db.get_media_from_handle(handle)
return self.cache_handle[handle]
def get_tag_from_handle(self, handle):
"""
Gets item from cache if it exists. Converts
handles to string, for uniformity.
"""
if isinstance(handle, bytes):
handle = str(handle, "utf-8")
if handle not in self.cache_handle:
self.cache_handle[handle] = self.db.get_tag_from_handle(handle)
return self.cache_handle[handle]
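# --- editor's sketch (not part of the original file): minimal usage with a
# stub database; only CacheProxyDb comes from this module, the stub class and
# the handle value are made up.
class _StubDb:
    def get_person_from_handle(self, handle):
        return "person-%s" % handle  # stands in for a real lookup
# proxy = CacheProxyDb(_StubDb())
# proxy.get_person_from_handle("h1")  # hits the stub db
# proxy.get_person_from_handle("h1")  # served from the LRU cache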
|
sysbot/OpenClos
|
jnpr/openclos/ztp.py
|
Python
|
apache-2.0
| 4,984
| 0.008427
|
'''
Created on Sep 10, 2014
@author: moloyc
'''
import os
import logging
from jinja2 import Environment, PackageLoader
from netaddr import IPNetwork
import util
from model import Pod
from dao import Dao
from writer import DhcpConfWriter
moduleName = 'ztp'
logging.basicConfig()
logger = logging.getLogger(moduleName)
logger.setLevel(logging.DEBUG)
ztpTemplateLocation = os.path.join('conf', 'ztp')
class ZtpServer():
def __init__(self, conf = {}, templateEnv = None):
if any(conf) == False:
self.conf = util.loadConfig()
logger.setLevel(logging.getLevelName(self.conf['logLevel'][moduleName]))
else:
self.conf = conf
self.dao = Dao(self.conf)
if templateEnv is None:
self.templateEnv = Environment(loader=PackageLoader('jnpr.openclos', ztpTemplateLocation))
def dcpServerReloadConfig(self):
#TODO: sudo service isc-dhcp-server force-reload
# Not needed as of now
pass
''' TODO: for 2.0, Not needed as of now
    def createSingleDhcpConfFile(self):
pods = self.dao.getAll(Pod)
if len(pods) > 0:
confWriter = DhcpConfWriter(self.conf, pods[0], self.dao)
confWriter.writeSingle(self.generateSingleDhcpConf())
'''
def generateSingleDhcpConf(self):
if util.isPlatformUbuntu():
ztp = self.populateDhcpGlobalSettings()
            dhcpTemplate = self.templateEnv.get_template('dhcp.conf.ubuntu')
return dhcpTemplate.render(ztp = self.populateDhcpDeviceSpecificSettingForAllPods(ztp))
def createPodSpecificDhcpConfFile(self, podName):
pod = self.dao.getUniqueObjectByName(Pod, podName)
confWriter = DhcpConfWriter(self.conf, pod, self.dao)
confWriter.write(self.generatePodSpecificDhcpConf(pod.name))
def generatePodSpecificDhcpConf(self, podName):
ztp = self.populateDhcpGlobalSettings()
conf = None
if util.isPlatformUbuntu():
dhcpTemplate = self.templateEnv.get_template('dhcp.conf.ubuntu')
ztp = self.populateDhcpDeviceSpecificSetting(podName, ztp)
conf = dhcpTemplate.render(ztp = ztp)
elif util.isPlatformCentos():
dhcpTemplate = self.templateEnv.get_template('dhcp.conf.centos')
ztp = self.populateDhcpDeviceSpecificSetting(podName, ztp)
conf = dhcpTemplate.render(ztp = ztp)
logger.debug('dhcpd.conf\n%s' % (conf))
return conf
def populateDhcpGlobalSettings(self):
ztp = {}
ztpGlobalSettings = util.loadClosDefinition()['ztp']
subnet = ztpGlobalSettings['dhcpSubnet']
dhcpBlock = IPNetwork(subnet)
ipList = list(dhcpBlock.iter_hosts())
ztp['network'] = str(dhcpBlock.network)
ztp['netmask'] = str(dhcpBlock.netmask)
ztp['defaultRoute'] = ztpGlobalSettings.get('dhcpOptionRoute')
if ztp['defaultRoute'] is None or ztp['defaultRoute'] == '':
ztp['defaultRoute'] = str(ipList[0])
ztp['rangeStart'] = ztpGlobalSettings.get('dhcpOptionRangeStart')
if ztp['rangeStart'] is None or ztp['rangeStart'] == '':
ztp['rangeStart'] = str(ipList[1])
ztp['rangeEnd'] = ztpGlobalSettings.get('dhcpOptionRangeEnd')
if ztp['rangeEnd'] is None or ztp['rangeEnd'] == '':
ztp['rangeEnd'] = str(ipList[-1])
ztp['broadcast'] = str(dhcpBlock.broadcast)
ztp['httpServerIp'] = self.conf['httpServer']['ipAddr']
ztp['imageUrl'] = ztpGlobalSettings.get('junosImage')
return ztp
def populateDhcpDeviceSpecificSettingForAllPods(self, ztp = {}):
pods = self.dao.getAll(Pod)
for pod in pods:
ztp = self.populateDhcpDeviceSpecificSetting(pod.name, ztp)
return ztp
def populateDhcpDeviceSpecificSetting(self, podName, ztp = {}):
if ztp.get('devices') is None:
ztp['devices'] = []
pod = self.dao.getUniqueObjectByName(Pod, podName)
for device in pod.devices:
if device.role == 'spine':
image = pod.spineJunosImage
elif device.role == 'leaf':
image = pod.leafJunosImage
else:
image = None
logger.error('Pod: %s, Device: %s with unknown role: %s' % (pod.name, device.name, device.role))
deviceMgmtIp = str(IPNetwork(device.managementIp).ip)
ztp['devices'].append({'name': device.name, 'mac': device.macAddress,
'configUrl': 'pods/' + pod.name + '/devices/' + device.name + '/config',
'imageUrl': image, 'mgmtIp': deviceMgmtIp})
return ztp
if __name__ == '__main__':
ztpServer = ZtpServer()
ztpServer.createPodSpecificDhcpConfFile('labLeafSpine')
ztpServer.createPodSpecificDhcpConfFile('anotherPod')
#ztpServer.createSingleDhcpConfFile()
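# --- editor's sketch (not part of the original file): what
# populateDhcpGlobalSettings derives from a subnet, standalone; the subnet
# value is illustrative.
#   from netaddr import IPNetwork
#   block = IPNetwork('192.168.10.0/24')
#   hosts = list(block.iter_hosts())
#   block.network, block.netmask    # 192.168.10.0, 255.255.255.0
#   hosts[0], hosts[1], hosts[-1]   # .1 (route), .2 (range start), .254 (range end)
#   block.broadcast                 # 192.168.10.255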
|
wooga/airflow
|
backport_packages/import_all_provider_classes.py
|
Python
|
apache-2.0
| 4,232
| 0.001654
|
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
import traceback
from inspect import isclass
from typing import List
def import_all_provider_classes(source_path: str,
provider_ids: List[str] = None,
print_imports: bool = False) -> List[str]:
"""
Imports all classes in providers packages. This method loads and imports
all the classes found in providers, so that we can find all the subclasses
of operators/sensors etc.
    :param provider_ids: provider ids that should be loaded.
    :param print_imports: if imported class should also be printed in output
:param source_path: path to look for sources - might be None to look for all packages in all source paths
:return: list of all imported classes
"""
if provider_ids:
prefixed_provider_paths = [source_path + "/airflow/providers/" + provider_id.replace(".", "/")
for provider_id in provider_ids]
else:
prefixed_provider_paths = [source_path + "/airflow/providers/"]
imported_classes = []
tracebacks = []
for root, dirs, files in os.walk(source_path):
if all([not root.startswith(prefix_provider_path)
for prefix_provider_path in prefixed_provider_paths]) or root.endswith("__pycache__"):
# Skip loading module if it is not in the list of providers that we are looking for
continue
package_name = root[len(source_path) + 1:].replace("/", ".")
for file in files:
            if file.endswith(".py"):
                module_name = package_name + "." + file[:-3] if file != "__init__.py" else package_name
if print_imports:
print(f"Importing module: {module_name}")
# noinspection PyBroadException
try:
_module = importlib.import_module(module_name)
for attribute_name in dir(_module):
                        class_name = module_name + "." + attribute_name
attribute = getattr(_module, attribute_name)
if isclass(attribute):
if print_imports:
print(f"Imported {class_name}")
imported_classes.append(class_name)
except Exception:
exception_str = traceback.format_exc()
tracebacks.append(exception_str)
if tracebacks:
print("""
ERROR: There were some import errors
""", file=sys.stderr)
for trace in tracebacks:
print("----------------------------------------", file=sys.stderr)
print(trace, file=sys.stderr)
print("----------------------------------------", file=sys.stderr)
sys.exit(1)
else:
return imported_classes
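# --- editor's note (not part of the original file): typical direct use,
# with an illustrative path and provider id:
#   classes = import_all_provider_classes("/opt/airflow",
#                                         provider_ids=["google"],
#                                         print_imports=False)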
if __name__ == '__main__':
install_source_path = None
for python_path_candidate in sys.path:
providers_path_candidate = os.path.join(python_path_candidate, "airflow", "providers")
if os.path.isdir(providers_path_candidate):
install_source_path = python_path_candidate
print()
print(f"Walking all paths in {install_source_path}")
print()
import_all_provider_classes(print_imports=True, source_path=install_source_path)
print()
print("SUCCESS: All backport packages are importable!")
print()
|
photoninger/ansible
|
lib/ansible/modules/network/f5/bigip_gtm_facts.py
|
Python
|
gpl-3.0
| 31,508
| 0.001016
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_gtm_facts
short_description: Collect facts from F5 BIG-IP GTM devices
description:
- Collect facts from F5 BIG-IP GTM devices.
version_added: "2.3"
options:
include:
description:
- Fact category to collect.
required: True
choices:
- pool
- wide_ip
- virtual_server
filter:
description:
- Perform regex filter of response. Filtering is done on the name of
the resource. Valid filters are anything that can be provided to
Python's C(re) module.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Get pool facts
bigip_gtm_facts:
server: lb.mydomain.com
user: admin
password: secret
include: pool
filter: my_pool
delegate_to: localhost
'''
RETURN = r'''
wide_ip:
description:
Contains the lb method for the wide ip and the pools that are within the wide ip.
returned: changed
type: list
sample:
wide_ip:
- enabled: True
failure_rcode: noerror
failure_rcode_response: disabled
failure_rcode_ttl: 0
full_path: /Common/foo.ok.com
last_resort_pool: ""
minimal_response: enabled
name: foo.ok.com
partition: Common
persist_cidr_ipv4: 32
persist_cidr_ipv6: 128
persistence: disabled
pool_lb_mode: round-robin
pools:
- name: d3qw
order: 0
partition: Common
ratio: 1
ttl_persistence: 3600
type: naptr
pool:
description: Contains the pool object status and enabled status.
returned: changed
type: list
sample:
pool:
- alternate_mode: round-robin
dynamic_ratio: disabled
enabled: True
fallback_mode: return-to-dns
full_path: /Common/d3qw
load_balancing_mode: round-robin
manual_resume: disabled
max_answers_returned: 1
members:
- disabled: True
flags: a
full_path: ok3.com
member_order: 0
name: ok3.com
order: 10
preference: 10
ratio: 1
service: 80
name: d3qw
partition: Common
qos_hit_ratio: 5
qos_hops: 0
qos_kilobytes_second: 3
qos_lcs: 30
qos_packet_rate: 1
qos_rtt: 50
qos_topology: 0
qos_vs_capacity: 0
qos_vs_score: 0
availability_state: offline
enabled_state: disabled
ttl: 30
type: naptr
verify_member_availability: disabled
virtual_server:
description:
Contains the virtual server enabled and availability status, and address.
returned: changed
type: list
sample:
virtual_server:
- addresses:
- device_name: /Common/qweqwe
name: 10.10.10.10
translation: none
datacenter: /Common/xfxgh
enabled: True
expose_route_domains: no
full_path: /Common/qweqwe
iq_allow_path: yes
iq_allow_service_check: yes
iq_allow_snmp: yes
limit_cpu_usage: 0
limit_cpu_usage_status: disabled
limit_max_bps: 0
limit_max_bps_status: disabled
limit_max_connections: 0
limit_max_connections_status: disabled
limit_max_pps: 0
limit_max_pps_status: disabled
limit_mem_avail: 0
limit_mem_avail_status: disabled
link_discovery: disabled
monitor: /Common/bigip
name: qweqwe
partition: Common
product: single-bigip
virtual_server_discovery: disabled
virtual_servers:
- destination: 10.10.10.10:0
enabled: True
full_path: jsdfhsd
limit_max_bps: 0
limit_max_bps_status: disabled
limit_max_connections: 0
limit_max_connections_status: disabled
limit_max_pps: 0
limit_max_pps_status: disabled
name: jsdfhsd
translation_address: none
translation_port: 0
'''
import re
from ansible.module_utils.basic import AnsibleModule
HAS_DEVEL_IMPORTS = False
try:
# Sideband repository used for dev
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
    from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fqdn_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
HAS_DEVEL_IMPORTS = True
except ImportError:
# Upstream Ansible
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fqdn_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
try:
import json
except ImportError:
import simplejson as json
from ansible.module_utils.parsing.convert_bool import BOOLEANS_TRUE
from ansible.module_utils.six import iteritems
from distutils.version import LooseVersion
try:
from f5.utils.responses.handlers import Stats
from ansible.module_utils.f5_utils import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class BaseManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.kwargs = kwargs
self.types = dict(
a_s='a',
aaaas='aaaa',
cnames='cname',
mxs='mx',
naptrs='naptr',
srvs='srv'
)
def exec_module(self):
result = self.read_current_from_device()
return result
def filter_matches_name(self, name):
if self.want.filter is None:
return True
matches = re.match(self.want.filter, str(name))
if matches:
return True
else:
return False
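    # --- editor's note (not part of the original file): re.match anchors at
    # the start of the string, so filter="my_pool" also matches "my_pool_2";
    # pass "my_pool$" to match exactly.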
def version_is_less_than_12(self):
version = self.client.api.tmos_version
if LooseVersion(version) < LooseVersion('12.0.0'):
return True
else:
return False
def get_facts_from_collection(self, collection, collection_type=None):
results = []
for item in collection:
if not self.filter_matches_name(item.name):
continue
facts = self.format_facts(item, collection_type)
results.append(facts)
return results
def read_stats_from_device(self, resource):
stats = Stats(resource.stats.load())
return stats.stat
class UntypedManager(BaseManager):
def exec_module(self):
results = []
facts = self.read_facts()
for item in facts:
filtered = [(k, v) for k, v in iteritems(item) if self.filter_matches_name(k)]
if filtered:
results.append(dict(filtered))
return results
class TypedManager(BaseManager):
def exec_module(self):
results = []
for collection, type in i
|
Coops1980/Logbook
|
Main.py
|
Python
|
unlicense
| 2,472
| 0.033981
|
import tkinter as tk
import tkinter.ttk as ttk
def NewFlight():
from Controller import LoadNewFlightPage
LoadNewFlightPage(mainPage)
def LogBook():
from Controller import LoadLogBookPage
LoadLogBookPage(mainPage)
def CreateMainPage():
global mainPage
mainPage = tk.Tk()
mainPage.configure(background="midnight blue")
mainPage.title("RAF Flying Logbook v1")
"""mainPage.geometry('%dx%d+0+0' % (mainPage.winfo_screenwidth(),mainPage.winfo_screenheight()))
mainPageWidth = mainPage.winfo_screenwidth()
mainPageHeight = mainPage.winfo_screenheight()"""
    FrameTitle = tk.Label(text = "RAF Flight Logbook v1", fg="steel blue", font=("Comic Sans MS", 10),width=50)
FrameTitle.grid(row=0,column=0,columnspan=4)
    TopSpace = tk.Label(text = "", bg="midnight blue")
TopSpace.grid(row=1,columnspan=4,sticky='ew')
LogBook_btn = tk.Button(text = "Log Book", fg ="black",command=LogBook)
LogBook_btn.grid(row=2,column=1,columnspan=2,)
MidTopSpace = tk.Label(text = "", bg="midnight blue")
MidTopSpace.grid(row=3,columnspan=4,sticky='ew')
NewFlight_btn = tk.Button(text = "New Flight", fg ="black",command=NewFlight)
NewFlight_btn.grid(row=4,column=1,columnspan=2,)
MidSpace = tk.Label(text = "", bg="midnight blue")
MidSpace.grid(row=5,columnspan=4,sticky='ew')
QSummary_btn= tk.Button(text = "Quarterly Summary", fg ="black")
QSummary_btn.grid(row=6,column=1,columnspan=2,)
BotSpace = tk.Label(text = "", bg="midnight blue")
BotSpace.grid(row=7,columnspan=4,sticky='ew')
ASummary_btn= tk.Button(text = "Annual Summary", fg ="black")
ASummary_btn.grid(row=8,column=1,columnspan=2,)
TableSpace = tk.Label(text = "", bg="midnight blue")
TableSpace.grid(row=9,columnspan=4,sticky='ew')
Summary = ttk.Treeview(mainPage,height=4)
Summary["columns"]=("one")
Summary.heading("#0", text='Type', anchor='w')
Summary.column("one", width=40)
Summary.heading("one", text="Hours")
Summary.insert("", 0,text="Approaches", values=("3"))
Summary.insert("", 0,text="IF", values=("2"))
Summary.insert("", 0,text="Night", values=("1"))
Summary.insert("", 0,text="Day", values=("0"))
Summary.grid(row=10,column=0,columnspan=3,sticky='e')
Summary.columnconfigure(0,weight=0)
mainPage.mainloop()
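# --- editor's note (not part of the original file): nothing above invokes
# CreateMainPage(); a conventional entry point would be:
if __name__ == "__main__":
    CreateMainPage()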
|
MicrosoftLearning/Django
|
Demos/MusicStore/app/models.py
|
Python
|
mit
| 582
| 0.024055
|
"""
Definition of models.
"""
from django.db import models
from django import forms;
# Create your models here.
class Artist(models.Model):
name = models.CharField(max_length=50);
year_formed = models.PositiveIntegerField();
class ArtistForm(forms.ModelForm):
class Meta:
        model = Artist;
        fields = ['name', 'year_formed'];
class Album(models.Model):
name = models.CharField(max_length=50);
artist = models.ForeignKey(Artist);
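# --- editor's note (not part of the original file): on Django >= 2.0 the
# ForeignKey above must name an on_delete behaviour, e.g.:
#   artist = models.ForeignKey(Artist, on_delete=models.CASCADE)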
|
kartoza/jakarta-flood-maps
|
django_project/flood_mapper/migrations/0004_auto_20141216_1042.py
|
Python
|
bsd-2-clause
| 665
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('flood_mapper', '0003_data_migration_20141201_0218'),
]
operations = [
migrations.AlterField(
model_name='floodstatus',
name='notes',
            field=models.TextField(null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='village',
name='slug',
            field=models.SlugField(unique=True, max_length=100),
            preserve_default=True,
),
]
|
google/ffn
|
train.py
|
Python
|
apache-2.0
| 27,262
| 0.006383
|
# Copyright 2017-2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Driver script for FFN training.
The FFN is first run on a single seed point. The mask prediction for that seed
point is then used to train subsequent steps of the FFN by moving the field
of view in a way dependent on the initial predictions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import deque
from io import BytesIO
from functools import partial
import itertools
import json
import logging
import os
import random
import time
import h5py
import numpy as np
import PIL
import PIL.Image
import six
from scipy.special import expit
from scipy.special import logit
import tensorflow as tf
from absl import app
from absl import flags
from tensorflow import gfile
from ffn.inference import movement
from ffn.training import mask
from ffn.training.import_util import import_symbol
from ffn.training import inputs
from ffn.training import augmentation
# Necessary so that optimizer flags are defined.
# pylint: disable=unused-import
from ffn.training import optimizer
# pylint: enable=unused-import
FLAGS = flags.FLAGS
# Options related to training data.
flags.DEFINE_string('train_coords', None,
'Glob for the TFRecord of training coordinates.')
flags.DEFINE_string('data_volumes', None,
'Comma-separated list of <volume_name>:<volume_path>:'
'<dataset>, where volume_name need to match the '
'"label_volume_name" field in the input example, '
'volume_path points to HDF5 volumes containing uint8 '
'image data, and `dataset` is the name of the dataset '
'from which data will be read.')
flags.DEFINE_string('label_volumes', None,
'Comma-separated list of <volume_name>:<volume_path>:'
'<dataset>, where volume_name need to match the '
'"label_volume_name" field in the input example, '
'volume_path points to HDF5 volumes containing int64 '
'label data, and `dataset` is the name of the dataset '
'from which data will be read.')
flags.DEFINE_string('model_name', None,
'Name of the model to train. Format: '
'[<packages>.]<module_name>.<model_class>, if packages is '
'missing "ffn.training.models" is used as default.')
flags.DEFINE_string('model_args', None,
'JSON string with arguments to be passed to the model '
'constructor.')
# Training infra options.
flags.DEFINE_string('train_dir', '/tmp',
'Path where checkpoints and other data will be saved.')
flags.DEFINE_string('master', '', 'Network address of the master.')
flags.DEFINE_integer('batch_size', 4, 'Number of images in a batch.')
flags.DEFINE_integer('task', 0, 'Task id of the replica running the training.')
flags.DEFINE_integer('ps_tasks', 0, 'Number of tasks in the ps job.')
flags.DEFINE_integer('max_steps', 10000, 'Number of steps to train for.')
flags.DEFINE_integer('replica_step_delay', 300,
'Require the model to reach step number '
'<replica_step_delay> * '
'<replica_id> before starting training on a given '
'replica.')
flags.DEFINE_integer('summary_rate_secs', 120,
'How often to save summaries (in seconds).')
# FFN training options.
flags.DEFINE_float('seed_pad', 0.05,
'Value to use for the unknown area of the seed.')
flags.DEFINE_float('threshold', 0.9,
'Value to be reached or exceeded at the new center of the '
'field of view in order for the network to inspect it.')
flags.DEFINE_enum('fov_policy', 'fixed', ['fixed', 'max_pred_moves'],
'Policy to determine where to move the field of the '
'network. "fixed" tries predefined offsets specified by '
'"model.shifts". "max_pred_moves" moves to the voxel with '
'maximum mask activation within a plane perpendicular to '
'one of the 6 Cartesian directions, offset by +/- '
'model.deltas from the current FOV position.')
# TODO(mjanusz): Implement fov_moves > 1 for the 'fixed' policy.
flags.DEFINE_integer('fov_moves', 1,
'Number of FOV moves by "model.delta" voxels to execute '
'in every dimension. Currently only works with the '
'"max_pred_moves" policy.')
flags.DEFINE_boolean('shuffle_moves', True,
'Whether to randomize the order of the moves used by the '
'network with the "fixed" policy.')
flags.DEFINE_float('image_mean', None,
'Mean image intensity to use for input normalization.')
flags.DEFINE_float('image_stddev', None,
'Image intensity standard deviation to use for input '
'normalization.')
flags.DEFINE_list('image_offset_scale_map', None,
'Optional per-volume specification of mean and stddev. '
'Every entry in the list is a colon-separated tuple of: '
'volume_label, offset, scale.')
flags.DEFINE_list('permutable_axes', ['1', '2'],
'List of integers equal to a subset of [0, 1, 2] specifying '
'which of the [z, y, x] axes, respectively, may be permuted '
'in order to augment the training data.')
flags.DEFINE_list('reflectable_axes', ['0', '1', '2'],
'List of integers equal to a subset of [0, 1, 2] specifying '
'which of the [z, y, x] axes, respectively, may be reflected '
'in order to augment the training data.')
FLAGS = flags.FLAGS
class EvalTracker(object):
"""Tracks eval results over multiple training steps."""
def __init__(self, eval_shape):
    self.eval_labels = tf.placeholder(
        tf.float32, [1] + eval_shape + [1], name='eval_labels')
self.eval_preds = tf.placeholder(
tf.float32, [1] + eval_shape + [1], name='eval_preds')
self.eval_loss = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(
logits=self.eval_preds, labels=self.eval_labels))
self.reset()
self.eval_threshold = logit(0.9)
self.sess = None
self._eval_shape = eval_shape
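  # --- editor's note (not part of the original file): eval_preds holds raw
  # logits, which is why the threshold is logit(0.9) rather than 0.9 and the
  # loss above is sigmoid_cross_entropy_with_logits.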
def reset(self):
"""Resets status of the tracker."""
self.loss = 0
self.num_patches = 0
self.tp = 0
self.tn = 0
self.fn = 0
self.fp = 0
self.total_voxels = 0
self.masked_voxels = 0
self.images_xy = deque(maxlen=16)
self.images_xz = deque(maxlen=16)
self.images_yz = deque(maxlen=16)
def slice_image(self, labels, predicted, weights, slice_axis):
"""Builds a tf.Summary showing a slice of an object mask.
The object mask slice is shown side by side with the corresponding
ground truth mask.
Args:
labels: ndarray of ground truth data, shape [1, z, y, x, 1]
predicted: ndarray of predicted data, shape [1, z, y, x, 1]
weights: ndarray of loss weights, shape [1, z, y, x, 1]
slice_axis: axis in the middle of which to place the cutting plane
for which the summary image will be generated, valid values are
2 ('x'), 1 ('y'), and 0 ('z').
Returns:
tf.Summary.Value object with the image.
"""
zyx = list(labels.shape[1:-1])
se
|
nasseralkmim/SaPy
|
sapy/load.py
|
Python
|
gpl-3.0
| 342
| 0
|
import numpy as np
def P_vector(model, nodal_load):
"""Return the load vector
"""
P = np.zeros(model.nt)
for n, p in nodal_load.items():
if n not in model.CON:
            raise Exception('Not a valid DOF for the applied load!')
for i, d in enumerate(model.DOF[n]):
            P[d] = p[i]
return P
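# --- editor's sketch (not part of the original file): minimal usage with a
# stub model; the attribute names follow P_vector above, all values are
# made up.
#   class _Model:
#       nt = 4                          # total number of DOFs
#       CON = {0: (0, 1), 1: (1, 2)}    # connectivity, keyed by node
#       DOF = {0: [0, 1], 1: [2, 3]}    # DOFs per node
#   P_vector(_Model(), {1: [0.0, -10.0]})  # -> array([  0.,   0.,   0., -10.])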
|
sampathweb/game_app
|
card_games/test/test_blackjack.py
|
Python
|
mit
| 917
| 0.001091
|
#!/usr/bin/env python
"""
Test code for blackjack game. Tests can be run with py.test or nosetests
"""
from __future__ import print_function
from unittest import TestCase
from card_games import blackjack
from card_games.blackjack import BlackJack
print(blackjack.__file__)
class TestRule(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_init(self):
        mygame = BlackJack()
        self.assertEqual(len(mygame.player_hand), 2)  # Initial hand for Player
        self.assertEqual(len(mygame.dealer_hand), 2)  # Initial hand for Dealer
def test_player_bust(self):
mygame = BlackJack()
        for cnt in range(10):  # Draw 10 cards - Sure to lose
mygame.draw_card_player()
self.assertEqual(len(mygame.player_hand), 12) # Twelve cards in Player's hand
self.assertEqual(mygame.game_result(), 'bust') # Definitely a bust
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/add_data_lake_store_parameters.py
|
Python
|
mit
| 943
| 0
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AddDataLakeStoreParameters(Model):
"""The parameters used to add a new Data Lake Store account.
:param suffix: The optional suffix for the Data Lake Store account.
:type suffix: str
"""
    _attribute_map = {
        'suffix': {'key': 'properties.suffix', 'type': 'str'},
}
def __init__(self, suffix=None):
super(AddDataLakeStoreParameters, self).__init__()
self.suffix = suffix
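# --- editor's sketch (not part of the original file): constructing the
# model; the suffix value is illustrative.
#   params = AddDataLakeStoreParameters(suffix='azuredatalakestore.net')
#   params.suffix  # -> 'azuredatalakestore.net'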
|
numenta/htmresearch
|
htmresearch/frameworks/pytorch/modules/k_winners.py
|
Python
|
agpl-3.0
| 9,177
| 0.007083
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2019, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from __future__ import print_function
import abc
import torch
import torch.nn as nn
import torch.nn.functional as F
from htmresearch.frameworks.pytorch.duty_cycle_metrics import (
maxEntropy, binaryEntropy
)
from htmresearch.frameworks.pytorch.functions import k_winners, k_winners2d
def getEntropy(m):
"""
Function used to get the current and max entropies of KWinners modules.
:param m: any module
:return: (currentEntropy, maxEntropy)
"""
if isinstance(m, KWinnersBase):
return m.entropy(), m.maxEntropy()
else:
return 0.0, 0.0
def getEntropies(m):
"""
Recursively get the current and max entropies from every child module
:param m: any module
:return: (currentEntropy, maxEntropy)
"""
entropy = 0.0
max_entropy = 0.0
for module in m.children():
    e_child, m_child = getEntropies(module)
    entropy += e_child
    max_entropy += m_child
  e_own, m_own = getEntropy(m)
  entropy += e_own
  max_entropy += m_own
return entropy, max_entropy
def updateBoostStrength(m):
"""
Function used to update KWinner modules boost strength after each epoch.
Call using :meth:`torch.nn.Module.apply` after each epoch if required
For example: ``m.apply(updateBoostStrength)``
:param m: KWinner module
"""
if isinstance(m, KWinnersBase):
if m.training:
m.boostStrength = m.boostStrength * m.boostStrengthFactor
class KWinnersBase(nn.Module):
"""
Base KWinners class
"""
__metaclass__ = abc.ABCMeta
def __init__(self, n, k, kInferenceFactor=1.0, boostStrength=1.0,
boostStrengthFactor=1.0, dutyCyclePeriod=1000):
"""
:param n:
Number of units
:type n: int
:param k:
The activity of the top k units will be allowed to remain, the rest are set
to zero
:type k: int
:param kInferenceFactor:
During inference (training=False) we increase k by this factor.
:type kInferenceFactor: float
:param boostStrength:
boost strength (0.0 implies no boosting).
:type boostStrength: float
:param boostStrengthFactor:
Boost strength factor to use [0..1]
:type boostStrengthFactor: float
:param dutyCyclePeriod:
The period used to calculate duty cycles
:type dutyCyclePeriod: int
"""
super(KWinnersBase, self).__init__()
assert (boostStrength >= 0.0)
self.n = n
self.k = k
self.kInferenceFactor = kInferenceFactor
self.learningIterations = 0
# Boosting related parameters
self.boostStrength = boostStrength
self.boostStrengthFactor = boostStrengthFactor
self.dutyCyclePeriod = dutyCyclePeriod
def getLearningIterations(self):
return self.learningIterations
@abc.abstractmethod
def updateDutyCycle(self, x):
"""
Updates our duty cycle estimates with the new value. Duty cycles are
updated according to the following formula:
.. math::
dutyCycle = \\frac{dutyCycle \\times \\left( period - batchSize \\right)
+ newValue}{period}
:param x:
Current activity of each unit
"""
raise NotImplementedError
def updateBoostStrength(self):
"""
Update boost strength using given strength factor during training
"""
if self.training:
self.boostStrength = self.boostStrength * self.boostStrengthFactor
def entropy(self):
"""
Returns the current total entropy of this layer
"""
if self.k < self.n:
_, entropy = binaryEntropy(self.dutyCycle)
return entropy
else:
return 0
def maxEntropy(self):
"""
Returns the maximum total entropy we can expect from this layer
"""
return maxEntropy(self.n, self.k)
class KWinners(KWinnersBase):
"""
Applies K-Winner function to the input tensor
See :class:`htmresearch.frameworks.pytorch.functions.k_winners`
"""
def __init__(self, n, k, kInferenceFactor=1.0, boostStrength=1.0,
boostStrengthFactor=1.0, dutyCyclePeriod=1000):
"""
:param n:
Number of units
:type n: int
:param k:
The activity of the top k units will be allowed to remain, the rest are set
to zero
:type k: int
:param kInf
|
erenceFactor:
During infere
|
nce (training=False) we increase k by this factor.
:type kInferenceFactor: float
:param boostStrength:
boost strength (0.0 implies no boosting).
:type boostStrength: float
:param boostStrengthFactor:
Boost strength factor to use [0..1]
:type boostStrengthFactor: float
:param dutyCyclePeriod:
The period used to calculate duty cycles
:type dutyCyclePeriod: int
"""
super(KWinners, self).__init__(n=n, k=k,
kInferenceFactor=kInferenceFactor,
boostStrength=boostStrength,
boostStrengthFactor=boostStrengthFactor,
dutyCyclePeriod=dutyCyclePeriod)
self.register_buffer("dutyCycle", torch.zeros(self.n))
def forward(self, x):
# Apply k-winner algorithm if k < n, otherwise default to standard RELU
if self.k >= self.n:
return F.relu(x)
if self.training:
k = self.k
else:
k = min(int(round(self.k * self.kInferenceFactor)), self.n)
x = k_winners.apply(x, self.dutyCycle, k, self.boostStrength)
if self.training:
self.updateDutyCycle(x)
return x
def updateDutyCycle(self, x):
batchSize = x.shape[0]
self.learningIterations += batchSize
period = min(self.dutyCyclePeriod, self.learningIterations)
self.dutyCycle.mul_(period - batchSize)
self.dutyCycle.add_(x.gt(0).sum(dim=0, dtype=torch.float))
self.dutyCycle.div_(period)
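def kWinnersUsageExample():
  """
  A minimal usage sketch, assuming it is run standalone (not part of the
  original module): keep the top k of n activations per sample; while
  training, at most k entries per row of the output are non-zero.
  """
  layer = KWinners(n=128, k=16, boostStrength=1.5)
  x = torch.randn(8, 128)
  return layer(x)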
class KWinners2d(KWinnersBase):
"""
Applies K-Winner function to the input tensor
See :class:`htmresearch.frameworks.pytorch.functions.k_winners2d`
"""
def __init__(self, n, k, channels, kInferenceFactor=1.0, boostStrength=1.0,
boostStrengthFactor=1.0, dutyCyclePeriod=1000):
"""
:param n:
Number of units. Usually the output of the max pool or whichever layer
preceding the KWinners2d layer.
:type n: int
:param k:
The activity of the top k units will be allowed to remain, the rest are set
to zero
:type k: int
:param channels:
Number of channels (filters) in the convolutional layer.
:type channels: int
:param kInferenceFactor:
During inference (training=False) we increase k by this factor.
:type kInferenceFactor: float
:param boostStrength:
boost strength (0.0 implies no boosting).
:type boostStrength: float
:param boostStrengthFactor:
Boost strength factor to use [0..1]
:type boostStrengthFactor: float
:param dutyCyclePeriod:
The period used to calculate duty cycles
:type dutyCyclePeriod: int
"""
super(KWinners2d, self).__init__(n=n, k=k,
kInferenceFactor=kInferenceFactor,
boostStrength=boostStrength,
boostStrengthFactor=boostStrengthFactor,
dutyCyclePeriod=dutyCyclePeriod)
    self.channels = channels
|
facebookexperimental/eden
|
eden/hg-server/edenscm/hgext/highlight/highlight.py
|
Python
|
gpl-2.0
| 3,008
| 0.000997
|
# Portions Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# highlight.py - highlight extension implementation file
#
# Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
#
# The original module was split in an interface and an implementation
# file to defer pygments loading and speedup extension setup.
from __future__ import absolute_import
fr
|
om edenscm.mercurial import demandimport, encoding, util
demandimport.ignore.extend(["pkgutil", "pkg_resources", "__main__"])
with demandimport.deactivated():
import pygments
import pygments.formatters
import pygments.lexers
import pygments.util
highlight = pygments.highlight
ClassNotFound = pygments.util.ClassNotFound
guess_lexer = pygments.lexers.guess_lexer
guess_lexer_for_filename = pygments.lexe
|
rs.guess_lexer_for_filename
TextLexer = pygments.lexers.TextLexer
HtmlFormatter = pygments.formatters.HtmlFormatter
SYNTAX_CSS = '\n<link rel="stylesheet" href="{url}highlightcss" ' 'type="text/css" />'
def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
# append a <link ...> to the syntax highlighting css
old_header = tmpl.load("header")
if SYNTAX_CSS not in old_header:
new_header = old_header + SYNTAX_CSS
tmpl.cache["header"] = new_header
text = fctx.data()
if util.binary(text):
return
# str.splitlines() != unicode.splitlines() because "reasons"
for c in "\x0c\x1c\x1d\x1e":
if c in text:
text = text.replace(c, "")
# Pygments is best used with Unicode strings:
# <http://pygments.org/docs/unicode/>
text = text.decode(encoding.encoding, "replace")
# To get multi-line strings right, we can't format line-by-line
try:
lexer = guess_lexer_for_filename(fctx.path(), text[:1024], stripnl=False)
except (ClassNotFound, ValueError):
# guess_lexer will return a lexer if *any* lexer matches. There is
# no way to specify a minimum match score. This can give a high rate of
# false positives on files with an unknown filename pattern.
if guessfilenameonly:
return
try:
lexer = guess_lexer(text[:1024], stripnl=False)
except (ClassNotFound, ValueError):
# Don't highlight unknown files
return
# Don't highlight text files
if isinstance(lexer, TextLexer):
return
formatter = HtmlFormatter(nowrap=True, style=style)
colorized = highlight(text, lexer, formatter)
coloriter = (s.encode(encoding.encoding, "replace") for s in colorized.splitlines())
tmpl.filters["colorize"] = lambda x: next(coloriter)
oldl = tmpl.cache[field]
newl = oldl.replace("line|escape", "line|colorize")
tmpl.cache[field] = newl
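def _lexer_fallback_example():
    """A minimal sketch, assuming pygments is importable standalone (editor's
    illustration, not part of the extension): the fallback chain pygmentize
    uses above is filename-based guessing first, then content-based guessing,
    and no highlighting for plain text."""
    sample = u"print('hi')\n"
    try:
        lexer = guess_lexer_for_filename("example.py", sample, stripnl=False)
    except (ClassNotFound, ValueError):
        lexer = guess_lexer(sample, stripnl=False)
    return None if isinstance(lexer, TextLexer) else lexer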
|
SnowWalkerJ/quantlib
|
quant/data/wind/tables/asharebalancesheet.py
|
Python
|
gpl-3.0
| 18,744
| 0.018408
|
from ....common.db.sql import VARCHAR, Numeric as NUMBER, DateTime as DATETIME, Column, BaseModel, CLOB, DATE
VARCHAR2 = VARCHAR
class AShareBalanceSheet(BaseModel):
"""
    4.45 China A-Share Balance Sheet
    Attributes
    ----------
    object_id: VARCHAR2(100)
        Object ID
    s_info_windcode: VARCHAR2(40)
        Wind code
    wind_code: VARCHAR2(40)
        Wind code
    ann_dt: VARCHAR2(8)
        Announcement date
    report_period: VARCHAR2(8)
        Reporting period
    statement_type: VARCHAR2(10)
        Statement type: 408001000: consolidated statement; 408004000: consolidated statement (adjusted); 408005000: consolidated statement (before correction); 408050000: consolidated adjustment (before correction); 408006000: parent company statement; 408009000: parent company statement (adjusted); 408010000: parent company statement (before correction); 408060000: parent company adjustment (before correction)
    crncy_code: VARCHAR2(10)
        Currency code (CNY)
    monetary_cap: NUMBER(20,4)
        Monetary capital
    tradable_fin_assets: NUMBER(20,4)
        Trading financial assets
    notes_rcv: NUMBER(20,4)
        Notes receivable
    acct_rcv: NUMBER(20,4)
        Accounts receivable
    oth_rcv: NUMBER(20,4)
        Other receivables
    prepay: NUMBER(20,4)
        Prepayments
    dvd_rcv: NUMBER(20,4)
        Dividends receivable
    int_rcv: NUMBER(20,4)
        Interest receivable
    inventories: NUMBER(20,4)
        Inventories
    consumptive_bio_assets: NUMBER(20,4)
        Consumable biological assets
    deferred_exp: NUMBER(20,4)
        Deferred expenses
    non_cur_assets_due_within_1y: NUMBER(20,4)
        Non-current assets due within one year
    settle_rsrv: NUMBER(20,4)
        Settlement reserves
    loans_to_oth_banks: NUMBER(20,4)
        Funds lent to other banks
    prem_rcv: NUMBER(20,4)
        Premiums receivable
    rcv_from_reinsurer: NUMBER(20,4)
        Reinsurance accounts receivable
    rcv_from_ceded_insur_cont_rsrv: NUMBER(20,4)
        Ceded insurance contract reserves receivable
    red_monetary_cap_for_sale: NUMBER(20,4)
        Financial assets purchased under resale agreements
    oth_cur_assets: NUMBER(20,4)
        Other current assets
    tot_cur_assets: NUMBER(20,4)
        Total current assets
    fin_assets_avail_for_sale: NUMBER(20,4)
        Available-for-sale financial assets
    held_to_mty_invest: NUMBER(20,4)
        Held-to-maturity investments
    long_term_eqy_invest: NUMBER(20,4)
        Long-term equity investments
    invest_real_estate: NUMBER(20,4)
        Investment real estate
    time_deposits: NUMBER(20,4)
        Time deposits
    oth_assets: NUMBER(20,4)
        Other assets
    long_term_rec: NUMBER(20,4)
        Long-term receivables
    fix_assets: NUMBER(20,4)
        Fixed assets
    const_in_prog: NUMBER(20,4)
        Construction in progress
    proj_matl: NUMBER(20,4)
        Project materials
    fix_assets_disp: NUMBER(20,4)
        Fixed assets pending disposal
    productive_bio_assets: NUMBER(20,4)
        Productive biological assets
    oil_and_natural_gas_assets: NUMBER(20,4)
        Oil and gas assets
    intang_assets: NUMBER(20,4)
        Intangible assets
    r_and_d_costs: NUMBER(20,4)
        Development expenditures
    goodwill: NUMBER(20,4)
        Goodwill
    long_term_deferred_exp: NUMBER(20,4)
        Long-term deferred expenses
    deferred_tax_assets: NUMBER(20,4)
        Deferred tax assets
    loans_and_adv_granted: NUMBER(20,4)
        Loans and advances granted
    oth_non_cur_assets: NUMBER(20,4)
        Other non-current assets
    tot_non_cur_assets: NUMBER(20,4)
        Total non-current assets
    cash_deposits_central_bank: NUMBER(20,4)
        Cash and deposits with the central bank
    asset_dep_oth_banks_fin_inst: NUMBER(20,4)
        Deposits with other banks and financial institutions
    precious_metals: NUMBER(20,4)
        Precious metals
    derivative_fin_assets: NUMBER(20,4)
        Derivative financial assets
    agency_bus_assets: NUMBER(20,4)
        Agency business assets
    subr_rec: NUMBER(20,4)
        Subrogation receivables
    rcv_ceded_unearned_prem_rsrv: NUMBER(20,4)
        Ceded unearned premium reserves receivable
    rcv_ceded_claim_rsrv: NUMBER(20,4)
        Ceded outstanding claim reserves receivable
    rcv_ceded_life_insur_rsrv: NUMBER(20,4)
        Ceded life insurance reserves receivable
    rcv_ceded_lt_health_insur_rsrv: NUMBER(20,4)
        Ceded long-term health insurance reserves receivable
    mrgn_paid: NUMBER(20,4)
        Margin deposits paid
    insured_pledge_loan: NUMBER(20,4)
        Policyholder pledge loans
    cap_mrgn_paid: NUMBER(20,4)
        Capital margin deposits paid
    independent_acct_assets: NUMBER(20,4)
        Independent account assets
    clients_cap_deposit: NUMBER(20,4)
        Client capital deposits
    clients_rsrv_settle: NUMBER(20,4)
        Client settlement reserves
    incl_seat_fees_exchange: NUMBER(20,4)
        Incl.: exchange seat fees
    rcv_invest: NUMBER(20,4)
        Receivables-type investments
    tot_assets: NUMBER(20,4)
        Total assets
    st_borrow: NUMBER(20,4)
        Short-term borrowings
    borrow_central_bank: NUMBER(20,4)
        Borrowings from the central bank
    deposit_received_ib_deposits: NUMBER(20,4)
        Customer and interbank deposits received
    loans_oth_banks: NUMBER(20,4)
        Funds borrowed from other banks
    tradable_fin_liab: NUMBER(20,4)
        Trading financial liabilities
    notes_payable: NUMBER(20,4)
        Notes payable
    acct_payable: NUMBER(20,4)
        Accounts payable
    adv_from_cust: NUMBER(20,4)
        Advances from customers
    fund_sales_fin_assets_rp: NUMBER(20,4)
        Financial assets sold under repurchase agreements
    handling_charges_comm_payable: NUMBER(20,4)
        Handling charges and commissions payable
    empl_ben_payable: NUMBER(20,4)
        Employee benefits payable
    taxes_surcharges_payable: NUMBER(20,4)
        Taxes and surcharges payable
    int_payable: NUMBER(20,4)
        Interest payable
    dvd_payable: NUMBER(20,4)
        Dividends payable
    oth_payable: NUMBER(20,4)
        Other payables
    acc_exp: NUMBER(20,4)
        Accrued expenses
    deferred_inc: NUMBER(20,4)
        Deferred income
    st_bonds_payable: NUMBER(20,4)
        Short-term bonds payable
    payable_to_reinsurer: NUMBER(20,4)
        Reinsurance accounts payable
    rsrv_insur_cont: NUMBER(20,4)
        Insurance contract reserves
    acting_trading_sec: NUMBER(20,4)
        Funds received as securities trading agent
    acting_uw_sec: NUMBER(20,4)
        Funds received as securities underwriting agent
    non_cur_liab_due_within_1y: NUMBER(20,4)
        Non-current liabilities due within one year
    oth_cur_liab: NUMBER(20,4)
        Other current liabilities
    tot_cur_liab: NUMBER(20,4)
        Total current liabilities
    lt_borrow: NUMBER(20,4)
        Long-term borrowings
    bonds_payable: NUMBER(20,4)
        Bonds payable
    lt_payable: NUMBER(20,4)
        Long-term payables
    specific_item_payable: NUMBER(20,4)
        Specific item payables
    provisions: NUMBER(20,4)
        Provisions
    deferred_tax_liab: NUMBER(20,4)
        Deferred tax liabilities
    deferred_inc_non_cur_liab: NUMBER(20,4)
        Deferred income - non-current liabilities
    oth_non_cur_liab: NUMBER(20,4)
        Other non-current liabilities
    tot_non_
|
cur_liab: NUMBER(20,4)
        Total non-current liabilities
    liab_dep_oth_banks_fin_inst: NUMBER(20,4)
        Deposits from other banks and financial institutions
    derivative_fin_liab: NUMBER(20,4)
        Derivative financial liabilities
    cust_bank_dep: NUMBER(20,4)
        Customer deposits
    agency_bus_liab: NUMBER(20,4)
        Agency business liabilities
    oth_liab: NUMBER(20,4)
        Other liabilities
|
    prem_received_adv: NUMBER(20,4)
        Premiums received in advance
    deposit_received: NUMBER(20,4)
        Margin deposits received
    insured_deposit_invest: NUMBER(20,4)
        Policyholder deposits and investments
    unearned_prem_rsrv: NUMBER(20,4)
        Unearned premium reserves
    out_loss_rsrv: NUMBER(20,4)
        Outstanding claim reserves
    life_insur_rsrv: NUMBER(20,4)
        Life insurance reserves
    lt_health_insur_v: NUMBER(20,4)
        Long-term health insurance reserves
    independent_acct_liab: NUMBER(20,4)
        Independent account liabilities
    incl_pledge_loan: NUMBER(20,4)
        Incl.: pledge borrowings
    claims_payable: NUMBER(20,4)
        Claims payable
    dvd_payable_insured: NUMBER(20,4)
        Policyholder dividends payable
    tot_liab: NUMBER(20,4)
        Total liabilities
    cap_stk: NUMBER(20,4)
        Share capital
    cap_rsrv: NUMBER(20,4)
        Capital reserves
    special_rsrv: NUMBER(20,4)
        Special reserves
    surplus_rsrv: NUMBER(20,4)
        Surplus reserves
    undistributed_profit: NUMBER(20,4)
        Undistributed profit
    less_tsy_stk: NUMBER(20,4)
        Less: treasury stock
    prov_nom_risks: NUMBER(20,4)
        General risk reserves
    cnvd_diff_foreign_curr_stat: NUMBER(20,4)
        Foreign currency statement translation differences
    unconfirmed_invest_loss: NUMBER(20,4)
        Unconfirmed investment losses
    minority_int: NUMBER(20,4)
        Minority interests
    tot_shrhldr_eqy_excl_min_int: NUMBER(20,4)
        Total shareholders' equity (excluding minority interests)
    tot_shrhldr_eqy_incl_min_int: NUMBER(20,4)
        Total shareholders' equity (including minority interests)
    tot_liab_shrhldr_eqy: NUMBER(20,4)
        Total liabilities and shareholders' equity
    comp_type_code: VARCHAR2(2)
        Company type code: 1 non-financial; 2 bank; 3 insurance; 4 securities
    actual_ann_dt: VARCHAR2(8)
        Actual announcement date
    spe_cur_assets_diff: NUMBER(20,4)
        Current assets difference (special statement item)
    tot_cur_assets_diff: NUMBER(20,4)
        Current assets difference (total balancing item)
    spe_non_cur_assets_diff: NUMBER(20,4)
        Non-current assets difference (special statement item)
    tot_non_cur_assets_diff: NUMBER(20,4)
        Non-current assets difference (total balancing item)
    spe_bal_assets_diff: NUMBER(20,4)
        Assets difference (special statement item)
    tot_bal_assets_diff: NUMBER(20,4)
        Assets difference (total balancing item)
    spe_cur_liab_diff: NUMBER(20,4)
        Current liabilities difference (special statement item)
    tot_cur_liab_diff: NUMBER(20,4)
        Current liabilities difference (total balancing item)
    spe_non_cur_liab_diff: NUMBER(20,4)
        Non-current liabilities difference (special statement item)
    tot_non_cur_liab_diff: NUMBER(20,4)
        Non-current liabilities difference (total balancing item)
    spe_bal_liab_diff: NUMBER(20,4)
        Liabilities difference (special statement item)
tot_
|
Noirello/bonsai
|
src/bonsai/tornado/__init__.py
|
Python
|
mit
| 53
| 0
|
from .tor
|
nadoconnection import TornadoLDAPCon
|
nection
|
deni-zen/csvelte
|
docs/_contrib/apigenrole.py
|
Python
|
mit
| 2,618
| 0.002292
|
from docutils import nodes, utils
from docutils.parsers.rst.roles import set_classes
# I can't figure out how the hell to import this so I'm just gonna forget it for now
def apigen_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Link to API Docs page.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
"""
try:
class_name = text.replace('\\', '.')
if text[0:1] == '.':
class_name = class_name[1:]
if class_name == "":
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'Class name must be a valid fully qualified class name; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'class', class_name, options)
return [node], []
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to an
|
ApiGen API docs page.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Item type (class, namespace, etc.)
:param slug: ID of the thing to link to
:param options: Options dictionary passed to role func.
"""
#
try:
base = app.config.apigen_docs_uri
if not base:
raise AttributeError
excep
|
t AttributeError as err:
raise ValueError('apigen_docs_uri configuration value is not set (%s)' % str(err))
# Build API docs link
slash = '/' if base[-1] != '/' else ''
ref = base + slash + type + '-' + slug + '.html'
set_classes(options)
node = nodes.reference(rawtext, type + ' ' + utils.unescape(slug), refuri=ref,
**options)
return node
def setup(app):
"""Install the plugin.
:param app: Sphinx application context.
"""
app.info('Initializing Api Class plugin')
app.add_role('apiclass', apigen_role)
# app.add_role('apins', apigen_namespace_role)
app.add_config_value('apigen_docs_uri', None, 'env')
return
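def _apigen_url_example():
    """A minimal sketch (illustrative; the base URI below is hypothetical) of
    the link construction performed in make_link_node above."""
    base = 'https://docs.example.com/api'
    slash = '/' if base[-1] != '/' else ''
    return base + slash + 'class' + '-' + 'CSVelte.Reader' + '.html'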
|
note35/sinon
|
sinon/lib/util/ErrorHandler.py
|
Python
|
bsd-2-clause
| 2,133
| 0.016409
|
"""
Copyright (c) 2016-2017, Kir Chou
https://github.com/note35/sinon/blob/master/LICENSE
A set of functions for handling known error
"""
def __exception_helper(msg, exception=Exception): #pylint: disable=missing-docstring
raise exception(msg)
def mock_type_error(obj): #pylint: disable=missing-docstring
error_msg = "[{}] is an invalid module/class".format(str(obj))
return __exception_helper(error_msg)
def prop_type_error(prop): #pylint: disable=missing-docstring
error_msg = "[{}] is an invalid property, it should be a string".format(prop)
return __exception_helper(error_msg)
def prop_is_func_error(obj, prop): #pylint: disable=missing-docstring
name = obj.__name__ if hasattr(obj, "__name__") else obj
error_msg = "[{}] is an invalid property, it should be a method in [{}]".format(prop, name)
return __exception_helper(error_msg)
def prop_in_obj_error(obj, prop): #pylint: disable=missing-docstring
error_msg = "[{}] is not exist in [{}]".format(prop, obj)
return __exception_helper(error_msg)
def lock_error(obj): #pylint: disable=missing-docstring
name = obj.__name__ if hasattr(obj, "__name__") else obj
error_msg = "[{}] have
|
already been declared".format(name)
return __exception_helper(error_msg)
def called_with_empty_error(): #pylint: disable=missing-docstring
error_msg = "There is no argument"
return __exception_helper(error_msg)
def is_not_spy_error(obj): #pylint: disable=missing-docstring
e
|
rror_msg = "[{}] is an invalid spy".format(str(obj))
return __exception_helper(error_msg)
def matcher_type_error(prop): #pylint: disable=missing-docstring
error_msg = "[{}] is an invalid property, it should be a type".format(prop)
return __exception_helper(error_msg, exception=TypeError)
def matcher_instance_error(prop): #pylint: disable=missing-docstring
error_msg = "[{}] is an invalid property, it should be an instance".format(prop)
return __exception_helper(error_msg, exception=TypeError)
def wrapper_object_not_found_error():
error_msg = 'Wrapper object cannot be found'
return __exception_helper(error_msg)
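def example_usage(): #pylint: disable=missing-docstring
    # A minimal sketch, assuming it is run standalone (not part of the
    # original module): each helper above formats a message and raises through
    # __exception_helper, so callers simply invoke the helper matching the
    # failure they detected.
    try:
        matcher_type_error("duck")
    except TypeError as err:
        return str(err)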
|
tapomayukh/projects_in_python
|
modeling_new_tactile_skin/polynomial_model_fitting/force_area_output_model/force_area_output_model2.py
|
Python
|
mit
| 3,444
| 0.009582
|
import sys, os
import math, numpy as np
import matplotlib.pyplot as pp
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import hrl_lib.util as ut
import hrl_lib.matplotlib_util as mpu
import scipy.optimize as scp
# Params
area = 0.0
# For one push and pull:
def force_one_push_pull(d):
ft_l = d['ft'][30:]
adc_l = (d['adc_bias'] - np.array(d['adc'][30:])).tolist()
f_prev = 0.
temp_ft_l = []
temp_adc_l = []
increasing = True
count = 1
for i in range(len(ft_l)):
f = ft_l[i]
a = adc_l[i]
if f>f_prev:
if increasing:
temp_ft_l.append(f)
temp_adc_l.append(a)
else:
if len(temp_ft_l) > 50:
count +=1
if count == 2:
break
increasing = True
else:
if increasing:
if len(temp_ft_l) > 50:
f_prev = f
else:
temp_ft_l.append(f)
temp_adc_l.append(a)
increasing = False
f_prev = f
#print temp_ft_l
return np.array(temp_adc_l), np.array(temp_ft_l)
def force_vs_adc(nm, adc, ft, color):
pp.scatter(adc, ft, marker='o', color=color,
label=nm.split('/')[-1].split('.')[0], s=50)
pp.xlabel('ADC bias - ADC')
pp.ylabel('FT_z')
pp.legend()
pp.grid('on')
def residuals(p, y, x, area):
coeff_1, coeff_2, coeff_3, coeff_4 = p
err = y-(coeff_1*area*(x)**3 + coeff_2*area*(x)**2 + coeff_3*area*(x) + coeff_4*area)
return err
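# A small helper sketch mirroring residuals() above and the fits below (an
# addition for clarity, not in the original script): the force model is cubic
# in the ADC reading and scales linearly with sqrt(contact_area).
def model_force(p, x, area):
    coeff_1, coeff_2, coeff_3, coeff_4 = p
    return coeff_1*area*(x)**3 + coeff_2*area*(x)**2 + coeff_3*area*(x) + coeff_4*area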
def fit_the_data(pkl_list, color):
# Training on 4-cm data
d = ut.load_pickle(pkl_list[2])
x,y_meas = force_one_push_pull(d)
area = math.sqrt(d['contact_area'])
coeff_1 = 5.
coeff_2 = 5.
coeff_3 = 5.
coeff_4 = 5.
p0 = np.array([coeff_1, coeff_2, coeff_3, coeff_4])
p_lsq = scp.leastsq(residuals, p0, args=(y_meas, x, area))
print p_lsq[0]
y_fit = p_lsq[0][0]*area*(x)**3 + p_lsq[0][1]*area*(x)**2 + p_lsq[0][2]*area*(x)**1 + p_lsq[0][3]*area
pp.plot(x, y_fit, color=color, linewidth = 3.0)
# Training on 2-cm data with 4-cm result as initial condition
p0 = p_lsq[0]
d = ut.load_pickle(pkl_list[1])
x,y_meas = force_one_push_pull(d)
area = math.sqrt(d['contact_area'])
p_lsq = scp.leastsq(residuals, p0, args=(y_meas, x, area))
print p_lsq[0]
y_fit = p_lsq[0][0]*area*(x)**3 + p_lsq[0][1]*area*(x)**2 + p_lsq[0][2]*area*(x)**1 + p_lsq[0][3]*area
pp.plot(x, y_fit, color=color, linewidth = 3.0)
# Testing on 1-cm
d = ut.load_pickle(pkl_list[0])
x,y_meas = force_one
|
_push_pull(d)
area = math.sqrt(d['contact_area'])
y_fit = p_lsq[0][0]*area*(x)**3 + p_lsq[0][1]*area*(x)**2 + p_lsq[0][2]*area*(x)**1 + p_lsq[0][3]*area
pp.plot(x, y_fit, color=color, linewidth = 3.0)
# Calculate RMS Error
rmse = math.sqrt(float(np.sum((np.array(y_meas) - np.array(y_fit))**2))/float(np.size(y_meas)))
print rmse
if __name__ == '__main__':
mpu.figure()
pkl_list = ['./1_cm.pkl', './2_cm.pkl', './4_cm.pkl']
color_list = ['r', 'g', 'b']
for
|
pkl, c in zip(pkl_list, color_list):
d = ut.load_pickle(pkl)
adc, ft = force_one_push_pull(d)
force_vs_adc(pkl, adc, ft, c)
pp.xlim((0,1000))
pp.ylim((-10,80))
fit_the_data(pkl_list, 'k')
pp.show()
|
1uk/LPTHW
|
ex26.py
|
Python
|
bsd-3-clause
| 1,313
| 0.002285
|
from ex25 import *
print "Let's practice everything."
print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs.'
poem = """
\tThe lovely world
with logic so firmly planted
cannot discern \n the needs of love
n
|
or comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 2 - 5
print "This should be five: %s" % five
def secret_formula(started):
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of: %d" % start_point
print "We'd have %
|
d beans, %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
sentence = "All good things come to those who wait."
words = break_words(sentence)
sorted_words = sort_words(words)
print_first_word(words)
print_last_word(words)
print_first_word(sorted_words)
print_last_word(sorted_words)
sorted_words = sort_sentence(sentence)
print sorted_words
print_first_and_last(sentence)
print_first_and_last_sorted(sentence)
|
lpfann/fri
|
fri/tests/test_parameter_searcher.py
|
Python
|
mit
| 1,221
| 0
|
import numpy as np
import pytest
from sklearn.preprocessing import StandardScaler
from sklearn.utils import check_random_state
import fri
from fri import genLupiData
from fri.parameter_searcher import find_best_model
@pytest.fixture(scope="session")
def randomstate():
return check_random_state(1337)
@pytest.mark.parametrize("n_weak", [0, 2])
@pytest.mark.parametrize("problem", fri.LUPI_MODELS)
def test_baseline_lupi(problem, n_weak, randomstate):
n_samples = 300
template = problem.value[0]().get_initmodel_template
params = problem.value[0]().get_all_parameters()
data = genLupiData(
problem,
n_strel=1,
n_weakrel=n_weak,
n_samples=n_samples,
n_irrel=1,
n_repeated=0,
random_s
|
tate=randomstate,
)
X, X_priv, y = data
X = StandardScaler().fit(X).transform(X)
X_priv = StandardScaler().fi
|
t(X_priv).transform(X_priv)
combined = np.hstack([X, X_priv])
    n_iter = 50  # renamed from `iter` to avoid shadowing the builtin
best_model, best_score = find_best_model(
template,
params,
(combined, y),
randomstate,
        n_iter,
verbose=1,
n_jobs=-2,
lupi_features=X_priv.shape[1],
)
assert best_score > 0.5
|
littlejo/Libreosteo
|
libreosteoweb/migrations/0022_therapeutsettings_invoice_footer.py
|
Python
|
gpl-3.0
| 504
| 0.001984
|
# -*- coding: utf-8 -*-
from __future__ impor
|
t unicode_literals
from django.db import models, migrations
class Migration
|
(migrations.Migration):
dependencies = [
('libreosteoweb', '0021_therapeutsettings_siret'),
]
operations = [
migrations.AddField(
model_name='therapeutsettings',
name='invoice_footer',
field=models.TextField(null=True, verbose_name='Invoice footer', blank=True),
preserve_default=True,
),
]
|
brianhang/tritonscheduler
|
docs/conf.py
|
Python
|
mit
| 11,786
| 0.000085
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# tritonschedule documentation build configuration file, created by
# sphinx-quickstart on Wed Jun 22 11:40:06 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'tritonschedule'
copyright = '2016, tritonschedule'
author = 'tritonschedule'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_wa
|
rnings = False
# If true, `todo` and `todo
|
List` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'tritonschedule v'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'tritonscheduledoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'tritonschedule.tex', 'tritonschedule Documentation',
'tritonschedule', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then topl
|
yutiansut/QUANTAXIS
|
QUANTAXIS/QASetting/cache.py
|
Python
|
mit
| 4,703
| 0.001701
|
# coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2021 yutiansut/QUANTAXIS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sqlite3
import os
CACHE = None
def _init(cache_file):
"""Creates a new Cache object."""
global CACHE
CACHE = Cache(cache_file)
def get_cache(config_file=None):
"""Used to retrieve the global cache obje
|
ct."""
if CACHE is None:
_init(config_file)
return CACHE
class Cache():
"""This object is used to interface with the job cache. It uses a SQLite3
database to store the information.
:param str cache_file: The path to the cache file. This will be created if
it does not already exist.
"""
def __init__(self, cache_file):
self.filename = cache_file
if not os.path.isfile(self.filename):
self._create(self
|
.filename)
self.conn = sqlite3.connect(self.filename)
self.cur = self.conn.cursor()
self.cur.execute("PRAGMA foreign_keys = ON")
def __del__(self):
"""Commit the changes and close the connection."""
if getattr(self, "conn", None):
self.conn.commit()
self.conn.close()
def _create(self, cache_file):
"""Create the tables needed to store the information."""
conn = sqlite3.connect(cache_file)
cur = conn.cursor()
cur.execute("PRAGMA foreign_keys = ON")
cur.execute('''
CREATE TABLE jobs(
hash TEXT NOT NULL UNIQUE PRIMARY KEY, description TEXT NOT NULL,
last_run REAL, next_run REAL, last_run_result INTEGER)''')
cur.execute('''
CREATE TABLE history(
hash TEXT, description TEXT, time REAL, result INTEGER,
FOREIGN KEY(hash) REFERENCES jobs(hash))''')
conn.commit()
conn.close()
    def has(self, job):
        """Checks to see whether or not a job exists in the table.
        :param dict job: The job dictionary
        :returns: True if the job exists, False otherwise
        """
        # cursor.execute() returns the cursor itself, which is always truthy;
        # read the count from the result row instead.
        self.cur.execute('SELECT count(*) FROM jobs WHERE hash=?', (job["id"],))
        return self.cur.fetchone()[0] > 0
def get(self, id):
"""Retrieves the job with the selected ID.
:param str id: The ID of the job
:returns: The dictionary of the job if found, None otherwise
"""
self.cur.execute("SELECT * FROM jobs WHERE hash=?", (id,))
item = self.cur.fetchone()
if item:
return dict(zip(
("id", "description", "last-run", "next-run", "last-run-result"),
item))
return None
    def update(self, job):
        """Update last_run, next_run, and last_run_result for an existing job.
        :param dict job: The job dictionary
        :returns: True
        """
        self.cur.execute('''UPDATE jobs
            SET last_run=?,next_run=?,last_run_result=? WHERE hash=?''', (
            job["last-run"], job["next-run"], job["last-run-result"], job["id"]))
        return True
def add_job(self, job):
"""Adds a new job into the cache.
:param dict job: The job dictionary
:returns: True
"""
self.cur.execute("INSERT INTO jobs VALUES(?,?,?,?,?)", (
job["id"], job["description"], job["last-run"], job["next-run"], job["last-run-result"]))
return True
def add_result(self, job):
"""Adds a job run result to the history table.
:param dict job: The job dictionary
:returns: True
"""
self.cur.execute(
"INSERT INTO history VALUES(?,?,?,?)",
(job["id"], job["description"], job["last-run"], job["last-run-result"]))
return True
|
LLNL/spack
|
var/spack/repos/builtin/packages/py-kiwisolver/package.py
|
Python
|
lgpl-2.1
| 1,376
| 0.00436
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack
|
Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyKiwisolver(PythonPackage):
""
|
"A fast implementation of the Cassowary constraint solver"""
homepage = "https://github.com/nucleic/kiwi"
pypi = "kiwisolver/kiwisolver-1.1.0.tar.gz"
version('1.3.2', sha256='fc4453705b81d03568d5b808ad8f09c77c47534f6ac2e72e733f9ca4714aa75c')
version('1.3.1', sha256='950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248')
version('1.3.0', sha256='14f81644e1f3bf01fbc8b9c990a7889e9bb4400c4d0ff9155aa0bdd19cce24a9')
version('1.2.0', sha256='247800260cd38160c362d211dcaf4ed0f7816afb5efe56544748b21d6ad6d17f')
version('1.1.0', sha256='53eaed412477c836e1b9522c19858a8557d6e595077830146182225613b11a75')
version('1.0.1', sha256='ce3be5d520b4d2c3e5eeb4cd2ef62b9b9ab8ac6b6fedbaa0e39cdb6f50644278')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
depends_on('python@3.5:', type=('build', 'run'), when='@1.2.0:')
depends_on('python@3.6:', type=('build', 'run'), when='@1.3.0:')
depends_on('python@3.7:', type=('build', 'run'), when='@1.3.2:')
depends_on('py-setuptools', type='build')
depends_on('py-cppy@1.1.0:', type='build', when='@1.2.0:')
|
google-research/motion_imitation
|
mpc_controller/torque_stance_leg_controller.py
|
Python
|
apache-2.0
| 7,722
| 0.006346
|
# Lint as: python3
"""A torque based stance controller framework."""
from __future__ import absolute_import
from __future__ import division
#from __future__ import google_type_annotations
from __future__ import print_function
import os
import inspect
import sys  # used by the sys.exit() calls in the import guards below
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
os.sys.path.insert(0, parentdir)
from typing import Any, Sequence, Tuple
import numpy as np
import pybullet as p # pytype: disable=import-error
try:
from mpc_controller import gait_generator as gait_generator_lib
from mpc_controller import leg_controller
except: #pylint: disable=W0702
print("You need to install motion_imitation")
print("Either run python3 setup.py install --user in this repo")
print("or use pip3 install motion_imitation --user")
sys.exit()
try:
import mpc_osqp as convex_mpc # pytype: disable=import-error
except: #pylint: disable=W0702
print("You need to install motion_imitation")
print("Either run python3 setup.py install --user in this repo")
print("or use pip3 install motion_imitation --user")
sys.exit()
_FORCE_DIMENSION = 3
# The QP weights in the convex MPC formulation. See the MIT paper for details:
# https://ieeexplore.ieee.org/document/8594448/
# Intuitively, this is the weights of each state dimension when tracking a
# desired CoM trajectory. The full CoM state is represented by
# (roll_pitch_yaw, position, angular_velocity, velocity, gravity_place_holder).
# _MPC_WEIGHTS = (5, 5, 0.2, 0, 0, 10, 0.5, 0.5, 0.2, 0.2, 0.2, 0.1, 0)
# This worked well for in-place stepping in the real robot.
# _MPC_WEIGHTS = (5, 5, 0.2, 0, 0, 10, 0., 0., 0.2, 1., 1., 0., 0)
_MPC_WEIGHTS = (5, 5, 0.2, 0, 0, 10, 0., 0., 1., 1., 1., 0., 0)
_PLANNING_HORIZON_STEPS = 10
_PLANNING_TIMESTEP = 0.025
class TorqueStanceLegController(leg_controller.LegController):
"""A torque based stance leg controller framework.
Takes in high level parameters like walking speed and turning speed, and
generates necessary the torques for stance legs.
"""
def __init__(
self,
robot: Any,
gait_generator: Any,
state_estimator: Any,
desired_speed: Tuple[float, float] = (0, 0),
desired_twisting_speed: float = 0,
desired_body_height: float = 0.45,
body_mass: float = 220 / 9.8,
body_inertia: Tuple[float, float, float, float, float, float, float,
float, float] = (0.07335, 0, 0, 0, 0.25068, 0, 0, 0,
0.25447),
num_legs: int = 4,
friction_coeffs: Sequence[float] = (0.45, 0.45, 0.45, 0.45),
qp_solver = convex_mpc.QPOASES
):
"""Initializes the class.
Tracks the desired position/velocity of the robot by computing proper joint
torques using MPC module.
Args:
robot: A robot instance.
gait_generator: Used to query the locomotion phase and leg states.
state_estimator: Estimate the robot states (e.g. CoM velocity).
desired_speed: desired CoM speed in x-y plane.
desired_twisting_speed: desired CoM rotating speed in z direction.
desired_body_height: The standing height of the robot.
body_mass: The total mass of the robot.
body_inertia: The inertia matrix in the body principle frame. We assume
the body principle coordinate frame has x-forward and z-up.
num_legs: The number of legs used for force planning.
friction_coeffs: The friction coeffs on the contact surfaces.
"""
self._robot = robot
self._gait_generator = gait_generator
self._state_estimator = state_estimator
self.desired_speed = desired_speed
self.desired_twisting_speed = desired_twisting_speed
self._desired_body_height = desired_body_height
self._body_mass = body_mass
self._num_legs = num_legs
self._friction_coeffs = np.array(friction_coeffs)
body_inertia_list = list(body_inertia)
weights_list = list(_MPC_WEIGHTS)
self._cpp_mpc = convex_mpc.ConvexMpc(
body_mass,
body_inertia_list,
self._num_legs,
_PLANNING_HORIZON_STEPS,
_PLANNING_TIMESTEP,
weights_list,
1e-5,
qp_solver
)
def reset(self, current_time):
del current_time
def update(self, current_time):
del current_time
def get_action(self):
"""Computes the torque for stance legs."""
desired_com_position = np.array((0., 0., self._desired_body_height),
dtype=np.float64)
desired_com_velocity = np.array(
(self.desired_speed[0], self.desired_speed[1], 0.), dtype=np.float64)
desired_com_roll_pitch_yaw = np.array((0., 0., 0.), dtype=np.float64)
desired_com_angular_velocity = np.array(
(0., 0., self.desired_twisting_speed), dtype=np.float64)
foot_contact_state = np.array(
[(leg_state in (gait_generator_lib.LegState.STANCE,
gait_generator_lib.LegState.EARLY_CONTACT))
for leg_state in sel
|
f._gait_generator.desired_leg_state],
dtype=np.int32)
# We use the body yaw aligned world frame for MPC computation.
com_roll_pitch_yaw = np.array(self._robot.GetBaseRollPitchYaw(),
dtype=np.float
|
64)
com_roll_pitch_yaw[2] = 0
#predicted_contact_forces=[0]*self._num_legs*_FORCE_DIMENSION
# print("Com Vel: {}".format(self._state_estimator.com_velocity_body_frame))
# print("Com RPY: {}".format(self._robot.GetBaseRollPitchYawRate()))
# print("Com RPY Rate: {}".format(self._robot.GetBaseRollPitchYawRate()))
p.submitProfileTiming("predicted_contact_forces")
predicted_contact_forces = self._cpp_mpc.compute_contact_forces(
[0], #com_position
np.asarray(self._state_estimator.com_velocity_body_frame,
dtype=np.float64), #com_velocity
np.array(com_roll_pitch_yaw, dtype=np.float64), #com_roll_pitch_yaw
# Angular velocity in the yaw aligned world frame is actually different
# from rpy rate. We use it here as a simple approximation.
np.asarray(self._robot.GetBaseRollPitchYawRate(),
dtype=np.float64), #com_angular_velocity
foot_contact_state, #foot_contact_states
np.array(self._robot.GetFootPositionsInBaseFrame().flatten(),
dtype=np.float64), #foot_positions_base_frame
self._friction_coeffs, #foot_friction_coeffs
desired_com_position, #desired_com_position
desired_com_velocity, #desired_com_velocity
desired_com_roll_pitch_yaw, #desired_com_roll_pitch_yaw
desired_com_angular_velocity #desired_com_angular_velocity
)
p.submitProfileTiming()
# sol = np.array(predicted_contact_forces).reshape((-1, 12))
# x_dim = np.array([0, 3, 6, 9])
# y_dim = x_dim + 1
# z_dim = y_dim + 1
# print("Y_forces: {}".format(sol[:, y_dim]))
contact_forces = {}
for i in range(self._num_legs):
contact_forces[i] = np.array(
predicted_contact_forces[i * _FORCE_DIMENSION:(i + 1) *
_FORCE_DIMENSION])
action = {}
for leg_id, force in contact_forces.items():
# While "Lose Contact" is useful in simulation, in real environment it's
# susceptible to sensor noise. Disabling for now.
# if self._gait_generator.leg_state[
# leg_id] == gait_generator_lib.LegState.LOSE_CONTACT:
# force = (0, 0, 0)
motor_torques = self._robot.MapContactForceToJointTorques(leg_id, force)
for joint_id, torque in motor_torques.items():
action[joint_id] = (0, 0, 0, 0, torque)
return action, contact_forces
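def _force_slicing_example():
  """A minimal sketch (illustrative, not from the original file): how a flat
  QP solution of length num_legs * _FORCE_DIMENSION is sliced into per-leg
  3-vectors, mirroring the loop in get_action above."""
  num_legs = 4
  predicted = list(range(num_legs * _FORCE_DIMENSION))  # stand-in solver output
  return {
      i: predicted[i * _FORCE_DIMENSION:(i + 1) * _FORCE_DIMENSION]
      for i in range(num_legs)
  }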
|
MishtuBanerjee/xaya
|
xaya/xayanet.py
|
Python
|
mit
| 22,529
| 0.011718
|
#!/usr/bin/env python
"""
BeginDate:20071001
CurrentRevisionDate:20071001
Development Version : net 001
Release Version: pre-release
Author(s): Mishtu Banerjee
Contact: mishtu@harmeny.com
Copyright: The Authors
License: Distributed under MIT License
[http://opensource.org/licenses/mit-license.html]
Environment: Programmed and tested under Python 2.3.3 on Windows 2000.
Database Access: Psycopg and Postgres 7.4
Dependencies:
Python Interpreter and base libraries.
Psycopg [website]
Postgres [website] test X
==============================================
XAYAnet_001 A Toolkit for Exploring Networks
==============================================
XAYAcore is a Pythonic implementation of the Graph Abstraction Logic (GAL)
design principles. GAL relationally models information as graphs (an old idea
originating in modern logic) as a "little language" that can embedded
or embellished. It provides a basis for modelling and querying data from
complex systems. Data---> Information--> Knowledge (but wisdom is golden).
GAL was inspired by Robert Ulanowicz's ecological network theory of Ascendency,
Stan Salthe's developmental theory of Hierarchical Systems,
and Charles Peirce's Existential Graphs (a visual method of doing logic).
Hopefully beautiful ideas can lead to pragmatic working code ;-} ...
Xayacore goes from instances to ontologies, and points in between.
One graph to rule them all! And some common sense to bind them ...
Send bugs, fixes, suggestions to mishtu@harmeny.com (Thanks).
USAGE EXAMPLE: (see examples under individual functions)
ALGORITHMs:
AlgorithmName -- Reference
CODE SOURCES:
Guido Code. [webref]
Graphlib Code
Pydot Code
Graphpath Code
Python Cookbook
Djikstra's
Algorithms in Python
REFERENCES:
Ascendency book.
Salthe Hierarchy Book.
Peirce Book (Reasoning and the Logic of Things)
Graph Algorithms Reference
Information Flow Book
Foundations of Logic.
Tools for Thought book
"""
# KNOWN BUGS
# None Known at this point
# UNITTESTS
# Unittests are below fn/obj being tested
# DESIGN CONTRACTS
# Design Contract for each fn/obj is right after docstring
#DOCTESTS
# Used sparingly where they illustrate code above and beyond unittest
import types
#from math import sqrt
import math
import xayacore
import xayastats
import pprint
import random
import copy
import shelve  # needed by shelveNetwork below
import sets
# To allow the code to use built-in set fns in Python 2.4 or the sets module in Python 2.3
try :
set
except NameError:
from sets import Set as set
def xayanetVersion():
return "Current version is xayanet_001 (development branch), updated October 1, 2007"
# ---------- Utility Functions ----------
# These functions handle basic 'data munging' tasks, calculate probabilities,
# allow for sampling and audit of data
# Basic math tricks for sums and series
def readNetwork(filepath = ""):
"""
"""
def writeNetwork(graph = {}, filepath = "defaultFilePath"):
""" Inverse of readGraph. Stores a XAYA format dictGraph as a text file"""
    transList = xayacore.transGraphToList(graph)  # assumption: helper lives in xayacore, alongside readGraph/writeGraph
fileObject = open(filepath, 'a+')
fileObject.writelines(transList)
fileObject.flush()
return fileObject
def shelveNetwork(graph = {},filepath = "defaultFilePath"):
""" Stores via the Graph at the filepath location e.g '/apath/file.xay'
Preconditions: Takes a graph
Postconditions: Returns a shelf object with stored graph
Usage:
>>> agraph = {'Key1': ['Value1', 'Value2'], 'Key2':['Value3',4]}
>>> shelveGraph(agraph,'storeagraph')
{'Key2': ['Value3', 4], 'Key1': ['Value1', 'Value2']}
>>>
Algorithms (see pseudocode below)
#Open a shelfObject
#read graph int
|
o shelfObject; deal with case of emptygraph via get fn
    #return shelfObject
    """
    # Minimal implementation of the pseudocode above (an assumption about the
    # intended behaviour): shelve keys must be strings, so str() covers the
    # integer node ids used elsewhere in this module.
    shelfObject = shelve.open(filepath)
    for key in graph.keys():
        shelfObject[str(key)] = graph.get(key, [])
    return shelfObject
#DATASETS FOR FIND COMPONENTS
a
|
network = {'a': [1,2,3], 'b' : [4,5,6], 1 : ['d', 'e', 'f']}
nothernetwork = {'a': [1,2,3], 'b' : [4,5,6], 1 : ['d', 'e', 'f'], 4: ['e', 'f', 'g'],
'x': [9, 10,11], 11: [12, 13, 14]}
loopnetwork = {'a': [1,2,3], 'b' : [4,5,6], 1 : ['d', 'e', 'f'], 4: ['e', 'f', 'g'],
12: [13], 13: [12]}
#DATASETS FOR MUTUAL INFORMATION
fullyconnet = {'a':['a','b','c'],
'b': ['a', 'b', 'c'],
'c': ['a','b','c']}
noselfcon = {'a':['b','c'],
'b': ['a', 'c'],
'c': ['a','b']}
cycle3 = {'a':['b'],
'b':['c'],
'c':['a']}
cycle4 = {'a':['b'],
'b':['c'],
'c':['d'],
'd':['a']}
tree = {'a': ['b','c'],
'b': ['d', 'e'],
'c': ['f', 'g'],
'd': ['h', 'i'],
'e': ['j', 'k']}
def findComponents (graph = {}):
"""
    Given a directed graph (network), findComponents returns a dictionary of
    isolated "Islands in the Network", or components. Within an island, there
    are paths between nodes. Across islands, there is no direct path.
General Algorithm:
1. Define the keyset (the parent nodes in a set of directed arcs)
2. Assign each parent node and its direct children (arcs) to a new component
3. Iterate through the node-set and combine parent nodes that have paths, including their child arcs
(These are the candidate components)
4. Remove candidate components that are subsets of each other.
5. Create final components from the remaining candidate components
(and do a check on the results of loops -- note this may be a bug in the path-finding algorithm)
6. Return the final list of components
"""
# Define the keyset
keys = graph.keys()
keys2 = copy.deepcopy(keys)
# For each key, assign arcs to a new component.
compgraph = {}
compkey = 0
for key in keys:
compkey = compkey +1
compgraph[compkey] = [key] + graph[key]
# Iterate through keys, and combine pairs of keys with a path between them
# These are the 'candidate' components
for dkey in keys2:
if key <> dkey:
if xayacore.findAllPaths(graph, key, dkey) <> {}:
compgraph[compkey] = [key] + graph[key] + graph[dkey]
keys2.remove(key) # remove the key that has been combined
# Remove candidate components that are simply subsets of each other
compkeys = compgraph.keys()
compkeys2 = copy.deepcopy(compkeys)
for key in compkeys:
for nextkey in compkeys:
if key <> nextkey:
set1 = set(compgraph[key])
set2 = set(compgraph[nextkey])
if set1.difference(set2) == set([]) and set2.difference(set1) <> set([]):
compkeys2.remove(key)
# Create Final components
finalcomp = {}
finalcompkey = 0
for key in compkeys2:
# Check on and remove the output from loops -- same element is repeated so list <> set cardinality
if len(compgraph[key]) == len(set(compgraph[key])):
finalcompkey = finalcompkey + 1
finalcomp[finalcompkey] = compgraph[key]
return finalcomp
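def findComponentsExample():
    """A minimal sketch (an added illustration): on the 'anetwork' test data
    above, 'a' and 1 share a path and merge into one island, while 'b'
    remains its own component."""
    return findComponents(anetwork)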
def countArcs(graph = {}):
# Calculate Number of Arcs in graph
arcounter = 0
for key in graph:
arcounter = arcounter + len(graph[key])
return arcounter
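def countArcsExample():
    """A quick worked check (an added illustration): 'anetwork' above has
    three parent nodes with three children each, so countArcs returns 9."""
    return countArcs(anetwork)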
def calcMI(graph= {}):
'''Given a xayaformat graph -- calculates the mutal information of the
adjacency matrix of connections (i.e. does not assume flow values)
'''
sources = graph
destinations = xayacore.reverseGraph(sources)
# Calculate Number of Arcs in graph
arcounter = 0
for key in sources:
arcounter = arcounter + len(sources[key])
pSourceDest = 1/float(arcounter)
sumMI = 0
for key in sources:
# calc P(Source/Destination)
for arc in sou
|
valvy/miniqubit
|
examples/python/executefile/main.py
|
Python
|
mit
| 714
| 0.009804
|
#!/usr/bin/env python
from PyMiniQbt import getVersion, getName, QasmAsyncInterpreter
import sys
def main(arguments):
if(not len(arguments) == 2):
print("which file?"
|
)
sys.exit(-1)
print("Using", getName(), "version:", getVersion())
with open(arguments[1]) as dat:
src = dat.read()
interpreter = QasmAsyncInterpreter()
|
interpreter.interpret(src)
while interpreter.hasErrors():
print(interpreter.getError())
print("results:")
for register in interpreter.getRegisters():
print(register,":",interpreter.readClassicResult(register).dataToString())
if __name__ == "__main__":
main(sys.argv)
|
erg0dic/hipshare
|
hipshare/lib/xmpp.py
|
Python
|
bsd-2-clause
| 1,321
| 0.003785
|
import logging
from sleekxmpp import ClientXMPP
log = logging.getLogger(__name__)
class HipshareXMPP(ClientXMPP):
def __init__(self, jid, password):
ClientXMPP.__init__(self, jid, password)
self.add_event_handler("session_start", self.session_start, threaded=True)
def session_start(self
|
, event):
self.send_presence()
self.get_roster()
class Client(object):
def __init__(self, config):
self.config = config
self.xmpp = HipshareXMPP(config.strategy['jid'], config.strategy['password'])
for plugin in config.options['plugins']:
self.xmpp.register_plugin(plugin)
def c
|
onnect(self, *args, **kwargs):
return self.xmpp.connect(*args, **kwargs)
def disconnect(self, *args, **kwargs):
return self.xmpp.disconnect(*args, **kwargs)
def get_plugin(self, plugin):
return self.xmpp.plugin[plugin]
def process(self, *args, **kwargs):
return self.xmpp.process(*args, **kwargs)
def line_emitter(self, data):
log.debug("Emitting {} to {}:".format(data, self.config.strategy['rooms']))
for room in self.config.strategy['rooms']:
self.xmpp.send_message(**{
"mto": room,
"mbody": data,
"mtype": 'groupchat'
})
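def _client_example():
    # A hedged sketch: the config object shape below is inferred from the code
    # above, not documented, so treat it as an assumption. Builds a Client
    # without connecting.
    class _Config(object):
        strategy = {"jid": "bot@example.org", "password": "secret",
                    "rooms": ["room@conference.example.org"]}
        options = {"plugins": ["xep_0045"]}
    client = Client(_Config())
    # connect() and process() would be required before line_emitter() can send.
    return client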
|
SUSE/teuthology
|
teuthology/test/test_repo_utils.py
|
Python
|
mit
| 9,075
| 0.000882
|
import logging
import unittest.mock as mock
import os
import os.path
from pytest import raises, mark
import shutil
import subprocess
import tempfile
from teuthology.exceptions import BranchNotFoundError, CommitNotFoundError
from teuthology import repo_utils
from teuthology import parallel
repo_utils.log.setLevel(logging.WARNING)
class TestRepoUtils(object):
@classmethod
def setup_class(cls):
cls.temp_path = tempfile.mkdtemp(prefix='test_repo-')
cls.dest_path = cls.temp_path + '/empty_dest'
cls.src_path = cls.temp_path + '/empty_src'
if 'TEST_ONLINE' in os.environ:
cls.repo_url = 'https://github.com/ceph/empty.git'
cls.commit = '71245d8e454a06a38a00bff09d8f19607c72e8bf'
else:
cls.repo_url = 'file://' + cls.src_path
cls.commit = None
@classmethod
def teardown_class(cls):
shutil.rmtree(cls.temp_path)
def setup_method(self, method):
assert not os.path.exists(self.dest_path)
proc = subprocess.Popen(
('git', 'init', self.src_path),
stdout=subprocess.PIPE,
)
assert proc.wait() == 0
proc = subprocess.Popen(
('git', 'config', 'user.email', 'test@ceph.com'),
cwd=self.src_path,
stdout=subprocess.PIPE,
)
assert proc.wait() == 0
proc = subprocess.Popen(
('git', 'config', 'user.name', 'Test User'),
cwd=self.src_path,
stdout=subprocess.PIPE,
)
assert proc.wait() == 0
proc = subprocess.Popen(
('git', 'commit', '--allow-empty', '--allow-empty-message',
'--no-edit'),
cwd=self.src_path,
stdout=subprocess.PIPE,
)
assert proc.wait() == 0
if not self.commit:
result = subprocess.check_output(
'git rev-parse HEAD',
shell=True,
cwd=self.src
|
_path,
).split()
assert result
self.commit = result[0].decode()
def teardown_method(self, method):
shutil.rmtree(self.dest_path, ignore_errors=True)
def test_clone_repo_existing_branch(self):
repo_utils.clone_repo(self.repo_url, self.dest_path, 'master', self.commit)
assert os.path.exists(self.dest_path)
def test_clone_repo_non_existing_branch(self):
with raises(BranchNotFoundError):
repo_
|
utils.clone_repo(self.repo_url, self.dest_path, 'nobranch', self.commit)
assert not os.path.exists(self.dest_path)
def test_fetch_no_repo(self):
fake_dest_path = self.temp_path + '/not_a_repo'
assert not os.path.exists(fake_dest_path)
with raises(OSError):
repo_utils.fetch(fake_dest_path)
assert not os.path.exists(fake_dest_path)
def test_fetch_noop(self):
repo_utils.clone_repo(self.repo_url, self.dest_path, 'master', self.commit)
repo_utils.fetch(self.dest_path)
assert os.path.exists(self.dest_path)
def test_fetch_branch_no_repo(self):
fake_dest_path = self.temp_path + '/not_a_repo'
assert not os.path.exists(fake_dest_path)
with raises(OSError):
repo_utils.fetch_branch(fake_dest_path, 'master')
assert not os.path.exists(fake_dest_path)
def test_fetch_branch_fake_branch(self):
repo_utils.clone_repo(self.repo_url, self.dest_path, 'master', self.commit)
with raises(BranchNotFoundError):
repo_utils.fetch_branch(self.dest_path, 'nobranch')
@mark.parametrize('git_str',
["fatal: couldn't find remote ref",
"fatal: Couldn't find remote ref"])
@mock.patch('subprocess.Popen')
def test_fetch_branch_different_git_versions(self, mock_popen, git_str):
"""
Newer git versions return a lower case string
See: https://github.com/git/git/commit/0b9c3afdbfb629363
"""
branch_name = 'nobranch'
process_mock = mock.Mock()
attrs = {
'wait.return_value': 1,
'stdout.read.return_value': f"{git_str} {branch_name}".encode(),
}
process_mock.configure_mock(**attrs)
mock_popen.return_value = process_mock
with raises(BranchNotFoundError):
repo_utils.fetch_branch('', branch_name)
def test_enforce_existing_branch(self):
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'master')
assert os.path.exists(self.dest_path)
def test_enforce_existing_commit(self):
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'master', self.commit)
assert os.path.exists(self.dest_path)
def test_enforce_non_existing_branch(self):
with raises(BranchNotFoundError):
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'blah', self.commit)
assert not os.path.exists(self.dest_path)
def test_enforce_non_existing_commit(self):
with raises(CommitNotFoundError):
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'master', 'c69e90807d222c1719c45c8c758bf6fac3d985f1')
assert not os.path.exists(self.dest_path)
def test_enforce_multiple_calls_same_branch(self):
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'master', self.commit)
assert os.path.exists(self.dest_path)
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'master', self.commit)
assert os.path.exists(self.dest_path)
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'master', self.commit)
assert os.path.exists(self.dest_path)
def test_enforce_multiple_calls_different_branches(self):
with raises(BranchNotFoundError):
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'blah1')
assert not os.path.exists(self.dest_path)
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'master', self.commit)
assert os.path.exists(self.dest_path)
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'master', self.commit)
assert os.path.exists(self.dest_path)
with raises(BranchNotFoundError):
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'blah2')
assert not os.path.exists(self.dest_path)
repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
'master', self.commit)
assert os.path.exists(self.dest_path)
def test_enforce_invalid_branch(self):
with raises(ValueError):
repo_utils.enforce_repo_state(self.repo_url, self.dest_path, 'a b', self.commit)
def test_simultaneous_access(self):
count = 5
with parallel.parallel() as p:
for i in range(count):
p.spawn(repo_utils.enforce_repo_state, self.repo_url,
self.dest_path, 'master', self.commit)
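            # (editor's note, hedged): iterating the parallel() context yields
            # each greenlet's return value; enforce_repo_state returns None,
            # so every result should be None.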
for result in p:
assert result is None
def test_simultaneous_access_different_branches(self):
branches = [('master', self.commit), ('master', self.commit), ('nobranch', 'nocommit'),
('nobranch', 'nocommit'), ('master', self.commit), ('nobranch', 'nocommit')]
with parallel.parallel() as p:
for branch, commit in branches:
if branch == 'master':
p.spawn(repo_utils.enforce_repo_state, self.repo_url,
self.dest_path, branch, commit)
else:
dest_path = self.dest_path + '_' + branch
def func():
repo_utils.en
|
hacktyler/hacktyler_crime
|
config/settings.py
|
Python
|
mit
| 4,568
| 0.005035
|
#!/usr/bin/env python
import os
import django
# Base paths
DJANGO_ROOT = os.path.dirname(os.path.realpath(django.__file__))
SITE_ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
# Debugging
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'hacktyler_crime',
'USER': 'hacktyler_crime',
'PASSWORD': 'qw8ndyHprt',
}
}
# Localization
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True
# Media
STATIC_ROOT = os.path.join(SITE_ROOT, 'media')
STATIC_URL = '/site_media/'
ADMIN_MEDIA_PREFIX = '/site_media/admin/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
# Uploads
MEDIA_ROOT = '/tmp/sirens'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '+ei7-2)76sh$$dy^5h4zmkglw#ey1d3f0cj^$r+3zo!wq9j+_*'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.media',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'config.urls'
TEMPLATE_DIRS = (
    os.path.join(SITE_ROOT, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.humanize',
'django.contrib.staticfiles',
'django.contrib.gis',
'compressor',
'activecalls',
'sirens'
)
# Email
# run "python -m smtpd -n -c DebuggingServer localhost:1025" to see outgoing
# messages dumped to the terminal
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
DEFAULT_FROM_EMAIL = 'do.not.reply@crime.hacktyler.com'
# Django-compressor
COMPRESS_ENABLED = False
# Caching
CACHE_MIDDLEWARE_KEY_PREFIX='hacktyler_crime'
CACHE_MIDDLEWARE_SECONDS=90 * 60 # 90 minutes
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
# Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console': {
'level':'DEBUG',
'class':'logging.StreamHandler',
'formatter': 'standard'
},
'default': {
'level':'INFO',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/sites/hacktyler_crime/hacktyler_crime.log',
'maxBytes': 1024*1024*5, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'INFO',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/sites/hacktyler_crime/requests.log',
'maxBytes': 1024*1024*5, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
'backend_handler': {
'level':'DEBUG',
'class':'django.utils.log.NullHandler',
},
},
'loggers': {
'': {
'handlers': ['default', 'console'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler', 'console'],
'level': 'DEBUG',
'propagate': False
},
'django.db': {
'handlers': ['backend_handler'],
'level': 'DEBUG',
'propagate': False
},
'requests.packages.urllib3.connectionpool': {
'handlers': ['console'],
'level': 'ERROR',
            'propagate': False
},
'geopy': {
'handlers': ['console'],
'level': 'INFO',
            'propagate': False
}
}
}
# Pusher
PUSHER_APP_ID = '11732'
PUSHER_KEY = 'd20fddb74c58823cd05d'
PUSHER_SECRET = None # must be in local_settings.py
PUSHER_CHANNEL = 'active-calls-test'
# Mapquest
MAPQUEST_API_KEY = None # must be in local_settings.py
# App
DEFAULT_HOURS_DISPLAYED = 4
# Allow for local (per-user) override
try:
from local_settings import *
except ImportError:
pass
|
cactorium/UCFBrainStuff
|
seniordesign/emokit/gyro_plot.py
|
Python
|
mit
| 1,625
| 0.007385
|
# This is an example of popping a packet from the Emotiv class's packet queue
# and printing the gyro x and y values to the console.
from emokit.emotiv import Emotiv
import platform
if platform.system() == "Windows":
import socket # Needed to prevent gevent crashing on Windows. (surfly / gevent issue #459)
import gevent
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
is_running = True
def evt_main(ring_buf):
headset = Emotiv()
gevent.spawn(headset.setup)
gevent.sleep(0)
pos = 0
try:
while True:
packet = headset.dequeue()
print packet.gyro_x, packet.gyro_y
ring_buf[pos] = packet.gyro_x
if pos % 4 == 0:
yield ring_buf
pos = (pos + 1) % 1024
gevent.sleep(0)
except KeyboardInterrupt:
headset.close()
finally:
is_running = False
headset.close()
x = np.linspace(0, 1023, 1024)
test_buf = np.zeros(1024)
fig, ax = plt.subplots()
line, = ax.plot(x, test_buf)
plt.axis([0, 1024, -100, 100])
def evt_wrapper():
def gen():
return evt_main(test_buf)
return gen
def init():
line.set_ydata(np.ma.array(x, mask=True))
return line,
def animate(rb):
print "Animation!"
print rb
line.set_ydata(rb)
return line,
def counter():
i = 0
while is_running:
yield i
        i = i + 1
ani = animation.FuncAnimation(fig, animate, evt_wrapper(), init_func=init, interval=20, blit=True)
plt.show()
# gevent.Greenlet.spawn(evt_main, test_buf)
while True:
gevent.sleep(0)
|
khalidm/VarPub
|
src/mytest.py
|
Python
|
gpl-2.0
| 181
| 0.005525
|
import pybedtools
a = pybedtools.example_bedtool('a.bed')
b = pybedtools.example_bedtool('b.bed')
print "cat a.bed\n" + str(a)
print "cat b.bed\n" + str(b)
print a.intersect(b)
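# (editor's note, hedged): intersect() shells out to `bedtools intersect`, so
# the printed intervals are the portions of a.bed that overlap features in b.bed.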
|
emersonmx/ippl
|
ippl/genetic_algorithm/chromosome.py
|
Python
|
gpl-3.0
| 852
| 0.001174
|
#
# Copyright (C) 2013-2014 Emerson Max de Medeiros Silva
#
# This file is part of ippl.
#
# ippl is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ippl is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ippl. If not, see <http://www.gnu.org/licenses/>.
#
class Chromosome(object):
def __init__(self):
super(Chromosome, self).__init__()
self.genes = []
self.fitness = 0.0
|
jazztpt/edx-platform
|
lms/djangoapps/instructor/tests/test_services.py
|
Python
|
agpl-3.0
| 3,469
| 0
|
"""
Tests for the InstructorService
"""
import json
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from courseware.models import StudentModule
from instructor.services import InstructorService
from instructor.tests.test_tools import msk_from_problem_urlname
from nose.plugins.attrib import attr
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
@attr('shard_1')
class InstructorServiceTests(ModuleStoreTestCase):
"""
Tests for the InstructorService
"""
def setUp(self):
super(InstructorServiceTests, self).setUp()
self.course = CourseFactory.create()
self.student = UserFactory()
CourseEnrollment.enroll(self.student, self.course.id)
self.problem_location = msk_from_problem_urlname(
self.course.id,
'robot-some-problem-urlname'
)
self.other_problem_location = msk_from_problem_urlname(
self.course.id,
'robot-some-other_problem-urlname'
)
self.problem_urlname = unicode(self.problem_location)
self.other_problem_urlname = unicode(self.other_problem_location)
self.service = InstructorService()
self.module_to_reset = StudentModule.objects.create(
student=self.student,
course_id=self.course.id,
module_state_key=self.problem_location,
state=json.dumps({'attempts': 2}),
)
def test_reset_student_attempts_delete(self):
"""
Test delete student state.
"""
# make sure the attempt is there
self.assertEqual(
StudentModule.objects.filter(
student=self.module_to_reset.student,
course_id=self.course.id,
module_state_key=self.module_to_reset.module_state_key,
).count(),
1
)
self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
self.problem_urlname
)
# make sure the module has been deleted
self.assertEqual(
StudentModule.objects.filter(
student=self.module_to_reset.student,
course_id=self.course.id,
module_state_key=self.module_to_reset.module_state_key,
).count(),
0
)
def test_reset_bad_content_id(self):
"""
Negative test of trying to reset attempts with bad content_id
"""
result = self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
'foo/bar/baz'
)
self.assertIsNone(result)
def test_reset_bad_user(self):
"""
Negative test of trying to reset attempts with bad user identifier
"""
result = self.service.delete_student_attempt(
'bad_student',
unicode(self.course.id),
'foo/bar/baz'
)
self.assertIsNone(result)
def test_reset_non_existing_attempt(self):
"""
        Negative test of trying to reset an attempt that does not exist
"""
result = self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
self.other_problem_urlname
)
self.assertIsNone(result)
|
Yubico/yubioath-desktop-dpkg
|
yubioath/core/legacy_otp.py
|
Python
|
gpl-3.0
| 7,391
| 0
|
# Copyright (c) 2014 Yubico AB
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Additional permission under GNU GPL version 3 section 7
#
# If you modify this program, or any covered work, by linking or
# combining it with the OpenSSL project's OpenSSL library (or a
# modified version of that library), containing parts covered by the
# terms of the OpenSSL or SSLeay licenses, We grant you additional
# permission to convey the resulting work. Corresponding Source for a
# non-source form of such a combination shall include the source code
# for the parts of OpenSSL used as well as that of the covered work.
from __future__ import print_function
from .utils import time_challenge, parse_full, format_code
from .standard import TYPE_TOTP
from .exc import InvalidSlotError, NeedsTouchError
from yubioath.yubicommon.ctypes import CLibrary
from hashlib import sha1
from ctypes import (Structure, POINTER, c_int, c_uint8, c_uint, c_char_p,
c_bool, sizeof, create_string_buffer, cast, addressof)
import weakref
SLOTS = [
-1,
0x30,
0x38
]
YK_KEY = type('YK_KEY', (Structure,), {})
# Programming
SLOT_CONFIG = 1
SLOT_CONFIG2 = 3
CONFIG1_VALID = 1
CONFIG2_VALID = 2
YKP_CONFIG = type('YKP_CONFIG', (Structure,), {})
YK_CONFIG = type('YK_CONFIG', (Structure,), {})
YK_STATUS = type('YK_STATUS', (Structure,), {})
class YkPers(CLibrary):
_yk_errno_location = [], POINTER(c_int)
yk_init = [], bool
yk_release = [], bool
ykpers_check_version = [c_char_p], c_char_p
yk_open_first_key = [], POINTER(YK_KEY)
yk_close_key = [POINTER(YK_KEY)], bool
yk_challenge_response = [POINTER(YK_KEY), c_uint8, c_int, c_uint, c_char_p,
c_uint, c_char_p], bool
ykds_alloc = [], POINTER(YK_STATUS)
ykds_free = [POINTER(YK_STATUS)], None
ykds_touch_level = [POINTER(YK_STATUS)], c_int
yk_get_status = [POINTER(YK_KEY), POINTER(YK_STATUS)], c_int
ykp_alloc = [], POINTER(YKP_CONFIG)
ykp_free_config = [POINTER(YKP_CONFIG)], bool
ykp_configure_version = [POINTER(YKP_CONFIG), POINTER(YK_STATUS)], None
ykp_HMAC_key_from_raw = [POINTER(YKP_CONFIG), c_char_p], bool
ykp_set_tktflag_CHAL_RESP = [POINTER(YKP_CONFIG), c_bool], bool
ykp_set_cfgflag_CHAL_HMAC = [POINTER(YKP_CONFIG), c_bool], bool
ykp_set_cfgflag_HMAC_LT64 = [POINTER(YKP_CONFIG), c_bool], bool
ykp_set_extflag_SERIAL_API_VISIBLE = [POINTER(YKP_CONFIG), c_bool], bool
ykp_set_extflag_ALLOW_UPDATE = [POINTER(YKP_CONFIG), c_bool], bool
ykp_set_cfgflag_CHAL_BTN_TRIG = [POINTER(YKP_CONFIG), c_bool], bool
ykp_core_config = [POINTER(YKP_CONFIG)], POINTER(YK_CONFIG)
yk_write_command = [POINTER(YK_KEY), POINTER(YK_CONFIG), c_uint8, c_char_p
], bool
def yk_get_errno(self):
return self._yk_errno_location().contents.value
ykpers = YkPers('ykpers-1', '1')
YK_ETIMEOUT = 0x04
YK_EWOULDBLOCK = 0x0b
if not ykpers.yk_init():
raise Exception("Unable to initialize ykpers")
ykpers_version = ykpers.ykpers_check_version(None).decode('ascii')
class LegacyOathOtp(object):
"""
OTP interface to a legacy OATH-enabled YubiKey.
"""
def __init__(self, device):
self._device = device
def slot_status(self):
st = ykpers.ykds_alloc()
ykpers.yk_get_status(self._device, st)
tl = ykpers.ykds_touch_level(st)
ykpers.ykds_free(st)
return (
bool(tl & CONFIG1_VALID == CONFIG1_VALID),
bool(tl & CONFIG2_VALID == CONFIG2_VALID)
)
def calculate(self, slot, digits=6, timestamp=None, mayblock=0):
challenge = time_challenge(timestamp)
resp = create_string_buffer(64)
status = ykpers.yk_challenge_response(
self._device, SLOTS[slot], mayblock, len(challenge), challenge,
sizeof(resp), resp)
if not status:
errno = ykpers.yk_get_errno()
if errno == YK_EWOULDBLOCK:
raise NeedsTouchError()
raise InvalidSlotError()
return format_code(parse_full(resp.raw[:20]), digits)
def put(self, slot, key, require_touch=False):
if len(key) > 64: # Keys longer than 64 bytes are hashed, as per HMAC.
key = sha1(key).digest()
if len(key) > 20:
raise ValueError('YubiKey slots cannot handle keys over 20 bytes')
slot = SLOT_CONFIG if slot == 1 else SLOT_CONFIG2
key += b'\x00' * (20 - len(key)) # Keys must be padded to 20 bytes.
st = ykpers.ykds_alloc()
ykpers.yk_get_status(self._device, st)
cfg = ykpers.ykp_alloc()
ykpers.ykp_configure_version(cfg, st)
ykpers.ykds_free(st)
ykpers.ykp_set_tktflag_CHAL_RESP(cfg, True)
ykpers.ykp_set_cfgflag_CHAL_HMAC(cfg, True)
ykpers.ykp_set_cfgflag_HMAC_LT64(cfg, True)
ykpers.ykp_set_extflag_SERIAL_API_VISIBLE(cfg, True)
ykpers.ykp_set_extflag_ALLOW_UPDATE(cfg, True)
if require_touch:
ykpers.ykp_set_cfgflag_CHAL_BTN_TRIG(cfg, True)
if ykpers.ykp_HMAC_key_from_raw(cfg, key):
raise ValueError("Error setting the key")
ycfg = ykpers.ykp_core_config(cfg)
try:
if not ykpers.yk_write_command(self._device, ycfg, slot, None):
raise ValueError("Error writing configuration to key")
finally:
ykpers.ykp_free_config(cfg)
def delete(self, slot):
slot = SLOT_CONFIG if slot == 1 else SLOT_CONFIG2
if not ykpers.yk_write_command(self._device, None, slot, None):
raise ValueError("Error writing configuration to key")
class LegacyCredential(object):
def __init__(self, legacy, slot, digits=6):
self.name = 'YubiKey slot %d' % slot
self.oath_type = TYPE_TOTP
self.touch = None # Touch is unknown
self._legacy = legacy
self._slot = slot
self._digits = digits
def calculate(self, timestamp=None):
try:
return self._legacy.calculate(self._slot, self._digits, timestamp,
1 if self.touch else 0)
except NeedsTouchError:
self.touch = True
raise
else:
if self.touch is None:
self.touch = False
def delete(self):
self._legacy.delete(self._slot)
def __repr__(self):
return self.name
# Keep track of YK_KEY references.
_refs = []
def open_otp():
key = ykpers.yk_open_first_key()
if key:
key_p = cast(addressof(key.contents), POINTER(YK_KEY))
def cb(ref):
_refs.remove(ref)
ykpers.yk_close_key(key_p)
_refs.append(weakref.ref(key, cb))
return key
return None
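# (editor's sketch, hedged; not part of the original module): a typical read of
# slot 2 as a TOTP credential. Guarded because open_otp() returns None when no
# YubiKey is attached, and calculate() may raise NeedsTouchError for slots that
# require touch.
#
#   dev = open_otp()
#   if dev:
#       cred = LegacyCredential(LegacyOathOtp(dev), slot=2)
#       print(cred.calculate())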
|
arq5x/poretools
|
poretools/metadata.py
|
Python
|
mit
| 641
| 0.031201
|
import Fast5File
def run(parser, args):
if args.read:
for i, fast5 in enumerate(Fast5File.Fast5FileSet(args.files)):
for metadata_dict in fast5.read_metadata:
if i == 0:
header = metadata_dict.keys()
print "\t".join(["filename"] + header)
print "\t".join([fast5.filename] + [str( metadata_dict[k] ) for k in header])
else:
print "asic_id\tasic_temp\theatsink_temp"
for fast5 in Fast5File.Fast5FileSet(args.files):
asic_temp = fast5.get_asic_temp()
asic_id = fast5.get_asic_id()
			heatsink_temp = fast5.get_heatsink_temp()
print "%s\t%s\t%s" % (asic_id, asic_temp, heatsink_temp)
fast5.close()
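		# (editor's note, hedged): without --read this prints a three-column
		# TSV -- "asic_id", "asic_temp", "heatsink_temp" -- with one row of
		# values per input file.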
|
elsehow/moneybot
|
moneybot/market/scrape.py
|
Python
|
bsd-3-clause
| 5,485
| 0.000547
|
# -*- coding: utf-8 -*-
import time
import requests
from datetime import datetime
from logging import getLogger
from typing import Optional
from typing import Dict
from typing import Iterable
from funcy import compose
from funcy import partial
from pandas import DataFrame
from pandas import to_datetime
from pandas import Series
from pyloniex import PoloniexPublicAPI
from moneybot.clients import Postgres
from moneybot.clients import Poloniex
YEAR_IN_SECS = 60 * 60 * 24 * 365
logger = getLogger(__name__)
def format_time(ts: datetime) -> str:
return ts.strftime('%Y-%m-%d %H:%M:%S')
def historical(ticker: str) -> Dict:
url = f'https://graphs.coinmarketcap.com/currencies/{ticker}'
return requests.get(url).json()
def market_cap(hist_ticker: Dict) -> Series:
r = {}
    ts = None
    for key, vals in hist_ticker.items():
if ts is None:
ts = [to_datetime(t[0] * 1000000) for t in vals]
r[key] = [t[1] for t in vals]
return DataFrame(r, index=ts)
coin_history = compose(market_cap, historical)
def marshall(hist_df):
btc_to_usd = hist_df['price_usd'] / hist_df['price_btc']
# volume in BTC
# TODO is this correct? or is `'volume'` the quote volume?
hist_df['volume'] = hist_df['volume_usd'] / btc_to_usd
hist_df = hist_df.drop([
'market_cap_by_available_supply',
'volume_usd'
], axis=1)
hist_df['weighted_average'] = hist_df['price_usd']
hist_df['time'] = hist_df.index
hist_df['currency_pair'] = hist_df.apply(lambda x: 'USD_BTC', axis=1)
def nothing_burger():
return hist_df.apply(lambda x: None, axis=1)
hist_df['open'] = nothing_burger()
hist_df['high'] = nothing_burger()
hist_df['low'] = nothing_burger()
hist_df['close'] = nothing_burger()
hist_df['quote_volume'] = nothing_burger()
return hist_df
def historical_prices_of(
polo: PoloniexPublicAPI,
btc_price_history: Series,
pair: str,
period: int = 900,
start: Optional[float] = None,
end: Optional[float] = None,
) -> Iterable[Series]:
'''
Returns a series of time-indexed prices.
`pair` is of the form e.g. 'BTC_ETH',
`period` is an integer number of seconds,
either 300, 900, 1800, 7200, 14400, or 86400.
We do some data marshalling in this method as well,
to turn API results into stuff amenable for our Postgres DB.
'''
def contemporary_usd_price(row: Series) -> float:
contemporary_btc_price = btc_price_history['price_usd'].asof(row.name)
return row['weightedAverage'] * contemporary_btc_price
# Scraping
now = time.time()
start = start or now - YEAR_IN_SECS
end = end or now
ex_trades = polo.return_chart_data(
currency_pair=pair,
period=period,
start=start,
end=end,
)
# Data marshalling
ts_df = DataFrame(ex_trades, dtype=float)
ts_df['time'] = [datetime.fromtimestamp(t) for t in ts_df['date']]
ts_df.index = ts_df['time']
ts_df['price_usd'] = ts_df.apply(contemporary_usd_price, axis=1)
ts_df['currency_pair'] = ts_df.apply(lambda x: pair, axis=1)
ts_df = ts_df.rename(index=str, columns={
'quoteVolume': 'quote_volume',
'weightedAverage': 'weighted_average',
})
for _, row in ts_df.iterrows():
# chart = scraped_chart(pair, row)
# for some reason, when there's no chart data to report,
# the API will give us some reading with all 0s.
if row['volume'] == 0 and row['weighted_average'] == 0:
# we will just ignore these
pass
else:
yield row
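# (editor's sketch, hedged): a typical call; the names are illustrative, and
# `polo` / `btc_price_hist` are built the same way scrape_since_last_reading
# does below.
#
#   polo = Poloniex.get_public()
#   btc_price_hist = coin_history('bitcoin')
#   for row in historical_prices_of(polo, btc_price_hist, 'BTC_ETH',
#                                   period=900, start=time.time() - 86400):
#       print(row['weighted_average'])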
def insert(cursor, row):
return cursor.execute("""
INSERT INTO scraped_chart (time, currency_pair, high, low, price_usd, quote_volume, volume, weighted_average)
VALUES (%(time)s, %(currency_pair)s, %(high)s, %(low)s, %(price_usd)s, %(quote_volume)s, %(volume)s, %(weighted_average)s);""",
row.to_dict())
def scrape_since_last_reading():
# postgres client
client = Postgres.get_client()
cursor = client.cursor()
inserter = partial(insert, cursor)
# get the last time we fetched some data,
# looking at the most recent result in the db
query = ' '.join([
'select time from scraped_chart',
'order by time desc',
'limit 1',
])
cursor.execute(query)
latest_fetch_time = cursor.fetchone()[0]
latest_fetch_unix = time.mktime(latest_fetch_time.timetuple())
# now get USD_BTC history
btc_price_hist = coin_history('bitcoin')
# and write that history to DB,
btc_rows = marshall(btc_price_hist)
# NOTE since latest fetch time?
# recent_btc = btc_rows[btc_rows['time'] > latest_fetch_time]
# [inserter(row) for _, row in recent_btc.iterrows()]
[inserter(row) for _, row in btc_rows.iterrows()]
client.commit()
logger.debug('Scraped USD_BTC')
# now, a poloniex client
polo = Poloniex.get_public()
# and a method for grabbing historical prices
grab_historical_prices = partial(historical_prices_of, polo, btc_price_hist)
# for each market,
for market in polo.return_ticker():
# fetch all the chart data since last fetch
generator = grab_historical_prices(
market,
start=latest_fetch_unix,
end=time.time(),
)
list(map(inserter, generator))
client.commit()
logger.debug(f'Scraped {market}')
cursor.close()
|
moeskerv/ABElectronics_Python_Libraries
|
ADCDACPi/demo-dacsinewave.py
|
Python
|
gpl-2.0
| 3,862
| 0.000777
|
#!/usr/bin/python
from ABE_ADCDACPi import ADCDACPi
import time
import math
"""
================================================
ABElectronics ADCDAC Pi 2-Channel ADC, 2-Channel DAC | DAC sine wave generator demo
Version 1.0 Created 17/05/2014
Version 1.1 16/11/2014 updated code and functions to PEP8 format
run with: python demo-dacsinewave.py
================================================
# this demo uses the set_dac_raw method to generate a sine wave from a
# predefined set of values
"""
adcdac = ADCDACPi(1) # create an instance of the ADCDAC Pi with a DAC gain set to 1
DACLookup_FullSine_12Bit = \
[2048, 2073, 2098, 2123, 2148, 2174, 2199, 2224,
2249, 2274, 2299, 2324, 2349, 2373, 2398, 2423,
2448, 2472, 2497, 2521, 2546, 2570, 2594, 2618,
2643, 2667, 2690, 2714, 2738, 2762, 2785, 2808,
2832, 2855, 2878, 2901, 2924, 2946, 2969, 2991,
3013, 3036, 3057, 3079, 3101, 3122, 3144, 3165,
3186, 3207, 3227, 3248, 3268, 3288, 3308, 3328,
3347, 3367, 3386, 3405, 3423, 3442, 3460, 3478,
3496, 3514, 3531, 3548, 3565, 3582, 3599, 3615,
3631, 3647, 3663, 3678, 3693, 3708, 3722, 3737,
3751, 3765, 3778, 3792, 3805, 3817, 3830, 3842,
3854, 3866, 3877, 3888, 3899, 3910, 3920, 3930,
3940, 3950, 3959, 3968, 3976, 3985, 3993, 4000,
4008, 4015, 4022, 4028, 4035, 4041, 4046, 4052,
4057, 4061, 4066, 4070, 4074, 4077, 4081, 4084,
     4086, 4088, 4090, 4092, 4094, 4095, 4095, 4095,
4095, 4095, 4095, 4095, 4094, 4092, 4090, 4088,
4086, 4084, 4081, 4077, 4074, 4070, 4066, 4061,
     4057, 4052, 4046, 4041, 4035, 4028, 4022, 4015,
4008, 4000, 3993, 3985, 3976, 3968, 3959, 3950,
3940, 3930, 3920, 3910, 3899, 3888, 3877, 3866,
3854, 3842, 3830, 3817, 3805, 3792, 3778, 3765,
3751, 3737, 3722, 3708, 3693, 3678, 3663, 3647,
3631, 3615, 3599, 3582, 3565, 3548, 3531, 3514,
3496, 3478, 3460, 3442, 3423, 3405, 3386, 3367,
3347, 3328, 3308, 3288, 3268, 3248, 3227, 3207,
3186, 3165, 3144, 3122, 3101, 3079, 3057, 3036,
3013, 2991, 2969, 2946, 2924, 2901, 2878, 2855,
2832, 2808, 2785, 2762, 2738, 2714, 2690, 2667,
2643, 2618, 2594, 2570, 2546, 2521, 2497, 2472,
2448, 2423, 2398, 2373, 2349, 2324, 2299, 2274,
2249, 2224, 2199, 2174, 2148, 2123, 2098, 2073,
2048, 2023, 1998, 1973, 1948, 1922, 1897, 1872,
1847, 1822, 1797, 1772, 1747, 1723, 1698, 1673,
1648, 1624, 1599, 1575, 1550, 1526, 1502, 1478,
1453, 1429, 1406, 1382, 1358, 1334, 1311, 1288,
1264, 1241, 1218, 1195, 1172, 1150, 1127, 1105,
1083, 1060, 1039, 1017, 995, 974, 952, 931,
910, 889, 869, 848, 828, 808, 788, 768,
749, 729, 710, 691, 673, 654, 636, 618,
600, 582, 565, 548, 531, 514, 497, 481,
465, 449, 433, 418, 403, 388, 374, 359,
345, 331, 318, 304, 291, 279, 266, 254,
242, 230, 219, 208, 197, 186, 176, 166,
156, 146, 137, 128, 120, 111, 103, 96,
88, 81, 74, 68, 61, 55, 50, 44,
39, 35, 30, 26, 22, 19, 15, 12,
10, 8, 6, 4, 2, 1, 1, 0,
0, 0, 1, 1, 2, 4, 6, 8,
10, 12, 15, 19, 22, 26, 30, 35,
39, 44, 50, 55, 61, 68, 74, 81,
88, 96, 103, 111, 120, 128, 137, 146,
156, 166, 176, 186, 197, 208, 219, 230,
242, 254, 266, 279, 291, 304, 318, 331,
345, 359, 374, 388, 403, 418, 433, 449,
465, 481, 497, 514, 531, 548, 565, 582,
600, 618, 636, 654, 673, 691, 710, 729,
749, 768, 788, 808, 828, 848, 869, 889,
910, 931, 952, 974, 995, 1017, 1039, 1060,
1083, 1105, 1127, 1150, 1172, 1195, 1218, 1241,
1264, 1288, 1311, 1334, 1358, 1382, 1406, 1429,
1453, 1478, 1502, 1526, 1550, 1575, 1599, 1624,
1648, 1673, 1698, 1723, 1747, 1772, 1797, 1822,
1847, 1872, 1897, 1922, 1948, 1973, 1998, 2023]
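# (editor's sketch, hedged; not part of the original demo): an equivalent table
# can be generated at runtime with the math module imported above, assuming the
# 512-entry, 12-bit (0..4095) layout of the table. This helper is purely
# illustrative and is never called by the demo loop below.
def build_sine_table(samples=512, full_scale=4095):
    mid = full_scale / 2.0  # centre the wave at DAC mid-scale
    return [int(round(mid + mid * math.sin(2 * math.pi * i / samples)))
            for i in range(samples)]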
while True:
for val in DACLookup_FullSine_12Bit:
adcdac.set_dac_raw(1, val)
|
ActiveState/code
|
recipes/Python/577484_PRNG_Test/recipe-577484.py
|
Python
|
mit
| 1,669
| 0.009587
|
# PRNG (Pseudo-Random Number Generator) Test
# PRNG info:
# http://en.wikipedia.org/wiki/Pseudorandom_number_generator
# FB - 201012046
# Compares output distribution of any given PRNG
# w/ a hypothetical True-Random Number Generator (TRNG)
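# (editor's note, hedged): the expected counts come from the binomial
# coefficient: of the 2**n equally likely n-bit words, exactly c(n, k) contain
# k zero bits, so a TRNG drawing 2**n samples should land in each k-bucket
# about c(n, k) times. The table below prints that next to the PRNG's counts.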
import math
import time
global x
x = time.clock() # seed for the PRNG
# PRNG to test
def prng():
global x
x = math.fmod((x + math.pi) ** 2.0, 1.0)
return x
# combination by recursive method
def c(n, k):
if k == 0: return 1
    if n == 0: return 0
return c(n - 1, k - 1) + c(n - 1, k)
### combination by multiplicative method
##def c_(n, k):
## mul = 1.0
## for i in range(k):
## mul = mul * (n - k + i + 1) / (i + 1)
## return mul
# MAIN
n = 20 # number of bits in each trial
print 'Test in progress...'
print
cnk = [] # array to hold bit counts
for k in range(n + 1):
cnk.append(0)
# generate 2**n n-bit pseudo-random numbers
for j in range(2 ** n):
# generate n-bit pseudo-random number and count the 0's in it
# num = ''
ctr = 0
for i in range(n):
b = int(round(prng())) # generate 1 pseudo-random bit
# num += str(b)
if b == 0: ctr += 1
# print num
# increase bit count in the array
cnk[ctr] += 1
print 'Number of bits in each pseudo-random number (n) =', n
print
print 'Comparison of "0" count distributions:'
print
print ' k', ' c(n,k)', ' actual', '%dif'
difSum = 0
for k in range(n + 1):
cnk_ = c(n, k)
dif = abs(cnk_ - cnk[k])
print '%2d %10d %10d %4d' % (k, cnk_, cnk[k], 100 * dif / cnk_)
difSum += dif
print
print 'Difference percentage between the distributions:'
print 100 * difSum / (2 ** n)
|
PyCQA/pylint
|
tests/functional/u/unused/unused_variable_py38.py
|
Python
|
gpl-2.0
| 1,000
| 0.006
|
"""Tests for the unused-variable message in assignment expressions"""
def typed_assignment_in_function_default( # [unused-variable]
param: str = (typed_default := "walrus"), # [unused-variable]
) -> None:
"""An unused annotated assignment expression in a default parameter should emit"""
return param
def assignment_in_function_default( # [unused-variable]
param=(default := "walrus"), # [unused-variable]
) -> None:
"""An unused assignment expression in a default parameter should emit"""
return param
def assignment_used_in_function_scope(  # [unused-variable]
param=(function_default := "walrus"),
) -> None:
"""An used assignment expression in a default parameter should not emit"""
print(function_default)
return param
def assignment_used_in_global_scope( # [unused-variable]
param=(global_default := "walrus"),
) -> None:
"""An used assignment expression in a default parameter should not emit"""
return param
print(global_default)
|
scottbarstow/iris-python
|
iris_sdk/models/maps/subscriptions.py
|
Python
|
mit
| 131
| 0.015267
|
#!/usr/bin/env python
from iris_sdk.models.maps.base_map import BaseMap
class SubscriptionsMap(BaseMap):
subscription = None
|
hydraplatform/hydra-base
|
hydra_base/lib/HydraTypes/Types.py
|
Python
|
lgpl-3.0
| 9,393
| 0.005642
|
"""
Types that can be represented by a dataset are defined here
Each Hydra type must subclass DataType and implement the
required abstract properties and methods. The form of each
class' constructor is not part of the interface and is left
to the implementer.
"""
import json
import math
import six
import numpy as np
import pandas as pd
from abc import abstractmethod, abstractproperty
from datetime import datetime
import collections
from hydra_base import config
from .Encodings import ScalarJSON, ArrayJSON, DescriptorJSON, DataframeJSON, TimeseriesJSON
from hydra_base.exceptions import HydraError
import logging
log = logging.getLogger(__name__)
class DataType(object):
""" The DataType class serves as an abstract base class for data types"""
def __init_subclass__(cls):
tag = cls.tag
name = cls.name
# Register class with hydra
from .Registry import typemap
if tag in typemap:
raise ValueError('Type with tag "{}" already registered.'.format(tag))
else:
typemap[tag] = cls
log.info('Registering data type "{}".'.format(tag))
@abstractproperty
def skeleton(self):
""" Reserved for future use """
pass
@abstractproperty
def tag(self):
""" A str which uniquely identifies this type and serves as its key in
the Registry.typemap dict
"""
pass
@abstractproperty
def value(self):
""" This type's representation of the value contained within
a dataset of the same type
"""
pass
@abstractmethod
def validate(self):
""" Raises (any) exception if the dataset's value argument
cannot be correctly represented as this type
"""
pass
@abstractmethod
def json(self):
""" Reserved for future use """
pass
@abstractmethod
def fromDataset(cls, value, metadata=None):
""" Factory method which performs any required transformations
on a dataset argument, invokes the type's ctor, and returns
the resulting instance
"""
pass
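# (editor's note, hedged): __init_subclass__ above auto-registers every
# concrete subclass in Registry.typemap under its `tag`, so adding a new Hydra
# type is just a matter of subclassing DataType and filling in the abstract
# members -- Scalar below is the simplest live example of the pattern.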
class Scalar(DataType):
tag = "SCALAR"
name = "Scalar"
skeleton = "[%f]"
json = ScalarJSON()
def __init__(self, value):
super(Scalar, self).__init__()
self.value = value
self.validate()
@classmethod
def fromDataset(cls, value, metadata=None):
return cls(value)
def validate(self):
f = float(self.value)
assert not math.isnan(f) # Excludes NaN etc
    def get_value(self):
return str(self._value)
def set_value(self, val):
self._value = val
value = property(get_value, set_value)
class Array(DataType):
tag = "ARRAY"
name = "Array"
skeleton = "[%f, ...]"
json = ArrayJSON()
def __init__(self, encstr):
super(Array, self).__init__()
self.value = encstr
        self.validate()
@classmethod
def fromDataset(cls, value, metadata=None):
return cls(value)
def validate(self):
j = json.loads(self.value)
assert len(j) > 0 # Sized
assert iter(j) is not None # Iterable
assert j.__getitem__ # Container
assert not isinstance(j, six.string_types) # Exclude strs
def get_value(self):
return self._value
def set_value(self, val):
self._value = val
value = property(get_value, set_value)
class Descriptor(DataType):
tag = "DESCRIPTOR"
name = "Descriptor"
skeleton = "%s"
json = DescriptorJSON()
def __init__(self, data):
super(Descriptor, self).__init__()
self.value = data
self.validate()
@classmethod
def fromDataset(cls, value, metadata=None):
if metadata and metadata.get('data_type') == 'hashtable':
try:
df = pd.read_json(six.text_type(value))
data = df.transpose().to_json()
except Exception:
noindexdata = json.loads(six.text_type(value))
indexeddata = {0:noindexdata}
data = json.dumps(indexeddata)
return cls(data)
else:
return cls(six.text_type(value))
def validate(self):
pass
def get_value(self):
return self._value
def set_value(self, val):
self._value = val
value = property(get_value, set_value)
class Dataframe(DataType):
tag = "DATAFRAME"
name = "Data Frame"
skeleton = "%s"
json = DataframeJSON()
def __init__(self, data):
super(Dataframe, self).__init__()
self.value = data
self.validate()
@classmethod
def fromDataset(cls, value, metadata=None):
df = cls._create_dataframe(value)
return cls(df)
@classmethod
def _create_dataframe(cls, value):
"""
Builds a dataframe from the value
"""
try:
ordered_jo = json.loads(six.text_type(value), object_pairs_hook=collections.OrderedDict)
#Pandas does not maintain the order of dicts, so we must break the dict
#up and put it into the dataframe manually to maintain the order.
cols = list(ordered_jo.keys())
if len(cols) == 0:
raise ValueError("Dataframe has no columns")
#Assume all sub-dicts have the same set of keys
if isinstance(ordered_jo[cols[0]], list):
index = range(len(ordered_jo[cols[0]]))
else:
#cater for when the indices are not the same by identifying
#all the indices, and then making a set of them.
longest_index = []
for col in ordered_jo.keys():
index = list(ordered_jo[col].keys())
if len(index) > len(longest_index):
longest_index = index
index = longest_index
df = pd.read_json(value, convert_axes=False)
#Make both indices the same type, so they can be compared
df.index = df.index.astype(str)
new_index = pd.Index(index).astype(str)
#Now reindex the dataframe so that the index is in the correct order,
#as per the data in the DB, and not with the default pandas ordering.
new_df = df.reindex(new_index)
#If the reindex didn't work, don't use that value
if new_df.isnull().sum().sum() != len(df.index):
df = new_df
except ValueError as e:
""" Raised on scalar types used as pd.DataFrame values
in absence of index arg
"""
log.exception(e)
raise HydraError(str(e))
except AssertionError as e:
log.warning("An error occurred creating the new data frame: %s. Defaulting to a simple read_json"%(e))
df = pd.read_json(value).fillna(0)
return df
def validate(self):
assert isinstance(self._value, pd.DataFrame)
assert not self._value.empty
def get_value(self):
return self._value.to_json()
def set_value(self, val):
self._value = val
try:
""" Use validate test to confirm is pd.DataFrame... """
self.validate()
except AssertionError:
""" ...otherwise attempt as json..."""
try:
df = self.__class__._create_dataframe(val)
self._value = df
self.validate()
except Exception as e:
""" ...and fail if neither """
raise HydraError(str(e))
value = property(get_value, set_value)
class Timeseries(DataType):
tag = "TIMESERIES"
name = "Time Series"
skeleton = "[%s, ...]"
json = TimeseriesJSON()
def __init__(self, ts):
super(Timeseries, self).__init__()
self.value = ts
self.validate()
@classmethod
def fromDataset(cls, value, metadata=None):
ordered_jo = json.loads(six.t
|
mmilata/atomic-reactor
|
atomic_reactor/plugins/post_import_image.py
|
Python
|
bsd-3-clause
| 2,151
| 0
|
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals
import json
import os
from osbs.api import OSBS
from osbs.conf import Configuration
from atomic_reactor.plugin import PostBuildPlugin
from atomic_reactor.util import ImageName
class ImportImagePlugin(PostBuildPlugin):
"""
Import image tags from external docker registry into OpenShift.
"""
key = "import_image"
can_fail = False
def __init__(self, tasker, workflow, url, verify_ssl=True, use_auth=True):
"""
constructor
:param tasker: DockerTasker instance
:param workflow: DockerBuildWorkflow instance
:param url: str, URL to OSv3 instance
:param verify_ssl: bool, verify SSL certificate?
:param use_auth: bool, initiate authentication with openshift?
"""
# call parent constructor
super(ImportImagePlugin, self).__init__(tasker, workflow)
self.url = url
self.verify_ssl = verify_ssl
self.use_auth = use_auth
def run(self):
try:
build_json = json.loads(os.environ["BUILD"])
except KeyError:
self.log.error("No $BUILD env variable. "
"Probably not running in build container.")
raise
osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
use_auth=self.use_auth,
verify_ssl=self.verify_ssl)
osbs = OSBS(osbs_conf, osbs_conf)
metadata = build_json.get("metadata", {})
kwargs = {}
if 'namespace' in metadata:
kwargs['namespace'] = metadata['namespace']
labels = metadata.get("labels", {})
try:
imagestream = labels["ima
|
gestream"]
except KeyError:
self.log.error("No imagestream label set for this Build")
raise
self.log.info("Importing tags for %s", imagestream)
osbs.import_image(imagestream, **kwargs)
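# (editor's sketch, hedged): outside atomic-reactor's plugin runner, this would
# be driven roughly as below; `tasker` and `workflow` come from the build
# machinery, and the URL is illustrative only.
#
#   plugin = ImportImagePlugin(tasker, workflow, url="https://osv3.example.com:8443")
#   plugin.run()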
|
meisterkleister/erpnext
|
erpnext/setup/doctype/email_digest/email_digest.py
|
Python
|
agpl-3.0
| 11,804
| 0.027702
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import fmt_money, formatdate, format_time, now_datetime, \
	get_url_to_form, get_url_to_list, flt
from datetime import timedelta
from dateutil.relativedelta import relativedelta
from frappe.core.doctype.user.user import STANDARD_USERS
import frappe.desk.notifications
from erpnext.accounts.utils import get_balance_on
user_specific_content = ["calendar_events", "todo_list"]
from frappe.model.document import Document
class EmailDigest(Document):
def __init__(self, arg1, arg2=None):
super(EmailDigest, self).__init__(arg1, arg2)
self.from_date, self.to_date = self.get_from_to_date()
self.set_dates()
self._accounts = {}
self.currency = frappe.db.get_value("Company", self.company,
"default_currency")
def get_users(self):
"""get list of users"""
user_list = frappe.db.sql("""
select name, enabled from tabUser
where name not in ({})
and user_type != "Website User"
order by enabled desc, name asc""".format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS, as_dict=1)
if self.recipient_list:
recipient_list = self.recipient_list.split("\n")
else:
recipient_list = []
for p in user_list:
p["checked"] = p["name"] in recipient_list and 1 or 0
frappe.response['user_list'] = user_list
def send(self):
# send email only to enabled users
valid_users = [p[0] for p in frappe.db.sql("""select name from `tabUser`
where enabled=1""")]
recipients = filter(lambda r: r in valid_users,
self.recipient_list.split("\n"))
original_user = frappe.session.user
if recipients:
for user_id in recipients:
frappe.set_user(user_id)
msg_for_this_receipient = self.get_msg_html()
if msg_for_this_receipient:
frappe.sendmail(
recipients=user_id,
subject="{frequency} Digest".format(frequency=self.frequency),
message=msg_for_this_receipient,
bulk=True,
reference_doctype = self.doctype,
reference_name = self.name,
unsubscribe_message = _("Unsubscribe from this Email Digest"))
frappe.set_user(original_user)
def get_msg_html(self):
"""Build email digest content"""
frappe.flags.ignore_account_permission = True
from erpnext.setup.doctype.email_digest.quotes import get_random_quote
context = frappe._dict()
context.update(self.__dict__)
self.set_title(context)
self.set_style(context)
self.set_accounting_cards(context)
context.events = self.get_calendar_events()
context.todo_list = self.get_todo_list()
context.notifications = self.get_notifications()
quote = get_random_quote()
context.quote = {"text": quote[0], "author": quote[1]}
if not (context.events or context.todo_list or context.notifications or context.cards):
return None
frappe.flags.ignore_account_permission = False
# style
return frappe.render_template("erpnext/setup/doctype/email_digest/templates/default.html",
context, is_path=True)
def set_title(self, context):
"""Set digest title"""
if self.frequency=="Daily":
context.title = _("Daily Reminders")
context.subtitle = _("Pending activities for today")
elif self.frequency=="Weekly":
context.title = _("This Week's Summary")
context.subtitle = _("Summary for this week and pending activities")
elif self.frequency=="Monthly":
context.title = _("This Month's Summary")
context.subtitle = _("Summary for this month and pending activities")
def set_style(self, context):
"""Set standard digest style"""
context.text_muted = '#8D99A6'
context.text_color = '#36414C'
context.h1 = 'margin-bottom: 30px; margin-bottom: 0; margin-top: 40px; font-weight: 400;'
context.label_css = '''display: inline-block; color: {text_muted};
padding: 3px 7px; margin-right: 7px;'''.format(text_muted = context.text_muted)
context.section_head = 'margin-top: 60px; font-size: 16px;'
context.line_item = 'padding: 5px 0px; margin: 0; border-bottom: 1px solid #d1d8dd;'
context.link_css = 'color: {text_color}; text-decoration: none;'.format(text_color = context.text_color)
def get_notifications(self):
"""Get notifications for user"""
notifications = frappe.desk.notifications.get_notifications()
notifications = sorted(notifications.get("open_count_doctype", {}).items(),
lambda a, b: 1 if a[1] < b[1] else -1)
notifications = [{"key": n[0], "value": n[1],
"link": get_url_to_list(n[0])} for n in notifications if n[1]]
return notifications
def get_calendar_events(self):
"""Get calendar events for given user"""
from frappe.desk.doctype.event.event import get_events
events = get_events(self.future_from_date.strftime("%Y-%m-%d"),
self.future_to_date.strftime("%Y-%m-%d")) or []
for i, e in enumerate(events):
e.starts_on_label = format_time(e.starts_on)
e.ends_on_label = format_time(e.ends_on)
e.date = formatdate(e.starts)
e.link = get_url_to_form("Event", e.name)
return events
def get_todo_list(self, user_id=None):
"""Get to-do list"""
if not user_id:
user_id = frappe.session.user
todo_list = frappe.db.sql("""select *
from `tabToDo` where (owner=%s or assigned_by=%s) and status="Open"
order by field(priority, 'High', 'Medium', 'Low') asc, date asc""",
(user_id, user_id), as_dict=True)
for t in todo_list:
t.link = get_url_to_form("ToDo", t.name)
return todo_list
def set_accounting_cards(self, context):
"""Create accounting cards if checked"""
cache = frappe.cache()
context.cards = []
for key in ("income", "expenses_booked", "income_year_to_date", "expense_year_to_date",
"invoiced_amount", "payables", "bank_balance"):
if self.get(key):
cache_key = "email_digest:card:" + key
card = cache.get(cache_key)
if card:
card = eval(card)
else:
card = frappe._dict(getattr(self, "get_" + key)())
# format values
if card.last_value:
card.diff = int(flt(card.value - card.last_value) / card.last_value * 100)
if card.diff < 0:
card.diff = str(card.diff)
card.gain = False
else:
card.diff = "+" + str(card.diff)
card.gain = True
card.last_value = self.fmt_money(card.last_value)
card.value = self.fmt_money(card.value)
cache.setex(cache_key, card, 24 * 60 * 60)
context.cards.append(card)
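			# (editor's note, hedged worked example): value=120.0 with
			# last_value=100.0 gives card.diff == "+20" and card.gain == True;
			# value=80.0 gives card.diff == "-20" and card.gain == False.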
def get_income(self):
"""Get income for given period"""
income, past_income = self.get_period_amounts(self.get_root_type_accounts("income"))
return {
"label": self.meta.get_label("income"),
"value": income,
"last_value": past_income
}
def get_income_year_to_date(self):
"""Get income to date"""
return self.get_year_to_date_balance("income")
def get_expense_year_to_date(self):
"""Get income to date"""
return self.get_year_to_date_balance("expense")
def get_year_to_date_balance(self, root_type):
"""Get income to date"""
balance = 0.0
for account in self.get_root_type_accounts(root_type):
balance += get_balance_on(account, date = self.future_to_date)
return {
"label": self.meta.get_label(root_type + "_year_to_date"),
"value": balance
}
def get_bank_balance(self):
# account is of type "Bank" or "Cash"
return self.get_type_balance('bank_balance', 'Bank')
def get_payables(self):
return self.get_type_balance('payables', 'Payable')
def get_invoiced_amount(self):
return self.get_type_balance('invoiced_amount', 'Receivable')
def get_expenses_booked(self):
expense, past_expense = self.get_period_amounts(self.get_root_type_accounts("expense"))
return {
"label": self.meta.get_label("expenses_booked"),
"value": expense,
"last_value": past_expense
}
def get_period_amounts(self, accounts):
"""Get amounts for current and past periods"""
balance = past_balance = 0.0
for account in accounts:
balance += (get_balance_on(account, date = self.future_to_date)
- get_balance_on(account, date = self.future_from_date))
past_balance += (get_balance_on(account, date = self.past_to_date)
- get_balance_on(account, date = self.past
|
RedHatQE/cfme_tests
|
cfme/storage/object_store_object.py
|
Python
|
gpl-2.0
| 6,631
| 0.001206
|
# -*- coding: utf-8 -*-
import attr
from navmazing import NavigateToAttribute
from widgetastic.widget import NoSuchElementException
from widgetastic.widget import Text
from widgetastic.widget import View
from widgetastic_patternfly import BootstrapNav
from widgetastic_patternfly import BreadCrumb
from widgetastic_patternfly import Button
from widgetastic_patternfly import Dropdown
from cfme.base.ui import BaseLoggedInPage
from cfme.common import Taggable
from cfme.exceptions import ItemNotFound
from cfme.modeling.base import BaseCollection
from cfme.modeling.base import BaseEntity
from cfme.utils.appliance.implementations.ui import CFMENavigateStep
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.appliance.implementations.ui import navigator
from cfme.utils.providers import get_crud_by_name
from widgetastic_manageiq import Accordion
from widgetastic_manageiq import BaseEntitiesView
from widgetastic_manageiq import ItemsToolBarViewSelector
from widgetastic_manageiq import ManageIQTree
from widgetastic_manageiq import Search
from widgetastic_manageiq import SummaryTable
class ObjectStoreObjectToolbar(View):
"""The toolbar on the Object Store Object page"""
configuration = Dropdown('Configuration')
policy = Dropdown('Policy')
download = Dropdown('Download')
view_selector = View.nested(ItemsToolBarViewSelector)
class ObjectStoreObjectDetailsToolbar(View):
"""The toolbar on the Object Store Object detail page"""
policy = Dropdown('Policy')
download = Button(title='Download summary in PDF format')
class ObjectStoreObjectDetailsEntities(View):
"""The entities on the Object Store Object detail page"""
breadcrumb = BreadCrumb()
properties = SummaryTable('Properties')
relationships = SummaryTable('Relationships')
smart_management = SummaryTable('Smart Management')
class ObjectStoreObjectDetailsSidebar(View):
"""The sidebar on the Object Store Object details page"""
@View.nested
class properties(Accordion): # noqa
tree = ManageIQTree()
@View.nested
class relationships(Accordion): # noqa
tree = ManageIQTree()
class ObjectStoreObjectView(BaseLoggedInPage):
"""A base view for all the Object Store Object pages"""
title = Text('.//div[@id="center_div" or @id="main-content"]//h1')
@property
def in_object(self):
return (
self.logged_in_as_current_user and
self.navigation.currently_selected == ['Storage', 'Object Storage',
'Object Store Objects'])
class ObjectStoreObjectAllView(ObjectStoreObjectView):
"""The all Object Stor
|
e Object page"""
    toolbar = View.nested(ObjectStoreObjectToolbar)
search = View.nested(Search)
including_entities = View.include(BaseEntitiesView, use_parent=True)
@property
def is_displayed(self):
return (
self.in_object and
self.title.text == 'Cloud Object Store Objects')
@View.nested
class my_filters(Accordion): # noqa
ACCORDION_NAME = "My Filters"
navigation = BootstrapNav('.//div/ul')
tree = ManageIQTree()
class ObjectStoreObjectDetailsView(ObjectStoreObjectView):
"""The detail Object Store Object page"""
@property
def is_displayed(self):
expected_title = '{} (Summary)'.format(self.context['object'].key)
return (
self.title.text == expected_title and
self.entities.breadcrumb.active_location == expected_title)
toolbar = View.nested(ObjectStoreObjectDetailsToolbar)
sidebar = View.nested(ObjectStoreObjectDetailsSidebar)
entities = View.nested(ObjectStoreObjectDetailsEntities)
@attr.s
class ObjectStoreObject(BaseEntity, Taggable):
""" Model of an Storage Object Store Object in cfme
Args:
key: key of the object.
provider: provider
"""
key = attr.ib()
provider = attr.ib()
@attr.s
class ObjectStoreObjectCollection(BaseCollection):
"""Collection object for the :py:class:'cfme.storage.object_store_object.ObjStoreObject' """
ENTITY = ObjectStoreObject
@property
def manager(self):
coll = self.appliance.collections.object_managers.filter(
{"provider": self.filters.get('provider')}
)
        # Each provider has a single object-type storage manager
return coll.all()[0]
def all(self):
"""returning all Object Store Objects"""
view = navigate_to(self, 'All')
view.entities.paginator.set_items_per_page(500)
objects = []
try:
            if 'provider' in self.filters:
for item in view.entities.elements.read():
if self.filters['provider'].name in item['Cloud Provider']:
objects.append(self.instantiate(key=item['Key'],
provider=self.filters['provider']))
else:
for item in view.entities.elements.read():
provider_name = item['Cloud Provider'].split()[0]
provider = get_crud_by_name(provider_name)
objects.append(self.instantiate(key=item['Key'], provider=provider))
return objects
except NoSuchElementException:
return None
def delete(self, *objects):
view = navigate_to(self, 'All')
for obj in objects:
try:
view.entities.get_entity(key=obj.key, surf_pages=True).check()
except ItemNotFound:
raise ItemNotFound('Could not locate object {}'.format(obj.key))
view.toolbar.configuration.item_select('Remove Object Storage Objects', handle_alert=True)
view.flash.assert_no_error()
@navigator.register(ObjectStoreObjectCollection, 'All')
class ObjectStoreObjectAll(CFMENavigateStep):
VIEW = ObjectStoreObjectAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self, *args, **kwargs):
self.prerequisite_view.navigation.select(
'Storage', 'Object Storage', 'Object Store Objects')
@navigator.register(ObjectStoreObject, 'Details')
class ObjectStoreObjectDetails(CFMENavigateStep):
VIEW = ObjectStoreObjectDetailsView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self, *args, **kwargs):
try:
self.prerequisite_view.entities.get_entity(key=self.obj.key, surf_pages=True).click()
except ItemNotFound:
raise ItemNotFound('Could not locate object {}'.format(self.obj.key))
|
QInfer/python-qinfer
|
src/qinfer/domains.py
|
Python
|
bsd-3-clause
| 19,964
| 0.002655
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
# domains.py: module for domains of model outcomes
##
# © 2017, Chris Ferrie (csferrie@gmail.com) and
# Christopher Granade (cgranade@cgranade.com).
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
##
## IMPORTS ###################################################################
from __future__ import division
from __future__ import absolute_import
from builtins import range
from future.utils import with_metaclass
from functools import reduce
from operator import mul
from scipy.special import binom
from math import factorial
from itertools import combinations_with_replacement, product
import numpy as np
from .utils import join_struct_arrays, separate_struct_array
import abc
import warnings
## EXPORTS ###################################################################
__all__ = [
'Domain',
'ProductDomain',
'RealDomain',
'IntegerDomain',
'MultinomialDomain'
]
## FUNCTIONS #################################################################
## ABSTRACT CLASSES AND MIXINS ###############################################
class Domain(with_metaclass(abc.ABCMeta, object)):
"""
Abstract base class for domains of outcomes of models.
"""
## ABSTRACT PROPERTIES ##
@abc.abstractproperty
def is_continuous(self):
"""
Whether or not the domain has an uncountable number of values.
:type: `bool`
"""
pass
@abc.abstractproperty
def is_finite(self):
"""
Whether or not the domain contains a finite number of points.
:type: `bool`
"""
pass
@abc.abstractproperty
def dtype(self):
"""
The numpy dtype of a single element of the domain.
:type: `np.dtype`
"""
pass
@abc.abstractproperty
def n_members(self):
"""
Returns the number of members in the domain if it
`is_finite`, otherwise, returns `np.inf`.
:type: ``int`` or ``np.inf``
"""
pass
@abc.abstractproperty
def example_point(self):
"""
Returns any single point guaranteed to be in the domain, but
no other guarantees; useful for testing purposes.
This is given as a size 1 ``np.array`` of type `dtype`.
:type: ``np.ndarray``
"""
pass
@abc.abstractproperty
def values(self):
"""
Returns an `np.array` of type `dtype` containing
some values from the domain.
For domains where `is_finite` is ``True``, all elements
of the domain will be yielded exactly once.
:rtype: `np.ndarray`
"""
pass
## CONCRETE PROPERTIES ##
@property
def is_discrete(self):
"""
Whether or not the domain has a countable number of values.
:type: `bool`
"""
return not self.is_continuous
## ABSTRACT METHODS ##
@abc.abstractmethod
def in_domain(self, points):
"""
Returns ``True`` if all of the given points are in the domain,
``False`` otherwise.
:param np.ndarray points: An `np.ndarray` of type `self.dtype`.
:rtype: `bool`
"""
pass
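# (editor's sketch, hedged; not part of the original module): a minimal
# concrete Domain -- a two-outcome "coin" -- only needs the abstract members
# declared above, since plain class attributes satisfy the abstract properties.
class _ExampleCoinDomain(Domain):
    is_continuous = False
    is_finite = True
    dtype = np.dtype(int)
    n_members = 2
    example_point = np.array([0], dtype=int)
    values = np.array([0, 1], dtype=int)

    def in_domain(self, points):
        # Membership test: every point must be one of the two coin outcomes.
        return bool(np.all(np.in1d(points, self.values)))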
class ProductDomain(Domain):
"""
A domain made from the cartesian product of other domains.
:param Domain domains: ``Domain`` instances as separate arguments,
    or as a single list of ``Domain`` instances.
"""
def __init__(self, *domains):
if len(domains) == 1:
try:
self._domains = list(domains[0])
except:
self._domains = domains
else:
self._domains = domains
self._dtypes = [domain.example_point.dtype for domain in self._domains]
self._example_point = join_struct_arrays(
[np.array(domain.example_point) for domain in self._domains]
)
self._dtype = self._example_point.dtype
@property
def is_continuous(self):
"""
Whether or not the domain has an uncountable number of values.
:type: `bool`
"""
return any([domain.is_continuous for domain in self._domains])
@property
def is_finite(self):
"""
Whether or not the domain contains a finite number of points.
:type: `bool`
"""
return all([domain.is_finite for domain in self._domains])
@property
def dtype(self):
"""
The numpy dtype of a single element of the domain.
:type: `np.dtype`
"""
return self._dtype
@property
def n_members(self):
"""
Returns the number of members in the domain if it
`is_finite`, otherwise, returns `np.inf`.
:type: ``int`` or ``np.inf``
"""
if self.is_finite:
return reduce(mul, [domain.n_members for domain in self._domains], 1)
else:
return np.inf
@property
def example_point(self):
"""
Returns any single point guaranteed to be in the domain, but
no other guarantees; useful for testing purposes.
This is given as a size 1 ``np.array`` of type `dtype`.
:type: ``np.ndarray``
"""
return self._example_point
@property
def values(self):
"""
Returns an `np.array` of type `dtype` containing
some values from the domain.
For domains where `is_finite` is ``True``, all elements
of the domain will be yielded exactly once.
:rtype: `np.ndarray`
"""
separate_values = [domain.values for domain in self._domains]
return np.concatenate([
join_struct_arrays(list(map(np.array, value)))
for value in product(*separate_values)
])
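    # Illustrative behaviour (hypothetical factor domains): for a product of a
    # two-point domain {0, 1} and a three-point domain {0, 1, 2}, `values`
    # yields all 6 joined points (0,0), (0,1), (0,2), (1,0), (1,1), (1,2) --
    # the cartesian product of the factor values packed into one struct array.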
## METHODS ##
def _mytype(self, array):
# astype does weird stuff with struct names, and possibly
# depends on numpy version; hopefully
# the following is a bit more predictable since it passes through
# uint8
return separate_struct_array(array, self.dtype)[0]
def to_regular_arrays(self, array):
"""
Expands from an array of type `self.dtype` into a list of
arrays with dtypes corresponding to the factor domains.
:param np.ndarray array: An `np.array` of type `self.dtype`.
:rtype: ``list``
"""
return separate_struct_array(self._mytype(array), self._dtypes)
def from_regular_arrays(self, arrays):
"""
Merges a list of arrays (of the same shape) of dtypes
corresponding to the factor domains int
|
falgore88/command_manager
|
manage.py
|
Python
|
bsd-2-clause
| 187
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
if __name__ == '__main__':
from command_manager import Manager
manager = Manager(["commands"])
manager.run()
| |
ebrahimraeyat/civilTools
|
section/filehandling/slenders.py
|
Python
|
gpl-3.0
| 3,474
| 0.014105
|
# -*- coding: utf-8 -*-
slenderParameters = {'notPlate': {'beam': {'M': {'BF': '2*bf', 'tfCriteria': True, 'TF': ('2*tf', ''), 'D': 'd',
'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')},
'H': {'BF': '2*bf', 'tfCriteria': True, 'TF': ('2*0.55*tf/.6', ''), 'D': 'd',
'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')}},
'column': {'M': {'BF': '2*bf', 'tfCriteria': True, 'TF': ('2*tf', ''), 'D': 'd',
'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')},
'H': {'BF': '2*bf', 'tfCriteria': True, 'TF': ('2*tf', ''), 'D': 'd',
'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')}}},
'TBPlate': {'beam': {'M': {'BF': 'c+bf', 'tfCriteria': 't1<(.76*B1*tf)/(1.12*bf)',
'TF': ('(1.12*BF*t1)/(.76*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')},
'H': {'BF': 'c+bf', 'tfCriteria': 't1<(.6*B1*tf)/(0.55*bf)',
'TF': ('(0.55*BF*t1)/(.60*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')}},
'column': {'M': {'BF': 'c+bf', 'tfCriteria': 't1<(.76*B1*tf)/(1.12*bf)',
'TF': ('(1.12*BF*t1)/(.76*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')},
'H': {'BF': 'c+bf', 'tfCriteria': 't1<(B1*tf)/(bf)',
'TF': ('(BF*t1)/(B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')}}},
'LRPlate': {'beam': {'M': {'BF': 'c+bf+2*tf', 'tfCriteria': 't1<(.76*B1*tf)/(1.12*bf)',
'TF': ('(1.12*BF*t1)/(.76*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
'twCriteria': 't2<(d*tw)/(d-2(tf+r))', 'TW': ('t2*(D-2*TF)/d', 'tw*(D-2*TF)/(d-2*(tf+r))')},
'H': {'BF': 'c+bf+2*tf', 'tfCriteria': 't1<(.6*B1*tf)/(0.55*bf)',
                                 'TF': ('(0.55*BF*t1)/(.60*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
'twCriteria': 't2<(d*tw)/(d-2(tf+r))', 'TW': ('t2*(D-2*TF)/d', 'tw*(D-2*TF)/(d-2*(tf+r))')}},
|
'column': {'M': {'BF': 'c+bf+2*tf', 'tfCriteria': 't1<(.76*B1*tf)/(1.12*bf)',
'TF': ('(1.12*BF*t1)/(.76*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
'twCriteria': 't2<(d*tw)/(d-2(tf+r))', 'TW': ('t2*(D-2*TF)/d', 'tw*(D-2*TF)/(d-2*(tf+r))')},
'H': {'BF': 'c+bf+2*tf', 'tfCriteria': 't1<(B1*tf)/(bf)',
'TF': ('(BF*t1)/(B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
'twCriteria': 't2<(d*tw)/(d-2(tf+r))', 'TW': ('t2*(D-2*TF)/d', 'tw*(D-2*TF)/(d-2*(tf+r))')}}}}
if __name__ == '__main__':
    compositeSection = 'LRPlate'
    sectionPos = 'beam'
    ductility = 'M'
    parameters = slenderParameters[compositeSection][sectionPos][ductility]
BF = parameters['BF']
tfCriteria = parameters['tfCriteria']
TF1 = parameters['TF'][0]
TF2 = parameters['TF'][1]
D = parameters['D']
twCriteria = parameters['twCriteria']
TW1 = parameters['TW'][0]
TW2 = parameters['TW'][1]
print BF
print tfCriteria
|
ResearchSoftwareInstitute/MyHPOM
|
hs_tracking/__init__.py
|
Python
|
bsd-3-clause
| 60
| 0
|
default_app_config = 'hs_tracking.apps.HSTrackingAppConfig'
|
google/vimdoc
|
vimdoc/parser.py
|
Python
|
apache-2.0
| 3,977
| 0.012321
|
"""The vimdoc parser."""
from vimdoc import codeline
from vimdoc import docline
from vimdoc import error
from vimdoc import regex
def IsComment(line):
return regex.comment_leader.match(line)
def IsContinuation(line):
return regex.line_continuation.match(line)
def StripContinuator(line):
assert regex.line_continuation.match(line)
return regex.line_continuation.sub('', line)
def EnumerateStripNewlinesAndJoinContinuations(lines):
"""Preprocesses the lines of a vimscript file.
Enumerates the lines, strips the newlines from the end, and joins the
continuations.
Args:
lines: The lines of the file.
Yields:
Each preprocessed line.
"""
lineno, cached = (None, None)
for i, line in enumerate(lines):
line = line.rstrip('\n')
if IsContinuation(line):
if cached is None:
raise error.CannotContinue('No preceding line.', i)
elif IsComment(cached) and not IsComment(line):
raise error.CannotContinue('No comment to continue.', i)
else:
cached += StripContinuator(line)
continue
if cached is not None:
yield lineno, cached
lineno, cached = (i, line)
if cached is not None:
yield lineno, cached
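# Roughly, for illustration (hypothetical input, not from real test data):
# the two vimscript lines
#   let x = a +
#       \ b
# are joined into a single yielded entry whose text reads 'let x = a + b',
# because the second line matches the continuation regex and is stripped and
# appended onto the cached first line.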
def EnumerateParsedLines(lines):
vimdoc_mode = False
for i, line in EnumerateStripNewlinesAndJoinContinuations(lines):
if not vimdoc_mode:
if regex.vimdoc_leader.match(line):
vimdoc_mode = True
# There's no need to yield the blank line if it's an empty starter line.
# For example, in:
# ""
# " @usage whatever
# " description
# There's no need to yield the first docline as a blank.
if not regex.empty_vimdoc_leader.match(line):
# A starter line starts with two comment leaders.
# If we strip one of them it's a normal comment line.
          yield i, ParseCommentLine(regex.comment_leader.sub('', line))
elif IsComment(line):
      yield i, ParseCommentLine(line)
else:
vimdoc_mode = False
yield i, ParseCodeLine(line)
def ParseCodeLine(line):
"""Parses one line of code and creates the appropriate CodeLine."""
if regex.blank_code_line.match(line):
return codeline.Blank()
fmatch = regex.function_line.match(line)
if fmatch:
namespace, name, args = fmatch.groups()
return codeline.Function(name, namespace, regex.function_arg.findall(args))
cmatch = regex.command_line.match(line)
if cmatch:
args, name = cmatch.groups()
flags = {
'bang': '-bang' in args,
'range': '-range' in args,
'count': '-count' in args,
'register': '-register' in args,
'buffer': '-buffer' in args,
'bar': '-bar' in args,
}
return codeline.Command(name, **flags)
smatch = regex.setting_line.match(line)
if smatch:
name, = smatch.groups()
return codeline.Setting('g:' + name)
flagmatch = regex.flag_line.match(line)
if flagmatch:
a, b, default = flagmatch.groups()
return codeline.Flag(a or b, default)
return codeline.Unrecognized(line)
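# Summary of the dispatch above, for illustration (regex capture details are
# assumed rather than verified): a 'function! ...' definition line becomes a
# codeline.Function, a ':command ...' line a codeline.Command carrying its
# flag booleans, a blank line codeline.Blank(), and any unmatched line falls
# through to codeline.Unrecognized(line).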
def ParseCommentLine(line):
"""Parses one line of documentation and creates the appropriate DocLine."""
block = regex.block_directive.match(line)
if block:
return ParseBlockDirective(*block.groups())
return docline.Text(regex.comment_leader.sub('', line))
def ParseBlockDirective(name, rest):
if name in docline.BLOCK_DIRECTIVES:
try:
return docline.BLOCK_DIRECTIVES[name](rest)
except ValueError:
raise error.InvalidBlockArgs(rest)
raise error.UnrecognizedBlockDirective(name)
def ParseBlocks(lines, filename):
blocks = []
selection = []
lineno = 0
try:
for lineno, line in EnumerateParsedLines(lines):
for block in line.Affect(blocks, selection):
yield block.Close()
for block in codeline.EndOfFile().Affect(blocks, selection):
yield block.Close()
except error.ParseError as e:
e.lineno = lineno + 1
e.filename = filename
raise
|
sinotradition/meridian
|
meridian/tst/acupoints/test_zhimai44.py
|
Python
|
apache-2.0
| 297
| 0.006734
|
#!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
import unittest
from meridian.acupoints import zhimai44
class TestZhimai44Functions(unittest.TestCase):
def setUp(self):
pass
    def test_xxx(self):
pass
if __name__ == '__main__':
unittest.main()
|
noemis-fr/old-custom
|
account_cutoff_base/account_cutoff.py
|
Python
|
agpl-3.0
| 18,694
| 0.000053
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account Cut-off Base module for OpenERP
# Copyright (C) 2013 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
from datetime import datetime
class account_cutoff(orm.Model):
_name = 'account.cutoff'
_rec_name = 'cutoff_date'
_order = 'cutoff_date desc'
_inherit = ['mail.thread']
_description = 'Account Cut-off'
_track = {
'state': {
'account_cutoff_base.cutoff_done':
lambda self, cr, uid, obj, ctx=None: obj['state'] == 'done',
}
}
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
default.update({
'cutoff_date': '%d-12-31' % datetime.today().year,
'move_id': False,
'state': 'draft',
'line_ids': False,
})
return super(account_cutoff, self).copy(
cr, uid, id, default=default, context=context)
def _compute_total_cutoff(self, cr, uid, ids, name, arg, context=None):
res = {}
for cutoff in self.browse(cr, uid, ids, context=context):
res[cutoff.id] = 0
for line in cutoff.line_ids:
res[cutoff.id] += line.cutoff_amount
return res
_columns = {
'cutoff_date': fields.date(
'Cut-off Date', required=True, readonly=True,
states={'draft': [('readonly', False)]},
track_visibility='always'),
'type': fields.selection([
('accrued_revenue', 'Accrued Revenue'),
('accrued_expense', 'Accrued Expense'),
('prepaid_revenue', 'Prepaid Revenue'),
('prepaid_expense', 'Prepaid Expense'),
], 'Type', required=True, readonly=True,
states={'draft': [('readonly', False)]}),
'move_id': fields.many2one(
'account.move', 'Cut-off Journal Entry', readonly=True),
'move_label': fields.char(
'Label of the Cut-off Journal Entry',
size=64, required=True, readonly=True,
states={'draft': [('readonly', False)]},
help="This label will be written in the 'Name' field of the "
"Cut-off Account Move Lines and in the 'Reference' field of "
"the Cut-off Account Move."),
'cutoff_account_id': fields.many2one(
'account.account', 'Cut-off Account',
domain=[('type', '<>', 'view'), ('type', '<>', 'closed')],
required=True, readonly=True,
states={'draft': [('readonly', False)]}),
'cutoff_journal_id': fields.many2one(
'account.journal', 'Cut-off Account Journal', required=True,
readonly=True, states={'draft': [('readonly', False)]}),
'total_cutoff_amount': fields.function(
_compute_total_cutoff, type='float', string="Total Cut-off Amount",
readonly=True, track_visibility='always'),
'company_id': fields.many2one(
'res.company', 'Company', required=True, readonly=True,
states={'draft': [('readonly', False)]}),
'company_currency_id': fields.related(
'company_id', 'currency_id', readonly=True, type='many2one',
relation='res.currency', string='Company Currency'),
'line_ids': fields.one2many(
'account.cutoff.line', 'parent_id', 'Cut-off Lines', readonly=True,
states={'draft': [('readonly', False)]}),
'state': fields.selection([
('draft', 'Draft'),
('done', 'Done'),
],
'State', select=True, readonly=True, track_visibility='onchange',
help="State of the cutoff. When the Journal Entry is created, "
"the state is set to 'Done' and the fields become read-only."),
}
def _get_default_journal(self, cr, uid, context=None):
cur_user = self.pool['res.users'].browse(cr, uid, uid, context=context)
return cur_user.company_id.default_cutoff_journal_id.id or None
def _default_move_label(self, cr, uid, context=None):
if context is None:
context = {}
type = context.get('type')
cutoff_date = context.get('cutoff_date')
if cutoff_date:
cutoff_date_label = ' dated %s' % cutoff_date
else:
cutoff_date_label = ''
label = ''
if type == 'accrued_expense':
label = _('Accrued Expense%s') % cutoff_date_label
elif type == 'accrued_revenue':
label = _('Accrued Revenue%s') % cutoff_date_label
elif type == 'prepaid_revenue':
label = _('Prepaid Revenue%s') % cutoff_date_label
elif type == 'prepaid_expense':
label = _('Prepaid Expense%s') % cutoff_date_label
return label
def _default_type(self, cr, uid, context=None):
if context is None:
context = {}
return context.get('type')
def _inherit_default_cutoff_account_id(self, cr, uid, context=None):
'''Function designed to be inherited by other cutoff modules'''
return None
def _default_cutoff_account_id(self, cr, uid, context=None):
'''This function can't be inherited, so we use a second function'''
        return self._inherit_default_cutoff_account_id(
cr, uid, context=context)
_defaults = {
'state': 'draft',
'company_id': lambda self, cr, uid, context:
self.pool['res.users'].browse(
cr, uid, uid, context=context).company_id.id,
'cutoff_journal_id': _get_default_journal,
'move_label': _default_move_label,
'type': _default_type,
'cutoff_account_id': _default_cutoff_account_id,
}
_sql_constraints = [(
'date_type_company_uniq',
'unique(cutoff_date, company_id, type)',
'A cutoff of the same type already exists with this cut-off date !'
)]
def cutoff_date_onchange(
self, cr, uid, ids, type, cutoff_date, move_label):
res = {'value': {}}
if type and cutoff_date:
context = {'type': type, 'cutoff_date': cutoff_date}
res['value']['move_label'] = self._default_move_label(
cr, uid, context=context)
return res
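    # Illustrative round-trip (hypothetical values): calling
    # cutoff_date_onchange(cr, uid, [], 'accrued_expense', '2013-12-31', '')
    # returns {'value': {'move_label': 'Accrued Expense dated 2013-12-31'}},
    # since _default_move_label() rebuilds the label from the given context.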
def back2draft(self, cr, uid, ids, context=None):
assert len(ids) == 1,\
'This function should only be used for a single id at a time'
cur_cutoff = self.browse(cr, uid, ids[0], context=context)
if cur_cutoff.move_id:
self.pool['account.move'].unlink(
cr, uid, [cur_cutoff.move_id.id], context=context)
self.write(cr, uid, ids[0], {'state': 'draft'}, context=context)
return True
def _prepare_move(self, cr, uid, cur_cutoff, to_provision, context=None):
if context is None:
context = {}
movelines_to_create = []
amount_total = 0
move_label = cur_cutoff.move_label
for (cutoff_account_id, analytic_account_id), amount in \
to_provision.items():
movelines_to_create.append((0, 0, {
'account_id': cutoff_accoun
|
briehl/narrative
|
src/biokbase/narrative/tests/test_jobmanager.py
|
Python
|
mit
| 6,949
| 0.002446
|
"""
Tests for job management
"""
import unittest
from unittest import mock
import biokbase.narrative.jobs.jobmanager
from biokbase.narrative.jobs.job import Job
from .util import TestConfig
import os
from IPython.display import HTML
from .narrative_mock.mockclients import get_mock_client, get_failing_mock_client
from biokbase.narrative.exception_util import NarrativeException
__author__ = "Bill Riehl <wjriehl@lbl.gov>"
config = TestConfig()
job_info = config.load_json_file(config.get("jobs", "ee2_job_info_file"))
@mock.patch("biokbase.narrative.jobs.job.clients.get", get_mock_client)
def phony_job():
return Job.from_state(
"phony_job",
{"params": [], "service_ver": "0.0.0"},
"kbasetest",
"NarrativeTest/test_editor",
tag="dev",
)
def create_jm_message(r_type, job_id=None, data=None):
    # avoid a shared mutable default argument between calls
    if data is None:
        data = {}
    data["request_type"] = r_type
    data["job_id"] = job_id
    return {"content": {"data": data}}
class JobManagerTest(unittest.TestCase):
@classmethod
@mock.patch("biokbase.narrative.jobs.jobmanager.clients.get", get_mock_client)
def setUpClass(cls):
cls.jm = biokbase.narrative.jobs.jobmanager.JobManager()
cls.job_ids = list(job_info.keys())
os.environ["KB_WORKSPACE_ID"] = config.get("jobs", "job_test_wsname")
@mock.patch("biokbase.narrative.jobs.jobmanager.clients.get", get_mock_client)
def setUp(self):
self.jm.initialize_jobs()
def validate_status_message(self, msg):
core_keys = set(["widget_info", "owner", "state", "spec"])
state_keys = set(
["user", "authstrat", "wsid", "status", "updated", "job_input"]
)
if not core_keys.issubset(set(msg.keys())):
print(
"Missing core key(s) - [{}]".format(
", ".join(core_keys.difference(set(msg.keys())))
)
)
return False
if not state_keys.issubset(set(msg["state"].keys())):
print(
"Missing status key(s) - [{}]".format(
", ".join(state_keys.difference(set(msg["state"].keys())))
)
)
return False
return True
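    # Minimal message shape that passes the validation above (illustrative
    # skeleton only; real messages carry many more fields):
    # {'widget_info': ..., 'owner': ..., 'spec': ...,
    #  'state': {'user': ..., 'authstrat': ..., 'wsid': ...,
    #            'status': ..., 'updated': ..., 'job_input': ...}}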
def test_get_job_good(self):
job_id = self.job_ids[0]
job = self.jm.get_job(job_id)
self.assertEqual(job_id, job.job_id)
def test_get_job_bad(self):
with self.assertRaises(ValueError):
self.jm.get_job("not_a_job_id")
@mock.patch("biokbase.narrative.jobs.jobmanager.clients.get", get_mock_client)
def test_list_jobs_html(self):
jobs_html = self.jm.list_jobs()
self.assertIsInstance(jobs_html, HTML)
        html = jobs_html.data
print(html)
self.assertIn("<td>5d64935ab215ad4128de94d6</td>", html
|
)
self.assertIn("<td>NarrativeTest/test_editor</td>", html)
self.assertIn("<td>2019-08-26 ", html)
self.assertIn(":54:48</td>", html)
self.assertIn("<td>fake_test_user</td>", html)
self.assertIn("<td>completed</td>", html)
self.assertIn("<td>Not started</td>", html)
self.assertIn("<td>Incomplete</td>", html)
@mock.patch("biokbase.narrative.jobs.jobmanager.clients.get", get_mock_client)
def test_cancel_job_good(self):
new_job = phony_job()
job_id = new_job.job_id
self.jm.register_new_job(new_job)
self.jm.cancel_job(job_id)
def test_cancel_job_bad(self):
with self.assertRaises(ValueError):
self.jm.cancel_job(None)
@mock.patch("biokbase.narrative.jobs.jobmanager.clients.get", get_mock_client)
def test_lookup_all_job_states(self):
states = self.jm.lookup_all_job_states()
self.assertEqual(len(states), 2)
states = self.jm.lookup_all_job_states(ignore_refresh_flag=True)
self.assertEqual(len(states), 3)
# @mock.patch('biokbase.narrative.jobs.jobmanager.clients.get', get_mock_client)
# def test_job_status_fetching(self):
# self.jm._handle_comm_message(create_jm_message("all_status"))
# msg = self.jm._comm.last_message
# job_data = msg.get('data', {}).get('content', {})
# job_ids = list(job_data.keys())
# # assert that each job info that's flagged for lookup gets returned
# jobs_to_lookup = [j for j in self.jm._running_jobs.keys()]
# self.assertCountEqual(job_ids, jobs_to_lookup)
# for job_id in job_ids:
# self.assertTrue(self.validate_status_message(job_data[job_id]))
# self.jm._comm.clear_message_cache()
# @mock.patch('biokbase.narrative.jobs.jobmanager.clients.get', get_mock_client)
# def test_single_job_status_fetch(self):
# new_job = phony_job()
# self.jm.register_new_job(new_job)
# self.jm._handle_comm_message(create_jm_message("job_status", new_job.job_id))
# msg = self.jm._comm.last_message
# self.assertEqual(msg['data']['msg_type'], "job_status")
# # self.assertTrue(self.validate_status_message(msg['data']['content']))
# self.jm._comm.clear_message_cache()
# Should "fail" based on sent message.
# def test_job_message_bad_id(self):
# self.jm._handle_comm_message(create_jm_message("foo", job_id="not_a_real_job"))
# msg = self.jm._comm.last_message
# self.assertEqual(msg['data']['msg_type'], 'job_does_not_exist')
def test_cancel_job_lookup(self):
pass
# @mock.patch('biokbase.narrative.jobs.jobmanager.clients.get', get_mock_client)
# def test_stop_single_job_lookup(self):
# # Set up and make sure the job gets returned correctly.
# new_job = phony_job()
# phony_id = new_job.job_id
# self.jm.register_new_job(new_job)
# self.jm._handle_comm_message(create_jm_message("start_job_update", job_id=phony_id))
# self.jm._handle_comm_message(create_jm_message("stop_update_loop"))
# self.jm._lookup_all_job_status()
# msg = self.jm._comm.last_message
# self.assertTrue(phony_id in msg['data']['content'])
# self.assertEqual(msg['data']['content'][phony_id].get('listener_count', 0), 1)
# self.jm._comm.clear_message_cache()
# self.jm._handle_comm_message(create_jm_message("stop_job_update", job_id=phony_id))
# self.jm._lookup_all_job_status()
# msg = self.jm._comm.last_message
# self.assertTrue(self.jm._running_jobs[phony_id]['refresh'] == 0)
# self.assertIsNone(msg)
@mock.patch(
"biokbase.narrative.jobs.jobmanager.clients.get", get_failing_mock_client
)
def test_initialize_jobs_ee2_fail(self):
# init jobs should fail. specifically, ee2.check_workspace_jobs should error.
with self.assertRaises(NarrativeException) as e:
self.jm.initialize_jobs()
self.assertIn("Job lookup failed", str(e.exception))
if __name__ == "__main__":
unittest.main()
|
saintdragon2/python-3-lecture-2015
|
civil_mid_final/2johack/source/Stage.py
|
Python
|
mit
| 25,316
| 0.008532
|
import pygame
import Vector2, BaseObject, Enemy, Resources, Menu
# Class that manages the game's stages
class Stage(BaseObject.BaseObject):
    # param: manager - instance of the ScreenManager
    def __init__(self, manager):
        # always remember to call 'super'
        BaseObject.BaseObject.__init__(self, manager)
        map_ = Resources.MAP
        # list of the map tiles # since all stages share the same tiles, the tiles can be read from any stage
        self.tileList = Resources.loadTileList(map_['ts'], map_['tw'], map_['th'])
        Resources.optimizeSurfacesToBlit(self.tileList, [5,6,7,8,13,14,15,16,17,18,19,20])
        # the player's input state
        self.inputState = self.screenManager.inputState
        # map size in pixels
        self.stageSize = Vector2.Vector2(0,0)
        # reference point that will be passed to the camera
        self.referencePoint = Vector2.Vector2(0,0)
        # map gravity
        self.gravity = Vector2.Vector2(0,10)
        # main player
        self.player = self.screenManager.factory.getPlayer()
        # list of enemies to add to the stage
        # enemyList[0] = Vector2 with the enemy's position
        # enemyList[1] = a String with the enemy's type
        # enemyList[2] = a boolean with the enemy's direction
        self.enemyList = []
        # list of all GameObjects in this map, except the player
        self.gameObjects = []
        # list of the GameObjects that will be removed from the map
        self.gameObjectsToRemove = []
        # offset indicating how far an object may leave the screen while its update() is still called
        self.updateOffset = 450, 300 # values relative to the maximum resolution
        # offset indicating how far an object may leave the screen while this stage's updateModifier can still be called
        self.modifierOffset = 700, 600 # values relative to the maximum resolution
        # boolean storing whether the player is dead (lost all lives)
        self.gameOver = False
        # image for the game over screen
        gameOverImage = Resources.loadOpaqueImage("res/img/gameover.png")
        self.gameOverImage = pygame.transform.scale(gameOverImage, self.screenManager.resolution)
        self.goAlpha = 0 # alpha of the game over image
        self.gameOverSound = pygame.mixer.Sound("res/sound/defeat.ogg")
        self.flCount = 0 # counter for the finish-level animation
        self.finishLevelSound = pygame.mixer.Sound("res/sound/pass.ogg")
        self.killSound = pygame.mixer.Sound("res/sound/kill.wav")
        # background music
        self.backgroundMusic = None
        self.isPause = False
        background = Resources.loadOpaqueImage("res/img/background.png")
        self.background = pygame.transform.smoothscale(background, self.screenManager.resolution)
        #self.m = Resources.loadTileList("res/img/City Day.png", 64, 64)
    # override GameObject.update()
    def update(self):
        # checks whether the player paused the game or left the stage
        if not self.gameOver and not self.inputState.P_K_p and self.inputState.K_p:
            if self.isPause:
                self.isPause = False
                pygame.mixer.music.set_volume(1)
            else:
                self.isPause = True
                pygame.mixer.music.set_volume(0.15)
        elif not self.inputState.P_K_ESCAPE and self.inputState.K_ESCAPE:
            # pressing ESC returns to the menu
            self.screenManager.setBaseObjectToUpdate(Menu.StagesMenu(self.screenManager))
            self.finishLevelSound.fadeout(100)
            self.gameOverSound.fadeout(150)
            pygame.mixer.music.load("res/sound/title.ogg")
            pygame.mixer.music.play(-1)
        if not self.isPause: # if the player has not 'paused' the game
            if self.gameOver: # if the game is over
                if self.goAlpha == 0:
                    pygame.mixer.music.stop()
                    self.gameOverSound.play()
                self.goAlpha += 2
                if self.goAlpha > 200 and (self.inputState.K_ESCAPE or self.inputState.K_ENTER or self.inputState.K_SPACE or self.inputState.K_a):
                    self.gameOverSound.fadeout(600)
                    # after dying, open the stage menu
                    self.screenManager.setBaseObjectToUpdate(Menu.StagesMenu(self.screenManager))
                    pygame.mixer.music.load("res/sound/title.ogg")
                    pygame.mixer.music.play(-1)
            elif self.isFinished(): # if the stage was cleared
                if self.flCount == 0:
                    pygame.mixer.music.stop()
                    self.finishLevelSound.play()
                elif self.flCount > self.screenManager.resolution[0]:
                    self.finishLevelSound.fadeout(900)
                    # after finishing the stage, open the stage menu
                    self.screenManager.setBaseObjectToUpdate(Menu.StagesMenu(self.screenManager))
                    pygame.mixer.music.load("res/sound/title.ogg")
                    pygame.mixer.music.play(-1)
                self.flCount += 20
            else: # the stage's own update
                self.handlePlayer()
                self.handleGameObjects()
                if self.gameObjectsToRemove.__len__() > 0:
                    self.kill()
                # referencePoint = player's center on the x axis, a little above the player on the y axis
                self.referencePoint.x = self.player.position.x + self.player.size.x / 2
                self.referencePoint.y = self.player.position.y + self.player.size.y / 2 # subtracting 200 keeps the player a little below the camera center
                self.screenManager.updateCamera(self.stageSize.t(), self.referencePoint)
                self.putEnemies()
    # handles the player, doing its updates and checks
    def handlePlayer(self):
        # update the player
        self.updateGameObject(self.player)
        playerPosX = self.player.position.x
        # keep the player from leaving the map horizontally
        if playerPosX < 0:
            self.player.position.x = 0
        elif playerPosX + self.player.size.x > self.stageSize.x:
            self.player.position.x = self.stageSize.x - self.player.size.x
        # check whether the player left the map vertically
        playerOutOfStage = self.updateStageModifiers(self.player)
        if playerOutOfStage:
            self.player.loseLife()
    # handles the gameObjects, doing their updates and checks
    def handleGameObjects(self):
        # update the other GameObjects
        for go in self.gameObjects:
            goOutOfStage = False
            if self.screenManager.isInsideScreen(go.position, self.modifierOffset):
                # 'go' is inside the screen
                if self.screenManager.isInsideScreen(go.position, self.updateOffset):
                    self.updateGameObject(go)
                    # 'go' is inside the offset
                goOutOfStage = self.updateStageModifiers(go)
            # if it is outside the offset, neither update 'go' nor apply the stage modifiers
            if go.type == Enemy.SHOOTER:
                go.updateShot() # update the enemies' shots
                goAmmunition = go.shot.ammunition
                playerCollided = self.gameObjectAndBulletsCollided(self.player, goAmmunition)
                if playerCollided:
                    self.player.loseLife()
            if goOutOfStage:
                self.addToKill(go)
            if go.state == None:
                self.addToKill(go)
            elif go.state != Enemy.DYING:
                if self.gameObjectsCollided(go, self.player):
                    self.player.loseLife()
            playerAmmunition = self.player.shot.ammunition
|
duomarket/openbazaar-test-nodes
|
qa/complete_direct_online.py
|
Python
|
mit
| 9,485
| 0.004428
|
import requests
import json
import time
from collections import OrderedDict
from test_framework.test_framework import OpenBazaarTestFramework, TestFailure
class CompleteDirectOnlineTest(OpenBazaarTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 2
def run_test(self):
alice = self.nodes[0]
bob = self.nodes[1]
# generate some coins and send them to bob
time.sleep(4)
api_url = bob["gateway_url"] + "wallet/address"
r = requests.get(api_url)
if r.status_code == 200:
resp = json.loads(r.text)
address = resp["address"]
elif r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Address endpoint not found")
else:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Unknown response")
self.send_bitcoin_cmd("sendtoaddress", address, 10)
time.sleep(20)
# post listing to alice
with open('testdata/listing.json') as listing_file:
listing_json = json.load(listing_file, object_pairs_hook=OrderedDict)
api_url = alice["gateway_url"] + "ob/listing"
r = requests.post(api_url, data=json.dumps(listing_json, indent=4))
if r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Listing post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CompleteDirectOnlineTest - FAIL: Listing POST failed. Reason: %s", resp["reason"])
resp = json.loads(r.text)
slug = resp["slug"]
time.sleep(4)
# get listing hash
api_url = alice["gateway_url"] + "ipns/" + alice["peerId"] + "/listings.json"
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't get listing index")
resp = json.loads(r.text)
listingId = resp[0]["hash"]
# bob send order
with open('testdata/order_direct.json') as order_file:
order_json = json.load(order_file, object_pairs_hook=OrderedDict)
order_json["items"][0]["listingHash"] = listingId
api_url = bob["gateway_url"] + "ob/purchase"
r = requests.post(api_url, data=json.dumps(order_json, indent=4))
if r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Purchase post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CompleteDirectOnlineTest - FAIL: Purchase POST failed. Reason: %s", resp["reason"])
resp = json.loads(r.text)
orderId = resp["orderId"]
payment_address = resp["paymentAddress"]
payment_amount = resp["amount"]
# check the purchase saved correctly
api_url = bob["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if resp["state"] != "AWAITING_PAYMENT":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Bob purchase saved in incorrect state")
if resp["funded"] == True:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Bob incorrectly saved as funded")
# check the sale saved correctly
api_url = alice["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Alice")
resp = json.loads(r.text)
if resp["state"] != "AWAITING_PAYMENT":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Alice purchase saved in incorrect state")
if resp["funded"] == True:
            raise TestFailure("CompleteDirectOnlineTest - FAIL: Alice incorrectly saved as funded")
# fund order
spend = {
"address": payment_address,
"amount": payment_amount,
"feeLevel": "NORMAL"
}
api_url = bob["gateway_url"] + "wallet/spend
|
"
r = requests.post(api_url, data=json.dumps(spend, indent=4))
if r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Spend post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CompleteDirectOnlineTest - FAIL: Spend POST failed. Reason: %s", resp["reason"])
time.sleep(20)
# check bob detected payment
api_url = bob["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if resp["state"] != "AWAITING_FULFILLMENT":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Bob failed to detect his payment")
if resp["funded"] == False:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Bob incorrectly saved as unfunded")
# check alice detected payment
api_url = alice["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Alice")
resp = json.loads(r.text)
if resp["state"] != "AWAITING_FULFILLMENT":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Alice failed to detect payment")
if resp["funded"] == False:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Alice incorrectly saved as unfunded")
# alice send order fulfillment
with open('testdata/fulfillment.json') as fulfillment_file:
fulfillment_json = json.load(fulfillment_file, object_pairs_hook=OrderedDict)
fulfillment_json["orderId"] = orderId
fulfillment_json["slug"] = slug
api_url = alice["gateway_url"] + "ob/orderfulfillment"
r = requests.post(api_url, data=json.dumps(fulfillment_json, indent=4))
if r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Fulfillment post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CompleteDirectOnlineTest - FAIL: Fulfillment POST failed. Reason: %s", resp["reason"])
time.sleep(5)
# check bob received fulfillment
api_url = bob["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if resp["state"] != "FULFILLED":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Bob failed to detect order fulfillment")
# check alice set fulfillment correctly
api_url = alice["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if resp["state"] != "FULFILLED":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Alice failed to order fulfillment")
# bob send order completion
oc = {
"orderId": orderId,
"ratings": [
{
"slug": slug,
"overall": 4,
"quality": 5,
"description": 5,
"customerService": 4,
"deliverySpeed": 3,
"review": "I love it!"
}
]
}
api_url = bob["gateway_url"] + "ob/ordercompletion"
r = requests.post(api_url, data=json.dumps(oc, indent=4))
if r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Completion post endpoint not found")
|
david81brs/seaport
|
l4_vogais.py
|
Python
|
gpl-2.0
| 159
| 0.062893
|
#!/usr/bin/python3
def vogal(v):
    lista = ['a','e','i','o','u','A','E','I','O','U']
if v in lista:
return True
else:
return False
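# Usage sketch (illustrative): vogal('a') -> True, vogal('B') -> False.
# The if/else body is equivalent to the one-liner `return v in lista`.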
|
gmorph/MAVProxy
|
MAVProxy/modules/mavproxy_map/__init__.py
|
Python
|
gpl-3.0
| 27,717
| 0.005773
|
#!/usr/bin/env python
'''
map display module
Andrew Tridgell
June 2012
'''
import sys, os, math
import functools
import time
from MAVProxy.modules.mavproxy_map import mp_elevation
from MAVProxy.modules.lib import mp_util
from MAVProxy.modules.lib import mp_settings
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib.mp_menu import *
from pymavlink import mavutil
class MapModule(mp_module.MPModule):
def __init__(self, mpstate):
super(MapModule, self).__init__(mpstate, "map", "map display", public = True)
self.lat = None
self.lon = None
self.heading = 0
self.wp_change_time = 0
self.fence_change_time = 0
self.rally_change_time = 0
self.have_simstate = False
self.have_vehicle = {}
self.move_wp = -1
self.moving_wp = None
self.moving_fencepoint = None
self.moving_rally = None
self.mission_list = None
self.icon_counter = 0
self.click_position = None
self.click_time = 0
self.draw_line = None
self.draw_callback = None
self.have_global_position = False
self.vehicle_type_name = 'plane'
self.ElevationMap = mp_elevation.ElevationModel()
self.last_unload_check_time = time.time()
self.unload_check_interval = 0.1 # seconds
self.map_settings = mp_settings.MPSettings(
[ ('showgpspos', int, 0),
('showgps2pos', int, 1),
('showsimpos', int, 0),
('showahrs2pos', int, 0),
('showahrs3pos', int, 0),
('brightness', float, 1),
('rallycircle', bool, False),
('loitercircle',bool, False)])
service='OviHybrid'
if 'MAP_SERVICE' in os.environ:
service = os.environ['MAP_SERVICE']
import platform
from MAVProxy.modules.mavproxy_map import mp_slipmap
mpstate.map = mp_slipmap.MPSlipMap(service=service, elevation=True, title='Map')
mpstate.map_functions = { 'draw_lines' : self.draw_lines }
mpstate.map.add_callback(functools.partial(self.map_callback))
self.add_command('map', self.cmd_map, "map control", ['icon',
'set (MAPSETTING)'])
self.add_completion_function('(MAPSETTING)', self.map_settings.completion)
self.default_popup = MPMenuSubMenu('Popup', items=[])
self.add_menu(MPMenuItem('Fly To', 'Fly To', '# guided ',
handler=MPMenuCallTextDialog(title='Altitude (m)', default=100)))
self.add_menu(MPMenuItem('Set Home', 'Set Home', '# map sethome '))
self.add_menu(MPMenuItem('Terrain Check', 'Terrain Check', '# terrain check'))
self.add_menu(MPMenuItem('Show Position', 'Show Position', 'showPosition'))
self._colour_for_wp_command = {
# takeoff commands
mavutil.mavlink.MAV_CMD_NAV_TAKEOFF: (255,0,0),
mavutil.mavlink.MAV_CMD_NAV_TAKEOFF_LOCAL: (255,0,0),
mavutil.mavlink.MAV_CMD_NAV_VTOL_TAKEOFF: (255,0,0),
# land commands
mavutil.mavlink.MAV_CMD_NAV_LAND_LOCAL: (255,255,0),
mavutil.mavlink.MAV_CMD_NAV_LAND: (255,255,0),
mavutil.mavlink.MAV_CMD_NAV_VTOL_LAND: (255,255,0),
# waypoint commands
mavutil.mavlink.MAV_CMD_NAV_WAYPOINT: (0,255,255),
mavutil.mavlink.MAV_CMD_NAV_SPLINE_WAYPOINT: (64,255,64),
# other commands
mavutil.mavlink.MAV_CMD_DO_LAND_START: (255,127,0),
}
self._label_suffix_for_wp_command = {
mavutil.mavlink.MAV_CMD_NAV_TAKEOFF: "TOff",
mavutil.mavlink.MAV_CMD_DO_LAND_START: "DLS",
mavutil.mavlink.MAV_CMD_NAV_SPLINE_WAYPOINT: "SW",
mavutil.mavlink.MAV_CMD_NAV_VTOL_LAND: "VL",
}
def add_menu(self, menu):
'''add to the default popup menu'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
self.default_popup.add(menu)
self.mpstate.map.add_object(mp_slipmap.SlipDefaultPopup(self.default_popup, combine=True))
def show_position(self):
'''show map position click information'''
pos = self.click_position
dms = (mp_util.degrees_to_dms(pos[0]), mp_util.degrees_to_dms(pos[1]))
msg = "Coordinates in WGS84\n"
msg += "Decimal: %.6f %.6f\n" % (pos[0], pos[1])
msg += "DMS: %s %s\n" % (dms[0], dms[1])
msg += "Grid: %s\n" % mp_util.latlon_to_grid(pos)
if self.logdir:
logf = open(os.path.join(self.logdir, "positions.txt"), "a")
logf.write("Position: %.6f %.6f at %s\n" % (pos[0], pos[1], time.ctime()))
logf.close()
posbox = MPMenuChildMessageDialog('Position', msg, font_size=32)
posbox.show()
def cmd_map(self, args):
'''map commands'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
if args[0] == "icon":
if len(args) < 3:
print("Usage: map icon <lat> <lon> <icon>")
else:
lat = args[1]
lon = args[2]
flag = 'flag.png'
if len(args) > 3:
flag = args[3] + '.png'
icon = self.mpstate.map.icon(flag)
self.mpstate.map.add_object(mp_slipmap.SlipIcon('icon - %s [%u]' % (str(flag),self.icon_counter),
(float(lat),float(lon)),
icon, layer=3, rotation=0, follow=False))
self.icon_counter += 1
elif args[0] == "set":
self.map_settings.command(args[1:])
self.mpstate.map.add_object(mp_slipmap.SlipBrightness(self.map_settings.brightness))
elif args[0] == "sethome":
self.cmd_set_home(args)
else:
print("usage: map <icon|set>")
def colour_for_wp(self, wp_num):
'''return a tuple describing the colour a waypoint should appear on the map'''
wp = self.module('wp').wploader.wp(wp_num)
command = wp.command
return self._colour_for_wp_command.get(command, (0,255,0))
def label_for_waypoint(self, wp_num):
'''return the label the waypoint which should appear on the map'''
wp = self.module('wp').wploader.wp(wp_num)
command = wp.command
if command not in self._label_suffix_for_wp_command:
return str(wp_num)
return str(wp_num) + "(" + self._label_suffix_for_wp_command[command] + ")"
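    # For example (illustrative): waypoint 3 with command NAV_TAKEOFF is
    # labelled "3(TOff)" and coloured (255,0,0), while a plain NAV_WAYPOINT
    # keeps the bare label "3" and is coloured (0,255,255); commands absent
    # from both tables fall back to the default colour (0,255,0).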
def display_waypoints(self):
'''display the waypoints'''
        from MAVProxy.modules.mavproxy_map import mp_slipmap
        self.mission_list = self.module('wp').wploader.view_list()
polygons = self.module('wp').wploader.polygon_list()
self.mpstate.map.add_object(mp_slipmap.SlipClearLayer('Mission'))
for i in range(len(polygons)):
p = polygons[i]
if len(p) > 1:
popup = MPMenuSubMenu('Popup',
items=[MPMenuItem('Set', returnkey='popupMissionSet'),
MPMenuItem('WP Remove', returnkey='popupMissionRemove'),
MPMenuItem('WP Move', returnkey='popupMissionMove')])
self.mpstate.map.add_object(mp_slipmap.SlipPolygon('mission %u' % i, p,
layer='Mission', linewidth=2, colour=(255,255,255),
popup_menu=popup))
loiter_rad = self.get_mav_param('WP_LOITER_RAD')
labeled_wps = {}
self.mpstate.map.add_object(mp_slipmap.SlipClearLayer('LoiterCircles'))
for i in range(len(self.mission_list)):
next_list = self.mission_list[i]
for j in range(len(next_list)):
#label already printed for this wp?
if (next_list[j] not in labeled_wps):
label = self.label_for_waypoint(next_list[j])
colour = self.c
|
vadimadr/python-algorithms
|
tests/test_disjoint_set.py
|
Python
|
mit
| 407
| 0
|
from algorithms.structures.disjoint_set import DisjointSet
def test_disjoint_set():
a = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
ds = DisjointSet(a)
    assert ds.find_set(1) != ds.find_set(2)
ds.union(1, 2)
assert ds.find_set(1) == ds.find_set(2)
    assert ds.find_set(1) != ds.find_set(3)
ds.union(2, 3)
assert ds.find_set(1) == ds.find_set(2)
assert ds.find_set(2) == ds.find_set(3)
|
frohoff/Empire
|
lib/modules/powershell/code_execution/invoke_shellcodemsil.py
|
Python
|
bsd-3-clause
| 3,462
| 0.009821
|
import re
from lib.common import helpers
class Module:
    def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-ShellcodeMSIL',
'Author': ['@mattifestation'],
'Description': ('Execute shellcode within the context of the running PowerShell '
'process without making any Win32 function calls. Warning: This script has '
'no way to validate that your shellcode is 32 vs. 64-bit!'
                             ' Note: Your shellcode must end in a ret (0xC3) and maintain proper stack '
'alignment or PowerShell will crash!'),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : True,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comments': [
'http://www.exploit-monday.com',
'https://github.com/mattifestation/PowerSploit/blob/master/CodeExecution/Invoke-ShellcodeMSIL.ps1'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'Shellcode' : {
'Description' : 'Shellcode to inject, 0x00,0x0a,... format.',
'Required' : True,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/code_execution/Invoke-ShellcodeMSIL.ps1"
if obfuscate:
helpers.obfuscate_module(moduleSource=moduleSource, obfuscationCommand=obfuscationCommand)
moduleSource = moduleSource.replace("module_source", "obfuscated_module_source")
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode
scriptEnd = "Invoke-ShellcodeMSIL"
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if option.lower() == "shellcode":
# transform the shellcode to the correct format
sc = ",0".join(values['Value'].split("\\"))[1:]
scriptEnd += " -" + str(option) + " @(" + sc + ")"
if obfuscate:
scriptEnd = helpers.obfuscate(psScript=scriptEnd, obfuscationCommand=obfuscationCommand)
script += scriptEnd
return script
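    # Illustrative trace of the shellcode reformatting above (hypothetical
    # input): a Shellcode value of "\x31\xc0\xc3" splits on '\' into
    # ['', 'x31', 'xc0', 'xc3'], joins with ',0' into ',0x31,0xc0,0xc3', and
    # dropping the leading comma yields the final command
    # "Invoke-ShellcodeMSIL -Shellcode @(0x31,0xc0,0xc3)".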
|
sadboyzvone/8080py
|
bin/8080.py
|
Python
|
gpl-3.0
| 13,651
| 0.002051
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
############## IMPORTS ##################
from __future__ import print_function
from os import path
from sys import argv, exit
from colorama import init as initColorama
from colorama import Fore, Style
from struct import pack
from binascii import unhexlify as unHex
#########################################
######### CROSS-PYTHON HACK #############
try:
input = raw_input # For Python 2
except NameError:
pass # For Python 3
#########################################
# Init colorama module
initColorama()
######### CONSTANTS #########
# Instruction table dictionary
instructionTable = {
'nop': 0,
'stax b': 2,
'inx b': 3,
'inr b': 4,
'dcr b': 5,
'rlc': 7,
'dad b': 9,
'ldax b': 10,
'dcx b': 11,
'inr c': 12,
'dcr c': 13,
'rrc': 15,
'stax d': 18,
'inx d': 19,
'inr d': 20,
'dcr d': 21,
'ral': 23,
'dad d': 25,
'ldax d': 26,
'dcx d': 27,
'inr e': 28,
'dcr e': 29,
'rar': 31,
'rim': 32,
'inx h': 35,
'inr h': 36,
'dcr h': 37,
'daa': 39,
'dad h': 41,
'dcx h': 43,
'inr l': 44,
'dcr l': 45,
'cma': 47,
'sim': 48,
'inx sp': 51,
'inr m': 52,
'dcr m': 53,
'stc': 55,
'dad sp': 57,
'dcx sp': 59,
'inr a': 60,
'dcr a': 61,
'push b': 197,
'rst 0': 199,
'rz': 200,
'ret': 201,
'rst 1': 207,
'rnc': 208,
'pop d': 209,
'push d': 213,
'rst 2': 215,
'rc': 216,
'rst 3': 223,
'rpo': 224,
'pop h': 225,
'xthl': 227,
'push h': 229,
'rst 4': 231,
'rpe': 232,
'pchl': 233,
'xchg': 235,
'rst 5': 239,
'rp': 240,
'pop psw': 241,
'di': 243,
'push psw': 245,
'rst 6': 247,
'rm': 248,
'sphl': 249,
'ei': 251,
'rst 7': 255,
'cmc': 63,
'mov b,b': 64,
'mov b,c': 65,
'mov b,d': 66,
'mov b,e': 67,
'mov b,h': 68,
'mov b,l': 69,
'mov b,m': 70,
'mov b,a': 71,
'mov c,b': 72,
'mov c,c': 73,
'mov c,d': 74,
'mov c,e': 75,
'mov c,h': 76,
'mov c,l': 77,
'mov c,m': 78,
'mov c,a': 79,
'mov d,b': 80,
'mov d,c': 81,
'mov d,d': 82,
'mov d,e': 83,
'mov d,h': 84,
'mov d,l': 85,
'mov d,m': 86,
'mov d,a': 87,
'mov e,b': 88,
'mov e,c': 89,
'mov e,d': 90,
'mov e,e': 91,
'mov e,h': 92,
'mov e,l': 93,
'mov e,m': 94,
'mov e,a': 95,
'mov h,b': 96,
'mov h,c': 97,
'mov h,d': 98,
'mov h,e': 99,
'mov h,h': 100,
'mov h,l': 101,
'mov h,m': 102,
'mov h,a': 103,
'mov l,b': 104,
'mov l,c': 105,
'mov l,d': 106,
'mov l,e': 107,
'mov l,h': 108,
'mov l,l': 109,
'mov l,m': 110,
'mov l,a': 111,
'mov m,b': 112,
'mov m,c': 113,
'mov m,d': 114,
'mov m,e': 115,
'mov m,h': 116,
'mov m,l': 117,
'hlt': 118,
'mov m,a': 119,
'mov a,b': 120,
'mov a,c': 121,
'mov a,d': 122,
'mov a,e': 123,
'mov a,h': 124,
'mov a,l': 125,
'mov a,m': 126,
'mov a,a': 127,
'add b': 128,
'add c': 129,
'add d': 130,
'add e': 131,
'add h': 132,
'add l': 133,
'add m': 134,
'add a': 135,
'adc b': 136,
'adc c': 137,
'adc d': 138,
'adc e': 139,
'adc h': 140,
'adc l': 141,
'adc m': 142,
'adc a': 143,
'sub b': 144,
'sub c': 145,
'sub d': 146,
'sub e': 147,
'sub h': 148,
'sub l': 149,
'sub m': 150,
'sub a': 151,
'sbb b': 152,
'sbb c': 153,
'sbb d': 154,
'sbb e': 155,
'sbb h': 156,
'sbb l': 157,
'sbb m': 158,
'sbb a': 159,
'ana b': 160,
'ana c': 161,
'ana d': 162,
'ana e': 163,
'ana h': 164,
'ana l': 165,
'ana m': 166,
'ana a': 167,
'xra b': 168,
'xra c': 169,
'xra d': 170,
'xra e': 171,
'xra h': 172,
'xra l': 173,
'xra m': 174,
'xra a': 175,
'ora b': 176,
'ora c': 177,
'ora d': 178,
'ora e': 179,
'ora h': 180,
'ora l': 181,
'ora m': 182,
'ora a': 183,
'cmp b': 184,
'cmp c': 185,
'cmp d': 186,
'cmp e': 187,
'cmp h': 188,
'cmp l': 189,
'cmp m': 190,
'cmp a': 191,
'rnz': 192,
'pop b': 193,
}
# Instruction table dictionary that expects a secondary parameter (8-bit)
varInstructionTable_EigthBit = {
'mvi b,': 6,
'mvi c,': 14,
'mvi d,': 22,
'mvi e,': 30,
'mvi h,': 38,
'mvi l,': 46,
'sta': 50,
'mvi m,': 54,
'lda': 58,
'mvi a,': 62,
'adi': 198,
'aci': 206,
'out': 211,
'sui': 214,
'in': 219,
'sbi': 222,
'ani': 230,
'xri': 238,
'ori': 246,
'cpi': 254
}
# Instruction table dictionary that expects a secondary parameter (16-bit)
varInstructionTable_SixteenBit = {
'lxi b,': 1,
'lxi d,': 17,
'lxi h,': 33,
'shld': 34,
'lhld': 42,
'lxi sp,': 49,
'jnz': 194,
'jmp': 195,
'cnz': 196,
'jz': 202,
'cz': 204,
'call': 205,
'jnc': 210,
'cnc': 212,
'cc': 220,
'jc': 218,
'jpo': 226,
'cpo': 228,
'jpe': 234,
'cpe': 236,
'jp': 242,
'cp': 244,
'jm': 250,
'cm': 252
}
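# Worked encoding example (illustrative; the operand byte order is assumed
# from the Intel 8080's little-endian convention, not shown in this file):
# "mvi a, 0x2a" matches the 8-bit table and assembles to opcode 62 (0x3E)
# followed by the operand byte 0x2A, while "jmp 0x0010" matches the 16-bit
# table and assembles to opcode 195 (0xC3) followed by the address
# low byte first: 0x10 0x00.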
helpArgVariants = ['-h', '--h', '-help', '--help']
######### FUNCTIONS #########
# Print a small ASCII art banner
def banner():
print(Style.DIM)
print(' ___________________________')
print(' / /\\')
print(' / sadboyzvone\'s _/ /\\')
print(' / Intel 8080 / \/')
print(' / Assembler /\\')
print('/___________________________/ /')
print('\___________________________\/')
print(' \\ \\ \\ \\ \\ \\ \\ \\ \\ \\ \\ \\ \\ \\ \\'
+ Style.RESET_ALL + Style.BRIGHT)
print(Fore.WHITE + '\nPowered by ' + Fore.BLUE + 'Pyt'
+ Fore.YELLOW + 'hon' + Fore.WHITE
+ '\nCopyright (C) 2017, Zvonimir Rudinski')
# Print usage information
def printHelp():
print('\nThis ' + Fore.BLUE + 'Intel' + Fore.WHITE
+ ' 8080 assembler was made for ' + Fore.BLUE + 'Project '
+ Fore.YELLOW + 'Week' + Fore.WHITE + ' at my school.')
print('It is written in ' + Fore.BLUE + 'Pyt' + Fore.YELLOW + 'hon'
+ Fore.WHITE)
print('Modules: ' + Fore.RED + 'Co' + Fore.BLUE + 'lo'
+ Fore.YELLOW + 'ra' + Fore.GREEN + 'ma' + Fore.WHITE)
    print('\nPass a file path as an argument.')
# Main function
def run(fileNameArg):
banner() # Print banner
# File name
fileName = None
# Variable and label info
labelMap = {}
variableMap = {}
# Program counter
programCounter = 0
try:
if fileNameArg in helpArgVariants:
printHelp() # Print help then exit
exit(0)
else:
fileName = fileNameArg # Argument is provided
print('Trying to open ' + Fore.YELLOW +
'\'' + fileName + '\'' + Fore.WHITE)
if path.isfile(fileName) is False: # Check if the file exists
print(Fore.RED + 'Fatal error: ' + Fore.WHITE +
                  'File not found: ' + Fore.YELLOW + '\'' + fileName + '\'')
raise IOError # It doesn't raise an exception
# Read in the source code from the file
with open(fileName, 'r') as sourceFile:
sourceCode = sourceFile.readlines()
# Strip the newlines
sourceCode = map(lambda sc: sc.strip(), sourceCode)
# Start compiling the code
with open(fileName + '.rom', 'wb+') as romFile:
# Check the line
for (i, scLine) in enumerate(sourceCode):
scLine = scLine.lower() # Turn it to lower case for easier lookup
# Check for ORG
if scLine.split(' ')[0] == "org":
programCounter = int(scLine.split(' ')[1].zfill(4))
print("ORG set to " + str(programCounter))
romFile.seek(0,0)
for x in range(0,programCounter):
romFile.write(pack('B', instructionTable['nop']))
romFile.seek(programCounte
|
twisted/twistedchecker
|
twistedchecker/test/test_docstring.py
|
Python
|
mit
| 678
| 0.001475
|
from twisted.trial import unittest
from twistedchecker.checkers.docstring import DocstringChecker
class DocstringTestCase(unittest.TestCase):
"""
Test for twistedchecker.checkers.docstring
"""
def test_getLineIndent(self):
"""
Test of twistedchecker.checkers.docstring._getLineIndent.
"""
checker = DocstringChecker()
indentNoSpace = checker._getLineIndent("foo")
        indentTwoSpaces = checker._getLineIndent("  foo")
        indentFourSpaces = checker._getLineIndent("    foo")
self.assertEqual(indentNoSpace, 0)
self.assertEqual(indentTwoSpaces, 2)
self.assertEqual(indentFourSpaces, 4)
|
wahur666/kodok
|
python/bead5/mian.py
|
Python
|
gpl-3.0
| 1,500
| 0.002667
|
import os.path
for root, dirs, files in os.walk(os.path.dirname(os.path.realpath(__file__))):
for file in files :
if file.endswith('.prog') :
os.chdir(root)
f1 = open(file, 'r')
output = file.split(".")
f2 = open(output[0]+'.py', 'w')
indent = 0
pad = " "
temp = f1.read().splitlines()
for line in temp:
tags = line.split(";;")
                for tag in tags:
a = tag.replace("{", ":\n")
a = a.replace("CIKLUS", "for")
a = a.replace("ELAGAZAS", "if")
if "\n" in a:
t = a.split("\n")
for t1 in t:
for i in range(indent):
t1 = pad + t1
if "for" in t1 or "if" in t1:
                                indent += 1
if "}" in t1:
indent -= t1.count("}")
t1 = t1.replace("}" ,"")
f2.write(t1 + '\n')
else:
for i in range(indent):
a = pad + a
if "}" in a:
indent -= a.count("}")
a = a.replace("}", "")
f2.write(a + '\n')
f1.close()
f2.close()
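# Illustrative translation (hypothetical .prog input): the tag
#     CIKLUS i in range(3){
# becomes the Python line "for i in range(3):" and bumps the indent level by
# one, while a later "}" pops one indent level back off before writing.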
|
ofavre/cellulart
|
matrixwidget.py
|
Python
|
bsd-3-clause
| 13,392
| 0.006347
|
# -*- coding: utf-8 -*-
# License: See LICENSE file.
import random
import math
try:
import png
except ImportError:
png = False
import pygtk
pygtk.require('2.0')
import gtk
import gtk.gtkgl
import gtk.gtkgl.apputils
from OpenGL.GL import *
from OpenGL.GLU import *
import openglutils
class MatrixWidget(gtk.DrawingArea, gtk.gtkgl.Widget):
"""OpenGL widget drawing the view of the matrices of the world."""
default_max_width = 1024
default_max_height = 768
default_min_width = 200
default_min_height = 150
def __init__(self, world, pointsize=None, offset=None):
gtk.DrawingArea.__init__(self)
#gtk.gtkgl.Widget.__init__(self) # (abstract class)
# Object initialisation
self.__world = world
self.__original_pointsize = pointsize
self.__pointsize_power = 0
self.__pointsize_power_min = -50
self.__pointsize_power_max = +50
self.__pointsize_factor = 1.1
self.__pointsize = pointsize # self.__original_pointsize * self.__pointsize_factor ** self.__pointsize_power
self.offset = list(offset) if not offset == None else None
self.__is_panning = None
self.__needs_reconfigure = False
# Widget initialisation
# If no pointsize is given, find a good one
if self.__original_pointsize == None:
self.__original_pointsize = 1
w = self.default_max_width / float(world.get_shape()[1])
h = self.default_max_height / float(world.get_shape()[0])
pw = math.log(w) / math.log(self.__pointsize_factor)
ph = math.log(h) / math.log(self.__pointsize_factor)
if pw > 0: pw = int(math.ceil(pw))
else: pw = int(math.floor(pw))
if ph > 0: ph = int(math.ceil(ph))
else: ph = int(math.floor(ph))
self.__pointsize_power = max(self.__pointsize_power_min, min(self.__pointsize_power_max, min(pw,ph)))
self.__pointsize = self.__original_pointsize * self.__pointsize_factor ** self.__pointsize_power
# Request a default size
reqw, reqh = int(math.ceil(max(self.default_min_width,min(self.default_max_width,world.get_shape()[1]*self.__pointsize)))), int(math.ceil(max(self.default_min_height,min(self.default_max_height,world.get_shape()[0]*self.__pointsize))))
self.set_size_request(reqw, reqh)
# Calculate an offset to center the matrices, assuming the default size is the actual size
if self.offset == None:
mw = reqw/self.__pointsize - world.get_shape()[1]
mh = reqh/self.__pointsize - world.get_shape()[0]
self.offset = [-mh/2, -mw/2]
# Set OpenGL-capability to the drawing area
self.set_gl_capability(openglutils.get_glconfig(), share_list=None, direct=True, render_type=gtk.gdkgl.RGBA_TYPE)
# Connect the relevant signals for the drawing
self.connect_after('realize', self.__on_realize)
self.connect('configure-event', self.__on_configure_event)
self.connect('expose-event', self.__on_expose_event)
# Connect additionnal events for the manipulation of the view
self.set_events(gtk.gdk.ALL_EVENTS_MASK)
self.connect('scroll-event', self.__on_scroll)
self.connect('motion-notify-event', self.__on_motion_notify)
self.connect('button-press-event', self.__on_button_press)
self.connect('button-release-event', self.__on_button_release)
def get_pointsize(self):
""" Returns the size of a cell, in pixels."""
return self.__pointsize
def set_pointsize(self,pointsize):
""" Sets the size of a cell, in pixels.
Requests a redraw."""
self.__pointsize = pointsize
self.queue_redraw(True)
def queue_redraw(self,needs_reconfigure=False):
""" Requests a redraw of the widget.
needs_reconfigure: set to True if the offset or the point size has changed."""
if needs_reconfigure:
self.__needs_reconfigure = needs_reconfigure
self.queue_draw()
def __on_scroll(self, widget, event):
"""Handles the mousewheel events and zooms the view accordingly."""
# Pan to get the origin at the mouse position's point
self.offset[0] += (widget.get_allocation().height-event.y) / self.__pointsize
self.offset[1] += event.x / self.__pointsize
# Zoom
if event.direction == gtk.gdk.SCROLL_UP:
self.__pointsize_power = min(self.__pointsize_power_max, self.__pointsize_power+1)
elif event.direction == gtk.gdk.SCROLL_DOWN:
self.__pointsize_power = max(self.__pointsize_power_min, self.__pointsize_power-1)
self.__pointsize = self.__original_pointsize * self.__pointsize_factor ** self.__pointsize_power
# Pan back to get the mouse position's point back under the pointer on the screen
self.offset[0] -= (widget.get_allocation().height-event.y) / self.__pointsize
self.offset[1] -= event.x / self.__pointsize
self.queue_redraw(True)
def __on_button_press(self, widget, event):
"""Handles a button press and starts a panning operation."""
self.__is_panning = (event.y, event.x)
self.queue_redraw(True)
def __on_motion_notify(self, widget, event):
"""Handles the pointer motion and pans accordingly if in a panning operation."""
if self.__is_panning != None:
self.offset[0] += (event.y - self.__is_panning[0]) / self.__pointsize
self.offset[1] -= (event.x - self.__is_panning[1]) / self.__pointsize
self.__is_panning = (event.y, event.x)
self.queue_redraw(True)
def __on_button_release(self, widget, event):
"""Handles a button release and stops a panning operation."""
self.__is_panning = None
def __on_realize(self, *args):
"""A one time widget setup. Initialises the OpenGL drawable."""
gldrawable = self.get_gl_drawable()
glcontext = self.get_gl_context()
if not gldrawable.gl_begin(glcontext):
return
# Generate (one) texture #TODO: Test if one texture by matrix is better
self.iTex = glGenTextures(1)
# Configure use of textures
glEnable(GL_TEXTURE_2D)
glDisable(GL_TEXTURE_GEN_S)
glDisable(GL_TEXTURE_GEN_T)
glBindTexture(GL_TEXTURE_2D, self.iTex)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST)
# As we're in 2D, we don't need any depth test
glDisable(GL_DEPTH_TEST)
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
# Set opaque black as background color
glClearColor(0.0, 0.0, 0.0, 1.0)
# Refresh the view configuration
self.__reconfigure()
gldrawable.gl_end()
return True
def __on_configure_event(self, *args):
"""Called whenever the widget changes in size. Refreshes the view configuration."""
gldrawable = self.get_gl_drawable()
glcontext = self.get_gl_context()
if not gldrawable.gl_begin(glcontext):
return False
# Refresh the view configuration
self.__reconfigure()
gldrawable.gl_end()
return True
    def __reconfigure(self):
        """Configures the view origin, size and viewport.
        To be called inside gl_begin() and gl_end()."""
# Get the widget's size
width, height = self.allocation.width, self.allocation.height
# Tell OpenGL to draw onto the same size
glViewport(0, 0, width, height)
# Set the view origin and extent
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
        glOrtho(self.offset[1]+0.0, self.offset[1]+width/float(self.__pointsize), self.offset[0]+0.0, self.offset[0]+height/float(self.__pointsize), -1.0, 1.0)
# Reset any 3D transform
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def __on_expose_event(self, widget, event, for_export=False):
gldrawable = self.get_gl_drawable()
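# --- Hedged sketch, not part of the original file ---
# The widget's zoom model is exponential: pointsize = original * factor ** power,
# so every scroll step scales the view by a constant ratio (1.1 above). A minimal
# standalone illustration of how __init__ derives its initial zoom power from a
# target cell size (same rounding-away-from-zero and clamping conventions;
# _initial_zoom_power is a made-up name for this sketch):
import math as _math

def _initial_zoom_power(target, original=1.0, factor=1.1, pmin=-50, pmax=50):
    """Return the clamped integer power p such that original * factor**p ~= target."""
    p = _math.log(target / original) / _math.log(factor)
    p = int(_math.ceil(p)) if p > 0 else int(_math.floor(p))
    return max(pmin, min(pmax, p))

# e.g. for a 200-column world on a 1024-px-wide view:
# _initial_zoom_power(1024 / 200.0) == 18, a pointsize of about 1.1 ** 18 ~= 5.56 px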
|
indexofire/django-cms-content
|
cms_content_patch/forms.py
|
Python
|
bsd-3-clause
| 604
| 0.006623
|
# -*- coding: utf-8 -*-
from django import forms
from cms_content.settings import EDITOR
from cms_content.models import CMSArticle
from cms_content import widgets
WIDGET = getattr(widgets, EDITOR)
class CMSArticleAdminForm(forms.ModelForm):
content = forms.CharField(widget=WIDGET)
class Meta:
        model = CMSArticle

class CMSArticleFrontendForm(forms.ModelForm):
error_css_class = 'error'
required_css_class = 'required'
content = forms.CharField(widget=WIDGET)
class Meta:
model = CMSArticle
fields = ('title', 'slug', 'content', 'category',)
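# --- Hedged sketch, not part of the original file ---
# WIDGET = getattr(widgets, EDITOR) above resolves the editor widget class from
# a settings string at import time. A tiny standalone illustration of that
# config-driven lookup pattern (the names below are invented for the example):
class _FakeWidgets(object):
    class TinyMCE(object):
        pass

def _resolve_widget(name, namespace=_FakeWidgets, default=None):
    """Mirror the getattr dispatch: settings string -> widget class."""
    return getattr(namespace, name, default)

# _resolve_widget('TinyMCE') is _FakeWidgets.TinyMCE  ->  True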
|
benjamincongdon/adept
|
editMapTestScene.py
|
Python
|
mit
| 5,875
| 0.00783
|
import os
import os.path
import sys
import pygame
from buffalo import utils
from buffalo.scene import Scene
from buffalo.label import Label
from buffalo.button import Button
from buffalo.input import Input
from buffalo.tray import Tray
from camera import Camera
from mapManager import MapManager
from pluginManager import PluginManager
from toolManager import ToolManager
class CameraController:
def __init__(self):
self.fPos = (0.0, 0.0)
self.pos = (int(self.fPos[0]), int(self.fPos[1]))
self.xv, self.yv = 0.0, 0.0
self.speed = 1.2
self.shift_speed = self.speed * 5.0
def update(self, keys):
w, a, s, d, shift = (
keys[pygame.K_w],
keys[pygame.K_a],
keys[pygame.K_s],
keys[pygame.K_d],
keys[pygame.K_LSHIFT],
)
if shift:
speed = self.shift_speed
else:
speed = self.speed
speed *= utils.delta / 16.0
self.xv = 0.0
self.yv = 0.0
if w:
self.yv -= speed
if a:
self.xv -= speed
if s:
self.yv += speed
if d:
self.xv += speed
x, y = self.fPos
x += self.xv
y += self.yv
self.fPos = x, y
self.pos = (int(self.fPos[0]), int(self.fPos[1]))
class EditMapTestScene(Scene):
def on_escape(self):
sys.exit()
def blit(self):
Camera.blitView()
def update(self):
super(EditMapTestScene, self).update()
keys = pygame.key.get_pressed()
self.camera_controller.update(keys)
Camera.update()
MapManager.soft_load_writer()
def __init__(self):
Scene.__init__(self)
self.BACKGROUND_COLOR = (0, 0, 0, 255)
PluginManager.loadPlugins()
self.camera_controller = CameraController()
Camera.lock(self.camera_controller, initial_update=True)
Button.DEFAULT_SEL_COLOR = (50, 50, 100, 255)
self.tool_tray = Tray(
(utils.SCREEN_W - 270, 20),
(250, 800),
min_width=250, max_width=250,
min_height=250, max_height=800,
color=(100, 50, 50, 100),
)
self.tool_tray.labels.add(
Label(
(int(self.tool_tray.width / 2), 10),
"Tool Tray",
color=(255,255,255,255),
x_centered=True,
font="default24",
)
)
self.tool_tray.labels.add(
Label(
(int(self.tool_tray.width / 2), 25),
"________________",
color=(255,255,255,255),
x_centered=True,
font="default18",
)
)
self.tool_tray.labels.add(
Label(
(int(self.tool_tray.width / 2), 50),
"Function",
color=(255,255,255,255),
x_centered=True,
font="default18",
)
)
def set_func_state_to_select():
ToolManager.set_func_state(ToolManager.FUNC_SELECT)
self.tool_tray.render()
self.button_select_mode = Button(
(15, 80),
" Select Mode ",
color=(255,255,255,255),
bg_color=(100,100,200,255),
font="default12",
func=set_func_state_to_select,
)
self.tool_tray.buttons.add(self.button_select_mode)
def set_func_state_to_fill():
ToolManager.set_func_state(ToolManager.FUNC_FILL)
self.tool_tray.render()
self.button_fill_mode = Button(
(self.tool_tray.width - 15, 80),
" Fill Mode ",
color=(255,255,255,255),
bg_color=(100,100,200,255),
invert_x_pos=True,
font="default12",
func=set_func_state_to_fill,
)
        self.tool_tray.buttons.add(self.button_fill_mode)
self.tool_tray.labels.add(
Label(
(int(self.tool_tray.width / 2), 120),
"________________",
color=(255,255,255,255),
x_centered=True,
font="default18",
)
)
        self.tool_tray.labels.add(
            Label(
(int(self.tool_tray.width / 2), 150),
"Area of Effect",
color=(255,255,255,255),
x_centered=True,
font="default18",
)
)
def set_effect_state_to_draw():
ToolManager.set_effect_state(ToolManager.EFFECT_DRAW)
self.tool_tray.render()
self.button_draw_mode = Button(
(15, 180),
" Draw Mode ",
color=(255,255,255,255),
bg_color=(100,100,200,255),
font="default12",
func=set_effect_state_to_draw,
)
self.tool_tray.buttons.add(self.button_draw_mode)
def set_effect_state_to_area():
ToolManager.set_effect_state(ToolManager.EFFECT_AREA)
self.tool_tray.render()
self.button_area_mode = Button(
(self.tool_tray.width - 15, 180),
" Area Mode ",
color=(255,255,255,255),
bg_color=(100,100,200,255),
invert_x_pos=True,
font="default12",
func=set_effect_state_to_area,
)
self.tool_tray.buttons.add(self.button_area_mode)
ToolManager.initialize_states(
ToolManager.FUNC_SELECT, ToolManager.EFFECT_DRAW,
(
self.button_fill_mode,
self.button_select_mode,
self.button_draw_mode,
self.button_area_mode,
),
)
self.tool_tray.render()
self.trays.add(self.tool_tray)
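# --- Hedged sketch, not part of the original file ---
# CameraController.update() multiplies its speed by utils.delta / 16.0, i.e. it
# normalises movement to a 16 ms (~60 FPS) reference frame so the camera covers
# the same distance per second at any frame rate. A standalone version of that
# idea (no pygame/buffalo needed; _move is a made-up name for this sketch):
def _move(pos, direction, speed, delta_ms, reference_ms=16.0):
    """Advance a 1-D position frame-rate independently."""
    return pos + direction * speed * (delta_ms / reference_ms)

# Two 16 ms frames cover the same distance as one 32 ms frame:
# _move(_move(0.0, 1, 1.2, 16), 1, 1.2, 16) == _move(0.0, 1, 1.2, 32)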
|
OCA/partner-contact
|
partner_industry_secondary/tests/test_res_partner_industry.py
|
Python
|
agpl-3.0
| 1,860
| 0
|
# Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2016 Tecnativa S.L. - Pedro M. Baeza
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo.exceptions import UserError, ValidationError
from odoo.tests import common
class TestResPartnerIndustry(common.SavepointCase):
@classmethod
def setUpClass(cls):
super(TestResPartnerIndustry, cls).setUpClass()
        cls.industry_model = cls.env["res.partner.industry"]
cls.industry_main = cls.industry_model.create({"name": "Test"})
cls.industry_child = cls.industry_model.create(
{"name": "Test child", "parent_id": cls.industry_main.id}
        )

    def test_check_industries(self):
with self.assertRaises(ValidationError):
self.env["res.partner"].create(
{
"name": "Test",
"industry_id": self.industry_main.id,
"secondary_industry_ids": [(4, self.industry_main.id)],
}
)
def test_check_copy(self):
industry_copy = self.industry_child.copy()
self.assertEqual(industry_copy.name, "Test child 2")
def test_check_uniq_name(self):
with self.assertRaises(ValidationError):
self.industry_model.create({"name": "Test"})
def test_check_recursion(self):
with self.assertRaises(UserError):
self.industry_main.parent_id = self.industry_child.id
with self.assertRaises(ValidationError) as e:
self.industry_main._check_parent_id()
error_message = "Error! You cannot create recursive industries."
self.assertEqual(e.exception.args[0], error_message)
def test_name(self):
self.assertEqual(self.industry_child.display_name, "Test / Test child")
|
sesuncedu/bitcurator
|
python/bc_gen_feature_rep_xls.py
|
Python
|
gpl-3.0
| 1,974
| 0.015198
|
#!/usr/bin/env python
#
# Generate report in Excel format (from xml input)
#
import sys,os,shelve
import re,dfxml,fiwalk
from bc_utils import filename_from_path, is_special_file
from openpyxl.workbook import Workbook
from openpyxl.writer.excel import ExcelWriter
from openpyxl.cell import get_column_letter
def bc_generate_feature_xlsx(PdfReport, data, feature_file):
wb = Workbook()
dest_filename = PdfReport.featuredir +'/'+ (filename_from_path(feature_file))[10:-3] + "xlsx"
row_idx = [2]
ws = wb.worksheets[0]
ws.title = "File Feature Information"
ws.cell('%s%s'%('A', '1')).value = '%s' % "Filename"
ws.cell('%s%s'%('B', '1')).value = '%s' % "Position"
ws.cell('%s%s'%('C', '1')).value = '%s' % "Feature"
linenum=0
    for row in data:
        # Skip known summary lines (e.g. the trailing "Total features" line)
        if re.match("Total features", str(row)):
            continue
filename = "Unknown"
feature = "Unknown"
position = "Unknown"
# Some lines in the annotated_xxx.txt have less than three
# columns where filename or feature may be missing.
if len(row) > 3:
filename = row[3]
else:
filename = "Unknown"
if len(row) > 1:
feature = row[1]
else:
feature = "Unknown"
position = row[0]
# If it is a special file, check if the user wants it to
        # be reported. If not, exclude this from the table.
if (PdfReport.bc_config_report_special_files == False) and \
(is_special_file(filename)):
## print("D: File %s is special. So skipping" %(filename))
continue
        # Keep the column order in sync with the header row: A=Filename, B=Position, C=Feature
        ws.cell('%s%s'%('A', row_idx[0])).value = '%s' % filename
        ws.cell('%s%s'%('B', row_idx[0])).value = '%s' % position
        ws.cell('%s%s'%('C', row_idx[0])).value = '%s' % feature
row_idx[0] += 1
wb.save(filename=dest_filename)
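# --- Hedged sketch, not part of the original file ---
# The loop above tolerates annotated-feature rows that arrive with fewer than
# the expected columns by falling back to "Unknown". A compact standalone
# version of that defensive unpacking (field order assumed from the code above;
# _parse_feature_row is a made-up name):
def _parse_feature_row(row, missing="Unknown"):
    """Return (filename, feature, position) from a possibly short row."""
    position = row[0] if len(row) > 0 else missing
    feature = row[1] if len(row) > 1 else missing
    filename = row[3] if len(row) > 3 else missing
    return filename, feature, position

# _parse_feature_row(["12345", "foo@bar.com"])  ->  ("Unknown", "foo@bar.com", "12345")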
|
nicococo/tilitools
|
tilitools/utils_data.py
|
Python
|
mit
| 5,881
| 0.009522
|
import numpy as np
import cvxopt as co
def load_mnist_dataset():
import torchvision.datasets as datasets
mnist_train = datasets.MNIST(root='../data/mnist', train=True, download=True, transform=None)
mnist_test = datasets.MNIST(root='../data/mnist', train=False, download=True, transform=None)
test_labels = np.array([mnist_test[i][1].numpy() for i in range(len(mnist_test))], dtype=np.int)
train_labels = np.array([mnist_train[i][1].numpy() for i in range(len(mnist_train))], dtype=np.int)
test = np.array([np.asarray(mnist_test[i][0]).reshape(28*28) for i in range(len(mnist_test))], dtype=np.float)
train = np.array([np.asarray(mnist_train[i][0]).reshape(28*28) for i in range(len(mnist_train))], dtype=np.float)
train /= 255. # normalize data to be in range [0,1]
test /= 255.
return train, train_labels, test, test_labels, [28, 28]
def load_fashion_mnist_dataset():
import torchvision.datasets as datasets
mnist_train = datasets.FashionMNIST(root='../data/fashion-mnist', train=True, download=True, transform=None)
mnist_test = datasets.FashionMNIST(root='../data/fashion-mnist', train=False, download=True, transform=None)
test_labels = np.array([mnist_test[i][1].numpy() for i in range(len(mnist_test))], dtype=np.int)
train_labels = np.array([mnist_train[i][1].numpy() for i in range(len(mnist_train))], dtype=np.int)
test = np.array([np.asarray(mnist_test[i][0]).reshape(28*28) for i in range(len(mnist_test))], dtype=np.float)
train = np.array([np.asarray(mnist_train[i][0]).reshape(28*28) for i in range(len(mnist_train))], dtype=np.float)
train /= 255. # normalize data to be in range [0,1]
test /= 255.
return train, train_labels, test, test_labels, [28, 28]
def load_emnist_dataset():
import torchvision.datasets as datasets
mnist_train = datasets.EMNIST(root='../data/emnist', split='balanced', train=True, download=True, transform=None)
mnist_test = datasets.EMNIST(root='../data/emnist', split='balanced', train=False, download=True, transform=None)
test_labels = np.array([mnist_test[i][1].numpy() for i in range(len(mnist_test))], dtype=np.int)
train_labels = np.array([mnist_train[i][1].numpy() for i in range(len(mnist_train))], dtype=np.int)
test = np.array([np.asarray(mnist_test[i][0]).reshape(28*28) for i in range(len(mnist_test))], dtype=np.float)
train = np.array([np.asarray(mnist_train[i][0]).reshape(28*28) for i in range(len(mnist_train))], dtype=np.float)
train /= 255. # normalize data to be in range [0,1]
test /= 255.
return train, train_labels, test, test_labels, [28, 28]
def load_cifar10_dataset():
import torchvision.datasets as datasets
cifar_train = datasets.CIFAR10(root='../data/cifar10', train=True, download=True, transform=None)
cifar_test = datasets.CIFAR10(root='../data/cifar10', train=False, download=True, transform=None)
test_labels = np.array([cifar_test[i][1] for i in range(len(cifar_test))], dtype=np.int)
train_labels = np.array([cifar_train[i][1] for i in range(len(cifar_train))], dtype=np.int)
test = np.array([np.asarray(cifar_test[i][0].convert('F')).reshape(32*32) for i in range(len(cifar_test))], dtype=np.float)
train = np.array([np.asarray(cifar_train[i][0].convert('F')).reshape(32*32) for i in range(len(cifar_train))], dtype=np.float)
train /= 255. # normalize data to be in range [0,1]
test /= 255.
return train, train_labels, test, test_labels, [32, 32]
def get_gaussian(num, dims=2, means=[0,0], vars=[1,1]):
    data = np.random.multivariate_normal(means, np.diag(vars), num)  # use the requested per-dimension variances
return data
def get_2state_gaussian_seq(lens,dims=2,means1=[2,2,2,2],means2=[5,5,5,5],vars1=[1,1,1,1],vars2=[1,1,1,1],anom_prob=1.0):
seqs = np.zeros((dims, lens))
lbls = np.zeros((1, lens), dtype=np.int8)
marker = 0
# generate first state sequence
for d in range(dims):
seqs[d, :] = np.random.randn(lens)*vars1[d] + means1[d]
prob = np.random.uniform()
if prob < anom_prob:
# add second state blocks
while True:
max_block_len = 0.6*lens
min_block_len = 0.1*lens
block_len = np.int(max_block_len*np.random.uniform()+3)
block_start = np.int(lens*np.random.uniform())
            if block_len - (block_start+block_len-lens)-3 > min_block_len:
break
block_len = min( [block_len, block_len - (block_start+block_len-lens)-3] )
        lbls[0, block_start:block_start+block_len-1] = 1  # lbls has shape (1, lens)
marker = 1
for d in range(dims):
seqs[d,block_start:block_start+block_len-1] = np.random.randn(1,block_len-1)*vars2[d] + means2[d]
return seqs, lbls, marker
def get_2state_anom_seq(lens, comb_block_len, anom_prob=1.0, num_blocks=1):
    seqs = co.matrix(0.0, (1, lens))
lbls = co.matrix(0, (1, lens))
marker = 0
# generate first state sequence, gaussian noise 0=mean, 1=variance
seqs = np.zeros((1, lens))
lbls = np.zeros((1, lens))
bak = seqs.copy()
prob = np.random.uniform()
if prob < anom_prob:
# add second state blocks
block_len = np.int(np.floor(comb_block_len / float(num_blocks)))
marker = 1
# add a single block
blen = 0
for b in range(np.int(num_blocks)):
if (b==num_blocks-1 and b>1):
block_len = np.round(comb_block_len-blen)
isDone = False
while isDone == False:
start = np.int(np.random.uniform()*float(lens-block_len+1))
if np.sum(lbls[0,start:start+block_len]) == 0:
lbls[0, start:start+block_len] = 1
seqs[0, start:start+block_len] = bak[0, start:start+block_len]+4.0
isDone = True
break
blen += block_len
return seqs, lbls, marker
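# --- Hedged sketch, not part of the original file ---
# Both sequence generators above inject anomalous "second state" blocks into an
# otherwise stationary signal with probability anom_prob and return per-timestep
# labels plus a marker flag. A minimal usage sketch:
if __name__ == '__main__':
    np.random.seed(0)
    seqs, lbls, marker = get_2state_anom_seq(lens=100, comb_block_len=20,
                                             anom_prob=1.0, num_blocks=2)
    # marker == 1 whenever blocks were injected; lbls marks their positions,
    # so with two non-overlapping blocks of 10 steps we expect 20 labelled steps.
    print("marker=%d, anomalous steps=%d" % (marker, int(np.sum(lbls))))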
|
zenn1989/scoria-interlude
|
L2Jscoria-Game/data/scripts/quests/335_TheSongOfTheHunter/__init__.py
|
Python
|
gpl-3.0
| 28,333
| 0.032895
|
import sys
from com.l2scoria.gameserver.model.quest import State
from com.l2scoria.gameserver.model.quest import QuestState
from com.l2scoria.gameserver.model.quest.jython import QuestJython as JQuest
from com.l2scoria.gameserver.network.serverpackets import CreatureSay
from com.l2scoria.util.random import Rnd
qn = "335_TheSongOfTheHunter"
#NPCS
Grey = 30744
Tor = 30745
Cybellin = 30746
#Items
Cyb_Dagger = 3471
License_1 = 3692
License_2 = 3693
Leaf_Pin = 3694
Test_Instructions_1 = 3695
Test_Instructions_2 = 3696
Cyb_Req = 3697
#Mobs
Breka_Orc_Warrior = 20271
Windsus = 20553
Tarlk_Bugbear_Warrior = 20571
Gremlin_Filcher = 27149
Mobs = [Breka_Orc_Warrior, Windsus, Tarlk_Bugbear_Warrior, Gremlin_Filcher]
Lizardmen = [20578,20579,20581,20582,20641,20642,20643]
#Droplist Format- npcId:[itemId,itemAmount,chance]
Level_1 = {
20550 : [3709,40,75], #Gaurdian Basilisk
20581 : [3710,20,50], #Leto Lizardman Shaman
27140 : [3711,1,100], #Breka Overlord Haka
27141 : [3712,1,100], #Breka Overlord Jaka
27142 : [3713,1,100], #Breka Overlord Marka
27143 : [3714,1,100], #Windsus Aleph
20563 : [3715,20,50], #Manashen Gargoyle
20565 : [3715,20,50], #Enchanted Stone Golemn
20555 : [3716,30,70], #Giant Fungus
}
Level_2 = {
20586 : [3717,20,50], #Timak Orc Warrior
20560 : [3718,20,50], #Trisalim Spider
20561 : [3718,20,50], #Trisalim Tarantula
20591 : [3719,30,100], #Valley Treant
20597 : [3719,30,100], #Valley Treant Elder
20675 : [3720,20,50], #Tairim
20660 : [3721,20,50], #Archer of Greed
27144 : [3722,1,100], #Tarlk Raider Athu
27145 : [3723,1,100], #Tarlk Raider Lanka
27146 : [3724,1,100], #Tarlk Raider Triska
27147 : [3725,1,100], #Tarlk Raider Motura
27148 : [3726,1,100], #Tarlk Raider Kalath
}
Grey_Advance = [
#level 1
[[3709],40],
[[3710],20],
[[3711,3712,3713],1],
[[3714],1],
[[3715],20],
[[3716],30],
#level 2
[[3717],20],
[[3718],20],
[[3719],30],
[[3720],20],
[[3721],20],
[[3722,3723,3724,3725,3726],1]
]
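# --- Hedged sketch, not part of the original script ---
# The droplists above follow npcId : [itemId, itemAmount, chance]; a kill is
# presumably resolved by rolling chance out of 100 and granting itemId until
# itemAmount copies have been collected. A plain-Python roll in that format
# (illustrative only, not the L2J quest API; _roll_drop is a made-up name):
import random as _random

def _roll_drop(droplist, npc_id, rng=_random):
    """Return (itemId, 1) when the chance roll succeeds, else None."""
    entry = droplist.get(npc_id)
    if entry is None:
        return None
    item_id, item_amount, chance = entry
    if rng.randint(1, 100) <= chance:
        return (item_id, 1)
    return None

# _roll_drop(Level_1, 20550) -> (3709, 1) about 75% of the time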
#Droplist Format- npcId : [itemRequired,itemGive,itemToGiveAmount,itemAmount,chance]
Tor_requests_1 = {
20578 : [3727,3769,'1',40,80], #Leto Lizardman Archer
20579 : [3727,3769,'1',40,83], #Leto Lizardman Soldier
20586 : [3728,3770,'1',50,89], #Timak Orc Warrior
20588 : [3728,3770,'1',50,100], #Timak Orc Overlord
20565 : [3729,3771,'1',50,100], #Enchanted Stone Golem
20556 : [3730,3772,'1',30,50], #Giant Monster Eye
20557 : [3731,3773,'1',40,80], #Dire Wyrm
20550 : [3732,3774,'Rnd.get(2) + 1',100,100], #Guardian Basilisk
20552 : [3733,3775,'1',50,100], #Fettered Soul
20553 : [3734,3776,'1',30,50], #Windsus
20554 : [3735,3777,'2',100,100],#Grandis
20631 : [3736,3778,'1',50,100], #Taik Orc Archer
20632 : [3736,3778,'1',50,93], #Taik Orc Warrior
20600 : [3737,3779,'1',30,50], #Karul Bugbear
20601 : [3738,3780,'1',40,62], #Tamlin Orc
20602 : [3738,3780,'1',40,80], #Tamlin Orc Archer
27157 : [3739,3781,'1',1,100], #Leto Chief Narak
20567 : [3740,3782,'1',50,50], #Enchanted Gargoyle
20269 : [3741,3783,'1',50,93], #Breka Orc Shaman
20271 : [3741,3783,'1',50,100], #Breka Orc Warrior
27156 : [3742,3784,'1',1,100], #Leto Shaman Ketz
27158 : [3743,3785,'1',1,100], #Timak Raider Kaikee
20603 : [3744,3786,'1',30,50], #Kronbe Spider
27160 : [3746,3788,'1',1,100], #Gok Magok
27164 : [3747,3789,'1',1,100] #Karul Chief Orooto
}
#Droplist Format- npcId : [itemRequired,itemGive,itemAmount,chance]
Tor_requests_2 = {
20560 : [3749,3791,40,66], #Trisalim Spider
20561 : [3749,3791,40,75], #Trisalim Tarantula
20633 : [3750,3792,50,53], #Taik Orc Shaman
20634 : [3750,3792,50,99], #Taik Orc Captain
20641 : [3751,3793,40,88], #Harit Lizardman Grunt
20642 : [3751,3793,40,88], #Harit Lizardman Archer
20643 : [3751,3793,40,91], #Harit Lizardman Warrior
20661 : [3752,3794,20,50], #Hatar Ratman Thief
20662 : [3752,3794,20,52], #Hatar Ratman Boss
20667 : [3753,3795,30,90], #Farcran
20589 : [3754,3796,40,49], #Fline
20590 : [3755,3797,40,51], #Liele
20592 : [3756,3798,40,80], #Satyr
20598 : [3756,3798,40,100], #Satyr Elder
20682 : [3758,3800,30,70], #Vanor Silenos Grunt
20683 : [3758,3800,30,85], #Vanor Silenos Scout
20684 : [3758,3800,30,90], #Vanor Silenos Warrior
20571 : [3759,3801,30,63], #Tarlk Bugbear Warrior
27159 : [3760,3802,1,100], #Timak Overlord Okun
27161 : [3761,3803,1,100], #Taik Overlord Kakran
20639 : [3762,3804,40,86], #Mirror
20664 : [3763,3805,20,77], #Deprive
20593 : [3764,3806,20,68], #Unicorn
20599 : [3764,3806,20,86], #Unicorn Elder
27163 : [3765,3807,1,100], #Vanor Elder Kerunos
20659 : [3766,3808,20,73], #Grave Wanderer
27162 : [3767,3809,1,100], #Hatar Chieftain Kubel
20676 : [3768,3810,10,64] #Judge of Marsh
}
#FilcherDropList Format- reqId : [item,amount,bonus]
Filcher = {
3752 : [3794,20,3],
3754 : [3796,40,5],
3755 : [3797,40,5],
3762 : [3804,40,5]
}
#SpawnList Format- npcId : [item1,item2,npcToSpawn]
Tor_requests_tospawn = {
20582 : [3739,3781,27157], #Leto Lizardman Overlord
20581 : [3742,3784,27156], #Leto Lizardman Shaman
20586 : [3743,3785,27158], #Timak Orc Warrior
20554 : [3746,3788,27160], #Grandis
#level 2
20588 : [3760,3802,27159], #Timak Orc Overlord
20634 : [3761,3803,27161], #Taik Orc Captain
20686 : [3765,3807,27163], #Vanor Silenos Chieftain
20662 : [3767,3809,27162] #Hatar Ratman Boss
}
#RewardsList Format- requestId : [item,quantity,rewardAmount]
Tor_Rewards_1 = {
3727 : [3769,40,2090],
3728 : [3770,50,6340],
3729 : [3771,50,9480],
3730 : [3772,30,9110],
3731 : [3773,40,8690],
3732 : [3774,100,9480],
3733 : [3775,50,11280],
3734 : [3776,30,9640],
3735 : [3777,100,9180],
3736 : [3778,50,5160],
3737 : [3779,30,3140],
3738 : [3780,40,3160],
3739 : [3781,1,6370],
3740 : [3782,50,19080],
3741 : [3783,50,17730],
3742 : [3784,1,5790],
3743 : [3785,1,8560],
3744 : [3786,30,8320],
3746 : [3788,1,27540],
3747 : [3789,1,20560],
}
Tor_Rewards_2 = {
3749 : [3791,40,7250],
3750 : [3792,50,7160],
3751 : [3793,40,6580],
3752 : [3794,20,10100],
3753 : [3795,30,13000],
3754 : [3796,40,7660],
3755 : [3797,40,7660],
3756 : [3798,40,11260],
3758 : [3800,30,8810],
3759 : [3801,30,7350],
3760 : [3802,1,8760],
3761 : [3803,1,9380],
3762 : [3804,40,17820],
3763 : [3805,20,17540],
3764 : [3806,20,14160],
3765 : [3807,1,15960],
3766 : [3808,20,39100],
3767 : [3809,1,39550],
3768 : [3810,10,41200]
}
#Format item : adenaAmount
Cyb_Rewards = {
3699 : 3400,
3700 : 6800,
3701 : 13600,
3702 : 27200,
3703 : 54400,
3704 : 108800,
3705 : 217600,
3706 : 435200,
3707 : 870400
}
Tor_menu = [
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3727\">C: Obtain 40 charms of Kadesh</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3728\">C: Collect 50 Timak Jade Necklaces</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3729\">C: Gather 50 Enchanted Golem Shards</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3730\">C: Collect and bring back 30 pieces of Giant Monster Eye Meat</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3731\">C: Collect and bring back 40 Dire Wyrm Eggs</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3732\">C: Collect and bring back 100 guardian basilisk talons</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3733\">C: Collect and bring back 50 revenants chains</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3734\">C: Collect and bring back 30 Windsus Tusks</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3735\">C: Collect and bring back 100 Grandis Skulls</a><br>",
"<a action=\"bypass
|
legionus/billing
|
tests/test_wapi_metrics.py
|
Python
|
gpl-3.0
| 2,622
| 0.031274
|
import uuid
from unithelper import DBTestCase
from unithelper import mocker
from unithelper import requestor
from unithelper import hashable_dict
from bc import database
from bc import metrics
from bc_wapi import wapi_metrics
class Test(DBTestCase):
def test_metric_get(self):
"""Check getting metric with metricGet"""
data={
'id': str(uuid.uuid4()),
'type': str(uuid.uuid4())[:10],
            'formula': metrics.constants.FORMULA_SPEED,
'aggregate': 0L,
}
with database.DBConnect() as db:
db.insert('metrics', data)
        self.assertEquals(wapi_metrics.metricGet({'id': data['id']}),
requestor({'metric': data}, 'ok'))
self.assertEquals(wapi_metrics.metricGet({'id':''}),
requestor({'message': 'Metric not found' }, 'error'))
with mocker([('bc.metrics.get', mocker.exception),
('bc_wapi.wapi_metrics.LOG.error', mocker.passs)]):
self.assertEquals(wapi_metrics.metricGet({'id':''}),
requestor({'message': 'Unable to obtain metric' }, 'servererror'))
def test_metric_get_list(self):
"""Check getting metrics with metricList"""
data = []
for i in range(2, 10):
d={
'id': str(uuid.uuid4()),
'type': str(uuid.uuid4())[:10],
'formula': metrics.constants.FORMULA_SPEED,
'aggregate': 0L,
}
with database.DBConnect() as db:
db.insert('metrics', d)
data.append(d)
ans = wapi_metrics.metricList('')
self.assertEquals(ans[0], (01 << 2))
self.assertEquals(ans[1]['status'], 'ok')
self.assertEquals(set(map(lambda x: hashable_dict(x), ans[1]['metrics'])),
set(map(lambda x: hashable_dict(x), data)))
with mocker([('bc.metrics.get_all', mocker.exception),
('bc_wapi.wapi_metrics.LOG.error', mocker.passs)]):
self.assertEquals(wapi_metrics.metricList({'id':''}),
requestor({'message': 'Unable to obtain metric list' }, 'servererror'))
def test_metric_add(self):
"""Check the creating metric with metricAdd"""
data={
'id': str(uuid.uuid4()),
'type': str(uuid.uuid4())[:10],
'formula': metrics.constants.FORMULA_SPEED,
'aggregate': 0L,
}
ans = wapi_metrics.metricAdd(data.copy())
self.assertEquals(ans, requestor({'id':data['id']}, 'ok'))
with database.DBConnect() as db:
t1 = db.find('metrics').one()
self.assertEquals(data['id'], t1['id'])
self.assertEquals(data['type'], t1['type'])
with mocker([('bc.metrics.add', mocker.exception),
('bc_wapi.wapi_metrics.LOG.error', mocker.passs)]):
self.assertEquals(wapi_metrics.metricAdd({'id':''}),
requestor({'message': 'Unable to add new metric' }, 'servererror'))
|
sparkslabs/kamaelia_
|
Sketches/JL/IRC/plainPython/client.py
|
Python
|
apache-2.0
| 2,730
| 0.007692
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import socket
import select
import threading
class inputThread(threading.Thread):
    def say(self, chan, words):
send = 'PRIVMSG %s :%s\r\n' % (chan, words)
sock.send(send)
def run(self):
global done
while not done:
msg = raw_input('IRC > ')
if msg == 'QUIT':
                sock.send('QUIT\r\n')
done = True
self.say(channel, msg)
class outputThread(threading.Thread):
def checkForMessages(self):
read_list, write_list, error_list = \
select.select([sock], [], [sock], 0)
if sock in read_list:
raw = sock.recv(8000)
self.printOutput(raw)
def run(self):
while not done:
self.checkForMessages()
def printOutput(self, text):
if '\r' in text:
text = text.replace('\r', '\n')
lines = text.split('\n')
for one_line in lines:
if len(one_line) > 0:
print self.formatLine(one_line)
def formatLine(self, line):
words = line.split()
sender = ""
if line[0] == ':' and len(words) >= 2:
sender = line[1:line.find('!')]
words = words[1:]
tag = words[0].upper()
if tag == 'PRIVMSG':
return '%s: %s' % (sender, words[2])
else:
return line
network = 'irc.freenode.net'
port = 6667
nick = 'lolasuketo'
uname = 'jinna'
host = 'jlei-laptop'
server = 'comcast'
realname = 'python irc bot'
channel = '#kamtest'
sock = socket.socket ( socket.AF_INET, socket.SOCK_STREAM )
sock.connect ( ( network, port ) )
sock.send ('NICK %s \r\n' % nick )
sock.send ( 'USER %s %s %s :%s\r\n' % (uname, host, server, realname))
sock.send ( 'JOIN #kamtest\r\n' )
done = False
input1 = inputThread()
output1 = outputThread()
input1.start()
output1.start()
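# --- Hedged sketch, not part of the original file ---
# Every client-to-server line this script sends follows the IRC wire format
# "COMMAND params :trailing\r\n". A small standalone composer for those lines
# (illustrative only; it never touches the socket; _irc_line is a made-up name):
def _irc_line(command, *params, **kwargs):
    """Build one CRLF-terminated IRC line; pass trailing='...' for the :text part."""
    trailing = kwargs.get('trailing')
    parts = [command] + [str(p) for p in params]
    if trailing is not None:
        parts.append(':' + trailing)
    return ' '.join(parts) + '\r\n'

# _irc_line('NICK', 'lolasuketo')                    -> 'NICK lolasuketo\r\n'
# _irc_line('PRIVMSG', '#kamtest', trailing='hi')    -> 'PRIVMSG #kamtest :hi\r\n'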
|
ytsapras/robonet_site
|
events/management/commands/fetch_obs_for_field.py
|
Python
|
gpl-2.0
| 879
| 0.009101
|
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 24 10:55:03 2017
@author: rstreet
"""
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from events.models import Field, ObsRequest
from sys import exit
from scripts import query_db
class Command(BaseCommand):
help = ''
def add_arguments(self, parser):
parser.add_argument('field', nargs='+', type=str)
def _fetch_obs_for_field(self,*args, **options):
field_name = options['field'][0]
field_id = Field.objects.get(name=field_name).id
print '\nActive obs for '+field_name+':\n'
active_obs = query_db.get_active_obs()
for obs in active_obs:
print obs.grp_id, obs.timestamp, obs.time_expire
def handle(self,*args, **options):
self._fetch_obs_for_field(*args,**options)
|
julien-girard/openstick
|
openstick/openstick.py
|
Python
|
gpl-3.0
| 8,148
| 0.001473
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2017 Julien Girard
#
# Licensed under the GNU GENERAL PUBLIC LICENSE, Version 3 ;
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.gnu.org/licenses/gpl-3.0.html
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import configparser
import names
import os
import socket
def get_open_port():
"""
http://stackoverflow.com/questions/2838244/get-open-tcp-port-in-python/2838309#2838309
:return: free port
"""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind(("", 0))
s.listen(1)
port = s.getsockname()[1]
return port
def is_open_port(port):
p = int(port)
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
try:
# Test if port is open
s.bind(("", p))
is_open = p
except socket.error as e:
            # If the port is in use, we try the next port
if e.errno == 98:
is_open = is_open_port(p + 1)
else:
print(e)
except OverflowError as e:
# If port is overflow, we get a random open port
is_open = get_open_port()
return is_open
def get_fw_port(port):
if ":" in port:
fw_port = is_open_port(port.split(":")[-1])
else:
fw_port = get_open_port()
return fw_port
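# --- Hedged sketch, not part of the original file ---
# Ports are declared either as "GUEST" or "HOST:GUEST"; get_fw_port() probes for
# a free host port, and launch() folds each pair into QEMU user networking as
# hostfwd=tcp::HOST-:GUEST. A standalone rendering of that string with fixed
# ports instead of probing (_hostfwd_netdev is a made-up name for this sketch):
def _hostfwd_netdev(pairs, base='user,id=net0'):
    """pairs: iterable of (host_port, guest_port) tuples."""
    for host, guest in pairs:
        base = '%s,hostfwd=tcp::%s-:%s' % (base, host, guest)
    return base

# _hostfwd_netdev([(2222, 22), (8080, 80)])
#   -> 'user,id=net0,hostfwd=tcp::2222-:22,hostfwd=tcp::8080-:80'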
def launch():
# Parse command line arguments.
parser = argparse.ArgumentParser(description='Because sometimes you don\'t need a full stack')
parser.add_argument(
'-b', '--boot',
help='Boot option. Can be hdd, cdrom or network.',
dest='boot'
)
parser.add_argument(
'-c', '--cores',
help='Number of cores for the virtual machine.',
dest='cores'
)
parser.add_argument(
'-d', '--disk',
help='Path to a disk image',
dest='disk'
)
parser.add_argument(
'-e', '--domains',
help='DNS to look for.',
dest='domains'
)
parser.add_argument(
'--file',
help='Path to the configuration file. Default is ~/.openstick',
dest='configuration_file_path',
default=os.path.expanduser('~') + '/.openstick'
)
parser.add_argument(
'-f', '--format',
help='Format to use for images.',
dest='format'
)
parser.add_argument(
'-j', '--hostname',
help='Hostname of the virtual machine.',
dest='hostname'
)
parser.add_argument(
'-i', '--image',
help='Virtual machine hard drive image path',
dest='image'
)
parser.add_argument(
'-m', '--memory',
help='Memory size of the virtual machine.',
dest='memory'
)
parser.add_argument(
'-n', '--name',
help='Name of the virtual machine. Default is a random name.',
dest='name',
default=names.get_first_name()
)
parser.add_argument(
'-p', '--ports',
help='Ports to forward.',
dest='ports'
)
parser.add_argument(
'-r', '--root',
help='Path to a virtual machine image to use as root (base).',
dest='root_image_path'
)
parser.add_argument(
'-s', '--size',
help='Size of the virtual machine hard drive.',
dest='size'
)
parser.add_argument(
'-u', '--sockets',
help='Number of CPU sockets',
dest='sockets'
)
parser.add_argument(
'-t', '--threads',
help='Number of threads by core.',
dest='threads'
)
args = parser.parse_args()
# Parse configuration file.
config = configparser.ConfigParser()
config.read(args.configuration_file_path)
# Write specified values to configuration file.
vm_name = args.name.lower()
    if vm_name not in config:
config[vm_name] = {}
exclude_args = ["configuration_file_path"]
for key, value in vars(args).items():
if key in exclude_args or value is None:
continue
config[vm_name][key] = value
with open(args.configuration_file_path, 'w') as configfile:
config.write(configfile)
# Initialize Parameters.
vm_boot = config.get(vm_name, 'boot', fallback='hdd')
vm_cores = config.get(vm_name, 'cores', fallback='2')
vm_disk = config.get(vm_name, 'disk', fallback='/dev/null')
vm_domains = config.get(vm_name, 'domains', fallback='')
vm_hostname = config.get(vm_name, 'hostname', fallback='%s-%s-%s' % (socket.gethostname(), os.getlogin(), vm_name))
vm_format = config.get(vm_name, 'format', fallback='qcow2')
vm_image = os.path.expanduser(
config.get(vm_name, 'image', fallback=os.path.join(os.path.expanduser('~/VMs'), '%s.%s' % (vm_name, vm_format)))
)
vm_memory = config.get(vm_name, 'memory', fallback='1G')
vm_ports = config.get(vm_name, 'ports', fallback='22,80,443')
vm_size = config.get(vm_name, 'size', fallback='20G')
vm_sockets = config.get(vm_name, 'sockets', fallback='1')
vm_threads = config.get(vm_name, 'threads', fallback='1')
# Create the base directory of the vm_image if necessary
vm_image_dir = os.path.dirname(vm_image)
if not os.path.isdir(vm_image_dir):
os.makedirs(vm_image_dir)
# Create virtual machine image if it does not exist.
if not os.path.isfile(vm_image):
if args.root_image_path is not None:
os.system('qemu-img create -f %s -b %s %s' % (vm_format, args.root_image_path, vm_image))
else:
if vm_boot != 'cdrom' and vm_boot != 'network':
                print('You\'re trying to create a new virtual machine but did not specify booting from cdrom or network.')
exit(1)
else:
os.system('qemu-img create -f %s %s %s' % (vm_format, vm_image, vm_size))
# Build network configuration
vm_network = 'user,id=net0,hostname=%s' % vm_hostname
for domain in [x for x in vm_domains.split(',') if x]:
vm_network = '%s,dnssearch=%s' % (vm_network, domain)
print("VM name is : %s" % vm_name)
# Forward port
for port in [x for x in vm_ports.split(',') if x]:
fw_port = get_fw_port(port)
if ":" in port:
port = port.split(":")[0]
else:
pass
print("Forward port %s to : %s" % (port, fw_port))
vm_network = '%s,hostfwd=tcp::%s-:%s' % (vm_network, fw_port, port)
# Build boot option
if vm_boot == 'hdd':
vm_boot_option = 'c'
elif vm_boot == 'cdrom':
vm_boot_option = 'd'
elif vm_boot == 'network':
vm_boot_option = 'n'
else:
print('Invalid boot option: \'%s\'' % vm_boot)
exit(2)
# Start virtual machine.
os.system('qemu-system-x86_64 -enable-kvm -smp cores=%s,threads=%s,sockets=%s -hda %s -device virtio-net-pci,netdev=net0,id=nic0 -netdev %s -m %s -boot %s -cdrom %s -monitor stdio' % (
vm_cores,
vm_threads,
vm_sockets,
vm_image,
vm_network,
vm_memory,
vm_boot_option,
vm_disk
))
def list():
# Parse command line arguments.
parser = argparse.ArgumentParser(description='Because sometimes you don\'t need a full stack')
parser.add_argument(
'--file',
help='Path to the configuration file. Default is ~/.openstick',
dest='configuration_file_path',
default=os.path.expanduser('~') + '/.openstick'
)
args = parser.parse_args()
# Parse configuration file.
config = configparser.ConfigParser()
config.read(args.configuration_file_path)
for name in config.sections():
print(name)
# Launch main if call as a script
if __name__ == '__main__':
launch()
|
pmghalvorsen/gramps_branch
|
gramps/gen/datehandler/_datestrings.py
|
Python
|
gpl-2.0
| 14,146
| 0.012588
|
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2013 Vassilii Khachaturov
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Date strings to translate per each language for display and parsing.
"""
from __future__ import print_function, unicode_literals
#-------------------------------------------------------------------------
#
# set up logging
#
#-------------------------------------------------------------------------
import logging
log = logging.getLogger(".DateStrings")
#-------------------------------------------------------------------------
#
# DateStrings
#
#-------------------------------------------------------------------------
class DateStrings(object):
"""
String tables for :class:`.DateDisplay` and :class:`.DateParser`.
"""
# This table needs not be localized, it's only for parsing
# Swedish calendar dates using Swedish month names.
# Display of these months uses the regular long_months.
# TODO should we pack these into alt_long_months instead?
swedish_SV = (
"", "Januari", "Februari", "Mars",
"April", "Maj", "Juni",
"Juli", "Augusti", "September",
"Oktober", "November", "December"
)
def __init__(self, locale):
_ = locale.translation.lexgettext
self.long_months = ( "",
# TRANSLATORS: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("localized lexeme inflections||January"),
_("localized lexeme inflections||February"),
_("localized lexeme inflections||March"),
_("localized lexeme inflections||April"),
_("localized lexeme inflections||May"),
_("localized lexeme inflections||June"),
_("localized lexeme inflections||July"),
_("localized lexeme inflections||August"),
_("localized lexeme inflections||September"),
_("localized lexeme inflections||October"),
_("localized lexeme inflections||November"),
_("localized lexeme inflections||December") )
self.short_months = ( "",
# TRANSLATORS: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("localized lexe
|
me inflections - short month form||Jan"),
_("localized lexeme inflections - short month form||Feb"),
_("localized lexeme inflections - short
|
month form||Mar"),
_("localized lexeme inflections - short month form||Apr"),
_("localized lexeme inflections - short month form||May"),
_("localized lexeme inflections - short month form||Jun"),
_("localized lexeme inflections - short month form||Jul"),
_("localized lexeme inflections - short month form||Aug"),
_("localized lexeme inflections - short month form||Sep"),
_("localized lexeme inflections - short month form||Oct"),
_("localized lexeme inflections - short month form||Nov"),
_("localized lexeme inflections - short month form||Dec") )
_ = locale.translation.sgettext
self.alt_long_months = ( "",
# TRANSLATORS: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to add proper alternatives to be recognized in your localized
# DateParser code!
_("alternative month names for January||"),
_("alternative month names for February||"),
_("alternative month names for March||"),
_("alternative month names for April||"),
_("alternative month names for May||"),
_("alternative month names for June||"),
_("alternative month names for July||"),
_("alternative month names for August||"),
_("alternative month names for September||"),
_("alternative month names for October||"),
_("alternative month names for November||"),
_("alternative month names for December||") )
self.calendar = (
# Must appear in the order indexed by Date.CAL_... numeric constants
_("calendar|Gregorian"),
_("calendar|Julian"),
_("calendar|Hebrew"),
_("calendar|French Republican"),
_("calendar|Persian"),
_("calendar|Islamic"),
_("calendar|Swedish") )
_ = locale.translation.lexgettext
self.hebrew = (
"",
# TRANSLATORS: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("Hebrew month lexeme|Tishri"),
_("Hebrew month lexeme|Heshvan"),
_("Hebrew month lexeme|Kislev"),
_("Hebrew month lexeme|Tevet"),
_("Hebrew month lexeme|Shevat"),
_("Hebrew month lexeme|AdarI"),
_("Hebrew month lexeme|AdarII"),
_("Hebrew month lexeme|Nisan"),
_("Hebrew month lexeme|Iyyar"),
_("Hebrew month lexeme|Sivan"),
_("Hebrew month lexeme|Tammuz"),
_("Hebrew month lexeme|Av"),
_("Hebrew month lexeme|Elul")
)
self.french = (
"",
# TRANSLATORS: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("French month lexeme|Vendémiaire"),
_("French month lexeme|Brumaire"),
_("French month lexeme|Frimaire"),
_("French month lexeme|Nivôse"),
_("French month lexeme|Pluviôse"),
_("French month lexeme|Ventôse"),
_("French month lexeme|Germinal"),
_("French month lexeme|Floréal"),
_("French month lexeme|Prairial"),
_("French month lexeme|Messidor"),
_("French month lexeme|Thermidor"),
_("French month lexeme|Fructidor"),
_("French month lexeme|Extra"),
)
self.islamic = (
"",
# TRANSLATORS: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("Islamic month lexeme|Muharram"),
_("Islamic month lexeme|Safar"),
_("Islamic month lexeme|Rabi`al-Awwal"),
_("Islamic month lexeme|Rabi`ath-Thani"),
_("Islamic month lexeme|Jumada l-Ula"),
_("Islamic month lexeme|Jumada t-Tania"),
_("Islamic month lexeme|Rajab"),
_("Islamic month lexeme|Sha`ban"),
_("Islamic month lexeme|Ramadan"),
_("Islamic month lexeme|Shawwal"),
_("Islamic month lexeme|Dhu l-Qa`da"),
|
stvstnfrd/edx-platform
|
lms/lib/courseware_search/test/test_lms_result_processor.py
|
Python
|
agpl-3.0
| 3,315
| 0.001207
|
"""
Tests for the lms_result_processor
"""
import six
import pytest
from lms.djangoapps.courseware.tests.factories import UserFactory
from lms.lib.courseware_search.lms_result_processor import LmsSearchResultProcessor
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
class LmsSearchResultProcessorTestCase(ModuleStoreTestCase):
""" Test case class to test search result processor """
def build_course(self):
"""
Build up a course tree with an html control
"""
        self.global_staff = UserFactory(is_staff=True)
        self.course = CourseFactory.create(
            org='Elasticsearch',
            course='ES101',
            run='test_run',
            display_name='Elasticsearch test course',
)
self.section = ItemFactory.create(
parent=self.course,
category='chapter',
display_name='Test Section',
)
self.subsection = ItemFactory.create(
parent=self.section,
category='sequential',
display_name='Test Subsection',
)
self.vertical = ItemFactory.create(
parent=self.subsection,
category='vertical',
display_name='Test Unit',
)
self.html = ItemFactory.create(
parent=self.vertical,
category='html',
display_name='Test Html control',
)
self.ghost_subsection = ItemFactory.create(
parent=self.section,
category='sequential',
display_name=None,
)
self.ghost_vertical = ItemFactory.create(
parent=self.ghost_subsection,
category='vertical',
display_name=None,
)
self.ghost_html = ItemFactory.create(
parent=self.ghost_vertical,
category='html',
display_name='Ghost Html control',
)
def setUp(self):
super(LmsSearchResultProcessorTestCase, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
self.build_course()
def test_url_parameter(self):
fake_url = ""
srp = LmsSearchResultProcessor({}, "test")
with pytest.raises(ValueError):
fake_url = srp.url
assert fake_url == ''
srp = LmsSearchResultProcessor(
{
"course": six.text_type(self.course.id),
"id": six.text_type(self.html.scope_ids.usage_id),
"content": {"text": "This is the html text"}
},
"test"
)
assert srp.url == '/courses/{}/jump_to/{}'.format(six.text_type(self.course.id),
six.text_type(self.html.scope_ids.usage_id))
def test_should_remove(self):
"""
Tests that "visible_to_staff_only" overrides start date.
"""
srp = LmsSearchResultProcessor(
{
"course": six.text_type(self.course.id),
"id": six.text_type(self.html.scope_ids.usage_id),
"content": {"text": "This is html test text"}
},
"test"
)
assert srp.should_remove(self.global_staff) is False
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.