| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
| skurmedel/wordsalad | tests/test_matrix.py | Python | mit | 1,110 | 0.004505 |
from wordsalad import WordSaladMatrixBuilder
import unittest
class TestWordSaladMatrixBuilder(unittest.TestCase):
def setUp(self):
self.builder = WordSaladMatrixBuilder()
def test_count_followers_in_sequence_no_endmarker(self):
seq = [1, 2, 3]
self.builder.count_followers_in_sequence(seq)
mat = self.builder.build_matrix()
self.assertEqual(mat.probability(1, 2), 1)
self.assertEqual(mat.probability(2, 3), 1)
probs3 = mat.probabilities(3)
self.assertEqual(probs3.sum(), 0)
def test_count_followers_in_sequence_endmarker(self):
marker = "end"
seq = [1, 2, 3]
self.builder.count_followers_in_sequence(seq, endmarker=marker)
mat = self.builder.build_matrix()
self.assertEqual(mat.probability(1, 2), 1)
self.assertEqual(mat.probability(2, 3), 1)
self.assertEqual(mat.probability(3, "end"), 1)
def test_count_followers_in_sequence_throws_on_type(self):
with self.assertRaises(TypeError):
self.builder.count_followers_in_sequence(None)
|
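The tests above exercise WordSaladMatrixBuilder: followers are counted from a sequence, a matrix is built, and each observed transition in [1, 2, 3] gets probability 1 (with an optional end marker as the follower of the last token). A minimal standalone sketch of that first-order follower counting, not the library's actual implementation:

```python
from collections import Counter, defaultdict

def transition_probabilities(seq, endmarker=None):
    """Count how often each follower appears after each token, then normalize."""
    items = list(seq) + ([endmarker] if endmarker is not None else [])
    counts = defaultdict(Counter)
    for current, follower in zip(items, items[1:]):
        counts[current][follower] += 1
    return {token: {f: c / sum(followers.values()) for f, c in followers.items()}
            for token, followers in counts.items()}

probs = transition_probabilities([1, 2, 3], endmarker="end")
# probs[1][2] == 1.0, probs[2][3] == 1.0, probs[3]["end"] == 1.0
```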
| fofix/fofix | fofix/game/Main.py | Python | gpl-2.0 | 5,400 | 0.000926 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#####################################################################
# Frets on Fire X (FoFiX) #
# Copyright (C) 2006 Sami Kyöstilä #
# 2008 evilynux <evilynux@gmail.com> #
# 2009-2019 FoFiX Team #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
'''
Main game object.
'''
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
import os
import pygame
from fofix.core import Version
from fofix.core import VFS
from fofix.core.VideoPlayer import VideoLayer, VideoPlayerError
from fofix.core.GameEngine import GameEngine
from fofix.game.MainMenu import MainMenu
from fofix.core import Config
log = logging.getLogger(__name__)
class Main(object):
def __init__(self, a):
self.args = a
self.configfile = self.args['config']
self.fullscreen = self.args['fullscreen']
self.resolution = self.args['resolution']
self.theme = self.args['theme']
self.config = self.load_config(self.configfile)
# Lysdestic - Allow support for manipulating fullscreen via CLI
if self.fullscreen is not None:
Config.set("video", "fullscreen", self.fullscreen)
# Lysdestic - Change resolution from CLI
if self.resolution is not None:
Config.set("video", "resolution", self.resolution)
# Lysdestic - Alter theme from CLI
if self.theme is not None:
Config.set("coffee", "themename
|
", self.theme)
self.engine = GameEngine(self.config)
self.videoLayer = False
self.restartRequested = False
@staticmethod
def load_config(configPath):
''' Load the configuration file. '''
if configPath is not None:
if configPath.lower() == "reset":
# Get os specific location of config file, and remove it.
fileName = os.path.join(VFS.getWritableResourcePath(), Version.PROGRAM_UNIXSTYLE_NAME + ".ini")
os.remove(fileName)
# Recreate it
config = Config.load(Version.PROGRAM_UNIXSTYLE_NAME + ".ini", setAsDefault=True)
else:
# Load specified config file
config = Config.load(configPath, setAsDefault=True)
else:
# Use default configuration file
config = Config.load(Version.PROGRAM_UNIXSTYLE_NAME + ".ini", setAsDefault=True)
return config
def restart(self):
log.info("Restarting.")
self.engine.audio.close()
self.restartRequested = True
def run(self):
# Perhaps this could be implemented in a better way...
# Play the intro video if it is present and we have the capability to play it
themename = Config.get("coffee", "themename")
vidSource = os.path.join(Version.dataPath(), 'themes', themename, 'menu', 'intro.ogv')
if os.path.isfile(vidSource):
try:
vidPlayer = VideoLayer(self.engine, vidSource, cancellable=True)
except (IOError, VideoPlayerError):
log.error("Error loading intro video:")
else:
vidPlayer.play()
self.engine.view.pushLayer(vidPlayer)
self.videoLayer = True
self.engine.ticksAtStart = pygame.time.get_ticks()
while not vidPlayer.finished:
self.engine.run()
self.engine.view.popLayer(vidPlayer)
self.engine.view.pushLayer(MainMenu(self.engine))
if not self.videoLayer:
self.engine.setStartupLayer(MainMenu(self.engine))
# Run the main game loop.
try:
self.engine.ticksAtStart = pygame.time.get_ticks()
while self.engine.run():
pass
except KeyboardInterrupt:
log.info("Left mainloop due to KeyboardInterrupt.")
# don't reraise
# Restart the program if the engine is asking that we do so.
if self.engine.restartRequested:
self.restart()
# MainMenu class already calls this - useless?
self.engine.quit()
|
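Main above consumes a plain dict with 'config', 'fullscreen', 'resolution' and 'theme' keys. A minimal sketch of building such a dict with argparse; the flag names here are assumptions for illustration, not necessarily the flags FoFiX's real launcher defines:

```python
import argparse

# Hypothetical CLI parser producing the args dict that Main(a) expects.
parser = argparse.ArgumentParser(description="FoFiX launcher (sketch)")
parser.add_argument("--config", default=None, help="path to an .ini file, or 'reset'")
parser.add_argument("--fullscreen", default=None, help="override video.fullscreen")
parser.add_argument("--resolution", default=None, help="override video.resolution, e.g. 1280x720")
parser.add_argument("--theme", default=None, help="override coffee.themename")

args = vars(parser.parse_args())
Main(args).run()
```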
| GuessWhoSamFoo/pandas | pandas/tests/arrays/test_datetimes.py | Python | bsd-3-clause | 10,666 | 0 |
# -*- coding: utf-8 -*-
"""
Tests for DatetimeArray
"""
import operator
import numpy as np
import pytest
from pandas.core.dtypes.dtypes import DatetimeTZDtype
import pandas as pd
from pandas.core.arrays import DatetimeArray
from pandas.core.arrays.datetimes import sequence_to_dt64ns
import pandas.util.testing as tm
class TestDatetimeArrayConstructor(object):
def test_freq_validation(self):
# GH#24623 check that invalid instances cannot be created with the
# public constructor
arr = np.arange(5, dtype=np.int64) * 3600 * 10**9
msg = ("Inferred frequency H from passed values does not "
"conform to passed frequency W-SUN")
with pytest.raises(ValueError, match=msg):
DatetimeArray(arr, freq="W")
@pytest.mark.parametrize('meth', [DatetimeArray._from_sequence,
sequence_to_dt64ns,
pd.to_datetime,
pd.DatetimeIndex])
def test_mixing_naive_tzaware_raises(self, meth):
# GH#24569
arr = np.array([pd.Timestamp('2000'), pd.Timestamp('2000', tz='CET')])
msg = ('Cannot mix tz-aware with tz-naive values|'
'Tz-aware datetime.datetime cannot be converted '
'to datetime64 unless utc=True')
for obj in [arr, arr[::-1]]:
# check that we raise regardless of whether naive is found
# before aware or vice-versa
with pytest.raises(ValueError, match=msg):
meth(obj)
def test_from_pandas_array(self):
arr = pd.array(np.arange(5, dtype=np.int64)) * 3600 * 10**9
result = DatetimeArray._from_sequence(arr, freq='infer')
expected = pd.date_range('1970-01-01', periods=5, freq='H')._data
tm.assert_datetime_array_equal(result, expected)
def test_mismatched_timezone_raises(self):
arr = DatetimeArray(np.array(['2000-01-01T06:00:00'], dtype='M8[ns]'),
dtype=DatetimeTZDtype(tz='US/Central'))
dtype = DatetimeTZDtype(tz='US/Eastern')
with pytest.raises(TypeError, match='Timezone of the array'):
DatetimeArray(arr, dtype=dtype)
def test_non_array_raises(self):
with pytest.raises(ValueError, match='list'):
DatetimeArray([1, 2, 3])
def test_other_type_raises(self):
with pytest.raises(ValueError,
match="The dtype of 'values' is incorrect.*bool"):
DatetimeArray(np.array([1, 2, 3], dtype='bool'))
def test_incorrect_dtype_raises(self):
with pytest.raises(ValueError, match="Unexpected value for 'dtype'."):
DatetimeArray(np.array([1, 2, 3], dtype='i8'), dtype='category')
def test_freq_infer_raises(self):
with pytest.raises(ValueError, match='Frequency inference'):
DatetimeArray(np.array([1, 2, 3], dtype='i8'), freq="infer")
def test_copy(self):
data = np.array([1, 2, 3], dtype='M8[ns]')
arr = DatetimeArray(data, copy=False)
assert arr._data is data
arr = DatetimeArray(data, copy=True)
assert arr._data is not data
class TestDatetimeArrayComparisons(object):
# TODO: merge this into tests/arithmetic/test_datetime64 once it is
# sufficiently robust
def test_cmp_dt64_arraylike_tznaive(self, all_compare_operators):
# arbitrary tz-naive DatetimeIndex
opname = all_compare_operators.strip('_')
op = getattr(operator, opname)
dti = pd.date_range('2016-01-1', freq='MS', periods=9, tz=None)
arr = DatetimeArray(dti)
assert arr.freq == dti.freq
assert arr.tz == dti.tz
right = dti
expected = np.ones(len(arr), dtype=bool)
if opname in ['ne', 'gt', 'lt']:
# for these the comparisons should be all-False
expected = ~expected
result = op(arr, arr)
tm.assert_numpy_array_equal(result, expected)
for other in [right, np.array(right)]:
# TODO: add list and tuple, and object-dtype once those
# are fixed in the constructor
result = op(arr, other)
tm.assert_numpy_array_equal(result, expected)
result = op(other, arr)
tm.assert_numpy_array_equal(result, expected)
class TestDatetimeArray(object):
def test_astype_to_same(self):
arr = DatetimeArray._from_sequence(['2000'], tz='US/Central')
result = arr.astype(DatetimeTZDtype(tz="US/Central"), copy=False)
assert result is arr
@pytest.mark.parametrize("dtype", [
int, np.int32, np.int64, 'uint32', 'uint64',
])
def test_astype_int(self, dtype):
arr = DatetimeArray._from_sequence([pd.Timestamp('2000'),
pd.Timestamp('2001')])
result = arr.astype(dtype)
if np.dtype(dtype).kind == 'u':
expected_dtype = np.dtype('uint64')
else:
expected_dtype = np.dtype('int64')
expected = arr.astype(expected_dtype)
assert result.dtype == expected_dtype
tm.assert_numpy_array_equal(result, expected)
def test_tz_setter_raises(self):
arr = DatetimeArray._from_sequence(['2000'], tz='US/Central')
with pytest.raises(AttributeError, match='tz_localize'):
arr.tz = 'UTC'
def test_setitem_different_tz_raises(self):
data = np.array([1, 2, 3], dtype='M8[ns]')
arr = DatetimeArray(data, copy=False,
dtype=DatetimeTZDtype(tz="US/Central"))
with pytest.raises(ValueError, match="None"):
arr[0] = pd.Timestamp('2000')
with pytest.raises(ValueError, match="US/Central"):
arr[0] = pd.Timestamp('2000', tz="US/Eastern")
def test_setitem_clears_freq(self):
a = DatetimeArray(pd.date_range('2000', periods=2, freq='D',
tz='US/Central'))
a[0] = pd.Timestamp("2000", tz="US/Central")
assert a.freq is None
def test_repeat_preserves_tz(self):
dti = pd.date_range('2000', periods=2, freq='D', tz='US/Central')
arr = DatetimeArray(dti)
repeated = arr.repeat([1, 1])
# preserves tz and values, but not freq
expected = DatetimeArray(arr.asi8, freq=None, dtype=arr.dtype)
tm.assert_equal(repeated, expected)
def test_value_counts_preserves_tz(self):
dti = pd.date_range('2000', periods=2, freq='D', tz='US/Central')
arr = DatetimeArray(dti).repeat([4, 3])
result = arr.value_counts()
# Note: not tm.assert_index_equal, since `freq`s do not match
assert result.index.equals(dti)
arr[-2] = pd.NaT
result = arr.value_counts()
expected = pd.Series([1, 4, 2],
index=[pd.NaT, dti[0], dti[1]])
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize('method', ['pad', 'backfill'])
def test_fillna_preserves_tz(self, method):
dti = pd.date_range('2000-01-01', periods=5, freq='D', tz='US/Central')
arr = DatetimeArray(dti, copy=True)
arr[2] = pd.NaT
fill_val = dti[1] if method == 'pad' else dti[3]
expected = DatetimeArray._from_sequence(
[dti[0], dti[1], fill_val, dti[3], dti[4]],
freq=None, tz='US/Central'
)
result = arr.fillna(method=method)
tm.assert_extension_array_equal(result, expected)
# assert that arr and dti were not modified in-place
assert arr[2] is pd.NaT
assert dti[2] == pd.Timestamp('2000-01-03', tz='US/Central')
def test_array_interface_tz(self):
tz = "US/Central"
data = DatetimeArray(pd.date_range('2017', periods=2, tz=tz))
result = np.asarray(data)
expected = np.array([pd.Timestamp('2017-01-01T00:00:00', tz=tz),
pd.Timestamp('2017-01-02T00:00:00', tz=tz)],
dtype=object)
tm.assert_numpy_array_equal(result, expected)
result = np.asarray(data, dtype=object)
tm.ass
|
| QuantumTechDevStudio/RUDNEVGAUSS | archive/solver/DifferentialEvolutionAbstract.py | Python | gpl-3.0 | 1,558 | 0.000666 |
import numpy
class DifferentialEvolutionAbstract:
amount_of_individuals = None
f = None
p = None
end_method = None
def __init__(self, min_element=-1, max_element=1):
self.min_element = min_element
self.max_element = max_element
self.f = 0.5
self.p = 0.9
self.func = None
self.population = None
self.func_population = None
self.dim = 0
self.child_funcs = None
self.cost_list = []
self.end_method = 'max_iter'
def set_amount_of_individuals(self, amount_of_individuals):
self.amount_of_individuals = amount_of_individuals
def set_params(self, f, p):
self.f = f
self.p = p
def set_end_method(self, end_method):
self.end_method = end_method
def create_population(self):
# Create the population
|
population = []
for _ in range(self.amount_of_individuals):
population.append(numpy.random.uniform(self.min_element, self.max_element, self.dim))
return numpy.array(population)
def choose_best_individual(self):
# This function finds the best individual in the population
func_list = list(self.func_population)
best_index = func_list.index(min(func_list))
return self.population[best_index]
def iteration(self):
return []
def optimize(self, func, dim, end_cond, debug_pop_print=-1):
return []
def return_cost_list(self):
return self.cost_list
|
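The abstract class above leaves iteration() and optimize() as stubs for subclasses. A minimal sketch of a concrete iteration() implementing the classic DE/rand/1/bin step, assuming optimize() has already set self.func, self.dim, self.population and self.func_population; the subclass name is hypothetical:

```python
import numpy

class DifferentialEvolutionSimple(DifferentialEvolutionAbstract):
    def iteration(self):
        n = self.amount_of_individuals
        for i in range(n):
            # Pick three distinct individuals, all different from i.
            a, b, c = numpy.random.choice(
                [k for k in range(n) if k != i], 3, replace=False)
            # Mutation: v = x_a + F * (x_b - x_c).
            mutant = self.population[a] + self.f * (self.population[b] - self.population[c])
            # Binomial crossover: take each component from the mutant with probability p.
            mask = numpy.random.rand(self.dim) < self.p
            trial = numpy.where(mask, mutant, self.population[i])
            # Greedy selection on the objective function.
            trial_value = self.func(trial)
            if trial_value < self.func_population[i]:
                self.population[i] = trial
                self.func_population[i] = trial_value
        self.cost_list.append(min(self.func_population))
        return self.population
```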
| chfw/Flask-Excel | tests/testapp.py | Python | bsd-3-clause | 4,610 | 0 |
import platform
from datetime import datetime
import pyexcel as pe
from flask import Flask, abort, jsonify, request
from flask_sqlalchemy import SQLAlchemy
import flask_excel as excel
app = Flask(__name__)
data = [[1, 2, 3], [4, 5, 6]]
if platform.python_implementation() == "PyPy":
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///tmp.db"
else:
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite://"
db = SQLAlchemy(app)
excel.init_excel(app)
class Post(db.Model):
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(80))
body = db.Column(db.Text)
pub_date = db.Column(db.DateTime)
category_id = db.Column(db.Integer, db.ForeignKey("category.id"))
category = db.relationship(
"Category", backref=db.backref("posts", lazy="dynamic")
)
def __init__(self, title, body, category, pub_date=None):
self.title = title
self.body = body
if pub_date is None:
pub_date = datetime.utcnow()
self.pub_date = pub_date
self.category = category
def __repr__(self):
return "<Post %r>" % self.title
class Category(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50))
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Category %r>" % self.name
@app.route("/respond/<struct_type>", methods=["GET", "POST"])
def respond_array(struct_type):
if struct_type == "array":
array = request.get_array(field_name="file")
return jsonify({"result": array})
elif struct_type == "dict":
adict = request.get_dict(field_name="file")
return jsonify({"result": adict})
elif struct_type == "records":
records = request.get_records(field_name="file")
return jsonify({"result": records})
elif struct_type == "book":
book = request.get_book(field_name="file")
return jsonify({"result": book.to_dict()})
elif struct_type == "book_dict":
book_dict = request.get_book_dict(field_name="file")
return jsonify({"result": book_dict})
@app.route("/switch/<file_type>", methods=["POST"])
def switch(file_type):
try:
sheet = request.get_sheet(field_name="file")
return excel.make_response(sheet, file_type)
except IOError:
abort(400)
@app.route("/file_name/<file_type>/<file_name>", methods=["POST"])
def swtich_file_name(file_type, file_name):
return excel.make_response(
pe.Sheet([["a", "b", "c"]]), file_type, file_name=file_name
)
@app.route("/exchange/<struct_type>", methods=["POST"])
def upload_array(struct_type):
if struct_type == "array":
array = request.get_array(field_name="file")
return excel.make_response_from_array(
|
array, "xls", sheet_name="test_array"
)
elif struct_type == "dict":
adict = request.get_dict(field_name="file")
return excel.make_response_from_dict(
adict, "xls", sheet_name="test_array"
)
elif struct_type == "records":
records = request.get_records(field_name="file")
return excel.make_response_from_records(
records, "xls", sheet_name="test_array"
)
elif struct_type == "book":
book = request.get_book(field_name="file")
return excel.make_response(book, "xls")
elif struct_type == "book_dict":
book_dict = request.get_book_dict(field_name="file")
return excel.make_response_from_book_dict(book_dict, "xls")
@app.route("/upload/categories", methods=["POST"])
def upload_categories():
def table_init_func(row):
return Category(row["name"])
request.save_to_database(
field_name="file",
session=db.session,
table=Category,
initializer=table_init_func,
)
return excel.make_response_from_a_table(db.session, Category, "xls")
@app.route("/upload/all", methods=["POST"])
def upload_all():
def category_init_func(row):
c = Category(row["name"])
c.id = row["id"]
return c
def post_init_func(row):
# lesson learned: the relation needs an object, not a string
c = Category.query.filter_by(name=row["category"]).first()
p = Post(row["title"], row["body"], c, row["pub_date"])
return p
request.save_book_to_database(
field_name="file",
session=db.session,
tables=[Category, Post],
initializers=[category_init_func, post_init_func],
)
return excel.make_response_from_tables(db.session, [Category, Post], "xls")
|
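The test app above wires flask_excel's request helpers (get_array, get_dict, get_records, ...) to JSON and spreadsheet responses. A minimal sketch of exercising the /respond/array endpoint with Flask's test client; the uploaded CSV content is made up for illustration:

```python
import io
import json

client = app.test_client()
csv_upload = (io.BytesIO(b"1,2,3\n4,5,6\n"), "upload.csv")
resp = client.post("/respond/array",
                   data={"file": csv_upload},
                   content_type="multipart/form-data")
# Expect something like {"result": [[1, 2, 3], [4, 5, 6]]} back from request.get_array.
print(json.loads(resp.data))
```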
| KRHS-GameProgramming-2015/Adlez | Adlez.py | Python | bsd-2-clause | 6,657 | 0.011417 |
import sys, pygame, math, random, time
from Level import *
from Player import *
from Enemy import *
from NPC import *
from Menu import *
from Item import *
pygame.init()
clock = pygame.time.Clock()
width = 1000
height = 700
size = width, height
bgColor = r,g,b = 255,255,255
screen = pygame.display.set_mode(size)
mode = "menu"
enemies = pygame.sprite.Group()
boundries = pygame.sprite.Group()
backGrounds = pygame.sprite.Group()
people = pygame.sprite.Group()
items = pygame.sprite.Group()
players = pygame.sprite.Group()
all = pygame.sprite.OrderedUpdates()
Enemy.containers = (enemies, all)
SoftBlock.containers = (backGrounds, all)
HardBlock.containers = (boundries, all)
NPC.containers = (people, all)
Item.containers = (items, all)
Player.containers = (people, players, all)
levLayer =0
levx = 3
levy = 3
start = time.time()
def loadNewLev(direction, levx, levy):
if direction == "up":
if levy >1:
levy-=1
elif direction == "down":
if levy <3:
levy+=1
elif direction == "left":
if levx >1:
levx-=1
elif direction == "right":
if levx <3:
levx+=1
for s in all.sprites():
s.kill()
levFile = "Levels/map" + str(levLayer) + str(levy) + str(levx)
level=Level(levFile)
return levx, levy
while True:
while mode == "menu":
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_1:
mode = "game"
if event.key == pygame.K_2:
mode = "how to play"
if event.key == pygame.K_q:
mode = "quit"
bg = pygame.image.load("Resources/mainmenu.png")
bgrect = bg.get_rect(center = [width/2,height/2])
screen.fill(bgColor)
screen.blit(bg, bgrect)
pygame.display.flip()
clock.tick(60)
while mode == "how to play":
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RETURN:
mode = "menu"
bg = pygame.image.load("Resources/howtoplay.png")
bgrect = bg.get_rect(center = [width/2,height/1.9])
screen.fill(bgColor)
screen.blit(bg, bgrect)
pygame.display.flip()
clock.tick(60)
while mode == "quit":
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_q:
sys.exit()
levFile = "Levels/map" + str(levLay
|
er) + str(levy) + str(levx)
level=Level(levFile)
player = Player([5,5], [900,500])
while mode == "test":
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_w:
levx, levy = loadNewLev("up", levx, levy)
elif event.key == pygame.K_s:
levx, levy = loadNewLev("down", levx, levy)
elif event.key == pygame.K_a:
levx, levy = loadNewLev("left", levx, levy)
elif event.key == pygame.K_d:
levx, levy = loadNewLev("right", levx, levy)
#print len(all.sprites())
bgColor = r,g,b
screen.fill(bgColor)
dirty = all.draw(screen)
pygame.display.update(dirty)
pygame.display.flip()
clock.tick(60)
while mode == "game":
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_w or event.key == pygame.K_UP:
player.go("up")
elif event.key == pygame.K_s or event.key == pygame.K_DOWN:
player.go("down")
elif event.key == pygame.K_a or event.key == pygame.K_LEFT:
player.go("left")
elif event.key == pygame.K_d or event.key == pygame.K_RIGHT:
player.go("right")
elif event.type == pygame.KEYUP:
if event.key == pygame.K_w or event.key == pygame.K_UP:
player.go("stop up")
elif event.key == pygame.K_s or event.key == pygame.K_DOWN:
player.go("stop down")
elif event.key == pygame.K_a or event.key == pygame.K_LEFT:
player.go("stop left")
elif event.key == pygame.K_d or event.key == pygame.K_RIGHT:
player.go("stop right")
all.update(size)
#print len(all.sprites())
#From Manpac V2
if player.rect.center[0] > size[0]:
levx, levy = loadNewLev("right", levx, levy)
player = Player([5,5], [0, player.rect.center[1]])
elif player.rect.center[0] < 0:
levx, levy = loadNewLev("left", levx, levy)
player = Player([5,5], [size[0], player.rect.center[1]])
elif player.rect.center[1] > size[1]:
levx, levy = loadNewLev("down", levx, levy)
player = Player([5,5], [player.rect.center[0], 0])
elif player.rect.center[1] < 0:
levx, levy = loadNewLev("up", levx, levy)
player = Player([5,5], [player.rect.center[0], size[1]])
playersHitsBoundries = pygame.sprite.groupcollide(players, boundries, False, False)
for p in playersHitsBoundries:
for boundry in playersHitsBoundries[p]:
p.collideHardblock(boundry)
#playersHitsItems = pygame.sprite.groupcollide(players, items, False, False)
#for p in playersHitsitems:
#for item in playersHitsitems[p]:
enemiesHitsBoundries = pygame.sprite.groupcollide(enemies, boundries, False, False)
for e in enemiesHitsBoundries:
for boundry in enemiesHitsBoundries[e]:
e.collideHardblock(boundry)
bgColor = r,g,b
screen.fill(bgColor)
dirty = all.draw(screen)
pygame.display.update(dirty)
pygame.display.flip()
clock.tick(60)
|
| AleCandido/Lab3.2 | 4_Ottica1/cadmio.py | Python | gpl-2.0 | 2,784 | 0.019044 |
from lab import *
from pylab import *
from uncertainties import *
from uncertainties import unumpy
from statistics import *
from scipy.constants import *
import getpass
users={"candi": "C:\\Users\\candi\\Documents\\GitHub\\Lab3.2\\",
"silvanamorreale":"C:\\Users\\silvanamorreale\\Documents\\GitHub\\Lab3.2\\" ,
"Studenti": "C:\\Users\\Studenti\\Desktop\\Lab3\\",
"User":"C:\\Users\\User\\Documents\\GitHub\\Lab3.2\\"
}
try:
user=getpass.getuser()
path=users[user]
print("buongiorno ", user, "!!!")
except:
raise Error("unknown user, please specify it and the path in the file Esercitazione*.py")
sys.path = sys.path + [path]
dir= path + "4_Ottica1\\"
##
alphar = loadtxt(dir + "data\\cadmio.txt", unpack=True)
alphas = alphar[0] + alphar[1]/60
alpha = []
dalpha = []
for i in range(0, len(alphas), 3):
alpha += [mean(alphas[i:i+3])]
err = stdev(alphas[i:i+3])
if err != 0:
dalpha += [err]
else:
dalpha += [1.5/60]
alphal = unumpy.uarray(alpha,dalpha)
alpha0 = ufloat(10.895, 0.026)
alphai = alphal - alpha0
lamda = array([467.8, 480.0, 508.6, 643.8])*10**(-9)
E = (h/physical_constants['electron volt'][0])*c/lamda
errorbar(unumpy.nominal_values(alphai), E, xerr = unumpy.std_devs(alphai), fmt = 'b,')
par, cov = fit_linear(E, unumpy.nominal_values(alphai), dy = unumpy.std_devs(alphai))
m, q = par
x = linspace(min(E)*0.99, max(E)*1.01)
y = m*x + q
ret=lambda x: m*x+q
chisq=sum((unumpy.nominal_values(alphai)-ret(E))**2/unumpy.std_devs(alphai)**2)
print("Chiq=", chisq, "su ", len(alphai)-2)
xlim(min(y)*0.998, max(y)*1.002)
xlabel("Deflection angles [°]")
ylabel("Light energies [eV]")
grid()
plot(y, x, 'g')
savefig(dir + "grafici\\calcadmio.pdf")
m,q = correlated_values([m,q], cov)
a = 1/m
b = -q/m
print("\nangular coefficient:",a,"intercepts:",b, "\n")
print("Sodio:\n")
giallor = [59+10/60,59+6/60,59+6/60]
giallos = ufloat(mean(giallor), stdev(giallor))
giallo = giallos - alpha0
verder = [59+18/60,59+20/60,59+16/60]
verdes = ufloat(mean(verder), stdev(verder))
verde = verdes - alpha0
verdescuror = [59+55/60,59+55/60,59+55/60]
verdescuros = ufloat(mean(verdescuror), stdev(verdescuror)+0.1)
verdescuro = verdescuros - alpha0
azzurror = [60+8/60, 60+10/60, 60+6/60]
azzurros = ufloat(mean(azzurror), stdev(azzurror))
azzurro = azzurros - alpha0
rossor = [58+55/60,58+53/60,58+56/60]
rossos = ufloat(mean(rossor), stdev(rossor))
rosso = rossos - alpha0
violar = [60+39/60]*3
violas = ufloat(mean(violar), stdev(violar)+0.1)
viola = violas - alpha0
color = [giallo,verde,verdescuro,azzurro,rosso,viola]
colorname = ['giallo','verde','verdescuro','azzurro','rosso','viola']
for i in range(0,len(color)):
print(colorname[i],": ",c/(color[i]*a + b)*(h/physical_constants['electron volt'][0]))
|
| ronniedada/litmus | collectors/cbagent/collectors/libstats/systemstats.py | Python | apache-2.0 | 869 | 0 |
from fabric.api import run
from cbagent.collectors.libstats.decorators import multi_node_task
class SystemStats(object):
def __init__(self, hosts, user, password):
self.hosts = hosts
self.user = user
self.password = password
@multi_node_task
def swap_usage(ip):
output = run("free | grep -i swap")
swap_usage = {}
for i, metric in enumerate(("swap_total", "swap_free", "swap_used")):
swap_usage["swap_" + metric] = output.split()[i + 1]
return swap_usage
@multi_node_task
def mem_usage(ip):
output = run("free | g
|
rep -i mem")
mem_usage = {}
for i, metric in enumerate(("total", "used", "free", "shared",
"buffers", "cached")):
mem_usage["mem_" + metric] = output.split()[i + 1]
return mem_usage
|
| jeffzheng1/tensorflow | tensorflow/contrib/layers/python/layers/feature_column_ops.py | Python | apache-2.0 | 36,030 | 0.004496 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities related to FeatureColumn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework.python.framework import checkpoint_utils
from tensorflow.contrib.framework.python.framework import experimental
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.layers.python.layers import embedding_ops
from tensorflow.contrib.layers.python.layers import feature_column as fc
from tensorflow.contrib.layers.python.layers import layers
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor as sparse_tensor_py
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
def _embeddings_from_arguments(column,
args,
weight_collections,
trainable,
output_rank=2):
"""Returns embeddings for a column based on the computed arguments.
Args:
column: the column name.
args: the _DeepEmbeddingLookupArguments for this column.
weight_collections: collections to store weights in.
trainable: whether these embeddings should be trainable.
output_rank: the desired rank of the returned `Tensor`. Inner dimensions will
be combined to produce the desired rank.
Returns:
the embeddings.
Raises:
ValueError: if not possible to create.
"""
# pylint: disable=protected-access
input_tensor = layers._inner_flatten(args.input_tensor, output_rank)
weight_tensor = None
if args.weight_tensor is not None:
weight_tensor = layers._inner_flatten(args.weight_tensor, output_rank)
# pylint: enable=protected-access
if args.hashed:
embeddings = contrib_variables.model_variable(
name='weights',
shape=[args.vocab_size],
dtype=dtypes.float32,
initializer=args.initializer,
trainable=trainable,
collections=weight_collections)
return embedding_ops.hashed_embedding_lookup_sparse(
embeddings, input_tensor, args.dimension,
combiner=args.combiner, name='lookup')
if args.shared_embedding_name is not None:
shared_embedding_collection_name = (
'SHARED_EMBEDDING_COLLECTION_' + args.shared_embedding_name.upper())
graph = ops.get_default_graph()
shared_embedding_collection = (
graph.get_collection_ref(shared_embedding_collection_name))
shape = [args.vocab_size, args.dimension]
if shared_embedding_collection:
if len(shared_embedding_collection) > 1:
raise ValueError('Collection %s can only contain one '
'(partitioned) variable.'
% shared_embedding_collection_name)
else:
embeddings = shared_embedding_collection[0]
if embeddings.get_shape() != shape:
raise ValueError('The embedding variable with name {} already '
'exists, but its shape does not match required '
'embedding shape here. Please make sure to use '
'different shared_embedding_name for different '
'shared embeddings.'.format(
args.shared_embedding_name))
else:
embeddings = contrib_variables.model_variable(
name=args.shared_embedding_name,
shape=shape,
dtype=dtypes.float32,
initializer=args.initializer,
trainable=trainable,
collections=weight_collections)
graph.add_to_collection(shared_embedding_collection_name, embeddings)
else:
embeddings = contrib_variables.model_variable(
name='weights',
shape=[args.vocab_size, args.dimension],
dtype=dtypes.float32,
initializer=args.initializer,
trainable=trainable,
collections=weight_collections)
if isinstance(embeddings, variables.Variable):
embeddings = [embeddings]
else:
embeddings = embeddings._get_variable_list() # pylint: disable=protected-access
# pylint: disable=protected-access
_maybe_restore_from_checkpoint(
column._checkpoint_path(), embeddings)
return embedding_ops.safe_embedding_lookup_sparse(
embeddings,
input_tensor,
sparse_weights=weight_tensor,
combiner=args.combiner,
name=column.name + 'weights')
def _input_from_feature_columns(columns_to_tensors,
feature_columns,
weight_collections,
trainable,
scope,
output_rank,
default_name):
"""Implementation of `input_from(_sequence)_feature_columns`."""
check_feature_columns(feature_columns)
with variable_scope.variable_scope(scope,
default_name=default_name,
values=columns_to_tensors.values()):
output_tensors = []
transformer = _Transformer(columns_to_tensors)
if weight_collections:
weight_collections = list(set(list(weight_collections) +
[ops.GraphKeys.GLOBAL_VARIABLES]))
for column in sorted(set(feature_columns), key=lambda x: x.key):
with variable_scope.variable_scope(None,
default_name=column.name,
values=columns_to_tensors.values()):
transformed_tensor = transformer.transform(column)
try:
# pylint: disable=protected-access
arguments = column._deep_embedding_lookup_arguments(
transformed_tensor)
output_tensors.append(_embeddings_from_arguments(
column,
arguments,
weight_collections,
trainable,
output_rank=output_rank))
except NotImplementedError as ee:
try:
# pylint: disable=protected-access
output_tensors.append(column._to_dnn_input_layer(
transformed_tensor,
weight_collections,
trainable,
output_rank=output_rank))
except ValueError as e:
raise ValueError('Error creating input layer for column: {}.\n'
'{}, {}'.format(column.name, e, ee))
return array_ops.concat(output_rank - 1, output_tensors)
def input_from_feature_columns(columns_to_tensors,
feature_columns,
weight_collections=None,
trainable=True,
scope=None):
"""A tf.contrib.layer style input layer builder based on FeatureColumns.
Generally a single example in training data is described with feature columns.
At the first layer of the model, this column oriented data should be converted
to a single tensor. Each feature column needs a different kind of operation
during this conversion. For ex
|
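The (truncated) docstring above describes converting column-oriented feature data into a single input tensor. A minimal usage sketch, assuming the TF 1.x tf.contrib.layers API this module belongs to; the feature names and values are made up:

```python
import tensorflow as tf

# Two hypothetical real-valued feature columns.
age = tf.contrib.layers.real_valued_column("age")
height = tf.contrib.layers.real_valued_column("height")

# Column-oriented input: one tensor per feature, for a batch of two examples.
columns_to_tensors = {
    "age": tf.constant([[23.0], [35.0]]),
    "height": tf.constant([[1.70], [1.82]]),
}

# Concatenates the per-column tensors into a single [batch_size, 2] input layer.
input_layer = tf.contrib.layers.input_from_feature_columns(
    columns_to_tensors, feature_columns=[age, height])
```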
| kikocorreoso/brython | www/src/Lib/test/test_glob.py | Python | bsd-3-clause | 13,345 | 0.000225 |
import glob
import os
import shutil
import sys
import unittest
from test.support import (TESTFN, skip_unless_symlink,
can_symlink, create_empty_file, change_cwd)
class GlobTests(unittest.TestCase):
def norm(self, *parts):
return os.path.normpath(os.path.join(self.tempdir, *parts))
def joins(self, *tuples):
return [os.path.join(self.tempdir, *parts) for parts in tuples]
def mktemp(self, *parts):
filename = self.norm(*parts)
base, file = os.path.split(filename)
if not os.path.exists(base):
os.makedirs(base)
create_empty_file(filename)
def setUp(self):
self.tempdir = TESTFN + "_dir"
self.mktemp('a', 'D')
self.mktemp('aab', 'F')
self.mktemp('.aa', 'G')
self.mktemp('.bb', 'H')
self.mktemp('aaa', 'zzzF')
self.mktemp('ZZZ')
self.mktemp('EF')
self.mktemp('a', 'bcd', 'EF')
self.mktemp('a', 'bcd', 'efg', 'ha')
if can_symlink():
os.symlink(self.norm('broken'), self.norm('sym1'))
os.symlink('broken', self.norm('sym2'))
os.symlink(os.path.join('a', 'bcd'), self.norm('sym3'))
def tearDown(self):
shutil.rmtree(self.tempdir)
def glob(self, *parts, **kwargs):
if len(parts) == 1:
pattern = parts[0]
else:
pattern = os.path.join(*parts)
p = os.path.join(self.tempdir, pattern)
res = glob.glob(p, **kwargs)
self.assertCountEqual(glob.iglob(p, **kwargs), res)
bres = [os.fsencode(x) for x in res]
self.assertCountEqual(glob.glob(os.fsencode(p), **kwargs), bres)
self.assertCountEqual(glob.iglob(os.fsencode(p), **kwargs), bres)
return res
def assertSequencesEqual_noorder(self, l1, l2):
l1 = list(l1)
l2 = list(l2)
self.assertEqual(set(l1), set(l2))
self.assertEqual(sorted(l1), sorted(l2))
def test_glob_literal(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob('a'), [self.norm('a')])
eq(self.glob('a', 'D'), [self.norm('a', 'D')])
eq(self.glob('aab'), [self.norm('aab')])
eq(self.glob('zymurgy'), [])
res = glob.glob('*')
self.assertEqual({type(r) for r in res}, {str})
res = glob.glob(os.path.join(os.curdir, '*'))
self.assertEqual({type(r) for r in res}, {str})
res = glob.glob(b'*')
self.assertEqual({type(r) for r in res}, {bytes})
res = glob.glob(os.path.join(os.fsencode(os.curdir), b'*'))
self.assertEqual({type(r) for r in res}, {bytes})
def test_glob_one_directory(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob('a*'), map(self.norm, ['a', 'aab', 'aaa']))
eq(self.glob('*a'), map(self.norm, ['a', 'aaa']))
eq(self.glob('.*'), map(self.norm, ['.aa', '.bb']))
eq(self.glob('?aa'), map(self.norm, ['aaa']))
eq(self.glob('aa?'), map(self.norm, ['aaa', 'aab']))
eq(self.glob('aa[ab]'), map(self.norm, ['aaa', 'aab']))
eq(self.glob('*q'), [])
def test_glob_nested_directory(self):
eq = self.assertSequencesEqual_noorder
if os.path.normcase("abCD") == "abCD":
# case-sensitive filesystem
eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF')])
else:
# case insensitive filesystem
eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF'),
self.norm('a', 'bcd', 'efg')])
eq(self.glob('a', 'bcd', '*g'), [self.norm('a', 'bcd', 'efg')])
def test_glob_directory_names(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob('*', 'D'), [self.norm('a', 'D')])
eq(self.glob('*', '*a'), [])
eq(self.glob('a', '*', '*', '*a'),
[self.norm('a', 'bcd', 'efg', 'ha')])
eq(self.glob('?a?', '*F'), [self.norm('aaa', 'zzzF'),
self.norm('aab', 'F')])
def test_glob_directory_with_trailing_slash(self):
# Patterns ending with a slash shouldn't match non-dirs
res = glob.glob(self.norm('Z*Z') + os.sep)
self.assertEqual(res, [])
res = glob.glob(self.norm('ZZZ') + os.sep)
self.assertEqual(res, [])
# When there is a wildcard pattern which ends with os.sep, glob()
# doesn't blow up.
res = glob.glob(self.norm('aa*') + os.sep)
self.assertEqual(len(res), 2)
# either of these results is reasonable
self.assertIn(set(res), [
{self.norm('aaa'), self.norm('aab')},
{self.norm('aaa') + os.sep, self.norm('aab') + os.sep},
])
def test_glob_bytes_directory_with_trailing_slash(self):
# Same as test_glob_directory_with_trailing_slash, but with a
# bytes argument.
res = glob.glob(os.fsencode(self.norm('Z*Z') + os.sep))
self.assertEqual(res, [])
res = glob.glob(os.fsencode(self.norm('ZZZ') + os.sep))
self.assertEqual(res, [])
res = glob.glob(os.fsencode(self.norm('aa*') + os.sep))
self.assertEqual(len(res), 2)
# either of these results is reasonable
self.assertIn(set(res), [
{os.fsencode(self.norm('aaa')),
os.fsencode(self.norm('aab'))},
{os.fsencode(self.norm('aaa') + os.sep),
os.fsencode(self.norm('aab') + os.sep)},
])
@skip_unless_symlink
def test_glob_symlinks(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob('sym3'), [self.norm('sym3')])
eq(self.glob('sym3', '*'), [self.norm('sym3', 'EF'),
self.norm('sym3', 'efg')])
self.assertIn(self.glob('sym3' + os.sep),
[[self.norm('sym3')], [self.norm('sym3') + os.sep]])
eq(self.glob('*', '*F'),
[self.norm('aaa', 'zzzF'),
self.norm('aab', 'F'), self.norm('sym3', 'EF')])
@skip_unless_symlink
def test_glob_broken_symlinks(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob('sym*'), [self.norm('sym1'), self.norm('sym2'),
self.norm('sym3')])
eq(self.glob('sym1'), [self.norm('sym1')])
eq(self.glob('sym2'), [self.norm('sym2')])
@unittest.skipUnless(sys.platform == "win32", "Win32 specific test")
def test_glob_magic_in_drive(self):
eq = self.assertSequencesEqual_noorder
eq(glob.glob('*:'), [])
eq(glob.glob(b'*:'), [])
eq(glob.glob('?:'), [])
eq(glob.glob(b'?:'), [])
eq(glob.glob('\\\\?\\c:\\'), ['\\\\?\\c:\\'])
eq(glob.glob(b'\\\\?\\c:\\'), [b'\\\\?\\c:\\'])
eq(glob.glob('\\\\*\\*\\'), [])
eq(glob.glob(b'\\\\*\\*\\'), [])
def check_escape(self, arg, expected):
self.assertEqual(glob.escape(arg), expected)
self.assertEqual(glob.escape(os.fsencode(arg)), os.fsencode(expected))
def test_escape(self):
check = self.check_escape
check('abc', 'abc')
check('[', '[[]')
check('?', '[?]')
check('*', '[*]')
check('[[_/*?*/_]]', '[[][[]_/[*][?][*]/_]]')
check('/[[_/*?*/_]]/', '/[[][[]_/[*][?][*]/_]]/')
@unittest.skipUnless(sys.platform == "win32", "Win32 specific test")
def test_escape_windows(self):
check = self.check_escape
check('?:?', '?:[?]')
check('*:*', '*:[*]')
check(r'\\?\c:\?', r'\\?\c:\[?]')
check(r'\\*\*\*', r'\\*\*\[*]')
check('//?/c:/?', '//?/c:/[?]')
check('//*/*/*', '//*/*/[*]')
def rglob(self, *parts, **kwargs):
return self.glob(*parts, recursive=True, **kwargs)
def test_recursive_glob(self):
eq = self.assertSequencesEqual_noorder
full = [('EF',), ('ZZZ',),
('a',), ('a', 'D'),
('a', 'bcd'),
('a', 'bcd', 'EF'),
('a', 'bcd', 'efg'),
('a', 'bcd', 'efg', 'ha'),
('aaa',),
|
| tommorris/mf2py | test/test_suite.py | Python | mit | 886 | 0.001129 |
from __future__ import unicode_literals, print_function
from nose.tools import assert_equal
import glob
import json
import mf2py
import os.path
import sys
from test_parser import check_unicode
assert_equal.__self__.maxDiff = None
def test_mf2tests():
allfiles = glob.glob(
os.path.join('.', 'testsuite', 'tests', '*', '*', '*.json'))
for jsonfile in allfiles:
htmlfile = jsonfile[:-4] + 'html'
with open(htmlfile) as f:
p = mf2py.parse(doc=f, url='http://example.com')
yield check_unicode, htmlfile, p
with open(jsonfile) as jsonf:
try:
s = json.load(jsonf)
except:
s = "bad file: " + jsonfile + sy
|
s.exc_info()[0]
yield check_mf2, htmlfile, p, s
def check_mf2(htmlfile, p, s):
# TODO ignore extra keys in p that are not in s
assert_equal(p, s)
|
| SurfasJones/djcmsrc3 | venv/lib/python2.7/site-packages/cms/models/permissionmodels.py | Python | mit | 5,696 | 0.002282 |
# -*- coding: utf-8 -*-
from django.conf import settings
from django.db import models
from django.utils import importlib
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ImproperlyConfigured
from django.contrib.auth.models import Group
from django.contrib.sites.models import Site
from cms.compat import is_user_swapped, user_model_label
from cms.models import Page
from cms.models.managers import (PagePermissionManager,
GlobalPagePermissionManager)
from cms.utils.helpers import reversion_register
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
# To avoid circular dependencies, don't use cms.compat.get_user_model, and
# don't depend on the app registry, to get the custom user model if used
if is_user_swapped:
user_app_name, user_model_name = user_model_label.rsplit('.', 1)
User = None
for app in settings.INSTALLED_APPS:
if app.endswith(user_app_name):
user_app_models = importlib.import_module(app + ".models")
User = getattr(user_app_models, user_model_name)
break
if User is None:
raise ImproperlyConfigured(
"You have defined a custom user model %s, but the app %s is not "
"in settings.INSTALLED_APPS" % (user_model_label, user_app_name)
)
else:
from django.contrib.auth.models import User
# NOTE: those are not just numbers!! we will do binary AND on them,
# so pay attention when adding/changing them, or MASKs..
ACCESS_PAGE = 1
ACCESS_CHILDREN = 2 # just immediate children (1 level)
ACCESS_PAGE_AND_CHILDREN = 3 # just immediate children (1 level)
ACCESS_DESCENDANTS = 4
ACCESS_PAGE_AND_DESCENDANTS = 5
# binary masks for ACCESS permissions
MASK_PAGE = 1
MASK_CHILDREN = 2
MASK_DESCENDANTS = 4
ACCESS_CHOICES = (
(ACCESS_PAGE, _('Current page')),
(ACCESS_CHILDREN, _('Page children (immediate)')),
(ACCESS_PAGE_AND_CHILDREN, _('Page and children (immediate)')),
(ACCESS_DESCENDANTS, _('Page descendants')),
(ACCESS_PAGE_AND_DESCENDANTS, _('Page and descendants')),
)
class AbstractPagePermission(models.Model):
"""Abstract page permissions
"""
# who:
user = models.ForeignKey(user_model_label, verbose_name=_("user"), blank=True, null=True)
group = models.ForeignKey(Group, verbose_name=_("group"), blank=True, null=True)
# what:
can_change = models.BooleanField(_("can edit"), default=True)
can_add = models.BooleanField(_("can add"), default=True)
can_delete = models.BooleanField(_("can delete"), default=True)
can_change_advanced_settings = models.BooleanField(_("can change advanced settings"), default=False)
can_publish = models.BooleanField(_("can publish"), default=True)
can_change_permissions = models.BooleanField(_("can change permissions"), default=False, help_text=_("on page level"))
can_move_page = models.BooleanField(_("can move"), default=True)
can_view = models.BooleanField(_("view restricted"), default=False, help_text=_("frontend view restriction"))
class Meta:
abstract = True
app_label = 'cms'
@property
def audience(self):
"""Return audience by priority, so: All or User, Group
"""
targets = filter(lambda item: item, (self.user, self.group,))
return ", ".join([force_unicode(t) for t in targets]) or 'No one'
def save(self, *args, **kwargs):
if not self.user and not self.group:
# don't allow `empty` objects
return
return super(AbstractPagePermission, self).save(*args, **kwargs)
@python_2_unicode_compatible
class GlobalPagePermission(AbstractPagePermission):
"""Permissions for all pages (global).
"""
can_recover_page = models.BooleanField(_("can recover pages"), default=True, help_text=_("can recover any deleted page"))
sites = models.ManyToManyField(Site, null=True, blank=True, help_text=_('If none selected, user haves granted permissions to all sites.'), verbose_name=_('sites'))
objects = GlobalPagePermissionManager()
class Meta:
verbose_name = _('Page global permission')
verbose_name_plural = _('Pages global permissions')
app_label = 'cms'
def __str__(self):
return "%s :: GLOBAL" % self.audience
@python_2_unicode_compatible
class PagePermission(AbstractPagePermission):
"""Page permissions for single page
"""
grant_on = models.IntegerField(_("Grant on"), choices=ACCESS_CHOICES, default=ACCESS_PAGE_AND_DESCENDANTS)
page = models.ForeignKey(Page, null=True, blank=True, verbose_name=_("page"))
objects = PagePermissionManager()
class Meta:
verbose_name = _('Page permission')
verbose_name_plural = _('Page permissions')
app_label = 'cms'
def __str__(self):
page = self.page_id and force_unicode(self.page) or "None"
return "%s :: %s has: %s" % (page, self.audience, force_unicode(dict(ACCESS_CHOICES)[self.grant_on]))
class PageUser(User):
"""Cms specific user data, required for permission system
"""
created_by = models.ForeignKey(user_model_label, related_name="created_users")
class Meta:
verbose_name = _('User (page)')
verbose_name_plural = _('Users (page)')
app_label = 'cms'
class PageUserGroup(Group):
"""Cms specific group data, required for permission system
"""
created_by = models.ForeignKey(user_model_label, related_name="created_usergroups")
class Meta:
verbose_name = _('User group (page)')
verbose_name_plural = _('User groups (page)')
app_label = 'cms'
reversion_register(PagePermission)
|
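The ACCESS_* values above are chosen so that the MASK_* constants can be tested with bitwise AND (ACCESS_PAGE_AND_CHILDREN is MASK_PAGE | MASK_CHILDREN, ACCESS_PAGE_AND_DESCENDANTS is MASK_PAGE | MASK_DESCENDANTS). A minimal sketch of how a grant_on value might be checked; these helper functions are hypothetical, not django CMS's actual permission-checking code:

```python
def applies_to_page(permission):
    # True for ACCESS_PAGE (1), ACCESS_PAGE_AND_CHILDREN (3), ACCESS_PAGE_AND_DESCENDANTS (5).
    return bool(permission.grant_on & MASK_PAGE)

def applies_to_children(permission):
    # True for ACCESS_CHILDREN (2) and ACCESS_PAGE_AND_CHILDREN (3).
    return bool(permission.grant_on & MASK_CHILDREN)

def applies_to_descendants(permission):
    # True for ACCESS_DESCENDANTS (4) and ACCESS_PAGE_AND_DESCENDANTS (5).
    return bool(permission.grant_on & MASK_DESCENDANTS)
```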
| sgraham/nope | tools/perf/profile_creators/small_profile_creator.py | Python | bsd-3-clause | 2,596 | 0.00963 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import logging
import page_sets
from telemetry import benchmark
from telemetry.page import page_test
from telemetry.page import test_expectations
from telemetry.results import results_options
from telemetry.user_story import user_story_runner
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
This consists of performing a single navigation to each of the top 25 pages.
"""
class PageTest(page_test.PageTest):
def __init__(self):
super(SmallProfileCreator.PageTest, self).__init__()
self._page_set = page_sets.Typical25PageSet()
self._ValidatePageSet(self._page_set)
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
@staticmethod
def _ValidatePageSet(page_set):
"""Raise an exception if |page_set| uses more than one WPR archive."""
wpr_paths = set(page_set.WprFilePathForUserStory(p)
for p in page_set if not p.is_local)
if len(wpr_paths) > 1:
raise Exception("Invalid page set: has multiple WPR archives: %s" %
','.join(sorted(wpr_paths)))
def TabForPage(self, page, browser):
"""Superclass override."""
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def ValidateAndMeasurePage(self, page, tab, results):
"""Superclass override."""
tab.WaitForDocumentReadyStateToBeComplete()
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_test = SmallProfileCreator.PageTest()
def Run(self, options):
expectations = test_expectations.TestExpectations()
results = results_options.CreateResults(
benchmark.BenchmarkMetadata(profile_creator.__class__.__name__),
options)
user_story_runner.Run(self._page_test, self._page_test._page_set,
expectations, options, results)
if results.failures:
logging.warning('Some pages failed to load.')
logging.warning('Failed pages:\n%s',
'\n'.join(map(str, results.pages_that_failed)))
raise Exception('SmallProfileCreator failed.')
|
| mahmoud/womp | womp/inputs/__init__.py | Python | gpl-3.0 | 1,255 | 0.001594 |
#from feedback import FeedbackV4 # removed from WP API
#from feedback import FeedbackV5 # removed from WP API
from article_history import ArticleHistory
from assessment import Assessment
from backlinks import Backlinks
from dom import DOM
from google import GoogleNews
from google import GoogleSearch
from grokse import PageViews
from interwikilinks import InterWikiLinks
from langlinks import LangLinks
from nineteen_dom import NineteenDOM
from protection import Protection
from revisions import Revisions
from templates import ParsedTemplates
from watchers import Watchers
from wikitrust import Wikitrust
ALL_INPUTS = [ArticleHistory,
Assessment,
Backlinks,
DOM,
GoogleNews,
GoogleSearch,
InterWikiLinks,
LangLinks,
NineteenDOM,
PageViews,
ParsedTemplates,
Protection,
Revisions,
Watchers,
Wikitrust]
DEFAULT_INPUTS = [ArticleHistory,
Backlinks,
InterWikiLinks,
LangLinks,
NineteenDOM,
PageViews,
ParsedTemplates,
Protection]
|
| google-research/disentanglement_lib | disentanglement_lib/evaluation/metrics/fairness.py | Python | apache-2.0 | 8,341 | 0.007553 |
# coding=utf-8
# Copyright 2018 The DisentanglementLib Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
|
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fairness metric task.
In this task, we compute the unfairness of a classifier trained on a given
representation. This procedure is described in Section 4 of the paper "On the
Fairness of Disentangled Representations", Francesco Locatello, Gabriele Abbati,
Tom Rainforth, Stefan Bauer, Bernhard Schoelkopf, Olivier Bachem, NeurIPS 2019,
https://arxiv.org/abs/1905.13662.
The unfairness of a classifier is computed as the total variation distance
between the distribution of predictions of the target variable for different
interventions on the sensitive variable. This approach is described in Section
4 of the paper.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from disentanglement_lib.evaluation.metrics import utils
import numpy as np
from six.moves import range
import gin.tf
@gin.configurable(
"fairness",
blacklist=["ground_truth_data", "representation_function", "random_state",
"artifact_dir"])
def compute_fairness(ground_truth_data,
representation_function,
random_state,
artifact_dir=None,
num_train=gin.REQUIRED,
num_test_points_per_class=gin.REQUIRED,
batch_size=16):
"""Computes unfairness scores.
We first compute either the mean or maximum total variation for a given
sensitive and target variable. Then, we either average or take the maximum
with respect to target and sensitive variable. For convenience, we compute and
save all combinations. The score used in Section 4 of the paper is here called
mean_fairness:mean_pred:mean_sens.
Args:
ground_truth_data: GroundTruthData to be sampled from.
representation_function: Function that takes observations as input and
outputs a dim_representation sized representation for each observation.
random_state: Numpy random state used for randomness.
artifact_dir: Optional path to directory where artifacts can be saved.
num_train: Number of points used for training.
num_test_points_per_class: Number of points used for testing.
batch_size: Batch size for sampling.
Returns:
Dictionary with scores.
"""
del artifact_dir
factor_counts = ground_truth_data.factors_num_values
num_factors = len(factor_counts)
scores = {}
# Training a predictive model.
mus_train, ys_train = utils.generate_batch_factor_code(
ground_truth_data, representation_function, num_train, random_state,
batch_size)
predictor_model_fn = utils.make_predictor_fn()
# For each factor train a single predictive model.
mean_fairness = np.zeros((num_factors, num_factors), dtype=np.float64)
max_fairness = np.zeros((num_factors, num_factors), dtype=np.float64)
for i in range(num_factors):
model = predictor_model_fn()
model.fit(np.transpose(mus_train), ys_train[i, :])
for j in range(num_factors):
if i == j:
continue
# Sample a random set of factors once.
original_factors = ground_truth_data.sample_factors(
num_test_points_per_class, random_state)
counts = np.zeros((factor_counts[i], factor_counts[j]), dtype=np.int64)
for c in range(factor_counts[j]):
# Intervene on the sensitive attribute.
intervened_factors = np.copy(original_factors)
intervened_factors[:, j] = c
# Obtain the batched observations.
observations = ground_truth_data.sample_observations_from_factors(
intervened_factors, random_state)
representations = utils.obtain_representation(observations,
representation_function,
batch_size)
# Get the predictions.
predictions = model.predict(np.transpose(representations))
# Update the counts.
counts[:, c] = np.bincount(predictions, minlength=factor_counts[i])
mean_fairness[i, j], max_fairness[i, j] = inter_group_fairness(counts)
# Report the scores.
scores.update(compute_scores_dict(mean_fairness, "mean_fairness"))
scores.update(compute_scores_dict(max_fairness, "max_fairness"))
return scores
def compute_scores_dict(metric, prefix):
"""Computes scores for combinations of predictive and sensitive factors.
Either average or take the maximum with respect to target and sensitive
variable for all combinations of predictive and sensitive factors.
Args:
metric: Matrix of shape [num_factors, num_factors] with fairness scores.
prefix: Prefix for the matrix in the returned dictionary.
Returns:
Dictionary containing all combinations of predictive and sensitive factors.
"""
result = {}
# Report min and max scores for each predictive and sensitive factor.
for i in range(metric.shape[0]):
for j in range(metric.shape[1]):
if i != j:
result["{}:pred{}:sens{}".format(prefix, i, j)] = metric[i, j]
# Compute mean and max values across rows.
rows_means = []
rows_maxs = []
for i in range(metric.shape[0]):
relevant_scores = [metric[i, j] for j in range(metric.shape[1]) if i != j]
mean_score = np.mean(relevant_scores)
max_score = np.amax(relevant_scores)
result["{}:pred{}:mean_sens".format(prefix, i)] = mean_score
result["{}:pred{}:max_sens".format(prefix, i)] = max_score
rows_means.append(mean_score)
rows_maxs.append(max_score)
# Compute mean and max values across rows.
column_means = []
column_maxs = []
for j in range(metric.shape[1]):
relevant_scores = [metric[i, j] for i in range(metric.shape[0]) if i != j]
mean_score = np.mean(relevant_scores)
max_score = np.amax(relevant_scores)
result["{}:sens{}:mean_pred".format(prefix, j)] = mean_score
result["{}:sens{}:max_pred".format(prefix, j)] = max_score
column_means.append(mean_score)
column_maxs.append(max_score)
# Compute all combinations of scores.
result["{}:mean_sens:mean_pred".format(prefix)] = np.mean(column_means)
result["{}:mean_sens:max_pred".format(prefix)] = np.mean(column_maxs)
result["{}:max_sens:mean_pred".format(prefix)] = np.amax(column_means)
result["{}:max_sens:max_pred".format(prefix)] = np.amax(column_maxs)
result["{}:mean_pred:mean_sens".format(prefix)] = np.mean(rows_means)
result["{}:mean_pred:max_sens".format(prefix)] = np.mean(rows_maxs)
result["{}:max_pred:mean_sens".format(prefix)] = np.amax(rows_means)
result["{}:max_pred:max_sens".format(prefix)] = np.amax(rows_maxs)
return result
def inter_group_fairness(counts):
"""Computes the inter group fairness for predictions based on the TV distance.
Args:
counts: Numpy array with counts of predictions where rows correspond to
predicted classes and columns to sensitive classes.
Returns:
Mean and maximum total variation distance of a sensitive class to the
global average.
"""
# Compute the distribution of predictions across all sensitive classes.
overall_distribution = np.sum(counts, axis=1, dtype=np.float32)
overall_distribution /= overall_distribution.sum()
# Compute the distribution for each sensitive class.
normalized_counts = np.array(counts, dtype=np.float32)
counts_per_class = np.sum(counts, axis=0)
normalized_counts /= np.expand_dims(counts_per_class, 0)
# Compute the differences and sum up for each sensitive class.
differences = normalized_counts - np.expand_dims(overall_distribution, 1)
total_variation_distances = np.sum(np.abs(difference
|
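inter_group_fairness above (its final lines are cut off in this excerpt) compares each sensitive class's prediction distribution against the distribution pooled over all sensitive classes. A minimal self-contained sketch of that total-variation computation on a toy counts matrix; it mirrors the documented behavior rather than reproducing the library's exact code:

```python
import numpy as np

# Rows are predicted classes, columns are sensitive classes (toy counts, made up).
counts = np.array([[8, 2],
                   [2, 8]], dtype=np.float64)

# Distribution of predictions pooled over all sensitive classes.
overall = counts.sum(axis=1)
overall /= overall.sum()                       # [0.5, 0.5]

# Per-sensitive-class prediction distributions (each column sums to 1).
per_class = counts / counts.sum(axis=0, keepdims=True)

# Total variation distance of each sensitive class to the pooled distribution.
tv = 0.5 * np.abs(per_class - overall[:, None]).sum(axis=0)
print(tv.mean(), tv.max())                     # mean and maximum unfairness: 0.3 0.3
```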
| jbzdak/edx-platform | openedx/core/djangoapps/user_api/views.py | Python | agpl-3.0 | 33,442 | 0.001615 |
"""HTTP end-points for the User API. """
import copy
from opaque_keys import InvalidKeyError
from django.conf import settings
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.core.exceptions import ImproperlyConfigured, NON_FIELD_ERRORS, ValidationError
from django.utils.translation import ugettext as _
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie, csrf_protect, csrf_exempt
from opaque_keys.edx import locator
from rest_framework import authentication
from rest_framework import filters
from rest_framework import generics
from rest_framework import status
from rest_framework import viewsets
from rest_framework.views import APIView
from rest_framework.exceptions import ParseError
from django_countries import countries
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from openedx.core.lib.api.permissions import ApiKeyHeaderPermission
import third_party_auth
from django_comment_common.models import Role
from edxmako.shortcuts import marketing_link
from student.views import create_account_with_params
from student.cookies import set_logged_in_cookies
from openedx.core.lib.api.authentication import SessionAuthenticationAllowInactiveUser
from util.json_request import JsonResponse
from .preferences.api import update_email_opt_in
from .helpers import FormDescription, shim_student_view, require_post_params
from .models import UserPreference, UserProfile
from .accounts import (
NAME_MAX_LENGTH, EMAIL_MIN_LENGTH, EMAIL_MAX_LENGTH, PASSWORD_MIN_LENGTH, PASSWORD_MAX_LENGTH,
USERNAME_MIN_LENGTH, USERNAME_MAX_LENGTH
)
from .accounts.api import check_account_exists
from .serializers import UserSerializer, UserPreferenceSerializer
class LoginSessionView(APIView):
"""HTTP end-points for logging in users. """
# This end-point is available to anonymous users,
# so do not require authentication.
authentication_classes = []
@method_decorator(ensure_csrf_cookie)
def get(self, request):
"""Return a description of the login form.
This decouples clients from the API definition:
if the API decides to modify the form, clients won't need
to be updated.
See `user_api.helpers.FormDescription` for examples
of the JSON-encoded form description.
Returns:
HttpResponse
"""
form_desc = FormDescription("post", reverse("user_api_login_session"))
# Translators: This label appears above a field on the login form
# meant to hold the user's email address.
email_label = _(u"Email")
# Translators: This example email address is used as a placeholder in
# a field on the login form meant to hold the user's email address.
email_placeholder = _(u"username@domain.com")
# Translators: These instructions appear on the login form, immediately
# below a field meant to hold the user's email address.
email_instructions = _(
u"The email address you used to register with {platform_name}"
).format(platform_name=settings.PLATFORM_NAME)
form_desc.add_field(
"email",
field_type="email",
label=email_label,
placeholder=email_placeholder,
instructions=email_instructions,
restrictions={
"min_length": EMAIL_MIN_LENGTH,
"max_length": EMAIL_MAX_LENGTH,
}
)
# Translators: This label appears above a field on the login form
# meant to hold the user's password.
password_label = _(u"Password")
form_desc.add_field(
"password",
label=password_label,
field_ty
|
pe="password",
restrictions={
"min_length": PASSWORD_MIN_LENGTH,
"max_length": PASSWORD_MAX_LENGTH,
}
)
form_desc.add_field(
"remember",
field_type="checkbox",
label=_("Remember me"),
default=False,
required=False,
)
return HttpResponse(form_desc.to_
|
json(), content_type="application/json")
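    # Illustrative sketch (not part of the original view): the JSON built from
    # the add_field() calls above is what a client renders. Assuming
    # FormDescription.to_json() serializes the method, the submit URL and a
    # list of field dicts (an assumption -- see user_api.helpers.FormDescription
    # for the exact schema), the response looks roughly like:
    #
    #   {"method": "post",
    #    "submit_url": reverse("user_api_login_session"),
    #    "fields": [
    #        {"name": "email", "type": "email", "label": "Email",
    #         "placeholder": "username@domain.com",
    #         "restrictions": {"min_length": EMAIL_MIN_LENGTH,
    #                          "max_length": EMAIL_MAX_LENGTH}},
    #        {"name": "password", "type": "password",
    #         "restrictions": {"min_length": PASSWORD_MIN_LENGTH,
    #                          "max_length": PASSWORD_MAX_LENGTH}},
    #        {"name": "remember", "type": "checkbox", "label": "Remember me",
    #         "default": False, "required": False}]}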
@method_decorator(require_post_params(["email", "password"]))
@method_decorator(csrf_protect)
def post(self, request):
"""Log in a user.
You must send all required form fields with the request.
You can optionally send an `analytics` param with a JSON-encoded
object with additional info to include in the login analytics event.
Currently, the only supported field is "enroll_course_id" to indicate
that the user logged in while enrolling in a particular course.
Arguments:
request (HttpRequest)
Returns:
HttpResponse: 200 on success
HttpResponse: 400 if the request is not valid.
HttpResponse: 403 if authentication failed.
403 with content "third-party-auth" if the user
has successfully authenticated with a third party provider
but does not have a linked account.
HttpResponse: 302 if redirecting to another page.
Example Usage:
POST /user_api/v1/login_session
with POST params `email`, `password`, and `remember`.
200 OK
"""
# For the initial implementation, shim the existing login view
# from the student Django app.
from student.views import login_user
return shim_student_view(login_user, check_logged_in=True)(request)
class RegistrationView(APIView):
"""HTTP end-points for creating a new user. """
DEFAULT_FIELDS = ["email", "name", "username", "password"]
EXTRA_FIELDS = [
"city",
"country",
"gender",
"year_of_birth",
"level_of_education",
"mailing_address",
"goals",
"honor_code",
"terms_of_service",
]
# This end-point is available to anonymous users,
# so do not require authentication.
authentication_classes = []
def _is_field_visible(self, field_name):
"""Check whether a field is visible based on Django settings. """
return self._extra_fields_setting.get(field_name) in ["required", "optional"]
def _is_field_required(self, field_name):
"""Check whether a field is required based on Django settings. """
return self._extra_fields_setting.get(field_name) == "required"
def __init__(self, *args, **kwargs):
super(RegistrationView, self).__init__(*args, **kwargs)
# Backwards compatibility: Honor code is required by default, unless
# explicitly set to "optional" in Django settings.
self._extra_fields_setting = copy.deepcopy(settings.REGISTRATION_EXTRA_FIELDS)
self._extra_fields_setting["honor_code"] = self._extra_fields_setting.get("honor_code", "required")
# Check that the setting is configured correctly
for field_name in self.EXTRA_FIELDS:
if self._extra_fields_setting.get(field_name, "hidden") not in ["required", "optional", "hidden"]:
msg = u"Setting REGISTRATION_EXTRA_FIELDS values must be either required, optional, or hidden."
raise ImproperlyConfigured(msg)
# Map field names to the instance method used to add the field to the form
self.field_handlers = {}
for field_name in self.DEFAULT_FIELDS + self.EXTRA_FIELDS:
handler = getattr(self, "_add_{field_name}_field".format(field_name=field_name))
self.field_handlers[field_name] = handler
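    # Illustrative sketch (not part of the original view): the Django setting
    # consumed in __init__ maps each EXTRA_FIELDS name to "required",
    # "optional" or "hidden"; any other value raises ImproperlyConfigured.
    # A deployment might configure it roughly like this (example values only):
    #
    #   REGISTRATION_EXTRA_FIELDS = {
    #       "honor_code": "required",
    #       "country": "required",
    #       "year_of_birth": "optional",
    #       "mailing_address": "hidden",
    #   }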
@method_decorator(ensure_csrf_cookie)
def get(self, request):
"""Return a description of the registration form.
This decouples clients from the API definition:
if the API decides to modify the form, clients won't need
to be updated.
This is especially important for the registration form,
since different edx-platform installations might
collect different demographic information.
|
tax/pywhatsapp
|
whatsapp.py
|
Python
|
bsd-3-clause
| 6,224
| 0.003213
|
import os
import threading
import logging
from yowsup import env
from yowsup.stacks import YowStack, YOWSUP_CORE_LAYERS, YOWSUP_PROTOCOL_LAYERS_FULL
from yowsup.layers import YowLayerEvent
from yowsup.layers.auth import YowAuthenticationProtocolLayer
from yowsup.layers.coder import YowCoderLayer
from yowsup.layers.network import YowNetworkLayer
from yowsup.layers.interface import YowInterfaceLayer, ProtocolEntityCallback
from yowsup.layers.protocol_media.mediauploader import MediaUploader
from yowsup.common import YowConstants
from yowsup.layers.protocol_messages.protocolentities import TextMessageProtocolEntity
from yowsup.layers.protocol_media.protocolentities import (
ImageDownloadableMediaMessageProtocolEntity,
DownloadableMediaMessageProtocolEntity,
RequestUploadIqProtocolEntity
)
logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)
EXT_IMAGE = ['.jpg', '.png']
EXT_AUDIO = ['.mp3', '.wav', '.aac', '.wma', '.ogg', '.oga']
EXT_VIDEO = ['.mp4']
class SendLayer(YowInterfaceLayer):
PROP_MESSAGES = "org.
|
openwhatsapp.yowsup.prop.sendclient.queue"
def __in
|
it__(self):
super(SendLayer, self).__init__()
self.ackQueue = []
self.lock = threading.Condition()
def get_upload_entity(self, path):
filename, extension = os.path.splitext(path)
if extension in EXT_IMAGE:
return RequestUploadIqProtocolEntity(
RequestUploadIqProtocolEntity.MEDIA_TYPE_IMAGE, filePath=path
)
if extension in EXT_VIDEO:
return RequestUploadIqProtocolEntity(
RequestUploadIqProtocolEntity.MEDIA_TYPE_VIDEO, filePath=path
)
if extension in EXT_AUDIO:
return RequestUploadIqProtocolEntity(
RequestUploadIqProtocolEntity.MEDIA_TYPE_AUDIO, filePath=path
)
self.disconnect("ERROR MEDIA")
@ProtocolEntityCallback("success")
def on_success(self, success_protocol_entity):
self.lock.acquire()
for target in self.getProp(self.__class__.PROP_MESSAGES, []):
phone, message, is_media = target
jid = "%s@s.whatsapp.net" % phone
if is_media:
path = message
entity = self.get_upload_entity(path)
                # Bind jid and path as default arguments so each callback keeps
                # the values from its own loop iteration (late-binding fix).
                success_fn = lambda success, original, jid=jid, path=path: self.on_request_upload_result(jid, path, success, original)
                error_fn = lambda error, original, jid=jid, path=path: self.on_request_upload_error(jid, path, error, original)
self._sendIq(entity, success_fn, error_fn)
else:
message_entity = TextMessageProtocolEntity(message, to=jid)
self.ackQueue.append(message_entity.getId())
self.toLower(message_entity)
self.lock.release()
@ProtocolEntityCallback("ack")
def on_ack(self, entity):
self.lock.acquire()
        # If the incoming id matches one in the ackQueue, pop that message id out.
if entity.getId() in self.ackQueue:
self.ackQueue.pop(self.ackQueue.index(entity.getId()))
if not len(self.ackQueue):
self.lock.release()
logger.info("Message sent")
raise KeyboardInterrupt()
def disconnect(self, result=None):
self.broadcastEvent(YowLayerEvent(YowNetworkLayer.EVENT_STATE_DISCONNECT))
if result:
raise ValueError(result)
def on_request_upload_result(self, jid, file_path, result_entity, request_entity):
if result_entity.isDuplicate():
self.send_file(file_path, result_entity.getUrl(), jid, result_entity.getIp())
else:
uploader = MediaUploader(
jid, self.getOwnJid(),
file_path,
result_entity.getUrl(),
result_entity.getResumeOffset(),
self.on_upload_success,
self.on_upload_error,
self.on_upload_progress,
async=False
)
uploader.start()
def on_request_upload_error(self, *args):
self.disconnect("ERROR REQUEST")
def on_upload_error(self, file_path, jid, url):
self.disconnect("ERROR UPLOAD")
def on_upload_success(self, file_path, jid, url):
self.send_file(file_path, url, jid)
def on_upload_progress(self, file_path, jid, url, progress):
logger.info("Progress: {}".format(progress))
def send_file(self, file_path, url, to, ip=None):
filename, extension = os.path.splitext(file_path)
entity = None
if extension in EXT_IMAGE:
entity = ImageDownloadableMediaMessageProtocolEntity.fromFilePath(file_path, url, ip, to)
elif extension in EXT_VIDEO:
entity = DownloadableMediaMessageProtocolEntity.fromFilePath(file_path, url, "video", ip, to)
elif extension in EXT_AUDIO:
entity = DownloadableMediaMessageProtocolEntity.fromFilePath(file_path, url, "audio", ip, to)
if entity:
self.toLower(entity)
class Client(object):
def __init__(self, login, password):
self.login = login
self.password = password
def _send_message(self, to, message, is_media=False):
layers = (SendLayer,) + (YOWSUP_PROTOCOL_LAYERS_FULL,) + YOWSUP_CORE_LAYERS
self.stack = YowStack(layers)
self.stack.setProp(YowAuthenticationProtocolLayer.PROP_PASSIVE, True)
self.stack.setProp(YowAuthenticationProtocolLayer.PROP_CREDENTIALS, (self.login, self.password))
self.stack.setProp(YowNetworkLayer.PROP_ENDPOINT, YowConstants.ENDPOINTS[0])
self.stack.setProp(YowCoderLayer.PROP_DOMAIN, YowConstants.DOMAIN)
self.stack.setProp(YowCoderLayer.PROP_RESOURCE, env.YowsupEnv.getCurrent().getResource())
self.stack.setProp(SendLayer.PROP_MESSAGES, [([to, message, is_media])])
self.stack.broadcastEvent(YowLayerEvent(YowNetworkLayer.EVENT_STATE_CONNECT))
try:
self.stack.loop()
except KeyboardInterrupt:
pass
def send_message(self, to, message):
self._send_message(to, message)
def send_media(self, to, path):
self._send_message(to, path, is_media=True)
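if __name__ == '__main__':
    # Illustrative usage sketch (not part of the original module). The phone
    # numbers and password below are placeholders; the credentials are assumed
    # here to be the full number without a leading '+' plus the WhatsApp
    # registration password -- treat the exact format as an assumption.
    client = Client(login='491234567890', password='registration-password')
    client.send_message('491234567890', 'hello from pywhatsapp')
    client.send_media('491234567890', '/path/to/picture.jpg')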
|
eltonkevani/tempest_el_env
|
tempest/api/compute/volumes/test_volumes_list.py
|
Python
|
apache-2.0
| 4,824
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest.test import attr
class VolumesTestJSON(base.BaseV2ComputeTest):
"""
This test creates a number of 1G volumes. To run successfully,
ensure that the backing file for the volume group that Nova uses
has space for at least 3 1G volumes!
If you are running a Devstack environment, ensure that the
    VOLUME_BACKING_FILE_SIZE is at least 4G in your localrc
"""
_interface = 'json'
@classmethod
def setUpClass(cls):
super(VolumesTestJSON, cls).setUpClass()
cls.client = cls.volumes_extensions_client
if not cls.config.service_available.cinder:
skip_msg = ("%s skipped as Cinder is not available" % cls.__name__)
raise cls.skipException(skip_msg)
# Create 3 Volumes
cls.volume_list = []
cls.volume_id_list = []
for i in range(3):
v_name = data_utils.rand_name('volume-%s')
metadata = {'Type': 'work'}
v_name += cls._interface
try:
resp, volume = cls.client.create_volume(size=1,
display_name=v_name,
metadata=metadata)
cls.client.wait_for_volume_status(volume['id'], 'available')
resp, volume = cls.client.get_volume(volume['id'])
cls.volume_list.append(volume)
cls.volume_id_list.append(volume['id'])
except Exception:
if cls.volume_list:
# We could not create all the volumes, though we were able
# to create *some* of the volumes. This is typically
# because the backing file size of the volume group is
# too small. So, here, we clean up whatever we did manage
# to create and raise a SkipTest
for volume in cls.volume_list:
cls.client.delete_volume(volume)
msg = ("Failed to create ALL necessary volumes to run "
"test. This typically means that the backing file "
"size of the nova-volumes group is too small to "
"create the 3 volumes needed by this test case")
raise cls.skipException(msg)
raise
@classmethod
def tearDownClass(cls):
# Delete the created Volumes
for volume in cls.volume_list:
resp, _ = cls.client.delete_volume(volume['id'])
cls.client.wait_for_resource_deletion(volume['id'])
super(VolumesTestJSON, cls).tearDownClass()
@attr(type='gate')
def test_volume_list(self):
# Should return the list of Volumes
# Fetch all Volumes
resp, fetched_list = self.client.list_volumes()
self.assertEqual(200, resp.status)
# Now check if all the Volumes created in setup are in fetched list
missing_volumes = [
v for v in self.volume_list if v not in fetched_list
]
self.assertFalse(missing_volumes,
"Failed to find volume %s in fetched list" %
', '.join(m_vol['displayName']
for m_vol in missing_volumes))
@attr(type='gate')
def test_volume_list_with_details(self):
# Should return the list of Volumes with details
# Fetch all Volumes
resp, fetched_list = self.client.list_volumes_with_detail()
self.assertEqual(200, resp.status)
# Now check if all the Volumes created in setup are in fetched list
missing_volumes = [
|
v for v in self.volume_list if v not in fetched_list
]
self.assertFalse(missing_volumes,
"Failed to find volume %s in fetched list" %
', '.join(m_vol['displayName']
for m_vol in missing_volumes))
class VolumesTes
|
tXML(VolumesTestJSON):
_interface = 'xml'
|
bearstech/ansible
|
lib/ansible/modules/cloud/google/gce_instance_template.py
|
Python
|
gpl-3.0
| 19,122
| 0.00068
|
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gce_instance_template
version_added: "2.3"
short_description: Create or destroy Compute Engine instance templates on GCP.
description:
  - Creates or destroys Google Compute Engine instance templates
    on Google Cloud Platform.
options:
state:
description:
- The desired state for the instance template.
default: "present"
choices: ["present", "absent"]
name:
description:
- The name of the GCE instance template.
required: true
default: null
size:
description:
- The desired machine type for the instance template.
default: "f1-micro"
source:
description:
- A source disk to attach to the instance.
Cannot specify both I(image) and I(source).
default: null
image:
description:
- The image to use to create the instance.
      Cannot specify both I(image) and I(source).
default: null
image_family:
description:
- The image family to use to create the instance.
If I(image) has been used I(image_family) is ignored.
Cannot specify both I(image) and I(source).
default: null
disk_type:
description:
- Specify a C(pd-standard) disk or C(pd-ssd)
for an SSD disk.
default: pd-standard
disk_auto_delete:
description:
- Indicate that the boot disk should be
deleted when the Node is deleted.
default: true
network:
description:
- The network to associate with the instance.
default: "default"
subnetwork:
description:
- The Subnetwork resource name for this instance.
default: null
can_ip_forward:
description:
- Set to True to allow instance to
send/receive non-matching src/dst packets.
default: false
external_ip:
description:
- The external IP address to use.
If C(ephemeral), a new non-static address will be
used. If C(None), then no external address will
be used. To use an existing static IP address
specify address name.
default: "ephemeral"
service_account_email:
description:
- service account email
default: null
service_account_permissions:
description:
- service account permissions (see
U(https://cloud.google.com/sdk/gcloud/reference/compute/instances/create),
--scopes section for detailed information)
default: null
choices: [
"bigquery", "cloud-platform", "compute-ro", "compute-rw",
"useraccounts-ro", "useraccounts-rw", "datastore", "logging-write",
"monitoring", "sql-admin", "storage-full", "storage-ro",
"storage-rw", "taskqueue", "userinfo-email"
]
automatic_restart:
description:
- Defines whether the instance should be
automatically restarted when it is
terminated by Compute Engine.
default: null
preemptible:
description:
- Defines whether the instance is preemptible.
default: null
tags:
description:
- a comma-separated list of tags to associate with the instance
default: null
metadata:
description:
- a hash/dictionary of custom data for the instance;
'{"key":"value", ...}'
default: null
description:
description:
- description of instance template
default: null
disks:
description:
- a list of persistent disks to attach to the instance; a string value
gives the name of the disk; alternatively, a dictionary value can
define 'name' and 'mode' ('READ_ONLY' or 'READ_WRITE'). The first entry
will be the boot disk (which must be READ_WRITE).
default: null
nic_gce_struct:
description:
- Support passing in the GCE-specific
formatted networkInterfaces[] structure.
default: null
disks_gce_struct:
description:
- Support passing in the GCE-specific
      formatted disks[] structure. Case sensitive.
see U(https://cloud.google.com/compute/docs/reference/latest/instanceTemplates#resource) for detailed information
default: null
version_added: "2.4"
project_id:
description:
- your GCE project ID
default: null
pem_file:
description:
- path to the pem file associated with the service account email
This option is deprecated. Use 'credentials_file'.
default: null
credentials_file:
description:
- path to the JSON file associated with the service account email
default: null
subnetwork_region:
version_added: "2.4"
description:
- Region that subnetwork resides in. (Required for subnetwork to successfully complete)
default: null
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.13.3, >= 0.17.0 if using JSON credentials,
>= 0.20.0 if using preemptible option"
notes:
- JSON credentials strongly preferred.
author: "Gwenael Pellen (@GwenaelPellenArkeup) <gwenael.pellen@arkeup.com>"
'''
EXAMPLES = '''
# Usage
- name: create instance template named foo
gce
|
_instance_template:
name: foo
size: n1-standard-1
image_family: ubuntu-1604-lts
state: present
project_id: "your-project-name"
credentials_file: "/path/to/your-key.json"
service_account_email: "your-sa@your-project-name.iam.gserviceaccount.com"
# Example Playbook
- name: Compute Engine Instance Template Examples
hosts: localhost
vars:
service_account_email: "your-sa@your-project-name.iam.gserviceacc
|
ount.com"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
tasks:
- name: create instance template
gce_instance_template:
name: my-test-instance-template
size: n1-standard-1
image_family: ubuntu-1604-lts
state: present
project_id: "{{ project_id }}"
credentials_file: "{{ credentials_file }}"
service_account_email: "{{ service_account_email }}"
- name: delete instance template
gce_instance_template:
name: my-test-instance-template
size: n1-standard-1
image_family: ubuntu-1604-lts
state: absent
project_id: "{{ project_id }}"
credentials_file: "{{ credentials_file }}"
service_account_email: "{{ service_account_email }}"
# Example playbook using disks_gce_struct
- name: Compute Engine Instance Template Examples
hosts: localhost
vars:
service_account_email: "your-sa@your-project-name.iam.gserviceaccount.com"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
tasks:
- name: create instance template
gce_instance_template:
name: foo
size: n1-standard-1
state: present
project_id: "{{ project_id }}"
credentials_file: "{{ credentials_file }}"
service_account_email: "{{ service_account_email }}"
disks_gce_struct:
- device_name: /dev/sda
boot: true
autoDelete: true
initializeParams:
diskSizeGb: 30
diskType: pd-ssd
sourceImage: projects/debian-cloud/global/images/family/debian-8
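# Example using the disks option (illustrative sketch: the disk names below are
# placeholders for disks that must already exist in the project; per the option
# documentation the first entry is the boot disk and must be READ_WRITE)
- name: create instance template from existing disks
  gce_instance_template:
    name: foo-from-disks
    size: n1-standard-1
    state: present
    disks:
      - name: boot-disk-placeholder
        mode: READ_WRITE
      - name: data-disk-placeholder
        mode: READ_ONLY
    project_id: "your-project-name"
    credentials_file: "/path/to/your-key.json"
    service_account_email: "your-sa@your-project-name.iam.gserviceaccount.com"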
'''
RETURN = '''
'''
import traceback
try:
from ast import literal_eval
HAS_PYTHON26 = True
except ImportError:
HAS_PYTHON26 = False
try:
import libcloud
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceInUseError, ResourceNotFoundError
from libcloud.compute.drivers.gce import GCEAddress
_ = Provider.GCE
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gce import gce_connect
from ansible.module_utils._text import t
|
polysquare/polysquare-generic-file-linter
|
test/test_linter_acceptance.py
|
Python
|
mit
| 11,503
| 0
|
# /test/test_linter_acceptance.py
#
# Acceptance tests for polysquarelinter.linter, actually runs
# main() and checks results of various functions.
#
# Disable no-self-use in tests as all test methods must be
# instance methods and we don't necessarily have to use a matcher
# with them.
# pylint:disable=no-self-use
#
# See /LICENCE.md for Copyright information
"""Test the linter to ensure that each lint use-case triggers warnings."""
import doctest
import errno
import os
import shutil
import sys
import tempfile
from contextlib import contextmanager
from iocapture import capture
from nose_parameterized import parameterized
from polysquarelinter import linter
from testtools import TestCase
from testtools.matchers import (DocTestMatches,
Equals as TTEqMatcher,
MatchesSetwise)
# Pychecker complains about the Equals matcher failing to override comparator
# so do that here
class Equals(TTEqMatcher): # suppress(R0903)
"""Matcher which tests equality
|
."""
def __init__(self, matchee):
"""Forward matchee to parent class."""
super(Equals, self).__init__(matchee)
def comparator(self, expected, other):
"""Check that expected == other."""
|
return other == expected
class LinterFailure(Exception):
"""Exception raised when the linter reports a message."""
def __init__(self, message, repl):
"""Initialize exception with mesh and replacement."""
super(LinterFailure, self).__init__()
self.message = message
self.replacement = repl
def __str__(self):
"""Represent as string."""
return str("{0}".format(self.message))
def run_with_kwargs_as_switches(func, *args, **kwargs):
"""Run :func: with :kwargs: converted to switches."""
arguments = list(args)
def _convert_kv_to_switches(key, value):
"""Convert a key-value pair to command-line switches."""
append_args = ["--{0}".format(key).replace("_", "-")]
type_dispatch = {
bool: [],
list: value,
str: [value]
}
# We assume that the types in type_dispatch are the only types
# we'll encounter, all others will throw an exception.
append_args += type_dispatch[type(value)]
return append_args
for key, value in kwargs.items():
arguments += _convert_kv_to_switches(key, value)
return func(arguments)
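# Illustrative sketch (not part of the original module) of the conversion done
# by run_with_kwargs_as_switches: list values are splatted after their switch,
# strings become a single argument, and a True boolean adds a bare flag, with
# underscores turned into dashes. The boolean switch named below is
# hypothetical and only shows the shape of the conversion.
#
#   run_with_kwargs_as_switches(linter.main,
#                               "file.py",
#                               whitelist=["file/newline_last_char"],
#                               some_boolean_option=True)
#   # calls:
#   linter.main(["file.py",
#                "--whitelist", "file/newline_last_char",
#                "--some-boolean-option"])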
def run_linter_main(filename, **kwargs):
"""Run linter.main() (as an integration test)."""
arguments = [filename]
return run_with_kwargs_as_switches(linter.main, *arguments, **kwargs)
class TestLinterAcceptance(TestCase):
"""Acceptance tests for linter.main()."""
def __init__(self, *args, **kwargs): # pylint:disable=super-on-old-class
"""Initialize class variables."""
super(TestLinterAcceptance, self).__init__(*args, **kwargs)
self._temporary_file = None
def setUp(self): # suppress(N802)
"""Create a temporary file."""
from six import StringIO
super(TestLinterAcceptance, self).setUp()
self._temporary_file = tempfile.mkstemp()
self.patch(sys, "stdout", StringIO())
os.environ["JOBSTAMPS_DISABLED"] = "1"
def tearDown(self): # suppress(N802)
"""Remove temporary file.
Note that we need to ensure that the file is closed
first, so if it hasn't been opened yet, we won't get
EBADF. Otherwise we'll get EBADF and we can safely
ignore it.
"""
try:
os.close(self._temporary_file[0])
except OSError as error:
if error.errno != errno.EBADF: # suppress(PYC90)
raise error
os.remove(self._temporary_file[1])
super(TestLinterAcceptance, self).tearDown()
def test_parallelization_path(self):
"""Generate expected number of errors when running in parallel."""
contents = ("#\n"
"#\n"
"# Description\n"
"#\n"
"# See LICENCE.md for Copyright information\n"
"\n")
temporary_dir = tempfile.mkdtemp(prefix=os.path.join(os.getcwd(),
"technical"))
self.addCleanup(lambda: shutil.rmtree(temporary_dir))
files_to_lint = []
for i in range(0, 20):
with open(os.path.join(temporary_dir,
"file{0}".format(i)), "w") as lint_file:
lint_file.write(contents)
files_to_lint.append(os.path.realpath(lint_file.name))
result = run_with_kwargs_as_switches(linter.main,
*files_to_lint,
whitelist="headerblock/copyright")
self.assertEqual(result, 20)
def test_inline_suppressions_above(self):
"""Check inline suppressions work above the error-generating line."""
contents = ("#\n"
"#\n"
"# Description\n"
"#\n"
"# suppress(headerblock/copyright)\n"
"# See LICENCE.md for Copyright information\n"
"\n")
with os.fdopen(self._temporary_file[0], "a+") as process_file:
process_file.write(contents)
result = run_linter_main(self._temporary_file[1],
whitelist=["headerblock/copyright"])
self.assertEqual(result, 0)
def test_handle_empty_files(self):
"""Handle empty files with appropriate error message."""
contents = ""
with os.fdopen(self._temporary_file[0], "a+") as process_file:
process_file.write(contents)
result = run_linter_main(self._temporary_file[1])
# There should be a failure exit status, since empty
# files will trigger errors in the linter.
self.assertEqual(result, 1)
def test_inline_suppressions_beside(self):
"""Check inline suppressions work beside the error-generating line."""
contents = ("#\n"
"#\n"
"# Description\n"
"#\n"
"# See LICENCE.md for Copyright information"
"# suppress(headerblock/copyright)\n" # on the same line
"\n")
with os.fdopen(self._temporary_file[0], "a+") as process_file:
process_file.write(contents)
result = run_linter_main(self._temporary_file[1],
whitelist=["headerblock/copyright"])
self.assertEqual(result, 0)
def test_blacklist(self):
"""Check that blacklisting a test causes it not to run."""
contents = ("#\n"
"#\n"
"# Description\n"
"#\n"
"# See /LICENCE.md for Copyright information\n"
"\n")
with os.fdopen(self._temporary_file[0], "a+") as process_file:
process_file.write(contents)
result = run_linter_main(self._temporary_file[1],
blacklist=["headerblock/filename",
"file/spelling_error"])
self.assertEqual(result, 0)
def test_whitelist_pass(self):
"""Check that white-listing a test causes only it to run."""
contents = ("#\n")
with os.fdopen(self._temporary_file[0], "a+") as process_file:
process_file.write(contents)
result = run_linter_main(self._temporary_file[1],
whitelist=["file/newline_last_char"])
self.assertEqual(result, 0)
def test_whitelist_fail(self):
"""Check that white-listing a test causes only it to run."""
contents = ("#")
with os.fdopen(self._temporary_file[0], "a+") as process_file:
process_file.write(contents)
result = run_linter_main(self._temporary_file[1],
whitelist=["file/newline_last_char"])
|
plivo/plivo-python
|
tests/xml/test_sayAsElement.py
|
Python
|
mit
| 789
| 0.002535
|
from unittest import TestCase
from plivo import plivoxml
from tests import PlivoXmlTestCase
class SayAsElementTest(TestCase, PlivoXmlTestCase):
def test_set_methods(self):
expected_response = '<Response><Speak><say-as format="application/ssml+xml" interpret-as="spell-out">' \
'This is Test</say-as></Speak></Response>'
interpret_as = "spell-out"
|
format = "application/ssml+xml"
content = 'This is Test'
element = plivoxml.ResponseElement()
response = element.add(
plivoxml.SpeakElement("").add(
plivoxml.SayAsElement(content).set_interpret_as(interpret_as).set_format(format)
)
).
|
to_string(False)
self.assertXmlEqual(response, expected_response)
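    # Illustrative variant (not part of the original suite): the same builder
    # chain with a different interpret-as value. "date" is a standard SSML
    # token, but whether Plivo emits it unchanged is an assumption here, so the
    # check below only looks for the attribute rather than the full document.
    def test_set_interpret_as_date_sketch(self):
        element = plivoxml.ResponseElement()
        response = element.add(
            plivoxml.SpeakElement("").add(
                plivoxml.SayAsElement('2020-01-01')
                .set_interpret_as('date')
                .set_format('application/ssml+xml')
            )
        ).to_string(False)
        self.assertIn('interpret-as="date"', response)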
|
Winand/pandas
|
pandas/tests/reshape/test_merge.py
|
Python
|
bsd-3-clause
| 64,631
| 0.000031
|
# pylint: disable=E1103
import pytest
from datetime import datetime, date
from numpy.random import randn
from numpy import nan
import numpy as np
import random
import pandas as pd
from pandas.compat import lrange, lzip
from pandas.core.reshape.concat import concat
from pandas.core.reshape.merge import merge, MergeError
from pandas.util.testing import assert_frame_equal, assert_series_equal
from pandas.core.dtypes.dtypes import CategoricalDtype
from pandas.core.dtypes.common import is_categorical_dtype, is_object_dtype
from pandas import DataFrame, Index, MultiIndex, Series, Categorical
import pandas.util.testing as tm
N = 50
NGROUPS = 8
def get_test_data(ngroups=NGROUPS, n=N):
unique_groups = lrange(ngroups)
arr = np.asarray(np.tile(unique_groups, n // ngroups))
if len(arr) < n:
arr = np.asarray(list(arr) + unique_groups[:n - len(arr)])
random.shuffle(arr)
return arr
class TestMerge(object):
def setup_method(self, method):
# aggregate multiple columns
self.df = DataFrame(
|
{'key1':
|
get_test_data(),
'key2': get_test_data(),
'data1': np.random.randn(N),
'data2': np.random.randn(N)})
# exclude a couple keys for fun
self.df = self.df[self.df['key2'] > 1]
self.df2 = DataFrame({'key1': get_test_data(n=N // 5),
'key2': get_test_data(ngroups=NGROUPS // 2,
n=N // 5),
'value': np.random.randn(N // 5)})
self.left = DataFrame({'key': ['a', 'b', 'c', 'd', 'e', 'e', 'a'],
'v1': np.random.randn(7)})
self.right = DataFrame({'v2': np.random.randn(4)},
index=['d', 'b', 'c', 'a'])
def test_merge_inner_join_empty(self):
# GH 15328
df_empty = pd.DataFrame()
df_a = pd.DataFrame({'a': [1, 2]}, index=[0, 1], dtype='int64')
result = pd.merge(df_empty, df_a, left_index=True, right_index=True)
expected = pd.DataFrame({'a': []}, index=[], dtype='int64')
assert_frame_equal(result, expected)
def test_merge_common(self):
joined = merge(self.df, self.df2)
exp = merge(self.df, self.df2, on=['key1', 'key2'])
tm.assert_frame_equal(joined, exp)
def test_merge_index_singlekey_right_vs_left(self):
left = DataFrame({'key': ['a', 'b', 'c', 'd', 'e', 'e', 'a'],
'v1': np.random.randn(7)})
right = DataFrame({'v2': np.random.randn(4)},
index=['d', 'b', 'c', 'a'])
merged1 = merge(left, right, left_on='key',
right_index=True, how='left', sort=False)
merged2 = merge(right, left, right_on='key',
left_index=True, how='right', sort=False)
assert_frame_equal(merged1, merged2.loc[:, merged1.columns])
merged1 = merge(left, right, left_on='key',
right_index=True, how='left', sort=True)
merged2 = merge(right, left, right_on='key',
left_index=True, how='right', sort=True)
assert_frame_equal(merged1, merged2.loc[:, merged1.columns])
def test_merge_index_singlekey_inner(self):
left = DataFrame({'key': ['a', 'b', 'c', 'd', 'e', 'e', 'a'],
'v1': np.random.randn(7)})
right = DataFrame({'v2': np.random.randn(4)},
index=['d', 'b', 'c', 'a'])
# inner join
result = merge(left, right, left_on='key', right_index=True,
how='inner')
expected = left.join(right, on='key').loc[result.index]
assert_frame_equal(result, expected)
result = merge(right, left, right_on='key', left_index=True,
how='inner')
expected = left.join(right, on='key').loc[result.index]
assert_frame_equal(result, expected.loc[:, result.columns])
def test_merge_misspecified(self):
pytest.raises(ValueError, merge, self.left, self.right,
left_index=True)
pytest.raises(ValueError, merge, self.left, self.right,
right_index=True)
pytest.raises(ValueError, merge, self.left, self.left,
left_on='key', on='key')
pytest.raises(ValueError, merge, self.df, self.df2,
left_on=['key1'], right_on=['key1', 'key2'])
def test_index_and_on_parameters_confusion(self):
pytest.raises(ValueError, merge, self.df, self.df2, how='left',
left_index=False, right_index=['key1', 'key2'])
pytest.raises(ValueError, merge, self.df, self.df2, how='left',
left_index=['key1', 'key2'], right_index=False)
pytest.raises(ValueError, merge, self.df, self.df2, how='left',
left_index=['key1', 'key2'],
right_index=['key1', 'key2'])
def test_merge_overlap(self):
merged = merge(self.left, self.left, on='key')
exp_len = (self.left['key'].value_counts() ** 2).sum()
assert len(merged) == exp_len
assert 'v1_x' in merged
assert 'v1_y' in merged
def test_merge_different_column_key_names(self):
left = DataFrame({'lkey': ['foo', 'bar', 'baz', 'foo'],
'value': [1, 2, 3, 4]})
right = DataFrame({'rkey': ['foo', 'bar', 'qux', 'foo'],
'value': [5, 6, 7, 8]})
merged = left.merge(right, left_on='lkey', right_on='rkey',
how='outer', sort=True)
exp = pd.Series(['bar', 'baz', 'foo', 'foo', 'foo', 'foo', np.nan],
name='lkey')
tm.assert_series_equal(merged['lkey'], exp)
exp = pd.Series(['bar', np.nan, 'foo', 'foo', 'foo', 'foo', 'qux'],
name='rkey')
tm.assert_series_equal(merged['rkey'], exp)
exp = pd.Series([2, 3, 1, 1, 4, 4, np.nan], name='value_x')
tm.assert_series_equal(merged['value_x'], exp)
exp = pd.Series([6, np.nan, 5, 8, 5, 8, 7], name='value_y')
tm.assert_series_equal(merged['value_y'], exp)
def test_merge_copy(self):
left = DataFrame({'a': 0, 'b': 1}, index=lrange(10))
right = DataFrame({'c': 'foo', 'd': 'bar'}, index=lrange(10))
merged = merge(left, right, left_index=True,
right_index=True, copy=True)
merged['a'] = 6
assert (left['a'] == 0).all()
merged['d'] = 'peekaboo'
assert (right['d'] == 'bar').all()
def test_merge_nocopy(self):
left = DataFrame({'a': 0, 'b': 1}, index=lrange(10))
right = DataFrame({'c': 'foo', 'd': 'bar'}, index=lrange(10))
merged = merge(left, right, left_index=True,
right_index=True, copy=False)
merged['a'] = 6
assert (left['a'] == 6).all()
merged['d'] = 'peekaboo'
assert (right['d'] == 'peekaboo').all()
def test_intelligently_handle_join_key(self):
# #733, be a bit more 1337 about not returning unconsolidated DataFrame
left = DataFrame({'key': [1, 1, 2, 2, 3],
'value': lrange(5)}, columns=['value', 'key'])
right = DataFrame({'key': [1, 1, 2, 3, 4, 5],
'rvalue': lrange(6)})
joined = merge(left, right, on='key', how='outer')
expected = DataFrame({'key': [1, 1, 1, 1, 2, 2, 3, 4, 5],
'value': np.array([0, 0, 1, 1, 2, 3, 4,
np.nan, np.nan]),
'rvalue': [0, 1, 0, 1, 2, 2, 3, 4, 5]},
columns=['value', 'key', 'rvalue'])
assert_frame_equal(joined, expected)
def test_merge_join_key_dtype_cast(self):
# #8596
df1 = DataFrame({'key': [1], 'v1': [10]})
df2 = DataFrame({'key': [2], 'v1': [20]})
df = merge(df1, df2, how='outer')
assert df['key'].dtype == 'int64'
df1 = DataFrame(
|
flavour/porto
|
controllers/admin.py
|
Python
|
mit
| 53,568
| 0.006907
|
# -*- coding: utf-8 -*-
"""
Admin Controllers
"""
module = "admin"
resourcename = request.function
# Options Menu (available in all Functions' Views)
# - can Insert/Delete items from default menus within a function, if required.
s3_menu(module)
# S3 framework functions
# -----------------------------------------------------------------------------
def index():
""" Module's Home Page """
module_name = deployment_settings.modules[module].name_nice
response.title = module_name
return dict(module_name=module_name)
# =============================================================================
@auth.s3_requires_membership(1)
def settings():
""" Custom page to link to those Settings which can be edited through the web interface """
return dict()
# =============================================================================
# AAA
# =============================================================================
@auth.s3_requires_membership(1)
def role():
"""
Role Manager
@author: Dominic König <dominic@aidiq.com>
@ToDo: Prevent (or warn?) users from renaming Staff Roles
"""
prefix = "auth"
name = "group"
# ACLs as component of roles
s3mgr.model.add_component(auth.permission.table,
auth_group="group_id")
def prep(r):
if r.representation != "html":
return False
handle
|
r = s3base
|
.S3RoleManager()
modules = deployment_settings.modules
handler.controllers = Storage([(m, modules[m])
for m in modules
if modules[m].restricted])
# Configure REST methods
r.set_handler("users", handler)
r.set_handler("read", handler)
r.set_handler("list", handler)
r.set_handler("copy", handler)
r.set_handler("create", handler)
r.set_handler("update", handler)
r.set_handler("delete", handler)
return True
response.s3.prep = prep
response.s3.stylesheets.append( "S3/role.css" )
output = s3_rest_controller(prefix, name)
return output
# -----------------------------------------------------------------------------
@auth.s3_requires_membership(1)
def user():
""" RESTful CRUD controller """
module = "auth"
tablename = "auth_user"
table = db[tablename]
s3mgr.configure(tablename,
main="first_name",
# Add users to Person Registry & 'Authenticated' role:
create_onaccept = auth.s3_register)
def disable_user(r):
if not r.id:
session.error = T("Can only disable 1 record at a time!")
redirect(URL(args=[]))
if r.id == session.auth.user.id: # we're trying to disable ourself
session.error = T("Cannot disable your own account!")
redirect(URL(args=[]))
table = auth.settings.table_user
query = (table.id == r.id)
db(query).update(registration_key = "disabled")
session.confirmation = T("User Account has been Disabled")
redirect(URL(args=[]))
def approve_user(r):
if not r.id:
session.error = T("Can only approve 1 record at a time!")
redirect(URL(args=[]))
# Send them an email to let them know that their account has been approved
form = Storage()
form.vars = Storage()
form.vars.id = r.id
form.vars.email = r.record.email
user_approve(form)
# Allow them to login
table = auth.settings.table_user
query = (table.id == r.id)
db(query).update(registration_key = "")
session.confirmation = T("User Account has been Approved")
redirect(URL(args=[]))
# Custom Methods
role_manager = s3base.S3RoleManager()
s3mgr.model.set_method(module, resourcename, method="roles",
action=role_manager)
s3mgr.model.set_method(module, resourcename, method="disable",
action=disable_user)
s3mgr.model.set_method(module, resourcename, method="approve",
action=approve_user)
# CRUD Strings
ADD_USER = T("Add User")
LIST_USERS = T("List Users")
s3.crud_strings[tablename] = Storage(
title_create = ADD_USER,
title_display = T("User Details"),
title_list = LIST_USERS,
title_update = T("Edit User"),
title_search = T("Search Users"),
subtitle_create = T("Add New User"),
subtitle_list = T("Users"),
label_list_button = LIST_USERS,
label_create_button = ADD_USER,
label_delete_button = T("Delete User"),
msg_record_created = T("User added"),
msg_record_modified = T("User updated"),
msg_record_deleted = T("User deleted"),
msg_list_empty = T("No Users currently registered"))
# Allow the ability for admin to change a User's Organisation
org = table.organisation_id
org.writable = True
org.requires = IS_NULL_OR(IS_ONE_OF(db, "org_organisation.id",
organisation_represent,
orderby="org_organisation.name",
sort=True))
org.represent = organisation_represent
org.widget = S3OrganisationAutocompleteWidget()
org.comment = DIV(_class="tooltip",
_title="%s|%s|%s" % (T("Organization"),
T("The default Organization for whom this person is acting."),
T("Enter some characters to bring up a list of possible matches")))
# Allow the ability for admin to change a User's Facility
site = table.site_id
site.writable = True
site.requires = IS_NULL_OR(IS_ONE_OF(db, "org_site.id",
org_site_represent,
orderby="org_site.name",
sort=True))
site.represent = org_site_represent
site.widget = S3SiteAutocompleteWidget()
site.comment = DIV(_class="tooltip",
_title="%s|%s|%s" % (T("Facility"),
T("The default Facility for which this person is acting."),
T("Enter some characters to bring up a list of possible matches")))
# Pre-processor
def prep(r):
if r.interactive:
s3mgr.configure(r.tablename,
deletable=False,
                             # jquery.validate clashes with dataTables, so don't embed the create form within the List view
listadd=False,
addbtn=True,
sortby = [[2, "asc"], [1, "asc"]],
# Password confirmation
create_onvalidation = user_create_onvalidation)
# Allow the ability for admin to Disable logins
reg = r.table.registration_key
reg.writable = True
reg.readable = True
reg.label = T("Status")
# In Controller to allow registration to work with UUIDs - only manual edits need this setting
reg.requires = IS_NULL_OR(IS_IN_SET(["disabled",
"pending"]))
elif r.representation == "aadata":
# dataTables' columns need to match
r.table.registration_key.readable = True
if r.method == "delete" and r.http == "GET":
if r.id == session.auth.user.id: # we're trying to delete ourself
request.get_vars.update({"user.id":str(r.id)})
r.id = None
s3mgr.configure(r.tablename,
delete_next = URL(c="default", f="user/logout"))
s3.crud.confirm_delete = T("You are attempting to delete your own account - are you sure you want to proceed?")
elif r.method == "update":
# Send an email to user if their account is approv
|
dezede/dezede
|
libretto/models/individu.py
|
Python
|
bsd-3-clause
| 12,969
| 0.000233
|
from django.core.exceptions import ValidationError
from django.db import connection
from django.db.models import (
CharField, ForeignKey, ManyToManyField, PROTECT, BooleanField)
from django.urls import reverse
from django.utils.html import strip_tags
from django.utils.safestring import mark_safe
from django.utils.translation import (
pgettext_lazy, ugettext, ugettext_lazy as _)
from tinymce.models import HTMLField
from common.utils.abbreviate import abbreviate
from common.utils.html import href, sc, hlp
from common.utils.text import str_list, str_list_w_last, ex
from .base import (
CommonModel, AutoriteModel, UniqueSlugModel, TypeDeParente,
PublishedManager, PublishedQuerySet, AncrageSpatioTemporel,
slugify_unicode, ISNI_VALIDATORS)
from .evenement import Evenement
__all__ = ('TypeDeParenteDIndividus', 'ParenteDIndividus', 'Individu')
class TypeDeParenteDIndividus(TypeDeParente):
class Meta(object):
unique_together = ('nom', 'nom_relatif')
verbose_name = _('type de parenté d’individus')
verbose_name_plural = _('types de parenté d’individus')
ordering = ('classement',)
@staticmethod
def invalidated_relations_when_saved(all_relations=False):
if all_relations:
return ('parentes',)
return ()
class ParenteDIndividus(CommonModel):
type = ForeignKey('TypeDeParenteDIndividus', related_name='parentes',
verbose_name=_('type'), on_delete=PROTECT)
parent = ForeignKey('Individu', related_name='enfances',
verbose_name=_('individu parent'), on_delete=PROTECT)
enfant = ForeignKey('Individu', related_name='parentes',
verbose_name=_('individu enfant'), on_delete=PROTECT)
class Meta(object):
verbose_name = _('parenté d’individus')
verbose_name_plural = _('parentés d’individus')
ordering = ('type', 'parent', 'enfant')
@staticmethod
def invalidated_relations_when_saved(all_relations=False):
if all_relations:
return ('parent', 'enfant')
return ()
def clean(self):
try:
parent, enfant = self.parent, self.enfant
except Individu.DoesNotExist:
return
if parent and enfant and parent == enfant:
raise ValidationError(_('Un individu ne peut avoir une '
'parenté avec lui-même.'))
def __str__(self):
return ugettext('%(parent)s, %(type)s de %(enfant)s') % {
'parent': self.parent, 'type': self.type.nom,
'enfant': self.enfant}
class IndividuQuerySet(PublishedQuerySet):
def are_feminins(self):
return all(i.is_feminin() for i in self)
class IndividuManager(PublishedManager):
def get_queryset(self):
return IndividuQuerySet(self.model, using=self._db)
def are_feminins(self):
return self.get_queryset().are_feminins()
class Individu(AutoriteModel, UniqueSlugModel):
particule_nom = CharField(
_('particule du nom d’usage'), max_length=10, blank=True,
db_index=True)
    # TODO: allow the 'nom' field to be blank
nom = CharField(_('nom d’usage'), max_length=200, db_index=True)
particule_nom_naissance = CharField(
_('particule du nom de naissance'), max_length=10, blank=True,
db_index=True)
nom_naissance = CharField(
_('nom de naissance'), max_length=200, blank=True, db_index=True,
help_text=_('Ne remplir que s’il est différent du nom d’usage.'))
prenoms = CharField(_('prénoms'), max_length=50, blank=True,
db_index=True, help_text=ex('Antonio'))
prenoms_complets = CharField(
_('prénoms complets'), max_length=100, blank=True, db_index=True,
help_text=
ex('Antonio Lucio',
post=' Ne remplir que s’il existe un ou des prénoms '
'peu usités pour cet individu.'))
pseudonyme = CharField(_('pseudonyme'), max_length=200, blank=True,
db_index=True)
DESIGNATIONS = (
('S', _('Standard (nom, prénoms et pseudonyme)')),
('P', _('Pseudonyme (uniquement)')),
('L', _('Nom d’usage (uniquement)')), # L pour Last name
('B', _('Nom de naissance (standard)')), # B pour Birth name
('F', _('Prénom(s) (uniquement)')), # F pour First name
)
designation = CharField(_('affichage'), max_length=1,
choices=DESIGNATIONS, default='S')
TITRES = (
('M', _('M.')),
('J', _('Mlle')), # J pour Jouvencelle
('F', _('Mme')),
)
titre = CharField(pgettext_lazy('individu', 'titre'), max_length=1,
choices=TITRES, blank=True, db_index=True)
naissance = AncrageSpatioTemporel(has_heure=False,
verbose_name=_('naissance'))
deces = AncrageSpatioTemporel(has_heure=False,
verbose_name=_('décès'))
professions = ManyToManyField(
'Profession', related_name='individus', blank=True,
verbose_name=_('professions'))
enfants = ManyToManyField(
'self', through='ParenteDIndividus', related_name='parents',
symmetrical=False, verbose_name=_('enfants'))
biographie = HTMLField(_('biographie'), blank=True)
isni = CharField(
_('Identifiant ISNI'), max_length=16, blank=True,
validators=ISNI_VALIDATORS,
help_text=_('Exemple : « 0000000121269154 » pour Mozart.'))
sans_isni = BooleanField(_('sans ISNI'), default=False)
objects = IndividuManager()
class Meta(object):
verbose_name = _('individu')
verbose_name_plural = _('individus')
ordering = ('nom',)
permissions = (('can_change_status', _('Peut changer l’état')),)
@staticmethod
def invalidated_relations_when_saved(all_relations=False):
relations = ('auteurs', 'elements_de_distribution',)
if all_relations:
relations += ('enfants', 'dossiers',)
return relations
def get_slug(self):
parent = super(Individu, self).get_slug()
return slugify_unicode(self.nom) or parent
def get_absolute_url(self):
return reverse('individu_detail', args=(self.slug,))
def permalien(self):
return reverse('individu_permanent_detail', args=(self.pk,))
def link(self):
return self.html()
link.short_description = _('lien')
def oeuvres(self):
oeuvres = self.auteurs.oeuvres()
return oeuvres.exclude(extrait_de__in=oeuvres)
def oeuvres_with_descendants(self):
return self.auteurs.oeuvres()
def publications(self):
return self.auteurs.sources()
def apparitions(self):
        # FIXME: Handle the period of activity of a group's members.
sql = """
SELECT DISTINCT COALESCE(distribution.evenement_id, programme.evenement_id)
FROM libretto_elementdedistribution AS distribution
LEFT JOIN libretto_elementdeprogramme AS programme
ON (programme.id = distribution.element_de_programme_id)
WHERE distribution.individu_id = %s
"""
with connection.cursor() as cursor:
cursor.execute(sql, (self.pk,))
evenement_ids = [t[0] for t in cursor.fetchall()]
return Evenement.objects.filter(id__in=evenement_ids)
def evenements_referents(self):
return Evenement.objects.filter(
programme__oeuvre__auteurs__individu=self).distinct()
def membre_de(self):
return self.membres.order_by('-debut', 'instrument', 'classement')
def calc_titre(self, tags=False):
titre = self.titre
if not titre:
return ''
if tags:
if titre == 'M':
return hlp(ugettext('M.'), 'Monsieur')
elif titre == 'J':
return hlp(ugettext('M<sup>lle</sup>'), 'Ma
|
demoiselle')
|
elif titre == 'F':
return hlp(ugettext('M<sup>me</sup>'), 'Madame')
if titre == 'M':
return ugettext('Monsieur')
elif titre == 'J':
return ugettext('Mademoisel
|
noironetworks/neutron
|
neutron/tests/unit/services/logapi/common/test_db_api.py
|
Python
|
apache-2.0
| 14,273
| 0
|
# Copyright (c) 2017 Fujitsu Limited
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron_lib import constants as const
from neutron_lib import context
from oslo_utils import uuidutils
from neutron.common import utils
from neutron.objects.logapi import logging_resource as log_object
from neutron.services.logapi.common import constants as log_const
from neutron.services.logapi.common import db_api
from neutron.services.logapi.common import validators
from neutron.services.logapi.rpc import server as server_rpc
from neutron.tests.unit.extensions import test_securitygroup as test_sg
def _create_log(tenant_id, resource_id=None,
target_id=None, event='ALL', enabled=True,):
log_data = {
'id': uuidutils.generate_uuid(),
'name': 'test',
'resource_type': 'security_group',
'project_id': tenant_id,
'event': event,
'enabled': enabled}
if resource_id:
log_data['resource_id'] = resource_id
if target_id:
log_data['target_id'] = target_id
return log_object.Log(**log_data)
class LoggingDBApiTestCase(test_sg.SecurityGroupDBTestCase):
def setUp(self):
super(LoggingDBApiTestCase, self).setUp()
self.context = context.get_admin_context()
self.sg_id, self.port_id, self.tenant_id = self._create_sg_and_port()
self.context.tenant_id = self.tenant_id
def _create_sg_and_port(self):
with self.network() as network, \
self.subnet(network), \
self.security_group() as sg:
sg_id = sg['security_group']['id']
tenant_id = sg['security_group']['tenant_id']
res = self._create_port(
self.fmt, network['network']['id'],
security_groups=[sg_id])
ports_rest = self.deserialize(self.fmt, res)
port_id = ports_rest['port']['id']
return sg_id, port_id, tenant_id
def test_get_logs_bound_port(self):
log = _create_log(target_id=self.port_id, tenant_id=self.tenant_id)
with mock.patch.object(log_object.Log, 'get_objects',
return_value=[log]):
self.assertEqual(
[log], db_api.get_logs_bound_port(self.context, self.port_id))
# Test get log objects with required resource type
calls = [mock.call(self.context, project_id=self.tenant_id,
resource_type=log_const.SECURITY_GROUP,
enabled=True)]
log_object.Log.get_objects.assert_has_calls(calls)
def test_get_logs_not_bound_port(self):
fake_sg_id = uuidutils.generate_uuid()
log = _create_log(resource_id=fake_sg_id, tenant_id=self.tenant_id)
with mock.patch.object(log_object.Log, 'get_objects',
return_value=[log]):
self.assertEqual(
[], db_api.get_logs_bound_port(self.context, self.port_id))
# Test get log objects with required resource type
calls = [mock.call(self.context, project_id=self.tenant_id,
resource_type=log_const.SECURITY_GROUP,
enabled=True)]
log_object.Log.get_objects.assert_has_calls(calls)
def test_get_logs_bound_sg(self):
log = _create_log(resource_id=self.sg_id, tenant_id=self.tenant_id)
with mock.patch.object(log_object.Log, 'get_objects',
return_value=[log]):
self.assertEqual(
[log], db_api.get_logs_bound_sg(self.context, self.sg_id))
# Test get log objects with required resource type
calls = [mock.call(self.context, project_id=self.tenant_id,
resource_type=log_const.SECURITY_GROUP,
enabled=True)]
log_object.Log.get_objects.assert_has_calls(calls)
def test_get_logs_not_bound_sg(self):
with self.network() as network, \
self.subnet(network), \
self.security_group() as sg:
sg2_id = sg['security_group']['id']
res = self._create_port(
self.fmt, network['network']['id'],
security_groups=[sg2_id])
port2_id = self.deserialize(self.fmt, res)['port']['id']
log = _create_log(target_id=port2_id, tenant_id=self.tenant_id)
with mock.patch.object(log_object.Log, 'get_objects',
return_value=[log]):
self.assertEqual(
[], db_api.get_logs_bound_sg(self.context, self.sg_id))
# Test get log objects with required resource type
calls = [mock.call(self.context, project_id=self.tenant_id,
resource_type=log_const.SECURITY_GROUP,
enabled=True)]
log_object.Log.get_objects.assert_has_calls(calls)
def test__get_ports_being_logged(self):
log1 = _create_log(target_id=self.port_id,
tenant_id=self.tenant_id)
log2 = _create_log(resource_id=self.sg_id,
tenant_id=self.tenant_id)
log3 = _create_log(target_id=self.port_id,
resource_id=self.tenant_id,
tenant_id=self.tenant_id)
log4 = _create_log(tenant_id=self.tenant_id)
with mock.patch.object(
validators, 'validate_log_type_for_port', return_value=True):
ports_log1 = db_api._get_ports_being_logged(self.context, log1)
ports_log2 = db_api._get_ports_being_logged(self.context, log2)
ports_log3 = db_api._get_ports_being_logged(self.context, log3)
ports_log4 = db_api._get_ports_being_logged(self.context, log4)
self.assertEqual([self.port_id], ports_log1)
self.assertEqual([self.port_id], ports_log2)
self.assertEqual([self.port_id], ports_log3)
self.assertEqual([self.port_id], ports_log4)
def test__get_p
|
orts_being_logged_not_supported_log_type(self):
log = _create_log(tenant_id=self.tenant_id)
with mock.patch.object(
|
validators, 'validate_log_type_for_port', return_value=False):
ports_log = db_api._get_ports_being_logged(self.context, log)
self.assertEqual([], ports_log)
class LoggingRpcCallbackTestCase(test_sg.SecurityGroupDBTestCase):
def setUp(self):
super(LoggingRpcCallbackTestCase, self).setUp()
self.context = context.get_admin_context()
self.rpc_callback = server_rpc.LoggingApiSkeleton()
def test_get_sg_log_info_for_create_or_update_log(self):
with self.network() as network, \
self.subnet(network), \
self.security_group() as sg:
sg_id = sg['security_group']['id']
tenant_id = sg['security_group']['tenant_id']
rule1 = self._build_security_group_rule(
sg_id,
'ingress', const.PROTO_NAME_TCP, '22', '22',
)
rule2 = self._build_security_group_rule(
sg_id,
'egress', const.PROTO_NAME_TCP,
remote_ip_prefix='10.0.0.1',
)
rules = {
'security_group_rules': [rule1['security_group_rule'],
rule2['security_group_rule']]}
self._create_security_group_rule(self.fmt, rules)
res = self._create_port(
|
martinbede/second-sight
|
tensorflow/examples/tutorials/word2vec/word2vec_basic.py
|
Python
|
apache-2.0
| 8,770
| 0.011403
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import print_function
import collections
import math
import os
import random
import zipfile
import numpy as np
from six.moves import urllib
from six.moves import xrange  # pylint: disable=redefined-builtin
import tensorflow as tf
# Step 1: Download the data.
url = 'http://mattmahoney.net/dc/'
def maybe_download(filename, expected_bytes):
"""Download a file if not present, and make sure it's the right size."""
if not os.path.exists(filename):
filename, _ = urllib.request.urlretrieve(url + filename, filename)
statinfo = os.stat(filename)
if statinfo.st_size == expected_bytes:
print('Found and verified', filename)
else:
print(statinfo.st_size)
raise Exception(
'Failed to verify ' + filename + '. Can you get to it with a browser?')
return filename
filename = maybe_download('text8.zip', 31344016)
# Read the data into a string.
def read_data(filename):
  """Extract the first file enclosed in a zip file as a list of words."""
  with zipfile.ZipFile(filename) as f:
    return f.read(f.namelist()[0]).split()
words = read_data(filename)
print('Data size', len(words))
# Step 2: Build the dictionary and replace rare words with UNK token.
vocabulary_size = 50000
def build_dataset(words):
count = [['UNK', -1]]
count.extend(collections.Counter(words).most_common(vocabulary_size - 1))
dictionary = dict()
for word, _ in count:
dictionary[word] = len(dictionary)
data = list()
unk_count = 0
for word in words:
if word in dictionary:
index = dictionary[word]
else:
index = 0 # dictionary['UNK']
unk_count += 1
data.append(index)
count[0][1] = unk_count
reverse_dictionary = dict(zip(dictionary.values(), dictionary.keys()))
return data, count, dictionary, reverse_dictionary
data, count, dictionary, reverse_dictionary = build_dataset(words)
del words # Hint to reduce memory.
print('Most common words (+UNK)', count[:5])
print('Sample data', data[:10])
data_index = 0
# Step 3: Function to generate a training batch for the skip-gram model.
def generate_batch(batch_size, num_skips, skip_window):
global data_index
assert batch_size % num_skips == 0
assert num_skips <= 2 * skip_window
batch = np.ndarray(shape=(batch_size), dtype=np.int32)
labels = np.ndarray(shape=(batch_size, 1), dtype=np.int32)
span = 2 * skip_window + 1 # [ skip_window target skip_window ]
buffer = collections.deque(maxlen=span)
for _ in range(span):
buffer.append(data[data_index])
data_index = (data_index + 1) % len(data)
for i in range(batch_size // num_skips):
target = skip_window # target label at the center of the buffer
targets_to_avoid = [ skip_window ]
for j in range(num_skips):
while target in targets_to_avoid:
target = random.randint(0, span - 1)
targets_to_avoid.append(target)
batch[i * num_skips + j] = buffer[skip_window]
labels[i * num_skips + j, 0] = buffer[target]
buffer.append(data[data_index])
data_index = (data_index + 1) % len(data)
return batch, labels
batch, labels = generate_batch(batch_size=8, num_skips=2, skip_window=1)
for i in range(8):
print(batch[i], '->', labels[i, 0])
print(reverse_dictionary[batch[i]], '->', reverse_dictionary[labels[i, 0]])
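# Illustration (not part of the original tutorial): with skip_window=1 and
# num_skips=2 every centre word is paired with both of its neighbours, which is
# what the printout above shows for real data. A toy sequence makes the pairing
# explicit; the assert documents the expected (input, label) pairs.
_toy = ['the', 'quick', 'brown', 'fox']
_toy_pairs = [(_toy[i], ctx)
              for i in range(1, len(_toy) - 1)
              for ctx in (_toy[i - 1], _toy[i + 1])]
assert _toy_pairs == [('quick', 'the'), ('quick', 'brown'),
                      ('brown', 'quick'), ('brown', 'fox')]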
# Step 4: Build and train a skip-gram model.
batch_size = 128
embedding_size = 128 # Dimension of the embedding vector.
skip_window = 1 # How many words to consider left and right.
num_skips = 2 # How many times to reuse an input to generate a label.
# We pick a random validation set to sample nearest neighbors. Here we limit the
# validation samples to the words that have a low numeric ID, which by
# construction are also the most frequent.
valid_size = 16 # Random set of words to evaluate similarity on.
valid_window = 100 # Only pick dev samples in the head of the distribution.
valid_examples = np.random.choice(valid_window, valid_size, replace=False)
num_sampled = 64 # Number of negative examples to sample.
graph = tf.Graph()
with graph.as_default():
# Input data.
train_inputs = tf.placeholder(tf.int32, shape=[batch_size])
train_labels = tf.placeholder(tf.int32, shape=[batch_size, 1])
valid_dataset = tf.constant(valid_examples, dtype=tf.int32)
# Ops and variables pinned to the CPU because of missing GPU implementation
with tf.device('/cpu:0'):
# Look up embeddings for inputs.
embeddings = tf.Variable(
tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))
embed = tf.nn.embedding_lookup(embeddings, train_inputs)
# Construct the variables for the NCE loss
nce_weights = tf.Variable(
tf.truncated_normal([vocabulary_size, embedding_size],
stddev=1.0 / math.sqrt(embedding_size)))
nce_biases = tf.Variable(tf.zeros([vocabulary_size]))
# Compute the average NCE loss for the batch.
# tf.nce_loss automatically draws a new sample of the negative labels each
# time we evaluate the loss.
loss = tf.reduce_mean(
tf.nn.nce_loss(nce_weights, nce_biases, embed, train_labels,
num_sampled, vocabulary_size))
# Construct the SGD optimizer using a learning rate of 1.0.
optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(loss)
# Compute the cosine similarity between minibatch examples and all embeddings.
norm = tf.sqrt(tf.reduce_sum(tf.square(embeddings), 1, keep_dims=True))
normalized_embeddings = embeddings / norm
valid_embeddings = tf.nn.embedding_lookup(
normalized_embeddings, valid_dataset)
similarity = tf.matmul(
valid_embeddings, normalized_embeddings, transpose_b=True)
# Step 5: Begin training.
num_steps = 100001
with tf.Session(graph=graph) as session:
# We must initialize all variables before we use them.
tf.initialize_all_variables().run()
print("Initialized")
average_loss = 0
for step in xrange(num_steps):
batch_inputs, batch_labels = generate_batch(
batch_size, num_skips, skip_window)
feed_dict = {train_inputs : batch_inputs, train_labels : batch_labels}
# We perform one update step by evaluating the optimizer op (including it
# in the list of returned values for session.run()
_, loss_val = session.run([optimizer, loss], feed_dict=feed_dict)
average_loss += loss_val
if step % 2000 == 0:
if step > 0:
average_loss /= 2000
# The average loss is an estimate of the loss over the last 2000 batches.
print("Average loss at step ", step, ": ", average_loss)
average_loss = 0
# Note that this is expensive (~20% slowdown if computed every 500 steps)
if step % 10000 == 0:
sim = similarity.eval()
for i in xrange(valid_size):
valid_word = reverse_dictionary[valid_examples[i]]
top_k = 8 # number of nearest neighbors
nearest = (-sim[i, :]).argsort()[1:top_k+1]
log_str = "Nearest to %s:" % valid_word
for k in xrange(top_k):
close_word = reverse_dictionary[nearest[k]]
log_str = "%s %s," % (log_str, close_word)
print(log_str)
final_embeddings = normalized_embeddings.eval()
# Step 6: Visualize the embeddings.
def plot_with_labels(low_dim_embs, labels, filename='tsne.png'):
assert low_dim_embs.shape[0] >= len(labels), "More labels than embeddings"
plt.figure(figsize=(18, 18)) #in inches
for i, label in enumerate(labels):
x, y = low_dim_embs[i,:]
plt.scatter(x, y)
plt.annotate(label,
xy=(x, y),
|
USGSDenverPychron/pychron
|
pychron/mv/degas/degasser.py
|
Python
|
apache-2.0
| 12,798
| 0.000391
|
# ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
import os
import time
from threading import Event, Thread
import yaml
from chaco.plot_containers import HPlotContainer
from enable.component_editor import ComponentEditor
# ============= standard library imports ========================
from numpy import uint8, zeros, random, uint16
from skimage.color import gray2rgb
from traits.api import Float, Instance, Range, Button
from traitsui.api import View, Item, UItem, VGroup
# ============= local library imports ==========================
from pychron.core.pid import PID
from pychron.core.ui.gui import invoke_in_main_thread
from pychron.core.yaml import yload
from pychron.graph.graph import Graph
from pychron.graph.stream_graph import StreamStackedGraph
from pychron.loggable import Loggable
from pychron.paths import paths
class LM:
def __init__(self, t, dt=1):
self._pid = PID(kp=-0.5, ki=0.1)
self._dt = dt
self._target_value = t
def set_laser_power_hook(self, *args, **kw):
pass
def get_brightness(self, v):
err = self._target_value - v
out = self._pid.get_value(err, self._dt)
src = random.randint(0, 255, (150, 150))
return src.astype(uint8), out
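# Usage sketch (illustrative; relies only on the PID calls exercised above):
# each control step feeds the brightness error into the controller and the
# returned value becomes the next laser power request.
#   pid = PID(kp=-0.5, ki=0.1)
#   power = pid.get_value(target_lumens - measured_lumens, 1)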
class Degasser(Loggable):
laser_manager = None
lumens = Float(50)
|
pid = Instance(PID)
stream_graph = Instance(StreamStackedGraph, ())
img_graph = Instance(Graph, ())
    plot_container = Instance(HPlotContainer, ())
threshold = Range(0, 100, 25)
test = Button
edit_pid_button = Button
save_button = Button
_lum_thread = None
_lum_evt = None
_luminosity_value = 0
_testing = False
_info = None
def stop(self):
self.debug("stop")
self.dump()
if self._lum_evt:
self._lum_evt.set()
if self._info:
invoke_in_main_thread(self._info.dispose, abort=True)
@property
def persistence_path(self):
return os.path.join(paths.setup_dir, "pid_degasser.yaml")
def load(self):
self.debug("loading")
self.pid = PID()
p = self.persistence_path
if not os.path.isfile(p):
self.warning("No PID degasser file located at {}".format(p))
return
jd = yload(p)
if jd:
self.threshold = jd["threshold"]
self.pid.load_from_obj(jd["pid"])
def dump(self):
self.debug("dump")
obj = self.pid.get_dump_obj()
jd = {"pid": obj, "threshold": self.threshold}
with open(self.persistence_path, "wb") as wfile:
yaml.dump(jd, wfile, encoding="utf-8")
def degas(self, lumens=None, autostart=True):
self.load()
if lumens is None:
lumens = self.lumens
self.lumens = lumens
self._setup_graph()
# def _open():
# self._info = self.edit_traits()
#
# invoke_in_main_thread(_open)
if autostart:
self.start()
def start(self):
self.pid.reset()
self._lum_evt = Event()
self._lum_thread = Thread(target=self._degas, args=(self.lumens, self.pid))
self._lum_thread.start()
def _edit_pid_button_fired(self):
info = self.pid.edit_traits(kind="livemodal")
if info.result:
self.dump()
def _save_button_fired(self):
self.dump()
def _test_fired(self):
if self._testing:
self.stop()
self.laser_manager.disable_laser()
self.stream_graph.export_data(path="/Users/argonlab3/Desktop/degas.csv")
else:
self.laser_manager.enable_laser()
self.start()
self._testing = not self._testing
def _setup_graph(self):
self.plot_container = HPlotContainer()
g = self.stream_graph
g.clear()
g.new_plot(padding_left=70, padding_right=10)
g.new_series(plotid=0)
g.set_y_title("Lumens", plotid=0)
g.new_plot(padding_left=70, padding_right=10)
g.new_series(plotid=1)
g.set_y_title("Error", plotid=1)
g.new_plot(padding_left=70, padding_right=10)
g.new_series(plotid=2)
g.set_y_title("Output", plotid=2)
g = self.img_graph
g.clear()
imgplot = g.new_plot(padding_right=10)
imgplot.aspect_ratio = 1.0
imgplot.x_axis.visible = False
imgplot.y_axis.visible = False
imgplot.x_grid.visible = False
imgplot.y_grid.visible = False
imgplot.data.set_data("imagedata", zeros((150, 150, 3), dtype=uint8))
imgplot.img_plot("imagedata", origin="top left")
self.plot_container.add(self.stream_graph.plotcontainer)
self.plot_container.add(self.img_graph.plotcontainer)
def _degas(self, lumens, pid):
self.lumens = lumens
g = self.stream_graph
img = self.img_graph.plots[0]
ld = self.laser_manager.stage_manager.lumen_detector
def update(c, e, o, src, targets):
g.record(c, plotid=0)
g.record(e, plotid=1)
g.record(o, plotid=2)
if src.dtype == uint16:
src = src.astype("uint32")
src = src / 4095 * 255
src = src.astype("uint8")
imgdata = gray2rgb(src)
ld.draw_targets(imgdata, targets)
img.data.set_data("imagedata", imgdata)
evt = self._lum_evt
set_laser_power = self.laser_manager.set_laser_power_hook
ld.reset()
get_brightness = self.laser_manager.get_brightness
target = self.lumens
prev = 0
sst = time.time()
while not evt.is_set():
dt = pid.kdt
st = time.time()
src, current, targets = get_brightness(threshold=self.threshold)
err = target - current
out = pid.get_value(err) or 0
invoke_in_main_thread(update, current, err, out, src, targets)
if abs(prev - out) > 0.02:
self.debug("set power output={}".format(out))
set_laser_power(out)
prev = out
if time.time() - sst > 10:
sst = time.time()
ld.reset()
et = time.time() - st
t = dt - et
if t > 0:
evt.wait(dt)
def traits_view(self):
v = View(
VGroup(
Item("pid", style="custom"),
Item("threshold", label="T"),
Item("test"),
UItem("plot_container", style="custom", editor=ComponentEditor()),
)
)
return v
if __name__ == "__main__":
d = Degasser(laser_manager=LM(30))
d.configure_traits()
# ============= EOF =============================================
#
# class PID(HasTraits):
# _integral_err = 0
# _prev_err = 0
# Kp = Float(0.25)
# Ki = Float(0.0001)
# Kd = Float(0)
# min_output = 0
# max_output = 100
#
# def get_value(self, error, dt):
# self._integral_err += (error * dt)
# derivative = (error - self._prev_err) / dt
# output = (self.Kp * error) + (self.Ki * self._integral_err) + (
# self.Kd * derivative)
# self._prev_err = error
# return min(self.
|
wfarn/scratchpad
|
django/django_db/django_db/wsgi.py
|
Python
|
apache-2.0
| 393
| 0.002545
|
"""
WSGI config for django_db project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_db.settings")
|
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
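# Local-development sketch (not part of the original file): the ``application``
# callable can be served directly with the standard library's wsgiref server.
if __name__ == "__main__":
    from wsgiref.simple_server import make_server
    # Port 8000 is an arbitrary choice for local testing.
    make_server("127.0.0.1", 8000, application).serve_forever()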
|
xzturn/tensorflow
|
tensorflow/tools/gcs_test/python/gcs_smoke.py
|
Python
|
apache-2.0
| 10,461
| 0.013765
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Smoke test for reading records from GCS to TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import sys
import time
import numpy as np
import tensorflow as tf
from tensorflow.core.example import example_pb2
from tensorflow.python.lib.io import file_io
flags = tf.compat.v1.app.flags
flags.DEFINE_string("gcs_bucket_url", "",
"The URL to the GCS bucket in which the temporary "
"tfrecord file is to be written and read, e.g., "
"gs://my-gcs-bucket/test-directory")
flags.DEFINE_integer("num_examples", 10, "Number of examples to generate")
FLAGS = flags.FLAGS
def create_examples(num_examples, input_mean):
"""Create ExampleProto's containing data."""
ids = np.arange(num_examples).reshape([num_examples, 1])
inputs = np.random.randn(num_examples, 1) + input_mean
target = inputs - input_mean
examples = []
for row in range(num_examples):
ex = example_pb2.Example()
ex.features.feature["id"].bytes_list.value.append(str(ids[row, 0]))
ex.features.feature["target"].float_list.value.append(target[row, 0])
ex.features.feature["inputs"].float_list.value.append(inputs[row, 0])
examples.append(ex)
return examples
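# Illustrative note (not part of the smoke test): create_examples(2, 5.0)
# yields two Example protos whose "inputs" feature is drawn around 5.0,
# whose "target" equals inputs minus 5.0, and whose "id" is the row index
# serialised as a string.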
def create_dir_test():
"""Verifies file_io directory handling methods."""
# Test directory creation.
starttime_ms = int(round(time.time() * 1000))
dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime_ms)
print("Creating dir %s" % dir_name)
file_io.create_dir(dir_name)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Created directory in: %d milliseconds" % elapsed_ms)
# Check that the directory exists.
dir_exists = file_io.is_directory(dir_name)
assert dir_exists
print("%s directory exists: %s" % (d
|
ir_name, dir_exists))
# Test recursive directory creation.
starttime_ms = int(round(time.time() * 1000))
recursive_dir_name = "%s/%s/%s" % (dir_name,
"nested_dir1",
"nested_dir2")
print("Creating recursive dir %s" % recursive_dir_name)
file_io.recursive_create_dir(recursive_dir_name)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Created directory recursively in: %d milliseconds" % elapsed_ms)
# Check that the directory exists.
recursive_dir_exists = file_io.is_directory(recursive_dir_name)
assert recursive_dir_exists
print("%s directory exists: %s" % (recursive_dir_name, recursive_dir_exists))
# Create some contents in the just created directory and list the contents.
num_files = 10
files_to_create = ["file_%d.txt" % n for n in range(num_files)]
for file_num in files_to_create:
file_name = "%s/%s" % (dir_name, file_num)
print("Creating file %s." % file_name)
file_io.write_string_to_file(file_name, "test file.")
print("Listing directory %s." % dir_name)
starttime_ms = int(round(time.time() * 1000))
directory_contents = file_io.list_directory(dir_name)
print(directory_contents)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Listed directory %s in %s milliseconds" % (dir_name, elapsed_ms))
assert set(directory_contents) == set(files_to_create + ["nested_dir1/"])
# Test directory renaming.
dir_to_rename = "%s/old_dir" % dir_name
new_dir_name = "%s/new_dir" % dir_name
file_io.create_dir(dir_to_rename)
assert file_io.is_directory(dir_to_rename)
assert not file_io.is_directory(new_dir_name)
starttime_ms = int(round(time.time() * 1000))
print("Will try renaming directory %s to %s" % (dir_to_rename, new_dir_name))
file_io.rename(dir_to_rename, new_dir_name)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Renamed directory %s to %s in %s milliseconds" % (
dir_to_rename, new_dir_name, elapsed_ms))
assert not file_io.is_directory(dir_to_rename)
assert file_io.is_directory(new_dir_name)
# Test Delete directory recursively.
print("Deleting directory recursively %s." % dir_name)
starttime_ms = int(round(time.time() * 1000))
file_io.delete_recursively(dir_name)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
dir_exists = file_io.is_directory(dir_name)
assert not dir_exists
print("Deleted directory recursively %s in %s milliseconds" % (
dir_name, elapsed_ms))
def create_object_test():
"""Verifies file_io's object manipulation methods ."""
starttime_ms = int(round(time.time() * 1000))
dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime_ms)
print("Creating dir %s." % dir_name)
file_io.create_dir(dir_name)
num_files = 5
# Create files of 2 different patterns in this directory.
files_pattern_1 = ["%s/test_file_%d.txt" % (dir_name, n)
for n in range(num_files)]
files_pattern_2 = ["%s/testfile%d.txt" % (dir_name, n)
for n in range(num_files)]
starttime_ms = int(round(time.time() * 1000))
files_to_create = files_pattern_1 + files_pattern_2
for file_name in files_to_create:
print("Creating file %s." % file_name)
file_io.write_string_to_file(file_name, "test file creation.")
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Created %d files in %s milliseconds" % (
len(files_to_create), elapsed_ms))
# Listing files of pattern1.
list_files_pattern = "%s/test_file*.txt" % dir_name
print("Getting files matching pattern %s." % list_files_pattern)
starttime_ms = int(round(time.time() * 1000))
files_list = file_io.get_matching_files(list_files_pattern)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Listed files in %s milliseconds" % elapsed_ms)
print(files_list)
assert set(files_list) == set(files_pattern_1)
# Listing files of pattern2.
list_files_pattern = "%s/testfile*.txt" % dir_name
print("Getting files matching pattern %s." % list_files_pattern)
starttime_ms = int(round(time.time() * 1000))
files_list = file_io.get_matching_files(list_files_pattern)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Listed files in %s milliseconds" % elapsed_ms)
print(files_list)
assert set(files_list) == set(files_pattern_2)
# Test renaming file.
file_to_rename = "%s/oldname.txt" % dir_name
file_new_name = "%s/newname.txt" % dir_name
file_io.write_string_to_file(file_to_rename, "test file.")
assert file_io.file_exists(file_to_rename)
assert not file_io.file_exists(file_new_name)
print("Will try renaming file %s to %s" % (file_to_rename, file_new_name))
starttime_ms = int(round(time.time() * 1000))
file_io.rename(file_to_rename, file_new_name)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("File %s renamed to %s in %s milliseconds" % (
file_to_rename, file_new_name, elapsed_ms))
assert not file_io.file_exists(file_to_rename)
assert file_io.file_exists(file_new_name)
# Delete directory.
print("Deleting directory %s." % dir_name)
file_io.delete_recursively(dir_name)
def main(argv):
del argv # Unused.
# Sanity check on the GCS bucket URL.
if not FLAGS.gcs_bucket_url or not FLAGS.gcs_bucket_url.startswith("gs://"):
print("ERROR: Invalid GCS bucket URL: \"%s\"" % FLAGS.gcs_bucket_url)
sys.exit(1)
# Generate random tfrecord path name.
input_path = FLAGS.gcs_bucket_url + "/"
input_path += "".join(random.choice("0123456789ABCDEF") for i in range(8))
i
|
humanoid-path-planner/hpp-corbaserver
|
src/hpp/__init__.py
|
Python
|
lgpl-3.0
| 1,269
| 0.002364
|
# Copyright (c) 2012 CNRS
# Author: Florent Lamiraux
#
# This file is part of hpp-corbaserver.
# hpp-corbaserver is free software: you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# hpp-corbaserver is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# hpp-corbaserver. If not, see
# <http://www.gnu.org/licenses/>.
from .quaternion import Quaternion
from .transform import Transform
def retrieveRosResource(path):
import os
ros_package_paths = os.environ["ROS_PACKAGE_PATH"].split(':')
if path.startswith("package://"):
relpath = path[len("package://"):]
for dir in ros_package_paths:
abspath = os.path.join(dir,relpath)
if os.path.exists(abspath):
return abspath
        raise IOError("Could not find resource " + path)
else:
return path
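# Usage sketch (hypothetical paths): "package://" URIs are resolved against each
# entry of ROS_PACKAGE_PATH, while plain filesystem paths are returned unchanged.
#   os.environ["ROS_PACKAGE_PATH"] = "/opt/ros/share"
#   retrieveRosResource("package://my_robot/urdf/robot.urdf")
#   retrieveRosResource("/tmp/mesh.stl")   # -> "/tmp/mesh.stl"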
|
theawless/Dict-O-nator
|
dictonator/setlog.py
|
Python
|
gpl-3.0
| 1,573
| 0.000636
|
# Dict'O'nator - A dictation plugin for gedit.
# Copyright (C) <2016> <Abhinav Singh>
#
# This file is part of Dict'O'nator.
#
# Dict'O'nator is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dict'O'nator is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Dict'O'nator. If not, see <http://www.gnu.org/licenses/>.
"""Sets up the logger."""
import logging
import os
logger = logging.getLogger('dictonator')
GEDIT_PLUGIN_PATH = os.path.dirname(os.path.abspath(__file__))
if not os.path.exists(GEDIT_PLUGIN_PATH + '/.logs'):
os.makedirs(GEDIT_PLUGIN_PATH + '/.logs')
LOG_DIR_PATH = GEDIT_PLUGIN_PATH + "/.logs/"
def setup_logger():
# setting format of log
formatter = logging.Formatter('%(threadName)s - %(levelname)s - %(message)s')
logger.setLevel(logging.DEBUG)
# file location
debug_log = LOG_DIR_PATH + 'log.txt'
# adding handler for console logs
    sh = logging.StreamHandler()
sh.setFormatter(formatter)
logger.addHandler(sh)
    # adding handler for file logs
fh = logging.FileHandler(debug_log)
fh.setFormatter(formatter)
logger.addHandler(fh)
setup_logger()
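# Minimal usage sketch (appended for illustration): other Dict'O'nator modules
# simply import ``logger`` and log through the handlers configured above.
logger.debug("dictonator logger initialised; file log at %s", LOG_DIR_PATH + "log.txt")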
|
henriquemiranda/yambo-py
|
yambopy/recipes.py
|
Python
|
bsd-3-clause
| 20,837
| 0.022172
|
# Copyright (C) 2018 Henrique Pereira Coutada Miranda, Alejandro Molina Sanchez, Alexandre Morlet, Fulvio Paleari
#
# All rights reserved.
#
# This file is part of yambopy
#
#
import os
from operator import itemgetter
from collections import OrderedDict
from yambopy import *
#
# by Henrique Miranda.
#
def pack_files_in_folder(folder,save_folder=None,mask='',verbose=True):
"""
Pack the output files in a folder to json files
"""
if not save_folder: save_folder = folder
#pack the files in .json files
for dirpath,dirnames,filenames in os.walk(folder):
#check if the folder fits the mask
if mask in dirpath:
#check if there are some output files in the folder
if ([ f for f in filenames if 'o-' in f ]):
if verbose: print(dirpath)
y = YamboOut(dirpath,save_folder=save_folder)
y.pack()
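# Usage sketch (folder names are hypothetical): pack every run under 'gw_conv'
# whose path contains 'FFTGvecs' into .json files written next to the outputs.
#   pack_files_in_folder('gw_conv', mask='FFTGvecs')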
#
# by Alejandro Molina-Sanchez
#
def breaking_symmetries(efield1,efield2=[0,0,0],folder='.',RmTimeRev=True):
"""
Breaks the symmetries for a given field.
Second field used in circular polarized pump configuration
RmTimeRev : Remove time symmetry is set True by default
"""
os.system('mkdir -p %s'%folder)
os.system('cp -r database/SAVE %s'%folder)
os.system('cd %s; yambo'%folder)
ypp = YamboIn.from_runlevel('-y -V all',executable='ypp',folder=folder,filename='ypp.in')
ypp['Efield1'] = efield1 # Field in the X-direction
ypp['Efield2'] = efield2 # Field in the X-direction
if RmTimeRev:
ypp.arguments.append('RmTimeRev') # Remove Time Symmetry
ypp.write('%s/ypp.in'%folder)
os.system('cd %s ; ypp_ph -F ypp.in'%folder )
os.system('cd %s ; cd FixSymm; yambo '%folder )
os.system('rm -r %s/SAVE'%folder)
os.system('mv %s/FixSymm/SAVE %s/'%(folder,folder))
os.system('rm -r %s/FixSymm'%folder)
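# Usage sketch (hypothetical folder): break the symmetries for a field polarised
# along x before a real-time run; a 'database/SAVE' directory must already exist.
#   breaking_symmetries([1, 0, 0], folder='rt-fieldx')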
#
# by Alexandre Morlet & Henrique Miranda
#
def analyse_gw(folder,var,bandc,kpointc,bandv,kpointv,pack,text,draw,verbose=False):
"""
Study the convergence of GW calculations by looking at the change in band-gap value.
The script reads from <folder> all results from <variable> calculations and display them.
Use the band and k-point options (or change default values) according to the size of your k-grid and
the location of the band extrema.
"""
print(' K-point Band')
print('Conduction state %6d %6d'%(kpointc, bandc))
print(' Valence state %6d %6d'%(kpointv, bandv))
#find all ndb.QP files in the folder
io = OrderedDict()
for root, dirs, files in os.walk(folder):
#get starting name of folder
basename = os.path.basename(root)
        #look into folders starting with var or reference
        if any( [basename.startswith(v) for v in [var,'reference']] ):
for filename in files:
if filename != 'ndb.QP': continue
#get ndb.QP file in folder
io[basename] = ( YamboIn.from_file(folder=folder,filename="%s.in"%basename),
YamboQPDB.from_db(folder=root,filename=filename) )
#consistency check
#TODO
convergence_data = []
for basename, (inp,out) in io.items():
#get input
value, unit = inp[var]
#get qp value
# Be careful because the array of eigenvalues is defined now in another way
eigenvalues_dft, eigenvalues_qp, lifetimes, z = out.get_qps()
#save result
qp_gap = eigenvalues_qp[kpointc-out.min_kpoint,bandc-out.min_band] - eigenvalues_qp[kpointv-out.min_kpoint,bandv-out.min_band]
#check type of variable
if isinstance(value,list): value = value[1]
convergence_data.append([value,qp_gap])
convergence_data = np.array(sorted(convergence_data))
if convergence_data.dtype == 'object': raise ValueError('Unknown type of variable')
if text:
output_folder = 'analyse_%s'%folder
if not os.path.isdir(output_folder): os.mkdir(output_folder)
outname = os.path.join(output_folder,'%s_%s.dat'%(folder,var))
header = var+' ('+str(unit)+')'
np.savetxt(outname,convergence_data,delimiter='\t',header=header)
if draw:
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(convergence_data[:,0],convergence_data[:,1],'o-')
ax.set_xlabel(var+' ('+unit+')')
ax.set_ylabel('E_gw = E_lda + \Delta E')
fig.savefig('%s.png'%var)
#
# by Alexandre Morlet
#
def analyse_bse(folder,var,numbexc,intexc,degenexc,maxexc,text,draw,verbose=False):
"""
Using ypp, you can study the convergence of BSE calculations in 2 ways:
Create a .png of all absorption spectra relevant to the variable you study
Look at the eigenvalues of the first n "bright" excitons (given a threshold intensity)
The script reads from <folder> all results from <variable> calculations for processing.
The resulting pictures and data files are saved in the ./analyse_<folder>/ folder.
Arguments:
folder -> Folder containing SAVE and convergence runs.
var -> Variable tested (e.g. FFTGvecs)
numbexc -> Number of excitons to read beyond threshold (default=2)
intexc -> Minimum intensity for excitons to be considered bright (default=0.05)
degenexc -> Energy threshold under which different peaks are merged (eV) (default=0.01)
maxexc -> Energy threshold after which excitons are not read anymore (eV) (default=8.0)
text -> Skips writing the .dat file (default: True)
draw -> Skips drawing (plotting) the abs spectra (default: True)
"""
#find the save folder
lat = YamboSaveDB.from_db_file(os.path.join(folder,'SAVE'))
#find all ndb.BS_diago_Q01 files in the folder
io = OrderedDict()
for root, dirs, files in os.walk(folder):
#get starting name of folder
basename = os.path.basename(root)
#look into folders starting with var or reference
if any( [basename.startswith(v) for v in [var,'reference']] ):
for filename in files:
if filename != 'ndb.BS_diago_Q01': continue
#get ndb.BS_diago_Q01 file in folder
io[basename] = ( YamboIn.from_file(folder=folder,filename="%s.in"%basename),
YamboExcitonDB.from_db_file(lat,folder=root,filename=filename) )
#TODO consistency check
exciton_energies = []
exciton_spectras = []
for basename, (inp,out) in io.items():
#get input
value, unit = inp[var]
#get exiton energies
exciton_energy = out.eigenvalues.real
#get excitonic spectra
exciton_spectra = out.get_chi()
#check type of variable
if isinstance(value,list): value = value[1]
exciton_energies.append([value,exciton_energy])
exciton_spectras.append([value,exciton_spectra])
exciton_spectras = sorted(exciton_spectras,key=lambda x: x[0])
exciton_energies = sorted(exciton_energies,key=lambda x: x[0])
    #save a file with the exciton energies
output_folder = 'analyse_%s'%folder
if not os.path.isdir(output_folder): os.mkdir(output_folder)
output_file = '%s_exciton_energies.dat'%var
with open(os.path.join(output_folder,output_file),'w') as f:
header = "%s (%s)\n"%(var,unit) if unit else "%s\n"%var
f.write(header)
for value,energies in exciton_energies:
f.write("{} ".format(value)+("%10.6lf "*numbexc)%tuple(energies[:numbexc])+"\n")
import matplotlib.pyplot as plt
## Exciton spectra plots
filename = 'exciton_spectra.png'
fig = plt.figure(figsize=(6,5))
ax = fig.add_subplot(1,1,1)
#plot the spectra
cmap = plt.get_cmap('viridis')
nspectra = len(exciton_spectras)
for i,(value,(w,spectra)) in enumerate(exciton_spectras):
plt.plot(w,spectra.imag,c=cmap(i/nspectra),label="{} = {} {}".format(var,value,unit))
## Spectra plots
ax.set_xlabel('$\omega$ (eV)')
ax.set_ylabel('Im($\epsilon_M$)')
ax.legend(frameon=False)
output
|
repotvsupertuga/tvsupertuga.repository
|
script.module.streamtvsupertuga/lib/resources/lib/sources/en/moviesleak.py
|
Python
|
gpl-2.0
| 6,709
| 0.015502
|
# -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @tantrumdev wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Yoda
# Addon id: plugin.video.Yoda
# Addon Provider: Supremacy
import re, urllib, urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import source_utils
from resources.lib.modules import debrid
from resources.lib.modules import dom_parser2
from resources.lib.modules import workers
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['300mbmoviesdl.com', 'moviesleak.net/', 'hevcbluray.net']
self.base_link = 'https://moviesleak.net/'
self.search_link = '?s=%s'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = {'imdb': imdb, 'title': title, 'year': year}
url = urllib.urlencode(url)
return url
except Exception:
return
def sources(self, url, hostDict, hostprDict):
try:
self._sources = []
if url is None:
return self._sources
if debrid.status() is False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']
query = '%s S%02dE%02d' % (
data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (
data['title'], data['year'])
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
url = self.search_link % urllib.quote_plus(query)
url = urlparse.urljoin(self.base_link, url)
r = client.request(url)
posts = client.parseDOM(r, 'div', attrs={'class': 'item'})
hostDict = hostprDict + hostDict
items = []
for post in posts:
try:
tit = client.parseDOM(post, 'img', ret='alt')[0]
c = client.parseDOM(post, 'a', ret='href')[0]
name = tit
name = client.replaceHTMLCodes(name)
t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d+E\d+|S\d+|3D)(\.|\)|\]|\s|)(.+|)', '', name, flags=re.I)
if not cleantitle.get(t) == cleantitle.get(title): raise Exception()
try:
y = re.findall('(?:\.|\(|\[|\s*|)(S\d+E\d+|S\d+)(?:\.|\)|\]|\s*|)', name, re.I)[-1].upper()
except Exception:
y = re.findall('(?:\.|\(|\[|\s*|)(\d{4})(?:\.|\)|\]|\s*|)', name, re.I)[0].upper()
if not y == hdlr: raise Exception()
try:
s = re.findall('((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|MB|MiB))', t)[0]
except BaseException:
s = '0'
items += [(tit, c, s)]
except Exception:
pass
threads = []
for item in items:
threads.append(workers.Thread(self._get_sources,
|
item, hostDict))
[i.start() for i in threads]
[i.join() for i in threads]
return self._sources
except Exception:
return self._sources
def _get_sources(self, item, hostDict):
try:
quality, info = source_utils.get_release_quality(item[0], item[1])
size = item[2] if item[2] != '0'else item[0]
try:
                size = re.findall('((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|MB|MiB))', size)[-1]
div = 1 if size.endswith(('GB', 'GiB')) else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size.replace(',', '.'))) / div
size = '%.2f GB' % size
info.append(size)
except Exception:
pass
data = client.request(item[1])
try:
r = client.parseDOM(data, 'li', attrs={'class': 'elemento'})
r = [(dom_parser2.parse_dom(i, 'a', req='href')[0],
dom_parser2.parse_dom(i, 'img', req='alt')[0],
dom_parser2.parse_dom(i, 'span', {'class': 'd'})[0]) for i in r]
urls = [('http:' + i[0].attrs['href'] if not i[0].attrs['href'].startswith('http') else
i[0].attrs['href'], i[1].attrs['alt'], i[2].content) for i in r if i[0] and i[1]]
for url, host, qual in urls:
try:
if any(x in url for x in ['.rar', '.zip', '.iso', ':Upcoming']): raise Exception()
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
valid, host = source_utils.is_host_valid(host, hostDict)
if not valid: continue
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
quality, info = source_utils.get_release_quality(qual, quality)
info.append('HEVC')
info = ' | '.join(info)
self._sources.append(
{'source': host, 'quality': quality, 'language': 'en', 'url': url, 'info': info,
'direct': False, 'debridonly': True})
except Exception:
pass
except Exception:
pass
except BaseException:
return
def resolve(self, url):
if 'hideurl' in url:
data = client.request(url)
data = client.parseDOM(data, 'div', attrs={'class': 'row'})
url = [dom_parser2.parse_dom(i, 'a', req='href')[0] for i in data]
url = [i.attrs['href'] for i in url if 'direct me' in i.content][0]
return url
else:
return url
|
yuanming-hu/taichi
|
tests/python/test_arg_alignment.py
|
Python
|
mit
| 418
| 0
|
import taichi as ti
from tests import test_utils
@test_utils.test(exclude=[ti.opengl])
def test_ret_write():
@ti.kernel
|
def func(a: ti.i16) -> ti.f32:
return 3.0
assert func(255) == 3.0
@test_utils.test(exclude=[ti.opengl])
def test_arg_read():
x = ti.field(ti.i32, shape=())
@ti.kernel
def func(a: ti.i8, b: ti.i32):
x[None] = b
func(255, 2)
assert x[None] == 2
|
wdecoster/nanoget
|
nanoget/extraction_functions.py
|
Python
|
gpl-3.0
| 18,527
| 0.003023
|
import logging
from functools import reduce
import nanoget.utils as ut
import pandas as pd
import sys
import pysam
import re
from Bio import SeqIO
import concurrent.futures as cfutures
from itertools import repeat
def process_summary(summaryfile, **kwargs):
"""Extracting information from an albacore summary file.
Only reads which have a >0 length are returned.
The fields below may or may not exist, depending on the type of sequencing performed.
Fields 1-14 are for 1D sequencing.
Fields 1-23 for 2D sequencing.
Fields 24-27, 2-5, 22-23 for 1D^2 (1D2) sequencing
Fields 28-38 for barcoded workflows
1 filename
2 read_id
3 run_id
4 channel
5 start_time
6 duration
7 num_events
8 template_start
9 num_events_template
10 template_duration
11 num_called_template
12 sequence_length_template
13 mean_qscore_template
14 strand_score_template
15 complement_start
16 num_events_complement
17 complement_duration
18 num_called_complement
19 sequence_length_complement
20 mean_qscore_complement
21 strand_score_complement
22 sequence_length_2d
23 mean_qscore_2d
24 filename1
25 filename2
26 read_id1
27 read_id2
28 barcode_arrangement
29 barcode_score
30 barcode_full_arrangement
31 front_score
32 rear_score
33 front_begin_index
34 front_foundseq_length
35 rear_end_index
36 rear_foundseq_length
37 kit
38 variant
"""
logging.info("Nanoget: Collecting metrics from summary file {} for {} sequencing".format(
summaryfile, kwargs["readtype"]))
ut.check_existance(summaryfile)
if kwargs["readtype"] == "1D":
cols = ["channel", "start_time", "duration",
"sequence_length_template", "mean_qscore_template"]
elif kwargs["readtype"] in ["2D", "1D2"]:
cols = ["channel", "start_time", "duration", "sequence_length_2d", "mean_qscore_2d"]
if kwargs["barcoded"]:
cols.append("barcode_arrangement")
logging.info("Nanoget: Extracting metrics per barcode.")
try:
datadf = pd.read_csv(
filepath_or_buffer=summaryfile,
sep="\t",
usecols=cols,
)
except ValueError:
logging.error("Nanoget: did not find expected columns in summary file {}:\n {}".format(
summaryfile, ', '.join(cols)))
sys.exit("ERROR: expected columns in summary file {} not found:\n {}".format(
summaryfile, ', '.join(cols)))
if kwargs["barcoded"]:
        datadf.columns = ["channelIDs", "time", "duration", "lengths", "quals", "barcode"]
else:
datadf.columns = ["channelIDs", "time", "duration", "lengths", "quals"]
logging.info("Nanoget: Finished collecting statistics from summary file {}".format(summaryfile))
return ut.reduce_memory_usage(datadf.loc[datadf["lengths"] != 0].copy())
def check_bam(bam, samtype="bam"):
"""Check if bam file is valid.
Bam file should:
- exists
- has an index (create if necessary)
- is sorted by coordinate
- has at least one mapped read
"""
ut.check_existance(bam)
samfile = pysam.AlignmentFile(bam, "rb")
if not samfile.has_index():
pysam.index(bam)
samfile = pysam.AlignmentFile(bam, "rb") # Need to reload the samfile after creating index
logging.info("Nanoget: No index for bam file could be found, created index.")
if not samfile.header['HD']['SO'] == 'coordinate':
logging.error("Nanoget: Bam file {} not sorted by coordinate!.".format(bam))
sys.exit("Please use a bam file sorted by coordinate.")
if samtype == "bam":
logging.info("Nanoget: Bam file {} contains {} mapped and {} unmapped reads.".format(
bam, samfile.mapped, samfile.unmapped))
if samfile.mapped == 0:
logging.error("Nanoget: Bam file {} does not contain aligned reads.".format(bam))
sys.exit("FATAL: not a single read was mapped in bam file {}".format(bam))
return samfile
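# Sketch of satisfying the checks above (assumes a samtools-style workflow
# through pysam): sort by coordinate first, then pass the file in; the index
# is created automatically if it is missing.
#   pysam.sort("-o", "reads.sorted.bam", "reads.bam")
#   samfile = check_bam("reads.sorted.bam")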
def process_ubam(bam, **kwargs):
"""Extracting metrics from unaligned bam format
Extracting lengths
"""
logging.info("Nanoget: Starting to collect statistics from ubam file {}.".format(bam))
samfile = pysam.AlignmentFile(bam, "rb", check_sq=False)
if not samfile.has_index():
pysam.index(bam)
# Need to reload the samfile after creating index
samfile = pysam.AlignmentFile(bam, "rb", check_sq=False)
logging.info("Nanoget: No index for bam file could be found, created index.")
datadf = pd.DataFrame(
data=[(read.query_name, ut.ave_qual(read.query_qualities), read.query_length)
for read in samfile.fetch(until_eof=True)],
columns=["readIDs", "quals", "lengths"]) \
.dropna(axis='columns', how='all') \
.dropna(axis='index', how='any')
logging.info("Nanoget: ubam {} contains {} reads.".format(
bam, datadf["lengths"].size))
return ut.reduce_memory_usage(datadf)
def process_bam(bam, **kwargs):
"""Combines metrics from bam after extraction.
Processing function: calls pool of worker functions
to extract from a bam file the following metrics:
-lengths
-aligned lengths
-qualities
-aligned qualities
-mapping qualities
-edit distances to the reference genome scaled by read length
Returned in a pandas DataFrame
"""
logging.info("Nanoget: Starting to collect statistics from bam file {}.".format(bam))
samfile = check_bam(bam)
chromosomes = samfile.references
if len(chromosomes) > 100 or kwargs["huge"]:
logging.info("Nanoget: lots of contigs (>100) or --huge, not running in separate processes")
datadf = pd.DataFrame(
data=extract_from_bam(bam, None, kwargs["keep_supp"]),
columns=["readIDs", "quals", "aligned_quals", "lengths",
"aligned_lengths", "mapQ", "percentIdentity"]) \
.dropna(axis='columns', how='all') \
.dropna(axis='index', how='any')
else:
unit = chromosomes
with cfutures.ProcessPoolExecutor(max_workers=kwargs["threads"]) as executor:
datadf = pd.DataFrame(
data=[res for sublist in executor.map(extract_from_bam,
repeat(bam),
unit,
repeat(kwargs["keep_supp"]))
for res in sublist],
columns=["readIDs", "quals", "aligned_quals", "lengths",
"aligned_lengths", "mapQ", "percentIdentity"]) \
.dropna(axis='columns', how='all') \
.dropna(axis='index', how='any')
logging.info(f"Nanoget: bam {bam} contains {datadf['lengths'].size} primary alignments.")
return ut.reduce_memory_usage(datadf)
def process_cram(cram, **kwargs):
"""Combines metrics from cram after extraction.
Processing function: calls pool of worker functions
to extract from a cram file the following metrics:
-lengths
-aligned lengths
-qualities
-aligned qualities
-mapping qualities
-edit distances to the reference genome scaled by read length
Returned in a pandas DataFrame
"""
logging.info("Nanoget: Starting to collect statistics from cram file {}.".format(cram))
samfile = check_bam(cram, samtype="cram")
chromosomes = samfile.references
if len(chromosomes) > 100:
unit = [None]
logging.info("Nanoget: lots of contigs (>100), not running in separate processes")
else:
unit = chromosomes
with cfutures.ProcessPoolExecutor(max_workers=kwargs["threads"]) as executor:
datadf = pd.DataFrame(
data=[res for sublist in executor.map(extract_from_bam,
repeat(cram), unit, repeat(kwargs["keep_supp"]))
for res in sublist],
columns=["readIDs", "quals", "aligned_q
|
qxcv/joint-regressor
|
keras/vggnet/vgg16_keras.py
|
Python
|
apache-2.0
| 2,604
| 0.014209
|
from keras.models import Sequential
from keras.layers.core import Flatten, Dense, Dropout
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.optimizers import SGD
import numpy as np
def VGG_16(weights_path=None):
model = Sequential()
model.add(ZeroPadding2D((1,1),input_shape=(3,224,224)))
model.add(Convolution2D(64, 3, 3, activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(64, 3, 3, activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(128, 3, 3, activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(128, 3, 3, activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(256, 3, 3, activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(256, 3, 3, activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(256, 3, 3, activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, 3, 3, activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, 3, 3, activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, 3, 3, activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, 3, 3, activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, 3, 3, activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, 3, 3, activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(Flatten())
model.add(Dense(4096, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(1000, activation='softmax'))
if weights_path:
model.load_weights(weights_path)
return model
if __name__ == "__main__":
import cv2
im = cv2.resize(cv2.imread('cat.jpg'), (224, 224)).astype(np.float32)
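    # The constants below are the per-channel (BGR) ImageNet means used when the
    # original Caffe VGG16 weights were trained; subtracting them reproduces that
    # preprocessing. (Comment added for clarity.)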
im[:,:,0] -= 103.939
im[:,:,1] -= 116.779
im[:,:,2] -= 123.68
im = im.transpose((2,0,1))
im = np.expand_dims(im, axis=0)
# Test pretrained model
model = VGG_16('vgg16_weights.h5')
sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(optimizer=sgd, loss='categorical_crossentropy')
out = model.predict(im)
print np.argmax(out)
|
naveensan1/nuage-openstack-neutron
|
nuage_neutron/plugins/common/trunk_db.py
|
Python
|
apache-2.0
| 3,135
| 0
|
# Copyright 2017 Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db import models_v2
from neutron.db import segments_db
from neutron.services.trunk import constants as t_consts
from neutron.services.trunk import models
from sqlalchemy.orm import aliased
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import noload
def get_vlan_subports_of_trunk_physnet(session, trunk_id):
cur_trunk = aliased(models.Trunk, name='cur_trunk')
    cur_parent_port = aliased(models_v2.Port, name='cur_parent_port')
cur_parent_network = aliased(models_v2.Network, name='cur_parent_network')
cur_parent_network_segment = aliased(segments_db.NetworkSegment,
name='cur_parent_network_segment')
other_parent_port = aliased(models_v2.Port, name='other_parent_port')
return (
|
session.query(models_v2.Port)
.options(
noload('*'),
joinedload(models_v2.Port.sub_port),
joinedload(models_v2.Port.fixed_ips))
.join(
(models.SubPort, models.SubPort.port_id == models_v2.Port.id),
(models.Trunk, models.SubPort.trunk_id == models.Trunk.id),
(other_parent_port, other_parent_port.id == models.Trunk.port_id),
(models_v2.Network,
models_v2.Network.id == other_parent_port.network_id),
(segments_db.NetworkSegment,
segments_db.NetworkSegment.network_id == models_v2.Network.id),
(cur_parent_network_segment,
cur_parent_network_segment.physical_network ==
segments_db.NetworkSegment.physical_network),
(cur_parent_network,
cur_parent_network.id == cur_parent_network_segment.network_id),
(cur_parent_port, cur_parent_port.network_id ==
cur_parent_network.id),
(cur_trunk, cur_parent_port.id == cur_trunk.port_id),
)
.filter(
cur_trunk.id == trunk_id,
models.SubPort.segmentation_type == t_consts.VLAN)
).all()
def get_vlan_subports_of_trunk(session, trunk_id):
return (
session.query(models_v2.Port)
.options(
noload('*'),
joinedload(models_v2.Port.sub_port),
joinedload(models_v2.Port.fixed_ips))
.join(
(models.SubPort, models.SubPort.port_id == models_v2.Port.id),
(models.Trunk, models.SubPort.trunk_id == models.Trunk.id)
)
.filter(
models.Trunk.id == trunk_id,
models.SubPort.segmentation_type == t_consts.VLAN)
).all()
|
szepeviktor/fail2ban
|
fail2ban/tests/samplestestcase.py
|
Python
|
gpl-2.0
| 5,376
| 0.023251
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
# vi: set ft=python sts=4 ts=4 sw=4 noet :
# This file is part of Fail2Ban.
#
# Fail2Ban is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Fail2Ban is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Fail2Ban; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# Fail2Ban developers
__copyright__ = "Copyright (c) 2013 Steven Hiscocks"
__license__ = "GPL"
import datetime
import fileinput
import inspect
import json
import os
import re
import sys
import time
import unittest
from ..server.filter import Filter
from ..client.filterreader import FilterReader
from .utils import setUpMyTime, tearDownMyTime, CONFIG_DIR
TEST_CONFIG_DIR = os.path.join(os.path.dirname(__file__), "config")
TEST_FILES_DIR = os.path.join(os.path.dirname(__file__), "files")
class FilterSamplesRegex(unittest.TestCase):
def setUp(self):
"""Call before every test case."""
self.filter = Filter(None)
self.filter.active = True
setUpMyTime()
def tearDown(self):
"""Call after every test case."""
tearDownMyTime()
def testFiltersPresent(self):
"""Check to ensure some tests exist"""
self.assertTrue(
len([test for test in inspect.getmembers(self)
if test[0].startswith('testSampleRegexs')])
>= 10,
"Expected more FilterSampleRegexs tests")
def testSampleRegexsFactory(name, basedir):
def testFilter(self):
# Check filter exists
filterConf = FilterReader(name, "jail", {}, basedir=basedir)
self.assertEqual(filterConf.getFile(), name)
self.assertEqual(filterConf.getJailName(), "jail")
filterConf.read()
filterConf.getOptions({})
for opt in filterConf.convert():
if opt[2] == "addfailregex":
self.filter.addFailRegex(opt[3])
elif opt[2] == "maxlines":
self.filter.setMaxLines(opt[3])
elif opt[2] == "addignoreregex":
self.filter.addIgnoreRegex(opt[3])
elif opt[2] == "datepattern":
self.filter.setDatePattern(opt[3])
self.assertTrue(
os.path.isfile(os.path.join(TEST_FILES_DIR, "logs", name)),
"No sample log file available for '%s' filter" % name)
logFile = fileinput.FileInput(
os.path.join(TEST_FILES_DIR, "logs", name))
regexsUsed = set()
for line in logFile:
jsonREMatch = re.match("^# ?failJSON:(.+)$", line)
if jsonREMatch:
try:
faildata = json.loads(jsonREMatch.group(1))
except ValueError as e:
raise ValueError("%s: %s:%i" %
(e, logFile.filename(), logFile.filelineno()))
line = next(logFile)
elif line.startswith("#") or not line.strip():
continue
else:
faildata = {}
ret = self.filter.processLine(
line, returnRawHost=True, checkAllRegex=True)[1]
if not ret:
# Check line is flagged as none match
self.assertFalse(faildata.get('match', True),
"Line not matched when should have: %s:%i %r" %
(logFile.filename(), logFile.filelineno(), line))
elif ret:
# Check line is flagged to match
self.assertTrue(faildata.get('match', False),
"Line matched when shouldn't have: %s:%i %r" %
(logFile.filename(), logFile.filelineno(), line))
self.assertEqual(len(ret), 1, "Multiple regexs matched %r - %s:%i" %
(map(lambda x: x[0], ret),logFile.filename(), logFile.filelineno()))
# Verify timestamp and host as expected
failregex, host, fail2banTime, lines = ret[0]
self.assertEqual(host, faildata.get("host", None))
t = faildata.get("time", None)
try:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
except ValueError:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f")
jsonTime = time.mktime(jsonTimeLocal.timetuple())
                jsonTime += jsonTimeLocal.microsecond / 1000000
self.assertEqual(fail2banTime, jsonTime,
|
"UTC Time mismatch fail2ban %s (%s) != failJson %s (%s) (diff %.3f seconds) on: %s:%i %r:" %
(fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
fail2banTime - jsonTime, logFile.filename(), logFile.filelineno(), line ) )
regexsUsed.add(failregex)
for failRegexIndex, failRegex in enumerate(self.filter.getFailRegex()):
self.assertTrue(
failRegexIndex in regexsUsed,
"Regex for filter '%s' has no samples: %i: %r" %
(name, failRegexIndex, failRegex))
return testFilter
for basedir_, filter_ in (
(CONFIG_DIR, lambda x: not x.endswith('common.conf') and x.endswith('.conf')),
(TEST_CONFIG_DIR, lambda x: x.startswith('zzz-') and x.endswith('.conf')),
):
for filter_ in filter(filter_,
os.listdir(os.path.join(basedir_, "filter.d"))):
filterName = filter_.rpartition(".")[0]
if not filterName.startswith('.'):
setattr(
FilterSamplesRegex,
"testSampleRegexs%s" % filterName.upper(),
testSampleRegexsFactory(filterName, basedir_))
|
hugosenari/dbus_async
|
example/low_api_client.py
|
Python
|
isc
| 1,198
| 0.010017
|
import asyncio

import dbus_async
@asyncio.coroutine
def dbus_hello_client(bus):
# create message
helloWorld = Method("HelloWorld",
path="/remote/object/path",
iface="com.example.Sample",
bus=bus)
# wait reply
result = yield from helloWorld("Anon")
#do something with response
    if result:
        print(result)
@asyncio.coroutine
def dbus_hello_client_signal(bus):
# define signal
signal = Signal(
"HelloWorlded",
path="/remote/object/path",
iface="com.example.Sample"
bus=bus
)
# wait signal
result = yield from signal.wait()
#do something with response
    if result:
        print(result)
if __name__ == "__main__":
loop = asyncio.get_event_loop()
    bus = SessionBus()
coro = bus.start(dbus_hello_client, dbus_hello_client_signal)
con = loop.run_until_complete(coro)
# Stay connected until Ctrl+C is pressed
print('Connected on {}'.format(bus))
try:
loop.run_forever()
except KeyboardInterrupt:
|
pass
con.close()
loop.run_until_complete(con.wait_closed())
    loop.close()
|
agold/svgchart
|
lib/input/FileInput.py
|
Python
|
apache-2.0
| 333
| 0.042042
|
class FileInput:
"""Retrieves the contents of a file and passes them to InputSets."""
def __init__(self,inputSets,filename):
"""
@param inputSets: List of target InputSets
@param filename: Name of file to be scanned for input
"""
with open(filename,'rU') as f:
text = f.read()
for i in inputSets:
            i.input(text)
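if __name__ == "__main__":
    # Demonstration sketch (not part of the library): an InputSet only needs an
    # input(text) method, so a throwaway class is enough to exercise FileInput.
    import tempfile

    class EchoInputSet:
        def input(self, text):
            print("read %d characters" % len(text))

    with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as tmp:
        tmp.write("<svg></svg>")
    FileInput([EchoInputSet()], tmp.name)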
|
supercheetah/diceroller
|
pyinstaller/PyInstaller/hooks/hook-_tkinter.py
|
Python
|
artistic-2.0
| 5,503
| 0.001272
|
#
# Copyright (C) 2012, Martin Zibricky
# Copyright (C) 2011, Hartmut Goebel
# Copyright (C) 2005, Giovanni Bajo
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import sys
import PyInstaller.bindepend
from PyInstaller.compat import is_py24, is_win, is_darwin, is_unix, is_virtualenv
from PyInstaller.build import Tree
from PyInstaller.hooks.hookutils import exec_statement, logger
def _handle_broken_tk():
"""
Workaround for broken Tcl/Tk detection in virtualenv on Windows.
There is a bug in older versions of virtualenv in setting paths
to Tcl/Tk properly. PyInstaller running in virtualenv is then
not able to find Tcl/Tk.
This issue has been experienced in virtualenv with Python 2.4 on Win7.
https://github.com/pypa/virtualenv/issues/93
"""
if is_win and is_virtualenv and is_py24:
basedir = os.path.join(sys.real_prefix, 'tcl')
files = os.listdir(basedir)
v = os.environ
# Detect Tcl/Tk paths.
for f in files:
abs_path = os.path.join(basedir, f)
if f.startswith('tcl') and os.path.isdir(abs_path):
v['TCL_LIBRARY'] = abs_path
if f.startswith('tk') and os.path.isdir(abs_path):
v['TK_LIBRARY'] = abs_path
if f.startswith('tix') and os.path.isdir(abs_path):
v['TIX_LIBRARY'] = abs_path
def _find_tk_darwin_frameworks(binaries):
"""
Tcl and Tk are installed as Mac OS X Frameworks.
"""
tcl_root = tk_root = None
for nm, fnm in binaries:
if nm == 'Tcl':
tcl_root = os.path.join(os.path.dirname(fnm), 'Resources/Scripts')
if nm == 'Tk':
tk_root = os.path.join(os.path.dirname(fnm), 'Resources/Scripts')
return tcl_root, tk_root
def _find_tk_tclshell():
"""
Get paths to Tcl/Tk from the Tcl shell command 'info library'.
This command will return path to TCL_LIBRARY.
    On most systems the Tcl and Tk libraries are installed
    in the same prefix.
"""
tcl_root = tk_root = None
# Python code to get path to TCL_LIBRARY.
code = 'from Tkinter import Tcl; t = Tcl(); print t.eval("info library")'
tcl_root = exec_statement(code)
tk_version = exec_statement('from _tkinter import TK_VERSION as v; print v')
# TK_LIBRARY is in the same prefix as Tcl.
tk_root = os.path.join(os.path.dirname(tcl_root), 'tk%s' % tk_version)
return tcl_root, tk_root
def _find_tk(mod):
"""
Find paths with Tcl and Tk data files to be bundled by PyInstaller.
Return:
tcl_root path to Tcl data files.
tk_root path to Tk data files.
"""
bins = PyInstaller.bindepend.selectImports(mod.__file__)
if is_darwin:
# _tkinter depends on system Tcl/Tk frameworks.
if not bins:
# 'mod.binaries' can't be used because on Mac OS X _tkinter.so
# might depend on system Tcl/Tk frameworks and these are not
# included in 'mod.binaries'.
bins = PyInstaller.bindepend.getImports(mod.__file__)
# Reformat data structure from
# set(['lib1', 'lib2', 'lib3'])
# to
# [('Tcl', '/path/to/Tcl'), ('Tk', '/path/to/Tk')]
mapping = {}
for l in bins:
mapping[os.path.basename(l)] = l
bins = [
('Tcl', mapping['Tcl']),
('Tk', mapping['Tk']),
]
        # _tkinter depends on Tcl/Tk compiled as frameworks.
path_to_tcl = bins[0][1]
if 'Library/Frameworks' in path_to_tcl:
tcl_tk = _find_tk_darwin_frameworks(bins)
        # Tcl/Tk compiled as on Linux and other Unices.
# For example this is the case of Tcl/Tk from macports.
else:
tcl_tk = _find_tk_tclshell()
else:
tcl_tk = _find_tk_tclshell()
return tcl_tk
def _collect_tkfiles(mod):
|
# Workaround for broken Tcl/Tk detection in virtualenv on Windows.
_handle_broken_tk()
tcl_root, tk_root = _find_tk(mod)
tcldir = "tcl"
tkdir = "tk"
tcltree = Tree(tcl_root, os.path.join('_MEI', tcldir),
excludes=['demos', 'encoding', '*.lib', 'tclConfig.sh'])
tktree = Tree(tk_root, os.path.join('_MEI', tkdir),
excludes=['demos', 'encoding', '*.lib', 'tkConfig.sh'])
return (tcltree + tktree)
def hook(mod):
# If not supported platform, skip TCL/TK detection.
if not (is_win or is_darwin or is_unix):
logger.info("... skipping TCL/TK detection on this platform (%s)",
sys.platform)
return mod
# Get the Tcl/Tk data files for bundling with executable.
#try:
tk_files = _collect_tkfiles(mod)
mod.datas.extend(tk_files)
#except:
#logger.error("could not find TCL/TK")
return mod
|
climbus/RPiRobot
|
tests/test_robot.py
|
Python
|
mit
| 10,802
| 0.000833
|
import sys
import os
import unittest
import time
from mock import Mock, patch
sys.path.append(os.path.abspath("."))
sys.path.append(os.path.abspath("rpirobot"))
from rpirobot.robot import Robot
class TestRobot(unittest.TestCase):
"""Tests for Robot class."""
def setUp(self):
self.robot = Robot()
def test_set_led(self):
led = Mock()
self.robot.set_led(led)
self.assertEqual(self.robot.led, led)
def test_set_motors(self):
motor1 = Mock()
motor2 = Mock()
self.robot.set_motors(motor1, motor2)
self.assertEqual(self.robot.motors[0], motor1)
self.assertEqual(self.robot.motors[1], motor2)
def test_set_motor(self):
motor = Mock()
index = 1
self.robot.set_motor(index, motor)
self.assertEqual(self.robot.motors[1], motor)
def test_set_motor_out_of_bounds(self):
with self.assertRaises(IndexError):
self.robot.set_motor(2, None)
def test_set_button(self):
button = Mock()
self.robot.set_button(button)
self.assertEqual(self.robot.button, button)
def test_forward(self):
self.robot.change_status = Mock()
self.robot._stop_motors = Mock()
self._set_motors()
self.robot.forward()
self.assertTrue(self.robot._stop_motors.called)
self.assertTrue(self.robot.motors[0].forward.called)
self.assertTrue(self.robot.motors[1].forward.called)
def test_forward_default_speed(self):
self.robot.change_status = Mock()
self._set_motors()
self.robot.forward()
self.robot.motors[0].forward.called_with(self.robot.default_speed)
self.robot.motors[1].forward.called_with(self.robot.default_speed)
def test_forward_with_speed(self):
self.robot.change_status = Mock()
self._set_motors()
self.robot.forward(speed=50)
self.robot.motors[0].forward.called_with(50)
self.robot.motors[1].forward.called_with(self.robot.default_speed)
def test_robot_has_status(self):
self.assertIsNotNone(self.robot.status)
def test_robot_starts_with_status(self):
self.assertEqual(self.robot.status, -1)
    def test_robot_could_change_status(self):
self._set_motors()
self.robot.set_led(Mock())
self.robot.change_status(1)
self.assertEqual(self.robot.status, 1)
def test_led_changes_color_after_status_change(self):
self.robot.set_led(Mock())
self._set_motors()
self.robot.change_status(1)
self.assertTrue(self.robot.led.set_color.called)
self.assertTrue(self.robot.led.on.called)
def test_should_stop_on_status_minus_1(self):
self.robot.set_led(Mock())
self.robot._stop_motors = Mock()
        self._set_motors()
self.robot.change_status(-1)
self.assertTrue(self.robot._stop_motors.called)
def test_could_toggle_status(self):
self._set_motors()
self.robot.set_led(Mock())
self.robot.status = -1
self.robot.toggle_status()
self.assertEqual(self.robot.status, 1)
self.robot.toggle_status()
self.assertEqual(self.robot.status, -1)
def test_stop(self):
        self._set_motors()
self.robot.set_led(Mock())
self.robot._stop_motors = Mock()
self.robot.stop()
self.assertTrue(self.robot._stop_motors.called)
def test_left(self):
self.robot.set_led(Mock())
self._set_motors()
self.robot._stop_motors = Mock()
self.robot.left()
self.assertTrue(self.robot._stop_motors.called)
self.assertEqual(self.robot.motors[0].forward.call_count, 1)
self.assertEqual(self.robot.motors[1].forward.call_count, 0)
def test_right(self):
self.robot.set_led(Mock())
self._set_motors()
self.robot._stop_motors = Mock()
self.robot.right()
self.assertTrue(self.robot._stop_motors.called)
self.assertEqual(self.robot.motors[1].forward.call_count, 1)
self.assertEqual(self.robot.motors[0].forward.call_count, 0)
def test_left_default_speed(self):
self._set_motors()
self.robot.left()
self.robot.motors[0].forward.assert_called_with(self.robot.default_speed)
def test_left_with_speed(self):
self._set_motors()
self.robot.left(speed=50)
self.robot.motors[0].forward.assert_called_with(50)
def test_left_with_angle(self):
self._set_motors()
self.robot.stop = Mock()
self.robot.width = 13
self.robot.cps = 10
tm = time.time()
self.robot.left(angle=90)
self.assertEqual(round(time.time() - tm), 2)
def test_left_first_param_angle(self):
self._set_motors()
self.robot._go_for_distance = Mock()
self.robot.width = 13
self.robot.cps = 10
self.robot.left(50)
self.assertEqual(round(self.robot._go_for_distance.call_args[0][0]), 11)
def test_right_with_angle(self):
self._set_motors()
self.robot.stop = Mock()
self.robot.width = 13
self.robot.cps = 10
tm = time.time()
self.robot.right(angle=90)
self.assertEqual(round(time.time() - tm), 2)
def test_right_default_speed(self):
self._set_motors()
self.robot.right()
self.robot.motors[1].forward.assert_called_with(self.robot.default_speed)
def test_right_with_speed(self):
self._set_motors()
self.robot.right(speed=50)
self.robot.motors[1].forward.assert_called_with(50)
def test_right_first_param_angle(self):
self._set_motors()
self.robot._go_for_distance = Mock()
self.robot.width = 13
self.robot.cps = 10
self.robot.right(50)
self.assertEqual(round(self.robot._go_for_distance.call_args[0][0]), 11)
def test_forward_with_distance(self):
self.robot.change_status = Mock()
self._set_motors()
self.robot.cps = 10
tm = time.time()
self.robot.forward(distance=10)
self.assertEqual(round(time.time() - tm), 1)
def test_forward_with_small_distance(self):
self.robot.change_status = Mock()
self._set_motors()
self.robot.cps = 10
tm = time.time()
self.robot.forward(distance=5)
self.assertEqual(round(time.time() - tm, 1), 0.5)
def test_forward_first_param_distance(self):
self.robot.change_status = Mock()
self._set_motors()
self.robot._go_for_distance = Mock()
self.robot.forward(50)
self.robot._go_for_distance.assert_called_with(50)
def test_back(self):
self._set_motors()
self.robot._stop_motors = Mock()
self.robot.back()
self.assertTrue(self.robot._stop_motors.called)
self.assertTrue(self.robot.motors[0].backward.called)
self.assertTrue(self.robot.motors[1].backward.called)
def test_back_default_speed(self):
self.robot.change_status = Mock()
self._set_motors()
self.robot.back()
self.robot.motors[0].backward.assert_called_with(self.robot.default_speed)
self.robot.motors[1].backward.assert_called_with(self.robot.default_speed)
def test_back_with_speed(self):
self.robot.change_status = Mock()
self._set_motors()
self.robot.back(speed=50)
self.robot.motors[0].backward.assert_called_with(50)
self.robot.motors[1].backward.assert_called_with(50)
def test_back_with_distance(self):
self.robot.change_status = Mock()
self._set_motors()
self.robot.cps = 10
tm = time.time()
self.robot.back(distance=10)
self.assertEqual(round(time.time() - tm), 1)
def test_back_first_param_distance(self):
self.robot.change_status = Mock()
self._set_motors()
self.robot._go_for_distance = Mock()
self.robot.back(50)
self.robot._go_for_distance.assert_called_with(50)
def test_back_with_small_distance(self):
self.robot.change_status = Mock()
self._set_motors()
self
|
great-expectations/great_expectations
|
great_expectations/validator/metric_configuration.py
|
Python
|
apache-2.0
| 2,379
| 0
|
import json
from typing import Tuple
from great_expectations.core.id_dict import IDDict
class MetricConfiguration:
def __init__(
self,
metric_name: str,
metric_domain_kwargs: dict,
        metric_value_kwargs: dict = None,
metric_dependencies: dict = None,
):
self._metric_name = metric_name
if not isinstance(metric_domain_kwargs, IDDict):
metric_domain_kwargs = IDDict(metric_domain_kwargs)
self._metric_domain_kwargs = metric_domain_kwargs
if not isinstance(metric_value_kwargs, IDDict):
if metric_value_kwargs is None:
metric_value_kwargs = {}
metric_value_kwargs = IDDict(metric_value_kwargs)
self._metric_value_kwargs = metric_value_kwargs
if metric_dependencies is None:
metric_dependencies = {}
self._metric_dependencies = metric_dependencies
def __repr__(self):
return json.dumps(self.to_json_dict(), indent=2)
def __str__(self):
return self.__repr__()
@property
def metric_name(self):
return self._metric_name
@property
def metric_domain_kwargs(self):
return self._metric_domain_kwargs
@property
def metric_value_kwargs(self):
return self._metric_value_kwargs
@property
def metric_domain_kwargs_id(self):
return self.metric_domain_kwargs.to_id()
@property
def metric_value_kwargs_id(self):
return self.metric_value_kwargs.to_id()
@property
def metric_dependencies(self):
return self._metric_dependencies
@metric_dependencies.setter
def metric_dependencies(self, metric_dependencies):
self._metric_dependencies = metric_dependencies
@property
def id(self) -> Tuple[str, str, str]:
return (
self.metric_name,
self.metric_domain_kwargs_id,
self.metric_value_kwargs_id,
)
def to_json_dict(self) -> dict:
json_dict: dict = {
"metric_name": self.metric_name,
"metric_domain_kwargs": self.metric_domain_kwargs,
"metric_domain_kwargs_id": self.metric_domain_kwargs_id,
"metric_value_kwargs": self.metric_value_kwargs,
"metric_value_kwargs_id": self.metric_value_kwargs_id,
"id": self.id,
}
return json_dict
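# Minimal usage sketch (editorial, not from the original module); the metric
# name and domain kwargs below are illustrative values only.
if __name__ == "__main__":
    config = MetricConfiguration(
        metric_name="column.mean",
        metric_domain_kwargs={"column": "fare_amount"},
    )
    # id is (metric_name, domain kwargs id, value kwargs id) and can key a cache
    print(config.id)
    print(config.to_json_dict())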
|
borevitzlab/timestreamlib
|
scripts/tscli/playTimestream.py
|
Python
|
gpl-3.0
| 2,769
| 0.009029
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 28 17:49:08 2014
@author: chuong nguyen, chuong.v.nguyen@gmail.com
"""
from __future__ import absolute_import, division, print_function
import cv2
import logging
import timestream
import numpy as np
from timestream.parse import ts_parse_date
import docopt
import datetime
import os
CLI_OPTS = """
USAGE:
  playTimestream.py -i IN [-d DELAY] [--int INTERVAL] [-s START] [-e END] [--sh STARTHOUR] [--eh ENDHOUR] [-o OUT] [--byfiles]
OPTIONS:
-i IN Input timestream directory
-d DELAY Playing time delay [500 msecs]
--int INTERVAL Looping time interval [24*60*60 mins]
-s START Start date and time of looping
-e END End date and time of looping
--sh STARTHOUR Start time range of looping
--eh ENDHOUR End time range of looping
-o OUT Outputfolder
--byfiles Loop over by file [by time]
"""
opts = docopt.docopt(CLI_OPTS)
interval = 24*60*60 #None
start = None
end = None
start_hour = None
end_hour = None
delay = 500
inputRootPath = opts['-i']
if opts['-d']:
delay = int(opts['-d'])
if opts['--int']:
    interval = int(opts['--int'])
if opts['-s']:
start = ts_parse_date(opts['-s'])
if opts['-e']:
end = ts_parse_date(opts['-e'])
if opts['--sh']:
start_hour = datetime.time(int(opts['--sh']), 0, 0)
if opts['--eh']:
    end_hour = datetime.time(int(opts['--eh']), 0, 0)
if opts['-o']:
outputRootPath = opts['-o']
else:
outputRootPath = None
timestream.setup_module_logging(level=logging.INFO)
ts = timestream.TimeStream()
ts.load(inputRootPath)
windowName = 'image'
cv2.imshow(windowName, np.zeros([100,100], dtype = np.uint8))
cv2.moveWindow(windowName, 10,10)
if opts['--byfiles']:
iterator = ts.iter_by_files()
else:
iterator = ts.iter_by_timepoints(start=start, end=end, interval = interval,
start_hour = start_hour, end_hour = end_hour)
for img in iterator:
if img is None or img.pixels is None:
continue
if outputRootPath:
if not os.path.exists(outputRootPath):
os.makedirs(outputRootPath)
cv2.imwrite(os.path.join(outputRootPath, os.path.basename(img.path)), img.pixels)
# estimate a downscale factor to make image fit into a normal HD screen
scale = img.pixels.shape[0]//1000 + 1
imgResized = cv2.resize(img.pixels, (img.pixels.shape[1]//scale, img.pixels.shape[0]//scale))
    timestamp = timestream.parse.ts_format_date(img.datetime)
cv2.putText(imgResized, timestamp, (10,30), cv2.FONT_HERSHEY_SIMPLEX, 1, (0,0,255), thickness = 1)
cv2.imshow(windowName, imgResized[:,:,::-1])
k = cv2.waitKey(delay)
    if k == 1048603:
# escape key is pressed
break
cv2.destroyAllWindows()
|
monkeypants/MAVProxy
|
MAVProxy/modules/mavproxy_wp.py
|
Python
|
gpl-3.0
| 33,902
| 0.003186
|
#!/usr/bin/env python
'''waypoint command handling'''
import time, os, fnmatch, copy, platform
from pymavlink import mavutil, mavwp
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib import mp_util
if mp_util.has_wxpython:
from MAVProxy.modules.lib.mp_menu import *
class WPModule(mp_module.MPModule):
def __init__(self, mpstate):
super(WPModule, self).__init__(mpstate, "wp", "waypoint handling", public = True)
self.wp_op = None
self.wp_requested = {}
self.wp_received = {}
self.wp_save_filename = None
self.wploader_by_sysid = {}
self.loading_waypoints = False
self.loading_waypoint_lasttime = time.time()
self.last_waypoint = 0
self.wp_period = mavutil.periodic_event(0.5)
self.undo_wp = None
self.undo_type = None
self.undo_wp_idx = -1
self.wploader.expected_count = 0
self.add_command('wp', self.cmd_wp, 'waypoint management',
["<list|clear|move|remove|loop|set|undo|movemulti|changealt|param|status>",
"<load|update|save|savecsv|show> (FILENAME)"])
if self.continue_mode and self.logdir is not None:
waytxt = os.path.join(mpstate.status.logdir, 'way.txt')
if os.path.exists(waytxt):
self.wploader.load(waytxt)
print("Loaded waypoints from %s" % waytxt)
self.menu_added_console = False
self.menu_added_map = False
if mp_util.has_wxpython:
self.menu = MPMenuSubMenu('Mission',
items=[MPMenuItem('Editor', 'Editor', '# wp editor'),
MPMenuItem('Clear', 'Clear', '# wp clear'),
MPMenuItem('List', 'List', '# wp list'),
MPMenuItem('Load', 'Load', '# wp load ',
handler=MPMenuCallFileDialog(flags=('open',),
title='Mission Load',
wildcard='*.txt')),
MPMenuItem('Save', 'Save', '# wp save ',
handler=MPMenuCallFileDialog(flags=('save', 'overwrite_prompt'),
title='Mission Save',
wildcard='*.txt')),
MPMenuItem('Draw', 'Draw', '# wp draw ',
handler=MPMenuCallTextDialog(title='Mission Altitude (m)',
default=100)),
MPMenuItem('Undo', 'Undo', '# wp undo'),
MPMenuItem('Loop', 'Loop', '# wp loop'),
MPMenuItem('Add NoFly', 'Loop', '# wp noflyadd')])
@property
def wploader(self):
'''per-sysid wploader'''
if self.target_system not in self.wploader_by_sysid:
self.wploader_by_sysid[self.target_system] = mavwp.MAVWPLoader()
return self.wploader_by_sysid[self.target_system]
def missing_wps_to_request(self):
ret = []
tnow = time.time()
next_seq = self.wploader.count()
for i in range(5):
seq = next_seq+i
if seq+1 > self.wploader.expected_count:
continue
            if seq in self.wp_requested and tnow - self.wp_requested[seq] < 2:
continue
ret.append(seq)
return ret
def send_wp_requests(self, wps=None):
'''send some more WP requests'''
if wps is None:
wps = self.missing_wps_to_request()
tnow = time.time()
for seq in wps:
#print("REQUESTING %u/%u (%u)" % (seq, self.wploader.expected_count, i))
            self.wp_requested[seq] = tnow
self.master.waypoint_request_send(seq)
def wp_status(self):
'''show status of wp download'''
try:
print("Have %u of %u waypoints" % (self.wploader.count()+len(self.wp_received), self.wploader.expected_count))
except Exception:
print("Have %u waypoints" % (self.wploader.count()+len(self.wp_received)))
def mavlink_packet(self, m):
'''handle an incoming mavlink packet'''
mtype = m.get_type()
if mtype in ['WAYPOINT_COUNT','MISSION_COUNT']:
self.wploader.expected_count = m.count
if self.wp_op is None:
#self.console.error("No waypoint load started")
pass
else:
self.wploader.clear()
self.console.writeln("Requesting %u waypoints t=%s now=%s" % (m.count,
time.asctime(time.localtime(m._timestamp)),
time.asctime()))
self.send_wp_requests()
elif mtype in ['WAYPOINT', 'MISSION_ITEM'] and self.wp_op is not None:
if m.seq < self.wploader.count():
#print("DUPLICATE %u" % m.seq)
return
if m.seq+1 > self.wploader.expected_count:
self.console.writeln("Unexpected waypoint number %u - expected %u" % (m.seq, self.wploader.count()))
self.wp_received[m.seq] = m
next_seq = self.wploader.count()
while next_seq in self.wp_received:
m = self.wp_received.pop(next_seq)
self.wploader.add(m)
next_seq += 1
if self.wploader.count() != self.wploader.expected_count:
#print("m.seq=%u expected_count=%u" % (m.seq, self.wploader.expected_count))
self.send_wp_requests()
return
if self.wp_op == 'list':
for i in range(self.wploader.count()):
w = self.wploader.wp(i)
print("%u %u %.10f %.10f %f p1=%.1f p2=%.1f p3=%.1f p4=%.1f cur=%u auto=%u" % (
w.command, w.frame, w.x, w.y, w.z,
w.param1, w.param2, w.param3, w.param4,
w.current, w.autocontinue))
if self.logdir is not None:
fname = 'way.txt'
if m.get_srcSystem() != 1:
fname = 'way_%u.txt' % m.get_srcSystem()
waytxt = os.path.join(self.logdir, fname)
self.save_waypoints(waytxt)
print("Saved waypoints to %s" % waytxt)
self.loading_waypoints = False
elif self.wp_op == "save":
self.save_waypoints(self.wp_save_filename)
self.wp_op = None
self.wp_requested = {}
self.wp_received = {}
elif mtype in ["WAYPOINT_REQUEST", "MISSION_REQUEST"]:
self.process_waypoint_request(m, self.master)
elif mtype in ["WAYPOINT_CURRENT", "MISSION_CURRENT"]:
if m.seq != self.last_waypoint:
self.last_waypoint = m.seq
if self.settings.wpupdates:
self.say("waypoint %u" % m.seq,priority='message')
elif mtype == "MISSION_ITEM_REACHED":
wp = self.wploader.wp(m.seq)
if wp is None:
# should we spit out a warning?!
# self.say("No waypoints")
pass
else:
if wp.command == mavutil.mavlink.MAV_CMD_DO_LAND_START:
alt_offset = self.get_mav_param('ALT_OFFSET', 0)
if alt_offset > 0.005:
self.say("ALT OFFSET IS NOT ZERO passing DO_LAND_START")
def idle_task(self):
'''handle missing waypoints'''
if self.wp_period.trigger():
# cop
|
BansheeMediaPlayer/bockbuild
|
packages/libicu.py
|
Python
|
mit
| 156
| 0.070513
|
Package ('libicu', '55.1',
	source_dir_name = 'icu/source',
sources = [
		'http://download.icu-project.org/files/icu4c/%{version}/icu4c-55_1-src.tgz'
]
)
|
QuantumElephant/horton
|
scripts/horton-convert.py
|
Python
|
gpl-3.0
| 2,641
| 0.009087
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2017 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
import sys, argparse, os, numpy as np
from horton import __version__, IOData
# All, except underflows, is *not* fine.
np.seterr(divide='raise', over='raise', invalid='raise')
def parse_args():
parser = argparse.ArgumentParser(prog='horton-convert.py',
description='Convert between file formats supported in HORTON. This '
                    'only works if the input contains sufficient data for the '
'output')
parser.add_argument('-V', '--version', action='version',
version="%%(prog)s (HORTON version %s)" % __version__)
parser.add_argument('input',
help='The input file. Supported file types are: '
'*.h5 (HORTON\'s native format), '
'*.cif (Crystallographic Information File), '
'*.cp2k.out (Output from a CP2K atom computation), '
'*.cube (Gaussian cube file), '
'*.log (Gaussian log file), '
'*.fchk (Gaussian formatted checkpoint file), '
'*.molden.input (Molden wavefunction file), '
'*.mkl (Molekel wavefunction file), '
'*.wfn (Gaussian/GAMESS wavefunction file), '
'CHGCAR, LOCPOT or POSCAR (VASP files), '
'*.xyz (The XYZ format).')
parser.add_argument('output',
help='The output file. Supported file types are: '
'*.h5 (HORTON\'s native format), '
'*.cif (Crystallographic Information File), '
'*.cube (Gaussian cube file), '
'*.molden.input (Molden wavefunction file), '
'POSCAR (VASP files), '
'*.xyz (The XYZ format).')
return parser.parse_args()
def main():
args = parse_args()
mol = IOData.from_file(args.input)
mol.to_file(args.output)
if __name__ == '__main__':
main()
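# Hedged usage sketch (editorial, not part of the original script); the file
# names below are made up, but the extensions come from the format lists above.
#
#   horton-convert.py water.fchk water.molden.input
#   horton-convert.py CHGCAR density.cube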
|
rogerscristo/BotFWD
|
env/lib/python3.6/site-packages/telegram/contrib/__init__.py
|
Python
|
mit
| 49
| 0
|
from .botan import Botan
__all__ = ['Botan']
|
AutorestCI/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_11_01/models/virtual_network_peering.py
|
Python
|
mit
| 4,677
| 0.00171
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class VirtualNetworkPeering(SubResource):
"""Peerings in a virtual network resource.
:param id: Resource ID.
:type id: str
:param allow_virtual_network_access: Whether the VMs in the linked virtual
network space would be able to access all the VMs in local Virtual network
space.
:type allow_virtual_network_access: bool
:param allow_forwarded_traffic: Whether the forwarded traffic from the VMs
in the remote virtual network will be allowed/disallowed.
:type allow_forwarded_traffic: bool
:param allow_gateway_transit: If gateway links can be used in remote
virtual networking to link to this virtual network.
:type allow_gateway_transit: bool
:param use_remote_gateways: If remote gateways can be used on this virtual
network. If the flag is set to true, and allowGatewayTransit on remote
peering is also true, virtual network will use gateways of remote virtual
network for transit. Only one peering can have this flag set to true. This
flag cannot be set if virtual network already has a gateway.
:type use_remote_gateways: bool
:param remote_virtual_network: The reference of the remote virtual
network. The remote virtual network can be in the same or different region
(preview). See here to register for the preview and learn more
(https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-peering).
:type remote_virtual_network:
~azure.mgmt.network.v2017_11_01.models.SubResource
:param remote_address_space: The reference of the remote virtual network
address space.
:type remote_address_space:
~azure.mgmt.network.v2017_11_01.models.AddressSpace
:param peering_state: The status of the virtual network peering. Possible
values are 'Initiated', 'Connected', and 'Disconnected'. Possible values
include: 'Initiated', 'Connected', 'Disconnected'
:type peering_state: str or
~azure.mgmt.network.v2017_11_01.models.VirtualNetworkPeeringState
:param provisioning_state: The provisioning state of the resource.
:type provisioning_state: str
:param name: The name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'allow_virtual_network_access': {'key': 'properties.allowVirtualNetworkAccess', 'type': 'bool'},
'allow_forwarded_traffic': {'key': 'properties.allowForwardedTraffic', 'type': 'bool'},
'allow_gateway_transit': {'key': 'properties.allowGatewayTransit', 'type': 'bool'},
'use_remote_gateways': {'key': 'properties.useRemoteGateways', 'type': 'bool'},
'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'SubResource'},
'remote_address_space': {'key': 'properties.remoteAddressSpace', 'type': 'AddressSpace'},
'peering_state': {'key': 'properties.peeringState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, id=None, allow_virtual_network_access=None, allow_forwarded_traffic=None, allow_gateway_transit=None, use_remote_gateways=None, remote_virtual_network=None, remote_address_space=None, peering_state=None, provisioning_state=None, name=None, etag=None):
super(VirtualNetworkPeering, self).__init__(id=id)
self.allow_virtual_network_access = allow_virtual_network_access
self.allow_forwarded_traffic = allow_forwarded_traffic
self.allow_gateway_transit = allow_gateway_transit
self.use_remote_gateways = use_remote_gateways
self.remote_virtual_network = remote_virtual_network
self.remote_address_space = remote_address_space
self.peering_state = peering_state
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
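# Hedged construction sketch (editorial, not part of the generated model);
# every keyword is optional and the values shown are illustrative only.
#
#   peering = VirtualNetworkPeering(
#       allow_virtual_network_access=True,
#       allow_forwarded_traffic=False,
#       peering_state='Initiated',
#       name='hub-to-spoke',
#   )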
|
ktan2020/legacy-automation
|
win/Lib/site-packages/wx-3.0-msw/wx/lib/agw/fmresources.py
|
Python
|
mit
| 13,594
| 0.008092
|
import wx
# Overall menu styles
StyleDefault = 0
StyleXP = 1
Style2007 = 2
StyleVista = 3
# Menu shadows
RightShadow = 1 # Right side shadow
BottomShadow = 2 # Not full bottom shadow
BottomShadowFull = 4 # Full bottom shadow
# Button styles
BU_EXT_XP_STYLE = 1
BU_EXT_2007_STYLE = 2
BU_EXT_LEFT_ALIGN_STYLE = 4
BU_EXT_CENTER_ALIGN_STYLE = 8
BU_EXT_RIGHT_ALIGN_STYLE = 16
BU_EXT_RIGHT_TO_LEFT_STYLE = 32
# Control state
ControlPressed = 0
ControlFocus = 1
ControlDisabled = 2
ControlNormal = 3
# FlatMenu styles
FM_OPT_IS_LCD = 1
""" Use this style if your computer uses a LCD screen. """
FM_OPT_MINIBAR = 2
""" Use this if you plan to use the toolbar only. """
FM_OPT_SHOW_CUSTOMIZE = 4
""" Show "customize link" in the `More` menu, you will need to write your own handler. See demo. """
FM_OPT_SHOW_TOOLBAR = 8
""" Set this option is you are planning to use the toolbar. """
# Control status
ControlStatusNoFocus = 0
ControlStatusFocus = 1
ControlStatusPressed = 2
# HitTest constants
NoWhere = 0
MenuItem = 1
ToolbarItem = 2
DropDownArrowButton = 3
FTB_ITEM_TOOL = 0
FTB_ITEM_SEPARATOR = 1
FTB_ITEM_CHECK = 2
FTB_ITEM_RADIO = 3
FTB_ITEM_RADIO_MENU = 4
FTB_ITEM_CUSTOM = 5
LargeIcons = 32
SmallIcons = 16
MENU_HT_NONE = 0
MENU_HT_ITEM = 1
MENU_HT_SCROLL_UP = 2
MENU_HT_SCROLL_DOWN = 3
MENU_DEC_TOP = 0
MENU_DEC_BOTTOM = 1
MENU_DEC_LEFT = 2
MENU_DEC_RIGHT = 3
DROP_DOWN_ARROW_WIDTH = 16
SPACER = 12
MARGIN = 3
TOOLBAR_SPACER = 4
TOOLBAR_MARGIN = 4
SEPARATOR_WIDTH = 12
SCROLL_BTN_HEIGHT = 20
CS_DROPSHADOW = 0x00020000
INB_BOTTOM = 1
INB_LEFT = 2
INB_RIGHT = 4
INB_TOP = 8
INB_BORDER = 16
INB_SHOW_ONLY_TEXT = 32
INB_SHOW_ONLY_IMAGES = 64
INB_FIT_BUTTON = 128
INB_DRAW_SHADOW = 256
INB_USE_PIN_BUTTON = 512
INB_GRADIENT_BACKGROUND = 1024
INB_WEB_HILITE = 2048
INB_NO_RESIZE = 4096
INB_FIT_LABELTEXT = 8192
INB_BOLD_TAB_SELECTION = 16384
INB_DEFAULT_STYLE = INB_BORDER | INB_TOP | INB_USE_PIN_BUTTON
INB_TAB_AREA_BACKGROUND_COLOUR = 100
INB_ACTIVE_TAB_COLOUR = 101
INB_TABS_BORDER_COLOUR = 102
INB_TEXT_COLOUR = 103
INB_ACTIVE_TEXT_COLOUR = 104
INB_HILITE_TAB_COLOUR = 105
INB_LABEL_BOOK_DEFAULT = INB_DRAW_SHADOW | INB_BORDER | INB_USE_PIN_BUTTON | INB_LEFT
# HitTest results
IMG_OVER_IMG = 0
IMG_OVER_PIN = 1
IMG_OVER_EW_BORDER = 2
IMG_NONE = 3
# Pin button states
INB_PIN_NONE = 0
INB_PIN_HOVER = 200
INB_PIN_PRESSED = 201
# Windows Vista Colours
rgbSelectOuter = wx.Colour(170, 200, 245)
rgbSelectInner = wx.Colour(230, 250, 250)
rgbSelectTop = wx.Colour(210, 240, 250)
rgbSelectBottom = wx.Colour(185, 215, 250)
check_mark_xpm = [" 16 16 16 1",
"` c #000000",
". c #800000",
"# c #008000",
"a c #808000",
"b c #000080",
"c c #800080",
"d c #008080",
"e c #808080",
"f c #c0c0c0",
"g c #ff0000",
"h c #00ff00",
"i c #ffff00",
"j c #0000ff",
"k c #ff00ff",
"l c #00ffff",
"m c #ffffff",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmm`mmmmm",
"mmmmmmmmm``mmmmm",
"mmmm`mmm```mmmmm",
"mmmm``m```mmmmmm",
"mmmm`````mmmmmmm",
"mmmmm```mmmmmmmm",
"mmmmmm`mmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm"
]
radio_item_xpm = [" 16 16 16 1",
"` c #000000",
". c #800000",
"# c #008000",
"a c #808000",
"b c #000080",
"c c #800080",
"d c #008080",
"e c #808080",
"f c #c0c0c0",
"g c #ff0000",
"h c #00ff00",
"i c #ffff00",
"j c #0000ff",
"k c #ff00ff",
"l c #00ffff",
"m c #ffffff",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmm```mmmmmmm",
"mmmmm`````mmmmmm",
"mmmmm`````mmmmmm",
"mmmmmm```mmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm",
|
"mmmmmmmmmmmmmmmm",
"mmmmmmmmmmmmmmmm"]
menu_right_arrow_xpm = [
" 16 16 8 1",
"` c #ffffff",
". c #000000",
"# c #000000",
"a c #000000",
"b c #000000",
"c c #000000",
"d c #000000",
"e c #000000",
"````````````````",
"````````````````",
"````````````````",
"````````````````",
"````````````````",
"``````.`````````",
"``````..````````",
"``````...```````",
"``````....``````",
"``````...```````",
"``````..````````",
"``````.`````````",
"````````````````",
"````````````````",
"````````````````",
"````````````````"
]
#----------------------------------
# Shadow images
#----------------------------------
shadow_right_xpm = ["5 5 1 1"," c Black"," "," "," "," "," "]
# shadow_right.xpm 5x5
shadow_right_alpha = [168, 145, 115, 76, 46, 168, 145, 115, 76, 46, 168, 145, 115, 76, 46,
168, 145, 115, 76, 46, 168, 145, 115, 76, 46]
shadow_right_top_xpm = ["5 10 1 1"," c Black"," "," "," "," ",
" "," "," "," "," "," "]
shadow_right_top_alpha = [40, 35, 28, 18, 11, 67, 58, 46, 31, 18, 101, 87, 69, 46, 28,
128, 110, 87, 58, 35, 148, 128, 101, 67, 40, 161, 139, 110, 73, 44,
168, 145, 115, 76, 46, 168, 145, 115, 76, 46, 168, 145, 115, 76, 46,
168, 145, 115, 76, 46]
# shadow_buttom.xpm 5x5
shadow_bottom_alpha = [184, 184, 184, 184, 184, 168, 168, 168, 168, 168, 145, 145, 145, 145, 145,
115, 115, 115, 115, 115, 76, 76, 76, 76, 76]
shadow_bottom_left_xpm = ["10 5 1 1"," c Black"," "," ",
" "," "," "]
shadow_bottom_left_alpha = [22, 44, 73, 110, 139, 161, 176, 184, 184, 184,
20, 40, 67, 101, 128, 148, 161, 168, 168, 168,
17, 35, 58, 87, 110, 128, 139, 145, 145, 145,
13, 28, 46, 69, 87, 101, 110, 115, 115, 115,
9, 18, 31, 46, 58, 67, 73, 76, 76, 76]
shadow_center_xpm = ["5 5 1 1"," c Black"," "," "," "," "," "]
shadow_center_alpha = [161, 139, 110, 73, 44, 148, 128, 101, 67, 40,
128, 110, 87, 58, 35, 101, 87, 69, 46, 28,
67, 58, 46, 31, 18]
shadow_bottom_xpm = ["5 5 1 1"," c Black"," "," "," "," "," "]
arrow_down_xpm = ["16 16 3 1",
". c Black",
"X c #FFFFFF",
" c #008080",
" ",
" ",
" ",
" ",
" ....... ",
" XXXXXXX ",
|
Antonio-Team/enigma2
|
lib/python/Screens/FactoryReset.py
|
Python
|
gpl-2.0
| 1,088
| 0.020221
|
from Screens.MessageBox import MessageBox
from Screens.ParentalControlSetup import ProtectedScreen
from Components.config import config
class FactoryReset(MessageBox, ProtectedScreen):
def __init__(self, session):
MessageBox.__init__(self, session, _("Factory reset will restore your receiver to its default configuration. "
"All user data including system settings, tuner configuration, bouquets, services and plugins will be DELETED. "
"Recordings and other files stored on HDD and USB media will remain intact. "
"After completion, the system will restart automatically!\n\n"
"Do you really want to proceed?"), MessageBox.TYPE_YESN
|
O, default=False)
self.skinName = "MessageBox"
ProtectedScreen.__init__(self)
def isProtected(self):
return config.ParentalControl.setuppinactive.value and\
(not config.ParentalControl.config_sections.main_menu.value and not config.ParentalControl.config_sections.configuration.value or hasattr(self.session, 'infobar') and self.session.infobar is None) and\
			config.ParentalControl.config_sections.manufacturer_reset.value
|
liquidinstruments/pymoku
|
pymoku/_frequency_response_analyzer.py
|
Python
|
mit
| 22,648
| 0
|
import math
import logging
from pymoku._instrument import MokuInstrument
from pymoku._instrument import to_reg_unsigned
from pymoku._instrument import from_reg_unsigned
from pymoku._instrument import to_reg_signed
from pymoku._instrument import from_reg_signed
from pymoku._instrument import to_reg_bool
from pymoku._instrument import from_reg_bool
from pymoku._instrument import needs_commit
from pymoku._instrument import SWEEP
from pymoku._instrument import FULL_FRAME
from pymoku._instrument import RDR_DDS
from pymoku._instrument import ValueOutOfRangeException
from pymoku import _frame_instrument
from pymoku import _utils
from ._frequency_response_analyzer_data import FRAData
log = logging.getLogger(__name__)
REG_FRA_SWEEP_FREQ_MIN_L = 64
REG_FRA_SWEEP_FREQ_MIN_H = 65
REG_FRA_SWEEP_FREQ_DELTA_L = 66
REG_FRA_SWEEP_FREQ_DELTA_H = 67
REG_FRA_LOG_EN = 68
REG_FRA_HOLD_OFF_L = 69
REG_FRA_SWEEP_LENGTH = 71
REG_FRA_AVERAGE_TIME = 72
REG_FRA_ENABLES = 73
REG_FRA_SWEEP_AMP_MULT = 74
REG_FRA_SETTLE_CYCLES = 76
REG_FRA_AVERAGE_CYCLES = 77
REG_FRA_SWEEP_OFF_MULT = 78
REG_FRA_PHASE_OFF_CH1_LSB = 79
REG_FRA_PHASE_OFF_CH1_MSB = 80
REG_FRA_HARMONIC_MULT_CH1 = 81
REG_FRA_PHASE_OFF_CH2_LSB = 82
REG_FRA_PHASE_OFF_CH2_MSB = 83
REG_FRA_HARMONIC_MULT_CH2 = 84
_FRA_FPGA_CLOCK = 125e6
_FRA_DAC_SMPS = 1e9
_FRA_DAC_VRANGE = 1
_FRA_DAC_BITDEPTH = 2**16
_FRA_DAC_BITS2V = _FRA_DAC_BITDEPTH / _FRA_DAC_VRANGE
_FRA_SCREEN_WIDTH = 1024
_FRA_FREQ_SCALE = 2**48 / _FRA_DAC_SMPS
_FRA_FXP_SCALE = 2.0**30
class FrequencyResponseAnalyzer(_frame_instrument.FrameBasedInstrument):
""" Frequency Response Analyzer instrument object.
This should be instantiated and attached to a :any:`Moku` instance.
"""
def __init__(self):
super(FrequencyResponseAnalyzer, self).__init__()
self._register_accessors(_fra_reg_handlers)
self.scales = {}
self._set_frame_class(FRAData, instrument=self, scales=self.scales)
self.id = 9
self.type = "frequency_response_analyzer"
self.sweep_amp_volts_ch1 = 0
self.sweep_amp_volts_ch2 = 0
def _calculate_sweep_delta(self, start_frequency, end_frequency,
sweep_length, log_scale):
start_frequency = float(start_frequency)
end_frequency = float(end_frequency)
if log_scale:
sweep_freq_delta = round(((end_frequency / start_frequency) ** (
1.0 / (sweep_length - 1)) - 1) * _FRA_FXP_SCALE)
else:
sweep_freq_delta = round(((end_frequency - start_frequency) / (
sweep_length - 1)) * _FRA_FREQ_SCALE)
return sweep_freq_delta
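    # Worked sketch (editorial, not from the original source): a linear sweep
    # from 100 Hz to 120 MHz over 512 points advances by roughly
    # (120e6 - 100) / 511 ~= 235 kHz per point, which _calculate_sweep_delta
    # then scales by _FRA_FREQ_SCALE into 48-bit phase-accumulator units.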
def _calculate_freq_axis(self):
# Generates the frequency vector for plotting.
f_start = self.sweep_freq_min
fs = []
if self.log_en:
# Delta register becomes a multiplier in the logarithmic case
# Fixed-point precision is used in the FPGA multiplier
# (30 fractional bits)
fs = [f_start * (1 + (self.sweep_freq_delta / _FRA_FXP_SCALE)) ** n
for n in range(self.sweep_length)]
else:
fs = [(f_start + n * (self.sweep_freq_delta / _FRA_FREQ_SCALE)) for
n in range(self.sweep_length)]
return fs
def _calculate_gain_correction(self, fs):
sweep_freq = fs
cycles_time = [0.0] * self.sweep_length
if all(sweep_freq):
cycles_time = [self.averaging_cycles / sweep_freq[n]
for n in range(self.sweep_length)]
points_per_freq = [math.ceil(a * max(self.averaging_time, b) - 1e-12)
for (a, b) in zip(sweep_freq, cycles_time)]
average_gain = [0.0] * self.sweep_length
gain_scale = [0.0] * self.sweep_length
# Calculate gain scaling due to accumulator bit ranging
for f in range(self.sweep_length):
sweep_period = 1 / sweep_freq[f]
# Predict how many FPGA clock cycles each frequency averages for:
average_period_cycles = self.averaging_cycles * sweep_period \
* _FRA_FPGA_CLOCK
if self.averaging_time % sweep_period == 0:
average_period_time = self.averaging_time * _FRA_FPGA_CLOCK
else:
average_period_time = math.ceil(
self.averaging_time / sweep_period) * sweep_period * (
_FRA_FPGA_CLOCK)
if average_period_time >= average_period_cycles:
average_period = average_period_time
else:
average_period = average_period_cycles
# Scale according to the predicted accumulator counter size:
if average_period <= 2**15:
average_gain[f] = 2**4
elif average_period <= 2**20:
average_gain[f] = 2**-1
elif average_period <= 2**25:
average_gain[f] = 2**-6
elif average_period <= 2**30:
average_gain[f] = 2**-11
elif average_period <= 2**35:
average_gain[f] = 2**-16
else:
average_gain[f] = 2**-20
for f in range(self.sweep_length):
if sweep_freq[f] > 0.0:
gain_scale[f] = math.ceil(
average_gain[f] * points_per_freq[f] * _FRA_FPGA_CLOCK / (
sweep_freq[f]))
else:
gain_scale[f] = average_gain[f]
return gain_scale
@needs_commit
def set_input_range(self, ch, input_range):
"""Set the input range for a channel.
:type ch: int; {1,2}
:param ch: channel
:type input_range: {1, 10}
:param input_range: the peak to peak voltage (Vpp) range of the inputs.
"""
_utils.check_parameter_valid('set', ch, [1, 2],
'input channel', allow_none=True)
_utils.check_parameter_valid('set', input_range, [1, 10],
'input range', allow_none=False)
front_end_setting = self.get_frontend(ch)
self.set_frontend(ch, fiftyr=front_end_setting[0],
atten=(input_range == 10), ac=front_end_setting[2])
def _calculate_scales(self):
g1, g2 = self._adc_gains()
fs = self._calculate_freq_axis()
gs = self._calculate_gain_correction(fs)
return {'g1': g1,
'g2': g2,
'gain_correction': gs,
'frequency_axis': fs,
'sweep_freq_min': self.sweep_freq_min,
'sweep_freq_delta': self.sweep_freq_delta,
'sweep_length': self.sweep_length,
'log_en': self.log_en,
'averaging_time': self.averaging_time,
'sweep_amplitude_ch1': self.sweep_amp_volts_ch1,
'sweep_amplitude_ch2': self.sweep_amp_volts_ch2
}
@needs_commit
def set_sweep(self, f_start=100, f_end=120e6, sweep_points=512,
sweep_log=False, averaging_time=1e-3, settling_time=1e-3,
averaging_cycles=1, settling_cycles=1):
""" Set the output sweep parameters
:type f_start: int; 1 <= f_start <= 120e6 Hz
:param f_start: Sweep start frequency
:type f_end: int; 1 <= f_end <= 120e6 Hz
:param f_end: Sweep end frequency
:type sweep_points: int; 32 <= sweep_points <= 512
:param sweep_points: Number of points in the sweep (rounded to nearest
power of 2).
:type sweep_log: bool
:param sweep_log: Enable logarithmic frequency sweep scale.
:type averaging_time: float; sec
:param averaging_time: Minimum averaging time per sweep point.
:type settling_time: float; sec
        :param settling_time: Minimum settling time per sweep point.
:type averaging_cycles: int; cycles
:param averaging_cycles: Minimum averaging cycles per sweep point.
:type settling_cycles: int; cycles
:param settling_cycles: Minimum settling cycles per sweep point.
"""
|
iulian787/spack
|
var/spack/repos/builtin/packages/r-dynamictreecut/package.py
|
Python
|
lgpl-2.1
| 744
| 0.005376
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RDynamictreecut(RPackage):
"""dynamicTreeCut: Methods for Detection of Clusters in Hierarchical
Clustering Dendrograms"""
homepage = "https://cloud.r-project.org/package=dynamicTreeCut"
url = "https://cloud.r-project.org/src/contrib/dynamicTreeCut_1.63-1.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/dynamicTreeCut/"
version('1.63-1', sha256='831307f64eddd68dcf01bbe2963be99e5cde65a636a13ce9de229777285e4db9')
|
depends_on('r@2.3.0:', type=('build', 'run'))
|
mayson/viperpeers
|
viperpeers.py
|
Python
|
gpl-2.0
| 45,342
| 0.047087
|
#!/usr/bin/env python
# vim:fileencoding=utf-8
# Find the best reactor
reactorchoices = ["epollreactor", "kqreactor", "cfreactor", "pollreactor", "selectreactor", "posixbase", "default"]
for choice in reactorchoices:
try:
exec("from twisted.internet import %s as bestreactor" % choice)
break
except:
pass
bestreactor.install()
#from twisted.application import internet, service
from twisted.internet import reactor
from twisted.protocols import basic, policies
import yaml
import socket
import select
import re
import logging
import sys
import signal
import os
import traceback
import codecs
import time
import resource
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
#logging.getLogger().addHandler()
resource.setrlimit(resource.RLIMIT_NOFILE, [32768,65536])
trace=None
if 'format_exc' in dir(traceback):
from traceback import format_exc as trace
else:
from traceback import print_exc as trace
reload(sys)
def lock2key (lock):
key = {}
for i in xrange(1, len(lock)):
key[i] = ord(lock[i]) ^ ord(lock[i-1])
key[0] = ord(lock[0]) ^ ord(lock[len(lock)-1]) ^ ord(lock[len(lock)-2]) ^ 5
for i in xrange(0, len(lock)):
key[i] = ((key[i]<<4) & 240) | ((key[i]>>4) & 15)
out = ''
for i in xrange(0, len(lock)):
out += unichr(key[i])
out = out.replace(u'\0', u'/%DCN000%/').replace(u'\5', u'/%DCN005%/').replace(u'\44', u'/%DCN036%/')
out = out.replace(u'\140', u'/%DCN096%/').replace(u'\174', u'/%DCN124%/').replace(u'\176', u'/%DCN126%/')
return out
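# Hedged usage sketch (editorial): derive the NMDC $Key reply for a received
# $Lock string; the lock value below is illustrative only.
#   key = lock2key(u'EXTENDEDPROTOCOL_sample Pk=whatever')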
def number_to_human_size(size, precision=1):
"""
Returns a formatted-for-humans file size.
``precision``
The level of precision, defaults to 1
Examples::
>>> number_to_human_size(123)
'123 Bytes'
>>> number_to_human_size(1234)
'1.2 KB'
>>> number_to_human_size(12345)
'12.1 KB'
>>> number_to_human_size(1234567)
'1.2 MB'
>>> number_to_human_size(1234567890)
'1.1 GB'
>>> number_to_human_size(1234567890123)
'1.1 TB'
>>> number_to_human_size(1234567, 2)
'1.18 MB'
"""
if size == 1:
return "1 Byte"
elif size < 1024:
return "%d Bytes" % size
elif size < (1024**2):
return ("%%.%if KB" % precision) % (size / 1024.00)
elif size < (1024**3):
return ("%%.%if MB" % precision) % (size / 1024.00**2)
elif size < (1024**4):
return ("%%.%if GB" % precision) % (size / 1024.00**3)
elif size < (1024**5):
return ("%%.%if TB" % precision) % (size / 1024.00**4)
return ""
class DCUser:
recp={}
recp['tag']=re.compile('[<](.*)[>]$')
recp['slots']=re.compile('S:(\d*)')
recp['hubs']=re.compile('H:([0-9/]*)')
def __init__(self,myinfo="",descr=None,addr=None):
self.nick = ''
self.connection = ''
self.flag = ''
self.mail = ''
self.share = 0
self.descr = None
self.MyINFO = None
self.level = 0
self.tag = ''
self.slots = 0
self.hubs = 0
self.sum_hubs = 0
if len( myinfo )>0:
self.upInfo( myinfo )
self.descr = descr
self.addr = addr
def upInfo(self,myinfo):
self.MyINFO = myinfo
ar = myinfo.split("$")
ar2 = ar[2].split(" ",2)
self.nick = ar2[1]
self.description = ar2[2]
self.connection = ar[4][0:-1]
self.flag = ar[4][-1]
self.mail = ar[5]
self.share = int( ar[6] )
# Parsing TAG
tag = self.recp['tag'].search( self.description )
if self.tag != None:
self.tag=tag.group( 1 )
slots = self.recp['slots'].search( self.tag )
if slots != None:
self.slots = int( slots.group( 1 ) )
hubs = self.recp['hubs'].search( self.tag )
if hubs != None:
self.hubs = hubs.group( 1 )
try:
self.sum_hubs=self.get_sum_hubs()
except:
logging.warning( 'WRONG TAG: %s' % tag )
def get_ip( self ):
return self.addr.split(':')[0]
def get_sum_hubs( self ):
s=0
for i in self.hubs.split('/'):
s=s+int( i )
return s
class DCHub( policies.ServerFactory ):
# CONSTANTS
LOCK='EXTENDEDPROTOCOL_VIPERHUB Pk=versionHidden'
SUPPORTS='OpPlus NoGetINFO NoHello UserIP UserIP2'
def _(self,string): # Translate function
return self.lang.get(string,string)
def tUCR( self, req ):
'''translate and make usercmmand request %[line:req:] '''
return '%%[line:%s:]' % self._( req )
def UC( self, menu, params ):
'''make UserCommands'''
return '$UserCommand 1 2 %s %s %s%s||' % ( menu, '$<%[mynick]>', self.core_settings['cmdsymbol'], ' '.join( params ) )
def Gen_UC( self ):
self.usercommands={}
# -- CORE USERCOMMANDS --
self.usercommands['Quit'] = self.UC( self._('Core\\Quit'), ['Quit'] )
self.usercommands['Save'] = self.UC( self._('Settings\\Save settings'), ['Save'] )
self.usercommands['SetTopic'] = self.UC( self._('Settings\\Set hub topic'), ['SetTopic', self.tUCR('New Topic')] )
self.usercommands['Help'] = self.UC( self._('Help'), ['Help'] )
self.usercommands['RegenMenu'] = self.UC( self._( 'Core\\Regenerate menu' ), ['RegenMenu'] )
self.usercommands['ReloadSettings'] = self.UC( self._( 'Core\\Reload settings (DANGEROUS)' ), ['ReloadSettings'] )
# -- settings get/set
self.usercommands['Get'] = self.UC( self._('Settings\\List settings files'), ['Get'] )
self.usercommands['Set'] = self.UC( self._('Settings\\Set variable'), ['Set', self.tUCR( 'File' ), self.tUCR( 'Variable' ), self.tUCR( 'New Value' )] )
# -- Limits control
self.usercommands['Set'] += self.UC( self._('Settings\\Limits\\Set max users'), ['Set core max_users', self.tUCR( 'New max users' )] )
self.usercommands['Set'] += self.UC( self._('Settings\\Limits\\Set min share'), ['Set core min_share', self.tUCR( 'New min share (in bytes)' )] )
		self.usercommands['Set'] += self.UC( self._('Settings\\Limits\\Set max hubs'), ['Set core max_hubs', self.tUCR( 'New max hubs' )] )
		self.usercommands['Set'] += self.UC( self._('Settings\\Limits\\Set min slots'), ['Set core min_slots', self.tUCR( 'New min slots' )] )
# -- User control
self.usercommands['AddReg'] = ''
self.usercommands['SetLevel'] = ''
for i in self.settings['privlist'].keys():
self.usercommands['AddReg'] += self.UC( self._( 'Users\\Selected\\Register selected nick as\\%s' ) % i, ['AddReg %[nick]', i, self.tUCR( 'Password' )] )
self.usercommands['AddReg'] += self.UC( self._( 'Users\\Register nick...' ), ['AddReg', self.tUCR( 'nick' ), self.tUCR( 'level' ), self.tUCR( 'Password' )] )
self.usercommands['ListReg'] = self.UC( self._( 'Users\\List registred nicks' ), ['ListReg'] )
self.usercommands['DelReg'] = self.UC( self._( 'Users\\Selected\\Unreg selected nick' ), ['DelReg %[nick]'] )
self.usercommands['DelReg'] += self.UC( self._( 'Users\\Unreg nick...' ), ['DelReg', self.tUCR('Nick')] )
for i in self.settings['privlist'].keys():
self.usercommands['SetLevel'] += self.UC( self._( 'Users\\Selected\\Set level for selected nick\\%s' ) % i, ['SetLevel %[nick]', i] )
self.usercommands['PasswdTo'] = self.UC( self._( 'Users\\Selected\\Set password for selected nick...' ), ['PasswdTo %[nick]', self.tUCR('new password')] )
self.usercommands['Kick'] = self.UC( self._( 'Kick selected nick...' ), ['Kick %[nick]', self.tUCR( 'reason (may be empty)' )] )
self.usercommands['UI'] = self.UC( self._( 'Users\\Selected\\User Info' ), ['UI %[nick]'] )
# -- Plugin control
#self.usercommands['ListPlugins'] = self.UC( self._( 'Plugins\\List aviable plugins' ), ['ListPlugins'] )
#self.usercommands['ActivePlugins'] = self.UC( self._( 'Plugins\\List active plugins' ), ['ListPlugins'] )
menu = self._( 'Plugins\\Load/Reload Plugin\\' )
menuU = self._( 'Plugins\\Unload Plugin\\' )
loaded = self._( '(loaded)' )
aplugs = self.get_aviable_plugins()
self.usercommands['ReloadPlugin'] = ''
self.usercommands['LoadPlugin'] = ''
self.usercommands['UnloadPlugin'] = ''
for i in aplugs:
if i in self.plugs:
self.usercommands['ReloadPlugin'] += self.UC( menu + i + ' ' + loaded, ['ReloadPlugin', i] )
else:
self.usercommands['LoadPlugin'] += self.UC( menu + i, ['LoadPlugin', i] )
for i in self.plugs.keys():
self.usercommands['UnloadPlugin'] += self.UC( menuU + i, ['UnloadPlugin', i] )
#self.usercommands['ListPlugins']='$UserCommand 1 2 '+self._('Plugins\\List aviable plugins')+'$<%[mynick]> '+self.c
|
henzk/ape
|
ape/container_mode/tasks.py
|
Python
|
mit
| 12,347
| 0.002349
|
from __future__ import unicode_literals, print_function
import os
import sys
import subprocess
import json
from ape import feaquencer
from ape import tasks
from .exceptions import ContainerError, ContainerNotFound, ProductNotFound
class Config(object):
APE_ROOT = os.environ['APE_ROOT_DIR']
SOURCE_HEADER = '#please execute the following in your shell:\n'
introduce_conf = Config()
@tasks.register_helper
def get_container_dir(container_name):
return tasks.conf.APE_ROOT + '/' + container_name
@tasks.register_helper
def get_product_dir(container_name, product_name):
return tasks.get_container_dir(container_name) + '/products/' + product_name
@tasks.register_helper
def get_containers():
entries = os.listdir(tasks.conf.APE_ROOT)
containers = []
for entry in entries:
if os.path.isdir(tasks.get_container_dir(entry) + '/products'):
containers.append(entry)
return containers
@tasks.register_helper
def get_products(container_name):
products_dir = tasks.get_container_dir(container_name) + '/products'
if not os.path.isdir(products_dir):
return []
products = os.listdir(products_dir)
def is_product(p):
return not p.startswith('.') and not p.startswith('_')
return [p for p in products if is_product(p)]
@tasks.register
def info():
"""
List information about this productive environment
:return:
"""
print()
print('root directory :', tasks.conf.APE_ROOT)
print()
print('active container :', os.environ.get('CONTAINER_NAME', ''))
print()
print('active product :', os.environ.get('PRODUCT_NAME', ''))
print()
print('ape feature selection :', tasks.FEATURE_SELECTION)
print()
print('containers and products:')
print('-' * 30)
print()
for container_name in tasks.get_containers():
print(container_name)
for product_name in tasks.get_products(container_name):
print(' ' + product_name)
print()
@tasks.register
def cd(doi):
"""
    cd to the directory of interest (doi)
a doi can be:
herbert - the container named "herbert"
    sdox:dev - the product "dev" located in container "sdox"
:param doi:
:return:
"""
parts = doi.split(':')
if len(parts) == 2:
container_name, product_name = parts[0], parts[1]
elif len(parts) == 1 and os.environ.get('CONTAINER_NAME'):
# interpret poi as product name if already zapped into a product in order
# to enable simply switching products by doing ape zap prod.
product_name = parts[0]
container_name = os.environ.get('CONTAINER_NAME')
else:
print('unable to parse context - format: <container_name>:<product_name>')
sys.exit(1)
if container_name not in tasks.get_containers():
print('No such container')
else:
if product_name:
if product_name not in tasks.get_products(container_name):
print('No such product')
else:
print(tasks.conf.SOURCE_HEADER)
print('cd ' + tasks.get_product_dir(container_name, product_name))
else:
print(tasks.conf.SOURCE_HEADER)
print('cd ' + tasks.get_container_dir(container_name))
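# Hypothetical shell-side sketch (editorial): assuming an "ape" launcher that
# sources the printed output, the cd task behaves roughly like
#   ape cd herbert    -> cd <APE_ROOT>/herbert
#   ape cd sdox:dev   -> cd <APE_ROOT>/sdox/products/dev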
SWITCH_TEMPLATE = '''{source_header}
export CONTAINER_NAME={container_name}
export PRODUCT_NAME={product_name}
update_ape_env
'''
@tasks.register
def switch(poi):
"""
    Zaps into a specific product: switches the context to the product of interest (poi).
A poi is:
sdox:dev - for product "dev" located in container "sdox"
If poi does not contain a ":" it is interpreted as product name implying that a product within this
container is already active. So if this task is called with ape zap prod (and the corresponding container is
    already zapped in), then only the product is switched.
    After the context has been switched to sdox:dev additional commands may be available
that are relevant to sdox:dev
:param poi: product of interest, string: <container_name>:<product_name> or <product_name>.
"""
parts = poi.split(':')
if len(parts) == 2:
container_name, product_name = parts
elif len(parts) == 1 and os.environ.get('CONTAINER_NAME'):
        # interpret poi as product name if already zapped into a product in order
# to enable simply switching products by doing ape zap prod.
container_name = os.environ.get('CONTAINER_NAME')
product_name = parts[0]
else:
print('unable to find poi: ', poi)
sys.exit(1)
if container_name not in tasks.get_containers():
raise ContainerNotFound('No such container %s' % container_name)
elif product_name not in tasks.get_products(container_name):
raise ProductNotFound('No such product %s' % product_name)
else:
print(SWITCH_TEMPLATE.format(
source_header=tasks.conf.SOURCE_HEADER,
container_name=container_name,
product_name=product_name
))
@tasks.register
def teleport(poi):
"""
switch and cd in one operation
:param poi:
:return:
"""
tasks.switch(poi)
tasks.cd(poi)
@tasks.register
def zap(poi):
'''alias for "teleport"'''
tasks.teleport(poi)
@tasks.register
def install_container(container_name):
"""
Installs the container specified by container_name
:param container_name: string, name of the container
"""
container_dir = os.path.join(os.environ['APE_ROOT_DIR'], container_name)
if os.path.exists(container_dir):
os.environ['CONTAINER_DIR'] = container_dir
else:
raise ContainerNotFound('ERROR: container directory not found: %s' % container_dir)
install_script = os.path.join(container_dir, 'install.py')
if os.path.exists(install_script):
print('... running install.py for %s' % container_name)
subprocess.check_call(['python', install_script])
else:
raise ContainerError('ERROR: this container does not provide an install.py!')
@tasks.register_helper
def get_extra_pypath(container_name=None):
from ape.installtools import pypath
return pypath.get_extra_pypath()
@tasks.register_helper
def get_poi_tuple(poi=None):
"""
Takes the poi or None and returns the container_dir and the product name either of the passed poi
    (<container_name>:<product_name>) or from os.environ.
:param poi: optional; <container_name>: <product_name>
:return: tuple of the container directory and the product name
"""
if poi:
parts = poi.split(':')
if len(parts) == 2:
container_name, product_name = parts
if container_name not in tasks.get_containers():
print('No such container')
sys.exit(1)
elif product_name not in tasks.get_products(container_name):
print('No such product')
sys.exit(1)
else:
container_dir = tasks.get_container_dir(container_name)
else:
print('Please check your arguments: --poi <container>:<product>')
sys.exit(1)
else:
container_dir = os.environ.get('CONTAINER_DIR')
product_name = os.environ.get('PRODUCT_NAME')
return container_dir, product_name
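# Illustrative only: get_poi_tuple("sdox:dev") would return roughly
# ("<APE_ROOT_DIR>/sdox", "dev") (the exact path depends on tasks.get_container_dir),
# while get_poi_tuple() with no argument falls back to the CONTAINER_DIR and
# PRODUCT_NAME environment variables set by a previous switch/zap.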
@tasks.register
def validate_product_equation(poi=None):
"""
Validates the product equation.
* Validates the feature order
* Validates the product spec (mandatory functional features)
:param poi: optional product of interest
"""
from . import utils
from . import validators
container_dir, product_name = tasks.get_poi_tuple(poi=poi)
feature_list = utils.get_features_from_equation(container_dir, product_name)
ordering_constraints = utils.get_feature_order_constraints(container_dir)
spec_path = utils.get_feature_ide_paths(container_dir, product_name).product_spec_path
print('*** Starting product.equation validation')
# --------------------------------------------------------
# Validate the feature order
print('\tChecking feature order')
fea
|
pmaunz/pyqtgraph
|
examples/optics/pyoptic.py
|
Python
|
mit
| 18,598
| 0.008229
|
# -*- coding: utf-8 -*-
import pyqtgraph as pg
from pyqtgraph.Qt import QtGui, QtCore
import numpy as np
import csv, gzip, os
from pyqtgraph import Point
class GlassDB:
"""
Database of dispersion coefficients for Schott glasses
+ Corning 7980
"""
def __init__(self, fileName='schott_glasses.csv'):
path = os.path.dirname(__file__)
fh = gzip.open(os.path.join(path, 'schott_glasses.csv.gz'), 'rb')
r = csv.reader(map(str, fh.readlines()))
lines = [x for x in r]
self.data = {}
header = lines[0]
for l in lines[1:]:
info = {}
for i in range(1, len(l)):
info[header[i]] = l[i]
self.data[l[0]] = info
self.data['Corning7980'] = { ## Thorlabs UV fused silica--not in schott catalog.
'B1': 0.68374049400,
'B2': 0.42032361300,
'B3': 0.58502748000,
'C1': 0.00460352869,
'C2': 0.01339688560,
'C3': 64.49327320000,
'TAUI25/250': 0.95, ## transmission data is fabricated, but close.
'TAUI25/1400': 0.98,
}
for k in self.data:
self.data[k]['ior_cache'] = {}
def ior(self, glass, wl):
"""
Return the index of refraction for *glass* at wavelength *wl*.
The *glass* argument must be a key in self.data.
"""
info = self.data[glass]
cache = info['ior_cache']
if wl not in cache:
B = list(map(float, [info['B1'], info['B2'], info['B3']]))
C = list(map(float, [info['C1'], info['C2'], info['C3']]))
w2 = (wl/1000.)**2
n = np.sqrt(1.0 + (B[0]*w2 / (w2-C[0])) + (B[1]*w2 / (w2-C[1])) + (B[2]*w2 / (w2-C[2])))
cache[wl] = n
return cache[wl]
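    # The expression above is the three-term Sellmeier equation,
    #   n^2(lambda) = 1 + sum_i B_i * lambda^2 / (lambda^2 - C_i),
    # with lambda in micrometres, which is why wl (given in nm) is divided by 1000
    # before squaring.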
def transmissionCurve(self, glass):
data = self.data[glass]
keys = [int(x[7:]) for x in data.keys() if 'TAUI25' in x]
keys.sort()
curve = np.empty((2,len(keys)))
for i in range(len(keys)):
curve[0][i] = keys[i]
key = 'TAUI25/%d' % keys[i]
val = data[key]
if val == '':
val = 0
else:
val = float(val)
curve[1][i] = val
return curve
GLASSDB = GlassDB()
def wlPen(wl):
"""Return a pen representing the given wavelength"""
l1 = 400
l2 = 700
hue = np.clip(((l2-l1) - (wl-l1)) * 0.8 / (l2-l1), 0, 0.8)
val = 1.0
if wl > 700:
val = 1.0 * (((700-wl)/700.) + 1)
elif wl < 400:
val = wl * 1.0/400.
#print hue, val
color = pg.hsvColor(hue, 1.0, val)
pen = pg.mkPen(color)
return pen
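# Note: the mapping above runs the hue from ~0.8 (violet) at 400 nm down to 0 (red)
# at 700 nm, and dims the value for wavelengths outside the visible range.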
class ParamObj(object):
# Just a helper for tracking parameters and responding to changes
def __init__(self):
self.__params = {}
def __setitem__(self, item, val):
self.setParam(item, val)
def setParam(self, param, val):
self.setParams(**{param:val})
def setParams(self, **params):
"""Set parameters for this optic. This is a good function to override for subclasses."""
self.__params.update(params)
self.paramStateChanged()
def paramStateChanged(self):
pass
def __getitem__(self, item):
# bug in pyside 1.2.2 causes getitem to be called inside QGraphicsObject.parentItem:
return self.getParam(item) # PySide bug: https://bugreports.qt.io/browse/PYSIDE-441
def getParam(self, param):
return self.__params[param]
class Optic(pg.GraphicsObject, ParamObj):
sigStateChanged = QtCore.Signal()
def __init__(self, gitem, **params):
ParamObj.__init__(self)
pg.GraphicsObject.__init__(self) #, [0,0], [1,1])
self.gitem = gitem
self.surfaces = gitem.surfaces
gitem.setParentItem(self)
self.roi = pg.ROI([0,0], [1,1])
self.roi.addRotateHandle([1, 1], [0.5, 0.5])
self.roi.setParentItem(self)
defaults = {
'pos': Point(0,0),
'angle': 0,
}
defaults.update(params)
self._ior_cache = {}
self.roi.sigRegionChanged.connect(self.roiChanged)
self.setParams(**defaults)
def updateTransform(self):
self.resetTransform()
self.setPos(0, 0)
self.translate(Point(self['pos']))
self.rotate(self['angle'])
def setParam(self, param, val):
ParamObj.setParam(self, param, val)
def paramStateChanged(self):
"""Some parameters of the optic have changed."""
# Move graphics item
self.gitem.setPos(Point(self['pos']))
self.gitem.resetTransform()
self.gitem.rotate(self['angle'])
# Move ROI to match
try:
self.roi.sigRegionChanged.disconnect(self.roiChanged)
br = self.gitem.boundingRect()
o = self.gitem.mapToParent(br.topLeft())
self.roi.setAngle(self['angle'])
self.roi.setPos(o)
self.roi.setSize([br.width(), br.height()])
finally:
self.roi.sigRegionChanged.connect(self.roiChanged)
self.sigStateChanged.emit()
def roiChanged(self, *args):
pos = self.roi.pos()
# rotate gitem temporarily so we can decide where it will need to move
self.gitem.resetTransform()
self.gitem.rotate(self.roi.angle())
br = self.gitem.boundingRect()
o1 = self.gitem.mapToParent(br.topLeft())
self.setParams(angle=self.roi.angle(), pos=pos + (self.gitem.pos() - o1))
def boundingRect(self):
return QtCore.QRectF()
def paint(self, p, *args):
pass
def ior(self, wavelength):
return GLASSDB.ior(self['glass'], wavelength)
class Lens(Optic):
def __init__(self, **params):
defaults = {
            'dia': 25.4,  ## diameter of lens
'r1': 50., ## positive means convex, use 0 for planar
'r2': 0, ## negative means convex
'd': 4.0,
'glass': 'N-BK7',
            'reflect': False,
}
defaults.update(params)
d = defaults.pop('d')
defaults['x1'] = -d/2.
defaults['x2'] = d/2.
gitem = CircularSolid(brush=(100, 100, 130, 100), **defaults)
Optic.__init__(self, gitem, **defaults)
def propagateRay(self, ray):
"""Refract, reflect, absorb, and/or scatter ray. This function may create and return new rays"""
"""
NOTE:: We can probably use this to compute refractions faster: (from GLSL 120 docs)
For the incident vector I and surface normal N, and the
ratio of indices of refraction eta, return the refraction
vector. The result is computed by
k = 1.0 - eta * eta * (1.0 - dot(N, I) * dot(N, I))
if (k < 0.0)
return genType(0.0)
else
return eta * I - (eta * dot(N, I) + sqrt(k)) * N
The input parameters for the incident vector I and the
surface normal N must already be normalized to get the
desired results. eta == ratio of IORs
For reflection:
For the incident vector I and surface orientation N,
returns the reflection direction:
I – 2 ∗ dot(N, I) ∗ N
N must already be normalized in order to achieve the
desired result.
"""
iors = [self.ior(ray['wl']), 1.0]
for i in [0,1]:
surface = self.surfaces[i]
ior = iors[i]
p1, ai = surface.intersectRay(ray)
#print "surface intersection:", p1, ai*180/3.14159
#trans = self.sceneTransform().inverted()[0] * surface.sceneTransform()
#p1 = trans.map(p1)
if p1 is None:
ray.setEnd(None)
break
p1 = surface.mapToItem(ray, p1)
#print "adjusted position:", p1
#ior = self.ior(ray['wl'])
rd = ray['dir']
a1 = np.arctan2(rd[1], r
|
osspeak/osspeak
|
osspeak/recognition/actions/library/general.py
|
Python
|
mit
| 247
| 0.016194
|
def python_evaluate(text):
    return eval(str(text))
def python_print(*values, sep=' '):
joined = sep.join((str(v) for v in values))
print(joined)
def python_list(*args):
    return args
def error(text=''):
raise RuntimeError(text)
|
ptphp/PyLib
|
src/webpy1/webpy1.1/post/post_anju.py
|
Python
|
apache-2.0
| 4,425
| 0.018079
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
import urllib2,urllib,sys,time
import cookielib,mechanize
import re
DEBUG =0
reload(sys)
sys.setdefaultencoding('utf8') #@UndefinedVariable
register_openers()
headers = {
    'Host':'agent.anjuke.com',
'User-Agent' : 'Mozilla/5.0 (X11; Linux i686; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
#'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
|
#'Accept-Language':'zh-cn,zh;q=0.5',
#'Accept-Encoding':'gzip, deflate',
#'Accept-Charset':'GB2312,utf-8;q=0.7,*;q=0.7',
'Keep-Alive':'115',
'Connection':'keep-alive',
}
#datagen11, headers = multipart_encode({"fileUploadInput": open("/home/myapp/Screenshot-1.jpg","rb"),"backFunction": "$.c.Uploader.finish"})
class httpPost():
data = {}
def __init__(self,dataDic):
self.cookie = cookielib.CookieJar()
httpsHandler = urllib2.HTTPHandler()
httpsHandler.set_http_debuglevel(DEBUG)
self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookie),httpsHandler)
self.data = dataDic
def login1(self):
self.brow = mechanize.Browser()
httpHandler = mechanize.HTTPHandler()
httpsHandler = mechanize.HTTPSHandler()
httpHandler.set_http_debuglevel(DEBUG)
self.cookiejar = mechanize.LWPCookieJar()
#self.cookiejar = "Cookie lzstat_uv=34741959842666604402|1786789; Hm_lvt_976797cb85805d626fc5642aa5244ba0=1304534271541; ASPSESSIONIDQCDRAQBB=JHCHINLAHGMAIGBIFMNANLGF; lzstat_ss=2189193215_2_1304564199_1786789; Hm_lpvt_976797cb85805d626fc5642aa5244ba0=1304535401191"
self.opener = mechanize.OpenerFactory(mechanize.SeekableResponseOpener).build_opener(
httpHandler,httpsHandler,
mechanize.HTTPCookieProcessor(self.cookiejar),
mechanize.HTTPRefererProcessor,
mechanize.HTTPEquivProcessor,
mechanize.HTTPRefreshProcessor,
)
self.opener.addheaders = [("User-Agent","Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13"),
("From", "")]
#self.opener.addheaders = [(
# "Referer", self.data['postUrl']
# )]
login={}
login['method'] = self.data['method']
login['name'] = self.data['name']
login['pwd'] = self.data['pwd']
loginUrl = self.data['loginUrl']+'?'+urllib.urlencode(login)
print loginUrl
response = mechanize.urlopen("http://esf.soufun.com/")
response = mechanize.urlopen(loginUrl)
print response.read().decode('gb2312')
def login(self):
self.cookie = cookielib.CookieJar()
httpsHandler = urllib2.HTTPHandler()
httpsHandler.set_http_debuglevel(DEBUG)
self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookie),httpsHandler)
login={}
login['act'] = self.data['act']
login['loginName'] = self.data['loginName']
login['history'] = ''
login['loginPasswd'] = self.data['loginPasswd']
loginUrl = self.data['loginUrl']
req = urllib2.Request(loginUrl,urllib.urlencode(login),headers)
r = self.opener.open(req)
res = None
for item in self.cookie:
#print item.name,item.value
if item.name == 'aQQ_ajklastuser':
res = item.value
return res
#aQQ_ajklastuser junyue_liuhua
#print self.opener.open('http://my.anjuke.com/v2/user/broker/checked/').read()
#open('login.txt','w').write(r.read().encode('utf-8'))
def post(self):
pass
#postData = {}
#postData['loginUrl'] = 'http://agent.anjuke.com/v2/login/'
#postData['act'] = 'login'
#postData['loginName'] = 'junyue_liuhua'
#postData['loginPasswd'] = 'lh_131415'
#http = httpPost(postData)
#http.login()
|
Jumpscale/jumpscale6_core
|
lib/JumpScale/grid/zdaemon/__init__.py
|
Python
|
bsd-2-clause
| 330
| 0
|
from JumpScale import j
import JumpScale.grid.gevent
import JumpScale.baselib.key_value_store
import JumpScale.baselib.serializers
from .ZDaemonFactory import ZDaemonFactory
j.base.loader.makeAvailable(j, 'core')
j.core.zdaemon = ZDaemonFactory()
j.base.loader.makeAvailable(j, 'servers')
j.servers.zdaemon = ZDaemonFactory()
|
KernelAnalysisPlatform/kvalgrind
|
qemu/panda_plugins/syscalls/syscall_parser.py
|
Python
|
gpl-3.0
| 16,902
| 0.007159
|
# /* PANDABEGINCOMMENT
# *
# * Authors:
# * Tim Leek tleek@ll.mit.edu
# * Ryan Whelan rwhelan@ll.mit.edu
# * Joshua Hodosh josh.hodosh@ll.mit.edu
# * Michael Zhivich mzhivich@ll.mit.edu
# * Brendan Dolan-Gavitt brendandg@gatech.edu
# *
# * This work is licensed under the terms of the GNU GPL, version 2.
# * See the COPYING file in the top-level directory.
# *
#PANDAENDCOMMENT */
"""
Output panda tool to parse system calls on Linux
"""
import re
import os
from collections import defaultdict
from sys import argv
ARM_CALLNO = "env->regs[7]"
ARM_ARGS = ["env->regs[0]", "env->regs[1]", "env->regs[2]", "env->regs[3]", "env->regs[4]", "env->regs[5]", "env->regs[6]"]
ARM_SP = "env->regs[13]"
ARM_GUARD = "#ifdef TARGET_ARM"
X86_CALLNO = "EAX"
X86_ARGS = ["EBX", "ECX", "EDX", "ESI", "EDI", "EBP"]
X86_SP = "ESP"
# Linux's syscall ABI doesn't change between IA32 and AMD64
X86_GUARD = "#ifdef TARGET_I386"
PROTOS = "android_arm_prototypes.txt" if len(argv) < 2 else argv[1]
MODE = "ARM" if len(argv) < 3 else argv[2].upper()
DESTDIR = "." if len(argv) < 4 else argv[3]
# set arch/OS specific args by mode
for x in ["CALLNO", "ARGS", "SP", "GUARD"]:
locals()[x] = locals()["_".join([MODE, x])]
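# e.g. with MODE == "ARM" the loop above binds CALLNO to ARM_CALLNO, ARGS to ARM_ARGS,
# SP to ARM_SP and GUARD to ARM_GUARD (and likewise for X86).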
twoword_types = ["unsigned int", "unsigned long"]
types_64 = ["loff_t", 'u64']
stypes_32 = ["int", "long", '__s32']
types_32 = ["unsigned int", "unsigned long", "size_t", 'u32', 'off_t', 'timer_t', 'key_t',
'key_serial_t', 'mqd_t', 'clockid_t', 'aio_context_t', 'qid_t', 'old_sigset_t', 'union semun']
types_16 = ['old_uid_t', 'uid_t', 'mode_t', 'gid_t', 'pid_t']
types_pointer = ['cap_user_data_t', 'cap_user_header_t', '__sighandler_t', '...']
alltext = ""
alltext += GUARD + "\n"
alltext += "switch( " + CALLNO + " ){\n"
alltext+= "// we use std::string so that we only do lookups into guest memory once and cache the result\n"
def copy_string(dest, source, fname):
global alltext
alltext+= "syscalls::string %s = log_string(%s, \"%s\");\n" % (dest, source,fname)
def record_address(dest, source, fname):
global alltext
alltext+= "target_ulong %s = log_pointer(%s, \"%s\");\n" %(dest, source, fname)
def record_32(dest, source, fname):
global alltext
alltext+= "uint32_t %s = log_32(%s, \"%s\");\n" %(dest, source, fname)
def record_s32(dest, source, fname):
global alltext
alltext+= "int32_t %s = log_s32(%s, \"%s\");\n" %(dest, source, fname)
def record_64(dest, highsource, lowsource, fname):
global alltext
alltext+= "uint64_t %s = log_64(%s, %s, \"%s\");\n" %(dest, highsource, lowsource, fname)
CHAR_STAR = 'CHAR_STAR'
POINTER = 'POINTER'
BYTES_8 = '8BYTE'
BYTES_4 = '4BYTE'
BYTES_2 = '2BYTE'
SIGNED_4 = '4SIGNED'
# C++ types for callback arguments
ARG_TYPE_TRANSLATIONS = { CHAR_STAR: 'syscalls::string', # pointer
POINTER: 'target_ulong', # pointer
BYTES_8: 'uint64_t',
BYTES_4: 'uint32_t',
SIGNED_4: 'int32_t',
BYTES_2: 'uint16_t',
}
# C types for callback arguments
ARG_TYPE_C_TRANSLATIONS = dict(ARG_TYPE_TRANSLATIONS)
ARG_TYPE_C_TRANSLATIONS[CHAR_STAR] = 'target_ulong'
# Functions to translate arguments to C++ callbacks to arguments to C callbacks
# Defaults to returning the C++ argument's name
CXX_ARG_TO_C_ARG = defaultdict(lambda: lambda x: x)
CXX_ARG_TO_C_ARG[CHAR_STAR] = lambda x: "{0}.get_vaddr()".format(x) # uses internals of syscalls::string
CPP_RESERVED = {"new": "anew", "data":"data_arg"}
NAMESPACE = "syscalls"
class Argument(object):
def __init__(self):
self._type = None
self._name = None
self.var = None
@property
def type(self):
return self._type
@type.setter
def type(self, newtype):
assert(newtype in ARG_TYPE_TRANSLATIONS.keys())
self._type = newtype
@property
def name(self):
return self._name
@name.setter
def name(self, newname):
if newname.startswith('*'):
newname = newname[1:]
if newname in CPP_RESERVED:
newname = CPP_RESERVED[newname]
if newname is ')':
newname = "fn"
if newname.endswith('[]'):
newname = newname[:-2]
self._name = newname
class Syscall(object):
def __init__(self, name):
self.cxxargs = None
self.cxx_std_fn = None
self.cargs = None
self.args = None
self.precall = None
self.call_contents = None
self.name = name
# Prototypes for internal C++ callbacks per syscall
callback_defs = set()
# Typedefs for PPP C callbacks
typedefs = set()
# Names of all PPP C callbacks
cb_names = set()
# map from callback_def to code that comes before it in cpp file
precall = {}
# map from callback_def to its content in cpp file
call_contents = {}
# map from callback_def to call name
call_names = {}
syscalls = [] # objects, having a set is useless for dedup
# Goldfish kernel doesn't support OABI layer. Yay!
with open(PROTOS) as armcalls:
linere = re.compile("(\d+) (.+) (\w+)\((.*)\);")
charre = re.compile("char.*\*")
for line in armcalls:
# Fields: <no> <return-type> <name><signature with spaces>
fields = linere.match(line)
callno = fields.group(1)
rettype = fields.group(2)
callname = fields.group(3)
args = fields.group(4).split(',')
arg_types = []
alltext += "// " + str(callno)+" "+ str(rettype)+" "+ str(callname)+" "+ str(args) + '\n'
for argno, arg in enumerate(args):
# the .split() can leave us with args = ['']
if arg == '':
continue
#alltext += callno, rettype, callname, args
thisarg = Argument()
arg = arg.strip()
if arg.endswith('*') or len(arg.split()) == 1 or arg in twoword_types:
# no argname, just type
argname = "arg{0}".format(argno)
else:
argname = arg.split()[-1]
thisarg.name = argname
if argname == 'int':
print "ERROR: shouldn't be naming arg 'int'! Arg text: '{0}'".format(arg)
exit(1)
if charre.search(arg) and not argname.endswith('buf') and argname != '...' and not argname.endswith('[]'):
thisarg.type = CHAR_STAR
arg_types.append(thisarg)
elif '*' in arg or any([x in arg for x in types_pointer]):
thisarg.type = POINTER
arg_types.append(thisarg)
elif any([x in arg for x in types_64]):
thisarg.type = BYTES_8
arg_types.append(thisarg)
elif any([x in arg for x in types_32]) or any([x in arg for x in types_16]):
thisarg.type = BYTES_4
arg_types.append(thisarg)
elif any([x in arg for x in stypes_32]) and 'unsigned' not in arg:
thisarg.type = SIGNED_4
arg_types.append(thisarg)
elif arg == 'void':
pass
elif arg == 'unsigned' or (len(arg.split()) is 2 and arg.split()[0] == 'unsigned'):
thisarg.type = BYTES_4
arg_types.append(thisarg)
else:
print arg
alltext += "unknown:", arg
alltext += "case " + callno + "
|
: {\n"
alltext += "record_syscall(\"%s\")
|
;\n" % callname
argno = 0
for i, val in enumerate(arg_types):
arg_type = val.type
arg_name = val.name
if argno >= len(ARGS):
alltext += "// out of registers. Use the stack!"+'\n'
break
if arg_type == CHAR_STAR:
copy_string(arg_name, ARGS[argno], args[i])
elif arg_type == POINTER:
record_address(arg_name, ARGS[argno], args[i])
elif arg_type == BYTES_4:
record_32(arg_name, ARGS[argno], args[i])
elif arg_type == SIGNED_4:
record_s32(ar
|
sebdelsol/pyload
|
module/plugins/accounts/DebridItaliaCom.py
|
Python
|
gpl-3.0
| 1,321
| 0.006813
|
# -*- coding: utf-8 -*-
import re
from time import mktime, strptime
from module.plugins.Account import Account
class DebridItaliaCom(Account):
__name__ = "DebridItaliaCom"
__type__ = "account"
__version__ = "0.11"
|
__description__ = """Debriditalia.com account plugin"""
    __license__ = "GPLv3"
__authors__ = [("stickell", "l.stickell@yahoo.it"),
("Walter Purcaro", "vuolter@gmail.com")]
WALID_UNTIL_PATTERN = r'Premium valid till: (.+?) \|'
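    # (Assumed page snippet) the pattern matches text such as
    # "Premium valid till: 31/12/2015 23:59 |" and captures the date/time part,
    # which loadAccountInfo then parses with strptime("%d/%m/%Y %H:%M").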
def loadAccountInfo(self, user, req):
info = {"premium": False, "validuntil": None, "trafficleft": None}
html = req.load("http://debriditalia.com/")
if 'Account premium not activated' not in html:
m = re.search(self.WALID_UNTIL_PATTERN, html)
if m:
validuntil = int(mktime(strptime(m.group(1), "%d/%m/%Y %H:%M")))
info = {"premium": True, "validuntil": validuntil, "trafficleft": -1}
else:
self.logError(_("Unable to retrieve account information"))
return info
def login(self, user, data, req):
html = req.load("http://debriditalia.com/login.php",
get={'u': user, 'p': data['password']})
if 'NO' in html:
self.wrongPassword()
|
buck06191/BayesCMD
|
bparser/ast.py
|
Python
|
gpl-2.0
| 36,960
| 0.011986
|
# functions for processing the parsed syntax tree
import sys
import decimal
import string
import logger
# list of standard functions (from <math.h>) that are
# already known to be included in the destination
# NB: this list is fairly minimal -- the actual math.h
# for a contemporary C compiler is likely to include
# many more functions, but since we only use this for
# reporting purposes this list will do for now
# (these may eventually move to some other module)
STD_FUNCS = set(['acos', 'asin', 'atan', 'atan2', 'cos',
'cosh', 'sin', 'sinh', 'tan', 'tanh',
'exp', 'frexp', 'ldexp', 'log', 'log10',
'modf', 'pow', 'sqrt', 'ceil', 'fabs',
'floor', 'fmod'])
# shared non-negativity constraint for use with chemicals
NON_NEGATIVE = {'expr' : '0', 'i_expr' : (('literal', '0'),), 'kind' : 'bound', 'test' : '<'}
# when we need labels and they aren't supplied, use a simple
# integer counter to distinguish them
n_unlabelled = 0
def default_label(basename='unlabelled__'):
global n_unlabelled
n_unlabelled = n_unlabelled + 1
return basename + str(n_unlabelled)
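# e.g. successive calls give default_label() -> 'unlabelled__1',
# default_label('rxn__') -> 'rxn__2' (the counter is shared across base names).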
# process all the top level items in a parsed model AST
# working out what's in them and what their dependencies are
def process(merged, sources, independent='t'):
work = {
'roots' : [],
'assigned' : set(),
'chemicals' : {},
'reactions' : {},
'symbols' : {},
'conflicts' : [],
'functions' : set(),
'embeds' : [],
'required' : set(),
'unused' : set(),
'symlist' : [],
'diffs' : [],
'algs' : [],
'auxiliaries' : {},
'diagonal' : True,
'version' : '',
'params' : [],
'intermeds' : [],
|
'outputs' : [],
'inputs' : [],
        'docs' : [],
'docstack' : [],
'modeldocs' : [],
'tags' : {},
'sources' : sources,
'extern' : [],
}
# independent variable is always at index 0
declare_symbol(find_independent(merged, independent), work)
for item in merged:
{
'reaction' : process_reaction,
'algeqn' : process_algeqn,
'diffeqn' : process_diffeqn,
'assign' : process_assign,
'constraint' : process_constraint,
'EMBEDDED' : process_embedded,
'version' : process_version,
'output' : process_output,
'input' : process_input,
'extern' : process_extern,
'import' : ignore_silent,
'independent' : ignore_silent,
'DOC' : process_doc
}.get(item[0], ignore_item)(item, work)
transform_reactions(work)
# consolidate global dependencies
for name in work['symbols'].keys():
recurse_dependencies(name, set(), set(), work['symbols'])
# assess whether a symbol depends on Y changes (made by solver)
# only parameter updates (specified by user)
rootset = set(work['roots'] + [work['symlist'][0]])
for name in work['symbols']:
if name in work['roots']:
pass
elif work['symbols'][name]['depends'] & rootset:
work['intermeds'].append(name)
else:
work['params'].append(name)
work['assignments'] = sort_assignments(work)
finalise_outputs(work)
finalise_externs(work)
for name in rootset.union(work['outputs']):
work['required'].add(name)
work['required'] = work['required'] | work['symbols'][name]['depends']
work['unused'] = set(work['symbols'].keys()) - work['required']
work['ind_params'] = sorted([x for x in work['params'] if len(work['symbols'][x]['depends']) == 0], key=str.lower)
work['deriv_params'] = sorted([x for x in work['params'] if x not in work['ind_params']], key=str.lower)
work['known'] = work['functions'] & STD_FUNCS
work['unknown'] = work['functions'] - STD_FUNCS
postprocess_docs(work)
return work
# at the moment this is just a hack to handle one special case
# more considered doc compiling will be dealt with later (and probably elsewhere)
def postprocess_docs(work):
for name in work['symbols']:
symbol = work['symbols'][name]
docs = symbol['docs']
for line in docs:
if line.startswith('+'):
symbol['tags'].extend(line.strip('+').strip().split())
elif line.startswith('$'):
symbol['latex'] = line.strip('$').strip()
elif line.startswith('~'):
symbol['units'] = line.strip('~').strip()
for tag in symbol['tags']:
if tag in work['tags']:
work['tags'][tag].append(name)
else:
work['tags'][tag] = [name]
# identify independent variable -- only first declaration applies
def find_independent(merged, default):
independent = None
for item in merged:
if item[0] == 'independent':
if independent is None:
independent = item[1]
else:
logger.warn('Ignoring additional @independent directive: ' + item[1])
if independent is None:
return default
return independent
# recursively consolidate all dependencies for all symbols
# this is pretty clunky and probably doing a lot of redundant work
# especially given that the results are of marginal utility
def recurse_dependencies(name, parents, done, symbols):
if name in parents:
if not symbols[name]['circular']:
logger.detail('Circular dependency found for ' + name)
symbols[name]['circular'] = True
elif symbols[name]['circular']:
logger.detail('Previous circularity noted for ' + name)
else:
for dep in symbols[name]['depends']:
if dep in done:
symbols[name]['depends'] = symbols[name]['depends'] | symbols[dep]['depends']
else:
symbols[name]['depends'] = (symbols[name]['depends']
| recurse_dependencies(dep, parents | set([name]), done, symbols))
done = done | set([name])
return symbols[name]['depends']
# sort assignment expressions into four groups:
# - independent for initialisation time
# - dependents ordered for overall initialisation time
# - parameters ordered for step initialisation time
# - dependents ordered for solver runtime
def sort_assignments(work):
independent = []
ind_expr = []
dependent_init = []
init_expr = []
dependent_step = []
step_expr = []
dependent_run = []
run_expr = []
symbols = work['symbols']
for name in work['assigned']:
init, run = choose_assignments(symbols[name])
if len(init['depends']) > 0:
dependent_init.append(name)
init_expr.append(init)
else:
independent.append(name)
ind_expr.append(init)
if run:
if name in work['intermeds']:
dependent_run.append(name)
run_expr.append(run)
else:
dependent_step.append(name)
step_expr.append(run)
else:
# intermeds is filled before we've determined
# whether there's any runtime assignment to do
# - now correct any earlier misapprehensions...
if name in work['intermeds']:
logger.message('reclassifying symbol %s as parameter' % name)
work['intermeds'].remove(name)
if name not in work['params']:
work['params'].append(name)
result = { 'independent': { 'names':independent, 'exprs':ind_expr } }
names, exprs = dependency_sort(dependent_init, init_expr)
result['dependent'] = { 'names': names, 'exprs': exprs }
names, exprs = dependency_sort(dependent_step, step_expr)
result['step'] = { 'names': na
|
skarphed/skarphed
|
admin/src/skarphedadmin/gui/ServerPropertyPage.py
|
Python
|
agpl-3.0
| 10,902
| 0.013212
|
#!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
import pygtk
pygtk.require("2.0")
import gtk
from skarphedadmin.gui import IconStock
from ViewPasswordButton import ViewPasswordButton
from InstancePage import InstancePage
from skarphedadmin.data.Generic import GenericObjectStoreException
from skarphedadmin.data.Server import Server
from skarphedadmin.gui.DefaultEntry import DefaultEntry
from skarphedadmin.glue.lng import _
class ServerPropertyPage(gtk.Frame):
addWindowOpen=False
MODE_EDIT = 0
MODE_NEW = 1
def __init__(self, parent, server=None):
gtk.Frame.__init__(self)
self.par = parent
self.serverId = None
if server is None:
self.set_label(_("Skarphed Admin Pro :: New Server"))
self.mode = ServerPropertyPage.MODE_NEW
else:
self.serverId = server.getLocalId()
self.set_label(_("Skarphed Admin Pro :: Server Properties of ")+server.getIp())
self.mode = ServerPropertyPage.MODE_EDIT
self.vbox = gtk.VBox()
self.instructionlabel = gtk.Label(_("Please enter the Server credentials"))
self.vbox.pack_start(self.instructionlabel,False)
self.ipFrame = gtk.Frame(_("Common"))
self.ipFrameT = gtk.Table(2,3,False)
self.ipFrame_IPLabel = gtk.Label(_("IP:"))
self.ipFrame_IPEntry = DefaultEntry(default_message="172.16.13.37")
self.ipFrameT.attach(self.ipFrame_IPLabel, 0,1,0,1)
self.ipFrameT.attach(self.ipFrame_IPEntry, 1,2,0,1)
self.ipFrame_NameLabel = gtk.Label(_("Name:"))
self.ipFrame_NameEntry = DefaultEntry(default_message="Server1")
self.ipFrameT.attach(self.ipFrame_NameLabel, 0,1,1,2)
self.ipFrameT.attach(self.ipFrame_NameEntry, 1,2,1,2)
self.ipFrame_Target_Label = gtk.Label("Target system:")
self.ipFrame_Target_model = gtk.ListStore(str)
for target in Server.INSTALLATION_TARGETS:
self.ipFrame_Target_model.append((target.getName(),))
self.ipFrame_Target_renderer = gtk.CellRendererText()
self.ipFrame_Target = gtk.ComboBox(self.ipFrame_Target_model)
self.ipFrame_Target.pack_start(self.ipFrame_Target_renderer,True)
self.ipFrame_Target.add_attribute(self.ipFrame_Target_renderer,'text',0)
self.ipFrameT.attach(self.ipFrame_Target_Label,0,1,2,3)
self.ipFrameT.attach(self.ipFrame_Target,1,2,2,3)
self.ipFrame.add(self.ipFrameT)
self.vbox.pack_start(self.ipFrame,False)
self.sshFrame = gtk.Frame(_("SSH"))
self.sshFrameT = gtk.Table(2,2,False)
self.sshFrame_NameLabel = gtk.Label(_("Username:"))
self.sshFrame_NameEntry = DefaultEntry(default_message="root")
self.sshFrame_PassLabel = gtk.Label(_("Password:"))
self.sshFrame_PassEntry = gtk.Entry()
self.sshFrame_PassEntry.set_visibility(False)
self.sshFrame_PassEntry.set_invisible_char("●")
self.sshFrameT.attach(self.sshFrame_NameLabel, 0,1,0,1)
self.sshFrameT.attach(self.sshFrame_NameEntry, 1,2,0,1)
self.sshFrameT.attach(self.sshFrame_PassLabel, 0,1,1,2)
self.sshFrameT.attach(self.sshFrame_PassEntry, 1,2,1,2)
self.sshFrame.add(self.sshFrameT)
self.vbox.pack_start(self.sshFrame,False)
self.instFrame = gtk.Frame(_("Instances"))
self.instFrameT = gtk.Table(2,4,False)
self.instList = gtk.TreeView()
self.instStore = gtk.ListStore(gtk.gdk.Pixbuf,str,int)
self.instList.set_model(self.instStore)
self.instCol_Icon = gtk.TreeViewColumn()
self.instCol_Name = gtk.TreeViewColumn(_('Instance'))
self.instRen_Icon = gtk.CellRendererPixbuf()
self.instRen_Name = gtk.CellRendererText()
self.instCol_Icon.pack_start(self.instRen_Icon,False)
self.instCol_Name.pack_start(self.instRen_Name,True)
self.instCol_Icon.add_attribute(self.instRen_Icon,'pixbuf',0)
self.instCol_Name.add_attribute(self.instRen_Name,'text',1)
self.instList.append_column(self.instCol_Icon)
self.instList.append_column(self.instCol_Name)
self.instAdd = gtk.Button(stock=gtk.STOCK_ADD)
self.instRemove = gtk.Button(stock=gtk.STOCK_REMOVE)
self.instEdit = gtk.Button(stock=gtk.STOCK_EDIT)
self.instFrameT.attach(self.instList,0,1,0,4)
self.instFrameT.attach(self.instAdd,1,2,0,1)
self.instFrameT.attach(self.instRemove,1,2,1,2)
self.instFrameT.attach(self.instEdit,1,2,2,3)
self.instAdd.connect("clicked",self.cb_Add)
self.instRemove.connect("clicked",self.cb_Remove)
self.instEdit.connect("clicked",self.cb_Edit)
self.instList.connect("cursor-changed", self.cb_cursorChanged)
self.instFrame.add(self.instFrameT)
self.vbox.pack_start(self.instFrame,False)
self.fill = gtk.Label("")
self.vbox.pack_start(self.fill,True)
self.buttons = gtk.HBox()
self.ok = gtk.Button(stock=gtk.STOCK_OK)
self.cancel = gtk.Button(stock=gtk.STOCK_CANCEL)
self.viewpass = ViewPasswordButton()
self.viewpass.addEntry(self.sshFrame_PassEntry)
self.ok.connect("clicked", self.cb_OK)
self.cancel.connect("clicked", self.cb_Cancel)
self.buttons.pack_end(self.ok,False)
self.buttons.pack_end(self.cancel,False)
self.buttons.pack_end(self.viewpass,False)
        self.vbox.pack_start(self.buttons,False)
self.add(self.vbox)
if server is not None:
self.ipFrame_IPEntry.set_text(server.getIp())
self.ipFrame_NameEntry.set_text(server.getRawName())
self.sshFrame_NameEntry.set_text(server.getSSHName())
self.sshFrame_PassEntry.set_text(server.getSSHPass())
server.addCallback(self.render)
self.getApplication().getMainWindow().openDialogPane(self)
|
self.render()
def render(self):
def search(model, path, rowiter, target):
text = model.get_value(rowiter,0)
if text == target.getName():
self.ipFrame_Target.set_active_iter(rowiter)
server = None
try:
server = self.getApplication().getLocalObjectById(self.serverId)
except GenericObjectStoreException:
if self.mode == ServerPropertyPage.MODE_EDIT:
self.getApplication().getMainWindow().closeDialogPane()
return
self.instFrame.set_visible(self.mode == ServerPropertyPage.MODE_EDIT)
if server is not None and server.isTargetUsable():
self.ipFrame_Target_model.foreach(search, server.getTarget())
self.instStore.clear()
if server is not None:
for instance in server.getInstances():
icon = IconStock.SKARPHED #TODO: Implement Icon
self.instStore.append((icon,instance.getName(),instance.getLocalId()))
self.cb_cursorChanged()
def getPar(self):
return self.par
def getApplication(self):
return self.par.getApplication()
def cb_Add(self,widget=None,data=None):
server = self.getApplication().getLocalObjectById(self.serverId)
InstancePag
|
plotly/python-api
|
packages/python/plotly/plotly/validators/ohlc/hoverlabel/_bordercolor.py
|
Python
|
mit
| 526
| 0.001901
|
import _plotly_utils.basevalidators
class BordercolorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="bordercolor", parent_name="ohlc.hoverlabel", **kwargs
):
super(BordercolorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "none"),
            role=kwargs.pop("role", "style"),
**kwargs
)
|
ScottWales/mosrs-setup
|
mosrs/setup.py
|
Python
|
apache-2.0
| 7,573
| 0.009375
|
#!/usr/bin/env python
"""
Copyright 2016 ARC Centre of Excellence for Climate Systems Science
author: Scott Wales <scott.wales@unimelb.edu.au>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
from subprocess import Popen, PIPE
from textwrap import dedent
from os import environ, path
from distutils.util import strtobool
import ldap
import getpass
from . import auth, gpg
def colour(text, colour):
if colour == 'red':
code = '\033[31;1m'
elif colour == 'green':
code = '\033[32m'
elif colour == 'blue':
code = '\033[93m'
else:
raise Exception
reset = '\033[m'
return code + text + reset
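# Illustrative only: colour('WARN', 'red') wraps the text in the ANSI escape codes
# '\033[31;1m' ... '\033[m' so it renders as bold red on a colour-capable terminal.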
def info(text):
print("%s: %s"%(colour('INFO','blue'),text))
def warning(text):
print("%s: %s"%(colour('WARN','red'),text))
def todo(text):
print("%s: %s"%(colour('TODO','green'),text))
class SetupError(Exception):
"""
Indicates user needs to take action before setup can complete
"""
pass
def userinfo():
"""
Get current user's common name and email from LDAP
Returns: Tuple of (name, email)
"""
l = ldap.initialize(ldap.get_option(ldap.OPT_URI))
people = 'ou=People,dc=apac,dc=edu,dc=au'
info = l.search_s(people, ldap.SCOPE_SUBTREE, '(uid=%s)'%getpass.getuser())
return (info[0][1]['cn'][0],info[0][1]['mail'][0])
def prompt_bool(prompt):
|
"""
Ask a yes/no question
Returns: true/false answer
"""
raw_value = raw_input(prompt + ' [yes/no] ')
try:
return strtobool(raw_value)
except ValueError:
        return prompt_bool(prompt)
def prompt_or_default(prompt, default):
"""
Ask a question with a default answer
Returns: answer or default
"""
response = raw_input('%s [%s]: '%(prompt,default)).strip()
|
if response == '':
response = default
return response
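# e.g. prompt_or_default('What is your work email address?', 'user@example.com')
# returns whatever the user types, or 'user@example.com' if they just press Enter.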
def gpg_startup():
agent = dedent("""
[ -f ~/.gpg-agent-info ] && source ~/.gpg-agent-info
if [ -S "${GPG_AGENT_INFO%%:*}" ]; then
export GPG_AGENT_INFO
else
eval $( gpg-agent --daemon --allow-preset-passphrase --batch --max-cache-ttl 43200 --write-env-file ~/.gpg-agent-info )
fi
""")
home = environ['HOME']
for f in ['.profile','.bash_profile']:
p = path.join(home,f)
if path.exists(p):
# Check if gpg-agent is already referenced
grep = Popen(['grep','gpg-agent',p],stdout=PIPE)
grep.communicate()
if grep.returncode == 0:
warning('GPG Agent is referenced in ~/%s but is not currently running. '%f+
'Try relogging to start it again, if that doesn\'t work please contact the helpdesk')
continue
# Add script to file
with open(p,'a') as profile:
profile.write(agent)
todo('GPG Agent has been added to your startup scripts. '+
'Please log out of Accessdev then back in again to make sure it has been activated\n')
def check_gpg_agent():
"""
Make sure GPG-Agent is running
If the environment variable is not found add activation script to the
users's .profile
"""
try:
gpg.send('GETINFO version')
info('GPG Agent is running')
except Exception:
gpg_startup()
raise SetupError
def register_mosrs_account():
name, email = userinfo()
name = prompt_or_default('What is your name?',name)
email = prompt_or_default('What is your work email address?',email)
request = Popen(['mail', '-s','MOSRS account request for %s'%name, 'access_help@nf.nci.org.au'], stdin=PIPE)
request.communicate(dedent("""
ACCESS user %s (NCI id %s, email <%s>) would like to request an account on MOSRS.
Can the sponsor for their institution please submit a request on their behalf at
https://code.metoffice.gov.uk/trac/admin/newticket?type=account-request
You can check if they have an existing account at
https://code.metoffice.gov.uk/trac/home/wiki/UserList
"""%(name, environ['USER'], email)))
print('\n')
info('Submitting MOSRS account request for %s <%s> to access_help'%(name,email))
info('Once your account has been activated (will take at least one UK business day) '+
'you will receive an email detailing how to set up your password\n')
def setup_mosrs_account():
"""
Setup Mosrs
"""
check_gpg_agent()
mosrs_request = None
while mosrs_request not in ['yes', 'no', 'y', 'n']:
mosrs_request = prompt_or_default("Do you have a MOSRS account", "yes")
mosrs_request = mosrs_request.lower()
if mosrs_request.startswith('y'):
auth.check_or_update()
else:
print(dedent(
"""
If you need to access new versions of the UM please send a
request to 'cws_help@nci.org.au' saying that you'd like a MOSRS account
Once you have an account run this script again
"""
))
print('\n')
def check_raijin_ssh():
"""
Raijin has been decommissioned. There should no longer be any calls to this
procedure. In case there is, I'm leaving this stub in.
"""
raise ValueError("raijin should no longer be used. Please contact CMS")
def check_gadi_ssh():
"""
Test Rose/Cylc can be found on Gadi
"""
print('Testing Rose can be accessed on Gadi...')
# ssh -oBatchMode=yes /projects/access/bin/cylc --version
ssh = Popen(['ssh','-oBatchMode=yes','gadi','/projects/access/bin/cylc --version'])
result = ssh.wait()
if result == 0:
print('Successfully found Rose\n')
else:
warning('Unable to connect to Gadi')
warning('Follow the instructions at https://accessdev.nci.org.au/trac/wiki/Guides/SSH to set up a SSH agent\n')
raise SetupError
def accesssvn_setup():
"""
Setup GPG for access-svn access
"""
try:
check_gpg_agent()
print('\n')
print('To store your password for 12 hours run:')
print(' access-auth\n')
except SetupError:
todo('Once this has been done please run this setup script again\n')
def main():
print('\n')
print('Welcome to Accessdev, the user interface and control server for the ACCESS model at NCI')
print('This script will set up your account to use Rose and the UM\n')
try:
setup_mosrs_account()
check_gadi_ssh()
# Account successfully created
print('You are now able to use Rose and the UM. To see a list of available experiments run:')
print(' rosie go\n')
print('Your password will be cached for a maximum of 12 hours. To store your password again run:')
print(' mosrs-auth\n')
except SetupError:
todo('Once this has been done please run this setup script again\n')
finally:
print('You can ask for help with the ACCESS systems by emailing "access_help@nf.nci.org.au"\n')
if __name__ == '__main__':
main()
|
ballouche/navitia
|
source/jormungandr/jormungandr/scenarios/journey_filter.py
|
Python
|
agpl-3.0
| 15,470
| 0.00362
|
# Copyright (c) 2001-2015, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
import logging
import itertools
import datetime
from jormungandr.scenarios.utils import compare, get_pseudo_duration, get_or_default, mode_weight
from navitiacommon import response_pb2
from jormungandr.utils import pb_del_if
def delete_journeys(responses, request):
if request.get('debug', False):
return
nb_deleted = 0
for r in responses:
nb_deleted += pb_del_if(r.journeys, lambda j: to_be_deleted(j))
if nb_deleted:
logging.getLogger(__name__).info('filtering {} journeys'.format(nb_deleted))
def filter_journeys(response_list, instance, request):
"""
Filter by side effect the list of pb responses's journeys
first draft, we only remove the journeys with the same vjs
"""
# for clarity purpose we build a temporary list
journeys = [j for r in response_list for j in r.journeys]
#DEBUG
for j in journeys:
_debug_journey(j)
_filter_too_short_heavy_journeys(journeys, request)
_filter_similar_vj_journeys(journeys, request)
_filter_too_long_journeys(journeys, instance, request)
_filter_too_long_waiting(journeys, request)
_filter_max_successive_physical_mode(journeys, instance, request)
return response_list
def final_filter_journeys(response_list, instance, request):
"""
Filter by side effect the list of pb responses's journeys
Final pass : we remove similar journeys (same lines and stop_points of change)
"""
# for clarity purpose we build a temporary list
journeys = [j for r in response_list for j in r.journeys]
final_line_filter = get_or_default(request, '_final_line_filter', False)
if final_line_filter:
_filter_similar_line_journeys(journeys, request)
_filter_too_much_connections(journeys, instance, request)
return response_list
def _get_worst_similar(j1, j2, request):
"""
Decide which is the worst journey between 2 similar journeys.
    The choice is made, in order, on:
     - arrival (or departure, for non-clockwise requests) date time
     - total duration
     - fallback duration
"""
if request.get('clockwise', True):
if j1.arrival_date_time != j2.arrival_date_time:
return j1 if j1.arrival_date_time > j2.arrival_date_time else j2
else:
if j1.departure_date_time != j2.departure_date_time:
return j1 if j1.departure_date_time < j2.departure_date_time else j2
if j1.duration != j2.duration:
return j1 if j1.duration > j2.duration else j2
return j1 if fallback_duration(j1) > fallback_duration(j2) else j2
def to_be_deleted(journey):
return 'to_delete' in journey.tags
def mark_as_dead(journey, *reasons):
journey.tags.append('to_delete')
for reason in reasons:
journey.tags.append('deleted_because_' + reason)
def _filter_similar_vj_journeys(journeys, request):
_filter_similar_journeys(journeys, request, similar_journeys_vj_generator)
def _filter_similar_line_journeys(journeys, request):
_filter_similar_journeys(journeys, request, similar_journeys_line_generator)
def _filter_similar_journeys(journeys, request, similar_journey_generator):
"""
we filter similar journeys
The given generator tells which part of journeys are compared
in case of similar journeys we let _get_worst_similar_vjs decide which one to delete
"""
logger = logging.getLogger(__name__)
for j1, j2 in itertools.combinations(journeys, 2):
if to_be_deleted(j1) or to_be_deleted(j2):
continue
if compare(j1, j2, similar_journey_generator):
#chose the best
worst = _get_worst_similar(j1, j2, request)
logger.debug("the journeys {}, {} are similar, we delete {}".format(j1.internal_id,
j2.internal_id,
worst.internal_id))
mark_as_dead(worst, 'duplicate_journey', 'similar_to_{other}'
.format(other=j1.internal_id if worst == j2 else j2.internal_id))
def _filter_too_short_heavy_journeys(journeys, request):
"""
we filter the journeys with use an "heavy" fallback mode if it's use only for a few minutes
Heavy fallback mode are Bike and Car, bss is not considered as one.
Typically you don't take your car for only 2 minutes
"""
logger = logging.getLogger(__name__)
for journey in journeys:
if to_be_deleted(journey):
continue
on_bss = False
for s in journey.sections:
if s.type == response_pb2.BSS_RENT:
on_bss = True
if s.type == response_pb2.BSS_PUT_BACK:
on_bss = False
if s.type != response_pb2.STREET_NETWORK:
continue
if s.street_network.mode == response_pb2.Car and s.duration < request['_min_car']:
logger.debug("the journey {} has not enough car, we delete it".format(journey.internal_id))
mark_as_dead(journey, "not_enough_car")
break
if not on_bss and s.street_network.mode == response_pb2.Bike and s.duration < request['_min_bike']:
logger.debug("the journey {} has not enough bike, we delete it".format(journey.internal_id))
mark_as_dead(journey, "not_enough_bike")
break
def _filter_too_long_waiting(journeys, request):
"""
filter journeys with a too long section of type waiting
"""
logger = logging.getLogger(__name__)
for j in journeys:
if to_be_deleted(j):
continue
for s in j.sections:
if s.type != response_pb2.WAITING:
continue
if s.duration < 4 * 60 * 60:
continue
logger.debug("the journey {} has a too long waiting, we delete it".format(j.internal_id))
mark_as_dead(j, "too_long_waiting")
break
def _filter_max_successive_physical_mode(journeys, instance, request):
"""
eliminates journeys with specified public_transport.physical_mode more than
_max_successive_physical_mode (used for STIF buses)
"""
logger = logging.getLogger(__name__)
max_successive_physical_mode = get_or_default(request, '_max_successive_physical_mode', 0)
if max_successive_physical_mode == 0:
return
for j in journeys:
if to_be_deleted(j):
continue
bus_count = 0
for s in j.sections:
if s.type != response_pb2.PUBLIC_TRANSPORT:
continue
if s.pt_display_informations.uris.physical_mode == instance.successive_physical_mode_to_limit_id:
bus_count += 1
else:
if bus_count <= max_successive_physical_mode:
bus_count = 0
if bus_count > max_successive_physical_mode:
logger.debug("the jou
|
dpmurphy/HSPI
|
docs/source/conf.py
|
Python
|
mit
| 5,577
| 0.001614
|
# -*- coding: utf-8 -*-
#
# HSPI documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 26 17:22:42 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode']
disqus_shortname = 'hspi'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'HSPI'
copyright = u'2017, Alex Dresko'
author = u'Alex Dresko'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'HSPIdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
    # 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'HSPI.tex', u'HSPI Documentation',
u'Alex Dresko', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'hspi', u'HSPI Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'HSPI', u'HSPI Documentation',
author, 'HSPI', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
|
mozillazg/redis-py-doc
|
redis/commands/timeseries/utils.py
|
Python
|
mit
| 1,309
| 0.002292
|
from ..helpers import nativestr
def list_to_dict(aList):
    return {nativestr(aList[i][0]): nativestr(aList[i][1]) for i in range(len(aList))}
def parse_range(response):
"""Parse range response. Used by TS.RANGE
|
and TS.REVRANGE."""
return [tuple((r[0], float(r[1]))) for r in response]
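# Illustrative only (reply shape assumed): a TS.RANGE reply such as
# [[1548149180, b'26.5'], [1548149185, b'27.8']] becomes
# [(1548149180, 26.5), (1548149185, 27.8)].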
def parse_m_range(response):
"""Parse multi range response. Used by TS.MRANGE and TS.MREVRANGE."""
res = []
for item in response:
res.append({nativestr(item[0]): [list_to_dict(item[1]), parse_range(item[2])]})
return sorted(res, key=lambda d: list(d.keys()))
def parse_get(response):
"""Parse get response. Used by TS.GET."""
if not response:
return None
return int(response[0]), float(response[1])
def parse_m_get(response):
"""Parse multi get response. Used by TS.MGET."""
res = []
for item in response:
if not item[2]:
res.append({nativestr(item[0]): [list_to_dict(item[1]), None, None]})
else:
res.append(
{
nativestr(item[0]): [
list_to_dict(item[1]),
int(item[2][0]),
float(item[2][1]),
]
}
)
return sorted(res, key=lambda d: list(d.keys()))
|
Danielhiversen/home-assistant
|
homeassistant/components/dhcp/__init__.py
|
Python
|
apache-2.0
| 13,216
| 0.001059
|
"""The dhcp integration."""
from abc import abstractmethod
from datetime import timedelta
import fnmatch
from ipaddress import ip_address as make_ip_address
import logging
import os
import threading
from aiodiscover import DiscoverHosts
from aiodiscover.discovery import (
HOSTNAME as DISCOVERY_HOSTNAME,
IP_ADDRESS as DISCOVERY_IP_ADDRESS,
MAC_ADDRESS as DISCOVERY_MAC_ADDRESS,
)
from scapy.config import conf
from scapy.error import Scapy_Exception
from homeassistant.components.device_tracker.const import (
ATTR_HOST_NAME,
ATTR_IP,
ATTR_MAC,
ATTR_SOURCE_TYPE,
DOMAIN as DEVICE_TRACKER_DOMAIN,
SOURCE_TYPE_ROUTER,
)
from homeassistant.const import (
EVENT_HOMEASSISTANT_STARTED,
EVENT_HOMEASSISTANT_STOP,
STATE_HOME,
)
from homeassistant.core import Event, HomeAssistant, State, callback
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.event import (
async_track_state_added_domain,
async_track_time_interval,
)
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_dhcp
from homeassistant.util.network import is_invalid, is_link_local, is_loopback
from .const import DOMAIN
FILTER = "udp and (port 67 or 68)"
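# BPF capture filter: DHCP traffic runs over UDP ports 67 (server) and 68 (client).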
REQUESTED_ADDR = "requested_addr"
MESSAGE_TYPE = "message-type"
HOSTNAME = "hostname"
MAC_ADDRESS = "macaddress"
IP_ADDRESS = "ip"
DHCP_REQUEST = 3
SCAN_INTERVAL = timedelta(minutes=60)
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the dhcp component."""
async def _initialize(_):
address_data = {}
integration_matchers = await async_get_dhcp(hass)
watchers = []
for cls in (DHCPWatcher, DeviceTrackerWatcher, NetworkWatcher):
watcher = cls(hass, address_data, integration_matchers)
await watcher.async_start()
watchers.append(watcher)
async def _async_stop(*_):
for watcher in watchers:
await watcher.async_stop()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_stop)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _initialize)
return True
class WatcherBase:
"""Base class for dhcp and device tracker watching."""
def __init__(self, hass, address_data, integration_matchers):
"""Initialize class."""
super().__init__()
|
self.hass = hass
self._integration_matchers = integration_matchers
self._address_data = address_data
def process_client(self, ip_address, hostname, mac_address):
"""Process a client."""
made_ip_address = make_ip_address(ip_address)
if (
|
is_link_local(made_ip_address)
or is_loopback(made_ip_address)
or is_invalid(made_ip_address)
):
# Ignore self assigned addresses, loopback, invalid
return
data = self._address_data.get(ip_address)
if (
data
and data[MAC_ADDRESS] == mac_address
and data[HOSTNAME].startswith(hostname)
):
# If the address data is the same no need
# to process it
return
self._address_data[ip_address] = {MAC_ADDRESS: mac_address, HOSTNAME: hostname}
self.process_updated_address_data(ip_address, self._address_data[ip_address])
def process_updated_address_data(self, ip_address, data):
"""Process the address data update."""
lowercase_hostname = data[HOSTNAME].lower()
uppercase_mac = data[MAC_ADDRESS].upper()
_LOGGER.debug(
"Processing updated address data for %s: mac=%s hostname=%s",
ip_address,
uppercase_mac,
lowercase_hostname,
)
for entry in self._integration_matchers:
if MAC_ADDRESS in entry and not fnmatch.fnmatch(
uppercase_mac, entry[MAC_ADDRESS]
):
continue
if HOSTNAME in entry and not fnmatch.fnmatch(
lowercase_hostname, entry[HOSTNAME]
):
continue
_LOGGER.debug("Matched %s against %s", data, entry)
self.create_task(
self.hass.config_entries.flow.async_init(
entry["domain"],
context={"source": DOMAIN},
data={
IP_ADDRESS: ip_address,
HOSTNAME: lowercase_hostname,
MAC_ADDRESS: data[MAC_ADDRESS],
},
)
)
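    # (Editor note) Each matcher entry iterated above comes from the integrations'
    # manifests; a hypothetical example entry would be
    #   {"domain": "some_integration", "hostname": "somehost*", "macaddress": "B827EB*"}
    # where hostname patterns are compared against the lowercased hostname and
    # macaddress patterns against the uppercased MAC, per the fnmatch calls above.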
@abstractmethod
def create_task(self, task):
"""Pass a task to async_add_task based on which context we are in."""
class NetworkWatcher(WatcherBase):
"""Class to query ptr records routers."""
def __init__(self, hass, address_data, integration_matchers):
"""Initialize class."""
super().__init__(hass, address_data, integration_matchers)
self._unsub = None
self._discover_hosts = None
self._discover_task = None
async def async_stop(self):
"""Stop scanning for new devices on the network."""
if self._unsub:
self._unsub()
self._unsub = None
if self._discover_task:
self._discover_task.cancel()
self._discover_task = None
async def async_start(self):
"""Start scanning for new devices on the network."""
self._discover_hosts = DiscoverHosts()
self._unsub = async_track_time_interval(
self.hass, self.async_start_discover, SCAN_INTERVAL
)
self.async_start_discover()
@callback
def async_start_discover(self, *_):
"""Start a new discovery task if one is not running."""
if self._discover_task and not self._discover_task.done():
return
self._discover_task = self.create_task(self.async_discover())
async def async_discover(self):
"""Process discovery."""
for host in await self._discover_hosts.async_discover():
self.process_client(
host[DISCOVERY_IP_ADDRESS],
host[DISCOVERY_HOSTNAME],
_format_mac(host[DISCOVERY_MAC_ADDRESS]),
)
def create_task(self, task):
"""Pass a task to async_create_task since we are in async context."""
return self.hass.async_create_task(task)
class DeviceTrackerWatcher(WatcherBase):
"""Class to watch dhcp data from routers."""
def __init__(self, hass, address_data, integration_matchers):
"""Initialize class."""
super().__init__(hass, address_data, integration_matchers)
self._unsub = None
async def async_stop(self):
"""Stop watching for new device trackers."""
if self._unsub:
self._unsub()
self._unsub = None
async def async_start(self):
"""Stop watching for new device trackers."""
self._unsub = async_track_state_added_domain(
self.hass, [DEVICE_TRACKER_DOMAIN], self._async_process_device_event
)
for state in self.hass.states.async_all(DEVICE_TRACKER_DOMAIN):
self._async_process_device_state(state)
@callback
def _async_process_device_event(self, event: Event):
"""Process a device tracker state change event."""
self._async_process_device_state(event.data.get("new_state"))
@callback
def _async_process_device_state(self, state: State):
"""Process a device tracker state."""
if state.state != STATE_HOME:
return
attributes = state.attributes
if attributes.get(ATTR_SOURCE_TYPE) != SOURCE_TYPE_ROUTER:
return
ip_address = attributes.get(ATTR_IP)
hostname = attributes.get(ATTR_HOST_NAME, "")
mac_address = attributes.get(ATTR_MAC)
if ip_address is None or mac_address is None:
return
self.process_client(ip_address, hostname, _format_mac(mac_address))
def create_task(self, task):
"""Pass a task to async_create_task since we are
|
nrz/ylikuutio
|
external/bullet3/examples/pybullet/gym/pybullet_envs/minitaur/robots/robot_base.py
|
Python
|
agpl-3.0
| 4,709
| 0.007008
|
# Lint as: python3
"""The abstract robot class."""
import abc
from typing import Optional, Sequence
# Action names for robots operating kinematically.
LINEAR_VELOCITY = "linear_velocity"
ANGULAR_VELOCITY = "angular_velocity"
class RobotBase(metaclass=abc.ABCMeta):
"""The base class for all robots used in the mobility team."""
@abc.abstractmethod
def reset(
self,
base_position: Optional[Sequence[float]] = None,
base_orientation_quaternion: Optional[Sequence[float]] = None) -> None:
"""Resets the states (e.g. pose and sensor readings) of the robot.
This is called at the start of each episode by the environment.
Args:
      base_position: Robot base position after reset. If None, the robot stays
        where it was after reset. For robots that do not support reset with a
        position change, a ValueError should be raised.
      base_orientation_quaternion: Robot base orientation after reset. If None,
        the robot stays in its pre-reset orientation. For robots that do not
        support reset with an orientation change, a ValueError should be raised.
"""
pass
@abc.abstractmethod
def terminate(self):
"""Shuts down the robot."""
pass
@abc.abstractmethod
def pre_control_step(self, action):
"""Processes the input action before the action repeat loop.
We assume that an action sent to the real robot is sticky, i.e. it will be
executed until a new action is received after some time. To simulate this,
we introduced the action_repeat parameter, to reflect how many time steps it
takes for the policy to generate a new action. That is, for each control
step, the simulation contains an inner loop:
robot.pre_control_step(action) # Smooth or interpolate the action
for i in range(action_repeat):
robot.apply_action(action)
bullet.stepSimulation(time_step) # Step the sim for one time step
robot.receive_observation() # Update the sensor observations
robot.post_control_step() # Update some internal variables.
Args:
action: Data type depends on the robot. Can be desired motor
position/torques for legged robots, or desired velocity/angular velocity
for wheeled robots.
"""
pass
@abc.abstractmethod
def apply_action(self, action):
"""Applies the action to the robot."""
pass
@abc.abstractmethod
def receive_observation(self):
"""Updates the robot sensor readings."""
|
pass
@abc.abstractmethod
def post_control_step(self):
"""Updates some internal variables such as
|
step counters."""
pass
@property
def action_space(self):
"""The action spec of the robot."""
raise NotImplementedError("action_space is not implemented")
@property
@abc.abstractmethod
def action_names(self):
"""Name of each action in the action_space.
This is a structure of strings with the same shape as the action space,
where each string describes the corresponding element of the action space
(for example, a kinematic robot might return ("linear_velocity",
"angular_velocity")). Used for logging in the safety layer.
"""
@property
def sensors(self):
"""Returns the sensors on this robot.
Sensors are the main interface between the robot class and the gym
environment. Sensors can return what the robot can measure (e.g.
joint angles, IMU readings), and can represent more general quantities, i.e.
the last action taken, that can be part of the observation space.
Sensor classes are used by the robot class to the specify its observation
space.
"""
raise NotImplementedError("sensors property not implemented")
@property
def base_orientation_quaternion(self):
"""Returns the base pose as a quaternion in format (x, y, z, w).
These properties differ from the sensor interfaces, as they represent
the built-in measurable quantities. We assume most robots have an IMU at
its base to measure the base pose. Actually, some sensor classes like the
base pose sensor and joint angle sensor will call these built-in methods. In
general, how these quantities can be extracted depends on the specific real
robots.
"""
raise NotImplementedError("base_orientation_quaternion is not implemented")
@property
def base_roll_pitch_yaw(self):
"""Returns the base roll, pitch, and yaw angles."""
raise NotImplementedError("base_roll_pitch_yaw is not implemented")
@property
def base_roll_pitch_yaw_rate(self):
raise NotImplementedError("base_roll_pitch_yaw_rate is not implemented")
@property
def base_position(self):
raise NotImplementedError("base_position is not implemented")
|
HandsomeYingyan/rpi-speech-recognition-car
|
contror/BACK.py
|
Python
|
agpl-3.0
| 331
| 0.036254
|
import RPi.GPIO as GPIO
import time
INT1=11
INT2=12
INT3=13
INT4=15
GPIO.setmode(GPIO.BOARD)
|
GPIO.setup(INT1,GPIO.OUT)
GPIO.setup(INT2,GPIO.OUT)
GPIO.setup(INT3,GPIO.OUT)
GPIO.setup(INT4,GPIO.OUT)
GPIO.output(INT1,GPIO.LOW)
GPIO.output(INT2,GPIO.HIGH)
GPIO.output(INT3,False)
GPIO.output(INT4,False)
time.sleep(0.8)
GPIO.cleanup()
|
grengojbo/st2
|
st2auth/st2auth/backends/__init__.py
|
Python
|
apache-2.0
| 1,972
| 0.001014
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import importlib
from oslo_config import cfg
from st2common.util.loader import _get_classes_in_module
__all__ = [
'get_backend_instance',
'VALID_BACKEND_NAMES'
]
BACKEND_MODULES = {
'flat_file': 'st2auth.backends.flat_file',
'mongodb': 'st2auth.backends.mongodb'
}
VALID_BACKEND_NAMES = BACKEND_MODULES.keys()
def get_backend_instance(name):
"""
:param name: Backend name.
:type name: ``str``
"""
if name not in VALID_BACKEND_NAMES:
raise ValueError('Invalid authentication backend specified: %s', name)
module = importlib.import_module(BACKEND_MODULES[name])
classes = _get_classes_in_module(module=module)
try:
cls = [klass for klass in classes if klass.__name__.endswith('AuthenticationBackend')][0]
except IndexError:
raise ValueError('"%s" backend module doesn\'t export a compatible class' % (name))
backend_kwargs = cfg.CONF.auth.backend_kwargs
if backend_kwargs:
try:
kwargs = json.loads(backend_kwargs)
except ValueError:
raise ValueError('Failed to JSON parse backend settings')
else:
kwargs = {}
return cls(**kwargs)
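# (Editor note, illustrative only) Typical use of the loader above; the backend
# name below is hypothetical:
#
#   backend = get_backend_instance('flat_file')
#
# `backend` is an instance of the chosen module's *AuthenticationBackend class,
# constructed with keyword arguments JSON-decoded from
# cfg.CONF.auth.backend_kwargs (or with no arguments when that setting is empty).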
|
gloryofrobots/obin
|
arza/types/plist.py
|
Python
|
gpl-2.0
| 11,988
| 0.000167
|
from arza.types.root import W_Root
from arza.types import space
from arza.types import api
from arza.runtime import error
class W_PList(W_Root):
def __init__(self, head, tail):
self.head = head
self.tail = tail
def __iter__(self):
cur = self
while not is_empty(cur):
yield head(cur)
cur = cur.tail
def __getitem__(self, index):
assert isinstance(index, int)
return nth(self, index)
def __len__(self):
return self._length_()
def __getslice__(self, start, end):
return slice(self, start, end)
def show(self):
els = []
cur = self
while True:
if is_empty(cur):
els.append("()")
break
els.append("%s" % (cur.head))
cur = cur.tail
return str(els)
def _to_string_(self):
from arza.types import api
els = []
cur = self
while True:
if is_empty(cur):
break
els.append(api.to_s(head(cur)))
cur = cur.tail
return "[%s]" % (", ".join(els))
def _to_repr_(self):
return self._to_string_()
def _length_(self):
return length(self)
def _get_index_(self, obj):
return index(self, obj)
# def _at_index_(self, i):
# if i < 0:
# return space.newnil()
# return nth(self, i)
def _contains_(self, key):
return contains(self, key)
def _at_index_(self, i):
if i < 0:
return space.newvoid()
return nth(self, i)
def _at_(self, key):
if not space.isint(key):
error.throw_1(error.Errors.TYPE_ERROR, key)
int_index = api.to_i(key)
if int_index < 0:
return space.newvoid()
return nth(self, int_index)
def _put_(self, k, v):
from arza.types import api
error.affirm_type(k, space.isint)
i = api.to_i(k)
return update(self, i, v)
def _put_at_index_(self, i, v):
return update(self, i, v)
def _type_(self, process):
return process.std.types.List
def _equal_(self, other):
if not space.islist(other):
return False
if is_empty(other) and is_empty(self):
return True
return equal(self, other)
def to_l(self):
return [i for i in self]
__EMPTY__ = W_PList(space.newvoid(), space.newvoid())
def empty():
return __EMPTY__
def to_tuple(pl):
return space.newtuple(pl.to_l())
def foldl(func, acc, pl):
type_check(pl)
if is_empty(pl):
return acc
return foldl(func,
func(head(pl), acc),
tail(pl))
def foldr(func, acc, pl):
type_check(pl)
if is_empty(pl):
return acc
return func(head(pl),
foldr(func, acc, tail(pl)))
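# (Editor note) For example, with pl = [1, 2, 3]:
#   foldl(f, acc, pl) evaluates to f(3, f(2, f(1, acc)))   # head consumed first
#   foldr(f, acc, pl) evaluates to f(1, f(2, f(3, acc)))   # head consumed last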
def is_empty(pl):
return pl is __EMPTY__
def head(pl):
type_check(pl)
return pl.head
def type_check(pl):
    error.affirm_type(pl, space.islist)
def tail(pl):
type_check(pl)
return pl.tail
def split(pl):
type_check(pl)
return head(pl), tail(pl)
def _length_foldl(el, acc):
return acc + 1
def length(pl):
    type_check(pl)
return foldl(_length_foldl, 0, pl)
def cons(v, pl):
error.affirm_any(v)
type_check(pl)
return W_PList(v, pl)
def cons_n_list(items, pl):
type_check(pl)
head = pl
for item in reversed(items):
head = cons(item, head)
return head
def append(pl, v):
type_check(pl)
return insert(pl, length(pl), v)
def concat(pl1, pl2):
type_check(pl1)
type_check(pl2)
return foldr(cons, pl2, pl1)
def pop(pl):
type_check(pl)
return pl.tail
def take(pl, count):
type_check(pl)
if count <= 0:
return empty()
if is_empty(pl):
return error.throw_1(error.Errors.INDEX_ERROR, space.newint(count))
return cons(head(pl), take(pop(pl), count - 1))
def drop(pl, count):
type_check(pl)
if count == 0:
return pl
if is_empty(pl):
return error.throw_1(error.Errors.INDEX_ERROR, space.newint(count))
return drop(tail(pl), count - 1)
##############################################
def _slice(pl, index, start, end):
if is_empty(pl):
return error.throw_3(error.Errors.SLICE_ERROR, space.newint(index),
space.newint(start), space.newint(end))
if index < start:
return _slice(tail(pl), index + 1, start, end)
if index < end:
return cons(head(pl), _slice(tail(pl), index + 1, start, end))
return empty()
def slice(pl, start, end):
type_check(pl)
if start == end:
return empty()
error.affirm(start >= 0, u"Invalid slice : start < 0")
error.affirm(end > start, u"Invalid slice : end <= start")
error.affirm(end > 0, u"Invalid slice : end <= 0 start")
# return take(drop(pl, start), end - 1)
return _slice(pl, 0, start, end)
##############################################
def _nth(pl, index):
from arza.types.space import newvoid
if index == 0:
return head(pl)
if is_empty(pl):
return newvoid()
return _nth(tail(pl), index - 1)
def nth(pl, index):
type_check(pl)
error.affirm(index >= 0, u"List nth: index < 0")
return _nth(pl, index)
##############################################
def _nth_tail(pl, index):
from arza.types.space import newvoid
if index == 0:
return tail(pl)
if is_empty(pl):
return newvoid()
return _nth_tail(tail(pl), index - 1)
def nth_tail(pl, index):
type_check(pl)
error.affirm(index >= 0, u"Invalid index index < 0")
return _nth_tail(pl, index)
def insert(pl, index, v):
type_check(pl)
if index == 0:
return cons(v, pl)
if is_empty(pl):
return error.throw_1(error.Errors.INDEX_ERROR, space.newint(index))
return W_PList(head(pl), insert(tail(pl), index - 1, v))
def update(pl, index, v):
type_check(pl)
if index == 0:
return cons(v, tail(pl))
if is_empty(tail(pl)):
return error.throw_1(error.Errors.INDEX_ERROR, space.newint(index))
return W_PList(head(pl), update(tail(pl), index - 1, v))
def remove_all(pl, v):
type_check(pl)
if is_empty(pl):
return pl
if api.equal_b(v, head(pl)):
l = remove_all(tail(pl), v)
return l
l = W_PList(head(pl), remove_all(tail(pl), v))
return l
def remove(pl, v):
type_check(pl)
from arza.types import api
if is_empty(pl):
return error.throw_1(error.Errors.VALUE_ERROR, pl)
if api.equal_b(v, head(pl)):
return tail(pl)
return W_PList(head(pl), remove(tail(pl), v))
def remove_silent(pl, v):
type_check(pl)
from arza.types import api
if is_empty(pl):
return empty()
if api.equal_b(v, head(pl)):
return tail(pl)
return W_PList(head(pl), remove_silent(tail(pl), v))
########################################################################
def count(pl, v):
count = 0
for i in pl:
if api.equal_b(i, v):
count += 1
return count
def is_hetero(pl):
for i in pl:
if count(pl, i) > 1:
return False
return True
def not_unique_item(pl):
for i in pl:
if count(pl, i) > 1:
return i
return None
def unique(pl, predicate=None):
if not predicate:
predicate = api.equal_b
lst = empty()
for item in pl:
if not contains_with(lst, item, predicate):
lst = cons(item, lst)
return reverse(lst)
########################################################################
def contains_with(pl, v, condition):
type_check(pl)
if is_empty(pl):
return False
if condition(v, head(pl)):
return True
return contains_with(tail(pl), v, condition)
def contains(pl, v):
return contains_with(pl, v, api.equal_b)
########################################################################
def find_with(pl, v, condition):
type_check(pl)
if is_empty(pl):
        return space.newvoid()
|
catapult-project/catapult
|
common/py_vulcanize/py_vulcanize/html_generation_controller.py
|
Python
|
bsd-3-clause
| 934
| 0.008565
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import absolute_import
import os
import re
from py_vulcanize import style_sheet
class HTMLGenerationController(object):
def __init__(self):
self.current_module = None
def GetHTMLForStylesheetHRef(self, href): # pylint: disable=unused-argument
return None
def GetHTMLForInlineStylesheet(self, contents):
if self.current_module is None:
if re.search('url\(.+\)', contents):
raise Exception(
'Default HTMLGenerationController cannot handle inline style urls')
return contents
module_dirname = os.path.dirname(self.current_module.resource.absolute_path)
ss = style_sheet.ParsedStyleSheet(
        self.current_module.loader, module_dirname, contents)
    return ss.contents_with_inlined_images
|
davy39/eric
|
Plugins/VcsPlugins/vcsMercurial/Ui_HgBundleDialog.py
|
Python
|
gpl-3.0
| 9,093
| 0.002969
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './Plugins/VcsPlugins/vcsMercurial/HgBundleDialog.ui'
#
# Created: Tue Nov 18 17:53:57 2014
# by: PyQt5 UI code generator 5.3.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_HgBundleDialog(object):
def setupUi(self, HgBundleDialog):
HgBundleDialog.setObjectName("HgBundleDialog")
HgBundleDialog.resize(450, 452)
HgBundleDialog.setSizeGripEnabled(True)
self.verticalLayout = QtWidgets.QVBoxLayout(HgBundleDialog)
self.verticalLayout.setObjectName("verticalLayout")
self.groupBox = QtWidgets.QGroupBox(HgBundleDialog)
self.groupBox.setObjectName("groupBox")
self.gridLayout = QtWidgets.QGridLayout(self.groupBox)
self.gridLayout.setObjectName("gridLayout")
self.multipleButton = QtWidgets.QRadioButton(self.groupBox)
self.multipleButton.setObjectName("multipleButton")
self.gridLayout.addWidget(self.multipleButton, 0, 0, 1, 1)
self.multipleEdit = QtWidgets.QPlainTextEdit(self.groupBox)
self.multipleEdit.setEnabled(False)
self.multipleEdit.setTabChangesFocus(True)
self.multipleEdit.setLineWrapMode(QtWidgets.QPlainTextEdit.NoWrap)
self.multipleEdit.setObjectName("multipleEdit")
self.gridLayout.addWidget(self.multipleEdit, 0, 1, 1, 1)
self.tagButton = QtWidgets.QRadioButton(self.groupBox)
self.tagButton.setObjectName("tagButton")
self.gridLayout.addWidget(self.tagButton, 1, 0, 1, 1)
self.tagCombo = QtWidgets.QComboBox(self.groupBox)
self.tagCombo.setEnabled(False)
self.tagCombo.setEditable(True)
self.tagCombo.setObjectName("tagCombo")
self.gridLayout.addWidget(self.tagCombo, 1, 1, 1, 1)
self.branchButton = QtWidgets.QRadioButton(self.groupBox)
self.branchButton.setObjectName("branchButton")
self.gridLayout.addWidget(self.branchButton, 2, 0, 1, 1)
self.branchCombo = QtWidgets.QComboBox(self.groupBox)
self.branchCombo.setEnabled(False)
self.branchCombo.setEditable(True)
self.branchCombo.setObjectName("branchCombo")
self.gridLayout.addWidget(self.branchCombo, 2, 1, 1, 1)
self.bookmarkButton = QtWidgets.QRadioButton(self.groupBox)
self.bookmarkButton.setObjectName("bookmarkButton")
self.gridLayout.addWidget(self.bookmarkButton, 3, 0, 1, 1)
self.bookmarkCombo = QtWidgets.QComboBox(self.groupBox)
self.bookmarkCombo.setEnabled(False)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.bookmarkCombo.sizePolicy().hasHeightForWidth())
self.bookmarkCombo.setSizePolicy(sizePolicy)
self.bookmarkCombo.setEditable(True)
self.bookmarkCombo.setObjectName("bookmarkCombo")
self.gridLayout.addWidget(self.bookmarkCombo, 3, 1, 1, 1)
self.noneButton = QtWidgets.QRadioButton(self.groupBox)
self.noneButton.setChecked(True)
self.noneButton.setObjectName("noneButton")
self.gridLayout.addWidget(self.noneButton, 4, 0, 1, 2)
self.verticalLayout.addWidget(self.groupBox)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.label_2 = QtWidgets.QLabel(HgBundleDialog)
self.label_2.setObjectName("label_2")
self.horizontalLayout_2.addWidget(self.label_2)
self.baseRevisionsEdit = QtWidgets.QPlainTextEdit(HgBundleDialog)
self.baseRevisionsEdit.setTabChangesFocus(True)
self.baseRevisionsEdit.setLineWrapMode(QtWidgets.QPlainTextEdit.NoWrap)
self.baseRevisionsEdit.setObjectName("baseRevisionsEdit")
self.horizontalLayout_2.addWidget(self.baseRevisionsEdit)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.label = QtWidgets.QLabel(HgBundleDialog)
self.label.setObjectName("label")
self.horizontalLayout.addWidget(self.label)
self.compressionCombo = QtWidgets.QComboBox(HgBundleDialog)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.compressionCombo.sizePolicy().hasHeightForWidth())
self.compressionCombo.setSizePolicy(sizePolicy)
self.compressionCombo.setObjectName("compressionCombo")
self.horizontalLayout.addWidget(self.compressionCombo)
self.verticalLayout.addLayout(self.horizontalLayout)
self.allCheckBox = QtWidgets.QCheckBox(HgBundleDialog)
self.allCheckBox.setObjectName("allCheckBox")
self.verticalLayout.addWidget(self.allCheckBox)
self.buttonBox = QtWidgets.QDialogButtonBox(HgBundleDialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(HgBundleDialog)
self.buttonBox.accepted.connect(HgBundleDialog.accept)
self.buttonBox.rejected.connect(HgBundleDialog.reject)
self.tagButton.toggled['bool'].connect(self.tagCombo.setEnabled)
self.branchButton.toggled['bool'].connect(self.branchCombo.setEnabled)
self.bookmarkButton.toggled['bool'].connect(self.bookmarkCombo.setEnabled)
self.multipleButton.toggled['bool'].connect(self.multipleEdit.setEnabled)
QtCore.QMetaObject.connectSlotsByName(HgBundleDialog)
HgBundleDialog.setTabOrder(self.multipleButton, self.multipleEdit)
HgBundleDialog.setTabOrder(self.multipleEdit, self.tagButton)
HgBundleDialog.setTabOrder(self.tagButton, self.tagCombo)
HgBundleDialog.setTabOrder(self.tagCombo, self.branchButton)
HgBundleDialog.setTabOrder(self.branchButton, self.branchCombo)
HgBundleDialog.setTabOrder(self.branchCombo, self.bookmarkButton)
HgBundleDialog.setTabOrder(self.bookmarkButton, self.bookmarkCombo)
HgBundleDialog.setTabOrder(self.bookmarkCombo, self.noneButton)
HgBundleDialog.setTabOrder(self.noneButton, self.baseRevisionsEdit)
HgBundleDialog.setTabOrder(self.baseRevisionsEdit, self.compressionCombo)
HgBundleDialog.setTabOrder(self.compressionCombo, self.allCheckBox)
HgBundleDialog.setTabOrder(self.allCheckBox, self.buttonBox)
def retranslateUi(self, HgBundleDialog):
_translate = QtCore.QCoreApplication.translate
HgBundleDialog.setWindowTitle(_translate("HgBundleDialog", "Mercurial Bundle"))
self.groupBox.setTitle(_translate("HgBundleDialog", "Revision"))
self.multipleButton.setToolTip(_translate("HgBundleDialog", "Select to specify multiple revisions"))
|
        self.multipleButton.setText(_translate("HgBundleDialog", "Revisions:"))
self.multipleEdit.setToolTip(_translate("HgBundleDialog", "Enter revisions by number, id, range or revset expression one per line"))
self.tagButton.setToolTip(_translate("HgBundleDialog", "Select to specify a revision by a tag"))
self.tagButton.setText(_translate("HgBundleDialog", "Tag:"))
self.tagCombo.setToolTip(_translate("HgBundleDialog", "Enter a tag name"))
self.branchButton.setToolTip(_translate("HgBundleDialog", "Select to specify a revision by a branch"))
self.branchButton.setText(_translate("HgBundleDialog", "Branch:"))
self.branchCombo.setToolTip(_translate("HgBundleDialog", "Enter a branch name"))
self.bookmarkButton.setToolTip(_translate("HgBundleDialog", "Select to specify a revision by a bookmark"))
        self.bookmarkButton.setText(_translate("HgBundleDialog", "Bookmark:"))
|
gdassori/python
|
python-twisted/tests/unit-test-full.py
|
Python
|
mit
| 7,584
| 0.006337
|
## www.pubnub.com - PubNub Real-time push service in the cloud.
# coding=utf8
## PubNub Real-time Push APIs and Notifications Framework
## Copyright (c) 2010 Stephen Blum
## http://www.pubnub.com/
## TODO Tests
##
## - wait 20 minutes, send a message, receive and success.
## -
## -
##
##
## -----------------------------------
## PubNub 3.1 Real-time Push Cloud API
## -----------------------------------
import sys
from pubnub import PubnubTwisted as Pubnub
publish_key = len(sys.argv) > 1 and sys.argv[1] or 'demo'
subscribe_key = len(sys.argv) > 2 and sys.argv[2] or 'demo'
secret_key = len(sys.argv) > 3 and sys.argv[3] or None
cipher_key = len(sys.argv) > 4 and sys.argv[4] or None
ssl_on = len(sys.argv) > 5 and bool(sys.argv[5]) or False
## -----------------------------------------------------------------------
## Command Line Options Supplied PubNub
## -----------------------------------------------------------------------
pubnub_user_supplied_options = Pubnub(
publish_key, # OPTIONAL (supply None to disable)
subscribe_key, # REQUIRED
secret_key, # OPTIONAL (supply None to disable)
cipher_key, # OPTIONAL (supply None to disable)
ssl_on # OPTIONAL (supply None to disable)
)
## -----------------------------------------------------------------------
## High Security PubNub
## -----------------------------------------------------------------------
pubnub_high_security = Pubnub(
## Publish Key
'pub-c-a30c030e-9f9c-408d-be89-d70b336ca7a0',
## Subscribe Key
'sub-c-387c90f3-c018-11e1-98c9-a5220e0555fd',
## Secret Key
    'sec-c-MTliNDE0NTAtYjY4Ni00MDRkLTllYTItNDhiZGE0N2JlYzBl',
## Cipher Key
'YWxzamRmbVjFaa05HVnGFqZHM3NXRBS73jxmhVMkjiwVVXV1d5UrXR1JLSkZFRr' +
'WVd4emFtUm1iR0TFpUZvbiBoYXMgYmVlbxWkhNaF3uUi8kM0YkJTEVlZYVFjBYi' +
|
'jFkWFIxSkxTa1pGUjd874hjklaTFpUwRVuIFNob3VsZCB5UwRkxUR1J6YVhlQWa' +
'V1ZkNGVH32mDkdho3pqtRnRVbTFpUjBaeGUgYXNrZWQtZFoKjda40ZWlyYWl1eX' +
'U4RkNtdmNub2l1dHE2TTA1jd84jkdJTbFJXYkZwWlZtRnKkWVrSRhhWbFpZVmFz' +
'c2RkZmTFpUpGa1dGSXhTa3hUYTFwR1Vpkm9yIGluZm9ybWFNfdsWQdSiiYXNWVX' +
'RSblJWYlRGcFVqQmFlRmRyYUU0MFpXbHlZV2wxZVhVNFJrTnR51YjJsMWRIRTJU' +
'W91ciBpbmZvcm1hdGliBzdWJtaXR0ZWQb3UZSBhIHJlc3BvbnNlLCB3ZWxsIHJl' +
'VEExWdHVybiB0am0aW9uIb24gYXMgd2UgcG9zc2libHkgY2FuLuhcFe24ldWVns' +
'dSaTFpU3hVUjFKNllWaFdhRmxZUWpCaQo34gcmVxdWlGFzIHNveqQl83snBfVl3',
## 2048bit SSL ON - ENABLED TRUE
True
)
## -----------------------------------------------------------------------
## Channel | Message Test Data (UTF-8)
## -----------------------------------------------------------------------
crazy = ' ~`â¦â§!@#$%^&*(顶顅Ȓ)+=[]\\{}|;\':",./<>?abcd'
many_channels = [str(x) + '-many_channel_test' for x in range(10)]
runthroughs = 0
planned_tests = 2
delivery_retries = 0
max_retries = 10
## -----------------------------------------------------------------------
## Unit Test Function
## -----------------------------------------------------------------------
def test(trial, name):
if trial:
print('PASS: ' + name)
else:
print('- FAIL - ' + name)
def test_pubnub(pubnub):
global runthroughs, planned_tests, delivery_retries, max_retries
## -----------------------------------------------------------------------
## Many Channels
## -----------------------------------------------------------------------
def phase2():
status = {
'sent': 0,
'received': 0,
'connections': 0
}
def received(message, chan):
global runthroughs
test(status['received'] <= status['sent'], 'many sends')
status['received'] += 1
pubnub.unsubscribe({'channel': chan})
if status['received'] == len(many_channels):
runthroughs += 1
if runthroughs == planned_tests:
pubnub.stop()
def publish_complete(info, chan):
global delivery_retries, max_retries
status['sent'] += 1
test(info, 'publish complete')
test(info and len(info) > 2, 'publish response')
if not info[0]:
delivery_retries += 1
if max_retries > delivery_retries:
sendit(chan)
def sendit(chan):
tchan = chan
pubnub.publish({
'channel': chan,
'message': "Hello World",
'callback': (lambda msg: publish_complete(msg, tchan))
})
def connected(chan):
status['connections'] += 1
sendit(chan)
def delivered(info):
if info and info[0]:
status['sent'] += 1
def subscribe(chan):
pubnub.subscribe({
'channel': chan,
'connect': (lambda: connected(chan + '')),
'callback': (lambda msg: received(msg, chan))
})
## Subscribe All Channels
for chan in many_channels:
subscribe(chan)
## -----------------------------------------------------------------------
## Time Example
## -----------------------------------------------------------------------
def time_complete(timetoken):
test(timetoken, 'timetoken fetch')
test(isinstance(timetoken, int), 'timetoken int type')
pubnub.time({'callback': time_complete})
## -----------------------------------------------------------------------
## Publish Example
## -----------------------------------------------------------------------
def publish_complete(info):
test(info, 'publish complete')
test(info and len(info) > 2, 'publish response')
pubnub.history({
'channel': crazy,
'limit': 10,
'callback': history_complete
})
## -----------------------------------------------------------------------
## History Example
## -----------------------------------------------------------------------
def history_complete(messages):
test(messages and len(messages) > 0, 'history')
test(messages, 'history')
pubnub.publish({
'channel': crazy,
'message': "Hello World",
'callback': publish_complete
})
## -----------------------------------------------------------------------
## Subscribe Example
## -----------------------------------------------------------------------
def message_received(message):
test(message, 'message received')
pubnub.unsubscribe({'channel': crazy})
def done():
pubnub.unsubscribe({'channel': crazy})
pubnub.publish({
'channel': crazy,
'message': "Hello World",
'callback': (lambda x: x)
})
def dumpster(message):
test(0, 'never see this')
pubnub.subscribe({
'channel': crazy,
'connect': done,
'callback': dumpster
})
def connected():
pubnub.publish({
'channel': crazy,
'message': {'Info': 'Connected!'}
})
pubnub.subscribe({
'channel': crazy,
'connect': connected,
'callback': message_received
})
phase2()
## -----------------------------------------------------------------------
## Run Tests
## -----------------------------------------------------------------------
test_pubnub(pubnub_user_supplied_options)
test_pubnub(pubnub_high_security)
pubnub_high_security.start()
|
hsoft/dupeguru
|
qt/results_model.py
|
Python
|
gpl-3.0
| 3,932
| 0.000763
|
# Created By: Virgil Dupras
# Created On: 2009-04-23
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from PyQt5.QtCore import Qt, pyqtSignal, QModelIndex
from PyQt5.QtGui import QBrush, QFont, QFontMetrics, QColor
from PyQt5.QtWidgets import QTableView
from qtlib.table import Table
class ResultsModel(Table):
def __init__(self, app, view, **kwargs):
model = app.model.result_table
super().__init__(model, view, **kwargs)
view.horizontalHeader().setSortIndicator(1, Qt.AscendingOrder)
font = view.font()
font.setPointSize(app.prefs.tableFontSize)
self.view.setFont(font)
fm = QFontMetrics(font)
view.verticalHeader().setDefaultSectionSize(fm.height() + 2)
app.willSavePrefs.connect(self.appWillSavePrefs)
self.prefs = app.prefs
def _getData(self, row, column, role):
if column.name == "marked":
if role == Qt.CheckStateRole and row.markable:
return Qt.Checked if row.marked else Qt.Unchecked
return None
if role == Qt.DisplayRole:
data = row.data_delta if self.model.delta_values else row.data
return data[column.name]
elif role == Qt.ForegroundRole:
if row.isref:
return QBrush(Qt.blue)
elif row.is_cell_delta(column.name):
return QBrush(QColor(255, 142, 40)) # orange
elif role == Qt.FontRole:
font = QFont(self.view.font())
if self.prefs.reference_bold_font:
font.setBold(row.isref)
return font
elif role == Qt.EditRole:
if column.name == "name":
return row.data[column.name]
return None
def _getFlags(self, row, column):
flags = Qt.ItemIsEnabled | Qt.ItemIsSelectable
if column.name == "marked":
if row.markable:
flags |= Qt.ItemIsUserCheckable
elif column.name == "name":
flags |= Qt.ItemIsEditable
return flags
def _setData(self, row, column, value, role):
if role == Qt.CheckStateRole:
if column.name == "marked":
row.marked = bool(value)
return True
elif role == Qt.EditRole:
if column.name == "name":
return self.model.rename_selected(value)
return False
def sort(self, column, order):
column = self.model.COLUMNS[column]
self.model.sort(column.name, order == Qt.AscendingOrder)
# --- Properties
@property
def power_marker(self):
return self.model.power_marker
@power_marker.setter
def power_marker(self, value):
self.model.power_marker = value
@property
def delta_values(self):
return self.model.delta_values
@delta_values.setter
def delta_values(self, value):
self.model.delta_values = value
|
# --- Events
def appWillSavePrefs(self):
self.model.columns.save_columns()
# --- model --> view
def invalidate_markings(self):
# redraw view
# HACK. this is the only way I found to update the widget without reseting everything
self.view.scroll(0, 1)
self.view.scroll(0, -1)
class ResultsView(QTableView):
# --- Override
def keyPressEvent(self, event):
if event.text() == " ":
self.spacePressed.emit()
return
super().keyPressEvent(event)
def mouseDoubleClickEvent(self, event):
self.doubleClicked.emit(QModelIndex())
# We don't call the superclass' method because the default behavior is to rename the cell.
# --- Signals
spacePressed = pyqtSignal()
|
ndparker/tdi
|
tdi/tools/htmlform/_adapters.py
|
Python
|
apache-2.0
| 4,395
| 0
|
# -*- coding: ascii -*-
r"""
:Copyright:
Copyright 2007 - 2015
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=====================
HTML forms reloaded
=====================
Form helper classes.
"""
if __doc__:
# pylint: disable = redefined-builtin
__doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
__all__ = [
'DictParameterAdapter', 'ListDictParameterAdapter',
'MultiDictParameterAdapter', 'NullParameterAdapter',
]
from ._interfaces import ParameterAdapterInterface
class DictParameterAdapter(object):
"""
HTMLForm parameter adapter from a simple dict
:IVariables:
`param` : ``dict``
Parameters
"""
__implements__ = [ParameterAdapterInterface]
def __init__(self, param):
"""
Initialization
:Parameters:
`param` : ``dict``
Parameters
"""
self.param = param
def getfirst(self, name, default=None):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
return self.param.get(name, default)
def getlist(self, name):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
if name in self.param:
return [self.param[name]]
return []
class ListDictParameterAdapter(object):
"""
HTMLForm parameter adapter from a dict of sequences
:IVariables:
`param` : dict of sequences
Parameters
"""
__implements__ = [ParameterAdapterInterface]
def __init__(self, param):
"""
Initialization
:Parameters:
`param` : dict of sequences
Parameters. Empty sequences act as if the key was not present.
Otherwise ``getfirst`` will return the first element and
            ``getlist`` will return a shallow copy of the sequence as a ``list``.
"""
self.param = param
def getfirst(self, name, default=None):
""" :See: ``tdi.tools.htmlform.ParameterAd
|
apterInterface`` """
try:
result = self.param[name]
except KeyError:
pass
else:
if result:
return result[0]
return default
def getlist(self, name):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
try:
result = self.param[name]
except KeyError:
pass
else:
return list(result)
return []
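# (Editor note, hypothetical values) For example, with
#   adapter = ListDictParameterAdapter({'tag': ['a', 'b'], 'empty': []})
# adapter.getfirst('tag') == 'a' and adapter.getlist('tag') == ['a', 'b'],
# while the empty sequence behaves like a missing key:
# adapter.getfirst('empty') is None and adapter.getlist('empty') == [].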
class MultiDictParameterAdapter(object):
"""
HTMLForm parameter adapter from a multidict (like paste provides)
:IVariables:
`param` : multidict
Parameters
"""
__implements__ = [ParameterAdapterInterface]
def __init__(self, param):
"""
Initialization
:Parameters:
`param` : multidict
Parameters. The object is expected to provide a getall() method
"""
self.param = param
def getfirst(self, name, default=None):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
try:
return self.param.getall(name)[0]
except IndexError:
return default
def getlist(self, name):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
return self.param.getall(name)
class NullParameterAdapter(object):
""" This adapter just returns nothing """
__implements__ = [ParameterAdapterInterface]
def getlist(self, name):
""" :See: `ParameterAdapterInterface.getlist` """
# pylint: disable = unused-argument
return []
def getfirst(self, name, default=None):
""" :See: `ParameterAdapterInterface.getfirst` """
# pylint: disable = unused-argument
return default
|
suziesparkle/wagtail
|
wagtail/wagtailadmin/views/home.py
|
Python
|
bsd-3-clause
| 2,933
| 0.003069
|
from django.shortcuts import render
from django.contrib.auth.decorators import permission_required
from django.conf import settings
from django.template import RequestContext
from django.template.loader import render_to_string
from wagtail.wagtailadmin import hooks
from wagtail.wagtailcore.models import Page, PageRevision, UserPagePermissionsProxy
from wagtail.wagtaildocs.models import Document
from wagtail.wagtailimages.models import get_image_model
# Panels for the homepage
class SiteSummaryPanel(object):
name = 'site_summary'
order = 100
def __init__(self, request):
self.request = request
def render(self):
return render_to_string('wagtailadmin/home/site_summary.html', {
'total_pages': Page.objects.count() - 1, # subtract 1 because the root node is not a real page
'total_images': get_image_model().objects.count(),
'total_docs': Document.objects.count(),
}, RequestContext(self.request))
class PagesForModerationPanel(object):
name = 'pages_for_moderation'
order = 200
def __init__(self, request):
self.request = request
user_perms = UserPagePermissionsProxy(request.user)
self.page_revisions_for_moderation = user_perms.revisions_for_moderation().select_related('page', 'user').order_by('-created_at')
def render(self):
return render_to_string('wagtailadmin/home/pages_for_moderation.html', {
'page_revisions_for_moderation': self.page_revisions_for_moderation,
}, RequestContext(self.request))
class RecentEditsPanel(object):
name = 'recent_edits'
order = 300
def __init__(self, request):
self.request = request
# Last n edited pages
self.last_edits = PageRevision.objects.raw(
"""
select wp.* FROM
wagtailcore_pagerevision wp JOIN (
            SELECT max(created_at) as max_created_at, page_id FROM wagtailcore_pagerevision group by page_id
) as max_rev on max_rev.max_created_at = wp.created_at and wp.user_id = %s order by wp.created_at desc
""", [request.user.id])[:5]
def render(self):
return render_to_string('wagtailadmin/home/recent_edits.html', {
'last_edits': self.last_edits,
}, RequestContext(self.request))
|
@permission_required('wagtailadmin.access_admin')
def home(request):
panels = [
SiteSummaryPanel(request),
PagesForModerationPanel(request),
RecentEditsPanel(request),
]
for fn in hooks.get_hooks('construct_homepage_panels'):
fn(request, panels)
return render(request, "wagtailadmin/home.html", {
'site_name': settings.WAGTAIL_SITE_NAME,
'panels': sorted(panels, key=lambda p: p.order),
'user': request.user
})
def error_test(request):
raise Exception("This is a test of the emergency broadcast system.")
|
KirillMysnik/ArcJail
|
srcds/addons/source-python/plugins/arcjail/modules/arcjail/arcjail_user.py
|
Python
|
gpl-3.0
| 9,070
| 0.001985
|
# This file is part of ArcJail.
#
# ArcJail is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ArcJail is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ArcJail. If not, see <http://www.gnu.org/licenses/>.
import json
from time import time
from events import Event
from listeners.tick import GameThread
from ...classes.base_player_manager import BasePlayerManager
from ...internal_events import InternalEvent
from ...models.arcjail_user import ArcjailUser as DB_ArcjailUser
from ...resource.logger import logger
from ...resource.sqlalchemy import Session
from .item import Item
from .global_inventory import global_inventory
class ArcjailUser:
def __init__(self, player):
self.player = player
# We're saving to database asynchronously, and some properties will
# be unavailable
self._steamid = player.steamid
        self._name = player.name
self.last_online_reward = time()
self.account = 0
self.slot_data = []
self._loaded = False
@property
def loaded(self):
return self._loaded
def load_from_database(self):
if self._steamid == "BOT":
return
        db_session = Session()
db_arcjail_user = db_session.query(DB_ArcjailUser).filter_by(
steamid=self._steamid).first()
if db_arcjail_user is not None:
self.account = db_arcjail_user.account
self.last_online_reward = db_arcjail_user.last_online_reward
self.slot_data = json.loads(db_arcjail_user.slot_data)
self._loaded = True
db_session.close()
# Iter over all of our items to initialize them in global_inventory
list(self.iter_all_items())
def save_to_database(self):
from ..credits import credits_config
if self._steamid == "BOT":
return
if not self._loaded:
raise RuntimeError("User couldn't be synced with database")
db_session = Session()
db_arcjail_user = db_session.query(DB_ArcjailUser).filter_by(
steamid=self._steamid).first()
if db_arcjail_user is None:
self.account = int(
credits_config['initial_credits']['initial_credits'])
db_arcjail_user = DB_ArcjailUser()
db_arcjail_user.steamid = self._steamid
db_session.add(db_arcjail_user)
db_arcjail_user.last_seen = time()
db_arcjail_user.last_used_name = self._name
db_arcjail_user.last_online_reward = self.last_online_reward
db_arcjail_user.account = self.account
db_arcjail_user.slot_data = json.dumps(self.slot_data)
db_session.commit()
db_session.close()
for item in self.iter_all_items():
global_inventory.save(item, async=False)
@classmethod
def save_temp_item(cls, steamid, item):
"""Used to save items whose IDs became
available after their owner has disconnected."""
if steamid == "BOT":
return
db_session = Session()
db_arcjail_user = db_session.query(DB_ArcjailUser).filter_by(
steamid=steamid).first()
if db_arcjail_user is None:
db_arcjail_user = DB_ArcjailUser()
db_arcjail_user.steamid = steamid
db_session.add(db_arcjail_user)
slot_data = []
else:
slot_data = json.loads(db_arcjail_user.slot_data)
db_arcjail_user.slot_data = json.dumps(slot_data + [item.id, ])
db_session.commit()
db_session.close()
def iter_all_items(self):
for item_id in self.slot_data:
yield global_inventory[item_id]
def iter_items_by_class_id(self, class_id):
for item in self.iter_all_items():
if item.class_.class_id == class_id:
yield item
def get_item_by_instance_id(self, class_id, instance_id):
for item in self.iter_items_by_class_id(class_id):
if item.class_.instance_id == instance_id:
return item
return None
def give_item(self, *args, amount=1, async=True):
if isinstance(args[0], Item):
item = args[0]
logger.log_debug(
"ArcjailUser.give_item: Giving item {} to (SteamID={}) "
"(async={})".format(item, self.player.steamid, async))
else:
class_id, instance_id = args[:2]
logger.log_debug(
"ArcjailUser.give_item: Giving item (class_id={}, "
"instance_id={}) to (SteamID={}) (async={})".format(
class_id, instance_id, self.player.steamid, async))
item = self.get_item_by_instance_id(class_id, instance_id)
if item is None:
if async:
def create_item():
item = Item.create(class_id, instance_id, self.player,
amount, async=False)
self.slot_data.append(item.id)
logger.log_debug(
"ArcjailUser.give_item: ... finished creating new "
"item {} (async=True) "
"-- ID added to slot data".format(item))
logger.log_debug(
"ArcjailUser.give_item: Creating new item (class_id={}, "
"instance_id={}) to (SteamID={}) (async=True)...".format(
class_id, instance_id, self.player.steamid))
GameThread(target=create_item).start()
return None
else:
item = Item.create(
class_id, instance_id, self.player, amount, async=False)
self.slot_data.append(item.id)
logger.log_debug(
"ArcjailUser.give_item: Created new item {} to "
"(SteamID={}) (async=False) "
"-- ID added to slot data".format(
item, self.player.steamid))
return item
item.give(amount, async)
return item
def take_item(self, *args, amount=1, async=True):
if isinstance(args[0], Item):
item = args[0]
logger.log_debug(
"ArcjailUser.take_item: Taking item {} from (SteamID={}) "
"(async={})".format(item, self.player.steamid, async))
else:
class_id, instance_id = args[:2]
logger.log_debug(
"ArcjailUser.take_item: Taking item (class_id={}, "
"instance_id={}) from (SteamID={}) (async={})".format(
class_id, instance_id, self.player.steamid, async))
item = self.get_item_by_instance_id(class_id, instance_id)
if item is None:
msg = ("Player {} doesn't have item (class_id={}, "
"instance_id={})".format(self, class_id, instance_id))
logger.log_warning(msg)
raise ValueError(msg)
if item.amount - amount <= 0:
self.slot_data.remove(item.id)
logger.log_debug("ArcjailUser.take_item: "
"-- ID removed from slot data")
item.take(amount, async)
return item
def __str__(self):
return "<ArcjailUser(userid={})>".format(self.player.userid)
class ArcjailUserManager(BasePlayerManager):
def create(self, player):
self[player.index] = arcjail_user = self._base_class(player)
GameThread(target=arcjail_user.load_from_database).start()
for callback in self._callbacks_on_player_registered:
callback(self[player.index])
return self[player.index]
|
n2o/dpb
|
links/admin.py
|
Python
|
mit
| 339
| 0
|
from django.contrib import admin
from .models import Link, LinkCategory
from dpb.admin import PageDownAdmin
class LinkAdmin(PageDownAdmin):
    list_display = ('title', 'state', 'category', 'website')
list_filter = ('category',)
search_fields = ['title']
admin.site.register(Link, LinkAdmin)
admin.site.register(LinkCategory)
|
ntymtsiv/tempest
|
tempest/api/identity/admin/test_users.py
|
Python
|
apache-2.0
| 9,689
| 0
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from testtools.matchers import Contains
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest.test import attr
class UsersTestJSON(base.BaseIdentityAdminTest):
_interface = 'json'
@classmethod
def setUpClass(cls):
super(UsersTestJSON, cls).setUpClass()
cls.alt_user = data_utils.rand_name('test_user_')
cls.alt_password = data_utils.rand_name('pass_')
cls.alt_email = cls.alt_user + '@testmail.tm'
@attr(type='smoke')
def test_create_user(self):
# Create a user
self.data.setup_test_tenant()
resp, user = self.client.create_user(self.alt_user, self.alt_password,
self.data.tenant['id'],
self.alt_email)
self.data.users.append(user)
self.assertEqual('200', resp['status'])
self.assertEqual(self.alt_user, user['name'])
@attr(type='smoke')
def test_create_user_with_enabled(self):
# Create a user with enabled : False
self.data.setup_test_tenant()
name = data_utils.rand_name('test_user_')
resp, user = self.client.create_user(name, self.alt_password,
self.data.tenant['id'],
self.alt_email, enabled=False)
self.data.users.append(user)
self.assertEqual('200', resp['status'])
self.assertEqual(name, user['name'])
self.assertEqual('false', str(user['enabled']).lower())
self.assertEqual(self.alt_email, user['email'])
@attr(type='smoke')
def test_update_user(self):
# Test case to check if updating of user attributes is successful.
test_user = data_utils.rand_name('test_user_')
self.data.setup_test_tenant()
resp, user = self.client.create_user(test_user, self.alt_password,
self.data.tenant['id'],
self.alt_email)
# Delete the User at the end of this method
self.addCleanup(self.client.delete_user, user['id'])
# Updating user details with new values
u_name2 = data_utils.rand_name('user2-')
u_email2 = u_name2 + '@testmail.tm'
resp, update_user = self.client.update_user(user['id'], name=u_name2,
email=u_email2,
enabled=False)
# Assert response body of update user.
self.assertEqual(200, resp.status)
self.assertEqual(u_name2, update_user['name'])
self.assertEqual(u_email2, update_user['email'])
self.assertEqual('false', str(update_user['enabled']).lower())
# GET by id after updating
resp, updated_user = self.client.get_user(user['id'])
# Assert response body of GET after updating
self.assertEqual(u_name2, updated_user['name'])
self.assertEqual(u_email2, updated_user['email'])
self.assertEqual('false', str(updated_user['enabled']).lower())
@attr(type='smoke')
def test_delete_user(self):
# Delete a user
test_user = data_utils.rand_name('test_user_')
self.data.setup_test_tenant()
resp, user = self.client.create_user(test_user, self.alt_password,
self.data.tenant['id'],
self.alt_email)
self.assertEqual('200', resp['status'])
resp, body = self.client.delete_user(user['id'])
self.assertEqual('204', resp['status'])
@attr(type='smoke')
def test_user_authentication(self):
# Valid user's token is authenticated
self.data.setup_test_user()
# Get a token
self.token_client.auth(self.data.test_user, self.data.test_password,
                               self.data.test_tenant)
# Re-auth
resp, body = self.token_client.auth(self.data.test_user,
|
self.data.test_password,
self.data.test_tenant)
self.assertEqual('200', resp['status'])
@attr(type='gate')
def test_authentication_request_without_token(self):
# Request for token authentication with a valid token in header
self.data.setup_test_user()
self.token_client.auth(self.data.test_user, self.data.test_password,
self.data.test_tenant)
# Get the token of the current client
token = self.client.auth_provider.get_token()
# Delete the token from database
self.client.delete_token(token)
# Re-auth
resp, body = self.token_client.auth(self.data.test_user,
self.data.test_password,
self.data.test_tenant)
self.assertEqual('200', resp['status'])
self.client.auth_provider.clear_auth()
@attr(type='smoke')
def test_get_users(self):
# Get a list of users and find the test user
self.data.setup_test_user()
resp, users = self.client.get_users()
self.assertThat([u['name'] for u in users],
Contains(self.data.test_user),
"Could not find %s" % self.data.test_user)
@attr(type='gate')
def test_list_users_for_tenant(self):
# Return a list of all users for a tenant
self.data.setup_test_tenant()
user_ids = list()
fetched_user_ids = list()
alt_tenant_user1 = data_utils.rand_name('tenant_user1_')
resp, user1 = self.client.create_user(alt_tenant_user1, 'password1',
self.data.tenant['id'],
'user1@123')
self.assertEqual('200', resp['status'])
user_ids.append(user1['id'])
self.data.users.append(user1)
alt_tenant_user2 = data_utils.rand_name('tenant_user2_')
resp, user2 = self.client.create_user(alt_tenant_user2, 'password2',
self.data.tenant['id'],
'user2@123')
self.assertEqual('200', resp['status'])
user_ids.append(user2['id'])
self.data.users.append(user2)
# List of users for the respective tenant ID
resp, body = self.client.list_users_for_tenant(self.data.tenant['id'])
self.assertIn(resp['status'], ('200', '203'))
for i in body:
fetched_user_ids.append(i['id'])
# verifying the user Id in the list
missing_users =\
[user for user in user_ids if user not in fetched_user_ids]
self.assertEqual(0, len(missing_users),
"Failed to find user %s in fetched list" %
', '.join(m_user for m_user in missing_users))
@attr(type='gate')
def test_list_users_with_roles_for_tenant(self):
# Return list of users on tenant when roles are assigned to users
self.data.setup_test_user()
self.data.setup_test_role()
user = self.get_user_by_name(self.data.test_user)
tenant = self.get_tenant_by_name(self.data.test_tenant)
role = self.get_role_by_name(self.data.test_role)
# Assigning roles to two users
user_ids = list()
fetched_user_ids = list()
user_ids.append(user['id'])
resp,
|
asimonov-im/boinc
|
py/Boinc/tools.py
|
Python
|
gpl-3.0
| 2,065
| 0.00678
|
## $Id: tools.py 23525 2011-05-12 04:11:40Z davea $
import configxml
try:
# use new hashlib if available
from hashlib import md5
except:
import md5
import os, shutil, binascii, filecmp
# from http://www.plope.com/software/uuidgen/view
_urandomfd = None
def urandom(n):
"""urandom(n) -> str
Return a string of n random bytes suitable for cryptographic use.
"""
global _urandomfd
if _urandomfd is None:
try:
            _urandomfd = os.open("/dev/urandom", os.O_RDONLY)
except:
_urandomfd = NotImplementedError
if _urandomfd is NotImplementedError:
raise NotImplementedError("/dev/urandom (or equivalent) not found")
bytes = ""
while len(bytes) < n:
bytes += os.read(_urandomfd, n - len(bytes))
return bytes
def make_uuid():
return binascii.hexlify(urandom(16))
def md5_file(path):
"""
    Return an MD5 checksum object for a file's contents (use .hexdigest() for the 32-character hex digest)
Read the file in chunks
"""
chunk = 8096
try:
checksum = md5()
except NameError:
checksum = md5.new()
fp = open(path, 'r')
while True:
buffer = fp.read(chunk)
if not buffer:
break
checksum.update(buffer)
fp.close()
return checksum
def file_size(path):
"""Return the size of a file"""
f = open(path)
f.seek(0,2)
return f.tell()
def query_yesno(str):
'''Query user; default Yes'''
print str, "[Y/n] ",
return not raw_input().strip().lower().startswith('n')
def query_noyes(str):
'''Query user; default No'''
print str, "[y/N] ",
return raw_input().strip().lower().startswith('y')
def get_output_file_path(filename):
""" Return the filename's path in the upload directory
Use this if you're developing a validator/assimilator in Python
"""
config = configxml.default_config()
fanout = long(config.config.uldl_dir_fanout)
s = md5.new(filename).hexdigest()[1:8]
x = long(s, 16)
return "%s/%x/%s" % (config.config.upload_dir, x % fanout, filename)
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-2.3/Lib/test/test_urllibnet.py
|
Python
|
mit
| 5,447
| 0.001652
|
#!/usr/bin/env python
import unittest
from test import test_support
import socket
import urllib
import sys
import os
import mimetools
class URLTimeoutTest(unittest.TestCase):
TIMEOUT = 10.0
def setUp(self):
socket.setdefaulttimeout(self.TIMEOUT)
def tearDown(self):
socket.setdefaulttimeout(None)
def testURLread(self):
f = urllib.urlopen("http://www.python.org/")
x = f.read()
class urlopenNetworkTests(unittest.TestCase):
"""Tests urllib.urlopen using the network.
These tests are not exhaustive. Assuming that testing using files does a
good job overall of some of the basic interface features. There are no
tests exercising the optional 'data' and 'proxies' arguments. No tests
for transparent redirection have been written.
setUp is not used for always constructing a connection to
http://www.python.org/ since there a few tests that don't use that address
and making a connection is expensive enough to warrant minimizing unneeded
connections.
"""
def test_basic(self):
# Simple test expected to pass.
open_url = urllib.urlopen("http://www.python.org/")
for attr in ("read", "readline", "readlines", "fileno", "close",
"info", "geturl"):
self.assert_(hasattr(open_url, attr), "object returned from "
"urlopen lacks the %s attribute" % attr)
try:
self.assert_(open_url.read(), "calling 'read' failed")
finally:
open_url.close()
def test_readlines(self):
# Test both readline and readlines.
open_url = urllib.urlopen("http://www.python.org/")
try:
self.assert_(isinstance(open_url.readline(), basestring),
"readline did not return a string")
self.assert_(isinstance(open_url.readlines(), list),
"readlines did not return a list")
finally:
open_url.close()
def test_info(self):
# Test 'info'.
open_url = urllib.urlopen("http://www.python.org/")
try:
info_obj = open_url.info()
finally:
open_url.close()
self.assert_(isinstance(info_obj, mimetools.Message),
"object returned by 'info' is not an instance of "
"mimetools.Message")
self.assertEqual(info_obj.getsubtype(), "html")
def test_geturl(self):
# Make sure same URL as opened is returned by geturl.
URL = "http://www.python.org/"
open_url = urllib.urlopen(URL)
try:
gotten_url = open_url.geturl()
finally:
open_url.close()
self.assertEqual(gotten_url, URL)
def test_fileno(self):
if (sys.platform in ('win32',) or
not hasattr(os, 'fdopen')):
# On Windows, socket handles are not file descriptors; this
# test can't pass on Windows.
return
# Make sure fd returned by fileno is valid.
open_url = urllib.urlopen("http://www.python.org/")
fd = open_url.fileno()
FILE = os.fdopen(fd)
try:
self.assert_(FILE.read(), "reading from file created using fd "
"returned by fileno failed")
finally:
FILE.close()
def test_bad_address(self):
# Make sure proper exception is raised when connecting to a bogus
# address.
self.assertRaises(IOError,
urllib.urlopen, "http://www.sadflkjsasadf.com/")
class urlretrieveNetworkTests(unittest.TestCase):
"""Tests urllib.urlretrieve using the network."""
def test_basic(self):
# Test basic functionality.
file_location,info = urllib.urlretrieve("http://www.python.org/")
self.assert_(os.path.exists(file_location), "file location returned by"
" urlretrieve is not a valid path")
FILE = file(file_location)
try:
self.assert_(FILE.read(), "reading from the file location returned"
" by urlretrieve failed")
finally
|
:
FILE.close()
os.unlink(file_location)
def test_specified_path(self):
# Make sure that specifying the location of the fil
|
e to write to works.
file_location,info = urllib.urlretrieve("http://www.python.org/",
test_support.TESTFN)
self.assertEqual(file_location, test_support.TESTFN)
self.assert_(os.path.exists(file_location))
FILE = file(file_location)
try:
self.assert_(FILE.read(), "reading from temporary file failed")
finally:
FILE.close()
os.unlink(file_location)
def test_header(self):
# Make sure header returned as 2nd value from urlretrieve is good.
file_location, header = urllib.urlretrieve("http://www.python.org/")
os.unlink(file_location)
self.assert_(isinstance(header, mimetools.Message),
"header is not an instance of mimetools.Message")
def test_main():
test_support.requires('network')
test_support.run_unittest(URLTimeoutTest,
urlopenNetworkTests,
urlretrieveNetworkTests)
if __name__ == "__main__":
test_main()
|
danellecline/stoqs
|
stoqs/loaders/CANON/loadCANON_september2018.py
|
Python
|
gpl-3.0
| 20,474
| 0.006936
|
#!/usr/bin/env python
__author__ = 'Mike McCann,Duane Edgington,Danelle Cline'
__copyright__ = '2018'
__license__ = 'GPL v3'
__contact__ = 'duane at mbari.org'
__doc__ = '''
Master loader for all CANON September Campaign 2018
Mike McCann, Duane Edgington, Danelle Cline
MBARI 15 May 2018
@var __date__: Date of last svn commit
@undocumented: __doc__ parser
@status: production
@license: GPL
'''
import os
import sys
import datetime # needed for glider data
parentDir = os.path.join(os.path.dirname(__file__), "../")
sys.path.insert(0, parentDir) # So that CANON is found
from CANON import CANONLoader
import timing
cl = CANONLoader('stoqs_canon_september2018', 'CANON - September 2018',
description='September 2018 campaign observations in Monterey Bay',
x3dTerrains={
'http://dods.mbari.org/terrain/x3d/Monterey25_10x/Monterey25_10x_scene.x3d': {
'position': '-2822317.31255 -4438600.53640 3786150.85474',
'orientation': '0.89575 -0.31076 -0.31791 1.63772',
'centerOfRotation': '-2711557.9403829873 -4331414.329506527 3801353.4691465236',
'VerticalExaggeration': '10',
},
'http://stoqs.mbari.org/x3d/Monterey25_1x/Monterey25_1x_src_scene.x3d': {
'name': 'Monterey25_1x',
'position': '-2822317.31255 -4438600.53640 3786150.85474',
'orientation': '0.89575 -0.31076 -0.31791 1.63772',
'centerOfRotation': '-2711557.9403829873 -4331414.329506527 3801353.4691465236',
'VerticalExaggeration': '1',
},
},
grdTerrain=os.path.join(parentDir, 'Monterey25.grd')
)
# Set start and end dates for all loads from sources that contain data
# beyond the temporal bounds of the campaign
#
startdate = datetime.datetime(2018, 8, 30) # Fixed start. Aug 30, 2018
enddate = datetime.datetime(2018, 9, 12) # Fixed end. September 12, 2018.
# default location of thredds and dods data:
cl.tdsBase = 'http://odss.mbari.org/thredds/'
cl.dodsBase = cl.tdsBase + 'dodsC/'
#####################################################################
# DORADO
#####################################################################
# Use the attributes built by loadDorado() using startdate and enddate
#####################################################################
# LRAUV
#####################################################################
# Load netCDF files produced (binned, etc.) by Danelle Cline
# These binned files are created with the makeLRAUVNetCDFs.sh script in the
# toNetCDF directory. You must first edit and run that script once to produce
# the binned files before this will work
# Use the default parameters provided by loadLRAUV() calls below
######################################################################
# GLIDERS
######################################################################
# Glider data files from CeNCOOS thredds server
# L_662a updated parameter names in netCDF file
cl.l_662a_base = 'http://legacy.cencoos.org/thredds/dodsC/gliders/Line66/'
cl.l_662a_files = [
'OS_Glider_L_662_20180816_TS.nc',
]
cl.l_662a_parms = ['temperature', 'salinity', 'fluorescence','oxygen']
cl.l_662a_startDatetime = startdate
cl.l_662a_endDatetime = endda
|
te
# NPS_34a updated paramete
|
r names in netCDF file
## The following loads decimated subset of data telemetered during deployment
cl.nps34a_base = 'http://legacy.cencoos.org/thredds/dodsC/gliders/Line66/'
cl.nps34a_files = [ 'OS_Glider_NPS_G34_20180514_TS.nc' ]
cl.nps34a_parms = ['temperature', 'salinity','fluorescence']
cl.nps34a_startDatetime = startdate
cl.nps34a_endDatetime = enddate
# Slocum Teledyne nemesis Glider
## from ioos site ## these files proved to be not compatible with python loader
## cl.slocum_nemesis_base = 'https://data.ioos.us/gliders/thredds/dodsC/deployments/mbari/Nemesis-20170412T0000/'
## cl.slocum_nemesis_files = [ 'Nemesis-20170412T0000.nc3.nc' ]
## from cencoos directory, single non-aggregated files
cl.slocum_nemesis_base = 'http://legacy.cencoos.org/thredds/dodsC/gliders/Line66/Nemesis/nemesis_201808/'
cl.slocum_nemesis_files = [
'nemesis_20180912T155836_rt0.nc',
'nemesis_20180912T122153_rt0.nc',
'nemesis_20180912T094535_rt0.nc',
'nemesis_20180912T082354_rt0.nc',
'nemesis_20180912T052605_rt0.nc',
'nemesis_20180912T040049_rt0.nc',
'nemesis_20180912T023245_rt0.nc',
'nemesis_20180912T014715_rt0.nc',
'nemesis_20180911T232456_rt0.nc',
'nemesis_20180911T220817_rt0.nc',
'nemesis_20180911T205936_rt0.nc',
'nemesis_20180911T202344_rt0.nc',
'nemesis_20180911T183807_rt0.nc',
'nemesis_20180911T173504_rt0.nc',
'nemesis_20180911T163656_rt0.nc',
'nemesis_20180911T160046_rt0.nc',
'nemesis_20180911T141251_rt0.nc',
'nemesis_20180911T132308_rt0.nc',
'nemesis_20180911T125657_rt0.nc',
'nemesis_20180911T124143_rt0.nc',
'nemesis_20180911T111352_rt0.nc',
'nemesis_20180911T101639_rt0.nc',
'nemesis_20180911T090727_rt0.nc',
'nemesis_20180911T082949_rt0.nc',
'nemesis_20180911T063006_rt0.nc',
'nemesis_20180911T051246_rt0.nc',
'nemesis_20180911T035602_rt0.nc',
'nemesis_20180911T031327_rt0.nc',
'nemesis_20180911T012153_rt0.nc',
'nemesis_20180910T211616_rt0.nc',
'nemesis_20180910T184610_rt0.nc',
'nemesis_20180910T172924_rt0.nc',
'nemesis_20180910T144039_rt0.nc',
'nemesis_20180910T130046_rt0.nc',
'nemesis_20180910T110131_rt0.nc',
'nemesis_20180910T100656_rt0.nc',
'nemesis_20180910T073031_rt0.nc',
'nemesis_20180910T061920_rt0.nc',
'nemesis_20180910T051045_rt0.nc',
'nemesis_20180910T044015_rt0.nc',
'nemesis_20180910T024629_rt0.nc',
'nemesis_20180910T014244_rt0.nc',
'nemesis_20180910T003347_rt0.nc',
'nemesis_20180909T235752_rt0.nc',
'nemesis_20180909T214416_rt0.nc',
'nemesis_20180909T201408_rt0.nc',
'nemesis_20180909T184018_rt0.nc',
'nemesis_20180909T175140_rt0.nc',
'nemesis_20180909T155219_rt0.nc',
'nemesis_20180909T115410_rt0.nc',
'nemesis_20180909T092358_rt0.nc',
'nemesis_20180909T080600_rt0.nc',
'nemesis_20180909T053113_rt0.nc',
'nemesis_20180909T040344_rt0.nc',
'nemesis_20180909T022619_rt0.nc',
'nemesis_20180909T013445_rt0.nc',
'nemesis_20180908T225057_rt0.nc',
'nemesis_20180908T212417_rt0.nc',
'nemesis_20180908T201225_rt0.nc',
'nemesis_20180908T193455_rt0.nc',
'nemesis_20180908T175459_rt0.nc',
'nemesis_20180908T165653_rt0.nc',
'nemesis_20180908T155601_rt0.nc',
'nemesis_20180908T152828_rt0.nc',
'nemesis_20180908T141844_rt0.nc',
'nemesis_20180908T133548_rt0.nc',
'nemesis_20180908T124849_rt0.nc',
'nemesis_20180908T121517_rt0.nc',
'nemesis_20180908T100705_rt0.nc',
'nemesis_20180908T084223_rt0.nc',
'nemesis_20180908T070743_rt0.nc',
'nemesis_20180908T061608_rt0.nc',
'nemesis_20180908T040932_rt0.nc',
'nemesis_20180908T002343_rt0.nc',
'nemesis_20180907T215426_rt0.nc',
'nemesis_20180907T201505_rt0.nc',
'nemesis_20180907T181042_rt0.nc',
'nemesis_20180907T175240_rt0.nc',
'nemesis_20180907T160651_rt0.nc',
'nemesis_20180907T121649_rt0.nc',
'nemesis_20180907T104111_rt0.nc',
'nemesis_20180907T090004_rt0.nc',
'nemesis_20180907T080728_rt0.nc',
'nemesis_20180907T054213_rt0.nc',
'nemesis_20180907T042212_rt0.nc',
'nemesis_20180907T032232_rt0.nc',
'nemesis_20180907T025502_rt0.nc',
'nemesis_20180907T011917_rt0.nc',
'nemes
|
FFMG/myoddweb.piger
|
monitor/api/python/Python-3.7.2/Lib/test/test_pulldom.py
|
Python
|
gpl-2.0
| 12,628
| 0.000634
|
import io
import unittest
import xml.sax
from xml.sax.xmlreader import AttributesImpl
from xml.sax.handler import feature_external_ges
from xml.dom import pulldom
from test.support import findfile
tstfile = findfile("test.xml", subdir="xmltestdata")
# A handy XML snippet, containing attributes, a namespace prefix, and a
# self-closing tag:
SMALL_SAMPLE = """<?xml version="1.0"?>
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:xdc="http://www.xml.com/books">
<!-- A comment -->
<title>Introduction to XSL</title>
<hr/>
<p><xdc:author xdc:attrib="prefixed attribute" attrib="other attrib">A. Namespace</xdc:author></p>
</html>"""
class PullDOMTestCase(unittest.TestCase):
def test_parse(self):
"""Minimal test of DOMEventStream.parse()"""
# This just tests that parsing from a stream works. Actual parser
# semantics are tested using parseString with a more focused XML
# fragment.
# Test with a filename:
handler = pulldom.parse(tstfile)
self.addCleanup(handler.stream.close)
list(handler)
# Test with a file object:
with open(tstfile, "rb") as fin:
list(pulldom.parse(fin))
def test_parse_semantics(self):
"""Test DOMEventStream parsing semantics."""
items = pulldom.parseString(SMALL_SAMPLE)
evt, node = next(items)
# Just check the node is a Document:
self.assertTrue(hasattr(node, "createElement"))
self.assertEqual(pulldom.START_DOCUMENT, evt)
evt, node = next(items)
self.assertEqual(pulldom.START_ELEMENT, evt)
self.assertEqual("html", node.tagName)
self.assertEqual(2, len(node.attributes))
self.assertEqual(node.attributes.getNamedItem("xmlns:xdc").value,
"http://www.xml.com/books")
evt, node = next(items)
self.assertEqual(pulldom.CHARACTERS, evt) # Line break
evt, node = next(items)
# XXX - A comment should be reported here!
# self.assertEqual(pulldom.COMMENT, evt)
# Line break after swallowed comment:
self.assertEqual(pulldom.CHARACTERS, evt)
evt, node = next(items)
self.assertEqual("title", node.tagName)
title_node = node
evt, node = next(items)
self.assertEqual(pulldom.CHARACTERS, evt)
self.assertEqual("Introduction to XSL", node.data)
evt, node = next(items)
self.assertEqual(pulldom.END_ELEMENT, evt)
self.assertEqual("title", node.tagName)
self.assertTrue(title_node is node)
evt, node = next(items)
self.assertEqual(pulldom.CHARACTERS, evt)
evt, node = next(items)
self.assertEqual(pulldom.START_ELEMENT, evt)
self.assertEqual("hr", node.tagName)
evt, node = next(items)
self.assertEqual(pulldom.END_ELEMENT, evt)
self.assertEqual("hr", node.tagName)
evt, node = next(items)
self.assertEqual(pulldom.CHARACTERS, evt)
evt, node = next(items)
self.assertEqual(pulldom.START_ELEMENT, evt)
self.assertEqual("p", node.tagName)
evt, node = next(items)
self.assertEqual(pulldom.START_ELEMENT, evt)
self.assertEqual("xdc:author", node.tagName)
evt, node = next(items)
self.assertEqual(pulldom.CHARACTERS, evt)
evt, node = next(items)
self.assertEqual(pulldom.END_ELEMENT, evt)
self.assertEqual("xdc:author", node.tagName)
evt, node = next(items)
self.assertEqual(pulldom.END_ELEMENT, evt)
evt, node = next(items)
self.assertEqual(pulldom.CHARACTERS, evt)
evt, node = next(items)
self.assertEqual(pulldom.END_ELEMENT, evt)
# XXX No END_DOCUMENT item is ever obtained:
#evt, node = next(items)
#self.assertEqual(pulldom.END_DOCUMENT, evt)
def test_expandItem(self):
"""Ensure expandItem works as expected."""
items = pulldom.parseString(SMALL_SAMPLE)
# Loop through the nodes until we get to a "title" start tag:
for evt, item in items:
if evt == pulldom.START_ELEMENT and item.tagName == "title":
items.expandNode(item)
self.assertEqual(1, len(item.childNodes))
break
else:
self.fail("No \"title\" element detected in SMALL_SAMPLE!")
# Loop until we get to the next start-element:
for evt, node in items:
if evt == pulldom.START_ELEMENT:
break
self.assertEqual("hr", node.tagName,
"expandNode did not leave DOMEventStream in the correct state.")
# Attempt to expand a standalone element:
items.expandNode(node)
self.assertEqual(next(items)[0], pulldom.CHARACTERS)
evt, node = next(items)
self.assertEqual(node.tagName, "p")
items.expandNode(node)
next(items) # Skip character data
evt,
|
node = next(items)
self.assertEqual(node.tagName, "html")
with self.assertRaises(StopIteration):
next(ite
|
ms)
items.clear()
self.assertIsNone(items.parser)
self.assertIsNone(items.stream)
@unittest.expectedFailure
def test_comment(self):
"""PullDOM does not receive "comment" events."""
items = pulldom.parseString(SMALL_SAMPLE)
for evt, _ in items:
if evt == pulldom.COMMENT:
break
else:
self.fail("No comment was encountered")
@unittest.expectedFailure
def test_end_document(self):
"""PullDOM does not receive "end-document" events."""
items = pulldom.parseString(SMALL_SAMPLE)
# Read all of the nodes up to and including </html>:
for evt, node in items:
if evt == pulldom.END_ELEMENT and node.tagName == "html":
break
try:
# Assert that the next node is END_DOCUMENT:
evt, node = next(items)
self.assertEqual(pulldom.END_DOCUMENT, evt)
except StopIteration:
self.fail(
"Ran out of events, but should have received END_DOCUMENT")
def test_external_ges_default(self):
parser = pulldom.parseString(SMALL_SAMPLE)
saxparser = parser.parser
ges = saxparser.getFeature(feature_external_ges)
self.assertEqual(ges, False)
class ThoroughTestCase(unittest.TestCase):
"""Test the hard-to-reach parts of pulldom."""
def test_thorough_parse(self):
"""Test some of the hard-to-reach parts of PullDOM."""
self._test_thorough(pulldom.parse(None, parser=SAXExerciser()))
@unittest.expectedFailure
def test_sax2dom_fail(self):
"""SAX2DOM can"t handle a PI before the root element."""
pd = SAX2DOMTestHelper(None, SAXExerciser(), 12)
self._test_thorough(pd)
def test_thorough_sax2dom(self):
"""Test some of the hard-to-reach parts of SAX2DOM."""
pd = SAX2DOMTestHelper(None, SAX2DOMExerciser(), 12)
self._test_thorough(pd, False)
def _test_thorough(self, pd, before_root=True):
"""Test some of the hard-to-reach parts of the parser, using a mock
parser."""
evt, node = next(pd)
self.assertEqual(pulldom.START_DOCUMENT, evt)
# Just check the node is a Document:
self.assertTrue(hasattr(node, "createElement"))
if before_root:
evt, node = next(pd)
self.assertEqual(pulldom.COMMENT, evt)
self.assertEqual("a comment", node.data)
evt, node = next(pd)
self.assertEqual(pulldom.PROCESSING_INSTRUCTION, evt)
self.assertEqual("target", node.target)
self.assertEqual("data", node.data)
evt, node = next(pd)
self.assertEqual(pulldom.START_ELEMENT, evt)
self.assertEqual("html", node.tagName)
evt, node = next(pd)
self.assertEqual(pulldom.COMMENT, evt)
self.assertEqual("a comment", node.data)
evt, node = next(pd)
self.assertEqual(pulldom.PROCESSING_INSTRUCTION, evt)
self.assertEqual("target", node.target)
self.assertEqual("d
|
martomo/SublimeTextXdebug
|
xdebug/helper/helper_26.py
|
Python
|
mit
| 1,428
| 0.001401
|
"""
Helper module for Python version 2.6 and below
- Ordered dictionaries
- Encoding/decoding urls
- Unicode
- Exception handling (except Exception, e)
"""
import base64
from urllib import unquote, quote
try:
from ordereddict import OrderedDict
except:
pass
def modulename():
return 'Helper module for Python version 2.6 and below'
def url_decode(uri):
return unquote(uri)
def url_encode(uri):
return quote(uri)
def new_dictionary():
try:
return OrderedDict()
except:
return {}
def dictionary_keys(dictionary):
return dictionary.keys()
def dictionary_values(dictionary):
return dictionary.values()
def data_read(data):
# Data for reading/receiving already a string in version 2.*
return data
def data_write(data):
# Using string in version 2.* for sending/writing data
return data
def base64_decode(data):
return base64.b64decode(data)
def base64_encode(data):
return base64.b64encode(data)
def unicode_chr(code):
|
return unichr(code) # noqa: F821
def unicode_string(string):
if isinstance(string, unicode): # noqa: F821
return string
return string.decode('utf8', 'replace')
def is_digit(string):
# Check if basestring (str, unicode) is digit
return isinstance(string, basestring) and string.isdigit() # noqa: F821
def is_number(value):
return isinstance(value, (int, long)) # noqa: F8
|
21
|
gtaylor/btmux_template_io
|
btmux_template_io/parsers/ssw/populators/equipment.py
|
Python
|
bsd-3-clause
| 2,429
| 0
|
from btmux_template_io.item_table import ITEM_TABLE
from btmux_template_io.parsers.ssw.crit_mapping import PHYSICAL_WEAPON_MAP, \
EQUIPMENT_MAP
from . ammo import add_ammo
from . common import add_crits_from_locations
from . weapons import add_weapon
def populate_equipment(xml_root, unit_obj):
"""
Equipment is the general term for an item within a mech. Weapons,
ammo, melee weapons, etc.
:param lxml.etree.Element xml_root: The root of the XML doc.
:param btmux_template_io.unit.BTMuxUnit unit_obj: The unit instance
being populated.
"""
equipment_elements = xml_root.xpath('baseloadout/equipment')
for equip_e in equipment_elements:
e_type = equip_e.xpath('type')[0].text
e_name = equip_e.xpath('name')[0].text
if e_type in ['energy', 'ballistic', 'missile']:
add_weapon(equip_e, unit_obj)
elif 'Anti-Missile' in e_name and '@' not in e_name:
# These are of type equipment, but BTMux handles them like weapons.
add_weapon(equip_e, unit_obj)
elif e_type == 'ammunition':
add_ammo(equip_e, unit_obj)
elif e_type == 'physical':
_add_equipment(equip_e, unit_obj, PHYSICAL_WEAPON_MAP)
elif e_type in ['equipment', 'CASE', 'TargetingComputer']:
_add_equipment(equip_e, unit_obj, EQUIPMENT_MAP)
else:
raise ValueError("Invalid equipment type: %s" % e_type)
def _add_equipment(equip_e, unit_obj, map_dict):
ssw_name = equip_e.xpath('name')[0].text
try:
mapped_add_special = map_dict[ssw_name].get('add_special')
except KeyError:
raise ValueError("Unknown equipment type: %s" % ssw_name)
if mapped_add_special:
unit_obj.specials.add(mapped_add_special)
btmux_name = map_dict[ssw_name]['name']
if not btmux_name:
# Probably somethi
|
ng like a SearchLight, which has no crit in BTMux.
return
data_dict = ITEM_TABLE[btmux_name]
if 'tons_per_crit' in data_dict:
crits_per_item = int(round(
float(unit_ob
|
j.weight) / data_dict['tons_per_crit'], 0))
else:
crits_per_item = data_dict.get('crits', 1)
add_special = data_dict.get('add_special')
if add_special:
unit_obj.specials.add(add_special)
add_crits_from_locations(
equip_e,
btmux_name,
unit_obj,
crits_per_item=crits_per_item)
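# A minimal, self-contained sketch of the type-dispatch idea used by
# populate_equipment() above, using the standard library's xml.etree instead of
# lxml and print() stubs instead of the real add_weapon/add_ammo helpers; the
# sample element layout is an assumption made only for illustration:
if __name__ == "__main__":
    import xml.etree.ElementTree as ET
    sample = ET.fromstring(
        "<baseloadout>"
        "<equipment><type>energy</type><name>Medium Laser</name></equipment>"
        "<equipment><type>ammunition</type><name>AC/5 Ammo</name></equipment>"
        "</baseloadout>"
    )
    for equip_e in sample.findall("equipment"):
        e_type = equip_e.find("type").text
        e_name = equip_e.find("name").text
        if e_type in ("energy", "ballistic", "missile"):
            print("weapon: %s" % e_name)   # the real loader calls add_weapon()
        elif e_type == "ammunition":
            print("ammo: %s" % e_name)     # the real loader calls add_ammo()
        else:
            print("other equipment: %s" % e_name)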
|
sysuccc/QiuDaBao
|
qiudabao/models.py
|
Python
|
gpl-2.0
| 617
| 0.025932
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
f
|
rom django.contrib.auth.models import User
# Create your models here.
class AccountInfo(models.Model):
user = models.ForeignKey(User)
balance = models.IntegerField()
DISH_NAME_MAX_LENGTH = 20
class Order(models.Model):
offerer = models.ForeignKe
|
y(User, related_name='offerer_order')
submiter = models.ForeignKey(User, related_name='submiter_order', null=True, blank=True)
dish = models.CharField(max_length=DISH_NAME_MAX_LENGTH)
description = models.TextField(blank=True)
place = models.TextField()
|
facebookexperimental/eden
|
eden/scm/edenscm/mercurial/annotate.py
|
Python
|
gpl-2.0
| 4,963
| 0.000403
|
# Portions Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# Copyright Mercurial Contributors
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from typing import TypeVar, Callable, List, Tuple, Optional
from . import mdiff
from .thirdparty import attr
F = TypeVar("F")
L = TypeVar("L")
def annotate(
base: F,
parents: Callable[[F], List[F]],
decorate: Callable[[F], Tuple[List[L], bytes]],
diffopts: mdiff.diffopts,
skip: Optional[Callable[[F], bool]] = None,
) -> Tuple[List[L], bytes]:
"""annotate algorithm
base: starting point, usually a fctx.
parents: get parents from F.
decorate: get (lines, text) from F.
Return (lines, text) for 'base'.
"""
# This algorithm would prefer to be recursive, but Python is a
# bit recursion-hostile. Instead we do an iterative
# depth-first search.
# 1st DFS pre-calculates pcache and needed
visit = [base]
pcache = {}
needed = {base: 1}
while visit:
f = visit.pop()
if f in pcache:
continue
pl = parents(f)
pcache[f] = pl
for p in pl:
needed[p] = needed.get(p, 0) + 1
if p not in pcache:
visit.append(p)
# 2nd DFS does the actual annotate
visit[:] = [base]
hist = {}
while visit:
f = visit[-1]
if f in hist:
visit.pop()
continue
ready = True
pl = pcache[f]
for p in pl:
if p not in hist:
ready = False
visit.append(p)
if ready:
visit.pop()
curr = decorate(f)
skipchild = False
if skip is not None:
skipchild = skip(f)
curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild, diffopts)
for p in pl:
if needed[p] == 1:
del hist[p]
del needed[p]
else:
needed[p] -= 1
hist[f] = curr
del pcache[f]
return hist[base]
def _annotatepair(parents, childfctx, child, skipchild, diffopts):
r"""
Given parent and child fctxes and annotate data for parents, for all lines
in either parent that match the child, annotate the child with the parent's
data.
Additionally, if `skipchild` is True, replace all other lines with parent
annotate data as well such that child is never blamed for any lines.
See test-annotate.py for unit tests.
"""
pblocks = [
(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
for parent in parents
]
if skipchild:
# Need to iterate over the blocks twice -- make it a list
pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
# Mercurial currently prefers p2 over p1 for annotate.
# TODO: change this?
for parent, blocks in pblocks:
for (a1, a2, b1, b2), t in blocks:
# Changed blocks ('!') or blocks made only of blank lines ('~')
# belong to the child.
if t == "=":
child[0][b1:b2] = parent[0][a1:a2]
if skipchild:
# Now try and match up anything that couldn't be matched,
# Reversing pblocks maintains bias towards p2, matching above
# behavior.
pblocks.reverse()
# The heuristics are:
# * Work on blocks of changed lines (effectively diff hunks with -U0).
# This could potentially be smarter but works well enough.
# * For a non-matching section, do a best-effort fit. Match lines in
# diff hunks 1:1, dropping lines as necessary.
# * Repeat the last line as a last resort.
# First, replace as much as possible without repeat
|
ing the last line.
remaining = [(parent, []) for parent, _blocks in pblocks]
for idx, (parent, blocks) in enumerate(pblocks):
for (a1, a2, b1, b2), _t in blocks:
if a2 - a1 >= b2 - b1:
for bk in range(b1, b2):
if child[0][bk].fctx == childfctx:
ak = min(a1 +
|
(bk - b1), a2 - 1)
child[0][bk] = attr.evolve(parent[0][ak], skip=True)
else:
remaining[idx][1].append((a1, a2, b1, b2))
# Then, look at anything left, which might involve repeating the last
# line.
for parent, blocks in remaining:
for a1, a2, b1, b2 in blocks:
for bk in range(b1, b2):
if child[0][bk].fctx == childfctx:
ak = min(a1 + (bk - b1), a2 - 1)
child[0][bk] = attr.evolve(parent[0][ak], skip=True)
return child
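# A simplified, self-contained illustration of the same annotate/blame idea for a
# purely linear history, using difflib instead of mdiff and plain revision names
# as the per-line annotation data; this is a toy under those assumptions, not a
# use of the annotate() API above:
if __name__ == "__main__":
    import difflib

    def toy_annotate(revisions):
        # revisions: list of (name, text) pairs from oldest to newest
        name, text = revisions[0]
        lines = [name] * len(text.splitlines())
        for name, newtext in revisions[1:]:
            old, new = text.splitlines(), newtext.splitlines()
            newlines = [name] * len(new)
            matcher = difflib.SequenceMatcher(None, old, new)
            for tag, a1, a2, b1, b2 in matcher.get_opcodes():
                if tag == "equal":
                    # unchanged lines keep the annotation of the older revision
                    newlines[b1:b2] = lines[a1:a2]
            lines, text = newlines, newtext
        return list(zip(lines, text.splitlines()))

    history = [("r1", "a\nb\nc\n"), ("r2", "a\nB\nc\nd\n")]
    for rev, line in toy_annotate(history):
        print("%s: %s" % (rev, line))   # expected: r1: a, r2: B, r1: c, r2: d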
|
annndrey/npui-unik
|
netprofile/netprofile/common/auth.py
|
Python
|
agpl-3.0
| 6,265
| 0.025064
|
#!/usr/bin/env python
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: t -*-
#
# NetProfile: Authentication routines
# © Copyright 2013-2014 Alex 'Unik' Unigovsky
#
# This file is part of NetProfile.
# NetProfile is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# NetProfile is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General
# Public License along with NetProfile. If not, see
# <http://www.gnu.org/licenses/>.
from __future__ import (
unicode_literals,
print_function,
absolute_import,
division
)
import hashlib
import random
import string
import time
from zope.interface import implementer
from pyramid.interfaces import IAuthenticationPolicy
from pyramid.security import (
Authenticated,
Everyone
)
class PluginPolicySelected(object):
def __init__(self, request, policy):
self.request = request
self.policy = policy
@implementer(IAuthenticationPolicy)
class PluginAuthenticationPolicy(object):
def __init__(self, default, routes=None):
self._default = default
if routes is None:
routes = {}
self._routes = routes
def add_plugin(self, route, policy):
self._routes[route] = policy
def match(self, request):
if hasattr(request, 'auth_policy'):
return request.auth_policy
cur = None
cur_len = 0
for route, plug in self._routes.items():
r_len = len(route)
if r_len <= cur_len:
continue
path = request.path
if route == path[:r_len]:
if len(path) > r_len:
if path[r_len:r_len + 1] != '/':
continue
cur = plug
cur_len = r_len
if cur:
request.auth_policy = cur
else:
request.auth_policy = self._default
request.registry.notify(PluginPolicySelected(request, request.auth_policy))
return request.auth_policy
def authenticated_userid(self, request):
return self.match(request).authenticated_userid(request)
def unauthenticated_userid(self, request):
return self.match(request).unauthenticated_userid(request)
def effective_principals(self, request):
return self.match(request).effective_principals(request)
def remember(self, request, principal, **kw):
return self.match(request).remember(request, principal, **kw)
def forget(self, request):
return self.match(request).forget(request)
_TOKEN_FILTER_MAP = (
[chr(n) for n in range(32)] +
[chr(127), '\\', '"']
)
_TOKEN_FILTER_MAP = dict.fromkeys(_TOKEN_FILTER_MAP, None)
def _filter_token(tok):
return str(tok).translate(_TOKEN_FILTER_MAP)
def _format_kvpairs(**kwargs):
return ', '.join('{0!s}="{1}"'.format(k, _filter_token(v)) for (k, v) in kwargs.items())
def _generate_nonce(ts, secret, salt=None, chars=string.hexdigits.upper()):
# TODO: Add IP-address to nonce
if not salt:
try:
rng = random.SystemRandom()
except NotImplementedError:
rng = random
salt = ''.join(rng.choice(chars) for i in range(16))
ctx = hashlib.md5(('%s:%s:%s' % (ts, salt, secret)).encode())
return ('%s:%s:%s' % (ts, salt, ctx.hexdigest()))
def _is_valid_nonce(nonce, secret):
comp = nonce.split(':')
if len(comp) != 3:
return False
calc_nonce = _generate_nonce(comp[0], secret, comp[1])
if nonce == calc_nonce:
return True
return False
def _generate_digest_challenge(ts, secret, realm, opaque, stale=False):
nonce = _generate_nonce(ts, secret)
return 'Digest %s' % (_format_kvpairs(
realm=realm,
qop='auth',
nonce=nonce,
opaque=opaque,
algorithm='MD5',
stale='true' if stale else 'false'
),)
def _add_www_authenticate(request, secret, realm):
resp = request.response
if not resp.www_authenticate:
resp.www_authenticate = _generate_digest_challenge(
round(time.time()),
secret, realm, 'NPDIGEST'
)
def _parse_authorization(request, secret, realm):
authz = request.authorization
if (not authz) or (len(authz) != 2) or (authz[0] != 'Digest'):
_add_www_authenticate(request, secret, realm)
return None
params = authz[1]
if 'algorithm' not in params:
params['algorithm'] = 'MD5'
|
for required in ('username', 'realm', 'nonce', 'uri', 'response'
|
, 'cnonce', 'nc', 'opaque'):
if (required not in params) or ((required == 'opaque') and (params['opaque'] != 'NPDIGEST')):
_add_www_authenticate(request, secret, realm)
return None
return params
@implementer(IAuthenticationPolicy)
class DigestAuthenticationPolicy(object):
def __init__(self, secret, callback, realm='Realm'):
self.secret = secret
self.callback = callback
self.realm = realm
def authenticated_userid(self, request):
params = _parse_authorization(request, self.secret, self.realm)
if params is None:
return None
if not _is_valid_nonce(params['nonce'], self.secret):
_add_www_authenticate(request, self.secret, self.realm)
return None
userid = params['username']
if self.callback(params, request) is not None:
return 'u:%s' % userid
_add_www_authenticate(request, self.secret, self.realm)
def unauthenticated_userid(self, request):
params = _parse_authorization(request, self.secret, self.realm)
if params is None:
return None
if not _is_valid_nonce(params['nonce'], self.secret):
_add_www_authenticate(request, self.secret, self.realm)
return None
return 'u:%s' % params['username']
def effective_principals(self, request):
creds = [Everyone]
params = _parse_authorization(request, self.secret, self.realm)
if params is None:
return creds
if not _is_valid_nonce(params['nonce'], self.secret):
_add_www_authenticate(request, self.secret, self.realm)
return creds
groups = self.callback(params, request)
if groups is None:
return creds
creds.append(Authenticated)
creds.append('u:%s' % params['username'])
creds.extend(groups)
return creds
    def remember(self, request, principal, **kw):
return []
def forget(self, request):
return [('WWW-Authenticate', _generate_digest_challenge(
round(time.time()),
self.secret,
self.realm,
'NPDIGEST'
))]
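# A minimal, self-contained sketch of the nonce scheme implemented above
# (ts:salt:md5(ts:salt:secret)); the secret and salt values are illustrative
# assumptions, and none of the Pyramid request machinery is involved:
if __name__ == "__main__":
    import hashlib as _hashlib
    import time as _time

    def _example_nonce(ts, secret, salt="ABCDEF0123456789"):
        digest = _hashlib.md5(("%s:%s:%s" % (ts, salt, secret)).encode()).hexdigest()
        return "%s:%s:%s" % (ts, salt, digest)

    secret = "not-a-real-secret"
    nonce = _example_nonce(round(_time.time()), secret)
    ts, salt, _ = nonce.split(":")
    # the server re-derives the digest from ts, salt and its own secret to validate
    assert nonce == _example_nonce(ts, secret, salt)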
|
bdmod/extreme-subversion
|
BinarySourcce/subversion-1.6.17/subversion/tests/cmdline/special_tests.py
|
Python
|
gpl-2.0
| 22,957
| 0.015943
|
#!/usr/bin/env python
#
# special_tests.py: testing special and reserved file handling
#
# Subversion is a tool for revision control.
# See http://subversion.tigris.org for more information.
#
# ====================================================================
# Copyright (c) 2000-2007 CollabNet. All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://subversion.tigris.org/license-1.html.
# If newer versions of this license are posted there, you may use a
# newer version instead, at your option.
#
######################################################################
# General modules
import sys, os, re
# Our testing module
import svntest
from svntest.main import server_has_mergeinfo
# (abbreviation)
Skip = svntest.testcase.Skip
SkipUnless = svntest.testcase.SkipUnless
XFail = svntest.testcase.XFail
Item = svntest.wc.StateItem
######################################################################
# Tests
#
# Each test must return on success or raise on failure.
#----------------------------------------------------------------------
def general_symlink(sbox):
"general symlink handling"
sbox.build()
wc_dir = sbox.wc_dir
# First try to just commit a symlink
newfile_path = os.path.join(wc_dir, 'newfile')
linktarget_path = os.path.join(wc_dir, 'linktarget')
svntest.main.file_append(linktarget_path, 'this is just a link target')
os.symlink('linktarget', newfile_path)
svntest.main.run_svn(None, 'add', newfile_path, linktarget_path)
expected_output = svntest.wc.State(wc_dir, {
'newfile' : Item(verb='Adding'),
'linktarget' : Item(verb='Adding'),
})
# Run a diff and verify that we get the correct output
exit_code, stdout_lines, stderr_lines = svntest.main.run_svn(1, 'diff',
wc_dir)
regex = '^\+link linktarget'
for line in stdout_lines:
if re.match(regex, line):
break
else:
raise svntest.Failure
# Commit and make sure everything is good
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
expected_status.add({
'newfile' : Item(status=' ', wc_rev=2),
'linktarget' : Item(status=' ', wc_rev=2),
})
svntest.actions.run_and_verify_commit(wc_dir, expected_output,
expected_status, None,
wc_dir)
## Now we should update to the previous version, verify that no
## symlink is present, then update back to HEAD and see if the symlink
## is regenerated properly.
svntest.actions.run_and_verify_svn(None, None, [],
'up', '-r', '1', wc_dir)
# Is the symlink gone?
if os.path.isfile(newfile_path) or os.path.islink(newfile_path):
raise svntest.Failure
svntest.actions.run_and_verify_svn(None, None, [],
'up', '-r', '2', wc_dir)
# Is the symlink back?
new_target = os.readlink(newfile_path)
if new_target != 'linktarget':
raise svntest.Failure
## Now change the target of the symlink, verify that it is shown as
## modified and that a commit succeeds.
os.remove(newfile_path)
os.symlink('A', newfile_path)
was_cwd = os.getcwd()
os.chdir(wc_dir)
svntest.actions.run_and_verify_svn(None, [ "M newfile\n" ], [], 'st')
os.chdir(was_cwd)
expected_output = svntest.wc.State(wc_dir, {
'newfile' : Item(verb='Sending'),
})
expected_status
|
= svntest.actions.get_virginal_state(wc_dir, 2)
expected_status.add({
'newfile' : Item(status=' ', wc_rev=3),
'linktarget' : Item(status=' ', wc_rev=2),
})
svntest.actions.run_and_verify_commit(wc_dir, expected_output,
|
expected_status, None, wc_dir)
def replace_file_with_symlink(sbox):
"replace a normal file with a special file"
sbox.build()
wc_dir = sbox.wc_dir
# First replace a normal file with a symlink and make sure we get an
# error
iota_path = os.path.join(wc_dir, 'iota')
os.remove(iota_path)
os.symlink('A', iota_path)
# Does status show the obstruction?
was_cwd = os.getcwd()
os.chdir(wc_dir)
svntest.actions.run_and_verify_svn(None, [ "~ iota\n" ], [], 'st')
# And does a commit fail?
os.chdir(was_cwd)
exit_code, stdout_lines, stderr_lines = svntest.main.run_svn(1, 'ci', '-m',
'log msg',
wc_dir)
regex = 'svn: Commit failed'
for line in stderr_lines:
if re.match(regex, line):
break
else:
raise svntest.Failure
def import_export_symlink(sbox):
"import and export a symlink"
sbox.build()
wc_dir = sbox.wc_dir
# create a new symlink to import
new_path = os.path.join(wc_dir, 'new_file')
os.symlink('linktarget', new_path)
# import this symlink into the repository
url = sbox.repo_url + "/dirA/dirB/new_link"
exit_code, output, errput = svntest.actions.run_and_verify_svn(
'Import a symlink', None, [], 'import',
'-m', 'log msg', new_path, url)
regex = "(Committed|Imported) revision [0-9]+."
for line in output:
if re.match(regex, line):
break
else:
raise svntest.Failure
# remove the unversioned link
os.remove(new_path)
# run update and verify that the symlink is put back into place
svntest.actions.run_and_verify_svn(None, None, [],
'up', wc_dir)
# Is the symlink back?
link_path = wc_dir + "/dirA/dirB/new_link"
new_target = os.readlink(link_path)
if new_target != 'linktarget':
raise svntest.Failure
## Now we will try exporting from both the working copy and the
## repository directly, verifying that the symlink is created in
## both cases.
for export_src, dest_dir in [(sbox.wc_dir, 'export-wc'),
(sbox.repo_url, 'export-url')]:
export_target = sbox.add_wc_path(dest_dir)
svntest.actions.run_and_verify_svn(None, None, [],
'export', export_src, export_target)
# is the link at the correct place?
link_path = os.path.join(export_target, "dirA/dirB/new_link")
new_target = os.readlink(link_path)
if new_target != 'linktarget':
raise svntest.Failure
#----------------------------------------------------------------------
# Regression test for issue 1986
def copy_tree_with_symlink(sbox):
"'svn cp dir1 dir2' which contains a symlink"
sbox.build()
wc_dir = sbox.wc_dir
# Create a versioned symlink within directory 'A/D/H'.
newfile_path = os.path.join(wc_dir, 'A', 'D', 'H', 'newfile')
linktarget_path = os.path.join(wc_dir, 'A', 'D', 'H', 'linktarget')
svntest.main.file_append(linktarget_path, 'this is just a link target')
os.symlink('linktarget', newfile_path)
svntest.main.run_svn(None, 'add', newfile_path, linktarget_path)
expected_output = svntest.wc.State(wc_dir, {
'A/D/H/newfile' : Item(verb='Adding'),
'A/D/H/linktarget' : Item(verb='Adding'),
})
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
expected_status.add({
'A/D/H/newfile' : Item(status=' ', wc_rev=2),
'A/D/H/linktarget' : Item(status=' ', wc_rev=2),
})
svntest.actions.run_and_verify_commit(wc_dir, expected_output,
expected_status, None, wc_dir)
# Copy H to H2
H_path = os.path.join(wc_dir, 'A', 'D', 'H')
H2_path = os.path.join(wc_dir, 'A', 'D', 'H2')
svntest.actions.run_and_verify_svn(None, None, [], 'cp', H_path, H2_path)
# 'svn status' should show just "A/D/H2 A +". Nothing broken.
expected_status.add({
'A/D/H2' : Item(status='A ', copied='+', wc_rev='-'),
'A/D/H2/chi' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/H2/omega' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/H2/psi' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/H2/linktarget' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/H2/newfile' : Item(status=' ', copied='+', wc_rev='-'),
})
svn
|
ArtemVavilov88/test2-Se-Python-14
|
test_search_negative.py
|
Python
|
apache-2.0
| 1,089
| 0.003673
|
import unittest
from selenium impo
|
rt webdriver
from selenium.webdriver.common.keys import Keys
class php4dvd_search_film_negative(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.implicitly_wait(20)
def test_search_negative(self):
driver = self.driver
drive
|
r.get("http://localhost/php4dvd/")
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys("admin")
driver.find_element_by_name("password").clear()
driver.find_element_by_name("password").send_keys("admin")
driver.find_element_by_name("submit").click()
driver.find_element_by_id("q").clear()
driver.find_element_by_id("q").send_keys("An amazing film")
driver.find_element_by_id("q").send_keys(Keys.RETURN)
#checking message about searching
getText_message = driver.find_element_by_class_name("content").text
print getText_message
def tearDown(self):
self.driver.quit()
if __name__=="__main__":
unittest.main()
|
kreczko/l1t_cli
|
l1t_cli/commands/dqm/offline/setup/__init__.py
|
Python
|
apache-2.0
| 2,339
| 0.002138
|
"""
dqm offline setup:
Sets up CMSSW and the latest code to produce OfflineDQM plots
Usage:
dqm offline setup
"""
import logging
import os
import string
import hepshell
from hepshell.interpreter import time_function
from l1t_cli.setup import WORKSPACE, INTEGRATION_TAG
LOG = logging.getLogger(__name__)
RECIPE = {
'cmssw_version': 'CMSSW_8_1_0_pre15',
'scram_arch': 'slc6_amd64_gcc530',
'l1t_version': INTEGRATION_TAG,
'alias': 'DQMOffline',
}
class Command(hepshell.Command):
def __init__(self, path=__file__, doc=__doc__):
super(Command, self).__init__(path, doc)
@time_function('dqm offline setup', LOG)
def run(self, args, variables):
self.__prepare(args, variables)
self.__version = RECIPE['cmssw_version']
if not os.path.exists(WORKSPACE):
os.mkdir(WORKSPACE)
from l1t_cli.commands.setup.cmssw import Command as CMSSetup
params = {
'version': RECIPE['cmssw_version'],
'scram_arch': RECIPE['scram_arch'],
'alias': RECIPE['alias'],
'parent_folder': WORKSPACE,
'init-git': True,
}
cmssw = CMSSetup()
cmssw.run([], params)
commands = [
#'git remote add cms-l1t-offline https://github.com/cms-l1t-offline/cmssw.git',
#'git fetch cms-l1t-offline',
#'git cms-merge-topic --unsafe cms-l1t-offline:{0}'.format(RECIPE['l1t_version']),
'git cms-addpkg DQMServices/Examples',
'git cms-addpkg DQMServices/Components',
'git cms-addpkg DQMOffline/L1Trigger',
'scram b jobs=2',
]
from l1t_cli.commands.run.within.cmssw import Command as RunCMSSW
cmssw = RunCMSSW()
run_cmssw = cmssw.run
parameters = {'cmssw_path': os.path.join(WORKSPACE, RECIPE['alias'])}
for command in commands:
success = run_cmssw(args=[command], variables=parameters)
if not s
|
uccess: # stop at first error
|
return False
# now activate the working area:
from l1t_cli.commands.update.active_cmssw import Command as UpdateCMSSW
c = UpdateCMSSW()
new_cmssw = os.path.join(WORKSPACE, RECIPE['alias'])
c.run([new_cmssw], {})
return True
|
josiah-wolf-oberholtzer/uqbar
|
tests/fake_package/multi/two.py
|
Python
|
mit
| 130
| 0
|
class PublicClass:
pass
class _PrivateClass:
pass
de
|
f public_function():
|
pass
def _private_function():
pass
|
ypid/series60-remote
|
pc/widget/ContactCanvas.py
|
Python
|
gpl-2.0
| 1,696
| 0.019458
|
# -*- coding: utf-8 -*-
# Copyright (c) 2008 - 2009 Lukas Hetzenecker <LuHe@gmx.at>
from PyQt4.QtCore import *
from PyQt4.QtGui import *
# Matplotlib
try:
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4 import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import
|
Figure
except ImportError:
USE_MATPLOTLIB = False
else:
USE_MATPLOTLIB= True
if USE_MATPLOTLIB:
class ContactCanvas(FigureCanvas):
def __init__(self, parent=None, width = 10, height = 3, dpi = 100, sharex = None, sharey = Non
|
e):
self.fig = Figure(figsize = (width, height), dpi=dpi, facecolor = '#FFFFFF')
self.ax = self.fig.add_subplot(111, sharex = sharex, sharey = sharey)
FigureCanvas.__init__(self, self.fig)
FigureCanvas.setSizePolicy(self,
QSizePolicy.Expanding,
QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
def format_labels(self):
labels_x = self.ax.get_xticklabels()
labels_y = self.ax.get_yticklabels()
for xlabel in labels_x:
xlabel.set_fontsize(8)
for ylabel in labels_y:
ylabel.set_fontsize(8)
ylabel.set_color('b')
else:
class ContactCanvas(QLabel):
def __init__(self, parent=None):
super(ContactCanvas, self).__init__(parent)
self.setText(self.tr("Matplotlib not found - Please install it."))
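# A minimal usage sketch, assuming the same PyQt4 (and optionally matplotlib)
# environment as the class above; the plotted data is purely illustrative:
if __name__ == "__main__":
    import sys
    app = QApplication(sys.argv)
    canvas = ContactCanvas()
    if USE_MATPLOTLIB:
        canvas.ax.plot([0, 1, 2, 3], [2, 5, 3, 7], "b-")
        canvas.format_labels()
    canvas.show()
    sys.exit(app.exec_())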
|
srct/bookshare
|
bookshare/lookouts/admin.py
|
Python
|
gpl-3.0
| 290
| 0
|
# core django imports
fro
|
m django.contrib import admin
# imports from your apps
from .models import Lookout
@admin.register(Lookout)
class LookoutAdmin(admin.ModelAdm
|
in):
list_display = ('id', 'created', 'modified', 'owner', 'isbn')
list_filter = ('created', 'modified', 'owner')
|
ncbray/pystream
|
bin/tests/test_tvl.py
|
Python
|
apache-2.0
| 3,771
| 0.024397
|
# Copyright 2011 Nicholas Bray
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import unittest
import util.tvl as tvl
class TestTVLTruth(unittest.TestCase):
def testTVLTrue(self):
self.assertEqual(tvl.TVLTrue.mustBeTrue(), True)
self.assertEqual(tvl.TVLTrue.maybeTrue(), True)
self.assertEqual(tvl.TVLTrue.maybeFalse(), False)
self.assertEqual(tvl.TVLTrue.mustBeFalse(), False)
self.assertRaises(TypeError, bool, tvl.TVLTrue)
def testTVLFalse(self):
self.assertEqual(tvl.TVLFalse.mustBeTrue(), False)
self.assertEqual(tvl.TVLFalse.maybeTrue(), False)
self.assertEqual(tvl.TVLFalse.maybeFalse(), True)
self.assertEqual(tvl.TVLFalse.mustBeFalse(), True
|
)
self.assertRaises(TypeError, bool, tvl.TVLFalse)
def testTVLMaybe(self):
self.assertEqual(tvl.TVLMaybe.mustBeTrue(), False)
self.assertEqual(tvl.TVLMaybe.maybeTrue(), True)
self.assertEqual(tvl.TVLMaybe.maybeFalse(), True)
self.assertEqual(tvl.TVLMaybe.mustBeFalse(), False)
self.assertRaises(TypeError, bool, tvl.TVLMaybe)
class TestTVLIn
|
vert(unittest.TestCase):
def testTVLTrue(self):
self.assertEqual(~tvl.TVLTrue, tvl.TVLFalse)
def testTVLFalse(self):
self.assertEqual(~tvl.TVLFalse, tvl.TVLTrue)
def testTVLMaybe(self):
self.assertEqual(~tvl.TVLMaybe, tvl.TVLMaybe)
class TestTVLAnd(unittest.TestCase):
def testTVLTrue(self):
self.assertEqual(tvl.TVLTrue&tvl.TVLTrue, tvl.TVLTrue)
self.assertEqual(tvl.TVLTrue&tvl.TVLFalse, tvl.TVLFalse)
self.assertEqual(tvl.TVLTrue&tvl.TVLMaybe, tvl.TVLMaybe)
def testTVLFalse(self):
self.assertEqual(tvl.TVLFalse&tvl.TVLTrue, tvl.TVLFalse)
self.assertEqual(tvl.TVLFalse&tvl.TVLFalse, tvl.TVLFalse)
self.assertEqual(tvl.TVLFalse&tvl.TVLMaybe, tvl.TVLFalse)
def testTVLMaybe(self):
self.assertEqual(tvl.TVLMaybe&tvl.TVLTrue, tvl.TVLMaybe)
self.assertEqual(tvl.TVLMaybe&tvl.TVLFalse, tvl.TVLFalse)
self.assertEqual(tvl.TVLMaybe&tvl.TVLMaybe, tvl.TVLMaybe)
class TestTVLOr(unittest.TestCase):
def testTVLTrue(self):
self.assertEqual(tvl.TVLTrue|tvl.TVLTrue, tvl.TVLTrue)
self.assertEqual(tvl.TVLTrue|tvl.TVLFalse, tvl.TVLTrue)
self.assertEqual(tvl.TVLTrue|tvl.TVLMaybe, tvl.TVLTrue)
def testTVLFalse(self):
self.assertEqual(tvl.TVLFalse|tvl.TVLTrue, tvl.TVLTrue)
self.assertEqual(tvl.TVLFalse|tvl.TVLFalse, tvl.TVLFalse)
self.assertEqual(tvl.TVLFalse|tvl.TVLMaybe, tvl.TVLMaybe)
def testTVLMaybe(self):
self.assertEqual(tvl.TVLMaybe|tvl.TVLTrue, tvl.TVLTrue)
self.assertEqual(tvl.TVLMaybe|tvl.TVLFalse, tvl.TVLMaybe)
self.assertEqual(tvl.TVLMaybe|tvl.TVLMaybe, tvl.TVLMaybe)
class TestTVLXor(unittest.TestCase):
def testTVLTrue(self):
self.assertEqual(tvl.TVLTrue^tvl.TVLTrue, tvl.TVLFalse)
self.assertEqual(tvl.TVLTrue^tvl.TVLFalse, tvl.TVLTrue)
self.assertEqual(tvl.TVLTrue^tvl.TVLMaybe, tvl.TVLMaybe)
def testTVLFalse(self):
self.assertEqual(tvl.TVLFalse^tvl.TVLTrue, tvl.TVLTrue)
self.assertEqual(tvl.TVLFalse^tvl.TVLFalse, tvl.TVLFalse)
self.assertEqual(tvl.TVLFalse^tvl.TVLMaybe, tvl.TVLMaybe)
def testTVLMaybe(self):
self.assertEqual(tvl.TVLMaybe^tvl.TVLTrue, tvl.TVLMaybe)
self.assertEqual(tvl.TVLMaybe^tvl.TVLFalse, tvl.TVLMaybe)
self.assertEqual(tvl.TVLMaybe^tvl.TVLMaybe, tvl.TVLMaybe)
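# A minimal sketch of a three-valued (Kleene-style) logic value consistent with
# the truth tables exercised above; util.tvl is not shown here, so this stand-in
# reflects an assumption about its behaviour, not its actual implementation:
if __name__ == "__main__":
    class TV(object):
        def __init__(self, v): self.v = v           # v is True, False, or None ("maybe")
        def mustBeTrue(self):  return self.v is True
        def mustBeFalse(self): return self.v is False
        def maybeTrue(self):   return self.v is not False
        def maybeFalse(self):  return self.v is not True
        def __eq__(self, other): return isinstance(other, TV) and self.v == other.v
        def __invert__(self):
            return TV(None if self.v is None else not self.v)
        def __and__(self, other):
            if self.v is False or other.v is False: return TV(False)
            if self.v is True and other.v is True: return TV(True)
            return TV(None)
        def __or__(self, other):
            return ~((~self) & (~other))            # De Morgan
        def __xor__(self, other):
            if self.v is None or other.v is None: return TV(None)
            return TV(self.v != other.v)

    T, F, M = TV(True), TV(False), TV(None)
    assert (T & M) == M and (F & M) == F and (M | T) == T and (M ^ T) == M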
|
renanalencar/hermes
|
agency/migrations/0014_auto_20150726_1411.py
|
Python
|
mit
| 468
| 0.002137
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('agency', '0013_auto_20150726_0001'),
]
operations = [
migrations.AlterField(
|
model_name='feedinfo',
name='feed_publisher_name',
field=models.CharField(max_length=5
|
0, verbose_name='Name', choices=[(b'EPTTC', 'EPTTC')]),
),
]
|
opennode/nodeconductor
|
waldur_core/core/migrations/0002_remove_organization.py
|
Python
|
mit
| 398
| 0
|
# -*- coding: utf-8 -*-
# Generated
|
by Django 1.11.7 on 2018-04-10 07:17
from __future__ import unicode_literals
from django.db import migrations
class Migra
|
tion(migrations.Migration):
dependencies = [
('core', '0001_squashed_0008'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='organization_approved',
),
]
|