repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars)
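
Each row appears to hold one fill-in-the-middle (FIM) code-completion example: prefix and suffix carry the surrounding file context and middle is the span to be predicted. Below is a minimal sketch of assembling one record into a FIM training string, assuming dict-shaped records with the column names above; the sentinel tokens are placeholder assumptions, not something this dump specifies.

def to_fim_example(record):
    # Placeholder sentinels; real FIM tokenizers define their own special tokens.
    return ('<fim_prefix>' + record['prefix'] +
            '<fim_suffix>' + record['suffix'] +
            '<fim_middle>' + record['middle'])

print(to_fim_example({'prefix': 'def add(a, b):\n    return ',
                      'middle': 'a + b',
                      'suffix': '\n'}))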

ekivemark/my_device | bbp/bbp/member/models.py | Python | apache-2.0 | 548 | 0.00365
__author__ = 'mark'
"""
User Profile Extension based on One-to-One fields code in Django Docs here:
https://docs.djangoproject.com/en/1.7/topics/auth/customizing/
"""
from django.db import models
from django.contrib.auth.models import User
from uuid import uuid4
class Member(models.Model):
user = models.OneToOneField(User)
member_guid = models.CharField(max_length=100, null=True, blank=True)
ext_uid = models.CharField(max_length=100, null=True, blank=True)
user_token = models.CharField(max_length=100, null=True, blank=True)
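
A hedged usage sketch of the one-to-one profile pattern this record implements; the shell session, credentials, and the assumption that migrations for Member have been applied are all illustrative.

# Hypothetical Django shell session; User and uuid4 come from the imports above.
user = User.objects.create_user('alice', 'alice@example.com', 'secret')
member = Member.objects.create(user=user, member_guid=str(uuid4()))
print(member.user.username, member.member_guid)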

DedMemez/ODS-August-2017 | pets/PetBase.py | Python | apache-2.0 | 778 | 0.006427
# Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.pets.PetBase
from toontown.pets.PetConstants import AnimMoods
from toontown.pets import PetMood
class PetBase:
def getSetterName(self, valueName, prefix = 'set'):
return '%s%s%s' % (prefix, valueName[0].upper(), valueName[1:])
def getAnimMood(self):
if self.mood.getDominantMood() in PetMood.PetMood.ExcitedMoods:
return AnimMoods.EXCITED
elif self.mood.getDominantMood() in PetMood.PetMood.UnhappyMoods:
return AnimMoods.SAD
else:
return AnimMoods.NEUTRAL
def isExcited(self):
return self.getAnimMood() == AnimMoods.EXCITED
def isSad(self):
return self.getAnimMood() == AnimMoods.SAD
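
A hedged standalone illustration of the getSetterName formatting rule above, reproduced without the toontown imports (which are not available outside the game codebase).

def getSetterName(valueName, prefix='set'):
    # Same formatting rule as PetBase.getSetterName.
    return '%s%s%s' % (prefix, valueName[0].upper(), valueName[1:])

print(getSetterName('mood'))          # -> setMood
print(getSetterName('anger', 'get'))  # -> getAnger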

TheMutley/openpilot | selfdrive/car/honda/interface.py | Python | mit | 23,054 | 0.013794
#!/usr/bin/env python
import os
import numpy as np
from cereal import car
from common.numpy_fast import clip, interp
from common.realtime import sec_since_boot
from selfdrive.swaglog import cloudlog
from selfdrive.config import Conversions as CV
from selfdrive.controls.lib.drive_helpers import create_event, EventTypes as ET, get_events
from selfdrive.controls.lib.vehicle_model import VehicleModel
from selfdrive.car.honda.carstate import CarState, get_can_parser
from selfdrive.car.honda.values import CruiseButtons, CM, BP, AH, CAR, HONDA_BOSCH
from selfdrive.controls.lib.planner import A_ACC_MAX
try:
from selfdrive.car.honda.carcontroller import CarController
except ImportError:
CarController = None
# msgs sent for steering controller by camera module on can 0.
# those messages are mutually exclusive on CRV and non-CRV cars
CAMERA_MSGS = [0xe4, 0x194]
def compute_gb_honda(accel, speed):
creep_brake = 0.0
creep_speed = 2.3
creep_brake_value = 0.15
if speed < creep_speed:
creep_brake = (creep_speed - speed) / creep_speed * creep_brake_value
return float(accel) / 4.8 - creep_brake
def get_compute_gb_acura():
# generate a function that takes in [desired_accel, current_speed] -> [-1.0, 1.0]
# where -1.0 is max brake and 1.0 is max gas
# see debug/dump_accel_from_fiber.py to see how those parameters were generated
w0 = np.array([[ 1.22056961, -0.39625418, 0.67952657],
[ 1.03691769, 0.78210306, -0.41343188]])
b0 = np.array([ 0.01536703, -0.14335321, -0.26932889])
w2 = np.array([[-0.59124422, 0.42899439, 0.38660881],
[ 0.79973811, 0.13178682, 0.08550351],
[-0.15651935, -0.44360259, 0.76910877]])
b2 = np.array([ 0.15624429, 0.02294923, -0.0341086 ])
w4 = np.array([[-0.31521443],
[-0.38626176],
[ 0.52667892]])
b4 = np.array([-0.02922216])
def compute_output(dat, w0, b0, w2, b2, w4, b4):
m0 = np.dot(dat, w0) + b0
m0 = leakyrelu(m0, 0.1)
m2 = np.dot(m0, w2) + b2
m2 = leakyrelu(m2, 0.1)
m4 = np.dot(m2, w4) + b4
return m4
def leakyrelu(x, alpha):
return np.maximum(x, alpha * x)
def _compute_gb_acura(accel, speed):
# linearly extrap below v1 using v1 and v2 data
v1 = 5.
v2 = 10.
dat = np.array([accel, speed])
if speed > 5.:
m4 = compute_output(dat, w0, b0, w2, b2, w4, b4)
else:
dat[1] = v1
m4v1 = compute_output(dat, w0, b0, w2, b2, w4, b4)
dat[1] = v2
m4v2 = compute_output(dat, w0, b0, w2, b2, w4, b4)
m4 = (speed - v1) * (m4v2 - m4v1) / (v2 - v1) + m4v1
return float(m4)
return _compute_gb_acura
class CarInterface(object):
def __init__(self, CP, sendcan=None):
self.CP = CP
self.frame = 0
self.last_enable_pressed = 0
self.last_enable_sent = 0
self.gas_pressed_prev = False
self.brake_pressed_prev = False
self.can_invalid_count = 0
self.cp = get_can_parser(CP)
# *** init the major players ***
self.CS = CarState(CP)
self.VM = VehicleModel(CP)
# sending if read only is False
if sendcan is not None:
self.sendcan = sendcan
self.CC = CarController(self.cp.dbc_name, CP.enableCamera)
if self.CS.CP.carFingerprint == CAR.ACURA_ILX:
self.compute_gb = get_compute_gb_acura()
else:
self.compute_gb = compute_gb_honda
@staticmethod
def calc_accel_override(a_ego, a_target, v_ego, v_target):
# limit the pcm accel cmd if:
# - v_ego exceeds v_target, or
# - a_ego exceeds a_target and v_ego is close to v_target
eA = a_ego - a_target
valuesA = [1.0, 0.1]
bpA = [0.3, 1.1]
eV = v_ego - v_target
valuesV = [1.0, 0.1]
bpV = [0.0, 0.5]
valuesRangeV = [1., 0.]
bpRangeV = [-1., 0.]
# only limit if v_ego is close to v_target
speedLimiter = interp(eV, bpV, valuesV)
accelLimiter = max(interp(eA, bpA, valuesA), interp(eV, bpRangeV, valuesRangeV))
# accelOverride is more or less the max throttle allowed to pcm: usually set to a constant
# unless aTargetMax is very high and then we scale with it; this help in quicker restart
return float(max(0.714, a_target / A_ACC_MAX)) * min(speedLimiter, accelLimiter)
@staticmethod
def get_params(candidate, fingerprint):
ret = car.CarParams.new_message()
ret.carName = "honda"
ret.carFingerprint = candidate
if candidate in HONDA_BOSCH:
ret.safetyModel = car.CarParams.SafetyModels.hondaBosch
ret.enableCamera = True
ret.radarOffCan = True
else:
ret.safetyModel = car.CarParams.SafetyModels.honda
ret.enableCamera = not any(x for x in CAMERA_MSGS if x in fingerprint)
ret.enableGasInterceptor = 0x201 in fingerprint
cloudlog.warn("ECU Camera Simulated: %r", ret.enableCamera)
cloudlog.warn("ECU Gas Interceptor: %r", ret.enableGasInterceptor)
ret.enableCruise = not ret.enableGasInterceptor
# kg of standard extra cargo to count for drive, gas, etc...
std_cargo = 136
# FIXME: hardcoding honda civic 2016 touring params so they can be used to
# scale unknown params for other cars
mass_civic = 2923 * CV.LB_TO_KG + std_cargo
wheelbase_civic = 2.70
centerToFront_civic = wheelbase_civic * 0.4
centerToRear_civic = wheelbase_civic - centerToFront_civic
rotationalInertia_civic = 2500
tireStiffnessFront_civic = 192150
tireStiffnessRear_civic = 202500
# Optimized car params: tire_stiffness_factor and steerRatio are a result of a vehicle
# model optimization process. Certain Hondas have an extra steering sensor at the bottom
# of the steering rack, which improves controls quality as it removes the steering column
# torsion from feedback.
# Tire stiffness factor fictitiously lower if it includes the steering column torsion effect.
# For modeling details, see p.198-200 in "The Science of Vehicle Dynamics (2014), M. Guiggiani"
ret.steerKiBP, ret.steerKpBP = [[0.], [0.]]
ret.steerKf = 0.00006 # conservative feed-forward
if candidate == CAR.CIVIC:
stop_and_go = True
ret.mass = mass_civic
ret.wheelbase = wheelbase_civic
ret.centerToFront = centerToFront_civic
ret.steerRatio = 14.63 # 10.93 is end-to-end spec
tire_stiffness_factor = 1.
# Civic at comma has modified steering FW, so different tuning for the Neo in that car
is_fw_modified = os.getenv("DONGLE_ID") in ['99c94dc769b5d96e']
ret.steerKpV, ret.steerKiV = [[0.33], [0.10]] if is_fw_modified else [[0.8], [0.24]]
if is_fw_modified:
ret.steerKf = 0.00003
ret.longitudinalKpBP = [0., 5., 35.]
ret.longitudinalKpV = [3.6, 2.4, 1.5]
ret.longitudinalKiBP = [0., 35.]
ret.longitudinalKiV = [0.54, 0.36]
elif candidate == CAR.CIVIC_HATCH:
stop_and_go = True
ret.mass = 2916. * CV.LB_TO_KG + std_cargo
ret.wheelbase = wheelbase_civic
ret.centerToFront = centerToFront_civic
ret.steerRatio = 14.63 # 10.93 is spec end-to-end
tire_stiffness_factor = 1.
ret.steerKpV, ret.steerKiV = [[0.8], [0.24]]
ret.longitudinalKpBP = [0., 5., 35.]
ret.longitudinalKpV = [1.2, 0.8, 0.5]
ret.longitudinalKiBP = [0., 35.]
ret.longitudinalKiV = [0.18, 0.12]
elif candidate == CAR.ACCORD:
stop_and_go = True
ret.safetyParam = 1 # Accord and CRV 5G use an alternate user brake msg
ret.mass = 3279. * CV.LB_TO_KG + std_cargo
ret.wheelbase = 2.83
ret.centerToFront = ret.wheelbase * 0.39
ret.steerRatio = 15.96 # 11.82 is spec end-to-end
tire_stiffness_factor = 0.8467
ret.steerKpV, ret.steerKiV = [[0.6], [0.18]]
ret.longitudinalKpBP = [0., 5., 35.]
ret.longitudinalKpV = [1.2, 0.8, 0.5]
ret.longitudinalKiBP = [0., 35.]
ret.longitudinalKiV = [0.18, 0.12]
elif candidate == CAR.ACURA_ILX:
stop_and_go = False
ret.mass = 3095 * CV.LB_TO_KG + std_cargo
ret.wheelbase = 2.67
ret.centerToFront = ret.wheelbase * 0.37
ret.steerRatio = 18.61 # 15.3 is spec end-to-end
tire_stiffness_factor = 0.72
# Acura at comma has modified steeri

TrevorLowing/PyGames | pysollib/games/sanibel.py | Python | gpl-2.0 | 2,437 | 0.007386
#!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
##---------------------------------------------------------------------------##
##
## Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
## Copyright (C) 2003 Mt. Hood Playing Card Co.
## Copyright (C) 2005-2009 Skomoroh
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
##---------------------------------------------------------------------------##
__all__ = []
# imports
import sys
# PySol imports
from pysollib.gamedb import registerGame, GameInfo, GI
from pysollib.util import *
from pysollib.stack import *
from pysollib.game import Game
from pysollib.layout import Layout
from pysollib.hint import AbstractHint, DefaultHint, CautiousDefaultHint, Yukon_Hint
from pysollib.games.gypsy import Gypsy
# ************************************************************************
# * Sanibel
# * play similar to Yukon
# ************************************************************************
class Sanibel(Gypsy):
Layout_Method = Layout.klondikeLayout
Talon_Class = StackWrapper(WasteTalonStack, max_rounds=1)
Foundation_Class = StackWrapper(SS_FoundationStack, max_move=0)
RowStack_Class = Yukon_AC_RowStack
Hint_Class = Yukon_Hint
def createGame(self):
Gypsy.createGame(self, rows=10, waste=1, playcards=23)
def startGame(self):
for i in range(3):
self.s.talon.dealRow(flip=0, frames=0)
for i in range(6):
self.s.talon.dealRow(frames=0)
self.startDealSample()
self.s.talon.dealRow()
self.s.talon.dealCards() # deal first card to WasteStack
def getHighlightPilesStacks(self):
return ()
registerGame(GameInfo(201, Sanibel, "Sanibel",
GI.GT_YUKON | GI.GT_CONTRIB | GI.GT_ORIGINAL, 2, 0, GI.SL_MOSTLY_SKILL))

coreyabshire/marv | bin/experiments/key_test.py | Python | mit | 472 | 0.002119
import tty
import sys
import termios
fd = sys.stdin.fileno()
fdattrorig = termios.tcgetattr(fd)
try:
tty.setraw(fd)
done = False
while not done:
ch = sys.stdin.read(1)
sys.stdout.write('test: %s\r\n' % ord(ch))
if ord(ch) == 27:
ch = sys.stdin.read(1)
sys.stdout.write('esc: %s\r\n' % ord(ch))
if ord(ch) == 3:
done = True
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, fdattrorig)

statbio/Sargasso | sargasso/filter/hits_checker.py | Python | mit | 8,017 | 0.000624
from collections import namedtuple
class HitsChecker:
REJECTED = -1
AMBIGUOUS = -2
CIGAR_GOOD = 0
CIGAR_LESS_GOOD = 1
CIGAR_FAIL = 2
CIGAR_OP_MATCH = 0 # From pysam
CIGAR_OP_REF_INSERTION = 1 # From pysam
CIGAR_OP_REF_DELETION = 2 # From pysam
CIGAR_OP_REF_SKIP = 3 # From pysam
ThresholdData = namedtuple(
'ThresholdData',
['index', 'violated', 'multimaps', 'mismatches', 'cigar_check'])
def __init__(self, mismatch_thresh, minmatch_thresh, multimap_thresh,
reject_multimaps, logger):
self.logger = logger
self.mismatch_thresh = mismatch_thresh / 100.0
self.minmatch_thresh = minmatch_thresh / 100.0
self.multimap_thresh = multimap_thresh
self._assign_hits = self._assign_hits_reject_multimaps \
if reject_multimaps else self._assign_hits_standard
logger.debug(("PARAMS: mismatch - {mism}, minmatch - {minm}, " +
"multimap - {mult}").format(
mism=self.mismatch_thresh,
minm=self.minmatch_thresh,
mult=self.multimap_thresh))
def compare_and_write_hits(self, hits_managers):
# Compare the hits for a particular read in each species and decide whether
# the read can be assigned to one species or another, or if it must be
# rejected as ambiguous
for m in hits_managers:
m.update_hits_info()
threshold_data = [self._check_thresholds(i, m) for i, m
in enumerate(hits_managers)]
if __debug__:
for t in threshold_data:
self.logger.debug(t)
assignee = self._assign_hits(threshold_data)
if assignee == self.REJECTED:
for hits_manager in hits_managers:
hits_manager.add_rejected_hits_to_stats()
elif assignee == self.AMBIGUOUS:
for hits_manager in hits_managers:
hits_manager.add_ambiguous_hits_to_stats()
else:
for i, hits_manager in enumerate(hits_managers):
if i == assignee:
hits_manager.add_accepted_hits_to_stats()
hits_manager.write_hits()
# self.check_and_write_hits_for_read(hits_manager)
else:
hits_manager.add_rejected_hits_to_stats()
for hits_manager in hits_managers:
hits_manager.clear_hits()
def check_and_write_hits_for_read(self, hits_manager):
if hits_manager.hits_info is None:
hits_manager.update_hits_info()
if self.check_hits(hits_manager.hits_info):
hits_manager.add_accepted_hits_to_stats()
hits_manager.write_hits()
else:
hits_manager.add_rejected_hits_to_stats()
hits_manager.clear_hits()
def check_and_write_hits_for_remaining_reads(self, hits_manager):
try:
while True:
if hits_manager.hits_for_read is None:
hits_manager.get_next_read_hits()
self.check_and_write_hits_for_read(hits_manager)
except StopIteration:
pass
def check_hits(self, hits_info):
# check that the hits for a read are - in themselves - satisfactory to
# be assigned to a species.
violated = False
if hits_info.get_multimaps() > self.multimap_thresh:
violated = True
if __debug__:
self.logger.debug(
'only one competing hits manager but violated multimap.')
if hits_info.get_primary_mismatches() > \
round(self.mismatch_thresh * hits_info.get_total_length()):
violated = True
if __debug__:
self.logger.debug(
'only one competing hits manager but violated primary mismatches.')
if self._check_cigars(hits_info) == self.CIGAR_FAIL:
violated = True
if __debug__:
self.logger.debug(
'only one competing hits manager but violated primary CIGAR.')
if __debug__:
if not violated:
self.logger.debug('assigned due to only one competing filterer!')
return not violated
def _assign_hits_standard(self, threshold_data):
threshold_data = [t for t in threshold_data if not t.violated]
num_hits_managers = len(threshold_data)
if num_hits_managers == 0:
return self.REJECTED
elif num_hits_managers == 1:
if __debug__:
self.logger.debug('assigned due to only one filter left after checking threshold!')
return threshold_data[0].index
min_mismatches = min([m.mismatches for m in threshold_data])
threshold_data = [t for t in threshold_data if t.mismatches == min_mismatches]
if len(threshold_data) == 1:
if __debug__:
self.logger.debug('assigned due to primary hit min_mismatches!')
return threshold_data[0].index
min_cigar_check = min([m.cigar_check for m in threshold_data])
threshold_data = [t for t in threshold_data
if t.cigar_check == min_cigar_check]
if len(threshold_data) == 1:
if __debug__:
self.logger.debug('assigned due to primary hit CIGAR!')
return threshold_data[0].index
min_multimaps = min([m.multimaps for m in threshold_data])
threshold_data = [t for t in threshold_data
if t.multimaps == min_multimaps]
if len(threshold_data) == 1:
# # todo remove debug multimap
if __debug__:
self.logger.debug('assigned due to number of multimap!')
return threshold_data[0].index
if __debug__:
self.logger.debug('assigned as ambiguous!')
return self.AMBIGUOUS
def _assign_hits_reject_multimaps(self, threshold_data):
if len([t for t in threshold_data if t.multimaps > 1]) > 0:
return self.REJECTED
return self._assign_hits_standard(threshold_data)
def _check_thresholds(self, index, hits_manager):
hits_info = hits_manager.hits_info
violated = False
multimaps = hits_info.get_multimaps()
if multimaps > self.multimap_thresh:
# # todo remove debug multimap
if __debug__:
self.logger.debug('violated due to multimap!')
violated = True
mismatches = hits_info.get_primary_mismatches()
if mismatches > round(self.mismatch_thresh *
hits_info.get_total_length()):
if __debug__:
self.logger.debug('violated due to primary mismatches!')
violated = True
cigar_check = self._check_cigars(hits_info)
if cigar_check == self.CIGAR_FAIL:
if __debug__:
self.logger.debug('violated due to primary CIGAR!')
violated = True
return self.ThresholdData(
index, violated, multimaps, mismatches, cigar_check)
def _check_cigars(self, hits_info):
total_length = hits_info.get_total_length()
min_match = total_length - round(self.minmatch_thresh * total_length)
cigars = hits_info.get_primary_cigars()
response = self.CIGAR_GOOD
num_matches = 0
for cigar in cigars:
for operation, length in cigar:
if operation == self.CIGAR_OP_MATCH:
num_matches += length
elif operation == self.CIGAR_OP_REF_INSERTION or \
operation == self.CIGAR_OP_REF_DELETION:
response = self.CIGAR_LESS_GOOD
if num_matches < min_match:
return self.CIGAR_FAIL
elif num_matches < total_length:
return self.CIGAR_LESS_GOOD
return response
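
A hedged toy illustration of the match counting inside _check_cigars, using pysam-style (operation, length) pairs; the cigar values are invented.

CIGAR_OP_MATCH = 0  # same pysam constant the class defines above
cigar = [(0, 45), (1, 2), (0, 50)]  # 95 matched bases plus one insertion
num_matches = sum(length for op, length in cigar if op == CIGAR_OP_MATCH)
print(num_matches)  # 95; _check_cigars compares this against min_match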

GNOME/gnome-session | meson_post_install.py | Python | gpl-2.0 | 789 | 0.007605
#!/usr/bin/env python3
import os
import shutil
import subprocess
import sys
if os.environ.get('DESTDIR'):
install_root = os.environ.get('DESTDIR') + os.path.abspath(sys.argv[1])
else:
install_root = sys.argv[1]
if not os.environ.get('DESTDIR'):
schemadir = os.path.join(install_root, 'glib-2.0', 'schemas')
print('Compile gsettings schemas...')
subprocess.call(['glib-compile-schemas', schemadir])
# FIXME: Meson is unable to copy a generated target file:
# https://groups.google.com/forum/#!topic/mesonbuild/3iIoYPrN4P0
dst_dir = os.path.join(install_root, 'wayland-sessions')
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
src = os.path.join(install_root, 'xsessions', 'gnome.desktop')
dst = os.path.join(dst_dir, 'gnome.desktop')
shutil.copyfile(src, dst)

thomaslima/PySpice | PySpice/Tools/File.py | Python | gpl-3.0 | 7,812 | 0.005248
####################################################################################################
#
# PySpice - A Spice Package for Python
# Copyright (C) 2014 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
import os
import subprocess
####################################################################################################
def file_name_has_extension(file_name, extension):
return file_name.endswith(extension)
####################################################################################################
def file_extension(filename):
# index = filename.rfind(os.path.extsep)
# if index == -1:
# return None
# else:
# return filename[index:]
return os.path.splitext(filename)[1]
####################################################################################################
def run_shasum(filename, algorithm=1, text=False, binary=False, portable=False):
if algorithm not in (1, 224, 256, 384, 512, 512224, 512256):
raise ValueError
args = ['shasum', '--algorithm=' + str(algorithm)]
if text:
args.append('--text')
elif binary:
args.append('--binary')
elif portable:
args.append('--portable')
args.append(filename)
output = subprocess.check_output(args)
shasum = output[:output.find(' ')]
return shasum
####################################################################################################
class Path:
##############################################
def __init__(self, path):
self._path = str(path)
##############################################
def __bool__(self):
return os.path.exists(self._path)
##############################################
def __str__(self):
return self._path
##############################################
@property
def path(self):
return self._path
##############################################
def is_absolut(self):
return os.path.isabs(self._path)
##############################################
def absolut(self):
return self.clone_for_path(os.path.abspath(self._path))
##############################################
def normalise(self):
return self.clone_for_path(os.path.normpath(self._path))
##############################################
def normalise_case(self):
return self.clone_for_path(os.path.normcase(self._path))
##############################################
def expand_vars_and_user(self):
return self.clone_for_path(os.path.expandvars(os.path.expanduser(self._path)))
##############################################
def real_path(self):
return self.clone_for_path(os.path.realpath(self._path))
##############################################
def relative_to(self, directory):
return self.clone_for_path(os.path.relpath(self._path, str(directory)))
##############################################
def clone_for_path(self, path):
return self.__class__(path)
##############################################
def split(self):
return self._path.split(os.path.sep)
##############################################
def directory_part(self):
return Directory(os.path.dirname(self._path))
##############################################
def filename_part(self):
return os.path.basename(self._path)
##############################################
def is_directory(self):
return os.path.isdir(self._path)
##############################################
def is_file(self):
return os.path.isfile(self._path)
##############################################
@property
def inode(self):
return os.stat(self._path).st_ino
##############################################
@property
def creation_time(self):
return os.stat(self._path).st_ctime
####################################################################################################
class Directory(Path):
##############################################
def __bool__(self):
return super().__bool__() and self.is_directory()
##############################################
def join_directory(self, directory):
return self.__class__(os.path.join(self._path, str(directory)))
##############################################
def join_filename(self, filename):
return File(filename, self._path)
##############################################
def iter_file(self, followlinks=False):
for root, directories, files in os.walk(self._path, followlinks=followlinks):
for filename in files:
yield File(filename, root)
##############################################
def iter_directories(self, followlinks=False):
for root, directories, files in os.walk(self._path, followlinks=followlinks):
for directory in directories:
yield Path(os.path.join(root, directory))
####################################################################################################
class File(Path):
default_shasum_algorithm = 256
##############################################
def __init__(self, filename, path=''):
super().__init__(os.path.join(str(path), str(filename)))
self._filename = self.filename_part()
if not self._filename:
raise ValueError
self._directory = self.directory_part()
self._shasum = None # lazy computation
##############################################
def __bool__(self):
return super().__bool__() and os.path.isfile(self._path)
##############################################
@property
def directory(self):
return self._directory
##############################################
@property
def filename(self):
return self._filename
##############################################
@property
def extension(self):
return file_extension(self._filename)
##############################################
@property
def shasum(self):
if self._shasum is None:
return self.compute_shasum()
else:
return self._shasum
##############################################
def compute_shasum(self, algorithm=None):
if algorithm is None:
algorithm = self.default_shasum_algorithm
self._shasum = run_shasum(self._path, algorithm, portable=True)
return self._shasum
####################################################################################################
#
# End
#
####################################################################################################
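
A hedged usage sketch of the Path helpers above; the file name is invented and nothing needs to exist on disk for these particular calls.

p = Path('/tmp/../tmp/example.txt')
print(p.normalise())      # /tmp/example.txt
print(p.filename_part())  # example.txt
print(p.is_absolut())     # True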

yephper/django | django/conf/locale/__init__.py | Python | bsd-3-clause | 12,721 | 0.000161
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
"""
LANG_INFO is a dictionary structure to provide meta information about languages.
About name_local: capitalize it as if your language name was appearing
inside a sentence in your language.
The 'fallback' key can be used to specify a special fallback logic which doesn't
follow the traditional 'fr-ca' -> 'fr' fallback logic.
"""
LANG_INFO = {
'af': {
'bidi': False,
'code': 'af',
'name': 'Afrikaans',
'name_local': 'Afrikaans',
},
'ar': {
'bidi': True,
'code': 'ar',
'name': 'Arabic',
'name_local': 'العربيّة',
},
'ast': {
'bidi': False,
'code': 'ast',
'name': 'Asturian',
'name_local': 'asturianu',
},
'az': {
'bidi': True,
'code': 'az',
'name': 'Azerbaijani',
'name_local': 'Azərbaycanca',
},
'be': {
'bidi': False,
'code': 'be',
'name': 'Belarusian',
'name_local': 'беларуская',
},
'bg': {
'bidi': False,
'code': 'bg',
'name': 'Bulgarian',
'name_local': 'български',
},
'bn': {
'bidi': False,
'code': 'bn',
'name': 'Bengali',
'name_local': 'বাংলা',
},
'br': {
'bidi': False,
'code': 'br',
'name': 'Breton',
'name_local': 'brezhoneg',
},
'bs': {
'bidi': False,
'code': 'bs',
'name': 'Bosnian',
'name_local': 'bosanski',
},
'ca': {
'bidi': False,
'code': 'ca',
'name': 'Catalan',
'name_local': 'català',
},
'cs': {
'bidi': False,
'code': 'cs',
'name': 'Czech',
'name_local': 'česky',
},
'cy': {
'bidi': False,
'code': 'cy',
'name': 'Welsh',
'name_local': 'Cymraeg',
},
'da': {
'bidi': False,
'code': 'da',
'name': 'Danish',
'name_local': 'dansk',
},
'de': {
'bidi': False,
'code': 'de',
'name': 'German',
'name_local': 'Deutsch',
},
'el': {
'bidi': False,
'code': 'el',
'name': 'Greek',
'name_local': 'Ελληνικά',
},
'en': {
'bidi': False,
'code': 'en',
'name': 'English',
'name_local': 'English',
},
'en-au': {
'bidi': False,
'code': 'en-au',
'name': 'Australian English',
'name_local': 'Australian English',
},
'en-gb': {
'bidi': False,
'code': 'en-gb',
'name': 'British English',
'name_local': 'British English',
},
'eo': {
'bidi': False,
'code': 'eo',
'name': 'Esperanto',
'name_local': 'Esperanto',
},
'es': {
'bidi': False,
'code': 'es',
'name': 'Spanish',
'name_local': 'español',
},
'es-ar': {
'bidi': False,
'code': 'es-ar',
'name': 'Argentinian Spanish',
'name_local': 'español de Argentina',
},
'es-co': {
'bidi': False,
'code': 'es-co',
'name': 'Colombian Spanish',
'name_local': 'español de Colombia',
},
'es-mx': {
'bidi': False,
'code': 'es-mx',
'name': 'Mexican Spanish',
'name_local': 'español de Mexico',
},
'es-ni': {
'bidi': False,
'code': 'es-ni',
'name': 'Nicaraguan Spanish',
'name_local': 'español de Nicaragua',
},
'es-ve': {
'bidi': False,
'code': 'es-ve',
'name': 'Venezuelan Spanish',
'name_local': 'español de Venezuela',
},
'et': {
'bidi': False,
'code': 'et',
'name': 'Estonian',
'name_local': 'eesti',
},
'eu': {
'bidi': False,
'code': 'eu',
'name': 'Basque',
'name_local': 'Basque',
},
'fa': {
'bidi': True,
'code': 'fa',
'name': 'Persian',
'name_local': 'فارسی',
},
'fi': {
'bidi': False,
'code': 'fi',
'name': 'Finnish',
'name_local': 'suomi',
},
'fr': {
'bidi': False,
'code': 'fr',
'name': 'French',
'name_local': 'français',
},
'fy': {
'bidi': False,
'code': 'fy',
'name': 'Frisian',
'name_local': 'frysk',
},
'ga': {
'bidi': False,
'code': 'ga',
'name': 'Irish',
'name_local': 'Gaeilge',
},
'gd': {
'bidi': False,
'code': 'gd',
'name': 'Scottish Gaelic',
'name_local': 'Gàidhlig',
},
'gl': {
'bidi': False,
'code': 'gl',
'name': 'Galician',
'name_local': 'galego',
},
'he': {
'bidi': True,
'code': 'he',
'name': 'Hebrew',
'name_local': 'עברית',
},
'hi': {
'bidi': False,
'code': 'hi',
'name': 'Hindi',
'name_local': 'Hindi',
},
'hr': {
'bidi': False,
'code': 'hr',
'name': 'Croatian',
'name_local': 'Hrvatski',
},
'hu': {
'bidi': False,
'code': 'hu',
'name': 'Hungarian',
'name_local': 'Magyar',
},
'ia': {
'bidi': False,
'code': 'ia',
'name': 'Interlingua',
'name_local': 'Interlingua',
},
'io': {
'bidi': False,
'code': 'io',
'name': 'Ido',
'name_local': 'ido',
},
'id': {
'bidi': False,
'code': 'id',
'name': 'Indonesian',
'name_local': 'Bahasa Indonesia',
},
'is': {
'bidi': False,
'code': 'is',
'name': 'Icelandic',
'name_local': 'Íslenska',
},
'it': {
'bidi': False,
'code': 'it',
'name': 'Italian',
'name_local': 'italiano',
},
'ja': {
'bidi': False,
'code': 'ja',
'name': 'Japanese',
'name_local': '日本語',
},
'ka': {
'bidi': False,
'code': 'ka',
'name': 'Georgian',
'name_local': 'ქართული',
},
'kk': {
'bidi': False,
'code': 'kk',
'name': 'Kazakh',
'name_local': 'Қазақ',
},
'km': {
'bidi': False,
'code': 'km',
'name': 'Khmer',
'name_local': 'Khmer',
},
'kn': {
'bidi': False,
'code': 'kn',
'name': 'Kannada',
'name_local': 'Kannada',
},
'ko': {
'bidi': False,
'code': 'ko',
'name': 'Korean',
'name_local': '한국어',
},
'lb': {
'bidi': False,
'code': 'lb',
'name': 'Luxembourgish',
'name_local': 'Lëtzebuergesch',
},
'lt': {
'bidi': False,
'code': 'lt',
'name': 'Lithuanian',
'name_local': 'Lietuviškai',
},
'lv': {
'bidi': False,
'code': 'lv',
'name': 'Latvian',
'name_local': 'latviešu',
},
'mk': {
'bidi': False,
'code': 'mk',
'name': 'Macedonian',
'name_local': 'Македонски',
},
'ml': {
'bidi': False,
'code': 'ml',
'name': 'Malayalam',
'name_local': 'Malayalam',
},
'mn': {
'bidi': False,
'code': 'mn',
'name': 'Mongolian',
'name_local': 'Mongolian',
},
'mr': {
'bidi': False,
'code': 'mr',
'name': 'Marathi',
'name_local': 'मराठी',
},
'my': {
'bidi': False,
'code': 'my',
'name': 'Burmese',
'name_local': 'မြန်မာဘာသာ',
},
'nb': {
'bidi': False,
'code': 'nb',
'name': 'Norwegian Bokmal',

mcus/SickRage | sickbeard/providers/morethantv.py | Python | gpl-3.0 | 8,361 | 0.00311
# Author: Seamus Wassman
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
# This file was adapted for MoreThanTV from the freshontv scraper by
# Sparhawk76, this is my first foray into python, so there most likely
# are some mistakes or things I could have done better.
import re
import requests
import traceback
import logging
from sickbeard import tvcache
from sickbeard.providers import generic
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.exceptions import AuthException
class MoreThanTVProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, "MoreThanTV")
self.supportsBacklog = True
self._uid = None
self._hash = None
self.username = None
self.password = None
self.ratio = None
self.minseed = None
self.minleech = None
# self.freeleech = False
self.urls = {'base_url': 'https://www.morethan.tv/',
'login': 'https://www.morethan.tv/login.php',
'detail': 'https://www.morethan.tv/torrents.php?id=%s',
'search': 'https://www.morethan.tv/torrents.php?tags_type=1&order_by=time&order_way=desc&action=basic&searchsubmit=1&searchstr=%s',
'download': 'https://www.morethan.tv/torrents.php?action=download&id=%s'}
self.url = self.urls[b'base_url']
self.cookies = None
self.proper_strings = ['PROPER', 'REPACK']
self.cache = MoreThanTVCache(self)
def _checkAuth(self):
if not self.username or not self.password:
raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
return True
def _doLogin(self):
if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
return True
if self._uid and self._hash:
requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
else:
login_params = {'username': self.username,
'password': self.password,
'login': 'Log in',
'keeplogged': '1'}
response = self.getURL(self.urls[b'login'], post_data=login_params, timeout=30)
if not response:
logging.warning("Unable to connect to provider")
return False
if re.search('Your username or password was incorrect.', response):
logging.warning("Invalid username or password. Check your settings")
return False
return True
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
# freeleech = '3' if self.freeleech else '0'
if not self._doLogin():
return results
for mode in search_params.keys():
logging.debug("Search Mode: %s" % mode)
for search_string in search_params[mode]:
if mode != 'RSS':
logging.debug("Search string: %s " % search_string)
searchURL = self.urls[b'search'] % (search_string.replace('(', '').replace(')', ''))
logging.debug("Search URL: %s" % searchURL)
# returns top 15 results by default, expandable in user profile to 100
data = self.getURL(searchURL)
if not data:
continue
try:
with BS4Parser(data, features=["html5lib", "permissive"]) as html:
torrent_table = html.find('table', attrs={'class': 'torrent_table'})
torrent_rows = torrent_table.findChildren('tr') if torrent_table else []
# Continue only if one Release is found
if len(torrent_rows) < 2:
logging.debug("Data returned from provider does not contain any torrents")
continue
# skip colheader
for result in torrent_rows[1:]:
cells = result.findChildren('td')
link = cells[1].find('a', attrs={'title': 'Download'})
# skip if torrent has been nuked due to poor quality
if cells[1].find('img', alt='Nuked') != None:
continue
torrent_id_long = link[b'href'].replace('torrents.php?action=download&id=', '')
try:
if link.has_key('title'):
title = cells[1].find('a', {'title': 'View torrent'}).contents[0].strip()
else:
title = link.contents[0]
download_url = self.urls[b'download'] % (torrent_id_long)
seeders = cells[6].contents[0]
leechers = cells[7].contents[0]
size = -1
if re.match(r'\d+([,\.]\d+)?\s*[KkMmGgTt]?[Bb]', cells[4].contents[0]):
size = self._convertSize(cells[4].text.strip())
except (AttributeError, TypeError):
continue
if not all([title, download_url]):
continue
# Filter unseeded torrent
if seeders < self.minseed or leechers < self.minleech:
if mode != 'RSS':
logging.debug(
"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
title, seeders, leechers))
continue
item = title, download_url, size, seeders, leechers
if mode != 'RSS':
logging.debug("Found result: %s " % title)
items[mode].append(item)
except Exception as e:
logging.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())
# For each search mode sort all the items by seeders if available
items[mode].sort(key=lambda tup: tup[3], reverse=True)
results += items[mode]
return results
def seedRatio(self):
return self.ratio
def _convertSize(self, sizeString):
size = sizeString[:-2].strip()
modifier = sizeString[-2:].upper()
try:
size = float(size)
if modifier in 'KB':
size = size * 1024
elif modifier in 'MB':
size = size * 1024 ** 2
elif modifier in 'GB':
size = size * 1024 ** 3
elif modifier in 'TB':
size = size * 1024 ** 4
except Exception:
size = -1
return int(size)
class MoreThanTVCache(tvcache.TVCache):
def __init__(self, provider_obj):
tvcache.TVCache.__init__(self, provider_obj)
# poll delay in minutes
self

hakuliu/inf552 | hw3/pca.py | Python | apache-2.0 | 1,541 | 0.008436
import numpy
def doPCA(data, dim):
data = makeDataMatrix(data)
means = getMeanVector(data)
data = normalizeData(data, means)
cov = getCov(data)
eigvals, eigvecs = getEigs(cov)
principalComponents = sortEigs(eigvals, eigvecs)
return getDimensions(dim, principalComponents)
def getDimensions(d, pc):
if d <= len(pc):
result = numpy.zeros((d, len(pc[0])))
for i in range(d):
result[i] = pc[:,i]
return result
else: return None
def sortEigs(vals, vecs):
result = numpy.zeros((len(vecs), len(vecs[0])))
#selection sort because vals is short for now so it should be fast enough
lastMax = float("inf")
for i in range(len(vals)):
currentMax = float("-inf")
currentInd = -1
for j in range(len(vals)):
if vals[j] > currentMax and vals[j] < lastMax:
currentMax = vals[j]
currentInd = j
if currentInd != -1:
result[i] = vecs[currentInd]
lastMax = currentMax
return result
def getEigs(cov):
return numpy.linalg.eig(cov)
def getCov(data):
return numpy.cov(data)
def getMeanVector(data):
result = numpy.zeros(len(data))
for i in range(len(data)):
result[i] = numpy.mean(data[i,:])
return result
def normalizeData(data, means):
result = numpy.zeros((len(data), len(data[0])))
for i in range(len(data)):
result[i] = data[i,:] - means[i]
return result
def makeDataMatrix(data):
return numpy.transpose(data)
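
A hedged usage sketch of doPCA on random data; the only claim made here is about the output shapes.

# Assumes the functions above are in scope and reuses their numpy import.
data = numpy.random.rand(100, 3)  # 100 samples, 3 features
pcs = doPCA(data, 2)              # rows are the two leading components
print(pcs.shape)                  # (2, 3)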

ohanar/PolyBoRi | pyroot/polybori/partial.py | Python | gpl-2.0 | 1,509 | 0
from polybori import BooleSet, interpolate_smallest_lex
class PartialFunction(object):
"""docstring for PartialFunction"""
def __init__(self, zeros, ones):
super(PartialFunction, self).__init__()
self.zeros = zeros.set()
self.ones = ones.set()
def interpolate_smallest_lex(self):
return interpolate_smallest_lex(self.zeros, self.ones)
def __str__(self):
return "PartialFunction(zeros=" + str(self.zeros) + ", ones=" + str(
self.ones) + ")"
def definedOn(self):
return self.zeros.union(self.ones)
def __add__(self, other):
domain = self.definedOn().intersect(other.definedOn())
zeros = self.zeros.intersect(other.zeros).union(self.ones.intersect(
other.ones))
ones = self.zeros.intersect(other.ones).union(self.ones.intersect(
other.zeros))
assert zeros.diff(domain).empty()
assert ones.diff(domain).empty()
return PartialFunction(zeros, ones)
def __repr__(self):
return str(self)
def __mul__(self, other):
zeros = self.zeros.union(other.zeros)
ones = self.ones.intersect(other.ones)
return PartialFunction(zeros, ones)
def __or__(self, other):
zeros = self.zeros.intersect(other.zeros)
ones = self.ones.union(other.ones)
return PartialFunction(zeros, ones)
def __xor__(self, other):
return self + other
def __and__(self, other):
return self * other

CarlosPena00/Mobbi | Rasp/nrf/lib_nrf24/example-nrf24-recv.py | Python | mit | 1,196 | 0.020067
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Example program to receive packets from the radio link
#
import virtGPIO as GPIO
from lib_nrf24 import NRF24
import time
pipes = [[0xe7, 0xe7, 0xe7, 0xe7, 0xe7], [0xc2, 0xc2, 0xc2, 0xc2, 0xc2]]
radio2 = NRF24(GPIO, GPIO.SpiDev())
radio2.begin(9, 7)
radio2.setRetries(15,15)
radio2.setPayloadSize(32)
radio2.setChannel(0x60)
radio2.setDataRate(NRF24.BR_2MBPS)
radio2.setPALevel(NRF24.PA_MIN)
radio2.setAutoAck(True)
radio2.enableDynamicPayloads()
radio2.enableAckPayload()
radio2.openWritingPipe(pipes[0])
radio2.openReadingPipe(1, pipes[1])
radio2.startListening()
radio2.stopListening()
radio2.printDetails()
radio2.startListening()
c=1
while True:
akpl_buf = [c,1, 2, 3,4,5,6,7,8,9,0,1, 2, 3,4,5,6,7,8]
pipe = [0]
while not radio2.available(pipe):
time.sleep(10000/1000000.0)
recv_buffer = []
radio2.read(recv_buffer, radio2.getDynamicPayloadSize())
print ("Received:") ,
print (recv_buffer)
c = c + 1
if (c&1) == 0:
radio2.writeAckPayload(1, akpl_buf, len(akpl_buf))
print ("Loaded payload reply:"),
print (akpl_buf)
else:
print ("(No return payload)")

plilja/algolib | util/checksol.py | Python | apache-2.0 | 2,044 | 0.005382
#!/usr/bin/env python
import sys
import os
import tempfile
import glob
import filecmp
import time
from argparse import ArgumentParser
usage = "usage: %prog [options] program_to_test"
parser = ArgumentParser(description="""Testrunner for programming puzzles, runs a program against each
.in-file and checks the output against the corresponding .out-file using unix diff""")
parser.add_argument("-v", "--verbose", action="store_true", help="Be verbose", required=False, default=False)
parser.add_argument("-e", "--executor", dest="executor", default="", help="Execute the program with this executor (ex: java or python)")
parser.add_argument("-d", "--directory", dest="directory", default="", help="""The directory where test files with extensions .in
and .ans can be found (default is a a folder named test placed as
a subfolder to the folder where the program is located)""")
parser.add_argument("program")
args = parser.parse_args()
program = args.program
if program[0] != '.':
program = "./" + program
f = open(program)
program_path = os.path.dirname(program)
if args.directory:
test_search_path = "%s/*.in" % args.directory
else:
test_search_path = "%s/test/*.in" % program_path
success = True
tests_found = False
try:
for test_file in glob.glob(test_search_path):
tests_found = True
start = time.time()
os.system(args.executor + " " + program + "<" + test_file + " > answer.tmp")
end = time.time()
test_exp_file = test_file.replace(".in", ".ans")
if not filecmp.cmp(test_exp_file, "answer.tmp"):
success = False
print(test_file + ", FAILED")
elif args.verbose:
print(test_file + ", succes")
if args.verbose:
print(test_file + ", execution time = " + str(end - start))
finally:
if os.path.isfile("answer.tmp"):
os.remove("answer.tmp")
if not tests_found:
print("No test files found")
elif success:
print("Success")
else:
print("Failed (%s)" % program)

ylitormatech/terapialaskutus | config/urls.py | Python | bsd-3-clause | 1,837 | 0.003266
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name="home"),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"),
# Django Admin, use {% url 'admin:index' %}
url(r'^' + settings.ADMIN_URL, include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# User management
url(r'^users/', include("therapyinvoicing.users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
url(r'^customers/', include("therapyinvoicing.customers.urls", namespace="customers")),
url(r'^customerinvoicing/', include("therapyinvoicing.customerinvoicing.urls", namespace="customerinvoicing")),
url(r'^kelainvoicing/', include("therapyinvoicing.kelainvoicing.urls", namespace="kelainvoicing")),
url(r'^api/', include("therapyinvoicing.api.urls", namespace="api")),
url(r'^reporting/', include("therapyinvoicing.reporting.urls", namespace="reporting")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
# This allows the error pages to be debugged during development: just visit
# these URLs in a browser to see what the error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request),
url(r'^403/$', default_views.permission_denied),
url(r'^404/$', default_views.page_not_found),
url(r'^500/$', default_views.server_error),
]

linuxserver/davos | docs/source/conf.py | Python | mit | 5,191 | 0.000385
# -*- coding: utf-8 -*-
#
# davos documentation build configuration file, created by
# sphinx-quickstart on Sat Jul 29 08:01:32 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'davos'
copyright = u'2017, Josh Stark'
author = u'Josh Stark'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'2.2'
# The full version, including alpha/beta/rc tags.
release = u'2.2.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'alabaster'
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'donate.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'davosdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'davos.tex', u'davos Documentation',
u'Josh Stark', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'davos', u'davos Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'davos', u'davos Documentation',
author, 'davos', 'One line description of project.',
'Miscellaneous'),
]

JohnGarbutt/taskflow-1 | setup.py | Python | apache-2.0 | 1,467 | 0
#!/usr/bin/env python
import os
import setuptools
def _clean_line(line):
line = line.strip()
line = line.split("#")[0]
line = line.strip()
return line
def read_requires(base):
path = os.path.join('tools', base)
requires = []
if not os.path.isfile(path):
return requires
with open(path, 'rb') as h:
for line in h.read().splitlines():
line = _clean_line(line)
if not line:
continue
requires.append(line)
return requires
setuptools.setup(
name='taskflow',
version='0.0.1',
author='OpenStack',
license='Apache Software License',
description='Taskflow structured state management library.',
long_description='The taskflow library provides core functionality that '
'can be used to build [resumable, reliable, '
'easily understandable, ...] highly available '
'systems which process workflows in a structured manner.',
author_email='openstack-dev@lists.openstack.org',
url='http://www.openstack.org/',
packages=setuptools.find_packages(),
tests_require=read_requires('test-requires'),
install_requires=read_requires('pip-requires'),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6', ],
)
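
A hedged illustration of the _clean_line requirements-parsing helper above; the requirement string is invented.

print(_clean_line('  six>=1.9  # needed for py2/py3 compat '))  # -> six>=1.9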

csaldias/python-usm | Ejercicios progra.usm.cl/Parte 2/7- Procesamiento de Texto/vocales_consonantes.py | Python | mit | 466 | 0.032189
def es_vocal(letra):
if letra in 'aeiou':
return True
else:
return False
def contar_vocales_y_consonantes(palabra):
cuenta_vocal = 0
cuenta_consonante = 0
for letra in palabra:
if es_vocal(letra):
cuenta_vocal += 1
else:
cuenta_consonante += 1
return (cuenta_vocal, cuenta_consonante)
palabra = raw_input("Ingrese palabra: ")
vocal, consonante = contar_vocales_y_consonantes(palabra)
print "Tiene", vocal, "vocales y", consonante, "consonantes"

endlessm/chromium-browser | third_party/catapult/telemetry/telemetry/internal/results/artifact_logger.py | Python | bsd-3-clause | 2,300 | 0.004783
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Logging-like module for creating artifacts.
In order to actually create artifacts, RegisterArtifactImplementation must be
called from somewhere with an artifact implementation to use, otherwise
CreateArtifact will just end up logging the first 100 characters of the given
data.
This registration is automatically handled in tests that use Telemetry or typ as
their test runner, so it should only really need to be used if you are adding a
new test runner type.
Example usage:
# During test setup.
artifact_logger.RegisterArtifactImplementation(self.results)
# At any point in the test afterwards, from any module.
artifact_logger.CreateArtifact('some/crash/stack.txt', GetStackTrace())
"""
import datetime
from telemetry.internal.results import (
artifact_compatibility_wrapper as artifact_wrapper)
artifact_impl = artifact_wrapper.ArtifactCompatibilityWrapperFactory(None)
def CreateArtifact(name, data):
"""Create an artifact with the given data.
Args:
name: The name of the artifact, can include '/' to organize artifacts
within a hierarchy.
data: The data to write to the artifact.
"""
artifact_impl.CreateArtifact(name, data)
def RegisterArtifactImplementation(artifact_implementation):
"""Register the artifact implementation used to log future artifacts.
Args:
artifact_implementation: The artifact implementation to use for future
artifact creations. Must be supported in
artifact_compatibility_wrapper.ArtifactCompatibilityWrapperFactory.
"""
global artifact_impl # pylint: disable=global-statement
artifact_impl = artifact_wrapper.ArtifactCompatibilityWrapperFactory(
artifact_implementation)
def GetTimestampSuffix():
"""Gets the current time as a human-readable string.
The returned value is suitable to use as a suffix for avoiding artifact name
collision across different tests.
"""
# Format is YYYY-MM-DD-HH-MM-SS-microseconds. The microseconds are to prevent
# name collision if two artifacts with the same name are created in close
# succession.
return datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S-%f')

Dark-Bob/mro | mro/connection.py | Python | mit | 910 | 0.002198
import atexit
connection = None
connection_function = None
reconnect_function = None
hooks = None
def set_connection_function(_connection_function):
global connection
global connection_function
connection_function = _connection_function
connection = connection_function()
def disconnect():
global connection
try:
connection.close()
except:
pass
def set_on_reconnect(_reconnect_function):
global reconnect_function
reconnect_function = _reconnect_function
def set_hooks(_hooks):
global hooks
hooks = _hooks
def reconnect():
global connection
global connection_function
global reconnect_function
connection = connection_function()
print("***********RECONNECTING DATABASE************")
reconnect_function(connection)
if hooks is not None:
for hook in hooks:
hook()
atexit.register(disconnect)
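
A hedged usage sketch of the connection plumbing above; sqlite3 stands in for whatever database driver the project actually pairs this module with.

import sqlite3
set_connection_function(lambda: sqlite3.connect(':memory:'))
set_on_reconnect(lambda conn: print('reconnected:', conn))
reconnect()  # builds a fresh connection and invokes the reconnect hook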

Jortolsa/l10n-spain | l10n_es_aeat_mod349/wizard/export_mod349_to_boe.py | Python | agpl-3.0 | 13,634 | 0.000074
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C)
# 2004-2011: Pexego Sistemas Informáticos. (http://pexego.es)
# 2013: Top Consultant Software Creations S.L.
# (http://www.topconsultant.es/)
# 2014: Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com)
# Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
#
# Autores originales: Luis Manuel Angueira Blanco (Pexego)
# Omar Castiñeira Saavedra(omar@pexego.es)
# Migración OpenERP 7.0: Ignacio Martínez y Miguel López.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, exceptions, _
class Mod349ExportToBoe(models.TransientModel):
_inherit = "l10n.es.aeat.report.export_to_boe"
_name = "l10n.es.aeat.mod349.export_to_boe"
_description = "Export AEAT Model 349 to BOE format"
@api.multi
def _get_company_name_with_title(self, company_obj):
"""Returns company name with title."""
if company_obj.partner_id and company_obj.partner_id.title:
return company_obj.name + ' ' + \
company_obj.partner_id.title.name.capitalize()
return company_obj.name
@api.multi
def _get_formatted_declaration_record(self, report):
"""Returns a type 1, declaration/company, formated record.
· All amounts must be positives
· Numeric fields with no data must be filled with zeros
· Alfanumeric/Alfabetic fields with no data must be filled with
empty spaces
· Numeric fields must be right aligned and filled with zeros on
the left
· Alfanumeric/Alfabetic fields must be uppercase left aligned,
filled with empty spaces on right side. No special characters
allowed unless specified in field description
Format of the record:
Tipo registro 1 – Registro de declarante:
Posiciones Naturaleza Descripción
1 Numérico Tipo de Registro Constante = '1'
2-4 Numérico Modelo Declaración Constante = '349'
5-8 Numérico Ejercicio
9-17 Alfanumérico NIF del declarante
18-57 Alfanumérico Apellidos y nombre o razón social del
declarante
58 Alfabético Tipo de soporte
59-67 Numérico (9) Teléfono contacto
68-107 Alfabético Apellidos y nombre contacto
108-120 Numérico Número identificativo de la declaración
121-122 Alfabético Declaración complementaria o
substitutiva
123-135 Numérico Número identific
|
ativo de la declaración
anterior
136-137 Alfanumérico Período
138-146 Numérico Número total de operadores
intracomunitarios
147-161 Numérico Importe de las operac
|
iones
intracomunitarias
147-159 Numérico Importe de las operaciones
intracomunitarias (parte entera)
160-161 Numérico Importe de las operaciones
intracomunitarias (parte decimal)
162-170 Numérico Número total de operadores
intracomunitarios con rectificaciones
171-185 Numérico Importe total de las rectificaciones
171-183 Numérico Importe total de las rectificaciones
(parte entera)
184-185 Numérico Importe total de las rectificaciones
(parte decimal)
186 Alfabético Indicador cambio periodicidad en la
obligación a declarar (X o '')
187-390 Blancos ---------------------------------------
391-399 Alfanumérico NIF del representante legal
400-487 Blancos ---------------------------------------
488-500 Sello electrónico
"""
assert report, 'No Report defined'
period = (report.period_selection == 'MO' and report.month_selection or
report.period_selection)
text = super(Mod349ExportToBoe,
self)._get_formatted_declaration_record(report)
        text += self._formatString(period, 2)  # Period
        # Total number of intra-community operators
        text += self._formatNumber(report.total_partner_records, 9)
        # Total amount of the intra-community operations
        text += self._formatNumber(report.total_partner_records_amount, 13, 2)
        # Total number of intra-community operators with rectifications
        text += self._formatNumber(report.total_partner_refunds, 9)
        # Total amount of the rectifications
        text += self._formatNumber(report.total_partner_refunds_amount, 13, 2)
        # Indicator of a periodicity change in the reporting obligation
        text += self._formatBoolean(report.frequency_change)
        text += 204 * ' '  # Blanks
        # Legal representative's VAT number (NIF)
        text += self._formatString(report.representative_vat, 9)
        # text += 9*' '
        text += 88 * ' '  # Blanks
        text += 13 * ' '  # Electronic stamp
        text += '\r\n'  # Carriage return + line feed
assert len(text) == 502, \
_("The type 1 record must be 502 characters long")
return text
@api.multi
def _get_formatted_main_record(self, report):
file_contents = ''
for partner_record in report.partner_record_ids:
file_contents += self._get_formated_partner_record(
report, partner_record)
for refund_record in report.partner_refund_ids:
file_contents += self._get_formatted_partner_refund(
report, refund_record)
return file_contents
@api.multi
def _get_formated_partner_record(self, report, partner_record):
"""Returns a type 2, partner record
Format of the record:
Tipo registro 2
Posiciones Naturaleza Descripción
1 Numérico Tipo de Registro Constante = '2'
2-4 Numérico Modelo Declaración onstante = '349'
5-8 Numérico Ejercicio
9-17 Alfanumérico NIF del declarante
18-75 Blancos ---------------------------------------
76-92 Alfanumérico NIF operador Intracomunitario
76-77 Alfanumérico Codigo de País
78-92 Alfanumérico NIF
93-132 Alfanumérico Apellidos y nombre o razón social del
operador intracomunitario
133 Alfanumérico Clave de operación
134-146 Numérico Base imponible
134-144 Numérico Base imponible (parte entera)
145-146 Nu
|
DanielBaird/CliMAS-Next-Generation
|
climas-ng/climasng/tests/test_prosemaker_conditions_rangenum.py
|
Python
|
mit
| 8,241
| 0.005096
|
import unittest
import transaction
from pyramid import testing
from climasng.tests import ProseMakerTestCase
from climasng.parsing.prosemaker import ProseMaker
# ===================================================================
class TestProseMakerConditions(ProseMakerTestCase):
# ------------------------------------------------------- test --
def test_pm_condition_rangeequality_litnum_comparison(self):
samples = {
# these sources should result in 'showing'
'showing': [ '[[10 =2= 11]]showing',
'[[11 =2= 10]]showing',
'[[10 =5= 6]]showing',
'[[1.0 =0.1= 1.1]]showing',
'[[1 =0= 1]]showing',
],
# all these docs should result in ''
'': [ '[[10 =3= 6]]hiding',
'[[6 =3= 10]]hiding',
'[[1 =0= 1.01]]hiding',
'[[1 =0.1= 1.2]]hiding',
]
}
for sample_result, sample_docs in samples.items():
for sample_doc in sample_docs:
self.assertParses(sample_doc, sample_result)
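    # From the fixtures above, '[[a =t= b]]text' appears to render 'text'
    # exactly when |a - b| <= t, i.e. '=t=' is an absolute-tolerance
    # equality test (inferred from the samples, not from ProseMaker docs).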
# ------------------------------------------------------- test --
def test_pm_condition_rangeequality_litnumpercent_comparison(self):
samples = {
# these sources should result in 'showing'
'showing': [ '[[1 =15%= 1.1]]showing',
'[[10 =15%= 11]]showing',
'[[1000 =15%= 1100]]showing',
'[[79 =25%= 100]]showing',
'[[1234 =1%= 1236]]showing',
],
# all these docs should result in ''
'': [ '[[10 =10%= 6]]hiding',
'[[100 =25%= 79]]hiding',
'[[1.01 =10%= 10]]hiding',
'[[99.5 =0.1%= 100]]hiding',
]
}
for sample_result, sample_docs in samples.items():
for sample_doc in sample_docs:
self.assertParses(sample_doc, sample_result)
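    # The percent form looks asymmetric: '[[a =p%= b]]' seems to show when
    # |a - b| <= p% of the right operand b, which is why '[[79 =25%= 100]]'
    # shows while '[[100 =25%= 79]]' hides (again inferred from the samples).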
# ------------------------------------------------------- test --
def test_pm_condition_rangeequality_varnum_comparison(self):
self.pm.data = { 'one': 1, 'two': 2, 'aten': 10 }
samples = {
# these sources should result in 'showing'
'showing': [ '[[aten =2= 11]]showing',
'[[11 =2= aten]]showing',
'[[aten =5= 6]]showing',
'[[1 =0= one]]showing'
],
# all these docs should result in ''
'': [ '[[aten =3= 6]]hiding',
'[[6 =3= aten]]hiding'
]
}
for sample_result, sample_docs in samples.items():
for sample_doc in sample_docs:
self.assertParses(sample_doc, sample_result)
# ------------------------------------------------------- test --
def test_pm_condition_rangeleftrocket_litnum_comparison(self):
samples = {
# these sources should result in 'showing'
'showing': [ '[[10 <2= 11]]showing',
'[[6 <5= 10]]showing',
'[[1.0 <0.1= 1.1]]showing',
],
# all these docs should result in ''
'': [ '[[10 <3= 6]]hiding',
'[[1 <0= 1]]hiding',
'[[10 <5= 6]]hiding',
'[[11 <2= 10]]hiding',
'[[6 <3= 10]]hiding',
'[[1 <0= 1.01]]hiding',
'[[1 <0.1= 1.2]]hiding',
]
}
for sample_result, sample_docs in samples.items():
for sample_doc in sample_docs:
self.assertParses(sample_doc, sample_result)
# ------------------------------------------------------- test --
def test_pm_condition_rangeleftrocket_varnum_comparison(self):
self.pm.data = { 'one': 1, 'two': 2, 'aten': 10 }
samples = {
# these sources should result in 'showing'
'showing': [ '[[aten <2= 11]]showing',
'[[6 <5= aten]]showing',
'[[one <0.1= 1.1]]showing',
],
# all these docs should result in ''
'': [ '[[aten <3= 6]]hiding',
'[[one <0= one]]hiding',
'[[aten <5= 6]]hiding',
'[[11 <2= aten]]hiding',
'[[6 <3= aten]]hiding',
'[[one <0.1= 1.2]]hiding',
]
}
for sample_result, sample_docs in samples.items():
for sample_doc in sample_docs:
self.assertParses(sample_doc, sample_result)
# ------------------------------------------------------- test --
def test_pm_condition_rangerightrocket_litnum_comparison(self):
samples = {
# these sources should result in 'showing'
'showing': [ '[[11 =2> 10]]showing',
'[[10 =5> 6]]showing',
'[[1.1 =0.1> 1.0]]showing',
],
# all these docs should result in ''
'': [ '[[6 =3> 10]]hiding',
'[[1 =0> 1]]hiding',
'[[6 =5> 10]]hiding',
'[[10 =2> 11]]hiding',
'[[10 =3> 6]]hiding',
'[[1.01 =0> 1]]hiding',
'[[1.2 =0.1> 1]]hiding',
]
}
for sample_result, sample_docs in samples.items():
for sample_doc in sample_docs:
self.assertParses(sample_doc, sample_result)
# ------------------------------------------------------- test --
def test_pm_condition_rangemuchlessthan_litnum_comparison(self):
samples = {
# these sources should result in 'showing'
'showing': [ '[[6 <3< 10]]showing',
'[[1.0 <0.1< 1.101]]showing',
'[[1.0 <0.1< 1.2]]showing',
'[[0.99 <0< 1]]showing',
],
# all these docs should result in ''
'': [ '[[1.01 <0.1< 1.1]]hiding',
'[[1 <0.1< 1.1]]hiding',
'[[6 <5< 10]]hiding',
'[[1 <0< 1]]hiding',
'[[1 <1< 0.99]]hiding',
'[[10 <2< 11]]hiding',
'[[1.01 <0< 1]]hiding',
'[[1.2 <0.1< 1]]hiding',
]
}
for sample_result, sample_docs in samples.items():
for sample_doc in sample_docs:
self.assertParses(sample_doc, sample_result)
# ------------------------------------------------------- test --
def test_pm_condition_rangemuchgreaterthan_litnum_comparison(self):
samples = {
# these sources should result in 'showing'
'showing': [ '[[10 >3> 6]]showing',
'[[1.101 >0.1> 1.0]]showing',
'[[1.2 >0.1> 1.0]]showing',
'[[1 >0> 0.99]]showing',
],
# all these docs should result in ''
'': [ '[[1.1 >0.1> 1.01]]hiding',
'[[1.1 >0.1> 1]]hiding',
'[[10 >5> 6]]hiding',
'[[1 >0> 1]]hiding',
'[[0.99 >1> 1]]hiding',
'[[11 >2> 10]]hiding',
'[[1 >0> 1.01]]hiding',
                      '[[1 >0.1> 1.2]]hiding',
]
}
for sample_result, sample_docs in samples.items():
for sample_doc in sample_docs:
self.assertParses(sample_doc, sample_result)
# =======
|
infoscout/weighted-levenshtein
|
weighted_levenshtein/__init__.py
|
Python
|
mit
| 20
| 0
|
from .clev import *
|
AutonomyLab/deep_intent
|
code/autoencoder_model/scripts/config_nmta.py
|
Python
|
bsd-3-clause
| 3,079
| 0.003573
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras.optimizers import SGD
from keras.optimizers import Adam
from keras.optimizers import adadelta
from keras.optimizers import rmsprop
from keras.layers import Layer
from keras import backend as K
K.set_image_dim_ordering('tf')
import socket
import os
# -------------------------------------------------
# Background config:
hostname = socket.gethostname()
if hostname == 'baymax':
path_var = 'baymax/'
elif hostname == 'walle':
path_var = 'walle/'
elif hostname == 'bender':
path_var = 'bender/'
else:
path_var = 'zhora/'
DATA_DIR= '/local_home/JAAD_Dataset/iros/resized_imgs_128/train/'
# DATA_DIR= '/local_home/data/KITTI_data/'
HD_DATA_DIR= '/local_home/JAAD_Dataset/iros/resized_imgs_256/train/'
VAL_DATA_DIR= '/local_home/JAAD_Dataset/iros/resized_imgs_128/val/'
VAL_HD_DATA_DIR= '/local_home/JAAD_Dataset/iros/resized_imgs_256/val/'
TEST_DATA_DIR= '/local_home/JAAD_Dataset/iros/resized_imgs_128/test/'
MODEL_DIR = './../' + path_var + 'models'
if not os.path.exists(MODEL_DIR):
os.mkdir(MODEL_DIR)
CHECKPOINT_DIR = './../' + path_var + 'checkpoints'
if not os.path.exists(CHECKPOINT_DIR):
os.mkdir(CHECKPOINT_DIR)
ATTN_WEIGHTS_DIR = './../' + path_var + 'attn_weights'
if not os.path.exists(ATTN_WEIGHTS_DIR):
os.mkdir(ATTN_WEIGHTS_DIR)
GEN_IMAGES_DIR = './../' + path_var + 'generated_images'
if not os.path.exists(GEN_IMAGES_DIR):
os.mkdir(GEN_IMAGES_DIR)
CLA_GEN_IMAGES_DIR = GEN_IMAGES_DIR + '/cla_gen/'
if not os.path.exists(CLA_GEN_IMAGES_DIR):
os.mkdir(CLA_GEN_IMAGES_DIR)
LOG_DIR = './../' + path_var + 'logs'
if not os.path.exists(LOG_DIR):
os.mkdir(LOG_DIR)
TF_LOG_DIR = './../' + path_var + 'tf_logs'
if not os.path.exists(TF_LOG_DIR):
os.mkdir(TF_LOG_DIR)
TF_LOG_GAN_DIR = './../' + path_var + 'tf_gan_logs'
if not os.path.exists(TF_LOG_GAN_DIR):
os.mkdir(TF_LOG_GAN_DIR)
TEST_RESULTS_DIR = './../' + path_var + 'test_results'
if not os.path.exists(TEST_RESULTS_DIR):
os.mkdir(TEST_RESULTS_DIR)
PRINT_MODEL_SUMMARY = True
SAVE_MODEL = True
PLOT_MODEL = True
SAVE_GENERATED_IMAGES = True
SHUFFLE = True
VIDEO_LENGTH = 30
IMG_SIZE = (128, 128, 3)
ADVERSARIAL = False
BUF_SIZE = 10
LOSS_WEIGHTS = [1, 1]
ATTN_COEFF = 0
KL_COEFF = 0
# -------------------------------------------------
# Network configuration:
print ("Loading network/training configuration.")
print ("Config file: " + str(__name__))
BATCH_SIZE = 7
NB_EPOCHS_AUTOENCODER = 30
NB_EPOCHS_GAN = 0
OPTIM_A = Adam(lr=0.0001, beta_1=0.5)
OPTIM_G = Adam(lr=0.00001, beta_1=0.5)
# OPTIM_D = Adam(lr=0.000001, beta_1=0.5)
# OPTIM_D = SGD(lr=0.000001, momentum=0.5, nesterov=True)
OPTIM_D = rmsprop(lr=0.000001)
lr_schedule = [10, 20, 30] # epoch_step
def schedule(epoch_idx):
if (epoch_idx + 1) < lr_schedule[0]:
return 0.0001
elif (epoch_idx + 1) < lr_schedule[1]:
return 0.0001 # lr_decay_ratio = 10
elif (epoch_idx + 1) < lr_schedule[2]:
return 0.00001
return 0.000001
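# Resulting schedule, by inspection of the thresholds above:
#   schedule(5) -> 0.0001, schedule(25) -> 1e-05, schedule(35) -> 1e-06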
|
diogocs1/comps
|
web/addons/l10n_at/account_wizard.py
|
Python
|
apache-2.0
| 1,234
| 0.009724
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) conexus.at
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import osv
from openerp import addons
class AccountWizard_cd(osv.osv_memory):
_inherit='wizard.multi.charts.accounts'
_defaults = {
'code_digits' : 0,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
mtury/scapy
|
scapy/contrib/isotp.py
|
Python
|
gpl-2.0
| 75,260
| 0
|
#! /usr/bin/env python
# This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Nils Weiss <nils@we155.de>
# Copyright (C) Enrico Pozzobon <enricopozzobon@gmail.com>
# Copyright (C) Alexander Schroeder <alexander1.schroeder@st.othr.de>
# This program is published under a GPLv2 license
# scapy.contrib.description = ISO-TP (ISO 15765-2)
# scapy.contrib.status = loads
"""
ISOTPSocket.
"""
import ctypes
from ctypes.util import find_library
import struct
import socket
import time
from threading import Thread, Event, Lock, Semaphore
from scapy.packet import Packet
from scapy.fields import BitField, FlagsField, StrLenField, \
ThreeBytesField, XBitField, ConditionalField, \
BitEnumField, ByteField, XByteField, BitFieldLenField, StrField
from scapy.compat import chb, orb
from scapy.layers.can import CAN
import scapy.modules.six as six
import scapy.automaton as automaton
import six.moves.queue as queue
from scapy.error import Scapy_Exception, warning, log_loading
from scapy.supersocket import SuperSocket
from scapy.config import conf
from scapy.consts import LINUX
from scapy.contrib.cansocket import PYTHON_CAN
from scapy.sendrecv import sniff
from scapy.sessions import DefaultSession
__all__ = ["ISOTP", "ISOTPHeader", "ISOTPHeaderEA", "ISOTP_SF", "ISOTP_FF",
"ISOTP_CF", "ISOTP_FC", "ISOTPSoftSocket", "ISOTPSession",
"ISOTPSocket", "ISOTPSocketImplementation", "ISOTPMessageBuilder",
"ISOTPScan"]
USE_CAN_ISOTP_KERNEL_MODULE = False
if six.PY3 and LINUX:
LIBC = ctypes.cdll.LoadLibrary(find_library("c"))
try:
if conf.contribs['ISOTP']['use-can-isotp-kernel-module']:
USE_CAN_ISOTP_KERNEL_MODULE = True
except KeyError:
log_loading.info("Specify 'conf.contribs['ISOTP'] = "
"{'use-can-isotp-kernel-module': True}' to enable "
"usage of can-isotp kernel module.")
CAN_MAX_IDENTIFIER = (1 << 29) - 1 # Maximum 29-bit identifier
CAN_MTU = 16
CAN_MAX_DLEN = 8
ISOTP_MAX_DLEN_2015 = (1 << 32) - 1 # Maximum for 32-bit FF_DL
ISOTP_MAX_DLEN = (1 << 12) - 1 # Maximum for 12-bit FF_DL
N_PCI_SF = 0x00 # /* single frame */
N_PCI_FF = 0x10 # /* first frame */
N_PCI_CF = 0x20 # /* consecutive frame */
N_PCI_FC = 0x30 # /* flow control */
class ISOTP(Packet):
name = 'ISOTP'
fields_desc = [
StrField('data', B"")
]
__slots__ = Packet.__slots__ + ["src", "dst", "exsrc", "exdst"]
def answers(self, other):
if other.__class__ == self.__class__:
return self.payload.answers(other.payload)
return 0
def __init__(self, *args, **kwargs):
self.src = None
self.dst = None
self.exsrc = None
self.exdst = None
if "src" in kwargs:
self.src = kwargs["src"]
del kwargs["src"]
if "dst" in kwargs:
self.dst = kwargs["dst"]
del kwargs["dst"]
if "exsrc" in kwargs:
self.exsrc = kwargs["exsrc"]
del kwargs["exsrc"]
if "exdst" in kwargs:
self.exdst = kwargs["exdst"]
del kwargs["exdst"]
Packet.__init__(self, *args, **kwargs)
self.validate_fields()
def validate_fields(self):
if self.src is not None:
if not 0 <= self.src <= CAN_MAX_IDENTIFIER:
raise Scapy_Exception("src is not a valid CAN identifier")
if self.dst is not None:
if not 0 <= self.dst <= CAN_MAX_IDENTIFIER:
raise Scapy_Exception("dst is not a valid CAN identifier")
if self.exsrc is not None:
if not 0 <= self.exsrc <= 0xff:
raise Scapy_Exception("exsrc is not a byte")
if self.exdst is not None:
if not 0 <= self.exdst <= 0xff:
raise Scapy_Exception("exdst is not a byte")
def fragment(self):
data_bytes_in_frame = 7
if self.exdst is not None:
data_bytes_in_frame = 6
if len(self.data) > ISOTP_MAX_DLEN_2015:
raise Scapy_Exception("Too much data in ISOTP message")
if len(self.data) <= data_bytes_in_frame:
# We can do this in a single frame
frame_data = struct.pack('B', len(self.data)) + self.data
if self.exdst:
frame_data = struct.pack('B', self.exdst) + frame_data
pkt = CAN(identifier=self.dst, data=frame_data)
return [pkt]
# Construct the first frame
if len(self.data) <= ISOTP_MAX_DLEN:
frame_header = struct.pack(">H", len(self.data) + 0x1000)
else:
frame_header = struct.pack(">HI", 0x1000, len(self.data))
if self.exdst:
frame_header = struct.pack('B', self.exdst) + frame_header
idx = 8 - len(frame_header)
frame_data = self.data[0:idx]
frame = CAN(identifier=self.dst, data=frame_header + frame_data)
# Construct consecutive frames
n = 1
pkts = [frame]
while idx < len(self.data):
            frame_data = self.data[idx:idx + data_bytes_in_frame]
frame_header = struct.pack("b", (n % 16) + N_PCI_CF)
n += 1
idx += len(frame_data)
if self.exdst:
frame_header = struct.pack('B', self.exdst) + frame_header
pkt = CAN(identifier=self.dst, data=frame_header + frame_data)
pkts.append(pkt)
return pkts
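    # Fragmentation sketch (illustrative identifier): an 8-byte payload with
    # no extended address exceeds the 7-byte single-frame limit, so
    #   ISOTP(data=b'A' * 8, dst=0x641).fragment()
    # yields a first frame carrying 6 bytes (after the 2-byte FF header)
    # followed by one consecutive frame with the remaining 2 bytes.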
@staticmethod
def defragment(can_frames, use_extended_addressing=None):
if len(can_frames) == 0:
raise Scapy_Exception("ISOTP.defragment called with 0 frames")
dst = can_frames[0].identifier
for frame in can_frames:
if frame.identifier != dst:
warning("Not all CAN frames have the same identifier")
parser = ISOTPMessageBuilder(use_extended_addressing)
for c in can_frames:
parser.feed(c)
results = []
while parser.count > 0:
p = parser.pop()
if (use_extended_addressing is True and p.exdst is not None) \
or (use_extended_addressing is False and p.exdst is None) \
or (use_extended_addressing is None):
results.append(p)
if len(results) == 0:
return None
        if len(results) > 1:
warning("More than one ISOTP frame could be defragmented from the "
"provided CAN frames, returning the first one.")
return results[0]
class ISOTPHeader(CAN):
name = 'ISOTPHeader'
fields_desc = [
FlagsField('flags', 0, 3, ['error',
'remote_transmission_request',
'extended']),
XBitField('identifier', 0, 29),
ByteField('length', None),
ThreeBytesField('reserved', 0),
]
def extract_padding(self, p):
return p, None
def post_build(self, pkt, pay):
"""
This will set the ByteField 'length' to the correct value.
"""
if self.length is None:
pkt = pkt[:4] + chb(len(pay)) + pkt[5:]
return pkt + pay
def guess_payload_class(self, payload):
"""
ISOTP encodes the frame type in the first nibble of a frame.
"""
t = (orb(payload[0]) & 0xf0) >> 4
if t == 0:
return ISOTP_SF
elif t == 1:
return ISOTP_FF
elif t == 2:
return ISOTP_CF
else:
return ISOTP_FC
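    # Example: a payload starting with 0x05 has high nibble 0 and is parsed
    # as ISOTP_SF (a single frame carrying 5 data bytes); 0x1n selects
    # ISOTP_FF, 0x2n ISOTP_CF, and any other high nibble falls through to
    # ISOTP_FC.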
class ISOTPHeaderEA(ISOTPHeader):
name = 'ISOTPHeaderExtendedAddress'
fields_desc = ISOTPHeader.fields_desc + [
XByteField('extended_address', 0),
]
def post_build(self, p, pay):
"""
This will set the ByteField 'length' to the correct value.
'chb(len(pay) + 1)' is required, because the field 'extended_address'
is counted as payload on the CAN layer
"""
if self.length is None:
p = p[:4] + chb(len(pay) + 1) + p[5:]
return p + pay
ISOTP_TYPE = {0: 'single',
1:
|
Rosebotics/pymata-aio
|
examples/sparkfun_redbot/sparkfun_experiments/Exp6_LineFollowing_IRSensors.py
|
Python
|
gpl-3.0
| 2,131
| 0.001877
|
"""//***********************************************************************
* Exp6_LineFollowing_IRSensors -- RedBot Experiment 6
*
* This code reads the three line following sensors on A3, A6, and A7
* and prints them out to the Serial Monitor. Upload this example to your
* RedBot and open up the Serial Monitor by clicking the magnifying glass
* in the upper-right hand corner.
*
* This sketch was written by SparkFun Electronics,with lots of help from
* the Arduino community. This code is completely free for any use.
*
* 8 Oct 2013 M. Hord
* Revised, 31 Oct 2014 B. Huang
 * Revised, 2 Oct 2015 L Mathews
***********************************************************************/"""
import sys
import signal
from pymata_aio.pymata3 import PyMata3
from library.redbot import RedBotSensor
WIFLY_IP_ADDRESS = None # Leave set as None if not using WiFly
WIFLY_IP_ADDRESS = "10.0.1.18" # If using a WiFly on the RedBot, set the ip address here.
if WIFLY_IP_ADDRESS:
    board = PyMata3(ip_address=WIFLY_IP_ADDRESS)
else:
# Use a USB cable to RedBot or an XBee connection instead of WiFly.
COM_PORT = None # Use None for automatic com port detection, or set if needed i.e. "COM7"
board = PyMata3(com_port=COM_PORT)
LEFT_LINE_FOLLOWER = 3 # pin number assignments for each IR sensor
CENTRE_LINE_FOLLOWER = 6
RIGHT_LINE_FOLLOWER = 7
IR_sensor_1 = RedBotSensor(board, LEFT_LINE_FOLLOWER)
IR_sensor_2 = RedBotSensor(board, CENTRE_LINE_FOLLOWER)
IR_sensor_3 = RedBotSensor(board, RIGHT_LINE_FOLLOWER)
def signal_handler(sig, frame):
"""Helper method to shutdown the RedBot if Ctrl-c is pressed"""
print('\nYou pressed Ctrl+C')
if board is not None:
board.send_reset()
board.shutdown()
sys.exit(0)
def setup():
signal.signal(signal.SIGINT, signal_handler)
print("Welcome to Experiment 6!")
print("------------------------")
def loop():
board.sleep(0.1)
print("IR Sensor Readings: {}, {}, {}".format(IR_sensor_1.read(), IR_sensor_2.read(), IR_sensor_3.read()))
if __name__ == "__main__":
setup()
while True:
loop()
|
OpenSourceOV/cavicapture
|
calibrate.py
|
Python
|
gpl-3.0
| 3,982
| 0.010296
|
from cavicapture import CaviCapture
from process import CaviProcess
import sys, os, getopt
import time, datetime
import numpy as np
import matplotlib.pyplot as plt
def main():
config_path = './config.ini' # default
try:
opts, args = getopt.getopt(sys.argv[1:], "c", ["config="])
except getopt.GetoptError:
print("Argument error")
sys.exit(2)
for opt, arg in opts:
if opt in ("--config"):
config_path = arg
calibrator = CaviCalibrate(config_path)
calibrator.init_calibration()
class CaviCalibrate:
def __init__(self, config_path):
self.output_dir = "./calibration"
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
self.output_dir = self.output_dir + "/" + datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
self.cavi_capture = CaviCapture(config_path)
self.cavi_capture.log_file = self.output_dir + "/capture.log.txt"
self.cavi_capture.get_ini_config()
self.cavi_capture.setup_gpio()
self.cavi_capture.setup_camera()
self.cavi_process = CaviProcess(self.output_dir)
self.cavi_process.log_file = self.output_dir + "/process.log.txt"
def init_calibration(self):
files = []
self.cavi_capture.lights(True)
time.sleep(3) # Let lights settle
files.append(self.capture_image(self.output_dir + "/" + "image_1.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_2.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_3.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_4.png"))
self.cavi_capture.lights(False)
self.process_files(files)
def process_files(self, files):
file_1 = files[0]
file_2 = files[1]
file_3 = files[2]
file_4 = files[3]
# Get the image difference and summary using 2 images
        # diff = self.cavi_process.subtract_images(file_1, file_2)
# self.cavi_process.write_image(self.output_dir + "/diff.png", diff)
# self.summarise(diff, self.output_dir + "/noise_hist.png")
# Image difference first two and last two
img_group_1_diff = self.cavi_process.subtract_images(file_1, file_2)
self.cavi_process.write_image(self.output_dir + "/image_group_1_diff.png", img_group_1_diff)
self.summarise(img_group_1_diff, self.output_dir + "/image_group_1_diff_hist.png")
img_group_2_diff = self.cavi_process.subtract_images(file_3, file_4)
self.cavi_process.write_image(self.output_dir + "/image_group_2_diff.png", img_group_2_diff)
self.summarise(img_group_2_diff, self.output_dir + "/image_group_2_diff_hist.png")
groups_min = np.minimum(img_group_1_diff, img_group_2_diff)
self.cavi_process.write_image(self.output_dir + "/groups_min.png", groups_min)
self.summarise(groups_min, self.output_dir + "/groups_min_hist.png")
# diff = self.cavi_process.subtract_images(self.output_dir + "/image_1_average.png", self.output_dir + "/image_2_average.png")
# self.cavi_process.write_image(self.output_dir + "/image_average_diff.png", diff)
# self.summarise(diff, self.output_dir + "/image_average_noise_hist.png")
def summarise(self, img, hist_path):
average_pixel = np.average(img[img>0])
max_pixel = np.max(img[img>0])
min_pixel = np.min(img[img>0])
total_area = len(img[img>0])
self.cavi_process.log("Noise max: " + str(max_pixel))
self.cavi_process.log("Noise min: " + str(min_pixel))
self.cavi_process.log("Noise average: " + str(average_pixel))
self.cavi_process.log("Noise area: " + str(total_area))
plt.hist(img.ravel(),max_pixel,[min_pixel,max_pixel])
plt.savefig(hist_path)
def gen_file_path(self):
return self.output_dir + "/" + datetime.datetime.now().strftime('%Y%m%d-%H%M%S') + ".png"
def capture_image(self, file_path):
self.cavi_capture.camera.capture(file_path, 'png')
return file_path
if __name__ == '__main__':
main()
|
janusnic/21v-python
|
unit_13/re6.py
|
Python
|
mit
| 469
| 0.013544
|
# -*- coding:utf-8 -*-
import re
# Processing phone numbers
phonePattern = re.compile(r'^(\d{3})\D*(\d{3})\D*(\d{4})\D*(\d*)$')
print phonePattern.search('80055512121234').groups()
# ('800', '555', '1212', '1234')
print phonePattern.search('800.555.1212 x1234').groups()
# ('800', '555', '1212', '1234')
print phonePattern.search('800-555-1212').groups()
# ('800', '555', '1212', '')
print phonePattern.search('(800)5551212 x1234')
|
tjps/bitcoin
|
test/functional/mempool_persist.py
|
Python
|
mit
| 6,912
| 0.002604
|
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool persistence.
By default, bitcoind will dump mempool on shutdown and
then reload it on startup. This can be overridden with
the -persistmempool=0 command line option.
Test is as follows:
- start node0, node1 and node2. node1 has -persistmempool=0
- create 5 transactions on node2 to its own address. Note that these
are not sent to node0 or node1 addresses because we don't want
them to be saved in the wallet.
- check that node0 and node1 have 5 transactions in their mempools
- shutdown all nodes.
- startup node0. Verify that it still has 5 transactions
in its mempool. Shutdown node0. This tests that by default the
mempool is persistent.
- startup node1. Verify that its mempool is empty. Shutdown node1.
This tests that with -persistmempool=0, the mempool is not
dumped to disk when the node is shut down.
- Restart node0 with -persistmempool=0. Verify that its mempool is
empty. Shutdown node0. This tests that with -persistmempool=0,
the mempool is not loaded from disk on start up.
- Restart node0 with -persistmempool. Verify that it has 5
transactions in its mempool. This tests that -persistmempool=0
does not overwrite a previously valid mempool stored on disk.
- Remove node0 mempool.dat and verify savemempool RPC recreates it
and verify that node1 can load it and has 5 transactions in its
mempool.
- Verify that savemempool throws when the RPC is called if
node1 can't write to disk.
"""
from decimal import Decimal
import os
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
assert_raises_rpc_error,
wait_until,
)
class MempoolPersistTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 3
self.extra_args = [[], ["-persistmempool=0"], []]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.log.debug("Send 5 transactions from node2 (to its own address)")
tx_creation_time_lower = int(time.time())
for i in range(5):
last_txid = self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
node2_balance = self.nodes[2].getbalance()
self.sync_all()
tx_creation_time_higher = int(time.time())
self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[1].getrawmempool()), 5)
self.log.debug("Prioritize a transaction on node0")
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'], fees['modified'])
        self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
tx_creation_time = self.nodes[0].getmempoolentry(txid=last_txid)['time']
assert_greater_than_or_equal(tx_creation_time, tx_creation_time_lower)
assert_greater_than_or_equal(tx_creation_time_higher, tx_creation_time)
self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
self.stop_nodes()
# Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
# Also don't store the mempool, to keep the datadir clean
self.start_node(1, extra_args=["-persistmempool=0"])
self.start_node(0)
self.start_node(2)
wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"], timeout=1)
wait_until(lambda: self.nodes[2].getmempoolinfo()["loaded"], timeout=1)
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[2].getrawmempool()), 5)
# The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
assert_equal(len(self.nodes[1].getrawmempool()), 0)
self.log.debug('Verify prioritization is loaded correctly')
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
self.log.debug('Verify time is loaded correctly')
assert_equal(tx_creation_time, self.nodes[0].getmempoolentry(txid=last_txid)['time'])
# Verify accounting of mempool transactions after restart is correct
self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
assert_equal(node2_balance, self.nodes[2].getbalance())
self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
self.stop_nodes()
self.start_node(0, extra_args=["-persistmempool=0"])
wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"])
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
self.stop_nodes()
self.start_node(0)
wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"])
assert_equal(len(self.nodes[0].getrawmempool()), 5)
mempooldat0 = os.path.join(self.nodes[0].datadir, 'regtest', 'mempool.dat')
mempooldat1 = os.path.join(self.nodes[1].datadir, 'regtest', 'mempool.dat')
self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
os.remove(mempooldat0)
self.nodes[0].savemempool()
assert os.path.isfile(mempooldat0)
self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
os.rename(mempooldat0, mempooldat1)
self.stop_nodes()
self.start_node(1, extra_args=[])
wait_until(lambda: self.nodes[1].getmempoolinfo()["loaded"])
assert_equal(len(self.nodes[1].getrawmempool()), 5)
self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
# to test the exception we are creating a tmp folder called mempool.dat.new
# which is an implementation detail that could change and break this test
mempooldotnew1 = mempooldat1 + '.new'
os.mkdir(mempooldotnew1)
assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
os.rmdir(mempooldotnew1)
if __name__ == '__main__':
MempoolPersistTest().main()
|
talapus/Ophidian
|
Academia/Filesystem/demo_os_walk.py
|
Python
|
bsd-3-clause
| 483
| 0.00207
|
#!/usr/bin/env python3
import os
print("root prints out directories only from what you specified")
print("dirs prints out sub-directories from root")
print("file
|
s prints out all files from root and directories")
print("*" * 20)
'''
for root, dirs, files in os.walk("/var/log"):
    print('Root: {}'.format(root))
    print('Dirs: {}'.format(dirs))
    print('Files: {}'.format(files))
'''
for root, dirs, files in os.walk("/var/log"):
print(root)
print(dirs)
print(files)
|
iw3hxn/LibrERP
|
purchase_discount_combined/__openerp__.py
|
Python
|
agpl-3.0
| 1,361
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2020 Didotech S.r.l. (<http://www.didotech.com/>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Purchase Order Lines With Combined Discounts",
"author": "Didotech.com",
"version": "1.0.0",
"category": "Generic Modules/Sales & Purchases",
'description': """ """,
"depends": [
"stock",
'product',
"purchase",
"purchase_discount",
],
"data": [
"views/purchase_discount_view.xml",
],
"active": False,
"installable": True
}
|
mohamedhagag/dvit-odoo
|
dvit_report_inventory_valuation_multi_uom/wizard/stock_quant_report.py
|
Python
|
agpl-3.0
| 6,534
| 0.004132
|
# -*- coding: utf-8 -*-
###############################################################################
# License, author and contributors information in: #
# __manifest__.py file at the root folder of this module. #
###############################################################################
from odoo import models, fields, api, _
from odoo.exceptions import UserError, ValidationError
from itertools import groupby
from operator import itemgetter
from collections import defaultdict
class WizardValuationStockInventory(models.TransientModel):
_name = 'wizard.valuation.stock.inventory'
_description = 'Wizard that opens the stock Inventory by Location'
location_id = fields.Many2one('stock.location', string='Location', required=True)
product_categ_id = fields.Many2one('product.category', string='Category')
product_sub_categ_id = fields.Many2one('product.category', string='Sub Category')
line_ids = fields.One2many('wizard.valuation.stock.inventory.line', 'wizard_id', required=True, ondelete='cascade')
@api.multi
def print_pdf_stock_inventory(self, data):
line_ids_all_categ = []
line_ids_filterd_categ = []
line_ids = []
# Unlink All one2many Line Ids from same wizard
for wizard_id in self.env['wizard.valuation.stock.inventory.line'].search([('wizard_id', '=', self.id)]):
if wizard_id.wizard_id.id == self.id:
self.write({'line_ids': [(3, wizard_id.id)]})
child_loc_ids = []
if self.location_id:
child_loc_ids = self.env['stock.location'].sudo().search([('location_id', 'child_of', self.location_id.id)]).mapped('id')
        # Creating temp dictionary for product list
if data["product_sub_categ_id"]:
for resource in self.env['stock.quant'].search(
['|', ('location_id', '=', self.location_id.id), ('location_id', 'in', child_loc_ids)]):
if resource.product_id.categ_id.id == data[
"product_sub_categ_id"] or resource.product_id.categ_id.parent_id.id == data[
"product_sub_categ_id"]:
line_ids_filterd_categ.append({
'location_id': resource.location_id.id,
'product_id': resource.product_id.id,
'product_categ_id': resource.product_id.categ_id.parent_id.id,
'product_sub_categ_id': resource.product_id.categ_id.id,
'product_uom_id': resource.product_id.uom_id.id,
'qty': resource.qty,
'standard_price': resource.product_id.standard_price,
})
else:
for resource in self.env['stock.quant'].search(
['|', ('location_id', '=', self.location_id.id), ('location_id', 'in', child_loc_ids)]):
line_ids_all_categ.append({
'location_id': resource.location_id.id,
'product_id': resource.product_id.id,
'product_categ_id': resource.product_id.categ_id.parent_id.id,
'product_sub_categ_id': resource.product_id.categ_id.id,
'product_uom_id': resource.product_id.uom_id.id,
'qty': resource.qty,
'standard_price': resource.product_id.standard_price,
})
if data["product_sub_categ_id"]:
# Merging stock moves into single product item line
grouper = itemgetter("product_id", "product_categ_id", "product_sub_categ_id", "location_id",
"product_uom_id", "standard_price")
for key, grp in groupby(sorted(line_ids_filterd_categ, key=grouper), grouper):
temp_dict = dict(zip(
["product_id", "product_categ_id", "product_sub_categ_id", "location_id", "product_uom_id",
"standard_price"], key))
temp_dict["qty"] = sum(item["qty"] for item in grp)
temp_dict["amount"] = temp_dict["standard_price"] * temp_dict["qty"]
line_ids.append((0, 0, temp_dict))
else:
# Merging stock moves into single product item line
grouper = itemgetter("product_id", "product_categ_id", "product_sub_categ_id", "location_id",
"product_uom_id", "standard_price")
for key, grp in groupby(sorted(line_ids_all_categ, key=grouper), grouper):
temp_dict = dict(zip(
["product_id", "product_categ_id", "product_sub_categ_id", "location_id", "product_uom_id",
"standard_price"], key))
temp_dict["qty"] = sum(item["qty"] for item in grp)
temp_dict["amount"] = temp_dict["standard_price"] * temp_dict["qty"]
line_ids.append((0, 0, temp_dict))
if len(line_ids) == 0:
raise ValidationError(_('Material is not available on this location.'))
# writing to One2many line_ids
self.write({'line_ids': line_ids})
context = {
'lang': 'en_US',
'active_ids': [self.id],
}
return {
'context': context,
'data': None,
'type': 'ir.actions.report.xml',
            'report_name': 'dvit_report_inventory_valuation_multi_uom.report_stock_inventory_location',
'report_type': 'qweb-pdf',
            'report_file': 'dvit_report_inventory_valuation_multi_uom.report_stock_inventory_location',
'name': 'Stock Inventory',
'flags': {'action_buttons': True},
}
class WizardValuationStockInventoryLine(models.TransientModel):
_name = 'wizard.valuation.stock.inventory.line'
wizard_id = fields.Many2one('wizard.valuation.stock.inventory', required=True, ondelete='cascade')
location_id = fields.Many2one('stock.location', 'Location')
product_id = fields.Many2one('product.product', 'Product')
product_categ_id = fields.Many2one('product.category', string='Category')
product_sub_categ_id = fields.Many2one('product.category', string='Sub Category')
product_uom_id = fields.Many2one('product.uom')
qty = fields.Float('Quantity')
standard_price = fields.Float('Rate')
amount = fields.Float('Amount')
@api.model
def convert_qty_in_uom(self, from_uom, to_uom, qty):
return (qty / from_uom.factor) * to_uom.factor
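        # Example (hypothetical factors): converting 2 Dozen (factor 1/12)
        # to Units (factor 1.0) gives (2 / (1 / 12.0)) * 1.0 = 24.0.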
|
tarquasso/softroboticfish6
|
fish/pi/runMbedProgramWithLogging.py
|
Python
|
mit
| 1,134
| 0.0097
|
import sys
import os
import time
import resetMbed
import serialMonitor
# Program the mbed, restart it, launch a serial monitor to record streaming logs
def runMbedProgramWithLogging(argv):
for arg in argv:
if 'startup=1' in arg:
time.sleep(10)
# If a bin file was given as argument, program it onto the mbed
remount = True
for arg in argv:
if '.bin' in arg:
print 'Copying bin file...'
#os.system('sudo rm /media/MBED/*.bin')
#time.sleep(1)
            #os.system('sudo cp /home/pi/Downloads/*.bin /media/MBED')
#time.sleep(1)
os.system('sudo /home/pi/fish/mbed/programMbed.sh ' + arg)
if 'remount' in arg:
            remount = int(arg.split('=')[1].strip()) == 1
# Remount mbed
if remount:
os.system("sudo /home/pi/fish/mbed/remountMbed.sh")
# Start mbed program and serial monitor
print 'Resetting mbed and starting serial monitor'
print ''
resetMbed.reset()
print '============'
serialMonitor.run(argv)
if __name__ == '__main__':
runMbedProgramWithLogging(sys.argv)
|
CEFAP-USP/fastqc-report
|
RunFastQC.py
|
Python
|
gpl-3.0
| 18,788
| 0.002715
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Workflow:
# 1. Check if the folder has been analysed before
# 1.1 Status: checked, converted, reported, compiled, emailed, running, error, completed
# 2. If the sequencer is NextSeq:
# 2.1 Run bcl2fastq to create the FASTQ files
# 2.1.1 Execution:
# nohup /usr/local/bin/bcl2fastq
# --runfolder-dir 160225_NB501279_0002_AHTGNYBGXX/
# --output-dir 160225_NB501279_0002_AHTGNYBGXX_fastq &
# 3. Run FastQC with the files created on output-dir on 2.1
# 3.1 /data/runs/FastQC/FastQC/fastqc --extract -t 8 Undetermined_S0_L00?_R1_001.fastq.gz
# 4. Compile tex with the results on 3.1
# 4.1 pdflatex -output-directory [DIR] tex.tex
# 5. Send email with the PDF created on 4.1
# 5.1 sendmail ...
import argparse
import os
import subprocess
import shutil
import csv
import re
from collections import OrderedDict
from bs4 import BeautifulSoup
import datetime
BCL2FASTQ_PATH = '/usr/local/bin/bcl2fastq'
FASTQC_PATH = '/data/runs/FastQC/FastQC/fastqc'
WORKING_DIR = os.path.dirname(os.path.abspath(__file__))
REPORT_FILE = 'FastQC_report.tex'
REPORTS_PATH = 'FastQC_reports'
STATUS_FILE = 'run_report'
# experiment information
SAMPLESHEET = 'SampleSheet.csv'
BCL2FASTQ_REPORT = 'laneBarcode.html'
def getDatetime():
try:
d = datetime.datetime.now()
return "{0}{1}{2}_{3}{4}{5}".format(
d.day,
d.month,
d.year,
d.hour,
d.minute,
d.second)
except Exception as e:
raise e
def getLogfile():
try:
d = getDatetime()
logfile = os.open(os.path.join(
WORKING_DIR, 'logfile-%s.log' % d), os.O_WRONLY | os.O_CREAT, 0o600)
return logfile
except Exception as e:
raise e
def get_status_folder(file_status):
if(not os.path.exists(file_status)):
return False
fs = open(file_status, 'r')
status = fs.readline().strip()
fs.close()
return status
def get_run_details(args):
try:
if(os.path.exists(
os.path.join(WORKING_DIR, args.runPath, SAMPLESHEET))):
csv_file = open(os.path.join(WORKING_DIR, args.runPath, SAMPLESHEET), 'rb')
ssheet = csv.reader(csv_file, delimiter=',')
lines = OrderedDict([])
key = ''
not_null = [row for row in ssheet if len(row) > 0]
for row in not_null:
if(row[0].startswith('[')):
key = row[0].upper()
lines[key] = []
else:
v = lines.get(key)
v.append(row)
lines[key] = v
return lines
except Exception as e:
raise e
def get_bcl2fastq_report(args, fastq_path):
try:
if(os.path.exists(
os.path.join(fastq_path, 'Reports'))):
html = open(os.path.join(
fastq_path,
'Reports', 'html', 'index.html'), 'r').read()
soup = BeautifulSoup(html, 'html.parser')
for fr in soup.find_all('frame'):
src = fr.get('src')
src = src.replace('lane.html', BCL2FASTQ_REPORT)
report = open(os.path.join(
fastq_path,
'Reports', 'html', src), 'r').read()
soup = BeautifulSoup(report, 'html.parser')
result = OrderedDict([])
ncolums = 0
hs = soup.find_all('h2')
result['h2'] = [ele.text.strip() for ele in hs]
tables = soup.find_all(id="ReportTable")
for i, table in enumerate(tables):
result['table-%i' % i] = OrderedDict([])
for j, row in enumerate(table.find_all('tr')):
if('head' not in result['table-%i' % i]):
heads = row.find_all('th')
heads = [ele.text.strip() for ele in heads]
result['table-%i' % i]['head'] = heads
if(len(heads) > ncolums):
ncolums = len(heads)
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
if(len(cols) > 0):
result['table-%i' % i]['%i-col' % j] = cols
if(len(cols) > ncolums):
ncolums = len(cols)
return ncolums, result
except Exception as e:
raise e
def rreplace(s, old, new, occurrence):
li = s.rsplit(old, occurrence)
return new.join(li)
def build_run_details_tex_table(args, data):
if(data):
tex_table = ''
ncoluns = len(data['[DATA]'][0])
# {|l|l|l|l|l|l|l|}
columns_table = '{'
for c in range(ncoluns):
columns_table += '|l'
columns_table += '|}'
for key in data.keys():
# HEADER
values = data.get(key)
tex_table += "\multicolumn{%s}{|c|}{%s} \\\\ \hline\n" % (
ncoluns, key.replace('[', '').replace(']', ''))
if(key == '[HEADER]'):
for value in values:
tex_table += "%s & \multicolumn{%s}{l|}{%s} \\\\ \hline\n" % (
value[0].replace('_', '\_'), ncoluns - 1, value[1].replace('_', '\_'))
# READS
elif(key == '[READS]'):
for value in values:
tex_table += "\multicolumn{%s}{|l|}{%s} \\\\ \hline\n" % (
ncoluns, value[0].replace('_', '\_'))
# SETTINGS
elif(key == '[SETTINGS]'):
for value in values:
tex_table += "%s & \multicolumn{%s}{l|}{%s} \\\\ \hline\n" % (
value[0].replace('_', '\_'), ncoluns - 1, value[1].replace('_', '\_'))
# DATA
elif(key == '[DATA]'):
for value in values:
tex_table += ''.join('%s & ' % v.replace('\\', '_').replace(
'_', '\_') for v in value)
tex_table = rreplace(tex_table, '&', ' ', 1)
tex_table += '\\\\ \hline\n'
return columns_table, tex_table
def build_bcl2fastq_report_tex_table(args, fastq_path):
ncoluns, data = get_bcl2fastq_report(args, fastq_path)
if(data):
tex_table = OrderedDict([])
headers = data.get('h2')
for i, head in enumerate(headers):
if(head == 'Top Unknown Barcodes'):
pass
else:
tb = data.get('table-%i' % i)
tex = ''
cols = len(tb['head'])
tex += "\multicolumn{%s}{|c|}{%s} \\\\ \hline\n" % (cols, head)
for key in tb.keys():
values = tb.get(key)
if(key == 'head'):
for v in values:
if(len(v.rsplit(" ", 1)) > 1):
v = "%s\\\\ %s" % (
v.rsplit(" ", 1)[0], v.rsplit(" ", 1)[1])
line = "\\begin{tabular}[c]{@{}l@{}}%s\\end{tabular} &" % v.replace(
'_', '\_').replace('%', '\%')
tex += line
else:
line = "%s &" % v
tex += line
tex = rreplace(tex, '&', ' ', 1)
tex += '\\\\ \hline\n'
else:
tex += ''.join('%s & ' % v.replace('_', '\_') for v in values)
tex = rreplace(tex, '&', ' ', 1)
tex += '\\\\ \hline\n'
tex_table[head] = tex
return tex_table
def check_analysed_folder(args, file_status):
status = get_status_folder(file_status)
if(status and status in ['emailed', 'running', 'completed']):
return False
if(not os.path.exists(file_status)):
fs = open(file_status, 'w+')
fs.write('checked\n')
fs.close()
return True
|
PRIArobotics/HedgehogProtocol
|
hedgehog/protocol/messages/ack.py
|
Python
|
agpl-3.0
| 1,301
| 0.002306
|
from typing import Any, Sequence, Union
from dataclasses import dataclass
from . import RequestMsg, ReplyMsg, Message, SimpleMessage
from hedgehog.protocol.proto import ack_pb2
from hedgehog.utils import protobuf
__all__ = ['Acknowledgement']
# <GSL customizable: module-header>
from hedgehog.protocol.proto.ack_pb2 import OK, UNKNOWN_COMMAND, INVALID_COMMAND, UNSUPPORTED_COMMAND, FAILED_COMMAND
__all__ += ['OK', 'UNKNOWN_COMMAND', 'INVALID_COMMAND', 'UNSUPPORTED_COMMAND', 'FAILED_COMMAND']
# </GSL customizable: module-header>
@ReplyMsg.message(ack_pb2.Acknowledgement, 'acknowledgement', fields=('code', 'message',))
@dataclass(frozen=True, repr=False)
class Acknowledgement(SimpleMessage):
code: int = OK
message: str = ''
def __post_init__(self):
# <default GSL customizable: Acknowledgement-init-validation>
pass
# </GSL customizable: Acknowledgement-init-validation>
    # <default GSL customizable: Acknowledgement-extra-members />
@classmethod
def _parse(cls, msg: ack_pb2.Acknowledgement) -> 'Acknowledgement':
code = msg.code
message = msg.message
return cls(code, message=message)
def _serialize(self, msg: ack_pb2.Acknowledgement) -> None:
msg.code = self.code
msg.message = self.message
|
Exterminus/Redes
|
Cliente/Cliente_Interface/cliente_gui.py
|
Python
|
mit
| 18,741
| 0.001122
|
# coding: utf-8
import pygame
import sys
from pygame.locals import *
from gui import *
from conexao import *
from jogador import *
from Queue import Queue
from threading import Thread
"""
Client
Networks course project (TP de Redes) - Truco
UFSJ
Carlos Magno
Lucas Geraldo
Requirements:
*python 2.7
*pygame
Main module.
"""
class Principal(Gui):
"""
Classe Principal
"""
def __init__(self):
#---HABILITAR BOTAO TRUCO---
# Ative para ativar a opção de pedir truco..
self.truco_habilitado = 1
#--------------------
self.mensagem_servidor = ""
self.carta_selecionada = -1
self.sua_vez = 0
self.conexao = Conexao()
self.conexao.conectar()
self.gui = Gui()
self.jogador = Jogador()
self.recebe_cartas()
self.gui.carrega_cartas()
#--------------------
self.pede_truco = "0"
self.rodada = 1
self.gui.valor_rodada = "0"
self.flag_truco = 0
self.gui.pontos = "0000"
self.gui.partidas = "000"
self.question_truco = "0"
self.proposta_truco_equipe = "0"
self.resposta_proposta_truco = "0"
self.mesa_jogo = "000000"
self.gui.mensagem_vez = "Aguarde..."
self.gui.cont_cartas = 3
#-----------------
self.quee = Queue()
self.verifica = Thread(target=self.verifica_resposta_servidor, args=(
self.quee, self.conexao))
self.verifica.daemon = True
self.verifica.start()
def atualiza_mensagem(self):
"Atualiza o campo de mensagens.."
if(self.sua_vez is 0):
self.gui.mensagem_vez = "Aguarde..."
self.gui.escrever(self.gui.mensagem_vez, (40, 430), (255, 0, 0))
if(self.sua_vez is 1):
self.gui.mensagem_vez = "Sua Vez..."
self.gui.escrever(self.gui.mensagem_vez, (40, 430), (0, 255, 0))
def agrupa_cartas(self, lista):
"""Agrupa as cartas recebidas do servidor"""
final = ""
c1 = ""
for i in lista:
c1 = c1 + i
if(len(c1) == 2):
final = final + c1 + ","
c1 = ""
lista = final.split(',')
lista.pop()
return lista
def recebe_cartas(self):
"""
        Loads the cards received from the server.
        Extracts the initial data from the first connection.
"""
self.mensagem_servidor = self.conexao.ler_socket()
        #--Extract the initial data...
self.jogador.id = self.mensagem_servidor[0:1]
self.jogador.equipe = self.mensagem_servidor[1:2]
self.sua_vez = int(self.mensagem_servidor[2:3])
cartas = self.mensagem_servidor[4:10]
print "ID ", self.jogador.id, "Equipe ", self.jogador.equipe, "Sua Vez ", self.sua_vez
self.jogador.cartas_mao = cartas
cartas = self.agrupa_cartas(cartas)
for i in cartas:
self.gui.cartas_recebidas.append(i)
def verifica_resposta_servidor(self, fila, conexao):
"""Verifica a conexao.."""
while (True):
palavra = conexao.ler_socket()
if(palavra is not None):
self.quee.put(palavra)
def verifica_erro_mensagem(self,lista):
"""Verifica e corrige erro na mensagem recebida"""
tamanho=len(lista)
if(tamanho<30):
            lista = lista[:0] + "00" + lista[1:]
print "Mensagem corrigida ",lista
return lista
def processa_resposta(self, lista):
"""Vai processar a mensagem recebida"""
self.mensagem_servidor = lista
if(lista is not None):
print "resposta vinda do servidor ", lista
#lista = self.verifica_erro_mensagem(lista)
self.sua_vez = int(lista[2:3])
self.atualiza_mensagem()
self.finaliza_rodada(int(lista[3:4]))
self.rodada = int(lista[3:4])
cartas = lista[4:10]
if(cartas is not "000000"):
pass
else:
                # Considering that during the match the server does not
                # resend the cards, redefine the player's hand.
self.gui.cartas_recebidas = []
self.jogador.cartas_mao = cartas
cartas = self.agrupa_cartas(cartas)
for i in cartas:
self.gui.cartas_recebidas.append(i)
self.gui.pontos = lista[10:14]
self.gui.partidas = lista[14:17]
self.gui.valor_rodada = lista[17:19]
self.question_truco = lista[19:20]
self.proposta_truco_equipe = lista[20:21]
self.mesa_jogo = lista[22:30]
self.renderiza_mesa()
print self.sua_vez
if(self.gui.cont_cartas > 1):
self.gui.cont_cartas = self.gui.cont_cartas - 1
def renderiza_mesa(self):
"""Função que renderiza_mesa"""
# 00 00 00 00
self.gui.caminho_cartas
print self.mensagem_servidor
cartas = self.agrupa_cartas(self.mesa_jogo)
print "Cartas Mesa ", cartas
cont = 0
for i in cartas:
if not (i == "00" or i == "0"):
i = self.gui.caminho_cartas + i + ".png"
if(self.jogador.id == "0"):
if cont is 0:
self.gui.renderiza_cartas_jogadas(
i, self.gui.sua_pos_carta)
if cont is 1:
self.gui.renderiza_cartas_jogadas(
i, self.gui.pos_cartas_jog_1)
self.gui.update_card_adversario(
1, self.gui.cont_cartas)
if cont is 2:
self.gui.renderiza_cartas_jogadas(
i, self.gui.pos_cartas_jog_2)
self.gui.update_card_adversario(
2, self.gui.cont_cartas)
if cont is 3:
self.gui.renderiza_cartas_jogadas(
i, self.gui.pos_cartas_jog_3)
self.gui.update_card_adversario(
3, self.gui.cont_cartas)
elif(self.jogador.id == "1"):
if cont is 0:
self.gui.renderiza_cartas_jogadas(
i, self.gui.pos_cartas_jog_3)
self.gui.update_card_adversario(
3, self.gui.cont_cartas)
elif cont is 1:
self.gui.renderiza_cartas_jogadas(
i, self.gui.sua_pos_carta)
elif cont is 2:
self.gui.renderiza_cartas_jogadas(
i, self.gui.pos_cartas_jog_1)
self.gui.update_card_adversario(
1, self.gui.cont_cartas)
elif cont is 3:
self.gui.renderiza_cartas_jogadas(
i, self.gui.pos_cartas_jog_2)
self.gui.update_card_adversario(
3, self.gui.cont_cartas)
elif(self.jogador.id == "2"):
if cont is 0:
self.gui.renderiza_cartas_jogadas(
i, self.gui.pos_cartas_jog_2)
self.gui.update_card_adversario(
2, self.gui.cont_cartas)
elif cont is 1:
                        self.gui.renderiza_cartas_jogadas(
i, self.gui.pos_cartas_jog_3)
self.gui.update_card_adversario(
3, self.gui.cont_cartas)
elif cont is 2:
self.gui.renderiza_cartas_jogadas(
i, self.gui.sua_pos_carta)
elif cont is 3:
self.gui.renderiza_cartas_jogadas(
                            i, self.gui.pos_cartas_jog_1)
self.gui.update_card_adversario(
|
cliffano/swaggy-jenkins
|
clients/python-experimental/generated/test/test_blue_ocean_api.py
|
Python
|
mit
| 4,757
| 0
|
# coding: utf-8
"""
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification # noqa: E501
The version of the OpenAPI document: 1.1.2-pre.0
Contact: blah@cliffano.com
Generated by: https://openapi-generator.tech
"""
import unittest
import openapi_client
from openapi_client.api.blue_ocean_api import BlueOceanApi # noqa: E501
class TestBlueOceanApi(unittest.TestCase):
"""BlueOceanApi unit test stubs"""
def setUp(self):
self.api = BlueOceanApi() # noqa: E501
def tearDown(self):
pass
def test_delete_pipeline_queue_item(self):
"""Test case for delete_pipeline_queue_item
"""
pass
def test_get_authenticated_user(self):
"""Test case for get_authenticated_user
"""
pass
def test_get_classes(self):
"""Test case for get_classes
"""
pass
def test_get_json_web_key(self):
"""Test case for get_json_web_key
"""
pass
def test_get_json_web_token(self):
"""Test case for get_json_web_token
"""
pass
def test_get_organisation(self):
"""Test case for get_organisation
"""
pass
def test_get_organisations(self):
"""Test case for get_organisations
"""
pass
def test_get_pipeline(self):
"""Test case for get_pipeline
"""
pass
def test_get_pipeline_activities(self):
"""Test case for get_pipeline_activities
"""
pass
def test_get_pipeline_branch(self):
"""Test case for get_pipeline_branch
"""
pass
def test_get_pipeline_branch_run(self):
"""Test case for get_pipeline_branch_run
"""
pass
def test_get_pipeline_branches(self):
"""Test case for get_pipeline_branches
"""
pass
def test_get_pipeline_folder(self):
"""Test case for get_pipeline_folder
"""
pass
def test_get_pipeline_folder_pipeline(self):
"""Test case for get_pipeline_folder_pipeline
"""
pass
def test_get_pipeline_queue(self):
"""Test case for get_pipeline_queue
"""
pass
def test_get_pipeline_run(self):
"""Test case for get_pipeline_run
"""
pass
def test_get_pipeline_run_log(self):
"""Test case for get_pipeline_run_log
"""
pass
def test_get_pipeline_run_node(self):
"""Test case for get_pipeline_run_node
"""
pass
def test_get_pipeline_run_node_step(self):
"""Test case for get_pipeline_run_node_step
"""
pass
def test_get_pipeline_run_node_step_log(self):
"""Test case for get_pipeline_run_node_step_log
"""
pass
def test_get_pipeline_run_node_steps(self):
"""Test case for get_pipeline_run_node_steps
"""
pass
def test_get_pipeline_run_nodes(self):
"""Test case for get_pipeline_run_nodes
"""
        pass
def test_get_pipeline_runs(self):
"""Test case for get_pipeline_runs
"""
pass
    def test_get_pipelines(self):
"""Test case for get_pipelines
"""
pass
def test_get_scm(self):
"""Test case for get_scm
"""
pass
def test_get_scm_organisation_repositories(self):
"""Test case for get_scm_organisation_repositories
"""
pass
def test_get_scm_organisation_repository(self):
"""Test case for get_scm_organisation_repository
"""
pass
def test_get_scm_organisations(self):
"""Test case for get_scm_organisations
"""
pass
def test_get_user(self):
"""Test case for get_user
"""
pass
def test_get_user_favorites(self):
"""Test case for get_user_favorites
"""
pass
def test_get_users(self):
"""Test case for get_users
"""
pass
def test_post_pipeline_run(self):
"""Test case for post_pipeline_run
"""
pass
def test_post_pipeline_runs(self):
"""Test case for post_pipeline_runs
"""
pass
def test_put_pipeline_favorite(self):
"""Test case for put_pipeline_favorite
"""
pass
def test_put_pipeline_run(self):
"""Test case for put_pipeline_run
"""
pass
def test_search(self):
"""Test case for search
"""
pass
def test_search_classes(self):
"""Test case for search_classes
"""
pass
if __name__ == '__main__':
unittest.main()
|
google/grumpy
|
third_party/stdlib/urlparse.py
|
Python
|
apache-2.0
| 19,619
| 0.001988
|
"""Parse (absolute and relative) URLs.
urlparse module is based upon the following RFC specifications.
RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding
and L. Masinter, January 2005.
RFC 2732 : "Format for Literal IPv6 Addresses in URL's by R.Hinden, B.Carpenter
and L.Masinter, December 1999.
RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T.
Berners-Lee, R. Fielding, and L. Masinter, August 1998.
RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zwinski, July 1998.
RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June
1995.
RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M.
McCahill, December 1994
RFC 3986 is considered the current standard and any future changes to
urlparse module should conform with it. The urlparse module is
currently not entirely compliant with this RFC due to defacto
scenarios for parsing, and for backward compatibility purposes, some
parsing quirks from older RFCs are retained. The testcases in
test_urlparse.py provides a good indicator of parsing behavior.
"""
import re
import operator
_itemgetter = operator.itemgetter
_property = property
_tuple = tuple
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
"urlsplit", "urlunsplit", "parse_qs", "parse_qsl"]
# A classification of schemes ('' means apply by default)
uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
'wais', 'file', 'https', 'shttp', 'mms',
'prospero', 'rtsp', 'rtspu', '', 'sftp',
'svn', 'svn+ssh']
uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet',
'imap', 'wais', 'file', 'mms', 'https', 'shttp',
'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
'svn', 'svn+ssh', 'sftp','nfs','git', 'git+ssh']
uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
'mms', '', 'sftp', 'tel']
# These are not actually used anymore, but should stay for backwards
# compatibility. (They are undocumented, but have a public-looking name.)
non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
                    'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
'gopher', 'rtsp', 'rtspu', 'sip', 'sips', '']
uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
'nntp', 'wais', 'https', 'shttp', 'snews',
                 'file', 'prospero', '']
# Characters valid in scheme names
scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789'
'+-.')
MAX_CACHE_SIZE = 20
_parse_cache = {}
def clear_cache():
"""Clear the parse cache."""
_parse_cache.clear()
class ResultMixin(object):
"""Shared methods for the parsed result objects."""
# @property
def username(self):
netloc = self.netloc
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
userinfo = userinfo.split(":", 1)[0]
return userinfo
return None
username = property(username)
# @property
def password(self):
netloc = self.netloc
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
return userinfo.split(":", 1)[1]
return None
password = property(password)
# @property
def hostname(self):
netloc = self.netloc.split('@')[-1]
if '[' in netloc and ']' in netloc:
return netloc.split(']')[0][1:].lower()
elif ':' in netloc:
return netloc.split(':')[0].lower()
elif netloc == '':
return None
else:
return netloc.lower()
hostname = property(hostname)
# @property
def port(self):
netloc = self.netloc.split('@')[-1].split(']')[-1]
if ':' in netloc:
port = netloc.split(':')[1]
if port:
port = int(port, 10)
# verify legal port
if (0 <= port <= 65535):
return port
return None
port = property(port)
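    # Illustrative example (not from the source): for a netloc of
    # 'user:pw@[::1]:8080' these properties yield username 'user',
    # password 'pw', hostname '::1' and port 8080.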
# from collections import namedtuple
class _SplitResult(tuple):
'SplitResult(scheme, netloc, path, query, fragment)'
__slots__ = ()
_fields = ('scheme', 'netloc', 'path', 'query', 'fragment')
def __new__(_cls, scheme, netloc, path, query, fragment):
'Create new instance of SplitResult(scheme, netloc, path, query, fragment)'
return _tuple.__new__(_cls, (scheme, netloc, path, query, fragment))
# @classmethod
def _make(cls, iterable, new=tuple.__new__, len=len):
'Make a new SplitResult object from a sequence or iterable'
result = new(cls, iterable)
if len(result) != 5:
raise TypeError('Expected 5 arguments, got %d' % len(result))
return result
_make = classmethod(_make)
def __repr__(self):
'Return a nicely formatted representation string'
return 'SplitResult(scheme=%r, netloc=%r, path=%r, query=%r, fragment=%r)' % self
def _asdict(self):
'Return a new OrderedDict which maps field names to their values'
return OrderedDict(zip(self._fields, self))
def _replace(_self, **kwds):
'Return a new SplitResult object replacing specified fields with new values'
result = _self._make(map(kwds.pop, ('scheme', 'netloc', 'path', 'query', 'fragment'), _self))
if kwds:
raise ValueError('Got unexpected field names: %r' % kwds.keys())
return result
def __getnewargs__(self):
'Return self as a plain tuple. Used by copy and pickle.'
return tuple(self)
__dict__ = _property(_asdict)
def __getstate__(self):
'Exclude the OrderedDict from pickling'
pass
scheme = _property(_itemgetter(0), doc='Alias for field number 0')
netloc = _property(_itemgetter(1), doc='Alias for field number 1')
path = _property(_itemgetter(2), doc='Alias for field number 2')
query = _property(_itemgetter(3), doc='Alias for field number 3')
fragment = _property(_itemgetter(4), doc='Alias for field number 4')
# class SplitResult(namedtuple('SplitResult', 'scheme netloc path query fragment'), ResultMixin):
class SplitResult(_SplitResult, ResultMixin):
__slots__ = ()
def geturl(self):
return urlunsplit(self)
class _ParseResult(tuple):
'ParseResult(scheme, netloc, path, params, query, fragment)'
__slots__ = ()
_fields = ('scheme', 'netloc', 'path', 'params', 'query', 'fragment')
def __new__(_cls, scheme, netloc, path, params, query, fragment):
'Create new instance of ParseResult(scheme, netloc, path, params, query, fragment)'
return _tuple.__new__(_cls, (scheme, netloc, path, params, query, fragment))
# @classmethod
def _make(cls, iterable, new=tuple.__new__, len=len):
'Make a new ParseResult object from a sequence or iterable'
result = new(cls, iterable)
if len(result) != 6:
raise TypeError('Expected 6 arguments, got %d' % len(result))
return result
_make = classmethod(_make)
def __repr__(self):
'Return a nicely formatted representation string'
return 'ParseResult(scheme=%r, netloc=%r, path=%r, params=%r, query=%r, fragment=%r)' % self
def _asdict(self):
'Return a new OrderedDict which maps field names to their values'
return OrderedDict(zip(self._fields, self))
def _replace(_self, **kwds):
'Return a new ParseResult object replacing specified fields with new values'
result = _self._make(map(kwds.pop, ('scheme', 'netloc', 'path', 'params', 'query', 'fragment'), _self))
if kwds:
raise ValueError('Got unexpected field names: %r' % kwds.keys())
return result
def __getnewargs__(self):
'Return self as a plain tuple. Used by copy and pickle.'
return tuple(self)
    __dict__ = _property(_asdict)
|
mtils/ems
|
ems/qt4/gui/mapper/strategies/dict_strategy.py
|
Python
|
mit
| 857
| 0.016336
|
'''
Created on 21.03.2012
@author: michi
'''
from PyQt4.QtGui import QTableView
from ems.qt4.gui.mapper.base import BaseStrategy #@UnresolvedImport
from ems.xtype.base import DictType, ObjectInstanceType #@UnresolvedImport
from ems.qt4.gui.itemdelegate.xtypes.objectinstancetype import ObjectInstanceDelegate #@UnresolvedImport
class DictStrategy(BaseStrategy):
def match(self, param):
if isinstance(param, DictType):
            return True
return False
def getDelegateForItem(self, mapper, type_, parent=None):
return ObjectInstanceDelegate(type_, parent)
def addMapping(self, mapper, widget, columnName, type_):
if isinstance(widget, QTableView):
columnIndex = mapper.model.columnOfName(columnName)
mapper.dataWidgetMapper.addMapping(widget, columnIndex)
|
Aveias/gestt
|
main.py
|
Python
|
gpl-3.0
| 1,104
| 0.009991
|
# -*- coding: UTF-8 -*-
import os
import Auth.authentication as auth
import Auth.login as log
import Menu.barreOutils as barre
import Users.Model as U
import Projects.Model as P
# Call the authentication module - commented out for the test phases of other modules
login = log.Login()
login.fenetre.mainloop()
#auth.Auth.access = True
#auth.Auth.current_user_id = 1
# Start the program
while auth.Auth.access:
    print("program running")
    user = U.User(auth.Auth.current_user_id)
    print("Hello", user.nom, user.prenom, "you are in the loop")
    # Instantiate a BarreOutils object
    barreOutils = barre.BarreOutils()
    barreOutils.fenetre.mainloop()
    # Check the fermer attribute of BarreOutils() -> True if the logout button was pressed
    print("fermer = ", barreOutils.fermer)
    if barreOutils.fermer:
        auth.Auth.access = False
    else:
        os.system("pause")
    # Check the access attribute, which determines whether we re-enter the while loop
    print("access = ", auth.Auth.access)
# End while
|
rakshify/News_Recommender
|
policy/policy.py
|
Python
|
mit
| 472
| 0.004237
|
"""
policy.py
Janbaanz Launde
Apr 1, 2017
"""
class Policy(object):
"""Abstract class for all policies"""
name = 'POLICY'
def __init__(self, contexts):
self.contexts = contexts
def predict_arm(self, contexts=None):
raise NotImplementedError("Yo
|
u need to override this function in child class.")
def pull_arm(self, arm, reward, contexts=None):
raise NotImplementedError("You need to override t
|
his function in child class.")
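# Hedged usage sketch (not part of the source): a concrete policy only needs
# to override the two methods above, e.g. (assuming `import random`):
#   class RandomPolicy(Policy):
#       name = 'RANDOM'
#       def predict_arm(self, contexts=None):
#           return random.choice(list(self.contexts))
#       def pull_arm(self, arm, reward, contexts=None):
#           pass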
|
JesseLivezey/Diffusion-Probabilistic-Models
|
extensions.py
|
Python
|
mit
| 7,673
| 0.006907
|
"""
Extensions called during training to generate samples and diagnostic plots and printouts.
"""
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import numpy as np
import os
import theano.tensor as T
import theano
from blocks.extensions import SimpleExtension
import viz
import sampler
class PlotSamples(SimpleExtension):
def __init__(self, model, algorithm, X, path, n_samples=49, **kwargs):
"""
Generate samples from the model. The do() function is called as an extension during training.
Generates 3 types of samples:
- Sample from generative model
- Sample from image denoising posterior distribution (default signal to noise of 1)
- Sample from image inpainting posterior distribution (inpaint left half of image)
"""
super(PlotSamples, self).__init__(**kwargs)
self.model = model
self.path = path
self.X = X[:n_samples].reshape(
(n_samples, model.n_colors, model.spatial_width, model.spatial_width))
self.n_samples = n_samples
X_noisy = T.tensor4('X noisy samp')
t = T.matrix('t samp')
self.get_mu_sigma = theano.function([X_noisy, t], model.get_mu_sigma(X_noisy, t),
allow_input_downcast=True)
def do(self, callback_name, *args):
print "generating samples"
base_fname_part1 = self.path + '/samples-'
base_fname_part2 = '_epoch%04d'%self.main_loop.status['epochs_done']
sampler.generate_samples(self.model, self.get_mu_sigma,
n_samples=self.n_samples, inpaint=False, denoise_sigma=None, X_true=None,
base_fname_part1=base_fname_part1, base_fname_part2=base_fname_part2)
sampler.generate_samples(self.model, self.get_mu_sigma,
n_samples=self.n_samples, inpaint=True, denoise_sigma=None, X_true=self.X,
base_fname_part1=base_fname_part1, base_fname_part2=base_fname_part2)
sampler.generate_samples(self.model, self.get_mu_sigma,
n_samples=self.n_samples, inpaint=False, denoise_sigma=1, X_true=self.X,
base_fname_part1=base_fname_part1, base_fname_part2=base_fname_part2)
class PlotParameters(SimpleExtension):
def __init__(self, model, blocks_model, path, **kwargs):
super(PlotParameters, self).__init__(**kwargs)
self.path = path
self.model = model
self.blocks_model = blocks_model
def do(self, callback_name, *args):
print "plotting parameters"
for param_name, param in self.blocks_model.params.iteritems():
filename_safe_name = '-'.join(param_name.split('/')[2:]).replace(' ', '_')
base_fname_part1 = self.path + '/params-' + filename_safe_name
base_fname_part2 = '_epoch%04d'%self.main_loop.status['epochs_done']
viz.plot_parameter(param.get_value(), base_fname_part1, base_fname_part2,
title=param_name, n_colors=self.model.n_colors)
class PlotGradients(SimpleExtension):
def __init__(self, model, blocks_model, algorithm, X, path, **kwargs):
super(PlotGradients, self).__init__(**kwargs)
self.path = path
self.X = X
self.model = model
self.blocks_model = blocks_model
gradients = []
for param_name in sorted(self.blocks_model.params.keys()):
gradients.append(algorithm.gradients[self.blocks_model.params[param_name]])
        self.grad_f = theano.function(algorithm.inputs, gradients, allow_input_downcast=True)
def do(self, callback_name, *args):
print "plotting gradients"
grad_vals = self.grad_f(self.X)
keynames = sorted(self.blocks_model.params.keys())
        for ii in xrange(len(keynames)):
param_name = keynames[ii]
val = grad_vals[ii]
filename_safe_name = '-'.join(param_name.split('/')[2:]).replace(' ', '_')
base_fname_part1 = self.path + '/grads-' + filename_safe_name
base_fname_part2 = '_epoch%04d'%self.main_loop.status['epochs_done']
viz.plot_parameter(val, base_fname_part1, base_fname_part2,
title="grad " + param_name, n_colors=self.model.n_colors)
class PlotInternalState(SimpleExtension):
def __init__(self, model, blocks_model, state, features, X, path, **kwargs):
super(PlotInternalState, self).__init__(**kwargs)
self.path = path
self.X = X
self.model = model
self.blocks_model = blocks_model
self.internal_state_f = theano.function([features], state, allow_input_downcast=True)
self.internal_state_names = []
for var in state:
self.internal_state_names.append(var.name)
def do(self, callback_name, *args):
print "plotting internal state of network"
state = self.internal_state_f(self.X)
for ii in xrange(len(state)):
param_name = self.internal_state_names[ii]
val = state[ii]
filename_safe_name = param_name.replace(' ', '_').replace('/', '-')
base_fname_part1 = self.path + '/state-' + filename_safe_name
base_fname_part2 = '_epoch%04d'%self.main_loop.status['epochs_done']
viz.plot_parameter(val, base_fname_part1, base_fname_part2,
title="state " + param_name, n_colors=self.model.n_colors)
class PlotMonitors(SimpleExtension):
def __init__(self, path, burn_in_iters=0, **kwargs):
super(PlotMonitors, self).__init__(**kwargs)
self.path = path
self.burn_in_iters = burn_in_iters
def do(self, callback_name, *args):
print "plotting monitors"
try:
df = self.main_loop.log.to_dataframe()
except AttributeError:
# This starting breaking after a Blocks update.
print "Failed to generate monitoring plots due to Blocks interface change."
return
iter_number = df.tail(1).index
# Throw out the first burn_in values
# as the objective is often much larger
# in that period.
if iter_number > self.burn_in_iters:
df = df.loc[self.burn_in_iters:]
cols = [col for col in df.columns if col.startswith(('cost', 'train', 'test'))]
df = df[cols].interpolate(method='linear')
# If we don't have any non-nan dataframes, don't plot
if len(df) == 0:
return
try:
axs = df.interpolate(method='linear').plot(
subplots=True, legend=False, figsize=(5, len(cols)*2))
except TypeError:
# This starting breaking after a different Blocks update.
print "Failed to generate monitoring plots due to Blocks interface change."
return
for ax, cname in zip(axs, cols):
ax.set_title(cname)
fn = os.path.join(self.path,
'monitors_subplots_epoch%04d.png' % self.main_loop.status['epochs_done'])
plt.savefig(fn, bbox_inches='tight')
plt.clf()
df.plot(subplots=False, figsize=(15,10))
plt.gcf().tight_layout()
fn = os.path.join(self.path,
'monitors_epoch%04d.png' % self.main_loop.status['epochs_done'])
plt.savefig(fn, bbox_inches='tight')
plt.close('all')
def decay_learning_rate(iteration, old_value):
# TODO the numbers in this function should not be hard coded
# this is called every epoch
# reduce the learning rate by 10 every 1000 epochs
decay_rate = np.exp(np.log(0.1)/1000.)
new_value = decay_rate*old_value
if new_value < 1e-5:
new_value = 1e-5
print "learning rate %g"%new_value
return np.float32(new_value)
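# Worked check of the comment above: decay_rate = exp(log(0.1)/1000), so the
# factor accumulated over 1000 epochs is decay_rate**1000 = exp(log(0.1)) = 0.1,
# i.e. the learning rate drops tenfold every 1000 epochs until the 1e-5 floor.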
|
fedhere/SESNCfAlib
|
vaccaleibundgut.py
|
Python
|
mit
| 4,704
| 0.00744
|
import sys
import os
import glob
import inspect
import pylab as pl
from numpy import *
from scipy import optimize
import pickle
import time
import copy
cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]) + "/templates")
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
from templutils import *
import pylabsetup
pl.ion()
#fits the vacca leibundgut model to data:
# a linear decay, with a gaussian peak on top, an exponential rise, and possibly a second gaussian (typically the Ia second bump around phase=25 days
def minfunc(p, y, x, e, secondg, plot=False):
'''
p is the parameter list
if secondg=1: secondgaussian added
if secondg=0: secondgaussian not
parameters are:
p[0]=first gaussian normalization (negative if fitting mag)
p[1]=first gaussian mean
p[2]=first gaussian sigma
p[3]=linear decay offset
p[4]=linear decay slope
p[5]=exponxential rise slope
p[6]=exponential zero point
p[7]=second gaussian normalization (negative if fitting mag)
p[8]=second gaussian mean
p[9]=second gaussian sigma
'''
if plot:
pl.figure(3)
pl.errorbar(x, y, yerr=e, color='k')
import time
# time.sleep(1)
# print sum(((y-mycavvaccaleib(x,p,secondg=True))**2))
if secondg > 0:
return sum(((y - mycavvaccaleib(x, p, secondg=True)) ** 2) / e ** 2)
else:
return sum(((y - mycavvaccaleib(x, p, secondg=False)) ** 2) / e ** 2)
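# Hedged sketch (illustrative only): `mycavvaccaleib` is defined in templutils
# and not shown here. Going by the parameter list documented above, a Vacca &
# Leibundgut-style light curve combines a Gaussian peak, a linear decay and an
# exponential rise, plus an optional second Gaussian:
#   peak  = p[0] * exp(-0.5 * ((x - p[1]) / p[2]) ** 2)
#   decay = p[3] + p[4] * x
#   rise  = 1. - exp(p[5] * (p[6] - x))
#   model = (peak + decay) / rise   # plus a second Gaussian built from p[7..9]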
import scipy.optimize
if __name__ == '__main__':
lcv = np.loadtxt(sys.argv[1], unpack=True)
secondg = False
try:
if int(sys.argv[2]) > 0:
secondg = True
    except Exception:
pass
x = lcv[1]
y = lcv[2]
e = lcv[3]
mjd = lcv[0]
ax = pl.figure(0, figsize=(10,5)).add_subplot(111)
#pl.errorbar(x, y, yerr=e, color="#47b56c", label="data")
p0 = [0] * 10
p0[0] = -4
peakdate = x[np.where(y == min(y))[0]]
if len(peakdate) > 1:
peakdate = peakdate[0]
p0[1] = peakdate + 5
p0[2] = 10 # sigma
#pl.draw()
lintail = np.where(x > peakdate + 50)[0]
if len(lintail) < 1:
print "no tail data"
linfit = np.polyfit(x[-2:], y[-2:], 1)
p0[3] = linfit[1]
p0[4] = linfit[0]
else:
linfit = np.polyfit(x[lintail], y[lintail], 1)
p0[3] = linfit[1]
p0[4] = linfit[0]
p0[5] = 0.1
p0[6] = peakdate - 20
p0[7] = -1
p0[8] = peakdate + 25
p0[9] = 10
pl.figure(3)
pl.clf()
# pf= scipy.optimize.minimize(minfunc,p0,args=(y,x,1), method='Powell')#,options={'maxiter':5})
if secondg:
p0[0] += 1.5
p0[1] *= 2
pl.plot(x[10:], mycavvaccaleib(x[10:], p0, secondg=True), 'm')
        pf = scipy.optimize.minimize(minfunc, p0, args=(y[10:], x[10:], e[10:], 1), method='Powell')  # ,options={'maxiter':5})
else:
pl.plot(x[10:], mycavvaccaleib(x[10:], p0, secondg=False), 'k')
pf = scipy.optimize.minimize(minfunc, p0, args=(y[10:], x[10:], e[10:], 0), method='Powell') # ,options={'maxiter':5})
#pl.figure(4)
pl.figure(0)
ax.errorbar(mjd+0.5-53000, y, yerr=e, fmt=None, ms=7,
alpha = 0.5, color='k', markersize=10,)
ax.plot(mjd+0.5-53000, y, '.', ms=7,
alpha = 0.5, color='#47b56c', markersize=10,
label = "SN 19"+sys.argv[1].split('/')[-1].\
replace('.dat', '').replace('.', ' '))
# mycavvaccaleib(x,pf.x, secondg=True)
mycavvaccaleib(x, pf.x, secondg=secondg)
ax.plot(mjd[10:]+0.5-53000, mycavvaccaleib(x[10:], pf.x, secondg=secondg), 'k',
linewidth=2, label="vacca leibundgut fit") # , alpha=0.5)
# pl.plot(x,mycavvaccaleib(x,pf.x, secondg=True), 'k',linewidth=2, label="fit")
xlen = mjd.max() - mjd.min()
ax.set_xlim(mjd.min()-xlen*0.02+0.5-53000, mjd.max()+xlen*0.02+0.5-53000)
ax.set_ylim(max(y + 0.1), min(y - 0.1))
ax2 = ax.twiny()
Vmax = 2449095.23-2453000
ax2.tick_params('both', length=10, width=1, which='major')
ax2.tick_params('both', length=5, width=1, which='minor')
ax2.set_xlabel("phase (days)")
ax2.set_xlim((ax.get_xlim()[0] - Vmax, ax.get_xlim()[1] - Vmax))
# pl.ylim(10,21)
pl.draw()
pl.legend()
ax.set_xlabel("JD - 24530000")
ax.set_ylabel("magnitude")
#pl.title(sys.argv[1].split('/')[-1].replace('.dat', '').replace('.', ' '))
#pl.show()
pl.tight_layout()
pl.savefig("../fits/" + sys.argv[1].split('/')[-1].replace('.dat', '.vdfit.pdf'))
cmd = "pdfcrop " + "../fits/" + sys.argv[1].split('/')[-1].replace('.dat', '.vdfit.pdf')
print cmd
os.system(cmd)
|
iulian787/spack
|
var/spack/repos/builtin/packages/openipmi/package.py
|
Python
|
lgpl-2.1
| 1,087
| 0.00368
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Openipmi(AutotoolsPackage):
"""The Open IPMI project aims to develop an open code base
to allow access to platform information using Intelligent
Platform Management Interface (IPMI)."""
homepage = "https://sourceforge.net/projects/openipmi/"
url = "https://sourceforge.net/projects/openipmi
|
/files/OpenIPMI%202.0%20Library/OpenIPMI-2.0.29.tar.gz"
version('2.0.28', sha256='8e8b1de2a9a041b419133ecb21f956e999841cf2e759e973eeba9a36f8b40996')
version('2.0.27', sha256='f3b1fafaaec2e2bac32fec5a86941ad8b8cb64543470bd6d819d7b166713d20b')
depends_on('popt')
depends_on('python')
depends_on('termcap')
depends_on('ncurses')
def configure_args(self):
args = ['LIBS=' + self.spec['ncurses'].libs.link_flags]
        return args
def install(self, spec, prefix):
make('install', parallel=False)
|
tensorflow/agents
|
tf_agents/bandits/policies/boltzmann_reward_prediction_policy.py
|
Python
|
apache-2.0
| 13,821
| 0.004197
|
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Policy for reward prediction and boltzmann exploration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Optional, Text, Tuple, Sequence
import gin
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
import tensorflow_probability as tfp
from tf_agents.bandits.networks import heteroscedastic_q_network
from tf_agents.bandits.policies import constraints as constr
from tf_agents.bandits.specs import utils as bandit_spec_utils
from tf_agents.distributions import shifted_categorical
from tf_agents.policies import tf_policy
from tf_agents.policies import utils as policy_utilities
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import policy_step
from tf_agents.typing import types
@gin.configurable
class BoltzmannRewardPredictionPolicy(tf_policy.TFPolicy):
"""Class to build Reward Prediction Policies with Boltzmann exploration."""
def __init__(self,
time_step_spec: types.TimeStep,
action_spec: types.NestedTensorSpec,
reward_network: types.Network,
temperature: types.FloatOrReturningFloat = 1.0,
boltzmann_gumbel_exploration_constant: Optional[
types.Float] = None,
observation_and_action_constraint_splitter: Optional[
types.Splitter] = None,
accepts_per_arm_features: bool = False,
constraints: Tuple[constr.NeuralConstraint, ...] = (),
emit_policy_info: Tuple[Text, ...] = (),
num_samples_list: Sequence[tf.Variable] = (),
name: Optional[Text] = None):
"""Builds a BoltzmannRewardPredictionPolicy given a reward network.
This policy takes a tf_agents.Network predicting rewards and chooses an
action with weighted probabilities (i.e., using a softmax over the network
estimates of value for each action).
Args:
time_step_spec: A `TimeStep` spec of the expected time_steps.
action_spec: A nest of BoundedTensorSpec representing the actions.
reward_network: An instance of a `tf_agents.network.Network`,
callable via `network(observation, step_type) -> (output, final_state)`.
temperature: float or callable that returns a float. The temperature used
in the Boltzmann exploration.
boltzmann_gumbel_exploration_constant: optional positive float. When
provided, the policy implements Neural Bandit with Boltzmann-Gumbel
exploration from the paper:
N. Cesa-Bianchi et al., "Boltzmann Exploration Done Right", NIPS 2017.
observation_and_action_constraint_splitter: A function used for masking
valid/invalid actions with each state of the environment. The function
takes in a full observation and returns a tuple consisting of 1) the
part of the observation intended as input to the network and 2) the
mask. The mask should be a 0-1 `Tensor` of shape
`[batch_size, num_actions]`. This function should also work with a
`TensorSpec` as input, and should output `TensorSpec` objects for the
observation and mask.
      accepts_per_arm_features: (bool) Whether the policy accepts per-arm
features.
constraints: iterable of constraints objects that are instances of
`tf_agents.bandits.agents.NeuralConstraint`.
emit_policy_info: (tuple of strings) what side information we want to get
as part of the policy info. Allowed values can be found in
`policy_utilities.PolicyInfo`.
num_samples_list: list or tuple of tf.Variable's. Used only in
        Boltzmann-Gumbel exploration. Otherwise, empty.
name: The name of this policy. All variables in this module will fall
under that name. Defaults to the class name.
Raises:
NotImplementedError: If `action_spec` contains more than one
`BoundedTensorSpec` or the `BoundedTensorSpec` is not valid.
"""
policy_utilities.check_no_mask_with_arm_features(
accepts_per_arm_features, observation_and_action_constraint_splitter)
flat_action_spec = tf.nest.flatten(action_spec)
if len(flat_action_spec) > 1:
raise NotImplementedError(
'action_spec can only contain a single BoundedTensorSpec.')
self._temperature = temperature
action_spec = flat_action_spec[0]
if (not tensor_spec.is_bounded(action_spec) or
not tensor_spec.is_discrete(action_spec) or
action_spec.shape.rank > 1 or
action_spec.shape.num_elements() != 1):
raise NotImplementedError(
'action_spec must be a BoundedTensorSpec of type int32 and shape (). '
'Found {}.'.format(action_spec))
self._expected_num_actions = action_spec.maximum - action_spec.minimum + 1
self._action_offset = action_spec.minimum
reward_network.create_variables()
self._reward_network = reward_network
self._constraints = constraints
self._boltzmann_gumbel_exploration_constant = (
boltzmann_gumbel_exploration_constant)
self._num_samples_list = num_samples_list
if self._boltzmann_gumbel_exploration_constant is not None:
if self._boltzmann_gumbel_exploration_constant <= 0.0:
raise ValueError(
'The Boltzmann-Gumbel exploration constant is expected to be ',
'positive. Found: ', self._boltzmann_gumbel_exploration_constant)
if self._action_offset > 0:
raise NotImplementedError('Action offset is not supported when ',
'Boltzmann-Gumbel exploration is enabled.')
if accepts_per_arm_features:
raise NotImplementedError(
'Boltzmann-Gumbel exploration is not supported ',
'for arm features case.')
if len(self._num_samples_list) != self._expected_num_actions:
raise ValueError(
'Size of num_samples_list: ', len(self._num_samples_list),
' does not match the expected number of actions:',
self._expected_num_actions)
self._emit_policy_info = emit_policy_info
predicted_rewards_mean = ()
if policy_utilities.InfoFields.PREDICTED_REWARDS_MEAN in emit_policy_info:
predicted_rewards_mean = tensor_spec.TensorSpec(
[self._expected_num_actions])
bandit_policy_type = ()
if policy_utilities.InfoFields.BANDIT_POLICY_TYPE in emit_policy_info:
bandit_policy_type = (
policy_utilities.create_bandit_policy_type_tensor_spec(shape=[1]))
if accepts_per_arm_features:
# The features for the chosen arm is saved to policy_info.
chosen_arm_features_info = (
policy_utilities.create_chosen_arm_features_info_spec(
time_step_spec.observation))
info_spec = policy_utilities.PerArmPolicyInfo(
predicted_rewards_mean=predicted_rewards_mean,
bandit_policy_type=bandit_policy_type,
chosen_arm_features=chosen_arm_features_info)
else:
info_spec = policy_utilities.PolicyInfo(
predicted_rewards_mean=predicted_rewards_mean,
bandit_policy_type=bandit_policy_type)
self._accepts_per_arm_features = accepts_per_arm_features
super(BoltzmannRewardPredictionPolicy, self).__init__(
time_step_spec, action_spec,
policy_state_spec=reward_network.state_spec,
clip=False,
info_spec=info_spec,
emit_log_probability='log_probability' in emit_policy_info,
observation_and_action_constraint_splitter=(
            observation_and_action_constraint_splitter),
        name=name)
|
Forage/Gramps
|
gramps/plugins/tool/desbrowser.py
|
Python
|
gpl-2.0
| 5,538
| 0.004514
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2008 Brian G. Matherly
# Copyright (C) 2010 Jakim Friant
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""Tools/Analysis and Exploration/Interactive Descendant Browser"""
#------------------------------------------------------------------------
#
# GTK/GNOME modules
#
#------------------------------------------------------------------------
from gi.repository import Gdk
from gi.repository import Gtk
#------------------------------------------------------------------------
#
# GRAMPS modules
#
#------------------------------------------------------------------------
from gramps.gen.const import URL_MANUAL_PAGE
from gramps.gen.display.name import displayer as name_displayer
from gramps.gui.plug import tool
from gramps.gui.display import display_help
from gramps.gui.managedwindow import ManagedWindow
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.get_translation().sgettext
from gramps.gui.glade import Glade
from gramps.gui.editors import EditPerson
#------------------------------------------------------------------------
#
# Constants
#
#------------------------------------------------------------------------
WIKI_HELP_PAGE = '%s_-_Tools' % URL_MANUAL_PAGE
WIKI_HELP_SEC = _('manual|Interactive_Descendant_Browser...')
class DesBrowse(tool.ActivePersonTool, ManagedWindow):
def __init__(self, dbstate, uistate, options_class, name, callback=None):
tool.ActivePersonTool.__init__(self, dbstate, uistate, options_class,
name)
if self.fail:
return
self.dbstate = dbstate
active_handle = uistate.get_active('Person')
self.active = dbstate.db.get_person_from_handle(active_handle)
self.callback = callback
self.active_name = _("Descendant Browser: %s") % (
name_displayer.display(self.active)
)
ManagedWindow.__init__(self, uistate, [], self)
self.glade = Glade()
self.glade.connect_signals({
"destroy_passed_object" : self.close,
"on_help_clicked" : self.on_help_clicked,
"on_delete_event" : self.close,
})
window = self.glade.toplevel
self.set_window(window,self.glade.get_object('title'),
self.active_name)
self.tree = self.glade.get_object("tree1")
col = Gtk.TreeViewColumn('',Gtk.CellRendererText(),text=0)
self.tree.append_column(col)
self.tree.set_rules_hint(True)
self.tree.set_headers_visible(False)
self.tree.connect('button-press-event', self.button_press_event)
self.make_new_model()
self.show()
def build_menu_names(self, obj):
return (self.active_name,_("Descendant Browser tool"))
def make_new_model(self):
self.model = Gtk.TreeStore(str, object)
self.tree.set_model(self.model)
self.add_to_tree(None, None, self.active.get_handle())
self.tree.expand_all()
def on_help_clicked(self, obj):
"""Display the relevant portion of GRAMPS manual"""
display_help(webpage=WIKI_HELP_PAGE, section=WIKI_HELP_SEC)
def add_to_tree(self, parent_id, sib_id, person_handle):
item_id = self.model.insert_after(parent_id, sib_id)
person = self.db.get_person_from_handle(person_handle)
self.model.set(item_id, 0, name_displayer.display(person))
|
        self.model.set(item_id, 1, person_handle)
prev_id = None
for family_handle in person.get_family_handle_list():
family = self.db.get_family_from_handle(family_handle)
for child_ref in family.get_child_ref_list():
prev_id = self.add_to_tree(item_id, prev_id, child_ref.ref)
return item_id
def button_press_event(self, obj, event):
if event.type == Gdk.EventType._2BUTTON_PRESS and event.button == 1:
store, node = self.tree.get_selection().get_selected()
if node:
person_handle = store.get_value(node, 1)
person = self.db.get_person_from_handle(person_handle)
EditPerson(self.dbstate, self.uistate, self.track, person,
self.this_callback)
def this_callback(self, obj):
self.callback()
self.make_new_model()
#------------------------------------------------------------------------
#
#
#
#------------------------------------------------------------------------
class DesBrowseOptions(tool.ToolOptions):
"""
Defines options and provides handling interface.
"""
def __init__(self, name,person_id=None):
tool.ToolOptions.__init__(self, name,person_id)
|
theno/utlz
|
utlz/cmd.py
|
Python
|
mit
| 2,420
| 0
|
import subprocess
from utlz import func_has_arg, namedtuple
CmdResult = namedtuple(
typename='CmdResult',
field_names=[
'exitcode',
'stdout', # type: bytes
'stderr', # type: bytes
'cmd',
'input',
],
lazy_vals={
'stdout_str': lambda self: self.stdout.decode('utf-8'),
'stderr_str': lambda self: self.stderr.decode('utf-8'),
}
)
def run_cmd(cmd, input=None, timeout=30, max_try=3, num_try=1):
'''Run command `cmd`.
It's like that, and that's the way it is.
'''
if type(cmd) == str:
cmd = cmd.split()
process = subprocess.Popen(cmd,
stdin=open('/dev/null', 'r'),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
communicate_has_timeout = func_has_arg(func=process.communicate,
arg='timeout')
exception = Exception
if communicate_has_timeout:
exception = subprocess.TimeoutExpired # python 3.x
stdout = stderr = b''
exitcode = None
try:
if communicate_has_timeout:
# python 3.x
stdout, stderr = process.communicate(input, timeout)
exitcode = process.wait()
else:
# python 2.x
if timeout is None:
stdout, stderr = process.communicate(input)
exitcode = process.wait()
else:
# thread-recipe: https://stackoverflow.com/a/4825933
def target():
# closure-recipe: https://stackoverflow.com/a/23558809
target.out, target.err = process.communicate(input)
import threading
thread = threading.Thread(target=target)
thread.start()
thread.join(timeout)
if thread.is_alive():
process.terminate()
thread.join()
exitcode = None
else:
exitcode = process.wait()
stdout = target.out
stderr = target.err
except exception:
if num_try < max_try:
return run_cmd(cmd, input, timeout, max_try, num_try+1)
else:
return CmdResult(exitcode, stdout, stderr, cmd, input)
return CmdResult(exitcode, stdout, stderr, cmd, input)
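# Illustrative usage (assuming a POSIX `echo` on the PATH):
#   result = run_cmd('echo hello')
#   result.exitcode    # -> 0
#   result.stdout_str  # -> 'hello\n' (lazily decoded from the bytes in stdout)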
|
radish-bdd/radish
|
tests/unit/test_utils.py
|
Python
|
mit
| 1,022
| 0
|
"""
radish
~~~~~~
The root from red to green. BDD tooling for Python.
:copyright: (c) 2019 by Timo Furrer <tuxtimo@gmail.com>
:license: MIT, see LICENSE for more details.
"""
import pytest
import radish.utils as utils
@pytest.mark.filterwarnings("ignore")
def test_getting_any_debugger():
"""When asking for a debu
|
gger it should return one
It shouldn't matter if IPython i
|
s installed or not,
just give me that debugger.
"""
# when
debugger = utils.get_debugger()
# then
assert callable(debugger.runcall)
def test_utils_should_locate_arbitrary_python_object():
# when
obj = utils.locate_python_object("str")
# then
assert obj == str
def test_converting_pos_args_into_kwargs():
# given
def func(_, arg1, arg2, kwarg1=1, kwargs2=2):
pass
pos_arg_values = ["arg1-value", "arg2-value"]
# when
kwargs = utils.get_func_pos_args_as_kwargs(func, pos_arg_values)
# then
assert kwargs == {"arg1": "arg1-value", "arg2": "arg2-value"}
|
nathanielvarona/airflow
|
tests/providers/mongo/sensors/test_mongo.py
|
Python
|
apache-2.0
| 1,820
| 0.000549
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import pytest
from airflow.models import Connection
from airflow.models.dag import DAG
from airflow.providers.mongo.hooks.mongo import MongoHook
from airflow.providers.mongo.sensors.mongo import MongoSensor
from airflow.utils import db, timezone
DEFAULT_DATE = timezone.datetime(2017, 1, 1)
@pytest.mark.integration("mongo")
class TestMongoSensor(unittest.TestCase):
def setUp(self):
db.merge_conn(
Connection(conn_id='mongo_test', conn_type='mongo', host='mongo', port='27017', schema='test')
)
args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
self.dag = DAG('test_dag_id', default_args=args)
hook = MongoHook('mongo_test')
hook.insert_one('foo', {'bar': 'baz'})
self.sensor = MongoSensor(
task_id='test_task',
mongo_conn_id='mongo_test',
dag=self.dag,
collection='foo',
query={'bar': 'baz'},
)
def test_poke(self):
assert self.sensor.poke(None)
|
JElchison/Numberjack
|
Numberjack/__init__.py
|
Python
|
lgpl-2.1
| 148,044
| 0.002742
|
# Copyright 2009 - 2014 Insight Centre for Data Analytics, UCC
UNSAT, SAT, UNKNOWN, LIMITOUT = 0, 1, 2, 3
LUBY, GEOMETRIC = 0, 1
MAXCOST = 100000000
from .solvers import available_solvers
import weakref
import exceptions
import datetime
import types
import sys
#SDG: extend recursive limit for predicate decomposition
sys.setrecursionlimit(10000)
#SDG: needed by the default eval method in BinPredicate
import operator
val_heuristics = ['Lex', 'AntiLex', 'Random', 'RandomMinMax', 'DomainSplit', 'RandomSplit', 'Promise', 'Impact', 'No', 'Guided']
var_heuristics = ['No', 'MinDomain', 'Lex', 'AntiLex', 'MaxDegree', 'MinDomainMinVal', 'Random', 'MinDomainMaxDegree', 'DomainOverDegree', 'DomainOverWDegree', 'DomainOverWLDegree', 'Neighbour', 'Impact', 'ImpactOverDegree', 'ImpactOverWDegree', 'ImpactOverWLDegree', 'Scheduling']
def flatten(x):
result = []
for el in x:
if hasattr(el, "__iter__") and not isinstance(el, basestring) and not issubclass(type(el), Expression):
result.extend(flatten(el))
else:
result.append(el)
return result
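# For example, flatten([1, [2, [3, 4]], 'ab']) returns [1, 2, 3, 4, 'ab']:
# nested iterables are recursed into, while strings and Expression subclasses
# are kept whole (under Python 2, str has no __iter__ attribute).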
def numeric(x):
tx = type(x)
return tx is int or tx is float
# Numberjack exceptions:
class ConstraintNotSupportedError(exceptions.Exception):
"""
Raised if the solver being loaded does not support the constraint, and no
decomposition is available for the constraint. For example in the case of
loading a divison expression with a Mixed Integer Programming solver.
"""
def __init__(self, value, solver=None):
self.value = value
self.solver = solver
def __str__(self):
return "ERROR: Constraint %s not supported
|
by solver %s and no decomposition is available." % (self.value, self.solver)
class UnsupportedSolverFunction(exceptions.Exception):
"""
Raised if a solver does not support a particular API call.
"""
def __init__(self, solver, func_name, msg=""):
        self.solver = solver
self.func_name = func_name
self.msg = msg
def __str__(self):
return "ERROR: The solver %s does not support the function '%s'. %s" % (self.solver, self.func_name, self.msg)
class InvalidEncodingException(exceptions.Exception):
"""
Raised if an invalid encoding was specified, for example if no domain
encoding is turned on.
"""
def __init__(self, msg=""):
self.msg = msg
def __str__(self):
return "ERROR: Invalid encoding configuration. %s" % self.msg
class InvalidConstraintSpecification(exceptions.Exception):
"""
Raised in the case of the invalid use of a constraint.
"""
def __init__(self, msg=""):
self.msg = msg
def __str__(self):
return "ERROR: Invalid constraint specification. %s" % self.msg
class ModelSizeError(exceptions.Exception):
"""
Raised if the size of a model has grown excessively large when decomposing
some constraints for a solver.
"""
def __init__(self, value, solver=None):
self.value = value
self.solver = solver
def __str__(self):
return "ERROR: Model decomposition size too big %s for solver %s." % (self.value, self.solver)
# Numberjack domain and expressions:
class Domain(list):
def __init__(self, arg1, arg2=None):
"""
\internal
This class is used to wrap the domain of variables
in order to print them and/or iterate over values
Initialised from a list of values, or a lower and an upper bound
"""
if arg2 is None:
list.__init__(self, arg1)
self.sort()
self.is_bound = False
else:
list.__init__(self, [arg1, arg2])
self.is_bound = True
self.current = -1
def next(self):
"""
\internal
Returns the next value when iterating
"""
self.current += 1
if self.is_bound:
if self[0] + self.current > self[-1]:
raise StopIteration
else:
return self[0] + self.current
else:
if self.current >= list.__len__(self):
raise StopIteration
else:
return list.__getitem__(self, self.current)
def __str__(self):
"""
\internal
"""
if self.is_bound:
lb = self[0]
ub = self[-1]
if lb + 1 == ub and type(lb) is int:
return '{' + str(lb) + ',' + str(ub) + '}'
else:
return '{' + str(lb) + '..' + str(ub) + '}'
def extend(idx):
x = self[idx]
y = x
idx += 1
while idx < len(self):
if type(self[idx]) is int and self[idx] == y + 1:
y = self[idx]
else:
break
idx += 1
return (x, y, idx)
ret_str = '{'
idx = 0
while idx < len(self):
if idx > 0:
ret_str += ','
(x, y, idx) = extend(idx)
ret_str += str(x)
if type(x) is int and x + 1 < y:
ret_str += ('..' + str(y))
elif x != y:
ret_str += (',' + str(y))
return ret_str + '}'
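    # Illustrative examples: str(Domain([1, 2, 3, 5])) -> '{1..3,5}', and
    # str(Domain(0, 9)) -> '{0..9}' since bound domains print only their
    # lower and upper bounds.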
class Expression(object):
"""
Base class from which all expressions and variables inherit.
:param str operator: the name of this expression operator or variable name.
"""
def __init__(self, operator):
#self.mod = None
self.ident = -1
self.operator = operator
# This is the stuff for maintaining multiple representations of the
# model among different solvers
self.var_list = []
self.encoding = None
self.solver = None
def __iter__(self):
return self.get_domain()
def get_solver(self):
"""
Returns the solver with which this expression was last loaded.
:return: The last loaded solver, or `None` if it has not been loaded
anywhere.
:rtype: `NBJ_STD_Solver`
"""
if getattr(self, 'solver', False):
return self.solver
else:
return None
def initial(self):
"""
Returns a string representing the initial domain of the expression. For
example:
.. code-block:: python
var1 = Variable(0, 10)
print var1.initial()
>>> x0 in {0..10}
:return: A String representation of original expression definition
:rtype: str
"""
output = self.name()
if self.domain_ is None:
output += ' in ' + str(Domain(self.lb, self.ub))
else:
output += ' in ' + str(Domain(self.domain_))
return output
def domain(self, solver=None):
"""
Returns a string representing the current domain of the expression.
:param `NBJ_STD_Solver` solver: If specified, the solver for
which this expression has been loaded. If not specified, the solver
that has most recenlty loaded the expression will be used.
"""
output = self.name() + ' in ' + str(self.get_domain(solver=solver))
return output
## Returns a string containing the value of the expression
# @param solver Solver from which expression solution will be sourced
# @return String representation of expression solution
#
# solution(self, solver=None) :- Returns a string representing the current solution
# of the expression in the solver specified. If no solver is specified then
# the returned string represents the solution to the expression in the solver
# that has most recently loaded and solved the expression.
#
def solution(self, solver=None):
"""
.. deprecated:: 1.1
Instead you should use :func:`get_value`, this function is equivalent
to calling :func:`str` on that.
Returns a string containing the solution value of the expression. For a
native representation of the solution value, use :func:`get_value`
        :param `NBJ_STD_Solver` solver: If specified, the solver from which
            the solution value will be sourced.
        """
        return str(self.get_value(solver))
|
ESSolutions/ESSArch_Core
|
ESSArch_Core/WorkflowEngine/migrations/0019_processstep_parent_step.py
|
Python
|
gpl-3.0
| 1,448
| 0.000691
|
"""
ESSArch is an open source archiving and digital preservation system
ESSArch
Copyright (C) 2005-2019 ES Solutions AB
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Contact information:
Web - http://www.essolutions.se
Email - essarch@essolutions.se
"""
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-07-26 14:21
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
        ('WorkflowEngine', '0018_auto_20160725_2120'),
]
operations = [
migrations.AddField(
model_name='processstep',
name='parent_step',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='child_steps', to='WorkflowEngine.ProcessStep'),
),
]
|
BoyuanYan/CIFAR-10
|
cs231n/gradient_check.py
|
Python
|
apache-2.0
| 1,636
| 0.006541
|
import numpy as np
from random import randrange
def eval_numerical_gradient(f, x, verbose=True, h=0.00001):
    '''
    A simple implementation of numerically evaluating the gradient of f at x.
    - f: a function that takes a single input argument
    - x: the point (a numpy array) at which to evaluate the gradient
    '''
    fx = f(x)  # evaluate the function at the original point
grad = np.zeros_like(x)
it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
while not it.finished:
        ix = it.multi_index  # get the multi-dimensional index
        oldval = x[ix]
        x[ix] = oldval + h
        fxph = f(x)  # compute f(x+h)
        x[ix] = oldval - h
        fxmh = f(x)  # compute f(x-h)
        x[ix] = oldval
grad[ix] = (fxph - fxmh) / (2 * h)
if verbose:
print(ix, grad[ix])
it.iternext()
return grad
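# Worked example of the central difference above: for f(x) = x**2 at x = 3,
# ((3+h)**2 - (3-h)**2) / (2*h) = 12*h / (2*h) = 6, matching the analytic
# gradient 2*x.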
def grad_check_sparse(f, x, analytic_grad, num_checks=10, h=1e-5):
    '''
    Pick some random elements of x, compute the gradient along only those
    dimensions, and compare the values against analytic_grad.
    '''
for i in range(num_checks):
ix = tuple([randrange(m) for m in x.shape])
oldval = x[ix]
        x[ix] = oldval + h  # add a small value h
        fxph = f(x)  # compute f(x+h)
        x[ix] = oldval - h  # subtract a small value h
        fxmh = f(x)  # compute f(x-h)
        x[ix] = oldval  # reset
grad_numerical = (fxph - fxmh) / (2 * h)
grad_analytic = analytic_grad[ix]
rel_error = abs(grad_numerical - grad_analytic) / (abs(grad_numerical) + abs(grad_analytic))
        print('numerical gradient: %f analytic gradient: %f, relative error: %e' % (grad_numerical, grad_analytic, rel_error))
|
acs-test/openfda
|
PER_2017-18/clientServer/P1/server_web.py
|
Python
|
apache-2.0
| 1,505
| 0.008638
|
# A basic web server using sockets
import socket
PORT = 8090
MAX_OPEN_REQUESTS = 5
def process_client(clientsocket):
print(clientsocket)
data = clientsocket.recv(1024)
print(data)
web_contents = "<h1>Received</h1>"
f = open("myhtml.html", "r")
web_contents = f.read()
f.close()
web_headers = "HTTP/1.1 200"
    web_headers += "\n" + "Content-Type: text/html"
web_headers += "\n" + "Content-Length: %i" % len(str.encode(web_contents))
clientsocket.send(str.encode(web_headers + "\n\n" + web_contents))
clientsocket.close()
# create an INET, STREAMing socket
serversocket = socket.socket(socket.AF_INET,
socket.SOCK_STREAM)
# bind the socket to a public host, and a well-known port
hostname = socket.gethostname()
ip = socket.gethostbyname(hostname)
# Let's use better the local interface name
hostname = "10.10.104.17"
try:
serversocket.bind((ip, PORT))
# become a server socket
# MAX_OPEN_REQUESTS connect requests before refusing outside connections
serversocket.listen(MAX_OPEN_REQUESTS)
while True:
# accept connections from outside
print ("Waiting for connections at %s %i" % (hostname, PORT))
(clientsocket, address) = serversocket.accept()
# now do something with the clientsocket
# in this case, we'll pretend this is a non threaded server
process_client(clientsocket)
except socket.error:
print("Problemas using port %i. Do you have permission?" % PORT)
|
kakaba2009/MachineLearning
|
python/src/algorithm/coding/basic/comprehension.py
|
Python
|
apache-2.0
| 240
| 0.0125
|
if __name__ == '__main__':
x = int(input())
y = int(input())
z = int(input())
n = int(input())
L = [[a,b,c] for a in range(x+1) for b in range(y+1) for c in range(z+1)]
L = list(filter(lambda x : sum(x) != n, L))
    print(L)
|
mastizada/kuma
|
vendor/packages/ipython/IPython/Extensions/__init__.py
|
Python
|
mpl-2.0
| 429
| 0
|
# -*- coding: utf-8 -*-
"""This directory is meant for special-pu
|
rpose extensions to IPython.
This can include things which alter the syntax processing stage (see
PhysicalQ_Input for an example of how to do this).
Any file located here can be called with an 'execfile =' option as
execfile = Extensions/filename.py
since the IPython directory itself is already part of the search path for
files listed as 'execfile ='.
"""
|
Tehsmash/inspector-hooks
|
inspector_hooks/enroll_node_not_found.py
|
Python
|
apache-2.0
| 1,013
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ironicclient import exceptions
from ironic_inspector import node_cache
from ironic_inspector import utils
# NOTE: `_` is used below but was not imported in the original snippet; the
# i18n marker is assumed to come from ironic_inspector's i18n module:
from ironic_inspector.common.i18n import _
def hook(introspection_data, **kwargs):
ironic = utils.get_client()
try:
node = ironic.node.create(**{'driver': 'fake'})
except exceptions.HttpError as exc:
        raise utils.Error(_("Cannot create node in ironic for unknown "
"node: %s") % exc)
return node_cache.add_node(node.uuid, ironic=ironic)
|
CSC-IT-Center-for-Science/pouta-blueprints
|
pebbles/drivers/provisioning/openstack_driver.py
|
Python
|
mit
| 6,069
| 0.002636
|
""" ToDo: document OpenStack driver on user level here.
"""
import json
from pebbles.services.openstack_service import OpenStackService
from pebbles.drivers.provisioning import base_driver
from pebbles.client import PBClient
from pebbles.models import Instance
from pebbles.utils import parse_ports_string
SLEEP_BETWEEN_POLLS = 3
POLL_MAX_WAIT = 180
class OpenStackDriver(base_driver.ProvisioningDriverBase):
""" ToDo: document Openstack driver on developer/sysadmin level here.
"""
def get_oss(self):
return OpenStackService({'M2M_CREDENTIAL_STORE': self.config['M2M_CREDENTIAL_STORE']})
def get_configuration(self):
from pebbles.drivers.provisioning.openstack_driver_config import CONFIG
oss = self.get_oss()
images = [x.name for x in oss.list_images()]
flavors = [x.name for x in oss.list_flavors()]
config = CONFIG.copy()
config['schema']['properties']['image']['enum'] = images
config['schema']['properties']['flavor']['enum'] = flavors
return config
def get_running_instance_logs(self, token, instance_id):
running_log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='running')
running_log_uploader.info('Cannot get running logs. This feature has not been implemented for the OpenStackDriver yet')
def do_update_connectivity(self, token, instance_id):
oss = self.get_oss()
pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
instance = pbclient.get_instance_description(instance_id)
instance_data = instance['instance_data']
security_group_id = instance_data['security_group_id']
blueprint_config = pbclient.get_blueprint_description(instance['blueprint_id'])
config = blueprint_config['full_config']
# Delete all existing rules and add the rules using the input port string
oss.clear_security_group_rules(security_group_id)
ports_str = config['exposed_ports']
if not ports_str:
ports_str = '22' # If the input port string is empty then use 22 as the default port
ports_list = parse_ports_string(ports_str)
for ports in ports_list:
from_port = ports[0]
to_port = ports[1]
oss.create_security_group_rule(
security_group_id,
from_port=from_port,
to_port=to_port,
cidr="%s/32" % instance['client_ip'],
ip_protocol='tcp',
group_id=None
)
def do_provision(self, token, instance_id):
self.logger.debug("do_provision %s" % instance_id)
pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
instance = pbclient.get_instance_description(instance_id)
instance_name = instance['name']
instance_user = instance['user_id']
# fetch config
blueprint_config = pbclient.get_blueprint_description(instance['blueprint_id'])
config = blueprint_config['full_config']
log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='provisioning')
log_uploader.info("Provisioning OpenStack instance (%s)\n" % instance_id)
ports_str = config['exposed_ports']
if ports_str:
try:
parse_ports_string(ports_str)
            except Exception:
error = 'Incorrect exposed ports definition in blueprint'
error_body = {'state': Instance.STATE_FAILED, 'error_msg': error}
pbclient.do_instance_patch(instance_id, error_body)
self.logger.debug(error)
raise RuntimeError(error)
# fetch user public key
key_data = pbclient.get_user_key_data(instance_user).json()
if not key_data:
error = 'user\'s public key is missing'
error_body = {'state': Instance.STATE_FAILED, 'error_msg': error}
pbclient.do_instance_patch(instance_id, error_body)
self.logger.debug(error)
raise RuntimeError(error)
oss = self.get_oss()
result = oss.provision_instance(
instance_name,
config['image'],
config['flavor'],
nics=config.get('openstack_net_id', 'auto'),
public_key=key_data[0]['public_key'],
userdata=config.get('userdata'))
if 'error' in result:
log_uploader.warn('Provisioning failed %s' % result['error'])
return
ip = result['address_data']['public_ip']
instance_data = {
'server_id': result['server_id'],
'floating_ip': ip,
'allocated_from_pool': result['address_data']['allocated_from_pool'],
'security_group_id': result['security_group'],
'endpoints': [
{'name': 'SSH', 'access': 'ssh cloud-user@%s' % ip},
]
}
log_uploader.info("Publishing server data\n")
pbclient.do_instance_patch(
instance_id,
{'instance_data': json.dumps(instance_data), 'public_ip': ip})
log_uploader.info("Provisioning complete\n")
def do_deprovision(self, token, instance_id):
log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='deprovisioning')
log_uploader.info("Deprovisioning instance %s\n" % instance_id)
pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
oss = self.get_oss()
instance = pbclient.get_instance_description(instance_id)
instance_data = instance['instance_data']
if 'server_id' not in instance_data:
log_uploader.info("Skipping, no server id in instance data")
return
server_id = instance_data['server_id']
log_uploader.info("Destroying server instance . . ")
oss.deprovision_instance(server_id)
log_uploader.info("Deprovisioning ready\n")
def do_housekeep(self, token):
pass
|
pcmagic/stokes_flow
|
ecoli_in_pipe/wrapper_head_tail.py
|
Python
|
mit
| 1,450
| 0.001379
|
import sys
import petsc4py
petsc4py.init(sys.argv)
from ecoli_in_pipe import head_tail
# import numpy as np
# from scipy.interpolate import interp1d
# from petsc4py import PETSc
# from ecoli_in_pipe import single_ecoli, ecoliInPipe, head_tail, ecoli_U
# from codeStore import ecoli_common
#
#
# def call_head_tial(uz_factor=1., wz_factor=1.):
# PETSc.Sys.Print('')
# PETSc.Sys.Print('################################################### uz_factor = %f, wz_factor = %f' %
# (uz_factor, wz_factor))
# t_head_U = head_U.copy()
# t_tail_U = tail_U.copy()
# t_head_U[2] = t_head_U[2] * uz_factor
# t_tail_U[2] = t_tail_U[2] * uz_factor
# # C1 = t_head_U[5] - t_tail_U[5]
# # C2 = t_head_U[5] / t_tail_U[5]
# # t_head_U[5] = wz_factor * C1 * C2 / (wz_factor * C2 - 1)
# t_tail_U[5] = C1 / (wz_factor * C2 - 1)
# t_head_U[5] = wz_factor * t_head_U[5]
# t_kwargs = {'head_U': t_head_U,
# 'tail_U': t_tail_U, }
# total_force = head_tail.main_fun()
# return total_force
#
#
# OptDB = PETSc.Options()
# fileHandle = OptDB.getString('f', 'ecoliInPipe')
# OptDB.setValue('f', fileHandle)
# main_kwargs = {'fileHandle': fileHandle}
# # head_U, tail_U, ref_U = ecoli_common.ecoli_restart(**main_kwargs)
# # ecoli_common.ecoli_restart(**main_kwargs)
# head_U = np.array([0, 0, 1, 0, 0, 1])
# tail_U = np.array([0, 0, 1, 0, 0, 1])
# call_head_tial()
head_tail.main_fun()
|
DanBuchan/cache_server
|
blast_cache/wsgi.py
|
Python
|
gpl-2.0
| 400
| 0
|
"""
WSGI config for cache_server project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "blast_cache.settings")
application = get_wsgi_application()
|
COMU/lazimlik
|
lazimlik/social_app/views.py
|
Python
|
gpl-2.0
| 568
| 0.019366
|
from django.contrib.auth import logout as auth_logout
from django.contrib.auth.decorators import login_required
from django.http import *
from django.template import Template, Context
from django.shortcuts import render_to_response, redirect, render, RequestContext, HttpResponseRedirect
def login(request):
return render(request, 'login.html')
@login_required
def home(request):
u = request.user
return render_to_response("home.html", locals(), context_instance=RequestContext(request))
def logout(request):
    auth_logout(request)
return redirect('/')
|
tensorflow/tfjs-examples
|
visualize-convnet/get_vgg16.py
|
Python
|
apache-2.0
| 172
| 0
|
from tensorflow.keras.applications.vgg16 import VGG16
import tensorflowjs as tfjs
model = VGG16(weights='imagenet')
tfjs.converters.save_keras_model(model, 'vgg16_tfjs')
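# Editor's note (hedged): save_keras_model writes a TensorFlow.js artifact
# directory ('vgg16_tfjs/') holding model.json plus binary weight shards;
# in the browser the model can then be loaded with
# tf.loadLayersModel('vgg16_tfjs/model.json').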
|
jjgomera/pychemqt
|
lib/EoS/Cubic/SRK.py
|
Python
|
gpl-3.0
| 13,006
| 0.000385
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
r"""Pychemqt, Chemical Engineering Process simulator
Copyright (C) 2009-2017, Juan José Gómez Romera <jjgomera@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>."""
from math import exp
from lib.EoS.cubic import Cubic
class SRK(Cubic):
r"""Equation of state of Soave-Redlich-Kwong (1972)
.. math::
\begin{array}[t]{l}
P = \frac{RT}{V-b}-\frac{a}{V\left(V+b\right)}\\
a = 0.42747\frac{R^2T_c^2}{P_c}\alpha\\
b = 0.08664\frac{RT_c}{P_c}\\
\alpha^{0.5} = 1 + m\left(1-Tr^{0.5}\right)\\
m = 0.48 + 1.574\omega - 0.176\omega^2\\
\end{array}
    In supercritical states, the α temperature dependence can use different
    extrapolation correlations:
* Boston-Mathias expression, [3]_
.. math::
\begin{array}[t]{l}
\alpha = \exp\left(c\left(1-T_r^d\right)\right)\\
d = 1+\frac{m}{2}\\
c = \frac{m}{d}\\
\end{array}
* Nasrifar-Bolland expression, [4]_
.. math::
\begin{array}[t]{l}
\alpha = \frac{b_1}{T_r} + \frac{b_2}{T_r^2} + \frac{b_3}{T_r^3}\\
b_1 = 0.25\left(12 - 11m + m^2\right)\\
b_2 = 0.5\left(-6 + 9m - m^2\right)\\
b_3 = 0.25\left(4 - 7m + m^2\right)\\
\end{array}
Parameters
----------
alpha : int
        Correlation index for the alpha expression at supercritical temperatures:
* 0 - Original
* 1 - Boston
* 2 - Nasrifar
Examples
--------
Example 4.3 from [2]_, Propane saturated at 300K
>>> from lib.mezcla import Mezcla
>>> mix = Mezcla(5, ids=[4], caudalMolar=1, fraccionMolar=[1])
>>> eq = SRK(300, 9.9742e5, mix)
>>> '%0.1f' % (eq.Vl.ccmol)
'98.4'
>>> eq = SRK(300, 42.477e5, mix)
>>> '%0.1f' % (eq.Vg.ccmol)
'95.1'
    Helmholtz energy formulation example from the supplementary documentation
    of [4]_; the critical parameters are overridden with the values used in
    the paper to reproduce the test values with high precision
>>> from lib.mezcla import Mezcla
>>> from lib import unidades
>>> from lib.compuestos import Componente
>>> ch4 = Componente(2)
>>> ch4.Tc, ch4.Pc, ch4.f_acent = 190.564, 4599200, 0.011
>>> o2 = Componente(47)
>>> o2.Tc, o2.Pc, o2.f_acent = 154.581, 5042800, 0.022
>>> ar = Componente(98)
>>> ar.Tc, ar.Pc, ar.f_acent = 150.687, 4863000, -0.002
>>> mix = Mezcla(5, customCmp=[ch4, o2, ar], caudalMolar=1,
... fraccionMolar=[0.5, 0.3, 0.2])
>>> eq = SRK(800, 36451227.52066596, mix, R=8.3144598)
>>> fir = eq._phir(800, 5000, eq.yi)
>>> delta = 5000
>>> tau = 1/800
>>> print("fir: %0.14f" % (fir["fir"]))
fir: 0.11586323513845
>>> print("fird: %0.14f" % (fir["fird"]*delta))
fird: 0.12741566551477
>>> print("firt: %0.15f" % (fir["firt"]*tau))
firt: -0.082603152680518
>>> print("firdd: %0.15f" % (fir["firdd"]*delta**2))
firdd: 0.024895937945147
>>> print("firdt: %0.15f" % (fir["firdt"]*delta*tau))
firdt: -0.077752734990782
>>> print("firtt: %0.14f" % (fir["firtt"]*tau**2))
firtt: -0.10404751064185
>>> print("firddd: %0.16f" % (fir["firddd"]*delta**3))
firddd: 0.0060986538256190
>>> print("firddt: %0.16f" % (fir["firddt"]*delta**2*tau))
firddt: 0.0089488831000362
>>> print("firdtt: %0.15f" % (fir["firdtt"]*delta*tau**2))
firdtt: -0.097937890490398
>>> print("firttt: %0.14f" % (fir["firttt"]*tau**3))
firttt: 0.15607126596277
"""
__title__ = "Soave-Redlich-Kwong (1972)"
__status__ = "SRK72"
__doi__ = (
{
"autor": "Soave, G.",
"title": "Equilibrium Constants from a modified Redlich-Kwong "
"Equation of State",
"ref": "Chem. Eng. Sci. 27 (1972) 1197-1203",
"doi": "10.1016/0009-2509(72)80096-4"},
{
"autor": "Poling, B.E, Prausnitz, J.M, O'Connell, J.P",
"title": "The Properties of Gases and Liquids 5th Edition",
"ref": "McGraw-Hill, New York, 2001",
"doi": ""},
{
"autor": "Boston, J.F., Mathias, P.M.",
"title": "Phase Equilibria in a Third-Generation Process Simulator",
"ref": "Presented at: 'Phase Equilibria and Fluid Properties in the "
"Chemical Industries', Berlin, March 17-21, 1980.",
"doi": ""},
{
"autor": "Nasrifar, Kh., Bolland, O.",
"title": "Square-Well Potential and a New α Function for the Soave-"
"Redlich-Kwong Equation of State",
"ref": "Ind. Eng. Chem. Res. 43(21) (2004) 6901-6909",
"doi": "10.1021/ie049545i"},
)
def _cubicDefinition(self, T):
"""Definition of coefficients for generic cubic equation of state"""
# Schmidt-Wenzel factorization of terms
self.u = 1
self.w = 0
ao = []
ai = []
bi = []
mi = []
for cmp in self.componente:
a0, b = self._lib(cmp)
alfa = self._alfa(cmp, T)
m = self._m(cmp)
ao.append(a0)
ai.append(a0*alfa)
bi.append(b)
mi.append(m)
self.ao = ao
        self.ai = ai
self.bi = bi
self.mi = mi
def _lib(self, cmp):
ao = 0.42747*self.R**2*cmp.Tc**2/cmp.Pc # Eq 5
b = 0.08664*self.R*cmp.Tc/cmp.Pc # Eq 6
return ao, b
def _GEOS(self, xi):
am, bm = self._mixture("SRK", xi, [self.ai, self.bi])
delta = bm
epsilon = 0
return am, bm, delta, epsilon
def _alfa(self, cmp, T):
"""α parameter calculation procedure, separate of general procedure
to let define derived equation where only change this term.
This method use the original alpha formulation for temperatures below
the critical temperature and can choose by configuration between:
* Boston-Mathias formulation
* Nasrifar-Bolland formulation
Parameters
----------
cmp : componente.Componente
Componente instance
T : float
Temperature, [K]
Returns
-------
alpha : float
alpha parameter of equation, [-]
"""
Tr = T/cmp.Tc
m = self._m(cmp)
if Tr > 1:
alfa = self.kwargs.get("alpha", 0)
if alfa == 0:
alfa = (1+m*(1-Tr**0.5))**2 # Eq 13
elif alfa == 1:
# Use the Boston-Mathias supercritical extrapolation, ref [3]_
d = 1+m/2 # Eq 10
c = m/d # Eq 11
alfa = exp(c*(1-Tr**d)) # Eq 9
elif alfa == 2:
# Use the Nasrifar-Bolland supercritical extrapolation, ref [4]
b1 = 0.25*(12-11*m+m**2) # Eq 17
b2 = 0.5*(-6+9*m-m**2) # Eq 18
b3 = 0.25*(4-7*m+m**2) # Eq 19
alfa = b1/Tr + b2/Tr**2 + b3/Tr**3 # Eq 16
else:
alfa = (1+m*(1-Tr**0.5))**2 # Eq 13
return alfa
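    # Editor's worked example (hedged, not in the original file): for propane
    # (Tc ~ 369.8 K, w ~ 0.152), Eq 15 gives
    # m = 0.48 + 1.574*0.152 - 0.176*0.152**2 ~ 0.715, so at T = 300 K
    # (Tr ~ 0.81) the subcritical branch yields
    # alpha = (1 + 0.715*(1 - 0.81**0.5))**2 ~ 1.148.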
def _m(self, cmp):
"""Calculate the intermediate parameter for alpha expression"""
# Eq 15
        return 0.48 + 1.574*cmp.f_acent - 0.176*cmp.f_acent**2
|
Trii/NoseGAE
|
examples/pets/pets.py
|
Python
|
bsd-2-clause
| 239
| 0
|
import webapp2
class Pets(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write('Hello Pets!')
app = webapp2.WSGIApplication([('/', Pets)], debug=True)
| |
joergdietrich/astropy
|
astropy/time/utils.py
|
Python
|
bsd-3-clause
| 3,571
| 0
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Time utilities.
In particular, routines to do basic arithmetic on numbers represented by two
doubles, using the procedure of Shewchuk, 1997, Discrete & Computational
Geometry 18(3):305-363 -- http://www.cs.berkeley.edu/~jrs/papers/robustr.pdf
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
def day_frac(val1, val2, factor=1., divisor=1.):
"""
Return the sum of ``val1`` and ``val2`` as two float64s, an integer part
and the fractional remainder. If ``factor`` is not 1.0 then multiply the
sum by ``factor``. If ``divisor`` is not 1.0 then divide the sum by
``divisor``.
The arithmetic is all done with exact floating point operations so no
precision is lost to rounding error. This routine assumes the sum is less
than about 1e16, otherwise the ``frac`` part will be greater than 1.0.
Returns
-------
day, frac : float64
Integer and fractional part of val1 + val2.
"""
# Add val1 and val2 exactly, returning the result as two float64s.
# The first is the approximate sum (with some floating point error)
# and the second is the error of the float64 sum.
    sum12, err12 = two_sum(val1, val2)
if np.any(factor != 1.):
sum12, carry = two_product(sum12, factor)
carry += err12 * factor
sum12, err12 = two_sum(sum12, carry)
    if np.any(divisor != 1.):
q1 = sum12 / divisor
p1, p2 = two_product(q1, divisor)
d1, d2 = two_sum(sum12, -p1)
d2 += err12
d2 -= p2
q2 = (d1 + d2) / divisor # 3-part float fine here; nothing can be lost
sum12, err12 = two_sum(q1, q2)
# get integer fraction
day = np.round(sum12)
extra, frac = two_sum(sum12, -day)
frac += extra + err12
return day, frac
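# Editor's usage sketch (hedged, not part of the upstream module): day_frac
# splits a two-double sum into an integer day and a fractional remainder, e.g.
# >>> day_frac(2450123.0, 0.3)
# (2450123.0, 0.3)
# where frac carries the exact rounding error of the float64 arithmetic.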
def two_sum(a, b):
"""
Add ``a`` and ``b`` exactly, returning the result as two float64s.
The first is the approximate sum (with some floating point error)
and the second is the error of the float64 sum.
Using the procedure of Shewchuk, 1997,
Discrete & Computational Geometry 18(3):305-363
http://www.cs.berkeley.edu/~jrs/papers/robustr.pdf
Returns
-------
sum, err : float64
Approximate sum of a + b and the exact floating point error
"""
x = a + b
eb = x - a
eb = b - eb
ea = x - b
ea = a - ea
return x, ea + eb
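# Editor's usage sketch (hedged, not part of the upstream module): the pair
# returned by two_sum() represents a + b with no precision loss, e.g.
# >>> two_sum(1.0, 1e-17)
# (1.0, 1e-17)
# where the second element is exactly the rounding error of the float64 sum.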
def two_product(a, b):
"""
    Multiply ``a`` and ``b`` exactly, returning the result as two float64s.
The first is the approximate product (with some floating point error)
and the second is the error of the float64 product.
Uses the procedure of Shewchuk, 1997,
Discrete & Computational Geometry 18(3):305-363
http://www.cs.berkeley.edu/~jrs/papers/robustr.pdf
Returns
-------
prod, err : float64
Approximate product a * b and the exact floating point error
"""
x = a * b
ah, al = split(a)
bh, bl = split(b)
y1 = ah * bh
y = x - y1
y2 = al * bh
y -= y2
y3 = ah * bl
y -= y3
y4 = al * bl
y = y4 - y
return x, y
def split(a):
"""
Split float64 in two aligned parts.
Uses the procedure of Shewchuk, 1997,
Discrete & Computational Geometry 18(3):305-363
http://www.cs.berkeley.edu/~jrs/papers/robustr.pdf
"""
c = 134217729. * a # 2**27+1.
abig = c - a
ah = c - abig
al = a - ah
return ah, al
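# Editor's note (hedged): split() is exact -- ah + al == a, with each half
# fitting in about half the float64 mantissa, which is what lets two_product()
# form the partial products y1..y4 without rounding error.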
|
bokeh/bokeh
|
bokeh/plotting/glyph_api.py
|
Python
|
bsd-3-clause
| 25,080
| 0.001994
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2022, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from typing import TYPE_CHECKING, Any
# Bokeh imports
from ..models import glyphs
from ._decorators import glyph_method, marker_method
if TYPE_CHECKING:
from ..models.canvas import CoordinateMapping
from ..models.plots import Plot
from ..models.renderers import GlyphRenderer
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
"GlyphAPI",
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
class GlyphAPI:
""" """
@property
def plot(self) -> Plot | None:
return self._parent
@property
    def coordinates(self) -> CoordinateMapping | None:
return self._coordinates
def __init__(self, parent: Plot | None = None, coordinates: CoordinateMapping | None = None) -> None:
self._parent = parent
self._coordinates = coordinates
    @glyph_method(glyphs.AnnularWedge)
def annular_wedge(self, **kwargs):
pass
@glyph_method(glyphs.Annulus)
def annulus(self, **kwargs):
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.annulus(x=[1, 2, 3], y=[1, 2, 3], color="#7FC97F",
inner_radius=0.2, outer_radius=0.5)
show(plot)
"""
@glyph_method(glyphs.Arc)
def arc(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
pass
@marker_method()
def asterisk(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.asterisk(x=[1,2,3], y=[1,2,3], size=20, color="#F0027F")
show(plot)
"""
@glyph_method(glyphs.Bezier)
def bezier(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
pass
@glyph_method(glyphs.Circle)
def circle(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
.. note::
Only one of ``size`` or ``radius`` should be provided. Note that ``radius``
defaults to |data units|.
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.circle(x=[1, 2, 3], y=[1, 2, 3], size=20)
show(plot)
"""
@glyph_method(glyphs.Block)
def block(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
                plot.block(x=[1, 2, 3], y=[1,2,3], width=0.5, height=1, color="#CAB2D6")
show(plot)
"""
@marker_method()
def circle_cross(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.circle_cross(x=[1,2,3], y=[4,5,6], size=20,
color="#FB8072", fill_alpha=0.2, line_width=2)
show(plot)
"""
@marker_method()
def circle_dot(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.circle_dot(x=[1,2,3], y=[4,5,6], size=20,
color="#FB8072", fill_color=None)
show(plot)
"""
@marker_method()
def circle_x(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.circle_x(x=[1, 2, 3], y=[1, 2, 3], size=20,
color="#DD1C77", fill_alpha=0.2)
show(plot)
"""
@marker_method()
def circle_y(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.circle_y(x=[1, 2, 3], y=[1, 2, 3], size=20,
color="#DD1C77", fill_alpha=0.2)
show(plot)
"""
@marker_method()
def cross(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.cross(x=[1, 2, 3], y=[1, 2, 3], size=20,
color="#E6550D", line_width=2)
show(plot)
"""
@marker_method()
def dash(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.dash(x=[1, 2, 3], y=[1, 2, 3], size=[10,20,25],
color="#99D594", line_width=2)
show(plot)
"""
@marker_method()
def diamond(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.diamond(x=[1, 2, 3], y=[1, 2, 3], size=20,
color="#1C9099", line_width=2)
show(plot)
"""
@marker_method()
def diamond_cross(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.diamond_cross(x=[1, 2, 3], y=[1, 2, 3], size=20,
color="#386CB0", fill_color=None, line_width=2)
show(plot)
"""
@marker_method()
def diamond_dot(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.diamond_dot(x=[1, 2, 3], y=[1, 2, 3], size=20,
color="#386CB0", fill_color=None)
show(plot)
"""
@marker_method()
def dot(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.dot(x=[1, 2, 3], y=[1, 2, 3], size=20, color="#386CB0")
show(plot)
"""
@glyph_method(glyphs.HArea)
def harea(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
"""
Examples:
.. code-block:: python
from bokeh.plotting import figure, output_file, show
plot = figure(width=300, height=300)
plot.harea(x1=[0, 0, 0], x2=[1, 4, 2], y=[1, 2, 3],
fill_color="#99D594")
show(plot)
"""
@glyph_method(glyphs.HBar)
    def hbar(self, *args: Any, **kwargs: Any) -> GlyphRenderer:
        pass
|
joodicator/PageBot
|
page/chess.py
|
Python
|
lgpl-3.0
| 2,574
| 0.007382
|
# coding=utf8
from __future__ import print_function
import re
import sys
import socket
from untwisted.mode import Mode
from untwisted.network import Work
from untwisted.event import DATA, BUFFER, FOUND, CLOSE, RECV_ERR
from untwisted.utils import std
from untwisted.utils.common import append, shrug
from untwisted.magic import sign
import util
import debug
import runtime
from util import NotInstalled, AlreadyInstalled
SOCKET_ADDRESS = 'state/chess'
RECONNECT_DELAY_SECONDS = 1
ch_work = []
ch_mode = Mode()
ch_mode.domain = 'ch'
ch_link = util.LinkSet()
ch_link.link_module(std)
ch_link.link(DATA, append)
ch_link.link(BUFFER, shrug, '\n')
if '--debug' in sys.argv: ch_link.link_module(debug)
ab_mode = None
ab_link = util.LinkSet()
@ab_link(('HELP', 'chess'))
def h_help(bot, reply, args):
reply('chess start',
'Starts a new game of chess.')
reply('chess rf RF',
'Moves the piece at rank r file f to rank R file F.')
reply('chess M [r|f|rf] RF',
'Moves a piece of type M to rank R file F'
' (moving from rank r and/or file f, if specified).')
reply('chess [r|f] RF',
'Moves a pawn to rank R file F'
' (moving from rank r or file f, if specified).')
reply('chess stop',
'Cancels the current game of chess.')
def init_work(address):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
work = Work(ch_mode, sock)
work.address = address
ch_work.append(work)
work.setblocking(0)
work.connect_ex(address)
def kill_work(work):
work.destroy()
work.shutdown(socket.SHUT_RDWR)
work.close()
ch_work.remove(work)
def install(bot):
global ab_mode
if ab_mode is not None: raise AlreadyInstalled
ab_mode = bot
ab_link.install(ab_mode)
ch_link.install(ch_mode)
init_work(SOCKET_ADDRESS)
def uninstall(bot):
global ab_mode
if ab_mode is None: raise NotInstalled
ch_link.uninstall(ch_mode)
while len(ch_work):
kill_work(ch_work[0])
ab_link.uninstall(ab_mode)
ab_mode = None
@ab_link('!chess')
def h_chess(bot, id, target, args, full_msg):
if not target: return
for work in ch_work:
work.dump('%s <%s> %s\n' % (target, id.nick, args))
@ch_link(FOUND)
def ch_found(work, line):
    match = re.match(r'(#\S+) (.*)', line.strip())
if not match: return
ab_mode.send_msg(match.group(1), match.group(2))
@ch_link(CLOSE)
@ch_link(RECV_ERR)
def ch_close_recv_error(work, *args):
kill_work(work)
yield runtime.sleep(RECONNECT_DELAY_SECONDS)
init_work(work.address)
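# Editor's note (hedged sketch, inferred from the code above): the wire
# protocol is line-oriented in both directions, e.g.
#   bot -> chess process:  "#chan <nick> start\n"     (written by h_chess)
#   chess process -> bot:  "#chan White to move.\n"   (relayed by ch_found)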
|
SimplyAutomationized/python-snap7
|
snap7/six.py
|
Python
|
mit
| 26,731
| 0.001459
|
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2014 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import functools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.7.3"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
# This is a bit ugly, but it avoids running this again.
delattr(obj.__class__, self.name)
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
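# Editor's note (hedged sketch): the full six module registers _importer on
# sys.meta_path near the end of the file (outside this excerpt), after which
# moved names resolve lazily, e.g.:
#   from snap7.six.moves import range   # xrange on Py2, builtins.range on Py3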
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("StringIO",
|
"StringIO", "io"),
MovedAttribute("UserDic
|
t", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonM
|
darinmcgill/forker
|
tests/TestRequest.py
|
Python
|
gpl-3.0
| 3,544
| 0.000282
|
#!/usr/bin/env python
from __future__ import print_function
import unittest
from forker import Request
import socket
import os
import sys
import re
_example_request = b"""GET /README.md?xyz HTTP/1.1
Host: localhost:8080
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8
Accept-Encoding: gzip, deflate, sdch, br
Accept-Language: en-US,en;q=0.8
Cookie:trail=6231214290744395; scent=6457421329820405
"""
HELLO_WORLD = b"Hello world!\n"
def simple_app(environ, start_response):
status = environ and '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [HELLO_WORLD]
class AppClass:
def __init__(self, environ, start_response):
self.environ = environ
self.start = start_response
def __iter__(self):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
self.start(status, response_headers)
yield HELLO_WORLD
class TestRequest(unittest.TestCase):
def test_socket(self):
test_data = b"hello\nworld!"
client, server = socket.socketpair()
client.send(test_data)
buff = server.recv(4096)
self.assertEqual(buff, test_data)
client.close()
server.close()
def test_request(self):
client, server = socket.socketpair()
client.send(_example_request)
request = Request(sock=server)
client.close()
server.close()
self.assertEqual(request.method, "GET")
self.assertEqual(request.requested_path, "/README.md")
self.assertEqual(request.query_string, "xyz")
self.assertEqual(request.headers["host"], "localhost:8080")
self.assertFalse(request.body)
self.assertEqual(request.cookies.get("scent"), "6457421329820405")
self.assertEqual(request.cookies.get("trail"), "6231214290744395")
def test_listing(self):
r = Request(requested_path='/')
out = r.serve()
line = b"<a href='/cgi_example.sh'>cgi_example.sh</a>"
self.assertTrue(line in out)
def test_read(self):
magic = b"Y43j99j8p4Mk8S8B"
r = Request(requested_path='/TestRequest.py')
out = r.serve()
self.assertTrue(magic in out)
def test_cgi(self):
r = Request(requested_path='/cgi_example.sh', query_string="abc")
out = r.serve(allow_cgi=True)
        print(out.decode())
self.assertTrue(re.match(b"HTTP/1.. 201", out))
self.assertTrue(b"QUERY_STRING=abc" in out)
def test_wsgi1(self):
client, server = socket.socketpair()
client.send(_example_request)
request = Request(sock=server)
client.close()
server.close()
out = request.wsgi(simple_app)
self.assertTrue(isinstance(out, bytes))
self.assertTrue(b'\r\n\r\n' in out)
self.assertTrue(HELLO_WORLD in out)
        self.assertTrue(out.startswith(b'HTTP/1.0 200 OK'))
def test_wsgi2(self):
client, server = socket.socketpair()
client.send(_example_request)
request = Request(sock=server)
client.close()
server.close()
out = request.wsgi(AppClass)
self.assertTrue(isinstance(out, bytes))
self.assertTrue(b'\r\n\r\n' in out)
self.assertTrue(HELLO_WORLD in out)
self.assertTrue(out.startswith(b'HTTP/1.0 200 OK'))
if __name__ == "__main__":
os.chdir(os.path.dirname(os.path.abspath(__file__)))
sys.path.append("..")
unittest.main()
|
rxncon/rxncon
|
rxncon/simulation/boolean/boolean_model.py
|
Python
|
lgpl-3.0
| 35,991
| 0.005057
|
from abc import ABCMeta
from copy import deepcopy
from enum import Enum
from itertools import product
from typing import List, Dict, Tuple, Optional
from rxncon.core.reaction import Reaction, OutputReaction
from rxncon.core.rxncon_system import RxnConSystem
from rxncon.core.spec import Spec
from rxncon.core.state import State, InteractionState
from rxncon.venntastic.sets import Set as VennSet, ValueSet, Intersection, Union, Complement, UniversalSet, EmptySet
MAX_STEADY_STATE_ITERS = 20
class BooleanModel:
"""Holds all data describing a Boolean model: a list of targets, a list of update rules and
a list of initial conditions."""
def __init__(self, targets: List['Target'], update_rules: List['UpdateRule'],
initial_conditions: 'BooleanModelState') -> None:
self.update_rules = sorted(update_rules)
self.initial_conditions = initial_conditions
self._state_targets = {str(x): x for x in targets if isinstance(x, StateTarget)}
self._reaction_targets = {str(x): x for x in targets if isinstance(x, ReactionTarget)}
self._knockout_targets = {str(x): x for x in targets if isinstance(x, KnockoutTarget)}
self._overexpression_targets = {str(x): x for x in targets if isinstance(x, OverexpressionTarget)}
self._validate_update_rules()
self._validate_initial_conditions()
self.current_state = None # type: Optional[BooleanModelState]
def set_initial_condition(self, target: 'Target', value: bool) -> None:
self.initial_conditions.set_target(target, value)
def update_rule_by_target(self, target: 'Target') -> 'UpdateRule':
for rule in self.update_rules:
if rule.target == target:
return rule
raise KeyError
def state_target_by_name(self, name: str) -> 'StateTarget':
return self._state_targets[name]
def reaction_target_by_name(self, name: str) -> 'ReactionTarget':
return self._reaction_targets[name]
def knockout_target_by_name(self, name: str) -> 'KnockoutTarget':
return self._knockout_targets[name]
def overexpression_target_by_name(self, name: str) -> 'OverexpressionTarget':
return self._overexpression_targets[name]
def step(self) -> None:
"""Takes one timestep in the Boolean model. This is rather inefficient, but not meant for
actual simulations, this is only used in the unit tests that test all different motifs
and their desired steady states."""
if not self.current_state:
self.current_state = deepcopy(self.initial_conditions)
else:
new_state = dict()
for rule in self.update_rules:
new_state[rule.target] = rule.factor.eval_boolean_func(self.current_state.target_to_value)
self.current_state = BooleanModelState(new_state)
def calc_steady_state(self) -> 'BooleanModelState':
"""Calculates the steady state by taking max MAX_STEADY_STATE_ITERS steps. If no steady state
found, raises."""
iters = 0
while iters < MAX_STEADY_STATE_ITERS:
            prev = deepcopy(self.current_state)
            self.step()
if prev == self.current_state:
assert isinstance(prev, BooleanModelState)
return prev
iters += 1
raise AssertionError('Could not find steady state.')
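    # Editor's note (hedged, not in the original module): step() is a
    # synchronous update -- every target's new value is computed from the
    # *previous* state. A self-negating rule such as A = not A therefore flips
    # forever, and calc_steady_state() raises after MAX_STEADY_STATE_ITERS.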
def _validate_update_rules(self) -> None:
"""Assert that all targets appearing on the RHS in an update rule have their own LHS."""
all_lhs_targets = [] # type: List[Target]
all_rhs_targets = [] # type: List[Target]
for rule in self.update_rules:
all_lhs_targets.append(rule.target)
all_rhs_targets += rule.factor_targets
assert all(x in all_lhs_targets for x in all_rhs_targets)
def _validate_initial_conditions(self) -> None:
self.initial_conditions.validate_by_model(self)
class BooleanModelState:
def __init__(self, target_to_value: Dict['Target', bool]) -> None:
self.target_to_value = target_to_value
def __eq__(self, other):
if not isinstance(other, BooleanModelState):
return NotImplemented
else:
return self.target_to_value == other.target_to_value
def __getitem__(self, item):
return self.target_to_value[item]
def __str__(self):
return str(self.target_to_value)
def __repr__(self):
return str(self)
def set_target(self, target: 'Target', value: bool) -> None:
self.target_to_value[target] = value
def validate_by_model(self, model: BooleanModel) -> None:
"""Assert that all targets appearing in the model have a Boolean value assigned."""
model_targets = [rule.target for rule in model.update_rules]
config_targets = self.target_to_value.keys()
assert set(model_targets) == set(config_targets) and len(model_targets) == len(config_targets)
class Target(metaclass=ABCMeta):
"""Abstract base class for the different targets."""
def __hash__(self) -> int:
return hash(str(self))
def __repr__(self) -> str:
return str(self)
class ReactionTarget(Target):
"""Reaction target of the boolean model. For all non-degrading reactions the relation between
rxncon reactions and Boolean targets is 1:1. The relation for degrading reactions is more difficult
since (1) the contingencies determine what the reaction degrades (which obviously becomes problematic
in the case of a logical disjunction), and (2) the degradation of bonds should produce empty binding
partners. We refer to our paper."""
def __init__(self, reaction_parent: Reaction, contingency_variant: Optional[int]=None,
interaction_variant: Optional[int] = None, contingency_factor: VennSet['StateTarget']=None) -> None:
self.reaction_parent = reaction_parent # type: Reaction
self.produced_targets = [StateTarget(x) for x in reaction_parent.produced_states] # type: List[StateTarget]
self.consumed_targets = [StateTarget(x) for x in reaction_parent.consumed_states] # type: List[StateTarget]
self.synthesised_targets = [StateTarget(x) for x in
reaction_parent.synthesised_states] # type: List[StateTarget]
self.degraded_targets = [StateTarget(x) for x in reaction_parent.degraded_states] # type: List[StateTarget]
self.contingency_variant_index = contingency_variant
self.interaction_variant_index = interaction_variant
if contingency_factor is None:
self.contingency_factor = UniversalSet() # type: VennSet[StateTarget]
else:
self.contingency_factor = contingency_factor # type: VennSet[StateTarget]
def __hash__(self) -> int:
return hash(str(self))
def __eq__(self, other: object) -> bool:
if not isinstance(other, Target):
return NotImplemented
return isinstance(other, ReactionTarget) and self.reaction_parent == other.reaction_parent and \
self.contingency_variant_index == other.contingency_variant_index and \
self.interaction_variant_index == other.interaction_variant_index
def __str__(self) -> str:
suffix = ''
if self.interaction_variant_index is not None and self.contingency_variant_index is not None:
suffix = '#c{}/i{}'.format(self.contingency_variant_index, self.interaction_variant_index)
elif self.contingency_variant_index is not None and self.interaction_variant_index is None:
suffix = '#c{}'.format(self.contingency_variant_index)
elif self.interaction_variant_index is not None and self.contingency_variant_index is None:
suffix = '#i{}'.format(self.interaction_variant_index)
return str(self.reaction_parent) + suffix
def __repr__(self) -> str:
return str(self)
def produces(self, state_target: 'StateTarget') -> bool:
return state_target in self.produced_targets
def consumes(self, state_target: 'StateTarget') -> bool:
        return state_target in self.consumed_targets
|
jeffreyliu3230/scrapi
|
scrapi/harvesters/plos.py
|
Python
|
apache-2.0
| 3,868
| 0.002068
|
"""PLoS-API-harvester
=================
<p>To run "harvester.py" please follow the instructions:</p>
<ol>
<li>Create an account on <a href="
|
http://register.plos.org/ambra-registration/register.action">PLOS API</a></li>
<li>Sign in <a href="http://alm.
|
plos.org/">here</a> and click on your account name. Retrieve your API key.</li>
<li>Create a new file in the folder named "settings.py". In the file, put<br>
<code>API_KEY = (your API key)</code></li>
</ol>
Sample API query: http://api.plos.org/search?q=publication_date:[2015-01-30T00:00:00Z%20TO%202015-02-02T00:00:00Z]&api_key=ayourapikeyhere&rows=999&start=0
"""
from __future__ import unicode_literals
import logging
from datetime import date, timedelta
from lxml import etree
from scrapi import requests
from scrapi import settings
from scrapi.base import XMLHarvester
from scrapi.linter.document import RawDocument
from scrapi.base.helpers import default_name_parser, build_properties, compose, single_result, date_formatter
logger = logging.getLogger(__name__)
try:
from scrapi.settings import PLOS_API_KEY
except ImportError:
PLOS_API_KEY = None
logger.error('No PLOS_API_KEY found, PLoS will always return []')
class PlosHarvester(XMLHarvester):
short_name = 'plos'
long_name = 'Public Library of Science'
url = 'http://www.plos.org/'
namespaces = {}
MAX_ROWS_PER_REQUEST = 999
BASE_URL = 'http://api.plos.org/search'
def fetch_rows(self, start_date, end_date):
query = 'publication_date:[{}T00:00:00Z TO {}T00:00:00Z]'.format(start_date, end_date)
resp = requests.get(self.BASE_URL, params={
'q': query,
'rows': '0',
'api_key': PLOS_API_KEY,
})
total_rows = etree.XML(resp.content).xpath('//result/@numFound')
total_rows = int(total_rows[0]) if total_rows else 0
current_row = 0
while current_row < total_rows:
response = requests.get(self.BASE_URL, throttle=5, params={
'q': query,
'start': current_row,
'api_key': PLOS_API_KEY,
'rows': self.MAX_ROWS_PER_REQUEST,
})
for doc in etree.XML(response.content).xpath('//doc'):
yield doc
current_row += self.MAX_ROWS_PER_REQUEST
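    # Editor's note (hedged): e.g. with numFound == 2500 the loop above issues
    # requests at start=0, 999 and 1998; the last chunk simply returns the
    # remaining 502 documents.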
def harvest(self, start_date=None, end_date=None):
start_date = start_date or date.today() - timedelta(settings.DAYS_BACK)
end_date = end_date or date.today()
if not PLOS_API_KEY:
return []
return [
RawDocument({
'filetype': 'xml',
'source': self.short_name,
'doc': etree.tostring(row),
'docID': row.xpath("str[@name='id']")[0].text,
})
for row in
self.fetch_rows(start_date.isoformat(), end_date.isoformat())
if row.xpath("arr[@name='abstract']")
or row.xpath("str[@name='author_display']")
]
schema = {
'uris': {
'canonicalUri': ('//str[@name="id"]/node()', compose('http://dx.doi.org/{}'.format, single_result)),
},
'contributors': ('//arr[@name="author_display"]/str/node()', default_name_parser),
'providerUpdatedDateTime': ('//date[@name="publication_data"]/node()', compose(date_formatter, single_result)),
'title': ('//str[@name="title_display"]/node()', single_result),
'description': ('//arr[@name="abstract"]/str/node()', single_result),
'publisher': {
'name': ('//str[@name="journal"]/node()', single_result)
},
'otherProperties': build_properties(
('eissn', '//str[@name="eissn"]/node()'),
('articleType', '//str[@name="article_type"]/node()'),
('score', '//float[@name="score"]/node()')
)
}
|
DraXus/andaluciapeople
|
oauth_access/views.py
|
Python
|
agpl-3.0
| 1,715
| 0.004665
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from oauth_access.access import OAuthAccess
from oauth_access.exceptions import MissingToken
def oauth_login(request, service,
redirect_field_name="next", redirect_to_session_key="redirect_to"):
access = OAuthAccess(service)
if not service == "facebook":
token = access.unauthorized_token()
request.session["%s_unauth_token" % service] = token.to_string()
else:
token = None
if hasattr(request, "session")
|
:
request.session[redirect_to_session_key] = request.GET.get(redirect_field_name)
return HttpResponseRedirect(access.authorization_url(token))
def oauth_callback(request, service):
ctx = RequestContext(request)
access = OAuthAccess(service)
unauth_token = request.session.get("%s_unauth_token" % service, None)
try:
#print "oauth_callback unauth_token = %s" % unauth_token
#print "oauth_callback request.GET = %s" % request.GET
auth_token = access.check_token(unauth_token, request.GET)
#print "oauth_login auth_token = %s" % auth_token
except MissingToken:
ctx.update({"error": "token_missing"})
else:
if auth_token:
cback = access.callback(request, access, auth_token)
return cback.redirect()
else:
# @@@ not nice for OAuth 2
ctx.update({"error": "token_mismatch"})
return render_to_response("oauth_access/oauth_error.html", ctx)
def finish_signup(request, service):
access = OAuthAccess(service)
return access.callback.finish_signup(request, service)
|
nacl-webkit/chrome_deps
|
tools/telemetry/telemetry/temporary_http_server.py
|
Python
|
bsd-3-clause
| 1,660
| 0.010843
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import socket
import subprocess
import sys
import urlparse
from telemetry import util
class TemporaryHTTPServer(object):
def __init__(self, browser_backend, path):
self._server = None
self._devnull = None
self._path = path
self._forwarder = None
self._host_port = util.GetAvailableLocalPort()
assert os.path.exists(path), path
assert os.path.isdir(path), path
self._devnull = open(os.devnull, 'w')
self._server = subprocess.Popen(
[sys.executable, '-m', 'SimpleHTTPServer', str(self._host_port)],
cwd=self._path,
stdout=self._devnull, stderr=self._devnull)
    self._forwarder = browser_backend.CreateForwarder(
util.PortPair(self._host_port,
browser_backend.GetRemotePort(self._host_port)))
def IsServerUp():
      return not socket.socket().connect_ex(('localhost', self._host_port))
util.WaitFor(IsServerUp, 5)
@property
def path(self):
return self._path
def __enter__(self):
return self
def __exit__(self, *args):
self.Close()
def __del__(self):
self.Close()
def Close(self):
if self._forwarder:
self._forwarder.Close()
self._forwarder = None
if self._server:
self._server.kill()
self._server = None
if self._devnull:
self._devnull.close()
self._devnull = None
@property
def url(self):
return self._forwarder.url
def UrlOf(self, path):
return urlparse.urljoin(self.url, path)
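# Editor's usage sketch (hedged; 'browser_backend' and the path are
# hypothetical, as provided by the telemetry test harness):
#   with TemporaryHTTPServer(browser_backend, '/path/to/pages') as server:
#       url = server.UrlOf('index.html')  # URL reachable through the forwarder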
|
ctoher/pymatgen
|
pymatgen/util/io_utils.py
|
Python
|
mit
| 2,727
| 0.000367
|
# coding: utf-8
from __future__ import unicode_literals
"""
This module provides utility classes for io operations.
"""
__author__ = "Shyue Ping Ong, Rickard Armiento, Anubhav Jain, G Matteo, Ioannis Petousis"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
import re
from monty.io import zopen
def clean_lines(string_list, remove_empty_lines=True):
"""
Strips whitespace, carriage returns and empty lines from a list of strings.
Args:
string_list: List of strings
remove_empty_lines: Set to True to skip lines which are empty after
stripping.
Returns:
List of clean strings with no whitespaces.
"""
for s in string_list:
clean_s = s
if '#' in s:
ind = s.index('#')
clean_s = s[:ind]
clean_s = clean_s.strip()
if (not remove_empty_lines) or clean_s != '':
yield clean_s
def micro_pyawk(filename, search, results=None, debug=None, postdebug=None):
"""
Small awk-mimicking search routine.
'file' is file to search through.
'search' is the "search program", a list of lists/tuples with 3 elements;
    i.e. [[regex,test,run],[regex,test,run],...]
    'results' is an object that your search program will have access to for
storing results.
Here regex is either as a Regex object, or a string that we compile into a
Regex. test and run are callable objects.
This function goes through each line in filename, and if regex matches that
line *and* test(results,line)==True (or test == None) we execute
    run(results,match), where match is the match object from running
Regex.match.
    The default results is an empty dictionary. Passing a results object lets
    you interact with it in run() and test(); it is therefore often convenient
    to use results=self.
Author: Rickard Armiento, Ioannis Petousis
Returns:
results
"""
if results is None:
results = {}
# Compile strings into regexs
for entry in search:
entry[0] = re.compile(entry[0])
with zopen(filename, "rt") as f:
for line in f:
for entry in search:
match = re.search(entry[0], line)
if match and (entry[1] is None
or entry[1](results, line)):
if debug is not None:
debug(results, match)
entry[2](results, match)
if postdebug is not None:
postdebug(results, match)
return results
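# Editor's usage sketch (hedged; the file name and pattern are hypothetical):
#   results = micro_pyawk("run.log",
#                         [["energy = ([-+0-9.Ee]+)", None,
#                           lambda r, m: r.setdefault("energies", []).append(float(m.group(1)))]])
# collects every matched energy value into results["energies"].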
|
naomi-/exploration
|
Enemy.py
|
Python
|
mit
| 1,023
| 0
|
import pygame
from pygame.locals import *
import constants as c
class Enemy:
def __init__(self, x, y, health, movement_pattern, direction, img):
self.x = x
self.y = y
self.health = health
self.movement_pattern = movement_pattern
self.direction = direction
self.img = img
def update(self, platforms_list, WORLD, avatar):
        # do updates based on movement_pattern
if self.movement_pattern == "vertical":
if self.direction == "up":
                self.y -= 2
elif self.direction == "down":
self.y += 2
else:
self.y = self.y
if self.y > avatar.y + 30:
self.direction = "up"
elif self.y < avatar.y - 30:
self.direction = "down"
else:
self.direction = "stay"
self.display(WORLD)
def display(self, WORLD):
WORLD.blit(self.img, (self.x, self.y))
|
Smarsh/django
|
tests/regressiontests/forms/localflavor/cz.py
|
Python
|
bsd-3-clause
| 4,319
| 0.001158
|
# -*- coding: utf-8 -*-
# Tests for the contrib/localflavor/ CZ Form Fields
tests = r"""
# CZPostalCodeField #########################################################
>>> from django.contrib.localflavor.cz.forms import CZPostalCodeField
>>> f = CZPostalCodeField()
>>> f.clean('84545x')
Traceback (most recent call last):
...
ValidationError: [u'Enter a postal code in the format XXXXX or XXX XX.']
>>> f.clean('91909')
u'91909'
>>> f.clean('917 01')
u'91701'
>>> f.clean('12345')
u'12345'
>>> f.clean('123456')
Traceback (most recent call last):
...
ValidationError: [u'Enter a postal code in the format XXXXX or XXX XX.']
>>> f.clean('1234')
Traceback (most recent call last):
...
ValidationError: [u'Enter a postal code in the format XXXXX or XXX XX.']
>>> f.clean('123 4')
Traceback (most recent call last):
...
ValidationError: [u'Enter a postal code in the format XXXXX or XXX XX.']
# CZRegionSelect ############################################################
>>> from django.contrib.localflavor.cz.forms import CZRegionSelect
>>> w = CZRegionSelect()
>>> w.render('regions', 'TT')
u'<select name="regions">\n<option value="PR">Prague</option>\n<option value="CE">Central Bohemian Region</option>\n<option value="SO">South Bohemian Region</option>\n<option value="PI">Pilsen Region</option>\n<option value="CA">Carlsbad Region</option>\n<option value="US">Usti Region</option>\n<option value="LB">Liberec Region</option>\n<option value="HK">Hradec Region</option>\n<option value="PA">Pardubice Region</option>\n<option value="VY">Vysocina Region</option>\n<option value="SM">South Moravian Region</option>\n<option value="OL">Olomouc Region</option>\n<option value="ZL">Zlin Region</option>\n<option value="MS">Moravian-Silesian Region</option>\n</select>'
# CZBirthNumberField ########################################################
>>> from django.contrib.localflavor.cz.forms import CZBirthNumberField
>>> f = CZBirthNumberField()
>>> f.clean('880523/1237')
u'880523/1237'
>>> f.clean('8805231237')
u'8805231237'
>>> f.clean('880523/000')
u'880523/000'
>>> f.clean('880523000')
u'880523000'
>>> f.clean('882101/0011')
u'882101/0011'
>>> f.clean('880523/1237', 'm')
u'880523/1237'
>>> f.clean('885523/1231', 'f')
u'885523/1231'
>>> f.clean('123456/12')
Traceback (most recent call last):
...
ValidationError: [u'Enter a birth number in the format XXXXXX/XXXX or XXXXXXXXXX.']
>>> f.clean('123456/12345')
Traceback (most recent call last):
...
ValidationError: [u'Enter a birth number in the format XXXXXX/XXXX or XXXXXXXXXX.']
>>> f.clean('12345612')
Traceback (most recent call last):
...
ValidationError: [u'Enter a birth number in the format XXXXXX/XXXX or XXXXXXXXXX.']
>>> f.clean('12345612345')
Traceback (most recent call last):
...
ValidationError: [u'Enter a birth number in the format XXXXXX/XXXX or XXXXXXXXXX.']
>>> f.clean('881523/0000', 'm')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('885223/0000', 'm')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('881223/0000', 'f')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('886523/0000', 'f')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('880523/1239')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('8805231239')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('990101/0011')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
# CZICNumberField ########################################################
>>> from django.contrib.localflavor.cz.forms import CZICNumberField
>>> f = CZICNumberField()
>>> f.clean('12345679')
u'12345679'
>>> f.clean('12345601')
u'12345601'
>>> f.clean('12345661')
u'12345661'
>>> f.clean('12345610')
u'12345610'
>>> f.clean('1234567')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid IC number.']
>>> f.clean('12345660')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid IC number.']
>>> f.clean('12345600')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid IC number.']
"""
|
giuserpe/leeno
|
src/Ultimus.oxt/python/pythonpath/LeenoUtils.py
|
Python
|
lgpl-2.1
| 7,316
| 0.002873
|
'''
Often used utility functions
Copyright 2020 by Massimo Del Fedele
'''
import sys
import uno
from com.sun.star.beans import PropertyValue
from datetime import date
import calendar
import PyPDF2
'''
SOME USEFUL NOTES
The window that contains the current document (or component):
desktop.CurrentFrame.ContainerWindow
Nothing changes if a non-modal dialog is open;
it ALWAYS returns the document's frame.
desktop.ContainerWindow returns None -- I don't know what it is for
To get the top windows, there is the toolkit...
tk = ctx.ServiceManager.createInstanceWithContext("com.sun.star.awt.Toolkit", ctx)
tk.getTopWindowCount() returns the number of top windows
tk.getTopWindow(i) returns one top window from the list
tk.getActiveTopWindow() returns the active top window
For the active top window to be active it must, precisely, be active, i.e. have the focus.
When debugging, for example, the active window is likely to be None.
So the problem ALWAYS remains of how to center a dialog on the current component.
If no dialog is running, the created dialog takes the ContainerWindow as its parent (presumably...)
and is therefore positioned relative to it.
If a dialog is open and another one is opened from its event handler, the latter takes the former
as its parent, and is therefore positioned relative to it and not to the main screen.
So we need a way to find the size OF THE PARENT WINDOW of a dialog, in order to position it.
The UnoControlDialog object gives access to the XWindowPeer (which is of no use at all), to the XView
(which gives the dialog's size but NOT its parent...), and to the UnoControlDialogModel, which provides
the 'DesktopAsParent' property that ONLY says whether the dialog is modal (False) or non-modal (True).
The only solution that comes to mind is to try tk.ActiveTopWindow and, if None, fall back to the desktop's window.
'''
def getComponentContext():
'''
Get current application's component context
'''
try:
if __global_context__ is not None:
return __global_context__
return uno.getComponentContext()
except Exception:
return uno.getComponentContext()
def getDesktop():
'''
Get current application's LibreOffice desktop
'''
ctx = getComponentContext()
return ctx.ServiceManager.createInstanceWithContext("com.sun.star.frame.Desktop", ctx)
def getDocument():
'''
Get active document
'''
desktop = getDesktop()
    # try to activate the current frame
    # needed sometimes because UNO doesn't find the correct window
    # when debugging.
try:
desktop.getCurrentFrame().activate()
except Exception:
pass
return desktop.getCurrentComponent()
def getServiceManager():
'''
Gets the service manager
'''
return getComponentC
|
ontext().ServiceManager
def createUnoService(serv):
'''
create an UNO service
'''
return getComponentContext().getServiceManager().crea
|
teInstance(serv)
def MRI(target):
ctx = getComponentContext()
mri = ctx.ServiceManager.createInstanceWithContext("mytools.Mri", ctx)
mri.inspect(target)
def isLeenoDocument():
'''
check if current document is a LeenO document
'''
try:
return getDocument().getSheets().hasByName('S2')
except Exception:
return False
def DisableDocumentRefresh(oDoc):
    '''
    Disable refresh to speed up procedures
    '''
oDoc.lockControllers()
oDoc.addActionLock()
def EnableDocumentRefresh(oDoc):
    '''
    Re-enable refresh
    '''
oDoc.removeActionLock()
oDoc.unlockControllers()
def getGlobalVar(name):
    # __builtins__ is a module in the main script but a plain dict in
    # imported modules, so normalize it to a dict first
    if type(__builtins__) == type(sys):
        bDict = __builtins__.__dict__
    else:
        bDict = __builtins__
    return bDict.get('LEENO_GLOBAL_' + name)
def setGlobalVar(name, value):
if type(__builtins__) == type(sys):
bDict = __builtins__.__dict__
else:
bDict = __builtins__
bDict['LEENO_GLOBAL_' + name] = value
def initGlobalVars(values):
    if type(__builtins__) == type(sys):
        bDict = __builtins__.__dict__
    else:
        bDict = __builtins__
    for key, value in values.items():
        bDict['LEENO_GLOBAL_' + key] = value
def dictToProperties(values, unoAny=False):
'''
convert a dictionary in a tuple of UNO properties
if unoAny is True, return the result in an UNO Any variable
otherwise use a python tuple
'''
ps = tuple([PropertyValue(Name=n, Value=v) for n, v in values.items()])
if unoAny:
ps = uno.Any('[]com.sun.star.beans.PropertyValue', ps)
return ps
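# Usage sketch (the property names below are illustrative MediaDescriptor
# entries, not taken from this module):
#   props = dictToProperties({'Hidden': True, 'ReadOnly': False})
#   props is a tuple of com.sun.star.beans.PropertyValue objects, suitable
#   for APIs such as desktop.loadComponentFromURL(url, '_blank', 0, props)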
def daysInMonth(dat):
    '''
    returns the number of days in the month of date dat
    '''
    # calendar.monthrange avoids building an invalid date when dat.day
    # does not exist in the following month (e.g. January 31st)
    return calendar.monthrange(dat.year, dat.month)[1]
def firstWeekDay(dat):
'''
returns first week day in month from dat
monday is 0
'''
return calendar.weekday(dat.year, dat.month, 1)
DAYNAMES = ['Lun', 'Mar', 'Mer', 'Gio', 'Ven', 'Sab', 'Dom']
MONTHNAMES = [
'Gennaio', 'Febbraio', 'Marzo', 'Aprile',
'Maggio', 'Giugno', 'Luglio', 'Agosto',
'Settembre', 'Ottobre', 'Novembre', 'Dicembre'
]
def date2String(dat, fmt = 0):
    '''
    convert a date to a string
    fmt = 0     25 Febbraio 2020
    fmt = 1     25/02/2020
    fmt = 2     25-02-2020
    fmt = 3     25.02.2020
    '''
d = dat.day
m = dat.month
if m < 10:
ms = '0' + str(m)
else:
ms = str(m)
y = dat.year
if fmt == 1:
return str(d) + '/' + ms + '/' + str(y)
elif fmt == 2:
return str(d) + '-' + ms + '-' + str(y)
elif fmt == 3:
return str(d) + '.' + ms + '.' + str(y)
else:
return str(d) + ' ' + MONTHNAMES[m - 1] + ' ' + str(y)
def string2Date(s):
if '.' in s:
sp = s.split('.')
elif '/' in s:
sp = s.split('/')
elif '-' in s:
sp = s.split('-')
else:
return date.today()
    if len(sp) != 3:
        raise ValueError('invalid date string: ' + s)
day = int(sp[0])
month = int(sp[1])
year = int(sp[2])
return date(day=day, month=month, year=year)
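# Round-trip sketch:
#   s = date2String(date(2020, 2, 25), fmt=3)    # '25.02.2020'
#   string2Date(s) == date(2020, 2, 25)          # True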
def countPdfPages(path):
'''
Returns the number of pages in a PDF document
using external PyPDF2 module
'''
with open(path, 'rb') as f:
pdf = PyPDF2.PdfFileReader(f)
return pdf.getNumPages()
def replacePatternWithField(oTxt, pattern, oField):
'''
Replaces a string pattern in a Text object
(for example '[PATTERN]') with the given field
'''
# pattern may be there many times...
repl = False
pos = oTxt.String.find(pattern)
while pos >= 0:
#create a cursor
cursor = oTxt.createTextCursor()
# use it to select the pattern
cursor.collapseToStart()
cursor.goRight(pos, False)
cursor.goRight(len(pattern), True)
# remove the pattern from text
cursor.String = ''
# insert the field at cursor's position
cursor.collapseToStart()
oTxt.insertTextContent(cursor, oField, False)
# next occurrence of pattern
pos = oTxt.String.find(pattern)
repl = True
return repl
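# Usage sketch (assumes a Writer document; the pattern and the field
# service name below are illustrative):
#   oDoc = getDocument()
#   oField = oDoc.createInstance('com.sun.star.text.textfield.DateTime')
#   replacePatternWithField(oDoc.getText(), '[DATA]', oField)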
|
TrimBiggs/calico
|
calico/test/test_calcollections.py
|
Python
|
apache-2.0
| 5,048
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2015 Metaswitch Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
calico.test.test_calcollections
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test for collections library.
"""
import logging
from mock import Mock, call, patch
from calico.calcollections import SetDelta, MultiDict
from unittest2 import TestCase
_log = logging.getLogger(__name__)
class TestSetDelta(TestCase):
def setUp(self):
self.set = set("abc")
self.delta = SetDelta(self.set)
def test_add(self):
self.delta.add("c")
self.delta.add("d")
# Only "d" added, "c" was already present.
self.assertEqual(self.delta.added_entries, set(["d"]))
# Now apply, should mutate the set.
self.assertEqual(self.set, set("abc"))
self.delta.apply_and_reset()
self.assertEqual(self.set, set("abcd"))
self.assertEqual(self.delta.added_entries, set())
def test_remove(self):
self.delta.remove("c")
self.delta.remove("d")
# Only "c" added, "d" was already missing.
self.assertEqual(self.delta.removed_entries, set(["c"]))
# Now apply, should mutate the set.
self.assertEqual(self.set, set("abc"))
self.delta.apply_and_reset()
self.assertEqual(self.set, set("ab"))
self.assertEqual(self.delta.removed_entries, set())
def test_add_and_remove(self):
self.delta.add("c") # No-op, already present.
self.delta.add("d") # Put in added set.
self.delta.add("e") # Will remain in added set.
self.delta.remove("c") # Recorded in remove set.
self.delta.remove("d") # Cancels the pending add only.
self.delta.remove("f") # No-op.
self.assertEqual(self.delta.added_entries, set("e"))
self.assertEqual(self.delta.removed_entries, set("c"))
self.delta.apply_and_reset()
self.assertEqual(self.set, set("abe"))
def test_size(self):
self.assertTrue(self.delta.empty)
self.assertEqual(self.delta.resulting_size, 3)
self.delta.add("c") # No-op, already present.
self.assertEqual(self.delta.resulting_size, 3)
self.delta.add("d") # Put in added set.
self.assertEqual(self.delta.resulting_size, 4)
self.delta.add("e") # Will remain in added set.
self.assertEqual(self.delta.resulting_size, 5)
self.delta.remove("c") # Recorded in remove set.
self.assertEqual(self.delta.resulting_size, 4)
self.delta.remove("d") # Cancels the pending add only.
self.assertEqual(self.delta.resulting_size, 3)
self.delta.remove("f") # No-op.
self.assertEqual(self.delta.resulting_size, 3)
class TestMultiDict(TestCase):
def setUp(self):
super(TestMultiDict, self).setUp()
self.index = MultiDict()
def test_add_single(self):
self.index.add("k", "v")
self.assertTrue(self.index.contains("k", "v"))
self.assertEqual(set(self.index.iter_values("k")),
set(["v"]))
def test_add_remove_single(self):
self.index.add("k", "v")
self.index.d
|
iscard("k", "v")
self.assertFalse(self.index.contains("k", "v"))
self.assertEqual(self.index._index, {})
def test_empty(self):
self.assertFalse(bool(self.index))
self.assertEqual(self.index.num_items("k"), 0)
self.assertEqual(list(self.index.iter_values("k")), [])
def test_add_multiple(self):
self.index.add("k", "v")
self.assertTrue(bool(self.index))
|
self.assertEqual(self.index.num_items("k"), 1)
self.index.add("k", "v")
self.assertEqual(self.index.num_items("k"), 1)
self.index.add("k", "v2")
self.assertEqual(self.index.num_items("k"), 2)
self.index.add("k", "v3")
self.assertEqual(self.index.num_items("k"), 3)
self.assertIn("k", self.index)
self.assertNotIn("k2", self.index)
self.assertTrue(self.index.contains("k", "v"))
self.assertTrue(self.index.contains("k", "v2"))
self.assertTrue(self.index.contains("k", "v3"))
self.assertEqual(self.index._index, {"k": set(["v", "v2", "v3"])})
self.assertEqual(set(self.index.iter_values("k")),
set(["v", "v2", "v3"]))
self.index.discard("k", "v")
self.index.discard("k", "v2")
self.assertTrue(self.index.contains("k", "v3"))
self.index.discard("k", "v3")
self.assertEqual(self.index._index, {})
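# Usage sketch of SetDelta, matching the behaviour exercised above: changes
# are batched and the underlying set is mutated once, in apply_and_reset().
#   s = set("ab")
#   d = SetDelta(s)
#   d.add("c"); d.remove("a")
#   d.apply_and_reset()    # s is now {"b", "c"}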
|
fabioz/PyDev.Debugger
|
tests_python/resources/_pydev_coverage_cyrillic_encoding_py3.py
|
Python
|
epl-1.0
| 135
| 0.014815
|
# -*-
|
coding: iso-8859-5 -*-
# ВГДЕЖ
class DummyВГДЕЖ(object):
    def Print(self):
        print ('ВГДЕЖ')
Dummy
|
ВГДЕЖ().Print()
|
sql-machine-learning/sqlflow
|
python/runtime/dbapi/pyalisa/task.py
|
Python
|
apache-2.0
| 3,699
| 0
|
# Copyright 2020 The SQLFlow Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
import sys
import time
from runtime.dbapi.pyalisa.client import AlisaTaksStatus, Client
# polling interval (seconds) while waiting for a task to complete
WAIT_INTERVAL_SEC = 2
# batch size used when reading results after a task completes
READ_RESULTS_BATCH = 20
class Task(object): # noqa: R0205
"""Task encapsulates operations to submit the alisa task.
Args:
config(Config): the config for building the task
"""
def __init__(self, config):
self.config = config
self.cli = Client(config)
def exec_sql(self, code, output=sys.stdout, resultful=False):
"""submit the sql statements to alisa server, write the logs to output
Args:
code: sql stateme
|
nts
            resultful: whether the statements produce a result set
            output: a file-like object such as sys.stdout
"""
task_id, status = self.cli.create_sql_task(code)
return self._tracking(task_id, status, output, resultful)
def exec_pyodps(self, code, args, output=sys.stdout):
|
"""submit the python code to alisa server, write the logs to output
Args:
code: python code
args: args for python code
output: such as sys.stdout
"""
task_id, status = self.cli.create_pyodps_task(code, args)
return self._tracking(task_id, status, output, False)
def _tracking(self, task_id, status, output, resultful):
return self._tracking_with_log(
task_id, status, output,
resultful) if self.config.verbose else self._tracking_quietly(
task_id, status, resultful)
def _tracking_with_log(self, task_id, status, output, resultful):
log_idx = 0
while not self.cli.completed(status):
if status in (AlisaTaksStatus.ALISA_TASK_WAITING,
AlisaTaksStatus.ALISA_TASK_ALLOCATE):
output.write('waiting for resources')
            elif status == AlisaTaksStatus.ALISA_TASK_RUNNING and log_idx >= 0:
                # assumption: read_logs returns the next log offset,
                # which goes negative once all logs have been consumed
                log_idx = self.cli.read_logs(task_id, log_idx, output)
            time.sleep(WAIT_INTERVAL_SEC)
status = self.cli.get_status(task_id)
if status == AlisaTaksStatus.ALISA_TASK_EXPIRED:
output.write('timeout while waiting for resources')
        else:
            # log_idx >= 0 here: read the remaining logs
            log_idx = self.cli.read_logs(task_id, log_idx, output)
            # log_idx < 0 once all logs have been read
if status == AlisaTaksStatus.ALISA_TASK_COMPLETED:
return self.cli.get_results(
task_id, READ_RESULTS_BATCH) if resultful else []
raise Exception('task={}, invalid status={}'.format(task_id, status))
def _tracking_quietly(self, task_id, status, resultful):
while not self.cli.completed(status):
            time.sleep(WAIT_INTERVAL_SEC)
status = self.cli.get_status(task_id)
if status != AlisaTaksStatus.ALISA_TASK_COMPLETED:
raise Exception(
'task({}) status is {} which means incompleted.'.format(
task_id, status))
if resultful:
return self.cli.get_results(task_id, READ_RESULTS_BATCH)
return []
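# Usage sketch (the config construction is illustrative; see
# runtime.dbapi.pyalisa for the actual Config class):
#   task = Task(config)
#   rows = task.exec_sql('SELECT 1;', resultful=True)
#   task.exec_pyodps(python_code, args)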
|
google/pymql
|
test/type_link_test.py
|
Python
|
apache-2.0
| 36,235
| 0.001711
|
#!/usr/bin/python2.6
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitati
|
ons under the License.
# -*- coding: utf-8 -*-
#
"""type link."""
__author__ = 'bneutra@google.com (Brendan Neutra)'
# thanks warren for these dimetests
import google3
from pymql.mql import error
from pymql.test import mql_fixture
class MQLTest(mql_fixture.MQLTest):
"""type link tests."""
def setUp(self):
self.SetMockPath('data/type_link.yaml')
super(MQLTest, self).setUp()
se
|
lf.env = {'as_of_time': '2010-05-01'}
def testLinkMasterProperty(self):
"""link:null (master_property) of obj property."""
query = """
{
"/people/person/place_of_birth": {
"link": null,
"id": null
},
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/place_of_birth": {
"link": "/people/person/place_of_birth",
"id": "/en/duluth"
},
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testLinkMasterValueProperty(self):
"""link:null (master_property) of value property."""
query = """
{
"/people/person/date_of_birth": {
"link": null,
"value": null
},
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/date_of_birth": {
"link": "/people/person/date_of_birth",
"value": "1941-05-24"
},
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testReadTypeLinkMasterPropertyOfObjProperty(self):
"""read /type/link/master_property of obj property."""
query = """
{
"/people/person/place_of_birth": {
"link": {
"master_property": null
},
"id": null
},
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/place_of_birth": {
"link": {
"master_property": "/people/person/place_of_birth"
},
"id": "/en/duluth"
},
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testReadTypeLinkTypeOfObjProperty(self):
"""read /type/link/type of obj property."""
query = """
{
"/people/person/place_of_birth": {
"link": {
"type": null
},
"id": null
},
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/place_of_birth": {
"link": {
"type": "/type/link"
},
"id": "/en/duluth"
},
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testReadTypeLinkReverseOfObjProperty(self):
"""read /type/link/reverse of obj property."""
query = """
{
"/people/person/place_of_birth": {
"link": {
"reverse": null
},
"id": null
},
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/place_of_birth": {
"link": {
"reverse": false
},
"id": "/en/duluth"
},
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testReadTypeLinkAttributionOfObjProperty(self):
"""read /type/link/attribution of obj property."""
query = """
{
"/people/person/place_of_birth": {
"link": {
"attribution": null
},
"id": null
},
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/place_of_birth": {
"link": {
"attribution": "/user/cvolkert"
},
"id": "/en/duluth"
},
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testReadTypeLinkCreatorOfObjProperty(self):
"""read /type/link/creator of obj property."""
query = """
{
"/people/person/place_of_birth": {
"link": {
"creator": null
},
"id": null
},
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/place_of_birth": {
"link": {
"creator": "/user/cvolkert"
},
"id": "/en/duluth"
},
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testReadTypeLinkTimestampOfObjProperty(self):
"""read /type/link/timestamp of obj property."""
query = """
{
"/people/person/place_of_birth": {
"link": {
"timestamp": null
},
"id": null
},
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/place_of_birth": {
"link": {
"timestamp": "2007-10-23T09:07:43.0024Z"
},
"id": "/en/duluth"
},
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testReadTypeLinkSourceOfObjProperty(self):
"""read /type/link/source of obj property."""
query = """
{
"/people/person/place_of_birth": {
"link": {
"source": null
},
"id": null
},
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/place_of_birth": {
"link": {
"source": "Bob Dylan"
},
"id": "/en/duluth"
},
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testReadTypeLinkTargetOfObjProperty(self):
"""read /type/link/target of obj property."""
query = """
{
"/people/person/place_of_birth": {
"link": {
"target": null
},
"id": null
},
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/place_of_birth": {
"link": {
"target": "Duluth"
},
"id": "/en/duluth"
},
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testReadTypeLinkTargetOfObjArrayProperty(self):
"""read /type/link/target of obj array property."""
query = """
{
"/people/person/children": [{
"link": {
"source": [
{
"id": null
}
]
},
"id": null
}],
"id": "/en/bob_dylan"
}
"""
exp_response = """
{
"/people/person/children": [
{
"id": "/en/jakob_dylan",
"link": {
"source": [{
"id": "/en/jakob_dylan"
}]
}
},
{
"id": "/en/jesse_dylan",
"link": {
"source": [{
"id": "/en/jesse_dylan"
}]
}
},
{
"id": "/en/desiree_gabrielle_dennis_dylan",
"link": {
"source": [{
"id": "/en/desiree_gabrielle_dennis_dylan"
}]
}
},
{
"id": "/en/maria_dylan",
"link": {
"source": [{
"id": "/en/maria_dylan"
}]
}
},
{
"id": "/en/sam_dylan",
"link": {
"source": [{
"id": "/en/sam_dylan"
}]
}
},
{
"id": "/en/anna_dylan",
"link": {
"source": [{
"id": "/en/anna_dylan"
}]
}
}
],
"id": "/en/bob_dylan"
}
"""
self.DoQuery(query, exp_response=exp_response)
def testReadTypeLinkTargetOfValueProperty(self):
"""read /type/link/target of value property."""
query = """
{
"/people/person/date_of_birth": {
"link": {
"target": null
},
|
zeyuanxy/leet-code
|
vol4/number-of-islands/number-of-islands.py
|
Python
|
mit
| 851
| 0.00235
|
class Solution(object):
    def search(self, grid, x, y, s):
        # depth-first flood fill: add every land cell reachable from
        # (x, y) to the visited set s
        if grid[x][y] == '0' or (x, y) in s:
            return s
        s.add((x, y))
if x - 1 >= 0:
s = self.search(grid, x - 1, y, s)
if x + 1 < len(grid):
s = self.search(grid, x + 1, y, s)
if y - 1 >= 0:
|
s = self.search(grid, x, y - 1, s)
if y + 1 < len(grid[0]):
s = self.search(grid, x, y + 1, s)
return s
def numIslands(self, grid):
"""
:type grid: List[List[str]]
:rtype: int
"
|
""
ans = 0
s = set()
for x in range(len(grid)):
for y in range(len(grid[0])):
if grid[x][y] == '1' and (x, y) not in s:
ans += 1
s = self.search(grid, x, y, s)
return ans
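# Example:
#   grid = [['1', '1', '0'],
#           ['0', '1', '0'],
#           ['0', '0', '1']]
#   Solution().numIslands(grid)    # -> 2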
|
bobpoekert/tornado-threadpool
|
tests.py
|
Python
|
mit
| 2,117
| 0.001417
|
import thread_pool
from tornado.testing import AsyncTestCase
from unittest import TestCase
import time
from tornado.ioloop import IOLoop
class ThreadPoolTestCase(AsyncTestCase):
def tearDown(self):
thread_pool.thread_pool = thread_pool.ThreadPool()
def test_run(self):
def callback():
self.stop()
thread_pool.thread_pool.run(callback)
self.wait(timeout=0.2)
@thread_pool.in_thread_pool
def sleep(self):
time.sleep(0.1)
self.stop()
def test_in_thread_pool(self):
start = time.time()
self.sleep()
self.assertLess(time.time(), start + 0.1)
self.wait()
self.assertGreater(time.time(), start + 0.1)
def test_in_ioloop(self):
self.done = False
self._test_in_ioloop()
IOLoop.instance().start()
self.assertTrue(self.done)
@thread_pool.in_thread_pool
def _test_in_ioloop(self):
time.sleep(0.1)
self._test_in_ioloop_2()
@thread_pool.in_ioloop
def _test_in_ioloop_2(self):
self.done = True
IOLoop.instance
|
().stop()
def test_blocking_war
|
n(self):
self._fired_warning = False
thread_pool.blocking_warning = self.warning_fired
self.blocking_method()
self.assertTrue(self._fired_warning)
@thread_pool.blocking
def blocking_method(self):
time.sleep(0.1)
def warning_fired(self, fn):
self._fired_warning = True
class TheadPoolDoSTestCase(TestCase):
def tearDown(self):
thread_pool.thread_pool = thread_pool.ThreadPool()
def setUp(self):
self.entered = 0
self.exited = 0
def exit(self):
time.sleep(0.01)
self.exited += 1
def test_DoS(self):
for i in xrange(100):
self.entered += 1
thread_pool.thread_pool.run(self.exit)
time.sleep(0.5)
self.assertEqual(self.entered, self.exited)
time.sleep(1)
self.assertEqual(len(thread_pool.thread_pool.threads), 0)
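# The decorators exercised above, in brief:
#   @thread_pool.in_thread_pool    # run the method on a worker thread
#   @thread_pool.in_ioloop         # run the method back on the IOLoop thread
#   @thread_pool.blocking          # mark a blocking method; fires blocking_warning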
|
Jian-Zhan/customarrayformatter
|
openpyxl/workbook.py
|
Python
|
mit
| 8,298
| 0.000603
|
# file openpyxl/workbook.py
# Copyright (c) 2010-2011 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: see AUTHORS file
"""Workbook is the top-level container for all document information."""
__docformat__ = "restructuredtext en"
# Python stdlib imports
import datetime
import os
import threading
# package imports
from openpyxl.worksheet import Worksheet
from openpyxl.writer.dump_worksheet import DumpWorksheet, save_dump
from openpyxl.writer.strings import StringTableBuilder
from openpyxl.namedrange import NamedRange
from openpyxl.style import Style
from openpyxl.writer.excel import save_workbook
from openpyxl.shared.exc import ReadOnlyWorkbookException
from openpyxl.shared.date_time import CALENDAR_WINDOWS_1900, CALENDAR_MAC_1904
from openpyxl.shared.xmltools import fromstring
from openpyxl.shared.ooxml import NAMESPACES, SHEET_MAIN_NS
class DocumentProperties(object):
"""High-level properties of the document."""
def __init__(self):
self.creator = 'Unknown'
self.last_modified_by = self.creator
self.created = datetime.datetime.now()
self.modified = datetime.datetime.now()
self.title = 'Untitled'
self.subject = ''
self.description = ''
self.keywords = ''
self.category = ''
self.company = 'Microsoft Corporation'
self.excel_base_date = CALENDAR_WINDOWS_1900
class DocumentSecurity(object):
"""Security information about the document."""
def __init__(self):
self.lock_revision = False
self.lock_structure = False
self.lock_windows = False
self.revision_password = ''
self.workbook_password = ''
class Workbook(object):
"""Workbook is the container for all other parts of the document."""
def __init__(self, optimized_write=False, encoding='utf-8',
worksheet_class=Worksheet,
optimized_worksheet_class=DumpWorksheet,
guess_types=True):
self.worksheets = []
self._active_sheet_index = 0
self._named_ranges = []
self.properties = DocumentProperties()
self.style = Style()
self.security = DocumentSecurity()
self.__optimized_write = optimized_write
self.__optimized_read = False
self.__thread_local_data = threading.local()
self.strings_table_builder = StringTableBuilder()
self.loaded_theme = None
self._worksheet_class = worksheet_class
self._optimized_worksheet_class = optimized_worksheet_class
self.vba_archive = None
self.style_properties = None
self._guess_types = guess_types
self.encoding = encoding
if not optimized_write:
self.worksheets.append(self._worksheet_class(parent_workbook=self))
def read_workbook_settings(self, xml_source):
root = fromstring(xml_source)
view = root.find('*/' '{%s}workbookView' % SHEET_MAIN_NS)
if 'activeTab' in view.attrib:
self._active_sheet_index = int(view.attrib['activeTab'])
@property
def _local_data(self):
return self.__thread_local_data
@property
def excel_base_date(self):
return self.properties.excel_base_date
def _set_optimized_read(self):
self.__optimized_read = True
def get_active_sheet(self):
"""Returns the current active sheet."""
return self.worksheets[self._active_sheet_index]
def create_sheet(self, index=None, title=None):
"""Create a worksheet (at an optional index).
:param index: optional position at which the sheet will be inserted
:type index: int
"""
if self.__optimized_read:
raise ReadOnlyWorkbookException('Cannot create new sheet in a read-only workbook')
        if self.__optimized_write:
new_ws = self._optimized_worksheet_class(
parent_workbook=self, title=title)
else:
if title is not None:
new_ws = self._worksheet_class(
parent_workbook=self, title=title)
else:
new_ws = self._worksheet_class(parent_workbook=self)
self.add_sheet(worksheet=new_ws, index=index)
return new_ws
def add_sheet(self, worksheet, index=None):
"""Add an existing worksheet (at an optional index)."""
assert isinstance(worksheet, self._worksheet_class), "The parameter you have given is not of the type '%s'" % self._worksheet_class.__name__
if index is None:
index = len(self.worksheets)
self.worksheets.insert(index, worksheet)
def remove_sheet(self, worksheet):
"""Remove a worksheet from this workbook."""
self.worksheets.remove(worksheet)
def get_sheet_by_name(self, name):
"""Returns a worksheet by its name.
Returns None if no worksheet has the name specified.
:param name: the name of the worksheet to look for
:type name: string
"""
requested_sheet = None
for sheet in self.worksheets:
if sheet.title == name:
requested_sheet = sheet
break
return requested_sheet
def get_index(self, worksheet):
"""Return the index of the worksheet."""
return self.worksheets.index(worksheet)
def get_sheet_names(self):
"""Returns the list of the names of worksheets in the workbook.
Names are returned in the worksheets order.
:rtype: list of strings
"""
return [s.title for s in self.worksheets]
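    # Usage sketch (sheet titles are illustrative):
    #   wb = Workbook()
    #   ws = wb.create_sheet(title='Data')
    #   wb.get_sheet_by_name('Data') is ws    # True
    #   wb.get_sheet_names()                  # e.g. ['Sheet', 'Data']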
def create_named_range(self, name, worksheet, range, scope=None):
"""Create a new named_range on a worksheet"""
assert isinstance(worksheet, self._worksheet_class)
named_range = NamedRange(name, [(worksheet, range)], scope)
self.add_named_range(named_range)
def get_named_ranges(self):
"""Return all named ranges"""
return self._named_ranges
def add_named_range(self, named_range):
"""Add an existing named_
|
range to the list of named_ranges."""
self._named_ranges.append(named_range)
def get_named_range(self, name):
"""Return the range specified by name."""
requested_range = None
for named_range in self._named_ranges:
if named_range.name == name:
requested_range = named_range
break
|
return requested_range
def remove_named_range(self, named_range):
"""Remove a named_range from this workbook."""
self._named_ranges.remove(named_range)
def save(self, filename):
"""Save the current workbook under the given `filename`.
Use this function instead of using an `ExcelWriter`.
.. warning::
When creating your workbook using `optimized_write` set to True,
you will only be able to call this function once. Subsequents attempts to
modify or save the file will raise an :class:`openpyxl.shared.exc.WorkbookAlreadySaved` exception.
"""
if
|
mambocab/cassandra
|
pylib/cqlshlib/cql3handling.py
|
Python
|
apache-2.0
| 55,793
| 0.001667
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .cqlhandling import CqlParsingRuleSet, Hint
from cassandra.metadata import maybe_escape_name
simple_cql_types = set(('ascii', 'bigint', 'blob', 'boolean', 'counter', 'date', 'decimal', 'double', 'duration', 'float',
'inet', 'int', 'smallint', 'text', 'time', 'timestamp', 'timeuuid', 'tinyint', 'uuid', 'varchar', 'varint'))
simple_cql_types.difference_update(('set', 'map', 'list'))
from . import helptopics
cqldocs = helptopics.CQL3HelpTopics()
class UnexpectedTableStructure(UserWarning):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return 'Unexpected table structure; may not translate correctly to CQL. ' + self.msg
SYSTEM_KEYSPACES = ('system', 'system_schema', 'system_traces', 'system_auth', 'system_distributed')
NONALTERBALE_KEYSPACES = ('system', 'system_schema')
class Cql3ParsingRuleSet(CqlParsingRuleSet):
columnfamily_layout_options = (
('bloom_filter_fp_chance', None),
('comment', None),
('dclocal_read_repair_chance', 'local_read_repair_chance'),
('gc_grace_seconds', None),
('min_index_interval', None),
('max_index_interval', None),
('read_repair_chance', None),
('default_time_to_live', None),
('speculative_retry', None),
('memtable_flush_period_in_ms', None),
('cdc', None)
)
columnfamily_layout_map_options = (
# (CQL3 option name, schema_columnfamilies column name (or None if same),
# list of known map keys)
('compaction', 'compaction_strategy_options',
('class', 'max_threshold', 'tombstone_compaction_interval', 'tombstone_thresho
|
ld', 'enabled', 'unchecked_tombstone_compaction', 'only_purge_repaired_tombstones')),
('compression', 'compression_parameters',
('sstable_compression', 'chunk_length_kb', 'crc_check_chance')),
('caching', None,
('rows_per_p
|
artition', 'keys')),
)
obsolete_cf_options = ()
consistency_levels = (
'ANY',
'ONE',
'TWO',
'THREE',
'QUORUM',
'ALL',
'LOCAL_QUORUM',
'EACH_QUORUM',
'SERIAL'
)
size_tiered_compaction_strategy_options = (
'min_sstable_size',
'min_threshold',
'bucket_high',
'bucket_low'
)
leveled_compaction_strategy_options = (
'sstable_size_in_mb',
'fanout_size'
)
date_tiered_compaction_strategy_options = (
'base_time_seconds',
'max_sstable_age_days',
'min_threshold',
'max_window_size_seconds',
'timestamp_resolution'
)
time_window_compaction_strategy_options = (
'compaction_window_unit',
'compaction_window_size',
'min_threshold',
'timestamp_resolution'
)
@classmethod
def escape_value(cls, value):
if value is None:
return 'NULL' # this totally won't work
if isinstance(value, bool):
value = str(value).lower()
elif isinstance(value, float):
return '%f' % value
elif isinstance(value, int):
return str(value)
return "'%s'" % value.replace("'", "''")
@classmethod
def escape_name(cls, name):
if name is None:
return 'NULL'
return "'%s'" % name.replace("'", "''")
@staticmethod
def dequote_name(name):
name = name.strip()
if name == '':
return name
if name[0] == '"' and name[-1] == '"':
return name[1:-1].replace('""', '"')
else:
return name.lower()
@staticmethod
def dequote_value(cqlword):
cqlword = cqlword.strip()
if cqlword == '':
return cqlword
if cqlword[0] == "'" and cqlword[-1] == "'":
cqlword = cqlword[1:-1].replace("''", "'")
return cqlword
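    # Examples (sketch):
    #   Cql3ParsingRuleSet.escape_value("it's")         -> "'it''s'"
    #   Cql3ParsingRuleSet.dequote_name('"My""Table"')  -> 'My"Table'
    #   Cql3ParsingRuleSet.dequote_name('MyTable')      -> 'mytable'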
CqlRuleSet = Cql3ParsingRuleSet()
# convenience for remainder of module
completer_for = CqlRuleSet.completer_for
explain_completion = CqlRuleSet.explain_completion
dequote_value = CqlRuleSet.dequote_value
dequote_name = CqlRuleSet.dequote_name
escape_value = CqlRuleSet.escape_value
# BEGIN SYNTAX/COMPLETION RULE DEFINITIONS
syntax_rules = r'''
<Start> ::= <CQL_Statement>*
;
<CQL_Statement> ::= [statements]=<statementBody> ";"
;
# the order of these terminal productions is significant:
<endline> ::= /\n/ ;
JUNK ::= /([ \t\r\f\v]+|(--|[/][/])[^\n\r]*([\n\r]|$)|[/][*].*?[*][/])/ ;
<stringLiteral> ::= <quotedStringLiteral>
| <pgStringLiteral> ;
<quotedStringLiteral> ::= /'([^']|'')*'/ ;
<pgStringLiteral> ::= /\$\$(?:(?!\$\$).)*\$\$/;
<quotedName> ::= /"([^"]|"")*"/ ;
<float> ::= /-?[0-9]+\.[0-9]+/ ;
<uuid> ::= /[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/ ;
<blobLiteral> ::= /0x[0-9a-f]+/ ;
<wholenumber> ::= /[0-9]+/ ;
<identifier> ::= /[a-z][a-z0-9_]*/ ;
<colon> ::= ":" ;
<star> ::= "*" ;
<endtoken> ::= ";" ;
<op> ::= /[-+=%/,().]/ ;
<cmp> ::= /[<>!]=?/ ;
<brackets> ::= /[][{}]/ ;
<integer> ::= "-"? <wholenumber> ;
<boolean> ::= "true"
| "false"
;
<unclosedPgString>::= /\$\$(?:(?!\$\$).)*/ ;
<unclosedString> ::= /'([^']|'')*/ ;
<unclosedName> ::= /"([^"]|"")*/ ;
<unclosedComment> ::= /[/][*].*$/ ;
<term> ::= <stringLiteral>
| <integer>
| <float>
| <uuid>
| <boolean>
| <blobLiteral>
| <collectionLiteral>
| <functionLiteral> <functionArguments>
| "NULL"
;
<functionLiteral> ::= (<identifier> ( "." <identifier> )?)
| "TOKEN"
;
<functionArguments> ::= "(" ( <term> ( "," <term> )* )? ")"
;
<tokenDefinition> ::= token="TOKEN" "(" <term> ( "," <term> )* ")"
| <term>
;
<cident> ::= <quotedName>
| <identifier>
| <unreservedKeyword>
;
<colname> ::= <cident> ; # just an alias
<collectionLiteral> ::= <listLiteral>
| <setLiteral>
| <mapLiteral>
;
<listLiteral> ::= "[" ( <term> ( "," <term> )* )? "]"
;
<setLiteral> ::= "{" ( <term> ( "," <term> )* )? "}"
;
<mapLiteral> ::= "{" <term> ":" <term> ( "," <term> ":" <term> )* "}"
;
<anyFunctionName> ::= ( ksname=<cfOrKsName> dot="." )? udfname=<cfOrKsName> ;
<userFunctionName> ::= ( ksname=<nonSystemKeyspaceName> dot="." )? udfname=<cfOrKsName> ;
<refUserFunctionName> ::= udfname=<cfOrKsName> ;
<userAggregateName> ::= ( ksname=<nonSystemKeyspaceName> dot="." )? udaname=<cfOrKsName> ;
<functionAggregateName> ::= ( ksname=<nonSystemKeyspaceName> dot="." )? functionname=<cfOrKsName> ;
<aggregateName> ::= <userAggregateName>
;
<functionName> ::= <functionAggregateName>
| "TOKEN"
;
<statementBody> ::= <useStatement>
| <selectStatement>
| <dataChangeStatement>
| <schemaChangeStatement>
| <authenticationStatement>
| <authorizationStatement>
;
<dataChangeStatement> ::= <insertStatement>
| <updateStatement>
|
manjitkumar/drf-url-filters
|
example_app/serializers.py
|
Python
|
mit
| 525
| 0
|
# django-drf imports
from rest_framework import serializers
# app level imports
from .models import Player, Team
class PlayerSerializer(serializers.ModelSerializer):
class Meta:
model = Player
fields = (
'id', 'name', 'rating', 'teams',
'install_ts', 'update_ts'
)
class TeamSerializer(serializers.ModelSerializer):
clas
|
s Meta:
model = Team
fields = (
'id', 'name', 'rating', 'players',
'i
|
nstall_ts', 'update_ts'
)
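# Usage sketch:
#   PlayerSerializer(player).data
#   # -> {'id': ..., 'name': ..., 'rating': ..., 'teams': [...],
#   #     'install_ts': ..., 'update_ts': ...}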
|
openqt/algorithms
|
leetcode/python/ac/lc003-longest-substring-without-repeating-characters.py
|
Python
|
gpl-3.0
| 1,545
| 0.000647
|
# coding=utf-8
import unittest
"""3. Longest Substring Without Repeating Characters
https://leetcode.com/problems/longest-substring-without-repeating-characters/description/
Given a string, find the length of the **longest substring** without repeating
characters.
**Examples:**
Given `"abcabcbb"`, the answer is `"abc"`, which the length is 3.
Given `"bbbbb"`, the answer is `"b"`, with the length of 1.
Given `"pwwkew"`, the answer is `"wke"`, with the length of 3. Note that the
answer must be a **substring** , `"pwke"` is a _subsequence_ and not a
substring.
Similar Questions:
Longest Substring with At Most Two Distinct Characters (longest-substring-with-at-most-two-distinct-characters)
"""
class Solution(unittest.TestCase):
def lengthOfLongestSubstring(self, s):
"""
:type s: str
:rtype: int
"""
        cache = {}    # char -> index within the current candidate window
        val, pos = 0, 0
        while pos < len(s):
            if s[pos] in cache:
                # repeat found: record the window length and restart the
                # scan just after the first occurrence of the repeat
                pos = cache[s[pos]] + 1
                val = max(val, len(cache))
                cache.clear()
else:
cache[s[pos]] = pos
pos += 1
val = max(val, len(cache))
return val
def test(self):
self.assertEqual(self.lengthOfLongestSubstring("abcabcbb"), 3)
self.assertEqual(self.lengthOfL
|
ongestSubstring("bbbbb"), 1)
self.assertEqual(self.lengthOfLongestSubstring("pwwkew"), 3)
self.assertEqual(s
|
elf.lengthOfLongestSubstring("c"), 1)
if __name__ == "__main__":
unittest.main()
|
blake-sheridan/py
|
test/test_grammar.py
|
Python
|
apache-2.0
| 200
| 0.005
|
import unittest
from b.grammar import Parser
|
class ParserTests(unittest.TestCase):
def test_parse(self):
p = Parser()
p.parse('123 "thing
|
s"')
raise NotImplementedError
|
qmagico/gae-migrations
|
tests/my/migrations_pau_na_migration/migration_paunamigration_0001.py
|
Python
|
mit
| 414
| 0.007246
|
from my.models import QueD
|
oidura
# Optional. Returns how many
|
migrations should be run per task (default = 1000)
MIGRATIONS_PER_TASK = 2
# Friendly description of this database change
DESCRIPTION = 'multiplies by 2'
def get_query():
""" Retorna um objeto query das coisas que precisam ser migradas """
return QueDoidura.query()
def migrate_one(entity):
entity.v2 = entity.v1 * 2
entity.put()
|
ianastewart/cwltc-admin
|
pos/services.py
|
Python
|
mit
| 8,750
| 0.002058
|
import datetime
import logging
from decimal import Decimal
from django.db import transaction
from django.http import HttpResponse
from openpyxl import Workbook
from openpyxl.utils import get_column_letter
from openpyxl.styles import Font
from .models import Transaction, LineItem, Layout, PosPayment, Item, Location, TWO_PLACES
from members.models import InvoiceItem, ItemType
from pos.models import VisitorBook
stdlogger = logging.getLogger(__name__)
class Error(Exception):
"""
Base class for exceptions in this module
"""
pass
class PosServicesError(Error):
"""
Error while processing payment
"""
def __init__(self, message):
self.message = message
@transaction.atomic
def create_transaction_from_receipt(
creator_id, terminal, layout_id, receipt,
|
total, people, attended, creation_date=None
):
"""
Create Transaction, LineItem and PosPayment records in the database
Return a description of it
"""
try:
complimentary = False
count = len(people)
dec_total = (Decimal(total) / 100).quantize(TWO_PLACES)
item_type = Layout.objects.get(pk=layout_id).item_type
if count > 0:
person_id = int(people[0]["id"])
if person_id == -1:
|
complimentary = True
person_id = None
else:
person_id = None
trans = Transaction(
creation_date=creation_date,
creator_id=creator_id,
person_id=person_id,
terminal=terminal,
item_type=item_type,
total=dec_total,
billed=Transaction.BilledState.UNBILLED.value,
            cash=person_id is None and not complimentary,
complimentary=complimentary,
split=count > 1,
attended=attended,
)
trans.save()
for item_dict in receipt:
line_item = LineItem(
item_id=item_dict["id"],
sale_price=Decimal(item_dict["sale_price"]).quantize(TWO_PLACES),
cost_price=Decimal(item_dict["cost_price"]).quantize(TWO_PLACES),
quantity=1,
transaction=trans,
)
line_item.save()
if complimentary:
return ("Complimentary", dec_total)
if trans.cash:
return ("Cash", dec_total)
pay_total = Decimal(0)
for person in people:
pos_payment = PosPayment(
transaction=trans,
person_id=person["id"],
billed=False,
total=(Decimal(person["amount"]) / 100).quantize(TWO_PLACES),
)
pay_total += pos_payment.total
pos_payment.save()
if pay_total != dec_total:
stdlogger.error(
f"ERROR: POS Transaction total: {dec_total} unequal to Payment total: {pay_total} Id: {trans.id}"
)
return (people[0]["name"], dec_total)
    except Exception as e:
        raise PosServicesError("Error creating transaction: {}".format(e))
def delete_billed_transactions(before_date):
"""
Delete transactions that have been billed and linked items and payments
"""
trans = Transaction.objects.filter(billed=Transaction.BilledState.BILLED.value, creation_date__lt=before_date)
count = trans.count()
trans.delete()
return count
def delete_billed_visitors(before_date):
"""
Delete visitor book entries that have been billed
"""
visitors = VisitorBook.objects.filter(billed=True, creation_date__lt=before_date)
count = visitors.count()
visitors.delete()
return count
def dump_items_to_excel(item_type_id):
""" https://djangotricks.blogspot.co.uk/2013/12/how-to-export-data-as-excel.html """
queryset = Item.objects.filter(item_type_id=item_type_id)
response = HttpResponse(content_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
response["Content-Disposition"] = "attachment; filename=Items.xlsx"
wb = Workbook()
ws = wb.active
ws.title = "Items"
row_num = 0
columns = [("Description", 40), ("Price", 10)]
for col_num in range(len(columns)):
c = ws.cell(row=row_num + 1, column=col_num + 1)
c.value = columns[col_num][0]
c.font = Font(sz=12, bold=True)
ws.column_dimensions[get_column_letter(col_num + 1)].width = columns[col_num][1]
for obj in queryset:
row_num += 1
row = [obj.description, obj.sale_price]
for col_num in range(len(row)):
c = ws.cell(row=row_num + 1, column=col_num + 1)
c.value = row[col_num]
if col_num == 1:
c.number_format = "£0.00"
wb.save(response)
return response
def dump_layout_to_excel(layout):
""" https://djangotricks.blogspot.co.uk/2013/12/how-to-export-data-as-excel.html """
array, items = build_pos_array(layout)
response = HttpResponse(content_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
response["Content-Disposition"] = "attachment; filename=Price list.xlsx"
wb = Workbook()
ws = wb.active
ws.title = "Price List"
widths = [10, 40, 10]
for col_num in range(len(widths)):
ws.column_dimensions[get_column_letter(col_num + 1)].width = widths[col_num]
c = ws.cell(row=1, column=2, value="Price List")
row_num = 2
for row in array:
for col_num in range(len(row)):
if col_num == 0:
if len(row[col_num]) > 2:
description = row[col_num][2]
ws.cell(row=row_num, column=1, value=description)
row_num += 1
else:
if len(row[col_num]) > 2:
item = row[col_num][2]
ws.cell(row=row_num, column=2, value=item.description)
c = ws.cell(row=row_num, column=3, value=item.sale_price)
c.number_format = "£0.00"
row_num += 1
wb.save(response)
return response
def export_pos(transactions, payments):
response = HttpResponse(content_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
response["Content-Disposition"] = "attachment; filename=POS data.xlsx"
wb = Workbook()
ws = wb.active
ws.title = "Transactions"
dump_transactions(ws, transactions)
ws = wb.create_sheet(title="Payments")
dump_payments(ws, payments)
wb.save(response)
return response
def dump_transactions(ws, transactions):
header = [
"Id",
"Date",
"Type",
"Person_id",
"Person",
"Total",
"Comp",
"Cash",
"Billed",
"Split",
"Attended",
"Terminal",
]
ws.append(header)
for trans in transactions:
row = [
trans.id,
trans.creation_date,
trans.item_type_id,
trans.person_id,
trans.person.fullname if trans.person_id else "",
trans.total,
trans.complimentary,
trans.cash,
trans.billed,
trans.split,
trans.attended,
trans.terminal,
]
ws.append(row)
def dump_payments(ws, payments):
header = ["Id", "Trans_id", "Person_id", "Person", "Total", "Billed"]
ws.append(header)
for p in payments:
row = [p.id, p.transaction.id, p.person_id, p.person.fullname if p.person_id else "", p.total, p.billed]
ws.append(row)
def build_pos_array(layout=None):
"""
Build an array of rows and columns
Col[0] is the description for a row
Cells will contain items
Returns the used items for managing the layout
"""
rows = []
for r in range(1, Location.ROW_MAX + 1):
cols = []
for c in range(0, Location.COL_MAX + 1):
cols.append([r, c])
rows.append(cols)
items = None
if layout: # true when managing a layout
locations = (
Location.objects.filter(layout_id=layout.id)
.order_by("row", "col")
.prefetch_related("item")
|
7kbird/chrome
|
net/PRESUBMIT.py
|
Python
|
bsd-3-clause
| 1,034
| 0.005803
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Chromium presubmit script for src/net.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
def GetPreferredTryMasters(project, change):
masters = {
'tryserver.chromium.linux': {
'linux_chromium_rel_swarming': set(['defaulttests']),
},
'tryserver.chromium.mac': {
'mac_chromium_rel_
|
swarming': set(['defaulttests']),
},
'tryserver.chromium.win': {
'win_chromium_rel_swarming': set(['defaulttests']),
}
}
# Changes that touch NSS files will likely need a corresponding OpenSSL edit.
# Conveniently, this one glob also matches _openssl.* changes too.
if any('nss' in f.LocalPath() for f in change.AffectedFiles()):
masters['tryserver.chromium.linux'].setdefault(
|
'linux_redux', set()).add('defaulttests')
return masters
|
jepio/JKalFilter
|
docs/conf.py
|
Python
|
gpl-2.0
| 8,011
| 0.00699
|
# -*- coding: utf-8 -*-
#
# JKal-Filter documentation build configuration file, created by
# sphinx-quickstart on Thu Jul 24 16:56:49 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration val
|
ues are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use
|
os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
autodoc_default_flags = ['members', 'private-members', 'special-members', 'show-inheritance']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'JKalFilter'
copyright = u'2014, jepio'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'JKal-Filterdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '12pt',
# Additional stuff for the LaTeX preamble.
    'preamble': r'\usepackage{amsmath} \usepackage{amssymb}',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'JKal-Filter.tex', u'JKal-Filter Documentation',
u'jepio', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'jkal-filter', u'JKal-Filter Documentation',
[u'jepio'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'JKal-Filter', u'JKal-Filter Documentation',
u'jepio', 'JKal-Filter', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
caesar2164/edx-platform
|
lms/djangoapps/shoppingcart/models.py
|
Python
|
agpl-3.0
| 91,861
| 0.003103
|
# pylint: disable=arguments-differ
""" Models for the shopping cart and assorted purchase types """
from collections import namedtuple
from datetime import datetime
from datetime import timedelta
from decimal import Decimal
import json
import analytics
from io import BytesIO
from django.db.models import Q, F
import pytz
import logging
import smtplib
import StringIO
import csv
from boto.exception import BotoServerError # this is a super-class of SESError and catches connection errors
from django.dispatch import receiver
from django.db import models
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.mail import send_mail
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _, ugettext_lazy
from django.db import transaction
from django.db.models import Sum, Count
from django.db.models.signals import post_save, post_delete
from django.core.urlresolvers import reverse
from model_utils.managers import InheritanceManager
from model_utils.models import TimeStampedModel
from django.core.mail.message import EmailMessage
from xmodule.modulestore.django import modulestore
from eventtracking import tracker
from courseware.courses import get_course_by_id
from config_models.models import ConfigurationModel
from course_modes.models import CourseMode
from edxmako.shortcuts import render_to_string
from student.models import CourseEnrollment, UNENROLL_DONE, EnrollStatusChange
from util.query import use_read_replica_if_available
from openedx.core.djangoapps.xmodule_django.models import CourseKeyField
from .exceptions import (
InvalidCartItem,
PurchasedCallbackException,
ItemAlreadyInCartException,
AlreadyEnrolledInCourseException,
CourseDoesNotExistException,
MultipleCouponsNotAllowedException,
InvalidStatusToRetire,
UnexpectedOrderItemStatus,
ItemNotFoundInCartException
)
from shoppingcart.pdf import PDFInvoice
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
log = logging.getLogger("shoppingcart")
ORDER_STATUSES = (
# The user is selecting what he/she wants to purchase.
('cart', 'cart'),
# The user has been sent to the external payment processor.
# At this point, the order should NOT be modified.
# If the user returns to the payment flow, he/she will start a new order.
('paying', 'paying'),
# The user has successfully purchased the items in the order.
('purchased', 'purchased'),
# The user's order has been refunded.
('refunded', 'refunded'),
# The user's order went through, but the order was erroneously left
# in 'cart'.
('defunct-cart', 'defunct-cart'),
# The user's order went through, but the order was erroneously left
# in 'paying'.
('defunct-paying', 'defunct-paying'),
)
# maps order statuses to their defunct states
ORDER_STATUS_MAP = {
'cart': 'defunct-cart',
'paying': 'defunct-paying',
}
# we need a tuple to represent the primary key of various OrderItem subclasses
OrderItemSubclassPK = namedtuple('OrderItemSubclassPK', ['cls', 'pk'])
class OrderTypes(object):
"""
This class specify purchase OrderTypes.
"""
PERSONAL = 'personal'
BUSINESS = 'business'
ORDER_TYPES = (
(PERSONAL, 'personal'),
(BUSINESS, 'business'),
)
class Order(models.Model):
"""
This is the model for an order. Before purchase, an Order and its related OrderItems are used
as the shopping cart.
FOR ANY USER, THERE SHOULD ONLY EVER BE ZERO OR ONE ORDER WITH STATUS='cart'.
"""
class Meta(object):
app_label = "shoppingcart"
user = models.ForeignKey(User, db_index=True)
currency = models.CharField(default="usd", max_length=8) # lower case ISO currency codes
status = models.CharField(max_length=32, default='cart', choices=ORDER_STATUSES)
purchase_time = models.DateTimeField(null=True, blank=True)
refunded_time = models.DateTimeField(null=True, blank=True)
# Now we store data needed to generate a reasonable receipt
# These fields only make sense after the purchase
bill_to_first = models.CharField(max_length=64, blank=True)
bill_to_last = models.CharField(max_length=64, blank=True)
bill_to_street1 = models.CharField(max_length=128, blank=True)
bill_to_street2 = models.CharField(max_length=128, blank=True)
bill_to_city = models.CharField(max_length=64, blank=True)
bill_to_state = models.CharField(max_length=8, blank=True)
bill_to_postalcode = models.CharField(max_length=16, blank=True)
bill_to_country = models.CharField(max_length=64, blank=True)
bill_to_ccnum = models.CharField(max_length=8, blank=True) # last 4 digits
bill_to_cardtype = models.CharField(max_length=32, blank=True)
# a JSON dump of the CC processor response, for completeness
processor_reply_dump = models.TextField(blank=True)
# bulk purchase registration code workflow billing details
company_name = models.CharField(max_length=255, null=True, blank=True)
company_contact_name = models.CharField(max_length=255, null=True, blank=True)
company_contact_email = models.CharField(max_length=255, null=True, blank=True)
    recipient_name = models.CharField(max_length=255, null=True, blank=True)
recipient_email = models.CharField(max_length=255, null=True, blank=True)
customer_reference_number = models.CharField(max_length=63, null=True, blank=True)
order_type = models.CharField(max_length=32, default='personal', choices=OrderTypes.ORDER_TYPES)
@classmethod
def get_cart_for_user(cls, user):
"""
Always use this to preserve the property that at most 1 order per user has status = 'cart'
"""
# find the newest element in the db
try:
cart_order = cls.objects.filter(user=user, status='cart').order_by('-id')[:1].get()
except ObjectDoesNotExist:
# if nothing exists in the database, create a new cart
cart_order, _created = cls.objects.get_or_create(user=user, status='cart')
return cart_order
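    # Illustrative usage (sketch): view code typically fetches the cart via
    #     cart = Order.get_cart_for_user(request.user)
    # relying on the at-most-one-order-with-status-'cart' invariant above.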
@classmethod
def does_user_have_cart(cls, user):
"""
        Returns a boolean indicating whether a shopping cart (Order) exists for the specified user.
"""
return cls.objects.filter(user=user, status='cart').exists()
@classmethod
def user_cart_has_items(cls, user, item_types=None):
"""
        Returns True if the user (anonymous user ok) has
        a cart with items in it (which means it should be displayed).
        If item_types is passed in, then we check to see if the cart has at least one
        OrderItem of those types.
"""
if not user.is_authenticated():
return False
cart = cls.get_cart_for_user(user)
if not item_types:
# check to see if the cart has at least some item in it
return cart.has_items()
else:
# if the caller is explicitly asking to check for particular types
for item_type in item_types:
if cart.has_items(item_type):
return True
return False
@classmethod
def remove_cart_item_from_order(cls, item, user):
"""
Removes the item from the cart if the item.order.status == 'cart'.
Also removes any code redemption associated with the order_item
"""
if item.order.status == 'cart':
log.info("order item %s removed for user %s", str(item.id), user)
item.delete()
# remove any redemption entry associated with the item
CouponRedemption.remove_code_redemption_from_item(item, user)
@property
def total_cost(self):
"""
        Return the total cost of the cart. If the order has been purchased, returns the total of
        all purchased and not refunded items.
"""
return sum(i.line_cost for i in self.orderitem_set.filter(status=self.status))
def has_items(self, item_type=None):
"""
Does the cart have any items in it?
        If an item_type is passed in, then we check to see if there is at least
        one item of that type in the cart.
        """
sparkslabs/kamaelia | Sketches/RJL/bittorrent/BitTorrent/bittorrent-console.py | Python | apache-2.0 | 7,540 | 0.00809
#!/usr/bin/env python
# The contents of this file are subject to the BitTorrent Open Source License
# Version 1.1 (the License). You may not copy or use this file, in either
# source code or executable form, except in compliance with the License. You
# may obtain a copy of the License at http://www.bittorrent.com/license/.
#
# Software distributed under the License is distributed on an AS IS basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
# Written by Bram Cohen, Uoti Urpala and John Hoffman
# Converted to a kamaelia threadedcomponent by Ryan Lothian
from __future__ import division
from BitTorrent.platform import install_translation
install_translation()
import sys
import os
import threading
from time import time, strftime, sleep
from cStringIO import StringIO
from Axon.ThreadedComponent import threadedcomponent
from Axon.Component import component
from BitTorrent.download import Feedback, Multitorrent
from BitTorrent.defaultargs import get_defaults
from BitTorrent.parseargs import printHelp
from BitTorrent.zurllib import urlopen
from BitTorrent.bencode import bdecode
from BitTorrent.ConvertedMetainfo import ConvertedMetainfo
from BitTorrent.prefs import Preferences
from BitTorrent import configfile
from BitTorrent import BTFailure
from BitTorrent import version
from BitTorrent import GetTorrent
class Lagger(component):
def main(self):
while 1:
yield 1
sleep(0.05)
class TorrentClient(threadedcomponent):
"""Using threadedcomponent so we don't have to worry about blocking IO or making
mainline yield periodically"""
Inboxes = { "inbox" : "Commands, e.g. shutdown",
"control" : "NOT USED",
}
Outboxes = { "outbox" : "State change information, e.g. finished",
"signal" : "NOT USED",
}
def __init__(self, torrentfilename):
super(TorrentClient, self).__init__()
self.torrentfilename = torrentfilename
self.done = False
def main(self):
print "TorrentClient.run"
"""Main loop"""
uiname = 'bittorrent-console'
defaults = get_defaults(uiname)
defaults.append(('twisted', 0,
_("Use Twisted network libraries for network connections. 1 means use twisted, 0 means do not use twisted, -1 means autodetect, and prefer twisted")))
metainfo = None
config, args = configfile.parse_configuration_and_args(defaults, uiname)
try:
metainfo, errors = GetTorrent.get( self.torrentfilename )
if errors:
raise BTFailure(_("Error reading .torrent file: ") + '\n'.join(errors))
else:
self.dl = DLKamaelia(metainfo, config, self)
self.dl.run()
except BTFailure, e:
print str(e)
sys.exit(1)
self.outqueues["outbox"].put("exited")
def checkInboxes(self):
while not self.inqueues["inbox"].empty():
command = self.inqueues["inbox"].get()
if command == "shutdown":
self.dl.multitorrent.rawserver.doneflag.set()
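        # Illustrative usage (sketch): a component whose outbox is linked to
        # this inbox can stop the download by sending the string "shutdown".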
def finished(self):
"""Called by DL class when the download has completed successfully"""
self.done = True
self.send("complete", "outbox")
print "BitTorrent debug: finished"
def error(self, errormsg):
"""Called by DL if an error occurs"""
print strftime('[%H:%M:%S] ') + errormsg
self.send("failed", "outbox")
def display(self, statistics):
"""Called by DL to display status updates"""
# Forward on to next component
self.send(statistics, "outbox")
def set_torrent_values(self, name, path, size, numpieces):
self.file = name
self.downloadTo = path
self.fileSize = size
self.numpieces = numpieces
class DLKamaelia(Feedback):
"""This class accepts feedback from the multitorrent downloader class
which it can then pass back to the inboxes of TorrentClient"""
def __init__(self, metainfo, config, interface):
self.doneflag = threading.Event()
self.metainfo = metainfo
self.config = Preferences().initWithDict(config)
self.d = interface
def run(self):
try:
self.multitorrent = Multitorrent(self.config, self.doneflag,
self.global_error)
# raises BTFailure if bad
metainfo = ConvertedMetainfo(bdecode(self.metainfo))
torrent_name = metainfo.name_fs
if self.config['save_as']:
if self.config['save_in']:
raise BTFailure(_("You cannot specify both --save_as and "
"--save_in"))
saveas = self.config['save_as']
elif self.config['save_in']:
saveas = os.path.join(self.config['save_in'], torrent_name)
else:
saveas = torrent_name
self.d.set_torrent_values(metainfo.name, os.path.abspath(saveas),
metainfo.total_bytes, len(metainfo.hashes))
self.torrent = self.multitorrent.start_torrent(metainfo,
Preferences(self.config), self, saveas)
except BTFailure, e:
print str(e)
return
self.get_status()
#self.multitorrent.rawserver.install_sigint_handler() - can only be done on the main thread so does not work with Kamaelia
self.multitorrent.rawserver.listen_forever( self.d )
self.d.display({'activity':_("shutting down"), 'fractionDone':0})
self.torrent.shutdown()
print "BitTorrent Debug: shutting down"
def reread_config(self):
try:
newvalues = configfile.get_config(self.config, 'bittorrent-console')
except Exception, e:
self.d.error(_("Error reading config: ") + str(e))
return
self.config.update(newvalues)
# The set_option call can potentially trigger something that kills
# the torrent (when writing this the only possibility is a change in
# max_files_open causing an IOError while closing files), and so
# the self.failed() callback can run during this loop.
for option, value in newvalues.iteritems():
self.multitorrent.set_option(option, value)
for option, value in newvalues.iteritems():
self.torrent.set_option(option, value)
def get_status(self):
self.multitorrent.rawserver.add_task(self.get_status,
self.config['display_interval'])
status = self.torrent.get_status(self.config['spew'])
self.d.display(status)
def global_error(self, level, text):
self.d.error(text)
def error(self, torrent, level, text):
self.d.error(text)
def failed(self, torrent, is_external):
self.doneflag.set()
def finished(self, torrent):
self.d.finished()
if __name__ == '__main__':
from Kamaelia.Util.PipelineComponent import pipeline
from Kamaelia.Util.Console import ConsoleReader, ConsoleEchoer
# download a linux distro
pipeline(
ConsoleReader(">>> ", ""),
TorrentClient("http://www.tlm-project.org/public/distributions/damnsmall/current/dsl-2.4.iso.torrent"),
ConsoleEchoer(),
).run()
tinenbruno/ml-buff | tests/helpers/feature_value_helper_test.py | Python | mit | 2,534 | 0.007103
from ml_buff.database import session_scope
from ml_buff.models import feature, feature_value, input_data, base_feature_record
from ml_buff.helpers.feature_value_helper import FeatureValueHelper
class TestFeature1(base_feature_record.BaseFeatureRecord):
def calculate(self, input_data):
return [1]
class TestFeature2(base_feature_record.BaseFeatureRecord):
def calculate(self, input_data):
return [2]
class TestFeatureCalculate(base_feature_record.BaseFeatureRecord):
def calculate(self, input_data):
return self._input_data_values
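# The stub features above drive the tests: TestFeature1/TestFeature2 return
# constant values, while TestFeatureCalculate echoes the raw values attached to
# the input datum, which lets test_forceUpdateForInput verify recomputation.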
def test_createAll():
test_input_data = (input_data.InputData(1, 'createAll'), input_data.InputData(2, 'createAll'))
with session_scope() as session:
for test_input_datum in test_input_data:
session.add(test_input_datum)
with session_scope() as session:
test_input_data = session.query(input_data.InputData).filter(input_data.InputData.dataset_name == 'createAll').all()
for test_input_datum in test_input_data:
session.expunge(test_input_datum)
input_data_list = {}
for test_input_datum in test_input_data:
        input_data_list[test_input_datum.id] = [1, 2, 3]
FeatureValueHelper.createAll(input_data_list)
    for test_input_datum in test_input_data:
value1 = TestFeature1().getValue(test_input_datum)
value2 = TestFeature2().getValue(test_input_datum)
assert value1.value == [1]
assert value2.value == [2]
def test_forceUpdateForInput():
test_input_data = (input_data.InputData(1, 'createAll'), input_data.InputData(2, 'createAll'))
with session_scope() as session:
for test_input_datum in test_input_data:
session.add(test_input_datum)
with session_scope() as session:
test_input_data = session.query(input_data.InputData).filter(input_data.InputData.dataset_name == 'createAll').all()
for test_input_datum in test_input_data:
session.expunge(test_input_datum)
input_data_list = {}
for test_input_datum in test_input_data:
input_data_list[test_input_datum.id] = [1, 2, 3]
FeatureValueHelper.createAll(input_data_list)
for test_input_datum in test_input_data:
value = TestFeatureCalculate().getValue(test_input_datum)
assert value.value == [1,2,3]
FeatureValueHelper.forceUpdateForInput(test_input_data[0].id, [1])
value = TestFeatureCalculate().getValue(test_input_data[0])
assert value.value == [1]
tktrungna/leetcode | Python/shortest-distance-from-all-buildings.py | Python | mit | 2,335 | 0.0197
"""
QUESTION:
You want to build a house on an empty land which reaches all buildings in the shortest amount of distance. You are
given a 2D grid of values 0, 1 or 2, where:
Each 0 marks an empty land which you can pass by freely.
Each 1 marks a building which you cannot pass through.
Each 2 marks an obstacle which you cannot pass through.
The distance is calculated using Manhattan Distance, where distance(p1, p2) = |p2.x - p1.x| + |p2.y - p1.y|.
For example, given three buildings at (0,0), (0,4), (2,2), and an obstacle at (0,2):
1 - 0 - 2 - 0 - 1
| | | | |
0 - 0 - 0 - 0 - 0
| | | | |
0 - 0 - 1 - 0 - 0
The point (1,2) is an ideal empty land to build a house, as the total travel distance of 3+3+1=7 is minimal, so return 7.
Note:
There will be at least one building. If it is not possible to build such house according to the above rules, return -1.
Company Tags: Google, Zenefits
Tags: Breadth-first Search
Similar Problems: (M) Walls and Gates, (H) Best Meeting Point
ANSWER:
Multi-source BFS: run one BFS from each building, accumulating the distance to
every reachable empty cell in dis[][]; times[][] counts how many buildings have
reached a cell, so each BFS only expands through cells already reached by all
previous buildings, and only cells reached by every building are candidates.
"""
class Solution(object):
    def shortestDistance(self, grid):
        m, n = len(grid), len(grid[0])
        # times[i][j]: how many buildings' BFS runs have reached cell (i, j)
        times = [[0]*n for _ in xrange(m)]
        # dis[i][j]: accumulated shortest distance from all buildings to (i, j)
        dis = [[0]*n for _ in xrange(m)]
        num = 0          # number of buildings processed so far
        ans = float('inf')
for i in xrange(m):
for j in xrange(n):
if grid[i][j] != 1:
continue
num += 1
                queue = [(i, j, 0)]
                while queue:
                    x, y, d = queue.pop(0)
                    for dx, dy in zip([1, 0, -1, 0], [0, 1, 0, -1]):
                        nx, ny = x + dx, y + dy
                        # expand only into empty cells already reached by all
                        # num-1 previous buildings; bumping times[nx][ny] to num
                        # on enqueue also prevents re-queueing within this BFS
                        if 0 <= nx < m and 0 <= ny < n and times[nx][ny] == num - 1 and grid[nx][ny] == 0:
                            queue.append((nx, ny, d + 1))
                            dis[nx][ny] += d + 1
                            times[nx][ny] += 1
for i in xrange(m):
for j in xrange(n):
if times[i][j] == num:
ans = min(ans,dis[i][j])
return ans if ans != float('inf') else -1
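    # Complexity note (annotation): one BFS per building over an m x n grid,
    # so the total cost is O(b * m * n) for b buildings; the times[][] check
    # also prunes cells that some earlier building could not reach.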
if __name__ == '__main__':
print Solution().shortestDistance([[1,0,2,0,1],[0,0,0,0,0],[0,0,1,0,0]])
print Solution().shortestDistance([[1,1],[0,1]])
bjolivot/ansible | lib/ansible/module_utils/sros.py | Python | gpl-3.0 | 4,609 | 0.004339
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2016 Peter Sprygada, <psprygada@ansible.com>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#    * Redistributions of source code must retain the above copyright
#      notice, this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above copyright
#      notice, this list of conditions and the following disclaimer in the
#      documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import re
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.network_common import to_list, ComplexList
from ansible.module_utils.connection import exec_command
_DEVICE_CONFIGS = {}
sros_argument_spec = {
'host': dict(),
'port': dict(type='int'),
'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
'timeout': dict(type='int'),
'provider': dict(type='dict')
}
def check_args(module, warnings):
provider = module.params['provider'] or {}
for key in sros_argument_spec:
if key != 'provider' and module.params[key]:
warnings.append('argument %s has been deprecated and will be '
'removed in a future version' % key)
def get_config(module, flags=[]):
cmd = 'admin display-config '
cmd += ' '.join(flags)
cmd = cmd.strip()
try:
return _DEVICE_CONFIGS[cmd]
except KeyError:
rc, out, err = exec_command(module, cmd)
if rc != 0:
module.fail_json(msg='unable to retrieve current config', stderr=err)
cfg = str(out).strip()
_DEVICE_CONFIGS[cmd] = cfg
return cfg
def to_commands(module, commands):
spec = {
'command': dict(key=True),
'prompt': dict(),
'answer': dict()
}
transform = ComplexList(spec, module)
return transform(commands)
def run_commands(module, commands, check_rc=True):
responses = list()
commands = to_commands(module, to_list(commands))
for cmd in commands:
cmd = module.jsonify(cmd)
rc, out, err = exec_command(module, cmd)
if check_rc and rc != 0:
module.fail_json(msg=err, rc=rc)
responses.append(out)
return responses
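# Illustrative usage from inside a module (sketch; AnsibleModule setup elided,
# and the commands shown are examples rather than verified SR OS syntax):
#     output = run_commands(module, ['show system information'])
#     cfg = get_config(module, flags=['detail'])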
def load_config(module, commands):
for command in to_list(commands):
rc, out, err = exec_command(module, command)
if rc != 0:
module.fail_json(msg=err, command=command, rc=rc)
exec_command(module, 'exit all')
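# NOTE (annotation): the two helpers below take `self` and read like methods
# lifted from a connection class -- they assume `self.execute`, `self.configure`
# and a `NetworkError` exception that is not imported here, and they are not
# called by the module-level functions above.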
def rollback_enabled(self):
if self._rollback_enabled is not None:
return self._rollback_enabled
    resp = self.execute(['show system rollback'])
match = re.search(r'^Rollback Location\s+:\s(\S+)', resp[0], re.M)
self._rollback_enabled = match.group(1) != 'None'
return self._rollback_enabled
def load_config_w_rollback(self, commands):
if self.rollback_enabled:
self.execute(['admin rollback save'])
try:
self.configure(commands)
except NetworkError:
if self.rollback_enabled:
self.execute(['admin rollback revert latest-rb',
'admin rollback delete latest-rb'])
raise
if self.rollback_enabled:
self.execute(['admin rollback delete latest-rb'])
SahSih/ARStreaming360Display | RealTimeVideoStitch/motion_detector.py | Python | mit | 2,815 | 0.019893
# USAGE
# python motion_detector.py
# python motion_detector.py --video videos/example_01.mp4
# import the necessary packages
import argparse
import datetime
import imutils
import time
import cv2
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video", help="path to the video file")
ap.add_argument("-a", "--min-area", type=int, default=500, help="minimum area size")
args = vars(ap.parse_args())
# if the video argument is None, then we are reading from webcam
if args.get("video", None) is None:
camera = cv2.VideoCapture(0)
time.sleep(0.25)
# otherwise, we are reading from a video file
else:
	camera = cv2.VideoCapture(args["video"])
	time.sleep(0.25)
# initialize the first frame in the video stream
firstFrame = None
# loop over the frames of the video
while True:
# grab the current frame and initialize the occupied/unoccupied
# text
(grabbed, frame) = camera.read()
text = "Unoccupied"
# if the frame could not be grabbed, then we have reached the end
# of the video
if not grabbed:
break
# resize the frame, convert it to grayscale, and blur it
frame = imutils.resize(frame, width=500)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (21, 21), 0)
# if the first frame is None, initialize it
if firstFrame is None:
firstFrame = gray
continue
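	# NOTE (annotation): the first frame serves as a static background model,
	# so this simple detector assumes a fixed camera and stable lighting; any
	# lasting scene change will keep registering as motion.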
# compute the absolute difference between the current frame and
# first frame
frameDelta = cv2.absdiff(firstFrame, gray)
thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]
# dilate the thresholded image to fill in holes, then find contours
# on thresholded image
thresh = cv2.dilate(thresh, None, iterations=2)
(cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
# loop over the contours
for c in cnts:
# if the contour is too small, ignore it
if cv2.contourArea(c) < args["min_area"]:
continue
# compute the bounding box for the contour, draw it on the frame,
# and update the text
(x, y, w, h) = cv2.boundingRect(c)
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
text = "Occupied"
# draw the text and timestamp on the frame
cv2.putText(frame, "Room Status: {}".format(te
|
xt), (10, 20),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
	cv2.putText(frame, datetime.datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"),
(10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)
# show the frame and record if the user presses a key
cv2.imshow("Security Feed", frame)
cv2.imshow("Thresh", thresh)
cv2.imshow("Frame Delta", frameDelta)
key = cv2.waitKey(1) & 0xFF
	# if the `q` key is pressed, break from the loop
if key == ord("q"):
break
# cleanup the camera and close any open windows
camera.release()
cv2.destroyAllWindows()
fluentstream/asterisk-p2p | res/pjproject/tests/pjsua/scripts-call/150_srtp_1_1.py | Python | gpl-2.0 | 340 | 0.023529
# $Id: 150_srtp_1_1.py 369517 2012-07-01 17:28:57Z file $
#
from inc_cfg import *
test_param = TestParam(
"Callee=optional SRTP, caller=optional SRTP",
[
InstanceParam("callee", "--null-audio --use-srtp=1 --srtp-secure=0 --max-calls=1"),
InstanceParam("caller", "--null-audio --use-srtp=1 --srtp-secure=0 --max-calls=1
|
")
]
)
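# Annotation (hedged): in pjsua, --use-srtp=1 offers SRTP but keeps it optional,
# matching the "optional SRTP" label above, and --srtp-secure=0 drops the
# requirement that SRTP be negotiated over secure (TLS) signaling.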
mrshu/iepy | iepy/webui/corpus/migrations/0005_auto_20140923_1502.py | Python | bsd-3-clause | 412 | 0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('corpus', '0004_auto_20140923_1501'),
]
operations = [
migrations.RenameField(
model_name='labeledrelationevidence',
            old_name='date',
            new_name='modification_date',
),
]
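# Annotation: this migration only renames the `date` column of
# LabeledRelationEvidence to `modification_date`; no data is transformed.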