| code (stringlengths 2-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int32 2-1.05M) |
|---|---|---|---|---|---|
#author Matt Jacobsen
'''
This program will learn and predict words and sentences using a Hierarchical Hidden Markov Model (HHMM).
Implement the Baum-Welch algorithm (an instance of EM) to learn parameters.
Implement the Viterbi algorithm to find the most likely hidden state sequence.
Implement the forward-backward algorithm (a form of belief propagation) to do inference over the evidence.
'''
'''
can do things like adjust sutff to be more like stuff
#probabilities for a single word
#states --> s t u f f
#emissions --> s t u f
'''
import sys, pprint as pp
class HMM(object):
numstates = 2
#prior probabilities
pprob = [0.5, 0.5]
#transition probabilities
aprob = [[0.8, 0.2],
[0.2, 0.8]]
#emission probabilities
bprob = [[0.6, 0.4],
[0.4, 0.6]]
bmap = {
'l': 0,
'r': 1
}
def __init__(self):
pass
#compute forward probabilities
def forward(self, O):
pi = self.pprob
a = self.aprob
b = self.bprob
bmap = self.bmap
        #initialization: alpha_1(i) = pi(i) * b_i(O_1)
        alpha = [[0.0]*len(O) for i in range(self.numstates)]
        for i in range(0, self.numstates):
            alpha[i][0] = pi[i] * b[i][bmap[O[0]]]
#recursion
for t in range(1, len(O)):
for j in range(0, self.numstates):
sum_i = 0.0
for i in range(0, self.numstates):
sum_i += alpha[i][t-1] * a[i][j]
alpha[j][t] = sum_i * b[j][bmap[O[t]]]
        #normalize each time step across states to avoid underflow
        for t in range(0, len(O)):
            norm = sum(alpha[n][t] for n in range(self.numstates))
            for n in range(self.numstates):
                alpha[n][t] = alpha[n][t] / norm
return alpha
#compute backward probabilities
def backward(self, O):
pi = self.pprob
a = self.aprob
b = self.bprob
bmap = self.bmap
#initialization
beta = [[1.0]*len(O) for i in range(self.numstates)]
        #recursion: beta_t(i) = sum_j a(i,j) * b_j(O_t+1) * beta_t+1(j)
        for t in range(len(O)-2, -1, -1):
            for i in range(0, self.numstates):
                sum_j = 0.0
                for j in range(0, self.numstates):
                    sum_j += a[i][j] * b[j][bmap[O[t+1]]] * beta[j][t+1]
                beta[i][t] = sum_j
        #normalize beta the same way to avoid underflow
        for t in range(0, len(O)):
            norm = sum(beta[n][t] for n in range(self.numstates))
            for n in range(self.numstates):
                beta[n][t] = beta[n][t] / norm
return beta
    #compute smoothed posterior probabilities
def posterior(self, O):
alpha = self.forward(O)
beta = self.backward(O)
p = [0.0]*self.numstates
#dot product between alpha and beta
for i in range(0, len(p)):
p[i] = [0.0] * len(alpha[i])
for j in range(0, len(alpha[i])):
p[i][j] += alpha[i][j] * beta[i][j]
#normalize to be a distribution
sum_p_i = [0.0]*len(p[0])
for i in range(0,len(p)):
for j in range(0, len(p[i])):
sum_p_i[j] += p[i][j]
for i in range(0,len(p)):
for j in range(0, len(p[i])):
p[i][j] = p[i][j] / sum_p_i[j]
return p
    #learn HMM parameters (emission and transition probabilities) from a set of observations
    def baumwelch(self):
        pass
    #find the most likely hidden state sequence for a set of observations
    def viterbi(self):
        pass
if __name__ == "__main__":
if len(sys.argv) < 2:
print 'missing test input'
sys.exit()
hmm = HMM()
'''
print 'forward'
pp.pprint(hmm.forward(sys.argv[1]))
print 'backward'
pp.pprint(hmm.backward(sys.argv[1]))
'''
print 'posterior'
pp.pprint(hmm.posterior(sys.argv[1]))
| GccX11/machine-learning | hmm.py | Python | mit | 3,184 |
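A quick way to sanity-check the corrected forward-backward recursions above is to recompute the smoothed posterior with numpy. This is a hedged sketch, not part of the original repo; the model values are copied from the class attributes and the observation string 'lr' maps through bmap to symbol indices 0 and 1.

```python
import numpy as np

pi = np.array([0.5, 0.5])
A = np.array([[0.8, 0.2], [0.2, 0.8]])   # transition probabilities
B = np.array([[0.6, 0.4], [0.4, 0.6]])   # emission: rows are states, cols are symbols l, r
obs = [0, 1]                              # the observation string 'lr'

T, N = len(obs), 2
alpha = np.zeros((T, N))
beta = np.ones((T, N))
alpha[0] = pi * B[:, obs[0]]
for t in range(1, T):
    alpha[t] = (alpha[t - 1] @ A) * B[:, obs[t]]
for t in range(T - 2, -1, -1):
    beta[t] = A @ (B[:, obs[t + 1]] * beta[t + 1])
gamma = alpha * beta
gamma /= gamma.sum(axis=1, keepdims=True)  # each row is a distribution over states
print(gamma)
```

Each row of gamma is alpha[i][t] * beta[i][t] normalized across states, which is exactly what posterior() computes.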
from __future__ import absolute_import
from __future__ import print_function
import os
import numpy
import matplotlib.pyplot as plt
import datetime
import clawpack.visclaw.colormaps as colormap
import clawpack.visclaw.gaugetools as gaugetools
import clawpack.clawutil.data as clawutil
import clawpack.amrclaw.data as amrclaw
import clawpack.geoclaw.data as geodata
from clawpack.geoclaw.util import fetch_noaa_tide_data
import clawpack.geoclaw.surge.plot as surgeplot
try:
from setplotfg import setplotfg
except ImportError:
setplotfg = None
def setplot(plotdata=None):
""""""
if plotdata is None:
from clawpack.visclaw.data import ClawPlotData
plotdata = ClawPlotData()
# clear any old figures,axes,items data
plotdata.clearfigures()
plotdata.format = 'ascii'
# Load data from output
clawdata = clawutil.ClawInputData(2)
clawdata.read(os.path.join(plotdata.outdir, 'claw.data'))
physics = geodata.GeoClawData()
physics.read(os.path.join(plotdata.outdir, 'geoclaw.data'))
surge_data = geodata.SurgeData()
surge_data.read(os.path.join(plotdata.outdir, 'surge.data'))
friction_data = geodata.FrictionData()
friction_data.read(os.path.join(plotdata.outdir, 'friction.data'))
# Load storm track
track = surgeplot.track_data(os.path.join(plotdata.outdir, 'fort.track'))
# Set afteraxes function
def surge_afteraxes(cd):
surgeplot.surge_afteraxes(cd, track, plot_direction=False,
kwargs={"markersize": 4})
# Color limits
surface_limits = [-5.0, 5.0]
speed_limits = [0.0, 3.0]
wind_limits = [0, 64]
pressure_limits = [935, 1013]
friction_bounds = [0.01, 0.04]
def friction_after_axes(cd):
plt.title(r"Manning's $n$ Coefficient")
# ==========================================================================
# Plot specifications
# ==========================================================================
regions = {"Gulf": {"xlimits": (clawdata.lower[0], clawdata.upper[0]),
"ylimits": (clawdata.lower[1], clawdata.upper[1]),
"figsize": (6.4, 4.8)},
"Texas Gulf Coast": {"xlimits": (-99.2, -94.2),
"ylimits": (26.4, 30.4),
"figsize": (6, 6)}}
for (name, region_dict) in regions.items():
# Surface Figure
plotfigure = plotdata.new_plotfigure(name="Surface - %s" % name)
plotfigure.kwargs = {"figsize": region_dict['figsize']}
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = "Surface"
plotaxes.xlimits = region_dict["xlimits"]
plotaxes.ylimits = region_dict["ylimits"]
plotaxes.afteraxes = surge_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
# Speed Figure
plotfigure = plotdata.new_plotfigure(name="Currents - %s" % name)
plotfigure.kwargs = {"figsize": region_dict['figsize']}
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = "Currents"
plotaxes.xlimits = region_dict["xlimits"]
plotaxes.ylimits = region_dict["ylimits"]
plotaxes.afteraxes = surge_afteraxes
surgeplot.add_speed(plotaxes, bounds=speed_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['speed'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
#
# Friction field
#
plotfigure = plotdata.new_plotfigure(name='Friction')
plotfigure.show = friction_data.variable_friction and True
plotaxes = plotfigure.new_plotaxes()
plotaxes.xlimits = regions['Gulf']['xlimits']
plotaxes.ylimits = regions['Gulf']['ylimits']
# plotaxes.title = "Manning's N Coefficient"
plotaxes.afteraxes = friction_after_axes
plotaxes.scaled = True
surgeplot.add_friction(plotaxes, bounds=friction_bounds, shrink=0.9)
plotaxes.plotitem_dict['friction'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['friction'].colorbar_label = "$n$"
#
# Hurricane Forcing fields
#
# Pressure field
plotfigure = plotdata.new_plotfigure(name='Pressure')
plotfigure.show = surge_data.pressure_forcing and True
plotaxes = plotfigure.new_plotaxes()
plotaxes.xlimits = regions['Gulf']['xlimits']
plotaxes.ylimits = regions['Gulf']['ylimits']
plotaxes.title = "Pressure Field"
plotaxes.afteraxes = surge_afteraxes
plotaxes.scaled = True
surgeplot.add_pressure(plotaxes, bounds=pressure_limits)
surgeplot.add_land(plotaxes)
# Wind field
plotfigure = plotdata.new_plotfigure(name='Wind Speed')
plotfigure.show = surge_data.wind_forcing and True
plotaxes = plotfigure.new_plotaxes()
plotaxes.xlimits = regions['Gulf']['xlimits']
plotaxes.ylimits = regions['Gulf']['ylimits']
plotaxes.title = "Wind Field"
plotaxes.afteraxes = surge_afteraxes
plotaxes.scaled = True
surgeplot.add_wind(plotaxes, bounds=wind_limits)
surgeplot.add_land(plotaxes)
# ========================================================================
# Figures for gauges
# ========================================================================
plotfigure = plotdata.new_plotfigure(name='Gauge Surfaces', figno=300,
type='each_gauge')
plotfigure.show = True
plotfigure.clf_each_gauge = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
#Time Conversions
def days2seconds(days):
return days * 60.0**2 * 24.0
stations = [('8773037', 'Seadrift'),
('8773701', 'Port OConnor'),
('8774230', 'Aransas Wildlife Refuge'),
('8775237', 'Port Aransas'),
('8775296', 'USS Lexington')]
landfall_time = numpy.datetime64('2017-08-25T10:00')
begin_date = datetime.datetime(2017, 8, 24)
end_date = datetime.datetime(2017, 8, 28)
def get_actual_water_levels(station_id):
# Fetch water levels and tide predictions for given station
date_time, water_level, tide = fetch_noaa_tide_data(station_id,
begin_date, end_date)
# Calculate times relative to landfall
seconds_rel_landfall = (date_time - landfall_time) / numpy.timedelta64(1, 's')
# Subtract tide predictions from measured water levels
water_level -= tide
return seconds_rel_landfall, water_level
def gauge_afteraxes(cd):
station_id, station_name = stations[cd.gaugeno - 1]
seconds_rel_landfall, actual_level = get_actual_water_levels(station_id)
axes = plt.gca()
surgeplot.plot_landfall_gauge(cd.gaugesoln, axes)
axes.plot(seconds_rel_landfall, actual_level, 'g')
# Fix up plot - in particular fix time labels
axes.set_title(station_name)
axes.set_xlabel('Seconds relative to landfall')
axes.set_ylabel('Surface (m)')
axes.set_xlim([days2seconds(-1), days2seconds(3)])
axes.set_ylim([-1, 5])
        axes.set_xticks([days2seconds(-1), 0, days2seconds(1), days2seconds(2), days2seconds(3)])
#axes.set_xticklabels([r"$-1$", r"$0$", r"$1$", r"$2$", r"$3$"])
#axes.grid(True)
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.afteraxes = gauge_afteraxes
# Plot surface as blue curve:
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
plotitem.plot_var = 3
plotitem.plotstyle = 'b-'
#
# Gauge Location Plot
#
def gauge_location_afteraxes(cd):
plt.subplots_adjust(left=0.12, bottom=0.06, right=0.97, top=0.97)
surge_afteraxes(cd)
gaugetools.plot_gauge_locations(cd.plotdata, gaugenos='all',
format_string='ko', add_labels=False)
#Plot for gauge location 1
plotfigure = plotdata.new_plotfigure(name="Gauge Location 1")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Location 1'
plotaxes.scaled = True
plotaxes.xlimits = [-96.83, -96.63]
plotaxes.ylimits = [28.33, 28.43]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
#Plot for gauge location 2
plotfigure = plotdata.new_plotfigure(name="Gauge Location 2")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Location 2'
plotaxes.scaled = True
plotaxes.xlimits = [-96.48, -96.28]
plotaxes.ylimits = [28.40, 28.50]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
#Plot for gauge location 3
plotfigure = plotdata.new_plotfigure(name="Gauge Location 3")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Location 3'
plotaxes.scaled = True
plotaxes.xlimits = [-96.85, -96.65]
plotaxes.ylimits = [28.17, 28.27]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
#Plot for gauge location 4
plotfigure = plotdata.new_plotfigure(name="Gauge Location 4")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Location 4'
plotaxes.scaled = True
plotaxes.xlimits = [-97.15, -96.95]
plotaxes.ylimits = [27.79, 27.89]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
#Plot for gauge location 5
plotfigure = plotdata.new_plotfigure(name="Gauge Location 5")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Location 5'
plotaxes.scaled = True
plotaxes.xlimits = [-97.48, -97.28]
plotaxes.ylimits = [27.75, 27.85]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
# -----------------------------------------
# Parameters used only when creating html and/or latex hardcopy
# e.g., via pyclaw.plotters.frametools.printframes:
plotdata.printfigs = True # print figures
plotdata.print_format = 'png' # file format
plotdata.print_framenos = 'all' # list of frames to print
plotdata.print_gaugenos = [1, 2, 3, 4, 5] # list of gauges to print
plotdata.print_fignos = 'all' # list of figures to print
plotdata.html = True # create html files of plots?
plotdata.latex = True # create latex file of plots?
plotdata.latex_figsperline = 2 # layout of plots
plotdata.latex_framesperline = 1 # layout of plots
plotdata.latex_makepdf = False # also run pdflatex?
plotdata.parallel = True # parallel plotting
return plotdata
| mandli/surge-examples | harvey/setplot.py | Python | mit | 12,304 |
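The gauge comparison above hinges on converting NOAA timestamps to seconds relative to landfall. Here is a minimal sketch of that conversion in isolation, using the same numpy datetime64 arithmetic as get_actual_water_levels; the sample timestamp is illustrative.

```python
import numpy

landfall_time = numpy.datetime64('2017-08-25T10:00')
sample = numpy.datetime64('2017-08-26T10:00')  # one day after landfall
# dividing a timedelta64 by a 1-second unit yields a float number of seconds
seconds_rel_landfall = (sample - landfall_time) / numpy.timedelta64(1, 's')
print(seconds_rel_landfall)  # 86400.0
```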
"""
locally connected implementation on the lip movement data.
Akm Ashiquzzaman
13101002@uap-bd.edu
Fall 2016
after 1 epoch , val_acc: 0.0926
"""
from __future__ import print_function, division
#random seed fixing for reproducibility
import numpy as np
np.random.seed(1337)
import time
#Data loading
X_train = np.load('videopart43.npy')
Y_train = np.load('audiopart43.npy')
#Reshaping to the 'th' order to feed into the cnn
X_train = X_train.reshape((X_train.shape[0],53*53)).astype('float32')
Y_train = Y_train.reshape((Y_train.shape[0],4702)).astype('float32')
#setting batch_size and epoch
batchSize = 20
tt_epoch = 1
from keras.models import Sequential
from keras.layers import Dense,Dropout, Activation
#time to measure the experiment.
tt = time.time()
#model building starts here
seq = Sequential()
#first lc layer
seq.add(Dense(2048,input_dim=(53*53)))
seq.add(Activation('relu'))
seq.add(Dropout(0.25))
#second lc layer
seq.add(Dense(4096))
seq.add(Activation('relu'))
seq.add(Dropout(0.5))
#3rd lc layers
seq.add(Dense(1024))
seq.add(Activation('relu'))
seq.add(Dropout(0.5))
seq.add(Dense(1024))
seq.add(Activation('relu'))
seq.add(Dropout(0.5))
seq.add(Dense(4702))
seq.add(Activation('softmax'))
seq.compile(loss='binary_crossentropy', optimizer='sgd', metrics=['accuracy'])
#checkpoint import
from keras.callbacks import ModelCheckpoint
from os.path import isfile, join
#weight file name
weight_file = 'lc_weights.h5'
#loading previous weight file for resuming training
if isfile(weight_file):
seq.load_weights(weight_file)
#weight-checkmark
checkpoint = ModelCheckpoint(weight_file, monitor='acc', verbose=1, save_best_only=True, mode='max')
callbacks_list = [checkpoint]
print('total time: ',time.time()-tt)
seq.fit(X_train,Y_train,batch_size=batchSize, nb_epoch=tt_epoch
,validation_split=0.2,callbacks=callbacks_list)
#generating prediction for testing
pred = seq.predict(X_train,batch_size=batchSize,verbose=1)
print('pred shape',pred.shape)
print('pred dtype',pred.dtype)
np.save('pred-lc.npy',pred)
| zamanashiq3/code-DNN | dense_v1.py | Python | mit | 2,055 |
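Worth noting for the checkpoint fix above: ModelCheckpoint's mode must agree with the direction of the monitored metric, otherwise save_best_only keeps the worst weights. A hedged sketch of the two common pairings (file names here are placeholders):

```python
from keras.callbacks import ModelCheckpoint

# accuracy-style metrics improve upward, so save on 'max'
ckpt_acc = ModelCheckpoint('best_acc.h5', monitor='acc',
                           save_best_only=True, mode='max')
# loss-style metrics improve downward, so save on 'min'
ckpt_loss = ModelCheckpoint('best_loss.h5', monitor='val_loss',
                            save_best_only=True, mode='min')
```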
import datetime
import io
import boto3
import mock
import pytest
import requests
import testfixtures
from botocore.exceptions import ClientError
from opentracing.ext import tags
from opentracing_instrumentation.client_hooks import boto3 as boto3_hooks
DYNAMODB_ENDPOINT_URL = 'http://localhost:4569'
S3_ENDPOINT_URL = 'http://localhost:4572'
DYNAMODB_CONFIG = {
'endpoint_url': DYNAMODB_ENDPOINT_URL,
'aws_access_key_id': '-',
'aws_secret_access_key': '-',
'region_name': 'us-east-1',
}
S3_CONFIG = dict(DYNAMODB_CONFIG, endpoint_url=S3_ENDPOINT_URL)
def create_users_table(dynamodb):
dynamodb.create_table(
TableName='users',
KeySchema=[{
'AttributeName': 'username',
'KeyType': 'HASH'
}],
AttributeDefinitions=[{
'AttributeName': 'username',
'AttributeType': 'S'
}],
ProvisionedThroughput={
'ReadCapacityUnits': 9,
'WriteCapacityUnits': 9
}
)
@pytest.fixture
def dynamodb_mock():
import moto
with moto.mock_dynamodb2():
dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
create_users_table(dynamodb)
yield dynamodb
@pytest.fixture
def dynamodb():
dynamodb = boto3.resource('dynamodb', **DYNAMODB_CONFIG)
try:
dynamodb.Table('users').delete()
except ClientError as error:
        # ResourceNotFoundException is generated dynamically by botocore at
        # runtime, so it cannot be caught by class here; match on the name
if error.__class__.__name__ != 'ResourceNotFoundException':
raise
create_users_table(dynamodb)
# waiting until the table exists
dynamodb.meta.client.get_waiter('table_exists').wait(TableName='users')
return dynamodb
@pytest.fixture
def s3_mock():
import moto
with moto.mock_s3():
s3 = boto3.client('s3', region_name='us-east-1')
yield s3
@pytest.fixture
def s3():
return boto3.client('s3', **S3_CONFIG)
@pytest.fixture(autouse=True)
def patch_boto3():
boto3_hooks.install_patches()
try:
yield
finally:
boto3_hooks.reset_patches()
def assert_last_span(kind, service_name, operation, tracer, response=None):
span = tracer.recorder.get_spans()[-1]
request_id = response and response['ResponseMetadata'].get('RequestId')
assert span.operation_name == 'boto3:{}:{}:{}'.format(
kind, service_name, operation
)
assert span.tags.get(tags.SPAN_KIND) == tags.SPAN_KIND_RPC_CLIENT
assert span.tags.get(tags.COMPONENT) == 'boto3'
assert span.tags.get('boto3.service_name') == service_name
if request_id:
assert span.tags.get('aws.request_id') == request_id
def _test_dynamodb(dynamodb, tracer):
users = dynamodb.Table('users')
response = users.put_item(Item={
'username': 'janedoe',
'first_name': 'Jane',
'last_name': 'Doe',
})
assert_last_span('resource', 'dynamodb', 'put_item', tracer, response)
response = users.get_item(Key={'username': 'janedoe'})
user = response['Item']
assert user['first_name'] == 'Jane'
assert user['last_name'] == 'Doe'
assert_last_span('resource', 'dynamodb', 'get_item', tracer, response)
try:
dynamodb.Table('test').delete_item(Key={'username': 'janedoe'})
except ClientError as error:
response = error.response
assert_last_span('resource', 'dynamodb', 'delete_item', tracer, response)
response = users.creation_date_time
assert isinstance(response, datetime.datetime)
assert_last_span('resource', 'dynamodb', 'describe_table', tracer)
def _test_s3(s3, tracer):
fileobj = io.BytesIO(b'test data')
bucket = 'test-bucket'
response = s3.create_bucket(Bucket=bucket)
assert_last_span('client', 's3', 'create_bucket', tracer, response)
response = s3.upload_fileobj(fileobj, bucket, 'test.txt')
assert_last_span('client', 's3', 'upload_fileobj', tracer, response)
def is_service_running(endpoint_url, expected_status_code):
try:
# feel free to suggest better solution for this check
response = requests.get(endpoint_url, timeout=1)
return response.status_code == expected_status_code
except requests.exceptions.ConnectionError:
return False
def is_dynamodb_running():
return is_service_running(DYNAMODB_ENDPOINT_URL, 502)
def is_s3_running():
return is_service_running(S3_ENDPOINT_URL, 200)
def is_moto_presented():
try:
import moto
return True
except ImportError:
return False
@pytest.mark.skipif(not is_dynamodb_running(),
reason='DynamoDB is not running or cannot connect')
def test_boto3_dynamodb(thread_safe_tracer, dynamodb):
_test_dynamodb(dynamodb, thread_safe_tracer)
@pytest.mark.skipif(not is_moto_presented(),
                    reason='moto module is not installed')
def test_boto3_dynamodb_with_moto(thread_safe_tracer, dynamodb_mock):
_test_dynamodb(dynamodb_mock, thread_safe_tracer)
@pytest.mark.skipif(not is_s3_running(),
reason='S3 is not running or cannot connect')
def test_boto3_s3(s3, thread_safe_tracer):
_test_s3(s3, thread_safe_tracer)
@pytest.mark.skipif(not is_moto_presented(),
                    reason='moto module is not installed')
def test_boto3_s3_with_moto(s3_mock, thread_safe_tracer):
_test_s3(s3_mock, thread_safe_tracer)
@testfixtures.log_capture()
def test_boto3_s3_missing_func_instrumentation(capture):
class Patcher(boto3_hooks.Boto3Patcher):
S3_FUNCTIONS_TO_INSTRUMENT = 'missing_func',
Patcher().install_patches()
capture.check(('root', 'WARNING', 'S3 function missing_func not found'))
@mock.patch.object(boto3_hooks, 'patcher')
def test_set_custom_patcher(default_patcher):
patcher = mock.Mock()
boto3_hooks.set_patcher(patcher)
assert boto3_hooks.patcher is not default_patcher
assert boto3_hooks.patcher is patcher
boto3_hooks.install_patches()
boto3_hooks.reset_patches()
patcher.install_patches.assert_called_once()
patcher.reset_patches.assert_called_once()
| uber-common/opentracing-python-instrumentation | tests/opentracing_instrumentation/test_boto3.py | Python | mit | 6,158 |
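The patch_boto3 fixture above follows the usual install-in-setup, reset-in-teardown shape. As a hedged, library-agnostic sketch, the same pattern can be packaged as a context manager; the names here are illustrative, not part of opentracing_instrumentation.

```python
from contextlib import contextmanager

@contextmanager
def patched(install, reset):
    # install instrumentation, and guarantee cleanup even if the body raises
    install()
    try:
        yield
    finally:
        reset()

# usage sketch:
# with patched(boto3_hooks.install_patches, boto3_hooks.reset_patches):
#     ... exercise instrumented boto3 calls ...
```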
from .file_logger import FileLogger
| philipperemy/tensorflow-phased-lstm | helpers/__init__.py | Python | mit | 35 |
"""Script to execute CPU and get a best move using Minimax algorithm"""
import copy
from common import board_full, win
OPP = [1, 0]
def eval_rc(board, player, glength, roc):
"""Returns row or column score"""
score_sum = 0
clone_board = board
if roc == "c":
clone_board = [[board[j][i] for j in xrange(glength)] for i in xrange(glength)]
for i in xrange(glength):
        score = 0
        if clone_board[i][0] == player:
            score = 1
        elif clone_board[i][0] == OPP[player]:
            score = -1
        for j in xrange(1, glength):
            if clone_board[i][j] == player and score > 0:
                score = score * 10
            elif clone_board[i][j] == player and score < 0:
                score = 0
                break
            elif clone_board[i][j] == player:
                score = 1
            elif clone_board[i][j] == OPP[player] and score < 0:
                score = score * 10
            elif clone_board[i][j] == OPP[player] and score > 0:
                score = 0
                break
            elif clone_board[i][j] == OPP[player]:
                score = 1
score_sum = score_sum + score
return score_sum
def eval_diags(board, player, glength):
"""Returns diagonal score"""
score = 0
if board[0][0] == player:
score = 1
elif board[0][0] == OPP[player]:
score = -1
for i in range(1, glength):
if board[i][i] == player and score > 0:
score = score * 10
elif board[i][i] == player and score < 0:
score = 0
break
elif board[i][i] == player:
score = 1
elif board[i][i] == OPP[player] and score < 0:
score = score * 10
elif board[i][i] == OPP[player] and score > 0:
score = 0
break
elif board[i][i] == OPP[player]:
score = 1
score_sum = score
    score = 0
    if board[glength - 1][0] == player:
        score = 1
    elif board[glength - 1][0] == OPP[player]:
        score = -1
for i in range(1, glength):
if board[glength - i - 1][i] == player and score > 0:
score = score * 10
elif board[glength - i - 1][i] == player and score < 0:
score = 0
break
elif board[glength - i - 1][i] == player:
score = 1
elif board[glength - i - 1][i] == OPP[player] and score < 0:
score = score * 10
elif board[glength - i - 1][i] == OPP[player] and score > 0:
score = 0
break
elif board[glength - i - 1][i] == OPP[player]:
score = 1
score_sum = score_sum + score
return score_sum
def evaluate(board, player, glength):
"""Evaluates the score for the player based on horizontal, vertical and diagonal advantages"""
score = eval_rc(board, player, glength, "r")
score += eval_rc(board, player, glength, "c")
score += eval_diags(board, player, glength)
return score
def get_moves(board, glength):
"""Returns all possible moves"""
moves = []
for i in range(glength):
for j in range(glength):
if board[i][j] == -1:
moves = moves + [[i, j]]
return moves
def gen_board(board, player, pos):
"""Returns a new clone board by playing a move"""
new_board = copy.deepcopy(board)
new_board[pos[0]][pos[1]] = player
return new_board
def if_second_move(board, glength):
"""Returns True if it is the second move of the game, otherwise False"""
check = 0
for i in xrange(glength):
for j in xrange(glength):
if board[i][j] == 0 or board[i][j] == 1:
check += 1
if check > 1:
return False
return True
def minimax(board, player, depth, glength):
"""Returns the best move for the CPU by traversing
all best CPU and worst user moves with depth
"""
moves = get_moves(board, glength)
if not moves:
return None
if len(moves) == 1 or if_second_move(board, glength):
return moves[0]
    best_move = moves[0]
    best_score = float('-inf')
    #take an immediate winning move if one exists
    for move in moves:
        clone_board = gen_board(board, player, move)
        if win(clone_board, player, glength):
            return move
    #otherwise block an immediate winning move of the opponent
    for move in moves:
        clone_board = gen_board(board, OPP[player], move)
        if win(clone_board, OPP[player], glength):
            return move
    #otherwise pick the move with the best minimax score
    for move in moves:
        clone_board = gen_board(board, player, move)
        score = min_play(clone_board, OPP[player], depth, glength)
        if best_score < score:
            best_score = score
            best_move = move
    return best_move
def min_play(board, player, depth, glength):
"""Returns the worst score for the player"""
moves = get_moves(board, glength)
if not moves or depth == 0:
return evaluate(board, player, glength)
best_score = float('inf')
for move in moves:
clone_board = gen_board(board, player, move)
if win(clone_board, player, glength):
return evaluate(clone_board, player, glength)
score = max_play(clone_board, OPP[player], depth - 1, glength)
if score < best_score:
best_score = score
return best_score
def max_play(board, player, depth, glength):
"""Returns the best score for the CPU"""
moves = get_moves(board, glength)
if not moves or depth == 0:
return evaluate(board, player, glength)
best_score = float('-inf')
for move in moves:
clone_board = gen_board(board, player, move)
if win(clone_board, player, glength):
return evaluate(clone_board, player, glength)
        score = min_play(clone_board, OPP[player], depth - 1, glength)
if score > best_score:
best_score = score
return best_score
| sk364/N_by_N_Tic_Tac_Toe | cpu.py | Python | mit | 5,851 |
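For reference alongside cpu.py's game-specific version, here is a hedged, self-contained sketch of plain minimax over an abstract game tree: leaves are scored, and interior nodes alternate between maximizing and minimizing levels. It is illustrative only, not the repo's API.

```python
def minimax_value(node, depth, maximizing, children, score):
    """children(node) -> list of successor nodes; score(node) -> number."""
    kids = children(node)
    if depth == 0 or not kids:
        return score(node)
    if maximizing:
        return max(minimax_value(k, depth - 1, False, children, score) for k in kids)
    return min(minimax_value(k, depth - 1, True, children, score) for k in kids)

# tiny worked tree: the root's children are three minimizing nodes over leaves
tree = [[3, 12, 8], [2, 4, 6], [14, 5, 2]]
children = lambda n: n if isinstance(n, list) else []
score = lambda n: n
print(minimax_value(tree, 2, True, children, score))  # 3: max of the row minima (3, 2, 2)
```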
# Problem 19: Counting Sundays
# https://projecteuler.net/problem=19
def is_leapyear(year):
if year%4 == 0 and year%100 != 0 or year%400 == 0:
return 1
else:
return 0
month = [31, 28, 31, 30, 31, 30,
31, 31, 30, 31, 30, 31]
def days_of_month(m, y):
return month[m-1] + (is_leapyear(y) if m == 2 else 0)
def days_of_year(y):
return sum(month) + is_leapyear(y)
# date 1 Jan 1900 represented as (1, 1, 1900)
# 1 Jan 1900 was Monday, days is 1
# 7 Jan 1900 was Sunday, days is 7
def date_to_days(date):
dy, mn, yr = date
days = dy
for y in range(1900, yr):
days += days_of_year(y)
for m in range(1, mn):
days += days_of_month(m, yr)
return days
def is_sunday(days):
return days % 7 == 0
def cs():
count = 0
for y in range(1901, 2000+1):
for m in range(1, 12+1):
days = date_to_days((1, m, y))
if is_sunday(days):
count += 1
return count
#
def test():
return 'No test'
def main():
return cs()
if __name__ == '__main__':
import sys
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
print(test())
else:
print(main())
| yehnan/project_euler_python | p019.py | Python | mit | 1,291 |
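A hedged cross-check of cs() using only the standard library (not in the original file): datetime.date.weekday() returns 6 for Sunday, so counting first-of-month Sundays over 1901..2000 should agree with the hand-rolled calendar arithmetic above.

```python
import datetime

count = sum(1 for y in range(1901, 2001) for m in range(1, 13)
            if datetime.date(y, m, 1).weekday() == 6)
print(count)  # expected to match cs(): 171
```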
#!/usr/bin/env python3
# Copyright (c) 2016-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Encode and decode Bitcoin addresses.
- base58 P2PKH and P2SH addresses.
- bech32 segwit v0 P2WPKH and P2WSH addresses.
- bech32m segwit v1 P2TR addresses."""
import enum
import unittest
from .script import (
CScript,
OP_0,
OP_TRUE,
hash160,
hash256,
sha256,
taproot_construct,
)
from .segwit_addr import encode_segwit_address
from .util import assert_equal
ADDRESS_BCRT1_UNSPENDABLE = 'bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj'
ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR = 'addr(bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj)#juyq9d97'
# Coins sent to this address can be spent with a witness stack of just OP_TRUE
ADDRESS_BCRT1_P2WSH_OP_TRUE = 'bcrt1qft5p2uhsdcdc3l2ua4ap5qqfg4pjaqlp250x7us7a8qqhrxrxfsqseac85'
class AddressType(enum.Enum):
bech32 = 'bech32'
p2sh_segwit = 'p2sh-segwit'
legacy = 'legacy' # P2PKH
chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
def create_deterministic_address_bcrt1_p2tr_op_true():
"""
Generates a deterministic bech32m address (segwit v1 output) that
can be spent with a witness stack of OP_TRUE and the control block
with internal public key (script-path spending).
Returns a tuple with the generated address and the internal key.
"""
internal_key = (1).to_bytes(32, 'big')
scriptPubKey = taproot_construct(internal_key, [(None, CScript([OP_TRUE]))]).scriptPubKey
address = encode_segwit_address("bcrt", 1, scriptPubKey[2:])
assert_equal(address, 'bcrt1p9yfmy5h72durp7zrhlw9lf7jpwjgvwdg0jr0lqmmjtgg83266lqsekaqka')
return (address, internal_key)
def byte_to_base58(b, version):
result = ''
str = b.hex()
str = chr(version).encode('latin-1').hex() + str
checksum = hash256(bytes.fromhex(str)).hex()
str += checksum[:8]
value = int('0x' + str, 0)
while value > 0:
result = chars[value % 58] + result
value //= 58
while (str[:2] == '00'):
result = chars[0] + result
str = str[2:]
return result
def base58_to_byte(s):
"""Converts a base58-encoded string to its data and version.
Throws if the base58 checksum is invalid."""
if not s:
return b''
n = 0
for c in s:
n *= 58
assert c in chars
digit = chars.index(c)
n += digit
h = '%x' % n
if len(h) % 2:
h = '0' + h
res = n.to_bytes((n.bit_length() + 7) // 8, 'big')
pad = 0
for c in s:
if c == chars[0]:
pad += 1
else:
break
res = b'\x00' * pad + res
# Assert if the checksum is invalid
assert_equal(hash256(res[:-4])[:4], res[-4:])
return res[1:-4], int(res[0])
def keyhash_to_p2pkh(hash, main=False):
assert len(hash) == 20
version = 0 if main else 111
return byte_to_base58(hash, version)
def scripthash_to_p2sh(hash, main=False):
assert len(hash) == 20
version = 5 if main else 196
return byte_to_base58(hash, version)
def key_to_p2pkh(key, main=False):
key = check_key(key)
return keyhash_to_p2pkh(hash160(key), main)
def script_to_p2sh(script, main=False):
script = check_script(script)
return scripthash_to_p2sh(hash160(script), main)
def key_to_p2sh_p2wpkh(key, main=False):
key = check_key(key)
p2shscript = CScript([OP_0, hash160(key)])
return script_to_p2sh(p2shscript, main)
def program_to_witness(version, program, main=False):
if (type(program) is str):
program = bytes.fromhex(program)
assert 0 <= version <= 16
assert 2 <= len(program) <= 40
assert version > 0 or len(program) in [20, 32]
return encode_segwit_address("sys" if main else "bcrt", version, program)
def script_to_p2wsh(script, main=False):
script = check_script(script)
return program_to_witness(0, sha256(script), main)
def key_to_p2wpkh(key, main=False):
key = check_key(key)
return program_to_witness(0, hash160(key), main)
def script_to_p2sh_p2wsh(script, main=False):
script = check_script(script)
p2shscript = CScript([OP_0, sha256(script)])
return script_to_p2sh(p2shscript, main)
def check_key(key):
if (type(key) is str):
key = bytes.fromhex(key) # Assuming this is hex string
if (type(key) is bytes and (len(key) == 33 or len(key) == 65)):
return key
assert False
def check_script(script):
if (type(script) is str):
script = bytes.fromhex(script) # Assuming this is hex string
if (type(script) is bytes or type(script) is CScript):
return script
assert False
class TestFrameworkScript(unittest.TestCase):
def test_base58encodedecode(self):
def check_base58(data, version):
self.assertEqual(base58_to_byte(byte_to_base58(data, version)), (data, version))
check_base58(bytes.fromhex('1f8ea1702a7bd4941bca0941b852c4bbfedb2e05'), 111)
check_base58(bytes.fromhex('3a0b05f4d7f66c3ba7009f453530296c845cc9cf'), 111)
check_base58(bytes.fromhex('41c1eaf111802559bad61b60d62b1f897c63928a'), 111)
check_base58(bytes.fromhex('0041c1eaf111802559bad61b60d62b1f897c63928a'), 111)
check_base58(bytes.fromhex('000041c1eaf111802559bad61b60d62b1f897c63928a'), 111)
check_base58(bytes.fromhex('00000041c1eaf111802559bad61b60d62b1f897c63928a'), 111)
check_base58(bytes.fromhex('1f8ea1702a7bd4941bca0941b852c4bbfedb2e05'), 0)
check_base58(bytes.fromhex('3a0b05f4d7f66c3ba7009f453530296c845cc9cf'), 0)
check_base58(bytes.fromhex('41c1eaf111802559bad61b60d62b1f897c63928a'), 0)
check_base58(bytes.fromhex('0041c1eaf111802559bad61b60d62b1f897c63928a'), 0)
check_base58(bytes.fromhex('000041c1eaf111802559bad61b60d62b1f897c63928a'), 0)
check_base58(bytes.fromhex('00000041c1eaf111802559bad61b60d62b1f897c63928a'), 0)
| syscoin/syscoin | test/functional/test_framework/address.py | Python | mit | 6,052 |
from textx.exceptions import TextXSemanticError
def query_processor(query):
    if query.condition is not None:
query.condition.conditionName = adapter_for_query(query)
for query in query.parent.queries:
if (not hasattr(query, 'property')) and (query.sortBy not in query.parent.properties):
            line, col = query.parent._tx_metamodel.parser.pos_to_linecol(
                query._tx_position)
raise TextXSemanticError("ERROR: (at %d, %d) Object %s has no property named %s." %
(line, col, query.parent.object.name, query.parent.property.name))
elif (not hasattr(query, 'sortBy')) and (query.sortBy not in query.parent.properties):
            line, col = query.parent._tx_metamodel.parser.pos_to_linecol(
                query._tx_position)
raise TextXSemanticError("ERROR: (at %d, %d) Object %s has no property named %s." %
(line, col, query.parent.object.name, query.parent.property.name))
else:
return True
def adapter_for_query(queryObject):
try:
return {
'lowerThan': 'lt',
'greaterThan': 'gt',
'lessEqual': 'le',
'greaterEqual': 'ge',
'equal': 'e'
}[queryObject.condition.conditionName]
    except (AttributeError, KeyError):
return queryObject.condition.conditionName
class Query(object):
    def __init__(self, parent, name, property=None, condition=None, sortBy=None, order=None, rangeFrom=None, rangeTo=None):
        self.name = name
        self.parent = parent
        self.property = property
        self.condition = condition
        self.sortBy = sortBy
        self.order = order
        self.rangeFrom = rangeFrom
        self.rangeTo = rangeTo
| theshammy/GenAn | src/concepts/query.py | Python | mit | 1,811 |
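A note on adapter_for_query above: the try/except around the dict lookup is equivalent to dict.get with the original name as the default. A hedged sketch of that design choice, with the mapping copied from the function:

```python
CONDITION_MAP = {'lowerThan': 'lt', 'greaterThan': 'gt', 'lessEqual': 'le',
                 'greaterEqual': 'ge', 'equal': 'e'}

def adapt(condition_name):
    # fall back to the raw condition name when it is already in short form
    return CONDITION_MAP.get(condition_name, condition_name)

print(adapt('lowerThan'))  # lt
print(adapt('lt'))         # lt (unmapped names pass through)
```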
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'User'
db.create_table(u'accounts_user', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('password', self.gf('django.db.models.fields.CharField')(max_length=128)),
('last_login', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('is_superuser', self.gf('django.db.models.fields.BooleanField')(default=False)),
('email', self.gf('django.db.models.fields.EmailField')(unique=True, max_length=254)),
('first_name', self.gf('django.db.models.fields.CharField')(max_length=30, null=True)),
('last_name', self.gf('django.db.models.fields.CharField')(max_length=254, null=True)),
('date_joined', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('is_staff', self.gf('django.db.models.fields.BooleanField')(default=False)),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('is_colector', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal(u'accounts', ['User'])
# Adding M2M table for field groups on 'User'
m2m_table_name = db.shorten_name(u'accounts_user_groups')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('user', models.ForeignKey(orm[u'accounts.user'], null=False)),
('group', models.ForeignKey(orm[u'auth.group'], null=False))
))
db.create_unique(m2m_table_name, ['user_id', 'group_id'])
# Adding M2M table for field user_permissions on 'User'
m2m_table_name = db.shorten_name(u'accounts_user_user_permissions')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('user', models.ForeignKey(orm[u'accounts.user'], null=False)),
('permission', models.ForeignKey(orm[u'auth.permission'], null=False))
))
db.create_unique(m2m_table_name, ['user_id', 'permission_id'])
def backwards(self, orm):
# Deleting model 'User'
db.delete_table(u'accounts_user')
# Removing M2M table for field groups on 'User'
db.delete_table(db.shorten_name(u'accounts_user_groups'))
# Removing M2M table for field user_permissions on 'User'
db.delete_table(db.shorten_name(u'accounts_user_user_permissions'))
models = {
u'accounts.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_colector': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '254', 'null': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['accounts']
| luanfonceca/econet | econet/accounts/migrations/0001_initial.py | Python | mit | 5,920 |
# https://leetcode.com/problems/valid-parentheses/
class Solution(object):
def isValid(self, s):
"""
:type s: str
:rtype: bool
"""
if not s:
return True
stack = []
for i in xrange(len(s)):
            # an opening bracket starts a deeper nesting level, so push it
            if s[i] in "([{":
                stack.append(s[i])
            # otherwise it must be a closing bracket: if the stack is empty
            # there is nothing to close; if not, pop and check that the
            # popped element matches the current closing bracket
else:
if len(stack) == 0:
return False
last = stack.pop()
if s[i] == ")" and last != "(": return False
if s[i] == "]" and last != "[": return False
if s[i] == "}" and last != "{": return False
return len(stack) == 0
| young-geng/leet_code | problems/20_valid-parentheses/main.py | Python | mit | 920 |
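An equivalent, self-contained variant of the matcher above that uses a pairs dict instead of the three explicit comparisons; a hedged sketch for comparison, not the repo's submission. Like the original, it assumes the input contains only bracket characters.

```python
def is_valid(s):
    pairs = {')': '(', ']': '[', '}': '{'}
    stack = []
    for ch in s:
        if ch in '([{':
            stack.append(ch)
        elif not stack or stack.pop() != pairs[ch]:
            return False
    return not stack  # every opener must have been closed

assert is_valid("()[]{}")
assert not is_valid("([)]")
assert is_valid("")
```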
from .base import ScannerBase
class WhoisAtiTnScanner(ScannerBase):
def __init__(self, *args):
super(WhoisAtiTnScanner, self).__init__(*args)
self._tokenizer += [
'skip_empty_line',
'scan_available',
'scan_disclaimer',
'scan_keyvalue'
]
def scan_available(self):
if self._input.skip("^Domain (.+) not found"):
self._ast['status:available'] = True
return True
def scan_disclaimer(self):
if self._input.match("All rights reserved"):
self._ast['field:disclaimer'] = "\n".join(self._scan_lines_to_array("(.+)\n"))
return True
| huyphan/pyyawhois | yawhois/scanner/whois_ati_tn.py | Python | mit | 674 |
import sys
from django.core.management.base import BaseCommand, CommandError
import nflgame
from terminaltables import AsciiTable
from ...models import Player, Team, Season, Week, WeeklyStats
class Command(BaseCommand):
help = 'takes option position, displays top players as table'
def add_arguments(self, parser):
        # Positional argument: the player position to filter on
parser.add_argument('position', nargs=1)
def handle(self, *args, **options):
p = options['position']
if p:
Player.show_top_players(position=p[0])
else:
Player.show_top_players()
| johnshiver/football_tools | football/core/management/commands/show_top_players.py | Python | mit | 607 |
from django.apps import AppConfig
class PerscriptionsConfig(AppConfig):
name = 'prescriptions'
| jimga150/HealthNet | HealthNet/prescriptions/apps.py | Python | mit | 101 |
from unittest import TestCase
from safeurl.core import getRealURL
class MainTestCase(TestCase):
def test_decodeUrl(self):
self.assertEqual(getRealURL('http://bit.ly/1gaiW96'),
'https://www.yandex.ru/')
def test_decodeUrlArray(self):
self.assertEqual(
getRealURL(['http://bit.ly/1gaiW96', 'http://bit.ly/1gaiW96']),
['https://www.yandex.ru/', 'https://www.yandex.ru/'])
def test_errorDecodeUrl(self):
self.assertEqual(getRealURL('http://bit.ly.wrong/wrong'),
'Failed')
def test_errorDecodeUrlArray(self):
self.assertEqual(
getRealURL(
['http://bit.ly.wrong/wrong', 'http://bit.ly.wrong/wrong']),
['Failed', 'Failed'])
def test_errorWithOkDecodeUrlArray(self):
self.assertEqual(
getRealURL(['http://bit.ly.wrong/wrong', 'http://bit.ly/1gaiW96',
'http://bit.ly.wrong/wrong']),
['Failed', 'https://www.yandex.ru/', 'Failed'])
| FrodoTheTrue/safeurl | tests/tests.py | Python | mit | 1,050 |
from django.core.management import call_command
import pytest
import septentrion
def test_showmigrations_command_override(mocker):
mock_django_handle = mocker.patch(
'django.core.management.commands.showmigrations.Command.handle')
mock_show_migrations = mocker.patch(
'septentrion.show_migrations', return_value=b'')
call_command('showmigrations')
assert mock_django_handle.called is False
assert mock_show_migrations.called is True
@pytest.mark.parametrize("manage", [True, False, None])
def test_north_manage_migrations(mocker, settings, manage):
if manage is not None:
settings.NORTH_MANAGE_DB = manage
if manage is None and hasattr(settings, 'NORTH_MANAGE_DB'):
del settings.NORTH_MANAGE_DB
mock = mocker.patch('septentrion.show_migrations', return_value=b'')
call_command('showmigrations')
assert mock.called == bool(manage)
def test_showmigrations_schema_not_inited(capsys, mocker):
mock_version = mocker.patch(
'septentrion.db.get_current_schema_version')
# schema not inited
mock_version.return_value = None
call_command('showmigrations')
captured = capsys.readouterr()
assert 'Current version is None' in captured.out
def test_showmigrations_schema(capsys, mocker):
# schema inited
mock_version = mocker.patch(
'septentrion.db.get_current_schema_version')
mock_version.return_value = septentrion.versions.Version.from_string('1.1')
mock_plan = mocker.patch(
'septentrion.core.build_migration_plan')
mock_plan.return_value = [
{
'version': "Version 1.2",
'plan': [
('a-ddl.sql', True, '/somewhere/a-ddl.sql', False),
('b-ddl.sql', False, '/somewhere/b-ddl.sql', True),
]
},
{
'version': "Version 1.3",
'plan': [
('c-ddl.sql', False, '/somewhere/c-ddl.sql', False),
]
}
]
call_command('showmigrations')
captured = capsys.readouterr()
assert "Current version is 1.1" in captured.out
assert "Target version is 1.3" in captured.out
assert "Version 1.2" in captured.out
assert "[X] \x1b[0ma-ddl.sql" in captured.out
assert "[ ] \x1b[0mb-ddl.sql" in captured.out
assert "Version 1.3" in captured.out
assert "[ ] \x1b[0mc-ddl.sql" in captured.out
| novafloss/django-north | tests/test_showmigrations_command.py | Python | mit | 2,400 |
# pylint: disable=missing-docstring
import unittest
import numpy as np
# pylint bug on next line
from tensorflow.python.client import device_lib # pylint: disable=no-name-in-module
from cleverhans.devtools.checks import CleverHansTest
HAS_GPU = 'GPU' in {x.device_type for x in device_lib.list_local_devices()}
class TestMNISTTutorialKeras(CleverHansTest):
def test_mnist_tutorial_keras(self):
import tensorflow as tf
from cleverhans_tutorials import mnist_tutorial_keras
# Run the MNIST tutorial on a dataset of reduced size
test_dataset_indices = {'train_start': 0,
'train_end': 5000,
'test_start': 0,
'test_end': 333,
'nb_epochs': 2,
'testing': True}
g = tf.Graph()
with g.as_default():
np.random.seed(42)
report = mnist_tutorial_keras.mnist_tutorial(**test_dataset_indices)
# Check accuracy values contained in the AccuracyReport object
self.assertTrue(report.train_clean_train_clean_eval > 0.90)
self.assertTrue(report.train_clean_train_adv_eval < 0.05)
self.assertTrue(report.train_adv_train_clean_eval > 0.90)
self.assertTrue(report.train_adv_train_adv_eval > 0.30)
atol_fac = 5e-2 if HAS_GPU else 1e-6
g = tf.Graph()
with g.as_default():
np.random.seed(42)
report_2 = mnist_tutorial_keras.mnist_tutorial(**test_dataset_indices)
self.assertClose(report.train_clean_train_clean_eval,
report_2.train_clean_train_clean_eval,
atol=atol_fac * 1)
self.assertClose(report.train_clean_train_adv_eval,
report_2.train_clean_train_adv_eval,
atol=atol_fac * 1)
self.assertClose(report.train_adv_train_clean_eval,
report_2.train_adv_train_clean_eval,
atol=atol_fac * 1)
self.assertClose(report.train_adv_train_adv_eval,
report_2.train_adv_train_adv_eval,
atol=atol_fac * 1)
if __name__ == '__main__':
unittest.main()
| openai/cleverhans | tests_tf/test_mnist_tutorial_keras.py | Python | mit | 2,129 |
# https://canvas.instructure.com/doc/api/assignments.html
from datetime import datetime
from canvas.core.courses import get_courses, get_courses_whitelisted, get_course_people, get_courses_by_account_id
from canvas.core.io import write_xlsx_file, tada
from canvas.core.assignments import get_assignments
def assignments_turnitin_msonline_list():
terms = ['2017-1SP']
programs = ['NFNPO', 'NCMO']
synergis = True
course_whitelist = get_courses_whitelisted([])
header = ['term', 'program', 'SIS ID', 'course name', 'assignment name', 'assignment URL', 'due date', 'points',
'group assignment', 'faculty of record']
rows = []
for course in course_whitelist or get_courses(terms, programs, synergis):
course_id = course['id']
if not get_course_people(course_id, 'student'):
continue
course_sis_id = course['sis_course_id']
program = course['course_sis_info']['program']
for assignment in get_assignments(course_id):
if 'external_tool' in assignment['submission_types']:
row = [terms[0],
program,
course_sis_id,
course['name'],
assignment['name'],
assignment['html_url'],
assignment['due_at'][0:10] if assignment['due_at'] else '',
assignment['points_possible'] if assignment['points_possible'] else '',
'X' if 'group_category_id' in assignment and assignment['group_category_id'] else '',
', '.join([p['name'] for p in get_course_people(course_id, 'Faculty of record')])]
rows.append(row)
print(row)
write_xlsx_file('turnitin_assignments_spring_{}'
.format(datetime.now().strftime('%Y.%m.%d.%H.%M.%S')), header, rows)
def assignments_turnitin_msonline_list_dev():
accounts = {'DEV FNPO': '168920', 'DEV CMO': '168922'}
header = ['program', 'course name', 'assignment name', 'assignment URL', 'points']
rows = []
for account in accounts:
for course in get_courses_by_account_id(accounts[account], 'DEFAULT'):
course_id = course['id']
for assignment in get_assignments(course_id):
if 'external_tool' in assignment['submission_types']:
row = [
account,
course['name'],
assignment['name'],
assignment['html_url'],
assignment['points_possible'] if assignment['points_possible'] else '']
rows.append(row)
print(row)
write_xlsx_file('turnitin_assignments_spring_dev_{}'
.format(datetime.now().strftime('%Y.%m.%d.%H.%M.%S')), header, rows)
if __name__ == '__main__':
# assignments_turnitin_msonline_list()
assignments_turnitin_msonline_list_dev()
tada()
| dgrobani/py3-canvaslms-api | assignments/assignments_turnitin_msonline_list.py | Python | mit | 3,100 |
from django.conf.urls import patterns, include, url
from django.conf import settings
# Note: importing via contacts.profile would cause a name clash here, since contacts is also the name of this module
from profile import ProfileView
from contacts import ContactsView
from authen import Authenticate
strid = settings.CONTACT_URL['strid']
user = settings.CONTACT_URL['user']
contact = settings.CONTACT_URL['contact']
auth = settings.CONTACT_URL['auth']
urlpatterns = patterns('',
url(r'^api/'+auth+'$', Authenticate.as_view()),
url(r'^api/(?P<'+strid+r'>\w{16})/$', ProfileView.as_view()),
url(r'^api/(?P<'+strid+r'>\w{16})/(?P<'+contact+r'>\d+)/$', ContactsView.as_view()),
url(r'^(?P<'+user+r'>\w{5,18})/(?P<'+strid+r'>\w{16})/$', ProfileView.as_view()),
url(r'^(?P<'+user+r'>\w{5,18})/(?P<'+strid+r'>\w{16})/(?P<'+contact+r'>\d+)/$', ContactsView.as_view()),
)
| sharehub/DBRest | dbrest/contacts/urls.py | Python | mit | 858 |
#!/usr/bin/env python3
import logging
import os
import urllib.parse
import urllib.request
import tarfile
from tooldog import TMP_DIR
from .utils import *
LOGGER = logging.getLogger(__name__)
class CodeCollector(object):
"""
Class to download source code from a https://bio.tools entry
"""
ZIP_NAME = "tool.zip"
TAR_NAME = "tool.tar"
TMP_NAME = "tmp"
def __init__(self, biotool):
"""
:param biotool: Biotool object
:type biotool: :class:`tooldog.biotool_model.Biotool`
"""
self.biotool = biotool
def _make_tar(self, file_path, tarname):
with tarfile.open(tarname, mode='w') as archive:
archive.add(file_path, arcname=self.ZIP_NAME)
def _get_from_repository(self, url):
"""
Get source code from a repository link
:param url: url of the repository
:type url: STRING
"""
# Here we deal with repository, have to use regex to test the url and
# use appropriate strategy to get the code depending the type of repository
if "github.com" in url:
return self._get_from_github(url)
        else:
            LOGGER.warning("The url ({}) is not a GitHub url".format(url))
            LOGGER.warning("ToolDog only deals with GitHub repositories for the moment...")
def _get_from_github(self, url):
try:
zip_url = os.path.join(url, "archive/master.zip")
response = urllib.request.urlopen(zip_url)
data = response.read()
LOGGER.info('Writing data to zip file...')
zip_path = os.path.join(TMP_DIR, self.ZIP_NAME)
tar_path = os.path.join(TMP_DIR, self.TAR_NAME)
write_to_file(zip_path, data, 'wb')
LOGGER.info('Making tar...')
self._make_tar(zip_path, tar_path)
return tar_path
        except Exception:
            LOGGER.warning('Something went wrong with the following GitHub repository: {}'.format(zip_url))
def _get_from_source_code(self, url):
"""
Get source code from a source code link
:param url: url of the source code
:type url: STRING
"""
return None
def get_source(self):
"""
Retrieve source code of the tool using links provided in https://bio.tools
"""
source_code = None
links = self.biotool.informations.links
for link in links:
link_type = link.type.lower().translate(str.maketrans(' ', '_'))
try:
source_code = getattr(self, '_get_from_{}'.format(link_type))(link.url)
except AttributeError:
                LOGGER.warning(link_type + ' link type is not processed yet by ToolDog.')
if source_code is not None:
# For the moment, consider that if a source code has been found,
# we just leave the loop.
break
return source_code
| khillion/ToolDog | tooldog/analyse/code_collector.py | Python | mit | 2,950 |
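get_source() above dispatches on the bio.tools link type by building a method name and looking it up with getattr. A hedged, standalone sketch of that pattern; the class and names here are illustrative, not ToolDog's API.

```python
class Collector(object):
    def _get_from_repository(self, url):
        return 'cloned ' + url

    def get(self, link_type, url):
        # normalize 'Link Type' -> '_get_from_link_type' and dispatch;
        # link types without a handler simply return None
        name = '_get_from_' + link_type.lower().replace(' ', '_')
        handler = getattr(self, name, None)
        return handler(url) if handler else None

c = Collector()
print(c.get('Repository', 'https://github.com/x/y'))  # cloned https://github.com/x/y
print(c.get('Mailing list', 'x'))                     # None
```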
import pytest
import os
from polyglotdb import CorpusContext
acoustic = pytest.mark.skipif(
pytest.config.getoption("--skipacoustics"),
reason="remove --skipacoustics option to run"
)
def test_to_csv(acoustic_utt_config, export_test_dir):
export_path = os.path.join(export_test_dir, 'results_export.csv')
with CorpusContext(acoustic_utt_config) as g:
q = g.query_graph(g.phone).filter(g.phone.label == 'aa')
q = q.columns(g.phone.label.column_name('label'),
g.phone.duration.column_name('duration'),
g.phone.begin.column_name('begin'))
q = q.order_by(g.phone.begin.column_name('begin'))
q.to_csv(export_path)
# ignore ids
expected = [['label', 'duration', 'begin'],
['aa', 0.0783100000000001, 2.70424],
['aa', 0.12199999999999989, 9.32077],
['aa', 0.03981000000000279, 24.56029]]
with open(export_path, 'r') as f:
i = 0
for line in f.readlines():
line = line.strip()
if line == '':
continue
line = line.split(',')
print(line)
if i != 0:
line = [line[0], float(line[1]), float(line[2])]
assert line[0] == expected[i][0]
assert line[1:] == pytest.approx(expected[i][1:], 1e-3)
else:
assert line == expected[i]
i += 1
with CorpusContext(acoustic_utt_config) as g:
q = g.query_graph(g.phone).filter(g.phone.label == 'aa')
q = q.columns(g.phone.label,
g.phone.duration,
g.phone.begin)
q = q.order_by(g.phone.begin)
q.to_csv(export_path)
# ignore ids
expected = [['node_phone_label', 'node_phone_duration', 'node_phone_begin'],
['aa', 0.0783100000000001, 2.70424],
['aa', 0.12199999999999989, 9.32077],
['aa', 0.03981000000000279, 24.56029]]
with open(export_path, 'r') as f:
i = 0
for line in f.readlines():
line = line.strip()
print(line)
if line == '':
continue
line = line.split(',')
print(line)
if i != 0:
line = [line[0], float(line[1]), float(line[2])]
assert line[0] == expected[i][0]
assert line[1:] == pytest.approx(expected[i][1:], 1e-3)
else:
assert line == expected[i]
i += 1
@acoustic
def test_csv_vot(acoustic_utt_config, vot_classifier_path, export_test_dir):
export_path = os.path.join(export_test_dir, 'results_export_vot.csv')
with CorpusContext(acoustic_utt_config) as g:
g.reset_acoustics()
g.reset_vot()
stops = ['p', 't', 'k'] # , 'b', 'd', 'g']
g.encode_class(stops, 'stops')
g.analyze_vot(stop_label="stops",
classifier=vot_classifier_path,
vot_min=15,
vot_max=250,
window_min=-30,
window_max=30)
q = g.query_graph(g.phone).filter(g.phone.label.in_(stops)).columns(g.phone.vot.begin,
g.phone.vot.end).order_by(g.phone.begin)
q.to_csv(export_path)
p_true = [(1.593, 1.649), (1.832, 1.848), (1.909, 1.98), (2.116, 2.137), (2.687, 2.703),
(2.829, 2.8440000000000003), (2.934, 2.9490000000000003), (3.351, 3.403), (5.574, 5.593999999999999),
(6.207, 6.2219999999999995), (6.736, 6.755999999999999), (7.02, 7.0489999999999995), (9.255, 9.287),
(9.498, 9.514999999999999), (11.424, 11.479999999999999), (13.144, 13.206), (13.498, 13.523),
(25.125, 25.14)]
p_csv = []
with open(export_path, 'r') as f:
f.readline()
for line in f:
line = line.strip()
if line == '':
continue
line = line.split(',')
p_csv.append((float(line[0]), float(line[1])))
for t, r in zip(p_true, p_csv):
assert r == t
| PhonologicalCorpusTools/PyAnnotationGraph | tests/test_io_csv.py | Python | mit | 4,206 |
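test_to_csv above compares parsed floats with pytest.approx(expected, 1e-3), where the second positional argument is the relative tolerance. A hedged mini-example of that comparison semantics:

```python
import pytest

# scalar and list forms both work; 1e-3 here is the relative tolerance
assert 0.0783 == pytest.approx(0.0783100000000001, 1e-3)
assert [2.70424, 9.32077] == pytest.approx([2.704, 9.321], 1e-3)
```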
import sys
import socket
import os
import os.path
from optparse import OptionParser
#import scipy as scp
import numpy as np
import matplotlib.pyplot as plt
import pylab
import genome_management.kg_file_handling as kgf
import math
def file_exists(ls, file):
    return 1 if file in ls else 0
def mkdir(dir,file):
ls_dir = os.listdir(dir)
if(not(file_exists(ls_dir,file))):
command = "mkdir %s/%s"%(dir,file)
os.system(command)
return "%s/%s"%(dir,file)
class region_info:
def __init__(self,name,chr,start,end,TID):
self.name = name
self.chr = chr
self.start = start
self.end = end
self.frequencies_by_pop = {}
self.cps_by_genome = {}
self.transcript_id = TID
self.TID = TID
self.cps_all = []
self.pop_by_genome = {}
def add_info_from_genome(self,cp,genome):
if(not(genome.pop in self.frequencies_by_pop)):
self.frequencies_by_pop[genome.pop] = []
self.frequencies_by_pop[genome.pop].append(cp)
self.cps_by_genome[genome.genome_name] = cp
self.pop_by_genome[genome.genome_name] = genome.pop
self.cps_all.append(cp)
#def get_var(self):
# self.vars = {}
#self.cps_all = np.array(self.cps_all)
# varT = self.cps_all.var()
# self.vars["all"]=varT
# self.means = {}
# meanT = self.cps_all.mean(1)
# self.means["all"] = meanT
# for pop,copies_by_pop in self.frequencies_by_pop.iteritems():
# copies_by_pop = np.array(copies_by_pop)
# self.vars[pop] = self.summary[:,pop_index].var(1)
# self.means[pop] = self.summary[:,pop_index].mean(1)
# self.vsts = {}
# self.fsts = {}
# for pop,pop_index in self.indivs_by_pop.iteritems():
# for pop_2,pop_index_2 in self.indivs_by_pop.iteritems():
# n_pop = float(pop_index.shape[0])
# n_pop_2 = float(pop_index_2.shape[0])
# both_pops = np.r_[self.indivs_by_pop[pop],self.indivs_by_pop[pop_2]]
# var_both = self.summary[:,both_pops].var(1)
# N = n_pop+n_pop_2
# self.vsts["_".join([pop,pop_2])] = (var_both - ((self.vars[pop]*n_pop+self.vars[pop_2]*n_pop_2)/N)) / var_both
def make_output_file(region,region_info,outdir,cell_line_info,genome_info):
outfile_name = "%s/%s_pop_summary.csv"%(outdir,region_info.name)
FOUT = open(outfile_name,'w')
FOUT.write("indiv,cp,pop,cell lines fixed, cell lines in Nitrogen,coverage\n")
for indiv,cp in region_info.cps_by_genome.iteritems():
pop = region_info.pop_by_genome[indiv]
output = indiv in cell_line_info and cell_line_info[indiv] or ""
output = "%s,%d,%s,%s,%f\n"%(indiv,cp,pop,output,genome_info.genomes[indiv].coverage)
FOUT.write(output)
print output
def make_simple_plot(region,region_info,outdir,cell_line_info,genome_info):
    plt.rc('grid',color='0.75',linestyle='-',linewidth='0.1')
f=plt.figure()
f.set_figwidth(6)
f.set_figheight(6)
axescolor = '#f6f6f6'
left, width = 0.1, 0.8
rect1 = [left, 0.1, width, 0.8] #left, bottom, width, height
ax = f.add_axes(rect1)
colors = {'Yoruba':'r','European':'b','Asian':'g'}
for indiv,cp in region_info.cps_by_genome.iteritems():
cvg = genome_info.genomes[indiv].coverage
fixed_cell_line = cell_line_info[indiv].split(",")[0].rstrip() == "yes"
liquid_nitrogen_cell_line = cell_line_info[indiv].split(",")[1].rstrip() == "yes"
color = colors[genome_info.genomes[indiv].pop]
ax.plot(np.array([cvg]),np.array([cp]),'%so'%(color))
ax.set_xlabel("cvg",size=20)
ax.set_ylabel("copy",size=20)
ax.set_title("%s"%(region_info.name),size=20)
f.savefig("%s/%s_copy_vs_cvg.pdf"%(outdir,region_info.name),format='pdf')
plt.close(1)
def make_histogram(region,region_info,outdir,great_ape_gene_hashes):
print region_info.name
    plt.rc('grid',color='0.75',linestyle='-',linewidth='0.1')
f=plt.figure()
f.set_figwidth(10)
f.set_figheight(10)
nbins=0
mx=0
mn=100
do_apes=True
great_ape_cps = {}
if do_apes:
for ape,gene_hash in great_ape_gene_hashes.iteritems():
if not region_info.TID in gene_hash:
do_apes=False
print "ID does not exist for APE"
print region_info.TID
break
great_ape_cps[ape] = gene_hash[region_info.TID]
mx=int(max(great_ape_cps[ape],mx))
mn=int(min(great_ape_cps[ape],mn))
axescolor = '#f6f6f6'
left, width = 0.1, 0.8
rect1 = [left, 0.1, width, 0.8] #left, bottom, width, height
for pop,freq_info in region_info.frequencies_by_pop.iteritems():
#nbins = int(round(max(nbins,max(freq_info))))
mx=int(max(max(freq_info),mx))
mn=int(min(min(freq_info),mn))
#nbins+=1
nbins = mx-mn+1
labels = []
pop_to_hists = {}
for pop,freq_info in region_info.frequencies_by_pop.iteritems():
print pop,freq_info
pop_to_hists[pop] = np.histogram(np.array(freq_info),bins=nbins,range=[mn,mx],normed=True,new=True)[0]
print np.histogram(np.array(freq_info),bins=nbins,range=[mn,mx],normed=True,new=True)
print pop_to_hists[pop]
x = np.arange(mn,mx+1)
width=.25
print x
for i in range(x.shape[0]):
labels.append(str(x[i]))
ax = f.add_axes(rect1)
bars = {}
leg = []
leg_colors = []
lines = []
k=0
    colors = ['r','g','b','orange']
starty = .9
sub=.03
i=0
for pop,freqs in region_info.frequencies_by_pop.iteritems():
med = np.median(np.array(freqs))
sig2 = np.array(freqs).var()
leg.append("%s med: %d var: %.1f"%(pop,int(med),sig2))
i+=1
for pop,hist in pop_to_hists.iteritems():
bars[pop] = ax.bar(x+k*width,hist,width,color=colors[k],alpha=0.5)
leg_colors.append(colors[k])
#ax.legend(bars[pop][0],pop)
lines.append(bars[pop][0])
k+=1
ape_colors = ['orange','purple','yellow','brown']
k=0
if do_apes:
for ape,cp in great_ape_cps.iteritems():
bars_ape = ax.bar(np.array([cp]),np.array([.1]),width/2,color=ape_colors[k],alpha=.8)
leg.append("%s %f"%(ape,cp))
lines.append(bars_ape[0])
k+=1
ax.set_xticks(x+width*k/2)
ax.set_xticklabels(labels,size=20)
ax.grid(color='k',linestyle='--',linewidth=1,alpha=.3)
yticklabels = [str(x) for x in np.arange(0,1,.1)]
ax.set_yticklabels(yticklabels,size=20)
ax.set_ylabel("%",size=20)
ax.set_xlabel("cp number",size=20)
ax.legend(lines,leg)
ax.set_title("%s"%(region_info.name),size=20)
f.savefig("%s/%s_pop_hist.pdf"%(outdir,region_info.name),format='pdf')
plt.close(1)
return
k=0
for pop,ihist in percent_hists.iteritems():
percent_hists[pop] = ihist/ihist.sum()
#jhplot(x,hist,"|%s"%(colors[k]))
#hist(x)
vlines(x+float(k)/3,zeros,percent_hists[pop],color=colors[k],linewidth=7)
k+=1
leg.append(pop)
#legend(leg)
title("percent")
print leg
legend(leg)
f.get_axes()[0].xaxis.set_ticks(range(21))
#f.add_axes([0,40,0,1],xticks=[0,1,2,3,4,5,6,8,9,10,11,12,13,14,15,16,17,18,19,20],label='axis2',axisbg='g')
#[0,1,2,3,4,5,6,8,9,10,11,12,13,14,15,16,17,18,19,20])
f=figure(2)
k=0
for pop,ihist in mode_hists.iteritems():
mode_hists[pop] = ihist/ihist.sum()
#plot(x,hist,"|%s"%(colors[k]))
#hist(x)
vlines(x+float(k)/5,zeros,mode_hists[pop],color=colors[k],linewidth=7)
k+=1
legend(leg)
title("Predicted copy number %s"%(name))
xlabel("predicted copy number")
ylabel("percentage of population")
f.get_axes()[0].xaxis.set_ticks(range(21))
savefig("%smode_hist.png"%(name),format='png')
print percent_hists
print mode_hists
def load_plot_regions(fn_regions):
if fn_regions == None: return []
plot_regions = []
for line in open(fn_regions,'r').readlines():
if line[0] == "#": continue
print line
sline = line.split()
uID = "%s:"%(sline[1])
uID += ":".join(sline[2:5])
plot_regions.append(uID)
print uID
return plot_regions
def get_transcript_ids(fn_transcript_id):
print fn_transcript_id
gene_id_list = open(fn_transcript_id,'r').readlines()
transcript_ids = {}
for gene_info in gene_id_list:
(TID,name,chr,start,end,unmasked_len,GCp) = gene_info.split()
transcript_ids["%s:%s:%s"%(chr,start,end)] = {"tid":TID,"chr":chr,"start":start,"end":end,"unmasked":unmasked_len,"GC":GCp}
return transcript_ids
def get_cp_by_gene(gene_file):
cps_by_TID = {}
for line in open(gene_file,'r').readlines():
if len(line.split()) == 0: continue
(chr,start,end,TID,cp) = line.split()
cps_by_TID[TID] = float(cp)
return cps_by_TID
def get_calkan_cp_calls(fn_great_ape_cps_files):
calkan_cp_calls = {}
if(fn_great_ape_cps_files!=None):
for line in open(fn_great_ape_cps_files,'r').readlines():
(genome,gene_file) = line.split()
calkan_cp_calls[genome] = get_cp_by_gene(gene_file)
return calkan_cp_calls
if __name__=='__main__':
opts = OptionParser()
opts.add_option('','--input_file_name',dest='input_file_name')
opts.add_option('','--input_genomes',dest='fn_input_genomes')
opts.add_option('','--outdir',dest='outdir')
opts.add_option('','--sex_pop_index',dest='fn_sex_pop_index')
#opts.add_option('','--analysis_dir',dest='fn_analysis_dir')
opts.add_option('','--input_regions',dest='input_regions',default=None)
opts.add_option('','--out_file',dest='outfile',default=None)
opts.add_option('','--regress',dest='regress',action='store_true',default=False)
opts.add_option('','--plot_regions',dest='plot_regions',default=None)
opts.add_option('','--do_plotting',action="store_true",dest='do_plotting',default=False)
opts.add_option('','--great_ape_cps_files',dest='fn_great_ape_cps_files',default=None)
opts.add_option('','--cell_line_information',dest='fn_cell_line_info',default=None)
opts.add_option('','--output_coverage',dest='output_cvg',action='store_true',default=False)
opts.add_option('','--simple_plot',dest='simple_plot',action='store_true',default=False)
opts.add_option('','--input_dir',dest='input_dir',default=None)
#opts.add_option('','--transcript_id_file',dest='fn_transcript_id')
#opts.add_option('','--call_metric',dest='outfile',default="summary")
#opts.add_option('','--out_genomes',dest='fn_out_genomes')
(o, args) = opts.parse_args()
great_ape_cps = get_calkan_cp_calls(o.fn_great_ape_cps_files)
cell_line_info = {}
if o.fn_cell_line_info != None:
read_cell_line_info = open(o.fn_cell_line_info,'r').readlines()
for cell_line_line in read_cell_line_info:
(name,cells_fixed,in_nitrogen) = cell_line_line.split(",")
cell_line_info[name] = "%s,%s"%(cells_fixed,in_nitrogen.rstrip())
print cell_line_info[name]
mkdir("./",o.outdir)
print "loading genome information"
genome_info = kgf.genome_info(o.fn_input_genomes,o.fn_sex_pop_index,QC_check=o.output_cvg)
print "done"
regions_by_uID = {}
#print o.input_regions
expected_len = 0
if o.input_regions != None:
for l in open(o.input_regions,'r').readlines():
expected_len+= (l[0]!="#") and 1
input_genomes = open(o.fn_input_genomes,'r').readlines()
plot_regions = load_plot_regions(o.plot_regions)
outstr = "\t".join(["name", "chr", "start", "end", "TID"])
for input_genomes_line in input_genomes:
(genome_id,fn_wssd_dir,fn_bac_dir,chunk_dir,primary_analysis_dir) = input_genomes_line.split()
if genome_id[0] == "#": continue
genome_ob = genome_info.genomes[genome_id]
if o.input_dir is None:
input_file = "%s/%s/ml_region_analysis/%s"%(primary_analysis_dir,genome_id,o.input_file_name)
else:
input_file = "%s/%s_%s"%(o.input_dir,o.input_file_name,genome_id)
print input_file
##########check the output file exists
#if(not(os.path.exists("%s/%s/ml_region_analysis/%s"%(primary_analysis_dir,genome_id,o.input_file_name)))):
if not os.path.exists(input_file):
print "%s does not appear to exist" % (input_file)
print
        print '%s may have failed previous QC or may still be running' % (genome_id)
continue
##############check the output file is of the correct length
    #################here we could also put "take the first n"
#analyzed_by_ml_lines = open("%s/%s/ml_region_analysis/%s"%(primary_analysis_dir,genome_id,o.input_file_name)).readlines()
analyzed_by_ml_lines = open(input_file, "r").readlines()
if(len(analyzed_by_ml_lines) != expected_len):
print "expected:%d encountered:%d" % (expected_len, len(analyzed_by_ml_lines))
print "expected number of lines in %s does not match that in %s" % (analyzed_by_ml_lines, o.input_regions)
#continue
print "\t getting information %s" %(genome_id)
outstr += "\t%s" % genome_id
for analysis_line in analyzed_by_ml_lines:
(name,TID,chr,start,end,cp,bywnd_cp,median,ll,regressed_cp,regressed_cp_by_wnd,regressed_cp_median) = analysis_line.split()
if o.regress:
cp = float(regressed_cp_median)
else:
cp = float(median)
uID = "%s:%s:%s:%s"%(TID,chr,start,end)
if(not(uID in regions_by_uID)):
regions_by_uID[uID] = region_info(name,chr,start,end,TID)
regions_by_uID[uID].add_info_from_genome(cp,genome_ob)
outstr+="\n"
for region_uID, region_inf in regions_by_uID.iteritems():
outstr+="\t".join([region_inf.name,region_inf.chr,region_inf.start,region_inf.end,region_inf.transcript_id])
#for genome_id,genome in genome_info.genomes.iteritems():
for input_genomes_line in input_genomes:
(genome_id,fn_wssd_dir,fn_bac_dir,chunk_dir,primary_analysis_dir) = input_genomes_line.split()
if genome_id[0] =="#": continue
if genome_id in region_inf.cps_by_genome:
#print genome_id
outstr+="\t%f"%(region_inf.cps_by_genome[genome_id])
else:
print "ERROR genome_id not in region_info"
print genome_id
print region_inf.cps_by_genome
sys.exit(1)
outstr+="\n"
# print outstr
if o.outfile != None:
open("%s/%s"%(o.outdir,o.outfile),'w').write(outstr)
#print percent_hists[pop]
#print hist
# percent_hists[pop]=ihist + percent_hists[pop]
# mode_hists[pop][np.where(ihist==np.amax(ihist))[0]]+=1
|
EichlerLab/read_depth_genotyper
|
scripts/make_ml_output_summary.py
|
Python
|
mit
| 15,332
|
#!/usr/bin/env python
# a script to delete the contents of s3 buckets
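# usage (hypothetical bucket names):
#   python delete_contents.py my-bucket-1 my-bucket-2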
# import the sys and boto3 modules
import sys
import boto3
# create an s3 resource
s3 = boto3.resource('s3')
# iterate over the script arguments as bucket names
for bucket_name in sys.argv[1:]:
# use the bucket name to create a bucket object
bucket = s3.Bucket(bucket_name)
# delete the bucket's contents and print the response or error
for key in bucket.objects.all():
try:
response = key.delete()
            print(response)
except Exception as error:
            print(error)
|
managedkaos/AWS-Python-Boto3
|
s3/delete_contents.py
|
Python
|
mit
| 599
|
from data import *
from draw import *
img, hiden_x = get_img_class()
print img.shape
print img
d_idx = np.random.randint(0, 50)
x_x, obs_x, obs_y, obs_tfs, new_ob_x, new_ob_y, new_ob_tf, imgs = gen_data()
print show_dim(x_x)
print show_dim(obs_x)
print show_dim(obs_y)
print show_dim(obs_tfs)
print show_dim(new_ob_x)
print show_dim(new_ob_y)
print show_dim(new_ob_tf)
obss = zip([np.argmax(obx[d_idx]) for obx in obs_x],
[np.argmax(oby[d_idx]) for oby in obs_y],
[obtf[d_idx] for obtf in obs_tfs])
obss = [((x[0],x[1]), x[2]) for x in obss]
print "hidden number value ", np.argmax(x_x[d_idx])
draw_obs(obss, "test_obs.png")
img = imgs[d_idx]
draw(np.reshape(img, [L,L,1]), "test_orig.png")
print img
|
evanthebouncy/nnhmm
|
mnist_haar/check_data.py
|
Python
|
mit
| 735
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Abbas (Ar:User:Elph), 2012
import catlib ,pagegenerators
import wikipedia,urllib,gzip,codecs,re
import MySQLdb as mysqldb
import config
pagetop=u"'''تاریخ آخری تجدید:''''': ~~~~~ '''بذریعہ:''' [[user:{{subst:Currentuser}}|{{subst:Currentuser}}]]''\n\n"
pagetop+=u'\nفہرست 100 بلند پایہ صارفین بلحاظ شراکت بدون روبہ جات۔\n'
pagetop+=u'\nمزید دیکھیں: [[ویکیپیڈیا:رودادہائے ڈیٹابیس/فہرست ویکیپیڈیا صارفین بلحاظ شراکت|شماریات مع روبہ جات]]۔\n'
pagetop+=u'\n{| class="wikitable sortable"\n'
pagetop+=u'!شمار!!صارف!!شراکت\n|-\n'
pagedown=u'\n|}\n[[زمرہ:ویکیپیڈیا شماریات]]'
adress=u"ویکیپیڈیا:رودادہائے ڈیٹابیس/فہرست ویکیپیڈیا صارفین بلحاظ شراکت/بدون روبہ جات"
#adress=u"user:محمد شعیب/test44"
message=u"روبالہ:تجدید شماریات"
count=0
line_items=' '
rowfa=' '
rowic=' '
rowi=' '
rowit=' '
rowfi=' '
rowfia=' '
#---------------------------------------------- sql part--------------
site = wikipedia.getSite("ur")
query = "SELECT user_name, user_editcount FROM user WHERE user_name NOT IN (SELECT user_name FROM user_groups INNER JOIN user ON user_id = ug_user WHERE ug_group = 'bot') ORDER BY user_editcount DESC LIMIT 100;"
#query = "SELECT user_name, user_editcount FROM user WHERE user_name NOT 'روبہ خوش آمدید' AND user_name NOT IN (SELECT user_name FROM user_groups INNER JOIN user ON user_id = ug_user WHERE ug_group = 'bot') ORDER BY user_editcount DESC LIMIT 100;"
wikipedia.output(u'Executing query:\n%s' % query)
conn = mysqldb.connect("urwiki.labsdb", db = site.dbName(),
user = config.db_username,
passwd = config.db_password)
cursor = conn.cursor()
query = query.encode(site.encoding())
cursor.execute(query)
results = cursor.fetchall()
#---------------------------------------------- end of sql part---------
count=0
for row in results:
count+=1
rowi=unicode(str(row[0]),'UTF-8')
rowi2=unicode(str(row[1]),'UTF-8')
rowfa+=u'\n|'+str(count)+u'||[[user:'+rowi+u'|'+rowi+u']]||'
rowfa+=u'[[special:Contributions/{{subst:formatnum:'+rowi+u'}}|{{subst:formatnum:'+rowi2+u'}}]]\n|-\n'
text=rowfa.strip()
text=pagetop+text+pagedown
page = wikipedia.Page(site,adress)
page.put(text,message)
|
UrduWikipedia/DB_reports
|
EditCounterNoBots.py
|
Python
|
mit
| 2,518
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
ZetCode PyQt5 tutorial
In this example, we create three toggle buttons.
They will control the background colour of a
QFrame.
author: Jan Bodnar
website: zetcode.com
last edited: January 2015
"""
import sys
from PyQt5.QtWidgets import (QWidget, QPushButton,
QFrame, QApplication)
from PyQt5.QtGui import QColor
class Example(QWidget):
def __init__(self):
super().__init__()
self.initUI()
def initUI(self):
self.col = QColor(0, 0, 0)
redb = QPushButton('Red', self)
redb.setCheckable(True)
redb.move(10, 10)
redb.clicked[bool].connect(self.setColor)
        greenb = QPushButton('Green', self)
        greenb.setCheckable(True)
        greenb.move(10, 60)
        greenb.clicked[bool].connect(self.setColor)
blueb = QPushButton('Blue', self)
blueb.setCheckable(True)
blueb.move(10, 110)
blueb.clicked[bool].connect(self.setColor)
self.square = QFrame(self)
self.square.setGeometry(150, 20, 100, 100)
self.square.setStyleSheet("QWidget { background-color: %s }" %
self.col.name())
self.setGeometry(300, 300, 280, 170)
self.setWindowTitle('Toggle button')
self.show()
def setColor(self, pressed):
source = self.sender()
if pressed:
val = 255
else: val = 0
if source.text() == "Red":
self.col.setRed(val)
elif source.text() == "Green":
self.col.setGreen(val)
else:
self.col.setBlue(val)
self.square.setStyleSheet("QFrame { background-color: %s }" %
self.col.name())
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = Example()
sys.exit(app.exec_())
|
mskovacic/Projekti
|
raspberrypi/isprobavanje/pyqt5/Toggle_button.py
|
Python
|
mit
| 1,988
|
from django.contrib.auth.models import User
from django.db import models
from .utils import create_slug
class BaseModel(models.Model):
created = models.DateTimeField(auto_now_add=True)
last_updated = models.DateTimeField(auto_now=True)
    class Meta:
abstract = True
|
makaimc/txt2react
|
core/models.py
|
Python
|
mit
| 292
|
from django.views.generic import CreateView, DetailView
from .models import TestModel
class TestCreateView(CreateView):
template_name = 'test_tinymce/create.html'
fields = ('content',)
model = TestModel
class TestDisplayView(DetailView):
template_name = 'test_tinymce/display.html'
context_object_name = 'test_model'
model = TestModel
|
romanvm/django-tinymce4-lite
|
test_tinymce/views.py
|
Python
|
mit
| 363
|
from graph.graph_server import GraphServer
__all__ = ['GraphServer']
|
AndreasMadsen/bachelor-code
|
visualizer/__init__.py
|
Python
|
mit
| 71
|
import RPi.GPIO as GPIO
import time
# Configure the Pi to use the BCM (Broadcom) pin names, rather than the pin positions
GPIO.setmode(GPIO.BCM)
relay_pin = 18
GPIO.setup(relay_pin, GPIO.OUT)
try:
while True:
GPIO.output(relay_pin, True)
time.sleep(2)
GPIO.output(relay_pin, False)
time.sleep(2)
finally:
print("Cleaning up")
GPIO.cleanup()
|
simonmonk/hacking2
|
pi/ch07_relay_click.py
|
Python
|
mit
| 433
|
# import json
# import pandas as pd
import numpy as np
import os
from core.lda_engine import model_files
from pandas import DataFrame
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from core.keyword_db import keyword_dbs
def db_connect(base, model_name='dss'):
try:
path = 'sqlite:///' + os.path.join(os.getcwd(), base, keyword_dbs[model_name] + '.sqlite')
except KeyError:
path = 'sqlite:///' + os.path.join(os.getcwd(), base, model_files[model_name].split(".")[0] + '.sqlite')
print("Connecting to: ", path)
return create_engine(path)
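# Illustrative call (model name "dss" is the default above):
#   db_connect("databases", model_name="dss")
# looks up keyword_dbs["dss"] and returns an engine for
# 'sqlite:///<cwd>/databases/<that name>.sqlite'; names missing from
# keyword_dbs fall back to model_files via the KeyError branch.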
def toDataFrame(sql, session):
tmpt = session.execute(sql)
return DataFrame(tmpt.fetchall(), columns=tmpt.keys())
def get_database(model_name, return_keyword=False):
engine = db_connect("databases", model_name=model_name)
Session = sessionmaker(bind=engine)
session = Session()
doc = "select * from documents"
auth = "select * from authors"
Author = toDataFrame(auth, session)
Author.index = Author.id
Document = toDataFrame(doc, session)
Document.index = Document.id
Key_Auth = '''
select authors_id, keywords_id, keyword, first_name, last_name
from keywords k, documents_keywords dk, documents_authors da, authors a, documents d
where a.id = da.authors_id and d.id = da.documents_id and d.id = dk.documents_id and k.id = dk.keywords_id
'''
Key_Auth_alt = '''
select authors_id, keywords_id, keyword, first_name, last_name
from keywords k, documents_keywords dk, documents_authors da, authors a, documents d
where a.id = da.authors_id and d.id = da.documents_id and d.id = dk.documents_id and k.id = dk.keywords_id
'''
tmpt = session.execute(Key_Auth)
KA = DataFrame(tmpt.fetchall(), columns=list(tmpt.keys()))
Docu_Auth = '''
select authors_id, documents_id, first_name, last_name, title
from authors a, documents b, documents_authors c
where a.id=c.authors_id and c.documents_id=b.id;
'''
tmpt = session.execute(Docu_Auth)
DA = DataFrame(tmpt.fetchall(), columns=list(tmpt.keys()))
Key_Freq = '''
    select keywords.id, keyword, frequency
    from (select keywords_id, count(*) frequency from documents_keywords group by keywords_id) a, keywords
where keywords.id = a.keywords_id
'''
a = session.execute(Key_Freq)
Keyword = DataFrame(a.fetchall(), columns=list(a.keys()))
Keyword.index = Keyword.id
DocNum = session.execute('select count(*) from documents').first()[0]
    Keyword.loc[:, 'weight'] = np.log(DocNum / Keyword.frequency)
if not return_keyword:
return Author, Document, KA, DA
else:
return Author, Document, KA, DA, Keyword
def get_top_keywords(model_name, author_id, n):
engine = db_connect("databases", model_name=model_name)
Session = sessionmaker(bind=engine)
session = Session()
Key_Auth_ID = '''
select keyword, count(*) as frequency
from (select authors_id, keywords_id, keyword
from keywords k,
documents_keywords dk,
documents_authors da,
authors a,
documents d
where a.id = da.authors_id and
d.id = da.documents_id and
d.id = dk.documents_id and
k.id = dk.keywords_id and
authors_id = {}) as KA
group by keywords_id
order by frequency
'''.format(author_id)
tmpt = session.execute(Key_Auth_ID)
return DataFrame(tmpt.fetchall(), columns=list(tmpt.keys()))[:n].values.tolist()
|
conferency/find-my-reviewers
|
core/helper/tables.py
|
Python
|
mit
| 3,572
|
import numpy as np
import pywt
from scipy.misc import imresize
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
X_L = 10
L = 14
N_BATCH = 50
OBS_SIZE = 30
# ---------------------------- helpers
def vectorize(coords):
retX, retY = np.zeros([L]), np.zeros([L])
retX[coords[0]] = 1.0
retY[coords[1]] = 1.0
return retX, retY
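# Illustrative example (with L = 14 as set above): vectorize((2, 5)) returns
# (retX, retY) where retX is a length-L one-hot vector with retX[2] == 1.0
# and retY is a length-L one-hot vector with retY[5] == 1.0.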
# show dimension of a data object (list of list or a tensor)
def show_dim(lst1):
if hasattr(lst1, '__len__') and len(lst1) > 0:
return [len(lst1), show_dim(lst1[0])]
else:
try:
return lst1.get_shape()
except:
try:
return lst1.shape
except:
return type(lst1)
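# Illustrative example: show_dim([[1, 2, 3], [4, 5, 6]]) recurses to
# [2, [3, <type of the elements>]] -- outer length, inner length, then the
# element's type; tensors/arrays instead report get_shape()/shape.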
# -------------------------------------- making the datas
# assume X is already a 2D matrix
def mk_query(X):
avg = np.median(X)
X = X + avg
def query(O):
Ox, Oy = O
if X[Ox][Oy] > 0.0:
return [1.0, 0.0]
else:
return [0.0, 1.0]
return query
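# Illustrative behaviour on a hypothetical 2x2 input (median = 1.5):
#   q = mk_query(np.array([[1.0, -5.0], [2.0, 3.0]]))
#   q((0, 0))  # 1.0 + 1.5 > 0  -> [1.0, 0.0]
#   q((0, 1))  # -5.0 + 1.5 < 0 -> [0.0, 1.0]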
def sample_coord():
Ox, Oy = np.random.randint(0,L), np.random.randint(0,L)
if 0 <= Ox < L:
if 0 <= Oy < L:
return Ox, Oy
return sample_coord()
def gen_O(X):
query = mk_query(X)
Ox, Oy = sample_coord()
O = (Ox, Oy)
return O, query(O)
def get_img_class():
img, _x = mnist.train.next_batch(1)
img = np.reshape(img[0], [28, 28])
img = imresize(img, (L,L)) / 255.0
A,(B,C,D) = pywt.dwt2(img, 'haar')
img = np.reshape(np.array([A,B,C,D]), [L, L])
return img, _x
def gen_data():
x = []
obs_x = [[] for i in range(OBS_SIZE)]
obs_y = [[] for i in range(OBS_SIZE)]
obs_tfs = [[] for i in range(OBS_SIZE)]
new_ob_x = []
new_ob_y = []
new_ob_tf = []
imgs = []
for bb in range(N_BATCH):
# generate a hidden variable X
# get a single thing out
img, _x = get_img_class()
imgs.append(img)
# add to x
x.append(_x[0])
# generate new observation
_new_ob_coord, _new_ob_lab = gen_O(img)
_new_ob_x, _new_ob_y = vectorize(_new_ob_coord)
new_ob_x.append(_new_ob_x)
new_ob_y.append(_new_ob_y)
new_ob_tf.append(_new_ob_lab)
# generate observations for this hidden variable x
for ob_idx in range(OBS_SIZE):
_ob_coord, _ob_lab = gen_O(img)
_ob_x, _ob_y = vectorize(_ob_coord)
obs_x[ob_idx].append(_ob_x)
obs_y[ob_idx].append(_ob_y)
obs_tfs[ob_idx].append(_ob_lab)
return np.array(x, np.float32),\
np.array(obs_x, np.float32),\
np.array(obs_y, np.float32),\
np.array(obs_tfs, np.float32),\
np.array(new_ob_x, np.float32),\
np.array(new_ob_y, np.float32),\
np.array(new_ob_tf, np.float32), imgs
|
evanthebouncy/nnhmm
|
mnist_haar/data.py
|
Python
|
mit
| 2,672
|
import os
from sqlalchemy import create_engine, MetaData
from sqlalchemy.orm import scoped_session, sessionmaker
metadata = MetaData()
def get_sa_db_uri(driver='', username='', password='', host='', port='', database=''):
"""get SQLAlchemy DB URI: driver://username:password@host:port/database"""
assert driver
if driver == 'sqlite':
# get absolute file path
if not database.startswith('/'):
db_file = os.path.abspath(database)
else:
db_file = database
db_uri = '%s:///%s' % (driver, db_file)
else:
db_uri = ('%s://%s:%s@%s:%s/%s' %
(driver, username, password, host, port, database))
return db_uri
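# Illustrative outputs (values are made up):
#   get_sa_db_uri(driver='postgresql', username='u', password='p',
#                 host='localhost', port='5432', database='mydb')
#     -> 'postgresql://u:p@localhost:5432/mydb'
#   get_sa_db_uri(driver='sqlite', database='sod.db')
#     -> 'sqlite:////absolute/path/to/sod.db'  (extra slash from the abs path)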
class DB(object):
def __init__(self, db_uri):
self.engine = create_engine(db_uri, convert_unicode=True)
self.session = scoped_session(
sessionmaker(autocommit=False,
autoflush=False,
bind=self.engine))
def init_database(self):
metadata.create_all(bind=self.engine)
|
schettino72/serveronduty
|
websod/database.py
|
Python
|
mit
| 1,071
|
"""
Grade API v1 URL specification
"""
from django.conf.urls import url, patterns
import views
urlpatterns = patterns(
'',
url(r'^grades/courses/$', views.CourseGradeList.as_view()),
url(r'^grades/courses/(?P<org>[A-Za-z0-9_.-]+)[+](?P<name>[A-Za-z0-9_.-]+)[+](?P<run>[A-Za-z0-9_.-]+)/$', views.CourseGradeDetail.as_view()),
url(r'^grades/students/$', views.StudentList.as_view()),
url(r'^grades/students/(?P<student_id>[0-9]+)/$', views.StudentGradeDetail.as_view()),
)
|
jaygoswami2303/course_dashboard_api
|
v2/GradeAPI/urls.py
|
Python
|
mit
| 492
|
from OpenGLCffi.GLES1 import params
@params(api='gles1', prms=['target', 'numAttachments', 'attachments'])
def glDiscardFramebufferEXT(target, numAttachments, attachments):
pass
|
cydenix/OpenGLCffi
|
OpenGLCffi/GLES1/EXT/EXT/discard_framebuffer.py
|
Python
|
mit
| 181
|
# Quick script to calculate GPA given a class list file.
# Class list file should be a csv with COURSE_ID,NUM_UNITS,GRADE
# GRADE should be LETTER with potential modifiers after that
# registrar.mit.edu/classes-grades-evaluations/grades/calculating-gpa
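# Example class list (hypothetical contents of grades.csv):
#   6.006,12,A
#   18.03,12,B+
#   21M.301,6,A-
# On the 5.0 scale used below (modifiers ignored) this averages to
# (12*5 + 12*4 + 6*5) / 30 = 138 / 30 = 4.6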
import argparse
import pandas as pd
def get_parser():
# Get the argument parser for this script
parser = argparse.ArgumentParser()
parser.add_argument('-F', '--filename', help='Filename for grades')
return parser
class GPACalculator:
def __init__(self, fname):
# Load file via pandas
self.__data = pd.read_csv(
fname,
header=None,
names=['course', 'units', 'grade']
)
def calc_gpa(self):
# Map grades to grade points
grade_points = self.__data.grade.apply(self.__grade_point_mapper)
# Multiply pointwise by units
grade_points_weighted = grade_points * self.__data.units
# Sum weighted units
weighted_units_sum = grade_points_weighted.sum()
# Divide by total units
gpa_raw = weighted_units_sum / self.__data.units.sum()
# Round to nearest tenth
return round(gpa_raw, 1)
def __grade_point_mapper(self, grade):
# Maps a string letter grade to a numerical value
# MIT 5.0 scale
grade_map = {
'A': 5,
'B': 4,
'C': 3,
'D': 2,
'F': 0,
}
first_char = grade[0].upper()
try:
return grade_map[first_char]
        except KeyError:
raise ValueError('Invalid grade {grade}'.format(grade=grade))
if __name__ == '__main__':
# Set up argument parsing
parser = get_parser()
args = parser.parse_args()
# Make sure filename is present
if not args.filename:
raise ValueError('Must provide filename via -F, --filename')
# Create calculator
calc = GPACalculator(args.filename)
# Execute and print
gpa = calc.calc_gpa()
print(gpa)
|
kalyons11/kevin
|
kevin/playground/gpa.py
|
Python
|
mit
| 2,017
|
# Create the data.
from numpy import pi, sin, cos, mgrid
dphi, dtheta = pi/250.0, pi/250.0
[phi,theta] = mgrid[0:pi+dphi*1.5:dphi,0:2*pi+dtheta*1.5:dtheta]
m0 = 4; m1 = 3; m2 = 2; m3 = 3; m4 = 6; m5 = 2; m6 = 6; m7 = 4;
r = sin(m0*phi)**m1 + cos(m2*phi)**m3 + sin(m4*theta)**m5 + cos(m6*theta)**m7
x = r*sin(phi)*cos(theta)
y = r*cos(phi)
z = r*sin(phi)*sin(theta)
# View it.
from mayavi import mlab
s = mlab.mesh(x, y, z)
mlab.show()
|
Robbie1977/NRRDtools
|
test.py
|
Python
|
mit
| 436
|
"""Bulk importer for manually-prepared tariff CSV.
This probably won't be used again following initial data load, so
could be deleted after that.
"""
import csv
import logging
from datetime import datetime
from django.core.management.base import BaseCommand
from django.db import transaction
from dmd.models import TariffPrice
from dmd.models import DMDVmpp
from dmd.models import DMDProduct
from frontend.models import ImportLog
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
help = 'Imports a CSV of historic tariff prices'
def add_arguments(self, parser):
parser.add_argument('--csv')
def handle(self, *args, **options):
with open(options['csv']) as f:
with transaction.atomic():
month = None
counter = 0
for row in csv.DictReader(f):
month = datetime.strptime(row['Month'], '%d/%m/%Y')
counter += 1
if 'Category A' in row['DT Cat']:
tariff_category = 1
elif 'Category C' in row['DT Cat']:
tariff_category = 3
elif 'Category M' in row['DT Cat']:
tariff_category = 11
else:
                        raise ValueError("Unrecognised DT Cat: %s" % row['DT Cat'])
try:
vpid = DMDVmpp.objects.get(pk=row['VMPP']).vpid
product = DMDProduct.objects.get(
vpid=vpid, concept_class=1)
except DMDVmpp.DoesNotExist:
logger.error(
"Could not find VMPP with id %s",
row['VMPP'], exc_info=True)
continue
except DMDProduct.DoesNotExist:
logger.error(
"Could not find DMDProduct with vpid %s",
vpid, exc_info=True)
continue
TariffPrice.objects.get_or_create(
date=month,
product=product,
vmpp_id=row['VMPP'],
tariff_category_id=tariff_category,
price_pence=int(row['DT Price']))
ImportLog.objects.create(
category='tariff',
filename=options['csv'],
current_at=month)
|
annapowellsmith/openpresc
|
openprescribing/dmd/management/commands/bulk_import_drug_tariff.py
|
Python
|
mit
| 2,479
|
import sublime, sublime_plugin
from .. import config
from .. import globals
from .. import logger
from ..debug_client import DebugClient
from ..clicks import Clicks
log = logger.get('cmd_attach_debugger')
def lookup_ref(id, refs):
for ref in refs:
if id == ref['handle']:
return ref
return None
def open_file(data):
if '/' not in data['script'].replace('\\', '/'):
        print('[NDBG] Internal scripts (%s) aren\'t supported for now. Sorry :(' % data['script'])
return
# TODO: fetch node's internal scripts with `scripts` request
window = sublime.active_window()
filename = '%s:%d:%d' % (data['script'], data['line'], 1) # it's better to use '1' instead of data['column']
src = window.open_file(filename, sublime.ENCODED_POSITION)
window.set_view_index(src, 0, 0)
if 'exception' in data:
src.set_status('node_error', data['exception'])
def trace_callback(data):
body = data['body']
refs = data['refs']
trace = []
funcLen = 0
for frame in body['frames']:
func = frame['func']['name'] or frame['func']['inferredName'] or 'Anonymous'
script = lookup_ref(frame['script']['ref'], refs)
trace.append({'func': func, 'script': script['name'], 'line': int(frame['line']) + 1, 'column': int(frame['column']) + 1})
l = len(func)
if funcLen < l:
funcLen = l
text = '%s\n' % globals.exception
globals.exception = None
for line in trace:
s = '\t%s (%s:%d:%d)\n' % (line['func'].ljust(funcLen), line['script'], line['line'], line['column'])
globals.clicks.add(sublime.Region(len(text), len(text + s)), open_file, line)
text = text + s
globals.st.run_command('node_debugger_insert_text', {'text': text})
def exception_callback(data):
log('exception', data)
body = data['body']
window = sublime.active_window()
if config.get('show_stacktrace'):
globals.exception = body['exception']['text']
window.set_layout(config.get('debug_layout'))
# Create new buffer for stacktrace
globals.st = st = window.new_file()
st.set_scratch(True)
st.set_name(config.get('stacktrace_name'))
st.settings().set('word_wrap', False)
st.settings().set('syntax', 'Packages/' + globals.prefix + '/node stacktrace.tmLanguage')
window.set_view_index(st, 1, 0)
# Request backtrace
globals.client.execute('backtrace', trace_callback, inlineRefs=True)
# Open file with error
open_file({'script': body['script']['name'], 'line': body['sourceLine'] + 1, 'column': body['sourceColumn'] + 1, 'exception': body['exception']['text']})
def after_compile_callback(data):
pass
def disconnect_handler(e):
log('disconnect_handler', e)
globals.client = None
class NodeDebuggerAttachCommand(sublime_plugin.ApplicationCommand):
def run(self):
if globals.client:
globals.client.close()
address = config.get('address')
try:
globals.original_layout = sublime.active_window().get_layout()
globals.clicks = Clicks()
globals.client = client = DebugClient(address)
client.on_disconnect(disconnect_handler)
# client.add_handler('break', exception_callback)
client.add_handler('exception', exception_callback)
client.add_handler('afterCompile', after_compile_callback)
client.execute_sync('setexceptionbreak', lambda data: client.execute('continue', lambda x: str(1)), type='uncaught', enabled=True)
except (IOError) as e:
log('Error connecting to %s' % address, e)
message = 'Error connecting to node.js instance at %s' % address
sublime.error_message(message)
|
DeniSix/SublimeNodeStacktrace
|
node_debugger/commands/attach_debugger.py
|
Python
|
mit
| 3,423
|
from bokeh.plotting import figure, output_file, show
p = figure(width=400, height=400)
p.circle(2, 3, radius=.5, alpha=0.5)
output_file('out.html')
show(p)
|
Serulab/Py4Bio
|
code/ch14/basiccircle.py
|
Python
|
mit
| 157
|
from django.conf import settings
PIPELINE = getattr(settings, 'PIPELINE', not settings.DEBUG)
PIPELINE_ROOT = getattr(settings, 'PIPELINE_ROOT', settings.STATIC_ROOT)
PIPELINE_URL = getattr(settings, 'PIPELINE_URL', settings.STATIC_URL)
PIPELINE_STORAGE = getattr(settings, 'PIPELINE_STORAGE',
'pipeline.storage.PipelineFinderStorage')
PIPELINE_FALLBACK_STORAGE = getattr(settings, 'PIPELINE_FALLBACK_STORAGE',
'pipeline.storage.FallbackStaticStorage')
PIPELINE_CSS_COMPRESSOR = getattr(settings, 'PIPELINE_CSS_COMPRESSOR',
'pipeline.compressors.yui.YUICompressor'
)
PIPELINE_JS_COMPRESSOR = getattr(settings, 'PIPELINE_JS_COMPRESSOR',
'pipeline.compressors.yui.YUICompressor'
)
PIPELINE_COMPILERS = getattr(settings, 'PIPELINE_COMPILERS', [])
PIPELINE_PRECOMPILERS = getattr(settings, 'PIPELINE_PRECOMPILERS', [])
PIPELINE_CSS = getattr(settings, 'PIPELINE_CSS', {})
PIPELINE_JS = getattr(settings, 'PIPELINE_JS', {})
PIPELINE_TEMPLATE_NAMESPACE = getattr(settings, 'PIPELINE_TEMPLATE_NAMESPACE', "window.JST")
PIPELINE_TEMPLATE_EXT = getattr(settings, 'PIPELINE_TEMPLATE_EXT', ".jst")
PIPELINE_TEMPLATE_FUNC = getattr(settings, 'PIPELINE_TEMPLATE_FUNC', "template")
PIPELINE_DISABLE_WRAPPER = getattr(settings, 'PIPELINE_DISABLE_WRAPPER', False)
PIPELINE_CSSTIDY_BINARY = getattr(settings, 'PIPELINE_CSSTIDY_BINARY', '/usr/bin/env csstidy')
PIPELINE_CSSTIDY_ARGUMENTS = getattr(settings, 'PIPELINE_CSSTIDY_ARGUMENTS', '--template=highest')
PIPELINE_YUI_BINARY = getattr(settings, 'PIPELINE_YUI_BINARY', '/usr/bin/env yuicompressor')
PIPELINE_YUI_CSS_ARGUMENTS = getattr(settings, 'PIPELINE_YUI_CSS_ARGUMENTS', '')
PIPELINE_YUI_JS_ARGUMENTS = getattr(settings, 'PIPELINE_YUI_JS_ARGUMENTS', '')
PIPELINE_CLOSURE_BINARY = getattr(settings, 'PIPELINE_CLOSURE_BINARY', '/usr/bin/env closure')
PIPELINE_CLOSURE_ARGUMENTS = getattr(settings, 'PIPELINE_CLOSURE_ARGUMENTS', '')
PIPELINE_UGLIFYJS_BINARY = getattr(settings, 'PIPELINE_UGLIFYJS_BINARY', '/usr/bin/env uglifyjs')
PIPELINE_UGLIFYJS_ARGUMENTS = getattr(settings, 'PIPELINE_UGLIFYJS_ARGUMENTS', '')
PIPELINE_COFFEE_SCRIPT_BINARY = getattr(settings, 'PIPELINE_COFFEE_SCRIPT_BINARY', '/usr/bin/env coffee')
PIPELINE_COFFEE_SCRIPT_ARGUMENTS = getattr(settings, 'PIPELINE_COFFEE_SCRIPT_ARGUMENTS', '')
PIPELINE_SASS_BINARY = getattr(settings, 'PIPELINE_SASS_BINARY', '/usr/bin/env sass')
PIPELINE_SASS_ARGUMENTS = getattr(settings, 'PIPELINE_SASS_ARGUMENTS', '')
PIPELINE_STYLUS_BINARY = getattr(settings, 'PIPELINE_STYLUS_BINARY', '/usr/bin/env stylus')
PIPELINE_STYLUS_ARGUMENTS = getattr(settings, 'PIPELINE_STYLUS_ARGUMENTS', '')
PIPELINE_LESS_BINARY = getattr(settings, 'PIPELINE_LESS_BINARY', '/usr/bin/env lessc')
PIPELINE_LESS_ARGUMENTS = getattr(settings, 'PIPELINE_LESS_ARGUMENTS', '')
PIPELINE_MIMETYPES = getattr(settings, 'PIPELINE_MIMETYPES', (
('text/coffeescript', '.coffee'),
('text/less', '.less'),
('text/javascript', '.js'),
('text/x-sass', '.sass'),
('text/x-scss', '.scss')
))
PIPELINE_EMBED_MAX_IMAGE_SIZE = getattr(settings, 'PIPELINE_EMBED_MAX_IMAGE_SIZE', 32700)
PIPELINE_EMBED_PATH = getattr(settings, 'PIPELINE_EMBED_PATH', r'[/]?embed/')
if PIPELINE_COMPILERS is None:
PIPELINE_COMPILERS = []
|
fahhem/django-pipeline
|
pipeline/conf/settings.py
|
Python
|
mit
| 3,228
|
#This is a cell with a custom comment as marker
x=10
y=11
print(x+y)
|
HugoGuillen/nb2py
|
tutorial_files/custom.py
|
Python
|
mit
| 70
|
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
import guzzle_sphinx_theme
import tomli
from dunamai import Version
root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, root)
# -- Project information -----------------------------------------------------
# General project metadata is stored in pyproject.toml
with open(os.path.join(root, "pyproject.toml"), "rb") as f:
config = tomli.load(f)
project_meta = config["tool"]["poetry"]
print(project_meta)
project = project_meta["name"]
author = project_meta["authors"][0]
description = project_meta["description"]
url = project_meta["homepage"]
title = project + " Documentation"
_version = Version.from_git()
# The full version, including alpha/beta/rc tags
release = _version.serialize(metadata=False)
# The short X.Y.Z version
version = _version.base
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
needs_sphinx = "2.0"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
"sphinx_autodoc_typehints",
"guzzle_sphinx_theme",
"sphinxcontrib_dooble",
]
# Include a separate entry for special methods, like __init__, where provided.
autodoc_default_options = {
"member-order": "bysource",
"special-members": True,
"exclude-members": "__dict__,__weakref__",
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_translator_class = "guzzle_sphinx_theme.HTMLTranslator"
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_theme = "guzzle_sphinx_theme"
html_title = title
html_short_title = project + " " + version
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
html_theme_options = {"projectlink": url}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
html_sidebars = {"**": ["logo-text.html", "globaltoc.html", "searchbox.html"]}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = project + "doc"
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [(master_doc, project + ".tex", title, author, "manual")]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, project.lower(), title, [author], 1)]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, project, title, author, project, description, "Miscellaneous")
]
# -- Extension configuration -------------------------------------------------
|
ReactiveX/RxPY
|
docs/conf.py
|
Python
|
mit
| 5,837
|
from celery import shared_task
import sync
@shared_task
def auto_sync_app_models_task():
sync.auto_sync_app_models()
|
vittoriozamboni/django-data-sync
|
django_data_sync/tasks.py
|
Python
|
mit
| 123
|
# Program to find the average of numbers in a file
def main():
#Get the filename with the numbers
fileName = input("What file are the numbers in? ")
#var to contain all the content of the file
infile = open(fileName, 'r')
#var to keep track of the sum of those numbers
sum = 0.0
    #var to keep track of the count of numbers read
count = 0
#var with the first line of the file
line = infile.readline()
#iterate through all lines in the document
while line != "":
#math to sum the line with the total sum
print(line)
sum = sum + eval(line)
print(sum)
#increment the count var by 1
count = count + 1
#read in the next line of the file
line = infile.readline()
print("\nThe average of the numbers is", sum / count)
main()
|
src053/PythonComputerScience
|
chap8/average6.py
|
Python
|
cc0-1.0
| 727
|
#!/usr/bin/env python
import datetime
import logging
import os
from urllib.parse import urljoin
from utils import utils, inspector
# https://www.sigar.mil/
archive = 2008
# options:
# standard since/year options for a year range to fetch from.
#
# Notes for IG's web team:
#
SPOTLIGHT_REPORTS_URL = "https://www.sigar.mil/Newsroom/spotlight/spotlight.xml"
SPEECHES_REPORTS_URL = "https://www.sigar.mil/Newsroom/speeches/speeches.xml"
TESTIMONY_REPORTS_URL = "https://www.sigar.mil/Newsroom/testimony/testimony.xml"
PRESS_RELEASES_URL = "https://www.sigar.mil/Newsroom/pressreleases/press-releases.xml"
REPORT_URLS = [
("other", SPOTLIGHT_REPORTS_URL),
("press", SPEECHES_REPORTS_URL),
("testimony", TESTIMONY_REPORTS_URL),
("press", PRESS_RELEASES_URL),
("audit", "https://www.sigar.mil/audits/auditreports/reports.xml"),
("inspection", "https://www.sigar.mil/audits/inspectionreports/inspection-reports.xml"),
("audit", "https://www.sigar.mil/audits/financialreports/Financial-Audits.xml"),
("other", "https://www.sigar.mil/SpecialProjects/projectreports/reports.xml"),
("other", "https://www.sigar.mil/Audits/alertandspecialreports/alert-special-reports.xml"),
("semiannual_report", "https://www.sigar.mil/quarterlyreports/index.xml"),
]
BASE_REPORT_URL = "https://www.sigar.mil/allreports/index.aspx"
def run(options):
year_range = inspector.year_range(options, archive)
# Pull the reports
for report_type, report_url in REPORT_URLS:
doc = utils.beautifulsoup_from_url(report_url)
results = doc.select("item")
if not results:
raise inspector.NoReportsFoundError("SIGAR (%s)" % report_type)
for result in results:
report = report_from(result, report_url, report_type, year_range)
if report:
inspector.save_report(report)
def report_from(result, landing_url, report_type, year_range):
report_url = report_url_for_landing_page(result.find("link").next.strip(), landing_url)
if report_url in ("https://www.sigar.mil/pdf/audits/Financial_Audits/SIGAR _14-15\u2013FA.pdf",
"https://www.sigar.mil/pdf/audits/Financial_Audits/SIGAR_14-14\u2013FA.pdf"):
report_url = report_url.replace("\u2013", "-")
report_filename = report_url.split("/")[-1]
report_id, extension = os.path.splitext(report_filename)
if result.title:
title = result.title.text.strip()
else:
title = report_id
published_on_text = result.find("pubdate").text.strip()
published_on = parse_date(published_on_text)
if report_id == "SIGAR-14-42-AL" and title == "SIGAR 14-42-AL":
# this report is posted in both "spotlight" and "special reports"
return
if report_id == "SIGAR_CSIS_Speech" and published_on.year == 2017:
# There are two speeches with the same file name
report_id += "_2017"
if published_on.year not in year_range:
logging.debug("[%s] Skipping, not in requested range." % report_url)
return
report = {
'inspector': 'sigar',
'inspector_url': "https://www.sigar.mil",
'agency': 'sigar',
'agency_name': "Special Inspector General for Afghanistan Reconstruction",
'type': report_type,
'report_id': report_id,
'url': report_url,
'title': title,
'published_on': datetime.datetime.strftime(published_on, "%Y-%m-%d"),
}
if report_url.startswith(("https://www.justice.gov/",
"http://www.justice.gov/",
"https://www.fbi.gov/",
"http://www.fbi.gov/",
"https://www.usaid.gov/",
"http://www.usaid.gov/")):
if not os.path.splitext(report_url)[1]:
report['file_type'] = "html"
return report
def report_url_for_landing_page(relative_url, landing_url):
"""
We need to mimic the logic used in https://www.sigar.mil/js/AllReports.js
case SPOTLIGHT:
Title = "Spotlight";
Link = Link.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx");
Link = Link.replace("../../", "../");
break;
case SPEECHES:
Title = "Speeches";
Link = Link.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx");
Link = Link.replace("../../", "../");
break;
case TESTIMONY:
Title = "Testimony";
Link = Link.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx");
Link = Link.replace("../../", "../");
break;
case PRESSRELEASES:
Link = Link.replace("../", "../newsroom/");
Link = Link.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx");
break;
"""
relative_url = relative_url.replace("â\x80\x93", "–")
if landing_url == SPOTLIGHT_REPORTS_URL:
relative_url = relative_url.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx")
relative_url = relative_url.replace("../../", "../")
elif landing_url == SPEECHES_REPORTS_URL:
relative_url = relative_url.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx")
relative_url = relative_url.replace("../../", "../")
elif landing_url == TESTIMONY_REPORTS_URL:
relative_url = relative_url.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx")
relative_url = relative_url.replace("../../", "../")
elif landing_url == PRESS_RELEASES_URL:
relative_url = relative_url.replace("../", "../newsroom/")
relative_url = relative_url.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx")
return urljoin(BASE_REPORT_URL, relative_url)
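# Illustrative transformation (the query string is hypothetical):
#   report_url_for_landing_page("../ReadFile.aspx?SSR=7&RID=4", SPOTLIGHT_REPORTS_URL)
# rewrites the link to "../newsroom/ReadFile.aspx?SSR=7&RID=4" and joins it
# against BASE_REPORT_URL, giving
# "https://www.sigar.mil/newsroom/ReadFile.aspx?SSR=7&RID=4".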
def parse_date(text):
for format in [
'%A, %B %d, %Y',
'%A, %B %dst, %Y',
'%A, %B %dnd, %Y',
'%A, %B %drd, %Y',
'%A, %B %dth, %Y'
]:
try:
return datetime.datetime.strptime(text, format)
except ValueError:
pass
raise Exception("Couldn't parse date from {}".format(text))
utils.run(run) if (__name__ == "__main__") else None
|
divergentdave/inspectors-general
|
inspectors/sigar.py
|
Python
|
cc0-1.0
| 5,811
|
#!/usr/bin/env python
import gtk, sys, string
class Socket:
def __init__(self):
window = gtk.Window()
window.set_default_size(200, 200)
socket = gtk.Socket()
window.add(socket)
print "Socket ID:", socket.get_id()
if len(sys.argv) == 2:
socket.add_id(long(sys.argv[1]))
window.connect("destroy", gtk.main_quit)
socket.connect("plug-added", self.plugged_event)
window.show_all()
def plugged_event(self, widget):
print "A plug has been inserted."
Socket()
gtk.main()
|
Programmica/pygtk-tutorial
|
examples/socket.py
|
Python
|
cc0-1.0
| 572
|
# Copyright (C) 2016 Sysdig inc.
# All rights reserved
# Author: Luca Marturana (luca@sysdig.com)
import os
import traceback
from inspect import isfunction
import sys
import functools
# ensure file descriptor will be closed on execve
O_CLOEXEC = 524288 # cannot use octal because they have different syntax on python2 and 3
NULL_FD = os.open("/dev/null", os.O_WRONLY | os.O_NONBLOCK | O_CLOEXEC)
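# Sanity note: 524288 == 0x80000 == 0o2000000, which is Linux's O_CLOEXEC;
# on Python 3.3+ one could use os.O_CLOEXEC directly instead of this literal.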
class Args(object):
"""
Use this class to tell Tracer to extract positional function arguments
and emit them to the trace:
@Tracer(enter_args={"n": Args(0)})
def myfunction(n):
pass
myfunction(9)
"""
def __init__(self, i):
self.i = i
def __call__(self, args):
return args[self.i]
class Kwds(object):
"""
Use this class to tell Tracer to extract keyword function arguments
and emit them to the trace:
@Tracer(enter_args={"n": Kwds("n")})
def myfunction(n):
pass
myfunction(n=9)
"""
def __init__(self, key):
self.key = key
def __call__(self, kwds):
return kwds[self.key]
class ReturnValue(object):
"""
Use this class to tell Tracer to extract return value of a function
and emit them to the trace:
@Tracer(exit_args={"n": ReturnValue})
def myfunction():
return 8
myfunction()
"""
pass
class Tracer(object):
"""
This class allows you to add a tracer to a function, a method or
to instrument specific part of code. Use it as decorator:
@Tracer
def myfuction():
pass
or using `with` syntax:
with Tracer():
pass
"""
def __init__(self, tag=None, enter_args=None, exit_args=None):
"""
Create a new Tracer, all arguments are optional:
tag -- tag name, by default is auto-detected by the code line or function name
enter_args -- dictionary of enter arguments for the trace, use Args, Kwds to extract function arguments
exit_args -- dictionary of exit arguments for the trace, use ReturnValue to extract function return value
"""
self.__detect_tag(tag)
        self.enter_args = enter_args if enter_args is not None else {}
        self.exit_args = exit_args if exit_args is not None else {}
def __detect_tag(self, tag):
if isinstance(tag, str):
self.tag = tag
elif isfunction(tag):
self.tag = tag.__name__
self.wrapped_func = tag
self.function_calls = 0
else:
tb = traceback.extract_stack(None, 3)[0]
filepath = tb[0]
filepath = filepath[filepath.rfind("/",0, filepath.rfind("/"))+1:]
filepath = filepath.replace(".", "\.")
self.tag = "%s\:%d(%s)" % (filepath, tb[1], tb[2].replace("<","\<").replace(">","\>"))
def __emit_trace(self, direction, args=None):
if args is None:
args = {}
args_s = ",".join(["%s=%s" % item for item in args.items()])
tracer = "%s:t:%s:%s:" % (direction, self.tag, args_s)
if sys.version_info[0] == 3:
tracer = bytes(tracer, 'ascii')
try:
os.write(NULL_FD, tracer)
except OSError:
pass
def __enter__(self):
self.__emit_trace(">", self.enter_args)
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
self.__emit_trace("<")
def __call__(self, *args, **kwds):
if len(args) == 1 and callable(args[0]):
# This happens when Tracer is used as:
#
# @Tracer(enter_args= ..)
# def myf(): ....
self.__detect_tag(args[0])
return self
else:
# This happens when Tracer is used as:
#
# @Tracer
# def myf(): ....
if self.function_calls == 0:
enter_args = {}
for key, value in self.enter_args.items():
if isinstance(value, Args):
enter_args[key] = value(args)
elif isinstance(value, Kwds):
enter_args[key] = value(kwds)
elif isinstance(value, str):
enter_args[key] = value
self.__emit_trace(">", enter_args)
# function_calls counter helps to detect recursive calls
# and print them only once
self.function_calls += 1
res = self.wrapped_func(*args, **kwds)
self.function_calls -= 1
if self.function_calls == 0:
exit_args = {}
for key, value in self.exit_args.items():
if value == ReturnValue:
exit_args[key] = res
elif isinstance(value, str):
exit_args[key] = value
self.__emit_trace("<", exit_args)
return res
def start(self, tag=None, args=None):
"""
Emit a tracer enter event.
This method allows to fine control trace emission
t = Tracer()
t.start()
[code]
t.stop()
is equal to:
with Tracer():
[code]
tag -- same as __init__
args -- dictionary of enter args
"""
self.__detect_tag(tag)
self.__emit_trace(">", args)
def stop(self, args=None):
"""
Emit an exit trace event
See start() for further details
"""
self.__emit_trace("<", args)
def span(self, tag=None, enter_args=None, exit_args=None):
"""
Create a nested span inside a tracer,
the usage is the same of Tracer() itself:
with Tracer() as t:
[code]
with t.span() as child:
[othercode]
"""
t = Tracer("", enter_args, exit_args)
t.__detect_tag(tag)
t.tag = "%s.%s" % (self.tag, t.tag)
return t
def __get__(self, obj, objtype):
# This is needed to support decorating methods
# instead of spare functions
return functools.partial(self.__call__, obj)
|
draios/tracers-py
|
sysdig_tracers.py
|
Python
|
gpl-2.0
| 5,463
|
# -*- coding: UTF-8 -*-
#The sum of the squares of the first ten natural numbers is,
#1² + 2² + ... + 10² = 385
#The square of the sum of the first ten natural numbers is,
#(1 + 2 + ... + 10)² = 55² = 3025
#Hence the difference between the sum of the squares of the first ten natural numbers and the square of the sum is 3025 − 385 = 2640.
#Find the difference between the sum of the squares of the first one hundred natural numbers and the square of the sum.
sumsquare = 0
sum = 0
for i in range(1, 101):
sumsquare = sumsquare + i*i
for j in range(1, 101):
sum = sum + j
print(sum)
print(sum*sum)
print(sum*sum - sumsquare)
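# Closed-form check with the standard identities:
#   1 + 2 + ... + n     = n(n+1)/2       -> 100*101/2     = 5050
#   1² + 2² + ... + n²  = n(n+1)(2n+1)/6 -> 100*101*201/6 = 338350
# difference: 5050**2 - 338350 = 25502500 - 338350 = 25164150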
|
eHanseJoerg/learning-machine-learning
|
01 project euler/projecteuler6.py
|
Python
|
gpl-2.0
| 648
|
f = open('main_h.tex','w')
f.write("""\documentclass[a4paper,5pt,twocolumn,titlepage]{article}
\usepackage{mathpazo}
\usepackage{xeCJK}
\usepackage{pstricks,pst-node,pst-tree}
\usepackage{titlesec}
\\titleformat*{\section}{\sf}
\\titleformat*{\subsection}{\sf}
%\setsansfont{DejaVu Sans Mono}
\setsansfont{Source Code Pro}
%\setsansfont{Monaco}
%\setsansfont{Liberation Mono}
%\setsansfont{Luxi Mono}
%\setsansfont{Ubuntu Mono}
%\setsansfont{Droid Sans Mono}
\usepackage{tocloft}
\\renewcommand\cftsecfont{\sf}
%\\renewcommand\cftsubsecfont{\sf}
\setCJKmainfont{SimHei}
\setCJKsansfont{SimHei}
\setCJKmonofont{SimHei}
\usepackage{graphicx}
\usepackage{amsmath}
\usepackage{xcolor}
\usepackage{type1cm}
\usepackage{booktabs}
\usepackage{geometry}
%\usepackage[landscape]{geometry}
\geometry{left=1cm,right=1cm,top=1cm,bottom=1.5cm,headsep=0.2cm}
\usepackage{courier}
%\usepackage{time}
%\usepackage{charter}
\usepackage{fancyhdr}
\usepackage{listings}
\lstset{
breaklines=true,
tabsize=2,
%numbers=left,
%numbersep=4pt,
%numberstyle=\sf\scriptsize,
commentstyle=\sf\scriptsize,
basicstyle=\sf\scriptsize,
%frame=leftline,
escapeinside=``,
extendedchars=false
}
\usepackage[CJKbookmarks=true,
colorlinks,
linkcolor=black,
anchorcolor=black,
citecolor=black]{hyperref}
\AtBeginDvi{\special{pdf:tounicode UTF8-UCS2}}
\usepackage{indentfirst}
\setlength{\parindent}{0em}
\\newcommand*{\TitleFont}{%
\\fontsize{50}{80}%
\\selectfont}
\\usepackage{graphicx}
\\title{\TitleFont{Code Library}
\\begin{center}
\includegraphics[scale=2]{./image1.png}
\end{center}
}
\\author{Himemiya Nanao @ Perfect Freeze}
\setmainfont{Linux Libertine O}
\usepackage{tocloft}
\cftsetindents{section}{0.1in}{0.2in}
\cftsetindents{subsection}{.2in}{0.3in}
\cftsetindents{subsubsection}{.3in}{0.45in}
\\begin{document}
\maketitle \\tableofcontents
\\newpage
\pagenumbering{arabic}
""")
import os
import string
for x,y,z in os.walk('.'):
if x == '.':
continue
f.write('\n\section{'+string.capwords(x[2:])+'}\n')
for files in z:
if ((files == '.ds_store') or (files == '.DS_Store') or (files.endswith('~')) or files.endswith('.pdf')):
continue
ot=files
if ot.endswith(""".cpp"""):
ot=ot[:-4];
elif ot.endswith(""".cxx"""):
ot=ot[:-4];
elif ot.endswith("""java"""):
ot=ot[:-5]
elif ot.endswith('tex'):
f.write('\\input{\"'+x+'/'+files+'\"}\n')
continue
f.write('\subsection{'+ot+'}\n')
fname = x+'/'+files
fname = fname.lower()
if files.count('.')!=0:
if fname.endswith(""".java"""):
lang = """Java"""
else:
lang = """C++"""
f.write('\\lstinputlisting[language='+lang+']{\"'+fname+'\"}\n')
# print files.count('.')
# print files
else:
f.write('\\lstinputlisting{\"'+fname+'\"}\n')
f.write(
"""
\end{document}
""")
f.close()
|
himemeizhi/Code-Library
|
2.py
|
Python
|
gpl-2.0
| 2,805
|
#!/usr/bin/env python
import os
import sys
import tempfile
import shutil
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
from avocado.utils import process
basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..')
basedir = os.path.abspath(basedir)
DEBUG_OUT = """Variant 16: amd@examples/mux-environment.yaml, virtio@examples/mux-environment.yaml, mint@examples/mux-environment.yaml, debug@examples/mux-environment.yaml
/distro/mint:init => systemv@examples/mux-environment.yaml:/distro/mint
/env/debug:opt_CFLAGS => -O0 -g@examples/mux-environment.yaml:/env/debug
/hw/cpu/amd:cpu_CFLAGS => -march=athlon64@examples/mux-environment.yaml:/hw/cpu/amd
/hw/cpu/amd:joinlist => ['first_item']@examples/mux-selftest.yaml:/hw/cpu + ['second', 'third']@examples/mux-selftest.yaml:/hw/cpu/amd
/hw/disk/virtio:disk_type => virtio@examples/mux-environment.yaml:/hw/disk/virtio
/hw/disk:corruptlist => nonlist@examples/mux-selftest.yaml:/hw/disk
/hw:corruptlist => ['upper_node_list']@examples/mux-selftest.yaml:/hw
"""
class MultiplexTests(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp(prefix='avocado_' + __name__)
def run_and_check(self, cmd_line, expected_rc):
os.chdir(basedir)
result = process.run(cmd_line, ignore_status=True)
self.assertEqual(result.exit_status, expected_rc,
"Command %s did not return rc "
"%d:\n%s" % (cmd_line, expected_rc, result))
return result
def test_mplex_plugin(self):
cmd_line = './scripts/avocado multiplex examples/tests/sleeptest.py.data/sleeptest.yaml'
expected_rc = 0
self.run_and_check(cmd_line, expected_rc)
def test_mplex_plugin_nonexistent(self):
cmd_line = './scripts/avocado multiplex nonexist'
expected_rc = 2
result = self.run_and_check(cmd_line, expected_rc)
self.assertIn('No such file or directory', result.stderr)
def test_mplex_debug(self):
cmd_line = ('./scripts/avocado multiplex -c -d '
'/:examples/mux-selftest.yaml '
'/:examples/mux-environment.yaml '
'/:examples/mux-selftest.yaml '
'/:examples/mux-environment.yaml')
expected_rc = 0
result = self.run_and_check(cmd_line, expected_rc)
self.assertIn(DEBUG_OUT, result.stdout)
def test_run_mplex_noid(self):
cmd_line = ('./scripts/avocado run --job-results-dir %s --sysinfo=off '
'--multiplex examples/tests/sleeptest.py.data/sleeptest.yaml' % self.tmpdir)
expected_rc = 2
self.run_and_check(cmd_line, expected_rc)
def test_run_mplex_passtest(self):
cmd_line = ('./scripts/avocado run --job-results-dir %s --sysinfo=off passtest '
'--multiplex examples/tests/sleeptest.py.data/sleeptest.yaml' % self.tmpdir)
expected_rc = 0
self.run_and_check(cmd_line, expected_rc)
def test_run_mplex_doublepass(self):
cmd_line = ('./scripts/avocado run --job-results-dir %s --sysinfo=off passtest passtest '
'--multiplex examples/tests/sleeptest.py.data/sleeptest.yaml' % self.tmpdir)
self.run_and_check(cmd_line, expected_rc=0)
def test_run_mplex_failtest(self):
cmd_line = ('./scripts/avocado run --job-results-dir %s --sysinfo=off passtest failtest '
'--multiplex examples/tests/sleeptest.py.data/sleeptest.yaml' % self.tmpdir)
expected_rc = 1
self.run_and_check(cmd_line, expected_rc)
def test_run_double_mplex(self):
cmd_line = ('./scripts/avocado run --job-results-dir %s --sysinfo=off passtest --multiplex '
'examples/tests/sleeptest.py.data/sleeptest.yaml '
'examples/tests/sleeptest.py.data/sleeptest.yaml' % self.tmpdir)
expected_rc = 0
self.run_and_check(cmd_line, expected_rc)
def test_run_mplex_params(self):
cmd_line = ('./scripts/avocado run --job-results-dir %s --sysinfo=off examples/tests/env_variables.sh '
'--multiplex examples/tests/env_variables.sh.data'
'/env_variables.yaml '
'--show-job-log' % self.tmpdir)
expected_rc = 0
result = self.run_and_check(cmd_line, expected_rc)
for msg in ('A', 'ASDFASDF', 'This is very long\nmultiline\ntext.'):
msg = ('[stdout] Custom variable: ' +
'\n[stdout] '.join(msg.splitlines()))
self.assertIn(msg, result.stdout, "Multiplexed variable should "
"produce:"
"\n %s\nwhich is not present in the output:\n %s"
% ("\n ".join(msg.splitlines()),
"\n ".join(result.stdout.splitlines())))
def tearDown(self):
shutil.rmtree(self.tmpdir)
if __name__ == '__main__':
unittest.main()
|
will-Do/avocado
|
selftests/functional/test_multiplex.py
|
Python
|
gpl-2.0
| 5,108
|
# -*- coding: utf-8 -*-
from rest_framework.routers import (Route,
DynamicDetailRoute,
SimpleRouter,
DynamicListRoute)
from app.api.account.views import AccountViewSet
from app.api.podcast.views import PodcastViewSet, EpisodeViewSet
class CustomRouter(SimpleRouter):
"""
A router for read-only APIs, which doesn't use trailing slashes.
"""
routes = [
Route(
url=r'^{prefix}{trailing_slash}$',
mapping={
'get': 'list',
'post': 'create'
},
name='{basename}-list',
initkwargs={'suffix': 'List'}
),
# Dynamically generated list routes.
# Generated using @list_route decorator
# on methods of the viewset.
DynamicListRoute(
url=r'^{prefix}/{methodnamehyphen}{trailing_slash}$',
name='{basename}-{methodnamehyphen}',
initkwargs={}
),
# Detail route.
Route(
url=r'^{prefix}/{lookup}{trailing_slash}$',
mapping={
'get': 'retrieve',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'
},
name='{basename}-detail',
initkwargs={'suffix': 'Instance'}
),
# Dynamically generated detail routes.
# Generated using @detail_route decorator on methods of the viewset.
DynamicDetailRoute(
url=r'^{prefix}/{lookup}/{methodnamehyphen}{trailing_slash}$',
name='{basename}-{methodnamehyphen}',
initkwargs={}
),
]
router = CustomRouter()
router.register(r'accounts', AccountViewSet)
router.register(r'podcasts', PodcastViewSet)
router.register(r'episodes', EpisodeViewSet)
urlpatterns = router.urls
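# A minimal usage sketch (hypothetical viewset, not part of this app) of how
# DynamicListRoute picks up decorated methods:
#
# from rest_framework.decorators import list_route
#
# class ExampleViewSet(viewsets.ModelViewSet):
# @list_route(methods=['get'])
# def most_recent(self, request):
# ...
#
# After router.register(r'examples', ExampleViewSet), the route pattern
# {prefix}/{methodnamehyphen} serves this at examples/most-recent (underscores
# become hyphens), under a name like 'example-most-recent'.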
|
Podcastor/podcastor-backend
|
src/app/api/urls.py
|
Python
|
gpl-2.0
| 1,923
|
'''
Created on Jun 15, 2014
@author: geraldine
'''
import socket
import fcntl
import struct
def get_ip_address(ifname):
# 0x8915 is SIOCGIFADDR; the ioctl fills an ifreq struct whose bytes 20:24
# hold the interface's IPv4 address.
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s', ifname[:15]))[20:24])
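# Example usage (assumes a Linux host with an interface named 'eth0'):
# print get_ip_address('eth0') # -> e.g. '192.168.1.10'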
|
DeeDee22/nelliepi
|
src/ch/fluxkompensator/nelliepi/IPAddressFinder.py
|
Python
|
gpl-2.0
| 283
|
# -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2019 EventGhost Project <http://www.eventghost.org/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
"""
Builds a file that would import all used modules.
This way we trick py2exe to include all standard library files and some more
packages and modules.
"""
import os
import sys
import warnings
from os.path import join
# Local imports
import builder
MODULES_TO_IGNORE = [
"__phello__.foo",
"antigravity",
"unittest",
"win32com.propsys.propsys",
"wx.lib.graphics",
"wx.lib.rpcMixin",
"wx.lib.wxcairo",
"wx.build.config",
]
HEADER = """\
# -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2019 EventGhost Project <http://www.eventghost.org/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
#-----------------------------------------------------------------------------
# This file was automatically created by the BuildImports.py script.
# Don't try to edit this file yourself.
#-----------------------------------------------------------------------------
#pylint: disable-msg=W0611,W0622,W0402,E0611,F0401
"""
warnings.simplefilter('error', DeprecationWarning)
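# Raising DeprecationWarning as an error lets TestImport() below catch it and
# decide whether deprecated modules should still be included.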
class BuildImports(builder.Task):
description = "Build Imports.py"
def Setup(self):
self.outFileName = join(self.buildSetup.pyVersionDir, "Imports.py")
if self.buildSetup.showGui:
if os.path.exists(self.outFileName):
self.activated = False
else:
self.activated = bool(self.buildSetup.args.build)
def DoTask(self):
"""
Starts the actual work.
"""
buildSetup = self.buildSetup
MODULES_TO_IGNORE.extend(buildSetup.excludeModules)
globalModuleIndex, badModules = ReadGlobalModuleIndex(
join(buildSetup.pyVersionDir, "Global Module Index.txt")
)
MODULES_TO_IGNORE.extend(badModules)
pyDir = sys.real_prefix if hasattr(sys, "real_prefix") else sys.prefix
stdLibModules = (
FindModulesInPath(join(pyDir, "DLLs"), "", True) +
FindModulesInPath(join(pyDir, "Lib"), "", True)
)
notFoundModules = []
for module in globalModuleIndex:
if module in stdLibModules:
continue
if module in sys.builtin_module_names:
continue
if ShouldBeIgnored(module):
continue
notFoundModules.append(module)
if notFoundModules:
print " Modules found in global module index but not in scan:"
for module in notFoundModules:
print " ", module
#print "Modules found in scan but not in global module index:"
#for module in stdLibModules:
# if module not in globalModuleIndex:
# print " ", module
outfile = open(self.outFileName, "wt")
outfile.write(HEADER)
for module in stdLibModules:
outfile.write("import %s\n" % module)
# add every .pyd of the current directory
for package in buildSetup.includeModules:
outfile.write("\n# modules found for package '%s'\n" % package)
for module in GetPackageModules(package):
outfile.write("import %s\n" % module)
outfile.write("\n")
outfile.close()
class DummyStdOut: #IGNORE:W0232 class has no __init__ method
"""
Just a dummy stdout implementation, that suppresses all output.
"""
def write(self, dummyData): #IGNORE:C0103
"""
A do-nothing write.
"""
pass
def FindModulesInPath(path, prefix="", includeDeprecated=False):
"""
Find modules and packages for a given filesystem path.
"""
if prefix:
prefix += "."
print " Scanning:", path
modules = []
for root, dirs, files in os.walk(path):
package = root[len(path) + 1:].replace("\\", ".")
package = prefix + package
for directory in dirs[:]:
if (
not os.path.exists(join(root, directory, "__init__.py")) or
ShouldBeIgnored(package + "." + directory)
):
dirs.remove(directory)
if ShouldBeIgnored(package) or package.rfind(".test") > 0:
continue
if package != prefix:
isOk, eType, eMesg = TestImport(package)
if isOk:
modules.append(package)
package += "."
for filename in files:
name, extension = os.path.splitext(filename)
if extension.lower() not in (".py", ".pyd"):
continue
moduleName = package + name
if ShouldBeIgnored(moduleName) or moduleName.endswith(".__init__"):
continue
if moduleName == "MimeWrite":
print "found"
isOk, eType, eMesg = TestImport(moduleName, includeDeprecated)
if not isOk:
if not eType == "DeprecationWarning":
print " ", moduleName, eType, eMesg
continue
modules.append(moduleName)
return modules
def GetPackageModules(package):
"""
Returns a list with all modules of the package.
"""
moduleList = []
tail = join("Lib", "site-packages", package) + ".pth"
pthPaths = [join(sys.prefix, tail)]
if hasattr(sys, "real_prefix"):
pthPaths.append(join(sys.real_prefix, tail))
for pthPath in pthPaths:
if os.path.exists(pthPath):
for path in ReadPth(pthPath):
moduleList.extend(FindModulesInPath(path))
break
else:
mod = __import__(package)
moduleList.append(package)
if hasattr(mod, "__path__"):
paths = mod.__path__
else:
if mod.__file__.endswith(".pyd"):
return moduleList
paths = [os.path.dirname(mod.__file__)]
for path in paths:
moduleList.extend(FindModulesInPath(path, package))
return moduleList
def GetPydFiles(path):
"""
Returns a list of all .pyd modules in supplied path.
"""
files = []
for filepath in os.listdir(path):
moduleName, extension = os.path.splitext(os.path.basename(filepath))
if extension.lower() == ".pyd":
files.append(moduleName)
return files
def ReadGlobalModuleIndex(infile):
"""
Read the global module index file (created by copy&paste from the Python
documentation) and sort out all modules that are not available on Windows.
"""
modules = []
badModules = []
inFile = open(infile, "r")
for line in inFile.readlines():
if line.startswith("#"):
continue
parts = line.strip().split(" ", 1)
if len(parts) > 1:
if parts[1].startswith("(") and parts[1].find("Windows") < 0:
badModules.append(parts[0])
continue
# if parts[1].find("Deprecated:") >= 0:
# print line
modules.append(parts[0])
inFile.close()
return modules, badModules
def ReadPth(path):
"""
Read a .PTH file and return the paths inside as a list
"""
result = []
pthFile = open(path, "rt")
for line in pthFile:
if line.strip().startswith("#"):
continue
result.append(join(os.path.dirname(path), line.strip()))
return result
def ShouldBeIgnored(moduleName):
"""
Return True if the supplied module should be ignored, because it is a
module or submodule in MODULES_TO_IGNORE.
"""
moduleParts = moduleName.split(".")
modulePartsLength = len(moduleParts)
for module in MODULES_TO_IGNORE:
ignoreParts = module.split(".")
ignorePartsLength = len(ignoreParts)
if ignorePartsLength > modulePartsLength:
continue
if moduleParts[:ignorePartsLength] == ignoreParts:
return True
return False
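# e.g. with "unittest" in MODULES_TO_IGNORE, ShouldBeIgnored("unittest.mock")
# is True (submodule of an ignored package), while ShouldBeIgnored("unittests")
# is False.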
def TestImport(moduleName, includeDeprecated=False):
"""
Test if the given module can be imported without error.
"""
#print "Testing", moduleName
oldStdOut = sys.stdout
oldStdErr = sys.stderr
sys.stdout = DummyStdOut()
try:
__import__(moduleName)
return (True, "", "")
except DeprecationWarning, exc:
return includeDeprecated, "DeprecationWarning", str(exc)
except ImportError, exc:
return False, "ImportError", str(exc)
except SyntaxError, exc:
return False, "SyntaxError", str(exc)
except Exception, exc:
return False, "Exception", str(exc)
finally:
sys.stdout = oldStdOut
sys.stderr = oldStdErr
|
topic2k/EventGhost
|
_build/builder/BuildImports.py
|
Python
|
gpl-2.0
| 9,896
|
from __future__ import print_function
num = 17
test = 2
while test < num:
if num % test == 0 and num != test:
print(num,'equals',test, '*', num/test)
print(num,'is not a prime number')
break
test = test + 1
else:
print(num,'is a prime number!')
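# Note: the else branch of a while loop runs only when the loop finishes
# without hitting break, i.e. when no divisor of num was found.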
|
sjm-ec/cbt-python
|
Units/06-Loops/GoodExample3.py
|
Python
|
gpl-2.0
| 257
|
class check_privilege_dbadm():
"""
check_privilege_dbadm:
The DBADM (database administration) role grants a user the authority to perform
administrative tasks on a specific database. It is recommended that the DBADM role
be granted to authorized users only.
"""
# References:
# https://benchmarks.cisecurity.org/downloads/show-single/?file=db2.120
TITLE = 'DBADM Role'
CATEGORY = 'Privilege'
TYPE = 'sql'
SQL = "SELECT DISTINCT grantee, granteetype FROM syscat.dbauth WHERE dbadmauth='Y'"
verbose = False
skip = False
result = {}
def do_check(self, *results):
output = ''
self.result['level'] = 'GREEN'
for rows in results:
for row in rows:
self.result['level'] = 'YELLOW'
output += 'DBADM granted to %s\n' % (row[0])
if 'GREEN' == self.result['level']:
output = 'No users granted DBADM.'
self.result['output'] = output
return self.result
def __init__(self, parent):
print('Performing check: ' + self.TITLE)
|
foospidy/DbDat
|
plugins/db2/check_privilege_dbadm.py
|
Python
|
gpl-2.0
| 1,117
|
#
# lxde.py
#
# Copyright (C) 2010 Fabio Erculiani
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from installclass import BaseInstallClass
from constants import *
from product import *
from flags import flags
import os, types
import iutil
import gettext
_ = lambda x: gettext.ldgettext("anaconda", x)
import installmethod
from sabayon import Entropy
from sabayon.livecd import LiveCDCopyBackend
class InstallClass(BaseInstallClass):
id = "sabayon_lxde"
name = N_("Kogaion LXDE")
_pixmap_dirs = os.getenv("PIXMAPPATH", "/usr/share/pixmaps").split(":")
for _pix_dir in _pixmap_dirs:
_pix_path = os.path.join(_pix_dir, "lxde.png")
if os.path.isfile(_pix_path):
pixmap = _pix_path
dmrc = "LXDE"
if Entropy().is_sabayon_steambox():
dmrc = "steambox"
_description = N_("Select this installation type for a default installation "
"with the LXDE desktop environment. "
"A small lightweight and functional working environment at your service.")
_descriptionFields = (productName,)
sortPriority = 10000
if not Entropy().is_installed("lxde-base/lxde-common"):
hidden = 1
def configure(self, anaconda):
BaseInstallClass.configure(self, anaconda)
BaseInstallClass.setDefaultPartitioning(self,
anaconda.storage, anaconda.platform)
def setSteps(self, anaconda):
BaseInstallClass.setSteps(self, anaconda)
anaconda.dispatch.skipStep("welcome", skip = 1)
#anaconda.dispatch.skipStep("network", skip = 1)
def getBackend(self):
return LiveCDCopyBackend
def productMatches(self, oldprod):
if oldprod is None:
return False
if oldprod.startswith(productName):
return True
return False
def versionMatches(self, oldver):
try:
oldVer = float(oldver)
newVer = float(productVersion)
except ValueError:
return True
return newVer >= oldVer
def __init__(self):
BaseInstallClass.__init__(self)
|
Rogentos/rogentos-anaconda
|
installclasses/lxde.py
|
Python
|
gpl-2.0
| 2,674
|
import os
import select
fds = [os.open("data", os.O_RDONLY)] # select() expects lists of fds; os.open returns a single int
while True:
reads, _, _ = select.select(fds, [], [], 2.0)
if 0 < len(reads):
d = os.read(reads[0], 10)
if d:
print "-> ", d
else:
break
else:
print "timeout"
|
mrniranjan/python-scripts
|
reboot/system/system4.py
|
Python
|
gpl-2.0
| 288
|
"""
PaStA - Patch Stack Analysis
Copyright (c) OTH Regensburg, 2019
Author:
Ralf Ramsauer <ralf.ramsauer@oth-regensburg.de>
This work is licensed under the terms of the GNU GPL, version 2. See
the COPYING file in the top-level directory.
"""
import argparse
import os
import sys
from fuzzywuzzy import fuzz
from logging import getLogger
from multiprocessing import Pool, cpu_count
from tqdm import tqdm
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from pypasta import *
log = getLogger(__name__[-15:])
repo = None
def shortlog(repo, hash, prefix=''):
commit = repo[hash]
log.info('%s%s: %s' % (prefix, hash, commit.subject))
def load_subject(message_id):
# FIXME respect non-unique message ids
message = repo.mbox.get_messages(message_id)[0]
subject = message['Subject']
if subject is None or not isinstance(subject, str):
return None
return message_id, subject
def check_mbox(config, argv):
parser = argparse.ArgumentParser(prog='check_mbox',
description='Check consistency of mailbox '
'result')
parser.add_argument('-v', dest='verbose', default=False,
action='store_true', help='Also dump detected patches')
parser.add_argument('-l', dest='lookup', default=False, action='store_true',
help='Perform a simple lookup')
parser.add_argument('-rd', dest='respect_date', default=False,
action='store_true', help='Respect author date')
parser.add_argument('range', type=str, nargs=1, help='Revision range')
args = parser.parse_args(argv)
if config.mode != config.Mode.MBOX:
log.error('Only works in Mbox mode!')
return -1
global repo
repo = config.repo
# !FIXME Not aligned with current API
_, cluster = config.load_patch_groups()
range = repo.get_commithash_range(args.range[0])
repo.cache_commits(range)
found = []
not_found = []
log.info('Processing %s' % args.range[0])
date_selector = get_date_selector(repo, None, 'AD')
for commit_hash in range:
commit = repo[commit_hash]
if commit_hash not in cluster:
not_found.append(commit_hash)
continue
mails = cluster.get_downstream(commit_hash)
if len(mails) == 0:
not_found.append(commit_hash)
continue
if not args.respect_date:
found.append(commit_hash)
continue
# We have to respect the author date in order to filter out backports.
if PatchComposition.is_forwardport(repo, cluster, date_selector, commit_hash):
found.append(commit_hash)
else:
not_found.append(commit_hash)
if args.verbose:
for detected in found:
shortlog(repo, detected)
for message_id in cluster.get_downstream(detected):
shortlog(repo, message_id, ' -> ')
log.info('Commit hashes with no mapped Message-Id:')
for missing in not_found:
shortlog(repo, missing)
log.info('Stats: %d/%d clusters have at least one mail assigned' %
(len(found), len(found) + len(not_found)))
if not args.lookup:
return 0
ids = repo.mbox.get_ids(allow_invalid=True)
valid_ids = repo.mbox.get_ids(allow_invalid=False)
with Pool(cpu_count()) as p:
result = tqdm(p.imap(load_subject, ids), total=len(ids))
result = dict(filter(None, result))
for missing in not_found:
commit = repo[missing]
original_subject = commit.subject.lower()
printed = False
for message_id, subject in result.items():
subject = subject.lower()
is_patch = ' PATCH' if message_id in valid_ids else 'NO PATCH'
if fuzz.ratio(original_subject, subject) > 80:
if not printed:
log.info('%s ("%s") might be...' %
(missing, commit.subject))
printed = True
log.info(' -> (%s) %s ("%s")' %
(is_patch, message_id.ljust(55), subject))
|
lfd/PaStA
|
bin/pasta_check_mbox.py
|
Python
|
gpl-2.0
| 4,216
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
import smallsmilhandler
import sys
import os
class KaraokeLocal(smallsmilhandler.SmallSMILHandler):
def __init__(self, fich):
parser = make_parser()
sHandler = smallsmilhandler.SmallSMILHandler()
parser.setContentHandler(sHandler)
parser.parse(fich)
self.list_tags = sHandler.get_tags()
def __str__(self):
todo = ""
for diccionarios in self.list_tags:
frase = ""
for clave in diccionarios.keys():
if clave != "name" and diccionarios[clave] != "":
frase = frase + clave + "=" + diccionarios[clave] + "\t"
todo = todo + diccionarios['name'] + "\t" + frase + "\n"
return todo
def do_local(self):
list_recurso = []
for diccionarios in self.list_tags:
for clave in diccionarios.keys():
if clave == "src":
recurso = diccionarios[clave]
os.system("wget -q " + recurso)
list_recurso = recurso.split("/")
recurso = list_recurso[-1]
diccionarios[clave] = recurso
if __name__ == "__main__":
try:
fich = open(sys.argv[1])
except IndexError:
print "Usage: python karaoke.py file.smil."
KL = KaraokeLocal(fich)
print KL
KL.do_local()
print KL
|
calvarezpe/ptavi-p3
|
karaoke.py
|
Python
|
gpl-2.0
| 1,492
|
#
# Extensible User Folder
#
# (C) Copyright 2000-2004 The Internet (Aust) Pty Ltd
# ACN: 082 081 472 ABN: 83 082 081 472
# All Rights Reserved
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# Author: Andrew Milton <akm@theinternet.com.au>
# $Id: __init__.py,v 1.1 2004/11/10 14:15:53 akm Exp $
import nullGroupSource
# If this fails due to NUG being absent, just skip it
try:
import zodbGroupSource
except ImportError:
pass
|
denys-duchier/Scolar
|
ZopeProducts/exUserFolder/GroupSources/__init__.py
|
Python
|
gpl-2.0
| 1,150
|
# -*- coding: UTF-8 -*-
## Zap-History Browser by AliAbdul
from Components.ActionMap import ActionMap
from Components.config import config, ConfigInteger, ConfigSelection, ConfigSubsection, getConfigListEntry
from Components.ConfigList import ConfigListScreen
from Components.Label import Label
from Components.Language import language
from Components.MenuList import MenuList
from Components.MultiContent import MultiContentEntryText
from enigma import eListboxPythonMultiContent, eServiceCenter, gFont, getDesktop, eSize
from os import environ
from Plugins.Plugin import PluginDescriptor
from Screens.ChannelSelection import ChannelSelection
from Screens.ParentalControlSetup import ProtectedScreen
from Screens.Screen import Screen
from Tools.Directories import resolveFilename, SCOPE_LANGUAGE, SCOPE_PLUGINS
import gettext
################################################
def localeInit():
lang = language.getLanguage()
environ["LANGUAGE"] = lang[:2]
gettext.bindtextdomain("enigma2", resolveFilename(SCOPE_LANGUAGE))
gettext.textdomain("enigma2")
gettext.bindtextdomain("ZapHistoryBrowser", "%s%s" % (resolveFilename(SCOPE_PLUGINS), "Extensions/ZapHistoryBrowser/locale/"))
def _(txt):
t = gettext.dgettext("ZapHistoryBrowser", txt)
if t == txt:
t = gettext.gettext(txt)
return t
localeInit()
language.addCallback(localeInit)
################################################
config.plugins.ZapHistoryConfigurator = ConfigSubsection()
config.plugins.ZapHistoryConfigurator.enable_zap_history = ConfigSelection(choices = {"off": _("disabled"), "on": _("enabled"), "parental_lock": _("disabled at parental lock")}, default="on")
config.plugins.ZapHistoryConfigurator.maxEntries_zap_history = ConfigInteger(default=20, limits=(1, 60))
################################################
def addToHistory(instance, ref):
if config.plugins.ZapHistoryConfigurator.enable_zap_history.value == "off":
return
if config.ParentalControl.configured.value and config.plugins.ZapHistoryConfigurator.enable_zap_history.value == "parental_lock":
if parentalControl.getProtectionLevel(ref.toCompareString()) != -1:
return
if instance.servicePath is not None:
tmp = instance.servicePath[:]
tmp.append(ref)
try: del instance.history[instance.history_pos+1:]
except: pass
instance.history.append(tmp)
hlen = len(instance.history)
if hlen > config.plugins.ZapHistoryConfigurator.maxEntries_zap_history.value:
del instance.history[0]
hlen -= 1
instance.history_pos = hlen-1
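# Monkey patch: replace the stock ChannelSelection.addToHistory at import time
# so every zap is recorded through the configurable history logic above.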
ChannelSelection.addToHistory = addToHistory
################################################
class ZapHistoryConfigurator(ConfigListScreen, Screen):
skin = """
<screen position="center,center" size="420,70" title="%s" >
<widget name="config" position="0,0" size="420,70" scrollbarMode="showOnDemand" />
</screen>""" % _("Zap-History Configurator")
def __init__(self, session):
Screen.__init__(self, session)
self.session = session
ConfigListScreen.__init__(self, [
getConfigListEntry(_("Enable zap history:"), config.plugins.ZapHistoryConfigurator.enable_zap_history),
getConfigListEntry(_("Maximum zap history entries:"), config.plugins.ZapHistoryConfigurator.maxEntries_zap_history)])
self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.save, "cancel": self.exit}, -2)
def save(self):
for x in self["config"].list:
x[1].save()
self.close()
def exit(self):
for x in self["config"].list:
x[1].cancel()
self.close()
################################################
class ZapHistoryBrowserList(MenuList):
def __init__(self, list, enableWrapAround=True):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
desktopSize = getDesktop(0).size()
if desktopSize.width() == 1920:
self.l.setItemHeight(30)
self.l.setFont(0, gFont("Regular", 28))
self.l.setFont(1, gFont("Regular", 25))
else:
# 1280-wide and smaller desktops share the same metrics
self.l.setItemHeight(21)
self.l.setFont(0, gFont("Regular", 21))
self.l.setFont(1, gFont("Regular", 16))
def ZapHistoryBrowserListEntry(serviceName, eventName):
desktopSize = getDesktop(0).size()
if desktopSize.width() == 1920:
res = [serviceName]
res.append(MultiContentEntryText(pos=(0, 0), size=(230, 30), font=0, text=serviceName))
res.append(MultiContentEntryText(pos=(240, 0), size=(550, 30), font=1, text=eventName))
return res
else:
# 1280-wide and smaller desktops share the same layout
res = [serviceName]
res.append(MultiContentEntryText(pos=(0, 0), size=(180, 22), font=0, text=serviceName))
res.append(MultiContentEntryText(pos=(190, 0), size=(550, 16), font=1, text=eventName))
return res
################################################
class ZapHistoryBrowser(Screen, ProtectedScreen):
skin = """
<screen position="670,440" size="560,210" title="%s" >
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" transparent="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" transparent="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" transparent="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" transparent="1" alphatest="on" />
<widget name="key_red" position="0,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
<widget name="key_green" position="140,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
<widget name="key_yellow" position="280,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
<widget name="key_blue" position="420,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
<widget name="list" position="0,40" size="560,180" scrollbarMode="showOnDemand" />
</screen>""" % _("Zap-History")
def __init__(self, session, servicelist):
Screen.__init__(self, session)
ProtectedScreen.__init__(self)
self.session = session
self.servicelist = servicelist
self.serviceHandler = eServiceCenter.getInstance()
self.allowChanges = True
self["list"] = ZapHistoryBrowserList([])
self["key_red"] = Label(_("Clear"))
self["key_green"] = Label(_("Delete"))
self["key_yellow"] = Label(_("Zap+Close"))
self["key_blue"] = Label(_("Config"))
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"ok": self.zap,
"cancel": self.close,
"red": self.clear,
"green": self.delete,
"yellow": self.zapAndClose,
"blue": self.config
}, prio=-1)
self.onLayoutFinish.append(self.buildList)
def buildList(self):
list = []
for x in self.servicelist.history:
if len(x) == 2: # Single-Bouquet
ref = x[1]
else: # Multi-Bouquet
ref = x[2]
info = self.serviceHandler.info(ref)
if info:
name = info.getName(ref).replace('\xc2\x86', '').replace('\xc2\x87', '')
event = info.getEvent(ref)
if event is not None:
eventName = event.getEventName()
if eventName is None:
eventName = ""
else:
eventName = ""
else:
name = "N/A"
eventName = ""
list.append(ZapHistoryBrowserListEntry(name, eventName))
list.reverse()
self["list"].setList(list)
def zap(self):
length = len(self.servicelist.history)
if length > 0:
self.servicelist.history_pos = (length - self["list"].getSelectionIndex()) - 1
self.servicelist.setHistoryPath()
def clear(self):
if self.allowChanges:
for i in range(0, len(self.servicelist.history)):
del self.servicelist.history[0]
self.buildList()
self.servicelist.history_pos = 0
def delete(self):
if self.allowChanges:
length = len(self.servicelist.history)
if length > 0:
idx = (length - self["list"].getSelectionIndex()) - 1
del self.servicelist.history[idx]
self.buildList()
currRef = self.session.nav.getCurrentlyPlayingServiceReference()
idx = 0
for x in self.servicelist.history:
if len(x) == 2: # Single-Bouquet
ref = x[1]
else: # Multi-Bouquet
ref = x[2]
if ref == currRef:
self.servicelist.history_pos = idx
break
else:
idx += 1
def zapAndClose(self):
self.zap()
self.close()
def config(self):
if self.allowChanges:
self.session.open(ZapHistoryConfigurator)
def isProtected(self):
return config.ParentalControl.setuppinactive.value and config.ParentalControl.configured.value
def pinEntered(self, result):
if result is None:
self.allowChanges = False
elif not result:
self.allowChanges = False
else:
self.allowChanges = True
################################################
def main(session, servicelist, **kwargs):
session.open(ZapHistoryBrowser, servicelist)
def Plugins(**kwargs):
return PluginDescriptor(name=_("Zap-History Browser"), where=PluginDescriptor.WHERE_EXTENSIONSMENU, fnc=main)
|
popazerty/openhdf-enigma2
|
lib/python/Plugins/Extensions/ZapHistoryBrowser/plugin.py
|
Python
|
gpl-2.0
| 9,376
|
# **********************************************************************
#
# Copyright (c) 2003-2017 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import sys, Ice, Test
def test(b):
if not b:
raise RuntimeError('test assertion failed')
def allTests(communicator):
sys.stdout.write("testing stringToProxy... ")
sys.stdout.flush()
base = communicator.stringToProxy("test:default -p 12010")
test(base)
print("ok")
sys.stdout.write("testing checked cast... ")
sys.stdout.flush()
obj = Test.TestIntfPrx.checkedCast(base)
test(obj)
test(obj == base)
print("ok")
sys.stdout.write("creating/destroying/recreating object adapter... ")
sys.stdout.flush()
adapter = communicator.createObjectAdapterWithEndpoints("TransientTestAdapter", "default")
try:
communicator.createObjectAdapterWithEndpoints("TransientTestAdapter", "default")
test(False)
except Ice.LocalException:
pass
adapter.destroy()
adapter = communicator.createObjectAdapterWithEndpoints("TransientTestAdapter", "default")
adapter.destroy()
print("ok")
sys.stdout.write("creating/activating/deactivating object adapter in one operation... ")
sys.stdout.flush()
obj.transient()
print("ok")
sys.stdout.write("deactivating object adapter in the server... ")
sys.stdout.flush()
obj.deactivate()
print("ok")
sys.stdout.write("testing connection closure... ");
sys.stdout.flush();
for x in range(10):
initData = Ice.InitializationData();
initData.properties = communicator.getProperties().clone();
comm = Ice.initialize(initData);
comm.stringToProxy("test:default -p 12010").ice_pingAsync();
comm.destroy();
print("ok");
sys.stdout.write("testing whether server is gone... ")
sys.stdout.flush()
try:
obj.ice_timeout(100).ice_ping() # Use timeout to speed up testing on Windows
test(False)
except Ice.LocalException:
print("ok")
return obj
|
ljx0305/ice
|
python/test/Ice/adapterDeactivation/AllTests.py
|
Python
|
gpl-2.0
| 2,229
|
import re
from jaglt import *
from jaglf import *
''' Regexes '''
JRE_Num = [
re.compile(r"[0-8]+o"), #Octal
re.compile(r"[\dA-F]+x"), #Hex
re.compile(r"(?:-?\d+(?:\.(?:\d+)?)?|\.\d+|-?\d+)e-?\d+"), #Scientific
re.compile(r"-?\d+(?:\.(?:\d+)?)?|-?\.\d+"), #Decimal
]
JRE_Str = re.compile(r'"(?:[^\\"]|\\.)*"') #String syntactic sugar
JRE_EStr = re.compile(r"'(?:[^\\/]|\\.)*'") #Escaped string syntactic sugar
''' Preprocessor for shorthands '''
def preprocess(string):
string = re.sub(r'([^\s\d\}orfuwF/%z])([orfuwF/%z])(?=([^"\\]*(\\.|"([^"\\]*\\.)*[^"\\]*"))*[^"]*$)(?=([^\'\\]*(\\.|\'([^\'\\]*\\.)*[^\'\\]*\'))*[^\']*$)', r"{\1}\2", string) #Shorthand for one function map
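# e.g. preprocess("+r") -> "{+}r"; digits, '}' and the function characters
# themselves are never wrapped, and the quote lookaheads keep the rewrite from
# firing inside "..." and '...' string literals.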
return string
''' Make a bracket map for array '''
def makeOuterMap(string, start, end, escape=None):
if string:
q, m = [], []
lst = None
for i, x in enumerate(string):
if (escape and lst != escape) or not escape:
if x == start:
q.append(i)
elif x == end:
if len(q) == 1:
m.append((q.pop(), i))
else:
q.pop()
lst = x
return m
else:
return []
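# e.g. makeOuterMap("(a(b))", "(", ")") -> [(0, 5)]: only the outermost
# bracket pairs are recorded; inner pairs just rebalance the stack.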
''' Level Classification '''
def classifyLevel(string):
c = []
lists = map(lambda x: (x[0], x[1], JArray), makeOuterMap(string, "(", ")")) #Extend with arrays
lists.extend(map(lambda x: (x[0], x[1], JBlock), makeOuterMap(string, "{", "}"))) #Extend with blocks
lists.extend(map(lambda x: (x.start(), x.end(), str) , JRE_Str.finditer(string))) #Extend with strings
lists.extend(map(lambda x: (x.start(), x.end(), EStr), JRE_EStr.finditer(string)))
c.extend(lists)
ints = []
for r in JRE_Num:
matches = map(lambda x: (x.start(), x.end(), JNum) ,list(r.finditer(string))) #Get matches for int type
matches = filter(lambda x: not any(y[0] <= x[0] < y[1] for y in ints), matches) #Filter out overlapping int matches
ints.extend(matches)
c.extend(ints)
symbols = [True for i in range(len(string))] #Make a map to detect symbols
for s, e, _ in c: #Filter out all already detected types
if _ in [JArray, JBlock]:
e = e + 1
for x in range(s, e):
symbols[x] = False
for i, v in enumerate(string): #Filter out all whitespace
if re.match(r"\s", v):
symbols[i] = False
for i, s in enumerate(symbols): #Make everything a symbol
if s:
c.append((i, i+1, JFunc))
c = filter(lambda x: not any(y[0] < x[0] < y[1] for y in lists), c) #Filter out any elements in arrays or blocks
return sorted(c)
''' Recursively (possibly) create array '''
def makeArray(string):
inner = string[1:]
lev = classifyLevel(inner)
arr = []
for s, e, clss in lev:
if clss == JNum:
arr.append(JNum(inner[s:e]))
elif clss == JFunc:
arr.append(JFunc(inner[s:e]))
elif clss in [JArray, JBlock]:
arr.append(makeArray(inner[s:e]))
elif clss == str:
arr.append(JArray(map(lambda x: JNum(ord(x)), inner[s+1:e-1])))
elif clss == EStr:
arr.append(JArray(map(lambda x: JNum(ord(x)), inner[s+1:e-1].decode("string_escape"))))
if string[0] == "(":
return JArray(arr)
return JBlock(arr)
''' Tokenizer '''
def tokenize(string):
string = preprocess(string)
il = classifyLevel(string)
tokens = []
for s, e, clss in il:
if clss == JNum:
tokens.append(JNum(string[s:e]))
elif clss == JFunc:
tokens.append(JFunc(string[s:e]))
elif clss in [JArray, JBlock]:
tokens.append(makeArray(string[s:e]))
elif clss == str:
tokens.append(JArray(map(lambda x: JNum(ord(x)), string[s+1:e-1])))
elif clss == EStr:
tokens.append(JArray(map(lambda x: JNum(ord(x)), string[s+1:e-1].decode("string_escape"))))
return tokens
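# e.g. tokenize("1 2+") -> [JNum('1'), JNum('2'), JFunc('+')], while a string
# literal such as "ab" becomes a JArray of character-code JNums (assuming the
# JNum/JFunc/JArray constructors from jaglt accept these forms).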
|
globby/Jagl
|
jaglk.py
|
Python
|
gpl-2.0
| 3,653
|
# -*- coding: utf-8 -*-
import ctypes, os, sys, unittest
from PySide.QtCore import *
from PySide.QtGui import *
import ScintillaCallable
sys.path.append("..")
from bin import ScintillaEditPy
scintillaDirectory = ".."
scintillaIncludeDirectory = os.path.join(scintillaDirectory, "include")
sys.path.append(scintillaIncludeDirectory)
import Face
class Form(QDialog):
def __init__(self, parent=None):
super(Form, self).__init__(parent)
self.resize(460,300)
# Create widget
self.edit = ScintillaEditPy.ScintillaEdit(self)
class XiteWin():
def __init__(self, test=""):
self.face = Face.Face()
self.face.ReadFromFile(os.path.join(scintillaIncludeDirectory, "Scintilla.iface"))
self.test = test
self.form = Form()
scifn = self.form.edit.send(int(self.face.features["GetDirectFunction"]["Value"]), 0, 0)
sciptr = ctypes.c_char_p(self.form.edit.send(
int(self.face.features["GetDirectPointer"]["Value"]), 0,0))
self.ed = ScintillaCallable.ScintillaCallable(self.face, scifn, sciptr)
self.form.show()
def DoStuff(self):
print(self.test)
self.CmdTest()
def DoEvents(self):
QApplication.processEvents()
def CmdTest(self):
runner = unittest.TextTestRunner()
tests = unittest.defaultTestLoader.loadTestsFromName(self.test)
results = runner.run(tests)
print(results)
sys.exit(0)
xiteFrame = None
def main(test):
global xiteFrame
app = QApplication(sys.argv)
xiteFrame = XiteWin(test)
xiteFrame.DoStuff()
sys.exit(app.exec_())
|
Vinatorul/notepad-plus-plus
|
scintilla/test/XiteQt.py
|
Python
|
gpl-2.0
| 1,546
|
# -*- coding: utf-8 -*-
from functools import partial
from types import NoneType
from navmazing import NavigateToSibling, NavigateToAttribute
from cfme.exceptions import DestinationNotFound
from cfme.fixtures import pytest_selenium as sel
from cfme.provisioning import provisioning_form as request_form
from cfme.web_ui import (
Form, Select, Table, accordion, fill, paginator,
flash, form_buttons, tabstrip, DHTMLSelect, Input, Tree, AngularSelect,
BootstrapTreeview, toolbar as tb, match_location, CheckboxTable)
from utils import version, fakeobject_or_object
from utils.appliance import Navigatable
from utils.appliance.implementations.ui import CFMENavigateStep, navigate_to, navigator
from utils.update import Updateable
from utils.pretty import Pretty
from utils.version import current_version
cfg_btn = partial(tb.select, "Configuration")
policy_btn = partial(tb.select, "Policy")
accordion_tree = partial(accordion.tree, "Catalog Items")
dynamic_tree = Tree("//div[@id='basic_info_div']//ul[@class='dynatree-container']")
entry_tree = BootstrapTreeview('automate_treebox')
listview_table = CheckboxTable(table_locator='//div[@id="list_grid"]/table')
template_select_form = Form(
fields=[
('template_table', Table('//div[@id="prov_vm_div"]/table')),
('add_button', form_buttons.add),
('cancel_button', form_buttons.cancel)
]
)
# Forms
basic_info_form = Form(
fields=[
('name_text', Input("name")),
('description_text', Input("description")),
('display_checkbox', Input("display")),
('select_catalog', AngularSelect('catalog_id')),
('select_dialog', AngularSelect('dialog_id')),
('select_orch_template', AngularSelect('template_id')),
('select_provider', AngularSelect('manager_id')),
('select_config_template', AngularSelect('template_id')),
('field_entry_point', Input("fqname")),
('retirement_entry_point', Input("retire_fqname")),
('edit_button', form_buttons.save),
('apply_btn', '//a[normalize-space(.)="Apply"]')
])
# TODO: Replace with Taggable
edit_tags_form = Form(
fields=[
("select_tag", AngularSelect('tag_cat')),
("select_value", AngularSelect('tag_add'))
])
detail_form = Form(
fields=[
('long_desc', Input('long_description')),
])
resources_form = Form(
fields=[
('choose_resource', Select("//select[@id='resource_id']")),
('add_button', form_buttons.add),
('save_button', form_buttons.save)
])
button_group_form = Form(
fields=[
('btn_group_text', Input("name")),
('btn_group_hvr_text', Input("description")),
('add_button', form_buttons.add)
])
button_form = Form(
fields=[
('btn_text', Input("name")),
('btn_hvr_text', Input("description")),
('select_dialog', Select("//select[@id='dialog_id']")),
('system_process', Select("//select[@id='instance_name']")),
('request', Input("object_request")),
('add_button', form_buttons.add)
])
match_page = partial(match_location, title='Catalogs', controller='catalog')
class CatalogItem(Updateable, Pretty, Navigatable):
pretty_attrs = ['name', 'item_type', 'catalog', 'catalog_name', 'provider', 'domain']
def __init__(self, item_type=None, vm_name=None, name=None, description=None,
display_in=False, catalog=None, dialog=None,
catalog_name=None, orch_template=None, provider_type=None,
provider=None, config_template=None, prov_data=None, domain="ManageIQ (Locked)",
appliance=None):
self.item_type = item_type
self.vm_name = vm_name
self.name = name
self.description = description
self.display_in = display_in
self.catalog = catalog
self.dialog = dialog
self.catalog_name = catalog_name
self.orch_template = orch_template
self.provider = provider
self.config_template = config_template
self.provider_type = provider_type
self.provisioning_data = prov_data
self.domain = domain
Navigatable.__init__(self, appliance=appliance)
def __str__(self):
return self.name
def create(self):
# Create has sequential forms, the first is only the provider type
navigate_to(self, 'Add')
# For element not found exception (To be removed)
sel.sleep(5)
sel.select("//select[@id='st_prov_type']",
self.provider_type or self.item_type or 'Generic')
sel.wait_for_element(basic_info_form.name_text)
catalog = fakeobject_or_object(self.catalog, "name", "Unassigned")
dialog = fakeobject_or_object(self.dialog, "name", "No Dialog")
# Need to provide the (optional) provider name to the form, not the object
provider_name = None
provider_required_types = ['AnsibleTower', 'Orchestration']
if self.item_type in provider_required_types \
or self.provider_type in provider_required_types:
provider_name = self.provider.name
# For tests where orchestration template is None
orch_template = None
if self.orch_template:
orch_template = self.orch_template.template_name
fill(basic_info_form, {'name_text': self.name,
'description_text': self.description,
'display_checkbox': self.display_in,
'select_catalog': catalog.name,
'select_dialog': dialog.name,
'select_orch_template': orch_template,
'select_provider': provider_name,
'select_config_template': self.config_template})
if not (self.item_type in provider_required_types):
sel.click(basic_info_form.field_entry_point)
if version.current_version() < "5.7":
dynamic_tree.click_path("Datastore", self.domain, "Service", "Provisioning",
"StateMachines", "ServiceProvision_Template", "default")
else:
entry_tree.click_path("Datastore", self.domain, "Service", "Provisioning",
"StateMachines", "ServiceProvision_Template", "default")
sel.click(basic_info_form.apply_btn)
if version.current_version() >= "5.7" and self.item_type == "AnsibleTower":
sel.click(basic_info_form.retirement_entry_point)
entry_tree.click_path("Datastore", self.domain, "Service", "Retirement",
"StateMachines", "ServiceRetirement", "Generic")
sel.click(basic_info_form.apply_btn)
if self.catalog_name is not None \
and self.provisioning_data is not None \
and not isinstance(self.provider, NoneType):
tabstrip.select_tab("Request Info")
tabstrip.select_tab("Catalog")
template = template_select_form.template_table.find_row_by_cells({
'Name': self.catalog_name,
'Provider': self.provider.name
})
sel.click(template)
request_form.fill(self.provisioning_data)
sel.click(template_select_form.add_button)
def update(self, updates):
navigate_to(self, 'Edit')
fill(basic_info_form, {'name_text': updates.get('name', None),
'description_text':
updates.get('description', None)},
action=basic_info_form.edit_button)
flash.assert_success_message('Service Catalog Item "{}" was saved'.format(self.name))
def delete(self, from_dest='All'):
if from_dest in navigator.list_destinations(self):
navigate_to(self, from_dest)
else:
msg = 'cfme.services.catalogs.catalog_item does not have destination {}'\
.format(from_dest)
raise DestinationNotFound(msg)
if from_dest == 'All':
# select the row for deletion
listview_table.select_row_by_cells({'Name': self.name,
'Description': self.description})
cfg_btn(version.pick({version.LOWEST: 'Remove Items from the VMDB',
'5.7': 'Remove Catalog Items'}), invokes_alert=True)
if from_dest == 'Details':
cfg_btn(version.pick({version.LOWEST: 'Remove Item from the VMDB',
'5.7': 'Remove Catalog Item'}), invokes_alert=True)
sel.handle_alert()
flash.assert_success_message(version.pick(
{version.LOWEST: 'The selected 1 Catalog Item were deleted',
'5.7': 'The selected 1 Catalog Item was deleted'}))
def add_button_group(self):
navigate_to(self, 'Details')
cfg_btn("Add a new Button Group", invokes_alert=True)
sel.wait_for_element(button_group_form.btn_group_text)
fill(button_group_form, {'btn_group_text': "group_text",
'btn_group_hvr_text': "descr"})
if current_version() > "5.5":
select = AngularSelect("button_image")
select.select_by_visible_text("Button Image 1")
else:
select = DHTMLSelect("div#button_div")
select.select_by_value(1)
sel.click(button_group_form.add_button)
flash.assert_success_message('Buttons Group "descr" was added')
def add_button(self):
navigate_to(self, 'Details')
cfg_btn('Add a new Button', invokes_alert=True)
sel.wait_for_element(button_form.btn_text)
fill(button_form, {'btn_text': "btn_text",
'btn_hvr_text': "btn_descr"})
if current_version() > "5.5":
select = AngularSelect("button_image")
select.select_by_visible_text("Button Image 1")
else:
select = DHTMLSelect("div#button_div")
select.select_by_value(2)
fill(button_form, {'select_dialog': self.dialog,
'system_process': "Request",
'request': "InspectMe"})
sel.click(button_form.add_button)
flash.assert_success_message('Button "btn_descr" was added')
def edit_tags(self, tag, value):
navigate_to(self, 'Details')
policy_btn('Edit Tags', invokes_alert=True)
fill(edit_tags_form, {'select_tag': tag,
'select_value': value},
action=form_buttons.save)
flash.assert_success_message('Tag edits were successfully saved')
class CatalogBundle(Updateable, Pretty, Navigatable):
pretty_attrs = ['name', 'catalog', 'dialog']
def __init__(self, name=None, description=None, display_in=None, catalog=None, dialog=None,
appliance=None):
self.name = name
self.description = description
self.display_in = display_in
self.catalog = catalog
self.dialog = dialog
Navigatable.__init__(self, appliance=appliance)
def __str__(self):
return self.name
def create(self, cat_items):
navigate_to(self, 'Add')
domain = "ManageIQ (Locked)"
fill(basic_info_form, {'name_text': self.name,
'description_text': self.description,
'display_checkbox': self.display_in,
'select_catalog': str(self.catalog),
'select_dialog': str(self.dialog)})
sel.click(basic_info_form.field_entry_point)
if sel.text(basic_info_form.field_entry_point) == "":
if version.current_version() < "5.7":
dynamic_tree.click_path("Datastore", domain, "Service", "Provisioning",
"StateMachines", "ServiceProvision_Template", "default")
else:
entry_tree.click_path("Datastore", domain, "Service", "Provisioning",
"StateMachines", "ServiceProvision_Template", "default")
sel.click(basic_info_form.apply_btn)
tabstrip.select_tab("Resources")
for cat_item in cat_items:
fill(resources_form, {'choose_resource': cat_item})
sel.click(resources_form.add_button)
flash.assert_success_message('Catalog Bundle "{}" was added'.format(self.name))
def update(self, updates):
navigate_to(self, 'Edit')
fill(basic_info_form, {'name_text': updates.get('name', None),
'description_text':
updates.get('description', None)})
tabstrip.select_tab("Resources")
fill(resources_form, {'choose_resource':
updates.get('cat_item', None)},
action=resources_form.save_button)
flash.assert_success_message('Catalog Bundle "{}" was saved'.format(self.name))
@navigator.register(CatalogItem, 'All')
class ItemAll(CFMENavigateStep):
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def am_i_here(self):
return match_page(summary='All Service Catalog Items')
def step(self):
self.prerequisite_view.navigation.select('Services', 'Catalogs')
tree = accordion.tree('Catalog Items')
tree.click_path('All Catalog Items')
def resetter(self):
tb.refresh()
tb.select('List View')
# Ensure no rows are checked
if paginator.page_controls_exist():
sel.check(paginator.check_all())
sel.uncheck(paginator.check_all())
@navigator.register(CatalogItem, 'Details')
class ItemDetails(CFMENavigateStep):
prerequisite = NavigateToSibling('All')
# No am_i_here() due to summary duplication between item and bundle
def step(self):
listview_table.click_row_by_cells({'Name': self.obj.name,
'Description': self.obj.description,
'Type': 'Item'})
def resetter(self):
tb.refresh()
@navigator.register(CatalogItem, 'Add')
class ItemAdd(CFMENavigateStep):
prerequisite = NavigateToSibling('All')
def am_i_here(self):
return match_page(summary='Adding a new Service Catalog Item')
def step(self):
cfg_btn('Add a New Catalog Item')
@navigator.register(CatalogItem, 'Edit')
class ItemEdit(CFMENavigateStep):
prerequisite = NavigateToSibling('Details')
def am_i_here(self):
return match_page(summary='Editing Service Catalog Item "{}"'.format(self.obj.name))
def step(self):
cfg_btn('Edit this Item')
@navigator.register(CatalogBundle, 'All')
class BundleAll(CFMENavigateStep):
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def am_i_here(self):
return match_page(summary='All Service Catalog Items')
def step(self):
self.prerequisite_view.navigation.select('Services', 'Catalogs')
tree = accordion.tree('Catalog Items')
tree.click_path('All Catalog Items')
def resetter(self):
tb.refresh()
tb.select('List View')
# Ensure no rows are checked
if paginator.page_controls_exist():
sel.check(paginator.check_all())
sel.uncheck(paginator.check_all())
@navigator.register(CatalogBundle, 'Details')
class BundleDetails(CFMENavigateStep):
prerequisite = NavigateToSibling('All')
# No am_i_here() due to summary duplication between item and bundle
def step(self):
listview_table.click_row_by_cells({'Name': self.obj.name,
'Description': self.obj.description,
'Type': 'Bundle'})
def resetter(self):
tb.refresh()
@navigator.register(CatalogBundle, 'Add')
class BundleAdd(CFMENavigateStep):
prerequisite = NavigateToSibling('All')
def am_i_here(self):
return match_page(summary='Adding a new Catalog Bundle')
def step(self):
cfg_btn('Add a New Catalog Bundle')
@navigator.register(CatalogBundle, 'Edit')
class BundleEdit(CFMENavigateStep):
prerequisite = NavigateToSibling('Details')
def am_i_here(self):
return match_page(summary='Editing Catalog Bundle "{}"'.format(self.obj.name))
def step(self):
cfg_btn('Edit this Item')
|
rananda/cfme_tests
|
cfme/services/catalogs/catalog_item.py
|
Python
|
gpl-2.0
| 16,459
|
# A comment, this is so you can read your program later.
# Anything after the # is ignored by python.
print "I could have code like this." # and the comment after is ignored
# You can also use a comment to "disable" or comment out a piece of code:
# print "This won't run."
print "This will run."
# Adding another few lines just for fun.
print 'Q: Why does the "#" in "print "Hi # there." not get ignored?'
# print 'The # in that code is inside a string, so it will be put into the string until the ending " character is hit. These pound characters are just considered characters and are not considered comments.'
# Another way to put it: (aren't instead of "are not")
print "The # in that code is inside a string, so it will put into the string until the ending \" character is hit. These pound characters are just considered characters and aren't considered comments."
# The backslash will escape the special character, as seen on the code above. Isaac Albeniz - Asturias :-)
|
estebanfallasf/python_training
|
ex2.py
|
Python
|
gpl-2.0
| 978
|
import MDAnalysis
import matplotlib.pyplot as plt
import numpy as np
from MDAnalysis.analysis.align import *
from MDAnalysis.analysis.rms import rmsd
def ligRMSD(u,ref):
"""
This function produces RMSD data and plots for ligand.
:input
1) Universe of Trajectory
2) reference universe
:return
1) matplot object
2) array for RMSD data.
"""
RMSD_lig = []
ligand = u.select_atoms("(resid 142:146) and not name H*") ## include selection based on user description
#current = u.select_atoms("segname BGLC and not name H*")
reference = ref.select_atoms("(resid 142:146) and not name H*")
for ts in u.trajectory:
A = ligand.coordinates()
B = reference.coordinates()
C = rmsd(A,B)
RMSD_lig.append((u.trajectory.frame, C))
RMSD_lig = np.array(RMSD_lig)
#print RMSD_lig
ax = plt.subplot(111)
ax.plot(RMSD_lig[:,0], RMSD_lig[:,1], 'r--', lw=2, label=r"$R_G$")
ax.set_xlabel("Frame")
ax.set_ylabel(r"RMSD of ligand ($\AA$)")
#ax.figure.savefig("RMSD_ligand.pdf")
#plt.draw()
handles, labels = ax.get_legend_handles_labels()
ax.legend(handles, labels, loc = 'lower left')
return ax, RMSD_lig
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='This function will plot RMSD for a given universe (trajectory).')
parser.add_argument('-j', '--jobname', help='Enter your job name and it will appear as first coloumn in the result file', default='Test')
parser.add_argument('-trj', '--trajectory', help='Filename of Trajecotry file.', required=True)
parser.add_argument('-top', '--topology', help='Filename of psf/topology file', required=True)
args = parser.parse_args()
u = MDAnalysis.Universe(args.topology, args.trajectory)
ref = MDAnalysis.Universe(args.topology, args.trajectory)
ligandRMSD = []
fig,ligandRMSD = ligRMSD(u,ref)
#print caRMSD
np.savetxt(args.jobname+"_ligRMSD.data", ligandRMSD)
fig.figure.savefig(args.jobname+"_ligRMSD.pdf")
|
mktumbi/SimAnaRep
|
SimRepAnaligRMSD.py
|
Python
|
gpl-2.0
| 2,124
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011-2012 Domsense s.r.l. (<http://www.domsense.com>).
# Copyright (C) 2012 Agile Business Group sagl (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "VAT on payment",
"version": "2.0",
'category': 'Generic Modules/Accounting',
"depends": ["account_voucher_cash_basis"],
"author": "Agile Business Group",
"description": """
See 'account_voucher_cash_basis' description.
To activate the VAT on payment behaviour, this module adds a checkbox on the invoice form: 'Vat on payment'.
Moreover, three things have to be configured:
- On account object, Related account used for real registrations on a VAT on payment basis
- On journal object, Related journal used for shadow registrations on a VAT on payment basis
- On tax code object, Related tax code used for real registrations on a VAT on payment basis
Requirements: https://docs.google.com/spreadsheet/ccc?key=0Aodwq17jxF4edDJaZ2dOQkVEN0hodEtfRmpVdlg2Vnc#gid=0
Howto:
http://planet.domsense.com/en/2012/10/vat-on-payment-treatment-with-openerp/
""",
'website': 'http://www.agilebg.com',
'init_xml': [],
'update_xml': [
'account_view.xml',
'company_view.xml',
],
'demo_xml': [], # TODO YAML tests
'installable': True,
'active': False,
}
|
syci/domsense-agilebg-addons
|
account_vat_on_payment/__openerp__.py
|
Python
|
gpl-2.0
| 2,244
|
# vim:ts=4:et
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
from struct import unpack
import os.path
from math import pi, sqrt
import bpy
from bpy_extras.object_utils import object_data_add
from mathutils import Vector,Matrix,Quaternion
from bpy_extras.io_utils import ImportHelper
from bpy.props import BoolProperty, FloatProperty, StringProperty, EnumProperty
from bpy.props import FloatVectorProperty, PointerProperty
from .mu import MuEnum, Mu, MuColliderMesh, MuColliderSphere, MuColliderCapsule
from .mu import MuColliderBox, MuColliderWheel
from .shader import make_shader
from .material import make_material
from . import collider, properties
EXCLUDED_OBJECTS=['flare', 'busted', 'flag']
def create_uvs(mu, uvs, mesh, name):
uvlay = mesh.uv_textures.new(name)
uvloop = mesh.uv_layers[name]
for i, uvl in enumerate(uvloop.data):
v = mesh.loops[i].vertex_index
uvl.uv = uvs[v]
def create_mesh(mu, mumesh, name):
mesh = bpy.data.meshes.new(name)
faces = []
for sm in mumesh.submeshes:
faces.extend(sm)
mesh.from_pydata(mumesh.verts, [], faces)
if mumesh.uvs:
create_uvs(mu, mumesh.uvs, mesh, name + ".UV")
if mumesh.uv2s:
create_uvs(mu, mumesh.uv2s, mesh, name + ".UV2")
return mesh
def create_mesh_object(name, mesh, transform):
obj = bpy.data.objects.new(name, mesh)
obj.rotation_mode = 'QUATERNION'
if transform:
obj.location = Vector(transform.localPosition)
obj.rotation_quaternion = Quaternion(transform.localRotation)
obj.scale = Vector(transform.localScale)
else:
obj.location = Vector((0, 0, 0))
obj.rotation_quaternion = Quaternion((1,0,0,0))
obj.scale = Vector((1,1,1))
bpy.context.scene.objects.link(obj)
return obj
def copy_spring(dst, src):
dst.spring = src.spring
dst.damper = src.damper
dst.targetPosition = src.targetPosition
def copy_friction(dst, src):
dst.extremumSlip = src.extremumSlip
dst.extremumValue = src.extremumValue
dst.asymptoteSlip = src.asymptoteSlip
dst.extremumValue = src.extremumValue
dst.stiffness = src.stiffness
def create_light(mu, mulight, transform):
ltype = ('SPOT', 'SUN', 'POINT', 'AREA')[mulight.type]
light = bpy.data.lamps.new(transform.name, ltype)
light.color = mulight.color[:3]
light.distance = mulight.range
light.energy = mulight.intensity
if ltype == 'SPOT' and hasattr(mulight, "spotAngle"):
light.spot_size = mulight.spotAngle * pi / 180
obj = bpy.data.objects.new(transform.name, light)
obj.rotation_mode = 'QUATERNION'
obj.location = Vector(transform.localPosition)
# Blender points spotlights along local -Z, unity along local +Z
# which is Blender's +Y, so rotate 90 degrees around local X to
# go from Unity to Blender
rot = Quaternion((0.5**0.5,0.5**0.5,0,0))
obj.rotation_quaternion = rot * Quaternion(transform.localRotation)
obj.scale = Vector(transform.localScale)
properties.SetPropMask(obj.muproperties.cullingMask, mulight.cullingMask)
bpy.context.scene.objects.link(obj)
return obj
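# Maps Unity animation property paths to Blender data paths. The swapped 1/2
# indices appear to convert Unity's Y-up axes to Blender's Z-up convention, and
# the -1 multipliers flip quaternion components for the change of handedness
# (the same fixup applied to spotlight rotations in create_light above).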
property_map = {
"m_LocalPosition.x": ("obj", "location", 0, 1),
"m_LocalPosition.y": ("obj", "location", 2, 1),
"m_LocalPosition.z": ("obj", "location", 1, 1),
"m_LocalRotation.x": ("obj", "rotation_quaternion", 1, -1),
"m_LocalRotation.y": ("obj", "rotation_quaternion", 3, -1),
"m_LocalRotation.z": ("obj", "rotation_quaternion", 2, -1),
"m_LocalRotation.w": ("obj", "rotation_quaternion", 0, 1),
"m_LocalScale.x": ("obj", "scale", 0, 1),
"m_LocalScale.y": ("obj", "scale", 2, 1),
"m_LocalScale.z": ("obj", "scale", 1, 1),
"m_Intensity": ("data", "energy", 0, 1),
}
def create_fcurve(action, curve, propmap):
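    # Key times are converted from seconds to frames. Bezier handles are placed
    # a third of the way to the neighbouring key and scaled by the stored in/out
    # tangents, approximating Unity's Hermite interpolation with cubic Beziers.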
dp, ind, mult = propmap
fps = bpy.context.scene.render.fps
fc = action.fcurves.new(data_path = dp, index = ind)
fc.keyframe_points.add(len(curve.keys))
for i, key in enumerate(curve.keys):
x,y = key.time * fps, key.value * mult
fc.keyframe_points[i].co = x, y
fc.keyframe_points[i].handle_left_type = 'FREE'
fc.keyframe_points[i].handle_right_type = 'FREE'
if i > 0:
dist = (key.time - curve.keys[i - 1].time) / 3
dx, dy = dist * fps, key.tangent[0] * dist * mult
else:
dx, dy = 10, 0.0
fc.keyframe_points[i].handle_left = x - dx, y - dy
if i < len(curve.keys) - 1:
dist = (curve.keys[i + 1].time - key.time) / 3
dx, dy = dist * fps, key.tangent[1] * dist * mult
else:
dx, dy = 10, 0.0
fc.keyframe_points[i].handle_right = x + dx, y + dy
return True
def create_action(mu, path, clip):
#print(clip.name)
actions = {}
for curve in clip.curves:
if not curve.path:
mu_path = path
else:
mu_path = "/".join([path, curve.path])
if mu_path not in mu.objects:
print("Unknown path: %s" % (mu_path))
continue
obj = mu.objects[mu_path]
if curve.property not in property_map:
print("%s: Unknown property: %s" % (mu_path, curve.property))
continue
propmap = property_map[curve.property]
subpath, propmap = propmap[0], propmap[1:]
if subpath != "obj":
obj = getattr (obj, subpath)
name = ".".join([clip.name, curve.path, subpath])
if name not in actions:
actions[name] = bpy.data.actions.new(name), obj
act, obj = actions[name]
if not create_fcurve(act, curve, propmap):
continue
for name in actions:
act, obj = actions[name]
if not obj.animation_data:
obj.animation_data_create()
track = obj.animation_data.nla_tracks.new()
track.name = clip.name
track.strips.new(act.name, 1.0, act)
def create_collider(mu, muobj):
col = muobj.collider
name = muobj.transform.name
if type(col) == MuColliderMesh:
name = name + ".collider"
mesh = create_mesh(mu, col.mesh, name)
else:
mesh = bpy.data.meshes.new(name)
obj = create_mesh_object(name, mesh, None)
obj.muproperties.isTrigger = False
if type(col) != MuColliderWheel:
obj.muproperties.isTrigger = col.isTrigger
if type(col) == MuColliderMesh:
obj.muproperties.collider = 'MU_COL_MESH'
elif type(col) == MuColliderSphere:
obj.muproperties.radius = col.radius
obj.muproperties.center = col.center
obj.muproperties.collider = 'MU_COL_SPHERE'
elif type(col) == MuColliderCapsule:
obj.muproperties.radius = col.radius
obj.muproperties.height = col.height
obj.muproperties.direction = properties.dir_map[col.direction]
obj.muproperties.center = col.center
obj.muproperties.collider = 'MU_COL_CAPSULE'
elif type(col) == MuColliderBox:
obj.muproperties.size = col.size
obj.muproperties.center = col.center
obj.muproperties.collider = 'MU_COL_BOX'
elif type(col) == MuColliderWheel:
obj.muproperties.radius = col.radius
obj.muproperties.suspensionDistance = col.suspensionDistance
obj.muproperties.center = col.center
obj.muproperties.mass = col.mass
copy_spring(obj.muproperties.suspensionSpring, col.suspensionSpring)
copy_friction(obj.muproperties.forwardFriction, col.forwardFriction)
copy_friction(obj.muproperties.sideFriction, col.sidewaysFriction)
obj.muproperties.collider = 'MU_COL_WHEEL'
if type(col) != MuColliderMesh:
collider.build_collider(obj)
return obj
def create_object(mu, muobj, parent, create_colliders, parents):
def isExcludedObject(muobj):
for obj in EXCLUDED_OBJECTS:
if obj in muobj.transform.name.lower():
return True
return False
obj = None
mesh = None
if isExcludedObject(muobj):
return None
if hasattr(muobj, "shared_mesh"):
mesh = create_mesh(mu, muobj.shared_mesh, muobj.transform.name)
for poly in mesh.polygons:
poly.use_smooth = True
obj = create_mesh_object(muobj.transform.name, mesh, muobj.transform)
elif hasattr(muobj, "skinned_mesh_renderer"):
smr = muobj.skinned_mesh_renderer
mesh = create_mesh(mu, smr.mesh, muobj.transform.name)
for poly in mesh.polygons:
poly.use_smooth = True
obj = create_mesh_object(muobj.transform.name, mesh, muobj.transform)
mumat = mu.materials[smr.materials[0]]
mesh.materials.append(mumat.material)
if hasattr(muobj, "renderer"):
if mesh:
mumat = mu.materials[muobj.renderer.materials[0]]
mesh.materials.append(mumat.material)
if not obj:
if hasattr(muobj, "light"):
obj = create_light(mu, muobj.light, muobj.transform)
if not obj:
obj = create_mesh_object(muobj.transform.name, None, muobj.transform)
parents.append(muobj.transform.name)
path = "/".join(parents)
mu.objects[path] = obj
if hasattr(muobj, "tag_and_layer"):
obj.muproperties.tag = muobj.tag_and_layer.tag
obj.muproperties.layer = muobj.tag_and_layer.layer
if create_colliders and hasattr(muobj, "collider"):
cobj = create_collider(mu, muobj)
cobj.parent = obj
obj.parent = parent
for child in muobj.children:
create_object(mu, child, obj, create_colliders, parents)
if hasattr(muobj, "animation"):
for clip in muobj.animation.clips:
create_action(mu, path, clip)
parents.remove(muobj.transform.name)
return obj
def convert_bump(pixels, width, height):
outp = list(pixels)
for y in range(1, height - 1):
for x in range(1, width - 1):
index = (y * width + x) * 4
p = pixels[index:index + 4]
nx = (p[3]-128) / 127.
nz = (p[2]-128) / 127.
#n = [p[3],p[2],int(sqrt(1-nx**2-nz**2)*127 + 128),255]
n = [p[3],p[2],255,255]
outp[index:index + 4] = n
return outp
def load_mbm(mbmpath):
mbmfile = open(mbmpath, "rb")
header = mbmfile.read(20)
magic, width, height, bump, bpp = unpack("<5i", header)
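    # The 20-byte MBM header is five little-endian int32s: magic number,
    # width, height, a bump-map flag, and bits per pixel.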
if magic != 0x50534b03: # "\x03KSP" as little endian
        raise ValueError("%s is not an MBM file (bad magic %#x)" % (mbmpath, magic))
if bpp == 32:
pixels = mbmfile.read(width * height * 4)
elif bpp == 24:
pixels = [0, 0, 0, 255] * width * height
for i in range(width * height):
p = mbmfile.read(3)
l = i * 4
pixels[l:l+3] = list(p)
else:
        raise ValueError("unsupported MBM bit depth: %d" % bpp)
if bump:
pixels = convert_bump(pixels, width, height)
return width, height, pixels
def load_dds(dds_image):
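    # Flip the image vertically, row by row: DDS stores rows top-down while
    # Blender's pixel buffer runs bottom-up.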
pixels = list(dds_image.pixels[:])
rowlen = dds_image.size[0] * 4
height = dds_image.size[1]
for y in range(int(height/2)):
ind1 = y * rowlen
ind2 = (height - 1 - y) * rowlen
t = pixels[ind1 : ind1 + rowlen]
pixels[ind1:ind1+rowlen] = pixels[ind2:ind2+rowlen]
pixels[ind2:ind2+rowlen] = t
if dds_image.name[-6:-4] == "_n":
pixels = convert_bump(pixels, dds_image.size[0], height)
dds_image.pixels = pixels[:]
def load_image(name, path):
img_path = os.path.join(path, name)
if any(name == os.path.basename(packed_img.filepath) \
for packed_img in bpy.data.images):
# Add the directory name between the file name and the extension
basename, ext = os.path.splitext(name)
img_path = basename + os.path.split(path)[-1] + ext
if name[-4:].lower() in [".png", ".tga"]:
img = bpy.data.images.load(os.path.join(path, name))
elif name[-4:].lower() == ".dds":
img = bpy.data.images.load(os.path.join(path, name))
load_dds(img)
elif name[-4:].lower() == ".mbm":
w,h, pixels = load_mbm(os.path.join(path, name))
img = bpy.data.images.new(name, w, h)
img.pixels[:] = map(lambda x: x / 255.0, pixels)
# Pack image and change filepath to avoid texture overriding
img.pack(True)
img.filepath = img_path
def create_textures(mu, path):
# Note: DDS textures are previously converted to .png in exporter
# so here the extension saved in .mu is not the good one
extensions = [".png" ,".dds", ".mbm", ".tga"]
#texture info is in the top level object
for tex in mu.textures:
base = os.path.splitext(tex.name)[0]
for e in extensions:
name = base + e
texture_path = os.path.join(path, name)
if os.path.exists(texture_path):
load_image(name, path)
tx = bpy.data.textures.new(tex.name, 'IMAGE')
tx.use_preview_alpha = True
tx.image = bpy.data.images[name]
break
pass
def add_texture(mu, mat, mattex):
i, s, o = mattex.index, mattex.scale, mattex.offset
mat.texture_slots.add()
ts = mat.texture_slots[0]
ts.texture = bpy.data.textures[mu.textures[i].name]
ts.use_map_alpha = True
ts.texture_coords = 'UV'
ts.scale = s + (1,)
ts.offset = o + (0,)
def create_materials(mu, use_classic=False):
#material info is in the top level object
for mumat in mu.materials:
if(use_classic):
mumat.material = make_material(mumat, mu)
else:
mumat.material = make_shader(mumat, mu)
def import_mu(self, context, filepath, create_colliders, use_classic_material=False):
operator = self
undo = bpy.context.user_preferences.edit.use_global_undo
bpy.context.user_preferences.edit.use_global_undo = False
for obj in bpy.context.scene.objects:
obj.select = False
mu = Mu()
if not mu.read(filepath):
bpy.context.user_preferences.edit.use_global_undo = undo
operator.report({'ERROR'},
"Unrecognized format: %s %d" % (mu.magic, mu.version))
return {'CANCELLED'}
create_textures(mu, os.path.dirname(filepath))
create_materials(mu, use_classic_material)
mu.objects = {}
obj = create_object(mu, mu.obj, None, create_colliders, [])
bpy.context.scene.objects.active = obj
obj.select = True
bpy.context.user_preferences.edit.use_global_undo = undo
return {'FINISHED'}
class ImportMu(bpy.types.Operator, ImportHelper):
'''Load a KSP Mu (.mu) File'''
bl_idname = "import_object.ksp_mu"
bl_label = "Import Mu"
bl_description = """Import a KSP .mu model."""
bl_options = {'REGISTER', 'UNDO'}
filename_ext = ".mu"
filter_glob = StringProperty(default="*.mu", options={'HIDDEN'})
create_colliders = BoolProperty(name="Create Colliders",
description="Disable to import only visual and hierarchy elements",
default=True)
def execute(self, context):
keywords = self.as_keywords (ignore=("filter_glob",))
return import_mu(self, context, **keywords)
|
sketchfab/io_object_mu
|
import_mu.py
|
Python
|
gpl-2.0
| 15,783
|
def mysql_read():
mysql_info = {}
with open('/etc/openstack.cfg', 'r') as f:
for i in f.readlines():
if i.split('=', 1)[0] in ('DASHBOARD_HOST',
'DASHBOARD_PASS',
'DASHBOARD_NAME',
'DASHBOARD_USER',
'DASHBOARD_PORT'):
data = i.split('=', 1)
mysql_info[data[0]] = data[1].strip()
return mysql_info
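# Minimal usage sketch (assumes /etc/openstack.cfg contains KEY=value lines
# such as DASHBOARD_HOST=127.0.0.1):
#   info = mysql_read()
#   host = info.get('DASHBOARD_HOST')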
|
ChinaMassClouds/copenstack-server
|
openstack/src/horizon-2014.2/openstack_dashboard/openstack/common/utils.py
|
Python
|
gpl-2.0
| 515
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
# import codecs
# import json
# class stokeScrapyPipeline(object):
# def __init__(self):
# self.file=codecs.open("stokeScrapy.json",mode="wb",encoding='utf-8')
# self.file.write('{"hah"'+':[')
import pymongo
from scrapy.conf import settings
from scrapy.exceptions import DropItem
from scrapy import log
#MongoDBPipeline
class MongoDBPipeline(object):
    def __init__(self):
        connection = pymongo.MongoClient(
            settings['MONGODB_SERVER'],
            settings['MONGODB_PORT']
        )
        db = connection[settings['MONGODB_DB']]
        self.collection = db[settings['MONGODB_COLLECTION']]
    def process_item(self, item, spider):
        valid = True
        for data in item:
            if not data:
                valid = False
                raise DropItem("Missing {0}!".format(data))
        if valid:
            self.collection.insert(dict(item))
            log.msg("Stoke added to MongoDB database!",
                    level=log.DEBUG, spider=spider)
        return item
# def process_item(self, item, spider):
# line = json.dumps(dict(item))+","
# self.file.write(line.decode("unicode_escape"))
# return item
|
disappearedgod/stokeScrapy
|
stokeScrapy/pipelines.py
|
Python
|
gpl-2.0
| 1,353
|
"""
* Copyright (c) 2017 SUSE LLC
*
* openATTIC is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 2.
*
* This package is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
"""
from ceph_radosgw.rgw_client import RGWClient
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase
from rest_framework import status
from sysutils.database_utils import make_default_admin
import json
import mock
class RGWClientTestCase(TestCase):
@staticmethod
def _mock_settings(Settings_mock):
Settings_mock.RGW_API_HOST = 'host'
Settings_mock.RGW_API_PORT = 42
Settings_mock.RGW_API_SCHEME = 'https'
Settings_mock.RGW_API_ADMIN_RESOURCE = 'ADMIN_RESOURCE'
Settings_mock.RGW_API_USER_ID = 'USER_ID'
Settings_mock.RGW_API_ACCESS_KEY = 'ak'
Settings_mock.RGW_API_SECRET_KEY = 'sk'
@mock.patch('ceph_radosgw.rgw_client.Settings')
def test_load_settings(self, Settings_mock):
RGWClientTestCase._mock_settings(Settings_mock)
RGWClient._load_settings() # Also test import of awsauth.S3Auth
self.assertEqual(RGWClient._host, 'host')
self.assertEqual(RGWClient._port, 42)
self.assertEqual(RGWClient._ssl, True)
self.assertEqual(RGWClient._ADMIN_PATH, 'ADMIN_RESOURCE')
self.assertEqual(RGWClient._SYSTEM_USERID, 'USER_ID')
instance = RGWClient._user_instances[RGWClient._SYSTEM_USERID]
self.assertEqual(instance.userid, 'USER_ID')
@mock.patch('ceph_radosgw.views.Settings')
def test_user_delete(self, Settings_mock):
make_default_admin()
self.assertTrue(self.client.login(username=settings.OAUSER, password='openattic'))
Settings_mock.RGW_API_USER_ID = 'admin'
response = self.client.delete('/api/ceph_radosgw/user/delete?uid=admin')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn('Can not delete the user', response.data['detail'])
@mock.patch('ceph_nfs.models.GaneshaExport.objects.filter')
def test_bucket_delete(self, filter_mock):
make_default_admin()
self.assertTrue(self.client.login(username=settings.OAUSER, password='openattic'))
filter_mock.return_value = [4, 8, 15, 16, 23, 42]
response = self.client.delete('/api/ceph_radosgw/bucket/delete?bucket=test01')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn('Can not delete the bucket', response.data['detail'])
@mock.patch('ceph_radosgw.rgw_client.Settings')
@mock.patch('ceph_radosgw.views.proxy_view')
@mock.patch('ceph_nfs.models.GaneshaExport.objects.filter')
def test_bucket_get(self, filter_mock, proxy_view_mock, Settings_mock):
RGWClientTestCase._mock_settings(Settings_mock)
proxy_view_mock.return_value = HttpResponse(json.dumps({
'owner': 'floyd',
'bucket': 'my_data'
}))
make_default_admin()
self.assertTrue(self.client.login(username=settings.OAUSER, password='openattic'))
filter_mock.return_value = [0, 8, 15]
response = self.client.get('/api/ceph_radosgw/bucket/get?bucket=test01')
content = json.loads(response.content)
self.assertIn('is_referenced', content)
self.assertTrue(content['is_referenced'])
filter_mock.return_value = []
response = self.client.get('/api/ceph_radosgw/bucket/get?bucket=test02')
content = json.loads(response.content)
self.assertIn('is_referenced', content)
self.assertFalse(content['is_referenced'])
|
openattic/openattic
|
backend/ceph_radosgw/tests.py
|
Python
|
gpl-2.0
| 3,934
|
# -*- mode: python; indent-tabs-mode: nil; tab-width: 3 -*-
# vim: set tabstop=3 shiftwidth=3 expandtab:
#
# Copyright (C) 2001-2005 Ichiro Fujinaga, Michael Droettboom,
# and Karl MacMillan
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
import wx
from wx.lib import buttons
from gamera.gui import gamera_icons, compat_wx
# This is our own custom toolbar class.
# We had to implement our own, since the wxWindows one supports only
# one toolbar per wxFrame (on MSW, at least). Since we want two
# independent toolbars in the classifier window, we have to create our
# own toolbar using a wxPanel and a wxBoxSizer.
if wx.Platform != '__WXMAC__' and hasattr(buttons, 'ThemedGenBitmapButton'):
ButtonClass = buttons.ThemedGenBitmapButton
ToggleButtonClass = buttons.ThemedGenBitmapToggleButton
else:
ButtonClass = buttons.GenBitmapButton
ToggleButtonClass = buttons.GenBitmapToggleButton
class ToolBar(wx.Panel):
def __init__(self, parent, id=-1, hideable=1):
self._close_toolbar_bitmap = gamera_icons.getToolbarCloseBitmap()
self._open_toolbar_bitmap = gamera_icons.getToolbarOpenBitmap()
self.controls = []
self.layout_update_controls = []
self.sizer = wx.BoxSizer(wx.HORIZONTAL)
wx.Panel.__init__(
self, parent, id,
style=wx.CLIP_CHILDREN|wx.NO_FULL_REPAINT_ON_RESIZE)
self.SetSizer(self.sizer)
self._closed = 0
def AddSimpleTool(self, id, bitmap, help_string, callback=None, toggle=0):
if not toggle:
button = ButtonClass(self, id, bitmap, size=wx.Size(30,30))
else:
button = ToggleButtonClass(self, id, bitmap, size=wx.Size(30,30))
button.SetBezelWidth(1)
button.SetUseFocusIndicator(False)
compat_wx.set_tool_tip(button, help_string)
if callback:
compat_wx.handle_event_1(self, wx.EVT_BUTTON, callback, id)
self.sizer.Add(button, flag=wx.ALIGN_CENTER)
self.sizer.SetSizeHints(self)
self.controls.append(button)
return button
def AddMenuTool(self, id, text, help_string, callback=None, toggle=0):
if not toggle:
button = buttons.GenBitmapTextButton(
self, id, None, text, size=wx.Size(48, 28))
else:
button = buttons.GenBitmapTextToggleButton(
self, id, None, text, size=wx.Size(48,28))
button.SetBitmapLabel(gamera_icons.getToolbarMenuBitmap())
button.SetBezelWidth(1)
button.SetUseFocusIndicator(False)
compat_wx.set_tool_tip(button, help_string)
if callback:
compat_wx.handle_event_1(self, wx.EVT_BUTTON, callback, id)
self.sizer.Add(button, flag=wx.ALIGN_CENTER)
self.sizer.SetSizeHints(self)
self.controls.append(button)
return button
def AddControl(self, control):
self.sizer.Add(control, flag=wx.ALIGN_CENTER)
self.sizer.SetSizeHints(self)
self.controls.append(control)
def AddSeparator(self):
self.sizer.Add(wx.Panel(self, -1, size=wx.Size(5, 2)))
self.sizer.SetSizeHints(self)
def OnHideToolbar(self, event):
self.close_button.Hide()
self.open_button.Show()
for control in self.controls:
control.Hide()
self.SetSizeHints(-1, -1, -1, -1, -1, -1)
self.SetSize(wx.Size(self.GetSize().x, 12))
self.Layout()
self.GetParent().Layout()
self.GetParent().Refresh()
def OnShowToolbar(self, event):
self.close_button.Show()
self.open_button.Hide()
for control in self.controls:
control.Show()
self.sizer.SetSizeHints(self)
self.Layout()
self.GetParent().Layout()
self.GetParent().Refresh()
|
hsnr-gamera/gamera
|
gamera/gui/toolbar.py
|
Python
|
gpl-2.0
| 4,334
|
import json
import sys
import logging
import logging.handlers
def load_config():
'''Loads application configuration from a JSON file'''
try:
json_data = open('config.json')
config = json.load(json_data)
json_data.close()
return config
except Exception:
print """There was an error loading config.json.
Make sure that the file exists and it's a valid JSON file."""
sys.exit(1)
def init_logger(file_name='clouddump.log'):
'''
Initializes the logging file and module
parameters
----------
file_name: A string with the name of the file to write the logs in
'''
logger = logging.getLogger('clouddump')
log_file_handler = logging.handlers.RotatingFileHandler(
file_name, maxBytes = 10**9)
log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
log_file_handler.setFormatter(log_format)
logger.addHandler(log_file_handler)
logger.setLevel(logging.DEBUG)
if len(sys.argv) > 1:
if sys.argv[1] == '-v' or sys.argv[1] == '--verbose':
console = logging.StreamHandler()
console.setLevel(logging.INFO)
logger.addHandler(console)
|
svera/clouddump
|
tools.py
|
Python
|
gpl-2.0
| 1,226
|
'''
Created on 14 Jun 2016
@author: gjermund.vingerhagen
'''
import numpy as np
import scipy.interpolate as intp
import linecache
import utmconverter as utm
def splitHead(inp):
return inp
def lineToArr(l1):
arra = np.array(np.fromstring(l1[144:1024],dtype=int,sep=' '))
for i in range(1,30):
arra = np.append(arra,np.fromstring(l1[1024*i:1024*(i+1)],dtype=int,sep=' '))
return arra
def findClosestPoint(east,north):
try:
dtminfo = getDTMFile(east,north)
eastLine = round((east-dtminfo[1])//10)
northLine = round((north-dtminfo[2])//10)
east_delta = (east-dtminfo[1])%10
        north_delta = (north-dtminfo[2])%10  # dtminfo[2] is the minimum northing; [1] is the easting
return [eastLine,northLine,dtminfo[0],east_delta,north_delta,dtminfo[1],dtminfo[2]]
except:
raise Exception("Closest point has no DTM file ")
def readFile(filename):
line1 = open("C:\\python\\dtms\\{}".format(filename), 'r').read(500000)
print(line1[0:134])
print(line1[150:156])
print(line1[156:162])
print(line1[162:168])
print(line1[529:535])
print(line1[535:541])
print('{:9}{}'.format('MinEast:',line1[546:570]))
print('{:9}{}'.format('MinNorth:',line1[570:594]))
print(line1[594:618])
print(line1[618:642])
print(line1[642:666])
print(line1[666:690])
print(line1[690:714])
print(line1[714:738])
print(line1[738:762])
print(line1[762:786])
print('{:9}{}'.format('dy:',line1[816:828]))
print('{:9}{}'.format('dx:',line1[828:840]))
print('{:10}{}'.format('Rows:',line1[858:864]))
print('-----')
print()
minEast = float(line1[546:570])
minNorth = float(line1[570:594])
print(line1[1024+30720*0:1024+144+30720*0])
#===============================================================================
# print(line1[1168:2048])
# print(line1[1024*2:1024*3])
# print(line1[1024*4:1024*5])
#===============================================================================
def getElevation(eastL,northL,dtmfile):
rows = 5041
head = 1024
lhead = 144
blockSize = 30720
eastLine = eastL
northLine = northL
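    # DEM layout, inferred from the offsets used below: a 1024-byte file header,
    # then one 30720-byte block per east profile. Each profile begins with a
    # 144-byte record header followed by 146 six-character elevations; every
    # subsequent 1024-byte chunk holds 170 more six-character values.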
with open("C:\\python\\dtms\\{}".format(dtmfile), 'r') as fin:
fin.seek(head+blockSize*eastLine)
data = fin.read(blockSize)
if northLine < 146:
s = 144+northLine*6
else:
c = (northLine-146) // 170 +1
d = (northLine-146) % 170
s = 1024*(c)+d*6
return float(data[s:s+6])/10
def getElevationArea(eastLmin,northLmin,eastLmax,northLmax,dtmfile):
rows = 5041
head = 1024
lhead = 144
blockSize = 30720
rect = []
with open("C:\\python\\dtms\\{}".format(dtmfile), 'r') as fin:
for eastLine in range(eastLmin,eastLmax+1):
line = []
fin.seek(head+blockSize*eastLine)
data = fin.read(blockSize)
for northLine in range(northLmin,northLmax):
if northLine < 146:
s = 144+northLine*6
else:
c = (northLine-146) // 170 +1
d = (northLine-146) % 170
s = 1024*(c)+d*6
line.append(int(data[s:s+6]))
rect.append(line)
return rect
def calculateEle(x,y,coordsys='utm'):
if coordsys == 'latlon':
east, north, zone_number, zone_letter = utm.from_latlon(x, y)
else:
east,north = x,y
try:
p = findClosestPoint(east, north)
dpx = p[3]
dpy = p[4]
ele1 = getElevation(p[0], p[1],p[2])
ele2 = getElevation(p[0]+1, p[1],p[2])
ele3 = getElevation(p[0], p[1]+1,p[2])
ele4 = getElevation(p[0]+1, p[1]+1,p[2])
#c_ele = getInterpolatedEle(ele1,ele2,ele3,ele4,[dpx,dpy])[2]
d_ele = interpolateEle2(ele1,ele2,ele3,ele4,[dpx,dpy])
return d_ele
except Exception:
raise Exception("Something went wrong")
def getInterpolatedEle(p1e=10,p2e=5,p3e=5,p4e=0,pxc=[5,5]):
if sum(pxc)>10:
p1 = np.array([10,10,p4e])
else:
p1 = np.array([0,0,p1e])
p2 = np.array([10,0,p2e])
p3 = np.array([0,10,p3e])
px = np.array([pxc[0],pxc[1]])
a = p2-p1
b = p3-p1
N = np.cross(a,b)
c = px-p1[:2]
x = -(N[0]*c[0]+N[1]*c[1]) / N[2]
C = np.array([c[0],c[1],x])
p4 = p1 + C
return p4
def interpolateEle2(p1e=10,p2e=5,p3e=5,p4e=0,pxc=[5,5]):
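    # Bilinear interpolation over one 10 m grid cell: fit a degree-1 spline
    # through the four corner elevations and evaluate it at the query offset.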
x = np.array([0,10])
y = np.array( [0,10])
z = np.array([[p1e,p3e],[p2e,p4e]])
p1=pxc[0]
p2=pxc[1]
f = intp.RectBivariateSpline(x,y,z,kx=1, ky=1, s=0)
return f(p1,p2)[0][0]
def getDTMFile(east,north):
try:
dtmfile = getDTMdict()
for key in dtmfile:
if north>=dtmfile[key][1] and north<=dtmfile[key][1]+50000:
if east>=dtmfile[key][0] and east<=dtmfile[key][0]+50000:
return [key,int(dtmfile[key][0]),int(dtmfile[key][1])]
except:
raise Exception('DTM file not available')
def getDTMdict():
dtmfile = dict()
dtmfile['6404_3_10m_z32.dem'] = [399800,6399900]
dtmfile['6404_4_10m_z32.dem'] = [399800,6449800]
dtmfile['7005_2_10m_z32.dem'] = [549800,6999800]
dtmfile['6503_3_10m_z32.dem'] = [299800,6499800]
dtmfile['6903_1_10m_z32.dem'] = [349800,6949800]
dtmfile['6904_4_10m_z32.dem'] = [399795,6949795]
dtmfile['6505_4_10m_z32.dem'] = [499800,6549800]
dtmfile['6504_1_10m_z32.dem'] = [449800,6549800]
dtmfile['6604_2_10m_z32.dem'] = [449800,6599800]
dtmfile['6605_3_10m_z32.dem'] = [499800,6599800]
dtmfile['6603_2_10m_z32.dem'] = [349800,6599800]
dtmfile['6506_1_10m_z32.dem'] = [649800,6549800]
dtmfile['6506_2_10m_z32.dem'] = [649800,6503000]
dtmfile['6506_3_10m_z32.dem'] = [599800,6503000]
dtmfile['6506_4_10m_z32.dem'] = [599800,6549800]
return dtmfile
def hasDTMFile(minEast, minNorth,maxEast,maxNorth):
dtmfile = getDTMdict()
dtm = getDTMFile(minEast, minNorth)
    if dtm is not None:  # getDTMFile() returns None (or raises) when no tile matches
if (maxEast-50000)< dtm[1] and (maxNorth-50000)<dtm[2]:
return True
return False
if __name__ == "__main__":
readFile('6506_3_10m_z32.dem')
|
gjermv/potato
|
sccs/gpx/dtmdata.py
|
Python
|
gpl-2.0
| 6,663
|
import os
from iconpacker import IconList
test_icon = "/media/hda7/Graphics/png/Classic_Truck/128.png"
icon_theme = IconList()
def initialization():
treestore = icon_theme.setup_treeview('data/legacy-icon-mapping.xml')
if treestore != None:
for i in icon_theme.icon_list:
icon_theme.set_item(i, test_icon)
return True
return False
def test_read_theme_file(fname):
treestore = icon_theme.setup_treeview('data/legacy-icon-mapping.xml')
if treestore != None:
return icon_theme.read_theme_file(fname)
return False
"""
def path_generation():
test_path = os.tempnam()
os.mkdir(test_path)
for i in range(0,101):
os.system("mkdir %s/test_dir_%s>/dev/null" % (test_path, i))
if get_unique_path (test_path, "test_dir"):
return False
else:
os.rmdir(test_path+"/test_dir_50")
fpath, name = icon_theme.get_unique_path (test_path, "test_dir")
if name=="test_dir_50":
os.system("rm -rf %s" % test_path)
return True
else:
return False
"""
def build_icon_paths ():
icon_theme.build_paths()
return True
def test_icon_copying():
if icon_theme.copy_icon_files():
d = os.stat(test_icon)
expected_size = d[6] * len(icon_theme.icon_list) - (4096 * len(icon_theme.categories_list))
print expected_size
os.system ('du -c -b ' + icon_theme.tmp_ico_path)
return True
return False
def test_resizing():
if icon_theme.resize_icons():
return True
else:
return False
def test_make_theme():
if icon_theme.make_icon_theme('TestTheme'):
return True
else:
return False
def test_write_icon_theme():
    icon_theme.write_theme_file()
    return True  # write_theme_file() returns nothing, so report success explicitly
def start_testing():
print "Testing treeview initialization ..."
if initialization():
print "treeview initialization [PASSED]"
else:
print "treeview initialization [FAILED]"
return False
"""
print "Testing Unique path generator ..."
if path_generation():
print "Unique Path generation [PASSED]"
else:
print "Unique Path generation [FAILED]"
return False
"""
print "Testing directory generation ..."
if build_icon_paths():
print "building icon paths [PASSED]"
else:
print "building icon paths [FAILED]"
return False
print "Testing Icon copying ..."
if test_icon_copying():
print "Icon copying [PASSED]"
else:
print "Icon copying [FAILED]"
return False
print "Testing icon resizing ..."
if test_resizing():
print "Resizing [PASSED]"
else:
print "Resizing [FAILED]"
return False
print "Testing Theme creation ..."
if test_make_theme():
print "Theme creation [PASSES]"
else:
print "Theme Creation [FAILED]"
return False
print "Testing index file creation ..."
if test_write_icon_theme():
print "Index file creation [PASSED]"
else:
print "Index file creation [FAILED]"
return False
#os.system("rm -rf %s/*" % icon_theme.build_path)
def test_writing_themes():
initialization()
icon_theme.theme_name = "TestTheme"
if icon_theme.write_icon_theme():
print "Theme Written"
else:
print "Failed"
if __name__=="__main__":
start_testing()
|
tmahmood/iconpacker
|
test_iconpacker.py
|
Python
|
gpl-2.0
| 2,987
|
import os
import sys
import string
from SCons.Script import *
from utils import _make_path_relative
BuildOptions = {}
Projects = []
Rtt_Root = ''
Env = None
class Win32Spawn:
def spawn(self, sh, escape, cmd, args, env):
# deal with the cmd build-in commands which cannot be used in
# subprocess.Popen
if cmd == 'del':
for f in args[1:]:
try:
os.remove(f)
except Exception as e:
print 'Error removing file: %s' % e
return -1
return 0
import subprocess
newargs = string.join(args[1:], ' ')
cmdline = cmd + " " + newargs
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
# Make sure the env is constructed by strings
_e = {k: str(v) for k, v in env.items()}
# Windows(tm) CreateProcess does not use the env passed to it to find
# the executables. So we have to modify our own PATH to make Popen
# work.
old_path = os.environ['PATH']
os.environ['PATH'] = _e['PATH']
try:
proc = subprocess.Popen(cmdline, env=_e,
startupinfo=startupinfo, shell=False)
except Exception as e:
print 'Error in calling:\n%s' % cmdline
print 'Exception: %s: %s' % (e, os.strerror(e.errno))
return e.errno
finally:
os.environ['PATH'] = old_path
return proc.wait()
def PrepareBuilding(env, root_directory, has_libcpu=False, remove_components = []):
import SCons.cpp
import rtconfig
global BuildOptions
global Projects
global Env
global Rtt_Root
Env = env
Rtt_Root = root_directory
    # add compatibility with Keil MDK 4.6, which changes the directory of armcc.exe
if rtconfig.PLATFORM == 'armcc':
if not os.path.isfile(os.path.join(rtconfig.EXEC_PATH, 'armcc.exe')):
if rtconfig.EXEC_PATH.find('bin40') > 0:
rtconfig.EXEC_PATH = rtconfig.EXEC_PATH.replace('bin40', 'armcc/bin')
Env['LINKFLAGS']=Env['LINKFLAGS'].replace('RV31', 'armcc')
# reset AR command flags
env['ARCOM'] = '$AR --create $TARGET $SOURCES'
env['LIBPREFIX'] = ''
env['LIBSUFFIX'] = '.lib'
env['LIBLINKPREFIX'] = ''
env['LIBLINKSUFFIX'] = '.lib'
env['LIBDIRPREFIX'] = '--userlibpath '
# patch for win32 spawn
if env['PLATFORM'] == 'win32':
win32_spawn = Win32Spawn()
win32_spawn.env = env
env['SPAWN'] = win32_spawn.spawn
if env['PLATFORM'] == 'win32':
os.environ['PATH'] = rtconfig.EXEC_PATH + ";" + os.environ['PATH']
else:
os.environ['PATH'] = rtconfig.EXEC_PATH + ":" + os.environ['PATH']
# add program path
env.PrependENVPath('PATH', rtconfig.EXEC_PATH)
# add library build action
act = SCons.Action.Action(BuildLibInstallAction, 'Install compiled library... $TARGET')
bld = Builder(action = act)
Env.Append(BUILDERS = {'BuildLib': bld})
# parse rtconfig.h to get used component
PreProcessor = SCons.cpp.PreProcessor()
f = file('rtconfig.h', 'r')
contents = f.read()
f.close()
PreProcessor.process_contents(contents)
BuildOptions = PreProcessor.cpp_namespace
# add copy option
AddOption('--copy',
dest='copy',
action='store_true',
default=False,
help='copy rt-thread directory to local.')
AddOption('--copy-header',
dest='copy-header',
action='store_true',
default=False,
help='copy header of rt-thread directory to local.')
AddOption('--cscope',
dest='cscope',
action='store_true',
default=False,
help='Build Cscope cross reference database. Requires cscope installed.')
AddOption('--clang-analyzer',
dest='clang-analyzer',
action='store_true',
default=False,
help='Perform static analyze with Clang-analyzer. '+\
'Requires Clang installed.\n'+\
'It is recommended to use with scan-build like this:\n'+\
'`scan-build scons --clang-analyzer`\n'+\
'If things goes well, scan-build will instruct you to invoke scan-view.')
if GetOption('clang-analyzer'):
# perform what scan-build does
env.Replace(
CC = 'ccc-analyzer',
CXX = 'c++-analyzer',
# skip as and link
LINK = 'true',
AS = 'true',)
env["ENV"].update(x for x in os.environ.items() if x[0].startswith("CCC_"))
# only check, don't compile. ccc-analyzer use CCC_CC as the CC.
# fsyntax-only will give us some additional warning messages
env['ENV']['CCC_CC'] = 'clang'
env.Append(CFLAGS=['-fsyntax-only', '-Wall', '-Wno-invalid-source-encoding'])
env['ENV']['CCC_CXX'] = 'clang++'
env.Append(CXXFLAGS=['-fsyntax-only', '-Wall', '-Wno-invalid-source-encoding'])
# remove the POST_ACTION as it will cause meaningless errors(file not
# found or something like that).
rtconfig.POST_ACTION = ''
# add build library option
AddOption('--buildlib',
dest='buildlib',
type='string',
help='building library of a component')
AddOption('--cleanlib',
dest='cleanlib',
action='store_true',
default=False,
help='clean up the library by --buildlib')
# add target option
AddOption('--target',
dest='target',
type='string',
help='set target project: mdk/iar/vs/ua')
#{target_name:(CROSS_TOOL, PLATFORM)}
tgt_dict = {'mdk':('keil', 'armcc'),
'mdk4':('keil', 'armcc'),
'iar':('iar', 'iar'),
'vs':('msvc', 'cl'),
'vs2012':('msvc', 'cl'),
'cb':('keil', 'armcc'),
'ua':('keil', 'armcc')}
tgt_name = GetOption('target')
if tgt_name:
# --target will change the toolchain settings which clang-analyzer is
# depend on
if GetOption('clang-analyzer'):
print '--clang-analyzer cannot be used with --target'
sys.exit(1)
SetOption('no_exec', 1)
try:
rtconfig.CROSS_TOOL, rtconfig.PLATFORM = tgt_dict[tgt_name]
except KeyError:
            print 'Unknown target: %s. Available targets: %s' % \
                  (tgt_name, ', '.join(tgt_dict.keys()))
sys.exit(1)
elif (GetDepend('RT_USING_NEWLIB') == False and GetDepend('RT_USING_NOLIBC') == False) \
and rtconfig.PLATFORM == 'gcc':
AddDepend('RT_USING_MINILIBC')
# add comstr option
AddOption('--verbose',
dest='verbose',
action='store_true',
default=False,
help='print verbose information during build')
if not GetOption('verbose'):
# override the default verbose command string
env.Replace(
ARCOMSTR = 'AR $TARGET',
ASCOMSTR = 'AS $TARGET',
ASPPCOMSTR = 'AS $TARGET',
CCCOMSTR = 'CC $TARGET',
CXXCOMSTR = 'CXX $TARGET',
LINKCOMSTR = 'LINK $TARGET'
)
    # we need to separate the variant_dir for BSPs and the kernels. BSPs could
# have their own components etc. If they point to the same folder, SCons
# would find the wrong source code to compile.
bsp_vdir = 'build/bsp'
kernel_vdir = 'build/kernel'
# board build script
objs = SConscript('SConscript', variant_dir=bsp_vdir, duplicate=0)
# include kernel
objs.extend(SConscript(Rtt_Root + '/src/SConscript', variant_dir=kernel_vdir + '/src', duplicate=0))
# include libcpu
if not has_libcpu:
objs.extend(SConscript(Rtt_Root + '/libcpu/SConscript',
variant_dir=kernel_vdir + '/libcpu', duplicate=0))
# include components
objs.extend(SConscript(Rtt_Root + '/components/SConscript',
variant_dir=kernel_vdir + '/components',
duplicate=0,
exports='remove_components'))
return objs
def PrepareModuleBuilding(env, root_directory):
import rtconfig
global Env
global Rtt_Root
Env = env
Rtt_Root = root_directory
# add build/clean library option for library checking
AddOption('--buildlib',
dest='buildlib',
type='string',
help='building library of a component')
AddOption('--cleanlib',
dest='cleanlib',
action='store_true',
default=False,
help='clean up the library by --buildlib')
# add program path
env.PrependENVPath('PATH', rtconfig.EXEC_PATH)
def GetConfigValue(name):
assert type(name) == str, 'GetConfigValue: only string parameter is valid'
try:
return BuildOptions[name]
except:
return ''
def GetDepend(depend):
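    # Accepts a single option name or a list of names. For a string option whose
    # stored value is a non-empty string, that value is returned; otherwise the
    # result is a boolean saying whether every named option is enabled.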
building = True
if type(depend) == type('str'):
if not BuildOptions.has_key(depend) or BuildOptions[depend] == 0:
building = False
elif BuildOptions[depend] != '':
return BuildOptions[depend]
return building
# for list type depend
for item in depend:
if item != '':
if not BuildOptions.has_key(item) or BuildOptions[item] == 0:
building = False
return building
def AddDepend(option):
BuildOptions[option] = 1
def MergeGroup(src_group, group):
src_group['src'] = src_group['src'] + group['src']
if group.has_key('CCFLAGS'):
if src_group.has_key('CCFLAGS'):
src_group['CCFLAGS'] = src_group['CCFLAGS'] + group['CCFLAGS']
else:
src_group['CCFLAGS'] = group['CCFLAGS']
if group.has_key('CPPPATH'):
if src_group.has_key('CPPPATH'):
src_group['CPPPATH'] = src_group['CPPPATH'] + group['CPPPATH']
else:
src_group['CPPPATH'] = group['CPPPATH']
if group.has_key('CPPDEFINES'):
if src_group.has_key('CPPDEFINES'):
src_group['CPPDEFINES'] = src_group['CPPDEFINES'] + group['CPPDEFINES']
else:
src_group['CPPDEFINES'] = group['CPPDEFINES']
if group.has_key('LINKFLAGS'):
if src_group.has_key('LINKFLAGS'):
src_group['LINKFLAGS'] = src_group['LINKFLAGS'] + group['LINKFLAGS']
else:
src_group['LINKFLAGS'] = group['LINKFLAGS']
if group.has_key('LIBS'):
if src_group.has_key('LIBS'):
src_group['LIBS'] = src_group['LIBS'] + group['LIBS']
else:
src_group['LIBS'] = group['LIBS']
if group.has_key('LIBPATH'):
if src_group.has_key('LIBPATH'):
src_group['LIBPATH'] = src_group['LIBPATH'] + group['LIBPATH']
else:
src_group['LIBPATH'] = group['LIBPATH']
def DefineGroup(name, src, depend, **parameters):
global Env
if not GetDepend(depend):
return []
# find exist group and get path of group
group_path = ''
for g in Projects:
if g['name'] == name:
group_path = g['path']
if group_path == '':
group_path = GetCurrentDir()
group = parameters
group['name'] = name
group['path'] = group_path
if type(src) == type(['src1']):
group['src'] = File(src)
else:
group['src'] = src
if group.has_key('CCFLAGS'):
Env.Append(CCFLAGS = group['CCFLAGS'])
if group.has_key('CPPPATH'):
Env.Append(CPPPATH = group['CPPPATH'])
if group.has_key('CPPDEFINES'):
Env.Append(CPPDEFINES = group['CPPDEFINES'])
if group.has_key('LINKFLAGS'):
Env.Append(LINKFLAGS = group['LINKFLAGS'])
# check whether to clean up library
if GetOption('cleanlib') and os.path.exists(os.path.join(group['path'], GroupLibFullName(name, Env))):
if group['src'] != []:
print 'Remove library:', GroupLibFullName(name, Env)
do_rm_file(os.path.join(group['path'], GroupLibFullName(name, Env)))
# check whether exist group library
if not GetOption('buildlib') and os.path.exists(os.path.join(group['path'], GroupLibFullName(name, Env))):
group['src'] = []
if group.has_key('LIBS'): group['LIBS'] = group['LIBS'] + [GroupLibName(name, Env)]
else : group['LIBS'] = [GroupLibName(name, Env)]
if group.has_key('LIBPATH'): group['LIBPATH'] = group['LIBPATH'] + [GetCurrentDir()]
else : group['LIBPATH'] = [GetCurrentDir()]
if group.has_key('LIBS'):
Env.Append(LIBS = group['LIBS'])
if group.has_key('LIBPATH'):
Env.Append(LIBPATH = group['LIBPATH'])
objs = Env.Object(group['src'])
if group.has_key('LIBRARY'):
objs = Env.Library(name, objs)
# merge group
for g in Projects:
if g['name'] == name:
# merge to this group
MergeGroup(g, group)
return objs
# add a new group
Projects.append(group)
return objs
def GetCurrentDir():
conscript = File('SConscript')
fn = conscript.rfile()
name = fn.name
path = os.path.dirname(fn.abspath)
return path
PREBUILDING = []
def RegisterPreBuildingAction(act):
global PREBUILDING
assert callable(act), 'Could only register callable objects. %s received' % repr(act)
PREBUILDING.append(act)
def PreBuilding():
global PREBUILDING
for a in PREBUILDING:
a()
def GroupLibName(name, env):
import rtconfig
if rtconfig.PLATFORM == 'armcc':
return name + '_rvds'
elif rtconfig.PLATFORM == 'gcc':
return name + '_gcc'
return name
def GroupLibFullName(name, env):
return env['LIBPREFIX'] + GroupLibName(name, env) + env['LIBSUFFIX']
def BuildLibInstallAction(target, source, env):
lib_name = GetOption('buildlib')
for Group in Projects:
if Group['name'] == lib_name:
lib_name = GroupLibFullName(Group['name'], env)
dst_name = os.path.join(Group['path'], lib_name)
print 'Copy %s => %s' % (lib_name, dst_name)
do_copy_file(lib_name, dst_name)
break
def DoBuilding(target, objects):
program = None
# check whether special buildlib option
lib_name = GetOption('buildlib')
if lib_name:
# build library with special component
for Group in Projects:
if Group['name'] == lib_name:
lib_name = GroupLibName(Group['name'], Env)
objects = Env.Object(Group['src'])
program = Env.Library(lib_name, objects)
# add library copy action
Env.BuildLib(lib_name, program)
break
else:
# merge the repeated items in the Env
if Env.has_key('CPPPATH') : Env['CPPPATH'] = list(set(Env['CPPPATH']))
if Env.has_key('CPPDEFINES'): Env['CPPDEFINES'] = list(set(Env['CPPDEFINES']))
if Env.has_key('LIBPATH') : Env['LIBPATH'] = list(set(Env['LIBPATH']))
if Env.has_key('LIBS') : Env['LIBS'] = list(set(Env['LIBS']))
program = Env.Program(target, objects)
EndBuilding(target, program)
def EndBuilding(target, program = None):
import rtconfig
Env.AddPostAction(target, rtconfig.POST_ACTION)
if GetOption('target') == 'mdk':
from keil import MDKProject
from keil import MDK4Project
template = os.path.isfile('template.Uv2')
if template:
MDKProject('project.Uv2', Projects)
else:
template = os.path.isfile('template.uvproj')
if template:
MDK4Project('project.uvproj', Projects)
else:
print 'No template project file found.'
if GetOption('target') == 'mdk4':
from keil import MDKProject
from keil import MDK4Project
MDK4Project('project.uvproj', Projects)
if GetOption('target') == 'iar':
from iar import IARProject
IARProject('project.ewp', Projects)
if GetOption('target') == 'vs':
from vs import VSProject
VSProject('project.vcproj', Projects, program)
if GetOption('target') == 'vs2012':
from vs2012 import VS2012Project
VS2012Project('project.vcxproj', Projects, program)
if GetOption('target') == 'cb':
from codeblocks import CBProject
CBProject('project.cbp', Projects, program)
if GetOption('target') == 'ua':
from ua import PrepareUA
PrepareUA(Projects, Rtt_Root, str(Dir('#')))
if GetOption('copy') and program != None:
MakeCopy(program)
if GetOption('copy-header') and program != None:
MakeCopyHeader(program)
if GetOption('cscope'):
from cscope import CscopeDatabase
CscopeDatabase(Projects)
def SrcRemove(src, remove):
    if type(src[0]) == type('str'):
        # iterate over a copy: removing items from a list while iterating over it skips elements
        for item in src[:]:
            if os.path.basename(item) in remove:
                src.remove(item)
        return
    for item in src[:]:
        if os.path.basename(item.rstr()) in remove:
            src.remove(item)
def GetVersion():
import SCons.cpp
import string
rtdef = os.path.join(Rtt_Root, 'include', 'rtdef.h')
# parse rtdef.h to get RT-Thread version
prepcessor = SCons.cpp.PreProcessor()
f = file(rtdef, 'r')
contents = f.read()
f.close()
prepcessor.process_contents(contents)
def_ns = prepcessor.cpp_namespace
version = int(filter(lambda ch: ch in '0123456789.', def_ns['RT_VERSION']))
subversion = int(filter(lambda ch: ch in '0123456789.', def_ns['RT_SUBVERSION']))
if def_ns.has_key('RT_REVISION'):
revision = int(filter(lambda ch: ch in '0123456789.', def_ns['RT_REVISION']))
return '%d.%d.%d' % (version, subversion, revision)
return '0.%d.%d' % (version, subversion)
def GlobSubDir(sub_dir, ext_name):
import os
import glob
def glob_source(sub_dir, ext_name):
list = os.listdir(sub_dir)
src = glob.glob(os.path.join(sub_dir, ext_name))
for item in list:
full_subdir = os.path.join(sub_dir, item)
if os.path.isdir(full_subdir):
src += glob_source(full_subdir, ext_name)
return src
dst = []
src = glob_source(sub_dir, ext_name)
for item in src:
dst.append(os.path.relpath(item, sub_dir))
return dst
def file_path_exist(path, *args):
return os.path.exists(os.path.join(path, *args))
def do_rm_file(src):
if os.path.exists(src):
os.unlink(src)
def do_copy_file(src, dst):
import shutil
# check source file
if not os.path.exists(src):
return
path = os.path.dirname(dst)
    # create the directory if it does not exist
if not os.path.exists(path):
os.makedirs(path)
shutil.copy2(src, dst)
def do_copy_folder(src_dir, dst_dir):
import shutil
# check source directory
if not os.path.exists(src_dir):
return
if os.path.exists(dst_dir):
shutil.rmtree(dst_dir)
shutil.copytree(src_dir, dst_dir)
source_ext = ["c", "h", "s", "S", "cpp", "xpm"]
source_list = []
def walk_children(child):
global source_list
global source_ext
# print child
full_path = child.rfile().abspath
file_type = full_path.rsplit('.',1)[1]
#print file_type
if file_type in source_ext:
if full_path not in source_list:
source_list.append(full_path)
children = child.all_children()
if children != []:
for item in children:
walk_children(item)
def MakeCopy(program):
global source_list
global Rtt_Root
global Env
target_path = os.path.join(Dir('#').abspath, 'rt-thread')
if Env['PLATFORM'] == 'win32':
RTT_ROOT = Rtt_Root.lower()
else:
RTT_ROOT = Rtt_Root
if target_path.startswith(RTT_ROOT):
return
for item in program:
walk_children(item)
source_list.sort()
    # filter source files belonging to RT-Thread
target_list = []
for src in source_list:
if Env['PLATFORM'] == 'win32':
src = src.lower()
if src.startswith(RTT_ROOT):
target_list.append(src)
source_list = target_list
# get source path
src_dir = []
for src in source_list:
src = src.replace(RTT_ROOT, '')
if src[0] == os.sep or src[0] == '/':
src = src[1:]
path = os.path.dirname(src)
sub_path = path.split(os.sep)
full_path = RTT_ROOT
for item in sub_path:
full_path = os.path.join(full_path, item)
if full_path not in src_dir:
src_dir.append(full_path)
for item in src_dir:
source_list.append(os.path.join(item, 'SConscript'))
for src in source_list:
dst = src.replace(RTT_ROOT, '')
if dst[0] == os.sep or dst[0] == '/':
dst = dst[1:]
print '=> ', dst
dst = os.path.join(target_path, dst)
do_copy_file(src, dst)
# copy tools directory
print "=> tools"
do_copy_folder(os.path.join(RTT_ROOT, "tools"), os.path.join(target_path, "tools"))
do_copy_file(os.path.join(RTT_ROOT, 'AUTHORS'), os.path.join(target_path, 'AUTHORS'))
do_copy_file(os.path.join(RTT_ROOT, 'COPYING'), os.path.join(target_path, 'COPYING'))
def MakeCopyHeader(program):
global source_ext
source_ext = []
source_ext = ["h", "xpm"]
global source_list
global Rtt_Root
global Env
target_path = os.path.join(Dir('#').abspath, 'rt-thread')
if Env['PLATFORM'] == 'win32':
RTT_ROOT = Rtt_Root.lower()
else:
RTT_ROOT = Rtt_Root
if target_path.startswith(RTT_ROOT):
return
for item in program:
walk_children(item)
source_list.sort()
    # filter source files belonging to RT-Thread
target_list = []
for src in source_list:
if Env['PLATFORM'] == 'win32':
src = src.lower()
if src.startswith(RTT_ROOT):
target_list.append(src)
source_list = target_list
for src in source_list:
dst = src.replace(RTT_ROOT, '')
if dst[0] == os.sep or dst[0] == '/':
dst = dst[1:]
print '=> ', dst
dst = os.path.join(target_path, dst)
do_copy_file(src, dst)
# copy tools directory
print "=> tools"
do_copy_folder(os.path.join(RTT_ROOT, "tools"), os.path.join(target_path, "tools"))
do_copy_file(os.path.join(RTT_ROOT, 'AUTHORS'), os.path.join(target_path, 'AUTHORS'))
do_copy_file(os.path.join(RTT_ROOT, 'COPYING'), os.path.join(target_path, 'COPYING'))
|
DigFarmer/aircraft
|
tools/building.py
|
Python
|
gpl-2.0
| 23,177
|
#from: http://stackoverflow.com/questions/10361820/simple-twisted-echo-client
#and
#from: http://stackoverflow.com/questions/510357/python-read-a-single-character-from-the-user
from twisted.internet.threads import deferToThread as _deferToThread
from twisted.internet import reactor
class ConsoleInput(object):
def __init__(self, stopFunction, reconnectFunction):
self.stopFunction = stopFunction
self.reconnectFunction = reconnectFunction
def start(self):
self.terminator = 'q'
self.restart = 'r'
self.getKey = _Getch()
self.startReceiving()
def startReceiving(self, s = ''):
if s == self.terminator:
self.stopFunction()
elif s == self.restart:
self.reconnectFunction()
_deferToThread(self.getKey).addCallback(self.startReceiving)
else:
_deferToThread(self.getKey).addCallback(self.startReceiving)
class _Getch:
"""
Gets a single character from standard input. Does not echo to the screen.
"""
def __init__(self):
try:
self.impl = _GetchWindows()
except ImportError:
self.impl = _GetchUnix()
def __call__(self): return self.impl()
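# Importing msvcrt succeeds only on Windows; on other platforms the
# ImportError above selects the termios-based Unix reader instead.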
class _GetchUnix:
def __init__(self):
import tty, sys
def __call__(self):
import sys, tty, termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows:
def __init__(self):
import msvcrt
def __call__(self):
import msvcrt
return msvcrt.getch()
|
tpainter/df_everywhere
|
df_everywhere/util/consoleInput.py
|
Python
|
gpl-2.0
| 1,794
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Deposit API."""
from __future__ import absolute_import, print_function
from flask import current_app
from invenio_pidstore.models import PersistentIdentifier
from invenio_indexer.api import RecordIndexer
from invenio_deposit.receivers import \
index_deposit_after_publish as original_index_deposit_after_publish
from invenio_jsonschemas import current_jsonschemas
from .api import Project
from .tasks import datacite_register
def index_deposit_after_publish(sender, action=None, pid=None, deposit=None):
"""Index the record after publishing."""
project_schema = current_jsonschemas.path_to_url(Project._schema)
if deposit['$schema'] == project_schema:
if action == 'publish':
# index videos (records)
pid_values = Project(data=deposit).video_ids
ids = [str(p.object_uuid)
for p in PersistentIdentifier.query.filter(
PersistentIdentifier.pid_value.in_(pid_values)).all()]
# index project (record)
_, record = deposit.fetch_published()
ids.append(str(record.id))
RecordIndexer().bulk_index(iter(ids))
else:
original_index_deposit_after_publish(sender=sender, action=action,
pid=pid, deposit=deposit)
def datacite_register_after_publish(sender, action=None, pid=None,
deposit=None):
"""Mind DOI with DataCite after the deposit has been published."""
if action == "publish" and \
current_app.config['DEPOSIT_DATACITE_MINTING_ENABLED']:
recid_pid, record = deposit.fetch_published()
datacite_register.delay(recid_pid.pid_value, str(record.id))
|
omelkonian/cds
|
cds/modules/deposit/receivers.py
|
Python
|
gpl-2.0
| 2,693
|
# coding=utf-8
# Default parameters
# When defining default parameters, remember one thing: a default parameter must point to an immutable object.
# Things differ when the default value is a mutable object such as a list, a dict, or an instance of most classes. For example, the following function accumulates the arguments passed to it across subsequent calls:
def f1(a, L=[]):
L.append(a)
return L
print(f1(1))
print(f1(2))
print(f1(3))
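# Prints [1], then [1, 2], then [1, 2, 3]: the same list object persists across calls.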
# If you don't want the default to be shared between subsequent calls, write the function like this:
def f2(a, L=None):
if L is None:
L = []
L.append(a)
return L
print(f2(1))
print(f2(2))
print(f2(3))
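# Prints [1], then [2], then [3]: a fresh list is created on each call.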
# Variadic parameters --- allow passing zero or more positional arguments, which are automatically assembled into a tuple when the function is called.
# Prefix the parameter with an asterisk *
def calc_sum(*numbers):
sum = 0
for i in numbers:
sum += i
return sum
print(calc_sum(1, 2, 3))
# If the arguments are already in a list or tuple, prefix it with * to unpack it and pass the items as variadic arguments
num_tuple = (1, 2, 3, 4, 5, 6)
print(calc_sum(*num_tuple))
# Keyword arguments --- allow passing zero or more named arguments, which are automatically packed into a dict inside the function
#
# If variadic and keyword arguments are used together, the variadic parameter must appear before the keyword parameter
def person(name, age, **kw):
print('name:', name, 'age:', age, 'other:', kw)
person('Michael', 30)
person('Bob', 35, city='Beijing')
person('Adam', 45, gender='M', job='Engineer')
# Unpacking argument lists
# 1. The *-operator unpacks arguments from a list or tuple
# 2. The **-operator passes keyword arguments from a dict
args = [3, 6]
print(list(range(*args)))
extra = {'city': 'Beijing', 'job': 'Engineer'}
person('Jack', 24, **extra)
# Docstrings
def my_function():
"""Do nothing, but document it.
No, really, it doesn't do anything.
"""
pass
print(my_function.__doc__)
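# Added illustration (not part of the original notes): variadic and keyword
# arguments can be combined in a single signature.
def mixed(a, *args, **kwargs):
    return a, args, kwargs
print(mixed(1, 2, 3, x=4))  # (1, (2, 3), {'x': 4})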
|
youlangu/Study
|
Python/function.py
|
Python
|
gpl-2.0
| 1,887
|
# -*- coding: utf-8 -*-
#############################################################
# This file was automatically generated on 2022-01-18. #
# #
# Python Bindings Version 2.1.29 #
# #
# If you have a bugfix for this file and want to commit it, #
# please fix the bug in the generator. You can find a link #
# to the generators git repository on tinkerforge.com #
#############################################################
from collections import namedtuple
try:
from .ip_connection import Device, IPConnection, Error, create_char, create_char_list, create_string, create_chunk_data
except (ValueError, ImportError):
from ip_connection import Device, IPConnection, Error, create_char, create_char_list, create_string, create_chunk_data
GetAcceleration = namedtuple('Acceleration', ['x', 'y', 'z'])
GetConfiguration = namedtuple('Configuration', ['data_rate', 'full_scale'])
GetAccelerationCallbackConfiguration = namedtuple('AccelerationCallbackConfiguration', ['period', 'value_has_to_change'])
GetContinuousAccelerationConfiguration = namedtuple('ContinuousAccelerationConfiguration', ['enable_x', 'enable_y', 'enable_z', 'resolution'])
GetFilterConfiguration = namedtuple('FilterConfiguration', ['iir_bypass', 'low_pass_filter'])
GetSPITFPErrorCount = namedtuple('SPITFPErrorCount', ['error_count_ack_checksum', 'error_count_message_checksum', 'error_count_frame', 'error_count_overflow'])
GetIdentity = namedtuple('Identity', ['uid', 'connected_uid', 'position', 'hardware_version', 'firmware_version', 'device_identifier'])
class BrickletAccelerometerV2(Device):
"""
    Measures acceleration in three axes
"""
DEVICE_IDENTIFIER = 2130
DEVICE_DISPLAY_NAME = 'Accelerometer Bricklet 2.0'
DEVICE_URL_PART = 'accelerometer_v2' # internal
CALLBACK_ACCELERATION = 8
CALLBACK_CONTINUOUS_ACCELERATION_16_BIT = 11
CALLBACK_CONTINUOUS_ACCELERATION_8_BIT = 12
FUNCTION_GET_ACCELERATION = 1
FUNCTION_SET_CONFIGURATION = 2
FUNCTION_GET_CONFIGURATION = 3
FUNCTION_SET_ACCELERATION_CALLBACK_CONFIGURATION = 4
FUNCTION_GET_ACCELERATION_CALLBACK_CONFIGURATION = 5
FUNCTION_SET_INFO_LED_CONFIG = 6
FUNCTION_GET_INFO_LED_CONFIG = 7
FUNCTION_SET_CONTINUOUS_ACCELERATION_CONFIGURATION = 9
FUNCTION_GET_CONTINUOUS_ACCELERATION_CONFIGURATION = 10
FUNCTION_SET_FILTER_CONFIGURATION = 13
FUNCTION_GET_FILTER_CONFIGURATION = 14
FUNCTION_GET_SPITFP_ERROR_COUNT = 234
FUNCTION_SET_BOOTLOADER_MODE = 235
FUNCTION_GET_BOOTLOADER_MODE = 236
FUNCTION_SET_WRITE_FIRMWARE_POINTER = 237
FUNCTION_WRITE_FIRMWARE = 238
FUNCTION_SET_STATUS_LED_CONFIG = 239
FUNCTION_GET_STATUS_LED_CONFIG = 240
FUNCTION_GET_CHIP_TEMPERATURE = 242
FUNCTION_RESET = 243
FUNCTION_WRITE_UID = 248
FUNCTION_READ_UID = 249
FUNCTION_GET_IDENTITY = 255
DATA_RATE_0_781HZ = 0
DATA_RATE_1_563HZ = 1
DATA_RATE_3_125HZ = 2
DATA_RATE_6_2512HZ = 3
DATA_RATE_12_5HZ = 4
DATA_RATE_25HZ = 5
DATA_RATE_50HZ = 6
DATA_RATE_100HZ = 7
DATA_RATE_200HZ = 8
DATA_RATE_400HZ = 9
DATA_RATE_800HZ = 10
DATA_RATE_1600HZ = 11
DATA_RATE_3200HZ = 12
DATA_RATE_6400HZ = 13
DATA_RATE_12800HZ = 14
DATA_RATE_25600HZ = 15
FULL_SCALE_2G = 0
FULL_SCALE_4G = 1
FULL_SCALE_8G = 2
INFO_LED_CONFIG_OFF = 0
INFO_LED_CONFIG_ON = 1
INFO_LED_CONFIG_SHOW_HEARTBEAT = 2
RESOLUTION_8BIT = 0
RESOLUTION_16BIT = 1
IIR_BYPASS_APPLIED = 0
IIR_BYPASS_BYPASSED = 1
LOW_PASS_FILTER_NINTH = 0
LOW_PASS_FILTER_HALF = 1
BOOTLOADER_MODE_BOOTLOADER = 0
BOOTLOADER_MODE_FIRMWARE = 1
BOOTLOADER_MODE_BOOTLOADER_WAIT_FOR_REBOOT = 2
BOOTLOADER_MODE_FIRMWARE_WAIT_FOR_REBOOT = 3
BOOTLOADER_MODE_FIRMWARE_WAIT_FOR_ERASE_AND_REBOOT = 4
BOOTLOADER_STATUS_OK = 0
BOOTLOADER_STATUS_INVALID_MODE = 1
BOOTLOADER_STATUS_NO_CHANGE = 2
BOOTLOADER_STATUS_ENTRY_FUNCTION_NOT_PRESENT = 3
BOOTLOADER_STATUS_DEVICE_IDENTIFIER_INCORRECT = 4
BOOTLOADER_STATUS_CRC_MISMATCH = 5
STATUS_LED_CONFIG_OFF = 0
STATUS_LED_CONFIG_ON = 1
STATUS_LED_CONFIG_SHOW_HEARTBEAT = 2
STATUS_LED_CONFIG_SHOW_STATUS = 3
def __init__(self, uid, ipcon):
"""
Creates an object with the unique device ID *uid* and adds it to
the IP Connection *ipcon*.
"""
Device.__init__(self, uid, ipcon, BrickletAccelerometerV2.DEVICE_IDENTIFIER, BrickletAccelerometerV2.DEVICE_DISPLAY_NAME)
self.api_version = (2, 0, 1)
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_ACCELERATION] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_SET_CONFIGURATION] = BrickletAccelerometerV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_CONFIGURATION] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_SET_ACCELERATION_CALLBACK_CONFIGURATION] = BrickletAccelerometerV2.RESPONSE_EXPECTED_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_ACCELERATION_CALLBACK_CONFIGURATION] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_SET_INFO_LED_CONFIG] = BrickletAccelerometerV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_INFO_LED_CONFIG] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_SET_CONTINUOUS_ACCELERATION_CONFIGURATION] = BrickletAccelerometerV2.RESPONSE_EXPECTED_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_CONTINUOUS_ACCELERATION_CONFIGURATION] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_SET_FILTER_CONFIGURATION] = BrickletAccelerometerV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_FILTER_CONFIGURATION] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_SPITFP_ERROR_COUNT] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_SET_BOOTLOADER_MODE] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_BOOTLOADER_MODE] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_SET_WRITE_FIRMWARE_POINTER] = BrickletAccelerometerV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletAccelerometerV2.FUNCTION_WRITE_FIRMWARE] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_SET_STATUS_LED_CONFIG] = BrickletAccelerometerV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_STATUS_LED_CONFIG] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_CHIP_TEMPERATURE] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_RESET] = BrickletAccelerometerV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletAccelerometerV2.FUNCTION_WRITE_UID] = BrickletAccelerometerV2.RESPONSE_EXPECTED_FALSE
self.response_expected[BrickletAccelerometerV2.FUNCTION_READ_UID] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.response_expected[BrickletAccelerometerV2.FUNCTION_GET_IDENTITY] = BrickletAccelerometerV2.RESPONSE_EXPECTED_ALWAYS_TRUE
self.callback_formats[BrickletAccelerometerV2.CALLBACK_ACCELERATION] = (20, 'i i i')
self.callback_formats[BrickletAccelerometerV2.CALLBACK_CONTINUOUS_ACCELERATION_16_BIT] = (68, '30h')
self.callback_formats[BrickletAccelerometerV2.CALLBACK_CONTINUOUS_ACCELERATION_8_BIT] = (68, '60b')
ipcon.add_device(self)
def get_acceleration(self):
"""
Returns the acceleration in x, y and z direction. The values
are given in gₙ/10000 (1gₙ = 9.80665m/s²). The range is
configured with :func:`Set Configuration`.
If you want to get the acceleration periodically, it is recommended
to use the :cb:`Acceleration` callback and set the period with
:func:`Set Acceleration Callback Configuration`.
"""
self.check_validity()
return GetAcceleration(*self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_ACCELERATION, (), '', 20, 'i i i'))
def set_configuration(self, data_rate, full_scale):
"""
Configures the data rate and full scale range.
Possible values are:
* Data rate of 0.781Hz to 25600Hz.
* Full scale range of ±2g up to ±8g.
Decreasing data rate or full scale range will also decrease the noise on
the data.
"""
self.check_validity()
data_rate = int(data_rate)
full_scale = int(full_scale)
self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_SET_CONFIGURATION, (data_rate, full_scale), 'B B', 0, '')
def get_configuration(self):
"""
Returns the configuration as set by :func:`Set Configuration`.
"""
self.check_validity()
return GetConfiguration(*self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_CONFIGURATION, (), '', 10, 'B B'))
def set_acceleration_callback_configuration(self, period, value_has_to_change):
"""
The period is the period with which the :cb:`Acceleration`
callback is triggered periodically. A value of 0 turns the callback off.
If the `value has to change`-parameter is set to true, the callback is only
triggered after the value has changed. If the value didn't change within the
period, the callback is triggered immediately on change.
If it is set to false, the callback is continuously triggered with the period,
independent of the value.
If this callback is enabled, the :cb:`Continuous Acceleration 16 Bit` callback
and :cb:`Continuous Acceleration 8 Bit` callback will automatically be disabled.
"""
self.check_validity()
period = int(period)
value_has_to_change = bool(value_has_to_change)
self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_SET_ACCELERATION_CALLBACK_CONFIGURATION, (period, value_has_to_change), 'I !', 0, '')
def get_acceleration_callback_configuration(self):
"""
Returns the callback configuration as set by
:func:`Set Acceleration Callback Configuration`.
"""
self.check_validity()
return GetAccelerationCallbackConfiguration(*self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_ACCELERATION_CALLBACK_CONFIGURATION, (), '', 13, 'I !'))
def set_info_led_config(self, config):
"""
Configures the info LED (marked as "Force" on the Bricklet) to be either turned off,
turned on, or blink in heartbeat mode.
"""
self.check_validity()
config = int(config)
self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_SET_INFO_LED_CONFIG, (config,), 'B', 0, '')
def get_info_led_config(self):
"""
Returns the LED configuration as set by :func:`Set Info LED Config`
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_INFO_LED_CONFIG, (), '', 9, 'B')
def set_continuous_acceleration_configuration(self, enable_x, enable_y, enable_z, resolution):
"""
For high throughput of acceleration data (> 1000Hz) you have to use the
:cb:`Continuous Acceleration 16 Bit` or :cb:`Continuous Acceleration 8 Bit`
callbacks.
You can enable the callback for each axis (x, y, z) individually and choose a
resolution of 8 bit or 16 bit.
        If at least one of the axes is enabled and the resolution is set to 8 bit,
        the :cb:`Continuous Acceleration 8 Bit` callback is activated. If at least
        one of the axes is enabled and the resolution is set to 16 bit,
        the :cb:`Continuous Acceleration 16 Bit` callback is activated.
        The returned values are raw ADC data. If you want to put this data into
        an FFT to determine the occurrences of specific frequencies, we recommend
        that you use the data as is. It has all of the ADC noise in it. This noise
        looks like pure noise at first glance, but it might still have some frequency
        information in it that can be utilized by the FFT.
Otherwise you have to use the following formulas that depend on the configured
resolution (8/16 bit) and the full scale range (see :func:`Set Configuration`) to calculate
the data in gₙ/10000 (same unit that is returned by :func:`Get Acceleration`):
* 16 bit, full scale 2g: acceleration = value * 625 / 1024
* 16 bit, full scale 4g: acceleration = value * 1250 / 1024
* 16 bit, full scale 8g: acceleration = value * 2500 / 1024
If a resolution of 8 bit is used, only the 8 most significant bits will be
transferred, so you can use the following formulas:
* 8 bit, full scale 2g: acceleration = value * 256 * 625 / 1024
* 8 bit, full scale 4g: acceleration = value * 256 * 1250 / 1024
* 8 bit, full scale 8g: acceleration = value * 256 * 2500 / 1024
If no axis is enabled, both callbacks are disabled. If one of the continuous
callbacks is enabled, the :cb:`Acceleration` callback is disabled.
The maximum throughput depends on the exact configuration:
.. csv-table::
:header: "Number of axis enabled", "Throughput 8 bit", "Throughout 16 bit"
:widths: 20, 20, 20
"1", "25600Hz", "25600Hz"
"2", "25600Hz", "15000Hz"
"3", "20000Hz", "10000Hz"
"""
self.check_validity()
enable_x = bool(enable_x)
enable_y = bool(enable_y)
enable_z = bool(enable_z)
resolution = int(resolution)
self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_SET_CONTINUOUS_ACCELERATION_CONFIGURATION, (enable_x, enable_y, enable_z, resolution), '! ! ! B', 0, '')
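        # Worked example of the conversion above (illustrative values, not part
        # of the generated bindings): a raw 16 bit sample of 1024 at full scale
        # 2g corresponds to 1024 * 625 / 1024 = 625, i.e. 0.0625 gn in the
        # gn/10000 unit returned by get_acceleration().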
def get_continuous_acceleration_configuration(self):
"""
Returns the continuous acceleration configuration as set by
:func:`Set Continuous Acceleration Configuration`.
"""
self.check_validity()
return GetContinuousAccelerationConfiguration(*self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_CONTINUOUS_ACCELERATION_CONFIGURATION, (), '', 12, '! ! ! B'))
def set_filter_configuration(self, iir_bypass, low_pass_filter):
"""
Configures IIR Bypass filter mode and low pass filter roll off corner frequency.
The filter can be applied or bypassed and the corner frequency can be
half or a ninth of the output data rate.
.. image:: /Images/Bricklets/bricklet_accelerometer_v2_filter.png
:scale: 100 %
:alt: Accelerometer filter
:align: center
:target: ../../_images/Bricklets/bricklet_accelerometer_v2_filter.png
.. versionadded:: 2.0.2$nbsp;(Plugin)
"""
self.check_validity()
iir_bypass = int(iir_bypass)
low_pass_filter = int(low_pass_filter)
self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_SET_FILTER_CONFIGURATION, (iir_bypass, low_pass_filter), 'B B', 0, '')
def get_filter_configuration(self):
"""
Returns the configuration as set by :func:`Set Filter Configuration`.
.. versionadded:: 2.0.2$nbsp;(Plugin)
"""
self.check_validity()
return GetFilterConfiguration(*self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_FILTER_CONFIGURATION, (), '', 10, 'B B'))
def get_spitfp_error_count(self):
"""
Returns the error count for the communication between Brick and Bricklet.
The errors are divided into
* ACK checksum errors,
* message checksum errors,
* framing errors and
* overflow errors.
        The error counts are for errors that occur on the Bricklet side. All
Bricks have a similar function that returns the errors on the Brick side.
"""
self.check_validity()
return GetSPITFPErrorCount(*self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_SPITFP_ERROR_COUNT, (), '', 24, 'I I I I'))
def set_bootloader_mode(self, mode):
"""
Sets the bootloader mode and returns the status after the requested
mode change was instigated.
You can change from bootloader mode to firmware mode and vice versa. A change
from bootloader mode to firmware mode will only take place if the entry function,
device identifier and CRC are present and correct.
This function is used by Brick Viewer during flashing. It should not be
necessary to call it in a normal user program.
"""
self.check_validity()
mode = int(mode)
return self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')
def get_bootloader_mode(self):
"""
Returns the current bootloader mode, see :func:`Set Bootloader Mode`.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_BOOTLOADER_MODE, (), '', 9, 'B')
def set_write_firmware_pointer(self, pointer):
"""
Sets the firmware pointer for :func:`Write Firmware`. The pointer has
to be increased by chunks of size 64. The data is written to flash
        every 4 chunks (which equals one page of size 256).
This function is used by Brick Viewer during flashing. It should not be
necessary to call it in a normal user program.
"""
self.check_validity()
pointer = int(pointer)
self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_SET_WRITE_FIRMWARE_POINTER, (pointer,), 'I', 0, '')
def write_firmware(self, data):
"""
Writes 64 Bytes of firmware at the position as written by
:func:`Set Write Firmware Pointer` before. The firmware is written
to flash every 4 chunks.
You can only write firmware in bootloader mode.
This function is used by Brick Viewer during flashing. It should not be
necessary to call it in a normal user program.
"""
self.check_validity()
data = list(map(int, data))
return self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_WRITE_FIRMWARE, (data,), '64B', 9, 'B')
def set_status_led_config(self, config):
"""
Sets the status LED configuration. By default the LED shows
communication traffic between Brick and Bricklet, it flickers once
for every 10 received data packets.
You can also turn the LED permanently on/off or show a heartbeat.
        If the Bricklet is in bootloader mode, the LED will show a heartbeat by default.
"""
self.check_validity()
config = int(config)
self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_SET_STATUS_LED_CONFIG, (config,), 'B', 0, '')
def get_status_led_config(self):
"""
Returns the configuration as set by :func:`Set Status LED Config`
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_STATUS_LED_CONFIG, (), '', 9, 'B')
def get_chip_temperature(self):
"""
Returns the temperature as measured inside the microcontroller. The
value returned is not the ambient temperature!
The temperature is only proportional to the real temperature and it has bad
accuracy. Practically it is only useful as an indicator for
temperature changes.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_CHIP_TEMPERATURE, (), '', 10, 'h')
def reset(self):
"""
Calling this function will reset the Bricklet. All configurations
will be lost.
After a reset you have to create new device objects,
calling functions on the existing ones will result in
undefined behavior!
"""
self.check_validity()
self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_RESET, (), '', 0, '')
def write_uid(self, uid):
"""
Writes a new UID into flash. If you want to set a new UID
you have to decode the Base58 encoded UID string into an
integer first.
We recommend that you use Brick Viewer to change the UID.
"""
self.check_validity()
uid = int(uid)
self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_WRITE_UID, (uid,), 'I', 0, '')
def read_uid(self):
"""
Returns the current UID as an integer. Encode as
Base58 to get the usual string version.
"""
self.check_validity()
return self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_READ_UID, (), '', 12, 'I')
def get_identity(self):
"""
Returns the UID, the UID where the Bricklet is connected to,
the position, the hardware and firmware version as well as the
device identifier.
The position can be 'a', 'b', 'c', 'd', 'e', 'f', 'g' or 'h' (Bricklet Port).
A Bricklet connected to an :ref:`Isolator Bricklet <isolator_bricklet>` is always at
position 'z'.
The device identifier numbers can be found :ref:`here <device_identifier>`.
|device_identifier_constant|
"""
return GetIdentity(*self.ipcon.send_request(self, BrickletAccelerometerV2.FUNCTION_GET_IDENTITY, (), '', 33, '8s 8s c 3B 3B H'))
def register_callback(self, callback_id, function):
"""
Registers the given *function* with the given *callback_id*.
"""
if function is None:
self.registered_callbacks.pop(callback_id, None)
else:
self.registered_callbacks[callback_id] = function
AccelerometerV2 = BrickletAccelerometerV2 # for backward compatibility
|
Tinkerforge/brickv
|
src/brickv/bindings/bricklet_accelerometer_v2.py
|
Python
|
gpl-2.0
| 22,844
|
from neolib.plots.Step import Step
from neolib.NST import NST
import time
class HealPetPet(Step):
_paths = {
'links': '//*[@id="content"]/table/tr/td[2]//a/@href',
'img': '//*[@id="content"]/table/tr/td[2]/div/img/@src',
'cert': '//area/@href',
}
_HEALS = {
'http://images.neopets.com/altador/misc/petpet_act_b_ffabe6bc57.gif': 0,
'http://images.neopets.com/altador/misc/petpet_act_a_2a605ae262.gif': 1,
'http://images.neopets.com/altador/misc/petpet_act_c_5f4438778c.gif': 2,
'http://images.neopets.com/altador/misc/petpet_act_d_42b934a33b.gif': 3,
}
def __init__(self, usr):
super().__init__(usr, '', '', False)
# Setup link
self.link = ['http://www.neopets.com/altador/petpet.phtml?ppheal=1',
'http://www.neopets.com/altador/petpet.phtml?ppheal=1&sthv=%s']
# Setup checks
self._checks = ['']
def execute(self, last_pg=None):
        # Heal the PetPet repeatedly (up to 11 attempts) to get the certificate
check = ''
for i in range(0, 11):
if check:
pg = self._usr.get_page(check)
else:
pg = self._usr.get_page(self.link[0])
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
if len(self._xpath('cert', pg)) > 0:
print('Found certificate!')
url = self._base_url + self._xpath('cert', pg)[0]
pg = self._usr.get_page(url)
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
print('Saved page')
exit()
links = self._xpath('links', pg)
action = self._HEALS[self._xpath('img', pg)[0]]
url = self._base_url + links[action]
print('URL: ' + url)
pg = self._usr.get_page(url)
links = self._xpath('links', pg)
check = self._base_url + links[4]
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
if len(self._xpath('cert', pg)) > 0:
print('Found certificate!')
url = self._base_url + self._xpath('cert', pg)[0]
pg = self._usr.get_page(url)
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
print('Saved page')
exit()
# Wait till the next minute to check on the petpet
wait = (60 - NST.sec) + 1
print('Waiting ' + str(wait) + ' seconds')
time.sleep(wait)
|
jmgilman/neolib2
|
neolib/plots/altador/steps/HealPetPet.py
|
Python
|
gpl-2.0
| 2,736
|
#!/usr/bin/env python
# coding: utf-8
"""
multiprocessTask.py
~~~~~~~~~~~~~~~~~~~
a multiprocess model of producer/consumer
task = Task(work_func, 1, 3, counter=0, a='', callback=cb)
results = task.run()
for i in xrange(26):
lines = ["%d" % i] * random.randint(10, 20)
task.put(lines)
task.finish()
"""
import os
import time
from multiprocessing import Pool as ProcessPool, Manager, cpu_count
__all__ = ['Producer', 'Consumer', 'Task']
class Callable(object):
def __call__(self, *args, **kwargs):
raise NotImplementedError('%s not callable' % self)
def run(self, *args, **kwargs):
raise NotImplementedError('%s.run() not implemented' % self)
class Producer(Callable):
def __init__(self, todo_list=None, max_qsize=None):
manager = Manager()
self._q = manager.Queue()
self._q_lock = manager.Lock()
self._q_close_event = manager.Event()
self._max_qsize = max_qsize or 0
todo_list = todo_list or []
if isinstance(todo_list, (list, tuple)) and len(todo_list) > 0:
self.put(todo_list)
super(Producer, self).__init__()
@property
def q_size(self):
return self._q.qsize()
def __call__(self, q, lock, close_event, *args, **kwargs):
for i, data in enumerate(self.run()):
with lock:
q.put(data)
print 'pid %s put %d: %s' % (os.getpid(), i, data)
def run(self):
while 1:
with self._q_lock:
if self._q.empty():
if self._q_close_event.is_set():
break
else:
time.sleep(0.01)
continue
yield self._q.get()
def put(self, *todos):
for todo in todos:
with self._q_lock:
self._q.put(todo)
def finish(self):
try:
self._q_close_event.set()
except Exception as e:
print e
class Consumer(Callable):
def __init__(self, fn=None):
self._fn = fn
self.results = []
super(Consumer, self).__init__()
def __call__(self, q, lock, close_event, *args, **kwargs):
while 1:
with lock:
if q.empty():
if close_event.is_set():
break
else:
time.sleep(0.01)
continue
data = q.get()
self.results.append(self.run(data, *args, **kwargs))
return self.results
def run(self, data, *args, **kwargs):
if self._fn:
return self._fn(data, *args, **kwargs)
class Task(object):
"""
a multiprocess model of producer/consumer
"""
def __init__(self, fn,
producer_count=None,
consumer_count=None,
callback=None,
batch=True,
counter=None,
**shared
):
"""
init producer/consumer task
Args:
fn: consumer called func(data, counter, q_size, *args, **shared_vars)
producer_count: producer process count, default: 1
consumer_count: consumer process count, default: cpu_count - 1
            callback: callback func called after fn completes
            batch: if True, the whole 'todo_list' passed to `task.put(todo_list)` is processed at once as a batch;
                if False, the items in todo_list are processed one by one
counter: process shared counter, need custom imp in <fn>
**shared: process shared object data
"""
cpus = cpu_count()
if producer_count is None or producer_count < 1 or producer_count > cpu_count():
producer_count = 1
if consumer_count is None or consumer_count < 1 or consumer_count > cpu_count():
consumer_count = cpus - 1
print 'producer_count=%s consumer_count=%s' % (producer_count, consumer_count)
self._callback = callback
self.batch = batch
manager = Manager()
self.q = manager.Queue()
self.lock = manager.Lock()
self.event = manager.Event()
self._counter = manager.Value('counter', counter or 0)
self._shared = {var_name: manager.Value(var_name, var_value) for var_name, var_value in shared.iteritems()}
self.producerProcessList = [Producer() for _ in xrange(producer_count)]
self.consumerProcessList = [Consumer(fn=fn) for _ in xrange(consumer_count)]
self.pool = ProcessPool(consumer_count + producer_count)
@property
def q_size(self):
return self.q.qsize() + sum([x.q_size or 0 for x in self.producerProcessList])
@property
def counter(self):
return self._counter.value
@property
def shared(self):
return {var_name: var_value_proxy.value for var_name, var_value_proxy in self._shared.iteritems()}
def put(self, todo_list):
producer = self.producerProcessList.pop(0)
if self.batch:
producer.put(todo_list)
else:
producer.put(*todo_list)
self.producerProcessList.append(producer)
time.sleep(0.01)
def run(self, *args, **kwargs):
results = []
arg = (self.q, self.lock, self.event, self._counter, self.q_size)
kwargs.update(self._shared)
for producer in self.producerProcessList:
self.pool.apply_async(producer, arg + args, kwargs)
for consumer in self.consumerProcessList:
results.append(self.pool.apply_async(consumer, arg + args, kwargs, self._cb))
return results
def _cb(self, *args, **kwargs):
if self._callback:
self._callback(self.counter, self._shared)
def finish(self):
for producer in self.producerProcessList:
producer.finish()
self.pool.close()
time.sleep(0.03)
self.event.set()
self.pool.join()
# def work(data, counter, *args, **kwargs):
# pid = os.getpid()
# print '%s doing %s' % (pid, data)
# # counter = args[0] if len(args) > 0 else None
# if counter:
# counter.value += 1
# kwargs['var_a'].value += chr(len(kwargs['var_a'].value) + 65)
# return '%s result' % pid
#
#
# def cb(*args, **kwargs):
# print 'callback', args, kwargs
#
#
# def test():
# import random
# n = 0
# task = Task(work, 1, 3, counter=n, var_a='', callback=cb)
# results = task.run()
# for i in xrange(26):
# lines = ["%d" % i] * random.randint(10, 20)
# task.put(lines)
#
# task.finish()
#
# print 'end counter', task.counter
# print 'shared.var_a', task.shared['var_a']
# print 'results:\n' + '\n'.join([str(res.get()) for res in results])
#
# if __name__ == '__main__':
# test()
|
Vito2015/pyextend
|
pyextend/core/thread/multiprocessTask.py
|
Python
|
gpl-2.0
| 6,887
|
#!/usr/bin/env python
# -*- Mode: Python; tab-width: 4 -*-
#
# Netfarm Mail Archiver - release 2
#
# Copyright (C) 2005-2007 Gianluigi Tiesi <sherpya@netfarm.it>
# Copyright (C) 2005-2007 NetFarm S.r.l. [http://www.netfarm.it]
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
# ======================================================================
## @file backend_xmlrpc.py
## XMLrpc Storage and Archive Backend
__doc__ = '''Netfarm Archiver - release 2.1.0 - XmlRpc backend'''
__version__ = '2.1.0'
__all__ = [ 'Backend' ]
from archiver import *
from sys import exc_info
from xmlrpclib import ServerProxy, Error
from urlparse import urlparse
from time import mktime
_prefix = 'XmlRpc Backend: '
##
class BadUrlSyntax(Exception):
"""BadUrlSyntax Bad url syntax in config file"""
pass
class Backend(BackendBase):
"""XMLrpc Backend using python-xmlrpc
This backend can be used with a xmlrpc capable server like zope"""
def __init__(self, config, stage_type, ar_globals):
"""The constructor"""
self.config = config
self.type = stage_type
self.LOG = ar_globals['LOG']
try:
self.url = config.get(self.type, 'url')
self.method = config.get(self.type, 'method')
self.server = ServerProxy(self.url)
except:
raise BadConfig, 'Bad config in xmlrpc backend'
self.LOG(E_ALWAYS, 'XmlRpc Backend (%s) at %s' % (self.type, self.url))
def process(self, data):
"""Archive backend proces
@param data: The data argument is a dict containing mail info and the mail itself
@return: year as status and pid as code"""
## FIXME wrap with xmlrpc DateTime - time.struct_time objects cannot be marshalled
data['m_date'] = mktime(data['m_date'])
self.LOG(E_TRACE, 'XmlRpc Backend (%s): ready to process %s' % (self.type, data))
try:
getattr(self.server, self.method)({'data': data})
except Error, v:
del v ## FIXME Fill error
return 0, 443, 'Error'
return 0, 200, 'Ok'
def shutdown(self):
"""Backend Shutdown callback"""
self.LOG(E_ALWAYS, 'XmlRpc Backend (%s): closing connection' % self.type)
self.server = None
|
sherpya/archiver
|
backend_xmlrpc.py
|
Python
|
gpl-2.0
| 2,693
|
# -*- coding:utf-8 -*-
# Made by Kei Choi(hanul93@gmail.com)
import os  # imported for file deletion
import kernel
#---------------------------------------------------------------------
# KavMain class
# Marks this module as a KICOM Anti-Virus engine module.
# Without this class the engine kernel module will not load the plugin.
#---------------------------------------------------------------------
class KavMain :
    #-----------------------------------------------------------------
    # init(self, plugins)
    # Performs the initialization work for the engine module.
    #-----------------------------------------------------------------
    def init(self, plugins) :  # initialize the engine module
        self.virus_name = 'Dummy-Test-File (not a virus)'  # name of the malware this engine detects
        # register the malware pattern
        self.dummy_pattern = 'Dummy Engine test file - KICOM Anti-Virus Project, 2012, Kei Choi'
        return 0
    #-----------------------------------------------------------------
    # uninit(self)
    # Performs the cleanup work for the engine module.
    #-----------------------------------------------------------------
    def uninit(self) :  # engine module cleanup
        try :
            del self.virus_name
            del self.dummy_pattern
        except :
            pass
        return 0
    #-----------------------------------------------------------------
    # scan(self, mmhandle, scan_file_struct, format)
    # Scans for malware.
    # Arguments : mmhandle         - file mmap handle
    #           : scan_file_struct - file structure
    #           : format           - pre-analyzed file format
    # Returns   : (malware found or not, malware name, malware ID) etc.
    #-----------------------------------------------------------------
    def scan(self, mmhandle, scan_file_struct, format) :
        ret_value = {}
        ret_value['result'] = False  # whether malware was found
        ret_value['virus_name'] = ''  # malware name
        ret_value['scan_state'] = kernel.NOT_FOUND  # 0:none, 1:infected, 2:suspicious, 3:warning
        ret_value['virus_id'] = -1  # malware ID
        try :
            # Is there a Dummy format among the pre-analyzed file formats?
            fformat = format['ff_dummy']
            # Is the size recorded in the pre-analyzed file format 65 bytes?
            if fformat['size'] != len(self.dummy_pattern) :
                raise SystemError
            # Open the file and read as many bytes as the malware pattern.
            filename = scan_file_struct['real_filename']
            fp = open(filename)
            buf = fp.read(len(self.dummy_pattern))  # the pattern is 65 bytes long
            fp.close()
            # Compare against the malware pattern.
            if buf == self.dummy_pattern :
                # If the patterns match, return the result.
                ret_value['result'] = True  # whether malware was found
                ret_value['virus_name'] = self.virus_name  # malware name
                ret_value['scan_state'] = kernel.INFECTED  # 0:none, 1:infected, 2:suspicious, 3:warning
                ret_value['virus_id'] = 0  # malware ID
                return ret_value
        except :
            pass
        # Return that no malware was found.
        return ret_value
    #-----------------------------------------------------------------
    # disinfect(self, filename, malwareID)
    # Disinfects the malware.
    # Arguments : filename  - file name
    #           : malwareID - ID of the malware to disinfect
    # Returns   : whether disinfection succeeded
    #-----------------------------------------------------------------
    def disinfect(self, filename, malwareID) :  # disinfect malware
        try :
            # Is the ID received from the scan result 0?
            if malwareID == 0 :
                os.remove(filename)  # delete the file
                return True  # report successful disinfection
        except :
            pass
        return False  # report failed disinfection
    #-----------------------------------------------------------------
    # listvirus(self)
    # Returns the list of malware this module can detect/disinfect.
    #-----------------------------------------------------------------
    def listvirus(self) :  # list of detectable malware
        vlist = []  # declare a list variable
        vlist.append(self.virus_name)  # register the name of the detected malware
        return vlist
    #-----------------------------------------------------------------
    # getinfo(self)
    # Returns key information about the engine module (version, author...).
    #-----------------------------------------------------------------
    def getinfo(self) :
        info = {}  # declare a dict variable
        info['author'] = 'Kei Choi'  # author
        info['version'] = '1.0'  # version
        info['title'] = 'Dummy Scan Engine'  # engine description
        info['kmd_name'] = 'dummy'  # engine file name
        # If the pattern creation date/time is absent, the build time is used automatically.
        info['date'] = 0  # pattern creation date
        info['time'] = 0  # pattern creation time
        info['sig_num'] = 1  # number of signatures
        return info
    #-----------------------------------------------------------------
    # format(self, mmhandle, filename)
    # Format analyzer dedicated to the Dummy format.
    #-----------------------------------------------------------------
    def format(self, mmhandle, filename) :
        try :
            fformat = {}  # space to hold the format information
            mm = mmhandle
            if mm[0:5] == 'Dummy' :  # check the header
                fformat['size'] = len(mm)  # store the key format information
                ret = {}
                ret['ff_dummy'] = fformat
                return ret
        except :
            pass
        return None
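# A file that this dummy engine detects can be created from the pattern
# registered in init() above (sketch, not part of the original plugin):
# open('dummy.txt', 'w').write('Dummy Engine test file - KICOM Anti-Virus Project, 2012, Kei Choi')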
|
yezune/kicomav
|
Engine/plugins/dummy.py
|
Python
|
gpl-2.0
| 5,530
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/yeison/Documentos/python/developing/pinguino/pinguino-ide/qtgui/gide/bloques/widgets/control_slider.ui'
#
# Created: Wed Mar 4 01:39:58 2015
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Frame(object):
def setupUi(self, Frame):
Frame.setObjectName("Frame")
Frame.resize(237, 36)
Frame.setWindowTitle("")
self.gridLayout = QtGui.QGridLayout(Frame)
self.gridLayout.setSpacing(0)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.lineEdit_2 = QtGui.QLineEdit(Frame)
self.lineEdit_2.setMaximumSize(QtCore.QSize(46, 16777215))
font = QtGui.QFont()
font.setFamily("Ubuntu Mono")
font.setPointSize(15)
font.setWeight(75)
font.setBold(True)
self.lineEdit_2.setFont(font)
self.lineEdit_2.setStyleSheet("color: rgb(255, 255, 255);\n"
"background-color: rgba(255, 255, 255, 0);")
self.lineEdit_2.setText("0000")
self.lineEdit_2.setFrame(False)
self.lineEdit_2.setReadOnly(True)
self.lineEdit_2.setObjectName("lineEdit_2")
self.gridLayout.addWidget(self.lineEdit_2, 0, 1, 1, 1)
self.horizontalSlider = QtGui.QSlider(Frame)
self.horizontalSlider.setCursor(QtCore.Qt.PointingHandCursor)
self.horizontalSlider.setFocusPolicy(QtCore.Qt.NoFocus)
self.horizontalSlider.setMaximum(1023)
self.horizontalSlider.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider.setInvertedAppearance(False)
self.horizontalSlider.setTickPosition(QtGui.QSlider.NoTicks)
self.horizontalSlider.setTickInterval(128)
self.horizontalSlider.setObjectName("horizontalSlider")
self.gridLayout.addWidget(self.horizontalSlider, 0, 2, 1, 1)
self.retranslateUi(Frame)
QtCore.QMetaObject.connectSlotsByName(Frame)
def retranslateUi(self, Frame):
pass
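# Typical use of a pyside-uic generated class (sketch, not in the generated file):
# app = QtGui.QApplication([])
# frame = QtGui.QFrame()
# ui = Ui_Frame()
# ui.setupUi(frame)
# frame.show()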
|
emmanuelol/pinguino-ide
|
qtgui/gide/bloques/widgets/control_slider.py
|
Python
|
gpl-2.0
| 2,130
|
# Example import configuration.
import_templates = [{
'id': 'my_import',
'label': 'My Import (Trident)',
'defaults': [
('ds', '16607027920896001'),
('itt', '1'),
('mr', '1'),
('impstp', '1'),
('asa', '1'),
('impjun', '0'),
('dtd', '5'),
{
'id': 'dr',
'label': 'Directory containing files to import',
'type': 'directory',
'default': 'files',
},
('clean_old_data', '1'),
('from_today', '1'),
],
'admins': [('root', 'admin@example.com')],
'uploaders': [('uploader', ['uploader@rcsmobility.com'])],
'run_results_notifications': {
'load': ('alice@example.com',),
'warn': ('bob@example.com',),
},
}]
|
Open-Transport/synthese
|
legacy/projects/template/manager/imports_config.py
|
Python
|
gpl-2.0
| 784
|
from tablelist import *
|
egaxegax/dbCarta
|
tablelist/__init__.py
|
Python
|
gpl-2.0
| 24
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('servicos', '0007_auto_20210416_0841'),
]
operations = [
migrations.AlterField(
model_name='servico',
name='data_ultimo_uso',
field=models.DateField(help_text='Data em que o servi\xe7o foi utilizado pela Casa Legislativa pela \xfaltima vez', null=True, verbose_name='Data da \xfaltima utiliza\xe7\xe3o', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='servico',
name='erro_atualizacao',
field=models.TextField(help_text='Erro ocorrido na \xfaltima tentativa de verificar a data de \xfaltima atualiza\xe7\xe3o do servi\xe7o', verbose_name='Erro na atualiza\xe7\xe3o', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='tiposervico',
name='modo',
field=models.CharField(max_length=1, verbose_name='modo de presta\xe7\xe3o do servi\xe7o', choices=[(b'H', 'Hospedagem'), (b'R', 'Registro')]),
preserve_default=True,
),
migrations.AlterField(
model_name='tiposervico',
name='nome',
field=models.CharField(max_length=60, verbose_name='nome'),
preserve_default=True,
),
migrations.AlterField(
model_name='tiposervico',
name='sigla',
field=models.CharField(max_length=b'12', verbose_name='sigla'),
preserve_default=True,
),
migrations.AlterField(
model_name='tiposervico',
name='string_pesquisa',
field=models.TextField(help_text='Par\xe2metros da pesquisa para averiguar a data da \xfaltima atualiza\xe7\xe3o do servi\xe7o. Formato:<br/><ul><li>/caminho/da/pesquisa/?parametros [xml|json] campo.de.data</li>', verbose_name='string de pesquisa', blank=True),
preserve_default=True,
),
]
|
interlegis/sigi
|
sigi/apps/servicos/migrations/0008_auto_20210519_1117.py
|
Python
|
gpl-2.0
| 2,103
|
# Copyright (C) 2013-2018 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Tris Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from ..helpers import arguments
from ..helpers.command import Command
from ..helpers.orm import Permissions
@Command('acl', ['config', 'db'], role="owner")
def cmd(send, msg, args):
"""Handles permissions
Syntax: {command} (--add|--remove) --nick (nick) --role (admin)
"""
parser = arguments.ArgParser(args['config'])
parser.add_argument('--nick', action=arguments.NickParser, required=True)
parser.add_argument('--role', choices=['admin'], required=True)
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--add', action='store_true')
group.add_argument('--remove', action='store_true')
try:
cmdargs = parser.parse_args(msg)
except arguments.ArgumentException as e:
send(str(e))
return
session = args['db']
admin = session.query(Permissions).filter(Permissions.nick == cmdargs.nick).first()
if cmdargs.add:
if admin is None:
session.add(Permissions(nick=cmdargs.nick, role=cmdargs.role))
send(f"{cmdargs.nick} is now an {cmdargs.role}.")
else:
send(f"{admin.nick} is already an {admin.role}.")
else:
if admin is None:
send(f"{cmdargs.nick} was not an {cmdargs.role}.")
else:
session.delete(admin)
send(f"{admin.nick} is no longer an {admin.role}.")
|
tjcsl/cslbot
|
cslbot/commands/acl.py
|
Python
|
gpl-2.0
| 2,213
|
"""Gets information about the mesh of a case. Makes no attempt to manipulate
the mesh, because this is better left to the OpenFOAM-utilities"""
from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory
from PyFoam.RunDictionary.ListFile import ListFile
from PyFoam.Error import PyFoamException
from PyFoam.RunDictionary.ParsedParameterFile import ParsedFileHeader
from os import path
import re
class MeshInformation:
"""Reads Information about the mesh on demand"""
def __init__(self,
case,
time="constant",
processor=None,
region=None):
""":param case: Path to the case-directory
:param time: Time for which the mesh should be looked at
:param processor: Name of the processor directory for decomposed cases"""
self.sol=SolutionDirectory(case,paraviewLink=False,archive=None,region=region)
self.time=time
self.processor=processor
def nrOfFaces(self):
try:
return self.faces
except AttributeError:
try:
faces=ListFile(self.sol.polyMeshDir(time=self.time,processor=self.processor),"faces")
self.faces=faces.getSize()
except IOError:
faces=ListFile(self.sol.polyMeshDir(processor=self.processor),"faces")
self.faces=faces.getSize()
return self.faces
def nrOfPoints(self):
try:
return self.points
except AttributeError:
try:
points=ListFile(self.sol.polyMeshDir(time=self.time,processor=self.processor),"points")
self.points=points.getSize()
except IOError:
points=ListFile(self.sol.polyMeshDir(processor=self.processor),"points")
self.points=points.getSize()
return self.points
def nrOfCells(self):
try:
return self.cells
except:
try:
try:
owner=ParsedFileHeader(path.join(self.sol.polyMeshDir(time=self.time,processor=self.processor),"owner"))
except IOError:
owner=ParsedFileHeader(path.join(self.sol.polyMeshDir(processor=self.processor),"owner"))
mat=re.compile('.+nCells: *([0-9]+) .+').match(owner["note"])
self.cells=int(mat.group(1))
return self.cells
except:
raise PyFoamException("Not Implemented")
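# Minimal usage sketch (case path and arguments are hypothetical):
# info = MeshInformation("/path/to/case", time="0", processor="processor0")
# print info.nrOfCells(), info.nrOfFaces(), info.nrOfPoints()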
|
Unofficial-Extend-Project-Mirror/openfoam-extend-Breeder-other-scripting-PyFoam
|
PyFoam/RunDictionary/MeshInformation.py
|
Python
|
gpl-2.0
| 2,537
|
#!/usr/bin/python
import socket
import os
import time
import shutil
import sys
import re
import datetime
import argparse
# NCMD Libs
import ncmd_print as np
from ncmd_print import MessageLevel as MessageLevel
import ncmd_commands as ncmds
import ncmd_fileops as nfops
MAX_TRANSFER_BYTES=2048
QUIT_CMD = "quit now"
HOST = ""
PORT = 10123
ROOT_DIR_PATH = "/share/CACHEDEV1_DATA"
# Set up the server socket
def bindServerSocket(port):
server_sock = None
try:
server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_sock.bind((HOST, port))
np.print_msg("Successfully bound server socket to port:{0}".format(PORT), MessageLevel.INFO)
except Exception as err:
np.print_msg("Failed to bind server socket to port:{0}".format(PORT), MessageLevel.ERROR)
server_sock = None
return server_sock
# Accept incoming socket connections
def acceptConnection(server_sock):
server_sock.listen(1)
conn, addr = server_sock.accept()
return (conn, addr)
# Validate a path against the server mount
def validatePath(path, server_mnt):
result = False
# Paths beginning with the server mount are considered 'valid'
if path.find(server_mnt) == 0:
result = True
return result
# Validate source / destination paths
def validatePaths(paths, server_mnt):
result = True
for path in paths:
if not validatePath(path, server_mnt):
result = False
break
return result
# Deal with generating the appropriate response for a command
def processResponse(ncmd, success):
nresp = ''
if ncmds.getCommandBlock(ncmd):
if success:
nresp = ncmds.genCmdSuccessResp(ncmd)
else:
nresp = ncmds.genCmdFailureResp(ncmd)
else:
pass # No response for non-blocking
return nresp
# Handle the current command string -- the actual file operations occur here
def processCmd(ncmd, args):
quit = False
cmd_success = True
np.print_msg("Received command: {0}".format(ncmd), MessageLevel.INFO)
dest = ncmds.getCommandDest(ncmd)
srcs = ncmds.getCommandSrcs(ncmd)
if ncmds.isQuitSequence(ncmd):
quit = True
else:
if args.validate_server_mount:
srcs_valid = validatePaths(srcs, args.validate_server_mount)
dest_valid = validatePath(dest, args.validate_server_mount)
cmd_success = srcs_valid and dest_valid
# Only try and conduct file operations when validation is disabled,
# or if validation is enabled, and it passes.
if cmd_success:
if ncmds.isMove(ncmd):
for src in srcs:
if not nfops.move(src, dest):
cmd_success = False
elif ncmds.isCopy(ncmd):
for src in srcs:
if not nfops.copy(src, dest):
cmd_success = False
elif ncmds.isRemove(ncmd):
# The naming here isn't ideal, but this code gets the job done!
for src in srcs:
if not nfops.remove(src):
cmd_success = False
if not nfops.remove(dest):
cmd_success = False
return quit, cmd_success
# Deal with the current connection, getting, sending, and closing
def processConnection(conn, args):
ncmd = conn.recv(ncmds.MAX_CMD_SIZE)
quit, cmd_success = processCmd(ncmd, args)
resp = processResponse(ncmd, cmd_success)
if len(resp) > 0:
try:
conn.send(resp)
except Exception as err:
            np.print_msg(err, MessageLevel.ERROR)
conn.close()
return quit
def getArgs():
parser = argparse.ArgumentParser(description='Copy, move, remove quickly on a remotely mounted folder.')
parser.add_argument('--port', type=int, help='Specify a custom port.')
parser.add_argument('--validate_server_mount', type=str, help='Specify a mount on the server to validate incoming paths against.')
return parser.parse_args()
def main():
# Get the port
args = getArgs()
server_port = PORT
if args.port:
server_port = args.port
# Bind the sever socket
server_sock = bindServerSocket(server_port)
if server_sock:
while True:
conn = None
try:
conn, addr = acceptConnection(server_sock)
np.print_msg("Successfully connected to client: {0}:{1}".format(addr[0], PORT), MessageLevel.INFO)
except socket.error as msg:
np.print_msg(msg, MessageLevel.ERROR)
conn = None
if conn:
quit = processConnection(conn, args)
if quit:
np.print_msg("Server shutdown requested @ {0}...".format(datetime.datetime.now()), MessageLevel.INFO)
break
# Keep this at the end for safety!
if server_sock:
server_sock.close()
if __name__ == '__main__':
main()
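# Example invocation (port and mount path taken from the defaults above):
# ./ncmd_server.py --port 10123 --validate_server_mount /share/CACHEDEV1_DATA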
|
nathankrueger/ncmd
|
ncmd_server.py
|
Python
|
gpl-2.0
| 4,429
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @first_date 20160129
# @date 20160129
# @version 0.0
"""auth for Users API
"""
from flask import abort
from flask.views import MethodView
from flask.ext.login import login_required, current_user
from sqlalchemy.exc import IntegrityError
from webargs.flaskparser import use_args
from . import users_bp
from ..mixins import RestfulViewMixin
from ...models.users import User
from ...schemas.users import SignupSchema, LoginSchema, ResetPasswordSchema
from ...error_handlers import user_errors
class SignupView(RestfulViewMixin, MethodView):
@use_args(SignupSchema, locations=('json',))
def post(self, args):
user = User(**args)
try:
user.add()
except IntegrityError as err:
err.data = user_errors.USER_ERR_1001_REGISTERED_ACC
raise
return self.get_response(status=201)
class LoginView(RestfulViewMixin, MethodView):
@use_args(LoginSchema, locations=('json',))
def post(self, args):
user = User.authenticate(**args)
if not user:
abort(401)
key = user.login() # It will return key
return self.get_response({"key": key}, status=200)
class LogoutView(RestfulViewMixin, MethodView):
decorators = (login_required,)
def post(self):
user = current_user
user.logout()
return self.get_response(status=200)
class ResetPasswordView(RestfulViewMixin, MethodView):
decorators = (login_required,)
@use_args(ResetPasswordSchema, locations=('json',))
def put(self, args):
user = current_user
if not user.check_password(args['old_password']):
abort(401)
user.set_password(args['new_password'])
user.update()
return self.get_response(status=200)
# Url patterns: To register views in blueprint
users_bp.add_url_rule('/signup', view_func=SignupView.as_view('signup'))
users_bp.add_url_rule('/login', view_func=LoginView.as_view('login'))
users_bp.add_url_rule('/logout', view_func=LogoutView.as_view('logout'))
users_bp.add_url_rule('/reset_password', view_func=ResetPasswordView.as_view('reset-password'))
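# Illustrative request flow against these views (the blueprint's URL prefix is
# omitted; signup/login field names depend on the schemas and are assumed):
# POST /signup {"email": "...", "password": "..."} -> 201
# POST /login {"email": "...", "password": "..."} -> 200 {"key": "..."}
# POST /logout -> 200
# PUT /reset_password {"old_password": "...", "new_password": "..."} -> 200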
|
pythonistas-tw/academy
|
web-api/tonypythoneer/db-exercise/v2/app/views/users/auth.py
|
Python
|
gpl-2.0
| 2,194
|
from console.main.command_handler.commands.command import Command
class SimpleCommand(Command):
pass
|
lubokkanev/cloud-system
|
console/main/command_handler/commands/simple_command.py
|
Python
|
gpl-2.0
| 107
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2012-2013 Hector Martin "marcan" <hector@marcansoft.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 or version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import sys
import configargparse
RESDIR = os.path.join(os.path.dirname(sys.modules[__name__].__file__), 'res')
CFG = {
'fontdir': os.path.join(RESDIR, 'fonts'),
'gfxdir': os.path.join(RESDIR, 'gfx'),
'webdir': os.path.join(RESDIR, 'web'),
}
def init_argparser():
configargparse.init_argument_parser(
default_config_files=['/etc/blitzloop/cfg', '~/.blitzloop/cfg'])
parser = configargparse.get_argument_parser()
parser.add_argument(
'--fullscreen', default=False, action='store_true',
help='run blitzloop fullscreen')
def get_argparser():
return configargparse.get_argument_parser()
def get_opts():
opts, unknown = get_argparser().parse_known_args()
return opts
def get_res_path(t, fp):
return os.path.join(CFG[t], fp)
def get_resfont_path(fp):
return get_res_path('fontdir', fp)
def get_resgfx_path(fp):
return get_res_path('gfxdir', fp)
def get_webres_path(fp):
return get_res_path('webdir', fp)
def map_from(x, min, max):
return (x-min) / (max-min)
def map_to(x, min, max):
return min + x * (max - min)
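# e.g. map_from(5.0, 0.0, 10.0) == 0.5 and map_to(0.5, 0.0, 10.0) == 5.0;
# map_to is the inverse of map_from over the same [min, max] range.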
init_argparser()
|
yacoob/blitzloop
|
blitzloop/util.py
|
Python
|
gpl-2.0
| 1,930
|
#
# SPDX-License-Identifier: MIT
#
import glob
import os
import shutil
import tempfile
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
class oeGoToolchainSelfTest(OESelftestTestCase):
"""
Test cases for OE's Go toolchain
"""
@staticmethod
def get_sdk_environment(tmpdir_SDKQA):
pattern = os.path.join(tmpdir_SDKQA, "environment-setup-*")
# FIXME: this is a very naive implementation
return glob.glob(pattern)[0]
@staticmethod
def get_sdk_toolchain():
bb_vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAIN_OUTPUTNAME'],
"meta-go-toolchain")
sdk_deploy = bb_vars['SDK_DEPLOY']
toolchain_name = bb_vars['TOOLCHAIN_OUTPUTNAME']
return os.path.join(sdk_deploy, toolchain_name + ".sh")
@classmethod
def setUpClass(cls):
super(oeGoToolchainSelfTest, cls).setUpClass()
cls.tmpdir_SDKQA = tempfile.mkdtemp(prefix='SDKQA')
cls.go_path = os.path.join(cls.tmpdir_SDKQA, "go")
# Build the SDK and locate it in DEPLOYDIR
bitbake("meta-go-toolchain")
cls.sdk_path = oeGoToolchainSelfTest.get_sdk_toolchain()
# Install the SDK into the tmpdir
runCmd("sh %s -y -d \"%s\"" % (cls.sdk_path, cls.tmpdir_SDKQA))
cls.env_SDK = oeGoToolchainSelfTest.get_sdk_environment(cls.tmpdir_SDKQA)
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True)
super(oeGoToolchainSelfTest, cls).tearDownClass()
def run_sdk_go_command(self, gocmd):
cmd = "cd %s; " % self.tmpdir_SDKQA
cmd = cmd + ". %s; " % self.env_SDK
cmd = cmd + "export GOPATH=%s; " % self.go_path
cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd
return runCmd(cmd).status
def test_go_dep_build(self):
proj = "github.com/golang"
name = "dep"
ver = "v0.3.1"
archive = ".tar.gz"
url = "https://%s/%s/archive/%s%s" % (proj, name, ver, archive)
runCmd("cd %s; wget %s" % (self.tmpdir_SDKQA, url))
runCmd("cd %s; tar -xf %s" % (self.tmpdir_SDKQA, ver+archive))
runCmd("mkdir -p %s/src/%s" % (self.go_path, proj))
runCmd("mv %s/dep-0.3.1 %s/src/%s/%s"
% (self.tmpdir_SDKQA, self.go_path, proj, name))
retv = self.run_sdk_go_command('build %s/%s/cmd/dep'
% (proj, name))
self.assertEqual(retv, 0,
msg="Running go build failed for %s" % name)
|
schleichdi2/OPENNFR-6.3-CORE
|
opennfr-openembedded-core/meta/lib/oeqa/selftest/cases/gotoolchain.py
|
Python
|
gpl-2.0
| 2,594
|
#!/usr/bin/env python
# -*- coding: iso-8859-2 -*-
#
# Copyright (C) 2007 Adam Folmert <afolmert@gmail.com>
#
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
#
#
#
"""This is the module for models used in Mentor GUI"""
import release
__author__ = '%s <%s>' % \
( release.authors['afolmert'][0], release.authors['afolmert'][1])
__license__ = release.license
__version__ = release.version
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from cards import Card, Cards
from utils import isstring, log
from utils_qt import tr
class CardModel(QAbstractItemModel):
"""Model to be used for list and tree view."""
class InvalidIndexError(Exception): pass
class ModelNotActiveError(Exception): pass
def __init__(self, parent=None):
QAbstractListModel.__init__(self, parent)
self.cards = Cards()
def _checkIndex(self, index):
if index is None or not index.isValid() or index == QModelIndex():
raise CardModel.InvalidIndexError, "Invalid index given"
def _checkActive(self):
if not self.isActive():
raise CardModel.ModelNotActiveError, "Model is not active. Use open first."
def open(self, dbpath):
self.cards.open(str(dbpath))
# FIXME why these do not work??
self.reset()
# ^ self.emit(SIGNAL('modelReset()'))
def close(self):
self.emit(SIGNAL('modelAboutToBeReset()'))
self.cards.close()
self.reset()
def filepath(self):
"""Returns path to currently open database"""
if self.cards.isOpen():
return self.cards.db_path
else:
return None
def isActive(self):
return self.cards.isOpen()
def parent(self, index):
return QModelIndex()
def rowCount(self, parent=QModelIndex()):
if parent.isValid():
return 0
else:
if self.cards.isOpen():
return self.cards.getCardsCount()
else:
return 0
def columnCount(self, parent=QModelIndex()):
if parent.isValid():
return 0
else:
if self.cards.isOpen():
return 5
else:
return 0
def index(self, row, column, parent=QModelIndex()):
if row < 0 or column < 0 or not self.cards.isOpen():
return QModelIndex()
else:
# returns index with given card id
header = self.cards.getCardHeaders('', row, row + 1)
if len(header) == 1:
return self.createIndex(row, column, int(header[0][0]))
else:
return QModelIndex()
    # For the display role, column 0 shows "#id question"; the remaining
    # columns expose the other card fields.
def data(self, index, role=Qt.DisplayRole):
self._checkIndex(index)
if role not in (Qt.DisplayRole, Qt.UserRole):
return QVariant()
card = self.cards.getCard(index.internalId())
if role == Qt.UserRole:
return card
else:
if index.column() == 0:
return QVariant('#%d %s' % (card.id, str(card.question).strip()))
elif index.column() == 1:
return QVariant('%s' % str(card.answer).strip())
elif index.column() == 2:
return QVariant('%s' % str(card.question_hint).strip())
elif index.column() == 3:
return QVariant('%s' % str(card.answer_hint).strip())
elif index.column() == 4:
return QVariant('%s' % str(card.score))
else:
return QVariant()
def flags(self, index):
return QAbstractListModel.flags(self, index) | Qt.ItemIsEnabled | Qt.ItemIsSelectable
def headerData(self, section, orientation, role=Qt.DisplayRole):
if role == Qt.DisplayRole:
if orientation == Qt.Horizontal:
if section == 0:
return QVariant("Question")
elif section == 1:
return QVariant("Answer")
elif section == 2:
return QVariant(tr("Question hint"))
elif section == 3:
return QVariant(tr("Answer hint"))
elif section == 4:
return QVariant(tr("Score"))
else:
return QVariant()
else:
return QVariant(str(section))
return QVariant()
def getPreviousIndex(self, index):
"""Returns previous index before given or given if it's first."""
self._checkIndex(index)
if index.row() == 0:
return index
else:
return self.index(index.row() - 1, 0)
    # i.e. return the row just before the given one
def getNextIndex(self, index):
"""Returns next index after given or given if it's last."""
self._checkIndex(index)
if index.row() == self.rowCount() - 1:
return index
else:
return self.index(index.row() + 1, 0)
    # i.e. return the row just after the given one
    # TODO: what about inserting rows and moving rows up and down? Should that
    # take a storage position or a display position as parameter?
    # TODO: add fine-grained handlers such as rowsAboutToBeInserted; right now
    # the whole model is simply reset.
def addNewCard(self):
"""Adds a new empty card."""
self.emit(SIGNAL('modelAboutToBeReset()'))
        rowid = self.cards.addCard(Card())
        # the new card occupies the last row, hence count - 1
        result = self.createIndex(self.cards.getCardsCount() - 1, 0, rowid)
        # TODO is it ok to return the index here?
        self.reset()
        return result
def deleteCard(self, index):
self._checkIndex(index)
self.emit(SIGNAL('modelAboutToBeReset()'))
self.cards.deleteCard(index.internalId())
        # FIXME emitting modelReset() directly did not refresh the views
        self.reset()
    # TODO: how to update a card if the peg is somewhere else?
    # Maybe keep the blob as well; the items are then split.
def updateCard(self, index, question, answer):
self._checkIndex(index)
card = Card(index.internalId(), question, answer)
self.cards.updateCard(card)
# update data in the model
self.emit(SIGNAL('dataChanged(QModelIndex)'), index)
    # TODO: the model should not contain any algorithms - it should act only
    # as a proxy between the database and more advanced algorithms (e.g. a
    # database importer); these should share classes with the probe program.
    # TODO: progress bar for importing, and the possibility to cancel if it
    # is a long operation.
def importQAFile(self, file, clean=True):
"""Import cards from given question&answer file.
@param file can be file name or file like object
"""
self.emit(SIGNAL('modelAboutToBeReset()'))
self._checkActive()
if isstring(file):
file = open(file, 'rt')
if clean:
self.cards.deleteAllCards()
prefix = ''
last_prefix = ''
card = Card()
for line in file.readlines():
if line.upper().startswith('Q:') or line.upper().startswith('A:'):
last_prefix = prefix
prefix = line[:2].upper()
line = line[3:]
# if new card then recreate
if prefix == 'Q:' and prefix != last_prefix:
if not card.isEmpty():
self.cards.addCard(card, False)
card = Card()
if line.strip() != '':
if prefix == 'Q:':
card.question += line
            else:  # prefix == 'A:'
card.answer += line
# add last card
if not card.isEmpty():
self.cards.addCard(card)
# TODO do it in a real transaction way
# in case of error do a rollback
self.cards.commit()
self.reset()
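# Example of the Q&A file format that importQAFile consumes (illustrative,
# not shipped with the original source): consecutive 'Q:'/'A:' prefixed lines
# extend the current question/answer, and a 'Q:' following an 'A:' starts a
# new card:
#
#   Q: What is the capital of France?
#   A: Paris
#   Q: 2 + 2 = ?
#   A: 4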
# FIXME
# How should this be designed? Right now it is just a container (stack) for a
# bunch of cards which get shuffled.
class DrillModel(QAbstractItemModel):
"""Model for drilling cards"""
# scores
Good, Bad = range(2)
def __init__(self, parent=None):
QAbstractItemModel.__init__(self, parent)
self.cards = []
def parent(self, index=QModelIndex()):
return QModelIndex()
def rowCount(self, parent=QModelIndex()):
if parent.isValid():
return 0
else:
return len(self.cards)
def columnCount(self, parent=QModelIndex()):
if parent.isValid():
return 0
else:
return 1
def index(self, row, column, parent=QModelIndex()):
if parent.isValid():
return QModelIndex()
else:
            if 0 <= row < len(self.cards) and column == 0:
return self.createIndex(row, column, None)
else:
return QModelIndex()
def data(self, index, role=Qt.DisplayRole):
if role not in (Qt.DisplayRole,):
return QVariant()
else:
if index.row() < len(self.cards):
card = self.cards[index.row()]
return QVariant("%d %s" % (card.id, card.question))
else:
return QVariant()
def headerData(self, section, orientation, role=Qt.DisplayRole):
return QVariant(str(section))
# return QAbstractItemModel.headerData(self, section, orientation, role)
def flags(self, index):
return Qt.ItemIsEnabled | Qt.ItemIsSelectable
def addCard(self, card):
self.emit(SIGNAL('modelAboutToBeReset()'))
self.cards.append(card)
self.reset()
def clear(self):
self.emit(SIGNAL('modelAboutToBeReset()'))
        del self.cards[:]  # list.clear() does not exist in Python 2
self.reset()
def selectNextCard(self):
# take from the stack and put it on top
if len(self.cards) > 0:
self.emit(SIGNAL('modelAboutToBeReset()'))
result = self.cards[0]
self.cards = self.cards[1:]
self.cards.append(result)
self.reset()
return result
else:
return Card()
def removeCard(self, card):
        try:
            self.emit(SIGNAL('modelAboutToBeReset()'))
            self.cards.remove(card)
            self.reset()
        except ValueError:
            # the card was not in the drill stack
            pass
def scoreCard(self, card, score):
if score == DrillModel.Good:
log("Card: $card will be removed from drill.")
self.removeCard(card)
def shuffleCards(self):
from random import shuffle
self.emit(SIGNAL('modelAboutToBeReset()'))
shuffle(self.cards)
self.reset()
    def printCards(self):
        print "Printing cards..."
        for i, card in enumerate(self.cards):
            print "%d %s\n" % (i, str(card))
        print "Done."
        sys.stdout.flush()
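# Minimal usage sketch of DrillModel (illustrative; the Card constructor
# arguments are assumptions based on cards.py):
#
#   model = DrillModel()
#   model.addCard(Card(1, 'question', 'answer'))
#   model.shuffleCards()
#   card = model.selectNextCard()           # rotates the card to the back
#   model.scoreCard(card, DrillModel.Good)  # a Good score removes it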
def main():
pass
if __name__ == '__main__':
main()
|
afolmert/mentor
|
src/models.py
|
Python
|
gpl-2.0
| 12,011
|
from django.forms import ModelForm
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class UserLogin(ModelForm):
class Meta:
model = User
fields = ['username', 'password']
class UserRegister(UserCreationForm):
email = forms.EmailField(required=True)
first_name = forms.CharField(required=True)
last_name = forms.CharField(required=True)
class Meta:
model = User
fields = ['username']
def save(self, commit=True):
user = super(UserRegister, self).save(commit=False)
user.email = self.cleaned_data['email']
        user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
if commit:
user.save()
return user
class UserProfile(ModelForm):
class Meta:
model = User
fields = ['username', 'email', 'first_name', 'last_name']
def __init__(self, *args, **kwargs):
super(UserProfile, self).__init__(*args, **kwargs)
self.fields["username"].disabled = True
self.fields["email"].disabled = True
|
GAngelov5/Sportvendor
|
sportvendor/sportvendor/users/forms.py
|
Python
|
gpl-2.0
| 1,164
|
import os
import numpy as np
from scipy import stats
import matplotlib.pyplot as plt
from matplotlib import cm
import atmath
# Define the observable
srcDir = '../runPlasim/postprocessor/indices/'
# SRng = np.array([1260, 1360, 1380, 1400, 1415, 1425, 1430, 1433,
# 1263, 1265, 1270, 1280, 1300, 1330, 1360, 1435])
# restartStateRng = np.concatenate((['cold']*8, ['warm']*8), 0)
SRng = np.array([1263, 1265, 1270, 1280, 1300, 1330, 1360, 1435])
restartStateRng = ['warm']*8
#SRng = np.array([1263, 1265])
#restartStateRng = ['warm']*2
firstYear = 101
lastYear = 4200
yearsPerFile = 100
daysPerYear = 360
#indexChoice = ('globmst',)
#indexChoice = ('npolemst',)
#indexChoice = ('globdep',)
#indexChoice = ('eqdep',)
#indexChoice = ('MTG',)
#indexChoice = ('areabelowtf20nhemi',)
indexChoice = ('areabelowtfnhemi',)
# Case definition
spinupYears = 100 # Remove spinup period from time-series
spinup = spinupYears * daysPerYear
sampFreq = 1 # (days^{-1})
# Plot settings
fs_default = 'x-large'
fs_latex = 'xx-large'
fs_xlabel = fs_default
fs_ylabel = fs_default
fs_xticklabels = fs_default
fs_yticklabels = fs_default
fs_legend_title = fs_default
fs_legend_labels = fs_default
fs_cbar_label = fs_default
# figFormat = 'eps'
figFormat = 'png'
dpi = 300
varRng = np.empty((SRng.shape[0],))
skewRng = np.empty((SRng.shape[0],))
kurtRng = np.empty((SRng.shape[0],))
lagMax = 80
#lagMax = daysPerYear * 5
ccfRng = np.empty((SRng.shape[0], lagMax*2+1))
for k in np.arange(SRng.shape[0]):
S = SRng[k]
restartState = restartStateRng[k]
# Create directories
resDir = '%s_%s/' % (restartState, S)
dstDir = resDir
indicesPath = '%s/%s/' % (srcDir, resDir)
    os.system('mkdir -p stats %s %s/seasonal %s/anom' % (dstDir, dstDir, dstDir))
# Read datasets
obsName = '%s_%d_%05d_%05d_anom' % (restartState, S, firstYear, lastYear)
indexFile = '%s_%s_%d_%05d_%05d.txt' \
% (indexChoice[0], restartState, S, firstYear, lastYear)
print 'Reading index file %s...' % indexFile
observable = np.loadtxt('%s/%s' % (indicesPath, indexFile))
ntFull = observable.shape[0]
obsName += '_%s' % indexChoice[0]
# Get time steps array
time = np.arange(spinup, ntFull)
nt = ntFull - spinup
observable = observable[spinup:]
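    # Remove the mean seasonal cycle: average each calendar day over all
    # years (the climatology), then subtract it to obtain daily anomalies.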
seasonal = np.empty((daysPerYear,))
anom = np.empty((nt,))
for day in np.arange(daysPerYear):
seasonal[day] = observable[day::daysPerYear].mean()
anom[day::daysPerYear] = observable[day::daysPerYear] - seasonal[day]
varRng[k] = anom.var()
skewRng[k] = stats.skew(anom)
kurtRng[k] = stats.kurtosis(anom)
ccfRng[k] = atmath.ccf(anom, anom, lagMax=lagMax)
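    # atmath.ccf is applied to the series against itself, i.e. an
    # autocorrelation; assuming it returns 2*lagMax+1 lags centred on zero,
    # which matches the ccfRng allocation above.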
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(np.arange(1, daysPerYear+1), seasonal)
ax.set_xlabel(r'days', fontsize=fs_latex)
ax.set_ylabel(indexChoice[0], fontsize=fs_latex)
plt.setp(ax.get_xticklabels(), fontsize=fs_xticklabels)
plt.setp(ax.get_yticklabels(), fontsize=fs_yticklabels)
    plt.title('Seasonal cycle for case %s_%d\n$\\sigma$ = %.5f'
              % (restartState, S, seasonal.std()))
fig.savefig('%s/seasonal/seasonal_%s.%s' % (dstDir, obsName, figFormat),
bbox_inches='tight', dpi=dpi)
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(time[200*daysPerYear:203*daysPerYear], anom[200*daysPerYear:203*daysPerYear])
ax.set_xlabel(r'days', fontsize=fs_latex)
ax.set_ylabel(indexChoice[0], fontsize=fs_latex)
plt.setp(ax.get_xticklabels(), fontsize=fs_xticklabels)
plt.setp(ax.get_yticklabels(), fontsize=fs_yticklabels)
    plt.title('Anomalies for case %s_%d\n$\\sigma$ = %.5f'
              % (restartState, S, anom.std()))
fig.savefig('%s/anom/anom_%s.%s' % (dstDir, obsName, figFormat),
bbox_inches='tight', dpi=dpi)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(SRng, varRng)
fig.savefig('stats/variance_%s.%s' % (indexChoice[0], figFormat), bbox_inches='tight', dpi=dpi)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(SRng, skewRng)
fig.savefig('stats/skewness_%s.%s' % (indexChoice[0], figFormat), bbox_inches='tight', dpi=dpi)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(SRng, kurtRng)
fig.savefig('stats/kurtosis_%s.%s' % (indexChoice[0], figFormat), bbox_inches='tight', dpi=dpi)
fig = plt.figure()
ax = fig.add_subplot(111)
for k in np.arange(SRng.shape[0]/2):
S = SRng[k]
ax.plot(np.arange(-lagMax, lagMax+1), ccfRng[k], label=str(S), linestyle='-')
for k in np.arange(SRng.shape[0]/2, SRng.shape[0]):
S = SRng[k]
ax.plot(np.arange(-lagMax, lagMax+1), ccfRng[k], label=str(S), linestyle='--')
ax.legend(loc='upper right')
ax.set_xlim(0, lagMax)
ax.set_ylim(-0.05, 1.)
fig.savefig('stats/acf_%s.%s' % (indexChoice[0], figFormat), bbox_inches='tight', dpi=dpi)
|
atantet/transferPlasim
|
statistics/plotIndices.py
|
Python
|
gpl-2.0
| 4,841
|