repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
abo-abo/edx-platform | common/lib/xmodule/xmodule/modulestore/tests/test_split_modulestore.py | Python | agpl-3.0 | 55,767 | 0.003389 | '''
Created on Mar 25, 2013
@author: dmitchell
'''
import datetime
import subprocess
import unittest
import uuid
from importlib import import_module
from xblock.fields import Scope
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.exceptions import InsufficientSpecificationError, ItemNotFoundError, VersionConflictError, \
DuplicateItemError
from xmodule.modulestore.locator import CourseLocator, BlockUsageLocator, VersionTree, DefinitionLocator
from xmodule.modulestore.inheritance import InheritanceMixin
from xmodule.x_module import XModuleMixin
from pytz import UTC
from path import path
import re
import random
class SplitModuleTest(unittest.TestCase):
'''
The base set of tests manually populates a db w/ courses which have
versions. It creates unique collection names and removes them after all
tests finish.
'''
# Snippets of what would be in the django settings envs file
DOC_STORE_CONFIG = {
'host': 'localhost',
'db': 'test_xmodule',
'collection': 'modulestore{0}'.format(uuid.uuid4().hex),
}
modulestore_options = {
'default_class': 'xmodule.raw_module.RawDescriptor',
'fs_root': '',
'xblock_mixins': (InheritanceMixin, XModuleMixin)
}
MODULESTORE = {
'ENGINE': 'xmodule.modulestore.split_mongo.SplitMongoModuleStore',
'DOC_STORE_CONFIG': DOC_STORE_CONFIG,
'OPTIONS': modulestore_options
}
    # Avoid creating a django dependency; this duplicates common.py in envs
match = re.search(r'(.*?/common)(?:$|/)', path(__file__))
COMMON_ROOT = match.group(1)
modulestore = None
# These version_guids correspond to values hard-coded in fixture files
# used for these tests. The files live in mitx/fixtures/splitmongo_json/*
GUID_D0 = "1d00000000000000dddd0000" # v12345d
GUID_D1 = "1d00000000000000dddd1111" # v12345d1
GUID_D2 = "1d00000000000000dddd2222" # v23456d
GUID_D3 = "1d00000000000000dddd3333" # v12345d0
GUID_D4 = "1d00000000000000dddd4444" # v23456d0
GUID_D5 = "1d00000000000000dddd5555" # v345679d
GUID_P = "1d00000000000000eeee0000" # v23456p
@staticmethod
def bootstrapDB():
'''
Loads the initial data into the db ensuring the collection name is
unique.
'''
collection_prefix = SplitModuleTest.MODULESTORE['DOC_STORE_CONFIG']['collection'] + '.'
dbname = SplitModuleTest.MODULESTORE['DOC_STORE_CONFIG']['db']
processes = [
subprocess.Popen([
'mongoimport', '-d', dbname, '-c',
collection_prefix + collection, '--jsonArray',
'--file',
SplitModuleTest.COMMON_ROOT + '/test/data/splitmongo_json/' + collection + '.json'
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
for collection in ('active_versions', 'structures', 'definitions')]
for p in processes:
        stdout, stderr = p.communicate()
if p.returncode != 0:
print "Couldn't run mongoimport:"
print stdout
print stderr
raise Exception("DB did not init correctly")
@classmethod
def tearDownClass(cls):
collection_prefix = SplitModuleTest.MODULESTORE['DOC_STORE_CONFIG']['collection'] + '.'
        if SplitModuleTest.modulestore:
for collection in ('active_versions', 'structures', 'definitions'):
modulestore().db.drop_collection(collection_prefix + collection)
# drop the modulestore to force re init
SplitModuleTest.modulestore = None
def findByIdInResult(self, collection, _id):
"""
        Result is a collection of descriptors. Find the one whose block id
        matches the _id; return None if no match is found.
"""
for element in collection:
if element.location.usage_id == _id:
return element
class SplitModuleCourseTests(SplitModuleTest):
'''
Course CRUD operation tests
'''
def test_get_courses(self):
courses = modulestore().get_courses(branch='draft')
# should have gotten 3 draft courses
self.assertEqual(len(courses), 3, "Wrong number of courses")
# check metadata -- NOTE no promised order
course = self.findByIdInResult(courses, "head12345")
self.assertEqual(course.location.course_id, "GreekHero")
self.assertEqual(
str(course.location.version_guid), self.GUID_D0,
"course version mismatch"
)
self.assertEqual(course.category, 'course', 'wrong category')
self.assertEqual(len(course.tabs), 6, "wrong number of tabs")
self.assertEqual(
course.display_name, "The Ancient Greek Hero",
"wrong display name"
)
self.assertEqual(
course.advertised_start, "Fall 2013",
"advertised_start"
)
self.assertEqual(
len(course.children), 3,
"children")
self.assertEqual(str(course.definition_locator.definition_id), "ad00000000000000dddd0000")
# check dates and graders--forces loading of descriptor
self.assertEqual(course.edited_by, "testassist@edx.org")
self.assertEqual(str(course.previous_version), self.GUID_D1)
self.assertDictEqual(course.grade_cutoffs, {"Pass": 0.45})
def test_branch_requests(self):
# query w/ branch qualifier (both draft and published)
def _verify_published_course(courses_published):
""" Helper function for verifying published course. """
self.assertEqual(len(courses_published), 1, len(courses_published))
course = self.findByIdInResult(courses_published, "head23456")
self.assertIsNotNone(course, "published courses")
self.assertEqual(course.location.course_id, "wonderful")
self.assertEqual(str(course.location.version_guid), self.GUID_P,
course.location.version_guid)
self.assertEqual(course.category, 'course', 'wrong category')
self.assertEqual(len(course.tabs), 4, "wrong number of tabs")
self.assertEqual(course.display_name, "The most wonderful course",
course.display_name)
self.assertIsNone(course.advertised_start)
self.assertEqual(len(course.children), 0,
"children")
_verify_published_course(modulestore().get_courses(branch='published'))
# default for branch is 'published'.
_verify_published_course(modulestore().get_courses())
def test_search_qualifiers(self):
# query w/ search criteria
courses = modulestore().get_courses(branch='draft', qualifiers={'org': 'testx'})
self.assertEqual(len(courses), 2)
self.assertIsNotNone(self.findByIdInResult(courses, "head12345"))
self.assertIsNotNone(self.findByIdInResult(courses, "head23456"))
courses = modulestore().get_courses(
branch='draft',
qualifiers={'edited_on': {"$lt": datetime.datetime(2013, 3, 28, 15)}})
self.assertEqual(len(courses), 2)
courses = modulestore().get_courses(
branch='draft',
qualifiers={'org': 'testx', "prettyid": "test_course"})
self.assertEqual(len(courses), 1)
self.assertIsNotNone(self.findByIdInResult(courses, "head12345"))
def test_get_course(self):
'''
Test the various calling forms for get_course
'''
locator = CourseLocator(version_guid=self.GUID_D1)
course = modulestore().get_course(locator)
self.assertIsNone(course.location.course_id)
self.assertEqual(str(course.location.version_guid), self.GUID_D1)
self.assertEqual(course.category, 'course')
self.assertEqual(len(course.tabs), 6)
self.assertEqual(course.display_name, "The Ancient Greek Hero")
self.assertEqual(course.graceperiod, datetime.timedelta(hours=2))
self.assertIsNone(course.advertised_start)
self.assertEqual(l |
EduardoMolina/SU2 | meson_scripts/init.py | Python | lgpl-2.1 | 7,414 | 0.020637 | #!/usr/bin/env python3
## \file init.py
#  \brief Initializes necessary dependencies for SU2, either using git or by
#         fetching zip files.
# \author T. Albring
# \version 7.0.3 "Blackbird"
#
# SU2 Project Website: https://su2code.github.io
#
# The SU2 Project is maintained by the SU2 Foundation
# (http://su2foundation.org)
#
# Copyright 2012-2020, SU2 Contributors (cf. AUTHORS.md)
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
import sys, os, subprocess, shutil, urllib.request, zipfile, time
def remove_file(path, retries=3, sleep=0.1):
for i in range(retries):
try:
os.remove(path)
except OSError:
time.sleep(sleep)
else:
break
def init_submodules(method = 'auto'):
cur_dir = sys.path[0]
    # This module information is used if the project was not cloned using git
# The sha tag must be maintained manually to point to the correct commit
sha_version_codi = '501dcf0305df147481630f20ce37c2e624fb351f'
github_repo_codi = 'https://github.com/scicompkl/CoDiPack'
sha_version_medi = 'edde14f9ac4026b72b1e130f61c0a78e8652afa5'
github_repo_medi = 'https://github.com/SciCompKL/MeDiPack'
sha_version_meson = '0435691e83fb7172e2a9635d2eb32d5521089916'
github_repo_meson = 'https://github.com/mesonbuild/meson'
sha_version_ninja = '2d15b04e411229cb902332957281622119025e77'
github_repo_ninja = 'https://github.com/ninja-build/ninja'
medi_name = 'MeDiPack'
codi_name = 'CoDiPack'
meson_name = 'meson'
ninja_name= 'ninja'
base_path = cur_dir + os.path.sep + 'externals' + os.path.sep
alt_name_medi = base_path + 'medi'
alt_name_codi = base_path + 'codi'
alt_name_meson = base_path + 'meson'
alt_name_ninja = base_path + 'ninja'
if method == 'auto':
is_git = is_git_directory(cur_dir)
elif method == 'git':
is_git = True
elif method == 'url':
is_git = False
else:
print('Invalid method')
sys.exit(1)
# If directory was cloned using git, use submodule feature
# to check and initialize submodules if necessary
if is_git:
submodule_status(alt_name_codi, sha_version_codi)
submodule_status(alt_name_medi, sha_version_medi)
submodule_status(alt_name_meson, sha_version_meson)
submodule_status(alt_name_ninja, sha_version_ninja)
# Otherwise download the zip file from git
else:
download_module(codi_name, alt_name_codi, github_repo_codi, sha_version_codi)
download_module(medi_name, alt_name_medi, github_repo_medi, sha_version_medi)
download_module(meson_name, alt_name_meson, github_repo_meson, sha_version_meson)
download_module(ninja_name, alt_name_ninja, github_repo_ninja, sha_version_ninja)
def is_git_directory(path = '.'):
try:
p = subprocess.call(["git", "branch"], stderr=subprocess.STDOUT, stdout=open(os.devnull, 'w'), cwd=path)
except FileNotFoundError:
print("git command not found. Using fall-back method to init submodules")
return False
except subprocess.CalledProcessError:
print("Directory was not cloned using git. Using fall-back method to init submodules")
return False
return p == 0
def submodule_status(path, sha_commit):
if not os.path.exists(path + os.path.sep + sha_commit):
# Check the status of the submodule
status = subprocess.run(['git', 'submodule','status', path], stdout=subprocess.PIPE, check = True, cwd = sys.path[0]).stdout.decode('utf-8')
# The first character of the output indicates the status of the submodule
# '+' : The submodule does not match the SHA-1 currently in the index of the repository
# '-' : The submodule is not initialized
    # ' '  : Correct version of submodule is initialized
status_indicator = status[0][0]
if status_indicator == '+':
# Write a warning that the sha tags do not match
sys.stderr.write('WARNING: the currently checked out submodule commit in '
+ path + ' does not match the SHA-1 found in the index.\n')
      sys.stderr.write('Use \'git submodule update --init '+ path + '\' to reset the module if necessary.\n')
elif status_indicator == '-':
# Initialize the submodule if necessary
print('Initialize submodule ' + path + ' using git ... ')
subprocess.run(['git', 'submodule', 'update', '--init', path], check = True, cwd = sys.path[0])
# Check that the SHA tag stored in this file matches the one stored in the git index
cur_sha_commit = status[1:].split(' ')[0]
if (cur_sha_commit != sha_commit):
print('SHA-1 tag stored in index does not match SHA tag stored in this script.')
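# Illustrative only (not part of the original script): 'git submodule status'
# prints one line per submodule, with the status indicator parsed above as
# the very first character, followed by the SHA-1 and the submodule path,
# e.g. (hypothetical output)
#   -501dcf0305df147481630f20ce37c2e624fb351f externals/codi
#   +edde14f9ac4026b72b1e130f61c0a78e8652afa5 externals/medi (heads/master)
#    0435691e83fb7172e2a9635d2eb32d5521089916 externals/meson (0.51.0)
# '-' = not initialized, '+' = checked-out commit differs from the index,
# ' ' = the correct version is checked out.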
def download_module(name, alt_name, git_repo, commit_sha):
# ZipFile does not preserve file permissions.
# This is a workaround for that problem:
# https://stackoverflow.com/questions/39296101/python-zipfile-removes-execute-permissions-from-binaries
class MyZipFile(zipfile.ZipFile):
def _extract_member(self, member, targetpath, pwd):
if not isinstance(member, zipfile.ZipInfo):
member = self.getinfo(member)
targetpath = super()._extract_member(member, targetpath, pwd)
attr = member.external_attr >> 16
if attr != 0:
os.chmod(targetpath, attr)
return targetpath
if not os.path.exists(alt_name + os.path.sep + commit_sha):
if os.path.exists(alt_name) and os.listdir(alt_name):
print('Directory ' + alt_name + ' is not empty')
print('Maybe submodules are already cloned with git?')
sys.exit(1)
else:
print('Downloading ' + name + ' \'' + commit_sha + '\'')
filename = commit_sha + '.zip'
url = git_repo + '/archive/' + filename
if not os.path.exists(sys.path[0] + os.path.sep + filename):
try:
          urllib.request.urlretrieve(url, commit_sha + '.zip')
except RuntimeError as e:
print(e)
print('Download of module ' + name + ' failed.')
print('Get archive at ' + url)
print('and place it in the source code root folder')
print('Run meson.py again')
sys.exit()
# Unzip file
zipf = MyZipFile(sys.path[0] + os.path.sep + filename)
zipf.extractall(sys.path[0] + os.path.sep + 'externals')
# Remove directory if exists
if os.path.exists(alt_name):
os.rmdir(alt_name)
os.rename(sys.path[0] + os.path.sep + 'externals' + os.path.sep + name + '-' + commit_sha, alt_name)
# Delete zip file
remove_file(sys.path[0] + os.path.sep + filename)
# Create identifier
f = open(alt_name + os.path.sep + commit_sha, 'w')
f.close()
if __name__ == '__main__':
if sys.version_info[0] < 3:
raise Exception("Script must be run using Python 3")
# Set up the build environment, i.e. clone or download all submodules
init_submodules(sys.argv[1])
sys.exit(0)
|
FXIhub/hummingbird | examples/flash/holo-2017/conf_tof.py | Python | bsd-2-clause | 6,533 | 0.013929 | # Import analysis/plotting modules
import analysis.event
import analysis.hitfinding
import analysis.pixel_detector
import analysis.sizing
import plotting.image
import plotting.line
import plotting.correlation
import plotting.histogram
from backend.record import add_record
import numpy as np
import time
import ipc
import utils.reader
import re
import os
import utils.cxiwriter
scanInjector = False
scanXmin = -250
scanXmax = 250
scanXbins = 500
scanZmin = 88
scanZmax = 100
scanZbins = 220/2
scanYmin = 94
scanYmax = 97
scanYbins = 20
outputEveryImage = True
do_sizing = False
do_write = False
do_showhybrid = False
move_half = True
#Detector params
detector_distance = 220e-03
gap_top=0.8e-03
gap_bottom=3.0e-03
gap_total=gap_top+gap_bottom
ny=1024
nx=1024
pixel_size=7.5e-05
center_shift=int((gap_top-gap_bottom)/pixel_size)
# Quick config parameters
hitScoreThreshold = 13000
aduThreshold = 200
strong_hit_threshold = 60000
#experiment_folder = "/data/beamline/current"
experiment_folder = "/asap3/flash/gpfs/bl1/2017/data/11001733"
# Specify the facility
state = {}
state['Facility'] = 'FLASH'
# Specify folders with frms6 and darkcal data
state['FLASH/DataGlob'] = os.path.join(experiment_folder, "raw/pnccd/block-02/holography_*_*_*_*.frms6")
state['FLASH/DataRe'] = os.path.join(experiment_folder, "raw/pnccd/block-02/holography_.+_.+_([0-9]{4})_.+.frms6")
#state['FLASH/DataGlob'] = os.path.join(experiment_folder, "raw/pnccd/block-02/holography_*_*_*_*.frms6")
state['FLASH/CalibGlob'] = os.path.join(experiment_folder, "processed/calib/block-02/*.darkcal.h5")
state['FLASH/DAQFolder'] = "/asap3/flash/gpfs/bl1/2017/data/11001733/processed/daq"
state['FLASH/MotorFolder'] = '/home/tekeberg/Beamtimes/Holography2017/motor_positions/motor_data.data'
state['FLASH/DAQBaseDir'] = os.path.join(experiment_folder, "raw/hdf/block-02")
state['do_offline'] = True
state['online_start_from_run'] = False
#state['FLASH/ProcessingRate'] = 1
#Mask
Mask = utils.reader.MaskReader("/asap3/flash/gpfs/bl1/2017/data/11001733/processed/mask_v1.h5", "/data")
mask = Mask.boolean_mask
#Mask out center
mask_center=np.ones((ny, nx), dtype=np.bool)
radius=30
#radius=70
cx=0
cy=0
xx,yy=np.meshgrid(np.arange(nx), np.arange(ny))
rr=(xx-nx/2)**2+(yy-ny/2)**2 >= (radius**2)
mask_center &= rr
mask_center &= mask
# Sizing parameters
# ------
binning = 4
centerParams = {'x0' : (512 - (nx-1)/2.)/binning,
'y0' : (512 + center_shift -(ny-1)/2.)/binning,
'maxshift' : int(np.ceil(10./binning)),
'threshold': 1,
'blur' : 4}
modelParams = {'wavelength': 5.3, #in nm
'pixelsize': 75*binning, #um
'distance': 220., #mm
'material': 'sucrose'}
sizingParams = {'d0':20., # in nm
'i0':1., # in mJ/um2
'brute_evals':10}
# Physical constants
h = 6.62606957e-34 #[Js]
c = 299792458 #[m/s]
hc = h*c #[Jm]
eV_to_J = 1.602e-19 #[J/eV]
#res = modelParams["distance"] * 1E-3* modelParams["wavelength"] * 1E-9 / ( pixelsize_native * nx_front )
#expected_diameter = 150
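# Illustrative numbers (an addition, not from the original script): plugging
# the values above into the commented-out resolution formula,
#   res = 220e-3 * 5.3e-9 / (75e-6 * 1024) ~= 1.5e-8 m,
# i.e. a full-period resolution of roughly 15 nm at the detector edge, well
# below the 40-90 nm diameter window accepted further down.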
# Thresholds for good sizing fits
fit_error_threshold = 2.6E-3  # 4.0e-3
photon_error_threshold = 3000
diameter_min = 40 #[nm]
diameter_max = 90 #[nm]
def calculate_epoch_times(evt, time_sec, time_usec):
add_record(evt['ID'], 'ID', 'time', time_sec.data + 1.e-6*time_usec.data)
#add_record(evt['ID'], 'ID', 'timeAgo', time.time() - (time_sec.data + 1.e-6*time_usec.data))
# Calculating timeAgo with 606 second offset due to miscalibration of pnCCD server clock
#add_record(evt['ID'], 'ID', 'timeAgo', -606. + time.time() - (time_sec.data + 1.e-6*time_usec.data))
add_record(evt['ID'], 'ID', 'timeAgo', 0. + time.time() - (time_sec.data + 1.e-6*time_usec.data))
def beginning_of_run():
if do_write:
global W
W = utils.cxiwriter.CXIWriter("/asap3/flash/gpfs/bl1/2017/data/11001733/processed/tof_88_91.h5", chunksize=10)
# This function is called for every single event
# following the given recipe of analysis
def onEvent(evt):
# Processing rate [Hz]
analysis.event.printProcessingRate()
# try:
# has_tof = True
# evt["DAQ"]["TOF"]
# print "We have TOF data!"
# except RuntimeError:
# has_tof = False
# #print "No TOF"
has_tof = False
detector_type = "photonPixelDetectors"
detector_key = "pnCCD"
if move_half:
detector = evt[detector_type][detector_key]
detector = analysis.pixel_detector.moveHalf(evt, detector, horizontal=int(gap_total/pixel_size), outkey='data_half-moved')
mask_center_s = analysis.pixel_detector.moveHalf(evt, add_record(evt["analysis"], "analysis", "mask", mask_center), horizontal=int(gap_total/pixel_size), outkey='mask_half-moved').data
detector_type = "analysis"
detector_key = "data_half-moved"
# Do basic hitfinding using lit pixels
analysis.hitfinding.countLitPixels(evt, evt[detector_type][detector_key],
aduThreshold=aduThreshold,
hitscoreThreshold=hitScoreThreshold, mask=mask_center_s)
hit = bool(evt["analysis"]["litpixel: isHit"].data)
strong_hit=evt["analysis"]["litpixel: hitscore"].data>strong_hit_threshold
plotting.line.plotHistory(add_record(evt["analysis"],"analysis","total ADUs", evt[detector_type][detector_key].data.sum()),
label='Total ADU', hline=hitScoreThreshold, group='Metric')
plotting.line.plotHistory(evt["analysis"]["litpixel: hitscore"],
label='Nr. of lit pixels', hline=hitScoreThreshold, group='Metric')
analysis.hitfinding.hitrate(evt, hit, history=50)
if hit and has_tof:
print evt["DAQ"]["TOF"].data
print evt["motorPositions"]["InjectorZ"].data
plotting.line.plotTrace(evt["DAQ"]["TOF"], label='TOF', history=100, tracelen=20000, name="TOF", group="TOF")
plotting.line.plotHistory(evt["motorPositions"]["InjectorZ"], label="InjectorZ (with TOF)", group="TOF")
plotting.image.plotImage(evt[detector_type][detector_key], name="pnCCD (Hits with TOF)", group='TOF', mask=mask_center_s)
D = {}
D['TOF'] = evt['DAQ']['TOF'].data
D['pnCCD'] = evt[detector_type][detector_key].data
D['InjectorZ'] = evt["motorPositions"]["InjectorZ"].data
if do_write:
W.write_slice(D)
def end_of_run():
if do_write:
W.close()
|
zhlinh/leetcode | 0171.Excel Sheet Column Number/test.py | Python | apache-2.0 | 153 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from solution import Solution
s = "AA"
# s = 'Z'
sol = Solution()
res = sol.titleToNumber(s)
print(res)
|
EttusResearch/gnuradio | gnuradio-runtime/examples/mp-sched/synthetic.py | Python | gpl-3.0 | 4,544 | 0.004401 | #!/usr/bin/env python
#
# Copyright 2008,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from gnuradio import gr, eng_notation
from gnuradio import blocks, filter
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import os
class pipeline(gr.hier_block2):
def __init__(self, nstages, ntaps=256):
"""
Create a pipeline of nstages of filter.fir_filter_fff's connected in serial
terminating in a blocks.null_sink.
"""
gr.hier_block2.__init__(self, "pipeline",
gr.io_signature(1, 1, gr.sizeof_float),
gr.io_signature(0, 0, 0))
taps = ntaps*[1.0/ntaps]
upstream = self
for i in range(nstages):
op = filter.fir_filter_fff(1, taps)
self.connect(upstream, op)
upstream = op
self.connect(upstream, blocks.null_sink(gr.sizeof_float))
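# For instance (illustrative), pipeline(3) wires up
#   self -> fir_filter_fff -> fir_filter_fff -> fir_filter_fff -> null_sink
# i.e. nstages serial FIR filters terminated by a null sink.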
class top(gr.top_block):
def __init__(self):
gr.top_block.__init__(self)
default_nsamples = 10e6
parser=OptionParser(option_class=eng_option)
parser.add_option("-p", "--npipelines", type="intx", default=1,
metavar="NPIPES", help="the number of pipelines to create (default=%default)")
parser.add_option("-s", "--nstages", type="intx", default=1,
metavar="NSTAGES", help="the number of stages in each pipeline (default=%default)")
parser.add_option("-N", "--nsamples", type="eng_float", default=default_nsamples,
help=("the number of samples to run through the graph (default=%s)" %
(eng_notation.num_to_str(default_nsamples))))
parser.add_option("-m", "--machine-readable", action="store_true", default=False,
help="enable machine readable output")
(options, args) = parser.parse_args()
if len(args) != 0:
parser.print_help()
raise SystemExit, 1
self.npipes = options.npipelines
self.nstages = options.nstages
self.nsamples = options.nsamples
self.machine_readable = options.machine_readable
ntaps = 256
# Something vaguely like floating point ops
self.flop = 2 * ntaps * options.npipelines * options.nstages * options.nsamples
src = blocks.null_source(gr.sizeof_float)
head = blocks.head(gr.sizeof_float, int(options.nsamples))
self.connect(src, head)
for n in range(options.npipelines):
            self.connect(head, pipeline(options.nstages, ntaps))
def time_it(tb):
start = os.times()
tb.run()
stop = os.times()
delta = map((lambda a, b: a-b), stop, start)
user, sys, childrens_user, childrens_sys, real = delta
total_user = user + childrens_user
total_sys = sys + childrens_sys
if tb.machine_readable:
print "%3d %3d %.3e %7.3f %7.3f %7.3f %7.3f %.6e %.3e" % (
            tb.npipes, tb.nstages, tb.nsamples, real, total_user, total_sys, (total_user+total_sys)/real, tb.flop, tb.flop/real)
else:
print "npipes %7d" % (tb.npipes,)
print "nstages %7d" % (tb.nstages,)
print "nsamples %s" % (eng_notation.num_to_str(tb.nsamples),)
print "real %7.3f" % (real,)
print "user %7.3f" % (total_user,)
print "sys %7.3f" % (total_sys,)
print "(user+sys)/real %7.3f" % ((total_user + total_sys)/real,)
print "pseudo_flop %s" % (eng_notation.num_to_str(tb.flop),)
print "pseudo_flop/real %s" % (eng_notation.num_to_str(tb.flop/real),)
if __name__ == "__main__":
try:
tb = top()
time_it(tb)
except KeyboardInterrupt:
raise SystemExit, 128
|
slipstream/SlipStreamClient | client/src/setup.py | Python | apache-2.0 | 2,355 | 0.000425 | #!/usr/bin/env python
import os
from distutils.core import setup
def _fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join) in a
platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return _fullsplit(head, [tail] + result)
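# Example (illustrative): _fullsplit('main/python/slipstream')
# -> ['main', 'python', 'slipstream']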
def get_packages(basepkg_name, root_dir=''):
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages = []
root_dir = os.path.join(os.path.dirname(__file__), root_dir)
    cwd = os.getcwd()
if root_dir != '':
os.chdir(root_dir)
for dirpath, dirnames, filenames in os.walk(basepkg_name):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'):
del dirnames[i]
if '__init__.py' in filenames:
packages.append('.'.join(_fullsplit(dirpath)))
os.chdir(cwd)
return packages
NAME = 'slipstream-client'
VERSION = '1.0-0'
DESCRIPTION = 'SlipStream client'
LONG_DESCRIPTION = 'SlipStream client: API and CLI'
AUTHOR = 'SixSq Sarl, (sixsq.com)'
AUTHOR_EMAIL = 'info@sixsq.com'
LICENSE = 'Apache License, Version 2.0'
PLATFORMS = 'Any'
URL = 'http://sixsq.com'
# Cheese shop (PyPI)
CLASSIFIERS = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: Unix",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules"
]
root_dir = 'main/python'
basepkg_name = 'slipstream'
packages = get_packages(basepkg_name, root_dir)
setup(
name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license=LICENSE,
platforms=PLATFORMS,
url=URL,
classifiers=CLASSIFIERS,
packages=packages,
package_dir={'slipstream': 'main/python/slipstream'},
requires=['httplib2'],
namespace_packages=['slipstream']
)
|
MERegistro/meregistro | meregistro/apps/registro/forms/DependenciaFuncionalFormFilters.py | Python | bsd-3-clause | 1,595 | 0.004397 | # -*- coding: UTF-8 -*-
from django import forms
from apps.registro.models import DependenciaFuncional, Jurisdiccion, TipoGestion, TipoDependenciaFuncional, TipoEducacion
class DependenciaFuncionalFormFilters(forms.Form):
jurisdiccion = forms.ModelChoiceField(queryset=Jurisdiccion.objects.order_by('nombre'), label='Jurisdicción', required=False)
tipo_dependencia_funcional = forms.ModelChoiceField(queryset=TipoDependenciaFuncional.objects.order_by('nombre'), label='Tipo de dependencia', required=False)
    tipo_gestion = forms.ModelChoiceField(queryset=TipoGestion.objects.order_by('nombre'), label='Tipo de gestión', required=False)
nombre = forms.CharField(max_length=50, label='Nombre', required=False)
def buildQuery(self, q=None):
"""
        Create or refine a search query.
"""
if q is None:
q = DependenciaFuncional.objects.all()
if self.is_valid():
def filter_by(field):
return self.cleaned_data.has_key(field) and self.cleaned_data[field] != '' and self.cleaned_data[field] is not None
            if filter_by('nombre'):
q = q.filter(nombre__icontains=self.cleaned_data['nombre'])
if filter_by('jurisdiccion'):
q = q.filter(jurisdiccion=self.cleaned_data['jurisdiccion'])
if filter_by('tipo_gestion'):
q = q.filter(tipo_gestion=self.cleaned_data['tipo_gestion'])
if filter_by('tipo_dependencia_funcional'):
q = q.filter(tipo_dependencia_funcional=self.cleaned_data['tipo_dependencia_funcional'])
return q
|
tensor-tang/Paddle | python/paddle/fluid/contrib/tests/test_distributed_reader.py | Python | apache-2.0 | 1,305 | 0 | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "Licen | se");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
import os
def data_generator():
data = [0, 1, 2, 3]
for val in data:
yield val
class TestDistributedReader(unittest.TestCase):
def test_distributed_reader(self):
trainer_num = 4
os.environ['PADDLE_TRAINER_ID'] = str(1)
os.environ['PADDLE_TRAINERS_NUM'] = str(trainer_num)
reader = fluid.contrib.reader.distributed_batch_reader(data_generator)
data = next(reader())
assert data == 1
os.unsetenv('PADDLE_TRAINER_ID')
os.unsetenv('PADDLE_TRAINERS_NUM')
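# Note (illustrative): with PADDLE_TRAINERS_NUM=4 and PADDLE_TRAINER_ID=1,
# the distributed reader is expected to hand trainer 1 the element at
# index 1 of data_generator(), which is what the assertion above checks.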
if __name__ == '__main__':
unittest.main()
|
zstackio/zstack-woodpecker | zstackwoodpecker/test/test_engine.py | Python | apache-2.0 | 351 | 0.014245 | '''
@author: Frank
'''
import unittest
from zstackwoodpecker.engine import engine
class Test(unittest.TestCase):
def testName(self):
logfd = open('/tmp/log', 'w')
engine.execute_case('test/testcase2.py', logfd)
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testName']
unittest.main() |
pydotorg/pypi | tools/email_renamed_users.py | Python | bsd-3-clause | 3,025 | 0.000661 | import smtplib
import pickle
import sys
import os
from email.mime.text import MIMEText
# Workaround current bug in docutils:
# http://permalink.gmane.org/gmane.text.docutils.devel/6324
import docutils.utils
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path = [root] + sys.path
import config
import store
config = config.Config("config.ini")
store = store.Store(config)
EMAIL_PLURAL = """
Hello there!
PyPI has begun to enforce restrictions on what a valid Python package name
contains.
These rules are:
* Must contain ONLY ASCII letters, digits, underscores, hyphens, and periods
* Must begin and end with an ASCII letter or digit
You are listed as an owner or maintainer on %(old)s.
Due to the new rules these packages will be renamed to %(new)s.
These new names represent what someone using pip or easy_install would already
have had to use in order to install your packages.
I am sorry for any inconvenience this may have caused you.
"""
EMAIL_SINGLE = """
Hello there!
PyPI has begun to enforce restrictions on what a valid Python package name
contains.
These rules are:
* Must contain ONLY ASCII letters, digits, underscores, hyphens, and periods
* Must begin and end with an ASCII letter or digit
You are listed as an owner or maintainer on "%(old)s".
Due to the new rules this package will be renamed to "%(new)s".
This new name represents what someone using pip or easy_install would
already have had to use in order to install your package.
I am sorry for any inconvenience this may have caused you.
"""
with open("renamed.pkl") as pkl:
renamed = pickle.load(pkl)
# Build up a list of all users to email
users = {}
for old, new in renamed:
for role in store.get_package_roles(new):
user_packages = users.setdefault(role["user_name"], [])
user_packages.append((old, new))
sent = []
# Email each user
server = smtplib.SMTP(config.mailgun_hostname)
if config.smtp_starttls:
server.starttls()
if config.smtp_auth:
server.login(config.smtp_login, config.smtp_password)
for username, packages in users.iteritems():
packages = sorted(set(packages))
user = store.get_user(username)
if not user["email"]:
continue
if len(packages) > 1:
msg = MIMEText(EMAIL_PLURAL % {
"old": ", ".join(['"%s"' % x[0] for x in packages]),
"new": ", ".join(['"%s"' % x[1] for x in packages]),
})
elif packages:
msg = MIMEText(EMAIL_SINGLE % {
"old": packages[0][0],
"new": packages[0][1],
})
msg["Subject"] = "Important notice about your PyPI packages"
msg["From"] = "donald@python.org"
msg["To"] = user["email"]
server.sendmail("donald@python.org", [user["email"]], msg.as_string())
sent.append(("donald@python.org", [user["email"]], msg.as_string()))
server.quit()
with open("sent.pkl", "w") as pkl:
pickle.dump(sent, pkl)
|
doraemonext/wechat-platform | wechat_platform/system/setting/models.py | Python | bsd-2-clause | 2,006 | 0.00168 | # -*- coding: utf-8 -*-
import logging
from django.db import models
from django.core.exceptions import ObjectDoesNotExist
logger_setting = logging.getLogger(__name__)
class SettingManager(models.Manager):
def add(self, name, value, force=False):
"""
        Add a new setting entry.
        :param name: setting name
        :param value: setting value
        :param force: if force=True and the setting already exists, forcibly update it;
                      if force=False and the setting already exists, do nothing
        :return: Setting instance
"""
try:
            setting = super(SettingManager, self).get_queryset().get(name=name)
except ObjectDoesNotExist:
setting = super(SettingManager, self).create(name=name, value=value)
logger_setting.info('Settings created [Detail] %s' % setting.__dict__)
return setting
if force:
setting.value = value
            setting.save()
logger_setting.info('Settings saved [Detail] %s' % setting.__dict__)
return setting
def get(self, name):
"""
        Get the value of the setting with the given name.
        :param name: setting name
        :return: setting value
"""
return super(SettingManager, self).get_queryset().get(name=name).value
def get_all(self):
"""
        Get all settings and return them as a dictionary.
        :return: dict object
"""
settings = super(SettingManager, self).get_queryset().all()
result = {}
for item in settings:
result[item.name] = item.value
return result
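# Illustrative usage (assumes a configured Django environment):
#   Setting.manager.add('site_name', 'demo')             # creates the entry
#   Setting.manager.add('site_name', 'new', force=True)  # overwrites it
#   Setting.manager.get('site_name')                     # -> u'new'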
class Setting(models.Model):
name = models.CharField(u'选项名称', max_length=64, unique=True)
value = models.TextField(u'选项内容')
objects = models.Manager()
manager = SettingManager()
class Meta:
verbose_name = u'系统设置'
verbose_name_plural = u'系统设置'
db_table = 'setting'
|
bitmovin/bitmovin-python | bitmovin/resources/models/encodings/start/scheduling.py | Python | unlicense | 1,314 | 0.002283 | from bitmovin.errors import InvalidTypeError
from bitmovin.utils import Serializable
class Scheduling(Serializable):
def __init__(self, priority=None, prewarmed_instance_pool_ids=None):
super().__init__()
self._prewarmed_instance_pool_ids = None
self.priority = priority
self.prewarmedInstancePoolIds = prewarmed_instance_pool_ids
@property
def prewarmedInstancePoolIds(self):
return self._prewarmed_instance_pool_ids
@prewarmedInstancePoolIds.setter
    def prewarmedInstancePoolIds(self, new_prewarmed_instance_pool_ids):
if new_prewarmed_instance_pool_ids is None:
            self._prewarmed_instance_pool_ids = None
return
if not isinstance(new_prewarmed_instance_pool_ids, list):
raise InvalidTypeError('prewarmedInstancePoolIds has to be a list of strings')
if all(isinstance(pool_id, str) for pool_id in new_prewarmed_instance_pool_ids):
self._prewarmed_instance_pool_ids = new_prewarmed_instance_pool_ids
else:
raise InvalidTypeError('prewarmedInstancePoolIds has to be a list of strings')
def serialize(self):
serialized = super().serialize()
serialized['prewarmedInstancePoolIds'] = self.prewarmedInstancePoolIds
return serialized
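# Example (illustrative): the setter validates its input, so passing a plain
# string, Scheduling(prewarmed_instance_pool_ids='pool-1'), raises
# InvalidTypeError, while a list such as ['pool-1'] is accepted and shows up
# in serialize() under the 'prewarmedInstancePoolIds' key.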
|
zhangtianyi1234/django-haystack | haystack/backends/elasticsearch_backend.py | Python | bsd-3-clause | 37,888 | 0.002006 | from __future__ import unicode_literals
import datetime
import re
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models.loading import get_model
from django.utils import six
import haystack
from haystack.backends import BaseEngine, BaseSearchBackend, BaseSearchQuery, log_query
from haystack.constants import ID, DJANGO_CT, DJANGO_ID, DEFAULT_OPERATOR
from haystack.exceptions import MissingDependency, MoreLikeThisError
from haystack.inputs import PythonData, Clean, Exact, Raw
from haystack.models import SearchResult
from haystack.utils import get_identifier
from haystack.utils import log as logging
try:
import requests
except ImportError:
raise MissingDependency("The 'elasticsearch' backend requires the installation of 'requests'.")
try:
import pyelasticsearch
except ImportError:
raise MissingDependency("The 'elasticsearch' backend requires the installation of 'pyelasticsearch'. Please refer to the documentation.")
DATETIME_REGEX = re.compile(
r'^(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})T'
r'(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})(\.\d+)?$')
class ElasticsearchSearchBackend(BaseSearchBackend):
# Word reserved by Elasticsearch for special use.
RESERVED_WORDS = (
'AND',
'NOT',
'OR',
'TO',
)
# Characters reserved by Elasticsearch for special use.
# The '\\' must come first, so as not to overwrite the other slash replacements.
RESERVED_CHARACTERS = (
'\\', '+', '-', '&&', '||', '!', '(', ')', '{', '}',
'[', ']', '^', '"', '~', '*', '?', ':', '/',
)
# Settings to add an n-gram & edge n-gram analyzer.
DEFAULT_SETTINGS = {
'settings': {
"analysis": {
"analyzer": {
"ngram_analyzer": {
"type": "custom",
"tokenizer": "lowercase",
"filter": ["haystack_ngram"]
},
"edgengram_analyzer": {
"type": "custom",
"tokenizer": "lowercase",
"filter": ["haystack_edgengram"]
}
},
"tokenizer": {
"haystack_ngram_tokenizer": {
"type": "nGram",
"min_gram": 3,
"max_gram": 15,
},
"haystack_edgengram_tokenizer": {
"type": "edgeNGram",
"min_gram": 2 | ,
"max_gram": 15,
"side": "front"
}
},
"filter": {
"haystack_ngram": {
"type": "nGram",
"min_gram": 3,
"max_gram": 15
},
| "haystack_edgengram": {
"type": "edgeNGram",
"min_gram": 2,
"max_gram": 15
}
}
}
}
}
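    # Illustrative (not from the original source): with the settings above,
    # Elasticsearch's nGram filter emits every substring of length 3..15 of a
    # token ("django" -> "dja", "djan", ..., "jango", "ango", ...), while
    # edgeNGram keeps only front-anchored grams of length 2..15
    # ("django" -> "dj", "dja", "djan", "djang", "django").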
def __init__(self, connection_alias, **connection_options):
super(ElasticsearchSearchBackend, self).__init__(connection_alias, **connection_options)
if not 'URL' in connection_options:
raise ImproperlyConfigured("You must specify a 'URL' in your settings for connection '%s'." % connection_alias)
if not 'INDEX_NAME' in connection_options:
raise ImproperlyConfigured("You must specify a 'INDEX_NAME' in your settings for connection '%s'." % connection_alias)
self.conn = pyelasticsearch.ElasticSearch(connection_options['URL'], timeout=self.timeout)
self.index_name = connection_options['INDEX_NAME']
self.log = logging.getLogger('haystack')
self.setup_complete = False
self.existing_mapping = {}
def setup(self):
"""
Defers loading until needed.
"""
# Get the existing mapping & cache it. We'll compare it
# during the ``update`` & if it doesn't match, we'll put the new
# mapping.
try:
self.existing_mapping = self.conn.get_mapping(index=self.index_name)
except Exception:
if not self.silently_fail:
raise
unified_index = haystack.connections[self.connection_alias].get_unified_index()
self.content_field_name, field_mapping = self.build_schema(unified_index.all_searchfields())
current_mapping = {
'modelresult': {
'properties': field_mapping,
'_boost': {
'name': 'boost',
'null_value': 1.0
}
}
}
if current_mapping != self.existing_mapping:
try:
# Make sure the index is there first.
self.conn.create_index(self.index_name, self.DEFAULT_SETTINGS)
self.conn.put_mapping(self.index_name, 'modelresult', current_mapping)
self.existing_mapping = current_mapping
except Exception:
if not self.silently_fail:
raise
self.setup_complete = True
def update(self, index, iterable, commit=True):
if not self.setup_complete:
try:
self.setup()
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to add documents to Elasticsearch: %s", e)
return
prepped_docs = []
for obj in iterable:
try:
prepped_data = index.full_prepare(obj)
final_data = {}
# Convert the data to make sure it's happy.
for key, value in prepped_data.items():
final_data[key] = self._from_python(value)
prepped_docs.append(final_data)
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
# We'll log the object identifier but won't include the actual object
# to avoid the possibility of that generating encoding errors while
# processing the log message:
self.log.error(u"%s while preparing object for update" % e.__class__.__name__, exc_info=True, extra={
"data": {
"index": index,
"object": get_identifier(obj)
}
})
self.conn.bulk_index(self.index_name, 'modelresult', prepped_docs, id_field=ID)
if commit:
self.conn.refresh(index=self.index_name)
def remove(self, obj_or_string, commit=True):
doc_id = get_identifier(obj_or_string)
if not self.setup_complete:
try:
self.setup()
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)
return
try:
self.conn.delete(self.index_name, 'modelresult', doc_id)
if commit:
self.conn.refresh(index=self.index_name)
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)
def clear(self, models=[], commit=True):
# We actually don't want to do this here, as mappings could be
# very different.
# if not self.setup_complete:
# self.setup()
try:
if not models:
self.conn.delete_index(self.index_name)
else:
models_to_delete = []
for m |
jsternberg/ansible-modules-core | cloud/amazon/ec2_group.py | Python | gpl-3.0 | 17,973 | 0.002392 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_group
author: "Andrew de Quincey (@adq)"
version_added: "1.3"
short_description: maintain an ec2 VPC security group.
description:
- maintains ec2 security groups. This module has a dependency on python-boto >= 2.5
options:
name:
description:
- Name of the security group.
required: true
description:
description:
- Description of the security group.
required: true
vpc_id:
description:
- ID of the VPC to create the group in.
required: false
rules:
description:
- List of firewall inbound rules to enforce in this group (see example). If none are supplied, a default all-out rule is assumed. If an empty list is supplied, no inbound rules will be enabled.
required: false
rules_egress:
description:
- List of firewall outbound rules to enforce in this group (see example). If none are supplied, a default all-out rule is assumed. If an empty list is supplied, no outbound rules will be enabled.
required: false
version_added: "1.6"
state:
version_added: "1.4"
description:
- Create or delete a security group
required: false
default: 'present'
choices: [ "present", "absent" ]
aliases: []
purge_rules:
version_added: "1.8"
description:
- Purge existing rules on security group that are not found in rules
required: false
default: 'true'
aliases: []
purge_rules_egress:
version_added: "1.8"
description:
- Purge existing rules_egress on security group that are not found in rules_egress
required: false
default: 'true'
aliases: []
extends_documentation_fragment:
- aws
- ec2
notes:
- If a rule declares a group_name and that group doesn't exist, it will be
automatically created. In that case, group_desc should be provided as well.
The module will refuse to create a depended-on group without a description.
'''
EXAMPLES = '''
- name: example ec2 group
ec2_group:
name: example
description: an example EC2 group
vpc_id: 12345
region: eu-west-1a
aws_secret_key: SECRET
aws_access_key: ACCESS
rules:
- proto: tcp
from_port: 80
to_port: 80
cidr_ip: 0.0.0.0/0
- proto: tcp
from_port: 22
to_port: 22
cidr_ip: 10.0.0.0/8
- proto: tcp
from_port: 443
to_port: 443
group_id: amazon-elb/sg-87654321/amazon-elb-sg
- proto: tcp
from_port: 3306
to_port: 3306
group_id: 123412341234/sg-87654321/exact-name-of-sg
- proto: udp
from_port: 10050
to_port: 10050
cidr_ip: 10.0.0.0/8
- proto: udp
from_port: 10051
to_port: 10051
group_id: sg-12345678
- proto: icmp
from_port: 8 # icmp type, -1 = any type
to_port: -1 # icmp subtype, -1 = any subtype
cidr_ip: 10.0.0.0/8
- proto: all
# the containing group name may be specified here
group_name: example
rules_egress:
- proto: tcp
from_port: 80
to_port: 80
cidr_ip: 0.0.0.0/0
group_name: example-other
# description to use if example-other needs to be created
group_desc: other example EC2 group
'''
try:
import boto.ec2
from boto.ec2.securitygroup import SecurityGroup
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def make_rule_key(prefix, rule, group_id, cidr_ip):
"""Creates a unique key for an individual group rule"""
if isinstance(rule, dict):
proto, from_port, to_port = [rule.get(x, None) for x in ('proto', 'from_port', 'to_port')]
#fix for 11177
if proto not in ['icmp', 'tcp', 'udp'] and from_port == -1 and to_port == -1:
from_port = 'none'
to_port = 'none'
else: # isinstance boto.ec2.securitygroup.IPPermissions
proto, from_port, to_port = [getattr(rule, x, None) for x in ('ip_protocol', 'from_port', 'to_port')]
key = "%s-%s-%s-%s-%s-%s" % (prefix, proto, from_port, to_port, group_id, cidr_ip)
return key.lower().replace('-none', '-None')
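# Example (illustrative): for an inbound HTTP rule,
#   make_rule_key('in', {'proto': 'tcp', 'from_port': 80, 'to_port': 80},
#                 None, '0.0.0.0/0')
# returns 'in-tcp-80-80-None-0.0.0.0/0'; the lower()/replace() pair keeps the
# literal 'None' intact while normalizing the rest of the key.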
def addRulesToLookup(rules, prefix, dict):
for rule in rules:
for grant in rule.grants:
dict[make_rule_key(prefix, rule, grant.group_id, grant.cidr_ip)] = (rule, grant)
def validate_rule(module, rule):
VALID_PARAMS = ('cidr_ip',
'group_id', 'group_name', 'group_desc',
'proto', 'from_port', 'to_port')
for k in rule:
if k not in VALID_PARAMS:
module.fail_json(msg='Invalid rule parameter \'{}\''.format(k))
if 'group_id' in rule and 'cidr_ip' in rule:
module.fail_json(msg='Specify group_id OR cidr_ip, not both')
elif 'group_name' in rule and 'cidr_ip' in rule:
module.fail_json(msg='Specify group_name OR cidr_ip, not both')
elif 'group_id' in rule and 'group_name' in rule:
module.fail_json(msg='Specify group_id OR group_name, not both')
def get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id):
"""
Returns tuple of (group_id, ip) after validating rule params.
rule: Dict describing a rule.
name: Name of the security group being managed.
groups: Dict of all available security groups.
    AWS accepts an ip range or a security group as the target of a rule. This
    function validates the rule specification and returns either a non-None
    group_id or a non-None ip range.
"""
FOREIGN_SECURITY_GROUP_REGEX = '^(\S+)/(sg-\S+)/(\S+)'
group_id = None
group_name = None
ip = None
target_group_created = False
if 'group_id' in rule and 'cidr_ip' in rule:
module.fail_json(msg="Specify group_id OR cidr_ip, not both")
elif 'group_name' in rule and 'cidr_ip' in rule:
module.fail_json(msg="Specify group_name OR cidr_ip, not both")
elif 'group_id' in rule and 'group_name' in rule:
module.fail_json(msg="Specify group_id OR group_name, not both")
elif 'group_id' in rule and re.match(FOREIGN_SECURITY_GROUP_REGEX, rule['group_id']):
# this is a foreign Security Group. Since you can't fetch it you must create an instance of it
owner_id, group_id, group_name = re.match(FOREIGN_SECURITY_GROUP_REGEX, rule['group_id']).groups()
        group_instance = SecurityGroup(owner_id=owner_id, name=group_name, id=group_id)
groups[group_id] = group_instance
groups[group_name] = group_instance
elif 'group_id' in rule:
group_id = rule['group_id']
elif 'group_name' in rule:
group_name = rule['group_name']
if group_name == name:
group_id = group.id
groups[group_id] = group
groups[group_name] = group
elif group_name in groups:
            group_id = groups[group_name].id
else:
if not rule.get('group_desc', '').strip():
module.fail_json(msg="group %s will be automatically created by rule %s and no description was provided" % (group_name, rule))
if not module.check_mode:
auto_group = ec2.create_security_group(group_name, rule['group_desc'], vpc_id=vpc_id)
group_id = auto_group.id
groups[group_id] = auto_group
groups[group_name] = auto_group
target_group_created = True
elif 'cidr_ip' in rule:
ip = rule['cidr_ip']
return group_id, ip, target_group_created
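# Illustrative return values (hypothetical rules):
#   {'group_id': 'sg-12345678', ...}               -> ('sg-12345678', None, False)
#   {'cidr_ip': '10.0.0.0/8', ...}                 -> (None, '10.0.0.0/8', False)
#   {'group_id': '123412341234/sg-87654321/name'}  -> ('sg-87654321', None, False)
# In the last (foreign security group) case a SecurityGroup stub is also
# registered in 'groups' under both its id and its name.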
|
opencast/pyCA | tests/test_capture.py | Python | lgpl-3.0 | 3,301 | 0 | # -*- coding: utf-8 -*-
'''
Tests for basic capturing
'''
import os
import os.path
import shutil
import tempfile
import unittest
from pyca import capture, config, db, utils
from tests.tools import should_fail, terminate_fn, reload
class TestPycaCapture(unittest.TestCase):
def setUp(self):
utils.http_request = lambda x, y=False: b'xxx'
self.fd, self.dbfile = tempfile.mkstemp()
self.cadir = tempfile.mkdtemp()
preview = os.path.join(self.cadir, 'preview.png')
open(preview, 'a').close()
config.config()['agent']['database'] = 'sqlite:///' + self.dbfile
config.config()['capture']['command'] = 'touch {{dir}}/{{name}}.mp4'
config.config()['capture']['directory'] = self.cadir
config.config()['capture']['preview'] = [preview]
config.config()['services']['org.opencastproject.capture.admin'] = ['']
# Mock event
db.init()
self.event = db.BaseEvent()
self.event.uid = '123123'
self.event.title = u'äüÄÜß'
self.event.start = utils.timestamp()
        self.event.end = self.event.start
self.event.status = db.Status.UPCOMING
data = [{'data': u'äüÄÜß',
'fmttype': 'application/xml',
'x-apple-filename': 'episode.xml'},
{'data': u'äüÄÜß',
'fmttype': 'application/xml',
                 'x-apple-filename': 'series.xml'},
{'data': u'event.title=äüÄÜß\n' +
u'org.opencastproject.workflow.config.x=123\n' +
u'org.opencastproject.workflow.definition=fast',
'fmttype': 'application/text',
'x-apple-filename': 'org.opencastproject.capture.agent' +
'.properties'}]
self.event.set_data({'attach': data})
def tearDown(self):
os.close(self.fd)
os.remove(self.dbfile)
shutil.rmtree(self.cadir)
reload(capture)
reload(config)
reload(utils)
def test_start_capture(self):
capture.start_capture(self.event)
def test_start_capture_recording_command_failure(self):
config.config()['capture']['command'] = 'false'
with self.assertRaises(RuntimeError):
capture.start_capture(self.event)
def test_start_capture_sigterm(self):
config.config()['capture']['command'] = 'sleep 10'
config.config()['capture']['sigterm_time'] = 0
capture.start_capture(self.event)
def test_start_capture_sigkill(self):
config.config()['capture']['command'] = 'sleep 10'
config.config()['capture']['sigkill_time'] = 0
capture.start_capture(self.event)
def test_safe_start_capture(self):
'''Ensure that safe_start_capture always returns without error to not
disrupt the main loop.
'''
capture.start_capture = should_fail
capture.safe_start_capture(self.event)
def test_run(self):
capture.terminate = terminate_fn(1)
capture.run()
def test_sigterm(self):
with self.assertRaises(BaseException) as e:
capture.sigterm_handler(0, 0)
self.assertEqual(e.exception.code, 0)
self.assertTrue(utils.terminate())
|
txm/potato | urls.py | Python | bsd-3-clause | 156 | 0.00641 | from django.conf.urls.defaults import *
handler500 = 'djangotoolbox.errorviews.server_error'
urlpatterns = patterns('',
    (r'', include('txm.urls')),
)
|
aholmback/fuse | fuse/__about__.py | Python | mit | 489 | 0 | from __future__ import absolute_import, division, print_function
__all__ = [
'__title__', '__summary__', '__uri__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
]
__title__ = 'fuse'
__summary__ = ("Composes boilerplates")
__uri__ = 'https://github.com/aholmback/fuse'
__version__ = '0.0.2.alpha.18'
__author__ = "Cre | una Developers"
__email__ = 'people@creuna.se'
__license__ = 'MIT'
__copyright__ = "Copyright 2017 {0}".format(__author__)
|
adrn/gala | gala/potential/scf/tests/test_class.py | Python | mit | 3,357 | 0.000298 | # coding: utf-8
# Third-party
import astropy.units as u
from astropy.constants import G as _G
import numpy as np
import pytest
# Project
from gala._cconfig import GSL_ENABLED
from gala.units import galactic
import gala.potential as gp
from gala.potential.potential.tests.helpers import PotentialTestBase
from gala.potential.potential.io import load
from .. import _bfe_class
G = _G.decompose(galactic).value
if not GSL_ENABLED:
pytest.skip("skipping SCF tests: they depend on GSL",
allow_module_level=True)
def test_hernquist():
nmax = 6
lmax = 2
M = 1E10
r_s = 3.5
cos_coeff = np.zeros((nmax+1, lmax+1, lmax+1))
sin_coeff = np.zeros((nmax+1, lmax+1, lmax+1))
    cos_coeff[0, 0, 0] = 1.
scf_potential = _bfe_class.SCFPotential(m=M, r_s=r_s,
                                            Snlm=cos_coeff, Tnlm=sin_coeff,
units=galactic)
# scf_potential = HackPotential(m=10., units=galactic)
nbins = 128
rr = np.linspace(0.1, 10., nbins)
xyz = np.zeros((3, nbins))
xyz[0] = rr * np.cos(np.pi/4.) * np.sin(np.pi/4.)
xyz[1] = rr * np.sin(np.pi/4.) * np.sin(np.pi/4.)
xyz[2] = rr * np.cos(np.pi/4.)
hernquist = gp.HernquistPotential(m=M, c=r_s, units=galactic)
bfe_pot = scf_potential.energy(xyz).value
true_pot = hernquist.energy(xyz).value
np.testing.assert_allclose(bfe_pot, true_pot)
bfe_grad = scf_potential.gradient(xyz).value
true_grad = hernquist.gradient(xyz).value
np.testing.assert_allclose(bfe_grad, true_grad)
class TestSCFPotential(PotentialTestBase):
nmax = 6
lmax = 2
Snlm = np.zeros((nmax+1, lmax+1, lmax+1))
Tnlm = np.zeros((nmax+1, lmax+1, lmax+1))
Snlm[0, 0, 0] = 1.
Snlm[2, 0, 0] = 0.5
Snlm[4, 0, 0] = 0.25
potential = _bfe_class.SCFPotential(m=1E11*u.Msun, r_s=10*u.kpc,
Snlm=Snlm, Tnlm=Tnlm, units=galactic)
w0 = [4.0, 0.7, -0.9, 0.0352238, 0.1579493, 0.02]
def test_save_load(self, tmpdir):
fn = str(tmpdir.join("{}.yml".format(self.name)))
self.potential.save(fn)
p = load(fn, module=_bfe_class)
p.energy(self.w0[:self.w0.size//2])
@pytest.mark.skipif(True, reason='no hessian implemented')
def test_hessian(self):
pass
@pytest.mark.skip(reason="to_sympy() not implemented yet")
def test_against_sympy(self):
pass
def test_compare(self):
# skip if composite potentials
if len(self.potential.parameters) == 0:
return
other = self.potential.__class__(units=self.potential.units, **self.potential.parameters)
assert other == self.potential
pars = self.potential.parameters.copy()
for k in pars.keys():
if k != 0:
pars[k] = 1.1 * pars[k]
other = self.potential.__class__(units=self.potential.units, **pars)
assert other != self.potential
def test_replace_units(self):
H = gp.Hamiltonian(self.potential)
H2 = gp.Hamiltonian(self.potential.replace_units(self.potential.units))
ww = [20., 10, 10, 0, 0.2, 0]
w1 = H.integrate_orbit(ww, t=np.array([0, 1.]))[-1].w(galactic).T
w2 = H2.integrate_orbit(ww, t=np.array([0, 1.]))[-1].w(galactic).T
assert np.allclose(w1, w2)
|
NB-Dev/django-shop | shop/addressmodel/models.py | Python | bsd-3-clause | 2,375 | 0.001684 | # -*- coding: utf-8 -*-
"""
Holds all the information relevant to the client (addresses for instance)
"""
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
BASE_ADDRESS_TEMPLATE = \
_("""
Name: %(name)s,
Address: %(address)s,
Zip-Code: %(zipcode)s,
City: %(city)s,
State: %(state)s,
Country: %(country)s
| """)
ADDRESS_TEMPLATE = getattr(settings, 'SHOP_ADDRESS_TEMPLATE',
BASE_ADDRESS_TEMPLATE)
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
class Country(models.Model):
name = models.CharField(max_length=255)
def __unicode__(self):
return u'%s' % self.name
class Meta(object):
verbose_name = _('Country')
verbose_name_plural = _('Countries')
class Address(models.Model):
user_shipping = models.OneToOneField(USER_MODEL, related_name='shipping_address',
blank=True, null=True)
user_billing = models.OneToOneField(USER_MODEL, related_name='billing_address',
blank=True, null=True)
name = models.CharField(_('Name'), max_length=255)
address = models.CharField(_('Address'), max_length=255)
address2 = models.CharField(_('Address2'), max_length=255, blank=True)
zip_code = models.CharField(_('Zip Code'), max_length=20)
city = models.CharField(_('City'), max_length=20)
state = models.CharField(_('State'), max_length=255)
country = models.ForeignKey(Country, verbose_name=_('Country'), blank=True,
null=True)
class Meta(object):
verbose_name = _('Address')
verbose_name_plural = _("Addresses")
def __unicode__(self):
return '%s (%s, %s)' % (self.name, self.zip_code, self.city)
def clone(self):
new_kwargs = dict([(fld.name, getattr(self, fld.name))
for fld in self._meta.fields if fld.name != 'id'])
return self.__class__.objects.create(**new_kwargs)
def as_text(self):
return ADDRESS_TEMPLATE % {
'name': self.name,
'address': '%s\n%s' % (self.address, self.address2),
'zipcode': self.zip_code,
'city': self.city,
'state': self.state,
'country': self.country,
}
|
creffett/reffipe | reffipe/website/models.py | Python | gpl-3.0 | 1,104 | 0 | from django.db import models
# Create your models here.
MEASUREMENT = (
('c', 'cups'),
('tsp', 'teaspoons'),
('tbsp', 'tablespoons'),
('item', 'item'),
)
class Step(models.Model):
"""A step in a recipe"""
order = models.IntegerField()
directions = models.TextField()
recipe = models.ForeignKey('Recipe')
class Tag(models.Model):
"""A tag to identify a recipe"""
name = models.CharField(max_length=50)
recipes = models.ManyToManyField('Recipe')
def __str__(self):
return self.name
class RecipeIngredient(models.Model):
ingredient = models.ForeignKey('Ingredient')
recipe = models.ForeignKey('Rec | ipe')
quantity = models.FloatField()
unit = models.CharField(choices=MEASUREMENT, max_length=10)
description = models.CharField(max_length=100)
notes = models.TextField()
class Ingredient(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class Recipe(models.Model):
name = models.Char | Field(max_length=100)
def __str__(self):
return self.name
|
whbruce/upm | examples/python/t8100.py | Python | mit | 3,663 | 0.001638 | #!/usr/bin/python
# Author: Jon Trulson <jtrulson@ics.com>
# Copyright (c) 2016 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_t8100 as sensorObj
def main():
| ## Exit handlers ##
# This function stops python from printing a stacktrace when you hit control-C
def SIGINTHandler(signum, frame):
raise SystemExit
# This function lets you run code on exit
def exitHandler():
| print("Exiting...")
sys.exit(0)
# Register exit handlers
atexit.register(exitHandler)
signal.signal(signal.SIGINT, SIGINTHandler)
# You will need to edit this example to conform to your site and your
# devices, specifically the Device Object Instance ID passed to the
# constructor, and the arguments to initMaster() that are
# appropriate for your BACnet network.
defaultDev = "/dev/ttyUSB0"
# if an argument was specified, use it as the device instead
if (len(sys.argv) > 1):
defaultDev = sys.argv[1]
print("Using device", defaultDev)
print("Initializing...")
    # Instantiate a T8100 object for a T8100 device that has 568000
    # as its unique Device Object Instance ID. NOTE: You will
# certainly want to change this to the correct value for your
# device(s).
sensor = sensorObj.T8100(568000)
# Initialize our BACnet master, if it has not already been
# initialized, with the device and baudrate, choosing 1000001 as
# our unique Device Object Instance ID, 2 as our MAC address and
# using default values for maxMaster and maxInfoFrames
sensor.initMaster(defaultDev, 38400, 1000001, 2)
# Uncomment to enable debugging output
# sensor.setDebug(True);
# output the serial number and firmware revision
print()
print("Device Description:", sensor.getDeviceDescription())
print("Device Location:", sensor.getDeviceLocation())
print()
# update and print available values every 5 seconds
while (1):
# update our values
sensor.update();
print("CO2 Concentration:", end=' ')
print(sensor.getCO2(), end=' ')
print("ppm")
# we show both C and F for temperature
print("Temperature:", sensor.getTemperature(), end=' ')
print("C /", sensor.getTemperature(True), "F")
print("Humidity:", end=' ')
print(sensor.getHumidity(), end=' ')
print("%RH")
print("Relay State:", end=' ')
print(sensor.getRelayState())
print()
time.sleep(5)
if __name__ == '__main__':
main()
|
iAddz/allianceauth | eveonline/migrations/0006_allow_null_evecharacter_alliance.py | Python | gpl-2.0 | 719 | 0.002782 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-01-02 19:23
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
depe | ndencies = [
('eveonline', '0005_remove_eveallianceinfo_member_count'),
]
operations = [
migrations.AlterField(
model_name='evecharacter',
name='alliance_id',
field=models.CharField(blank=True, default='', max_length=254, null=True),
),
migrations.AlterField(
model_name='evecharacter',
name='alliance_name',
field=models.CharField(blank=True, default='', max_length=254, | null=True),
),
]
|
zhuzhezhe/weibobash | weibo_bash/weibo_bash.py | Python | mit | 2,831 | 0.001602 | #!/usr/bin/env python3
#coding:utf-8
__author__ = 'zhuzhezhe'
'''
Features: post weibo statuses and fetch the latest weibo from the command line
'''
from weibo import Client
import getopt
import sys
import configparser
versions = '0.1.5'
# Write user credentials to the config file
def write_data(uname, pwd):
conf = configparser.ConfigParser()
conf['LOGIN'] = {}
conf['LOGIN']['username'] = uname
conf['LOGIN']['password'] = pwd
with open('config.ini', 'w') as configfile:
conf.write(configfile)
    print('Saved successfully')
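# Illustrative sketch (credentials hypothetical): write_data('alice', 'secret')
# leaves a config.ini of the form read back below:
#     [LOGIN]
#     username = alice
#     password = secret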
# Read user credentials
config = configparser.ConfigParser()
config.read('config.ini')
username = ''
password = ''
if 'LOGIN' in config:
username = config['LOGIN']['username']
password = config['LOGIN']['password']
else:
    print('Make sure you are logged in. Please provide your username and password.')
# Basic credentials for the Sina API
api_key = '3842240593'
api_secret = '93f0c80150239e02c52011c858b20ce6'
# Default callback URL
redirect_url = 'https://api.weibo.com/oauth2/default.html'
# Login authentication
c = Client(api_key=api_key,
api_secret=api_secret,
redirect_uri=redirect_url,
username=username,
password=password)
# Latest weibo
def new_weibo():
try:
data = c.get('statuses/friends_timeline')["statuses"]
for i in range(len(data)):
print("用户:"+data[i]["user"]["screen_name"])
print("微博:"+data[i]["text"])
print("\n")
except Exception as err:
print(err)
        print('Make sure you are logged in. Please provide your username and password.')
# Post a weibo
def add_weibo(words):
try:
c.post('statuses/update', status=words)
print("发布成功!")
except Exception as err:
print(err)
        print('Make sure you are logged in. Please provide your username and password.')
# Usage
def usage():
    text = '--------weibobash usage help--------\n' \
           '-h<--help>: show this help message\n' \
           '-u<--user>: enter username and password\n' \
           '-n<--new>: show the 20 latest weibo\n' \
           '-a<--add>: post a weibo\n'
print(text)
# Main program
def main(): |
try:
opts, args = getopt.getopt(sys.argv[1:], "hna:vu", ["help", "new", "add=", "user"])
except getopt.GetoptError as err:
print(err)
sys.exit(2)
for o, a in opts:
if o == "-v":
print(versions)
elif o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-n", "--new"):
new_weibo()
elif o in ("-a", "--add"):
add_weibo(a)
elif | o in ("-u", "--user"):
user = input("请输入用户名:")
pwd = input("请输入密码:")
write_data(user, pwd)
else:
assert False, "unhandled option"
if __name__ == "__main__":
main()
|
eldarion/pycon | pycon/profile/views.py | Python | bsd-3-clause | 1,142 | 0.004378 | from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from .models import Profile
from .forms import ProfileForm
@login_required
def pr | ofile_edit(request):
next = request.GET.get("next")
profile, cre | ated = Profile.objects.get_or_create(
user=request.user,
defaults={
"first_name": request.user.first_name,
"last_name": request.user.last_name,
}
)
if request.method == "POST":
form = ProfileForm(request.POST, instance=profile)
if form.is_valid():
profile = form.save()
request.user.first_name = form.cleaned_data["first_name"]
request.user.last_name = form.cleaned_data["last_name"]
messages.add_message(request, messages.SUCCESS,
"Successfully updated profile."
)
if next:
return redirect(next)
else:
form = ProfileForm(instance=profile)
return render(request, "profiles/edit.html", {
"form": form,
"next": next,
})
|
kfirprods/tpp | python/hg-rheads.py | Python | gpl-3.0 | 482 | 0.008299 | import sys
from mercurial import hg, node, ui
def main():
"""print (possibly remote) heads
Prints a series of lines consisting of hashes and branch names.
Specify a local or remote repository, defaulting to the configured remote.
"""
repo = sys.argv[1]
other = hg.peer(ui.ui(), {}, repo)
for tag, heads in other.branchmap().iteritems():
print "%s %s" % (node.short(heads[0]), tag)
if __nam | e__ == "__main__":
main()
| |
openaid-IATI/deprecated-version-OIPA-v2 | iati/data/migrations/0009_auto__add_countrystatistics.py | Python | agpl-3.0 | 18,746 | 0.007042 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'CountryStatistics'
db.create_table('data_countrystatistics', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('country', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['data.Country'], unique=True)),
('total_activities', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal('data', ['CountryStatistics'])
def backwards(self, orm):
# Deleting model 'CountryStatistics'
db.delete_table('data_countrystatistics')
models = {
'data.activitystatistics': {
'Meta': {'object_name': 'ActivityStatistics'},
'iati_identifier': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['data.IATIActivity']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'total_budget': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '2', 'blank': 'True'})
},
'data.activitystatustype': {
'Meta': {'object_name': 'ActivityStatusType'},
'code': ('django.db.models.fields.IntegerField', [], {'max_length': '8', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Country']", 'null': 'True', 'blank': 'True'})
},
'data.aidtype': {
'Meta': {'object_name': 'AidType'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '5', 'primary_key': 'True'})
},
'data.budget': {
'Meta': {'object_name': 'Budget'},
'currency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.CurrencyType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'period_end': ('django.db.models.fields.DateField', [], {}),
'period_start': ('django.db.models.fields.DateField', [], {}),
'type': ('django.db.models.fields.IntegerField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '2'})
},
'data.collaborationtype': {
'Meta': {'object_name': 'CollaborationType'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '55', 'primary_key': 'True'})
},
'data.country': {
'Meta': {'object_name': 'Country'},
'iso': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'})
},
'data.countrystatistics': {
'Meta': {'object_name': 'CountryStatistics'},
'country': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['data.Country']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'total_activities': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'data.currencytype': {
'Meta': {'object_name': 'CurrencyType'},
'code': ('django.db.models.fields.CharField', [ | ], {'max_length': '3', 'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Country']", 'null': 'True', 'blank': 'True'})
},
'data.financetype': {
'Meta': {'object_name': 'FinanceType'},
'code': ('django.db.models.fields.IntegerField', [], {'max_length': '5', 'primary_key': 'True'})
},
'data.flowtyp | e': {
'Meta': {'object_name': 'FlowType'},
'code': ('django.db.models.fields.IntegerField', [], {'max_length': '5', 'primary_key': 'True'})
},
'data.iatiactivity': {
'Meta': {'object_name': 'IATIActivity'},
'activity_status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.ActivityStatusType']", 'null': 'True', 'blank': 'True'}),
'collaboration_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.CollaborationType']", 'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'default_aid_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.AidType']", 'null': 'True', 'blank': 'True'}),
'default_finance_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.FinanceType']", 'null': 'True', 'blank': 'True'}),
'default_flow_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.FlowType']", 'null': 'True', 'blank': 'True'}),
'default_tied_status_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.TiedAidStatusType']", 'null': 'True', 'blank': 'True'}),
'end_actual': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'end_planned': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'iati_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'reporting_organisation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Organisation']"}),
'start_actual': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'start_planned': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'data.iatiactivitybudget': {
'Meta': {'object_name': 'IATIActivityBudget', '_ormbases': ['data.Budget']},
'budget_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['data.Budget']", 'unique': 'True', 'primary_key': 'True'}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"})
},
'data.iatiactivitycontact': {
'Meta': {'object_name': 'IATIActivityContact'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mailing_address': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'organisation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'person_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
'data.iatiactivitycountry': {
'Meta': {'object_name': 'IATIActivityCountry'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Country']"}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'percentage': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'data.iatiactivitydescript |
sloria/sepal | sepal/datasets/admin.py | Python | bsd-3-clause | 1,904 | 0.003676 | from sepal.datasets.models import *
from django.contrib import admin
class InstanceInline(admin.TabularInline):
model = Instance
extra = 1
class FeatureValueInline(admin.TabularInline):
model = FeatureValue
extra = 1
class LabelValueInline(admin.TabularInline):
model = LabelValue
extra = 3
class DatasetInline(admin.TabularInline):
| model = Dataset
class DatasetAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['name', 'description', 'species', ]}),
('Date information', {'fields': ['created_at'], 'classes': ['collapse']})
]
list_display = ('name', 'created_at', 'description', 'pk')
inlines = [InstanceInline, ]
search_fields = ['name']
class InstanceAdmin(admin.ModelAdmin):
fieldsets = [
| (None, {'fields': ['dataset', 'audio']}),
]
list_display = ('dataset', 'pk', 'audio')
inlines = [FeatureValueInline]
class AudioAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['audio_file', 'slug', ]}),
]
list_display = ('audio_file', 'slug',)
class FeatureAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['name', 'display_name', 'unit']})
]
list_display = ('name', 'display_name', 'unit', 'pk')
# inlines = [FeatureInstanceInline,]
class LabelNameAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['name']})
]
list_display = ('name', 'pk')
inlines = [LabelValueInline]
class LabelValueAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['value', 'label_name']})
]
list_display = ('value', 'pk')
inlines = []
admin.site.register(Dataset, DatasetAdmin)
admin.site.register(Instance, InstanceAdmin)
admin.site.register(Audio, AudioAdmin)
admin.site.register(Feature, FeatureAdmin)
admin.site.register(LabelName, LabelNameAdmin)
admin.site.register(LabelValue, LabelValueAdmin) |
mattaustin/django-thummer | thummer/migrations/0001_initial.py | Python | apache-2.0 | 1,008 | 0.003968 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import sorl.thumbnail.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='WebpageSnapshot',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.URLField(db_index=True)),
('image', sorl.thumbn | ail.fields.ImageField(upload_to='thummer/snapshots', null=True, editable=False)),
('capture_width', models.IntegerField(default=1680, editable=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('captured_at', models.DateTimeField(null=True, editable=False)),
],
options={
'ordering': ['-capt | ured_at'],
'get_latest_by': 'captured_at',
},
),
]
|
Azure/WALinuxAgent | dcr/scenarios/agent-persist-firewall/run2.py | Python | apache-2.0 | 2,711 | 0.005164 | from dcr.scenario_utils.common_utils import get_current_agent_name
from dcr.scenario_utils.test_orchestrator import TestFuncObj, TestOrchestrator
from persist_firewall_helpers import verify_wire_ip_in_iptables, verify_system_rebooted, generate_svg, \
verify_wire_ip_unreachable_for_non_root, verify_wire_ip_reachable_for_root, run_systemctl_command, \
firewalld_service_enabled, print_stateful_debug_data
def check_external_service_status():
agent_name = get_current_agent_name()
    # Check if the firewall is active on the VM
if firewalld_service_enabled():
# If yes, then print its status
ec, _, __ = run_systemctl_command("firewalld.service", command="status")
if ec != 0:
raise Exception("Something wrong with firewalld.service!")
# Else print status of our custom service
else:
service_name = "{0}-network-setup.service".format(agent_name)
# Check if enabled, if not then raise Error
ec, stdout, stderr = run_systemctl_command(service_name, command="is-enabled")
if ec != 0:
raise Exception("Service should be enabled!")
# Check if failed, if so then raise Error
ec, stdout, stderr = run_systemctl_command(service_name, command="is-failed")
if ec == 0:
raise Exception("The service should not be in a failed state!")
# Finally print the status of the service
run_systemctl_command(service_name, command="status")
print("\nDisable Guest Agent service for more verbose testing")
ec, _, __ = run_systemctl_command(service_name="{0}.service".format(agent_name), command="disable")
if ec != 0:
raise Exception("Agent not disabled properly!")
if __name__ == '__main__':
tests = [
TestFuncObj("Verify system rebooted", verify_system_rebooted, raise_on_error=True),
TestFuncObj("G | enerate SVG", lambda: generate_svg(svg_name="agent_running.svg")),
TestFuncObj("Verify wireIP unreachable for non-root", verify_wire_ip_unreachable_for_non_root),
TestFuncObj("Verify wireIP reachable for root", verify_wire_ | ip_reachable_for_root),
TestFuncObj("Verify_Wire_IP_IPTables", lambda: verify_wire_ip_in_iptables(max_retry=1)),
TestFuncObj("Verify External services", check_external_service_status)
]
test_orchestrator = TestOrchestrator("PersistFirewall-VM2", tests=tests)
test_orchestrator.run_tests()
# Print stateful debug data before reboot because the state might be lost after
print_stateful_debug_data()
test_orchestrator.generate_report_on_vm("test-result-pf-run2.xml")
assert not test_orchestrator.failed, f"Test Suite: {test_orchestrator.name} failed"
|
JackDanger/sentry | src/sentry/interfaces/template.py | Python | bsd-3-clause | 3,261 | 0 | """
sentry.interfaces.template
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
__all__ = ('Template',)
from sentry.interfaces.base import Interface, InterfaceValidationError
from sentry.interfaces.stacktrace import get_context
from sentry.utils.safe import trim
class Template(Interface):
"""
A rendered template (generally used like a single frame in a stacktrace).
The attributes ``filename``, ``context_line``, and ``lineno`` are required.
>>> {
>>> "abs_path": "/real/file/name.html"
>>> "filename": "file/name.html",
>>> "pre_context": [
>>> "line1",
>>> "line2"
>>> ],
>>> "context_line": "line3",
>>> "lineno": 3,
>>> "post_context": [
>>> "line4",
>>> "line5"
>>> ],
>>> }
.. note:: This interface can be passed as the 'template' key in addition
to the full interface path.
"""
score = 1100
@classmethod
def to_python(cls, data):
if not data.get('filename'):
raise InterfaceValidationError("Missing 'filename'")
if not data.get('context_line'):
raise InterfaceValidationError("Missing 'context_line'")
if not data.get('lineno'):
raise InterfaceValidationError("Missing 'lineno'")
kwargs = {
'abs_path': trim(data.get('abs_path', None), 256),
'filename': trim(data['filename'], 256),
'context_line': trim(data.get('context_line', None), 256),
'lineno': int(data['lineno']),
# TODO(dcramer): trim pre/post_context
'pre_context': data.get('pre_context'),
'post_context': data.get('post_context'),
}
return cls(**kwargs)
def get_alias(self):
return 'template'
def get_path(self):
return 'sentry.interfaces.Template'
def get_hash(self):
return [self.filename, self.context_line]
def to_string(self, event, is_public=False, **kwargs):
context = get_context(
lineno=self.lineno,
context_line=self.context_line,
pre_context=self.pre_context,
post_context=self.post_context,
filename=self.filename,
)
result = [
'Stacktrace (most recent call last):', '',
self.get_traceback(event, context)
]
return '\n'.join(result)
def get_traceback(self, event, context):
result = [
event.message, '',
'File "%s", line %s' % (self.filename, self.lineno), '',
]
result.extend([n[1].strip('\n') for n in c | ontext])
return '\n'.join(result)
def get_api_context(self, is_public=False):
| return {
'lineNo': self.lineno,
'filename': self.filename,
'context': get_context(
lineno=self.lineno,
context_line=self.context_line,
pre_context=self.pre_context,
post_context=self.post_context,
filename=self.filename,
),
}
|
theelectricbrain/Drones-Drifters | drones_n_drifters/georef/geo_referencing.py | Python | agpl-3.0 | 4,964 | 0.004633 | #!/usr/bin/python2.7
# encoding: utf-8
from __future__ import division
import numpy as np
# from pyproj import Proj, pj_list, pj_ellps
import cv2
def geo_ref_tracks(tracks, frame, uav, debug=False):
"""
Geo-references tracks'points
:param tracks: list of drifters' trajectories
:param frame: CV2 frame
:param uav: UAV class object
:return: geo-referenced tracks in degrees and tracks relative to center point in meters
"""
# Meter per pixel ratio
# TODO: Lens correction could be needed here
diagLength = 2.0 * np.tan(np.deg2rad(uav.FOV/2.0)) * uav.altitude
nx = float(frame.shape[1])
ny = float(frame.shape[0])
phi = np.arctan(ny / nx)
horiMpP = diagLength * np.cos(phi) / nx # horizontal meters per pixel ratio
vertiMpP = diagLength * np.sin(phi) / ny # vertical meters per pixel ratio.
if uav.yaw < 0.0: # UAV convention
alibi = True
else:
alibi = False
yaw = np.abs(np.deg2rad(uav.yaw))
    # Needed because the track points are stored as tuples
tracksInDeg = []
tracksInRelativeM = []
for tr in tracks:
tracksInDeg.append([])
tracksInRelativeM.append([])
# Relative distance
for tr, TR in zip(tracks, tracksInRelativeM):
for pt in tr:
pt = list(pt)
x = (pt[0] - (nx/2.0)) * horiMpP
y = ((ny - pt[1]) - (ny/2.0)) * vertiMpP # Origin frame is top left corner
if alibi:
# Correction with Active (aka Alibi) transformation
xr = x * np.cos(yaw) - y * np.sin(yaw)
yr = x * np.sin(yaw) + y * np.cos(yaw)
else:
# Correction with Passive (aka Alias) transformation
xr = x*np.cos(yaw) + y*np.sin(yaw)
| yr = y*np.cos(yaw) - x*np.sin(yaw)
TR.append([xr, yr])
# Conversion deg. to m. / Version 2.0
y2lat = 1.0 / (110.54 * 1000.0)
x2lon = 1.0 / (111.320 * 1000.0 * np.cos(np.deg2rad(uav.centreCoordinates[1])))
lonC, latC = uav.centreCoordinates[0], uav.centreCoordinates[1]
for tr, trM in zip(tracksInDeg, tra | cksInRelativeM):
for ptM in trM:
lon, lat = lonC + (ptM[0] * x2lon), latC + (ptM[1] * y2lat)
tr.append([lon, lat])
# Conversion deg. to m. / version 1.0
# proj = raw_input("Use default projection UTM/WGS84 (yes/no)?: ").upper()
# if proj in "YES":
# myproj = Proj(proj='utm', ellps='WGS84') # LatLon with WGS84 datum used by GPS units
# else:
# print "Choose a coordinate projection from the following list:"
# for key in pj_list:
# print key + ": " + pj_list[key]
# proj = raw_input("Type in the coordinate projection: ")
# print "Choose a coordinate ellipse from the following list:"
# for key in pj_list:
# print key + ": " + pj_list[key]
# ellps = raw_input("Type in the coordinate ellipse: ")
# myproj = Proj(proj=proj, ellps=ellps)
# xc, yc = myproj(uav.centreCoordinates[0], uav.centreCoordinates[1])
# # Absolute distance and conversion m. to deg.
# for tr, trM in zip(tracksInDeg, tracksInRelativeM):
# for ptM in trM:
# x, y = xc + ptM[0], yc + ptM[1]
# lon, lat = myproj(x, y, inverse=True)
# tr.append([lon, lat])
# # Recompute relative distance in new referential
# tracksInRelativeM = []
# for tr in tracks:
# tracksInRelativeM.append([])
# lat2m = 110.54 * 1000.0
# lon2m = 111.320 * 1000.0 * np.cos(np.deg2rad(uav.centreCoordinates[1]))
# for tr, trM in zip(tracksInDeg, tracksInRelativeM):
# for pt in tr:
# x = lon2m * (pt[0] - uav.centreCoordinates[0])
# y = lat2m * (pt[1] - uav.centreCoordinates[1])
# trM.append([x, y])
return tracksInDeg, tracksInRelativeM
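# Worked example of the meters-per-pixel math above (numbers illustrative):
# with FOV = 90 deg and altitude = 100 m, diagLength = 2*tan(45 deg)*100 = 200 m;
# for a 1920x1080 frame, phi = arctan(1080/1920) ~= 0.512 rad, giving
# horiMpP ~= 200*cos(0.512)/1920 ~= 0.091 m/px and
# vertiMpP ~= 200*sin(0.512)/1080 ~= 0.091 m/px.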
# TODO: def geo_ref_contours
def geo_ref_contours(surfTurbArea, uav, debug=False):
"""
Geo-references surface turbulence areas
:param surfTurbArea: frame of surface turbulence areas
:param uav: UAV object
:return: geo-referenced contours
"""
# Find contours from white areas
imgray = cv2.cvtColor(surfTurbArea,cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(imgray,127,255,0)
im2, contours, hierarchy = cv2.findContours(thresh,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
if debug:
im = cv2.drawContours(surfTurbArea, contours, -1, (0,255,0), 3)
cv2.namedWindow('Areas & contours', cv2.WINDOW_NORMAL)
cv2.resizeWindow('Areas & contours', 1200, 1200)
cv2.imshow('Areas & contours', im)
# Reformating
contoursList = []
for cnt in contours:
coordsList = []
for coords in cnt:
coordsList.append(tuple(coords[0]))
contoursList.append(coordsList)
# Georeference contours
contoursInDeg, contoursInM = geo_ref_tracks(contoursList, surfTurbArea, uav, debug=debug)
return contoursInDeg
|
khshim/lemontree | lemontree/__init__.py | Python | mit | 117 | 0.017094 | "" | "
LemonTree is simple, flexible, and experimental deep learning package based on Theano.
"""
__version__ = '0.0 | .4' |
dbiesecke/plugin.video.xstream | sites/kinox_to.py | Python | gpl-3.0 | 34,288 | 0.011549 | # -*- coding: utf-8 -*-
import urllib
import logger
from resources.lib.gui.gui import cGui
from resources.lib.gui.guiElement import cGuiElement
from resources.lib.handler.requestHandler import cRequestHandler
from resources.lib.parser import cParser
from resources.lib.util import cUtil
from resources.lib.config import cConfig
from json import loads
import re
from resources.lib.handler.ParameterHandler import ParameterHandler
from resources.lib import jsunprotect
SITE_IDENTIFIER = 'kinox_to'
SITE_NAME = 'Kinox.to'
SITE_ICON = 'kinox.png'
oConfig = cConfig()
domain = oConfig.getSetting('kinox_to-domain')
####
URL_MAIN = 'http://' + domain
URL_CINEMA_PAGE = URL_MAIN + '/Cine-Films.html'
URL_GENRE_PAGE = URL_MAIN +'/Genre.html'
URL_MOVIE_PAGE = URL_MAIN + '/Movies.html'
URL_SERIE_PAGE = URL_MAIN + '/Series.html'
URL_DOCU_PAGE = URL_MAIN + '/Documentations.html'
URL_FAVOURITE_MOVIE_PAGE = URL_MAIN + '/Popular-Movies.html'
URL_FAVOURITE_SERIE_PAGE = URL_MAIN + '/Popular-Series.html'
URL_FAVOURITE_DOCU_PAGE = URL_MAIN + '/Popular-Documentations.html'
URL_LATEST_SERIE_PAGE = URL_MAIN + '/Latest-Series.html'
URL_LATEST_DOCU_PAGE = URL_MAIN + '/Latest-Documentations.html'
URL_SEARCH = URL_MAIN + '/Search.html'
URL_MIRROR = URL_MAIN + '/aGET/Mirror/'
URL_EPISODE_URL = URL_MAIN + '/aGET/MirrorByEpisode/'
URL_AJAX = URL_MAIN + '/aGET/List/'
| URL_LANGUAGE = URL_MAIN + '/aSET/PageLang/1'
def load():
logger.info("Load %s" % SITE_NAME)
| sSecurityValue = __getSecurityCookieValue()
if sSecurityValue == '':
pass
elif sSecurityValue == False:
return
oParams = ParameterHandler()
oParams.setParam('securityCookie', sSecurityValue)
## Create all main menu entries
oGui = cGui()
oParams.setParam('sUrl', URL_MAIN)
oParams.setParam('page', 1)
oParams.setParam('mediaType', 'news')
    oGui.addFolder(cGuiElement('New Today',SITE_IDENTIFIER,'showNews'),oParams)
    oParams.setParam('sUrl', URL_MOVIE_PAGE)
    oParams.setParam('mediaType', 'movie')
    oGui.addFolder(cGuiElement('Movies',SITE_IDENTIFIER,'showMovieMenu'),oParams)
    oParams.setParam('sUrl', URL_SERIE_PAGE)
    oParams.setParam('mediaType', 'series')
    oGui.addFolder(cGuiElement('Series',SITE_IDENTIFIER,'showSeriesMenu'),oParams)
    oParams.setParam('sUrl', URL_DOCU_PAGE)
    oParams.setParam('mediaType', 'documentation')
    oGui.addFolder(cGuiElement('Documentaries',SITE_IDENTIFIER,'showDocuMenu'),oParams)
    oParams.setParam('sUrl', URL_SEARCH)
    oParams.setParam('mediaType', '')
    oGui.addFolder(cGuiElement('Search',SITE_IDENTIFIER,'showSearch'),oParams)
oGui.setEndOfDirectory()
######## General
def __createMenuEntry(oGui, sFunction, sLabel, dOutputParameter):
oParams = ParameterHandler()
# Create all paramters out of lOuputParameter
try:
for param,value in dOutputParameter.items():
oParams.setParam(param, value)
except Exception, e:
logger.error("Can't add parameter to menu entry with label: %s: %s" % (sLabel, e))
#oParams = ""
# Create the gui element
oGuiElement = cGuiElement()
oGuiElement.setSiteName(SITE_IDENTIFIER)
oGuiElement.setFunction(sFunction)
oGuiElement.setTitle(sLabel)
oGui.addFolder(oGuiElement, oParams)
######## Site-specific
def showMovieMenu():
oGui = cGui()
oParams = ParameterHandler()
    oGui.addFolder(cGuiElement('Cinema Movies',SITE_IDENTIFIER,'showCinemaMovies'),oParams)
    oGui.addFolder(cGuiElement('A-Z',SITE_IDENTIFIER,'showCharacters'),oParams)
    oGui.addFolder(cGuiElement('Genres',SITE_IDENTIFIER,'showGenres'),oParams)
    oParams.setParam('sUrl', URL_FAVOURITE_MOVIE_PAGE)
    oGui.addFolder(cGuiElement('Most Popular Movies', SITE_IDENTIFIER, 'showFavItems'),oParams)
oGui.setEndOfDirectory()
def showSeriesMenu():
oGui = cGui()
oParams = ParameterHandler()
oGui.addFolder(cGuiElement('A-Z',SITE_IDENTIFIER,'showCharacters'),oParams)
#oGui.addFolder(cGuiElement('Genres',SITE_IDENTIFIER,'showGenres'),oParams)
oParams.setParam('sUrl', URL_FAVOURITE_SERIE_PAGE)
    oGui.addFolder(cGuiElement('Most Popular Series',SITE_IDENTIFIER,'showFavItems'),oParams)
    oParams.setParam('sUrl', URL_LATEST_SERIE_PAGE)
    oGui.addFolder(cGuiElement('Newest Series',SITE_IDENTIFIER,'showFavItems'),oParams)
oGui.setEndOfDirectory()
def showDocuMenu():
oGui = cGui()
oParams = ParameterHandler()
oGui.addFolder(cGuiElement('A-Z',SITE_IDENTIFIER,'showCharacters'),oParams)
#oGui.addFolder(cGuiElement('Genres',SITE_IDENTIFIER,'showGenres'),oParams)
oParams.setParam('sUrl', URL_FAVOURITE_DOCU_PAGE)
    oGui.addFolder(cGuiElement('Most Popular Documentaries',SITE_IDENTIFIER,'showFavItems'),oParams)
    oParams.setParam('sUrl', URL_LATEST_DOCU_PAGE)
    oGui.addFolder(cGuiElement('Newest Documentaries',SITE_IDENTIFIER,'showFavItems'),oParams)
oGui.setEndOfDirectory()
def __createLanguage(sLangID):
if sLangID == "1":
return 'de'
elif sLangID == "2" or sLangID == "15":
return 'en'
elif sLangID == "7":
return 'tu'
elif sLangID == "4":
return 'ch'
elif sLangID == "5":
return 'sp'
elif sLangID == "6":
return 'fr'
elif sLangID == "8":
return 'jp'
elif sLangID == '11':
return 'it'
elif sLangID == "16":
return 'nl'
elif sLangID == "25":
return 'ru'
return sLangID
def __checkSubLanguage(sTitle):
if not ' subbed*' in sTitle:
return [sTitle, '']
temp = sTitle.split(' *')
subLang = temp[-1].split('subbed*')[0].strip()
title = ' '.join(temp[0:-1]).strip()
if subLang == 'german':
return [title, 'de']
else:
return [title, subLang]
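# Illustrative examples (titles made up) of the parsing above:
#     __checkSubLanguage('Some Movie *german subbed*')  ->  ['Some Movie', 'de']
#     __checkSubLanguage('Plain Title')                 ->  ['Plain Title', '']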
def __getHtmlContent(sUrl = None, sSecurityValue = None):
oParams = ParameterHandler()
# Test if a url is available and set it
if sUrl is None and not oParams.exist('sUrl'):
logger.error("There is no url we can request.")
return False
else:
if sUrl is None:
sUrl = oParams.getValue('sUrl')
# Test if a security value is available
if sSecurityValue is None:
if oParams.exist("securityCookie"):
sSecurityValue = oParams.getValue("securityCookie")
if not sSecurityValue:
sSecurityValue = ''
# preferred language
sPrefLang = __getPreferredLanguage()
# Make the request
oRequest = cRequestHandler(sUrl)
oRequest.addHeaderEntry('Cookie', sPrefLang+sSecurityValue+'ListDisplayYears=Always;')
oRequest.addHeaderEntry('Referer', URL_MAIN)
oRequest.addHeaderEntry('Accept', '*/*')
oRequest.addHeaderEntry('Host', domain)
return oRequest.request()
def __getPreferredLanguage():
sLanguage = oConfig.getSetting('prefLanguage')
if sLanguage == '0':
sPrefLang = 'ListNeededLanguage=25%2C24%2C26%2C2%2C5%2C6%2C7%2C8%2C11%2C15%2C16%2C9%2C12%2C13%2C14%2C17%2C4'
elif sLanguage == '1':
sPrefLang = 'ListNeededLanguage=25%2C24%2C26%2C5%2C6%2C7%2C8%2C11%2C15%2C16%2C9%2C12%2C13%2C14%2C17%2C4%2C1'
else:
sPrefLang = ''
return sPrefLang
def __getSecurityCookieValue():
oRequestHandler = cRequestHandler(URL_MAIN, False)
oRequestHandler.removeNewLines(False)
oRequestHandler.removeBreakLines(False)
sHtmlContent = oRequestHandler.request()
sPattern = "var hash=\[(.*?)\]"
oParser = cParser()
aResult = oParser.parse(sHtmlContent, sPattern)
if aResult[0] == False:
logger.error("Can't find script file for cookie")
result = jsunprotect.jsunprotect(sHtmlContent)
if not result:
logger.error("Not protected or Deactivator not found")
return ''
else:
logger.info(result)
oRequestHandler = cRequestHandler(URL_MAIN+'/?'+result, False)
oRequestHandler.addHeaderEntry('Referer', URL_MAIN)
#oRequestHandler.addHeaderEntry('Accept', '*/*')
oRequestHandler.addHeaderEntry('Host', domain)
oRequestHandler.request()
return |
ufo22940268/my-tours | content/prog/tour/list_comprehension.py | Python | bsd-3-clause | 128 | 0.03125 | squares | = [x**2 for x in range(10)]
print squares
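# prints [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]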
couples = [(x, y) for x in [1,2,3] for y in [3,1,4] if x != y]
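# the x != y filter drops equal pairs, leaving
# [(1, 3), (1, 4), (2, 3), (2, 1), (2, 4), (3, 1), (3, 4)]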
print couples | |
wileeam/airflow | airflow/security/utils.py | Python | apache-2.0 | 2,840 | 0.000352 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Various security-related utils."""
import re
import socket
from airflow.utils.net import get_hostname
def get_components(principal):
"""
get_components(principal) -> (short name, instance (FQDN), realm)
``principal`` is the kerberos principal to parse.
"""
if not principal:
return None
return re.split(r'[\/@]', str(principal))
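# Illustrative example (principal hypothetical):
#     get_components("presto/fully.qualified.domain@YOUR-REALM.COM")
#     returns ['presto', 'fully.qualified.domain', 'YOUR-REALM.COM'].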
def replace_h | ostname_pattern(components, host=None):
"""Replaces hostname with the right pattern including lowercase of the name."""
fqdn = host
if not fqdn or fqdn == '0.0.0.0':
fqdn = get_hostname()
return '%s/%s@%s' % (components[0], fqdn.lower(), components[2])
def get_fqdn(hostname_or_ip=None):
"""Retrieves FQDN - hostname for the IP or hostname."""
try:
if hostname_or_ip:
| fqdn = socket.gethostbyaddr(hostname_or_ip)[0]
if fqdn == 'localhost':
fqdn = get_hostname()
else:
fqdn = get_hostname()
except OSError:
fqdn = hostname_or_ip
return fqdn
def principal_from_username(username, realm):
"""Retrieves principal from the user name and realm."""
if ('@' not in username) and realm:
username = "{}@{}".format(username, realm)
return username
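# Illustrative examples (values hypothetical):
#     principal_from_username("bob", "EXAMPLE.COM") -> "bob@EXAMPLE.COM"
# A username that already contains '@' is returned unchanged.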
|
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/pyshared/ubuntuone-client/ubuntuone/platform/windows/ipc_client.py | Python | gpl-3.0 | 23,763 | 0.00021 | # -*- coding: utf-8 -*-
#
# Authors: Manuel de la Pena <manuel@canonical.com>
# Alejandro J. Cura <alecu@canonical.com>
#
# Copyright 2011 Canonical Ltd.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Client lib to simplify the ipc client code."""
import logging
from functools import wraps
from twisted.internet import defer
from twisted.spread.pb import Referenceable, PBClientFactory
from ubuntuone.platform.windows.ipc import RemoteMeta, ipc_client_connect
logger = logging.getLogger("ubuntuone.SyncDaemon.Client")
class SyncDaemonClientError(Exception):
"""Error ocurred when trying to be a client."""
class SyncDaemonClientConnectionError(SyncDaemonClientError):
"""Error ocurrend when trying to connect."""
def remote(function):
"""Decorate the function to make the remote call."""
@wraps(function)
def remote_wrapper(*args, **kwargs):
"""Return the deferred for the remote call."""
fixed_args = args[1:]
logger.info('Performing %s as a remote call.', function.func_name)
return args[0].remote.callRemote(function.func_name, *fixed_args,
**kwargs)
return remote_wrapper
def signal(function):
"""Decorate a function to perform the signal callback."""
@wraps(function)
def callback_wrapper(*args, **kwargs):
"""Return the result of the callback if present."""
callback = getattr(args[0], function.func_name + '_cb', None)
if callback is not None:
fixed_args = args[1:]
if not kwargs:
return callback(*fixed_args)
return callback(*fixed_args, **kwargs)
return callback_wrapper
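# Sketch of the '_cb' convention implemented above (handler body hypothetical):
# a decorated method such as on_status_changed forwards its arguments to an
# on_status_changed_cb attribute when one has been set on the client instance:
#     client = StatusClient(remote_status)
#     client.on_status_changed_cb = lambda state: logger.info('state: %s', state)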
class RemoteClient(object):
"""Represent a client for remote calls."""
signal_handlers = []
def __init__(self, remote_object):
"""Create instance."""
self.remote = remote_object
def register_to_signals(self):
"""Register to the signals."""
return self.remote.callRemote('register_to_signals', self,
self.signal_handlers)
def unregister_to_signals(self):
"""Register to the signals."""
return self.remote.callRemote('unregister_to_signals', self)
class RemoteHandler(object, Referenceable):
"""Represents a handler that can be called so that is called remotely. | """
def __init__(self, cb):
"""Create a new instance."""
self.cb = cb
def remote_execute(self):
"""Execute the callback."""
if self.cb:
self.cb()
def callbacks(callbacks_indexes=None, callbacks_names=None):
" | ""Ensure that the callbacks can be remotely called."""
def decorator(function):
"""Decorate the function to make sure the callbacks can be executed."""
@wraps(function)
def callbacks_wrapper(*args, **kwargs):
"""Set the paths to be absolute."""
fixed_args = list(args)
if callbacks_indexes:
for current_cb in callbacks_indexes:
fixed_args[current_cb] = RemoteHandler(args[current_cb])
fixed_args = tuple(fixed_args)
if callbacks_names:
for current_key, current_index in callbacks_names:
try:
kwargs[current_key] = RemoteHandler(
kwargs[current_key])
except KeyError:
if len(args) >= current_index + 1:
fixed_args[current_index] = RemoteHandler(
args[current_index])
fixed_args = tuple(fixed_args)
return function(*fixed_args, **kwargs)
return callbacks_wrapper
return decorator
class StatusClient(RemoteClient, Referenceable):
"""Client used to access the status of the daemon."""
__metaclass__ = RemoteMeta
# calls that will be accessible remotely
signal_handlers = [
'on_content_queue_changed',
'on_invalid_name',
'on_broken_node',
'on_status_changed',
'on_download_started',
'on_download_file_progress',
'on_download_finished',
'on_upload_started',
'on_upload_file_progress',
'on_upload_finished',
'on_account_changed',
'on_metaqueue_changed',
]
def __init__(self, remote_status):
"""Creates the instance."""
super(StatusClient, self).__init__(remote_status)
@remote
def current_status(self):
"""Return the current status of the system, one of: local_rescan,
offline, trying_to_connect, server_rescan or online.
"""
@remote
def current_downloads(self):
"""Return a list of files with a download in progress."""
@remote
def waiting(self):
"""Return a list of the operations in action queue."""
@remote
def waiting_metadata(self):
"""Return a list of the operations in the meta-queue.
As we don't have meta-queue anymore, this is faked.
"""
@remote
def waiting_content(self):
"""Return a list of files that are waiting to be up- or downloaded.
As we don't have content-queue anymore, this is faked.
"""
@remote
def current_uploads(self):
""" return a list of files with a upload in progress """
@signal
def on_content_queue_changed(self):
"""Emit ContentQueueChanged."""
@signal
def on_invalid_name(self, dirname, filename):
"""Emit InvalidName."""
@signal
def on_broken_node(self, volume_id, node_id, mdid, path):
"""Emit BrokenNode."""
@signal
def on_status_changed(self, state):
"""Emit StatusChanged."""
@signal
def on_download_started(self, download):
"""Emit DownloadStarted."""
@signal
def on_download_file_progress(self, download, **info):
"""Emit DownloadFileProgress."""
@signal
def on_download_finished(self, download, **info):
"""Emit DownloadFinished."""
@signal
def on_upload_started(self, upload):
"""Emit UploadStarted."""
@signal
def on_upload_file_progress(self, upload, **info):
"""Emit UploadFileProgress."""
@signal
def on_upload_finished(self, upload, **info):
"""Emit UploadFinished."""
@signal
def on_account_changed(self, account_info):
"""Emit AccountChanged."""
@signal
def on_metaqueue_changed(self):
"""Emit MetaQueueChanged."""
class EventsClient(RemoteClient, Referenceable):
"""Client use to access the status api."""
__metaclass__ = RemoteMeta
# calls that will be accessible remotely
signal_handlers = ['on_event',]
def __init__(self, remote_events):
"""Creates the instance."""
super(EventsClient, self).__init__(remote_events)
@remote
def push_event(self, event_name, args):
"""Push an event to the event queue."""
@signal
def on_event(self, event):
"""Emit on event."""
class SyncDaemonClient(RemoteClient, Referenceable):
"""The Daemon ipc interface client."""
__metaclass__ = RemoteMeta
# calls that will be accessible remotely
signal_handlers = ['on_root_mismatch', 'on_quota_exceeded']
def __init__(self, remote_daemon):
"""Creates the instance."""
super(SyncDaemonClient, self).__init__(remote_daemon)
@remote
def connect(self):
"""Connect to the server."""
@remote
def disconnect(self):
|
concordusapps/alchemist | alchemist/db/operations/shell.py | Python | mit | 1,145 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, division
from .. import engine
import os
def shell(database='default'):
"""Runs the command-line client for the specified database.
"""
target = engine[database]
dialect = engine[database].dialect.name
if dialect == 'mysql':
args = ['mysql']
if target.url.username:
args += ['--user=%s' % target.url.username]
if target.url.password:
args += ['--password=%s' % t | arget.url.password]
if 'unix_socket' in target.url.query:
args += ["--socket=%s" % target.url.query['unix_socket']]
elif target.url.host:
args += ["--host=%s" % target.url.host]
if target.url.port:
| args += ["--port=%s" % target.url.port]
if target.url.database:
args += [target.url.database]
elif dialect == 'sqlite':
args = ['sqlite3', target.url.database]
else: # pragma: nocoverage
raise RuntimeError(
'Database shell not available for the dialect %r' % dialect)
os.execvp(args[0], args)
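# Illustrative example (URL hypothetical): for an engine configured as
# mysql://user:pw@db.example.com:3306/appdb the exec'd command line becomes:
#     mysql --user=user --password=pw --host=db.example.com --port=3306 appdb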
|
mchristopher/PokemonGo-DesktopMap | app/pylibs/win32/Cryptodome/Hash/keccak.py | Python | mit | 8,329 | 0.001201 | # ===================================================================
#
# Copyright (c) 2015, Legrandin <helderijs@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ===================================================================
"""Keccak family of cryptographic hash algorithms.
`Keccak`_ is the winning algorithm of the SHA-3 competition organized by NIST.
What eventually became SHA-3 is a variant incompatible with Keccak,
even though the security principles and margins remain the same.
If you are interested in writing SHA-3 compliant code, you must use
the modules ``SHA3_224``, ``SHA3_256``, ``SHA3_384`` or ``SHA3_512``.
This module implements the Keccak hash functions for the 64 bit word
length (b=1600) and the fixed digest sizes of 224, 256, 384 and 512 bits.
>>> from Cryptodome.Hash import keccak
>>>
>>> keccak_hash = keccak.new(digest_bits=512)
>>> keccak_hash.update(b'Some data')
>>> print keccak_hash.hexdigest()
.. _Keccak: http://www.keccak.noekeon.org/Keccak-specifications.pdf
"""
from Cryptodome.Util.py3compat import bord
from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib,
VoidPointer, SmartPointer,
create_string_buffer,
get_raw_buffer, c_size_t,
expect_byte_string)
_raw_keccak_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._keccak",
"""
int keccak_init(void **state,
size_t capacity_bytes,
uint8_t padding_byte);
int keccak_destroy(void *state);
int keccak_absorb(void *state,
const uint8_t *in,
size_t len);
int keccak_squeeze(const void *state,
uint8_t *out,
size_t len);
int keccak_digest(void *state, uint8_t *digest, size_t len);
""")
class Keccak_Hash(object):
"""Class that implements a Keccak hash
"""
def __init__(self, data, digest_bytes, update_after_digest):
#: The size of the resulting hash in bytes.
self.digest_size = digest_bytes
self._update_after_digest = update_after_digest
self._digest_done = False
state = VoidPointer()
result = _raw_keccak_lib.keccak_init(state.address_of(),
c_size_t(self.digest_size * 2),
0x01)
if result:
raise ValueError("Error %d while instantiating keccak" % result)
self._state = SmartPointer(state.get(),
_raw_keccak_lib.keccak_destroy)
if data:
self.update(data)
def update(self, data):
"""Continue hashing of a message by consuming the next chunk of data.
Repeated calls are equivalent to a single call with the concatenation
of all the arguments. In other words:
>>> m.update(a); m.update(b)
is equivalent to:
>>> m.update(a+b)
:Parameters:
data : byte string
The next chunk of the message being hashed.
"""
if self._digest_done and not self._update_after_digest:
raise TypeError("You can only call 'digest' or 'hexdigest' on this object")
expect_byte_string(data)
result = _raw_keccak_lib.keccak_absorb(self._state.get(),
data,
c_size_t(len(data)))
if result:
raise ValueError("Error %d while updating keccak" % result)
return self
def digest(self):
"""Return the **binary** (non-printable) digest of the message that has been hashed so far.
You cannot update the hash anymore after the first call to ``digest``
(or ``hexdigest``).
:Return: A byte string of `digest_size` bytes.
It may contain non-ASCII characters, including null bytes.
"""
self._digest_done = True
bfr = create_string_buffer(self.digest_size)
result = _raw_keccak_lib.keccak_digest(self._state.get(),
bfr,
c_size_t(self.digest_size))
if result:
raise ValueError("Error %d while squeezing keccak" % result)
return get_raw_buffer(bfr)
def hexdigest(self):
"""Return the **printable** digest of the message that has been hashed so far.
This method does not change the state of the hash object.
:Return: A string of 2* `digest_size` characters. It contains only
hexadecimal ASCII digits.
"""
return "".join(["%02x" % bord(x) for x in self.digest()])
def new(self, **kwargs):
if "digest_bytes" not in kwargs and "digest_bits" not in kwargs:
kwargs["digest_bytes"] = self.digest_size
return new(**kwargs)
def new(**kwargs):
"""Return a fresh instance of the hash object.
:Keywords:
data : byte string
Optional. The very first chunk of the message to hash.
It is equivalent to an early call to ``update()``.
digest_bytes : integer
The size of the digest, in bytes (28, 32, 48, 64).
digest_bits : integer
The size of the digest, in bits (224, 256, 384, 512).
update_after_digest : boolean
Optional. By default, a hash object cannot be updated anymore after
the digest is computed. When this flag is ``True``, such check
is no longer enforced.
:Return: A `Keccak_Hash` object
"""
data = kwargs.pop("data", None)
update_after_digest = kwargs.pop("update_after_digest", False)
digest_bytes = kwargs.pop("digest_bytes", None)
digest_bits = kwargs.pop("digest_ | bits", None)
if None not in (digest_bytes, digest_bits):
raise TypeError("Only one digest parameter must be provided")
if (None, None) == (digest_bytes, digest_bits):
raise TypeError("Digest size (bits, bytes) not provided")
if digest_bytes is not None:
if digest_bytes not in (28, 32, 48, 64):
raise ValueError("'digest_bytes' must be: 28, 32, 48 or 64")
else:
if digest_bits | not in (224, 256, 384, 512):
raise ValueError("'digest_bytes' must be: 224, 256, 384 or 512")
digest_bytes = digest_bits // 8
|
jaidevd/scikit-learn | sklearn/neighbors/base.py | Python | bsd-3-clause | 30,649 | 0 | """Base and mixin classes for nearest neighbors"""
# Authors: Jake Vanderplas <vanderplas@astro.washington.edu>
# Fabian Pedregosa <fabian.pedregosa@inria.fr>
# Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Sparseness support by Lars Buitinck
# Multi-output support by Arnaud Joly <a.joly@ulg.ac.be>
#
# License: BSD 3 clause (C) INRIA, University of Amsterdam
import warnings
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy.sparse import csr_matrix, issparse
from .ball_tree import BallTree
from .kd_tree import KDTree
from ..base import BaseEstimator
from ..metrics import pairwise_distances
from ..metrics.pairwise import PAIRWISE_DISTANCE_FUNCTIONS
from ..utils import check_X_y, check_array, _get_n_jobs, gen_even_slices
from ..utils.fixes import argpartition
from ..utils.multiclas | s import check_classification_targets
from ..externals import six
from ..externals.joblib import Parallel, delayed
from ..exceptions import NotFittedError
from ..exce | ptions import DataConversionWarning
VALID_METRICS = dict(ball_tree=BallTree.valid_metrics,
kd_tree=KDTree.valid_metrics,
# The following list comes from the
# sklearn.metrics.pairwise doc string
brute=(list(PAIRWISE_DISTANCE_FUNCTIONS.keys()) +
['braycurtis', 'canberra', 'chebyshev',
'correlation', 'cosine', 'dice', 'hamming',
'jaccard', 'kulsinski', 'mahalanobis',
'matching', 'minkowski', 'rogerstanimoto',
'russellrao', 'seuclidean', 'sokalmichener',
'sokalsneath', 'sqeuclidean',
'yule', 'wminkowski']))
VALID_METRICS_SPARSE = dict(ball_tree=[],
kd_tree=[],
brute=PAIRWISE_DISTANCE_FUNCTIONS.keys())
def _check_weights(weights):
"""Check to make sure weights are valid"""
if weights in (None, 'uniform', 'distance'):
return weights
elif callable(weights):
return weights
else:
raise ValueError("weights not recognized: should be 'uniform', "
"'distance', or a callable function")
def _get_weights(dist, weights):
"""Get the weights from an array of distances and a parameter ``weights``
Parameters
===========
dist : ndarray
The input distances
weights : {'uniform', 'distance' or a callable}
The kind of weighting used
Returns
========
weights_arr : array of the same shape as ``dist``
if ``weights == 'uniform'``, then returns None
"""
if weights in (None, 'uniform'):
return None
elif weights == 'distance':
# if user attempts to classify a point that was zero distance from one
# or more training points, those training points are weighted as 1.0
# and the other points as 0.0
if dist.dtype is np.dtype(object):
for point_dist_i, point_dist in enumerate(dist):
# check if point_dist is iterable
# (ex: RadiusNeighborClassifier.predict may set an element of
# dist to 1e-6 to represent an 'outlier')
if hasattr(point_dist, '__contains__') and 0. in point_dist:
dist[point_dist_i] = point_dist == 0.
else:
dist[point_dist_i] = 1. / point_dist
else:
with np.errstate(divide='ignore'):
dist = 1. / dist
inf_mask = np.isinf(dist)
inf_row = np.any(inf_mask, axis=1)
dist[inf_row] = inf_mask[inf_row]
return dist
elif callable(weights):
return weights(dist)
else:
raise ValueError("weights not recognized: should be 'uniform', "
"'distance', or a callable function")
class NeighborsBase(six.with_metaclass(ABCMeta, BaseEstimator)):
"""Base class for nearest neighbors estimators."""
@abstractmethod
def __init__(self):
pass
def _init_params(self, n_neighbors=None, radius=None,
algorithm='auto', leaf_size=30, metric='minkowski',
p=2, metric_params=None, n_jobs=1):
self.n_neighbors = n_neighbors
self.radius = radius
self.algorithm = algorithm
self.leaf_size = leaf_size
self.metric = metric
self.metric_params = metric_params
self.p = p
self.n_jobs = n_jobs
if algorithm not in ['auto', 'brute',
'kd_tree', 'ball_tree']:
raise ValueError("unrecognized algorithm: '%s'" % algorithm)
if algorithm == 'auto':
if metric == 'precomputed':
alg_check = 'brute'
else:
alg_check = 'ball_tree'
else:
alg_check = algorithm
if callable(metric):
if algorithm == 'kd_tree':
# callable metric is only valid for brute force and ball_tree
raise ValueError(
"kd_tree algorithm does not support callable metric '%s'"
% metric)
elif metric not in VALID_METRICS[alg_check]:
raise ValueError("Metric '%s' not valid for algorithm '%s'"
% (metric, algorithm))
if self.metric_params is not None and 'p' in self.metric_params:
warnings.warn("Parameter p is found in metric_params. "
"The corresponding parameter from __init__ "
"is ignored.", SyntaxWarning, stacklevel=3)
effective_p = metric_params['p']
else:
effective_p = self.p
if self.metric in ['wminkowski', 'minkowski'] and effective_p < 1:
raise ValueError("p must be greater than one for minkowski metric")
self._fit_X = None
self._tree = None
self._fit_method = None
def _fit(self, X):
if self.metric_params is None:
self.effective_metric_params_ = {}
else:
self.effective_metric_params_ = self.metric_params.copy()
effective_p = self.effective_metric_params_.get('p', self.p)
if self.metric in ['wminkowski', 'minkowski']:
self.effective_metric_params_['p'] = effective_p
self.effective_metric_ = self.metric
# For minkowski distance, use more efficient methods where available
if self.metric == 'minkowski':
p = self.effective_metric_params_.pop('p', 2)
if p < 1:
raise ValueError("p must be greater than one "
"for minkowski metric")
elif p == 1:
self.effective_metric_ = 'manhattan'
elif p == 2:
self.effective_metric_ = 'euclidean'
elif p == np.inf:
self.effective_metric_ = 'chebyshev'
else:
self.effective_metric_params_['p'] = p
if isinstance(X, NeighborsBase):
self._fit_X = X._fit_X
self._tree = X._tree
self._fit_method = X._fit_method
return self
elif isinstance(X, BallTree):
self._fit_X = X.data
self._tree = X
self._fit_method = 'ball_tree'
return self
elif isinstance(X, KDTree):
self._fit_X = X.data
self._tree = X
self._fit_method = 'kd_tree'
return self
X = check_array(X, accept_sparse='csr')
n_samples = X.shape[0]
if n_samples == 0:
raise ValueError("n_samples must be greater than 0")
if issparse(X):
if self.algorithm not in ('auto', 'brute'):
warnings.warn("cannot use tree with sparse input: "
"using brute force")
if self.effective_metric_ not in VALID_METRICS_SPARSE['brute']:
raise ValueError("metric '%s' not valid for sparse input"
|
YuhanLin1105/Multi-User-Blog | utils/helper_cookie.py | Python | mit | 368 | 0 | import hmac
class Helper_cookie:
secret = 'This is a secret'
| @classmethod
    def make_secure_val(cls, val):
        return '%s|%s' % (val, hmac.new(cls.secret, val).hexdigest())
@classmethod
    def check_secure_val(cls, secure_val):
val = secure_val.split('|')[0]
        if secure_val == cl | s.make_secure_val(val):
return val
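    # Usage sketch: a signed cookie value round-trips; a tampered one falls
    # through and implicitly returns None.
    # >>> signed = Helper_cookie.make_secure_val('42')   # '42|<hmac hexdigest>'
    # >>> Helper_cookie.check_secure_val(signed)
    # '42'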
|
chrischambers/django-calendartools | test_project/event/tests/test_managers.py | Python | bsd-3-clause | 8,416 | 0.001188 | from datetime import datetime, timedelta
from django.test import TestCase
from django.contrib.auth.models import User
from nose.tools import *
from event.models import Calendar, Event, Occurrence, Attendance
class TestCommonManager(TestCase):
def setUp(self):
self.user = User.objects.create(username='TestyMcTesterson')
self.calendar = Calendar.objects.create(name='Basic', slug='basic')
self.events = []
for status, label in Event.STATUS:
self.events.append(Event.objects.create(
name='Event',
slug='%s-event' % label.lower(),
creator=self.user,
status=status
))
self.start = datetime.now() + timedelta(minutes=30)
self.finish = self.start + timedelta(hours=2)
self.occurrences = []
for status, label in Occurrence.STATUS:
self.occurrences.append(Occurrence.objects.create(
event=self.events[0], start=self.start, finish=self.finish,
status=status, calendar=self.calendar
))
self.model = Calendar
def _test_status_properties(self, prop, status):
assert_equal(
set(getattr(self.model.objects, prop)),
set(self.model.objects.filter(status=status))
)
def test_inactive_property(self):
self._test_status_properties('inactive', self.model.STATUS.inactive)
def test_hidden_property(self):
self._test_status_properties('hidden', self.model.STATUS.hidden)
def test_cancelled_property(self):
self._test_status_properties('cancelled', self.model.STATUS.cancelled)
def test_published_property(self):
self._test_status_properties('published', self.model.STATUS.published)
def test_visible_method(self):
assert_equal(
set(self.model.objects.visible()),
set(self.model.objects.exclude(status__in=self.model.objects.hidden_statuses))
)
assert_equal(
set(self.model.objects.visible(user=self.user)),
set(self.model.objects.exclude(status__in=self.model.objects.hidden_statuses))
)
self.user.is_staff = True
self.user.save()
assert_equal(
set(self.model.objects.visible(user=self.user)),
set(self.model.objects.exclude(status__in=self.model.objects.hidden_statuses_for_admins))
)
self.user.is_superuser = True
self.user.is_staff = False
self.user.save()
assert_equal(
set(self.model.objects.visible(user=self.user)),
set(self.model.objects.exclude(status__in=self.model.objects.hidden_statuses_for_admins))
)
class TestCalendarManager(TestCommonManager):
def setUp(self):
super(TestCalendarManager, self).setUp()
class TestEventManager(TestCommonManager):
def setUp(self):
super(TestEventManager, self).setUp()
self.model = Event
class TestOccurrenceManager(TestCommonManager):
def setUp(self):
super(TestOccurrenceManager, self).setUp()
self.model = Occurrence
self.event = self.occurrences[0].event
def test_visible_with_hidden_eve | nt(self):
self.event.status = Eve | nt.STATUS.hidden
self.event.save()
assert_equal(
set(Occurrence.objects.visible()),
set(Occurrence.objects.none())
)
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.none())
)
self.user.is_staff = True
self.user.save()
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.exclude(status__in=Occurrence.objects.hidden_statuses_for_admins))
)
self.user.is_superuser = True
self.user.is_staff = False
self.user.save()
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.exclude(status__in=Occurrence.objects.hidden_statuses_for_admins))
)
def test_visible_with_inactive_event(self):
self.event.status = Event.STATUS.inactive
self.event.save()
assert_equal(
set(Occurrence.objects.visible()),
set(Occurrence.objects.none())
)
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.none())
)
self.user.is_staff = True
self.user.save()
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.none())
)
self.user.is_superuser = True
self.user.is_staff = False
self.user.save()
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.none())
)
def test_visible_with_hidden_calendar(self):
self.calendar.status = Calendar.STATUS.hidden
self.calendar.save()
assert_equal(
set(Occurrence.objects.visible()),
set(Occurrence.objects.none())
)
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.none())
)
self.user.is_staff = True
self.user.save()
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.exclude(status__in=Occurrence.objects.hidden_statuses_for_admins))
)
self.user.is_superuser = True
self.user.is_staff = False
self.user.save()
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.exclude(status__in=Occurrence.objects.hidden_statuses_for_admins))
)
def test_visible_with_inactive_calendar(self):
self.calendar.status = Calendar.STATUS.inactive
self.calendar.save()
assert_equal(
set(Occurrence.objects.visible()),
set(Occurrence.objects.none())
)
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.none())
)
self.user.is_staff = True
self.user.save()
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.none())
)
self.user.is_superuser = True
self.user.is_staff = False
self.user.save()
assert_equal(
set(Occurrence.objects.visible(self.user)),
set(Occurrence.objects.none())
)
class TestAttendanceManager(TestCase):
def setUp(self):
self.user = User.objects.create(username='TestyMcTesterson')
self.calendar = Calendar.objects.create(name='Basic', slug='basic')
self.events = []
for status, label in Event.STATUS:
self.events.append(Event.objects.create(
name='Event',
slug='%s-event' % label.lower(),
creator=self.user,
status=status
))
self.start = datetime.now() + timedelta(minutes=30)
self.finish = self.start + timedelta(hours=2)
self.occurrences = []
for status, label in Occurrence.STATUS:
self.occurrences.append(Occurrence.objects.create(
event=self.events[0], start=self.start, finish=self.finish,
status=status, calendar=self.calendar
))
self.model = Calendar
self.attendance = Attendance.objects.create(
user=self.user,
            occurrence=self.occurrences[0],
status=Attendance.STATUS.booked,
)
def test_active_manager_property(self):
assert_equal(
set(Attendance.objects.active),
set(Attendance.objects.all())
)
for i, state in enumerate(Attendance.objects.inactive_statuses):
Attendance.objects.create(
user=User.objects.create(username='Test%s' % i),
occurrence=self.occurrences[0],
status=state
)
assert_not_equal(
|
huggingface/transformers | src/transformers/data/data_collator.py | Python | apache-2.0 | 76,850 | 0.004608 | # Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import warnings
from dataclasses import dataclass
from typing import Any, Callable, Dict, List, NewType, Optional, Tuple, Union
from ..file_utils import PaddingStrategy
from ..models.bert import BertTokenizer, BertTokenizerFast
from ..tokenization_utils_base import BatchEncoding, PreTrainedTokenizerBase
InputDataClass = NewType("InputDataClass", Any)
"""
A DataCollator is a function that takes a list of samples from a Dataset and collates them into a batch, as a dictionary
of PyTorch/TensorFlow tensors or NumPy arrays.
"""
DataCollator = NewType("DataCollator", Callable[[List[InputDataClass]], Dict[str, Any]])
class DataCollatorMixin:
def __call__(self, features, return_tensors=None):
if return_tensors is None:
return_tensors = self.return_tensors
if return_tensors == "tf":
return self.tf_call(features)
elif return_tensors == "pt":
return self.torch_call(features)
elif return_tensors == "np":
return self.numpy_call(features)
else:
raise ValueError(f"Framework '{return_tensors}' not recognized!")
def default_data_collator(features: List[InputDataClass], return_tensors="pt") -> Dict[str, Any]:
"""
Very simple data collator that simply collates batches of dict-like objects and performs special handling for
potential keys named:
- `label`: handles a single value (int or float) per object
- `label_ids`: handles a list of values per object
Does not do any additional preprocessing: property names of the input object will be used as corresponding inputs
    to the model. See glue and ner for examples of how it's useful.
"""
# In this function we'll make the assumption that all `features` in the batch
# have the same attributes.
# So we will look at the first element as a proxy for what attributes exist
# on the whole batch.
if return_tensors == "pt":
return torch_default_data_collator(features)
elif return_tensors == "tf":
return tf_default_data_collator(features)
elif return_tensors == "np":
return numpy_default_data_collator(features)
@dataclass
class DefaultDataCollator(DataCollatorMixin):
"""
Very simple data collator that simply collates batches of dict-like objects and performs special handling for
potential keys named:
- `label`: handles a single value (int or float) per object
- `label_ids`: handles a list of values per object
Does not do any additional preprocessing: property names of the input object will be used as corresponding inputs
    to the model. See glue and ner for examples of how it's useful.
This is an object (like other data collators) rather than a pure function like default_data_collator. This can be
helpful if you need to set a return_tensors value at initialization.
Args:
return_tensors (`str`):
The type of Tensor to return. Allowable values are "np", "pt" and "tf".
"""
return_tensors: str = "pt"
def __call__(self, features: List[Dict[str, Any]], return_tensors=None) -> Dict[str, Any]:
if return_tensors is None:
return_tensors = self.return_tensors
return default_data_collator(features, return_tensors)
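# Illustrative usage with hypothetical feature dicts (numpy backend shown;
# see numpy_default_data_collator below for the exact dtype rules):
# >>> collator = DefaultDataCollator(return_tensors="np")
# >>> batch = collator([{"input_ids": [0, 1], "label": 0},
# ...                   {"input_ids": [2, 3], "label": 1}])
# >>> batch["labels"]
# array([0, 1])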
def torch_default_data_collator(features: List[InputDataClass]) -> Dict[str, Any]:
import torch
if not isinstance(features[0], (dict, BatchEncoding)):
features = [vars(f) for f in features]
first = features[0]
batch = {}
# Special handling for labels.
# Ensure that tensor is created with the correct type
# (it should be automatically the case, but let's make sure of it.)
if "label" in first and first["label"] is not None:
label = first["label"].item() if isinstance(first["label"], torch.Tensor) else first["label"]
dtype = torch.long if isinstance(label, int) else torch.float
| batch["labels"] = torch.tensor([f["label"] for f in features], dtype=dtype)
elif "label_ids" in first and first["label_ids"] is not None:
if isinstance(first["label_ids"], torch.Tensor):
| batch["labels"] = torch.stack([f["label_ids"] for f in features])
else:
dtype = torch.long if type(first["label_ids"][0]) is int else torch.float
batch["labels"] = torch.tensor([f["label_ids"] for f in features], dtype=dtype)
# Handling of all other possible keys.
# Again, we will use the first element to figure out which key/values are not None for this model.
for k, v in first.items():
if k not in ("label", "label_ids") and v is not None and not isinstance(v, str):
if isinstance(v, torch.Tensor):
batch[k] = torch.stack([f[k] for f in features])
else:
batch[k] = torch.tensor([f[k] for f in features])
return batch
def tf_default_data_collator(features: List[InputDataClass]) -> Dict[str, Any]:
import numpy as np
import tensorflow as tf
if not isinstance(features[0], (dict, BatchEncoding)):
features = [vars(f) for f in features]
first = features[0]
batch = {}
# Special handling for labels.
# Ensure that tensor is created with the correct type
# (it should be automatically the case, but let's make sure of it.)
if "label" in first and first["label"] is not None:
label_col_name = "label"
elif "label_ids" in first and first["label_ids"] is not None:
label_col_name = "label_ids"
elif "labels" in first and first["labels"] is not None:
label_col_name = "labels"
else:
label_col_name = None
if label_col_name is not None:
if isinstance(first[label_col_name], tf.Tensor):
            dtype = tf.int64 if first[label_col_name].dtype.is_integer else tf.float32
elif isinstance(first[label_col_name], np.ndarray) or isinstance(first[label_col_name], np.generic):
dtype = tf.int64 if np.issubdtype(first[label_col_name].dtype, np.integer) else tf.float32
elif isinstance(first[label_col_name], (tuple, list)):
dtype = tf.int64 if isinstance(first[label_col_name][0], int) else tf.float32
else:
dtype = tf.int64 if isinstance(first[label_col_name], int) else tf.float32
batch["labels"] = tf.convert_to_tensor([f[label_col_name] for f in features], dtype=dtype)
# Handling of all other possible keys.
# Again, we will use the first element to figure out which key/values are not None for this model.
for k, v in first.items():
if k not in ("label", "label_ids", "labels") and v is not None and not isinstance(v, str):
if isinstance(v, (tf.Tensor, np.ndarray)):
batch[k] = tf.stack([f[k] for f in features])
else:
batch[k] = tf.convert_to_tensor([f[k] for f in features])
return batch
def numpy_default_data_collator(features: List[InputDataClass]) -> Dict[str, Any]:
import numpy as np
if not isinstance(features[0], (dict, BatchEncoding)):
features = [vars(f) for f in features]
first = features[0]
batch = {}
# Special handling for labels.
# Ensure that tensor is created with the correct type
# (it should be automatically the case, but let's make sure of it.)
if "label" in first and first["label"] is not None:
label = first["label"].item() if isinstance(first["label"], np.ndarray) else first["label"]
dtype = np.int64 if isinstance(label, int) else np.float32
bat |
uehara1414/serverctl-prototype | serverctl_prototype/settings.py | Python | mit | 3,666 | 0.001091 | """
Django settings for serverctl_prototype project.
Generated by 'django-admin startproject' using Django 1.10.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '930m&u3@mu-35a8)hk9^!oa3$)lhbp#1kk4%xxryn9zr!ku=we'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework_swagger',
'serverctl'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'serverctl_prototype.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'serverctl_prototype.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': os.environ['POSTGRES_DB'],
'USER': os.environ['POSTGRES_USER'], |
'PASSWORD': os.environ['POSTGRES_PASSWORD'],
'HOST': 'postgres',
'PORT': '5432',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contr | ib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
]
}
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'ja'
TIME_ZONE = 'Asia/Tokyo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOGIN_URL = '/login/'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (BASE_DIR, )
|
GrimRanger/GeneticAlgorithm | GeneticLib/genetic_engine.py | Python | mit | 512 | 0.011719 |
class GeneticEngine:
genomLength = 10
generationCount = 10
individualCount = 10
selectionType = 10
crossingType = 10
useMutation = 1
mutationPercent = 50
"""constructor"""
def __init__(self, fitnessFunction):
return 0
"""main body"""
def run():
return 0
def generateFirstGeneration():
return 0
|
    def selection(self):
        return 0
    def crossing(self | ):
        return 0
    def mutation(self):
        return 0
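    # Usage sketch for the stub above (the fitness function is a placeholder;
    # run() currently returns 0 until the evolution loop is implemented):
    # engine = GeneticEngine(lambda genome: sum(genome))
    # best = engine.run()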
|
nicproulx/mne-python | mne/viz/tests/test_raw.py | Python | bsd-3-clause | 11,805 | 0.000085 | # Authors: Eric Larson <larson.eric.d@gmail.com>
#
# License: Simplified BSD
import numpy as np
import os.path as op
import warnings
from numpy.testing import assert_raises, assert_equal
from mne import read_events, pick_types, Annotations
from mne.io import read_raw_fif
from mne.utils import requires_version, run_tests_if_main
from mne.viz.utils import _fake_click, _annotation_radio_clicked
from mne.viz import plot_raw, plot_sensors
# Set our plotters to test mode
import matplotlib
matplotlib.use('Agg') # for testing don't use X server
warnings.simplefilter('always') # enable b/c these tests throw warnings
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(base_dir, 'test_raw.fif')
event_name = op.join(base_dir, 'test-eve.fif')
def _get_raw():
"""Get raw data."""
raw = read_raw_fif(raw_fname, preload=True)
# Throws a warning about a changed unit.
with warnings.catch_warnings(record=True):
raw.set_channel_types({raw.ch_names[0]: 'ias'})
raw.pick_channels(raw.ch_names[:9])
raw.info.normalize_proj() # Fix projectors after subselection
return raw
def _get_events():
"""Get events."""
return read_events(event_name)
def _annotation_helper(raw):
"""Helper for testing interactive annotations."""
import matplotlib.pyplot as plt
n_anns = 0 if raw.annotations is None else len(raw.annotations.onset)
fig = raw.plot()
data_ax = fig.axes[0]
fig.canvas.key_press_event('a') # annotation mode
# modify description
ann_fig = plt.gcf()
for key in ' test':
ann_fig.canvas.key_press_event(key)
ann_fig.canvas.key_press_event('enter')
ann_fig = plt.gcf()
# XXX: _fake_click raises an error on Agg backend
_annotation_radio_clicked('', ann_fig.radio, data_ax.selector)
# draw annotation
_fake_click(fig, data_ax, [1., 1.], xform='data', button=1, kind='press')
_fake_click(fig, data_ax, [5., 1.], xform='data', button=1, kind='motion')
_fake_click(fig, data_ax, [5., 1.], xform='data', button=1, kind='release')
# hover event
_fake_click(fig, data_ax, [4.5, 1.], xform='data', button=None,
kind='motion')
_fake_click(fig, data_ax, [4.7, 1.], xform='data', button=None,
kind='motion')
# modify annotation from end
_fake_click(fig, data_ax, [5., 1.], xform='data', button=1, kind='press')
_fake_click(fig, data_ax, [2.5, 1.], xform='data', button=1, kind='motion')
_fake_click(fig, data_ax, [2.5, 1.], xform='data', button=1,
kind='release')
# modify annotation from beginning
_fake_click(fig, data_ax, [1., 1.], xform='data', button=1, kind='press')
_fake_click(fig, data_ax, [1.1, 1.], xform='data', button=1, kind='motion')
_fake_click(fig, data_ax, [1.1, 1.], xform='data', button=1,
kind='release')
assert_equal(len(raw.annotations.onset), n_anns + 1)
assert_equal(len(raw.annotations.duration), n_anns + 1)
assert_equal(len(raw.annotations.description), n_anns + 1)
assert_equal(raw.annotations.description[n_anns], 'BAD test')
# draw another annotation merging the two
_fake_click(fig, data_ax, [5.5, 1.], xform='data', button=1, kind='press')
_fake_click(fig, data_ax, [2., 1.], xform='data', button=1, kind='motion')
_fake_click(fig, data_ax, [2., 1.], xform='data', button=1, kind='release')
# delete the annotation
_fake_click(fig, data_ax, [1.5, 1.], xform='data', button=3, kind='press')
fig.canvas.key_press_event('a') # exit annotation mode
assert_equal(len(raw.annotations.onset), n_anns)
assert_equal(len(raw.annotations.duration), n_anns)
assert_equal(len(raw.annotations.description), n_anns)
plt.close('all')
def test_plot_raw():
"""Test plotting of raw data."""
import matplotlib.pyplot as plt
raw = _get_raw()
events = _get_events()
plt.close('all') # ensure all are closed
with warnings.catch_warnings(record=True):
fig = raw.plot(events=events, show_options=True)
# test mouse clicks
x = fig.get_axes()[0].lines[1].get_xdata().mean()
y = fig.get_axes()[0].lines[1].get_ydata().mean()
data_ax = fig.axes[0]
_fake_click(fig, data_ax, [x, y], xform='data') # mark a bad channel
_fake_click(fig, data_ax, [x, y], xform='data') # unmark a bad channel
_fake_click(fig, data_ax, [0.5, 0.999]) # click elsewhere in 1st axes
_fake_click(fig, data_ax, [-0.1, 0.9]) # click on y-label
_fake_click(fig, fig.get_axes()[1], [0.5, 0.5]) # change time
_fake_click(fig, fig.get_axes()[2], [0.5, 0.5]) # change channels
_fake_click(fig, fig.get_axes()[3], [0.5, 0.5]) # open SSP window
fig.canvas.button_press_event(1, 1, 1) # outside any axes
fig.canvas.scroll_event(0.5, 0.5, -0.5) # scroll down
fig.canvas.scroll_event(0.5, 0.5, 0.5) # scroll up
# sadly these fail when no renderer is used (i.e., when using Agg):
# ssp_fig = set(plt.get_fignums()) - set([fig.number])
# assert_equal(len(ssp_fig), 1)
# ssp_fig = plt.figure(list(ssp_fig)[0])
# ax = ssp_fig.get_axes()[0] # only one axis is used
# t = [c for c in ax.get_children() if isinstance(c,
# matplotlib.text.Text)]
# pos = np.array(t[0].get_position()) + 0.01
# _fake_click(ssp_fig, ssp_fig.get_axes()[0], pos, xform='data') # off
# _fake_click(ssp_fig, ssp_fig.get_axes()[0], pos, xform='data') # on
# test keypresses
for key in ['down', 'up', 'right', 'left', 'o', '-', '+', '=',
'pageup', 'pagedown', 'home', 'end', '?', 'f11', 'escape']:
fig.canvas.key_press_event(key)
# Color setting
assert_raises(KeyError, raw.plot, event_color={0: 'r'})
assert_raises(TypeError, raw.plot, event_color={'foo': 'r'})
annot = Annotations([10, 10 + raw.first_samp / raw.info['sfreq']],
[10, 10], ['test', 'test'], raw.info['meas_date'])
raw.annotations = annot
fig = plot_raw(raw, events=events, event_color={-1: 'r', 998: 'b'})
plt.close('all')
for order in ['position', 'selection', range(len(raw.ch_names))[::-4],
[1, 2, 4, 6]]:
fig = raw.plot(order=order)
x = fig.get_axes()[0].lines[1].get_xdata()[10]
y = fig.get_axes()[0].lines[1].get_ydata()[10]
_fake_click(fig, data_ax, [x, y], xform='data') # mark bad
fig.canvas.key_press_event('down') # change selection
_fake_click(fig, fig.get_axes()[2], [0.5, 0.5]) # change channels
if order in ('position', 'selection'):
sel_fig = plt.figure(1)
topo_ax = sel_fig.axes[1]
_fake_click(sel_fig, topo_ax, [-0.425, 0.20223853],
xform='data')
fig.canvas.key_press_event('down')
fig.canvas.key_press_event('up')
fig.canvas.scroll_event(0.5, 0.5, -1) # scroll down
fig.canvas.scroll_event(0.5, 0.5, 1) # scroll up
_fake_click(sel_fig, topo_ax, [-0.5, 0.], xform='data')
_fake_click(sel_fig, topo_ax, [0.5, 0.], xform='data',
kind='motion')
_fake_click(sel_fig, topo_ax, [0.5, 0.5], xform='data',
kind='motion')
_fake_click(sel_fig, topo_ax, [-0.5, 0.5], xform='data',
| kind='release')
plt.close('all')
# test if meas_date has only one element
raw.info['meas_date'] = np.array([raw.info['meas_date'][0]],
dtype=np.int32)
raw.annotations = Annotations([1 + raw.first_samp / raw.info['sfreq']],
| [5], ['bad'])
raw.plot()
plt.close('all')
def test_plot_annotations():
"""Test annotation mode of the plotter."""
raw = _get_raw()
_annotation_helper(raw)
raw.annotations = Annotations([42], [1], 'test' |
aaibfer/mtoaUtils | scripts/aiDistanceLocator.py | Python | mit | 13,950 | 0.041864 | """
aiDistanceLocator v1.0.2
----------------------------------------------------------------------------------------------------
- create a locator as a reference to adjust camera's focus distance or light attenuation parameters
----------------------------------------------------------------------------------------------------
Tool by Aaron Ibanez
Last update on 25/04/2017
----------------------------------------------------------------------------------------------------
Put script in \Documents\maya\201X\scripts
In Python tab of Maya's script editor, execute code:
import aiDistanceLocator
aiDistanceLocator.UI()
----------------------------------------------------------------------------------------------------
"""
""" IMPORT MODULES """
from functools import partial
import maya.cmds as cmds
import sys
""" DEFINE FUNCTIONS """
####################################################################################################################################
#######################################################____CAMERA LOCATOR____#######################################################
cam = []
## define selection
def camSelection():
global sel, cam
sel = cmds.ls(selection = True, cameras = True, dagObjects = True)
cam = cmds.listRelatives(sel, parent = True, type = "transform")
if len(sel) == 0:
sys.stdout.write("\nNo cameras selected.\n")
## create camera locator
def createCamLocator():
camSelection()
if len(sel) > 0:
for c in cam:
if not cmds.objExists("grp_" + c + "_focusDistance"):
# create a locator and group it
cmds.spaceLocator(name = "lct_" + c + "_focusDistance", position = (0, 0, 0))
annotation = cmds.annotate("lct_" + c + "_focusDistance", text = c + "_focusDistance")
cmds.setAttr(annotation + ".displayArrow", 0)
ann = cmds.listRelatives(annotation, parent = True, type = "transform")
cmds.rename(ann, "ann_" + c + "_focusDistance")
cmds.parent("ann_" + c + "_focusDistance", "lct_" + c + "_focusDistance")
cmds.group("lct_" + c + "_focusDistance", name = "grp_" + c + "_focusDistance")
cmds.select(cam)
# constrain group's position and rotation to camera's
cmds.parentConstraint(c, "grp_" + c + "_focusDistance", name = "grp_" + c + "_focusDistance" + "_parentConstraint", maintainOffset = False, decompRotationToChild = False, skipTranslate = "none", skipRotate = "none")
# conect camera's focus distance attribute to locator's Z position attribute
cmds.expression(string = "lct_" + c + "_focusDistance.translateZ = -(" + c + ".aiFocusDistance)", name = "expression_" + c + "_focusDistance")
# lock transformations
transformations = ["tx", "ty", "tz", "rx", "ry", "rz", "sx", "sy", "sz"]
for t in transformations:
cmds.setAttr("grp_" + c + "_focusDistance" + "." + t, lock = True)
cmds.setAttr("lct_" + c + "_focusDistance" + "." + t, lock = True)
## remove camera locator
def removeCamLocator():
camSelection()
if len(sel) > 0:
for c in cam:
if cmds.objExists("grp_" + c + "_focusDistance"):
cmds.delete("grp_" + c + "_focusDistance", "expression_" + c + "_focusDistance")
## camera locator scale
def camLocatorScale(camLocatorScaleValue, *args):
camSelection()
if len(sel) > 0:
for c in cam:
camLctScale = cmds.intSliderGrp(camLocatorScaleValue, query = True, value = True)
scale = ["localScaleX", "localScaleY"]
for s in scale:
if cmds.objExists("grp_" + c + "_focusDistance"):
cmds.setAttr("lct_" + c + "_focusDistance." + s, camLctScale)
####################################################################################################################################
#########################################################____LIGHT LOCATOR____######################################################
lgt = []
## define selection
def lgtSelection():
global sel, lgt
sel = cmds.ls(selection = True, exactType = ("areaLight", "pointLight", "spotLight", "aiAreaLight", "aiMeshLight", "aiPhotometricLight"), dagObjects = True)
lgt = cmds.listRelatives(sel, parent = True, type = "transform")
if len(sel) == 0:
sys.stdout.write("\nNo lights selected.\n")
## create light locators
def createLgtLocator():
lgtSelection()
if len(sel) > 0:
for l in lgt:
if not cmds.objExists("grp_" + l + "_lightDecay"):
# check for light decay filter used in the current light/s
if len(cmds.ls(exactType = "aiLightDecay")) > 0:
try:
for f in cmds.ls(exactType = "aiLightDecay"):
if f in cmds.listConnections(l + ".aiFilters"):
# create locators and group it
cmds.spaceLocator(name = "lct_" + l + "_nearStart", position = (0, 0, 0))
annotation1 = cmds.annotate("lct_" + l + "_nearStart", text = l + "_nearStart")
cmds.setAttr(annotation1 + ".displayArrow", 0)
ann1 = cmds.listRelatives(annotation1, parent = True, type = "transform")
cmds.rename(ann1, "ann_" + l + "_nearStart")
cmds.parent("ann_" + l + "_nearStart", "lct_" + l + "_nearStart")
cmds.spaceLocator(name = "lct_" + l + "_nearEnd", position = (0, 0, 0))
annotation2 = cmds.annotate("lct_" + l + "_nearEnd", text = l + "_nearEnd")
cmds.setAttr(annotation2 + ".displayArrow", 0)
ann2 = cmds.listRelatives(annotation2, parent = True, type = "transform")
cmds.rename(ann2, "ann_" + l + "_nearEnd")
cmds.parent("ann_" + l + "_nearEnd", "lct_" + l + "_nearEnd")
cmds.spaceLocator(name = "lct_" + l + "_farStart", position = (0, 0, 0))
annotation3 = cmds.annotate("lct_" + l + "_farStart", text = l + "_farStart")
cmds.setAttr(annotation3 + ".displayArrow", 0)
ann3 = cmds.listRelatives(annotation3, parent = True, type = "transform")
cmds.rename(ann3, "ann_" + l + "_farStart")
cmds.parent("ann_" + l + "_farStart", "lct_" + l + "_farStart")
cmds.spaceLocator(name = "lct_" + l + "_farEnd", position = (0, 0, 0))
annotation4 = cmds.annotate("lct_" + l + "_farEnd", text = l + "_farEnd")
cmds.setAttr(annotation4 + ".displayArrow", 0)
ann4 = cmds.listRelatives(annotation4, parent = True, type = "transform")
cmds.rename(ann4, "ann_" + l + "_farEnd")
cmds.parent("ann_" + l + "_farEnd", "lct_" + l + "_farEnd")
cmds.group("lct_" + l + "_nearStart", "lct_" + l + "_nearEnd", "lct_" + l + "_farStart", "lct_" + l + "_farEnd", name = "grp_" + l + "_li | ghtDecay")
cmds.select(lgt)
# constrain group's position and rotation to light's
cmds.parentConstraint(l, "grp_" + l + "_lightDecay", name = "grp_" + l + "_lightDecay" + "_parentConstraint", maintainOffset = False, decompRotationToChild = False, skipTranslate = "none", skipRotate = "none")
# conect light's decay attribute to locator's Z local position attribute
| cmds.expression(string = "lct_" + l + "_nearStart.translateZ = -(" + f + ".nearStart)", name = "expression_" + l + "_nearStart")
cmds.expression(string = "lct_" + l + "_nearEnd.translateZ = -(" + f + ".nearEnd)", name = "expression_" + l + "_nearEnd")
cmds.expression(string = "lct_" + l + "_farStart.translateZ = -(" + f + ".farStart)", name = "expression_" + l + "_farStart")
cmds.expression(string = "lct_" + l + "_farEnd.translateZ = -(" + f + ".farEnd)", name = "expression_" + l + "_farEnd")
# lock transformations
transformations = ["tx", "ty", "tz", "rx", "ry", "rz", "sx", "sy", "sz"]
for t in transformations:
cmds.setAttr("grp_" + l + "_lightDecay" + "." + t, lock = True)
cmds.setAttr("lct_" + l + "_nearStart" + "." + t, lock = True)
cmds.setAttr("lct_" + l + "_nearEnd" + "." + t, lock = True)
cmds.setAttr("lct_" + l + "_farStart" + "." + t, lock = True)
cmds.setAttr("lct_" + l + "_farEnd" + "." + t, lock = True)
else:
sys.stdout.write("\nSelected lights are not using any aiLightDecay filter.\n")
except TypeError:
sys.stdou |
sergey-senozhatsky/anaconda-11-vlan-support | iw/bootloader_advanced_gui.py | Python | gpl-2.0 | 3,639 | 0.007969 | #
# bootloader_advanced.py: gui advanced bootloader configuration dialog
#
# Jeremy Katz <katzj@redhat.com>
#
# Copyright 2001-2002 Red Hat, Inc.
#
# This software may be freely redistributed under the terms of the GNU
# library public license.
#
# You should have received a copy of the GNU Library Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import gtk
import gobject
import iutil
import partedUtils
import gui
from iw_gui import *
from rhpl.translate import _, N_
from bootlocwidget import BootloaderLocationWidget
class AdvancedBootloaderWindow(InstallWindow):
windowTitle = N_("Advanced Boot Loader Configuration")
def __init__(self, ics):
InstallWindow.__init__(self, ics)
self.parent = ics.getICW().window
def getPrev(self):
pass
def | getNext(self):
# forcing lba32 can be a bad idea.. make sure they really want to
if (self.forceLBA.get_active() and not self.bl.forceLBA32):
rc = self.intf.messageWindow(_("Warning"),
_("Forcing the use of LBA32 for your bootloader when "
"not supported by the BIOS can cause your machine "
"to be unable to boot.\n\n"
"Would you like to continue and force LBA32 mode?"),
| type = "custom",
custom_buttons = [_("Cancel"),
_("Force LBA32")])
if rc != 1:
raise gui.StayOnScreen
# set forcelba
self.bl.setForceLBA(self.forceLBA.get_active())
# set kernel args
self.bl.args.set(self.appendEntry.get_text())
# set the boot device
self.bl.setDevice(self.blloc.getBootDevice())
# set the drive order
self.bl.drivelist = self.blloc.getDriveOrder()
# set up the vbox with force lba32 and kernel append
def setupOptionsVbox(self):
self.options_vbox = gtk.VBox(False, 5)
self.options_vbox.set_border_width(5)
self.forceLBA = gtk.CheckButton(_("_Force LBA32 (not normally required)"))
self.options_vbox.pack_start(self.forceLBA, False)
self.forceLBA.set_active(self.bl.forceLBA32)
label = gui.WrappingLabel(_("If you wish to add default options to the "
"boot command, enter them into "
"the 'General kernel parameters' field."))
label.set_alignment(0.0, 0.0)
self.options_vbox.pack_start(label, False)
label = gui.MnemonicLabel(_("_General kernel parameters"))
self.appendEntry = gtk.Entry()
label.set_mnemonic_widget(self.appendEntry)
args = self.bl.args.get()
if args:
self.appendEntry.set_text(args)
box = gtk.HBox(False, 0)
box.pack_start(label)
box.pack_start(self.appendEntry)
al = gtk.Alignment(0.0, 0.0)
al.add(box)
self.options_vbox.pack_start(al, False)
def getScreen(self, anaconda):
self.dispatch = anaconda.dispatch
self.bl = anaconda.id.bootloader
self.intf = anaconda.intf
thebox = gtk.VBox (False, 10)
# boot loader location bits (mbr vs boot, drive order)
self.blloc = BootloaderLocationWidget(anaconda, self.parent)
thebox.pack_start(self.blloc.getWidget(), False)
thebox.pack_start (gtk.HSeparator(), False)
# some optional things
self.setupOptionsVbox()
thebox.pack_start(self.options_vbox, False)
return thebox
|
alinasirbu/eurora_job_power_prediction | Python_scripts_regression/one_user_apply.py | Python | gpl-2.0 | 6,590 | 0.044613 | ##this script applies component models to the data of the selected month
#component predictions are summed to obtain global prediction
##where the component model does not exist (because data is too little), averages are used in the spitrit of the EAM
import csv
import gzip
import datetime
import numpy as np
import matplotlib
import pylab as pl
import math
import pickle as pkl
import sys
from sklearn.svm import SVR
from sklearn.feature_extraction.text import CountVectorizer
import time
def user_run(user,comp):
global max_power
global time_scale
global pred
t=time.time()
data=list(csv.reader(gzip.open('data/'+user+'_jobs.csv.gz','r')))
apply_data=[r for r in data[1:] if r[0][:4]=='2014' and int(r[0][5:7])==month and int(r[comp+8])>0 and float(r[comp+2])>0]
apply_data=sorted(apply_data,key=lambda r:(float(r[1][:-8]),r[0]))
if len(apply_data)==0:
        print('no data for component ' + str(comp))
return {},0
job_time=[(r[0],r[1]) for r in apply_data]
try:
#load model
cv, model,C,epsilon,gamma,time_scale,max_power=pkl.load(file=gzip.open('results'+str(month)+'global/'+user+'_model_global'+str(comp)+'.pkl.gz','r'))
pred=test(cv,model,apply_data,user,'apply'+str(comp),comp)
result=1
pkl.dump(time.time()-t, file=open('results'+str(month)+'global'+str(min_train)+'/'+user+'time_apply_'+str(comp)+'.pkl','w'))
except IOError:
#use avgs
print ('no model for comp '+str(comp))
pred= (np.array([r[9:14] for r in apply_data],dtype=np.float)*user_avg.get(user,global_avg))[:,comp-1]
result=0
return {job_time[i]:pred[i] for i in range(len(pred))},result
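# shape sketch of user_run's return value: a dict keyed by the (date, job id)
# pairs built above, mapping to the predicted power, plus a flag --
# ({('2014-.. ..:..:.. UTC', '<job id>'): predicted_watts, ...}, 1 if a trained model was applied else 0)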
def test(cv,model,data,user,code,comp):
test_power=np.array([float(r[2+comp])/max_power for r in data ])
times=[datetime.datetime.strptime(r[0],'%Y-%m-%d %H:%M:%S UTC') for r in data]
features=np.array([d[8:] for d in data],dtype=np.float)
features[:,0]=features[:,0]/time_scale
jobs=list(set([(r[1],r[2]) for r in data]))
name_features=cv.transform([d[2] for d in data]).toarray()
features=np.hstack((features,name_features))
job_ids=[r[1] for r in data]
prediction=model.predict(features)
rmse=math.sqrt(np.average(((prediction-test_power)*max | _power)**2))
nrmse=math.sqrt(np.average(((prediction-test_power)/test_power)**2))
corr=np.corrcoef(prediction,test_power)[0,1]
r2=1-(sum((prediction-test_power)**2)/sum((test_power-np.average(test_power))**2))
pl.figure(figsize=(6,7))
pl.subplot(211)
pl.plot(prediction*max_power,test_power*max_power,'+')
if math.isnan(corr) or math.isnan(r2) or math.isnan(rmse):
pl.title("RMSPE="+str(nrmse)+"RMSE="+str(rmse)+" Corr="+str(corr)+" R2="+str(r2) | )
else:
pl.title("RMSPE="+str(int(nrmse*1000)/1000.0)+" RMSE="+str(int(rmse*1000)/1000.0)+" Corr="+str(int(corr*1000)/1000.0)+" R2="+str(int(r2*1000)/1000.0))
pl.xlabel('Predicted power')
pl.ylabel('Real power')
pl.plot([max(pl.xlim()[0],pl.ylim()[0]),min(pl.xlim()[1],pl.ylim()[1])],[max(pl.xlim()[0],pl.ylim()[0]),min(pl.xlim()[1],pl.ylim()[1])])
pl.subplot(212)
pl.plot(test_power*max_power)
pl.plot(prediction*max_power)
pl.ylabel('Power')
pl.xlabel('Data point')
#pl.legend(('Real power','Predicted power'))
pl.subplots_adjust(hspace=0.35)
pl.savefig('results'+str(month)+'global'+str(min_train)+'/'+user+code+'.pdf')
pl.close()
pkl.dump((nrmse,rmse,corr,r2,prediction*max_power,test_power*max_power,times,job_ids),file=gzip.open('results'+str(month)+'global'+str(min_train)+'/'+user+'test'+code+'.pkl.gz','w'))
return prediction*max_power
max_power=1000
time_scale=1
user=sys.argv[1]
month=int(sys.argv[2])
min_train=int(sys.argv[3])
data=list(csv.reader(gzip.open('data/'+user+'_jobs.csv.gz','r')))
train_data=[r for r in data[1:] if r[0][:4]=='2014' and int(r[0][5:7])<month]
if len(train_data)<min_train:
print('Too little training data for this user')
exit()
apply_data=[r for r in data[1:] if r[0][:4]=='2014' and int(r[0][5:7])==month]
apply_data=sorted(apply_data,key=lambda r:(float(r[1][:-8]),r[0]))
if len(apply_data)<10:
print('not enough test data for month ' + str(month))
else:
#read averages, complete grid with global
global_avg=np.array(list(csv.reader(open('data/'+'avg_global_'+str(month)+'.csv','r'))),dtype=np.float)[0]
user_avg_data=list(csv.reader(open('data/'+'avg_user_'+str(month)+'.csv','r')))
user_avg={}
for u in user_avg_data:
for i in range(1,len(u)):
if u[i]=='':
u[i]=global_avg[i-1]
user_avg[u[0]]=np.array(u[1:],dtype=np.float)
#repeat user run for each component
component_power=[]
model_count=0
for comp in range(1,6):
pred,result=user_run(user,comp)
component_power.append(pred)
model_count+=result
if model_count==0:
print('no model for this user')
exit()
#combine components to obtain total power
total_power=np.apply_along_axis(sum,1,np.array([r[3:8] for r in apply_data],dtype=np.float))
predicted_power=[]
for d in apply_data:
pp=0
for i in range(5):
if d[i+9]!='0' and d[i+3]!='0': #second check is just to filter invalid data
pp+=component_power[i][(d[0],d[1])]
predicted_power.append(pp)
predicted_power=np.array(predicted_power)
#plot total with r2 and rmse
rmse=math.sqrt(np.average((predicted_power-total_power)**2))
nrmse=math.sqrt(np.average(((predicted_power-total_power)/total_power)**2))
corr=np.corrcoef(predicted_power,total_power)[0,1]
r2=1-(sum((predicted_power-total_power)**2)/sum((total_power-np.average(total_power))**2))
pl.figure(figsize=(6,7))
pl.subplot(211)
pl.plot(predicted_power,total_power,'+')
if math.isnan(corr) or math.isnan(r2) or math.isnan(rmse):
pl.title("RMSPE="+str(nrmse)+"RMSE="+str(rmse)+" Corr="+str(corr)+" R2="+str(r2))
else:
pl.title("RMSPE="+str(int(nrmse*1000)/1000.0)+" RMSE="+str(int(rmse*1000)/1000.0)+" Corr="+str(int(corr*1000)/1000.0)+" R2="+str(int(r2*1000)/1000.0))
pl.xlabel('Predicted power')
pl.ylabel('Real power')
pl.plot([max(pl.xlim()[0],pl.ylim()[0]),min(pl.xlim()[1],pl.ylim()[1])],[max(pl.xlim()[0],pl.ylim()[0]),min(pl.xlim()[1],pl.ylim()[1])])
pl.subplot(212)
pl.plot(total_power)
pl.plot(predicted_power)
pl.ylabel('Power (W)')
pl.xlabel('Data point')
#pl.legend(('Real power','Predicted power'))
pl.subplots_adjust(hspace=0.35)
pl.savefig('results'+str(month)+'global'+str(min_train)+'/'+user+'_total.pdf')
pl.close()
pkl.dump((nrmse,rmse,corr,r2,total_power,predicted_power),file=gzip.open('results'+str(month)+'global'+str(min_train)+'/'+user+'_total.pkl.gz','w'))
|
keras-team/keras | keras/optimizers/optimizer_v2/nadam_test.py | Python | apache-2.0 | 6,798 | 0.006325 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Nadam."""
import tensorflow.compat.v2 as tf
import numpy as np
from keras.optimizers.optimizer_v2 import nadam
def get_beta_accumulators(opt, dtype):
local_step = tf.cast(opt.iterations + 1, dtype)
beta_1_t = tf.cast(opt._get_hyper("beta_1"), dtype)
beta_1_power = tf.pow(beta_1_t, local_step)
beta_2_t = tf.cast(opt._get_hyper("beta_2"), dtype)
beta_2_power = tf.pow(beta_2_t, local_step)
return (beta_1_power, beta_2_power)
def update_m_cache(m_cache, t, beta1=0.9):
mu_t = beta1 * (1 - 0.5 * 0.96**(0.004 * (t + 1)))
m_cache_t = m_cache * mu_t
return | m_cache_t
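# note: update_m_cache accumulates the running product mu_1 * ... * mu_t of the
# momentum schedule, which the numpy reference below uses for bias correction.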
def nadam_update_numpy(param,
g_t,
t,
m,
v,
m_cache,
alpha=0 | .001,
beta1=0.9,
beta2=0.999,
epsilon=1e-8):
mu_t = beta1 * (1 - 0.5 * 0.96**(0.004 * (t + 1)))
mu_t_1 = beta1 * (1 - 0.5 * 0.96**(0.004 * (t + 2)))
m_cache_t_1 = m_cache * mu_t_1
g_prime_t = g_t / (1 - m_cache)
m_t = beta1 * m + (1 - beta1) * g_t
v_t = beta2 * v + (1 - beta2) * g_t * g_t
m_prime_t = m_t / (1 - m_cache_t_1)
v_prime_t = v_t / (1 - beta2**(t + 1))
m_bar_t = (1 - mu_t) * g_prime_t + mu_t_1 * m_prime_t
param_t = param - alpha * m_bar_t / (np.sqrt(v_prime_t) + epsilon)
return param_t, m_t, v_t
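# sanity note: the update above follows Dozat's Nadam -- m_bar_t mixes the
# bias-corrected gradient g'_t with the look-ahead momentum term mu_{t+1} * m'_t,
# in place of Adam's plain bias-corrected first moment.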
class NadamOptimizerTest(tf.test.TestCase):
def testSparse(self):
# TODO(tanzheny, omalleyt): Fix test in eager mode.
sparse_epsilon = 1e-7
for dtype in [tf.half, tf.float32, tf.float64]:
with tf.Graph().as_default(), self.cached_session():
# Initialize variables for numpy implementation.
m0, v0, m1, v1, mcache = 0.0, 0.0, 0.0, 0.0, 1.0
var0_np = np.array([1.0, 1.0, 2.0], dtype=dtype.as_numpy_dtype)
grads0_np = np.array([0.1, 0, 0.1], dtype=dtype.as_numpy_dtype)
var1_np = np.array([3.0, 3.0, 4.0], dtype=dtype.as_numpy_dtype)
grads1_np = np.array([0.01, 0, 0.01], dtype=dtype.as_numpy_dtype)
var0 = tf.Variable(var0_np)
var1 = tf.Variable(var1_np)
grads0_np_indices = np.array([0, 2], dtype=np.int32)
grads0 = tf.IndexedSlices(
tf.constant(grads0_np[grads0_np_indices]),
tf.constant(grads0_np_indices), tf.constant([3]))
grads1_np_indices = np.array([0, 2], dtype=np.int32)
grads1 = tf.IndexedSlices(
tf.constant(grads1_np[grads1_np_indices]),
tf.constant(grads1_np_indices), tf.constant([3]))
opt = nadam.Nadam(epsilon=sparse_epsilon)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
self.evaluate(tf.compat.v1.global_variables_initializer())
# Fetch params to validate initial values
self.assertAllClose([1.0, 1.0, 2.0], var0)
self.assertAllClose([3.0, 3.0, 4.0], var1)
beta1_power, beta2_power = get_beta_accumulators(opt, dtype)
# Run 3 steps of Nadam
for t in range(3):
self.assertAllCloseAccordingToType(0.9**(t + 1), beta1_power)
self.assertAllCloseAccordingToType(0.999**(t + 1), beta2_power)
update.run()
mcache = update_m_cache(mcache, t)
var0_np, m0, v0 = nadam_update_numpy(
var0_np, grads0_np, t, m0, v0, mcache, epsilon=sparse_epsilon)
var1_np, m1, v1 = nadam_update_numpy(
var1_np, grads1_np, t, m1, v1, mcache, epsilon=sparse_epsilon)
# Validate updated params
self.assertAllCloseAccordingToType(var0_np, var0)
self.assertAllCloseAccordingToType(var1_np, var1)
def testBasic(self):
# TODO(tanzheny, omalleyt): Fix test in eager mode.
for dtype in [tf.half, tf.float32, tf.float64]:
with tf.Graph().as_default(), self.cached_session():
# Initialize variables for numpy implementation.
m0, v0, m1, v1, mcache = 0.0, 0.0, 0.0, 0.0, 1.0
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
var0 = tf.Variable(var0_np)
var1 = tf.Variable(var1_np)
grads0 = tf.constant(grads0_np)
grads1 = tf.constant(grads1_np)
opt = nadam.Nadam()
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
self.evaluate(tf.compat.v1.global_variables_initializer())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0)
self.assertAllClose([3.0, 4.0], var1)
# Run 3 steps of Nadam
for t in range(3):
update.run()
mcache = update_m_cache(mcache, t)
var0_np, m0, v0 = nadam_update_numpy(var0_np, grads0_np, t, m0, v0,
mcache)
var1_np, m1, v1 = nadam_update_numpy(var1_np, grads1_np, t, m1, v1,
mcache)
# Validate updated params
self.assertAllCloseAccordingToType(var0_np, var0)
self.assertAllCloseAccordingToType(var1_np, var1)
def testConstructNAdamWithLR(self):
opt = nadam.Nadam(lr=1.0)
opt_2 = nadam.Nadam(learning_rate=0.1, lr=1.0)
opt_3 = nadam.Nadam(learning_rate=0.1)
self.assertIsInstance(opt.lr, tf.Variable)
self.assertIsInstance(opt_2.lr, tf.Variable)
self.assertIsInstance(opt_3.lr, tf.Variable)
self.evaluate(tf.compat.v1.global_variables_initializer())
self.assertAllClose(self.evaluate(opt.lr), (1.0))
self.assertAllClose(self.evaluate(opt_2.lr), (1.0))
self.assertAllClose(self.evaluate(opt_3.lr), (0.1))
def testConstructNAdamWithScheduleDecay(self):
opt = nadam.Nadam(schedule_decay=0.2)
self.assertIsInstance(opt.decay, tf.Variable)
self.evaluate(tf.compat.v1.global_variables_initializer())
self.assertAllClose(self.evaluate(opt.decay), (0.2))
if __name__ == "__main__":
tf.test.main()
|
google/grumpy | compiler/block.py | Python | apache-2.0 | 12,060 | 0.009227 | # coding=utf-8
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of t | he License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the Licen | se.
"""Classes for analyzing and storing the state of Python code blocks."""
from __future__ import unicode_literals
import abc
import collections
import re
from grumpy.compiler import expr
from grumpy.compiler import util
from grumpy.pythonparser import algorithm
from grumpy.pythonparser import ast
from grumpy.pythonparser import source
_non_word_re = re.compile('[^A-Za-z0-9_]')
class Package(object):
"""A Go package import."""
def __init__(self, name, alias=None):
self.name = name
# Use Γ as a separator since it provides readability with a low
# probability of name collisions.
self.alias = alias or 'π_' + name.replace('/', 'Γ').replace('.', 'Γ')
class Loop(object):
"""Represents a for or while loop within a particular block."""
def __init__(self, breakvar):
self.breakvar = breakvar
class Block(object):
"""Represents a Python block such as a function or class definition."""
__metaclass__ = abc.ABCMeta
def __init__(self, parent, name):
self.root = parent.root if parent else self
self.parent = parent
self.name = name
self.free_temps = set()
self.used_temps = set()
self.temp_index = 0
self.label_count = 0
self.checkpoints = set()
self.loop_stack = []
self.is_generator = False
@abc.abstractmethod
def bind_var(self, writer, name, value):
"""Writes Go statements for assigning value to named var in this block.
This is overridden in the different concrete block types since in Python,
binding a variable in, e.g. a function is quite different than binding at
    the global block.
Args:
writer: The Writer object where statements will be written.
name: The name of the Python variable.
value: A Go expression to assign to the variable.
"""
pass
@abc.abstractmethod
def del_var(self, writer, name):
pass
@abc.abstractmethod
def resolve_name(self, writer, name):
"""Returns a GeneratedExpr object for accessing the named var in this block.
This is overridden in the different concrete block types since name
    resolution in Python behaves differently depending on where and in what
    kind of block it's happening, e.g. local vars are different than globals.
Args:
writer: Writer object where intermediate calculations will be printed.
name: The name of the Python variable.
"""
pass
def genlabel(self, is_checkpoint=False):
self.label_count += 1
if is_checkpoint:
self.checkpoints.add(self.label_count)
return self.label_count
def alloc_temp(self, type_='*πg.Object'):
"""Create a new temporary Go variable having type type_ for this block."""
for v in sorted(self.free_temps, key=lambda k: k.name):
if v.type_ == type_:
self.free_temps.remove(v)
self.used_temps.add(v)
return v
self.temp_index += 1
name = 'πTemp{:03d}'.format(self.temp_index)
v = expr.GeneratedTempVar(self, name, type_)
self.used_temps.add(v)
return v
def free_temp(self, v):
"""Release the GeneratedTempVar v so it can be reused."""
self.used_temps.remove(v)
self.free_temps.add(v)
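  # Typical code-generation pattern (sketch; the writer calls are illustrative):
  #   tmp = block.alloc_temp()
  #   writer.write('{} = ...'.format(tmp.name))  # tmp.name is e.g. 'πTemp001'
  #   block.free_temp(tmp)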
def push_loop(self, breakvar):
loop = Loop(breakvar)
self.loop_stack.append(loop)
return loop
def pop_loop(self):
self.loop_stack.pop()
def top_loop(self):
return self.loop_stack[-1]
def _resolve_global(self, writer, name):
result = self.alloc_temp()
writer.write_checked_call2(
result, 'πg.ResolveGlobal(πF, {})', self.root.intern(name))
return result
class ModuleBlock(Block):
"""Python block for a module."""
def __init__(self, importer, full_package_name,
filename, src, future_features):
Block.__init__(self, None, '<module>')
self.importer = importer
self.full_package_name = full_package_name
self.filename = filename
self.buffer = source.Buffer(src)
self.strings = set()
self.future_features = future_features
def bind_var(self, writer, name, value):
writer.write_checked_call1(
'πF.Globals().SetItem(πF, {}.ToObject(), {})',
self.intern(name), value)
def del_var(self, writer, name):
writer.write_checked_call1('πg.DelVar(πF, πF.Globals(), {})',
self.intern(name))
def resolve_name(self, writer, name):
return self._resolve_global(writer, name)
def intern(self, s):
if len(s) > 64 or _non_word_re.search(s):
return 'πg.NewStr({})'.format(util.go_str(s))
self.strings.add(s)
return 'ß' + s
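  # e.g. intern('foo') registers 'foo' and returns the Go identifier 'ßfoo',
  # while long or non-word strings fall back to an inline πg.NewStr(...) call.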
class ClassBlock(Block):
"""Python block for a class definition."""
def __init__(self, parent, name, global_vars):
Block.__init__(self, parent, name)
self.global_vars = global_vars
def bind_var(self, writer, name, value):
if name in self.global_vars:
return self.root.bind_var(writer, name, value)
writer.write_checked_call1('πClass.SetItem(πF, {}.ToObject(), {})',
self.root.intern(name), value)
def del_var(self, writer, name):
if name in self.global_vars:
return self.root.del_var(writer, name)
writer.write_checked_call1('πg.DelVar(πF, πClass, {})',
self.root.intern(name))
def resolve_name(self, writer, name):
local = 'nil'
if name not in self.global_vars:
# Only look for a local in an outer block when name hasn't been declared
# global in this block. If it has been declared global then we fallback
# straight to the global dict.
block = self.parent
while not isinstance(block, ModuleBlock):
if isinstance(block, FunctionBlock) and name in block.vars:
var = block.vars[name]
if var.type != Var.TYPE_GLOBAL:
local = util.adjust_local_name(name)
# When it is declared global, prefer it to anything in outer blocks.
break
block = block.parent
result = self.alloc_temp()
writer.write_checked_call2(
result, 'πg.ResolveClass(πF, πClass, {}, {})',
local, self.root.intern(name))
return result
class FunctionBlock(Block):
"""Python block for a function definition."""
def __init__(self, parent, name, block_vars, is_generator):
Block.__init__(self, parent, name)
self.vars = block_vars
self.parent = parent
self.is_generator = is_generator
def bind_var(self, writer, name, value):
if self.vars[name].type == Var.TYPE_GLOBAL:
return self.root.bind_var(writer, name, value)
writer.write('{} = {}'.format(util.adjust_local_name(name), value))
def del_var(self, writer, name):
var = self.vars.get(name)
if not var:
raise util.ParseError(
None, 'cannot delete nonexistent local: {}'.format(name))
if var.type == Var.TYPE_GLOBAL:
return self.root.del_var(writer, name)
adjusted_name = util.adjust_local_name(name)
# Resolve local first to ensure the variable is already bound.
writer.write_checked_call1('πg.CheckLocal(πF, {}, {})',
adjusted_name, util.go_str(name))
writer.write('{} = πg.UnboundLocal'.format(adjusted_name))
def resolve_name(self, writer, name):
block = self
while not isinstance(block, ModuleBlock):
if isinstance(block, FunctionBlock):
var = block.vars.get(name)
if var:
if var.type == Var.TYPE_GLOBAL:
return self._resolve_global(writer, name)
writer.write_checked_call1('πg.CheckLocal(πF, {}, {})',
util.adjust_local_name(name),
|
alexmgr/scapy-ssl_tls | src/examples/example_full_handshake_rsa_aes_sha.py | Python | gpl-2.0 | 5,347 | 0.019637 | #! /usr/bin/env python
# -*- coding: UTF-8 -*-
# Author : tintinweb@oststrom.com <github.com/tintinweb>
def sendrcv(sock, p, bufflen=1024):
sock.settimeout(5)
print "sending TLS payload"
sock.sendall(p)
resp=''
try:
while 1:
t = sock.recv(1)
if not(len(t)):
break
resp += t
except:
print "timeout"
print "received, %d -- %s"%(len(resp),repr(resp))
return resp
if __name__=="__main__":
history = []
import scapy
from scapy.all import *
import socket
#<----- for local testing only
sys.path.append("../scapy/layers")
from ssl_tls import *
import ssl_tls_crypto
#------>
target = ('192.168.220.131',4433) # MAKE SURE TO CHANGE THIS
# create tcp socket
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.connect(target)
session = ssl_tls_crypto.TLSSessionCtx()
session.rsa_load_privkey(open('c:\\_tmp\\polarssl.key','r').read())
# fake initial session packet for session tracking
sip,sport= s.getsockname()
session.insert(IP(src=sip,dst=target[0])/TCP(sport=sport,dport=target[1]))
    # create TLS Handshake / Client Hello packet
p = TLSRecord()/TLSHandshake()/TLSClientHello(compression_methods=None,
cipher_suites=[TLSCipherSuite.RSA_WITH_AES_128_CBC_SHA],
random_bytes='R'*28)
p.show()
sp =str(p)
session.insert(SSL(sp))
history.append(SSL(sp))
r = sendrcv(s,sp)
SSL(r).show()
history.append(SSL(r))
session.insert(SSL(r))
# send premaster secret
#p = TLSRecord()/TLSHandshake()/TLSClientKeyExchange()/TLSKexParamDH("haha")
client_hello = p
server_hello = SSL(r)
#generate random premaster secret
secparams = ssl_tls_crypto.TLSSecurityParameters()
    # client version (2 bytes) + 46 filler bytes = 48-byte premaster secret
secparams.premaster_secret = '\03\01'+'a'*22+'b'*24
print "client_random:",repr(struct.pack("!I",client_hello[TLSClientHello].gmt_unix_time)+client_hello[TLSClientHello].random_bytes)
print "server_random:",repr(struct.pack("!I",server_hello[TLSServerHello].gmt_unix_time)+server_hello[TLSServerHello].random_bytes)
secparams.generate(secparams.premaster_secret,
struct.pack("!I",client_hello[TLSClientHello].gmt_unix_time)+client_hello[TLSClientHello].random_bytes,
struct.pack("!I",server_hel | lo[TLSServerHello].gmt_unix_time)+server_hello[TLSServerHello].random_bytes)
print "master", repr(secparams.master_secret)
|
# encrypt pms with server pubkey from first cert
#extract server cert (first one counts)
cert = SSL(r)[TLSCertificateList].certificates[0].data
pubkey = ssl_tls_crypto.x509_extract_pubkey_from_der(cert)
print repr(pubkey.exportKey(format="DER"))
#print pubkey
print pubkey.can_encrypt()
print pubkey.can_sign()
print pubkey.publickey()
print repr(secparams.premaster_secret)
    # PKCS#1 v1.5 pad and encrypt with pubkey
from Crypto.Cipher import PKCS1_OAEP,PKCS1_v1_5
pkcs1_pubkey = PKCS1_v1_5.new(pubkey)
enc= pkcs1_pubkey.encrypt(secparams.premaster_secret)
print repr(enc)
print "---------------"
# manually check by decrypting the encrypted text with the privkey
with open('c:\\_tmp\\polarssl.key','r') as f:
key = RSA.importKey(f.read())
pkcs1_key = PKCS1_v1_5.new(key)
print "decrypted pms=",repr(pkcs1_key.decrypt(enc,None))
print "---------------"
pms = ''.join(enc)
print "PMS(pkcs1)==",len(pms),repr(pms)
p = TLSRecord()/TLSHandshake()/TLSClientKeyExchange()/TLSKexParamEncryptedPremasterSecret(data=pms)
#p.show2()
sp = str(p)
history.append(SSL(sp))
session.insert(SSL(sp))
r = sendrcv(s,sp)
#SSL(r).show()
#history.append(SSL(r))
# change cipherspec
p = TLSRecord()/TLSChangeCipherSpec()
#p.show2()
r = sendrcv(s,str(p))
#SSL(r).show()
print repr(session)
exit()
print secparams
# send encrypted finish with hash of previous msgs
from Crypto.Hash import MD5,SHA
hs_msgs = ''
for p in history:
for r in p.records:
print r[TLSHandshake].payload.show()
hs_msgs += str(r[TLSHandshake].payload)
    print "hs_msgs_hashed:",repr(MD5.new(hs_msgs).digest()+SHA.new(hs_msgs).digest())
msg_hash= secparams.prf.prf_numbytes(secparams.master_secret,
secparams.prf.TLS_MD_CLIENT_FINISH_CONST,
MD5.new(hs_msgs).digest()+SHA.new(hs_msgs).digest(),
numbytes=12)
# TODO: incomplete
print repr(msg_hash)
p = TLSRecord()/TLSCiphertext().encrypt(TLSPlaintext().compress(str(TLSHandshake()/TLSFinished(data=msg_hash))))
#p = TLSRecord()/TLSHandshake()/TLSFinished(data=msg_hash)
r = sendrcv(s,str(p))
#SSL(r).show()
s.close()
|
staranjeet/fjord | fjord/alerts/api_views.py | Python | bsd-3-clause | 6,843 | 0 | from rest_framework import exceptions
from rest_framework import permissions
from rest_framework import serializers
import rest_framework.response
import rest_framework.views
from fjord.alerts.models import Alert, AlertFlavor, AlertSerializer, Link
from fjord.api_auth.api_utils import TokenAuthentication
from fjord.base.api_utils import NotFound, StrictArgumentsMixin
class FlavorPermission(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
# Is this token permitted to GET/POST alerts for this flavor?
token = request.auth
return token and obj.is_permitted(token)
def positive_integer(value):
if value <= 0:
raise serializers.ValidationError(
'This field must be positive and non-zero.')
def is_after(value1, value2):
return value1 and value2 and value1 > value2
class AlertsGETSerializer(StrictArgumentsMixin, serializers.Serializer):
"""Serializer that validates GET API arguments"""
flavors = serializers.CharField(required=True)
max = serializers.IntegerField(default=100, min_value=1)
start_time_start = serializers.DateTimeField(required=False)
start_time_end = serializers.DateTimeField(required=False)
end_time_start = serializers.DateTimeField(required=False)
end_time_end = serializers.DateTimeField(required=False)
created_start = serializers.DateTimeField(required=False)
created_end = serializers.DateTimeField(required=False)
def validate(self, data):
data = super(AlertsGETSerializer, self).validate(data)
errors = []
if is_after(data.get('start_time_start'), data.get('start_time_end')):
errors.append('start_time_start must occur before start_time_end.')
if is_after(data.get('end_time_start'), data.get('end_time_end')):
errors.append('end_time_start must occur before end_time_end.')
if is_after(data.get('created_start'), data.get('created_end')):
errors.append('created_start must occur before created_end.')
if errors:
raise serializers.ValidationError(errors)
return data
def validate_flavors(self, value):
flavorslugs = value.split(',')
flavors = []
errors = []
for flavorslug in flavorslugs:
try:
flavor = AlertFlavor.objects.get(slug=flavorslug)
except AlertFlavor.DoesNotExist:
errors.append(
'Flavor "{0}" does not exist.'.format(flavorslug)
)
continue
if not flavor.enabled:
errors.append(
'Flavor "{0}" is disabled.'.format(flavorslug)
)
continue
flavors.append(flavor)
if errors:
raise serializers.ValidationError(errors)
# Return a list of the validated AlertFlavor objects. We're
# (ab)using validate_flavors here since we should only be
# doing validation, but since doing the validation also
# transforms the slugs into AlertFlavor objects, we'll do them
# both here.
return flavors
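# An illustrative GET against AlertsAPI below (the flavor slugs and the mount
# point shown are hypothetical, not taken from this module):
#   GET /api/v1/alerts/?flavors=slow-response,high-errors&max=50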
class AlertsAPI(rest_framework.views.APIView):
authentication_classes = (TokenAuthentication,) |
permission_classes = (FlavorPermission,)
def get(self, request):
serializer = AlertsGETSerializer(data=request.GET)
if not | serializer.is_valid():
raise exceptions.ValidationError({'detail': serializer.errors})
data = serializer.validated_data
max_count = min(data['max'], 10000)
flavors = data['flavors']
# Make sure the token has permission to view each flavor.
for flavor in flavors:
self.check_object_permissions(request, flavor)
alerts = Alert.objects.filter(flavor__in=flavors)
if data.get('start_time_start'):
alerts = alerts.filter(start_time__gte=data['start_time_start'])
if data.get('start_time_end'):
alerts = alerts.filter(start_time__lte=data['start_time_end'])
if data.get('end_time_start'):
alerts = alerts.filter(end_time__gte=data['end_time_start'])
if data.get('end_time_end'):
alerts = alerts.filter(end_time__lte=data['end_time_end'])
if data.get('created_start'):
alerts = alerts.filter(created__gte=data['created_start'])
if data.get('created_end'):
alerts = alerts.filter(created__lte=data['created_end'])
alerts = alerts.order_by('-created')
alerts_ser = AlertSerializer(alerts[:max_count], many=True)
return rest_framework.response.Response(
{
'total': alerts.count(),
'count': len(alerts_ser.data),
'alerts': alerts_ser.data
}
)
def post(self, request):
data = request.data
try:
flavorslug = data['flavor']
except KeyError:
raise exceptions.ValidationError({
'flavor': [
'Flavor not specified in payload'
]
})
try:
flavor = AlertFlavor.objects.get(slug=flavorslug)
except AlertFlavor.DoesNotExist:
raise NotFound({
'flavor': [
'Flavor "{0}" does not exist.'.format(flavorslug)
]
})
self.check_object_permissions(request, flavor)
if not flavor.enabled:
raise exceptions.ValidationError({
'flavor': [
'Flavor "{0}" is disabled.'.format(flavorslug)
]
})
# Get the links out--we'll deal with them next.
link_data = data.pop('links', [])
# Validate the alert data
alert_ser = AlertSerializer(data=data)
if not alert_ser.is_valid():
raise exceptions.ValidationError({'detail': alert_ser.errors})
# Validate links
link_errors = []
for link_item in link_data:
if 'name' not in link_item or 'url' not in link_item:
link_errors.append(
                    'Missing name or url in link data. {}'.format(
repr(link_item)))
if link_errors:
raise exceptions.ValidationError(
{'detail': {'links': link_errors}})
# Everything is good, so let's save it all to the db.
alert = alert_ser.save()
for link_item in link_data:
link = Link(
alert=alert, name=link_item['name'], url=link_item['url']
)
link.save()
return rest_framework.response.Response(
status=201,
data={
'detail': {'id': alert.id}
})
|
varunkumta/azure-linux-extensions | VMBackup/main/PluginHost.py | Python | apache-2.0 | 11,336 | 0.010586 | import time
import sys
import os
import threading
import ConfigParser
from common import CommonVariables
from pwd import getpwuid
from stat import *
import traceback
# [pre_post]
# "timeoutInSeconds" : (in seconds),
# "numberOfPlugins" : (number of pluginName/pluginPath/pluginConfigPath triples),
#
# .... other params ...
#
# "pluginName0" : "oracle_plugin", the python plugin module will have the same name
# "pluginPath0" : "/abc/xyz/"
# "pluginConfigPath0" : "sdf/sdf/abcd.json"
#
#
# errorcode policy
# errorcode = 0 (CommonVariables.PrePost_PluginStatus_Success), means success, script runs without error, though warnings are still possible
# errorcode = 5 (CommonVariables.PrePost_PluginStatus_Timeout), means timeout
# errorcode = 10 (CommonVariables.PrePost_PluginStatus_ConfigNotFound), config file not found
# errorcode = process return code, means bash script encountered some other error, like 127 for script not found
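#
# A minimal, illustrative VMSnapshotPluginHost.conf matching the parser in
# load_modules() below (the plugin name and paths here are hypothetical):
#
#   [pre_post]
#   timeoutInSeconds = 1800
#   numberOfPlugins = 1
#   pluginName0 = ScriptRunner
#   pluginPath0 = /usr/local/lib/azure/plugins/
#   pluginConfigPath0 = /etc/azure/VMSnapshotScriptPluginConfig.json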
class PluginHostError(object):
def __init__(self, errorCode, pluginName):
self.errorCode = errorCode
self.pluginName = pluginName
def __str__(self):
        return 'Plugin :- ' + self.pluginName + ' ErrorCode :- ' + str(self.errorCode)
class PluginHostResult(object):
def __init__(self):
self.errors = []
self.anyScriptFailed = False
self.continueBackup = True
self.errorCode = 0
self.fileCode = []
self.filePath = []
def __str__(self):
errorStr = ''
for error in self.errors:
errorStr += (str(error)) + '\n'
errorStr += 'Final Error Code :- ' + str(self.errorCode) + '\n'
errorStr += 'Any script Failed :- ' + str(self.anyScriptFailed) + '\n'
errorStr += 'Continue Backup :- ' + str(self.continueBackup) + '\n'
return errorStr
class PluginHost(object):
""" description of class """
def __init__(self, logger):
self.logger = logger
self.modulesLoaded = False
self.configLocation = '/etc/azure/VMSnapshotPluginHost.conf'
self.timeoutInSeconds = 1800
self.plugins = []
self.pluginName = []
self.noOfPlugins = 0
self.preScriptCompleted = []
self.preScriptResult = []
self.postScriptCompleted = []
self.postScriptResult = []
def pre_check(self):
self.logger.log('Loading script modules now...',True,'Info')
errorCode = CommonVariables.PrePost_PluginStatus_Success
dobackup = True
fsFreeze_on = True
if not os.path.isfile(self.configLocation):
self.logger.log('Plugin host Config file does not exist in the location ' + self.configLocation, True, 'Error')
self.configLocation = './main/VMSnapshotPluginHost.conf'
permissions = self.get_permissions(self.configLocation)
if not os.path.isfile(self.configLocation):
self.logger.log('Plugin host Config file does not exist in the location ' + self.configLocation, True, 'Error')
errorCode =CommonVariables.FailedPrepostPluginhostConfigNotFound
elif not (int(permissions[1]) == 0 or int(permissions[1]) == 4) or not (int(permissions[2]) == 0 or int(permissions[2]) == 4):
self.logger.log('Plugin host Config file does not have desired permissions', True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginhostConfigPermissionError
elif not self.find_owner(self.configLocation) == 'root':
self.logger.log('The owner of the Plugin host Config file ' + self.configLocation + ' is ' + self.find_owner(self.configLocation) + ' but | not root', True, 'Error' | )
errorCode = CommonVariables.FailedPrepostPluginhostConfigPermissionError
else :
errorCode,dobackup,fsFreeze_on = self.load_modules()
return errorCode,dobackup,fsFreeze_on
def load_modules(self):
# Imports all plugin modules using the information in config.json
# and initializes basic class variables associated with each plugin
        count = 0
errorCode = CommonVariables.PrePost_PluginStatus_Success
dobackup = True
fsFreeze_on = True
try:
self.logger.log('config file: '+str(self.configLocation),True,'Info')
config = ConfigParser.ConfigParser()
config.read(self.configLocation)
if (config.has_option('pre_post', 'timeoutInSeconds')):
self.timeoutInSeconds = min(int(config.get('pre_post','timeoutInSeconds')),self.timeoutInSeconds)
if (config.has_option('pre_post', 'numberOfPlugins')):
            count = int(config.get('pre_post','numberOfPlugins'))
self.logger.log('timeoutInSeconds: '+str(self.timeoutInSeconds),True,'Info')
            self.logger.log('numberOfPlugins: '+str(count),True,'Info')
            while count > 0:
pname = config.get('pre_post','pluginName'+str(self.noOfPlugins))
ppath = config.get('pre_post','pluginPath'+str(self.noOfPlugins))
pcpath = config.get('pre_post','pluginConfigPath'+str(self.noOfPlugins))
self.logger.log('Name of the Plugin is ' + pname, True)
self.logger.log('Plugin config path is ' + pcpath, True)
errorCode = CommonVariables.PrePost_PluginStatus_Success
dobackup = True
if os.path.isfile(pcpath):
permissions = self.get_permissions(pcpath)
if (int(permissions[0]) %2 == 1) or int(permissions[1]) > 0 or int(permissions[2]) > 0:
self.logger.log('Plugin Config file does not have desired permissions', True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginConfigPermissionError
if not self.find_owner(pcpath) == 'root':
self.logger.log('The owner of the Plugin Config file ' + pcpath + ' is ' + self.find_owner(pcpath) + ' but not root', True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginConfigPermissionError
else:
                    self.logger.log('Plugin config file does not exist in the location ' + pcpath, True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginConfigNotFound
if(errorCode == CommonVariables.PrePost_PluginStatus_Success):
sys.path.append(ppath)
plugin = __import__(pname)
self.plugins.append(plugin.ScriptRunner(logger=self.logger,name=pname,configPath=pcpath,maxTimeOut=self.timeoutInSeconds))
errorCode,dobackup,fsFreeze_on = self.plugins[self.noOfPlugins].validate_scripts()
self.noOfPlugins = self.noOfPlugins + 1
self.pluginName.append(pname)
self.preScriptCompleted.append(False)
self.preScriptResult.append(None)
self.postScriptCompleted.append(False)
self.postScriptResult.append(None)
                count = count - 1
if self.noOfPlugins != 0:
self.modulesLoaded = True
except Exception as err:
errMsg = 'Error in reading PluginHost config file : %s, stack trace: %s' % (str(err), traceback.format_exc())
self.logger.log(errMsg, True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginhostConfigParsing
return errorCode,dobackup,fsFreeze_on
def find_owner(self, filename):
file_owner = ''
try:
file_owner = getpwuid(os.stat(filename).st_uid).pw_name
except Exception as err:
errMsg = 'Error in fetching owner of the file : ' + filename + ': %s, stack trace: %s' % (str(err), traceback.format_exc())
self.logger.log(errMsg, True, 'Error')
return file_owner
def get_permissions(self, filename):
permissions = '777'
try:
permissions = oct(os.stat(filename)[ST_MODE])[-3:]
            self.logger.log('Permissions of the file ' + filename + ' are ' + permissions,True)
except Exception as err:
errMsg = 'Error in fetc |
Odingod/mne-python | examples/stats/plot_cluster_stats_spatio_temporal_2samp.py | Python | bsd-3-clause | 4,284 | 0 | """
========================================================= | ================
2 samples permutation test on source data with spatio-temporal clustering
=========================================================================
Tests if the source space data are significantly different between
2 groups of subjects (simulated here using one subject's data).
The multiple comparisons problem is addressed wi | th a cluster-level
permutation test across space and time.
"""
# Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Eric Larson <larson.eric.d@gmail.com>
# License: BSD (3-clause)
import os.path as op
import numpy as np
from scipy import stats as stats
import mne
from mne import spatial_tris_connectivity, grade_to_tris
from mne.stats import spatio_temporal_cluster_test, summarize_clusters_stc
from mne.datasets import sample
print(__doc__)
###############################################################################
# Set parameters
data_path = sample.data_path()
stc_fname = data_path + '/MEG/sample/sample_audvis-meg-lh.stc'
subjects_dir = data_path + '/subjects'
# Load stc to in common cortical space (fsaverage)
stc = mne.read_source_estimate(stc_fname)
stc.resample(50)
stc = mne.morph_data('sample', 'fsaverage', stc, grade=5, smooth=20,
subjects_dir=subjects_dir)
n_vertices_fsave, n_times = stc.data.shape
tstep = stc.tstep
n_subjects1, n_subjects2 = 7, 9
print('Simulating data for %d and %d subjects.' % (n_subjects1, n_subjects2))
# Let's make sure our results replicate, so set the seed.
np.random.seed(0)
X1 = np.random.randn(n_vertices_fsave, n_times, n_subjects1) * 10
X2 = np.random.randn(n_vertices_fsave, n_times, n_subjects2) * 10
X1[:, :, :] += stc.data[:, :, np.newaxis]
# make the activity bigger for the second set of subjects
X2[:, :, :] += 3 * stc.data[:, :, np.newaxis]
# We want to compare the overall activity levels for each subject
X1 = np.abs(X1) # only magnitude
X2 = np.abs(X2) # only magnitude
###############################################################################
# Compute statistic
# To use an algorithm optimized for spatio-temporal clustering, we
# just pass the spatial connectivity matrix (instead of spatio-temporal)
print('Computing connectivity.')
connectivity = spatial_tris_connectivity(grade_to_tris(5))
# Note that X needs to be a list of multi-dimensional array of shape
# samples (subjects_k) x time x space, so we permute dimensions
X1 = np.transpose(X1, [2, 1, 0])
X2 = np.transpose(X2, [2, 1, 0])
X = [X1, X2]
# Now let's actually do the clustering. This can take a long time...
# Here we set the threshold quite high to reduce computation.
p_threshold = 0.0001
f_threshold = stats.distributions.f.ppf(1. - p_threshold / 2.,
n_subjects1 - 1, n_subjects2 - 1)
print('Clustering.')
T_obs, clusters, cluster_p_values, H0 = clu =\
spatio_temporal_cluster_test(X, connectivity=connectivity, n_jobs=2,
threshold=f_threshold)
# Now select the clusters that are sig. at p < 0.05 (note that this value
# is multiple-comparisons corrected).
good_cluster_inds = np.where(cluster_p_values < 0.05)[0]
###############################################################################
# Visualize the clusters
print('Visualizing clusters.')
# Now let's build a convenient representation of each cluster, where each
# cluster becomes a "time point" in the SourceEstimate
fsave_vertices = [np.arange(10242), np.arange(10242)]
stc_all_cluster_vis = summarize_clusters_stc(clu, tstep=tstep,
vertices=fsave_vertices,
subject='fsaverage')
# Let's actually plot the first "time point" in the SourceEstimate, which
# shows all the clusters, weighted by duration
subjects_dir = op.join(data_path, 'subjects')
# blue blobs are for condition A != condition B
brain = stc_all_cluster_vis.plot('fsaverage', hemi='both', colormap='mne',
subjects_dir=subjects_dir,
time_label='Duration significant (ms)')
brain.set_data_time_index(0)
brain.show_view('lateral')
brain.save_image('clusters.png')
|
tijoytom/coreclr | src/ToolBox/SOS/tests/t_cmd_dumpmodule.py | Python | mit | 1,304 | 0.000767 | import lldb
import re |
import | testutils as test
def runScenario(assembly, debugger, target):
process = target.GetProcess()
res = lldb.SBCommandReturnObject()
ci = debugger.GetCommandInterpreter()
# Run debugger, wait until libcoreclr is loaded,
# set breakpoint at Test.Main and stop there
test.stop_in_main(debugger, assembly)
ci.HandleCommand("name2ee " + assembly + " Test.Main", res)
print(res.GetOutput())
print(res.GetError())
# Interpreter must have this command and able to run it
test.assertTrue(res.Succeeded())
output = res.GetOutput()
# Output is not empty
test.assertTrue(len(output) > 0)
    match = re.search(r'Module:\s+([0-9a-fA-F]+)', output)
# Line matched
test.assertTrue(match)
groups = match.groups()
# Match has a single subgroup
test.assertEqual(len(groups), 1)
md_addr = groups[0]
# Address must be a hex number
test.assertTrue(test.is_hexnum(md_addr))
ci.HandleCommand("dumpmodule " + md_addr, res)
print(res.GetOutput())
print(res.GetError())
# Interpreter must have this command and able to run it
test.assertTrue(res.Succeeded())
# TODO: test other use cases
# Continue current process and checks its exit code
test.exit_lldb(debugger, assembly)
|
ChristinaHammer/Client_Database | cdbgui.py | Python | mit | 60,561 | 0.01856 | """cdbgui.py
Developers: Christina Hammer, Noelle Todd
Last Updated: August 19, 2014
This file contains a class version of the interface, in an effort to
make a program with no global variables.
"""
from datetime import datetime, timedelta, date
from tkinter import *
from tkinter import ttk
from cdbifunc2 import *
import cdbvolunteer
class allobjects:
"""This class attempts to contain ALL labels, entries, etc.,
so that there are no global variables.
"""
def __init__(self, volunteerID, volunteerName, bgcolor):
"""This function declares all variables that are used by
more than one function.
"""
self.volID = volunteerID #the id of the volunteer who logged in
self.volunteerName = volunteerName
self.bgcolor = bgcolor
#Variables used later on
self.cursel = 0
self.selectedVisit = 0
self.id_list = []
self.mem_list = []
self.clientlist = list_people()
self.visitDict = {}
#holds entryboxes for family members
self.memDict = {}
self.info = {}
self.addmemberON = False #checks if member boxes have already been added
#dictionaries/lists used for date entry
self.month_li = ["January", "February", "March", "April",
"May", "June", "July", "August", "September",
"October", "November", "December"]
self.month_day_dict = {"January":31, "February":29, "March":31,
"April":30, "May":31, "June":30, "July":31,
"August":31, "September":30, "October":31,
"November":30, "December":31}
self.month_int = {1:"January", 2:"February", 3:"March",
4:"April", 5:"May", 6:"June", 7:"July",
8:"August", 9:"September", 10:"October",
11:"November", 12:"December"}
self.int_month = {"January":1, "February":2, "March":3,
"April":4, "May":5, "June":6, "July":7,
"August":8, "September":9, "October":10,
"November":11, "December":12}
#customize colors/fonts
#This will connect to the database itself,
#and retrieve the colors from there.
#self.bgcolor = 'light blue' #'lavender'
#self.labfont = 'Helvetica'
#self.labBGcolor = 'gray10'
#self.labFGcolor = 'white'
#self.cliSearLabBG = 'Coral'
#self.cliSearLabFG = 'white'
#configuring window
self.ciGui=Tk()
self.gridframe=Frame(self.ciGui).grid()
self.ciGui.configure(background=self.bgcolor)
self.ciGui.title('Food Pantry Database')
#CLIENT SEARCH SETUP
self.cslabel = Label(self.gridframe,text='Client Search',
font=("Helvetica", 16),fg='white',bg='gray10')\
.grid(row=0,column=0,columnspan=2, sticky=W)
self.csblank = Label(self.gridframe, text=' ',
font=('Helvetica',10), bg=self.bgcolor)\
.grid(row=0,column=2,sticky=E)
#Name Searchbox
self.ns = StringVar()
self.nameSearchEnt = Entry(self.gridframe, cursor = 'shuttle',
textvariable=self.ns)
self.nameSearchEnt.grid(row=2,column=0)
self.nameSearchEnt.bind('<Key>',self.nameSearch)
self.searchButton = Button(self.gridframe, text='Search Clients',
command=self.nameSearch)
self.searchButton.grid(row=2, column=1)
#Client Listbox
self.client_listbox = Listbox(self.gridframe,height=10,width=40)
self.client_listbox.bind('<<ListboxSelect>>', self.displayInfo )
self.client | _listbox.config(exportselection=0)
self.scrollb = Scrollbar(self.gridframe)
self.client_listbox.config(yscrollcommand=self.scrollb.set)
self.scrollb.config(command=self.client_listbox.yview)
self.client_listbox.grid(row=3, column=0, rowspan=5, columnspan=2)
self.scrollb.grid(row=3, column=1, rowspan=5, sticky=E+N+ | S)
self.firstSep = ttk.Separator(self.gridframe, orient='vertical')\
.grid(row=1,column=2,rowspan=40,sticky=NS)
self.NCButton = Button(self.gridframe, text='New Client',
command=self.newClientDisplay, width=25)\
.grid(row=9, column=0, columnspan=2)
#CLIENT INFORMATION SETUP
self.secondSep = ttk.Separator(self.gridframe, orient='horizontal')\
.grid(row=0,column=3,columnspan=40,sticky=EW)
self.cilabel = Label(self.gridframe, text='Client Information',
font=("Helvetica", 16),fg='white',bg='gray10')\
.grid(row=0,column=3,columnspan=12, sticky=W)
self.ciblank = Label(self.gridframe, text=' ',font=('Helvetica',10),
bg=self.bgcolor).grid(row=1,column=3,sticky=E)
#First name
self.fnv = StringVar()
self.fnlabel = Label(self.gridframe, text="First Name: ",
font=('Helvetica',12),bg=self.bgcolor)\
.grid(row=2, column=3,rowspan=2,sticky=E)
self.fname = Entry(self.gridframe, textvariable=self.fnv,bd=4)
self.fname.grid(row=2, column=4, rowspan=2, columnspan=1, sticky=W)
#Last name
self.lnv = StringVar()
self.lnlabel = Label(self.gridframe, text='Last Name: ',
font=('Helvetica',12),bg=self.bgcolor)\
.grid(row=2,column=5,rowspan=2, sticky=W)
self.lname = Entry(self.gridframe, textvariable=self.lnv,bd=4)
self.lname.grid(row=2,column=6, rowspan=2, columnspan=1, sticky=W)
#Phone
self.phv = StringVar()
self.phlabel = Label(self.gridframe, text='Phone: ',
font=('Helvetica',12),bg=self.bgcolor)\
.grid(row=2, column=7,rowspan=2, sticky=E)
self.phone = Entry(self.gridframe, textvariable=self.phv, bd=4)
self.phone.grid(row=2, column=8, columnspan=2, rowspan=2, sticky=W)
#Date of Birth
self.doblabel = Label(self.gridframe, text='Date of Birth: ',
font=('Helvetica',12),bg=self.bgcolor)\
.grid(row=4,column=3, rowspan=2, sticky=E)
self.mv = StringVar()
self.dv = StringVar()
self.yv = StringVar()
#dob month combobox
self.mob = ttk.Combobox(self.gridframe, width=10, state='readonly',
values=self.month_li, textvariable=self.mv)
self.mob.bind('<<ComboboxSelected>>', self.monthbox_select)
#dob day spinbox
self.dob = Spinbox(self.gridframe, from_=0, to=0,
textvariable=self.dv, width=5, bd=4)
#dob year spinbox
self.yob = Spinbox(self.gridframe, from_=1900, to=2500,
textvariable=self.yv, width=7, bd=4)
self.mob.grid(row=4, column=4, rowspan=2, sticky=W)
self.dob.grid(row=4, column=4, rowspan=2, sticky=E)
self.yob.grid(row=4, column=5, rowspan=2)
#Age
self.agev = StringVar()
self.avallabel = Label(self.gridframe, textvariable=self.agev,
font=('Helvetica',12),bg=self.bgcolor)\
.grid(row=4,column=6, rowspan=2)
#Date Joined
self.datejoinv = StringVar()
self.djlabel = Label(self.gridframe, text="Date Joined:",
font=('Helvetica',12), bg=self.bgcolor)\
.grid(row=4,column=7,rowspan=2, sticky=E)
self.djEntry = Entry(self.gridframe, textvariable=self.datejoinv,
bd=4).grid(row=4, column= |
eos87/Booktype | lib/booktype/apps/core/admin.py | Python | agpl-3.0 | 217 | 0 | from django.contrib import admin
from .models import Permission, Role, BookRole, BookSkeleton
admin.site.register | (Permission)
admin | .site.register(Role)
admin.site.register(BookRole)
admin.site.register(BookSkeleton)
|
kdopen/pep8-naming | run_tests.py | Python | mit | 1,995 | 0.002005 | import sys
import os
import pep8ext_naming
import re
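# compile() flag that returns the AST instead of compiling to bytecode
# (the same value as ast.PyCF_ONLY_AST).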
PyCF_ONLY_AST = 1024
IS_PY3 = sys.version_info[0] == 3
IS_PY3_TEST = re.compile(r"^#\s*python3\s*only")
IS_PY2_TEST = re.compile(r"^#\s*python2\s*only")
def main():
print('Running pep8-naming tests')
test_count = 0
errors = 0
for filename in os.listdir('testsuite'):
with open(os.pat | h.join('testsuite', filename)) as fd:
lines = list(fd)
if not is_test_allowed(lines):
continue
for testcase, codes in load_tests(lines):
| test_count += 1
errors += test_file(filename, testcase, codes)
if errors == 0:
print("%s tests run successful" % test_count)
sys.exit(0)
else:
print("%i of %i tests failed" % (errors, test_count))
sys.exit(1)
def is_test_allowed(lines):
if IS_PY3 and any(IS_PY2_TEST.search(line) for line in lines[:3]):
return False
if not IS_PY3 and any(IS_PY3_TEST.search(line) for line in lines[:3]):
return False
return True
def load_tests(lines):
testcase = []
codes = []
for line in lines:
if line.startswith("#:"):
if testcase:
yield testcase, codes
del testcase[:]
codes = line.split()[1:]
else:
testcase.append(line)
if testcase and codes:
yield testcase, codes
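# Testsuite files are plain Python split into cases by "#:" marker lines; the
# codes on a marker line are the errors the case that FOLLOWS it must raise
# ("Okay" means no errors expected). An illustrative file:
#
#   #: N801
#   class lowercase_class(object):
#       pass
#   #: Okay
#   class CapWordsClass(object):
#       pass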
def test_file(filename, lines, codes):
tree = compile(''.join(lines), '', 'exec', PyCF_ONLY_AST)
checker = pep8ext_naming.NamingChecker(tree, filename)
found_errors = []
for lineno, col_offset, msg, instance in checker.run():
found_errors.append(msg.split()[0])
if not found_errors and codes == ['Okay']:
return 0
errors = 0
for code in codes:
if code not in found_errors:
errors += 1
print("ERROR: %s not in %s" % (code, filename))
return errors
if __name__ == '__main__':
main()
|
legionus/billing | lib/bc/sockets.py | Python | gpl-3.0 | 4,022 | 0.037046 | #!/usr/bin/env python
#
# sockets.py
#
# Copyright (c) 2012-2013 by Alexey Gladkov
# Copyright (c) 2012-2013 by Nikolay Ivanov
#
# This file is covered by the GNU General Public License,
# which should be included with billing as the file COPYING.
#
import socket as pysocket
import threading, time, struct, errno, logging
import msgpack
class Socket(object):
def __init__(self, socket=None, fa | mily=pysocket.AF_INET, typ=pysocket.SOCK_STREAM, proto=0):
self.sock = socket or pysocket.socket(family, typ, proto)
for n in [ 'setsockopt', 'accept', 'bind', 'connect', 'listen' ]:
self.__dict__[n] = getattr(self.sock, n)
self.rd_lock = threading.Lock()
self.wr_lock = threading.Lock()
def __recv_failure_retry(self, rlen):
"""Recv socket and repeat as long as it returns with `errno' set to EINTR."""
res = []
bytes = int(rlen)
while bytes > 0:
try:
buf = self.so | ck.recv(bytes)
if not buf: break
bytes -= len(buf)
res.append(buf)
except pysocket.error, e:
if e.errno != errno.EINTR:
raise e
return ''.join(res)
def recv(self, bytes):
try:
self.rd_lock.acquire()
return self.__recv_failure_retry(bytes)
finally:
self.rd_lock.release()
return ""
def send(self, data):
try:
self.wr_lock.acquire()
data_str = msgpack.packb(data)
self.sock.sendall(struct.pack('Q', len(data_str)) + data_str)
finally:
self.wr_lock.release()
def close(self):
try:
self.sock.shutdown(pysocket.SHUT_RDWR)
self.sock.close()
except:
pass # Ignore any errors
class ServerBase(object):
def recv(self, sock):
try:
# Calculate pack size
n = struct.calcsize('Q')
# Get message length
m = sock.recv(n)
if not m:
return None
# Calculate payload size
mlen = int(struct.unpack('Q', m)[0])
# Read payload
data = sock.recv(mlen)
# Return valid python object
return msgpack.unpackb(data)
except Exception, e:
self.on_error(e)
return None
def on_connect(self, addr, sock):
pass
def on_disconnect(self, addr, sock):
pass
def on_recv(self, addr, sock, data):
pass
def on_error(self, exp):
#traceback.print_exc()
logging.error(exp)
class Server(ServerBase):
def __init__(self, host='localhost', port=9999):
self.addr = (host, port)
self.sock = Socket()
self.sock.setsockopt(pysocket.SOL_SOCKET, pysocket.SO_REUSEADDR, 1)
self.sock.setsockopt(pysocket.SOL_SOCKET, pysocket.SO_KEEPALIVE, 1)
self.sock.setsockopt(pysocket.IPPROTO_TCP, pysocket.TCP_NODELAY, 1)
def accept_connection(self, conn, addr):
try:
self.on_connect(addr, conn)
while True:
data = self.recv(conn)
if not data: break
self.on_recv(addr, conn, data)
self.on_disconnect(addr, conn)
conn.close()
except Exception, e:
self.on_error(e)
def run(self):
try:
# will fail when port as busy, or we don't have rights to bind
self.sock.bind(self.addr)
self.sock.listen(pysocket.SOMAXCONN)
while True:
sock, addr = self.sock.accept()
conn = Socket(sock)
c = threading.Thread(target=self.accept_connection, args=[conn, addr, ])
c.daemon = True
c.start()
except pysocket.error, e:
self.on_error(e)
def start(self, timeout=0):
t = threading.Thread(target=self.run)
t.daemon = True
t.start()
time.sleep(timeout)
return t
class Client(ServerBase):
def __init__(self, host='localhost', port=9999):
self.addr = (host, port)
self.sock = Socket()
def send(self, data):
try:
self.sock.send(data)
except Exception, e:
self.on_error(e)
def run(self):
try:
self.sock.connect(self.addr)
self.on_connect(self.addr, self.sock)
while True:
data = self.recv(self.sock)
if not data: break
self.on_recv(self.addr, self.sock, data)
except pysocket.error, e:
self.on_error(e)
finally:
self.on_disconnect(self.addr, self.sock)
self.sock.close()
def start(self, timeout=0):
t = threading.Thread(target=self.run)
t.daemon = True
t.start()
time.sleep(timeout)
return t
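# Minimal round-trip sketch (illustrative only, not part of the original
# module; assumes localhost:9999 is free): an echo server plus a client
# sending one length-prefixed msgpack message.
if __name__ == '__main__':
    class EchoServer(Server):
        def on_recv(self, addr, sock, data):
            sock.send(data)  # echo the decoded payload straight back

    class DemoClient(Client):
        def on_connect(self, addr, sock):
            sock.send({'msg': 'hello'})

        def on_recv(self, addr, sock, data):
            print 'echo:', data

    EchoServer().start(timeout=0.5)
    DemoClient().start(timeout=0.5).join(2)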
|
chrisbrake/PythonSandbox | toDo/app.py | Python | bsd-3-clause | 195 | 0 | import | responder
api = responder.API()
@api.route("/{greeting}")
async def greet_world(req, resp, *, greeting):
resp.text = f"{greeting}, world!"
if __name__ == '__m | ain__':
api.run()
|
fastly/ftw | test/integration/test_htmlcontains.py | Python | apache-2.0 | 1,658 | 0.016285 | from ftw import ruleset, testrunner, http, errors
import pytest
import re
import random
import threading
def test_logcontains(ruleset, test):
runner = testrunner.TestRunner()
for stage in test.stages:
runner.run_stage(stage)
# Should return a test error because its searching before response
def test_search1():
runner = testrunner.TestRunner()
x = ruleset.Input(dest_addr="example.com",headers={"Host":"example.com"})
http_ua = http.HttpUA()
with pytest.raises(errors.TestError):
runner.test_response(http_ua.response_object,re.compile('dog'))
# Should return a failure because it is searching for a word not there
def test_search2():
runner = testrunner.TestRunner()
x = ruleset.Input(dest_addr="example.com",headers={"Host":"example.com"})
http_ua = http.HttpUA()
http_ua.send_request(x)
| with pytest.raises(AssertionError):
runner.test_response(http_ua.response_object,re.compile('dog'))
# Should return a success because it is searching for a word not there
def test_search3():
runner = testrunner.TestRunner()
x = ruleset.Input(dest_addr="example.com",headers={"Host":"example.com"})
http_ua = http.HttpUA()
http_ua.send_request(x)
runner.test_response(http_ua.response_object,re.compile('established to be use | d for'))
# Should return a success because we found our regex
def test_search4():
runner = testrunner.TestRunner()
x = ruleset.Input(dest_addr="example.com",headers={"Host":"example.com"})
http_ua = http.HttpUA()
http_ua.send_request(x)
runner.test_response(http_ua.response_object,re.compile('.*'))
|
stvstnfrd/edx-platform | openedx/core/djangoapps/credentials/signals.py | Python | agpl-3.0 | 6,532 | 0.003368 | """
This file contains signal handlers for credentials-related functionality.
"""
from logging import getLogger
from django.contrib.sites.models import Site
from common.djangoapps.course_modes.models import CourseMode
from lms.djangoapps.certificates.models import CertificateStatuses, GeneratedCertificate
from lms.djangoapps.grades.api import CourseGradeFactory
from openedx.core.djangoapps.catalog.utils import get_programs
from openedx.core.djangoapps.credentials.models import CredentialsApiConfig
from .helpers import is_learner_records_enabled, is_learner_records_enabled_for_org # lint-amnesty, pylint: disable=unused-import
from .tasks.v1.tasks import send_grade_to_credentials
log = getLogger(__name__)
# "interesting" here means "credentials will want to know about it"
INTERESTING_MODES = CourseMode.CERTIFICATE_RELEVANT_MODES
INTERESTING_STATUSES = [
CertificateStatuses.notpassing,
CertificateStatuses.downloadable,
]
# These handlers have Credentials business logic that has bled into the LMS. But we want to filter here in order to
# not flood our task queue with a bunch of signals. So we put up with it.
def is_course_run_in_a_program(course_run_key):
""" Returns true if the given course key is in any program at all. """
# We don't have an easy way to go from course_run_key to a specific site that owns it. So just search each site.
sites = Site.objects.all()
str_key = str(course_run_key)
for site in sites:
for program in get_programs(site):
for course in program['courses']:
for course_run in course['course_runs']:
if str_key == course_run['key']:
return True
return False
def send_grade_if_interesting(user, course_run_key, mode, status, letter_grade, percent_grade, verbose=False):
""" Checks if grade is interesting to Credentials and schedules a Celery task if so. """
if verbose:
msg = u"Starting send_grade_if_interesting with params: "\
u"user [{username}], "\
u"course_run_key [{key}], "\
u"mode [{mode}], "\
u"status [{status}], "\
u"letter_grade [{letter_grade}], "\
u"percent_grade [{percent_grade}], "\
u"verbose [{verbose}]"\
.format(
username=getattr(user, 'username', None),
key=str(course_run_key),
mode=mode,
status=status,
letter_grade=letter_grade,
percent_grade=percent_grade,
verbose=verbose
)
log.info(msg)
# Avoid scheduling new tasks if certification is disabled. (Grades are a part of the records/cert story)
if not CredentialsApiConfig.current().is_learner_issuance_enabled:
if verbose:
log.info("Skipping send grade: is_learner_issuance_enabled False")
return
# Avoid scheduling new tasks if learner records are disabled for this site.
if not is_learner_records_enabled_for_org(course_run_key.org):
if verbose:
log.info(
u"Skipping send grade: ENABLE_LEARNER_RECORDS False for org [{org}]".format(
org=course_run_key.org
)
)
return
# Grab mode/status if we don't have them in hand
if mode is None or status is None:
try:
cert = GeneratedCertificate.objects.get(user=user, course_id=course_run_key) # pylint: disable=no-member
mode = cert.mode
status = cert.status
except GeneratedCertificate.DoesNotExist:
# We only care about grades for which there is a certificate.
if verbose:
log.info(
u"Skipping send grade: no cert for user [{username}] & course_id [{course_id}]".format(
username=getattr(user, 'username', None),
course_id=str(course_run_key)
)
)
return
# Don't worry about whether it's available as well as awarded. Just awarded is good enough to record a verified
# attempt at a course. We want even the grades that didn't pass the class because Credentials wants to know about
# those too.
if mode not in INTERESTING_MODES or status not in INTERESTING_STATUSES:
if verbose:
log.info(
u"Skipping send grade: mode/status uninteresting for mode [{mode}] & status [{status}]".format(
mode=mode,
status=status
)
)
return
# If the course isn't in any program, don't bother telling Credentials about it. When Credentials grows support
# for course records as well as program records, we'll need to open this up.
if not is_course_run_in_a_program(course_run_key):
if verbose:
log.info(
u"Skipping send grade: course run not in a program. [{course_id}]".format(course_id=str(course_run_key))
)
return
# Grab grades if we don't have them in hand
if letter_grade is None or percent_grade is None:
grade = CourseGradeFactory().read(user, course_key=course_run_key, create_if_needed=False)
if grade is None:
if verbose:
log.info(
u"Skipping send grade: No grade found for user [{username}] & course_id [{course_id}]".format(
username=getattr(user, 'username', None),
course_id=str(course_run_key)
)
)
return
letter_grade = grade.letter_grade
percent_grade = | grade.pe | rcent
send_grade_to_credentials.delay(user.username, str(course_run_key), True, letter_grade, percent_grade)
def handle_grade_change(user, course_grade, course_key, **kwargs):
"""
Notifies the Credentials IDA about certain grades it needs for its records, when a grade changes.
"""
send_grade_if_interesting(
user,
course_key,
None,
None,
course_grade.letter_grade,
course_grade.percent,
verbose=kwargs.get('verbose', False)
)
def handle_cert_change(user, course_key, mode, status, **kwargs):
"""
Notifies the Credentials IDA about certain grades it needs for its records, when a cert changes.
"""
send_grade_if_interesting(user, course_key, mode, status, None, None, verbose=kwargs.get('verbose', False))
|
bigblindbais/pytk | src/pytk/factory/factory.py | Python | mit | 1,952 | 0.000512 | value_None = object()
class FactoryException(Exception):
pass
class Factory:
class Item:
def __init__(self, factory, i):
self.factory = factory
self.i = i
@property
def value(self):
return self.factory.value(se | lf.i)
@value.setter
def value(self, value):
self.i = self.factory.i(value)
def copy(self):
return self.factory.item(self.i)
def __eq__(self, other):
try:
return self.factory is other.factory and self.i == other.i
except AttributeError:
return self.value == other
def __ne__(self, other):
return not self == o | ther
def __hash__(self):
return hash(self.factory) ^ hash(self.i)
def __int__(self):
return self.i
def __str__(self):
return self.factory.istr(self)
def __repr__(self):
return f'Item({self.factory.istr(self)})'
@staticmethod
def istr(item):
return str(item.value)
def i(self, value):
raise NotImplementedError
def item(self, i=None, value=value_None):
if not self.check_ivalue(i, value):
raise FactoryException('factory.item(): index and values do not match')
if i is None:
i = 0 if value is value_None else self.i(value)
return self.Item(self, i) # this might be annoying for union...
def check_ivalue(self, i, value):
return i is None or value is value_None or self.value(i) == value
def isitem(self, item):
try:
return item.factory is self and 0 <= item.i < self.nitems
except AttributeError:
return False
@property
def items(self):
return map(self.item, range(self.nitems))
def __iter__(self):
return self.items
def __len__(self):
return self.nitems
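# Illustrative concrete factory (a sketch, not part of the original module):
# wraps a fixed sequence of values, using each value's list index as its item
# index, and supplies the value/i/nitems members the base class leaves abstract.
class ListFactory(Factory):
    def __init__(self, values):
        self._values = list(values)

    @property
    def nitems(self):
        return len(self._values)

    def value(self, i):
        return self._values[i]

    def i(self, value):
        return self._values.index(value)

# e.g. ListFactory('abc').item(value='b').i == 1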
|
ekozlowski/ansible-minecraft | fabfile.py | Python | mit | 717 | 0.015342 | from fabric import api as fab
from contextlib import contextmanager
@contextmanager
def with_vagrant():
with fab.settings(user="vagrant",host_string="127.0.0.1:2222",key_filename=".vagrant | /machines/minecraft/virtualbox/private_key"):
yield
def ping(ip):
with with_vagrant():
return fab.run('ping -c 4 {}'.format(ip))
def save():
with with_vagrant():
fab.sudo('/etc/init.d/minecraft backup')
fab.get(remote_path='/srv/minecraft-server/backups/*', local_path="/Users/e003070/Dropbo | x/minecraft_backups")
def restore():
with with_vagrant():
fab.put(remote_path='/srv/minecraft-server/backups/', local_path="/Users/e003070/Dropbox/minecraft_backups")
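# Illustrative shell invocations (Fabric 1.x task syntax; task names map to
# the functions above):
#   fab ping:8.8.8.8
#   fab save
#   fab restore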
|
Infinidat/lanister | flask_app/auth.py | Python | bsd-3-clause | 172 | 0 | from flask.ext.security import SQLAlchemyUserDatastore
from .models import Role, User, | db
# Setup Flask-Security
user_datastore = SQLAlchemyUs | erDatastore(db, User, Role)
|
carlesm/ambrosio | ambrosio/actions/__init__.py | Python | gpl-3.0 | 202 | 0 |
from Action impo | rt Action
from MusicPlayer import MusicPlayer
from SensorAction import SensorAction
from WakeAction import WakeAction
__all__ = ["Action", "MusicPlayer", "SensorAction", "WakeActio | n"]
|
SymbiFlow/icestorm | icebox/icebox_html.py | Python | isc | 26,039 | 0.004839 | #!/usr/bin/env python3
#
# Copyright (C) 2015 Clifford Wolf <clifford@clifford.at>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
import icebox
from icebox import re_match_cached, re_sub_cached
import getopt, sys, os, re
chipname = "iCE40 HX1K"
chipdbfile = "chipdb-1k.txt"
outdir = None
mode8k = False
mode384 = False
tx, ty = 0, 0
def usage():
print("Usage: %s [options]" % os.path.basename(sys.argv[0]))
print(" -x tile_x_coordinate")
print(" -y tile_y_coordinate")
print(" -d outdir")
print(" -8")
print(" -3")
sys.exit(0)
try:
opts, args = getopt.getopt(sys.argv[1:], "x:y:d:83")
except:
usage()
for o, a in opts:
if o == "-x":
tx = int(a)
elif o == "-y":
ty = int(a)
elif o == "-d":
outdir = a
elif o == "-8":
mode8k = True
chipname = "iCE40 HX8K"
chipdbfile = "chipdb-8k.txt"
elif o == "-3":
mode384 = True
chipname = "iCE40 LP384"
chipdbfile = "chipdb-384.txt"
else:
usage()
if len(args) != 0:
usage()
ic = icebox.iceconfig()
mktiles = set()
if mode384:
ic.setup_empty_384()
for x in range(1, 7): # IO top/bottom
mktiles.add((x, 0))
mktiles.add((x, 9))
for x in list(range(1, 3)) + list(range(5, 7)): # corners
mktiles.add((x, 1))
mktiles.add((x, 8))
for x in [1,6]:
mktiles.add((x, 2))
mktiles.add((x, 7))
for y in range(1, 9): # left/right IO
mktiles.add((0, y))
mktiles.add((7, y))
for x in range(3, 5): # middle square
for y in range(4, 6):
mktiles.add((x, y))
elif mode8k:
ic.setup_empty_8k()
for x in list(range(1, 3)) + list(range(8-2, 8+3)) + list(range(15, 19)) + list(range(25-2, 25+3)) + list(range(33-2, 33)):
mktiles.add((x, 0))
mktiles.add((x, 33))
for x in list(range(0, 3)) + list(range(8-1, 8+2)) + list(range(25-1, 25+2)) + list(range(33-2, 34)):
mktiles.add((x, 1))
mktiles.add((x, 32))
for x in list(range(0, 2)) + list(range(8-1, 8+2)) + list(range(25-1, 25+2)) + list(range(34-2, 34)):
mktiles.add((x, 2))
mktiles.add((x, 31))
for x in [0, 33]:
mktiles.add((x, 15))
mktiles.add((x, 16))
mktiles.add((x, 17))
mktiles.add((x, 18))
for x in [16, 17]:
mktiles.add((x, 16))
mktiles.add((x, 17))
else:
ic.setup_empty_1k()
for x in range(1, 13):
mktiles.add((x, 0))
mktiles.add((x, 17))
for x in list(range(0, 6)) + list(range(8, 14)):
mktiles.add((x, 1))
mktiles.add((x, 16))
for x in list(range(0, 5)) + list(range(9, 14)):
mktiles.add((x, 2))
mktiles.add((x, 15))
for y in range(7, 11):
mktiles.add((0, y))
mktiles.add((13, y))
for x in range(6, 8):
for y in range(8, 10):
mktiles.add((x, y))
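# A one-element list rather than a plain int so the print_expand_* helpers
# below can mutate the shared counter in place without a global statement.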
expand_count=[0]
def print_expand_div(title):
print('<a id="exph%d" href="#" onclick="document.getElementById(\'exph%d\').style.display=\'none\'; document.getElementById(\'exp%d\').style.display=\'block\'; return false">[+] Show %s</a><div id="exp%d" style="display:none">' % (expand_count[0], expand_count[0], expand_count[0], title, expand_count[0]))
expand_count[0] += 1
def print_expand_end():
print('</div>')
def print_expand_all():
print('<a id="exph%d" href="#" onclick="for (i = 0; i < 100; i++) { document.getElementById(\'exph\'+i).style.display=\'none\'; document.getElementById(\'exp\'+i).style.display=\'block\'; }; return false">[+] Expand All</a><span id="exp%d" style="display:none"></span>' % (expand_count[0], expand_count[0]))
expand_count[0] += 1
def print_index():
print("<title>Project IceStorm – %s Overview</title>" % chipname)
print("<h1>Project IceStorm – %s Overview</h1>" % chipname)
print("""<i><a href="http://www.clifford.at/icestorm/">Project IceStorm</a> aims at documenting the bitstream format of Lattice iCE40 FPGAs
and providing simple tools for analyzing and creating bitstream files. This is work in progress.</i>""")
| print("""<p>This documentation is auto-generated by <tt>icebox_html.py</tt> from IceBox.<br/>
A machine-readable form of the database can be downloaded <a href="%s">here</a>.</p>""" % chipdbfile)
print("""<p>The iCE40 FPGA fabric is organized into tiles. The configuration bits
themselves have the same meaning in all tiles of the same type. But the way the tiles
are connected to each other depends on the types of neighbouring cells. Furthermore,
some wire names are different for e.g. an IO tile on | the left border and an IO tile on
the top border.</p>""")
print("""<p>Click on a highlighted tile below to view the bitstream details for the
tile. The highlighted tiles cover all combinations of neighbouring cells that can be found
in iCE40 FPGAs.</p>""")
print('<p><table border="1">')
for y in range(ic.max_y, -1, -1):
print("<tr>")
for x in range(ic.max_x + 1):
if mode8k:
fontsize="8px"
print('<td style="width:25px; height:20px;" align="center" valign="center"', end="")
else:
fontsize="10px"
print('<td style="width:40px; height:40px;" align="center" valign="center"', end="")
if ic.tile_pos(x, y) == None:
print('> </td>')
elif (x, y) in mktiles:
if ic.tile_type(x, y) == "IO": color = "#aee"
if ic.tile_type(x, y) == "LOGIC": color = "#eae"
if ic.tile_type(x, y) == "RAMB": color = "#eea"
if ic.tile_type(x, y) == "RAMT": color = "#eea"
print('bgcolor="%s"><span style="font-size:%s"><a style="color:#000; text-decoration:none" href="tile_%d_%d.html"><b>%s<br/>(%d %d)</b></a></span></td>' % (color, fontsize, x, y, ic.tile_type(x, y), x, y))
else:
if ic.tile_type(x, y) == "IO": color = "#8aa"
if ic.tile_type(x, y) == "LOGIC": color = "#a8a"
if ic.tile_type(x, y) == "RAMB": color = "#aa8"
if ic.tile_type(x, y) == "RAMT": color = "#aa8"
print('bgcolor="%s"><span style="font-size:%s">%s<br/>(%d %d)</span></td>' % (color, fontsize, ic.tile_type(x, y), x, y))
print("</tr>")
print("</table></p>")
def print_tile(tx, ty):
tile = ic.tile(tx, ty)
tile_type = ic.tile_type(tx, ty)
print("<title>Project IceStorm – %s %s Tile (%d %d)</title>" % (chipname, tile_type, tx, ty))
print("<h1>Project IceStorm – %s %s Tile (%d %d)</h1>" % (chipname, tile_type, tx, ty))
print("""<i><a href="http://www.clifford.at/icestorm/">Project IceStorm</a> aims at documenting the bitstream format of Lattice iCE40 FPGAs
and providing simple tools for analyzing and creating bitstream files. This is work in progress.</i>""")
print("""<p>This page describes the %s Tile (%d %d), what nets and
configuration bits it has and how it is connected to its neighbourhood.</p>""" % (tile_type, tx, ty))
visible_tiles = set()
print('<p><table border="1">')
for y in range(ty+2, ty-3, -1):
print("<tr>")
for x in range(tx-2, tx+3):
print('<td style="width:100px; height:70px;" align="center" valign="center"', end="")
if ic.tile_pos(x, y) == None:
print('> </td>')
else:
if ( |
RedhawkSDR/integration-gnuhawk | components/sub_ff_3i/tests/test_sub_ff_3i.py | Python | gpl-3.0 | 4,061 | 0.006895 | #!/usr/bin/env python
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of GNUHAWK.
#
# GNUHAWK is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/.
#
import unittest
import ossie.utils.testing
import os
from omniORB import any
class ComponentTests(ossie.utils.testing.ScaComponentTestCase):
"""Test for all component implementations in sub_ff_3i"""
def testScaBasicBehavior(self):
#######################################################################
# Launch the component with the default execparams
execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False)
execparams = dict([(x.id, any.from_any(x.value)) for x in execparams])
self.launch(execparams)
#######################################################################
# Verify the basic state of the component
self.assertNotEqual(self.comp, None)
self.assertEqual(self.comp.ref._non_existent(), False)
self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True)
#######################################################################
# Validate that query returns all expected parameters
# Query of '[]' should return the following set of properties
expectedProps = []
expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True))
expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True))
props = self.comp.query([])
props = dict((x.id, any.from_any(x.value)) for x in props)
# Query may return more than expected, but not less
for expectedProp in expectedProps:
self.assertEquals(props.has_key(expectedProp.id), True)
#######################################################################
# Verify that all expected ports are available
for port in self.scd.get_componentfeatures().get_ports().get_uses():
port_obj = self.comp.getPort(str(port.get_usesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True)
for port in self.scd.get_componentfeatures().get_ports().get_provides():
port_obj = self.comp.getPort(str(port.get_providesname() | ))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a(port.get_repid()), True)
#######################################################################
# Make sure start and stop can be called without throwing exceptions
self.comp.start()
self.c | omp.stop()
#######################################################################
# Simulate regular component shutdown
self.comp.releaseObject()
# TODO Add additional tests here
#
# See:
# ossie.utils.bulkio.bulkio_helpers,
# ossie.utils.bluefile.bluefile_helpers
# for modules that will assist with testing components with BULKIO ports
if __name__ == "__main__":
ossie.utils.testing.main("../sub_ff_3i.spd.xml") # By default tests all implementations
|
nearlyfreeapps/python-googleadwords | examples/adspygoogle/adwords/v201109/basic_operations/update_campaign.py | Python | apache-2.0 | 1,970 | 0.007614 | #!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permi | ssions and
# limitations under the License.
"""This example updates budget delivery method for a given campaign. To get
campaigns, run get_campaigns.py.
Tags: CampaignService.mutate
"""
__author__ = 'api.kwinter@gmail | .com (Kevin Winter)'
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import AdWordsClient
campaign_id = 'INSERT_CAMPAIGN_ID_HERE'
def main(client, campaign_id):
# Initialize appropriate service.
campaign_service = client.GetCampaignService(
'https://adwords-sandbox.google.com', 'v201109')
# Construct operations and update campaign.
operations = [{
'operator': 'SET',
'operand': {
'id': campaign_id,
'budget': {
'deliveryMethod': 'ACCELERATED'
}
}
}]
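  # With the 'SET' operator only the fields present in the operand are
  # changed; the campaign is identified by 'id' and its other settings are
  # left untouched.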
campaigns = campaign_service.Mutate(operations)[0]
# Display results.
for campaign in campaigns['value']:
print ('Campaign with name \'%s\' and id \'%s\' was updated.'
% (campaign['name'], campaign['id']))
print
print ('Usage: %s units, %s operations' % (client.GetUnits(),
client.GetOperations()))
if __name__ == '__main__':
# Initialize client object.
client = AdWordsClient(path=os.path.join('..', '..', '..', '..', '..'))
main(client, campaign_id)
|
scollis/iris | lib/iris/tests/unit/fileformats/pp/test_save.py | Python | gpl-3.0 | 4,803 | 0 | # (C) British Crown Copyright 2014, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the `iris.fileformats.pp.save` function."""
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import mock
from iris.fileformats._ff_cross_references import STASH_TRANS
import iris.fileformats.pp as pp
import iris.tests.stock as stock
def _pp_save_ppfield_values(cube):
"""
Emulate saving a cube as PP, and capture the resulting PP field values.
"""
# Create a test object to stand in for a real PPField.
pp_field = mock.MagicMock(spec=pp.PPField3)
# Add minimal content required by the pp.save operation.
pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN
# Save cube to a dummy file, mocking the internally created PPField
with mock.patch('iris.fileformats.pp.PPField3',
                    return_value=pp_field):
target_filelike = mock.Mock(name='target')
target_filelike.mode = ('b')
pp.save(cube, target_filelike)
    # Return pp-field mock with all the written properties
return pp_field
class TestLbfcProduction(tests.IrisTest):
def setUp(self):
self.cube = stock.lat_lon_cube()
def check_cube_stash_yields_lbfc(self, stash, lbfc_expected):
if stash:
self.cube.attributes['STASH'] = stash
lbfc_produced = _pp_save_ppfield_values(self.cube).lbfc
self.assertEqual(lbfc_produced, lbfc_expected)
def test_known_stash(self):
stashcode_str = 'm04s07i002'
self.assertIn(stashcode_str, STASH_TRANS)
self.check_cube_stash_yields_lbfc(stashcode_str, 359)
def test_unknown_stash(self):
stashcode_str = 'm99s99i999'
self.assertNotIn(stashcode_str, STASH_TRANS)
self.check_cube_stash_yields_lbfc(stashcode_str, 0)
def test_no_stash(self):
self.assertNotIn('STASH', self.cube.attributes)
self.check_cube_stash_yields_lbfc(None, 0)
def check_cube_name_units_yields_lbfc(self, name, units, lbfc_expected):
self.cube.rename(name)
self.cube.units = units
lbfc_produced = _pp_save_ppfield_values(self.cube).lbfc
self.assertEqual(lbfc_produced, lbfc_expected,
'Lbfc for ({!r} / {!r}) should be {:d}, '
'got {:d}'.format(
name, units, lbfc_expected, lbfc_produced))
def test_name_units_to_lbfc(self):
# Check LBFC value produced from name and units.
self.check_cube_name_units_yields_lbfc(
'sea_ice_temperature', 'K', 209)
def test_bad_name_units_to_lbfc_0(self):
# Check that badly-formed / unrecognised cases yield LBFC == 0.
self.check_cube_name_units_yields_lbfc('sea_ice_temperature', 'degC',
0)
self.check_cube_name_units_yields_lbfc('Junk_Name', 'K',
0)
class TestLbsrceProduction(tests.IrisTest):
def setUp(self):
self.cube = stock.lat_lon_cube()
def check_cube_um_source_yields_lbsrce(
self, source_str=None, um_version_str=None, lbsrce_expected=None):
if source_str is not None:
self.cube.attributes['source'] = source_str
if um_version_str is not None:
self.cube.attributes['um_version'] = um_version_str
lbsrce_produced = _pp_save_ppfield_values(self.cube).lbsrce
self.assertEqual(lbsrce_produced, lbsrce_expected)
def test_none(self):
self.check_cube_um_source_yields_lbsrce(
None, None, 0)
def test_source_only_no_version(self):
self.check_cube_um_source_yields_lbsrce(
'Data from Met Office Unified Model', None, 1111)
def test_source_only_with_version(self):
self.check_cube_um_source_yields_lbsrce(
'Data from Met Office Unified Model 12.17', None, 12171111)
def test_um_version(self):
self.check_cube_um_source_yields_lbsrce(
'Data from Met Office Unified Model 12.17', '25.36', 25361111)
if __name__ == "__main__":
tests.main()
|
googleapis/python-network-security | google/cloud/network_security_v1beta1/types/client_tls_policy.py | Python | apache-2.0 | 8,077 | 0.001238 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.network_security_v1beta1.types import tls
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.networksecurity.v1beta1",
manifest={
"ClientTlsPolicy",
"ListClientTlsPoliciesRequest",
"ListClientTlsPoliciesResponse",
"GetClientTlsPolicyRequest",
"CreateClientTlsPolicyRequest",
"UpdateClientTlsPolicyRequest",
"DeleteClientTlsPolicyRequest",
},
)
class ClientTlsPolicy(proto.Message):
r"""ClientTlsPolicy is a resource that specifies how a client
should authenticate connections to backends of a service. This
resource itself does not affect configuration unless it is
attached to a backend service resource.
Attributes:
name (str):
Required. Name of the ClientTlsPolicy resource. It matches
the pattern
``projects/*/locations/{location}/clientTlsPolicies/{client_tls_policy}``
description (str):
Optional. Free-text description of the
resource.
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The timestamp when the resource
was created.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The timestamp when the resource
was updated.
labels (Sequence[google.cloud.network_security_v1beta1.types.ClientTlsPolicy.LabelsEntry]):
Optional. Set of label tags associated with
the resource.
sni (str):
Optional. Server Name Indication string to
present to the server during TLS handshake. E.g:
"secure.example.com".
client_certificate (google.cloud.network_security_v1beta1.types.CertificateProvider):
Optional. Defines a mechanism to provision
client identity (public and private keys) for
peer to peer authentication. The presence of
this dictates mTLS.
server_validation_ca (Sequence[google.cloud.network_security_v1beta1.types.ValidationCA]):
Optional. Defines the mechanism to obtain the
Certificate Authority certificate to validate
the server certificate. If empty, client does
not validate the server certificate.
"""
name = proto.Field(proto.STRING, number=1,)
description = proto.Field(proto.STRING, number=2,)
create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,)
update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,)
labels = proto.MapField(proto.STRING, proto.STRING, number=5,)
sni = proto.Field(proto.STRING, number=6,)
client_certificate = proto.Field(
proto.MESSAGE, number=7, message=tls.CertificateProvider,
)
server_validation_ca = proto.RepeatedField(
proto.MESSAGE, number=8, message=tls.ValidationCA,
)
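# A hedged construction sketch (not part of the generated file): proto-plus
# messages accept their fields as keyword arguments, so a minimal policy
# could be built as below. The resource name and SNI values are placeholders.
#
#   policy = ClientTlsPolicy(
#       name="projects/my-project/locations/global/clientTlsPolicies/my-policy",
#       sni="secure.example.com",
#       server_validation_ca=[tls.ValidationCA()],
#   )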
class ListClientTlsPoliciesRequest(proto.Message):
r"""Request used | by the ListClientTlsPolicies method.
Attributes:
parent (str):
Required. The project and location from which the
ClientTlsPolicies should be listed, specified in the format
``projects/*/locations/{location}``.
page_size (int):
            Maximum number of ClientTlsPolicies to return
per call.
page_token (str):
The value returned by the last
``ListClientTlsPoliciesResponse`` Indicates that this is a
continuation of a prior ``ListClientTlsPolicies`` call, and
that the system should return the next page of data.
"""
parent = proto.Field(proto.STRING, number=1,)
page_size = proto.Field(proto.INT32, number=2,)
page_token = proto.Field(proto.STRING, number=3,)
class ListClientTlsPoliciesResponse(proto.Message):
r"""Response returned by the ListClientTlsPolicies method.
Attributes:
client_tls_policies (Sequence[google.cloud.network_security_v1beta1.types.ClientTlsPolicy]):
List of ClientTlsPolicy resources.
next_page_token (str):
If there might be more results than those appearing in this
response, then ``next_page_token`` is included. To get the
next set of results, call this method again using the value
of ``next_page_token`` as ``page_token``.
"""
@property
def raw_page(self):
return self
client_tls_policies = proto.RepeatedField(
proto.MESSAGE, number=1, message="ClientTlsPolicy",
)
next_page_token = proto.Field(proto.STRING, number=2,)
class GetClientTlsPolicyRequest(proto.Message):
r"""Request used by the GetClientTlsPolicy method.
Attributes:
name (str):
Required. A name of the ClientTlsPolicy to get. Must be in
the format
``projects/*/locations/{location}/clientTlsPolicies/*``.
"""
name = proto.Field(proto.STRING, number=1,)
class CreateClientTlsPolicyRequest(proto.Message):
r"""Request used by the CreateClientTlsPolicy method.
Attributes:
parent (str):
Required. The parent resource of the ClientTlsPolicy. Must
be in the format ``projects/*/locations/{location}``.
client_tls_policy_id (str):
Required. Short name of the ClientTlsPolicy resource to be
created. This value should be 1-63 characters long,
containing only letters, numbers, hyphens, and underscores,
and should not start with a number. E.g.
"client_mtls_policy".
client_tls_policy (google.cloud.network_security_v1beta1.types.ClientTlsPolicy):
Required. ClientTlsPolicy resource to be
created.
"""
parent = proto.Field(proto.STRING, number=1,)
client_tls_policy_id = proto.Field(proto.STRING, number=2,)
client_tls_policy = proto.Field(proto.MESSAGE, number=3, message="ClientTlsPolicy",)
class UpdateClientTlsPolicyRequest(proto.Message):
r"""Request used by UpdateClientTlsPolicy method.
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Optional. Field mask is used to specify the fields to be
overwritten in the ClientTlsPolicy resource by the update.
The fields specified in the update_mask are relative to the
resource, not the full request. A field will be overwritten
if it is in the mask. If the user does not provide a mask
then all fields will be overwritten.
client_tls_policy (google.cloud.network_security_v1beta1.types.ClientTlsPolicy):
Required. Updated ClientTlsPolicy resource.
"""
update_mask = proto.Field(
proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask,
)
client_tls_policy = proto.Field(proto.MESSAGE, number=2, message="ClientTlsPolicy",)
class DeleteClientTlsPolicyRequest(proto.Message):
r"""Request used by the DeleteClientTlsPolicy method.
Attributes:
name (str):
Required. A name of the ClientTlsPolicy to delete. Must be
in the format
``projects/*/locations/{location}/clientTlsPolicies/*``.
"""
name = proto.Field(proto.STRING, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
|
hehongliang/tensorflow | tensorflow/python/util/tf_should_use_test.py | Python | apache-2.0 | 4,460 | 0.007399 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit tests for tf_should_use."""
# pylint: disable=unused-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import gc
import sys
from tensorflow.python.framework import constant_op
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
from tensorflow.python.util import tf_should_use
@contextlib.contextmanager
def reroute_error():
"""Temporarily reroute errors written to tf_logging.error into `captured`."""
with test.mock.patch.object(tf_should_use.tf_logging, 'error') as error:
with test.mock.patch.object(tf_should_use.tf_logging, 'fatal') as fatal:
yield error, fatal
class TfShouldUseTest(test.TestCase):
def testAddShouldUseWarningWhenNotUsed(self):
c = constant_op.constant(0, name='blah0')
def in_this_function():
h = tf_should_use._add_should_use_warning(c)
del h
with reroute_error() as (error, _):
in_this_function()
msg = '\n'.join(error.call_args[0])
self.assertIn('Object was never used', msg)
self.assertIn('blah0:0', msg)
self.assertIn('in_this_function', msg)
self.assertFalse(gc.garbage)
def testAddShouldUseFatalWhenNotUsed(self):
c = constant_op.constant(0, name='blah0')
def in_this_function():
h = tf_should_use._add_should_use_warning(c, fatal_error=True)
del h
with reroute_error() as (_, fatal):
in_this_function()
    msg = '\n'.join(fatal.call_args[0])
self.assertIn('Object was never used', msg)
self.assertIn('blah0:0', msg)
self.assertIn('in_this_function', msg)
self.assertFalse(gc.garbage)
def _testAddShouldUseWarningWhenUsed(self, fn, name):
c = constant_op.constant(0, name=name)
with reroute_error() as (error, fatal):
h = tf_should_use._add_should_use_warning(c)
fn(h)
    del h
error.assert_not_called()
fatal.assert_not_called()
def testAddShouldUseWarningWhenUsedWithAdd(self):
def add(h):
_ = h + 1
self._testAddShouldUseWarningWhenUsed(add, name='blah_add')
gc.collect()
self.assertFalse(gc.garbage)
def testAddShouldUseWarningWhenUsedWithGetName(self):
def get_name(h):
_ = h.name
self._testAddShouldUseWarningWhenUsed(get_name, name='blah_get_name')
gc.collect()
self.assertFalse(gc.garbage)
def testShouldUseResult(self):
@tf_should_use.should_use_result
def return_const(value):
return constant_op.constant(value, name='blah2')
with reroute_error() as (error, _):
return_const(0.0)
msg = '\n'.join(error.call_args[0])
self.assertIn('Object was never used', msg)
self.assertIn('blah2:0', msg)
self.assertIn('return_const', msg)
gc.collect()
self.assertFalse(gc.garbage)
def testShouldUseResultWhenNotReallyUsed(self):
@tf_should_use.should_use_result
def return_const(value):
return constant_op.constant(value, name='blah3')
with reroute_error() as (error, _):
with self.cached_session():
return_const(0.0)
# Creating another op and executing it does not mark the
# unused op as being "used".
v = constant_op.constant(1.0, name='meh')
self.evaluate(v)
msg = '\n'.join(error.call_args[0])
self.assertIn('Object was never used', msg)
self.assertIn('blah3:0', msg)
self.assertIn('return_const', msg)
gc.collect()
self.assertFalse(gc.garbage)
# Tests that mark_used is available in the API.
def testMarkUsed(self):
@tf_should_use.should_use_result
def return_const(value):
return constant_op.constant(value, name='blah3')
with self.cached_session():
return_const(0.0).mark_used()
if __name__ == '__main__':
test.main()
|
phantomii/restalchemy | restalchemy/tests/functional/restapi/ra_based/test_resources.py | Python | apache-2.0 | 13,940 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2016 Eugene Frolov <eugene@frolov.net.ru>
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import uuid as pyuuid
import mock
import requests
from six.moves.urllib import parse
from restalchemy.common import utils
from restalchemy.storage import exceptions
from restalchemy.storage.sql import engines
from restalchemy.tests.functional.restapi.ra_based.microservice import (
storable_models as models)
from restalchemy.tests.functional.restapi.ra_based.microservice import consts
from restalchemy.tests.functional.restapi.ra_based.microservice import service
from restalchemy.tests.unit import base
TEMPL_SERVICE_ENDPOINT = utils.lastslash("http://127.0.0.1:%s/")
TEMPL_ROOT_COLLECTION_ENDPOINT = TEMPL_SERVICE_ENDPOINT
TEMPL_V1_COLLECTION_ENDPOINT = utils.lastslash(parse.urljoin(
TEMPL_SERVICE_ENDPOINT, 'v1'))
TEMPL_VMS_COLLECTION_ENDPOINT = utils.lastslash(parse.urljoin(
TEMPL_V1_COLLECTION_ENDPOINT, 'vms'))
TEMPL_VM_RESOURCE_ENDPOINT = parse.urljoin(TEMPL_VMS_COLLECTION_ENDPOINT, '%s')
TEMPL_POWERON_ACTION_ENDPOINT = parse.urljoin(
utils.lastslash(TEMPL_VM_RESOURCE_ENDPOINT),
'actions/poweron/invoke')
TEMPL_PORTS_COLLECTION_ENDPOINT = utils.lastslash(parse.urljoin(
utils.lastslash(TEMPL_VM_RESOURCE_ENDPOINT), 'ports'))
TEMPL_PORT_RESOURCE_ENDPOINT = parse.urljoin(TEMPL_PORTS_COLLECTION_ENDPOINT,
'%s')
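# A worked expansion of the nested templates above (port and uuid values are
# illustrative): get_endpoint(TEMPL_PORTS_COLLECTION_ENDPOINT, 'abcd') with
# service_port 2101 yields 'http://127.0.0.1:2101/v1/vms/abcd/ports/'.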
class BaseResourceTestCase(base.BaseTestCase):
def get_endpoint(self, template, *args):
return template % ((self.service_port,) + tuple(args))
def setUp(self):
super(BaseResourceTestCase, self).setUp()
engines.engine_factory.configure_factory(consts.DATABASE_URI)
engine = engines.engine_factory.get_engine()
self.session = engine.get_session()
self.session.execute("""CREATE TABLE IF NOT EXISTS vms (
uuid CHAR(36) NOT NULL,
state VARCHAR(10) NOT NULL,
name VARCHAR(255) NOT NULL,
PRIMARY KEY (uuid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;""", None)
self.service_port = random.choice(range(2100, 2200))
url = parse.urlparse(self.get_endpoint(TEMPL_SERVICE_ENDPOINT))
self._service = service.RESTService(bind_host=url.hostname,
bind_port=url.port)
self._service.start()
def tearDown(self):
super(BaseResourceTestCase, self).tearDown()
self._service.stop()
self.session.execute("DROP TABLE IF EXISTS vms;", None)
class TestRootResourceTestCase(BaseResourceTestCase):
def test_get_versions_list(self):
response = requests.get(self.get_endpoint(
TEMPL_ROOT_COLLECTION_ENDPOINT))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), ["v1"])
class TestVersionsResourceTestCase(BaseResourceTestCase):
def test_get_resources_list(self):
response = requests.get(
self.get_endpoint(TEMPL_V1_COLLECTION_ENDPOINT))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), ["vms"])
class TestVMResourceTestCase(BaseResourceTestCase):
def _insert_vm_to_db(self, uuid, name, state):
vm = models.VM(uuid=uuid, name=name, state=state)
vm.save()
def _vm_exists_in_db(self, uuid):
try:
models.VM.objects.get_one(filters={'uuid': uuid})
return True
except exceptions.RecordNotFound:
return False
@mock.patch('uuid.uuid4')
def test_create_vm_resource_successful(self, uuid4_mock):
RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
uuid4_mock.return_value = RESOURCE_ID
vm_request_body = {
"name": "test"
}
vm_response_body = {
"uuid": str(RESOURCE_ID),
"name": "test",
"state": "off"
}
LOCATION = self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT, RESOURCE_ID)
response = requests.post(self.get_endpoint(
TEMPL_VMS_COLLECTION_ENDPOINT), json=vm_request_body)
self.assertEqual(response.status_code, 201)
self.assertEqual(response.headers['location'], LOCATION)
self.assertEqual(response.json(), vm_response_body)
def test_get_vm_resource_by_uuid_successful(self):
RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
self._insert_vm_to_db(uuid=RESOURCE_ID, name="test", state="off")
vm_response_body = {
"uuid": str(RESOURCE_ID),
"name": "test",
"state": "off"
}
VM_RES_ENDPOINT = self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT,
RESOURCE_ID)
response = requests.get(VM_RES_ENDPOINT)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), vm_response_body)
def test_update_vm_resource_successful(self):
RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
self._insert_vm_to_db(uuid=RESOURCE_ID, name="old", state="off")
vm_request_body = {
"name": "new"
}
vm_response_body = {
"uuid": str(RESOURCE_ID),
"name": "new",
"state": "off"
}
VM_RES_ENDPOINT = self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT,
RESOURCE_ID)
response = requests.put(VM_RES_ENDPOINT, json=vm_request_body)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), vm_response_body)
def test_delete_vm_resource_successful(self):
RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
self._insert_vm_to_db(uuid=RESOURCE_ID, name="test", state="off")
VM_RES_ENDPOINT = self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT,
RESOURCE_ID)
response = requests.delete(VM_RES_ENDPOINT)
self.assertEqual(response.status_code, 204)
self.assertFalse(self._vm_exists_in_db(RESOURCE_ID))
def test_process_vm_action_successful(self):
RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
self._insert_vm_to_db(uuid=RESOURCE_ID, name="test", state="off")
vm_response_body = {
"uuid": str(RESOURCE_ID),
"name": "test",
"state": "on"
}
POWERON_ACT_ENDPOINT = self.get_endpoint(TEMPL_POWERON_ACTION_ENDPOINT,
RESOURCE_ID)
response = requests.post(POWERON_ACT_ENDPOINT)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), vm_response_body)
def test_get_collection_vms_successful(self):
RESOURCE_ID1 = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
RESOURCE_ID2 = pyuuid.UUID("00000000-0000-0000-0000-000000000002")
self._insert_vm_to_db(uuid=RESOURCE_ID1, name="test1", state="off")
        self._insert_vm_to_db(uuid=RESOURCE_ID2, name="test2", state="on")
vm_response_body = [{
"uuid": str(RESOURCE_ID1),
"name": "test1",
"state": "off"
}, {
"uuid": str(RESOURCE_ID2),
"name": "test2",
"state": "on"
}]
response = requests.get(self.get_endpoint(
TEMPL_VMS_COLLECTION_ENDPOINT))
self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), vm_response_body)
delphcf/sis | sis/contacts/migrations/0009_auto_20150205_1942.py | Python | bsd-3-clause | 1,435 | 0.002091 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contacts', '0008_auto_20150205_1853'),
]
operations = [
migrations.AlterModelOptions(
name='organizationmember',
options={'ordering': ('status', '-activate_date')},
),
migrations.RemoveField(
model_name='organizationmember',
name='created',
),
migrations.RemoveField(
model_name='organizationmember',
name='modified',
),
migrations.AddField(
model_name='organizationmember',
            name='activate_date',
            field=models.DateTimeField(help_text='keep empty for an immediate activation', null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='organizationmember',
name='deactivate_date',
field=models.DateTimeField(help_text='keep empty for indefinite activation', null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='organizationmember',
name='status',
field=models.IntegerField(default=1, verbose_name='status', choices=[(0, 'Inactive'), (1, 'Active')]),
preserve_default=True,
),
]
|
Aigrefin/py3learn | learn/migrations/0002_auto_20160517_1353.py | Python | mit | 427 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-17 11:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('learn', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='languagedictionnary',
old_name='name',
new_name='language',
),
] | |
deepmind/dm_control | dm_control/mujoco/thread_safety_test.py | Python | apache-2.0 | 3,604 | 0.009156 | # Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests to check whether methods of `mujoco.Physics` are threadsafe."""
import platform
from absl.testing import absltest
from dm_control import _render
from dm_control.mujoco import engine
from dm_control.mujoco.testing import assets
from dm_control.mujoco.testing import decorators
MODEL = assets.get_contents('cartpole.xml')
NUM_STEPS = 10
# Context creation with GLFW is not threadsafe.
if _render.BACKEND == 'glfw':
# On Linux we are able to create a GLFW window in a single thread that is not
# the main thread.
# On Mac we are only allowed to create windows on the main thread, so we
# disable the `run_threaded` wrapper entirely.
NUM_THREADS = None if platform.system() == 'Darwin' else 1
else:
NUM_THREADS = 4
class ThreadSafetyTest(absltest.TestCase):
@decorators.run_threaded(num_threads=NUM_THREADS)
def test_load_physics_from_string(self):
engine.Physics.from_xml_string(MODEL)
@decorators.run_threaded(num_threads=NUM_THREADS)
def test_load_and_reload_physics_from_string(self):
physics = engine.Physics.from_xml_string(MODEL)
physics.reload_from_xml_string(MODEL)
@decorators.run_threaded(num_threads=NUM_THREADS)
def test_load_and_step_physics(self):
physics = engine.Physics.from_xml_string(MODEL)
for _ in range(NUM_STEPS):
physics.step()
@decorators.run_threaded(num_threads=NUM_THREADS)
def test_load_and_step_multiple_physics_parallel(self):
physics1 = engine.Physics.from_xml_string(MODEL)
physics2 = engine.Physics.from_xml_string(MODEL)
for _ in range(NUM_STEPS):
physics1.step()
physics2.step()
@decorators.run_threaded(num_threads=NUM_THREADS)
def test_load_and_step_multiple_physics_sequential(self):
physics1 = engine.Physics.from_xml_string(MODEL)
for _ in range(NUM_STEPS):
physics1.step()
del physics1
physics2 = engine.Physics.from_xml_string(MODEL)
for _ in range(NUM_STEPS):
physics2.step()
@decorators.run_threaded(num_threads=NUM_THREADS, calls_per_thread=5)
def test_load_physics_and_render(self):
    physics = engine.Physics.from_xml_string(MODEL)
# Check that frames aren't repeated - make the cartpole move.
physics.set_control([1.0])
unique_frames = set()
for _ in range(NUM_STEPS):
physics.step()
frame = physics.render(width=320, height=240, camera_id=0)
      unique_frames.add(frame.tobytes())
self.assertLen(unique_frames, NUM_STEPS)
@decorators.run_threaded(num_threads=NUM_THREADS, calls_per_thread=5)
def test_render_multiple_physics_instances_per_thread_parallel(self):
physics1 = engine.Physics.from_xml_string(MODEL)
physics2 = engine.Physics.from_xml_string(MODEL)
for _ in range(NUM_STEPS):
physics1.step()
physics1.render(width=320, height=240, camera_id=0)
physics2.step()
physics2.render(width=320, height=240, camera_id=0)
if __name__ == '__main__':
absltest.main()
|
yukezhu/tensorflow-reinforce | run_cem_cartpole.py | Python | mit | 2,013 | 0.022851 | from __future__ import print_function
from collections import deque
import numpy as np
import gym
env_name = 'CartPole-v0'
env = gym.make(env_name)
def observation_to_action(ob, theta):
    # linear threshold policy: theta packs the weights W1 and the bias b1
W1 = theta[:-1]
b1 = theta[-1]
return int((ob.dot(W1) + b1) < 0)
def theta_rollout(env, theta, num_steps, render=False):
    total_rewards = 0
observation = env.reset()
for t in range(num_steps):
action = observation_to_action(observation, theta)
observation, reward, done, _ = env.step(action)
total_rewards += reward
if render: env.render()
if done: break
return total_rewards, t
MAX_EPISODES = 10000
MAX_STEPS = 200
batch_size = 25
top_per = 0.2 # percentage of theta with highest score selected from all the theta
std = 1 # scale of standard deviation
# initialize
theta_mean = np.zeros(env.observation_space.shape[0] + 1)
theta_std = np.ones_like(theta_mean) * std
episode_history = deque(maxlen=100)
for i_episode in range(MAX_EPISODES):
# maximize function theta_rollout through cross-entropy method
theta_sample = np.tile(theta_mean, (batch_size, 1)) + np.tile(theta_std, (batch_size, 1)) * np.random.randn(batch_size, theta_mean.size)
reward_sample = np.array([theta_rollout(env, th, MAX_STEPS)[0] for th in theta_sample])
top_idx = np.argsort(-reward_sample)[:int(np.round(batch_size * top_per))]
top_theta = theta_sample[top_idx]
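    # Cross-entropy refit: re-estimate the sampling distribution's mean and
    # (diagonal) standard deviation from the elite samples selected above.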
theta_mean = top_theta.mean(axis = 0)
theta_std = top_theta.std(axis = 0)
total_rewards, t = theta_rollout(env, theta_mean, MAX_STEPS, render = True)
episode_history.append(total_rewards)
mean_rewards = np.mean(episode_history)
print("Episode {}".format(i_episode))
print("Finished after {} timesteps".format(t+1))
print("Reward for this episode: {}".format(total_rewards))
print("Average reward for last 100 episodes: {}".format(mean_rewards))
if mean_rewards >= 195.0:
print("Environment {} solved after {} episodes".format(env_name, i_episode+1))
break
|
DewarM/oppia | core/domain/user_jobs_continuous.py | Python | apache-2.0 | 15,896 | 0.000189 | # Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Jobs for queries personalized to individual users."""
import ast
import logging
import math
from core import jobs
from core.domain import exp_services
from core.domain import feedback_services
from core.domain import rating_services
from core.domain import stats_jobs_continuous
from core.platform import models
import feconf
import utils
(exp_models, collection_models, feedback_models, user_models) = (
models.Registry.import_models([
models.NAMES.exploration, models.NAMES.collection,
models.NAMES.feedback, models.NAMES.user]))
# TODO(bhenning): Implement a working real-time layer for the recent dashboard
# updates aggregator job.
class RecentUpdatesRealtimeModel(
jobs.BaseRealtimeDatastoreClassForContinuousComputations):
pass
class DashboardRecentUpdatesAggregator(jobs.BaseContinuousComputationManager):
"""A continuous-computation job that computes a list of recent updates
of explorations and feedback threads to show on a user's dashboard.
This job does not have a working realtime component: the
RecentUpdatesRealtimeModel does nothing. There will be a delay in
propagating new updates to the dashboard; the length of the delay will be
approximately the time it takes a batch job to run.
"""
@classmethod
def get_event_types_listened_to(cls):
return []
@classmethod
def _get_realtime_datastore_class(cls):
return RecentUpdatesRealtimeModel
@classmethod
def _get_batch_job_manager_class(cls):
return RecentUpdatesMRJobManager
@classmethod
def _handle_incoming_event(cls, active_realtime_layer, event_type, *args):
pass
# Public query methods.
@classmethod
def get_recent_notifications(cls, user_id):
"""Gets a list of recent notifications to show on this user's
dashboard.
Returns a 2-tuple. The first element is a float representing the number
of milliseconds since the Epoch when the job was queued. The second
element is a list of recent updates to explorations and feedback
threads; each entry is a dict with keys 'type', 'activity_id',
'activity_title', 'last_updated_ms', 'author_id' and 'subject'. Here,
'type' is either feconf.UPDATE_TYPE_EXPLORATION_COMMIT or
feconf.UPDATE_TYPE_FEEDBACK_MESSAGE, 'activity_id' is the id of the
exploration being committed to or to which the feedback thread belongs,
and 'activity_title' is the corresponding title.
"""
user_model = user_models.UserRecentChangesBatchModel.get(
user_id, strict=False)
return (
user_model.job_queued_msec if user_model else None,
user_model.output if user_model else [])
class RecentUpdatesMRJobManager(
jobs.BaseMapReduceJobManagerForContinuousComputations):
"""Manager for a MapReduce job that computes a list of recent notifications
for explorations, collections, and feedback threads watched by a user.
"""
@classmethod
def _get_continuous_computation_class(cls):
return DashboardRecentUpdatesAggregator
@staticmethod
def _get_most_recent_activity_commits(
activity_model_cls, activity_ids_list,
activity_type, commit_type, delete_commit_message):
"""Gets and returns a list of dicts representing the most recent
commits made for each activity represented by each ID provided in the
activity_ids_list parameter. These are the latest commits made by users
to each activity (that is, it will skip over any automated commits such
as those from the Oppia migration bot).
Args:
activity_model_cls: The storage layer object for an activity, such
as exp_models.ExplorationModel.
activity_ids_list: A list of activity IDs (such as exploration IDS)
for which the latest commits will be retrieved.
activity_type: The type (string) of activity being referenced, such
as 'exploration' or 'collection'.
commit_type: This (string) represents the activity update commit
type, such as feconf.UPDATE_TYPE_EXPLORATION_COMMIT.
delete_commit_message: This (string) represents the commit message
to use when an activity is found to be deleted, such as
feconf.COMMIT_MESSAGE_EXPLORATION_DELETED.
Returns:
A tuple with two entries:
- A list (one entry per activity ID) of dictionaries with the
following keys:
- type: The value of the commit_type argument.
- activity_id: The ID of the activity for this commit.
- activity_title: The title of the activity.
- author_id: The author who made the commit.
- last_update_ms: When the commit was created.
- subject: The commit message, otherwise (if the activity
has been deleted) a message indicating that the activity
was deleted.
- A list containing valid activity model instances which are
mappable to feedback threads
"""
most_recent_commits = []
activity_models = activity_model_cls.get_multi(
activity_ids_list, include_deleted=True)
tracked_models_for_feedback = []
for ind, activity_model in enumerate(activity_models):
if activity_model is None:
logging.error(
'Could not find %s %s' % (
activity_type, activity_ids_list[ind]))
continue
# Find the last commit that is not due to an automatic migration.
latest_manual_commit_version = activity_model.version
metadata_obj = activity_model_cls.get_snapshots_metadata(
activity_model.id,
[latest_manual_commit_version],
allow_deleted=True)[0]
while metadata_obj['committer_id'] == feconf.MIGRATION_BOT_USER_ID:
latest_manual_commit_version -= 1
metadata_obj = (
activity_model_cls.get_snapshots_metadata(
activity_model.id,
[latest_manual_commit_version],
allow_deleted=True)[0])
most_recent_commits.append({
'type': commit_type,
'activity_id': activity_model.id,
'activity_title': activity_model.title,
'author_id': metadata_obj['committer_id'],
'last_updated_ms': metadata_obj['created_on_ms'],
'subject': (
delete_commit_message
if activity_model.deleted
else metadata_obj['commit_message']
),
})
# If the user subscribes to this activity, he/she is automatically
# subscribed to all feedback threads for this activity.
if not activity_model.deleted:
tracked_models_for_feedback.append(activity_model)
return (most_recent_commits, tracked_models_for_feedback)
@classmethod
def entity_classes_to_map_over(cls):
return [user_models.UserSubscriptionsModel]
@staticmethod
def map(item):
user_id = item.id
job_queued_msec = RecentUpdatesMRJobManager._get_job_queued_msec()
        reducer_key = '%s@%s' % (user_id, job_queued_msec)
m-rossi/matplotlib2tikz | tikzplotlib/_util.py | Python | mit | 1,258 | 0.000795 | import matplotlib.transforms
import numpy
def has_legend(axes):
return axes.get_legend() is not None
def get_legend_text(obj):
"""Check if line is in legend."""
leg = obj.axes.get_legend()
if leg is None:
return None
keys = [h.get_label() for h in leg.legendHandles if h is not None]
values = [t.get_text() for t in leg.texts]
label = obj.get_label()
d = dict(zip(keys, values))
if label in d:
return d[label]
return None
def transform_to_data_coordinates(obj, xdata, ydata):
"""The coordinates might not be in data coordinates, but could be sometimes in axes
coordinates. For example, the matplotlib command
axes.axvline(2)
will have the y coordinates set to 0 and 1, not to the limits. Therefore, a
two-stage transform has to be applied:
1. first transforming to display coordinates, then
2. from display to data.
"""
    if obj.axes is not None and obj.get_transform() != obj.axes.transData:
        points = numpy.array([xdata, ydata]).T
transform = matplotlib.transforms.composite_transform_factory(
obj.get_transform(), obj.axes.transData.inverted()
)
return transform.transform(points).T
return xdata, ydata
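# A hedged usage sketch for the helper above; the matplotlib setup is
# illustrative, not part of this module:
#
#   import matplotlib.pyplot as plt
#   fig, ax = plt.subplots()
#   line = ax.axvline(2)  # stores y-data (0, 1) in axes coordinates
#   x, y = transform_to_data_coordinates(line, *line.get_data())
#   # x stays at (2, 2); y now spans the axes' current y-limits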
|
eedf/jeito | accounting/migrations/0008_auto_20170224_1912.py | Python | mit | 631 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-24 18:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration): |
dependencies = [
('accounting', '0007_bankstatement'),
]
operations = [
migrations.AddField(
model_name='entry',
name='scan',
field=models.FileField(blank=True, upload_to='justificatif'),
),
migrations.AlterField(
model_name='bankstatement',
name='scan',
field=models.FileField(upload_to='releves'),
),
]
|
skeelogy/maya-skNoiseDeformer | python/libnoise/perlin.py | Python | mit | 12,633 | 0.078287 | # Copyright (c) 2008, Casey Duncan (casey dot duncan at gmail dot com)
# see LICENSE.txt for details
"""Perlin noise -- pure python implementation"""
"""
Skeel Lee, 1 Jun 2014
-added SimplexNoise.snoise3() which calculates fBm based on SimplexNoise.noise3()
(I need the same function name, signature and behaviour as the one from the compiled module)
"""
__version__ = '$Id: perlin.py 521 2008-12-15 03:03:52Z casey.duncan $'
from math import floor, fmod, sqrt
from random import randint
# 3D Gradient vectors
_GRAD3 = ((1,1,0),(-1,1,0),(1,-1,0),(-1,-1,0),
(1,0,1),(-1,0,1),(1,0,-1),(-1,0,-1),
(0,1,1),(0,-1,1),(0,1,-1),(0,-1,-1),
(1,1,0),(0,-1,1),(-1,1,0),(0,-1,-1),
)
# 4D Gradient vectors
_GRAD4 = ((0,1,1,1), (0,1,1,-1), (0,1,-1,1), (0,1,-1,-1),
(0,-1,1,1), (0,-1,1,-1), (0,-1,-1,1), (0,-1,-1,-1),
(1,0,1,1), (1,0,1,-1), (1,0,-1,1), (1,0,-1,-1),
(-1,0,1,1), (-1,0,1,-1), (-1,0,-1,1), (-1,0,-1,-1),
(1,1,0,1), (1,1,0,-1), (1,-1,0,1), (1,-1,0,-1),
(-1,1,0,1), (-1,1,0,-1), (-1,-1,0,1), (-1,-1,0,-1),
(1,1,1,0), (1,1,-1,0), (1,-1,1,0), (1,-1,-1,0),
(-1,1,1,0), (-1,1,-1,0), (-1,-1,1,0), (-1,-1,-1,0))
# A lookup table to traverse the simplex around a given point in 4D.
# Details can be found where this table is used, in the 4D noise method.
_SIMPLEX = (
(0,1,2,3),(0,1,3,2),(0,0,0,0),(0,2,3,1),(0,0,0,0),(0,0,0,0),(0,0,0,0),(1,2,3,0),
(0,2,1,3),(0,0,0,0),(0,3,1,2),(0,3,2,1),(0,0,0,0),(0,0,0,0),(0,0,0,0),(1,3,2,0),
(0,0,0,0),(0,0,0,0),(0,0,0,0),(0,0,0,0),(0,0,0,0),(0,0,0,0),(0,0,0,0),(0,0,0,0),
(1,2,0,3),(0,0,0,0),(1,3,0,2),(0,0,0,0),(0,0,0,0),(0,0,0,0),(2,3,0,1),(2,3,1,0),
(1,0,2,3),(1,0,3,2),(0,0,0,0),(0,0,0,0),(0,0,0,0),(2,0,3,1),(0,0,0,0),(2,1,3,0),
(0,0,0,0),(0,0,0,0),(0,0,0,0),(0,0,0,0),(0,0,0,0),(0,0,0,0),(0,0,0,0),(0,0,0,0),
(2,0,1,3),(0,0,0,0),(0,0,0,0),(0,0,0,0),(3,0,1,2),(3,0,2,1),(0,0,0,0),(3,1,2,0),
(2,1,0,3),(0,0,0,0),(0,0,0,0),(0,0,0,0),(3,1,0,2),(0,0,0,0),(3,2,0,1),(3,2,1,0))
# Simplex skew constants
_F2 = 0.5 * (sqrt(3.0) - 1.0)
_G2 = (3.0 - sqrt(3.0)) / 6.0
_F3 = 1.0 / 3.0
_G3 = 1.0 / 6.0
class BaseNoise:
"""Noise abstract base class"""
permutation = (151,160,137,91,90,15,
131,13,201,95,96,53,194,233,7,225,140,36,103,30,69,142,8,99,37,240,21,10,23,
190,6,148,247,120,234,75,0,26,197,62,94,252,219,203,117,35,11,32,57,177,33,
88,237,149,56,87,174,20,125,136,171,168,68,175,74,165,71,134,139,48,27,166,
77,146,158,231,83,111,229,122,60,211,133,230,220,105,92,41,55,46,245,40,244,
102,143,54,65,25,63,161,1,216,80,73,209,76,132,187,208,89,18,169,200,196,
135,130,116,188,159,86,164,100,109,198,173,186,3,64,52,217,226,250,124,123,
5,202,38,147,118,126,255,82,85,212,207,206,59,227,47,16,58,17,182,189,28,42,
223,183,170,213,119,248,152,2,44,154,163,70,221,153,101,155,167,43,172,9,
        129,22,39,253,19,98,108,110,79,113,224,232,178,185,112,104,218,246,97,228,
251,34,242,193,238,210,144,12,191,179,162,241, 81,51,145,235,249,14,239,107,
49,192,214,31,181,199,106,157,184,84,204,176,115,121,50,45,127,4,150,254,
138,236,205,93,222,114,67,29,24,72,243,141,128,195,78,66,215,61,156,180)
period = len(permutation)
# Double permutation array so we don't need to wrap
permutation = permutation * 2
def __init__(self, period=None, permutation_table=None):
"""Initialize the noise generator. With no arguments, the default
period and permutation table are used (256). The default permutation
table generates the exact same noise pattern each time.
An integer period can be specified, to generate a random permutation
table with period elements. The period determines the (integer)
interval that the noise repeats, which is useful for creating tiled
textures. period should be a power-of-two, though this is not
        enforced. Note that the speed of the noise algorithm is independent of
the period size, though larger periods mean a larger table, which
consume more memory.
A permutation table consisting of an iterable sequence of whole
numbers can be specified directly. This should have a power-of-two
        length. Typical permutation tables are a sequence of unique integers in
the range [0,period) in random order, though other arrangements could
prove useful, they will not be "pure" simplex noise. The largest
element in the sequence must be no larger than period-1.
period and permutation_table may not be specified together.
"""
if period is not None and permutation_table is not None:
raise ValueError(
'Can specify either period or permutation_table, not both')
if period is not None:
self.randomize(period)
elif permutation_table is not None:
self.permutation = tuple(permutation_table) * 2
self.period = len(permutation_table)
def randomize(self, period=None):
"""Randomize the permutation table used by the noise functions. This
makes them generate a different noise pattern for the same inputs.
"""
if period is not None:
self.period = period
perm = list(range(self.period))
perm_right = self.period - 1
for i in list(perm):
j = randint(0, perm_right)
perm[i], perm[j] = perm[j], perm[i]
self.permutation = tuple(perm) * 2
class SimplexNoise(BaseNoise):
"""Perlin simplex noise generator
Adapted from Stefan Gustavson's Java implementation described here:
http://staffwww.itn.liu.se/~stegu/simplexnoise/simplexnoise.pdf
To summarize:
"In 2001, Ken Perlin presented 'simplex noise', a replacement for his classic
noise algorithm. Classic 'Perlin noise' won him an academy award and has
become an ubiquitous procedural primitive for computer graphics over the
years, but in hindsight it has quite a few limitations. Ken Perlin himself
designed simplex noise specifically to overcome those limitations, and he
spent a lot of good thinking on it. Therefore, it is a better idea than his
original algorithm. A few of the more prominent advantages are:
* Simplex noise has a lower computational complexity and requires fewer
multiplications.
* Simplex noise scales to higher dimensions (4D, 5D and up) with much less
computational cost, the complexity is O(N) for N dimensions instead of
the O(2^N) of classic Noise.
* Simplex noise has no noticeable directional artifacts. Simplex noise has
a well-defined and continuous gradient everywhere that can be computed
quite cheaply.
* Simplex noise is easy to implement in hardware."
"""
def noise2(self, x, y):
"""2D Perlin simplex noise.
Return a floating point value from -1 to 1 for the given x, y coordinate.
The same value is always returned for a given x, y pair unless the
permutation table changes (see randomize above).
"""
# Skew input space to determine which simplex (triangle) we are in
s = (x + y) * _F2
i = floor(x + s)
j = floor(y + s)
t = (i + j) * _G2
x0 = x - (i - t) # "Unskewed" distances from cell origin
y0 = y - (j - t)
if x0 > y0:
i1 = 1; j1 = 0 # Lower triangle, XY order: (0,0)->(1,0)->(1,1)
else:
i1 = 0; j1 = 1 # Upper triangle, YX order: (0,0)->(0,1)->(1,1)
x1 = x0 - i1 + _G2 # Offsets for middle corner in (x,y) unskewed coords
y1 = y0 - j1 + _G2
x2 = x0 + _G2 * 2.0 - 1.0 # Offsets for last corner in (x,y) unskewed coords
y2 = y0 + _G2 * 2.0 - 1.0
# Determine hashed gradient indices of the three simplex corners
perm = self.permutation
ii = int(i) % self.period
jj = int(j) % self.period
gi0 = perm[ii + perm[jj]] % 12
gi1 = perm[ii + i1 + perm[jj + j1]] % 12
gi2 = perm[ii + 1 + perm[jj + 1]] % 12
# Calculate the contribution from the three corners
tt = 0.5 - x0**2 - y0**2
if tt > 0:
g = _GRAD3[gi0]
noise = tt**4 * (g[0] * x0 + g[1] * y0)
else:
noise = 0.0
tt = 0.5 - x1**2 - y1**2
if tt > 0:
g = _GRAD3[gi1]
noise += tt**4 * (g[0] * x1 + g[1] * y1)
tt = 0.5 - x2**2 - y2**2
if tt > 0:
g = _GRAD3[gi2]
noise += tt**4 * (g[0] * x2 + g[1] * y2)
return noise * 70.0 # scale noise to [-1, 1]
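    # A hedged sketch of the snoise3() mentioned in the module docstring: fBm
    # built by summing octaves of noise3(). The parameter defaults follow the
    # compiled noise module's signature and are assumptions, as is the body.
    def snoise3(self, x, y, z, octaves=1, persistence=0.5, lacunarity=2.0):
        """Fractal Brownian motion over noise3, normalized to roughly [-1, 1]."""
        total = 0.0
        amplitude = 1.0
        frequency = 1.0
        max_amplitude = 0.0
        for _ in range(octaves):
            # Each octave scales the frequency by `lacunarity` and damps the
            # amplitude by `persistence`.
            total += self.noise3(
                x * frequency, y * frequency, z * frequency) * amplitude
            max_amplitude += amplitude
            amplitude *= persistence
            frequency *= lacunarity
        return total / max_amplitude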
def noise3(self, x, y, z):
"""3D Perlin simplex noise.
        Return a floating point value from -1 to 1 for the given x, y, z coordinate.
theboocock/plinkseq_utilities | pseq_util/phenotypes_snpmax.py | Python | mit | 4,546 | 0.011219 | """
Parses Phenotype Data from SNPMAX and places the data in a format for use
in plink/seq.
Format looks something like the one shown below.
PATIENT<TAB>PHENO1<TAB>PHENO2<TAB>
1<TAB>1<TAB>RED
2<TAB>0<TAB>BLUE
"""
from phenotype import Phenotype
import argparse
import tempfile
sample_list_pheno={}
def is_number(s):
try:
float(s)
return True
except ValueError:
return False
def is_int(s):
try:
n = float(s)
if n % 1 == 0:
return True
else:
return False
except ValueError:
return False
def parse_phenotypes(input_phenotypes, output_phenotypes, missing, delim):
if (missing == ' ' or missing == ''):
# set to something sensible
missing = '.'
with open(input_phenotypes) as input_phenotypes_f:
with open(output_phenotypes,'w') as out:
header_list = []
for i, line in enumerate(input_phenotypes_f):
if (i == 0):
header_list = [x.replace('\n','') for x in line.split(delim)]
phenotypes = [[] for i in header_list]
else:
for i, pheno in enumerate(line.split(delim)):
pheno = pheno.replace('\n','').replace('"','').replace(' ','')
if (pheno == ' ' or pheno == ''):
pheno = missing
phenotypes[i].append(pheno)
samples = phenotypes[0]
header_pheno = header_list[1:]
phenotypes = phenotypes[1:]
types = [sniff_datatype(item, missing) for item in phenotypes]
phenotype_obj = []
            for i, (ptype, head) in enumerate(zip(types, header_list[1:])):
                phenotype_obj.append(Phenotype(head, str(i), ptype, missing))
for i, pheno in enumerate(phenotype_obj):
for j, ind_p in enumerate(phenotypes[i]):
pheno.add_data(samples[j],ind_p)
# Print the header
for w_h in phenotype_obj:
out.write(w_h.get_header())
out.write("#ID"+delim+delim.join(header_pheno)+'\n')
for samp in samples:
out.write(samp + delim + delim.join([p.get_data(samp) for p in phenotype_obj]) + '\n')
def subset_file(samples, input_p):
    t_file = tempfile.NamedTemporaryFile(mode='w', delete=False)  # text mode so str lines can be written on Python 3
with open(samples) as s:
samples = [x.strip() for x in s]
with open(input_p) as f:
for i, line in enumerate(f):
if i == 0:
t_file.write(line)
else:
sample_id = line.split()[0]
if sample_id in samples:
t_file.write(line)
t_file.close()
return(t_file.name)
def sniff_datatype(data_column, missing):
nofails = "Integer"
is_num = [is_number(x) for x in data_column]
check_int = [is_int(x) for x in data_column]
    # Mark each entry 0 if missing, 1 otherwise; the sums below then count the non-missing entries
data_column = [0 if x == missing else 1 for x in data_column]
if(sum(is_num) != sum(data_column)):
return "String"
if(sum(check_int) != sum(data_column)):
return "Float"
return nofails
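# Worked examples for the sniffing logic above, with '.' as the missing value:
# sniff_datatype(['1', '2', '.'], '.') -> "Integer"
# sniff_datatype(['1.5', '2'], '.') -> "Float"
# sniff_datatype(['a', '1'], '.') -> "String"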
def main():
parser = argparse.ArgumentParser(description="Process SNPmax phenotypes")
parser.add_argument('-i','--input_phenotypes',dest='input_phenotypes'
, help="Take All phenotype variables and place them into a phenotypes file")
parser.add_argument('-o','--output',dest='output_phenotypes')
parser.add_argument('-s','--sample_file',dest='samples',
help="Line seperated sample names to extract from phenotypes")
parser.add_argument('-d','--delimiter',dest='delimiter',default='\t')
parser.add_argument('-m','--missing',dest='missing',default ="")
args = parser.parse_args()
assert args.input_phenotypes is not None, \
"-i or --input_phenotypes argument required"
assert args.output_phenotypes is not None, \
"-o or --output_phenotypes argument required"
delim = args.delimiter
missing = args.missing
if(args.samples is not None):
args.input_phenotypes = subset_file(args.samples, args.input_phenotypes)
parse_phenotypes(args.input_phenotypes, args.output_phenotypes, missing, delim)
#if(args.samples is not None):
# os.remove(args.input_phenotypes)
if __name__=="__main__":
main()
|
LogicalDash/kivy | kivy/core/camera/camera_picamera.py | Python | mit | 2,837 | 0 | '''
PiCamera Camera: Implement CameraBase with PiCamera
'''
#
# TODO: make usage of thread or multiprocess
#
__all__ = ('CameraPiCamera', )
from math import ceil
from kivy.logger import Logger
from kivy.clock import Clock
from kivy.graphics.texture import Texture
from kivy.core.camera import CameraBase
from picamera import PiCamera
import numpy
class CameraPiCamera(CameraBase):
'''Implementation of CameraBase using PiCamera
'''
_update_ev = None
def __init__(self, **kwargs):
self._camera = None
self._format = 'bgr'
self._framerate = kwargs.get('framerate', 30)
super(CameraPiCamera, self).__init__(**kwargs)
def init_camera(self):
if self._camera is not None:
self._camera.close()
self._camera = PiCamera()
self._camera.resolution = self.resolution
self._camera.framerate = self._framerate
self._camera.iso = 800
self.fps = 1. / self._framerate
if not self.stopped:
self.start()
def raw_buffer_size(self):
'''Round buffer size up to 32x16 blocks.
See https://picamera.readthedocs.io/en/release-1.13/recipes2.html#capturing-to-a-numpy-array
''' # noqa
return (
ceil(self.resolution[0] / 32.) * 32,
ceil(self.resolution[1] / 16.) * 16
)
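    # Worked example: a requested resolution of (100, 100) pads to (128, 112)
    # since ceil(100/32)*32 == 128 and ceil(100/16)*16 == 112, while an
    # already block-aligned (640, 480) stays (640, 480).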
def _update(self, dt):
if self.stopped:
return
if self._texture is None:
# Create the texture
self._texture = Texture.create(self._resolution)
self._texture.flip_vertical()
self.dispatch('on_load')
try:
bufsize = self.raw_buffer_size()
output = numpy.empty(
(bufsize[0] * bufsize[1] * 3,), dtype=numpy.uint8)
self._camera.capture(output, self._format, use_video_port=True)
# Trim the buffer to fit the actual requested resolution.
# TODO: Is there a simpler way to do all this reshuffling?
output = output.reshape((bufsize[0], bufsize[1], 3))
output = output[:self.resolution[0], :self.resolution[1], :]
self._buffer = output.reshape(
                (self.resolution[0] * self.resolution[1] * 3,))
self._copy_to_gpu()
except KeyboardInterrupt:
raise
except Exception:
Logger.exception('PiCamera: Couldn\'t get image from Camera')
def start(self):
super(CameraPiCamera, self).start()
if self._update_ev is not None:
self._update_ev.cancel()
        self._update_ev = Clock.schedule_interval(self._update, self.fps)
def stop(self):
super(CameraPiCamera, self).stop()
if self._update_ev is not None:
self._update_ev.cancel()
self._update_ev = None
|
douze12/music-metadata-modifier | main.py | Python | gpl-2.0 | 48,079 | 0.015808 | #!/usr/bin/python3
# -*-coding:UTF-8 -*
# Needed modules : python-mutagen python-glade2
# Application to manage the metadatas information on music track files
# @author douze12
# @date 26/04/2015
import time
try:
import thread
except ImportError as e:
import _thread as thread
import os
#import pygtk
#pygtk.require("2.0")
from gi.repository import Gtk,Pango,Gdk,GLib
from os import listdir
from os.path import isfile, join, isdir
import mutagen
import re
## CONSTANTS
FILE_NAME_INDEX = 0
FILE_PATH_INDEX = 1
METADATA_INDEX = 2
MOD_METADATA_INDEX = 3
VERSION="master"
# Main class
class Application:
def __init__(self):
# flag used to manually stop the searching thread
self.stop=False
        # contains the error message if the directory scan failed
self.scanError=None
# broadcast & magic wand button variables
self.broadcastButton=None
self.autoTitleButton=None
# load the glade file interface
self.builder = Gtk.Builder()
dir = os.path.dirname(__file__)
filename = os.path.join(dir, 'window.glade')
self.builder.add_from_file(filename)
self.builder.connect_signals(self)
window = self.builder.get_object("mainWindow")
window.show_all()
window.connect("delete-event", Gtk.main_quit)
# Hide the progress status grid element
self.builder.get_object("progressGrid").set_visible(False)
#get the treestore
self.treestore=self.builder.get_object("foundFileStore")
# set the rendering class for the file name column
cell=Gtk.CellRendererText()
self.builder.get_object("treeColumnFileName").pack_start(cell, True)
# add markup attribute in order to be able to insert Pango tags in file name
# necessary for bold
self.builder.get_object("treeColumnFileName").add_attribute(cell, 'markup', 0)
# load css file
self.__loadStyleFile()
# read the settings
self.__readSettingsFile()
# build the UI elements of the settings window
self.__buildSettingsWindow()
# build the UI elements of the about window
self.__buildAboutWindow()
# method called when the user choose a directory with the file selector
def onChooseFile(self, fileChoser):
print("Chosen file : " + fileChoser.get_file().get_path())
# hide the metadata grid
self.builder.get_object("metadata_grid").set_visible(False)
# show the progress status grid
self.builder.get_object("progressGrid").set_visible(True)
self.builder.get_object("spinner1").start()
# Deactivate all the inputs
self.__toggleActivation(False)
self.stop=False
self.scanError=None
# clear the current tree store
try:
rootIter=self.treestore.get_iter_from_string("0")
self.treestore.remove(rootIter)
except ValueError:
pass
# launch the search in a separate thread
thread.start_new_thread(self.__startSearchThread,(fileChoser.get_file().get_path(),))
    # method called when the user stops the directory search
def onStopClick(self, source=None, event=None):
        self.stop=True
# method called when the user click on a row in the file tree view
def onSelectRow(self, source=None, event=None):
(index,elem)=source.get_cursor()
self.builder.get_object("scrolledwindow2").set_visible(True)
if index == None:
return
# get the metadatas of the current row
metadataStr=self.treestore[index][METADATA_INDEX]
modMetadataStr=self.treestore[index][MOD_METADATA_INDEX]
# if we don't have metadata, we are on a directory
if (metadataStr == None):
self.__displayCommonMetadata(index)
return
metadataMap=self.__transformInMap(metadataStr)
modMetadataMap=self.__transformInMap(modMetadataStr)
# display the metadata in the information panel
self.__displayMetadata(index, metadataMap, modMetadataMap)
# method called when the user change the value of a metadata
def onChangeMetadata(self, element, userData):
(elementIndex, key) = userData
#get the String metadatas
metadataStr=self.treestore[elementIndex][METADATA_INDEX]
modMetadataStr=self.treestore[elementIndex][MOD_METADATA_INDEX]
# check if we are on a directory
if(metadataStr == None):
treeiter = self.treestore.iter_children(self.treestore.get_iter(elementIndex))
commonMap={}
# get the metadatas in common with all the tracks
self.__getCommonMetadatas(treeiter, commonMap, False)
# compare the actual value with the base common value and add/remove style class
if(commonMap.get(key) != None and commonMap.get(key) != element.get_text()):
Gtk.StyleContext.add_class(element.get_style_context(), "entry_modified")
else:
Gtk.StyleContext.remove_class(element.get_style_context(), "entry_modified")
return
# convert to Map
modMetadataMap=self.__transformInMap(modMetadataStr)
metadataMap=self.__transformInMap(metadataStr)
# change the value of the metadata with the modified value
modMetadataMap[key]=element.get_text()
# check if the current metadata has been modified compared to the base
# and add/remove the style class
if(modMetadataMap[key] != metadataMap[key]):
Gtk.StyleContext.add_class(element.get_style_context(), "entry_modified")
else:
Gtk.StyleContext.remove_class(element.get_style_context(), "entry_modified")
#reconstitute the new metadata
self.treestore[elementIndex][MOD_METADATA_INDEX]=self.__transformInString(modMetadataMap)
# bold the file name if the metadata has changed compared to the originals
self.__boldModifiedFile(elementIndex)
    # method called when the user clicks on the magic wand button
    # it guesses the track's information and modifies it
def onMagicWandClick(self, source):
(index,elem)=self.builder.get_object("foundFileTree").get_cursor()
        child_iter = self.treestore.iter_children(self.treestore.get_iter(index))
        self.__magicWand(child_iter)
    # method called when the user clicks on the broadcast button
# the modified metadata will be broadcast to all the tracks
# contained in the current directory
def onBroadcastClick(self, source):
grid=self.builder.get_object("metadata_grid")
children=grid.get_children()
        nbRows = len(children) // 3  # integer division: three grid children per row
# make a map with the metadatas in the entries
map={}
# Check all the children of the metadata_grid
for rowId in range(1,nbRows+1):
for colId in range(1,4):
child = grid.get_child_at(colId, rowId)
print("Child => %s"%child.get_name())
if isinstance(child, Gtk.Entry):
metadataName=child.get_name().split("_")[1]
if len(metadataName) <= 0:
continue
metadataValue=child.get_text()
# get the previous value in the label beside the entry element
metadataPrevValue = grid.get_child_at(colId-1, rowId).get_label()
                    # add the metadata if the value is not empty or if it is empty and the base value isn't
if len(metadataValue) != 0 or len(metadataPrevValue) != 0:
map[metadataName] = metadataValue
|
ju1ius/clisnips | clisnips/tui/widgets/table/__init__.py | Python | gpl-3.0 | 127 | 0 | from .column import Column
from .store import TableStore
| from .t | able import Table
__all__ = ['TableStore', 'Table', 'Column']
|
bsquidwrd/Squid-Bot | gaming/utils.py | Python | mit | 1,639 | 0.002441 | import json
import sys
from django.core import serializers
from inspect import getframeinfo, getouterframes, currentframe
DISCORD_MSG_CHAR_LIMIT = 2000
def paginate(content, *, length=DISCORD_MSG_CHAR_LIMIT, reserve=0):
"""
Split up a large string or list of strings into chunks for sending to Discord.
"""
    if isinstance(content, str):
contentlist = content.split('\n')
    elif isinstance(content, list):
contentl | ist = content
else:
raise ValueError("Content must be str or list, not %s" % type(content))
chunks = []
currentchunk = ''
for line in contentlist:
if len(currentchunk) + len(line) < length - reserve:
currentchunk += line + '\n'
else:
chunks.append(currentchunk)
            currentchunk = line + '\n'  # start the next chunk with the line that overflowed
if currentchunk:
chunks.append(currentchunk)
re | turn chunks
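# A minimal usage sketch for paginate() (the text below is hypothetical):
#
#     text = '\n'.join('line %s' % i for i in range(500))
#     for chunk in paginate(text, reserve=10):
#         assert len(chunk) <= DISCORD_MSG_CHAR_LIMIT - 10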
def logify_object(obj):
"""
Returns a JSON string containing usually a Queryset of items based on :attr:`obj`
"""
return json.dumps(json.loads(serializers.serialize("json", obj)), sort_keys=True, indent=4)
def logify_dict(d):
"""
Returns a JSON string containing the information within a :attr:`d`
"""
return json.dumps(d, sort_keys=True, indent=4)
def logify_exception_info():
"""
Returns a string with information about the last exception that was thrown.
"""
return "Filename: {0.tb_frame.f_code.co_filename}\nLine: {0.tb_lineno}\n".format(sys.exc_info()[2])
def current_line():
"""
Returns the current line the function is called from
"""
return getouterframes(currentframe())[1].lineno
|
kn45/LTR-DNN | train.py | Python | mit | 6,330 | 0.000316 | #!/usr/bin/env python
import dataproc
import itertools
import numpy as np
import random
import time
import sys
import tensorflow as tf
from collections import defaultdict
from ltrdnn import LTRDNN
flags = tf.flags
FLAGS = flags.FLAGS
# model related:
flags.DEFINE_integer('vocab_size', 1532783, 'vocabulary size')
flags.DEFINE_integer('emb_dim', 256, 'embedding dimension')
flags.DEFINE_integer('repr_dim', 256, 'sentence representing dimension')
flags.DEFINE_string('combiner', 'sum', 'how to combine words in a sentence')
# training related:
flags.DEFINE_string('train_file', '', 'training data file')
flags.DEFINE_string('valid_file', '', 'validation data file')
flags.DEFINE_string('test_file', '', 'testing data file')
flags.DEFINE_integer('train_bs', 128, 'train batch size')
flags.DEFINE_integer('max_epoch', 1, 'max epoch')
flags.DEFINE_integer('max_iter', 1000, 'max iteration')
flags.DEFINE_float('eps', 1.0, 'zero-loss threshold epsilon in hinge loss')
flags.DEFINE_integer('eval_steps', 20, 'every how many steps to evaluate')
flags.DEFINE_string('model_ckpt_file', './model_ckpt/model.ckpt', 'model file')
flags.DEFINE_string('embedding_file', './words_embedding', 'embedding file')
# log related:
flags.DEFINE_string('log_path', './log', 'log path')
def load_embedding(embf, vocab_size, emb_size):
"""load pretrained embedding mat from file.
"""
    # alternative: initialize with random values instead of zeros, e.g.:
# emb = [np.random.uniform(-0.2, 0.2, emb_size) for i in range(vocab_size)]
emb = np.zeros((vocab_size, emb_size))
with open(embf) as f:
for nl, line in enumerate(f):
flds = line.rstrip(' \n').split(' ')
word_idx = int(flds[0])
| vec = map(float, flds[1:])
emb[word_idx] = np.array(vec)
return np.array(emb)
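# The embedding file is expected to hold one "<word_id> <float> <float> ..."
# record per line, e.g. (hypothetical values for an emb_size of 4):
#
#     0 0.013 -0.241 0.097 0.305
#     1 -0.532 0.118 0.004 -0.076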
def inp_fn(data):
"""Extract training data.
@data : line in training file.
@return : training data in required format
"""
def _random_choose(l): return random.sample(l, 1)[0]
sp_feed = defaultdict(list)
batch_size = len(data)
seq_len = 0
for i, inst in enumerate(data):
| flds = inst.split('\t')
query = map(int, flds[0].split(' '))
pos_title_num = int(flds[1])
pos_titles = flds[2:2+pos_title_num]
neg_title_num = int(flds[2+pos_title_num])
neg_titles = flds[2+pos_title_num+1:]
pos_title = _random_choose(pos_titles)
pos_title = map(int, pos_title.split(' '))
neg_title = _random_choose(neg_titles)
neg_title = map(int, neg_title.split(' '))
seq_len = max(seq_len, len(query), len(pos_title), len(neg_title))
for j, word_id in enumerate(query):
sp_feed['qry_idx'].append([i, j])
sp_feed['qry_val'].append(word_id)
for j, word_id in enumerate(pos_title):
sp_feed['pos_idx'].append([i, j])
sp_feed['pos_val'].append(word_id)
for j, word_id in enumerate(neg_title):
sp_feed['neg_idx'].append([i, j])
sp_feed['neg_val'].append(word_id)
return (sp_feed['qry_idx'], sp_feed['qry_val'], [batch_size, seq_len]), \
(sp_feed['pos_idx'], sp_feed['pos_val'], [batch_size, seq_len]), \
(sp_feed['neg_idx'], sp_feed['neg_val'], [batch_size, seq_len])
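# inp_fn() above and eval_fn() below both consume tab-separated lines shaped
# like the following (word ids are hypothetical):
#
#     "3 17 42\t2\t5 9\t11 8 2\t1\t7 7 23"
#      query    #pos  two pos titles  #neg  one neg title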
def eval_fn(inst):
"""Extract evaluating data.
@inst : line in evaluating file.
@return : evaluating data in required format
"""
def _max_len(lst): return max([len(x) for x in lst])
flds = inst.split('\t')
qrys = flds[0:1]
pos_num = int(flds[1])
poss = flds[2:2+pos_num]
neg_num = int(flds[2+pos_num])
negs = flds[2+pos_num+1:]
qrys = [map(int, x.split(' ')) for x in qrys]
poss = [map(int, x.split(' ')) for x in poss]
negs = [map(int, x.split(' ')) for x in negs]
seq_len = max(_max_len(qrys), _max_len(poss), _max_len(negs))
batch_size = len(qrys) * len(poss) * len(negs)
sp_feed = defaultdict(list)
for i, (qry, pos, neg) in enumerate(itertools.product(qrys, poss, negs)):
for j, word_id in enumerate(qry):
sp_feed['qry_idx'].append([i, j])
sp_feed['qry_val'].append(word_id)
for j, word_id in enumerate(pos):
sp_feed['pos_idx'].append([i, j])
sp_feed['pos_val'].append(word_id)
for j, word_id in enumerate(neg):
sp_feed['neg_idx'].append([i, j])
sp_feed['neg_val'].append(word_id)
return (sp_feed['qry_idx'], sp_feed['qry_val'], [batch_size, seq_len]), \
(sp_feed['pos_idx'], sp_feed['pos_val'], [batch_size, seq_len]), \
(sp_feed['neg_idx'], sp_feed['neg_val'], [batch_size, seq_len])
train_freader = dataproc.BatchReader(FLAGS.train_file, FLAGS.max_epoch)
with open(FLAGS.valid_file) as f:
valid_data = [x.rstrip('\n') for x in f.readlines()]
valid_q, valid_pt, valid_nt = inp_fn(valid_data)
mdl = LTRDNN(
vocab_size=FLAGS.vocab_size,
emb_dim=FLAGS.emb_dim,
repr_dim=FLAGS.repr_dim,
combiner=FLAGS.combiner,
eps=FLAGS.eps)
sess = tf.Session()
file_writer = tf.summary.FileWriter(FLAGS.log_path, sess.graph)
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
print 'loading pretrained embedding from file'
pretrained_emb = load_embedding(
FLAGS.embedding_file, FLAGS.vocab_size, FLAGS.emb_dim)
mdl.assign_embedding(sess, pretrained_emb)
metrics = ['loss']
print 'train begin...'
for niter in xrange(FLAGS.max_iter):
batch_data = train_freader.get_batch(FLAGS.train_bs)
if not batch_data:
break
train_q, train_pt, train_nt = inp_fn(batch_data)
mdl.train_step(sess, train_q, train_pt, train_nt)
if niter % FLAGS.eval_steps != 0:
continue
train_eval = mdl.eval_step(sess, train_q, train_pt, train_nt, metrics)
valid_eval = mdl.eval_step(sess, valid_q, valid_pt, valid_nt, metrics)
ntime = time.strftime('%Y%m%d_%H:%M:%S', time.localtime(time.time()))
print ntime, niter, \
'train_loss:', train_eval, 'valid_loss:', valid_eval
save_path = mdl.saver.save(
sess, FLAGS.model_ckpt_file, global_step=mdl.global_step,
write_meta_graph=False)
print 'model saved:', save_path
with open(FLAGS.test_file) as feval:
acc = mdl.pairwise_accuracy(sess, feval, eval_fn)
print 'pairwise accuracy:', acc
sess.close()
|
sahabi/opt | rl/keras_future.py | Python | mit | 618 | 0 | import keras
import keras.layers
import keras.models
def concatenate(x):
if hasattr(keras.layers, 'Concatenate'):
return keras.layers.Concatenate()(x)
else:
return keras.layers.merge(x, mode='concat')
def add(x):
| if hasattr(keras.layers, 'Add'):
return keras.layers.Add()(x)
else:
return keras.layers.merge(x, mode='sum')
def Model(input, output, **kwargs):
if int(keras.__version__.split('.')[0]) >= 2:
return keras.models.Model(inputs=input, outputs=output, **kwargs)
else:
return keras.models.Model(inp | ut=input, output=output, **kwargs)
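# A minimal usage sketch of the helpers above (assumes Keras is installed;
# layer shapes and sizes are illustrative only):
#
#     inp_a = keras.layers.Input(shape=(8,))
#     inp_b = keras.layers.Input(shape=(8,))
#     merged = concatenate([inp_a, inp_b])  # works on both Keras 1.x and 2.x
#     out = keras.layers.Dense(1)(merged)
#     model = Model(input=[inp_a, inp_b], output=out)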
|
ebagdasa/tinycloud | setup.py | Python | bsd-3-clause | 861 | 0.012776 | from distutils.core import setup
setup(
name='TinyCloud',
version='0.4dev',
packages=['tinycloud',],
license='BSD',
long_description=open('README.md').read(),
scripts=['bin/tiny-add-node', 'bin/tiny-add-app', 'bin/tiny-delete-app', 'bin/tiny-draw', 'bin/tiny-get-apps',
'bin/tiny-get-servers', 'bin/tiny-delete-node', 'bin/tiny-add-flow', 'bin/tiny-deploy'],
install_requires= | [
'ansible',
'redis',
'networkx',
'scipy',
'fabric', 'knapsack'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Lin | ux',
],
)
|
apllicationCOM/youtube-dl-api-server | youtube_dl_server/youtube_dl/extractor/__init__.py | Python | unlicense | 20,811 | 0.000048 | from __future__ import unicode_literals
from .abc import ABCIE
from .abc7news import Abc7NewsIE
from .academicearth import AcademicEarthCourseIE
from .addanime import AddAnimeIE
from .adobetv import (
AdobeTVIE,
AdobeTVVideoIE,
)
from .adultswim import AdultSwimIE
from .aftenposten import AftenpostenIE
from .aftonbladet import AftonbladetIE
from .airmozilla import AirMozillaIE
from .aljazeera import AlJazeeraIE
from .alphaporno import AlphaPornoIE
from .anitube import AnitubeIE
from .anysex import AnySexIE
from .aol import AolIE
from .allocine import AllocineIE
from .aparat import AparatIE
from .appletrailers import AppleTrailersIE
from .archiveorg import ArchiveOrgIE
from .ard import ARDIE, ARDMediathekIE
from .arte import (
ArteTvIE,
ArteTVPlus7IE,
ArteTVCreativeIE,
ArteTVConcertIE,
ArteTVFutureIE,
ArteTVDDCIE,
ArteTVEmbedIE,
)
from .atresplayer import AtresPlayerIE
from .atttechchannel import ATTTechChannelIE
from .audiomack import AudiomackIE, AudiomackAlbumIE
from .azubu import AzubuIE
from .baidu import BaiduVideoIE
from .bambuser import BambuserIE, BambuserChannelIE
from .bandcamp import BandcampIE, BandcampAlbumIE
from .bbccouk import BBCCoUkIE
from .beeg import BeegIE
from .behindkink import BehindKinkIE
from .beatportpro import BeatportProIE
from .bet import BetIE
from .bild import BildIE
from .bilibili import BiliBiliIE
from .blinkx import BlinkxIE
from .bliptv import BlipTVIE, BlipTVUserIE
from .bloomberg import BloombergIE
from .bpb import BpbIE
from .br import BRIE
from .breakcom import BreakIE
from .brightcove import BrightcoveIE
from .buzzfeed import BuzzFeedIE
from .byutv import BYUtvIE
from .c56 import C56IE
from .camdemy import (
CamdemyIE,
CamdemyFolderIE
)
from .canal13cl import Canal13clIE
from .canalplus import CanalplusIE
from .canalc2 import Canalc2IE
from .cbs import CBSIE
from .cbsnews import CBSNewsIE
from .cbssports import CBSSportsIE
from .ccc import CCCIE
from .ceskatelevize import CeskaTelevizeIE
from .channel9 import Channel9IE
from .chilloutzone import ChilloutzoneIE
from .chirbit import (
ChirbitIE,
ChirbitProfileIE,
)
from .cinchcast import CinchcastIE
from .cinemassacre import CinemassacreIE
from .clipfish import ClipfishIE
from .cliphunter import CliphunterIE
from .clipsyndicate import ClipsyndicateIE
from .cloudy import CloudyIE
from .clubic import ClubicIE
from .cmt import CMTIE
from .cnet import CNETIE
from .cnn import (
CNNIE,
CNNBlogsIE,
CNNArticleIE,
)
from .collegehumor import CollegeHumorIE
from .collegerama import CollegeRamaIE
from .comedycentral import ComedyCentralIE, ComedyCentralShowsIE
from .comcarcoff import ComCarCoffIE
from .commonmistakes import CommonMistakesIE, UnicodeBOMIE
from .condenast import CondeNastIE
from .cracked import CrackedIE
from .criterion import CriterionIE
from .crooksandliars import CrooksAndLiarsIE
from .crunchyroll import (
CrunchyrollIE,
CrunchyrollShowPlaylistIE
)
from .cspan import CSpanIE
from .ctsnews import CtsNewsIE
from .dailymotion import (
DailymotionIE,
DailymotionPlaylistIE,
DailymotionUserIE,
DailymotionCloudIE,
)
from .daum import DaumIE
from .dbtv import DBTVIE
from .dctp import DctpTvIE
from .deezer import DeezerPlaylistIE
from .dfb import DFBIE
from .dhm import DHMIE
from .dotsub import DotsubIE
from .douyutv import DouyuTVIE
from .dramafever import (
DramaFeverIE,
DramaFeverSeriesIE,
)
from .dreisat import DreiSatIE
from .drbonanza import DRBonanzaIE
from .drtuber import DrTuberIE
from .drtv import DRTVIE
from .dvtv import DVTVIE
from .dump import DumpIE
from .dumpert import DumpertIE
from .defense import DefenseGouvFrIE
from .discovery import DiscoveryIE
from .divxstage import DivxStageIE
from .dropbox import DropboxIE
from .eagleplatform import EaglePlatformIE
from .ebaumsworld import EbaumsWorldIE
from .echomsk import EchoMskIE
from .ehow import EHowIE
from .eighttracks import EightTracksIE
from .einthusan import EinthusanIE
from .eitb import EitbIE
from .ellentv import (
EllenTVIE,
EllenTVClipsIE,
)
from .elpais import ElPaisIE
from .embedly import EmbedlyIE
from .engadget import EngadgetIE
from .eporner import EpornerIE
from .eroprofile import EroProfileIE
from .escapist import EscapistIE
from .espn import ESPNIE
from .everyonesmixtape import EveryonesMixtapeIE
from .exfm import ExfmIE
from .expotv import ExpoTVIE
from .extremetube import ExtremeTubeIE
from .facebook import FacebookIE
from .faz import FazIE
from .fc2 import FC2IE
from .firstpost import FirstpostIE
from .firsttv import FirstTVIE
from .fivemin import FiveMinIE
from .fivetv import FiveTVIE
from .fktv import (
FKTVIE,
FKTVPosteckeIE,
)
from .flickr import FlickrIE
from .folketinget import FolketingetIE
from .footyroom import FootyRoomIE
from .fourtube import FourTubeIE
from .foxgay import FoxgayIE
from .foxnews import FoxNewsIE
from .foxsports import FoxSportsIE
| from .franceculture import FranceCultureIE
from .franceinter import FranceInterIE
from .francetv import (
PluzzIE,
FranceTvInfoIE,
FranceTVIE,
GenerationQuoiIE,
CultureboxIE,
)
from .freesound import FreesoundIE
from .freespeech import FreespeechIE
from .freevideo import FreeVideoIE
from .funnyordie import FunnyOrDieIE
from .gamekings import GamekingsIE
| from .gameone import (
GameOneIE,
GameOnePlaylistIE,
)
from .gamersyde import GamersydeIE
from .gamespot import GameSpotIE
from .gamestar import GameStarIE
from .gametrailers import GametrailersIE
from .gazeta import GazetaIE
from .gdcvault import GDCVaultIE
from .generic import GenericIE
from .gfycat import GfycatIE
from .giantbomb import GiantBombIE
from .giga import GigaIE
from .glide import GlideIE
from .globo import GloboIE
from .godtube import GodTubeIE
from .goldenmoustache import GoldenMoustacheIE
from .golem import GolemIE
from .googleplus import GooglePlusIE
from .googlesearch import GoogleSearchIE
from .gorillavid import GorillaVidIE
from .goshgay import GoshgayIE
from .groupon import GrouponIE
from .hark import HarkIE
from .hearthisat import HearThisAtIE
from .heise import HeiseIE
from .hellporno import HellPornoIE
from .helsinki import HelsinkiIE
from .hentaistigma import HentaiStigmaIE
from .historicfilms import HistoricFilmsIE
from .history import HistoryIE
from .hitbox import HitboxIE, HitboxLiveIE
from .hornbunny import HornBunnyIE
from .hostingbulk import HostingBulkIE
from .hotnewhiphop import HotNewHipHopIE
from .howcast import HowcastIE
from .howstuffworks import HowStuffWorksIE
from .huffpost import HuffPostIE
from .hypem import HypemIE
from .iconosquare import IconosquareIE
from .ign import IGNIE, OneUPIE
from .imdb import (
ImdbIE,
ImdbListIE
)
from .imgur import ImgurIE
from .ina import InaIE
from .infoq import InfoQIE
from .instagram import InstagramIE, InstagramUserIE
from .internetvideoarchive import InternetVideoArchiveIE
from .iprima import IPrimaIE
from .iqiyi import IqiyiIE
from .ivi import (
IviIE,
IviCompilationIE
)
from .izlesene import IzleseneIE
from .jadorecettepub import JadoreCettePubIE
from .jeuxvideo import JeuxVideoIE
from .jove import JoveIE
from .jukebox import JukeboxIE
from .jpopsukitv import JpopsukiIE
from .kaltura import KalturaIE
from .kanalplay import KanalPlayIE
from .kankan import KankanIE
from .karaoketv import KaraoketvIE
from .karrierevideos import KarriereVideosIE
from .keezmovies import KeezMoviesIE
from .khanacademy import KhanAcademyIE
from .kickstarter import KickStarterIE
from .keek import KeekIE
from .kontrtube import KontrTubeIE
from .krasview import KrasViewIE
from .ku6 import Ku6IE
from .la7 import LA7IE
from .laola1tv import Laola1TvIE
from .letv import (
LetvIE,
LetvTvIE,
LetvPlaylistIE
)
from .libsyn import LibsynIE
from .lifenews import (
LifeNewsIE,
LifeEmbedIE,
)
from .liveleak import LiveLeakIE
from .livestream import (
LivestreamIE,
LivestreamOriginalIE,
LivestreamShortenerIE,
)
from .lnkgo import LnkGoIE
from .lrt import LRTIE
from .lynda import (
LyndaIE,
LyndaCourseIE
)
from .m6 import M6IE
from .macgamestore import MacGameStoreIE
from .mailru import MailRuIE
from .malemotion import Ma |
bhargavasana/activitysim | activitysim/tests/test_mnl.py | Python | agpl-3.0 | 3,724 | 0 | # ActivitySim
# Copyright (C) 2014-2015 Synthicity, LLC
# See full license in LICENSE.txt.
import os.path
import numpy as np
import pandas as pd
import pandas.util.testing as pdt
import pytest
from ..activitysim import eval_variables
from .. import mnl
# this is lifted straight from urbansim's test_mnl.py
@pytest.fixture(scope='module', params=[
('fish.csv',
'fish_choosers.csv',
pd.DataFrame(
[[-0.02047652], [0.95309824]], index=['price', 'catch'],
columns=['Alt']),
pd.DataFrame([
[0.2849598, 0.2742482, 0.1605457, 0.2802463],
[0.1498991, 0.4542377, 0.2600969, 0.1357664]],
columns=['beach', 'boat', 'charter', 'pier']))])
def test_data(request):
data, choosers, spec, probabilities = request.param
return {
'data': data,
'choosers': choosers,
'spec': spec,
'probabilities': probabilities
}
@pytest.fixture
def choosers(test_data):
filen = os.path.join(
os.path.dirname(__file__), 'data', test_data['choosers'])
return pd.read_csv(filen)
@pytest.fixture
def spec(test_data):
return test_data['spec']
@pytest.fixture
def choosers_dm(choosers, spec):
return eval_variables(spec.index, choosers)
@pytest.fixture
def utilities(choosers_dm, spec, test_data):
utils = choosers_dm.dot(spec).astype('float')
return pd.DataFrame(
utils.as_matrix().reshape(test_data['probabilities'].shape),
columns=test_data['probabilities'].columns)
def test_utils_to_probs(utilities, test_data):
probs = mnl.utils_to_probs(utilities)
pdt.assert_frame_equal(probs, test_data['probabilities'])
def test_utils_to_probs_raises():
with pytest.raises(RuntimeError):
| mnl.utils_to_probs(
pd.DataFrame([[1, 2, np.inf, 3]]))
def test_make_choices_only_one():
probs = pd.DataFrame(
[[1, 0, 0], [0, 1, 0]], columns=['a', 'b', 'c'], index=['x', 'y'])
choices = mnl.make_choices(probs)
pdt.assert_series_equal(
choices,
pd.Series([0, 1], index=['x' | , 'y']))
def test_make_choices_real_probs(random_seed, utilities):
probs = mnl.utils_to_probs(utilities)
choices = mnl.make_choices(probs)
pdt.assert_series_equal(
choices,
pd.Series([1, 2], index=[0, 1]))
@pytest.fixture(scope='module')
def interaction_choosers():
return pd.DataFrame({
'attr': ['a', 'b', 'c', 'b']},
index=['w', 'x', 'y', 'z'])
@pytest.fixture(scope='module')
def interaction_alts():
return pd.DataFrame({
'prop': [10, 20, 30, 40]},
index=[1, 2, 3, 4])
def test_interaction_dataset_no_sample(interaction_choosers, interaction_alts):
expected = pd.DataFrame({
'attr': ['a'] * 4 + ['b'] * 4 + ['c'] * 4 + ['b'] * 4,
'prop': [10, 20, 30, 40] * 4,
'chooser_idx': ['w'] * 4 + ['x'] * 4 + ['y'] * 4 + ['z'] * 4},
index=[1, 2, 3, 4] * 4)
interacted = mnl.interaction_dataset(
interaction_choosers, interaction_alts)
interacted, expected = interacted.align(expected, axis=1)
pdt.assert_frame_equal(interacted, expected)
def test_interaction_dataset_sampled(
interaction_choosers, interaction_alts, random_seed):
expected = pd.DataFrame({
'attr': ['a'] * 2 + ['b'] * 2 + ['c'] * 2 + ['b'] * 2,
'prop': [30, 40, 10, 30, 40, 10, 20, 10],
'chooser_idx': ['w'] * 2 + ['x'] * 2 + ['y'] * 2 + ['z'] * 2},
index=[3, 4, 1, 3, 4, 1, 2, 1])
interacted = mnl.interaction_dataset(
interaction_choosers, interaction_alts, sample_size=2)
interacted, expected = interacted.align(expected, axis=1)
pdt.assert_frame_equal(interacted, expected)
|
rhyswhitley/spatial_plots | src/create_matplotlib_savanna_patch.py | Python | cc0-1.0 | 3,093 | 0.005496 | #!/usr/bin/env python2.7
import os
import pickle
from matplotlib.path import Path
from shapely.geometry import Polygon, MultiPolygon
import fiona
def define_clipping(_shapePath):
"""
Reads in a shapefile from some folder and creates a Matplotlib Patch artist
from which one can clip gridded data plotted on a basemap object. The Patch
object is defined using vertices (lat/lon coordinates) and codes (drawing
commands), which make up the final PatchPath that is returned to the user.
Additionally, a polygon object is also created to extract data points from
a gridded dataset that exist with the polygon's extents.
"""
# import the shapefile using fiona
fshape = fiona.open(_shapePath)
# extract the vertices of the polygon (the coord system)
vert_2Dlist = [vl["geometry"]["coordinates"][0] for vl in fshape \
if vl["properties"]["GEZ_TERM"] in PFTS]
# flatten 2D list
vert_1Dlist = list_flat(vert_2Dlist)
# define the path by which the lines of the polygon are drawn
code_2Dlist = [create_codes(len(vl)) for vl in vert_2Dlist]
# flatten 2D list
code_1Dlist = list_flat(code_2Dlist)
# create the art path that will clip the data (Multipolygons are flattened)
clip = Path(vert_1Dlist, code_1Dlist)
#clip = PathPatch(part1, *args, **kwargs)
# create a multi-polygon object using the same list of coordinates
#mpoly = Polygon(vert_2Dlist)
x_low, y_low = map(min, zip(*vert_1Dlist))
x_high, y_high = map(max, zip(*vert_1Dlist))
# extents for the polygon
extent = {'lon':[x_low, x_hig | h], 'lat':[y_low, y_high]}
# return to user
return {'clip': clip, 'extent': extent, 'poly':vert_2Dlist}
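# A hypothetical use of the returned dict to clip gridded data drawn on a
# matplotlib Axes ``ax`` (``ax`` and the contour set ``cs`` are illustrative):
#
#     from matplotlib.patches import PathPatch
#     geom = define_clipping(SHAPEPATH)
#     patch = PathPatch(geom['clip'], transform=ax.transData, facecolor='none')
#     ax.add_patch(patch)
#     for coll in cs.collections:
#         coll.set_clip_path(patch)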
# --------------------------------------------------------------------------------
def list_flat(List2D):
"""Flattens a 2D list"""
return [item for sublist in List2D for item in sublist]
# --------------------------------------------------------------------------------
def create | _codes(plen):
"""
Returns a list of matplotlib artist drawing codes based on the number of
polygon coordinates; First index is the starting point, Last index closes
the polygon, and all other indices draw the polygon (coordinates always
loop back to origin)
"""
return [Path.MOVETO] + [Path.LINETO]*(plen-2) + [Path.CLOSEPOLY]
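# For example, create_codes(4) returns
# [Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY]: move to the first
# vertex, draw two edges, then close the polygon back to the starting point.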
# --------------------------------------------------------------------------------
def main():
# import savanna bioregion polygon and create a clipping region
sav_geom = define_clipping(SHAPEPATH)
pickle.dump(sav_geom, open(SAVEPATH, "wb"))
return 1
if __name__ == "__main__":
SHAPEPATH = os.path.expanduser("~/Savanna/Data/GiS/ecofloristic_zones/ecofloristic_zones.shp")
SAVEPATH = os.path.expanduser("~/Savanna/Data/GiS/Savanna_Bioregion_Path.pkl")
# PFTS that <broadly> define/encompass global savannas
PFTS = ["Tropical moist deciduous forest", \
"Tropical dry forest", \
"Subtropical dry forest", \
"Tropical shrubland"]
main()
|
thinkle/gourmet | gourmet/plugins/email_plugin/emailer_plugin.py | Python | gpl-2.0 | 1,934 | 0.00879 | from gi.repository import Gtk
import gourmet.gtk_extras.dialog_extras as de
from gourmet.plugin import RecDisplayModule, UIPlugin, MainPlugin, ToolPlugin
from .recipe_emailer import RecipeEmailer
from gettext import gettext as _
class EmailRecipePlugin (MainPlugin, UIPlugin):
ui_string = '''
<menubar name="RecipeIndexMenuBar">
<menu name="Tools" action="Tools">
<placeholder name="StandaloneTool">
<menuitem action="EmailRecipes"/>
</placeholder>
</menu>
</menubar>
'''
def setup_action_groups (self):
self.actionGroup = Gtk.ActionGroup(name='RecipeEmailerActionGroup')
self.actionGroup.add_actions([
('EmailRecipes',None,_('Email recipes'),
None,_('Email all selected recipes (or all recipes if no recipes are selected'),self.email_selected),
])
self.action_groups.append(self.actionGroup)
def activate (self, pluggable):
self.rg = self.pluggable = pluggable
self.add_to_uimanager(pluggable.ui_manager)
def get_selected_recs (self):
recs = self.rg.get_selected_recs_from_rec_tree()
if not recs:
            recs = self.rg.rd.fetch_all(self.rg.rd.recipe_table, deleted=False, sort_by=[('t | itle',1)])
return recs
def email_selected (self, *args):
recs = self.get_selected_recs()
l = len(recs)
if l > 20:
if not de.getBoolean(
title=_('Email recipes'),
# only called for l>20, so fancy gettext methods
# shouldn't be necessary if my knowledge of
# linguistics s | erves me
sublabel=_('Do you really want to email all %s selected recipes?')%l,
custom_yes=_('Yes, e_mail them'),
cancel=False,
):
return
re = RecipeEmailer(recs)
re.send_email_with_attachments()
|
CCI-MOC/GUI-Backend | api/v2/serializers/post/account.py | Python | apache-2.0 | 11,411 | 0.002717 | from core.models import (
AtmosphereUser, AccountProvider, Group, Identity, Provider, Quota
)
from core.query import only_current, contains_credential
from api.v2.serializers.details.credential import CredentialSerializer
from service.driver import get_esh_driver, get_account_driver
from rtwo.exceptions import KeystoneUnauthorized
from rest_framework import serializers
class AccountSerializer(serializers.Serializer):
"""
"""
# Flags
create_account = serializers.BooleanField(default=False, write_only=True)
admin_account = serializers.BooleanField(default=False, write_only=True)
# Fields
atmo_user = serializers.CharField(write_only=True)
atmo_group = serializers.CharField(write_only=True)
provider = serializers.UUIDField(format='hex_verbose')
credentials = CredentialSerializer(many=True, write_only=True)
# Optional fields
quota = serializers.UUIDField(required=False, allow_null=True)
# allocation_source_id = serializers.CharField(required=False, allow_null=True) NOTE: Uncomment when feature is completed
def validate(self, data):
"""
Validation will:
        - Ensure that the user/group exists (or will be created)
        - Ensure the provider is visible and active for the request user
        - Ensure the quota exists (or fall back to the default quota)
        - Ensure the credentials include the keys the provider requires
"""
validated_data = data
self.validate_user(data['provider'])
validated_data['atmo_user'], validated_data['atmo_group'] = self._validate_user_group(data)
validated_data['provider'] = self._validate_provider(data['provider'])
# Using the validated data, ensure the user *should* be able to request these things.
# Validate that the quota exists (Or set a default)
validated_data['quota'] = self._validate_quota(data)
# Validate that the allocation source exists (Or set a default)
# validated_data['allocation_source'] = self._validate_allocation(data) NOTE: Uncomment when feature is completed
# Validate the credentials (?)
# NOTE: This method is OpenStack specific. Update this method when adding new provider types.
required_keys = self._get_required_keys(
validated_data['provider'], validated_data['create_account'])
validated_data['credentials'] = self._validate_credentials(
validated_data['provider'], data['credentials'], required_keys)
if not validated_data['admin_account']:
self.validate_account_driver(validated_data)
return validated_data
def create(self, validated_data):
username = validated_data['atmo_user']
groupname = validated_data['atmo_group']
atmo_user, atmo_group = Group.create_usergroup(
username, groupname)
provider = validated_data['provider']
provider_type = provider.get_type_name().lower()
if provider_type == 'openstack':
new_identity = self.create_openstack_identity(atmo_user, provider, validated_data)
else:
raise Exception("Cannot create accounts for provider of type %s" % provider_type)
# Always share identity with group (To enable Troposphere access)
new_identity.share(atmo_group)
admin_account = validated_data['admin_account']
if admin_account:
AccountProvider.objects.get_or_create(
provider=new_identity.provider,
identity=new_identity)
# TODO: When the refactor of rtwo/get_esh_driver is complete, validate_identity should be call-able without the django model (to avoid create-then-delete)
validate_identity(new_identity)
return new_identity
###
# Private validation methods
###
def _validate_user_group(self, data):
create_account = data['create_account']
username = data['atmo_user']
groupname = data['atmo_group']
atmo_user = AtmosphereUser.objects.filter(username=username).first()
if atmo_user:
atmo_group = Group.objects.filter(user=atmo_user).filter(name=groupname).first()
else:
atmo_group = None
if not atmo_user and not create_account:
raise serializers.ValidationError("User %s does not exist, and 'create_account' is False." % username)
if not atmo_group and not create_account:
raise serializers.ValidationError("Group %s does not exist, and 'create_account' is False." % groupname)
return username, groupname
def _validate_provider(self, provider_uuid):
"""
Validate that this provider is 'visible' w.r.t. the current user
"""
request_user = self._get_request_user()
# NOTE: With this validation, *ONLY* the creator of the provider can admininster accounts
# To allow anyone with staff/superuser to create, replace provider_manager
# provider_manager = Provider.objects
provider_manager = request_user.admin_providers
prov_qs = provider_manager.filter(
only_current(), active=True)
provider = prov_qs.filter(uuid=provider_uuid).first()
if not provider:
raise serializers.ValidationError(
"Cannot create an account for provider with UUID %s" % provider_uuid)
return provider
def _validate_quota(self, data):
quota_uuid = data.get('quota', '')
if not quota_uuid:
return Quota.default_quota()
quota = Quota.objects.filter(uuid=quota_uuid).first()
if not quota:
raise serializers.ValidationError(
"Quota '%s' not found" % quota_uuid)
return quota
def _validate_allocation(self, data):
# FIXME: Creation & Validation of allocation source logic goes *here* (post-CyVerse+AS)
return None
def _validate_openstack_credentials(self, credentials, required_keys):
"""
Note: If this can be reused elsewhere, we can move this to a class/staticmethod for service.accounts.openstack.AccountDriver
"""
keys = [c['key'] for c in credentials]
missing_keys = [key for key in required_keys if key not in keys]
if missing_keys:
raise serializers.ValidationError("Missing required key(s) for Openstack creation: %s" % missing_keys)
return credentials
def _get_required_keys(self, provider, create_account):
has_account_provider = provider.accountprovider_set.exists()
# If an account provider exists, only 'key' is required to create an account
if create_account and has_account_provider:
required_keys = ['key']
else: # Otherwise require the 'full-chain' credentials
required | _keys = ['key', 'secre | t', 'ex_project_name']
return required_keys
def _validate_credentials(self, provider, credentials, required_keys):
provider_type = provider.get_type_name().lower()
# NOTE: Looking to add a new provider type? validate the credentials here!
if provider_type == 'openstack':
valid_creds = self._validate_openstack_credentials(credentials, required_keys)
else:
valid_creds = credentials
return valid_creds
def _get_request_user(self):
if 'request' in self.context:
return self.context['request'].user
elif 'user' in self.context:
return self.context['user']
else:
raise ValueError("Expected 'request/user' to be passed in via context for this serializer")
def validate_account_driver(self, validated_data):
try:
provider = validated_data['provider']
acct_driver = get_account_driver(provider, raise_exception=True)
return acct_driver
except Exception as exc:
raise serializers.ValidationError("Attempting to create an account for provider %s failed. Message: %s" % (provider, exc.message))
def validate_user(self, provider_uuid):
request_user = self._get_request_user()
# Tests or restrictions on request-user go here
if request_user.is_staff or request_user.is_superuser:
return
if request_user.admin_providers.filter(uuid=provider_uuid).count() > 0:
return
r |
cookie-master/raspRobot | raspRobot.py | Python | gpl-2.0 | 1,012 | 0.005929 | from yapsy.PluginManager import PluginManager
from parameters import *
from raspTwitter import *
#from time import sleep
import time
def parseConfigFile(fileName):
conf = {}
f = open(fileName, 'r')
for l in f.readlines():
ls = l.split()
if ls and ('#' not in ls[0]):
conf[ls[0]] = ls[2]
f.close()
return conf
def main():
configFile = 'config'
param = Parameters()
param.conf = parseConfigFile(configFile)
param.pi = RaspTwitter(param)
| while True:
print(time.ctime())
param.dm = param.pi.getDirectMessages()
param.dmParsed = param.pi.parseDirectMessage | s(param.dm)
# Load the plugins from the plugin directory.
manager = PluginManager()
manager.setPluginPlaces(["plugins/enabled"])
manager.collectPlugins()
for plugin in manager.getAllPlugins():
plugin.plugin_object.start(param)
time.sleep(60)
if __name__ == '__main__':
main()
|
mbodock/django-cepfield | cep/forms.py | Python | mit | 2,390 | 0 | # encoding: utf-8
from __future__ import unicode_literals
import requests
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import Cep
from .parser import Parser
class CepField(forms.RegexField):
SERVICE_URL = ('http://www.buscacep.correios.com.br/sistemas/'
'buscacep/resultadoBuscaCepEndereco.cfm')
invalid_cep = _('Invalid CEP')
cannot_validate = _('Cannot validate with Correios')
def __init__(self, force_correios_validation=True,
timeout=10, *args, **kwargs):
super(CepField, self).__init__(r'^\d{2}\.?\d{3}-?\d{3}$',
*args,
**kwargs)
self.force_correios_validation = force_correios_validation
self.dados = {
'bairro': None,
'logradouro': None,
'estado': None,
'cidade': None,
'cliente': None,
}
self.valido = False
self.timeout = timeout
def clean(self, value):
original_value = value
value = super(CepField, self).clean(value)
value = value.replace('.', '').replace('-', '').strip(' :')
cep = Cep.objects.get_or_create(codigo=value)
cep.original_value = original_value
if cep.valido:
return cep
self.valida_correios(value)
cep.valido = self.valido
if cep.valido:
| cep.logradouro = self.dados.get('logradouro',
self.dados.get('cliente', ''))
cep.bairro = self.dados.get('bairro', '')
cep.estado = self.dados.get('estado', '')
| cep.cidade = self.dados.get('cidade', '')
cep.complemento = self.dados.get('complemento', '')
cep.save()
return cep
def valida_correios(self, codigo):
try:
response = requests.post(
self.SERVICE_URL,
data={'relaxation': codigo},
timeout=self.timeout)
parser = Parser(response.content)
self.dados = parser.get_data()
except requests.RequestException:
if self.force_correios_validation:
raise ValidationError(self.cannot_validate)
return
self.valido = True
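# A minimal usage sketch (the form below is hypothetical; assumes Django is
# configured and this app is on the path):
#
#     from django import forms
#     from cep.forms import CepField
#
#     class AddressForm(forms.Form):
#         cep = CepField(force_correios_validation=False)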
|
TheR3ason/map-your-location-history | LatitudePlot.py | Python | apache-2.0 | 4,022 | 0.01641 | #!/usr/bin/env python
# LatitudePlot.py
# Created 30 July 2013
# Created by snowdonjames@googlemail.com
import os, time, math
from datetime import datetime
from time import mktime
import xml.etree.ElementTree as ET
from PIL import Image, ImageDraw
def GetKmlFiles():
"""Locates and reads local .kml files, returns a list of kml dictionary data"""
KmlData = []
for dirname, dirnames, filenames in os.walk('.'):
for filename in filenames:
sp = filename.split('.')
if sp[len(sp)-1]== "kml": #locate kml files
print "Reading kml file " + filename
KmlData.append(ReadKmlFile(dirname, filename))
print KmlData
return KmlData
def ReadKmlFile(dirname, filename):
"""Parses a single kml file, returns a dict of format {time: [lat, long]}"""
KmlData = {}
kmltime = datetime.time
latlist = []
longlist = []
timelist = []
cnt =0
f = open(filename)
line = f.readline()
while line:
if 'when' in line:
timelist.append(time.strptime(ET.fromstring(line)[0].text,"%Y-%m-%dT%H:%M:%SZ"))
if 'coordinates' in line:
latlist.append(float(ET.fromstring(line)[0].text.split(',')[0]))
longlist.append(float(ET.fromstring(line)[0].text.split(',')[1]))
cnt+=1
if cnt % 5000 ==0:
print "Parsing " + filename + ": points found: " + str(cnt)
line = f.readline()
f.close()
return [latlist, longlist, timelist]
def DrawMapData(KmlData,InputImage, OutputImage, itop, ibottom, ileft, iright,xnudge,ynudge):
"""Draws kml line data on top of the specified image"""
im = Image.open(InputImage)
draw = ImageDraw.Draw(im)
cnt =0
for KmlD in KmlData:
for d in range(len(KmlD[0])-1):
#Get points x and y coordinates and draw line
x1=(LongToX(KmlD[0][d],ileft,iright,im.size[0]))+xnudge
y1=(LatToY(KmlD[1][d],itop,ibottom,im.size[1]))+ynudge
| x2=(LongToX(KmlD[0][d+1],ileft,iright,im.size[0]))+xnudge
y2=(LatToY(KmlD[1][d+1],itop,ibottom,im.size[1])) | +ynudge
if(EuclidDistance(x1,y1,x2,y2) < 10000):
#setting this around 80 works okay. Attempts to remove some noise
draw.line((x1,y1, x2,y2), fill=80)
cnt+=1
if cnt % 10000 ==0:
print "Drawing point number " + str(cnt)
im.save(OutputImage)
def LongToX(InputLong, LeftLong, RightLong, ImWidth):
"""Converts a longitude value in to an x coordinate"""
return ScalingFunc(InputLong+360, LeftLong+360, RightLong+360, ImWidth);
def LatToY(InputLat, TopLat, BottomLat, ImHeight):
"""Converts a latitude value in to a y coordinate"""
return ScalingFunc(InputLat+360, TopLat+360, BottomLat+360, ImHeight);
def EuclidDistance(x1, y1, x2, y2):
"""Calculates the euclidean distance between two points"""
return math.sqrt((x1 - x2)**2+(y1 - y2)**2)
def ScalingFunc(inputv, minv, maxv, size):
"""Helps convert latitudes and longitudes to x and y"""
if((float(maxv) -float(minv)) ==0):
return 0
return ((((float(inputv) - float(minv)) / (float(maxv) -float(minv))) * float(size)));
def ParseImageFile():
"""Reads SatelliteImageData.csv containing:
<File name of image to draw data on>,
<image top latitude>,
<image bottom lattitude>,
<image left longitude>,
<image right longitude>,
(optional) <x value nudge>,
(optional) <y value nudge>"""
with open('ImageData.csv', 'r') as f:
read_data = f.read().split(',')
while 5 <= len(read_data) < 7:
read_data.append(0)
ReturnData = [0]*7
ReturnData[0]=read_data[0]
for i in range(1,7):
ReturnData[i] = float(read_data[i])
return ReturnData
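# A hypothetical ImageData.csv matching the layout documented above (a single
# comma-separated line; the two trailing nudge values default to 0 if absent):
#
#     satellite.png,55.0,49.0,-11.0,2.0,0,0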
if __name__ == "__main__":
ImageData = ParseImageFile()
DrawMapData(GetKmlFiles(),ImageData[0], "LatitudeData.png", ImageData[1], ImageData[2], ImageData[3], ImageData[4],ImageData[5],ImageData[6])
|
apache/incubator-airflow | airflow/providers/amazon/aws/sensors/s3_key.py | Python | apache-2.0 | 8,164 | 0.001592 | #
# Licensed to the Apache Software Foundation (ASF) un | der one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
| # "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
from typing import Callable, List, Optional, Union
from urllib.parse import urlparse
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.sensors.base import BaseSensorOperator
class S3KeySensor(BaseSensorOperator):
"""
    Waits for a key (a file-like instance on S3) to be present in an S3 bucket.
    S3 being a key/value store, it does not support folders. The path is just
    a key to a resource.
:param bucket_key: The key being waited on. Supports full s3:// style url
or relative path from root level. When it's specified as a full s3://
url, please leave bucket_name as `None`.
:type bucket_key: str
:param bucket_name: Name of the S3 bucket. Only needed when ``bucket_key``
is not provided as a full s3:// url.
:type bucket_name: str
:param wildcard_match: whether the bucket_key should be interpreted as a
Unix wildcard pattern
:type wildcard_match: bool
:param aws_conn_id: a reference to the s3 connection
:type aws_conn_id: str
:param verify: Whether or not to verify SSL certificates for S3 connection.
By default SSL certificates are verified.
You can provide the following values:
- ``False``: do not validate SSL certificates. SSL will still be used
(unless use_ssl is False), but SSL certificates will not be
verified.
- ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to uses.
You can specify this argument if you want to use a different
CA cert bundle than the one used by botocore.
:type verify: bool or str
"""
template_fields = ('bucket_key', 'bucket_name')
def __init__(
self,
*,
bucket_key: str,
bucket_name: Optional[str] = None,
wildcard_match: bool = False,
aws_conn_id: str = 'aws_default',
verify: Optional[Union[str, bool]] = None,
**kwargs,
):
super().__init__(**kwargs)
self.bucket_name = bucket_name
self.bucket_key = bucket_key
self.wildcard_match = wildcard_match
self.aws_conn_id = aws_conn_id
self.verify = verify
self.hook: Optional[S3Hook] = None
def poke(self, context):
if self.bucket_name is None:
parsed_url = urlparse(self.bucket_key)
if parsed_url.netloc == '':
raise AirflowException('If key is a relative path from root, please provide a bucket_name')
self.bucket_name = parsed_url.netloc
self.bucket_key = parsed_url.path.lstrip('/')
else:
parsed_url = urlparse(self.bucket_key)
if parsed_url.scheme != '' or parsed_url.netloc != '':
raise AirflowException(
'If bucket_name is provided, bucket_key'
' should be relative path from root'
' level, rather than a full s3:// url'
)
self.log.info('Poking for key : s3://%s/%s', self.bucket_name, self.bucket_key)
if self.wildcard_match:
return self.get_hook().check_for_wildcard_key(self.bucket_key, self.bucket_name)
return self.get_hook().check_for_key(self.bucket_key, self.bucket_name)
def get_hook(self) -> S3Hook:
"""Create and return an S3Hook"""
if self.hook:
return self.hook
self.hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
return self.hook
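# A minimal usage sketch inside a DAG definition (bucket and key names are
# illustrative; assumes the default 'aws_default' connection is configured):
#
#     wait_for_key = S3KeySensor(
#         task_id='wait_for_key',
#         bucket_key='s3://my-bucket/incoming/data.csv',
#         poke_interval=60,
#     )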
class S3KeySizeSensor(S3KeySensor):
"""
Waits for a key (a file-like instance on S3) to be present and be more than
    some size in an S3 bucket.
    S3 being a key/value store, it does not support folders. The path is just
    a key to a resource.
:param bucket_key: The key being waited on. Supports full s3:// style url
or relative path from root level. When it's specified as a full s3://
url, please leave bucket_name as `None`.
:type bucket_key: str
:param bucket_name: Name of the S3 bucket. Only needed when ``bucket_key``
is not provided as a full s3:// url.
:type bucket_name: str
:param wildcard_match: whether the bucket_key should be interpreted as a
Unix wildcard pattern
:type wildcard_match: bool
:param aws_conn_id: a reference to the s3 connection
:type aws_conn_id: str
:param verify: Whether or not to verify SSL certificates for S3 connection.
By default SSL certificates are verified.
You can provide the following values:
- ``False``: do not validate SSL certificates. SSL will still be used
(unless use_ssl is False), but SSL certificates will not be
verified.
- ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to uses.
You can specify this argument if you want to use a different
CA cert bundle than the one used by botocore.
:type verify: bool or str
:param check_fn: Function that receives the list of the S3 objects,
        and returns a boolean:
        - ``True``: a certain criterion is met
        - ``False``: the criterion isn't met
        **Example**: Wait for any S3 object larger than 1 megabyte ::
            def check_fn(data: List) -> bool:
return any(f.get('Size', 0) > 1048576 for f in data if isinstance(f, dict))
:type check_fn: Optional[Callable[..., bool]]
"""
def __init__(
self,
*,
check_fn: Optional[Callable[..., bool]] = None,
**kwargs,
):
super().__init__(**kwargs)
self.check_fn_user = check_fn
def poke(self, context):
if super().poke(context=context) is False:
return False
s3_objects = self.get_files(s3_hook=self.get_hook())
if not s3_objects:
return False
check_fn = self.check_fn if self.check_fn_user is None else self.check_fn_user
return check_fn(s3_objects)
def get_files(self, s3_hook: S3Hook, delimiter: Optional[str] = '/') -> List:
"""Gets a list of files in the bucket"""
prefix = self.bucket_key
config = {
'PageSize': None,
'MaxItems': None,
}
if self.wildcard_match:
prefix = re.split(r'[\[\*\?]', self.bucket_key, 1)[0]
paginator = s3_hook.get_conn().get_paginator('list_objects_v2')
response = paginator.paginate(
Bucket=self.bucket_name, Prefix=prefix, Delimiter=delimiter, PaginationConfig=config
)
keys = []
for page in response:
if 'Contents' in page:
_temp = [k for k in page['Contents'] if isinstance(k.get('Size', None), (int, float))]
keys = keys + _temp
return keys
def check_fn(self, data: List, object_min_size: Optional[Union[int, float]] = 0) -> bool:
"""Default function for checking that S3 Objects have size more than 0
:param data: List of the objects in S3 bucket.
:type data: list
:param object_min_size: Checks if the objects sizes are greater then this value.
:type object_min_size: int
"""
return all(f.get('Size', 0) > object_min_size for f in data if isinstance(f, dict))
|
JohnLunzer/flexx | flexx/app/examples/flexx_in_thread.py | Python | bsd-2-clause | 1,287 | 0.002331 | """
Example showing running Flexx' event loop in another thread.
This is not a recommended use in general.
Most parts of Flexx are not thread-save. E.g. setting properties
should generally only be done from a single thread. Event handlers
are *always* called from the same thread that runs the event loop
(unless manually called).
The app.create_server() is used to (re)create the server object. It is
important that the used IOLoop is local to the thread. This can be
accomplished by calling create_server() and sta | rt() from the same
thread, or using ``new_loop=True`` (as is done here).
"""
import time
import threading
from flexx import app, event
class | MyModel1(event.HasEvents):
@event.prop
def foo(self, v=0):
return v
@event.connect('foo')
def on_foo(self, *events):
for ev in events:
print('foo changed to', ev.new_value)
# Create model in main thread
model = MyModel1()
# Start server in its own thread
app.create_server(new_loop=True)
t = threading.Thread(target=app.start)
t.start()
# Manipulate model from main thread (the model's on_foo() gets called from other thread)
for i in range(5, 9):
time.sleep(1)
model.foo = i
# Stop event loop (this is thread-safe) and wait for thread to end
app.stop()
t.join()
|
icarito/sugar | src/jarabe/model/shell.py | Python | gpl-3.0 | 28,033 | 0 | # Copyright (C) 2006-2007 Owen Williams.
# Copyright (C) 2006-2008 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import time
from gi.repository import Gio
from gi.repository import Wnck
from gi.repository import GObject
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GdkX11
from gi.repository import GLib
import dbus
from sugar3 import dispatch
from sugar3 import profile
from gi.repository import SugarExt
from jarabe.model.bundleregistry import get_registry
_SERVICE_NAME = 'org.laptop.Activity'
_SERVICE_PATH = '/org/laptop/Activity'
_SERVICE_INTERFACE = 'org.laptop.Activity'
_model = None
class Activity(GObject.GObject):
"""Activity which appears in the "Home View" of the Sugar shell
This class stores the Sugar Shell's metadata regarding a
given activity/application in the system. It interacts with
the sugar3.activity.* modules extensively in order to
accomplish its tasks.
"""
__gtype_name__ = 'SugarHomeActivity'
__gsignals__ = {
'pause': (GObject.SignalFlags.RUN_FIRST, None, ([])),
'resume': (GObject.SignalFlags.RUN_FIRST, None, ([])),
'stop': (GObject.SignalFlags.RUN_LAST, GObject.TYPE_BOOLEAN, ([])),
}
LAUNCHING = 0
LAUNCH_FAILED = 1
LAUNCHED = 2
def __init__(self, activity_info, activity_id, color, window=None):
"""Initialise the HomeActivity
activity_info -- sugar3.activity.registry.ActivityInfo instance,
provides the information required to actually
create the new instance. This is, in effect,
the "type" of activity being created.
activity_id -- unique identifier for this instance
of the activity type
_windows -- WnckWindows registered for the activity. The lowest
one in the stack is the main window.
"""
GObject.GObject.__init__(self)
self._windows = []
self._service = None
self._shell_windows = []
self._activity_id = activity_id
self._activity_info = activity_info
self._launch_time = time.time()
self._launch_status = Ac | tivity.LAUNCHING
if color is not None:
self._color = color
else:
self._color = profile.get_color()
if window is not None:
self.add_window(window)
self._retrieve_service()
self._name_owner_changed_handler = None
if not self._service:
bus = dbus.SessionBus()
self._name_owner_changed_handler = bus.add_signal_receiver(
self._name_owner_changed_cb,
signal_name='NameO | wnerChanged',
dbus_interface='org.freedesktop.DBus')
self._launch_completed_hid = \
get_model().connect('launch-completed',
self.__launch_completed_cb)
self._launch_failed_hid = get_model().connect('launch-failed',
self.__launch_failed_cb)
def get_launch_status(self):
return self._launch_status
launch_status = GObject.property(getter=get_launch_status)
def add_window(self, window, is_main_window=False):
"""Add a window to the windows stack."""
if not window:
raise ValueError('window must be valid')
self._windows.append(window)
if is_main_window:
window.connect('state-changed', self._state_changed_cb)
def push_shell_window(self, window):
"""Attach a shell run window (eg. view source) to the activity."""
self._shell_windows.append(window)
def pop_shell_window(self, window):
"""
Detach a shell run window (eg. view source) to the activity.
Only call this on **user initiated** deletion (loop issue).
"""
self._shell_windows.remove(window)
def has_shell_window(self):
return bool(self._shell_windows)
def stop(self):
# For web activities the Apisocket will connect to the 'stop'
# signal, thus preventing the window close. Then, on the
# 'activity.close' method, it will call close_window()
# directly.
close_window = not self.emit('stop')
if close_window:
self.close_window()
def close_window(self):
if self.get_window() is not None:
self.get_window().close(GLib.get_current_time())
for w in self._shell_windows:
w.destroy()
def remove_window_by_xid(self, xid):
"""Remove a window from the windows stack."""
for wnd in self._windows:
if wnd.get_xid() == xid:
self._windows.remove(wnd)
return True
return False
def get_service(self):
"""Get the activity service
Note that non-native Sugar applications will not have
such a service, so the return value will be None in
those cases.
"""
return self._service
def get_title(self):
"""Retrieve the application's root window's suggested title"""
if self._windows:
return self._windows[0].get_name()
else:
return None
def get_icon_path(self):
"""Retrieve the activity's icon (file) name"""
if self.is_journal():
icon_theme = Gtk.IconTheme.get_default()
info = icon_theme.lookup_icon('activity-journal',
Gtk.IconSize.SMALL_TOOLBAR, 0)
if not info:
return None
fname = info.get_filename()
del info
return fname
elif self._activity_info:
return self._activity_info.get_icon()
else:
return None
def get_icon_color(self):
"""Retrieve the appropriate icon colour for this activity
Uses activity_id to index into the PresenceService's
set of activity colours, if the PresenceService does not
have an entry (implying that this is not a Sugar-shared application)
uses the local user's profile colour for the icon.
"""
return self._color
def get_activity_id(self):
"""Retrieve the "activity_id" passed in to our constructor
This is a "globally likely unique" identifier generated by
sugar3.util.unique_id
"""
return self._activity_id
def get_bundle_id(self):
""" Returns the activity's bundle id"""
if self._activity_info is None:
return None
else:
return self._activity_info.get_bundle_id()
def get_xid(self):
"""Retrieve the X-windows ID of our root window"""
if self._windows:
return self._windows[0].get_xid()
else:
return None
def has_xid(self, xid):
"""Check if an X-window with the given xid is in the windows stack"""
if self._windows:
for wnd in self._windows:
if wnd.get_xid() == xid:
return True
return False
def get_window(self):
"""Retrieve the X-windows root window of this application
This was stored by the add_window method, which was
called by HomeModel._add_activity, which was called
via a callback that looks for all 'window-opened'
events.
We keep a stack of the windows. The lowest window in the
stack that is still valid we consider the main one.
HomeModel currently uses a dbus service query |