repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
natemara/aloft.py | setup.py | Python | mit | 568 | 0.02993 | from setuptools import setup
with open('requir | ements.txt') as f:
required = f.read().splitlines()
setup(
name="aloft.py",
version="0.0.4",
author="Nate Mara",
author_email="natemara@gmail.com",
description="A simple API for getting winds aloft data from NOAA",
license="MIT",
test_suite="tests",
keywords="aviation weather winds aloft",
url="https://github.com/natemara/aloft.py",
packages=['aloft'],
classifiers=[
" | Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
install_requires=required,
)
|
luuvish/libvio | script/test/suite/vp9.py | Python | mit | 2,045 | 0.004401 | # -*- coding: utf-8 -*-
'''
================================================================================
This confidential and proprietary software may be used only
as authorized by a licensing agreement from Thumb o'Cat Inc.
In the event of publication, the following notice is applicable:
Copyright (C) 2013 - 2014 Thumb o'Cat
All right reserved.
The entire notice above must be reproduced on all authorized copies.
================================================================================
File : vp9.py
Author(s) : Luuvish
Version : 2.0
Revision :
2.0 May 13, 2014 Executor classify
================================================================================
'''
__all__ = ('models', 'suites')
__version__ = '2.0.0'
from os.path import join
from . import rootpath
from ..model.ffmpeg import FFmpeg
from ..model.libvpx import LibVpx
models = (FFmpeg, LibVpx)
suites = (
{
'suite' : 'decode-vp9-libvpx',
'model' : 'libvpx',
'codec' : 'vp9',
'action': 'decode',
'stdout': 'vp9-libvpx.log',
'srcdir': join(rootpath, 'test/stream/vp9'),
'outdir': join(rootpath, 'test/image/vp9'),
'includes': ('*.ivf', '*.webm'),
'excludes': ('vp91-2-04-yv444.webm', )
},
{
'suite' : 'digest-vp9-libvpx',
'model' : 'libvpx',
'codec' : 'vp9',
'action': 'digest',
'std | out': 'vp9-libvpx.log',
'srcdir': join(rootpath, 'test/stream/vp9'),
'outdir': join(rootpath, 'test/digest/vp9'),
'includes': ('*.ivf', '*.webm'),
'excludes': ('vp91-2-04-yv444.webm', )
},
{
| 'suite' : 'compare-vp9-libvpx',
'model' : 'libvpx',
'codec' : 'vp9',
'action': 'compare',
'stdout': 'vp9-libvpx.log',
'srcdir': join(rootpath, 'test/stream/vp9'),
'outdir': join(rootpath, 'test/digest/vp9'),
'includes': ('*.ivf', '*.webm'),
'excludes': ('vp91-2-04-yv444.webm', )
}
)
|
shlomif/patool | patoolib/util.py | Python | gpl-3.0 | 15,258 | 0.002097 | # -*- coding: utf-8 -*-
# Copyright (C) 2010-2012 Bastian Kleineidam
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Utility functions."""
import os
import sys
import subprocess
import mimetypes
import tempfile
import traceback
from distutils.spawn import find_executable
mimedb = mimetypes.MimeTypes(strict=False)
# add missing encodings and mimetypes
mimedb.encodings_map['.bz2'] = 'bzip2'
mimedb.encodings_map['.lzma'] = 'lzma'
mimedb.encodings_map['.xz'] = 'xz'
mimedb.encodings_map['.lz'] = 'lzip'
mimedb.suffix_map['.tbz2'] = '.tar.bz2'
mimedb.add_type('application/x-lzop', '.lzo', strict=False)
mimedb.add_type('application/x-arj', '.arj', strict=False)
mimedb.add_type('application/x-lzma', '.lzma', strict=False)
mimedb.add_type('application/x-xz', '.xz', strict=False)
mimedb.add_type('application/java-archive', '.jar', strict=False)
mimedb.add_type('application/x-rar', '.rar', strict=False)
mimedb.add_type('application/x-7z-compressed', '.7z', strict=False)
mimedb.add_type('application/x-cab', '.cab', strict=False)
mimedb.add_type('application/x-rpm', '.rpm', strict=False)
mimedb.add_type('application/x-debian-package', '.deb', strict=False)
mimedb.add_type('application/x-ace', '.ace', strict=False)
# Since .a is already a common type, strict=True must be used.
mimedb.add_type('application/x-archive', '.a', strict=True)
mimedb.add_type('application/x-alzip', '.alz', strict=False)
mimedb.add_type('application/x-arc', '.arc', strict=False)
mimedb.add_type('application/x-lrzip', '.lrz', strict=False)
mimedb.add_type('application/x-lha', '.lha', strict=False)
mimedb.add_type('application/x-lzh', '.lzh', strict=False)
mimedb.add_type('application/x-rzip', '.rz', strict=False)
mimedb.add_type('application/x-zoo', '.zoo', strict=False)
mimedb.add_type('application/x-dms', '.dms', strict=False)
mimedb.add_type('application/x-zip-compressed', '.crx', strict=False)
class PatoolError (StandardError):
"""Raised when errors occur."""
pass
class memoized (object):
"""Decorator that caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned, and
not re-evaluated."""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
self.cache[args] = value = self.func(*args)
return value
except TypeError:
# uncachable -- for instance, passing a list as an argument.
# Better to not cache than to blow up entirely.
return self.func(*args)
def __repr__(self):
"""Return the function's docstring."""
return self.func.__doc__
def backtick (cmd):
"""Return output from command."""
return subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
def run (cmd, **kwargs):
"""Run command without error checking.
@return: command return code"""
# Note that shell_quote_nt() result is not suitable for copy-paste
# (especially on Unix systems), but it looks nicer than shell_quote().
log_info("running %s" % " ".join(map(shell_quote_nt, cmd)))
if kwargs:
log_info(" with %s" % ", ".join("%s=%s" % (k, shell_quote(str(v)))\
for k, v in kwargs.items()))
if kwargs.get("shell"):
# for shell calls the command must be a string
cmd = " ".join(cmd)
return subprocess.call(cmd, **kwargs)
def run_checked (cmd, **kwargs):
"""Run command and raise PatoolError on error."""
retcode = run(cmd, **kwargs)
if retcode:
msg = "Command `%s' returned non-zero exit status %d" % (cmd, retcode)
raise PatoolError(msg)
return retcode
@memoized
def guess_mime (filename):
"""Guess the MIME type of given filename using file(1) and if that
fails by looking at the filename extension with the Python mimetypes
module.
The result of this function is cached.
"""
mime, encoding = guess_mime_file(filename)
if mime is None:
mime, encoding = guess_mime_mimedb(filename)
assert mime is not None or encoding is None
return mime, encoding
Encoding2Mime = {
'gzip': "application/x-gzip",
'bzip2': "application/x-bzip2",
'compress': "application/x-compress",
'lzma': "application/x-lzma",
'lzip': "application/x-lzip",
'xz': "application/x-xz",
}
Mime2Encoding = dict([(_val, _key) for _key, _val in Encoding2Mime.items()])
def guess_mime_mimedb (filename):
"""Guess MIME type from given filename.
@return: tuple (mime, encoding)
"""
mime, encoding = mimedb.guess_type(filename, strict=False)
from patoolib import ArchiveMimetypes, ArchiveCompressions
if mime not in ArchiveMimetypes and encoding in ArchiveCompressions:
# Files like 't.txt.gz' are recognized with encoding as format, and
# an unsupported mime-type like 'text/plain'. Fix this.
mime = Encoding2Mime[encoding]
encoding = None
return mime, encoding
def guess_mime_file (filename):
"""Determine MIME type of filename with file(1):
(a) using `file --mime`
(b) using `file` and look the result string
@return: tuple (mime, encoding)
"""
mime, encoding = None, None
base, ext = os.path.splitext(filename)
if ext.lower() in ('.lzma', '.alz', '.lrz'):
# let mimedb recognize these extensions
return mime, encoding
if os.path.isfile(filename):
file_prog = find_program("file")
if file_prog:
mime, encoding = guess_mime_file_mime(file_prog, filename)
if mime is None:
mime = guess_mime_file_text(file_prog, filename)
return mime, encoding
def guess_mime_file_mime (file_prog, filename):
"""Determine MIME type of filename with file(1) and --mime option.
@return: tuple (mime, encoding)
"""
mime, encoding = None, None
cmd = [file_prog, "--brief", "--mime-type", filename]
try:
mime = backtick(cmd).strip()
except OSError:
# ignore errors, as file(1) is only a fallback
return mime, encoding
from patoolib import ArchiveMimetypes
if mime in Mime2Encoding:
# try to look inside compressed archives
cmd = [file_prog, "--brief", "--mime", "--uncompress", filename]
try:
outparts = backtick(cmd).strip().split(";")
except OSError:
# ignore errors, as file(1) is only a fallback
return mime, encoding
mime2 = outparts[0].split(" ", 1)[0]
| if mime2 == 'application/x-empty':
# The uncompressor program file(1) uses is not installed.
# Try to get mime information from the file extension.
| mime2, encoding2 = guess_mime_mimedb(filename)
if mime2 in ArchiveMimetypes:
mime = mime2
encoding = encoding2
elif mime2 in ArchiveMimetypes:
mime = mime2
encoding = get_file_mime_encoding(outparts)
if mime not in ArchiveMimetypes:
mime, encoding = None, None
return mime, encoding
def get_file_mime_encoding (parts):
"""Get encoding value from splitted output of file --mime --uncompress."""
for part in parts:
for subpart in part.split(" "):
if subpart.startswith("compressed-encoding="):
mime = subpart.split("=")[1].strip()
return Mime2Encoding.get(mime)
return None
# Match file(1) output text to mime |
Senseg/robotframework | src/robot/libdocpkg/robotbuilder.py | Python | apache-2.0 | 4,395 | 0.000228 | # Copyright 2008-2012 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from robot.errors import DataError
from robot.running import TestLibrary, UserLibrary
from robot.parsing import populators
from robot import utils
from .model import LibraryDoc, KeywordDoc
class LibraryDocBuilder(object):
_argument_separator = '::'
def build(self, library):
name, args = self._split_library_name_and_args(library)
lib = TestLibrary(name, args)
libdoc = LibraryDoc(name=lib.name,
doc=self._get_doc(lib),
version=lib.version,
scope=self._get_scope(lib),
named_args=lib.supports_named_arguments)
libdoc.inits = self._get_initializers(lib)
libdoc.keywords = KeywordDocBuilder().build_keywords(lib)
return libdoc
def _split_library_name_and_args(self, library):
args = library.split(self._argument_separator)
name = args.pop(0)
return self._normalize_library_path(name), args
def _normalize_library_path(self, library):
path = library.replace('/', os.sep)
if os.path.exists(path):
return os.path.abspath(path)
return library
def _get_doc(self, lib):
return lib.doc or "Documentation for test library `%s`." % lib.name
def _get_scope(self, lib):
if hasattr(lib, 'scope'):
return {'TESTCASE': 'test case', 'TESTSUITE': 'test suite',
'GLOBAL': 'global'}[lib.scope]
return ''
def _get_initializers(self, lib):
if lib.init.arguments.maxargs:
return [KeywordDocBuilder().build_keyword(lib.init)]
return []
class ResourceDocBuilder(object):
def build(self, path):
res = self._import_resource(path)
libdoc = LibraryDoc(name=res.name, doc=self._get_doc(res),
type='resource', named_args=True)
libdoc.keywords = KeywordDocBuilder().build_keywords(res)
return libdoc
def _import_resource(self, path):
populators.PROCESS_CURDIR = False
try:
return UserLibrary(self._find_resource_file(path))
finally:
populators.PROCESS_CURDIR = True
def _find_resource_file(self, path):
if os.path.isfile(path):
return path
for dire in [item for item in sys.path if os.path.isdir(item)]:
if os.path.isfile(os.path.join(dire, path)):
return os.path.join(dire, path)
raise DataError("Resource file '%s' does not exist." % path)
def _get_doc(self, res):
doc = res.doc or "Documentation for resource file `%s`." % res.name
return utils.unescape(d | oc)
class KeywordDocBuilder(object):
def build_keywords(self, lib):
return [self.build_keyword(kw) for kw in lib.handlers.values()]
def build_keyword(self, kw):
return KeywordDoc(name=kw.name, args=self._get_args(kw), doc=kw.doc)
def _get_args(self, kw):
required, defaults = self._parse_args(kw)
args = required + ['%s=%s' % item for item in defaults]
varargs = self._n | ormalize_arg(kw.arguments.varargs, kw.type)
if varargs:
args.append('*%s' % varargs)
return args
def _parse_args(self, kw):
args = [self._normalize_arg(arg, kw.type) for arg in kw.arguments.names]
default_count = len(kw.arguments.defaults)
if not default_count:
return args, []
required = args[:-default_count]
defaults = zip(args[-default_count:], kw.arguments.defaults)
return required, defaults
def _normalize_arg(self, arg, kw_type):
if arg and kw_type == 'user':
arg = arg[2:-1] # strip ${} to make args look consistent
return arg
|
OpenAgInitiative/gro-api | gro_api/actuators/tests.py | Python | gpl-2.0 | 7,616 | 0.000919 | from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from ..gro_api.test import APITestCase, run_with_any_layout
from ..resources.models import ResourceType, ResourceProperty, ResourceEffect
from .models import ActuatorType, ControlProfile, Actuator
from .serializers import ActuatorTypeSerializer, ActuatorSerializer
class ActuatorAuthMixin:
@classmethod
def setUpTestData(cls):
cls.user = get_user_model().objects.create_user(
'actuators', 'actuators@test.com', 'actuators'
)
electricians_group = Group.objects.get(name='Electricians')
cls.user.groups.add(electricians_group)
layout_editors_group = Group.objects.get(name='LayoutEditors')
cls.user.groups.add(layout_editors_group)
def setUp(self):
self.client.force_authenticate(user=self.user)
def tearDown(self):
self.client.force_authenticate()
class ActuatorTypeTestCase(ActuatorAuthMixin, APITestCase):
@run_with_any_layout
def test_visible_fields(self):
fields = ActuatorTypeSerializer().get_fields()
fields.pop('url')
fields.pop('name')
fields.pop('resource_effect')
fields.pop('properties')
fields.pop('order')
fields.pop('is_binary')
fields.pop('actuator_count')
fields.pop('read_only')
fields.pop('actuators')
fields.pop('allowed_control_profiles')
self.assertFalse(fields)
@run_with_any_layout
def test_edit_stock_type(self):
heater_id = ResourceEffect.objects.get_by_natural_key('A', 'HE').pk
air_temp_id = ResourceProperty.objects.get_by_natural_key('A', 'TM').pk
data = {
'name': 'test',
'resource_effect': self.url_for_object('resourceEffect', heater_id),
'properties': [
self.url_for_object('resourceProperty', air_temp_id)
],
'order': 0,
| 'is_binary': True,
}
relay_air_heater_id = ActuatorType.objects.get_by_natural_key(
'Relay-Controlled Air Heater'
).pk
res = self.client.put(
self.url_for_object('actuat | orType', relay_air_heater_id), data=data
)
self.assertEqual(res.status_code, 403)
@run_with_any_layout
def test_edit_custom_type(self):
humidifier_id = ResourceEffect.objects.get_by_natural_key('A', 'HU').pk
air_temp_id = ResourceProperty.objects.get_by_natural_key('A', 'TM').pk
data = {
'name': 'Magic Humidifier',
'resource_effect' : self.url_for_object('resourceEffect', humidifier_id),
'properties': [
self.url_for_object('resourceProperty', air_temp_id)
],
'order': 0,
'is_binary': True,
}
res = self.client.post(self.url_for_object('actuatorType'), data=data)
self.assertEqual(res.status_code, 201)
self.assertEqual(data['name'], 'Magic Humidifier')
data['name'] = 'New Name'
res = self.client.put(res.data['url'], data=data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data['name'], 'New Name')
@run_with_any_layout
def test_invalid_properties(self):
heater_id = ResourceEffect.objects.get_by_natural_key('A', 'HE').pk
water_ec_id = ResourceProperty.objects.get_by_natural_key('W', 'EC').pk
data = {
'name': 'test',
'resource_effect': self.url_for_object('resourceEffect', heater_id),
'properties': [
self.url_for_object('resourceProperty', water_ec_id)
],
'order': 0,
'is_binary': True,
}
res = self.client.post(self.url_for_object('actuatorType'), data=data)
self.assertEqual(res.status_code, 400)
class ActuatorTestCase(ActuatorAuthMixin, APITestCase):
@run_with_any_layout
def test_visible_fields(self):
fields = ActuatorSerializer().get_fields()
fields.pop('url')
fields.pop('index')
fields.pop('name')
fields.pop('actuator_type')
fields.pop('control_profile')
fields.pop('resource')
fields.pop('current_override')
fields.pop('override_value')
self.assertFalse(fields)
@run_with_any_layout
def test_actuator_creation(self):
# Create a resource to install the actuator in
air_id = ResourceType.objects.get_by_natural_key('A').pk
resource_info = {
'resource_type': self.url_for_object('resourceType', air_id),
'location': self.url_for_object('enclosure', 1)
}
res = self.client.post(
self.url_for_object('resource'), data=resource_info
)
self.assertEqual(res.status_code, 201)
resource = res.data
# Create the actuator
heater_id = ActuatorType.objects.get_by_natural_key(
'Relay-Controlled Air Heater'
).pk
control_profile_id = ControlProfile.objects.get_by_natural_key(
'Relay-Controlled Air Heater', 'Default Profile'
).pk
actuator_info = {
'actuator_type': self.url_for_object('actuatorType', heater_id),
'control_profile': self.url_for_object(
'controlProfile', control_profile_id
),
'resource': resource['url'],
}
res = self.client.post(
self.url_for_object('actuator'), data=actuator_info
)
self.assertEqual(res.status_code, 201)
actuator = res.data
# Validate the index and name
res = self.client.get(actuator['actuator_type'])
self.assertEqual(res.status_code, 200)
actuator_type = res.data
num_actuators = actuator_type['actuator_count']
self.assertEqual(actuator['index'], num_actuators)
expected_name = "{} Instance {}".format(
actuator_type['name'], num_actuators
)
self.assertEqual(actuator['name'], expected_name)
# Change the name
actuator_info['name'] = 'test'
res = self.client.put(actuator['url'], data=actuator_info)
self.assertEqual(res.status_code, 200)
actuator = res.data
self.assertEqual(actuator['name'], 'test')
# Try changing the type
old_actuator_type_url = actuator['actuator_type']
circulation_id = ActuatorType.objects.get_by_natural_key(
'Relay-Controlled Humidifier'
).pk
actuator_info['actuator_type'] = self.url_for_object(
'actuatorType', circulation_id
)
res = self.client.put(actuator['url'], data=actuator_info)
self.assertEqual(res.status_code, 400)
actuator_info['actuator_type'] = old_actuator_type_url
# Create a new resource of a different type
water_id = ResourceType.objects.get_by_natural_key('W').pk
new_resource_info = {
'resource_type': self.url_for_object('resourceType', water_id),
'location': self.url_for_object('enclosure', 1)
}
res = self.client.post(
self.url_for_object('resource'), data=new_resource_info
)
self.assertEqual(res.status_code, 201)
new_resource = res.data
# Try to move the actuator to the new resource
actuator_info['resource'] = new_resource['url']
res = self.client.put(actuator['url'], data=actuator_info)
self.assertEqual(res.status_code, 400)
class ActuatorStateTestCase(APITestCase):
# TODO: Test state routes
pass
|
vmalloc/json_rest | json_rest/logging_utils.py | Python | bsd-3-clause | 269 | 0.011152 | from logging import Handler
c | lass NullHandler(Handler):
"""
NullHandler from Python 2.7 - doesn't exist on Python 2.6
"""
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = N | one
|
kr15h/digital-fabrication-studio | examples/anne-marie-projected-notebook/switch.py | Python | mit | 1,658 | 0.00965 | import RPi.GPIO as GPIO
import time
import os
import psutil
import subprocess
# Get framebuffer resolution
proc = subprocess.Popen(["fbset | grep 'mode '"], stdout=subprocess.PIPE, shell=True)
(out, err) = proc.communicate()
i = out.index('"') + 1
out = out[i:]
i = out.index('"')
out = out[:i]
i = out.index('x')
xres = out[:i]
yres = out[i + 1:]
# Create the --win argument string
winArg = '--win 0,0,' + xres + ',' + yres
# Set u | p GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(18, GPIO.IN, pull_up_down=GPIO.PUD_UP)
buttonPressed = False
while True:
input_state = GPIO.input(18)
if input_state == False:
if buttonPressed == False:
print('Button Pressed')
sysCmd = '/usr/bin/omxplayer --loop --no-osd ' + winArg + ' /boot/video.mp4 &'
print(sysCmd)
| os.system(sysCmd)
buttonPressed = True
else:
for p in psutil.process_iter():
if p.name() == "omxplayer":
print('killing process "omxplayer"')
os.system('killall omxplayer')
if p.name() == "omxplayer.bin":
print('killing process "omxplayer.bin"')
os.system('killall omxplayer.bin')
#playerProcess = os.system('ps -e | grep "omxplayer"')
#print(type(playerProcess))
#if type(playerProcess) is str:
# print('Player process found, attempting to kill.')
# print('Killing process with name "omxplayer".')
# os.system('killall omxplayer')
# print('Killing process with name "omxplayer.bin"')
# os.system('killall omxplayer.bin')
buttonPressed = False
time.sleep(0.2)
|
chukysoria/pyspotify-connect | spotifyconnect/metadata.py | Python | apache-2.0 | 1,798 | 0 | from __future__ import unicode_literals
import spotifyconnect
from spotifyconnect import ffi, lib, serialized, utils
__all__ = [
'ImageSize',
'Metadata'
]
class Metadata(object):
"""A Spotify track.
"""
def __init__(self, sp_metadata):
self._sp_metadata = sp_metadata
self.playlist_name = utils.to_unicode(sp_metadata.playlist_name)
self.playlist_uri = utils.to_unicode(sp_metadata.playlist_uri)
self.track_name = utils.to_unicode(sp_metadata.tr | ack_name)
self.track_uri = utils.to_unicode(sp_metada | ta.track_uri)
self.artist_name = utils.to_unicode(sp_metadata.artist_name)
self.artist_uri = utils.to_unicode(sp_metadata.artist_uri)
self.album_name = utils.to_unicode(sp_metadata.album_name)
self.album_uri = utils.to_unicode(sp_metadata.album_uri)
self.cover_uri = utils.to_unicode(sp_metadata.cover_uri)
self.duration = sp_metadata.duration
def __repr__(self):
return 'Metadata(%s)' % self.track_uri
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._sp_metadata == other._sp_metadata
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self._sp_metadata)
@serialized
def get_image_url(self, image_size):
image_url = ffi.new('char[512]')
spotifyconnect.Error.maybe_raise(
lib.SpGetMetadataImageURL(
utils.to_char(
self.cover_uri),
image_size,
image_url,
ffi.sizeof(image_url)))
return utils.to_unicode(image_url)
@utils.make_enum('kSpImageSize')
class ImageSize(utils.IntEnum):
pass
|
trailofbits/manticore | manticore/platforms/wasm.py | Python | agpl-3.0 | 17,265 | 0.002606 | from .platform import Platform
from ..wasm.structure import (
ModuleInstance,
Store,
FuncAddr,
HostFunc,
Stack,
ProtoFuncInst,
MemInst,
MemAddr,
GlobalInst,
GlobalAddr,
TableInst,
TableAddr,
ExternVal,
Module,
)
from ..wasm.types import Trap, TypeIdx, TableType, MemoryType, GlobalType, MissingExportException
from ..core.state import TerminateState
from ..core.smtlib import ConstraintSet
from functools import partial
import typing
import logging
import os
logger = logging.getLogger(__name__)
# logger.setLevel(logging.DEBUG)
def stub(arity, _state, *args):
"""Default function used for hostfunc calls when a proper import wasn't provided"""
logger.info("Called stub function with args: %s", args)
return [0 for _ in range(arity)] # TODO: Return symbolic values
class WASMWorld(Platform):
"""Manages global environment for a WASM state. Analagous to EVMWorld."""
def __init__(self, filename, name="self", **kwargs):
"""
:param filena | me: The WASM module to execute
:param kwargs: Accepts "constraints" to pass in an initial ConstraintSet
"""
super().__init__(filename, **kwargs)
#: Initial set of constraints
self.constraints = kwargs.get("constraints", Con | straintSet())
#: Prevents users from calling run without instantiating the module
self.instantiated = []
#: Backing store for functions, memories, tables, and globals
self.store = Store()
self.modules = []
self.module_names = {}
self.manual_exports = {}
self.default_module = name
self.register_module(name, filename)
#: Stores numeric values, branch labels, and execution frames
self.stack = Stack()
#: Stores concretized information used to advise execution of the next instruction.
self.advice = None
self.forward_events_from(self.stack)
self.forward_events_from(self.instance)
self.forward_events_from(self.instance.executor)
def __getstate__(self):
state = super().__getstate__()
state["modules"] = self.modules
state["store"] = self.store
state["stack"] = self.stack
state["advice"] = self.advice
state["constraints"] = self.constraints
state["instantiated"] = self.instantiated
state["module_names"] = self.module_names
state["default_module"] = self.default_module
state["manual_exports"] = self.manual_exports
return state
def __setstate__(self, state):
self.modules = state["modules"]
self.store = state["store"]
self.stack = state["stack"]
self.advice = state["advice"]
self.constraints = state["constraints"]
self.instantiated = state["instantiated"]
self.module_names = state["module_names"]
self.default_module = state["default_module"]
self.manual_exports = state["manual_exports"]
self.forward_events_from(self.stack)
self.forward_events_from(self.instance)
self.forward_events_from(self.instance.executor)
for mem in self.store.mems:
self.forward_events_from(mem)
super().__setstate__(state)
@property
def instance(self) -> ModuleInstance:
"""
:return: the ModuleInstance for the first module registered
"""
return self.modules[self.module_names[self.default_module]][1]
@property
def module(self) -> Module:
"""
:return: The first module registered
"""
return self.modules[self.module_names[self.default_module]][0]
def register_module(self, name, filename_or_alias):
"""
Provide an explicit path to a WASM module so the importer will know where to find it
:param name: Module name to register the module under
:param filename_or_alias: Name of the .wasm file that module lives in
:return:
"""
if filename_or_alias in self.module_names:
self.module_names[name] = self.module_names[filename_or_alias]
if name not in self.module_names:
self.modules.append((Module.load(filename_or_alias), ModuleInstance(self.constraints)))
self.module_names[name] = len(self.modules) - 1
self.instantiated.append(False)
def set_env(
self,
exports: typing.Dict[
str, typing.Union[ProtoFuncInst, TableInst, MemInst, GlobalInst, typing.Callable]
],
mod_name="env",
):
"""
Manually insert exports into the global environment
:param exports: Dict mapping names to functions/tables/globals/memories
:param mod_name: The name of the module these exports should fall under
"""
self.manual_exports.setdefault(mod_name, {}).update(exports)
def import_module(self, module_name, exec_start, stub_missing):
"""
Collect all of the imports for the given module and instantiate it
:param module_name: module to import
:param exec_start: whether to run the start functions automatically
:param stub_missing: whether to replace missing imports with stubs
:return: None
"""
search_paths = {"."}
# If the module isn't registered, look for it on the filesystem
if module_name not in self.module_names:
logger.debug("Module %s was not provided, attempting to load from disk", module_name)
for pth in search_paths:
possible_path = os.path.join(pth, module_name + ".wasm")
if os.path.isfile(possible_path):
self.register_module(module_name, possible_path)
break
else:
raise RuntimeError("Missing imported module: " + module_name)
if self.instantiated[self.module_names[module_name]]:
return
# Get the module and the instance from the world
module, instance = self.modules[self.module_names[module_name]]
imports = self.get_module_imports(module, exec_start, stub_missing)
instance.instantiate(self.store, module, imports, exec_start)
self.instantiated[self.module_names[module_name]] = True
logger.info("Imported %s", module_name)
def _get_export_addr(
self, export_name, mod_name=None
) -> typing.Optional[typing.Union[FuncAddr, TableAddr, MemAddr, GlobalAddr]]:
"""
Gets the address in the store of a given export
:param export_name: Name of the export to look for
:param mod_name: Name of the module the export lives in
:return: The address of the export
"""
try:
if mod_name in self.module_names: # TODO - handle mod_name.export_name
return self.modules[self.module_names[mod_name]][1].get_export_address(export_name)
except MissingExportException as exc:
logger.error("Couldn't find export %s.%s", mod_name, exc.name)
return None
def get_export(
self, export_name, mod_name=None
) -> typing.Optional[
typing.Union[ProtoFuncInst, TableInst, MemInst, GlobalInst, typing.Callable]
]:
"""
Gets the export _instance_ for a given export & module name
(basically just dereferences _get_export_addr into the store)
:param export_name: Name of the export to look for
:param mod_name: Name of the module the export lives in
:return: The export itself
"""
mod_name = self.default_module if not mod_name else mod_name
if mod_name in self.manual_exports:
if export_name in self.manual_exports[mod_name]:
return self.manual_exports[mod_name][export_name]
addr = self._get_export_addr(export_name, mod_name)
if addr is not None:
if isinstance(addr, FuncAddr):
return self.store.funcs[addr]
if isinstance(addr, TableAddr):
return self.store.funcs[addr]
if isinstance(addr, MemAddr):
return self.store.mems[addr]
|
ibuler/coco | coco/utils.py | Python | gpl-3.0 | 6,619 | 0.00047 | #!coding: utf-8
import base64
import calendar
import os
import re
import paramiko
from io import StringIO
import hashlib
import threading
import time
import pyte
def ssh_key_string_to_obj(text):
key_f = StringIO(text)
key = None
try:
key = paramiko.RSAKey.from_private_key(key_f)
except paramiko.SSHException:
pass
try:
key = paramiko.DSSKey.from_private_key(key_f)
except paramiko.SSHException:
pass
return key
def ssh_pubkey_gen(private_key=None, username='jumpserver', hostname='localhost'):
if isinstance(private_key, str):
private_key = ssh_key_string_to_obj(private_key)
if not isinstance(private_key, (paramiko.RSAKey, paramiko.DSSKey)):
raise IOError('Invalid private key')
public_key = "%(key_type)s %(key_content)s %(username)s@%(hostname)s" % {
'key_type': private_key.get_name(),
'key_content': private_key.get_base64(),
'username': username,
'hostname': hostname,
}
return public_key
def ssh_key_gen(length=2048, type='rsa', password=None,
username='jumpserver', hostname=None):
"""Generate user ssh private and public key
Use paramiko RSAKey generate it.
:return private key str and public key str
"""
if hostname is None:
hostname = os.uname()[1]
f = StringIO()
try:
if type == 'rsa':
private_key_obj = paramiko.RSAKey.generate(length)
elif type == 'dsa':
private_key_obj = paramiko.DSSKey.generate(length)
else:
raise IOError('SSH private key must be `rsa` or `dsa`')
private_key_obj.write_private_key(f, password=password)
private_key = f.getvalue()
public_key = ssh_pubkey_gen(private_key_obj, username=username, hostname=hostname)
return private_key, public_key
except IOError:
raise IOError('These is error when generate ssh key.')
def content_md5(data):
    """Return the Base64-encoded MD5 digest of *data* as bytes.

    The return value is suitable as the HTTP ``Content-MD5`` header.
    *data* may be a str (encoded as UTF-8), bytes, or an already
    constructed hash object exposing ``digest()``.
    """
    if isinstance(data, str):
        data = data.encode('utf-8')
    if isinstance(data, bytes):
        # The old code only handled str; raw bytes crashed on .digest().
        data = hashlib.md5(data)
    return base64.b64encode(data.digest())
_STRPTIME_LOCK = threading.Lock()
_GMT_FORMAT = "%a, %d %b %Y %H:%M:%S GMT"
_ISO8601_FORMAT = "%Y-%m-%dT%H:%M:%S.000Z"
def to_unixtime(time_string, format_string):
with _STRPTIME_LOCK:
return int(calendar.timegm(time.strptime(str(time_string), format_string)))
def http_date(timeval=None):
    """Return an HTTP-standard GMT date string for *timeval* (epoch seconds,
    defaults to now), e.g. ``Thu, 01 Jan 1970 00:00:00 GMT``.

    strftime cannot be used here because its output is locale dependent.
    """
    # Bug fix: formatdate was called but never imported anywhere in this
    # module, so the original raised NameError at runtime.
    from email.utils import formatdate
    return formatdate(timeval, usegmt=True)
def http_to_unixtime(time_string):
    """Convert an HTTP Date string to UNIX time (seconds since 1970-01-01 UTC).

    An HTTP Date looks like ``Sat, 05 Dec 2015 11:10:29 GMT``.
    """
    return to_unixtime(time_string, _GMT_FORMAT)
def iso8601_to_unixtime(time_string):
    """Convert an ISO8601 string (e.g. ``2012-02-24T06:07:48.000Z``) to UNIX time, in whole seconds."""
    return to_unixtime(time_string, _ISO8601_FORMAT)
def make_signature(access_key_secret, date=None):
    """Sign *access_key_secret* together with a GMT date string.

    *date* may be None (use the current time), an epoch int, or an
    already formatted GMT date as str/bytes.
    """
    if isinstance(date, bytes):
        date = date.decode("utf-8")
    if date is None:
        date_gmt = http_date(int(time.time()))
    elif isinstance(date, int):
        date_gmt = http_date(date)
    else:
        date_gmt = date
    payload = str(access_key_secret) + "\n" + date_gmt
    return content_md5(payload)
class TtyIOParser(object):
    """Replay raw terminal bytes through a virtual screen (pyte) to
    recover the commands a user typed and the output they saw."""

    def __init__(self, width=80, height=24):
        self.screen = pyte.Screen(width, height)
        self.stream = pyte.ByteStream()
        self.stream.attach(self.screen)
        self.ps1_pattern = re.compile(r'^\[?.*@.*\]?[\$#]\s|mysql>\s')

    def clean_ps1_etc(self, command):
        """Strip a leading shell/mysql prompt from *command*."""
        return self.ps1_pattern.sub('', command)

    def parse_output(self, data, sep='\n'):
        """
        Parse user command output
        :param data: output data list like, [b'data', b'data']
        :param sep: line separator
        :return: output unicode data
        """
        for chunk in data:
            self.stream.feed(chunk)
        rendered = [line for line in self.screen.display if line.strip()]
        self.screen.reset()
        # The last rendered line is the next prompt, not output: drop it.
        return sep.join(rendered[:-1])

    def parse_input(self, data):
        """
        Parse user input command
        :param data: input data list, like [b'data', b'data']
        :return: command unicode
        """
        for chunk in data:
            self.stream.feed(chunk)
        typed = [line.strip() for line in self.screen.display if line.strip()]
        command = typed[-1] if typed else ''
        self.screen.reset()
        return self.clean_ps1_etc(command)
def wrap_with_line_feed(s, before=0, after=1):
    """Surround *s* with CRLF sequences: *before* leading and *after* trailing,
    matching the input type (str or bytes)."""
    pad = b'\r\n' if isinstance(s, bytes) else '\r\n'
    return pad * before + s + pad * after
def wrap_with_color(text, color='white', background=None,
                    bolder=False, underline=False):
    """Wrap *text* in ANSI SGR escape codes.

    :param text: str or bytes to colorize; bytes input is decoded as
        UTF-8 and the result is returned as UTF-8 bytes again
    :param color: foreground color name (defaults to 'white')
    :param background: optional background color name
    :param bolder: emit the bold attribute
    :param underline: emit the underline attribute
    :return: colorized text, same type (str/bytes) as the input
    """
    bolder_ = '1'
    underline_ = '4'
    color_map = {
        'black': '30',
        'red': '31',
        'green': '32',
        'brown': '33',
        'blue': '34',
        'purple': '35',
        'cyan': '36',
        'white': '37',
    }
    background_map = {
        'black': '40',
        'red': '41',
        'green': '42',
        'brown': '43',
        'blue': '44',
        'purple': '45',
        'cyan': '46',
        'white': '47',
    }
    is_bytes = isinstance(text, bytes)
    if is_bytes:
        # Bug fix: the original concatenated bytes with str below, which
        # raises TypeError on Python 3 before its encode branch was ever
        # reached. Decode first so bytes input actually works.
        text = text.decode('utf-8')
    wrap_with = []
    if bolder:
        wrap_with.append(bolder_)
    if underline:
        wrap_with.append(underline_)
    if background:
        wrap_with.append(background_map.get(background, ''))
    wrap_with.append(color_map.get(color, ''))
    data = '\033[' + ';'.join(wrap_with) + 'm' + text + '\033[0m'
    if is_bytes:
        return data.encode('utf-8')
    return data
def wrap_with_warning(text, bolder=False):
    """Render *text* in red (warning/error style)."""
    return wrap_with_color(text, color='red', bolder=bolder)
def wrap_with_info(text, bolder=False):
    """Render *text* in brown (informational style)."""
    return wrap_with_color(text, color='brown', bolder=bolder)
def wrap_with_primary(text, bolder=False):
    """Render *text* in green (primary/success style)."""
    return wrap_with_color(text, color='green', bolder=bolder)
def wrap_with_title(text):
    """Render *text* black-on-green, used for title/header lines."""
    return wrap_with_color(text, color='black', background='green')
|
djrscally/eve-wspace | evewspace/Teamspeak/models.py | Python | gpl-3.0 | 1,834 | 0.002726 | # Eve W-Space
# Copyright (C) 2013 Andrew Austin and other contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, | either version 3 of the License, or
# (at your option) any | later version. An additional term under section
# 7 of the GPL is included in the LICENSE file.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django.contrib.auth.models import Group, User
# Create your models here.
class TeamspeakServer(models.Model):
    """Stores teamspeak server configuration."""
    # ServerQuery (admin API) connection settings.
    host = models.CharField(max_length=100)
    queryuser = models.CharField(max_length=100)
    querypass = models.CharField(max_length=100)
    queryport = models.IntegerField()
    # Voice port that clients connect to.
    voiceport = models.IntegerField()
    # If enforcegroups = True, any TS users who do not have a GroupMap entry will have no groups
    enforcegroups = models.BooleanField()
    # If enforceusers = True, any TS users without a Django user mapping will be removed
    # NOTE(review): the field name below is misspelled ("enforeceusers");
    # renaming it would require a schema migration, so it is left as-is.
    enforeceusers = models.BooleanField()
class GroupMap(models.Model):
    """Maps Django user groups to Teamspeak groups."""
    # Server this mapping applies to.
    tsserver = models.ForeignKey(TeamspeakServer, related_name="groupmaps")
    # Django auth group on one side of the mapping...
    usergroup = models.ForeignKey(Group, related_name="teamspeakgroups")
    # ...and the Teamspeak group identifier it maps to.
    tsgroup = models.CharField(max_length=100)
|
dc3-plaso/plaso | plaso/cli/helpers/viper_analysis.py | Python | apache-2.0 | 3,976 | 0.003773 | # -*- coding: utf-8 -*-
"""The Viper analysis plugin CLI arguments helper."""
from plaso.analysis import viper
from plaso.cli.helpers import interface
from plaso.cli.helpers import manager
from plaso.lib import errors
class ViperAnalysisArgumentsHelper(interface.ArgumentsHelper):
  """Viper analysis plugin CLI arguments helper."""
  NAME = u'viper_analysis'
  CATEGORY = u'analysis'
  DESCRIPTION = u'Argument helper for the Viper analysis plugin.'
  # Defaults used when the corresponding CLI option is omitted.
  _DEFAULT_HASH = u'sha256'
  _DEFAULT_HOST = u'localhost'
  _DEFAULT_PORT = 8080
  _DEFAULT_PROTOCOL = u'http'
  @classmethod
  def AddArguments(cls, argument_group):
    """Adds command line arguments the helper supports to an argument group.
    This function takes an argument parser or an argument group object and adds
    to it all the command line arguments this helper supports.
    Args:
      argument_group (argparse._ArgumentGroup|argparse.ArgumentParser):
          argparse group.
    """
    # Hash algorithm used to look up samples on the Viper server.
    argument_group.add_argument(
        u'--viper-hash', u'--viper_hash', dest=u'viper_hash', type=str,
        action='store', choices=viper.ViperAnalyzer.SUPPORTED_HASHES,
        default=cls._DEFAULT_HASH, metavar=u'HASH', help=(
            u'Type of hash to use to query the Viper server, the default is: '
            u'{0:s}. Supported options: {1:s}').format(
                cls._DEFAULT_HASH, u', '.join(
                    viper.ViperAnalyzer.SUPPORTED_HASHES)))
    argument_group.add_argument(
        u'--viper-host', u'--viper_host', dest=u'viper_host', type=str,
        action='store', default=cls._DEFAULT_HOST, metavar=u'HOST',
        help=(
            u'Hostname of the Viper server to query, the default is: '
            u'{0:s}'.format(cls._DEFAULT_HOST)))
    argument_group.add_argument(
        u'--viper-port', u'--viper_port', dest=u'viper_port', type=int,
        action='store', default=cls._DEFAULT_PORT, metavar=u'PORT', help=(
            u'Port of the Viper server to query, the default is: {0:d}.'.format(
                cls._DEFAULT_PORT)))
    argument_group.add_argument(
        u'--viper-protocol', u'--viper_protocol', dest=u'viper_protocol',
        type=str, choices=viper.ViperAnalyzer.SUPPORTED_PROTOCOLS,
        action='store', default=cls._DEFAULT_PROTOCOL, metavar=u'PROTOCOL',
        help=(
            u'Protocol to use to query Viper, the default is: {0:s}. '
            u'Supported options: {1:s}').format(
                cls._DEFAULT_PROTOCOL, u', '.join(
                    viper.ViperAnalyzer.SUPPORTED_PROTOCOLS)))
  @classmethod
  def ParseOptions(cls, options, analysis_plugin):
    """Parses and validates options.
    Args:
      options (argparse.Namespace): parser options.
      analysis_plugin (ViperAnalysisPlugin): analysis plugin to configure.
    Raises:
      BadConfigObject: when the output module object is of the wrong type.
      BadConfigOption: when unable to connect to Viper instance.
    """
    if not isinstance(analysis_plugin, viper.ViperAnalysisPlugin):
      raise errors.BadConfigObject(
          u'Analysis plugin is not an instance of ViperAnalysisPlugin')
    lookup_hash = cls._ParseStringOption(
        options, u'viper_hash', default_value=cls._DEFAULT_HASH)
    analysis_plugin.SetLookupHash(lookup_hash)
    host = cls._ParseStringOption(
        options, u'viper_host', default_value=cls._DEFAULT_HOST)
    analysis_plugin.SetHost(host)
    # NOTE(review): viper_port is declared with type=int in AddArguments but
    # parsed here with _ParseStringOption and an int default -- confirm this
    # helper tolerates non-string values.
    port = cls._ParseStringOption(
        options, u'viper_port', default_value=cls._DEFAULT_PORT)
    analysis_plugin.SetPort(port)
    protocol = cls._ParseStringOption(
        options, u'viper_protocol', default_value=cls._DEFAULT_PROTOCOL)
    # Normalize the protocol value before handing it to the plugin.
    protocol = protocol.lower().strip()
    analysis_plugin.SetProtocol(protocol)
    # Fail early if the configured Viper instance is unreachable.
    if not analysis_plugin.TestConnection():
      raise errors.BadConfigOption(
          u'Unable to connect to Viper {0:s}:{1:d}'.format(host, port))
manager.ArgumentHelperManager.RegisterHelper(ViperAnalysisArgumentsHelper)
|
gallifrey17/eden | modules/templates/Magnu/config.py | Python | mit | 14,481 | 0.016618 | # -*- coding: utf-8 -*-
try:
# Python 2.7
from collections import OrderedDict
except:
# Python 2.6
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon import current
from gluon.storage import Storage
def config(settings):
"""
Template settings for a hosted environment to allow NGOs to share data
Prototype being developed for CAR (Central African Republic)
"""
T = current.T
#settings.base.system_name = T("Magnu")
#settings.base.system_name_short = T("Magnu")
settings.base.system_name = T("Sahana")
settings.base.system_name_short = T("Sahana")
# PrePopulate data
settings.base.prepopulate = ("Magnu", "default/users")
# Theme (folder to use for views/layout.html)
settings.base.theme = "Magnu"
# Authentication settings
# Should users be allowed to register themselves?
#settings.security.self_registration = False
# Do new users need to verify their email address?
settings.auth.registration_requires_verification = True
# Do new users need to be approved by an administrator prior to being able to login?
#settings.auth.registration_requires_approval = True
settings.auth.registration_requests_organisation = True
# @ToDo:
#settings.auth.registration_requests_location = "L0"
# Approval emails get sent to all admins
settings.mail.approver = "ADMIN"
# Restrict the Location Selector to just certain countries
# NB This can also be over-ridden for specific contexts later
# e.g. Activities filtered to those of parent Project
settings.gis.countries = ("CF",) # Initially, will change
# Show LatLon boxes in the Location Selector
settings.gis.latlon_selector = True
# Uncomment to display the Map Legend as a floating DIV
settings.gis.legend = "float"
# Uncomment to open Location represent links in a Popup Window
settings.gis.popup_location_link = True
# Uncomment to Disable the Postcode selector in the LocationSelector
settings.gis.postcode_selector = False # @ToDo: Vary by country (include in the gis_config!)
# Uncomment to show the Print control:
# http://eden.sahanafoundation.org/wiki/UserGuidelines/Admin/MapPrinting
settings.gis.print_button = True
# L10n settings
# Languages used in the deployment (used for Language Toolbar & GIS Locations)
# http://www.loc.gov/standards/iso639-2/php/code_list.php
settings.L10n.languages = OrderedDict([
# ("ar", "العربية"),
# ("bs", "Bosanski"),
("en", "English"),
("fr", "Français"),
# ("de", "Deutsch"),
# ("el", "ελληνικά"),
# ("es", "Español"),
# ("it", "Italiano"),
# ("ja", "日本語"),
# ("km", "ភាសាខ្មែរ"),
# ("ko", "한국어"),
# ("ne", "नेपाली"), # Nepali
# ("prs", "دری"), # Dari
# ("ps", "پښتو"), # Pashto
# ("pt", "Português"),
# ("pt-br", "Português (Brasil)"),
# ("ru", "русский"),
# ("tet", "Tetum"),
# ("tl", "Tagalog"),
# ("ur", "اردو"),
# ("vi", "Tiếng Việt"),
# ("zh-cn", "中文 (简体)"),
# ("zh-tw", "中文 (繁體)"),
])
# Default language for Language Toolbar (& GIS Locations in future)
#settings.L10n.default_language = "en"
# Uncomment to Hide the language toolbar
#settings.L10n.display_toolbar = False
# @ToDO:These should be modified per-country
# Default timezone for users
settings.L10n.utc_offset = "+0100"
# Number formats (defaults to ISO 31-0)
# Decimal separator for numbers (defaults to ,)
#settings.L10n.decimal_separator = "."
# Thousands separator for numbers (defaults to space)
#settings.L10n.thousands_separator = ","
# Uncomment this to Translate Organisation Names/Acronyms
settings.L10n.translate_org_organisation = True
# Security Policy
# http://eden.sahanafoundation.org/wiki/S3AAA#System-widePolicy
# 1: Simple (default): Global as Reader, Authenticated as Editor
# 2: Editor role required for Update/Delete, unless record owned by session
# 3: Apply Controller ACLs
# 4: Apply both Controller & Function ACLs
# 5: Apply Controller, Function & Table ACLs
# 6: Apply Controller, Function, Table ACLs and Entity Realm
# 7: Apply Controller, Function, Table ACLs and Entity Realm + Hierarchy
# 8: Apply Controller, Function, Table ACLs, Entity Realm + Hierarchy and Delegations
#
#settings.security.policy = 7 # Organisation-ACLs
# Resources which can be directly added to the main map
settings.gis.poi_create_resources = \
(dict(c="event",
f="incident_report",
table="event_incident_report",
type="point",
label=T("Add Incident"),
layer="Incident Reports",
),
#dict(c="gis",
# f="poi",
# table="gis_poi",
# type="polygon",
# label=T("Add Area"),
# layer="Areas",
# ),
#dict(c="gis",
# f="poi",
# table="gis_poi",
# type="line",
# label=T("Add Route"),
# layer="Routes",
# ),
)
# RSS feeds
#settings.frontpage.rss = [
# {"title": "Eden",
# # Trac timeline
# "url": "http://eden.sahanafoundation.org/timeline?ticket=on&changeset=on&milestone=on&wiki=on&max=50&daysback=90&format=rss"
# },
# {"title": "Twitter",
# # @SahanaFOSS
# #"url": "https://search.twitter.com/search.rss?q=from%3ASahanaFOSS" # API v1 deprecated, so doesn't work, need to use 3rd-party service, like:
# "url": "http://www.rssitfor.me/getrss?name=@SahanaFOSS"
# # Hashtag
# #url: "http://search.twitter.com/search.atom?q=%23eqnz" # API v1 deprecated, so doesn't work, need to use 3rd-party service, like:
# #url: "http://api2.socialmention.com/search?q=%23eqnz&t=all&f=rss"
# }
#]
settings.org.groups = "Coalition / Consortium"
# @ToDo: Once we go global
# Enable the use of Organisation Branches
#settings.org.branches = True
# Show branches as tree rather than as table
#settings.org.branches_tree_view = True
#settings.org.autocomplete = True
# Uncomment this to allow multiple site contacts per site (e.g. if needing a separate contact per sector)
settings.hrm.site_contact_unique = False
# -----------------------------------------------------------------------------
def customise_org_organisation_resource(r, tablename):
s3db = current.s3db
table = s3db[tablename]
list_fields = s3db.get_config(tablename, "list_fields")
list_fields.insert(2, (T("French Name"), "name.name_l10n"))
list_fields.insert(4, (T("French Acronym"), "name.acronym_l10n"))
settings.customise_org_organisation_resource = customise_org_organisation_resource
# -----------------------------------------------------------------------------
# Comment/uncomment modules here to disable/enable them
# Modules menu is defined in modules/eden/menu.py
settings.modules = OrderedDict([
# Core modules which shouldn't be disabled
("default", Storage(
name_nice = T("Home"),
restricted = False, # Use ACLs to control access to this module
access = None, # All Users (inc Anonymous) can see this module in the default menu & access the controller
module_type = None # This item is not shown in the menu
)),
("admin", Storage(
name_nice = T("Adm | inistration"),
#description = "Site Administration",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is ha | ndled separately for the menu
)),
("appadmin", Storage(
name_nice = T("Administration"),
#description = "Site Administration",
restricted = True,
module_type = None # No Menu
|
okoala/sublime-bak | Backup/20150720091700/Babel/Babel.py | Python | mit | 1,796 | 0.023942 | import sublime
import sublime_plugin
import json
from os.path import dirname, realpath, join
from .node_bridge import node_bridge
# monkeypatch `Region` to be iterable
# `start, end = region` unpacking (used in BabelCommand.has_selection)
# relies on these two patches.
sublime.Region.totuple = lambda self: (self.a, self.b)
sublime.Region.__iter__ = lambda self: self.totuple().__iter__()
# Absolute path to the bundled Node helper script that runs Babel.
BIN_PATH = join(sublime.packages_path(), dirname(realpath(__file__)), 'babel-transform.js')
class BabelCommand(sublime_plugin.TextCommand):
    """Run the current selection (or the whole buffer) through Babel and
    open the transformed code in a new scratch view."""

    def run(self, edit):
        source = self.get_text()
        transformed = self.babelify(source)
        if not transformed:
            return
        scratch = sublime.Window.new_file(self.view.window())
        scratch.settings().set('default_extension', 'js')
        scratch.set_syntax_file(self.view.settings().get('syntax'))
        scratch.set_scratch(True)
        scratch.insert(edit, 0, transformed)

    def babelify(self, data):
        """Transform *data* via the node bridge; on failure, return the
        error text instead."""
        bridge_config = {
            'filename': self.view.file_name(),
            'debug': self.get_setting('debug'),
            'ensure_newline_at_eof': self.get_setting('ensure_newline_at_eof'),
            'use_local_babel': self.get_setting('use_local_babel'),
            'node_modules': self.get_setting('node_modules'),
            'options': self.get_setting('options')
        }
        try:
            return node_bridge(data, BIN_PATH, [json.dumps(bridge_config)])
        except Exception as e:
            return str(e)

    def get_text(self):
        """Return the concatenated selections, or the whole buffer when
        nothing is selected."""
        if not self.has_selection():
            whole_buffer = sublime.Region(0, self.view.size())
            return self.view.substr(whole_buffer)
        pieces = []
        for region in self.view.sel():
            pieces.append(self.view.substr(region) + '\n')
        return ''.join(pieces)

    def has_selection(self):
        """True when at least one selection region is non-empty."""
        # Region unpacking works via the module-level monkeypatch.
        return any(start != end for start, end in self.view.sel())

    def get_setting(self, key):
        """Project-level 'Babel' setting, falling back to the plugin's
        settings file."""
        settings = self.view.settings().get('Babel')
        if settings is None:
            settings = sublime.load_settings('Babel.sublime-settings')
        return settings.get(key)
|
Drewsif/OpenVPN-Config-Generator | OpenVPNConfig.py | Python | mpl-2.0 | 14,441 | 0.002978 | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
from __future__ import division, absolute_import, print_function, unicode_literals
import OpenSSL
import uuid
import random
import os
import json
import subprocess
import sys
# Digest algorithm name handed to pyOpenSSL's sign(); on Python 2 the
# library expects a byte string.
sha256 = 'sha256'
if sys.version_info[:1] == (2,):
    # Python 2 compatibility: prompt with raw_input and use a bytes digest name.
    input = raw_input
    sha256 = b'sha256'
def create_ca(size=2048, valid=315360000, CN=None):
    """
    Creates a CA key and cert; returns (X509 cert, PKey key).
    size - The RSA key size to be used
    valid - The time in seconds the cert should be valid for
            (default 315360000 = 10 years)
    CN - The CN to be used for the cert. None will create a UUID
    """
    if CN is None:
        CN = 'CA-'+str(uuid.uuid4())
    key = OpenSSL.crypto.PKey()
    key.generate_key(OpenSSL.crypto.TYPE_RSA, size)
    ca = OpenSSL.crypto.X509()
    # Version value 2 means an X509 v3 certificate (zero-based field).
    ca.set_version(2)
    #ca.set_serial_number(1)
    ca.get_subject().CN = CN
    ca.gmtime_adj_notBefore(0)
    ca.gmtime_adj_notAfter(valid)
    # Self-signed: issuer and subject are the same.
    ca.set_issuer(ca.get_subject())
    ca.set_pubkey(key)
    ca.add_extensions([
        OpenSSL.crypto.X509Extension(b"basicConstraints", False, b"CA:TRUE"),
        OpenSSL.crypto.X509Extension(b"keyUsage", False, b"keyCertSign, cRLSign"),
        OpenSSL.crypto.X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=ca)
    ])
    # Added in a second call so authorityKeyIdentifier can reference the
    # subjectKeyIdentifier attached above.
    ca.add_extensions([
        OpenSSL.crypto.X509Extension(b"authorityKeyIdentifier", False, b"keyid:always",issuer=ca)
    ])
    ca.sign(key, sha256)
    return ca, key
def create_cert(is_server, cacert, cakey, size=2048, valid=315360000, CN=None):
    """
    Creates a client/server key and cert; returns (X509 cert, PKey key).
    is_server - Must be True for a server, False for a client
    cacert - The OpenSSL.crypto.X509 object of the CA
    cakey - The OpenSSL.crypto.PKey object of the CA
    Optional:
    size - The RSA key size to be used
    valid - The time in seconds the cert should be valid for
            (default 315360000 = 10 years)
    CN - The CN to be used for the cert. None will create a UUID
    """
    if CN is None:
        if is_server:
            CN='server-'+str(uuid.uuid4())
        else:
            CN = 'client-'+str(uuid.uuid4())
    key = OpenSSL.crypto.PKey()
    key.generate_key(OpenSSL.crypto.TYPE_RSA, size)
    cert = OpenSSL.crypto.X509()
    # Version value 2 means an X509 v3 certificate (zero-based field).
    cert.set_version(2)
    cert.set_serial_number(random.randint(1, 99999999))
    cert.get_subject().CN = CN
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(valid)
    # Issued (signed) by the CA passed in.
    cert.set_issuer(cacert.get_subject())
    cert.set_pubkey(key)
    # Server and client certs differ in keyUsage, extendedKeyUsage and
    # nsCertType so OpenVPN can verify the peer's role.
    if is_server:
        cert.add_extensions([
            OpenSSL.crypto.X509Extension(b"basicConstraints", False, b"CA:FALSE"),
            OpenSSL.crypto.X509Extension(b"keyUsage", False, b"digitalSignature,keyEncipherment"),
            OpenSSL.crypto.X509Extension(b"extendedKeyUsage", False, b"serverAuth"),
            OpenSSL.crypto.X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=cert),
            OpenSSL.crypto.X509Extension(b"authorityKeyIdentifier", False, b"keyid:always",issuer=cacert),
            OpenSSL.crypto.X509Extension(b"nsCertType", False, b"server")
        ])
    else:
        cert.add_extensions([
            OpenSSL.crypto.X509Extension(b"basicConstraints", False, b"CA:FALSE"),
            OpenSSL.crypto.X509Extension(b"keyUsage", False, b"digitalSignature"),
            OpenSSL.crypto.X509Extension(b"extendedKeyUsage", False, b"clientAuth"),
            OpenSSL.crypto.X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=cert),
            OpenSSL.crypto.X509Extension(b"authorityKeyIdentifier", False, b"keyid:always",issuer=cacert),
            OpenSSL.crypto.X509Extension(b"nsCertType", False, b"client")
        ])
    cert.sign(cakey, sha256)
    return cert, key
def gen_dhparams(size=2048):
    """
    Generate Diffie Hellman parameters by calling openssl. Returns a string.
    I don't like doing it like this but pyopenssl doesn't seem to
    have a way to do this natively.
    size - The size of the prime to generate.
    """
    import tempfile
    # Use a unique temp file instead of a fixed 'dh.tmp' name so
    # concurrent runs cannot clobber each other.
    fd, tmp_path = tempfile.mkstemp(suffix='.pem')
    os.close(fd)
    cmd = ['openssl', 'dhparam', '-out', tmp_path, str(size)]
    try:
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError:
            # Sometimes we get a non-zero exit code, no idea why...
            print('Calling of openssl failed... Trying again')
            subprocess.check_call(cmd)
        with open(tmp_path) as dh:
            return dh.read()
    finally:
        # Always clean up the temp file, even if openssl failed twice.
        if os.path.exists(tmp_path):
            os.remove(tmp_path)
def gen_tlsauth_key():
    """Generate an openvpn secret key by calling openvpn. Returns a string."""
    import tempfile
    # Unique temp file instead of a fixed 'ta.tmp' name (race-safe), and
    # guaranteed cleanup even when the openvpn call fails.
    fd, tmp_path = tempfile.mkstemp(suffix='.key')
    os.close(fd)
    cmd = ['openvpn', '--genkey', '--secret', tmp_path]
    try:
        subprocess.check_call(cmd)
        with open(tmp_path) as key_file:
            return key_file.read()
    finally:
        if os.path.exists(tmp_path):
            os.remove(tmp_path)
def _create_server_conf(name, confdict, port, cacert, serverkey, servercert, tls_auth=False, dh_params=None, path='.'):
if dh_params is None:
dh_params = gen_dhparams()
serverfile = open(os.path.join(path, name+'_server.ovpn'), 'w')
for key, value in confdict['both'].items():
if value is False:
continue
elif value is True:
serverfile.write(key + '\n')
elif isinstance(value, list):
for v in value:
serverfile.write(key + ' ' + v + '\n')
else:
serverfile.write(key + ' ' + value + '\n')
for key, value in confdict['server'].items():
if value is False:
continue
elif value is True:
serverfile.write(key + '\n')
elif isinstance(value, list):
for v in value:
serverfile.write(key + ' ' + v + '\n')
else:
serverfile.write(key + ' ' + value + '\n')
serverfile.write('port ' + port + '\n')
if 'meta' in confdict:
if confdict['meta'].get('embedkeys', True):
serverfile.write('<ca>\n'+cacert+'</ca>\n')
serverfile.write('<key>\n'+serverkey+'</key>\n')
serverfile.write('<cert>\n'+servercert+'</cert>\n')
serverfile.write('<dh>\n'+dh_params+'</dh>\n')
if tls_auth is not False:
serverfile.write('key-direction 0\n')
serverfile.write('<tls-auth>\n'+tls_auth+'</tls-auth>\n')
def _create_client_conf(name, confdict, host, port, cacert, clientkey, clientcert, tls_auth=False, path='.'):
clientfile = open(os.path.join(path, name+'_client.ovpn'), 'w')
clientfile.write('client\n')
clientfile.write('remote ' + host + ' ' + port + '\n')
for key, value in confdict['both'].items():
if value is False:
continue
elif value is True:
clientfile.write(key + '\n')
elif isinstance(value, list):
for v in value:
clientfile.write(key + ' ' + v + '\n')
else:
clientfile.write(key + ' ' + value + '\n')
for key, value in confdict['client'].items():
if value is False:
continue
elif value is True:
clientfile.write(key + '\n')
elif isinstance(value, list):
for v in value:
clientfile.write(key + ' ' + v + '\n')
else:
clientfile.write(key + ' ' + value + '\n')
if 'meta' in confdict:
if confdict['meta'].get('embedkeys', True):
clientfile.write('<ca>\n'+cacert+'</ca>\n')
clientfile.write('<key>\n'+clientkey+'</key>\n')
clientfile.write('<cert>\n'+clientcert+'</cert>\n')
if tls_auth is not False:
clientfile.write('key-direction 1\n')
clientfile.write('<tls-auth>\n'+tls_auth+'</tls-auth>\n')
def create_confs(name, confdict, path='.', host=None, port=None):
"""
Creates the client and server configs.
name - The name of the run which is prepended to the config file names
confdict - A dictionary representing the config parameters.
"""
if host is None:
host = str(input("Enter Hostname/IP: ")).rstrip()
if port is None:
port = str(input("Enter port number: ")).rstrip()
tls_auth = False
keysize = None
dhsize = None
if 'meta' in c |
plamut/ggrc-core | src/ggrc/models/option.py | Python | apache-2.0 | 765 | 0.013072 | # Copyright (C) 2 | 017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from ggrc import db
from ggrc.models.deferred import deferred
from ggrc.models.mixins import Base, Described
class Option(Described, Base, db.Model):
  """Lookup/dropdown option value, grouped by ``role``."""
  __tablename__ = 'options'
  # Grouping key that identifies which dropdown/list this option belongs to.
  role = db.Column(db.String)
  # TODO: inherit from Titled mixin (note: title is nullable here)
  title = deferred(db.Column(db.String), 'Option')
  # Whether this option is mandatory within its role group.
  required = deferred(db.Column(db.Boolean), 'Option')
  def __str__(self):
    return self.title
  @staticmethod
  def _extra_table_args(cls):
    # Index speeds up the frequent filter-by-role lookups.
    return (
        db.Index('ix_options_role', 'role'),
        )
  # Attributes exposed through the JSON publish layer.
  _publish_attrs = [
      'role',
      'title',
      'required',
      ]
  # Attributes whose values are HTML-sanitized on input.
  _sanitize_html = [
      'title',
      ]
|
vanant/googleads-dfa-reporting-samples | python/v2.0/get_user_role_permissions.py | Python | apache-2.0 | 2,441 | 0.004097 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example displays all of the available subaccount permissions.
To get a subaccount ID, run get_subaccounts.py.
Tags: subaccounts.get, userRolePermissions.list
"""
__author__ = ('api.jimper@gmail.com (Jonathon Imperiosi)')
import argparse
import sys
from apiclient import sample_tools
from oauth2client import client
# Declare command-lin | e flags.
# add_help=False because this parser is passed via parents=[...] to the
# sample_tools parser in main(), which supplies its own --help.
argparser = argparse.ArgumentParser(add_help=False)
argparser.add_argument(
    'profile_id', type=int,
    help='The ID of the profile to list permissions for')
argparser.add_argument(
    'subaccount_id', type=int,
    help='The ID of the subaccount to list permissions for')
def main(argv):
  """Authenticates, then lists every permission available to a subaccount."""
  # Authenticate and construct service.
  service, flags = sample_tools.init(
      argv, 'dfareporting', 'v2.0', __doc__, __file__, parents=[argparser],
      scope=['https://www.googleapis.com/auth/dfareporting',
             'https://www.googleapis.com/auth/dfatrafficking'])
  try:
    # Look up the subaccount to find the permission IDs available to it.
    subaccount = service.subaccounts().get(
        profileId=flags.profile_id, id=flags.subaccount_id).execute()
    # Fetch the user role permissions matching those IDs and print them.
    permissions = service.userRolePermissions().list(
        profileId=flags.profile_id,
        ids=subaccount['availablePermissionIds']).execute()
    for permission in permissions['userRolePermissions']:
      print ('Found user role permission with ID %s and name "%s".'
             % (permission['id'], permission['name']))
  except client.AccessTokenRefreshError:
    print ('The credentials have been revoked or expired, please re-run the '
           'application to re-authorize')
main(sys.argv)
|
132nd-etcher/EMFT | emft/plugins/reorder/value/output_folder.py | Python | gpl-3.0 | 1,091 | 0 | # coding=utf-8
import typing
from collections import MutableMapping
from emft.core.logging import make_logger
from emft.core.path import Path
from emft.core.singleton import Singleton
LOGGER = make_logger(__name__)
# noinspection PyAbstractClass
class OutputFolder(Path):
    """Marker subclass: a Path used as an EMFT output folder (adds no behavior)."""
    pass
class OutputFolders(MutableMapping, metaclass=Singleton):
    """Singleton mapping of folder names to OutputFolder instances."""

    # Class-level pointers to the currently selected output folder.
    ACTIVE_OUTPUT_FOLDER = None
    ACTIVE_OUTPUT_FOLDER_NAME = None

    def __init__(self, init_dict: dict = None):
        self._data = init_dict if init_dict else dict()

    def __getitem__(self, key) -> OutputFolder:
        return self._data[key]

    def __setitem__(self, key, value: OutputFolder):
        self._data[key] = value

    def __delitem__(self, key):
        del self._data[key]

    def __iter__(self) -> typing.Iterator[str]:
        return iter(self._data)

    def __len__(self) -> int:
        return len(self._data)

    def values(self) -> typing.List[OutputFolder]:
        return list(self._data.values())

    @property
    def data(self) -> dict:
        """The underlying name -> OutputFolder dict."""
        return self._data
|
baverman/apns-client | apnsclient/transport.py | Python | apache-2.0 | 27,323 | 0.00377 | # Copyright 2014 Sardar Yumatov
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import binascii
import datetime
from struct import unpack
# python 3 support
import six
__all__ = ('Session', 'Connection')
# module level logger
LOG = logging.getLogger(__name__)
class Session(object):
""" The front-end for the underlying connection pool. """
# Default APNs addresses.
ADDRESSES = {
"push_sandbox": ("gateway.sandbox.push.apple.com", 2195),
"push_production": ("gateway.push.apple.com", 2195),
"feedback_sandbox": ("feedback.sandbox.push.apple.com", 2196),
"feedback_production": ("feedback.push.apple.com", 2196),
}
# Default timeout for attempting a new connection.
DEFAULT_CONNECT_TIMEOUT = 10
# Default write buffer size. Should be close to MTU size.
DEFAULT_WRITE_BUFFER_SIZE = 2048
# Default timeout for write operations.
DEFAULT_WRITE_TIMEOUT = 20
# Default read buffer size, used by feedback.
DEFAULT_READ_BUFFER_SIZE = 2048
# Default timeout for read operations.
DEFAULT_READ_TIMEOUT = 20
# Default timeout waiting for error response at the end message send operation.
DEFAULT_READ_TAIL_TIMEOUT = 3
    def __init__(self, pool="apnsclient.backends.stdio",
                 connect_timeout=DEFAULT_CONNECT_TIMEOUT,
                 write_buffer_size=DEFAULT_WRITE_BUFFER_SIZE,
                 write_timeout=DEFAULT_WRITE_TIMEOUT,
                 read_buffer_size=DEFAULT_READ_BUFFER_SIZE,
                 read_timeout=DEFAULT_READ_TIMEOUT,
                 read_tail_timeout=DEFAULT_READ_TAIL_TIMEOUT,
                 **pool_options):
        """ The front-end to the underlying connection pool. The purpose of this
            class is to hide the transport implementation that is being used for
            networking. Default implementation uses built-in python sockets and
            ``select`` for asynchronous IO.

            :Arguments:
                - pool (str, type or object): networking layer implementation,
                  given as a dotted module path, a Backend class, or an
                  already constructed backend instance.
                - connect_timeout (float): timeout for new connections.
                - write_buffer_size (int): chunk size for sending the message.
                - write_timeout (float): maximum time to send single chunk in seconds.
                - read_buffer_size (int): feedback buffer size for reading.
                - read_timeout (float): timeout for reading single feedback block.
                - read_tail_timeout (float): timeout for reading status frame after message is sent.
                - pool_options (kwargs): passed as-is to the pool class on instantiation.
        """
        # IO defaults
        self.connect_timeout = connect_timeout
        self.write_buffer_size = write_buffer_size
        self.write_timeout = write_timeout
        self.read_buffer_size = read_buffer_size
        self.read_timeout = read_timeout
        self.read_tail_timeout = read_tail_timeout

        # Backend given as a dotted module path: import it and look up its
        # Backend class. __import__ returns the top-level package, so the
        # remaining attribute path is walked manually.
        if isinstance(pool, six.string_types):
            pool_module = __import__(pool)
            for name in pool.split('.')[1:]:
                try:
                    pool_module = getattr(pool_module, name)
                except AttributeError:
                    raise ImportError("Can't load pool backend", pool)

            try:
                pool = getattr(pool_module, "Backend")
            except AttributeError:
                raise ImportError("Can't find Backend class in pool module", pool)

        # resolved or given as class: instantiate with the extra options
        if isinstance(pool, type):
            pool = pool(**pool_options)

        self.pool = pool
        if LOG.isEnabledFor(logging.DEBUG):
            LOG.debug("New session, WB: %sb/%ss, RB: %sb/%ss, TT: %ss, Pool: %s",
                      write_buffer_size, write_timeout,
                      read_buffer_size, read_timeout,
                      read_tail_timeout,
                      pool.__class__.__module__)
@classmethod
def get_address(cls, address):
""" Maps address to (host, port) tuple. """
if not isinstance(address, (list, tuple)):
addr = cls.ADDRESSES.get(address)
if addr is None:
raise ValueError("Unknown address mapping: {0}".format(address))
address = addr
return address
def new_connection(self, address="feedback_sandbox", certificate=None, **cert_params):
""" Obtain new connection to APNs. This method will not re-use existing
connection from the pool. The connection will be closed after use.
Unlike :func:`get_connection` this method does not c | ache the
connection. Use it to fetch feedback from APNs and then close when
you are done.
:Arguments:
- address (str or tuple): target address.
- | certificate (:class:`BaseCertificate`): provider's certificate instance.
- cert_params (kwargs): :class:`BaseCertificate` arguments, used if ``certificate`` instance is not given.
"""
if certificate is not None:
cert = certificate
else:
cert = self.pool.get_certificate(cert_params)
address = self.get_address(address)
return Connection(address, cert, self, use_cache=False)
def get_connection(self, address="push_sanbox", certificate=None, **cert_params):
""" Obtain cached connection to APNs.
Session caches connection descriptors, that remain open after use.
Caching saves SSL handshaking time. Handshaking is lazy, it will be
performed on first message send.
You can provide APNs address as ``(hostname, port)`` tuple or as
one of the strings:
- push_sanbox -- ``("gateway.sandbox.push.apple.com", 2195)``, the default.
- push_production -- ``("gateway.push.apple.com", 2195)``
- feedback_sandbox -- ``("feedback.sandbox.push.apple.com", 2196)``
- feedback_production -- ``("feedback.push.apple.com", 2196)``
:Arguments:
- address (str or tuple): target address.
- certificate (:class:`BaseCertificate`): provider's certificate instance.
- cert_params (kwargs): :class:`BaseCertificate` arguments, used if ``certificate`` instance is not given.
"""
if certificate is not None:
cert = certificate
else:
cert = self.pool.get_certificate(cert_params)
address = self.get_address(address)
return Connection(address, cert, self, use_cache=True)
def outdate(self, delta):
""" Close open unused connections in the pool that are left untouched
for more than ``delta`` time.
You may call this method in a separate thread or run it in some
periodic task. If you don't, then all connections will remain open
until session is shut down. It might be an issue if you care about
your open server connections.
:Arguments:
delta (``timedelta``): maximum age of unused connection.
"""
if LOG.isEnabledFor(logging.DEBUG):
if delta.total_seconds() == 0.0:
LOG.debug("Shutdown session")
else:
LOG.debug("Outdating session with delta: %s", delta)
self.pool.outdate(delta)
def shutdown(self):
""" Shutdown all connections in the pool. This method does will not close
connections being use at the calling time. |
hunter-cameron/Bioinformatics | python/checkm_select_bins.py | Python | mit | 1,283 | 0.005456 |
import argparse
import pandas
parser = argparse.ArgumentParser(description="Subsets a checkm tab-separated outfile to include only entries that have the specified completeness/contamination level")
parser.add_argument("-checkm", help="the checkm out file", required=True)
parser.add_argument("-completeness", help="completeness value")
parser.add_argument("-comp_metric", help="the comparison to completeness to select [%(default)s]", choices=["<", "<=", "=", ">=", ">"], default=">=")
parser.add_argument("-contamination", help="contamination value")
parser.add_argument("-cont_metric", help="the comparison to contamination to select [%(default)s]", choices=["<", "<=", "=", ">=", ">"], default="<=")
parser.add_argument("-out", help="the output checkm tsv output [%(default)s]", default="checkm_subset.tsv")
args = parser.parse_args()
df = pandas.read_csv(args.checkm, se | p="\t", header=0, index_col=0)
if args.completeness:
df = df.query("Completeness {metric} {value}".format(metric=args.comp_metric, value=args.completeness))
if args.contamination:
df = df.query("Contamination {metric} {value}".format(me | tric=args.cont_metric, value=args.contamination))
df = df.sort_index(0, 'Completeness', ascending=False)
df.to_csv(args.out, sep="\t", index_label="Bin Id")
|
NaturalSolutions/NS.Bootstrap | Back/ecoreleve_server/Views/__init__.py | Python | mit | 2,874 | 0.013918 |
### test if the match url is integer
def integers(*segment_names):
def predicate(info, request):
match = info['match']
for segment_name in segment_names:
try:
print (segment_names)
match[segment_name] = int(match[segment_name])
if int(match[segment_name]) == 0 :
print(' ****** ACTIONS FORMS ******')
return False
except (TypeError, ValueError):
return False
return True
return predicate
def add_routes(config):
config.add_route('weekData', 'ecoReleve-Sensor/weekData')
##### Security routes #####
config.add_route('security/login', 'ecoReleve-Core/security/login')
config.add_route('security/logout', 'ecoReleve-Core/security/logout')
config.add_route('security/has_access', 'ecoReleve-Core/security/has_access')
##### User #####
config.add_route('core/user', 'ecoReleve-Core/user')
config.add_route('core/currentUser', 'ecoReleve-Core/currentUser')
##### Monitored sites #####
config.add_route('monitoredSites', 'ecoReleve-Core/monitoredSites')
config.add_route('monit | oredSites/id', 'ecoReleve-Core/monitoredSites/{id}')
##### Stations ##### |
config.add_route('area', 'ecoReleve-Core/area')
config.add_route('locality', 'ecoReleve-Core/locality')
config.add_route('stations', 'ecoReleve-Core/stations/')
config.add_route('stations/id', 'ecoReleve-Core/stations/{id}',custom_predicates = (integers('id'),))
config.add_route('stations/action', 'ecoReleve-Core/stations/{action}')
config.add_route('stations/id/protocols', 'ecoReleve-Core/stations/{id}/protocols',custom_predicates = (integers('id'),))
config.add_route('stations/id/protocols/obs_id', 'ecoReleve-Core/stations/{id}/protocols/{obs_id}',custom_predicates = (integers('id'),))
##### Protocols #####
# config.add_route('protocols', 'ecoReleve-Core/protocols')
config.add_route('protocols', 'ecoReleve-Core/protocols/')
config.add_route('protocols/id', 'ecoReleve-Core/protocols/{id}',custom_predicates = (integers('id'),))
config.add_route('protocols/action', 'ecoReleve-Core/protocols/{action}')
config.add_route('observation/id', 'ecoReleve-Core/observations/{id}')
config.add_route('observation', 'ecoReleve-Core/observations')
##### Protocols types #####
config.add_route('protocolTypes', 'ecoReleve-Core/protocolTypes')
##### Sensors datas (Argos + GSM + RFID) #####
config.add_route('sensors/datas', 'ecoReleve-Sensor/{type}/datas')
config.add_route('sensors/id/datas', 'ecoReleve-Sensor/{type}/{id}/datas')
##### Sensors caracteristics(Argos + GSM + RFID) #####
config.add_route('sensors', 'ecoReleve-Sensor/{type}')
config.add_route('sensors/id', 'ecoReleve-Sensor/{type}/{id}')
|
rento19962/cadquery | tests/TestImporters.py | Python | lgpl-3.0 | 1,871 | 0.006948 | """
Tests file importers such as STEP
"""
#core modules
import StringIO
from cadquery import *
from cadquery import exporters
from cadquery import importers
from tests import BaseTest
#where unit test output will be saved
import sys
if sys.platform.startswith("win"):
OUTDIR = "c:/temp"
else:
OUTDIR = "/tmp"
class TestImporters(BaseTest):
def importBox(self, importType, fileName):
"""
Exports a simple box to a STEP file and t | hen imports it again
:param importType: The type of file we're importing (STEP, STL, etc)
:param fileName: The path and name of the file to write to
"""
#We're importing a STEP file
if importType == importers.ImportTypes.STEP:
#We first need to build a simple shape to export
shape = | Workplane("XY").box(1, 2, 3).val()
#Export the shape to a temporary file
shape.exportStep(fileName)
# Reimport the shape from the new STEP file
importedShape = importers.importShape(importType,fileName)
#Check to make sure we got a solid back
self.assertTrue(importedShape.val().ShapeType() == "Solid")
#Check the number of faces and vertices per face to make sure we have a box shape
self.assertTrue(importedShape.faces("+X").size() == 1 and importedShape.faces("+X").vertices().size() == 4)
self.assertTrue(importedShape.faces("+Y").size() == 1 and importedShape.faces("+Y").vertices().size() == 4)
self.assertTrue(importedShape.faces("+Z").size() == 1 and importedShape.faces("+Z").vertices().size() == 4)
def testSTEP(self):
"""
Tests STEP file import
"""
self.importBox(importers.ImportTypes.STEP, OUTDIR + "/tempSTEP.step")
if __name__ == '__main__':
import unittest
unittest.main()
|
hds-lab/textvisdrg | msgvis/apps/groups/models.py | Python | mit | 2,224 | 0.004946 | from django.db import models
from msgvis.apps.corpus import utils
from msgvis.apps.corpus import models as corpus_models
from msgvis.apps.enhance import models as enhance_models
from django.contrib.auth.models import User
import operator
from django.utils import timezone
class Group(models.Model):
"""
A group of messages, created by inclusive and exclusive keywords.
"""
owner = models.ForeignKey(User, default=None, null=True)
"""User"""
order = models.IntegerField(default=0)
"""The order in a user's group set"""
name = models.CharField(max_length=250, default=None, blank=True)
"""The group name."""
dataset = models.ForeignKey(corpus_models.Dataset, related_name='groups')
"""Which :class:`corpus_models.Dataset` this group belongs to"""
created_at = models.DateTimeField(auto_now_add=True)
"""The group created time"""
keywords = models.TextField(default="", blank=True)
"""keywords for including / excluding messages."""
include_types = models.ManyToManyField(corpus_models.MessageType, null=True, blank=True, default=None)
"""include tweets/retweets/replies"""
is_search_record = models.BooleanField(default=False)
deleted = models.BooleanField(default=False)
#messages = models.ManyToManyField(corpus_models.Message, null=True, blank=True, default=None, related_name='groups')
#"""The set of :class:`corpus_models.Message` that belong to this group."""
@property
def messages(self):
return self.dataset.get_advanced_search_results(self.keywords | , self.include_types.all())
@property
def message_count(self):
return self.messages.count()
def __repr__(self):
return self.name
def __unicode__(self):
return self.__repr__()
class | ActionHistory(models.Model):
"""
A model to record history
"""
owner = models.ForeignKey(User, default=None, null=True)
created_at = models.DateTimeField(default=timezone.now, db_index=True)
"""Created time"""
from_server = models.BooleanField(default=False)
type = models.CharField(max_length=100, default="", blank=True, db_index=True)
contents = models.TextField(default="", blank=True) |
ateliedocodigo/py-healthcheck | healthcheck/__init__.py | Python | mit | 310 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from functools import reduce # noqa
except Excepti | on:
pass
try:
from .tornado_handler import TornadoHandler # noqa
except ImportError:
pass
from .environmentdump import Environment | Dump # noqa
from .healthcheck import HealthCheck # noqa
|
gautes/TrollCoinCore | contrib/devtools/copyright_header.py | Python | mit | 22,402 | 0.005089 | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import re
import fnmatch
import sys
import subprocess
import datetime
import os
################################################################################
# file filtering
################################################################################
EXCLUDE = [
# libsecp256k1:
'src/secp256k1/include/secp256k1.h',
'src/secp256k1/include/secp256k1_ecdh.h',
'src/secp256k1/include/secp256k1_recovery.h',
'src/secp256k1/include/secp256k1_schnorr.h',
'src/secp256k1/src/java/org_trollcoin_NativeSecp256k1.c',
'src/secp256k1/src/java/org_trollcoin_NativeSecp256k1.h',
'src/secp256k1/src/java/org_trollcoin_Secp256k1Context.c',
'src/secp256k1/src/java/org_trollcoin_Secp256k1Context.h',
# auto generated:
'src/univalue/lib/univalue_escapes.h',
'src/qt/trollcoinstrings.cpp',
'src/chainparamsseeds.h',
# other external copyrights:
'src/tinyformat.h',
'src/leveldb/util/env_win.cc',
'src/crypto/ctaes/bench.c',
'test/functional/test_framework/bignum.py',
# python init:
'*__init__.py',
]
EXCLUDE_COMPILED = re.compile('|'.join([fnmatch.translate(m) for m in EXCLUDE]))
INCLUDE = ['*.h', '*.cpp', '*.cc', '*.c', '*.py']
INCLUDE_COMPILED = re.compile('|'.join([fnmatch.translate(m) for m in INCLUDE]))
def applies_to_file(filename):
return ((EXCLUDE_COMPILED.match(filename) is None) and
(INCLUDE_COMPILED.match(filename) is not None))
################################################################################
# obtain list of files in repo according to INCLUDE and EXCLUDE
################################################################################
GIT_LS_CMD = 'git ls-files'
def call_git_ls():
out = subprocess.check_output(GIT_LS_CMD.split(' '))
return [f for f in out.decode("utf-8").split('\n') if f != '']
def get_filenames_to_examine():
filenames = call_git_ls()
return sorted([filename for filename in filenames if
applies_to_file(filename)])
################################################################################
# define and compile regexes for the patterns we are looking for
################################################################################
COPYRIGHT_WITH_C = 'Copyright \(c\)'
COPYRIGHT_WITHOUT_C = 'Copyright'
ANY_COPYRIGHT_STYLE = '(%s|%s)' % (COPYRIGHT_WITH_C, COPYRIGHT_WITHOUT_C)
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
YEAR_LIST = '(%s)(, %s)+' % (YEAR, YEAR)
ANY_YEAR_STYLE = '(%s|%s)' % (YEAR_RANGE, YEAR_LIST)
ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE = ("%s %s" % (ANY_COPYRIGHT_STYLE,
ANY_YEAR_STYLE))
ANY | _COPYRIGHT_COMPILED = re.compile(ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE)
def compile_copyright_regex(copyright_style, year_style, name):
return re.compile('%s %s %s' % (copyright_style, year_style, name))
EXPECTED_HOLDER_ | NAMES = [
"Satoshi Nakamoto\n",
"The Trollcoin Core developers\n",
"The Trollcoin Core developers \n",
"Trollcoin Core Developers\n",
"the Trollcoin Core developers\n",
"The Trollcoin developers\n",
"The LevelDB Authors\. All rights reserved\.\n",
"BitPay Inc\.\n",
"BitPay, Inc\.\n",
"University of Illinois at Urbana-Champaign\.\n",
"MarcoFalke\n",
"Pieter Wuille\n",
"Pieter Wuille +\*\n",
"Pieter Wuille, Gregory Maxwell +\*\n",
"Pieter Wuille, Andrew Poelstra +\*\n",
"Andrew Poelstra +\*\n",
"Wladimir J. van der Laan\n",
"Jeff Garzik\n",
"Diederik Huys, Pieter Wuille +\*\n",
"Thomas Daede, Cory Fields +\*\n",
"Jan-Klaas Kollhof\n",
"Sam Rushing\n",
"ArtForz -- public domain half-a-node\n",
]
DOMINANT_STYLE_COMPILED = {}
YEAR_LIST_STYLE_COMPILED = {}
WITHOUT_C_STYLE_COMPILED = {}
for holder_name in EXPECTED_HOLDER_NAMES:
DOMINANT_STYLE_COMPILED[holder_name] = (
compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_RANGE, holder_name))
YEAR_LIST_STYLE_COMPILED[holder_name] = (
compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_LIST, holder_name))
WITHOUT_C_STYLE_COMPILED[holder_name] = (
compile_copyright_regex(COPYRIGHT_WITHOUT_C, ANY_YEAR_STYLE,
holder_name))
################################################################################
# search file contents for copyright message of particular category
################################################################################
def get_count_of_copyrights_of_any_style_any_holder(contents):
return len(ANY_COPYRIGHT_COMPILED.findall(contents))
def file_has_dominant_style_copyright_for_holder(contents, holder_name):
match = DOMINANT_STYLE_COMPILED[holder_name].search(contents)
return match is not None
def file_has_year_list_style_copyright_for_holder(contents, holder_name):
match = YEAR_LIST_STYLE_COMPILED[holder_name].search(contents)
return match is not None
def file_has_without_c_style_copyright_for_holder(contents, holder_name):
match = WITHOUT_C_STYLE_COMPILED[holder_name].search(contents)
return match is not None
################################################################################
# get file info
################################################################################
def read_file(filename):
return open(os.path.abspath(filename), 'r').read()
def gather_file_info(filename):
info = {}
info['filename'] = filename
c = read_file(filename)
info['contents'] = c
info['all_copyrights'] = get_count_of_copyrights_of_any_style_any_holder(c)
info['classified_copyrights'] = 0
info['dominant_style'] = {}
info['year_list_style'] = {}
info['without_c_style'] = {}
for holder_name in EXPECTED_HOLDER_NAMES:
has_dominant_style = (
file_has_dominant_style_copyright_for_holder(c, holder_name))
has_year_list_style = (
file_has_year_list_style_copyright_for_holder(c, holder_name))
has_without_c_style = (
file_has_without_c_style_copyright_for_holder(c, holder_name))
info['dominant_style'][holder_name] = has_dominant_style
info['year_list_style'][holder_name] = has_year_list_style
info['without_c_style'][holder_name] = has_without_c_style
if has_dominant_style or has_year_list_style or has_without_c_style:
info['classified_copyrights'] = info['classified_copyrights'] + 1
return info
################################################################################
# report execution
################################################################################
SEPARATOR = '-'.join(['' for _ in range(80)])
def print_filenames(filenames, verbose):
if not verbose:
return
for filename in filenames:
print("\t%s" % filename)
def print_report(file_infos, verbose):
print(SEPARATOR)
examined = [i['filename'] for i in file_infos]
print("%d files examined according to INCLUDE and EXCLUDE fnmatch rules" %
len(examined))
print_filenames(examined, verbose)
print(SEPARATOR)
print('')
zero_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] == 0]
print("%4d with zero copyrights" % len(zero_copyrights))
print_filenames(zero_copyrights, verbose)
one_copyright = [i['filename'] for i in file_infos if
i['all_copyrights'] == 1]
print("%4d with one copyright" % len(one_copyright))
print_filenames(one_copyright, verbose)
two_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] == 2]
print("%4d with two copyrights" % len(two_copyrights))
print_filenames(two_copyrights, verbose)
three_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] == 3]
print("%4d with three copyrights" % len(three_copyrights))
print_filenames(three_copyrights, verbose)
four_or_more_copy |
napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global_/graceful_restart/state/__init__.py | Python | apache-2.0 | 26,895 | 0.00119 | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/mpls/signaling-protocols/rsvp-te/global/graceful-restart/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State information associated with
RSVP graceful-restart
"""
__slots__ = (
"_path_helper", "_extmethods", "__enable", "__restart_time", "__recovery_time"
)
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__enable = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="enable",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__restart_time = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="restart-time",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=False,
)
self.__recovery_time = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4 | 294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="recovery-time",
parent=self,
pa | th_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"mpls",
"signaling-protocols",
"rsvp-te",
"global",
"graceful-restart",
"state",
]
def _get_enable(self):
"""
Getter method for enable, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/graceful_restart/state/enable (boolean)
YANG Description: Enables graceful restart on the node.
"""
return self.__enable
def _set_enable(self, v, load=False):
"""
Setter method for enable, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/graceful_restart/state/enable (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable() directly.
YANG Description: Enables graceful restart on the node.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="enable",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """enable must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__enable = t
if hasattr(self, "_set"):
self._set()
def _unset_enable(self):
self.__enable = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="enable",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_restart_time(self):
"""
Getter method for restart_time, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/graceful_restart/state/restart_time (uint32)
YANG Description: Graceful restart time (seconds).
"""
return self.__restart_time
def _set_restart_time(self, v, load=False):
"""
Setter method for restart_time, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/graceful_restart/state/restart_time (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_restart_time is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_restart_time() directly.
YANG Description: Graceful restart time (seconds).
"""
if hasattr(v, "_utype"):
v = v._utyp |
EoRImaging/katalogss | katalogss/fhd_pype.py | Python | bsd-2-clause | 7,360 | 0.016576 | '''
Read and manipulate FHD output in Python.
'''
import os
import numpy as np
from scipy.io import readsav
from astropy.io import fits
global _fhd_base
_fhd_base = '/nfs/eor-09/r1/djc/EoR2013/Aug23/'
def fhd_base():
'Return path to FHD output directory.'
global _fhd_base
return _fhd_base
def set_fhd_base(path):
'''
Set the FHD output directory returned by `fhd_base` method.
Parameters
----------
path: string
The path to the FHD output directory
'''
global _fhd_base
if not path.endswith('/'): path+='/'
_fhd_base = path
if not os.path.exists(_fhd_base): raise ValueError,'%s does not exist. Use `set_fhd_base` to update.'%_fhd_base
return
def get_obslist(fhd_run):
'''
Get the list of obsids with deconvolution ouput.
Parameters
----------
fhd_run: string
The name identifier of the FHD run, e.g. \'pac_decon_eor1_June2016\'.
'''
decon_dir='%sfhd_%s/deconvolution/'%(fhd_base(),fhd_run)
obs=os.listdir(decon_dir)
obs=[o[:10] for o in obs if o.endswith('fhd.sav')]
obs.sort()
return obs
def read_sourcelist(fhdsav, tag='component_array'):
'''
Get the list of obsids with deconvolution ouput.
Parameters
----------
fhdsav: string fhd_run: string
Full path to the IDL save file containing a source list structure.
tag: string, optional
The tag name of the source list in the IDL structure. Defaults to \'component_array\'.
'''
cat = readsav(fhdsav)[tag]
items = [cat.id, cat.x, cat.y, cat.ra, cat.dec,
np.vstack(c | at.flux).T['i'][0],
cat.gain,cat.alpha, cat.freq, cat.flag]
items | = [item.astype(np.float64) for item in items]
cat = dict(zip(['id','x','y','ra','dec','flux','gain','alpha','freq',
'flag'],items))
return cat
def gen_cal_cat(cat, freq=180., alpha=-0.8, file_path='catalog.sav'):
'''
Generate IDL structure and save file from `katalogss` catalog dict for input to FHD.
Parameters
----------
cat: dict
The source catalog. Keys (\'ra\', \'dec\', \'flux\') are required. Keys (\'alpha\', \'freq\') are optional.
freq: float, optional
Frequency (MHz) assigned only if \'freq\' is not in `cat`. Defaults to 180.
alpha: float, optional
Spectral index assigned only if \'alpha\' is not in `cat`. Defaults to -0.8.
file_path: string, optional
File path passed to generate_calibration_catalog.pro. Defaults to \'catalog.sav\'.
'''
cat_keys=cat.keys()
cat['id']=np.argsort(cat['flux'])[::-1]
nsrcs = len(cat['id'])
if 'alpha' in cat_keys:pass
else: cat['alpha'] = alpha*np.ones(nsrcs)
if 'freq' in cat_keys:pass
else: cat['freq']=float(freq)*np.ones(nsrcs)
import pidly
idl=pidly.IDL()
idl('!Quiet=1')
idl.id=cat['id']
idl.ra=cat['ra']
idl.dec=cat['dec']
idl.n=len(cat['id'])
idl.freq=cat['freq']
idl.alpha=cat['alpha']
idl.fluxi=cat['flux']
idl('sl=source_comp_init(n_sources=n,ra=ra,dec=dec,freq=freq,flux=fluxi,alpha=alpha,id=id)')
idl('generate_calibration_catalog,sl,file_path=\'%s\''%file_path)
idl.close()
return cat
def fetch_comps(fhd_run, obsids=None):
'''
Return the FHD deconvolved source components.
Parameters
----------
fhd_run: string
The name identifier of the FHD run, e.g. \'pac_decon_eor1_June2016\'.
obsids: list-like, optional
Obsids (as strings) to fetch data from. Defaults to all deconvolved.
'''
decon_dir='%sfhd_%s/deconvolution/'%(fhd_base(),fhd_run)
meta_dir='%sfhd_%s/metadata/'%(fhd_base(),fhd_run)
if obsids is None: obsids = get_obslist(decon_dir)
comps={}
for o in obsids:
print 'Fetching compoonent array from %s_fhd.sav'%o
fhdsav=decon_dir+o+'_fhd.sav'
comps[o] = read_sourcelist(fhdsav)
return comps
def fetch_meta(fhd_run, obsids=None):
'''
Return meta data needed for the FHD deconvolved source components.
Parameters
----------
fhd_run: string
The name identifier of the FHD run, e.g. \'pac_decon_eor1_June2016\'.
obsids: list-like, optional
Obsids (as strings) to fetch data from. Defaults to all deconvolved.
'''
decon_dir='%sfhd_%s/deconvolution/'%(fhd_base(),fhd_run)
meta_dir='%sfhd_%s/metadata/'%(fhd_base(),fhd_run)
if obsids is None: obsids = fp.get_obslist(decon_dir)
meta = {'clustered':False}
for o in obsids:
params = readsav(decon_dir+o+'_fhd_params.sav')['fhd_params']
metaobs = readsav('%s%s_obs.sav'%(meta_dir,o))['obs']
meta[o] = {'n_iter':params.n_iter[0],'det_thresh':params.detection_threshold[0],'beam_thresh':params.beam_threshold[0],'max_bl':metaobs.max_baseline[0],'freq':metaobs.freq_center[0],'degpix':metaobs.degpix[0]}
meta[o]['beam_width'] = meta[o]['max_bl']**-1 * 180./np.pi
return meta
def pixarea_maps(fhd_run, obsids=None, map_dir='area_maps/', recalculate=False):
'''
Return the pixel area maps and cache locally.
Parameters
----------
fhd_run: string
The name identifier of the FHD run, e.g. \'pac_decon_eor1_June2016\'.
obsids: list-like, optional
Obsids (as strings) to fetch data from. Defaults to all deconvolved.
map_dir: string, optional
The directory in which to cache the area maps. Defaults to \'area_maps/\'.
recalculate: bool, optional
If `True` and `map_dir` exists, re-run the IDL code and re-cache.
'''
if not os.path.exists(map_dir): os.system('mkdir %s'%map_dir)
if not map_dir.endswith('/'): map_dir += '/'
if obsids is None: obsids = fp.get_obslist(decon_dir)
calcobs = [o for o in obsids if recalculate or not os.path.exists(map_dir+o+'_amap.fits')]
if len(calcobs)>0:
import pidly
idl=pidly.IDL()
idl.fhddir='%sfhd_%s/'%(fhd_base(),fhd_run)
idl.mapdir = map_dir
for o in calcobs:
idl.obsid=o
commands = ['!quiet=1','!except=0','restore,fhddir+\'metadata/\'+obsid+\'_obs.sav\'',
'area_map=pixel_area(obs)',
'beam_width = 1/obs.max_baseline',
'beam_area = !pi*beam_width^2/(4*alog(2))',
'area_map /= beam_area',
'writefits, mapdir+obsid+\'_amap.fits\',area_map']
for c in commands:idl(c)
idl.close()
amaps=dict([(o,fits.open(map_dir+o+'_amap.fits')[0].data) for i,o in enumerate(obsids)])
return amaps
def get_maps(fhd_run, obsids, imtype):
'''
Return a dictionary of image maps for given obsids. Images are `astropy.fits.HDU` objects with `header` and `data` attributes.
Parameters
---------
fhd_run: string
The name identifier of the FHD run, e.g. \'pac_decon_eor1_June2016\'.
obsids: list-like
Obsids (as strings) to data from.
imtype: string
Specifies the image type as found in the fits file name, e.g. \'uniform_Residual_I\'.
'''
map_dir='%sfhd_%s/output_data/'%(fhd_base(),fhd_run)
imgs = {}
for o in obsids:
fname = map_dir+o+'_'+imtype+'.fits'
if os.path.exists(fname):
hdu = fits.open(fname)[0]
imgs[o] = hdu
else: imgs[o] = None
return imgs
|
tonitran/dvc-embedded | runConfigs.py | Python | agpl-3.0 | 107 | 0.018692 | #Constants
PROD = 'prod'
LOCAL = 'local'
NOPI = ' | nopi'
#Set configs here
ENV = PROD
loggingEn | abled = True
|
legaultmarc/grstools | setup.py | Python | mit | 2,558 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# How to build source distribution
# python setup.py sdist --format bztar
# python setup.py sdist --format gztar
# python setup.py sdist --format zip
import os
from setuptools import setup, find_packages
MAJOR = 0
MINOR = 4
MICRO = 0
VERSION = "{}.{}.{}".format(MAJOR, MINOR, MICRO)
def write_version_file(fn=None):
if fn is None:
fn = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
os.path.join("grstools", "version.py"),
)
content = (
"\n# THIS FILE WAS GENERATED AUTOMATICALLY BY GRSTOOLS SETUP.PY\n"
'grstools_version = "{version}"\n'
)
a = open(fn, "w")
try:
a.write(content.format(version=VERSION))
finally:
a.close()
def setup_package():
# Saving the version into a file
write_version_file()
setup(
name="grstools",
version=VERSION,
description="Tools to manipulate genetic risk scores.",
long_description="",
author=u"Marc-André Legault",
author_email="legaultmarc@gmail.com",
url="https://github.com/legaultmarc/grstools",
license="MIT",
packages=find_packages(exclude=["tests", ]),
package_data={"geneparse.tests": ["data/*"]},
test_suite="grstools.tests.test_suite",
entry_points={
"console_scripts": [
"grs-evaluate=grstools.scripts.evaluate:main",
"grs-compute=grstools.scripts.build_grs:main",
"grs-utils=grstools.scripts.utils:main",
"grs-create=grstools.scripts.choose_snps:main",
"grs-mr=grstools.scripts.mendelian_randomization:main",
],
},
install_requires=["geneparse >= 0.1.0", "genetest | >= 0.1.0",
"matplotlib >= 2.0", "scipy >= 0.18"],
classifiers=["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research | ",
"Operating System :: Unix",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering :: Bio-Informatics"],
keywords="bioinformatics genomics grs genetic risk score",
zip_safe=False,
)
if __name__ == "__main__":
setup_package()
|
Edzvu/Edzvu.github.io | APNSWrapper-0.6.1/APNSWrapper/connection.py | Python | mit | 7,014 | 0.013687 | # Copyright 2009 Max Klymyshyn, Sonettic
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import socket
import subprocess
from apnsexceptions import *
from utils import *
class APNSConnectionContext(object):
certificate = None
def __init__(self, certificate = None):
self.certificate = certificate
def connect(self, host, port):
raise APNSNotImplementedMethod, "APNSConnectionContext.connect ssl method not implemented in context"
def write(data = None):
raise APNSNotImplementedMethod, "APNSConnectionContext.write method not implemented"
def read(self):
raise APNSNotImplementedMethod, "APNSConnectionContext.read method not implemented"
def close(self):
raise APNSNotImplementedMethod, "APNSConnectionContext.close method not implemented"
class OpenSSLCommandLine(APNSConnectionContext):
"""
This class execute and send data with openssl command line tool
"""
certificate = None
host = None
port = None
executable = None
debug = False
def __init__(self, certificate = None, executable = None, debug = False):
self.certificate = certificate
self.executable = executable
self.debug = debug
def connect(self, host, port):
self.host = host
self.port = port
def _command(self):
command = "%(executable)s s_client -ssl3 -cert %(cert)s -connect %(host)s:%(port)s" % \
{
'executable' : self.executable,
'cert' : self.certificate,
'host' : self.host,
'port' : self.port
}
return subprocess.Popen(command.split(' '), shell=False, bufsize=256, \
stdin=subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
def write(self, data = None):
pipe = self._command()
std_in = pipe.stdin
std_in.write(data)
std_in.flush()
std_in.close()
std_out = pipe.stdout
if self.debug:
print "-------------- SSL Debug Output --------------"
print command
print "----------------------------------------------"
print std_out.read()
std_out.close()
pipe.wait()
def read(self, blockSize = 1024):
"""
There is method to read data from feedback service.
WARNING! It's not tested and doesn't work yet!
"""
pipe = self._command()
std_out = pipe.stdout
data = std_out.read()
#pipe.wait()
std_out.close()
return data
def context(self):
return self
def close(self):
pass
class SSLModuleConnection(APNSConnectionContext):
"""
This is class which implement APNS connection based on
"ssl" module.
"""
socket = None
certificate = None
connectionContext = None
ssl_module = None
def __init__(self, certificate = None, ssl_module = None):
self.socket = None
self.connectionContext = None
self.certificate = certificate
self.ssl_module = ssl_module
def context(self):
"""
Initialize SSL context.
"""
if self.connectionContext != None:
return self
self.socket = socket.socket()
self.connectionContext = self.ssl_module.wrap_socket(
self.socket,
ssl_version = self.ssl_module.PROTOCOL_TLSv1,
certfile = self.certificate
)
return self
def certificate(self, path):
self.certificate = path
return self
def read(self, blockSize = 1024):
"""
Make connection to the host and port.
"""
return self.connectionContext.read(blockSize)
def write(self, data = None):
"""
Make connection to the host and port.
"""
self.connectionContext.write(data)
def connect(self, host, port):
"""
Make connection to the host and port.
"""
self.connectionContext.connect((host, port))
def close(self):
"""
Close connection.
"""
self.connectionContext.close()
self.socket.close()
class APNSConnection(APNSConnectionContext):
"""
APNSConnection wrap SSL connection to the Apple Push Notification Server.
"""
debug = False
connectionCo | ntext = None
def __init__(self, certificate = None,
ssl_command = "openssl",
force_ssl_command = False,
| disable_executable_search = False,
debug = False):
self.connectionContext = None
self.debug = debug
if not os.path.exists(str(certificate)):
raise APNSCertificateNotFoundError, "Apple Push Notification Service Certificate file %s not found." % str(certificate)
try:
if force_ssl_command:
raise ImportError, "There is force_ssl_command forces command line tool"
# use ssl library to handle secure connection
import ssl as ssl_module
self.connectionContext = SSLModuleConnection(certificate, ssl_module = ssl_module)
except:
# use command line openssl tool to handle secure connection
if not disable_executable_search:
executable = find_executable(ssl_command)
else:
executable = ssl_command
if not executable:
raise APNSNoCommandFound, "SSL Executable [%s] not found in your PATH environment" % str(ssl_command)
self.connectionContext = OpenSSLCommandLine(certificate, executable, debug = debug)
self.certificate = str(certificate)
def connect(self, host, port):
"""
Make connection to the host and port.
"""
self.context().connect(host, port)
return self
def certificate(self, path):
self.context().certificate(path)
return self
def write(self, data = None):
self.context().write(data)
def read(self, blockSize = 1024):
return self.context().read(blockSize)
def context(self):
if not self.connectionContext:
raise APNSNoSSLContextFound, "There is no SSL context available in your python environment."
return self.connectionContext.context()
def close(self):
"""
Close connection.
"""
self.context().close()
|
mcnowinski/various-and-sundry | lightcurve/super.calibrate.py | Python | mit | 10,853 | 0.018336 | #
#calibrate.py
#
#calibrate fits images using darks, flats, and bias frames
#corrected image = (image - bias - k(dark-bias))/flat
#for k=1, i.e. image exp = dark exp, corrected image = (image - dark)/flat
import os
import glob
import math
import subprocess
import re
import sys
import datetime
import shutil
from decimal import Decimal
from astropy.io import fits
from astropy import wcs
from astropy import log
log.setLevel('ERROR')
from astropy import units as u
import ccdproc
import numpy as np
def logme( str ):
log.write(str + "\n")
print str
return
#MODIFY THESE FIELDS AS NEEDED!
#input path *with* ending forward slash
input_path='./'
#output path *with* ending forward slash
output_path='./calibrated/'
#log file name
log_fname = 'log.calibrate.txt'
#suffix for output files, if any...
output_suffix='.calibrated'
#used in master calibration filenames
date_suffix = datetime.datetime.now().strftime('%Y%m%d.%H%M%S')
#master bias frame
#folder with bias component frames *including* ending forward slash
bias_path='./bias/'
bias_master = 'mbias.' + date_suffix + '.fits'
#master dark frame
#folder with dark component frames *including* ending forward slash
dark_path='./dark/'
dark_is | _bias_corrected = False
dark_bias = None
dark_master = 'mdark.' + date_suffix + '.fits'
#master flat frame
#folder with bias component frames *inc | luding* ending forward slash
flat_path='./flat/'
flat_is_bias_corrected = False
flat_bias = None
flat_is_dark_corrected = False
flat_dark = None
flat_ave_exptime = 0
flat_master = 'mflat.' + date_suffix + '.fits'
#name of exposure variable in FITS header file
exposure_label='EXPTIME'
log=open(log_fname, 'a+')
#trim image? set range here, or set to '' to disable
trim_range = ''
if(len(sys.argv) == 5):
naxis1_start = int(sys.argv[1])
naxis1_end = int(sys.argv[2])
naxis2_start = int(sys.argv[3])
naxis2_end = int(sys.argv[4])
trim_range = '[%d:%d, %d:%d]'%(naxis1_start, naxis1_end, naxis2_start, naxis2_end) #starts at 1, inclusive
logme('Trimming images to NAXIS1=%d to %d, NAXIS2=%d to %d.'%(naxis1_start, naxis1_end, naxis2_start, naxis2_end))
#does output directory exist? If not, create it
try:
os.mkdir(output_path)
except:
pass
#bias
#create master bias frame
im=glob.glob(bias_path+'*.fits')+glob.glob(bias_path+'*.fit')
if(len(im) <= 0):
logme('Error. Bias calibration frame(s) not found (%s).' % bias_path)
log.close()
sys.exit(-1)
biases = None
for i in range(0,len(im)):
if(biases):
biases += ','+im[i]
else:
biases = im[i]
#if there is just one, make it two of the same for the combine!
if (len(im) == 1):
biases += ','+im[0]
bias_path += 'master/'
try:
os.mkdir(bias_path)
except:
pass
bias_path += bias_master
logme('Creating master bias frame (%s)...'%(bias_path))
bias = ccdproc.combine(biases, method='median', unit='adu', add_keyword=False)
#trim it, if necessary
if(len(trim_range) > 0):
bias = ccdproc.trim_image(bias, trim_range);
#write master frame to file
hdulist = bias.to_hdu()
hdulist.writeto(bias_path, clobber=True)
#dark
#create master dark frame
im=glob.glob(dark_path+'*.fits')+glob.glob(dark_path+'*.fit')
if(len(im) <= 0):
logme('Error. Dark calibration frame(s) not found (%s).' % dark_path)
log.close()
sys.exit(-1)
darks = None
bias_header = None
for i in range(0,len(im)):
#is (any) dark bias corrected?
header = fits.getheader(im[i])
if(header.get('BIAS') != None):
dark_is_bias_corrected = True
dark_bias = header.get('BIAS')
elif(header.get('BIASCORR') != None):
dark_is_bias_corrected = True
dark_bias = header.get('BIASCORR')
if(darks):
darks += ','+im[i]
else:
darks = im[i]
#if there is just one, make it two of the same for the combine!
if (len(im) == 1):
darks += ','+im[0]
dark_path += 'master/'
try:
os.mkdir(dark_path)
except:
pass
dark_path += dark_master
logme('Creating master dark frame (%s)...'%(dark_path))
dark = ccdproc.combine(darks, method='median', unit='adu', add_keyword=False, **{'verify': 'ignore'})
#trim it, if necessary
if(len(trim_range) > 0):
dark = ccdproc.trim_image(dark, trim_range);
#bias correct, if necessary
if(not dark_is_bias_corrected):
#logme('Subtracting master bias frame from master dark frame...')
dark = ccdproc.subtract_bias(dark, bias, add_keyword=False)
dark_bias = bias_master
else:
logme('Master dark frame is *already* bias corrected (%s).'%dark_bias)
#write master dark frame
hdulist = dark.to_hdu()
#add bias correction to header
header=hdulist[0].header
header['BIASCORR'] = dark_bias
hdulist.writeto(dark_path, clobber=True)
#flat
#create master flat frame
im=glob.glob(flat_path+'*.fits')+glob.glob(flat_path+'*.fit')
if(len(im) <= 0):
logme('Error. Flat calibration frame(s) not found (%s).' % flat_path)
log.close()
sys.exit(-1)
flats = None
count = 0
flat_corrected = None
#check a few things in these flat component frames
for i in range(0,len(im)):
header = fits.getheader(im[i])
#is this flat bias corrected?
if(header.get('BIAS') != None):
flat_is_bias_corrected = True
flat_bias = header.get('BIAS')
elif(header.get('BIASCORR') != None):
flat_is_bias_corrected = True
flat_bias = header.get('BIASCORR')
#is this flat dark corrected?
if(header.get('DARK') != None):
flat_is_dark_corrected = True
flat_dark = header.get('DARK')
elif(header.get('DARKCORR') != None):
flat_is_dark_corrected = True
flat_dark = header.get('DARKCORR')
flat_corrected = "%s"%(im[i].rsplit('.',1)[0])+".corrected"
shutil.copy(im[i], flat_corrected)
#trim as necessary
if(len(trim_range) > 0):
flat = ccdproc.CCDData.read(flat_corrected, unit='adu', relax=True)
flat = ccdproc.trim_image(flat, trim_range)
hdulist = flat.to_hdu()
hdulist.writeto(flat_corrected, clobber=True)
#bias correct, if necessary
if(not flat_is_bias_corrected):
#logme('Subtracting master bias frame from flat frame...')
flat = ccdproc.CCDData.read(flat_corrected, unit='adu', relax=True)
#trim it, if necessary
#if(len(trim_range) > 0):
# flat = ccdproc.trim_image(flat, trim_range);
#flat = ccdproc.subtract_bias(flat, bias, add_keyword=False)
hdulist = flat.to_hdu()
#add bias correction to header
header=hdulist[0].header
header['BIASCORR'] = flat_bias
hdulist.writeto(flat_corrected, clobber=True)
flat_bias = bias_master
else:
logme('Flat frame (%s) is *already* bias corrected (%s).'%(im[i],flat_bias))
#dark correct, if necessary
if(not flat_is_dark_corrected):
#logme('Subtracting master dark frame from flat frame...')
flat = ccdproc.CCDData.read(flat_corrected, unit='adu', relax=True)
##trim it, if necessary
#if(len(trim_range) > 0):
# flat = ccdproc.trim_image(flat, trim_range);
flat = ccdproc.subtract_dark(flat, dark, scale=True, exposure_time=exposure_label, exposure_unit=u.second, add_keyword=False)
hdulist = flat.to_hdu()
#add bias correction to header
header=hdulist[0].header
header['DARKCORR'] = dark_bias
hdulist.writeto(flat_corrected, clobber=True)
flat_dark = dark_master
else:
logme('Flat frame (%s) is *already* dark corrected (%s).'%(im[i],flat_dark) )
if(flats):
flats += ','+flat_corrected
else:
flats = flat_corrected
#calc average exposure time for potential dark correction
if(header.get('EXPTIME') != None):
#print header.get('EXPTIME')
try:
exptime = float(header.get('EXPTIME'))
flat_ave_exptime += exptime
except ValueError:
logme('Exposure time (EXPTIME) is not a float (%s).'%(header.get('EXPTIME')))
count += 1
#calc average exposure time
#if(count > 0):
# flat_ave_exptime = flat_ave_exptime/count
# flat.header['E |
h2oai/sparkling-water | py/tests/unit/with_runtime_sparkling/test_mojo_parameters.py | Python | apache-2.0 | 6,596 | 0.002729 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pysparkling.ml import H2OGBM, H2ODRF, H2OXGBoost, H2OGLM, H2OGAM, H2OCoxPH
from pysparkling.ml import H2ODeepLearning, H2OKMeans, H2OIsolationForest
from pysparkling.ml import H2OAutoEncoder, H2OPCA, H2OGLRM, H2ORuleFit, H2OWord2Vec
def testGBMParameters(prostateDataset):
features = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2OGBM(seed=1, labelCol="CAPSULE", featuresCols=features, monotoneConstraints={'AGE': 1, 'RACE': -1})
model = algorithm.fit(prostateDataset)
compareParameterValues(algorithm, model)
def testDRFParameters(prostateDataset):
features = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2ODRF(seed=1, labelCol="CAPSULE", featuresCols=features, classSamplingFactors=[.2, .8, 1])
model = algorithm.fit(prostateDataset)
compareParameterValues(algorithm, model)
def testXGBoostParameters(prostateDataset):
features = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2OXGBoost(seed=1, labelCol="CAPSULE", featuresCols=features,
monotoneConstraints={'AGE': 1, 'RACE': -1},
interactionConstraints=[['AGE', 'RACE', 'DPROS'], ['DCAPS', 'PSA']])
model = algorithm.fit(prostateDataset)
compareParameterValues(algorithm, model)
def testGLMParameters(prostateDataset):
features = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2OGLM(seed=1, labelCol="CAPSULE", alphaValue=[0.5], lambdaValue=[0.5], maxIterations=30,
objectiveEpsilon=0.001, gradientEpsilon=0.001, objReg=0.001, maxActivePredictors=3000,
lambdaMinRatio=0.001, featuresCols=features)
model = algorithm.fit(prostateDataset)
compareParameterValues(algorithm, model)
def testGAMParameters(prostateDataset):
features = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2OGAM(seed=1, labelCol="CAPSULE", gamCols=[["PSA"], ["AGE"]], numKnots=[5, 5], lambdaValue=[0.5],
featuresC | ols=features, bs=[1, 1], scale=[0.5, 0.5])
model = algorithm.fit(prostateDataset)
compareParameterValues(algorithm, model, ["getFeaturesCols"])
def testDeepLearningParameters(prostateDataset):
features = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2ODeepLearning(seed=1, labelCol="CAPSULE", featuresCols=features, reproducible=True)
model = algorithm.fit(prostateDataset)
compareParameterValues(algorithm, model)
def testKmeansParameters(prostateDataset):
featu | res = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2OKMeans(seed=1, featuresCols=features)
model = algorithm.fit(prostateDataset)
compareParameterValues(algorithm, model)
def testIsolationForestParameters(prostateDataset):
features = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2OIsolationForest(seed=1, sampleRate=0.5, featuresCols=features)
model = algorithm.fit(prostateDataset)
compareParameterValues(algorithm, model)
def testCoxPHParameters(heartDataset):
features = ['age', 'year', 'surgery', 'transplant', 'start', 'stop']
algorithm = H2OCoxPH(labelCol="event", featuresCols=features, startCol='start', stopCol='stop')
model = algorithm.fit(heartDataset)
compareParameterValues(algorithm, model)
def testRuleFitParameters(prostateDataset):
features = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2ORuleFit(seed=1, labelCol="CAPSULE", featuresCols=features)
model = algorithm.fit(prostateDataset)
ingored = ["getFeaturesCols"]
compareParameterValues(algorithm, model, ingored)
def testAutoEncoderParameters(prostateDataset):
features = ["RACE", "DPROS", "DCAPS"]
algorithm = H2OAutoEncoder(seed=1, inputCols=features, reproducible=True, hidden=[3,])
model = algorithm.fit(prostateDataset)
compareParameterValues(algorithm, model)
def testPCAParameters(prostateDataset):
features = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2OPCA(seed=1, inputCols=features, k=3)
model = algorithm.fit(prostateDataset)
ignored = ["getPcaImpl"] # PUBDEV-8217: Value of pca_impl isn't propagated to MOJO models
compareParameterValues(algorithm, model, ignored)
def testGLRMParameters(prostateDataset):
features = ['AGE', 'RACE', 'DPROS', 'DCAPS', 'PSA']
algorithm = H2OGLRM(seed=1, inputCols=features, k=3)
model = algorithm.fit(prostateDataset)
compareParameterValues(algorithm, model)
def testWord2VecParameters(spark):
algorithm = H2OWord2Vec(vecSize=11, windowSize=2, sentSampleRate=0.002, normModel="HSM", epochs=5, minWordFreq=1,
initLearningRate=0.01, wordModel="CBOW", outputCol="someOutputCol", inputCol="someInputCol")
dataset = spark.sparkContext.parallelize([[["a", "b", "c"]], [["c", "b", "a"]]]).toDF(["someInputCol"])
model = algorithm.fit(dataset)
compareParameterValues(algorithm, model)
def compareParameterValues(algorithm, model, ignored=[]):
algorithmMethods = dir(algorithm)
def isMethodRelevant(method):
return method.startswith("get") and \
getattr(model, method).__code__.co_argcount == 1 and \
method in algorithmMethods and \
method not in ignored
methods = filter(isMethodRelevant, dir(model))
for method in methods:
modelValue = getattr(model, method)()
algorithmValue = getattr(algorithm, method)()
assert(valuesAreEqual(algorithmValue, modelValue))
def valuesAreEqual(algorithmValue, modelValue):
if algorithmValue == "AUTO":
return True
elif algorithmValue == "auto":
return True
elif algorithmValue == "family_default":
return True
elif algorithmValue == {} and modelValue is None:
return True
else:
return algorithmValue == modelValue
|
grapesmoker/regulations-parser | regparser/history/annual.py | Python | cc0-1.0 | 3,599 | 0 | import logging
import re
from lxml import etree
import requests
from regparser.federalregister import fetch_notice_json
from regparser.history.delays import modify_effective_dates
from regparser.notice.build import build_notice
CFR_BULK_URL = ("http://www.gpo.gov/fdsys/bulkdata/CFR/{year}/title-{title}/"
+ "CFR-{year}-title{title}-vol{volume}.xml")
CFR_PART_URL = ("http://www.gpo.gov/fdsys/pkg/"
+ "CFR-{year}-title{title}-vol{volume}/xml/"
+ "CFR-{year}-title{title}-vol{volume}-part{part}.xml")
class Volume(object):
def __init__(self, year, title, vol_num):
self.year = year
self.title = title
self.vol_num = vol_num
self.url = CFR_BULK_URL.format(year=year, title=title, volume=vol_num)
self._response = requests.get(self.url, stream=True)
self.exists = self._response.status_code == 200
def should_contain(self, part):
lines = self._response.iter_lines()
line = next(lines)
while '<PARTS>' not in line:
line = next(lines)
if not line:
logging.warning('No <PARTS> in ' + self.url)
return False
match = re.match(r'.*parts? (\d+) to (\d+|end).*', line.lower())
if match:
start = int(match.group(1))
if start > part:
return False
if match.group(2) == 'end':
return True
end = int(match.group(2))
return end >= part
else:
logging.warning("Can't parse: " + line)
return False
def find_part_xml(self, part):
url = CFR_PART_URL.format(year=self.year, title=self.title,
volume=self.vol_num, part=part)
response = requests.get(url)
if response.status_code == 200:
return etree.fromstring(response.content)
def annual_edition_for(title, notice):
"""Annual editions are published for different titles at different
points throughout the year. Find the 'next' annual edition"""
if title <= 16:
month = '01'
elif title <= 27:
month = '04'
elif title <= 41:
month = '07'
else:
month = '10'
notice_year = int(notice['effective_on'][:4])
if notice['effective_on'] <= '%d-%s-01' % (notice_year, month):
return notice_year
else:
return notice_year + 1
def find_volume(year, title, part):
"""Annual editions have multiple volume numbers. Try to find the vol | ume
that we care about"""
vol_num = 1
volume = Volume(year, title, vol_num)
while volume.exists:
if volume.should_contain(part):
return volume
vol_num += 1
volume = Volume(year, title, vol_num)
return | None
def first_notice_and_xml(title, part):
"""Find the first annual xml and its associated notice"""
notices = [build_notice(title, part, n, do_process_xml=False)
for n in fetch_notice_json(title, part, only_final=True)
if n['full_text_xml_url'] and n['effective_on']]
modify_effective_dates(notices)
notices = sorted(notices,
key=lambda n: (n['effective_on'], n['publication_date']))
years = {}
for n in notices:
year = annual_edition_for(title, n)
years[year] = n
for year, notice in sorted(years.iteritems()):
volume = find_volume(year, title, part)
if volume:
part_xml = volume.find_part_xml(part)
if part_xml is not None:
return (notice, part_xml)
|
jaeilepp/eggie | mne/tests/test_fixes.py | Python | bsd-2-clause | 4,590 | 0.000218 | # Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Alex Gramfort <alexandre.gramfort@telecom-paristech.fr>
# License: BSD
import numpy as np
from nose.tools import assert_equal, assert_raises
from numpy.testing import assert_array_equal
from distutils.version import LooseVersion
from scipy import signal
from ..fixes import (_in1d, _tril_indices, _copysign, _unravel_index,
_Counter, _unique, _bincount, _digitize)
from | ..fixes import _firwin2 as mne_firwin2
from ..fixes import _filtfilt as mne_filtfilt
def test_counter():
"""Test Counter replacement"""
import collections
try:
Counter = collections.Counter
except:
pass
else:
a = Counter([1, 2, 1, 3])
b = _Counter([1, 2, 1, 3])
for key, count in zip([1, 2, 3], [2, 1, 1]) | :
assert_equal(a[key], b[key])
def test_unique():
"""Test unique() replacement
"""
# skip test for np version < 1.5
if LooseVersion(np.__version__) < LooseVersion('1.5'):
return
for arr in [np.array([]), np.random.rand(10), np.ones(10)]:
# basic
assert_array_equal(np.unique(arr), _unique(arr))
# with return_index=True
x1, x2 = np.unique(arr, return_index=True, return_inverse=False)
y1, y2 = _unique(arr, return_index=True, return_inverse=False)
assert_array_equal(x1, y1)
assert_array_equal(x2, y2)
# with return_inverse=True
x1, x2 = np.unique(arr, return_index=False, return_inverse=True)
y1, y2 = _unique(arr, return_index=False, return_inverse=True)
assert_array_equal(x1, y1)
assert_array_equal(x2, y2)
# with both:
x1, x2, x3 = np.unique(arr, return_index=True, return_inverse=True)
y1, y2, y3 = _unique(arr, return_index=True, return_inverse=True)
assert_array_equal(x1, y1)
assert_array_equal(x2, y2)
assert_array_equal(x3, y3)
def test_bincount():
"""Test bincount() replacement
"""
# skip test for np version < 1.6
if LooseVersion(np.__version__) < LooseVersion('1.6'):
return
for minlength in [None, 100]:
x = _bincount(np.ones(10, int), None, minlength)
y = np.bincount(np.ones(10, int), None, minlength)
assert_array_equal(x, y)
def test_in1d():
"""Test numpy.in1d() replacement"""
a = np.arange(10)
b = a[a % 2 == 0]
assert_equal(_in1d(a, b).sum(), 5)
def test_digitize():
"""Test numpy.digitize() replacement"""
data = np.arange(9)
bins = [0, 5, 10]
left = np.array([1, 1, 1, 1, 1, 2, 2, 2, 2])
right = np.array([0, 1, 1, 1, 1, 1, 2, 2, 2])
assert_array_equal(_digitize(data, bins), left)
assert_array_equal(_digitize(data, bins, True), right)
assert_raises(NotImplementedError, _digitize, data + 0.1, bins, True)
assert_raises(NotImplementedError, _digitize, data, [0., 5, 10], True)
def test_tril_indices():
"""Test numpy.tril_indices() replacement"""
il1 = _tril_indices(4)
il2 = _tril_indices(4, -1)
a = np.array([[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
[13, 14, 15, 16]])
assert_array_equal(a[il1],
np.array([1, 5, 6, 9, 10, 11, 13, 14, 15, 16]))
assert_array_equal(a[il2], np.array([5, 9, 10, 13, 14, 15]))
def test_unravel_index():
"""Test numpy.unravel_index() replacement"""
assert_equal(_unravel_index(2, (2, 3)), (0, 2))
assert_equal(_unravel_index(2, (2, 2)), (1, 0))
assert_equal(_unravel_index(254, (17, 94)), (2, 66))
assert_equal(_unravel_index((2 * 3 + 1) * 6 + 4, (4, 3, 6)), (2, 1, 4))
assert_array_equal(_unravel_index(np.array([22, 41, 37]), (7, 6)),
[[3, 6, 6], [4, 5, 1]])
assert_array_equal(_unravel_index(1621, (6, 7, 8, 9)), (3, 1, 4, 1))
def test_copysign():
"""Test numpy.copysign() replacement"""
a = np.array([-1, 1, -1])
b = np.array([1, -1, 1])
assert_array_equal(_copysign(a, b), b)
assert_array_equal(_copysign(b, a), a)
def test_firwin2():
"""Test firwin2 backport
"""
taps1 = mne_firwin2(150, [0.0, 0.5, 1.0], [1.0, 1.0, 0.0])
taps2 = signal.firwin2(150, [0.0, 0.5, 1.0], [1.0, 1.0, 0.0])
assert_array_equal(taps1, taps2)
def test_filtfilt():
"""Test IIR filtfilt replacement
"""
x = np.r_[1, np.zeros(100)]
# Filter with an impulse
y = mne_filtfilt([1, 0], [1, 0], x, padlen=0)
assert_array_equal(x, y)
|
troeger/opensubmit | executor/setup.py | Python | agpl-3.0 | 977 | 0.018443 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'opensubmit-exec',
version = '0.7.32',
url = 'https://github.com/troeger/opensubmit',
license='AGPL',
author = 'Peter Tröger',
description = 'This is the executor daemon for the OpenSubmit web application.',
author_em | ail = 'peter@troeger.eu',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 3.6',
'Programming Lang | uage :: Python :: 3.5',
'Programming Language :: Python :: 3.4'
],
install_requires=required,
extras_require={'report-opencl': ["pyopencl"]},
packages = ['opensubmitexec'],
package_data = {'opensubmitexec': ['VERSION']},
entry_points={
'console_scripts': [
'opensubmit-exec = opensubmitexec.cmdline:console_script',
],
}
)
|
keithio/django-mailify | mailify/signals.py | Python | mit | 226 | 0.017699 | from django.dispatch import Signal
message = Signal(providing_args=['desc', 'from', 'recipients | ', 'celery', 'when',
'keep', 'subject_context', 'me | ssage_context', 'subject_template',
'text_template', 'html_template']) |
shlopack/cursovaya | template/f_h_21.py | Python | mit | 25 | 0.08 | t | emplate = '%.2f'% | (fh_21) |
open-craft/xblock-mentoring | mentoring/title.py | Python | agpl-3.0 | 1,443 | 0 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2014 Harvard
#
# Authors:
# Xavier Antoviaque <xavier@antoviaque.org>
#
# This software's license gives you freedom; you can copy, convey,
# propagate, redistribute and/or modify this program under the terms of
# the GNU Affero General Public License (AGPL) as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version of the AGPL published by the FSF.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program in a file in the toplevel directory called
# "AGPLv3". If not, see <http://www.gnu.org/licenses/>.
#
# Imports ################################################### | ########
import logging
from .light_children import LightChild, Scope, String
# Globals ################### | ########################################
log = logging.getLogger(__name__)
# Classes ###########################################################
class TitleBlock(LightChild):
"""
A simple html representation of a title, with the mentoring weight.
"""
content = String(help="Text to display", scope=Scope.content, default="")
|
bjaraujo/ENigMA | trunk/wrappers/swig/python/setup.py | Python | gpl-2.0 | 831 | 0.001203 | import sys
import setuptools
with open('README.md', 'r') as fh:
long_description = fh.read()
with open('version.h', 'r') as fh:
version = fh.read().split('"')[1]
setuptools.setup(
name='ENigMApy',
version=version,
| author='bjaraujo',
author_email='',
description='ENigMA - Extended Numerical Multiphysics Analysis',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/bjaraujo/ENigMA',
packages=['ENigMA'],
include_package_data=True,
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: GNU General Public License (GPL)',
| 'Operating System :: Microsoft :: Windows' if sys.platform == 'win32' else 'Operating System :: POSIX :: Linux'
],
python_requires='>=3.5'
)
|
lwahlmeier/python-threadly | threadly/Futures.py | Python | unlicense | 4,445 | 0.000225 | """
Futures tools for threadly
"""
import threading
import time
class ListenableFuture(object):
"""
This class i used to make a Future that can have listeners and callbacks
added to it. Once setter(object) is called all listeners/callbacks are
also called. Callbacks will be given the set object, and .get() will
return said object.
"""
def __init__(self):
self.lock = threading.Condition()
self.settable = None
self.listeners = list()
self.callables = list()
def add_listener(self, listener, args=None, kwargs=None):
"""
Add a listener function to this ListenableFuture. Once set is called
on this future all listeners will be ran. Arguments for the listener
can be given if needed.
`listener` a callable that will be called when the future is completed
`args` tuple arguments that will be passed to the listener when called.
`kwargs` dict keyword arguments to be passed to the passed listener
when called.
"""
args = args or ()
kwargs = kwargs or {}
if self.settable is None:
self.listeners.append((listener, args, kwargs))
else:
listener(*args, **kwargs)
def add_callable(self, cable, args=None, kwargs=None):
"""
Add a callable function to this ListenableFuture. Once set is called
on this future all callables will be ran. This works the same as the
| listener except the set object is passed as the first argument when
the callable is called. Arguments for the listener can be given if
needed.
`cable` a callable that will be called when the future is completed,
it must have at least 1 argument.
`args` tuple arguments that will be passed to the listener when called.
`kwargs` dict keyword arguments to be passed to the passed listener
when called.
"""
args = args or ()
| kwargs = kwargs or {}
if self.settable is None:
self.callables.append((cable, args, kwargs))
else:
cable(self.settable, *args, **kwargs)
def get(self, timeout=2 ** 32):
"""
This is a blocking call that will return the set object once it is set.
`timeout` The max amount of time to wait for get (in seconds).
If this is reached a null is returned.
`returns` the set object. This can technically be anything so know
what your listening for.
"""
if self.settable is not None:
return self.settable
start = time.time()
try:
self.lock.acquire()
while self.settable is None and time.time() - start < timeout:
self.lock.wait(timeout - (time.time() - start))
return self.settable
finally:
self.lock.release()
def setter(self, obj):
"""
This is used to complete this future. Whatever thread sets this will
be used to call all listeners and callables for this future.
`obj` The object you want to set on this future
(usually use just True if you dont care)
"""
if self.settable is None:
self.settable = obj
self.lock.acquire()
self.lock.notify_all()
self.lock.release()
while len(self.listeners) > 0:
i = self.listeners.pop(0)
try:
i[0](*i[1], **i[2])
except Exception as exp:
print("Exception calling listener", i[0], exp)
while len(self.callables) > 0:
i = self.callables.pop(0)
try:
i[0](self.settable, *i[1], **i[2])
except Exception as exp:
print("Exception calling listener", i[0], exp)
else:
raise Exception("Already Set!")
def future_job(future, job):
"""
This is a simple helper function used to wrap a task on the Scheduler
in a future. Once the job runs the future will complete.
`future` The future that will be completed once the job finishes.
`job` The job to run before completing the future.
"""
try:
job[0](*job[1], **job[2])
future.setter(True)
except Exception as exp:
print("Error running futureJob:", exp)
future.setter(False)
|
jtmitchell/reeder-demo | reeder/rssfeeds/models.py | Python | gpl-2.0 | 1,244 | 0 | # -*- coding: utf-8 -*-
from django.db import models
class RssFeed(models.Model):
url = models.URLField(unique=True, db_index=True, default='')
name = models.CharField(blank=True, default='', max_length=100)
lastmodified = models.DateTimeField(editable=False, auto_now=True,
auto_now_add=True, db_index=True)
def __unicode__(self):
if self.name:
return "{}".format(self.name)
elif self.url:
return "RssFeed {}".format(self.url)
else:
return "RssFeed {}".format(self.pk)
class RssArticle(models.Model):
feed = models.ForeignKey(RssFeed)
url = models.URLField(max_length=200, db_index=True, default='')
snippet = models.CharField(max_length=500, default='', blank=True)
is_read = models.BooleanField(default=True)
lastmodified = models.DateTimeField(editable=False, auto_now=True,
| auto_now_add=True, db_in | dex=True)
def __unicode__(self):
if self.url:
return "{} {} {}".format(self.feed, self.url, self.snippet[:10])
else:
return "RssArticle {}".format(self.pk)
class Meta:
unique_together = ['feed', 'url']
|
fhoring/autorest | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyDictionary/autorestswaggerbatdictionaryservice/operations/dictionary_operations.py | Python | mit | 110,391 | 0.000661 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Ge | nerator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import C | lientRawResponse
from .. import models
class DictionaryOperations(object):
"""DictionaryOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def get_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get null dictionary value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get empty dictionary value {}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_empty(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{str}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_null_value(
self, custom_headers=None, raw=False, **operation_config):
"""Get Dictionary with null value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/nullvalue'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_null_key(
self, custom_headers=None, raw=False, **operation_config):
"""Get Dictionary with null key.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/nullkey'
|
edwinsteele/sensorsproject | sensors/urls.py | Python | cc0-1.0 | 410 | 0.004878 | from django.conf.urls import patterns, url
import sensors.views
urlpatterns = patterns('sensors.views',
url(r'^$', sensors.views.HomeViewClass.as_view(), name='home'),
url(r'^latest$', sensors.views.LatestViewClass.as_view(), name='latest'),
url(r'^recent$', sensors.views.RecentViewClass.as_view(), name='recent'),
url(r'^canned$', sensors.views.CannedViewClass.as_view(), name='canned'), |
)
| |
pjwerneck/device-mesh | mesh/views/api_errors.py | Python | apache-2.0 | 888 | 0 | # -*- coding: utf-8 -*-
from flask import request, Response
from werkzeug.http import parse_accept_header
import simplejson as json
from uuid import uuid4
def generic_api_error(e):
resp = json.dumps({"error": {"status": e.code,
"title": e.name,
"code": -1,
| "message": e.description,
"logref": uuid4().hex
}})
return Response(resp, status=e.code, mimetype='application/json')
def register_error_handlers(x):
x.errorhandler(400)(generic_api_error)
x.errorhandler(401)(generic_api_error)
x.er | rorhandler(403)(generic_api_error)
x.errorhandler(404)(generic_api_error)
x.errorhandler(405)(generic_api_error)
x.errorhandler(406)(generic_api_error)
x.errorhandler(409)(generic_api_error)
|
HXLStandard/libhxl-python | tests/test_model.py | Python | unlicense | 18,634 | 0.001556 | """
Unit tests for the hxl.model module
David Megginson
October 2014
License: Public Domain
"""
import io, unittest
import hxl
from hxl.datatypes import normalise_string
from hxl.model import TagPattern, Dataset, Column, Row, RowQuery
DATA = [
['Organisation', 'Cluster', 'District', 'Affected'],
['#org', '#sector+cluster', '#adm1', '#affected'],
['NGO A', 'WASH', 'Coast', '100'],
['NGO B', 'Education', 'Plains', '200'],
['NGO B', 'Education', 'Coast', '300']
]
SCHEMA_GOOD = [
['#valid_tag', '#valid_required'],
['#org', 'true']
]
SCHEMA_BAD = [
['#valid_tag', '#valid_required'],
['#severity', 'true']
]
class TestPattern(unittest.TestCase):
"""Test the TagPattern class."""
def setUp(self):
self.column = Column(tag='#tag', attributes=['foo', 'bar'])
def test_simple(self):
pattern = TagPattern('#tag')
self.assertTrue(pattern.match(self.column))
pattern = TagPattern('#tagx')
self.assertFalse(pattern.match(self.column))
def test_include(self):
pattern = TagPattern('#tag', include_attributes=['foo'])
self.assertTrue(pattern.match(self.column))
pattern = TagPattern('#tag', include_attributes=['xxx'])
self.assertFalse(pattern.match(self.column))
def test_exclude(self):
pattern = TagPattern('#tag', exclude_attributes=['xxx'])
self.assertTrue(pattern.match(self.column))
pattern = TagPattern('#tag', exclude_attributes=['foo'])
self.assertFalse(pattern.match(self.column))
def test_caseinsensitive(self):
pattern = TagPattern.parse('#Tag')
self.assertTrue(pattern.match(self.column))
pattern = TagPattern.parse('#tag+fOO')
self.assertTrue(pattern.match(self.column))
def test_simple_wildcard(self):
pattern = TagPattern.parse('*')
self.assertTrue(pattern.is_wildcard())
self.assertTrue(pattern.match(self.column))
def test_wildcard_empty_column(self):
pattern = TagPattern.parse('*')
untagged_column = Column(header="Foo", column_number=1)
self.assertFalse(pattern.match(untagged_column))
def test_attributes_wildcard(self):
pattern = TagPattern.parse('*+foo')
self.assertTrue(pattern.is_wildcard())
self.assertTrue(pattern.match(self.column))
pattern = TagPattern.parse('*-foo')
self.assertTrue(pattern.is_wildcard())
self.assertFalse(pattern.match(self.column))
pattern = TagPattern.parse('*+xxx')
self.assertTrue(pattern.is_wildcard())
self.assertFalse(pattern.match(self.column))
def test_absolute(self):
# don't allow exclusions in an absolute pattern
with self.assertRaises(ValueError):
pattern = TagPattern.parse('#foo+a-b!')
pattern = TagPattern.parse('#foo+a!')
self.assertTrue(pattern.is_absolute)
self.assertTrue(pattern.match(Column.parse('#foo+a')))
self.assertFalse(pattern.match(Column.parse('#foo')))
self.assertFalse(pattern.match(Column.parse('#foo+a+b')))
def test_parse(self):
pattern = TagPattern.parse('#tag+foo-xxx')
self.assertEqual(pattern.tag, '#tag')
self.assertTrue('foo' in pattern.include_attributes)
pattern = TagPattern.parse('tag+foo-xxx')
self.assertEqual(pattern.tag, '#tag')
pattern = TagPattern.parse(' tag +foo -xxx ')
self.assertEqual(pattern.tag, '#tag')
self.assertEqual({'foo'}, pattern.include_attributes)
self.assertEqual({'xxx'}, pattern.exclude_attributes)
def test_parse_list(self):
patterns = TagPattern.parse_list('tag+foo,tag-xxx')
for pattern in patterns:
self.assertTrue(pattern.match(self.column))
patterns = TagPattern.parse_list('tag-foo,tag+xxx')
for pattern in patterns:
self.assertFalse(pattern.match(self.column))
class TestDataset(unittest.TestCase):
def setUp(self):
self.source = hxl.data(DATA)
def test_min(self):
self.assertEqual('100', self.source.min('#affected'))
def test_min_date(self):
DATA = [
['#date'],
['2018-01-01'],
['1/1/2019']
]
self.assertEqual('2018-01-01', hxl.data(DATA).min('#date'))
def test_min_year(self):
DATA = [
['#date'],
['2018'],
['2017']
]
self.assertEqual('2017', hxl.data(DATA).min('#date'))
def test_max(self):
self.assertEqual('300', self.source.max('#affected'))
def test_cached(self):
dataset = Dataset()
self.assertFalse(dataset.is_cached)
def test_headers(self):
self.assertEqual(DATA[0], self.source.headers)
def test_has_headers(self):
self.assertTrue(self.source.has_headers)
self.assertFalse(hxl.data(DATA[1:]).has_headers)
def test_tags(self):
self.assertEqual([Column.parse(s).tag for s in DATA[1]], self.source.tags)
def test_display_tags(self):
self.assertEqual(DATA[1], self.source.display_tags)
def test_values(self):
self.assertEqual(DATA[2:], self.source.values)
def test_value_set_all(self):
expected = set()
for r in DATA[2:]:
expected.update(r)
self.assertEqual(expected, self.source.get_value_set())
def test_value_set_normalised(self):
expected = set([normalise_string(s[1]) for s in DATA[2:]])
self.assertEqual(expected, self.source.get_value_set('#sector', True))
def test_value_set_unnormalised(self):
expected = set([s[1] for s in DATA[2:]])
self.assertEqual(expected, self.source.get_value_set('#sector', False))
def test_validate(self):
self.assertTrue(self.source.validate(SCHEMA_GOOD))
self.assertFalse(self.source.validate(SCHEMA_BAD))
def test_hash_columns(self):
self.assertTrue(self.source.columns_hash is not None)
self.assertEqual(32, len(self.source.data_hash))
def test_hash_dataset(self):
self.assertTrue(self.source.data_hash is not None)
self.assertEqual(32, len(self.source.data_hash))
# TODO test generators
class TestColumn(unittest.TestCase):
HXL_TAG = '#foo'
ATTRIBUTES = ['en', 'bar', 'f']
HEADER_TEXT = 'Foo header'
COLUMN_NUMBER = 5
SOURCE_COLUMN_NUMBER = 7
def setUp(sel | f):
self.column = Column(tag=TestColumn.HXL_TAG, attributes=TestColumn.ATTRIBUTES, header=TestColumn.HEADER_TEXT)
def test_variables(self):
self.assertEqual(TestColumn.HXL_TAG, self.column.tag)
self.assertEqual(set(TestColumn.ATTRIBUTES), self.column.attributes)
self.assertEqual(TestColumn.HEADER | _TEXT, self.column.header)
def test_display_tag(self):
self.assertEqual(TestColumn.HXL_TAG + '+' + "+".join(TestColumn.ATTRIBUTES), self.column.display_tag)
def test_case_insensitive(self):
column = Column(tag='Foo', attributes=['X', 'y'])
self.assertEqual('foo', column.tag)
self.assertEqual(set(['x', 'y']), column.attributes)
def test_attribute_order(self):
TAGSPEC = '#foo+b+a+c+w+x'
self.assertEqual(TAGSPEC, Column.parse(TAGSPEC).display_tag)
def test_eq(self):
col1 = Column(tag='xxx', attributes={'b','c','a'}, header='foo')
col2 = Column(tag='xxx', attributes={'a', 'b','c'}, header='bar')
col3 = Column(tag='xxx', attributes={'b','c'})
self.assertEqual(col1, col2)
self.assertNotEqual(col1, col3)
def test_hash(self):
col1 = Column(tag='xxx', attributes={'b','c','a'}, header='foo')
col2 = Column(tag='xxx', attributes={'a', 'b','c'}, header='bar')
col3 = Column(tag='xxx', attributes={'b','c'})
self.assertEqual(hash(col1), hash(col2))
self.assertNotEqual(hash(col1), hash(col3))
def test_parse_valid(self):
col = Column.parse("#foo +a +b")
self.assertEqual(col.tag, '#foo')
self.assertTrue('a' in col.attributes)
self.assertTrue('b' in col.attributes)
def test_parse_invalid(self):
# empty |
ivano666/tensorflow | tensorflow/contrib/learn/python/learn/estimators/tensor_signature.py | Python | apache-2.0 | 4,029 | 0.007198 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TensorSignature class and utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
class TensorSignature(collections.namedtuple(
"TensorSignature", ["dtype", "shape", "is_sparse"])):
| """Signature of the `Tensor` object.
Useful to check compatibility of tensors.
Attributes:
dtype: `DType` object.
shape: `TensorShape` object.
"""
def __new__(cls, tensor):
if is | instance(tensor, ops.SparseTensor):
return super(TensorSignature, cls).__new__(
cls, dtype=tensor.values.dtype, shape=None, is_sparse=True)
return super(TensorSignature, cls).__new__(
cls, dtype=tensor.dtype, shape=tensor.get_shape(), is_sparse=False)
def is_compatible_with(self, other):
"""Returns True if signatures are compatible."""
def _shape_is_compatible_0dim(this, other):
other = tensor_shape.as_shape(other)
if this.ndims != other.ndims:
return False
for dim, (x_dim, y_dim) in enumerate(zip(this.dims, other.dims)):
if dim == 0:
continue
if not x_dim.is_compatible_with(y_dim):
return False
return True
if other.is_sparse:
return self.is_sparse and self.dtype.is_compatible_with(other.dtype)
return (self.dtype.is_compatible_with(other.dtype) and
_shape_is_compatible_0dim(self.shape, other.shape) and
not self.is_sparse)
def get_placeholder(self):
if self.is_sparse:
return array_ops.sparse_placeholder(dtype=self.dtype)
return array_ops.placeholder(dtype=self.dtype, shape=self.shape)
def tensors_compatible(tensors, signatures):
"""Check that tensors are compatible with signatures.
Args:
tensors: Dict of `Tensor` objects or single `Tensor` object.
signatures: Dict of `TensorSignature` objects or
single `TensorSignature` object.
Returns:
True if all tensors are compatible, False otherwise.
"""
# Dict of Tensors as input.
if isinstance(tensors, dict):
if not isinstance(signatures, dict):
return False
for key in signatures:
if key not in tensors:
return False
if not TensorSignature(tensors[key]).is_compatible_with(signatures[key]):
return False
return True
# Single tensor as input.
if isinstance(signatures, dict):
return False
return TensorSignature(tensors).is_compatible_with(signatures)
def create_signatures(tensors):
"""Creates TensorSignature objects for given tensors.
Args:
tensors: Dict of `Tensor` objects or single `Tensor`.
Returns:
Dict of `TensorSignature` objects or single `TensorSignature`.
"""
if isinstance(tensors, dict):
return {
key: TensorSignature(tensors[key]) for key in tensors}
return TensorSignature(tensors)
def create_placeholders_from_signatures(signatures):
"""Creates placeholders from given signatures.
Args:
signatures: Dict of `TensorSignature` objects or single `TensorSignature`.
Returns:
Dict of `tf.placeholder` objects or single `tf.placeholder`.
"""
if not isinstance(signatures, dict):
return signatures.get_placeholder()
return {
key: signatures[key].get_placeholder()
for key in signatures}
|
bokeh/bokeh | sphinx/source/docs/user_guide/examples/graph_interaction_nodesadjacentnodes.py | Python | bsd-3-clause | 1,359 | 0.006623 | import networkx as nx
from bokeh.io import output_file, show
from bokeh.models import (BoxSelectTool, Circle, HoverTool, MultiLine,
NodesAndAdjacentNodes, Plot, Range1d, TapTool)
from bokeh.palettes import Spectral4
from bokeh.plotting import from_ | networkx
G=nx.karate_club_graph()
plot = Plot(width=400, height=400,
x_range=Range1d(-1.1,1.1), y_range=Range1d(-1.1,1.1))
plot.title.text = "Graph Interaction Demonstration"
plot.add_tools(HoverTo | ol(tooltips=None), TapTool(), BoxSelectTool())
graph_renderer = from_networkx(G, nx.circular_layout, scale=1, center=(0,0))
graph_renderer.node_renderer.glyph = Circle(size=15, fill_color=Spectral4[0])
graph_renderer.node_renderer.selection_glyph = Circle(size=15, fill_color=Spectral4[2])
graph_renderer.node_renderer.hover_glyph = Circle(size=15, fill_color=Spectral4[1])
graph_renderer.edge_renderer.glyph = MultiLine(line_color="#CCCCCC", line_alpha=0.8, line_width=5)
graph_renderer.edge_renderer.selection_glyph = MultiLine(line_color=Spectral4[2], line_width=5)
graph_renderer.edge_renderer.hover_glyph = MultiLine(line_color=Spectral4[1], line_width=5)
graph_renderer.selection_policy = NodesAndAdjacentNodes()
graph_renderer.inspection_policy = NodesAndAdjacentNodes()
plot.renderers.append(graph_renderer)
output_file("interactive_graphs.html")
show(plot)
|
calston/pitool | pitool/service.py | Python | mit | 6,191 | 0.001454 | import copy
import exceptions
import json
import math
import time
try:
import RPi.GPIO as gpio
except:
gpio = None
from twisted.application import service
from twisted.internet import task, reactor, defer
from twisted.python import log
from twisted.python.filepath import FilePath
from twisted.web import server, resource
from twisted.web.static import File
from autobahn.twisted.websocket import WebSocketServerProtocol, \
WebSocketServerFactory
from .components import *
from . import web, pidata
class DataProtocol(WebSocketServerProtocol):
def __init__(self, board):
WebSocketServerProtocol.__init__(self)
self.log.debug = lambda *a, **kw: None
self.board = board
self.gpio_buffers = {}
self.streaming = False
self.inputs = []
self.active = []
self.start_time = time.time()*1000000
self.t = None
self.base = 100000
self.window = 5000000
def onMessage(self, payload, binary):
if not binary:
log.msg(payload)
msg = json.loads(payload.decode('utf8'))
cmd = msg['type']
try:
c = getattr(self, 'cmd_%s' % cmd.lower())
return defer.maybeDeferred(c, msg['args'])
except exceptions.AttributeError:
log.msg("Command '%s' not implemented" % cmd)
def send(self, t, payload):
self.sendMessage(json.dumps({'type': t, 'payload': payload}), False)
def _get_inputs(self):
inputs = []
for id, g in self.board.gpio.items():
if g.mode == 1:
inputs.append(g)
inputs.sort(key=lambda g: g.bcm_id)
return inputs
def _format_timebase(self, buffer):
if not buffer:
return []
base = float(self.base)
duration = self.window
chunks = int(math.floor(duration/base))
minT = self.start_time - duration
wlist = []
tL = 0
vL = 0
for t, v in reversed(buffer):
if (len(wlist) >= chunks) or (t < minT):
# Stop reading buffer when our timebase is full
break
if tL:
# uSecs since last datapoint
tD = tL - t
else:
# Fill waveform since earliest of all waveforms
tD = self.start_time - t
basebits = int(math.ceil(tD/base))
wlist.extend([v] * basebits)
tL = t
vL = v
if buffer and (not wlist):
v = buffer[-1][1]
wlist = [v,v]
remainder = len(wlist) - chunks
if remainder < 0:
wlist.extend([0]*remainder)
return reversed(wlist)
def getBuffer(self):
self.inputs = self._get_inputs()
heads = []
# Snapshot all the buffers
for g in self.inputs:
if self.active and (g.bcm_id not in self.active):
# Skip any inputs the user disables
continue
self.gpio_buffers[g.bcm_id] = copy.deepcopy(g.buffer)
if self.gpio_buffers[g.bcm_id]:
heads.append(self.gpio_buffers[g.bcm_id][-1])
if heads:
self.start_time = max(heads, key=lambda h: h[0])[0]
def sendBuffer(self):
# Rebase waves to t-zero
waveforms = []
for g in self.inputs:
waveforms.append({
'id': g.bcm_id,
'buffer': list(self._format_timebase(
self.gpio_buffers[g.bcm_id]))
})
self.send('waveform_start', waveforms)
def streamBuffer(self):
self.getBuffer()
self.sendBuffer()
def sendOneshotBuffers(self):
self.getBuffer()
for g in self.inputs:
g.stopListening()
self.sendBuffer()
def cmd_set_timebase(self, args):
self.base = args['val']
def cmd_set_window(self, args):
self.window = args['val']
def cmd_set_channels(self, args):
self.active = args['active']
def cmd_one_shot(self, args):
trigger_chan = args.get('chan')
trigger = args['trigger']
for g in self.inputs:
g.stopListening()
for g in self.inputs:
g. | flushBuffer()
for g in self.inputs:
g.listen()
if trigger is None:
reactor.c | allLater(self.window/1000000.0, self.sendOneshotBuffers)
def cmd_stop_buffer_stream(self, args):
if self.t:
self.t.stop()
log.msg("Stopping buffer send")
self.t = None
def cmd_start_buffer_stream(self, args):
if not self.t:
log.msg("Starting buffer send")
for g in self.inputs:
g.listen()
self.t = task.LoopingCall(self.streamBuffer)
self.t.start(0.1)
def onClose(self, wasClean, code, reason):
self.cmd_stop_buffer_stream(None)
class WSFactory(WebSocketServerFactory):
def __init__(self, url, board):
WebSocketServerFactory.__init__(self, url)
self.board = board
def buildProtocol(self, addr):
p = self.protocol(self.board)
p.factory = self
return p
class PiToolService(service.Service):
def __init__(self, config):
self.config = config
self.board = pidata.PiBoard()
def startService(self):
root = resource.Resource()
root.putChild('', web.Index(self))
root.putChild('api', web.API(self))
root.putChild('analyzer', web.Analyzer(self))
root.putChild("static", File(FilePath('pitool/resources/static').path))
site = server.Site(root)
reactor.listenTCP(self.config.get('port', 8081), site)
factory = WSFactory(u"ws://127.0.0.1:8082", self.board)
factory.protocol = DataProtocol
reactor.listenTCP(8082, factory)
for g in self.board.gpio.values():
g.listen()
def stopService(self):
for g in self.board.gpio.values():
g.stopListening()
if gpio:
gpio.cleanup()
|
jdfekete/progressivis | progressivis/linalg/__init__.py | Python | bsd-2-clause | 6,390 | 0 | # flake8: noqa
from .elementwise import (
Unary,
Binary,
ColsBinary,
Reduce,
func2class_name,
unary_module,
make_unary,
binary_module,
make_binary,
reduce_module,
make_reduce,
binary_dict_int_tst,
unary_dict_gen_tst,
binary_dict_gen_tst,
)
from .linear_map import LinearMap
from .nexpr import NumExprABC
from .mixufunc import make_local, make_local_dict, get_ufunc_args, MixUfuncABC
from ._elementwise import (
BitwiseNot,
Absolute,
Arccos,
Arccosh,
Arcsin,
Arcsinh,
Arctan,
Arctanh,
Cbrt,
Ceil,
Conj,
Conjugate,
Cos,
Cosh,
Deg2rad,
Degrees,
Exp,
Exp2,
Expm1,
Fabs,
Floor,
Frexp,
Invert,
Isfinite,
Isinf,
Isnan,
Isnat,
Log,
Log10,
Log1p,
Log2,
LogicalNot,
Modf,
Negative,
Positive,
Rad2deg,
Radians,
Reciprocal,
Rint,
Sign,
Signbit,
Sin,
Sinh,
Spacing,
Sqrt,
Square,
Tan,
Tanh,
Trunc,
Abs,
Add,
Arctan2,
BitwiseAnd,
BitwiseOr,
BitwiseXor,
Copysign,
Divide,
Divmod,
Equal,
FloorDivide,
FloatPower,
Fmax,
Fmin,
Fmod,
Gcd,
Greater,
GreaterEqual,
Heaviside,
Hypot,
Lcm,
Ldexp,
LeftShift,
Less,
LessEqual,
Logaddexp,
Logaddexp2,
LogicalAnd,
LogicalOr,
LogicalXor,
Maximum,
Minimum,
Mod,
Multiply,
Nextafter,
NotEqual,
Power,
Remainder,
RightShift,
Subtract,
TrueDivide,
ColsAdd,
ColsArctan2,
ColsBitwiseAnd,
ColsBitwiseOr,
ColsBitwiseXor,
ColsCopysign,
ColsDivide,
ColsDivmod,
ColsEqual,
ColsFloorDivide,
ColsFloatPower,
ColsFmax,
ColsFmin,
ColsFmod,
ColsGcd,
ColsGreater,
ColsGreaterEqual,
ColsHeaviside,
ColsHypot,
ColsLcm,
ColsLdexp,
ColsLeftShift,
ColsLess,
ColsLessEqual,
ColsLogaddexp,
ColsLogaddexp2,
ColsLogicalAnd,
ColsLogicalOr,
ColsLogicalXor,
ColsMaximum,
ColsMinimum,
ColsMod,
ColsMultiply,
ColsNextafter,
ColsNotEqual,
ColsPower,
ColsRemainder,
ColsRightShift,
ColsSubtract,
ColsTrueDivide,
AddReduce,
Arctan2Reduce,
BitwiseAndReduce,
BitwiseOrReduce,
BitwiseXorReduce,
CopysignReduce,
DivideReduce,
DivmodReduce,
EqualReduce,
FloorDivideReduce,
FloatPowerReduce,
FmaxReduce,
FminReduce,
FmodReduce,
GcdReduce,
GreaterReduce,
GreaterEqualReduce,
HeavisideReduce,
HypotReduce,
LcmReduce,
LdexpReduce,
LeftShiftReduce,
LessReduce,
LessEqualReduce,
LogaddexpReduce,
Logaddexp2Reduce,
LogicalAndReduce,
LogicalOrReduce,
LogicalXorReduce,
MaximumReduce,
MinimumReduce,
ModReduce,
MultiplyReduce,
NextafterReduce,
NotEqualReduce,
PowerReduce,
RemainderReduce,
RightShiftReduce,
SubtractReduce,
TrueDivideReduce,
)
__all__ = [
"Unary",
"Binary",
"ColsBinary",
"Reduce",
"func2class_name",
"unary_module",
"make_unary",
"binary_module",
"make_binary",
"reduce_module",
"make_reduce",
"binary_dict_int_tst",
"unary_dict_gen_tst",
"binary_dict_gen_tst",
"LinearMap",
"NumExprABC",
"make_local",
"make_local_dict",
"get_ufunc_args",
"MixUfuncABC",
"BitwiseNot",
"Absolute",
"Arccos",
"Arccosh",
"Arcsin",
"Arcsinh",
"Arctan",
"Arctanh",
"Cbrt",
"Ceil",
"Conj",
"Conjugate",
"Cos",
"Cosh",
"Deg2rad",
"Degrees",
"Ex | p",
"Exp2",
"Expm1",
"Fabs",
"Floor",
"Frexp",
"Invert",
"Isfinite",
"Isinf",
"Isnan",
"Isnat",
"Log",
"Log10",
"Log1p",
"Log2",
"LogicalNot",
"Modf",
"Negative",
"Positive",
"Rad2deg",
"Radians",
"Reciprocal",
"Rint",
"Sign",
"Signbit" | ,
"Sin",
"Sinh",
"Spacing",
"Sqrt",
"Square",
"Tan",
"Tanh",
"Trunc",
"Abs",
"Add",
"Arctan2",
"BitwiseAnd",
"BitwiseOr",
"BitwiseXor",
"Copysign",
"Divide",
"Divmod",
"Equal",
"FloorDivide",
"FloatPower",
"Fmax",
"Fmin",
"Fmod",
"Gcd",
"Greater",
"GreaterEqual",
"Heaviside",
"Hypot",
"Lcm",
"Ldexp",
"LeftShift",
"Less",
"LessEqual",
"Logaddexp",
"Logaddexp2",
"LogicalAnd",
"LogicalOr",
"LogicalXor",
"Maximum",
"Minimum",
"Mod",
"Multiply",
"Nextafter",
"NotEqual",
"Power",
"Remainder",
"RightShift",
"Subtract",
"TrueDivide",
"ColsAdd",
"ColsArctan2",
"ColsBitwiseAnd",
"ColsBitwiseOr",
"ColsBitwiseXor",
"ColsCopysign",
"ColsDivide",
"ColsDivmod",
"ColsEqual",
"ColsFloorDivide",
"ColsFloatPower",
"ColsFmax",
"ColsFmin",
"ColsFmod",
"ColsGcd",
"ColsGreater",
"ColsGreaterEqual",
"ColsHeaviside",
"ColsHypot",
"ColsLcm",
"ColsLdexp",
"ColsLeftShift",
"ColsLess",
"ColsLessEqual",
"ColsLogaddexp",
"ColsLogaddexp2",
"ColsLogicalAnd",
"ColsLogicalOr",
"ColsLogicalXor",
"ColsMaximum",
"ColsMinimum",
"ColsMod",
"ColsMultiply",
"ColsNextafter",
"ColsNotEqual",
"ColsPower",
"ColsRemainder",
"ColsRightShift",
"ColsSubtract",
"ColsTrueDivide",
"AddReduce",
"Arctan2Reduce",
"BitwiseAndReduce",
"BitwiseOrReduce",
"BitwiseXorReduce",
"CopysignReduce",
"DivideReduce",
"DivmodReduce",
"EqualReduce",
"FloorDivideReduce",
"FloatPowerReduce",
"FmaxReduce",
"FminReduce",
"FmodReduce",
"GcdReduce",
"GreaterReduce",
"GreaterEqualReduce",
"HeavisideReduce",
"HypotReduce",
"LcmReduce",
"LdexpReduce",
"LeftShiftReduce",
"LessReduce",
"LessEqualReduce",
"LogaddexpReduce",
"Logaddexp2Reduce",
"LogicalAndReduce",
"LogicalOrReduce",
"LogicalXorReduce",
"MaximumReduce",
"MinimumReduce",
"ModReduce",
"MultiplyReduce",
"NextafterReduce",
"NotEqualReduce",
"PowerReduce",
"RemainderReduce",
"RightShiftReduce",
"SubtractReduce",
"TrueDivideReduce",
]
|
cderici/question-analyzer | src/data/maltImporter.py | Python | gpl-2.0 | 1,766 | 0.023783 | import codecs
from question import Question
class MaltImporter:
    """Imports MaltParser output: pairs raw '|'-delimited question records
    with their tab-separated dependency parses and builds Question objects.
    """

    questions = []

    def getRawQuestionTexts(self, qFilePath):
        """Return one list of '|'-separated fields per line of the raw file."""
        # Context manager closes the handle (the original leaked it).
        with codecs.open(qFilePath, 'r', 'utf-8') as qFile:
            qTexts = qFile.readlines()
        return [text.strip().split('|') for text in qTexts]

    def getParsedQuestionTexts(self, qParsedFilePath):
        """Group tab-separated parser output into one token-list per question.

        A token line whose second column is "." terminates the current
        question; lines with fewer than two columns are skipped.
        """
        with codecs.open(qParsedFilePath, 'r', 'utf-8') as qFile:
            qTexts = qFile.readlines()
        qTexts = [text.strip().split('\t') for text in qTexts]
        questions = []
        qParts = []
        for text in qTexts:
            if len(text) <= 1:
                continue  # blank or malformed line
            if text[1] == ".":
                # Sentence-final period closes the current question.
                qParts.append(text)
                questions.append(qParts)
                qParts = []
            else:
                # Strip any BOM residue that leaked into the tokens.
                qParts.append([t.replace('\ufeff', '') for t in text])
        return questions

    def importMaltOutputs(self, qFilePath, qParsedFilePath):
        """Build Question objects by pairing raw records with their parses.

        Raw record layout: text|focus|mod|coarseClass|fineClass|answer.
        """
        self.questions = []
        qTexts = self.getRawQuestionTexts(qFilePath)
        qTextParts = self.getParsedQuestionTexts(qParsedFilePath)
        # zip() truncates safely if one file has fewer records than the
        # other (the original index loop would raise IndexError).
        for raw, parts in zip(qTexts, qTextParts):
            question = Question(raw[0], parts)
            question.coarseClass = raw[3]
            question.fineClass = raw[4]
            question.setMeta(raw[1], raw[2])
            question.answer = raw[5]
            self.questions.append(question)
        return self.questions
|
hagabbar/pycbc_copy | examples/noise/timeseries.py | Python | gpl-3.0 | 481 | 0 | import pycbc.noise
import pycbc.psd
import pylab
# The color of the noise matches a PSD which you provide
flow = 30.0
delta_f = 1.0 / 16
flen = int(2048 / delta_f) + 1
psd = pycbc.psd.aLIGOZeroDetHighPower(flen, delta_f, flow)
# Generate 32 seconds of noise at 4096 Hz
delta_t = 1.0 / 4096
tsamples = int(32 / de | lta_t)
ts = pycbc.noise.noise_from_psd(tsamples, delta_t, psd, seed=127)
pylab.plot(ts.sample_times, ts)
pylab.ylabel('Strain')
py | lab.xlabel('Time (s)')
pylab.show()
|
agry/NGECore2 | scripts/mobiles/corellia/slice_hound.py | Python | lgpl-3.0 | 1,618 | 0.027194 | import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
    """Register the Corellian slice hound creature template with the spawn service."""
    template = MobileTemplate()
    template.setCreatureName('slice_hound')
    template.setLevel(28)
    template.setDifficulty(Difficulty.NORMAL)
    template.setMinSpawnDistance(4)
    template.setMaxSpawnDistance(8)
    template.setDeathblow(False)
    template.setScale(1)
    # Harvestable resources.
    template.setMeatType("Carnivore Meat")
    template.setMeatAmount(65)
    template.setHideType("Bristly Hide")
    template.setHideAmount(35)
    template.setBoneType("Animal Bones")
    template.setBoneAmount(30)
    template.setSocialGroup("slice hound")
    template.setAssistRange(2)
    template.setStalker(False)
    template.setOptionsBitmask(Options.ATTACKABLE)

    # Visual appearance.
    models = Vector()
    models.add('object/mobile/shared_corellian_slice_hound.iff')
    template.setTemplates(models)

    # Natural (unarmed) attack.
    weapons = Vector()
    weapons.add(WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic'))
    template.setWeaponTemplateVector(weapons)

    attacks = Vector()
    attacks.add('bm_bite_2')
    attacks.add('bm_hamstring_2')
    template.setDefaultAttack('creatureMeleeAttack')
    template.setAttacks(attacks)

    core.spawnService.addMobileTemplate('slice_hound', template)
    return
CybOXProject/python-cybox | examples/demo.py | Python | bsd-3-clause | 1,287 | 0.001554 | #!/usr/bin/env python
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
'''
CybOX Common Indicator helper Demo
Demonstrates the use of the Cybox Common Indicator helper.
Creates a CybOX Observables document containing a
'''
import sys
from pprint import pprint
from cybox import helper
from cybox.core import Observables
def main():
    '''Build a CybOX Observables document and write it to stdout'''
    # Create one observable of each flavour.  Creation order is kept
    # identical to the original so any auto-generated IDs are unchanged.
    domain_obs = helper.create_domain_name_observable('www.example.com')
    url_obs = helper.create_url_observable('http://www.example.com')
    ipv4_obs = helper.create_ipv4_observable('127.0.0.1')
    email_obs = helper.create_email_address_observable('cybox@mitre.org')
    file_obs = helper.create_file_hash_observable(
        'foo.bar', '94f93e00fd122466d68a6ae3b8c7f908')

    doc = Observables([domain_obs, ipv4_obs, url_obs, email_obs, file_obs])
    print(doc.to_xml(encoding=None))
    pprint(doc.to_dict())


if __name__ == "__main__":
    main()
    sys.exit()
|
theo-l/django | tests/managers_regress/tests.py | Python | bsd-3-clause | 11,168 | 0.001074 | from unittest import skipUnless
from django.db import models
from django.template import Context, Template
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.utils import isolate_apps
from django.utils.version import PY37
from .models import (
AbstractBase1, AbstractBase2, AbstractBase3, Child1, Child2, Child3,
Child4, Child5, Child6, Child7, RelatedModel, RelationModel,
)
class ManagersRegressionTests(TestCase):
    """Regression tests for manager resolution on concrete, abstract,
    swappable and related models."""

    def test_managers(self):
        # Populate every child model so each custom/default manager has
        # rows to filter.
        Child1.objects.create(name='fred', data='a1')
        Child1.objects.create(name='barney', data='a2')
        Child2.objects.create(name='fred', data='b1', value=1)
        Child2.objects.create(name='barney', data='b2', value=42)
        Child3.objects.create(name='fred', data='c1', comment='yes')
        Child3.objects.create(name='barney', data='c2', comment='no')
        Child4.objects.create(name='fred', data='d1')
        Child4.objects.create(name='barney', data='d2')
        Child5.objects.create(name='fred', comment='yes')
        Child5.objects.create(name='barney', comment='no')
        Child6.objects.create(name='fred', data='f1', value=42)
        Child6.objects.create(name='barney', data='f2', value=42)
        Child7.objects.create(name='fred')
        Child7.objects.create(name='barney')
        self.assertQuerysetEqual(Child1.manager1.all(), ["<Child1: a1>"])
        self.assertQuerysetEqual(Child1.manager2.all(), ["<Child1: a2>"])
        self.assertQuerysetEqual(Child1._default_manager.all(), ["<Child1: a1>"])
        self.assertQuerysetEqual(Child2._default_manager.all(), ["<Child2: b1>"])
        self.assertQuerysetEqual(Child2.restricted.all(), ["<Child2: b2>"])
        self.assertQuerysetEqual(Child3._default_manager.all(), ["<Child3: c1>"])
        self.assertQuerysetEqual(Child3.manager1.all(), ["<Child3: c1>"])
        self.assertQuerysetEqual(Child3.manager2.all(), ["<Child3: c2>"])
        # Since Child6 inherits from Child4, the corresponding rows from f1 and
        # f2 also appear here. This is the expected result.
        self.assertQuerysetEqual(Child4._default_manager.order_by('data'), [
            "<Child4: d1>",
            "<Child4: d2>",
            "<Child4: f1>",
            "<Child4: f2>",
        ])
        self.assertQuerysetEqual(Child4.manager1.all(), ["<Child4: d1>", "<Child4: f1>"], ordered=False)
        self.assertQuerysetEqual(Child5._default_manager.all(), ["<Child5: fred>"])
        self.assertQuerysetEqual(Child6._default_manager.all(), ["<Child6: f1>", "<Child6: f2>"], ordered=False)
        self.assertQuerysetEqual(
            Child7._default_manager.order_by('name'),
            ["<Child7: barney>", "<Child7: fred>"]
        )

    def test_abstract_manager(self):
        # Accessing the manager on an abstract model should
        # raise an attribute error with an appropriate message.
        # This error message isn't ideal, but if the model is abstract and
        # a lot of the class instantiation logic isn't invoked; if the
        # manager is implied, then we don't get a hook to install the
        # error-raising manager.
        msg = "type object 'AbstractBase3' has no attribute 'objects'"
        with self.assertRaisesMessage(AttributeError, msg):
            AbstractBase3.objects.all()

    def test_custom_abstract_manager(self):
        # Accessing the manager on an abstract model with a custom
        # manager should raise an attribute error with an appropriate
        # message.
        msg = "Manager isn't available; AbstractBase2 is abstract"
        with self.assertRaisesMessage(AttributeError, msg):
            AbstractBase2.restricted.all()

    def test_explicit_abstract_manager(self):
        # Accessing the manager on an abstract model with an explicit
        # manager should raise an attribute error with an appropriate
        # message.
        msg = "Manager isn't available; AbstractBase1 is abstract"
        with self.assertRaisesMessage(AttributeError, msg):
            AbstractBase1.objects.all()

    @override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
    @isolate_apps('managers_regress')
    def test_swappable_manager(self):
        class SwappableModel(models.Model):
            class Meta:
                swappable = 'TEST_SWAPPABLE_MODEL'
        # Accessing the manager on a swappable model should
        # raise an attribute error with a helpful message
        msg = (
            "Manager isn't available; 'managers_regress.SwappableModel' "
            "has been swapped for 'managers_regress.Parent'"
        )
        with self.assertRaisesMessage(AttributeError, msg):
            SwappableModel.objects.all()

    @override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
    @isolate_apps('managers_regress')
    def test_custom_swappable_manager(self):
        class SwappableModel(models.Model):
            stuff = models.Manager()

            class Meta:
                swappable = 'TEST_SWAPPABLE_MODEL'
        # Accessing the manager on a swappable model with an
        # explicit manager should raise an attribute error with a
        # helpful message
        msg = (
            "Manager isn't available; 'managers_regress.SwappableModel' "
            "has been swapped for 'managers_regress.Parent'"
        )
        with self.assertRaisesMessage(AttributeError, msg):
            SwappableModel.stuff.all()

    @override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
    @isolate_apps('managers_regress')
    def test_explicit_swappable_manager(self):
        class SwappableModel(models.Model):
            objects = models.Manager()

            class Meta:
                swappable = 'TEST_SWAPPABLE_MODEL'
        # Accessing the manager on a swappable model with an
        # explicit manager should raise an attribute error with a
        # helpful message
        msg = (
            "Manager isn't available; 'managers_regress.SwappableModel' "
            "has been swapped for 'managers_regress.Parent'"
        )
        with self.assertRaisesMessage(AttributeError, msg):
            SwappableModel.objects.all()

    def test_regress_3871(self):
        # Generic, FK and M2M reverse managers must all be usable from a
        # template (attribute access, no parentheses).
        related = RelatedModel.objects.create()
        relation = RelationModel()
        relation.fk = related
        relation.gfk = related
        relation.save()
        relation.m2m.add(related)
        t = Template('{{ related.test_fk.all.0 }}{{ related.test_gfk.all.0 }}{{ related.test_m2m.all.0 }}')
        self.assertEqual(
            t.render(Context({'related': related})),
            ''.join([str(relation.pk)] * 3),
        )

    def test_field_can_be_called_exact(self):
        # Make sure related managers core filters don't include an
        # explicit `__exact` lookup that could be interpreted as a
        # reference to a foreign `exact` field. refs #23940.
        related = RelatedModel.objects.create(exact=False)
        relation = related.test_fk.create()
        self.assertEqual(related.test_fk.get(), relation)
@isolate_apps('managers_regress')
class TestManagerInheritance(SimpleTestCase):
def test_implicit_inheritance(self):
class CustomManager(models.Manager):
pass
class AbstractModel(models.Model):
custom_manager = CustomManager()
class Meta:
abstract = True
class PlainModel(models.Model):
custom_manager = CustomManager()
self.assertIsInstance(PlainModel._base_manager, models.Manager)
self.assertIsInstance(PlainModel._default_manager, CustomManager)
class ModelWithAbstractParent(AbstractModel):
pass
self.assertIsInstance(ModelWithAbstractParent._base_manager, models.Manager)
self.assertIsInstance(ModelWithAbstractParent._default_manager, CustomManager)
class ProxyModel(PlainModel):
class Meta:
proxy = True
self.assertIsInstance(ProxyModel._base_manager, models.Manager)
self.assertIsInstance(ProxyModel._default_manager, CustomManager)
class MTIModel(PlainModel):
pass
self.assertIsInstance(MTI |
yangyingchao/TeleHealth | src/client/python/THSocket.py | Python | gpl-3.0 | 1,613 | 0.0031 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket
import THMessage
import struct
# Default server endpoint.
SERVER_PORT = 5678
SERVER_HOST = "localhost"

TMP_FLAG = 1

# Wire format of the message header; see src/public/MessageBase.h.
HEADER_FMT = "BBBBL"
HEADER_LENGTH = 16  # XXX: Update this.


class ClientSocket:
    """
    TCP client for the TeleHealth server.

    Connects to the given host/port on construction; ``isOK`` reports
    whether the connection succeeded.  ``Send()`` and ``Receive()``
    exchange fixed-size packet headers of HEADER_LENGTH bytes; refer to
    src/public/MessageBase.h for details of the message header.
    """

    def __init__(self, host=SERVER_HOST, port=SERVER_PORT):
        """Open a TCP connection to ``host:port`` and record the outcome."""
        try:
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.sock.connect((host, port))
            self.isOK = True
        except OSError:
            # Bug fix: the original set a differently spelled attribute
            # (``isOk``) on failure, so callers checking ``isOK`` got an
            # AttributeError instead of False.  OSError covers all socket
            # errors; the original bare ``except`` hid unrelated bugs.
            self.isOK = False

    def Send(self, msg):
        """
        Send the first HEADER_LENGTH bytes of ``msg`` (the packet header).

        ``msg`` must be at least HEADER_LENGTH bytes long.
        Raises RuntimeError if the connection is closed mid-send.
        """
        totalsent = 0
        while totalsent < HEADER_LENGTH:
            sent = self.sock.send(msg[totalsent:])
            if sent == 0:
                raise RuntimeError("socket connection broken")
            totalsent = totalsent + sent

    def Receive(self, msglen=HEADER_LENGTH):
        """
        Receive exactly ``msglen`` bytes (default: one packet header).

        Raises RuntimeError if the connection is closed mid-receive.
        """
        # Bug fix: the original referenced an undefined MSGLEN constant and
        # concatenated bytes chunks onto a str (Python 2 leftover).
        msg = b''
        while len(msg) < msglen:
            chunk = self.sock.recv(msglen - len(msg))
            if chunk == b'':
                raise RuntimeError("socket connection broken")
            msg = msg + chunk
        return msg
|
saullocastro/pyNastran | pyNastran/bdf/cards/elements/beam.py | Python | lgpl-3.0 | 15,046 | 0.002858 | # pylint: disable=R0904,R0902,E1101,E1103,C0111,C0302,C0103,W0101
from six import string_types
import numpy as np
from numpy.linalg import norm
from pyNastran.utils import integer_types
from pyNastran.bdf.cards.elements.bars import CBAR, LineElement
from pyNastran.bdf.bdf_interface.assign_type import (
integer, integer_or_blank, double_or_blank, integer_double_string_or_blank)
from pyNastran.bdf.field_writer_8 import set_blank_if_default
from pyNastran.bdf.field_writer_8 import print_card_8
from pyNastran.bdf.field_writer_16 import print_card_16
class CBEAM(CBAR):
"""
+-------+-----+-----+-----+-----+-----+-----+-----+----------+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+=======+=====+=====+=====+=====+=====+=====+=====+==========+
| CBEAM | EID | PID | GA | GB | X1 | X2 | X3 | OFFT/BIT |
+-------+-----+-----+-----+-----+-----+-----+-----+----------+
| | PA | PB | W1A | W2A | W3A | W1B | W2B | W3B |
+-------+-----+-----+-----+-----+-----+-----+-----+----------+
| | SA | SB | | | | | | |
+-------+-----+-----+-----+-----+-----+-----+-----+----------+
or
+-------+-----+-----+-----+-----+-----+-----+-----+----------+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+=======+=====+=====+=====+=====+=====+=====+=====+==========+
| CBEAM | EID | PID | GA | GB | G0 | | | OFFT/BIT |
+-------+-----+-----+-----+-----+-----+-----+-----+----------+
| | PA | PB | W1A | W2A | W3A | W1B | W2B | W3B |
+-------+-----+-----+-----+-----+-----+-----+-----+----------+
| | SA | SB | | | | | | |
+-------+-----+-----+-----+-----+-----+-----+-----+----------+
offt/bit are MSC specific fields
"""
type = 'CBEAM'
_field_map = {
1: 'eid', 2:'pid', 3:'ga', 4:'gb', #5:'x_g0', 6:'g1', 7:'g2',
#8:'offt',
9:'pa', 10:'pb',
17:'sa', 18:'sb',
}
def _update_field_helper(self, n, value):
if n == 11:
self.wa[0] = value
elif n == 12:
self.wa[1] = value
elif n == 13:
self.wa[2] = value
| elif n == 14:
self.wb[0] = value
elif n == 15:
self.wb[1] = value
elif n == 16:
self.wb[2] = value
else:
if self.g0 is not None:
if n == 5:
| self.g0 = value
else: # offt
msg = 'Field %r=%r is an invalid %s entry or is unsupported.' % (
n, value, self.type)
raise KeyError(msg)
else:
if n == 5:
self.x[0] = value
elif n == 6:
self.x[1] = value
elif n == 7:
self.x[2] = value
else:
msg = 'Field %r=%r is an invalid %s entry or is unsupported.' % (
n, value, self.type)
raise KeyError(msg)
def __init__(self, eid, pid, ga, gb, x, g0, offt, bit,
pa=0, pb=0, wa=None, wb=None, sa=0, sb=0, comment=''):
"""
Adds a CBEAM card
Parameters
----------
pid : int
property id
mid : int
material id
ga / gb : int
grid point at End A/B
x : List[float, float, float]
Components of orientation vector, from GA, in the displacement
coordinate system at GA (default), or in the basic coordinate system
g0 : int
Alternate method to supply the orientation vector using grid
point G0. Direction of is from GA to G0. is then transferred
to End A
offt : str; default='GGG'
Offset vector interpretation flag
None : bit is active
bit : float; default=None
Built-in twist of the cross-sectional axes about the beam axis
at end B relative to end A.
For beam p-elements ONLY!
None : offt is active
pa / pb : int; default=0
Pin Flag at End A/B. Releases the specified DOFs
wa / wb : List[float, float, float]
Components of offset vectors from the grid points to the end
points of the axis of the shear center
sa / sb : int; default=0
Scalar or grid point identification numbers for the ends A and B,
respectively. The degrees-of-freedom at these points are the
warping variables . SA and SB cannot be specified for
beam p-elements
comment : str; default=''
a comment for the card
offt/bit are MSC specific fields
"""
LineElement.__init__(self)
if comment:
self.comment = comment
if wa is None:
wa = np.zeros(3, dtype='float64')
else:
wa = np.asarray(wa)
if wb is None:
wb = np.zeros(3, dtype='float64')
else:
wb = np.asarray(wb)
self.eid = eid
self.pid = pid
self.ga = ga
self.gb = gb
self.x = x
self.g0 = g0
self.offt = offt
self.bit = bit
self.pa = pa
self.pb = pb
self.wa = wa
self.wb = wb
self.sa = sa
self.sb = sb
self._validate_input()
@classmethod
def add_card(cls, card, comment=''):
eid = integer(card, 1, 'eid')
pid = integer_or_blank(card, 2, 'pid', eid)
ga = integer(card, 3, 'ga')
gb = integer(card, 4, 'gb')
x, g0 = cls._init_x_g0(card, eid)
offt, bit = cls._init_offt_bit(card, eid)# offt doesn't exist in NX nastran
pa = integer_or_blank(card, 9, 'pa', 0)
pb = integer_or_blank(card, 10, 'pb', 0)
wa = np.array([double_or_blank(card, 11, 'w1a', 0.0),
double_or_blank(card, 12, 'w2a', 0.0),
double_or_blank(card, 13, 'w3a', 0.0)], 'float64')
wb = np.array([double_or_blank(card, 14, 'w1b', 0.0),
double_or_blank(card, 15, 'w2b', 0.0),
double_or_blank(card, 16, 'w3b', 0.0)], 'float64')
sa = integer_or_blank(card, 17, 'sa', 0)
sb = integer_or_blank(card, 18, 'sb', 0)
assert len(card) <= 19, 'len(CBEAM card) = %i\ncard=%s' % (len(card), card)
return CBEAM(eid, pid, ga, gb, x, g0, offt, bit,
pa=pa, pb=pb, wa=wa, wb=wb, sa=sa, sb=sb, comment=comment)
@classmethod
def add_op2_data(cls, data, f, comment=''):
#: .. todo:: verify
assert len(data) == 2, 'data=%s len(data)=%s' % (data, len(data))
#data = [[eid,pid,ga,gb,sa,sb, pa,pb,w1a,w2a,w3a,w1b,w2b,w3b],
# [f,g0]]
#data = [[eid,pid,ga,gb,sa,sb, pa,pb,w1a,w2a,w3a,w1b,w2b,w3b],
# [f,x1,x2,x3]]
main, aft = data
flag = aft[0]
assert f == flag, 'f=%s flag=%s' % (f, flag)
if flag == 0:
# basic cid
#data_in = [[eid, pid, ga, gb, sa, sb, pa, pb, w1a, w2a, w3a, w1b, w2b, w3b],
#[f, x1, x2, x3]]
assert len(aft) == 4, 'f=%s aft=%s len(aft)=%s' % (f, aft, len(aft))
x1, x2, x3 = aft[1:]
g0 = None
x = np.array([x1, x2, x3], dtype='float64')
elif flag == 1:
# global cid
#data_in = [[eid, pid, ga, gb, sa, sb, pa, pb, w1a, w2a, w3a, w1b, w2b, w3b],
#[f, x1, x2, x3]]
assert len(aft) == 4, 'f=%s aft=%s len(aft)=%s' % (f, aft, len(aft))
g0 = None
x1, x2, x3 = aft[1:]
x = np.array([x1, x2, x3], dtype='float64')
elif flag == 2:
# grid option
#data_in = [[eid, pid, ga, gb, sa, sb, p |
willingc/vms | vms/job/models.py | Python | gpl-2.0 | 675 | 0.001481 | from django.core.validators import RegexValidator
from django.db import models
from event.models import Event
class Job(models.Model):
    """A volunteer job attached to an event, valid over a date range."""

    id = models.AutoField(primary_key=True)
    event = models.ForeignKey(Event)
    # Job title: letters, whitespace and apostrophes only.
    # Bug fix: the previous pattern r'^[(A-Z)|(a-z)|(\s)|(\')]+$' placed
    # '(', ')' and '|' literally inside the character class, so those
    # characters were accidentally accepted in names as well.
    name = models.CharField(
        max_length=75,
        validators=[
            RegexValidator(
                r"^[A-Za-z\s']+$",
            ),
        ],
    )
    start_date = models.DateField()
    end_date = models.DateField()
    # Free-text description: letters, digits, whitespace and . , - ! '
    # (same character-class fix as for ``name`` above).
    description = models.TextField(
        blank=True,
        validators=[
            RegexValidator(
                r"^[A-Za-z0-9\s.,\-!']+$",
            ),
        ],
    )
|
adfinis-sygroup/pyapi-gitlab | gitlab_tests/pyapi-gitlab_test.py | Python | apache-2.0 | 20,830 | 0.004705 | """
pyapi-gitlab tests
"""
import unittest2 as unittest
import gitlab
import os
import time
import random
import string
try:
from Crypto.PublicKey import RSA
ssh_test = True
except ImportError:
ssh_test = False
user = os.environ.get('gitlab_user', 'root')
password = os.environ.get('gitlab_password', '5iveL!fe')
host = os.environ.get('gitlab_host', 'http://192.168.1.100')
class GitlabTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.git = gitlab.Gitlab(host=host)
cls.git.login(user=user, password=password)
name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8))
cls.project = cls.git.createproject(name=name, visibility_level="private",
import_url="https://github.com/Itxaka/pyapi-gitlab.git")
# wait a bit for the project to be fully imported
time.sleep(20)
cls.project_id = cls.project['id']
cls.user_id = cls.git.currentuser()['id']
@classmethod
def tearDownClass(cls):
cls.git.deleteproject(cls.project_id)
def test_user(self):
assert isinstance(self.git.createuser(name="test", username="test",
password="test1234", email="test@test.com",
skype="this", linkedin="that"), dict)
# get all users
assert isinstance(self.git.getusers(), list) # compatible with 2.6
assert isinstance(self.git.currentuser(), dict)
user = self.git.getusers(search="test")[0]
self.assertTrue(self.git.deleteuser(user["id"]))
# check can_create_user
user = self.git.createuser("random", "random", "random1234", "random@random.org",
can_create_group="false")
self.assertFalse(self.git.getuser(user['id'])['can_create_group'])
self.git.deleteuser(user['id'])
user = self.git.createuser("random", "random", "random1234", "random@random.org",
can_create_group="true")
self.assertTrue(self.git.getuser(user['id'])['can_create_group'])
assert isinstance(self.git.edituser(user['id'], can_create_group="false"), dict)
# Check that indeed the user details were changed
self.assertFalse(self.git.getuser(user['id'])['can_create_group'])
self.git.deleteuser(user['id'])
# get X pages
assert isinstance(self.git.getusers(page=2), list) # compatible with 2.6
assert isinstance(self.git.getusers(per_page=4), list) # compatible with 2.6
self.assertEqual(self.git.getusers(page=800), list("")) # check against empty list
self.assertTrue(self.git.getusers(per_page=43)) # check against false
def test_project(self):
# test project
assert isinstance(self.git.getprojects(), list)
assert isinstance(self.git.getprojects(page=5), list)
assert isinstance(self.git.getprojects(per_page=7), list)
assert isinstance(self.git.getproject(self.project_id), dict)
assert isinstance(self.git.getproject(self.project['path_with_namespace']), dict)
self.assertFalse(self.git.getproject("wrong"))
# test getprojectsall
assert isinstance(self.git.getprojectsall(), list)
assert isinstance(self.git.getprojectsall(page=5), list)
assert isinstance(self.git.getprojectsall(per_page=7), list)
# test getownprojects
assert isinstance(self.git.getprojectsowned(), list)
assert isinstance(self.git.getprojectsowned(page=5), list)
assert isinstance(self.git.getprojectsowned(per_page=7), list)
# test events
assert isinstance(self.git.getprojectevents(self.project_id), list)
assert isinstance(self.git.getprojectevents(self.project_id, page=3), list)
assert isinstance(self.git.getprojectevents(self.project_id, per_page=4), list)
# add-remove project members
self.assertTrue(self.git.addprojectmember(self.project_id, user_id=self.user_id, access_level="reporter"))
assert isinstance(self.git.getprojectmembers(self.project_id), list)
self.assertTrue(self.git.editprojectmember(self.project_id, user_id=self.user_id, access_level="master"))
self.assertTrue(self.git.deleteprojectmember(self.project_id, user_id=1))
# Hooks testing
assert isinstance(self.git.addprojecthook(self.project_id, "http://web.com"), dict)
assert isinstance(self.git.getprojecthooks(self.project_id), list)
assert isinstance(self.git.getprojecthook(self.project_id,
self.git.getprojecthooks(self.project_id)[0]['id']), dict)
self.assertTrue(self.git.editprojecthook(self.project_id,
self.git.getprojecthooks(self.project_id)[0]['id'], "http://another.com"))
self.assertTrue(self.git.deleteprojecthook(self.project_id,
self.git.getprojecthooks(self.project_id)[0]['id']))
# Forks testing
name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8))
newproject = self.git.createproject(name)
# set it as forker from the main project
self.git.createforkrelation(newproject["id"], self.project_id)
newproject = self.git.getproject(newproject["id"])
self.assertIn("forked_from_project", newproject)
# remove the fork relation
self.assertTrue(self.git.removeforkrelation(newproject["id"]))
newproject = self.git.getproject(newproject["id"])
with self.assertRaises(KeyError) as raises:
_ = newproject["forked_from_project"]
# test moveproject
for group in self.git.getgroups():
self.git.deletegroup(group["id"])
group = self.git.creategroup("movegroup", "movegroup")
assert isinstance(group, dict)
assert isinstance(self.git.moveproject(group["id"], newproject["id"]), dict)
project = self.git.getproject(newproject["id"])
self.assertEqual("movegroup", project["namespace"]["name"])
# Clean up the newgroup
self.git.deleteproject(newproject["id"])
# Create an actual fork of the main project
self.git.createfork(self.project_id)
def test_deploykeys(self):
keys = self.git.getdeploykeys(self.project_id)
assert isinstance(keys, list)
self.assertEqual(len(keys), 0)
if ssh_test:
name = ''.join(r | andom.choice(string.ascii_uppercase + string.digits) for _ in range(8))
rsa_key = RSA.generate(1024)
assert isinstance(self.git.adddeploykey(project_id=self.project_id, title=name,
| key=str(rsa_key.publickey().exportKey(format="OpenSSH"))), dict)
keys = self.git.getdeploykeys(self.project_id)
self.assertGreater(len(keys), 0)
key = keys[0]
assert isinstance(self.git.getdeploykey(self.project_id, key["id"]), dict)
self.assertTrue(self.git.deletedeploykey(self.project_id, key["id"]))
keys = self.git.getdeploykeys(self.project_id)
self.assertEqual(len(keys), 0)
def test_branch(self):
sha1 = self.git.getrepositorycommits(project_id=self.project_id)[0]["id"]
assert isinstance(self.git.createbranch(self.project_id, branch="deleteme", ref=sha1), dict)
self.assertTrue(self.git.deletebranch(self.project_id, branch="deleteme"))
assert isinstance(self.git.getbranches(self.project_id), list)
assert isinstance(self.git.getbranch(self.project_id, branch="develop"), dict)
self.assertTrue(self.git.protectbranch(self.project_id, branch="develop"))
self.assertTrue(self.git.unprotectbranch(self.project_id, branch="develop"))
def test_sshkeys(self):
assert isinstance(self.git.getsshkeys(), list)
self.assertEquals(len(self.git.getsshkeys()), 0)
# not working due a bug? in pycrypto: https://github.com/dlitz/py |
centricular/meson | mesonbuild/scripts/vcstagger.py | Python | apache-2.0 | 1,568 | 0.005102 | #!/usr/bin/env python3
# Copyright 2015-2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys, os, subprocess, re
def config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, cmd):
    """Substitute *replace_string* in *infile* with a VCS tag, writing *outfile*.

    The tag is the first capture group of *regex_selector* applied to the
    output of *cmd* (run inside *source_dir*); on any failure (command not
    found, non-zero exit, no regex match) *fallback* is used instead.  The
    output file is only rewritten when its content would actually change, so
    build systems do not see a spurious timestamp update.
    """
    try:
        raw = subprocess.check_output(cmd, cwd=source_dir).decode()
        tag = re.search(regex_selector, raw).group(1).strip()
    except Exception:
        # Deliberately broad: any problem extracting the tag means we fall
        # back to the static string so the build can proceed.
        tag = fallback
    with open(infile) as src:
        generated = src.read().replace(replace_string, tag)
    needs_update = True
    if os.path.exists(outfile):
        with open(outfile) as dst:
            needs_update = dst.read() != generated
    if needs_update:
        with open(outfile, 'w') as dst:
            dst.write(generated)
def run(args):
    """Entry point: first six args are positional settings, the rest is the
    VCS command to execute."""
    infile, outfile, fallback, source_dir, replace_string, regex_selector, *command = args
    config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, command)
# Invoked by Meson as a standalone helper script; run() returns None, so the
# exit status is always 0 unless an exception propagates.
if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
|
kohr-h/odl | odl/test/operator/pspace_ops_test.py | Python | mpl-2.0 | 6,845 | 0 | # Copyright 2014-2017 The ODL contributors
#
# This file is part of ODL.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import division
import pytest
import odl
from odl.util.testutils import all_almost_equal, simple_fixture
# Parametrized fixture: each test below runs once per base operator, covering
# a plain identity plus the three product-space wrappers around it.
base_op = simple_fixture(
    'base_op',
    [odl.IdentityOperator(odl.rn(3)),
     odl.BroadcastOperator(odl.IdentityOperator(odl.rn(3)), 2),
     odl.ReductionOperator(odl.IdentityOperator(odl.rn(3)), 2),
     odl.DiagonalOperator(odl.IdentityOperator(odl.rn(3)), 2),
     ],
    fmt=' {name}={value.__class__.__name__}')
def test_pspace_op_init(base_op):
    """Test initialization with different base operators.

    Each case pairs an operator matrix with the expected powers of the base
    operator's domain and range in the resulting product spaces.
    """
    A = base_op
    cases = [
        ([[A]], 1, 1),
        ([[A, A]], 2, 1),
        ([[A], [A]], 1, 2),
        ([[A, 0], [0, A]], 2, 2),
        ([[A, None], [None, A]], 2, 2),  # None and 0 both mean "empty slot"
    ]
    for matrix, dom_power, ran_power in cases:
        op = odl.ProductSpaceOperator(matrix)
        assert op.domain == A.domain ** dom_power
        assert op.range == A.range ** ran_power
def test_pspace_op_derivative(base_op):
    """Test derivatives with different base operators."""
    A = base_op

    # d/dx (A + 1) = A: the constant shift vanishes in the derivative.
    op = odl.ProductSpaceOperator([[A + 1]])
    true_deriv = odl.ProductSpaceOperator([[A]])
    deriv = op.derivative(op.domain.zero())
    assert deriv.domain == op.domain
    assert deriv.range == op.range
    x = op.domain.one()
    assert all_almost_equal(deriv(x), true_deriv(x))

    # Same for an affine row operator: derivative keeps only linear parts.
    op = odl.ProductSpaceOperator([[A + 1, 2 * A - 1]])
    deriv = op.derivative(op.domain.zero())
    true_deriv = odl.ProductSpaceOperator([[A, 2 * A]])
    assert deriv.domain == op.domain
    assert deriv.range == op.range
    x = op.domain.one()
    assert all_almost_equal(deriv(x), true_deriv(x))
def test_pspace_op_adjoint(base_op):
    """Test adjoints with different base operators."""
    A = base_op

    # Adjoint of a 1x1 block operator is the 1x1 block of the adjoint.
    op = odl.ProductSpaceOperator([[A]])
    true_adj = odl.ProductSpaceOperator([[A.adjoint]])
    adj = op.adjoint
    assert adj.domain == op.range
    assert adj.range == op.domain
    y = op.range.one()
    assert all_almost_equal(adj(y), true_adj(y))

    # Adjoint of a row operator is the column of scaled adjoints (transpose).
    op = odl.ProductSpaceOperator([[2 * A, -A]])
    true_adj = odl.ProductSpaceOperator([[2 * A.adjoint],
                                         [-A.adjoint]])
    adj = op.adjoint
    assert adj.domain == op.range
    assert adj.range == op.domain
    y = op.range.one()
    assert all_almost_equal(adj(y), true_adj(y))
def test_pspace_op_weighted_init():
    """A weighted product-space range is unsupported and must raise."""
    space = odl.rn(3)
    weighted_range = odl.ProductSpace(space, 2, weighting=[1, 2])
    ident = odl.IdentityOperator(space)
    with pytest.raises(NotImplementedError):
        odl.ProductSpaceOperator([[ident], [0]], range=weighted_range)
def test_pspace_op_sum_call():
    """A [[A, A]] row operator sums its two input components."""
    r3 = odl.rn(3)
    A = odl.IdentityOperator(r3)
    op = odl.ProductSpaceOperator([[A, A]])

    x = r3.element([1, 2, 3])
    y = r3.element([7, 8, 9])
    z = op.domain.element([x, y])

    # Both the allocating and the out-parameter call paths are checked.
    assert all_almost_equal(op(z)[0], x + y)
    assert all_almost_equal(op(z, out=op.range.element())[0], x + y)
def test_pspace_op_project_call():
    """A [[A], [A]] column operator copies its single input to both outputs."""
    r3 = odl.rn(3)
    A = odl.IdentityOperator(r3)
    op = odl.ProductSpaceOperator([[A],
                                   [A]])

    x = r3.element([1, 2, 3])
    z = op.domain.element([x])

    # Both output components equal the input, via both call paths.
    assert x == op(z)[0]
    assert x == op(z, out=op.range.element())[0]
    assert x == op(z)[1]
    assert x == op(z, out=op.range.element())[1]
def test_pspace_op_diagonal_call():
    """A diagonal identity block operator acts as the identity."""
    r3 = odl.rn(3)
    A = odl.IdentityOperator(r3)
    op = odl.ProductSpaceOperator([[A, 0],
                                   [0, A]])

    x = r3.element([1, 2, 3])
    y = r3.element([7, 8, 9])
    z = op.domain.element([x, y])

    assert z == op(z)
    assert z == op(z, out=op.range.element())
def test_pspace_op_swap_call():
    """An anti-diagonal identity block operator swaps the two components."""
    r3 = odl.rn(3)
    A = odl.IdentityOperator(r3)
    op = odl.ProductSpaceOperator([[0, A],
                                   [A, 0]])

    x = r3.element([1, 2, 3])
    y = r3.element([7, 8, 9])
    z = op.domain.element([x, y])
    result = op.domain.element([y, x])

    assert result == op(z)
    assert result == op(z, out=op.range.element())
def test_comp_proj():
    """ComponentProjection with an int index selects that component."""
    r3 = odl.rn(3)
    r3xr3 = odl.ProductSpace(r3, 2)

    x = r3xr3.element([[1, 2, 3],
                       [4, 5, 6]])

    proj_0 = odl.ComponentProjection(r3xr3, 0)
    assert x[0] == proj_0(x)
    assert x[0] == proj_0(x, out=proj_0.range.element())

    proj_1 = odl.ComponentProjection(r3xr3, 1)
    assert x[1] == proj_1(x)
    assert x[1] == proj_1(x, out=proj_1.range.element())
def test_comp_proj_slice():
    """ComponentProjection with a slice selects a contiguous sub-product."""
    r3 = odl.rn(3)
    r33 = odl.ProductSpace(r3, 3)

    x = r33.element([[1, 2, 3],
                     [4, 5, 6],
                     [7, 8, 9]])

    proj = odl.ComponentProjection(r33, slice(0, 2))
    assert x[0:2] == proj(x)
    assert x[0:2] == proj(x, out=proj.range.element())
def test_comp_proj_indices():
    """ComponentProjection with an index list selects those components."""
    r3 = odl.rn(3)
    r33 = odl.ProductSpace(r3, 3)

    x = r33.element([[1, 2, 3],
                     [4, 5, 6],
                     [7, 8, 9]])

    proj = odl.ComponentProjection(r33, [0, 2])
    assert x[[0, 2]] == proj(x)
    assert x[[0, 2]] == proj(x, out=proj.range.element())
def test_comp_proj_adjoint():
    """The adjoint of a component projection zero-pads the other components."""
    r3 = odl.rn(3)
    r3xr3 = odl.ProductSpace(r3, 2)

    x = r3.element([1, 2, 3])

    result_0 = r3xr3.element([[1, 2, 3],
                              [0, 0, 0]])
    proj_0 = odl.ComponentProjection(r3xr3, 0)
    assert result_0 == proj_0.adjoint(x)
    assert result_0 == proj_0.adjoint(x, out=proj_0.domain.element())

    result_1 = r3xr3.element([[0, 0, 0],
                              [1, 2, 3]])
    proj_1 = odl.ComponentProjection(r3xr3, 1)
    assert result_1 == proj_1.adjoint(x)
    assert result_1 == proj_1.adjoint(x, out=proj_1.domain.element())
def test_comp_proj_adjoint_slice():
    """Adjoint of a slice projection zero-pads the components not selected."""
    r3 = odl.rn(3)
    r33 = odl.ProductSpace(r3, 3)

    x = r33[0:2].element([[1, 2, 3],
                          [4, 5, 6]])

    result = r33.element([[1, 2, 3],
                          [4, 5, 6],
                          [0, 0, 0]])
    proj = odl.ComponentProjection(r33, slice(0, 2))
    assert result == proj.adjoint(x)
    assert result == proj.adjoint(x, out=proj.domain.element())
# Allow running this test module directly via ODL's pytest wrapper.
if __name__ == '__main__':
    odl.util.test_file(__file__)
|
ptroja/spark2014 | docs/lrm/review/release 0_3/tn_local_check.py | Python | gpl-3.0 | 1,182 | 0.005922 | #!/usr/bin/env python
""" This does mostly the same as the review token commit hook,
but is designed to run locally.
It will echo to standard out a fixed up version of the
review token given to it; this can be used to quickly
apply any format changes to a token (such as new fields).
It will then echo to standard error a list of problems
found (some of which will have been corrected in the
echoed output).
The basic idea is thus:
$ tn_local_check.py <token>.tn > foo
Check if you're happy with it all and then:
$ mv foo <token>.tn
"""
import sys
import os
from | tn_lib import parse_tn, write_tn
# Deal with a bad command line
if len(sys.argv) != 2:
    print >> sys.stderr, "You will need to specify a file to parse."
    sys.exit(1)

# Parse TN
# NOTE(review): the token file is read eagerly and never explicitly closed;
# acceptable for a short-lived Python 2 CLI script.
tmp = parse_tn(os.path.basename(sys.argv[1]), open(sys.argv[1]).read())

# Print out corrected TN. The fixed-up token goes to stdout so it can be
# redirected into a replacement file, as described in the module docstring.
print write_tn(tmp).rstrip()

# Report on any errors. These go to stderr so they do not pollute the
# corrected token emitted on stdout.
if len(tmp["errors"]):
    print >> sys.stderr, "-" * 80
    print >> sys.stderr, "The review token %s contains "\
        "the following errors:" % tmp["ticket"]
    for e in tmp["errors"]:
        print >> sys.stderr, " - %s" % e
|
IL2HorusTeam/il2fb-events-parser | docs/conf.py | Python | lgpl-3.0 | 8,504 | 0.00588 | # coding: utf-8
#
# IL-2 FB Events Parser documentation build configuration file, created by
# sphinx-quickstart on Sat Nov 22 10:43:38 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
    'sphinx.ext.mathjax',
]

# Render `.. todo::` directives in the built docs (sphinx.ext.todo).
todo_include_todos = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'IL-2 FB Events Parser'
copyright = u'2014, Alexander Oblovatniy'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# NOTE(review): keep these in sync with the package version -- TODO confirm
# the release process updates both.
# The short X.Y version.
version = '0.11'
# The full version, including alpha/beta/rc tags.
release = '0.11.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    # Sidebar width in pixels; widened to fit the long module names.
    'sidebarwidth': 340,
}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'IL-2FBEventsParserdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'IL-2FBEventsParser.tex', u'IL-2 FB Events Parser Documentation',
     u'Alexander Oblovatniy', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'il-2fbeventsparser', u'IL-2 FB Events Parser Documentation',
     [u'Alexander Oblovatniy'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'IL-2FBEventsParser', u'IL-2 FB Events Parser Documentation',
     u'Alexander Oblovatniy', 'IL-2FBEventsParser', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append a |
grandcat/chordentlich | code/helpers/test_iniParser.py | Python | apache-2.0 | 423 | 0.014184 | #!/usr/bin/python3
# Note: Always use unittest.sh | to run the tests!
import unittest
import imp
from helpers.iniParser import IniParser
class TestIniParser(unittest.TestCase):
    """Checks that IniParser reads values from configExample.ini."""

    def setUp(self):
        # A fresh parser per test keeps the tests independent of each other.
        self.parser = IniParser("configExample.ini")

    def test_property_get(self):
        """Known keys of the [DHT] section resolve to their string values."""
        self.assertEqual(self.parser.get("PORT", "DHT"), '4424')
        self.assertEqual(self.parser.get("HOSTNAME", "DHT"), "127.0.0.1")


if __name__ == '__main__':
    unittest.main()
grant-olson/pyasm | __init__.py | Python | bsd-3-clause | 327 | 0.012232 | from x86asm import codePackageFromFile
from x86cpToMemory import CpToMemory
| from pythonConstants import PythonConstants
import cStringIO
import excmem
def pyasm(scope,s):
    """Assemble the x86 source string *s* and bind the resulting functions
    into *scope*.

    NOTE(review): assumes *s* is assembly text accepted by
    codePackageFromFile and that *scope* is a namespace mapping
    (e.g. globals()) -- confirm against the x86asm module.
    """
    # Parse the assembly into a code package, place it into executable
    # memory, then expose its entry points as Python callables.
    cp = codePackageFromFile(cStringIO.StringIO(s),PythonConstants)
    mem = CpToMemory(cp)
    mem.MakeMemory()
    mem.BindPythonFunctions(scope)
|
google-research/google-research | gfsa/training/train_util.py | Python | apache-2.0 | 14,842 | 0.007681 | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Training helper functions that are shared across tasks."""
import contextlib
import functools
import operator
import signal
import typing
from typing import Any, Callable, Dict, Iterable, Optional, Sequence, Tuple, Union
from absl import logging
import dataclasses
import flax
import gin
import jax
import jax.numpy as jnp
import numpy as np
import optax
from gfsa import jax_util
from gfsa.datasets import data_loading
@jax_util.register_dataclass_pytree
@dataclasses.dataclass
class ExampleWithMetadata:
  """Stores an example or batch of examples.

  Attributes:
    epoch: Integer representing the epoch that this example comes from.
    example_id: Integer ID uniquely identifying this example in the dataset.
    example: The example itself.
    mask: Array that is True for actual examples, False for padding.
    static_metadata: Metadata about this example or batch that should result in
      a new `jit` XLA computation (i.e. padded shapes).
  """
  epoch: Any
  example_id: Any
  example: Any
  # Scalar True broadcasts to "everything is real data" when no explicit
  # padding mask is provided.
  mask: jax_util.NDArray = np.array(True)
  static_metadata: Any = None
@jax_util.register_dataclass_pytree
@dataclasses.dataclass
class RatioMetric:
  """A ratio, where numerator and denominator should be summed separately.

  Summing the two parts independently (instead of averaging per-device
  ratios) gives the correct aggregate ratio across devices; see
  `_parallel_train_step`.

  Attributes:
    numerator: Numerator of the metric.
    denominator: Denominator of the metric.
  """
  numerator: jax_util.NDArray
  denominator: jax_util.NDArray
# A metric is either a bare scalar/array (aggregated with nanmean) or a
# RatioMetric (aggregated by summing numerator and denominator separately).
MetricValue = Union[float, jax_util.NDArray, RatioMetric]

# A loss function is a callable (model, example, static_metadata)
# -> (loss, metrics)
# pyformat: disable
LossFunWithMetrics = Callable[
    [Any, Any, Any],
    Tuple[jax_util.NDArray, Dict[str, MetricValue]]]
# pyformat: enable

# A validation function is a callable (replicated_model) -> (objective, metrics)
# where model is a tree of ShardedDeviceArrays, and objective is the value we
# want to make decrease.
ValidationFunction = Callable[[Any], Tuple[float, Dict[str, MetricValue]]]
def device_broadcast(x, num_devices):
  """Replicate `x` onto `num_devices` devices via a trivial pmap.

  The result has a leading axis of size `num_devices` with a copy of `x`
  on each device.
  """
  def _replicate(_):
    return x

  return jax.pmap(_replicate)(jnp.arange(num_devices))
def _parallel_train_step(
    optimizer,
    batched_examples,
    static_batch_metadata,
    loss_fn,
    max_global_norm = None,
    **optimizer_hyper_params,
):
  """Train the model for one step in parallel across devices.

  Args:
    optimizer: Optimizer that tracks the model and parameter state. Should be
      replicated to each device, i.e. should contain ShardedDeviceArrays with a
      leading axis (num_devices, ...) but with the same content on each device.
    batched_examples: A structure of NDArrays representing a batch of examples.
      Should have two leading batch dimensions: (num_devices,
      batch_size_per_device, ...)
    static_batch_metadata: Metadata about this batch, which will be shared
      across all batched examples. Each value of this results in a separate
      XLA-compiled module.
    loss_fn: Task-specific non-batched loss function to apply. Should take the
      current model (optimizer.target) and an example from batched_examples, and
      return a tuple of the current loss (as a scalar) and a dictionary from
      string names to metric values (also scalars, or RatioMetrics).
    max_global_norm: Maximum global norm to clip gradients to. Should be a
      scalar, which will be broadcast automatically.
    **optimizer_hyper_params: Hyperparameters to pass to the optimizer's
      `apply_gradient` function, which will be broadcast across devices
      automatically.

  Returns:
    Tuple (updated_optimizer, grads_ok, metrics, agg_grads). Metrics will be as
    returned by loss_fn, with an extra element "loss". All metrics will be
    averaged across all elements of the batch. Both optimizer and metrics will
    contain ShardedDeviceArrays that are identical across devices. grads_ok
    will be a replicated bool ndarray that is True if the gradients were
    finite; note that the update is applied unconditionally, so callers
    should consult grads_ok to decide whether to keep it.
  """

  def batched_loss_fn(model):
    """Apply loss function across a batch of examples."""
    # vmap axes: model is shared (None), examples are batched (0), and the
    # static metadata is shared (None).
    loss, metrics = jax.vmap(loss_fn, (None, 0, None))(model, batched_examples,
                                                       static_batch_metadata)
    return jnp.mean(loss), metrics

  # Compute gradients of loss, along with metrics.
  (loss, metrics), grads = jax.value_and_grad(
      batched_loss_fn, has_aux=True)(
          optimizer.target)
  metrics["loss"] = loss
  # Exchange average gradients and metrics across devices.
  agg_grads = jax.lax.pmean(grads, "devices")
  agg_metrics = {}
  for k, v in metrics.items():
    if isinstance(v, RatioMetric):
      # Sum numerator and denominator separately so the ratio is computed
      # over the full cross-device batch.
      num = jax.lax.psum(jnp.sum(v.numerator), "devices")
      denom = jax.lax.psum(jnp.sum(v.denominator), "devices")
      new_value = num / denom
    else:
      # Use nanmean to aggregate bare floats.
      new_value = jnp.nanmean(jax.lax.all_gather(v, "devices"))
    agg_metrics[k] = new_value
  # Compute global norm and possibly clip.
  global_norm = optax.global_norm(agg_grads)
  agg_metrics["gradient_global_norm"] = global_norm
  if max_global_norm is not None:
    should_clip = global_norm > max_global_norm
    # Rescale every leaf by the same factor so the direction is preserved.
    agg_grads = jax.tree_map(
        lambda g: jnp.where(should_clip, g * max_global_norm / global_norm, g),
        agg_grads)
    agg_metrics["gradient_was_clipped"] = should_clip.astype("float32")
  # Check for non-finite gradients.
  grads_ok = jnp.all(
      jnp.stack([jnp.all(jnp.isfinite(x)) for x in jax.tree_leaves(agg_grads)]))
  # Apply updates.
  updated_optimizer = optimizer.apply_gradient(agg_grads,
                                               **optimizer_hyper_params)
  return updated_optimizer, grads_ok, agg_metrics, agg_grads
def _build_parallel_train_step():
  """Builds an accelerated version of the train step function."""
  # We need to wrap and unwrap so that the final function can be called with
  # keyword arguments, but we still maintain the proper axes.
  # pmap cannot accept **kwargs, so the hyperparameters travel as a single
  # positional dict (broadcast via in_axes=None); loss_fn and the static
  # metadata are compile-time constants (static_broadcasted_argnums).

  @functools.partial(
      jax.pmap,
      axis_name="devices",
      in_axes=(0, 0, None, None, None, None),
      static_broadcasted_argnums=(2, 3))
  def wrapped(optimizer, batched_examples, static_batch_metadata, loss_fn,
              max_global_norm, optimizer_hyper_params):
    return _parallel_train_step(optimizer, batched_examples,
                                static_batch_metadata, loss_fn, max_global_norm,
                                **optimizer_hyper_params)

  @functools.wraps(_parallel_train_step)
  def wrapper(optimizer, batched_examples, static_batch_metadata, loss_fn,
              max_global_norm, **optimizer_hyper_params):
    return wrapped(optimizer, batched_examples, static_batch_metadata, loss_fn,
                   max_global_norm, optimizer_hyper_params)

  return wrapper
# The primary version of the training step, with the associated jit cache.
# Built once at import time so all callers share one compiled pmap.
parallel_train_step = _build_parallel_train_step()
def warmup_train_step(
optimizer,
batched_example,
static_batch_metadata,
loss_fn,
optimizer_is_replicated = False,
profile = False,
runner=None,
):
"""Run a fake train step to warm up JIT cache.
Args:
optimizer: Optimizer that tracks the model and parameter state.
batched_example: A structure of NDArrays representing a batch of examples.
static_batch_metadata: Metadata about the batch, which will be shared across
all batched examples.
loss_fn: Task-specific non-batched loss function to apply. Should take the
current model (optimizer.target) and an example from batched_examples, and
return a |
elkeschaper/tral | tral/conftest.py | Python | gpl-2.0 | 146 | 0 | import pytest
def pytest_runtest_setup(item):
    """Pytest hook: skip any test carrying the ``notfixed`` marker."""
    if 'notfixed' not in item.keywords:
        return
    pytest.skip("Skipping tests that are not fixed yet.")
|
BlackPole/bp-enigma2 | lib/python/Components/Converter/ServiceInfo.py | Python | gpl-2.0 | 6,815 | 0.028467 | from Components.Converter.Converter import Converter
from enigma import iServiceInformation, iPlayableService
from Components.Element import cached
from Tools.Transponder import ConvertToHumanReadable
class ServiceInfo(Converter, object):
	"""Skin converter exposing low-level playback-service information
	(PIDs, video geometry, crypt status, tuner frequency, ...).

	The converter type is selected by the skin via a string key; each key
	maps to an internal type id plus the player events that should trigger
	a refresh of the widget.
	"""
	HAS_TELETEXT = 0
	IS_MULTICHANNEL = 1
	IS_CRYPTED = 2
	IS_WIDESCREEN = 3
	SUBSERVICES_AVAILABLE = 4
	XRES = 5
	YRES = 6
	APID = 7
	VPID = 8
	PCRPID = 9
	PMTPID = 10
	TXTPID = 11
	TSID = 12
	ONID = 13
	SID = 14
	FRAMERATE = 15
	TRANSFERBPS = 16
	HAS_HBBTV = 17
	AUDIOTRACKS_AVAILABLE = 18
	SUBTITLES_AVAILABLE = 19
	FREQ_INFO = 20

	def __init__(self, type):
		Converter.__init__(self, type)
		# Map the skin-facing key to (type id, events of interest).
		# NOTE(review): "HasTelext" (sic) is the historical skin key; do not
		# "fix" the spelling or existing skins break.
		self.type, self.interesting_events = {
			"HasTelext": (self.HAS_TELETEXT, (iPlayableService.evUpdatedInfo,)),
			"IsMultichannel": (self.IS_MULTICHANNEL, (iPlayableService.evUpdatedInfo,)),
			"IsCrypted": (self.IS_CRYPTED, (iPlayableService.evUpdatedInfo,)),
			"IsWidescreen": (self.IS_WIDESCREEN, (iPlayableService.evVideoSizeChanged,)),
			"SubservicesAvailable": (self.SUBSERVICES_AVAILABLE, (iPlayableService.evUpdatedEventInfo,)),
			"VideoWidth": (self.XRES, (iPlayableService.evVideoSizeChanged,)),
			"VideoHeight": (self.YRES, (iPlayableService.evVideoSizeChanged,)),
			"AudioPid": (self.APID, (iPlayableService.evUpdatedInfo,)),
			"VideoPid": (self.VPID, (iPlayableService.evUpdatedInfo,)),
			"PcrPid": (self.PCRPID, (iPlayableService.evUpdatedInfo,)),
			"PmtPid": (self.PMTPID, (iPlayableService.evUpdatedInfo,)),
			"TxtPid": (self.TXTPID, (iPlayableService.evUpdatedInfo,)),
			"TsId": (self.TSID, (iPlayableService.evUpdatedInfo,)),
			"OnId": (self.ONID, (iPlayableService.evUpdatedInfo,)),
			"Sid": (self.SID, (iPlayableService.evUpdatedInfo,)),
			"Framerate": (self.FRAMERATE, (iPlayableService.evVideoSizeChanged,iPlayableService.evUpdatedInfo,)),
			"TransferBPS": (self.TRANSFERBPS, (iPlayableService.evUpdatedInfo,)),
			"HasHBBTV": (self.HAS_HBBTV, (iPlayableService.evUpdatedInfo,iPlayableService.evHBBTVInfo,)),
			"AudioTracksAvailable": (self.AUDIOTRACKS_AVAILABLE, (iPlayableService.evUpdatedInfo,)),
			"SubtitlesAvailable": (self.SUBTITLES_AVAILABLE, (iPlayableService.evUpdatedInfo,)),
			"Freq_Info": (self.FREQ_INFO, (iPlayableService.evUpdatedInfo,)),
		}[type]

	def getServiceInfoString(self, info, what, convert = lambda x: "%d" % x):
		"""Fetch an info field as display text.

		-1 means "not available"; -2 means the field is string-valued and is
		fetched via getInfoString; anything else is formatted by *convert*.
		"""
		v = info.getInfo(what)
		if v == -1:
			return "N/A"
		if v == -2:
			return info.getInfoString(what)
		return convert(v)

	@cached
	def getBoolean(self):
		service = self.source.service
		info = service and service.info()
		if not info:
			return False

		if self.type == self.HAS_TELETEXT:
			tpid = info.getInfo(iServiceInformation.sTXTPID)
			return tpid != -1
		elif self.type == self.IS_MULTICHANNEL:
			# FIXME. but currently iAudioTrackInfo doesn't provide more information.
			audio = service.audioTracks()
			if audio:
				n = audio.getNumberOfTracks()
				idx = 0
				while idx < n:
					i = audio.getTrackInfo(idx)
					description = i.getDescription();
					if "AC3" in description or "AC-3" in description or "DTS" in description:
						return True
					idx += 1
			return False
		elif self.type == self.IS_CRYPTED:
			return info.getInfo(iServiceInformation.sIsCrypted) == 1
		elif self.type == self.IS_WIDESCREEN:
			# NOTE(review): magic values are DVB aspect-ratio codes for 16:9 /
			# 2.21:1 variants -- confirm against the enigma2 aspect table.
			return info.getInfo(iServiceInformation.sAspect) in (3, 4, 7, 8, 0xB, 0xC, 0xF, 0x10)
		elif self.type == self.SUBSERVICES_AVAILABLE:
			subservices = service.subServices()
			return subservices and subservices.getNumberOfSubservices() > 0
		elif self.type == self.HAS_HBBTV:
			return info.getInfoString(iServiceInformation.sHBBTVUrl) != ""
		elif self.type == self.AUDIOTRACKS_AVAILABLE:
			audio = service.audioTracks()
			return audio and audio.getNumberOfTracks() > 1
		elif self.type == self.SUBTITLES_AVAILABLE:
			subtitle = service and service.subtitle()
			subtitlelist = subtitle and subtitle.getSubtitleList()
			if subtitlelist:
				return len(subtitlelist) > 0
			return False

	boolean = property(getBoolean)

	@cached
	def getText(self):
		service = self.source.service
		info = service and service.info()
		if not info:
			return ""

		if self.type == self.XRES:
			return self.getServiceInfoString(info, iServiceInformation.sVideoWidth)
		elif self.type == self.YRES:
			return self.getServiceInfoString(info, iServiceInformation.sVideoHeight)
		elif self.type == self.APID:
			return self.getServiceInfoString(info, iServiceInformation.sAudioPID)
		elif self.type == self.VPID:
			return self.getServiceInfoString(info, iServiceInformation.sVideoPID)
		elif self.type == self.PCRPID:
			return self.getServiceInfoString(info, iServiceInformation.sPCRPID)
		elif self.type == self.PMTPID:
			return self.getServiceInfoString(info, iServiceInformation.sPMTPID)
		elif self.type == self.TXTPID:
			return self.getServiceInfoString(info, iServiceInformation.sTXTPID)
		elif self.type == self.TSID:
			return self.getServiceInfoString(info, iServiceInformation.sTSID)
		elif self.type == self.ONID:
			return self.getServiceInfoString(info, iServiceInformation.sONID)
		elif self.type == self.SID:
			return self.getServiceInfoString(info, iServiceInformation.sSID)
		elif self.type == self.FRAMERATE:
			# Frame rate is reported in mHz; round to whole fps.
			return self.getServiceInfoString(info, iServiceInformation.sFrameRate, lambda x: "%d fps" % ((x+500)/1000))
		elif self.type == self.TRANSFERBPS:
			return self.getServiceInfoString(info, iServiceInformation.sTransferBPS, lambda x: "%d kB/s" % (x/1024))
		elif self.type == self.FREQ_INFO:
			# Build a one-line tuner summary: frequency, polarization,
			# symbol rate and FEC; missing fields degrade to empty strings.
			feinfo = service.frontendInfo()
			if feinfo is None:
				return ""
			feraw = feinfo.getAll(False)
			if feraw is None:
				return ""
			fedata = ConvertToHumanReadable(feraw)
			if fedata is None:
				return ""
			frequency = fedata.get("frequency")
			if frequency:
				frequency = str(frequency / 1000)
			sr_txt = "Sr:"
			polarization = fedata.get("polarization_abbreviation")
			if polarization is None:
				polarization = ""
			symbolrate = str(int(fedata.get("symbol_rate", 0) / 1000))
			if symbolrate == "0":
				sr_txt = ""
				symbolrate = ""
			fec = fedata.get("fec_inner")
			if fec is None:
				fec = ""
			out = "Freq: %s %s %s %s %s" % (frequency, polarization, sr_txt, symbolrate, fec)
			return out
		return ""

	text = property(getText)

	@cached
	def getValue(self):
		service = self.source.service
		info = service and service.info()
		if not info:
			return -1

		if self.type == self.XRES:
			return info.getInfo(iServiceInformation.sVideoWidth)
		if self.type == self.YRES:
			return info.getInfo(iServiceInformation.sVideoHeight)
		if self.type == self.FRAMERATE:
			return info.getInfo(iServiceInformation.sFrameRate)
		return -1

	value = property(getValue)

	def changed(self, what):
		# Only refresh on the events registered for this converter type.
		if what[0] != self.CHANGED_SPECIFIC or what[1] in self.interesting_events:
			Converter.changed(self, what)
|
datawire/mdk | functionaltests/source/continue_trace.py | Python | apache-2.0 | 330 | 0.00303 | "" | "Write some logs."""
from __future__ import print_function
import sys
import time
from mdk import start
mdk = start()
def main():
    """Join an existing MDK trace (context passed as argv[1]) and log to it."""
    context = sys.argv[1]
    session = mdk.join(context)
    session.info("process2", "world")
    time.sleep(5) # make sure it's written
    mdk.stop()

if __name__ == '__main__':
    main()
|
simondlevy/m021v4l2 | opencv/python/lic570_capture.py | Python | gpl-3.0 | 1,319 | 0.002274 | #!/usr/bin/env python3
'''
lic570_capture.py : capture frames from Leopard Imaging LI-USB30-C570 camera and display them using OpenCV
Copyright (C) 2017 Simon D. Levy
This file is part of M021_V4L2.
M021_V4L2 is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
M021_V4L2 is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with M021_V4L2. If not, see <http://www.gnu.org/licenses/>.
'''
import cv2
from m021v4l2 import Capture1600x1200
from time import time

# Open the camera and time the whole capture session.
camera = Capture1600x1200()
session_start = time()

# Show frames until the user presses ESC (keycode 27).
while True:
    _, frame = camera.read()
    cv2.imshow('LI-USB30-M021', frame)
    if cv2.waitKey(1) & 0xFF == 27:
        break

# Report the effective capture rate.
frames = camera.getCount()
seconds = time() - session_start
print('%d frames in %3.2f seconds = %3.2f fps' % (frames, seconds, frames / seconds))

# Tear down the display window.
cv2.destroyAllWindows()
|
Naoto-Imamachi/MIRAGE | scripts/module/preparation/AA2_add_miRNA_infor_miR-155_rev_seed.py | Python | mit | 1,046 | 0.011472 | #!/usr/bin/env python
import re
from operator import itemgetter
# Corruption fix: the original had dataset-separator artifacts splitting
# 'input_file' and the '..._vs_RefSeq_NM_...' path; tokens restored.

# Differential-expression table for RefSeq isoforms under miR-155
# overexpression; used below to annotate MIRAGE target predictions.
ref_file = open('../../../data/RNA-seq_miR-124_miR-155_transfected_HeLa/gene_exp_miR-155_overexpression_RefSeq_Rep_isoforms.diff', 'r')
input_file = open('../../../result/mirage_output_rev_seed_miR-155_vs_RefSeq_NM_2015-07-30.txt', 'r')
output_file = open('../../../result/mirage_output_rev_seed_miR-155_vs_RefSeq_NM_2015-07-30_miR-155_overexpression.result', 'w')

# Build RefSeq ID -> expression record map; keep the header row separately.
ref_dict = {}
header = ''
for line in ref_file:
    line = line.rstrip()
    data = line.split("\t")
    if data[0] == 'gr_id':
        header = line
        continue
    refid = data[2]
    ref_dict[refid] = line

# Inner-join each MIRAGE prediction with its expression record on the
# RefSeq ID (second column of the MIRAGE file); header rows are joined first.
for line in input_file:
    line = line.rstrip()
    data = line.split("\t")
    if data[0] == 'miRNA_name_id':
        print(header, line, sep="\t", end="\n", file=output_file)
        continue
    refid = data[1]
    if refid in ref_dict:
        print(ref_dict[refid], line, sep="\t", end="\n", file=output_file)

ref_file.close()
input_file.close()
output_file.close()
|
gmathers/iii-addons | iii-repair/__init__.py | Python | agpl-3.0 | 21 | 0.047619 | import | iii_mrp_r | epair |
evernym/zeno | plenum/common/messages/message_base.py | Python | apache-2.0 | 5,968 | 0.000168 | from collections import OrderedDict
from operator import itemgetter
from typing import Mapping, Dict
from plenum.common.types import f
from plenum.common.constants import OP_FIELD_NAME, SCHEMA_IS_STRICT
from plenum.common.exceptions import MissingProtocolVersionError
from plenum.common.messages.fields import FieldValidator
class MessageValidator(FieldValidator):
    """Validates a message dict against an ordered field schema.

    Corruption fix: two lines were split by stray dataset separators
    (``field, value))`` in _raise_unknown_fields and
    ``self.__error_msg_prefix`` in _raise_invalid_message); restored.
    """

    # the schema has to be an ordered iterable because the message class
    # can be create with positional arguments __init__(*args)
    schema = ()
    schema_is_strict = SCHEMA_IS_STRICT

    def __init__(self, schema_is_strict=SCHEMA_IS_STRICT, optional: bool = False):
        self.schema_is_strict = schema_is_strict
        # TODO INDY-2072 test optional
        super().__init__(optional=optional)

    def validate(self, dct):
        """Check required/unknown fields and per-field validators, then any
        message-level rules; raises TypeError on the first violation."""
        self._validate_fields_with_schema(dct, self.schema)
        self._validate_message(dct)

    def _validate_fields_with_schema(self, dct, schema):
        if not isinstance(dct, dict):
            self._raise_invalid_type(dct)
        schema_dct = dict(schema)
        required_fields = filter(lambda x: not x[1].optional, schema)
        required_field_names = map(lambda x: x[0], required_fields)
        missed_required_fields = set(required_field_names) - set(dct)
        if missed_required_fields:
            self._raise_missed_fields(*missed_required_fields)
        for k, v in dct.items():
            if k not in schema_dct:
                # Unknown fields are only an error in strict mode.
                if self.schema_is_strict:
                    self._raise_unknown_fields(k, v)
            else:
                validation_error = schema_dct[k].validate(v)
                if validation_error:
                    self._raise_invalid_fields(k, v, validation_error)

    def _validate_message(self, dct):
        # Hook for subclasses to add cross-field validation; default no-op.
        return None

    def _raise_invalid_type(self, dct):
        raise TypeError("{} invalid type {}, dict expected"
                        .format(self.__error_msg_prefix, type(dct)))

    def _raise_missed_fields(self, *fields):
        msg = "{} missed fields - {}. " \
            .format(self.__error_msg_prefix,
                    ', '.join(map(str, fields)))
        # A missing protocol version gets a dedicated exception so callers
        # can distinguish it from other schema violations.
        if any(field == f.PROTOCOL_VERSION.nm for field in map(str, fields)):
            raise MissingProtocolVersionError(msg)
        else:
            raise TypeError(msg)

    def _raise_unknown_fields(self, field, value):
        raise TypeError("{} unknown field - "
                        "{}={}".format(self.__error_msg_prefix,
                                       field, value))

    def _raise_invalid_fields(self, field, value, reason):
        raise TypeError("{} {} "
                        "({}={})".format(self.__error_msg_prefix, reason,
                                         field, value))

    def _raise_invalid_message(self, reason):
        raise TypeError("{} {}".format(self.__error_msg_prefix, reason))

    @property
    def __error_msg_prefix(self):
        return 'validation error [{}]:'.format(self.__class__.__name__)
class MessageBase(Mapping, MessageValidator):
    """Schema-validated message that behaves like a read-only ordered
    mapping of field name -> value, with fields also exposed as attributes."""
    typename = None
    def __init__(self, *args, **kwargs):
        if args and kwargs:
            raise ValueError("*args, **kwargs cannot be used together")
        if kwargs:
            # op field is not required since there is self.typename
            kwargs.pop(OP_FIELD_NAME, None)
        argsLen = len(args or kwargs)
        if self.schema_is_strict and argsLen > len(self.schema):
            raise ValueError(
                "number of parameters {} should be less than or equal to "
                "the number of fields in schema {}"
                .format(argsLen, len(self.schema))
            )
        super().__init__()
        # Positional arguments are paired with schema field names in order.
        input_as_dict = kwargs if kwargs else self._join_with_schema(args)
        self.validate(input_as_dict)
        input_as_dict = self._post_process(input_as_dict)
        # Store fields in schema order; optional fields that were omitted
        # are simply absent from the mapping.
        self._fields = OrderedDict(
            (name, input_as_dict[name])
            for name, _ in self.schema
            if name in input_as_dict)
    def _join_with_schema(self, args):
        # Zip positional values with the schema's field names (in order).
        return dict(zip(map(itemgetter(0), self.schema), args))
    def _post_process(self, input_as_dict: Dict) -> Dict:
        # Hook for subclasses to normalise validated input; default no-op.
        return input_as_dict
    def __getattr__(self, item):
        # Expose message fields as attributes (msg.field_name).
        if item in self._fields:
            return self._fields[item]
        raise AttributeError(
            "'{}' object has no attribute '{}'"
            .format(self.__class__.__name__, item)
        )
    def __getitem__(self, key):
        # Integer/slice indexing returns field *values* in schema order.
        values = list(self._fields.values())
        if isinstance(key, slice):
            return values[key]
        if isinstance(key, int):
            return values[key]
        raise TypeError("Invalid argument type.")
    def _asdict(self):
        # Dict form including the op/typename marker (delegates to __dict__).
        return self.__dict__
    @property
    def __dict__(self):
        """
        Return a dictionary form.
        """
        # The op field is inserted first so serialized messages lead with it.
        m = self._fields.copy()
        m[OP_FIELD_NAME] = self.typename
        m.move_to_end(OP_FIELD_NAME, False)
        return m
    @property
    def __name__(self):
        return self.typename
    def __iter__(self):
        # NOTE: iteration yields field VALUES, not keys (unlike dict).
        return self._fields.values().__iter__()
    def __len__(self):
        return len(self._fields)
    def items(self):
        return self._fields.items()
    def keys(self):
        return self._fields.keys()
    def values(self):
        return self._fields.values()
    def __str__(self):
        return "{}{}".format(self.typename, dict(self.items()))
    def __repr__(self):
        return self.__str__()
    def __eq__(self, other):
        # Equal when other is the same (or a derived) message type with the
        # same serialized content.
        if not issubclass(other.__class__, self.__class__):
            return False
        return self._asdict() == other._asdict()
    def __hash__(self):
        # Order-sensitive hash over field values; the +1 offsets keep the
        # first position and zero-hash values contributing to the product.
        h = 1
        for index, value in enumerate(list(self.__iter__())):
            h = h * (index + 1) * (hash(value) + 1)
        return h
    def __dir__(self):
        return self.keys()
    def __contains__(self, key):
        return key in self._fields
superfluidity/RDCL3D | code/lib/toscanfv/toscanfv_parser.py | Python | apache-2.0 | 2,078 | 0.006737 | import json
import pyaml
import yaml
from lib.util import Util
from lib.parser import Parser
import logging
import traceback
import glob
import os
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('ToscanfvParser')
class ToscanfvParser(Parser):
    """Parser methods for the toscanfv project type.

    Corruption fix: two lines carried stray dataset separators (before the
    super().__init__ call and inside ``for desc_type in project:``); restored.
    """

    def __init__(self):
        super(ToscanfvParser, self).__init__()

    @classmethod
    def importprojectdir(cls, dir_project, file_type):
        """Import all descriptor files under a given folder.

        Each descriptor type lives in an upper-cased subfolder of
        dir_project (e.g. TOSCAYAML); graph positions come from a
        vertices.json file next to them.
        """
        project = {
            'toscayaml': {},
            'positions': {}
        }
        for desc_type in project:
            cur_type_path = os.path.join(dir_project, desc_type.upper())
            log.debug(cur_type_path)
            if os.path.isdir(cur_type_path):
                # Key each descriptor by its base filename (without extension).
                for file in glob.glob(os.path.join(cur_type_path, '*.' + file_type)):
                    if file_type == 'json':
                        project[desc_type][os.path.basename(file).split('.')[0]] = Util.loadjsonfile(file)
                    elif file_type == 'yaml':
                        project[desc_type][os.path.basename(file).split('.')[0]] = Util.loadyamlfile(file)
        # Node positions for the graphical editor, if present.
        for vertices_file in glob.glob(os.path.join(dir_project, '*.json')):
            if os.path.basename(vertices_file) == 'vertices.json':
                project['positions']['vertices'] = Util.loadjsonfile(vertices_file)
        return project

    @classmethod
    def importprojectfiles(cls, file_dict):
        """Import descriptors extracted from the new-project POST.

        The keys in file_dict are the descriptor file types; each value is
        an iterable of uploaded file objects.
        """
        project = {
            'toscayaml': {},
        }
        for desc_type in project:
            if desc_type in file_dict:
                files_desc_type = file_dict[desc_type]
                for file in files_desc_type:
                    project[desc_type][os.path.splitext(file.name)[0]] = json.loads(file.read())
        return project
idan/oauthlib | oauthlib/oauth2/rfc6749/endpoints/revocation.py | Python | bsd-3-clause | 5,212 | 0.000767 | """
oauthlib.oauth2.rfc6749.endpoint.revocation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An implementation of the OAuth 2 `Token Revocation`_ spec (draft 11).
.. _`Token Revocation`: https://tools.ietf.org/html/draft-ietf-oauth-revocation-11
"""
import logging
from oauthlib.common import Request
from ..errors import OAuth2Error
from .base import BaseEndpoint, catch_errors_and_unavailability
log = logging.getLogger(__name__)
class RevocationEndpoint(BaseEndpoint):
    """Token revocation endpoint.
    Endpoint used by authenticated clients to revoke access and refresh tokens.
    Commonly this will be part of the Authorization Endpoint.
    """
    valid_token_types = ('access_token', 'refresh_token')
    valid_request_methods = ('POST',)
    def __init__(self, request_validator, supported_token_types=None,
                 enable_jsonp=False):
        BaseEndpoint.__init__(self)
        self.request_validator = request_validator
        self.supported_token_types = (
            supported_token_types or self.valid_token_types)
        self.enable_jsonp = enable_jsonp
    @catch_errors_and_unavailability
    def create_revocation_response(self, uri, http_method='POST', body=None,
                                   headers=None):
        """Revoke supplied access or refresh token.
        The authorization server responds with HTTP status code 200 if the
        token has been revoked successfully or if the client submitted an
        invalid token.
        Note: invalid tokens do not cause an error response since the client
        cannot handle such an error in a reasonable way.  Moreover, the purpose
        of the revocation request, invalidating the particular token, is
        already achieved.
        The content of the response body is ignored by the client as all
        necessary information is conveyed in the response code.
        An invalid token type hint value is ignored by the authorization server
        and does not influence the revocation response.
        """
        resp_headers = {
            'Content-Type': 'application/json',
            'Cache-Control': 'no-store',
            'Pragma': 'no-cache',
        }
        request = Request(
            uri, http_method=http_method, body=body, headers=headers)
        try:
            self.validate_revocation_request(request)
            log.debug('Token revocation valid for %r.', request)
        except OAuth2Error as e:
            # Validation failures are reported to the client (optionally
            # wrapped in a JSONP callback) with the error's status code.
            log.debug('Client error during validation of %r. %r.', request, e)
            response_body = e.json
            if self.enable_jsonp and request.callback:
                response_body = '{}({});'.format(request.callback, response_body)
            resp_headers.update(e.headers)
            return resp_headers, response_body, e.status_code
        self.request_validator.revoke_token(request.token,
                                            request.token_type_hint, request)
        # Success: empty body (or an empty JSONP call) and HTTP 200.
        response_body = ''
        if self.enable_jsonp and request.callback:
            response_body = request.callback + '();'
        return {}, response_body, 200
    def validate_revocation_request(self, request):
        """Ensure the request is valid.
        The client constructs the request by including the following parameters
        using the "application/x-www-form-urlencoded" format in the HTTP
        request entity-body:
        token (REQUIRED).  The token that the client wants to get revoked.
        token_type_hint (OPTIONAL).  A hint about the type of the token
        submitted for revocation.  Clients MAY pass this parameter in order to
        help the authorization server to optimize the token lookup.  If the
        server is unable to locate the token using the given hint, it MUST
        extend its search across all of its supported token types.  An
        authorization server MAY ignore this parameter, particularly if it is
        able to detect the token type automatically.  This specification
        defines two such values:
        * access_token: An Access Token as defined in [RFC6749],
          `section 1.4`_
        * refresh_token: A Refresh Token as defined in [RFC6749],
          `section 1.5`_
        Specific implementations, profiles, and extensions of this
        specification MAY define other values for this parameter using
        the registry defined in `Section 4.1.2`_.
        The client also includes its authentication credentials as described in
        `Section 2.3`_. of [`RFC6749`_].
        .. _`section 1.4`: https://tools.ietf.org/html/rfc6749#section-1.4
        .. _`section 1.5`: https://tools.ietf.org/html/rfc6749#section-1.5
        .. _`section 2.3`: https://tools.ietf.org/html/rfc6749#section-2.3
        .. _`Section 4.1.2`: https://tools.ietf.org/html/draft-ietf-oauth-revocation-11#section-4.1.2
        .. _`RFC6749`: https://tools.ietf.org/html/rfc6749
        """
        self._raise_on_bad_method(request)
        self._raise_on_bad_post_request(request)
        self._raise_on_missing_token(request)
        self._raise_on_invalid_client(request)
        self._raise_on_unsupported_token(request)
|
haiyangd/cockpit_view | test/storagelib.py | Python | lgpl-2.1 | 6,735 | 0.001782 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Cockpit.
#
# Copyright (C) 2015 Red Hat, Inc.
#
# Cockpit is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Cockpit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see <http://www.gnu.org/licenses/>.
import os
from testlib import *
def content_action_btn(index):
    """CSS selector for the action-button group of the index-th (1-based) content row."""
    return "#content .list-group li:nth-child({0}) .btn-group".format(index)
class StorageCase(MachineCase):
    """Base class for storage integration tests: wraps browser interaction
    with the storage dialogs and synchronisation with the storaged daemon."""
    def setUp(self):
        if "atomic" in os.getenv("TEST_OS", ""):
            self.skipTest("No storage on Atomic")
        MachineCase.setUp(self)
    def inode(self, f):
        # Inode number of path f on the test machine (follows symlinks).
        return self.machine.execute("stat -L '%s' -c %%i" % f)
    # Action buttons
    def content_action(self, index, action):
        # Pick a named action from the row's drop-down button group.
        self.browser.click_action_btn(content_action_btn(index), action)
    def content_default_action(self, index, action):
        # Wait until the given action is the default, then click it.
        btn = content_action_btn(index)
        self.browser.wait_action_btn (btn, action)
        self.browser.click_action_btn (btn)
    def content_single_action(self, index, action):
        # Rows with exactly one action show a plain button instead of a group.
        btn = "#content .list-group li:nth-child(%d) button" % index
        self.browser.wait_text (btn, action)
        self.browser.click (btn)
    # Dialogs
    def dialog_wait_open(self):
        self.browser.wait_present('#dialog')
        self.browser.wait_visible('#dialog')
    def dialog_wait_alert(self, text):
        self.browser.wait_in_text('#dialog .alert-message', text)
    def dialog_field(self, field):
        # CSS selector for a dialog input identified by its data-field name.
        return '#dialog [data-field="%s"]' % field
    def dialog_val(self, field):
        return self.browser.val(self.dialog_field(field))
    def dialog_set_val(self, field, val):
        # bools -> checkboxes, dicts -> per-label checkbox groups,
        # everything else -> plain input value.
        if isinstance(val, bool):
            self.browser.set_checked(self.dialog_field(field), val)
        elif isinstance(val, dict):
            for label in val:
                self.dialog_select(field, label, val[label])
        else:
            self.browser.set_val(self.dialog_field(field), val)
    def dialog_is_present(self, field, label):
        return self.browser.is_present('%s .checkbox:contains("%s") input' % (self.dialog_field(field), label))
    def dialog_select(self, field, label, val):
        self.browser.set_checked('%s .checkbox:contains("%s") input' % (self.dialog_field(field), label), val)
    def dialog_wait_val(self, field, val):
        self.browser.wait_val(self.dialog_field(field), val)
    def dialog_wait_error(self, field, val):
        # XXX - allow for more than one error
        self.browser.wait_in_text('#dialog .dialog-error', val)
    def dialog_wait_not_visible(self, field):
        self.browser.wait_not_visible(self.dialog_field(field))
    def dialog_apply(self):
        self.browser.click('#dialog [data-action="apply"]')
    def dialog_cancel(self):
        self.browser.click('#dialog [data-dismiss="modal"]')
    def dialog_wait_close(self):
        self.browser.wait_not_present('#dialog')
    def dialog_check(self, expect):
        # True when every expected field currently shows the expected value.
        for f in expect:
            if not self.dialog_val(f) == expect[f]:
                return False
        return True
    # NOTE(review): 'expect' uses a mutable default; safe here since it is
    # only iterated, never mutated.
    def dialog(self, values, expect={}):
        """Drive an open dialog: verify expected values, fill in new ones,
        apply, and wait for it to close."""
        self.dialog_wait_open()
        for f in expect:
            self.dialog_wait_val(f, expect[f])
        for f in values:
            self.dialog_set_val(f, values[f])
        self.dialog_apply()
        self.dialog_wait_close()
    def confirm(self):
        # Accept a confirmation dialog without changing any fields.
        self.dialog({})
    # There is a lot of asynchronous activity in the storage stack.
    # For example, changing fstab or crypttab via the storaged API
    # will not immediately update the Configuration properties of
    # block objects.  The storaged daemon will only do that once it
    # gets a change notification for those files, which happens some
    # time later.  As another example, wiping a block device has to be
    # noticed by udev and be propagated back to the daemon before it
    # updates its properties.
    #
    # Concretely, the tests have to mainly deal with the cases listed
    # below, and we offer some functions to help with that.
    #
    # - Waiting until a expected change to fstab or crypttab has
    #   arrived in storaged.  This is important so that it will mount
    #   filesystems to the expected places, and will clean up fstab in
    #   the expected ways, among other things.
    #
    #   This is done with wait_in_storaged_configuration and
    #   wait_not_in_storaged_configuration.
    #
    # - Waiting until a expected change to fstab or crypttab has
    #   arrived in Cockpit.  This is important so that dialogs will
    #   show the right things, and try to modify the right
    #   configuration.
    #
    #   This is done by repeatedly opening a dialog until it shows the
    #   right values, via dialog_with_retry.
    #
    # - Waiting until a block device is considered 'free' and can be
    #   used as a physical volume or raid member.
    #
    #   This is also done by repeatedly opening a dialog until all
    #   needed block devices are listed.
    def retry(self, setup, check, teardown):
        """Run setup/check/teardown in a loop (with browser checkpoints)
        until check() succeeds or the armed timeout expires."""
        b = self.browser
        b.arm_timeout()
        while True:
            setup()
            if check():
                break
            teardown()
            b.wait_checkpoint()
        b.disarm_timeout()
    def dialog_with_retry(self, trigger, values, expect):
        """Repeatedly open a dialog (via trigger) until it shows 'expect',
        then either fill in 'values' and apply, or cancel when empty."""
        def setup():
            trigger()
            self.dialog_wait_open()
        def check():
            # 'expect' may be a predicate instead of a field/value dict.
            if callable(expect):
                return expect()
            else:
                return self.dialog_check(expect)
        def teardown():
            self.dialog_cancel()
            self.dialog_wait_close()
        self.retry(setup, check, teardown)
        if values:
            for f in values:
                self.dialog_set_val(f, values[f])
            self.dialog_apply()
        else:
            self.dialog_cancel()
        self.dialog_wait_close()
    def wait_in_storaged_configuration(self, mount_point):
        wait(lambda: mount_point in self.machine.execute("storagedctl dump | grep Configuration"))
    def wait_not_in_storaged_configuration(self, mount_point):
        wait(lambda: mount_point not in self.machine.execute("storagedctl dump | grep Configuration"))
|
MaestroGraph/sparse-hyper | sparse/tensors.py | Python | mit | 10,861 | 0.003775 | import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
import torch.nn.functional as F
from sparse.util import prod
import util, sys
from util import d
"""
Utility functions for manipulation tensors
"""
def flatten_indices_mat(indices, in_shape, out_shape):
    """
    Turns an NxK matrix of N index-tuples for a tensor T of rank K into an Nx2 matrix M of index-tuples for a _matrix_
    that is created by flattening the first 'out_shape' dimensions into the vertical dimension of M and the remaining
    dimensions into the horizontal dimension of M.

    Fixes: removed a dead pre-allocated result tensor (it was created -- on
    the GPU when indices.is_cuda -- and immediately overwritten by the
    torch.cat below) and the unused 'inrank' local.

    :param indices: (batch, n, rank) long tensor of index tuples
    :param in_shape: shape of the input part of the tensor (becomes the column index)
    :param out_shape: shape of the output part of the tensor (becomes the row index)
    :return: (1) a (batch, n, 2) tensor of matrix indices, (2) the flattened matrix size
    """
    batchsize, n, rank = indices.size()
    outrank = len(out_shape)
    # Row (i) index comes from the output dimensions, column (j) index from the input dimensions.
    left = fi_matrix(indices[:, :, 0:outrank], out_shape)   # i index of the weight matrix
    right = fi_matrix(indices[:, :, outrank:rank], in_shape)  # j index
    result = torch.cat([left.unsqueeze(2), right.unsqueeze(2)], dim=2)
    return result, LongTensor((prod(out_shape), prod(in_shape)))
def fi_matrix(indices, shape):
    """Flatten per-dimension index tuples into scalar row-major offsets.

    :param indices: (batch, rows, rank) long tensor of index tuples
    :param shape: shape of the tensor the indices refer to
    :return: (batch, rows) long tensor of flattened offsets
    """
    batchsize, rows, rank = indices.size()

    # Row-major strides: stride[d] = product of shape[d+1:].
    strides = torch.LongTensor(rank).fill_(1)
    if indices.is_cuda:
        strides = strides.cuda()

    for dim in range(rank):
        strides[dim] = 1
        for later in range(dim + 1, len(shape)):
            strides[dim] *= shape[later]

    # Dot each index tuple with the stride vector.
    weighted = indices * strides.unsqueeze(0).unsqueeze(0).expand_as(indices)
    return weighted.sum(dim=2)
def contract(indices, values, size, x, cuda=None):
    """
    Performs a contraction (generalized matrix multiplication) of a sparse tensor with an input x.
    The contraction is defined so that every element of the output is the sum of every element of the input multiplied
    once by a unique element from the tensor (that is, like a fully connected neural network layer). See the paper for
    details.
    :param indices: (b, k, r)-tensor describing indices of b sparse tensors of rank r
    :param values: (b, k)-tensor with the corresponding values
    :param size: full size of the sparse tensor; assumed to be out_size + x.size()[1:] -- see assertion below
    :param x: (b, *in_size) dense input tensor
    :param cuda: force CUDA on/off; defaults to wherever 'indices' lives
    :return: (b, *out_size) dense output tensor
    """
    # translate tensor indices to matrix indices
    if cuda is None:
        cuda = indices.is_cuda
    b, k, r = indices.size()
    # size is equal to out_size + x.size()
    in_size = x.size()[1:]
    out_size = size[:-len(in_size)]
    assert len(out_size) + len(in_size) == r
    # Flatten into a matrix multiplication
    mindices, flat_size = flatten_indices_mat(indices, x.size()[1:], out_size)
    x_flat = x.view(b, -1, 1)
    # Prevent segfault
    assert mindices.min() >= 0, 'negative index in flattened indices: {} \n {} \n Original indices {} \n {}'.format(mindices.size(), mindices, indices.size(), indices)
    assert not util.contains_nan(values.data), 'NaN in values:\n {}'.format(values)
    y_flat = batchmm(mindices, values, flat_size, x_flat, cuda)
    return y_flat.view(b, *out_size) # reshape y into a tensor
def sparsemm(use_cuda):
    """Select the sparse matrix-multiplication autograd function.

    :param use_cuda: True for the GPU implementation, False for CPU
    :return: the corresponding autograd Function's ``apply`` callable
    """
    if use_cuda:
        return SparseMMGPU.apply
    return SparseMMCPU.apply
class SparseMMCPU(torch.autograd.Function):
    """
    Sparse matrix multiplication with gradients over the value-vector
    Does not work with batch dim.
    """
    @staticmethod
    def forward(ctx, indices, values, size, xmatrix):
        # print(type(size), size, list(size), intlist(size))
        # print(indices.size(), values.size(), torch.Size(intlist(size)))
        # Build the sparse weight matrix and stash what backward() needs.
        matrix = torch.sparse.FloatTensor(indices, values, torch.Size(intlist(size)))
        ctx.indices, ctx.matrix, ctx.xmatrix = indices, matrix, xmatrix
        return torch.mm(matrix, xmatrix)
    @staticmethod
    def backward(ctx, grad_output):
        grad_output = grad_output.data
        # -- this will break recursive autograd, but it's the only way to get grad over sparse matrices
        i_ixs = ctx.indices[0,:]
        j_ixs = ctx.indices[1,:]
        output_select = grad_output[i_ixs, :]
        xmatrix_select = ctx.xmatrix[j_ixs, :]
        # d loss / d value_k = sum_c grad[i_k, c] * x[j_k, c]
        grad_values = (output_select * xmatrix_select).sum(dim=1)
        # Gradient w.r.t. the dense input: matrix^T @ grad.
        grad_xmatrix = torch.mm(ctx.matrix.t(), grad_output)
        return None, Variable(grad_values), None, Variable(grad_xmatrix)
class SparseMMGPU(torch.autograd.Function):
    """
    Sparse matrix multiplication with gradients over the value-vector.
    Does not work with batch dim.

    Corruption fix: the forward() constructor argument was split by a stray
    dataset separator (``val | ues``); restored to ``values``.
    """
    @staticmethod
    def forward(ctx, indices, values, size, xmatrix):
        # print(type(size), size, list(size), intlist(size))
        # Build the sparse weight matrix on the GPU and stash what backward() needs.
        matrix = torch.cuda.sparse.FloatTensor(indices, values, torch.Size(intlist(size)))
        ctx.indices, ctx.matrix, ctx.xmatrix = indices, matrix, xmatrix
        return torch.mm(matrix, xmatrix)
    @staticmethod
    def backward(ctx, grad_output):
        grad_output = grad_output.data
        # -- this will break recursive autograd, but it's the only way to get grad over sparse matrices
        i_ixs = ctx.indices[0,:]
        j_ixs = ctx.indices[1,:]
        output_select = grad_output[i_ixs]
        xmatrix_select = ctx.xmatrix[j_ixs]
        # d loss / d value_k = sum_c grad[i_k, c] * x[j_k, c]
        grad_values = (output_select * xmatrix_select).sum(dim=1)
        grad_xmatrix = torch.mm(ctx.matrix.t(), grad_output)
        return None, Variable(grad_values), None, Variable(grad_xmatrix)
def batchmm(indices, values, size, xmatrix, cuda=None):
    """
    Multiply a batch of sparse matrices (indices, values, size) with a batch of dense matrices (xmatrix)
    :param indices: (b, n, 2) long tensor of per-batch matrix indices
    :param values: (b, n) tensor of the corresponding values
    :param size: (height, width) of each sparse matrix in the batch
    :param xmatrix: (b, w, z) dense right-hand operand
    :param cuda: force CUDA on/off; defaults to wherever 'indices' lives
    :return: (b, height, z) dense result
    """
    if cuda is None:
        cuda = indices.is_cuda
    b, n, r = indices.size()
    dv = 'cuda' if cuda else 'cpu'
    height, width = size
    size = torch.tensor(size, device=dv, dtype=torch.long)
    # Offset each batch element's indices so the whole batch becomes one big
    # block-diagonal sparse matrix, then do a single sparse matmul.
    bmult = size[None, None, :].expand(b, n, 2)
    m = torch.arange(b, device=dv, dtype=torch.long)[:, None, None].expand(b, n, 2)
    bindices = (m * bmult).view(b*n, r) + indices.view(b*n, r)
    bfsize = Variable(size * b)
    bvalues = values.contiguous().view(-1)
    # Stack the dense operands vertically to match the block-diagonal layout.
    b, w, z = xmatrix.size()
    bxmatrix = xmatrix.view(-1, z)
    sm = sparsemm(cuda)
    result = sm(bindices.t(), bvalues, bfsize, bxmatrix)
    # Un-stack the block-diagonal result back into a batch.
    return result.view(b, height, -1)
def intlist(tensor):
    """
    A slow and stupid way to turn a tensor into an iterable over ints.
    Lists are passed through unchanged; tensors are squeezed and must be
    rank 1 afterwards.
    :param tensor: list or torch tensor
    :return: plain Python list of ints
    """
    if type(tensor) is list:
        return tensor

    tensor = tensor.squeeze()
    assert len(tensor.size()) == 1

    return [int(element) for element in tensor]
def accuracy(output, labels):
    """Fraction of rows whose argmax over dim 1 equals the label.

    :param output: (n, classes) score tensor
    :param labels: (n,) tensor of target class indices
    :return: scalar tensor with the accuracy in [0, 1]
    """
    predictions = output.max(1)[1].type_as(labels)
    hits = predictions.eq(labels).double().sum()
    return hits / len(labels)
def simple_normalize(indices, values, size, row=True, method='softplus', cuda=torch.cuda.is_available()):
    """
    Simple softmax-style normalization: push the values through a
    positivity-enforcing transform and divide by the row (or column) totals.

    :param indices: sparse index tuples
    :param values: raw values to normalize
    :param size: size of the sparse matrix
    :param row: normalize over rows when True, columns otherwise
    :param method: one of 'softplus', 'abs', 'relu'
    :return: normalized values
    """
    epsilon = 1e-7

    transforms = {
        'softplus': F.softplus,
        'abs': torch.abs,
        'relu': F.relu,
    }
    if method not in transforms:
        raise Exception(f'Method {method} not recognized')

    values = transforms[method](values)

    sums = sum(indices, values, size, row=row)
    return values / (sums + epsilon)
# -- stable(ish) softmax
def logsoftmax(indices, values, size, its=10, p=2, method='iteration', row=True, cuda=torch.cuda.is_available()):
"""
Row or column log-softmaxes a sparse matrix (using logsumexp trick)
:param indices:
:param values:
:param size:
:param row:
:return:
"""
epsilon = 1e-7
if method == 'naive':
values = values.exp()
sums = sum(indices, values, size, row=row)
return (values/(sums + epsilon)).log()
if method == 'pnorm':
maxes = rowpnorm(indices, values, size, p=p)
elif method == 'iteration':
maxes = itmax(indices, values, size,its=its, p=p)
e |
klebercode/esperancanordeste | esperancanordeste/catalog/views.py | Python | mit | 2,030 | 0 | # coding: utf-8
from django.db.models import Q
from django.views import generic
# from django.views.generic.dates import (YearArchiveView, MonthArchiveView,
# DayArchiveView)
from esperancanordeste.context_processors import EnterpriseExtraContext
from esperancanordeste.catalog.models import Catalog, Category, Product
class ProductListView(EnterpriseExtraContext, generic.ListView):
    """Paginated product-catalog listing with optional free-text search.

    Corruption fix: two lines in get_context_data were split by stray
    dataset separators (``self | .request`` and ``searc | h``); restored.
    """
    queryset = Product.published.all()
    template_name = 'catalog/catalog_home.html'
    # TODO: review the page size (original note: change pagination to 20)
    paginate_by = 20

    def get_queryset(self, **kwargs):
        """Filter published products by the 'search' GET parameter,
        matching name or description (case-insensitive)."""
        search = self.request.GET.get('search', '')
        if search:
            obj_lst = Product.published.filter(
                Q(name__icontains=search) |
                Q(description__icontains=search))
        else:
            obj_lst = Product.published.all()
        return obj_lst

    def get_context_data(self, **kwargs):
        """Expose the search term plus category and catalog lists to the template."""
        context = super(ProductListView, self).get_context_data(**kwargs)
        search = self.request.GET.get('search', '')
        context['search'] = search
        context['category_list'] = Category.objects.all()
        context['catalog_list'] = Catalog.objects.all()
        return context
class ProductCategoryListView(ProductListView):
    """
    Inherits from ProductListView, changing the filter to the selected category.
    """
    def get_queryset(self):
        """
        Include only the products tagged with the selected category.
        """
        return Product.published.filter(
            category__slug=self.kwargs['category_slug'])
class ProductDetailListView(ProductListView):
    """
    Inherits from ProductListView, narrowing the queryset to a single product
    and rendering the detail template.
    """
    template_name = 'catalog/catalog_detail.html'
    def get_queryset(self):
        """
        Include only the product matching the selected category and slug.
        """
        return Product.published.filter(
            category__slug=self.kwargs['category_slug'],
            slug=self.kwargs['slug'])
|
chrisortman/CIS-121 | k0776243/exercises/ex18.py | Python | mit | 526 | 0.015209 | # this one is like your scripts with argv
def print_two(*args):
arg1, arg2 = args
print "arg1: %r, arg2: %r" % (arg1, arg2)
# ok that *args is actually pointless, we can just do this
def print_two_again(arg1, arg2):
print "arg1: %r, arg2: %r" % (arg1, | arg2)
# this just takes one argument
def print_one(arg1):
print "arg1: %r" % arg1
|
# this one takes no arguments
def print_none():
print "I got nothin'."
print_two("Zed", "Shaw")
print_two_again("Zed","Shaw")
print_one("First!")
print_none() |
wilima/herocomp | herocomp/asm/Asm.py | Python | mit | 901 | 0 | def filename_directive(filename):
    return "\t.file\t\"{0}\"\n".format(filename)  # .file <source-name> directive
def compiler_ident_directive():
    """Emit the .ident directive identifying the compiler.

    Corruption fix: the year in the ident string was split by a stray
    dataset separator ("201 | 7"); restored to "2017".
    """
    return "\t.ident\t\"{0}\"\n".format("HEROCOMP - Tomas Mikula 2017")
def text_directive():
    """Emit the .text section directive."""
    return "\t.text\n"
def data_directive():
    """Emit the .data section directive."""
    return "\t.data\n"
def quad_directive(arg):
    """Emit a .quad (8-byte value) directive for arg."""
    return f"\t.quad\t{arg}\n"
def global_array(identifier, size):
    """Reserve a global array of `size` 8-byte slots (32-byte aligned) via .comm."""
    return f"\t.comm {identifier},{size * 8},32\n"
def global_directive(arg):
    """Mark a symbol as globally visible via the .global directive."""
    return f"\t.global\t{arg}\n"
def label(name):
    """Emit an assembly label line ("name:")."""
    return f"{name}:\n"
def instruction(name, *args):
    """Emit one instruction line: TAB, mnemonic, TAB, comma-separated operands, newline.

    Fixes: removed a stray dataset separator fused to the def line, and
    replaced the manual last-element loop with a str.join (same output,
    including the trailing TAB when there are no operands).
    """
    operands = ", ".join("{0}".format(a) for a in args)
    return "\t{0}\t{1}\n".format(name, operands)
def number_constant(number):
    """Render an immediate operand ("$<number>")."""
    return f"${number}"
|
acoecorelearning/Algebra1wRobots | 4-RaceToTie2/raceToTie2.py | Python | gpl-3.0 | 884 | 0.020362 | # This work is licensed by James Town and ACOE Core Learning under a
# Creative Commons Attribution-NonCommercial-ShareAlike 4.0
# International License: http://creativecommons.org/licenses/by-nc-sa/4.0/
import linkbot # loads library for the Linkbots
robot1 = linkbot.Linkbot('AB | CD') #assigns the name robot1 to the first robot
robot2 = linkbot.Linkbot('1234') #assigns the name robot2 to the second robot
robot1.setJointSpeeds(speed, 0, speed) #changes robot1's speed (degrees/second)
robot1.moveNB(some number of degrees, 0, -so | me number of degrees) #moves the robot some number of degrees
robot2.setJointSpeeds(speed, 0, speed) #changes robot2's speed (degrees/second)
robot2.moveNB(some number of degrees, 0, -some number of degrees) #moves the robot some number of degrees
robot1.moveWait() #waits for moveNB to finish
robot2.moveWait() #waits for moveNB to finish |
updownlife/multipleK | dependencies/biopython-1.65/build/lib.linux-x86_64-2.7/Bio/Emboss/Applications.py | Python | gpl-2.0 | 53,929 | 0.003931 | # Copyright 2001-2009 Brad Chapman.
# Revisions copyright 2009-2010 by Peter Cock.
# Revisions copyright 2009 by David Winter.
# Revisions copyright 2009-2010 by Leighton Pritchard.
# All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Code to interact with and run various EMBOSS programs.
These classes follow the AbstractCommandline interfaces for running
programs.
"""
from __future__ import print_function
from Bio.Application import _Option, _Switch, AbstractCommandline
__docformat__ = "restructuredtext en"
class _EmbossMinimalCommandLine(AbstractCommandline):
    """Base Commandline object for EMBOSS wrappers (PRIVATE).

    This is provided for subclassing, it deals with shared options
    common to all the EMBOSS tools:

     - auto               Turn off prompts
     - stdout             Write standard output
     - filter             Read standard input, write standard output
     - options            Prompt for standard and additional values
     - debug              Write debug output to program.dbg
     - verbose            Report some/full command line options
     - help               Report command line options. More
                          information on associated and general
                          qualifiers can be found with -help -verbose
     - warning            Report warnings
     - error              Report errors
     - fatal              Report fatal errors
     - die                Report dying program messages

    Corruption fix: two _Switch description arguments carried stray dataset
    separators ("| \"Report warnings.\"" and "| \"Report dying program
    messages.\""); the stray tokens were removed.
    """
    def __init__(self, cmd=None, **kwargs):
        assert cmd is not None
        extra_parameters = [
            _Switch(["-auto", "auto"],
                    """Turn off prompts.

                    Automatic mode disables prompting, so we recommend you set
                    this argument all the time when calling an EMBOSS tool from
                    Biopython.
                    """),
            _Switch(["-stdout", "stdout"],
                    "Write standard output."),
            _Switch(["-filter", "filter"],
                    "Read standard input, write standard output."),
            _Switch(["-options", "options"],
                    """Prompt for standard and additional values.

                    If you are calling an EMBOSS tool from within Biopython,
                    we DO NOT recommend using this option.
                    """),
            _Switch(["-debug", "debug"],
                    "Write debug output to program.dbg."),
            _Switch(["-verbose", "verbose"],
                    "Report some/full command line options"),
            _Switch(["-help", "help"],
                    """Report command line options.

                    More information on associated and general qualifiers can
                    be found with -help -verbose
                    """),
            _Switch(["-warning", "warning"],
                    "Report warnings."),
            _Switch(["-error", "error"],
                    "Report errors."),
            _Switch(["-die", "die"],
                    "Report dying program messages."),
        ]
        try:
            # Insert extra parameters - at the start just in case there
            # are any arguments which must come last:
            self.parameters = extra_parameters + self.parameters
        except AttributeError:
            # Should we raise an error?  The subclass should have set this up!
            self.parameters = extra_parameters
        AbstractCommandline.__init__(self, cmd, **kwargs)
class _EmbossCommandLine(_EmbossMinimalCommandLine):
    """Base Commandline object for EMBOSS wrappers (PRIVATE).
    This is provided for subclassing, it deals with shared options
    common to all the EMBOSS tools plus:
     - outfile            Output filename
    """
    def __init__(self, cmd=None, **kwargs):
        assert cmd is not None
        # Every full EMBOSS tool accepts -outfile in addition to the
        # switches added by _EmbossMinimalCommandLine.
        extra_parameters = [
           _Option(["-outfile", "outfile"],
                   "Output filename",
                   filename=True),
           ]
        try:
            # Insert extra parameters - at the start just in case there
            # are any arguments which must come last:
            self.parameters = extra_parameters + self.parameters
        except AttributeError:
            # Should we raise an error?  The subclass should have set this up!
            self.parameters = extra_parameters
        _EmbossMinimalCommandLine.__init__(self, cmd, **kwargs)
    def _validate(self):
        # Check the outfile, filter, or stdout option has been set.
        # We can't simply do this via the required flag for the outfile
        # output - this seems the simplest solution.
        if not (self.outfile or self.filter or self.stdout):
            raise ValueError("You must either set outfile (output filename), "
                             "or enable filter or stdout (output to stdout).")
        return _EmbossMinimalCommandLine._validate(self)
class Primer3Commandline(_EmbossCommandLine):
"""Commandline object for the Primer3 interface from EMBOSS.
The precise set of supported arguments depends on your version of EMBOSS.
This version accepts arguments current at EMBOSS 6.1.0, but in order to
remain backwards compatible also support the old argument names as well.
e.g. Using EMBOSS 6.1.0 or later,
>>> cline = Primer3Commandline(sequence="mysequence.fas", auto=True, hybridprobe=True)
>>> cline.explainflag = True
>>> cline.osizeopt=20
>>> cline.psizeopt=200
>>> cline.outfile = "myresults.out"
>>> cline.bogusparameter = 1967 # Invalid parameter
Traceback (most recent call last):
...
ValueError: Option name bogusparameter was not found.
>>> print(cline)
eprimer3 -auto -outfile=myresults.out -sequence=mysequence.fas -hybridprobe=True -psizeopt=200 -osizeopt=20 -explainflag=True
The equivalent for anyone still using an older version of EMBOSS would be:
>>> cline = Primer3Commandline(sequence="mysequence.fas", auto=True, hybridprobe=True)
>>> cline.explainflag = True
>>> cline.oligosize=20 # Old EMBOSS, instead of osizeopt
>>> cline.productosize=200 # Old EMBOSS, instead of psizeopt
>>> cline.outfile = "myresults.out"
>>> print(cline)
eprimer3 -auto -outfile=myresults.out -sequence=mysequence.fas -hybridprobe=True -productosize=200 -oligosize=20 -explainflag=True
"""
def __init__(self, cmd="eprimer3", **kwargs):
self.parameters = [
_Option(["-sequence", "sequence"],
"Sequence to choose primers from.",
is_required=True),
_Option(["-task", "task"],
"Tell eprimer3 what task to perform."),
_Option(["-hybridprobe", "hybridprobe"],
"Find an internal oligo to use as a hyb probe."),
_Option(["-numreturn", "numreturn"],
"Maximum number of primer pairs to return."),
_Option(["-includedregion", "includedregion"],
"Subregion of the sequence in which to pick primers."),
_Option(["-target", "target"],
"Sequence to target for flanking primers."),
_Option(["-excludedregion", "excludedregion"],
"Regions to exclude from primer picking."),
_Option(["-forwardinput", "forwardinput"],
"Sequence of a forward primer to check."),
_Option(["-reverseinput", "reverseinput"],
"Sequence of a reverse primer to check."),
_Option(["-gcclamp", "gcclamp"],
"The required number of Gs and Cs at the 3' of each primer."),
_Option(["-osize", "osize"],
"Optimum length of a primer oligo."),
_Option(["-minsize", "minsize"],
"Minimum length of a primer oligo."),
_Option(["-maxsize", "maxsize"],
"Maximum length of a primer oligo."),
_Option(["-otm", "otm"],
"""Melting temperature for primer o |
douglasstarnes/tcguestbook | main.py | Python | mit | 1,494 | 0 | from flask import Flask, request, render_template, redirect
from flask import send_from_directory
from mongoengine import Document, StringField, DateTimeField, connect
import os
import datetime
app = Flask(__name__)
app.debug = True
class Entry(Document):
    """A single guestbook entry persisted through mongoengine."""
    # Name the guest typed into the sign form.
    author = StringField()
    # Free-form message text from the sign form.
    message = StringField()
    # Creation time, assigned in the /sign view when the entry is saved.
    timestamp = DateTimeField()
@app.route("/", methods=["GET", ])
def index():
    """Render the landing page showing every guestbook entry."""
    return render_template("index.html", entries=Entry.objects)
@app.route("/sign", methods=["GET", "POST", ])
def sign():
    """Show the sign form on GET; persist a new entry on POST."""
    if request.method != "GET":
        # Build and store the entry, then redirect (Post/Redirect/Get).
        new_entry = Entry(
            author=request.form["the_author"],
            message=request.form["the_message"],
            timestamp=datetime.datetime.now(),
        )
        new_entry.save()
        return redirect("/")
    return render_template("sign.html")
@app.route("/styles/<path:filename>")
def styles(filename):
    """Allow Flask to serve our CSS files."""
    # Static assets live in the local "styles" directory.
    return send_from_directory("styles", filename)
if __name__ == "__main__":
    # Default to local development; hosting platforms inject PORT.
    host = "localhost"
    port = int(os.getenv("PORT", 5000))
    if port != 5000:
        # Non-default port implies a hosted environment: listen publicly.
        host = "0.0.0.0"
    else:
        # NOTE(review): the MongoDB connection is only opened in this local
        # branch -- confirm the hosted environment establishes its own
        # connection, otherwise the deployed app never connects.
        connect("tcguestbook")  # A MongoDB connection
    app.run(port=port, host=host)
|
zerg000000/mario-ai | src/main/java/amico/python/agents/evaluationinfo.py | Python | bsd-3-clause | 1,575 | 0.006349 | __author__="Sergey Karakovskiy"
__date__ ="$Mar 18, 2010 10:48:28 PM$"
class Inspectable(object):
    """Mixin whose repr lists the instance's public fields and values."""
    def __repr__(self):
        # Only expose attributes that are not name-mangled/private.
        public = {name: value
                  for name, value in self.__dict__.items()
                  if not name.startswith('_')}
        return '<%s: %s>' % (self.__class__.__name__, public)
class EvaluationInfo(Inspectable):
    """Named view over the raw evaluation array sent by the Mario server.
    The incoming ``evInfo`` sequence has a fixed positional layout; each
    index is echoed to stdout and then stored as a named attribute.
    """
    def __init__(self, evInfo):
        # The prints intentionally mirror the assignments below so the run
        # statistics are visible as soon as the object is constructed.
        print "widthCells = ", evInfo[0]
        print "widthPhys = ", evInfo[1]
        print "flowersDevoured = ", evInfo[2]
        print "killsByFire = ", evInfo[3]
        print "killsByShell = ", evInfo[4]
        print "killsByStomp = ", evInfo[5]
        print "killsTotal = ", evInfo[6]
        print "marioMode = ", evInfo[7]
        print "marioStatus = ", evInfo[8]
        print "mushroomsDevoured = ", evInfo[9]
        print "marioCoinsGained = ", evInfo[10]
        print "timeLeft = ", evInfo[11]
        print "timeSpent = ", evInfo[12]
        print "hiddenBlocksFound = ", evInfo[13]
        self.widthCells = evInfo[0]
        self.widthPhys = evInfo[1]
        self.flowersDevoured = evInfo[2]
        self.killsByFire = evInfo[3]
        self.killsByShell = evInfo[4]
        self.killsByStomp = evInfo[5]
        self.killsTotal = evInfo[6]
        self.marioMode = evInfo[7]
        self.marioStatus = evInfo[8]
        self.mushroomsDevoured = evInfo[9]
        self.marioCoinsGained = evInfo[10]
        self.timeLeft = evInfo[11]
        self.timeSpent = evInfo[12]
        self.hiddenBlocksFound = evInfo[13]
|
kibitzr/kibitzr | kibitzr/cli.py | Python | mit | 2,704 | 0 | import sys
import logging
import click
import entrypoints
LOG_LEVEL_CODES = {
"debug": logging.DEBUG,
"info": logging.INFO,
"warning": logging.WARNING,
"error": logging.ERROR,
}
def merge_extensions(click_group):
    """Apply every registered CLI extension to *click_group*.
    Each extension is invoked with the click group so it can attach its
    own commands while preserving the CLI context; the (mutated) group
    is returned for chaining.
    """
    extensions = load_extensions()
    for apply_extension in extensions:
        apply_extension(click_group)
    return click_group
def load_extensions():
    """Return the list of Kibitzr CLI extension callables."""
    # Each entry point in the "kibitzr.cli" group yields one extension.
    points = entrypoints.get_group_all("kibitzr.cli")
    return [point.load() for point in points]
@click.group()
@click.option("-l", "--log-level", default="info",
              type=click.Choice(LOG_LEVEL_CODES.keys()),
              help="Logging level")
@click.pass_context
def cli(ctx, log_level):
    """Run kibitzr COMMAND --help for detailed descriptions"""
    # Stash the numeric logging level on the click context so
    # subcommands (e.g. `once`, `run`) can read it via ctx.obj.
    ctx.obj = {'log_level': LOG_LEVEL_CODES[log_level.lower()]}
@cl | i.command()
def version():
"""Print version"""
from kibitzr import __version__ as kibitzr_version
print(kibitzr_version)
@cli.command()
def firefox():
    """Launch Firefox with persistent profile"""
    # Deferred import (lazy-import convention shared by all subcommands).
    from kibitzr.app import Application
    Application().run_firefox()
@cli.command()
@click.argument('name', nargs=-1)
@click.pass_context
def once(ctx, name):
    """Run kibitzr checks once and exit"""
    from kibitzr.app import Application
    app = Application()
    # app.run's return value becomes the process exit status.
    sys.exit(app.run(once=True, log_level=ctx.obj['log_level'], names=name))
@cli.command()
@click.argument('name', nargs=-1)
@click.pass_context
def run(ctx, name):
    """Run kibitzr in the foreground mode"""
    from kibitzr.app import Application
    app = Application()
    # Same as `once`, but keeps running; exit status comes from app.run.
    sys.exit(app.run(once=False, log_level=ctx.obj['log_level'], names=name))
@cli.command()
def init():
    """Create boilerplate configuration files"""
    # Deferred import (lazy-import convention shared by all subcommands).
    from kibitzr.app import Application
    Application.bootstrap()
@cli.command()
def telegram_chat():
    """Return chat id for the last message sent to Telegram Bot"""
    # rename import to escape name clashing:
    from kibitzr.app import Application
    app = Application()
    app.telegram_chat()
@cli.command()
def clean():
    """Clean change history"""
    # Deferred import (lazy-import convention shared by all subcommands).
    from kibitzr.storage import PageHistory
    PageHistory.clean()
@cli.command()
def stash():
    """Print stash contents"""
    # Deferred import (lazy-import convention shared by all subcommands).
    from kibitzr.stash import Stash
    Stash.print_content()
@cli.command()
def reload():
    """Send signal to reload configuration for kibitzr process"""
    # Deferred import (lazy-import convention shared by all subcommands).
    from kibitzr.app import Application
    app = Application()
    app.send_reload()
extended_cli = merge_extensions(cli)
if __name__ == "__main__":
extended_cli()
|
vrbagalkote/avocado-misc-tests-1 | generic/service_check.py | Python | gpl-2.0 | 3,084 | 0 | #!/usr/bin/env python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: 2016 IBM
# Author: Santhosh G <santhog4@linux.vnet.ibm.com>
#
# Copyright: 2014 Red Hat Inc.
# Besed on the Sample Idea from:
# https://github.com/autotest/virt-test/blob/master/samples/service.py
import ConfigParser
from avocado import Test
from avocado import main
from avocado.utils import process
from avocado.utils.service import SpecificServiceManager
from avocado.utils import distro
from avocado.utils.wait import wait_for
class service_check(Test):
    """Toggle each expected system service OFF/ON and restore it.
    The service list comes from services.cfg (keyed by the detected
    distro name) plus the platform specific error-log daemon: OPAL
    daemons on PowerNV, rtas_errd elsewhere.  Any service that cannot
    be toggled or restored is reported and the test fails.
    """
    def test(self):
        detected_distro = distro.detect()
        parser = ConfigParser.ConfigParser()
        config_file = self.datadir + '/services.cfg'
        parser.read(config_file)
        services_list = parser.get(detected_distro.name, 'services').split(',')
        # Use a context manager so the /proc/cpuinfo handle is not leaked.
        with open('/proc/cpuinfo', 'r') as cpuinfo:
            is_powernv = 'PowerNV' in cpuinfo.read()
        if is_powernv:
            services_list.extend(['opal_errd', 'opal-prd'])
        else:
            services_list.extend(['rtas_errd'])
        services_failed = []
        runner = process.run
        for service in services_list:
            service_obj = SpecificServiceManager(service, runner)
            self.log.info("Checking %s service" % service)
            if not service_obj.is_enabled():
                self.log.info("%s service Not Found !!!" % service)
                services_failed.append(service)
                continue
            original_status = service_obj.status()
            if original_status is True:
                # Running service: stop it, verify, then start it again.
                service_obj.stop()
                if not wait_for(lambda: not service_obj.status(), 10):
                    self.log.info("Fail to stop %s service" % service)
                    services_failed.append(service)
                    continue
                service_obj.start()
                wait_for(service_obj.status, 10)
            else:
                # Stopped service: start it, verify, then stop it again.
                service_obj.start()
                if not wait_for(service_obj.status, 10):
                    self.log.info("Fail to start %s service" % service)
                    services_failed.append(service)
                    continue
                service_obj.stop()
                wait_for(lambda: not service_obj.status(), 10)
            # The service must end up in the state it started in.
            if service_obj.status() is not original_status:
                self.log.info("Fail to restore original status of the %s "
                              "service" % service)
                services_failed.append(service)
        if services_failed:
            self.fail("List of services failed: %s" % services_failed)
        else:
            self.log.info("All Services Passed the ON/OFF test")
if __name__ == "__main__":
main()
|
vegphilly/vegphilly.com | vegancity/api.py | Python | gpl-3.0 | 4,516 | 0 | from django.conf.urls import url
from django.contrib.auth.models import User
from tastypie import fields
from tastypie.resources import ModelResource
from tastypie.utils import trailing_slash
from vegancity import models
from .search import master_search
from tastypie.api import Api
def build_api():
    """Assemble the v1 tastypie Api with all registered resources."""
    api = Api(api_name='v1')
    for resource in (VendorResource(), ReviewResource()):
        api.register(resource)
    return api
class VendorResource(ModelResource):
    """Tastypie resource exposing approved vendors plus a /search endpoint."""
    reviews = fields.ToManyField('vegancity.api.ReviewResource',
                                 'review_set',
                                 null=True)
    neighborhood = fields.ToOneField('vegancity.api.NeighborhoodResource',
                                     'neighborhood',
                                     null=True,
                                     full=True)
    cuisine_tags = fields.ToManyField('vegancity.api.CuisineTagResource',
                                      'cuisine_tags',
                                      related_name='vendors',
                                      null=True,
                                      full=True)
    feature_tags = fields.ToManyField('vegancity.api.FeatureTagResource',
                                      'feature_tags',
                                      related_name='vendors',
                                      null=True,
                                      full=True)
    veg_level = fields.ToOneField('vegancity.api.VegLevelResource',
                                  'veg_level',
                                  related_name='vendors',
                                  null=True,
                                  full=True)
    # Computed ratings -- populated by the dehydrate_* hooks below.
    food_rating = fields.IntegerField(null=True, readonly=True)
    atmosphere_rating = fields.IntegerField(null=True, readonly=True)
    def prepend_urls(self):
        """Register the extra /vendors/search/ URL ahead of the defaults."""
        url_template = r'^(?P<resource_name>%s)/search%s$'
        url_body = url_template % (self._meta.resource_name, trailing_slash())
        response_url = url(url_body, self.wrap_view('get_search'),
                           name='api_get_search')
        return [response_url]
    def get_search(self, request, **kwargs):
        """Run the full-text vendor search (?q=...) and serialize the hits."""
        raw_results = master_search(request.GET.get('q', ''))
        vendors = []
        # master_search returns (results, status_code); only the results
        # are serialized here.
        results, status_code = raw_results
        for result in results:
            bundle = self.build_bundle(obj=result, request=request)
            bundle = self.full_dehydrate(bundle)
            vendors.append(bundle)
        ctx = {'vendors': vendors}
        return self.create_response(request, ctx)
    def dehydrate_food_rating(self, bundle):
        # Delegate to the model's aggregate rating.
        return bundle.obj.food_rating()
    def dehydrate_atmosphere_rating(self, bundle):
        # Delegate to the model's aggregate rating.
        return bundle.obj.atmosphere_rating()
    class Meta:
        queryset = models.Vendor.objects.approved().all()
        resource_name = 'vendors'
        fields = ['id', 'name', 'address', 'website', 'phone',
                  'notes', 'resource_uri']
class ReviewResource(ModelResource):
    """Tastypie resource exposing approved vendor reviews."""
    vendor = fields.ToOneField('vegancity.api.VendorResource', 'vendor')
    author = fields.ToOneField('vegancity.api.UserResource',
                               'author',
                               null=True,
                               full=True)
    class Meta:
        queryset = models.Review.objects.approved().all()
        resource_name = 'reviews'
        fields = [
            'id', 'atmosphere_rating', 'food_rating', 'title', 'content',
            'created', 'modified', 'suggested_feature_tags',
            'suggested_cuisine_tags'
        ]
class CuisineTagResource(ModelResource):
    """Read-only lookup resource for cuisine tags."""
    class Meta:
        queryset = models.CuisineTag.objects.all()
        resource_name = 'cuisine_tag'
        fields = ['description', 'id']
class NeighborhoodResource(ModelResource):
    """Read-only lookup resource for neighborhoods."""
    class Meta:
        queryset = models.Neighborhood.objects.all()
        resource_name = 'neighborhood'
        fields = ['name']
class FeatureTagResource(ModelResource):
    """Read-only lookup resource for feature tags."""
    class Meta:
        queryset = models.FeatureTag.objects.all()
        resource_name = 'feature_tag'
        fields = ['description', 'id']
class UserResource(ModelResource):
    """Minimal user resource; exposes only non-sensitive identity fields."""
    class Meta:
        queryset = User.objects.all()
        resource_name = 'user'
        fields = ['id', 'username', 'first_name', 'last_name']
class VegLevelResource(ModelResource):
    """Read-only lookup resource for vegan-friendliness levels."""
    class Meta:
        queryset = models.VegLevel.objects.all()
        resource_name = 'veg_level'
        fields = ['name', 'description']
|
solvo/derb | report_builder/create_data.py | Python | gpl-3.0 | 9,928 | 0.003525 | import random
import datetime
from async_notifications.models import EmailTemplate
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from report_builder import models
PROJECT_NAMES = [
'Project Manhattan',
'Sysadmin association project',
'Operation Know-it-all',
'dejaVu Project',
'Hydra Project',
'Project X'
]
REPORT_NAMES = [
'Anthropology Department evaluation',
'Chinese Program',
'Data Science scholarship',
'Department of Computer Science reevaluation',
'Neuroscience Program survey',
'Program in Latin American and Caribbean Studies',
'Campus Recreation Services Commission',
'Faculty Senate reelection',
'IT department job evaluation',
'Office of Human Resources examination',
'Marketing study for the future',
'High Commander\'s log'
]
QUESTIONS = [
'How likely is it that you would recommend this company to a friend or colleague?',
'Overall, how satisfied or dissatisfied are you with our company?',
'Which of the following words would you use to describe our products? Select all that apply.',
'How well do our products meet your needs?',
'How would you rate the quality of the product?',
'How would you rate the value for money of the product?',
'How responsive have we been to your questions or concerns about our products?',
'How long have you been a customer of our company?',
'How likely are you to purchase any of our products again?',
'Do you have any other comments, questions, or concerns?',
'How meaningful is your work?',
'How challenging is your job?',
'In a typical week, how often do you feel stressed at work?',
'How well are you paid for the work you do?',
'How much do your opinions about work matter to your coworkers?',
'How realistic are the expectations of your supervisor?',
'How often do the tasks assigned to you by your supervisor help you grow professionally?',
'How many opportunities do you have to get promoted where you work?',
'Are you supervised too much at work, supervised too little, or supervised about the right amount?',
'Are you satisfied with your employee benefits, neither satisfied nor dissatisfied with them, or dissatisfied with them?',
'Are you satisfied with your job, neither satisfied nor dissatisfied with it, or dissatisfied with it?',
'How proud are you of your employer\'s brand?',
'Do you like your employer, neither like nor dislike them, or dislike them?',
'How likely are you to look for another job outside the company?',
'Overall, how would you rate the quality of your customer service experience?',
'How well did we understand your questions and concerns?',
'How much time did it take us to address your questions and concerns?',
'How likely is it that you would recommend this company to a friend or colleague?',
'Do you have any other comments, questions, or concerns?',
'What do you like most about our new product?',
'What changes would most improve our new product?',
'What do you like most about competing products currently available from other companies?',
'What changes would most improve competing products currently available from other companies?',
'What would make you more likely to use our new product?',
'If our new product were available today, how likely would you be to recommend it to others?',
'If you are not likely to use our new product, why not?',
'How important is price to you when choosing this type of company?',
'Overall, are you satisfied with your experience using our new product, dissatisfied with it, or neither satisfied or dissatisfied with it?',
'If our new product were available today, how likely would you be to use it instead of competing products currently available from other companies?',
'What do you like most about our new service?',
'What changes would most improve our new service?',
'What do you like most about competing services currently available from other companies?',
'What changes would most improve competing services currently available from other companies?',
'If our new company were available today, how likely would you be to recommend it to others?',
'What would make you more likely to use our new service?',
'How important is convenience when choosing this type of service?',
'If you are not likely to use our new service, why not?',
'Overall, are you satisfied with your experience using our new service, neither satisfied or dissatisfied with it, or dissatisfied with it?',
'If our new service were available today, how likely would you be to use it instead of competing services currently available from other companies?',
'How many school-age children do you have (K-12)?',
'Which elementary school is in your district?',
'What school(s) does your child/children attend?',
'My child/children attend(s)',
'Whom would you like to evaluate?',
'How likely is it that you would recommend your supervisor to a colleague?',
'How easy is it to get help from your supervisor when you want it?',
'How available to employees is your supervisor?',
'How often does your supervisor give you feedback about your work?',
'How improved is your performance after getting feedback from your supervisor about your work?',
'How effective is the training you receive from your supervisor?',
'How consistently does your supervisor reward employees for good | work?',
'How consistently does your supervisor punish employees for bad work?',
'How reasonable are the decisions made by your supervisor?',
'Does your supervisor take too much time to make decisi | ons, too little time, or about the right amount of time?',
'How often does your supervisor listen to employees\' opinions when making decisions?',
'How easy is it for employees to disagree with the decisions made by your supervisor?',
'When you make a mistake, how often does your supervisor respond constructively?',
'How reliable is your supervisor?',
'How effectively does your supervisor use company resources?',
'When someone completes a task especially well, how often does your supervisor acknowledge this success?',
'How professionally does your supervisor behave?',
'Overall, are you satisfied with your supervisor, neither satisfied nor dissatisfied with him/her, or dissatisfied with him/her?',
'Overall, how effective at his job is your supervisor?',
'What does your supervisor need to do to improve his/her performance?'
]
def create_projects():
    """Create one Project row per entry in PROJECT_NAMES.
    The list index doubles as the generic-relation object_id, matching
    the original seeding behaviour.
    """
    for index, description in enumerate(PROJECT_NAMES):
        models.Project.objects.create(
            description=description,
            content_type=random_choice(get_content_types()),
            object_id=index
        )
def create_report_types(max=5):
    """Create *max* ReportType rows, each with its own e-mail templates."""
    for idx in range(max):
        def make_template(prefix):
            # One empty EmailTemplate per lifecycle event, with a
            # deterministic code of the form "<prefix>_report_type_<idx>".
            return EmailTemplate.objects.create(
                code='%s_report_type_%d' % (prefix, idx),
                subject='', message='')
        models.ReportType.objects.create(
            type='report_type_%d' % idx,
            app_name='report_builder',
            name='report',
            action_ok=make_template('ok'),
            revision_turn=make_template('turn'),
            responsable_change=make_template('change'),
            report_start=make_template('start'),
            report_end=make_template('end')
        )
def create_reports():
    """Create a Report (with a random type and opening date) per name."""
    for report_name in REPORT_NAMES:
        models.Report.objects.create(
            type=random_model_object(models.ReportType),
            name=report_name,
            template={},
            opening_date=random_future_date()
        )
def create_report_by_projects():
for i in range(0, len(REPORT_NAMES)):
start_date = random_future_date()
models.ReportByProject.objects.create(
report=models.Report.objects.get(name=REPORT_NAMES[i]),
sta |
Eksmo/calibre | setup/upload.py | Python | gpl-3.0 | 10,267 | 0.009253 | #!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import os, subprocess, hashlib, shutil, glob, stat, sys, time
from subprocess import check_call
from tempfile import NamedTemporaryFile, mkdtemp
from zipfile import ZipFile
if __name__ == '__main__':
d = os.path.dirname
sys.path.insert(0, d(d(os.path.abspath(__file__))))
from setup import Command, __version__, installer_name, __appname__
PREFIX = "/var/www/calibre-ebook.com"
DOWNLOADS = PREFIX+"/htdocs/downloads"
BETAS = DOWNLOADS +'/betas'
USER_MANUAL = '/var/www/localhost/htdocs/'
HTML2LRF = "calibre/ebooks/lrf/html/demo"
TXT2LRF = "src/calibre/ebooks/lrf/txt/demo"
STAGING_HOST = '67.207.135.179'
STAGING_USER = 'root'
STAGING_DIR = '/root/staging'
def installers():
    """Return the list of installer artifact paths for this release.
    Order matters: the source tarball comes first, the portable zip last.
    """
    names = [installer_name(ext) for ext in ('dmg', 'msi', 'tar.bz2')]
    names.append(installer_name('tar.bz2', is64bit=True))
    names.insert(0, 'dist/%s-%s.tar.xz' % (__appname__, __version__))
    names.append('dist/%s-portable-%s.zip' % (__appname__, __version__))
    return names
def installer_description(fname):
    """Return a human readable description for an installer file name."""
    if fname.endswith('.tar.xz'):
        return 'Source code'
    if fname.endswith('.tar.bz2'):
        # Linux tarballs encode the architecture in the file name.
        bits = '32' if 'i686' in fname else '64'
        return bits + 'bit Linux binary'
    suffix_map = {
        '.msi': 'Windows installer',
        '.dmg': 'OS X dmg',
        '.zip': 'Calibre Portable',
    }
    for suffix, description in suffix_map.items():
        if fname.endswith(suffix):
            return description
    return 'Unknown file'
class ReUpload(Command): # {{{
    """Delete locally built installers so they are rebuilt and re-uploaded.
    Forces opts.replace so the hosting services overwrite existing files.
    """
    # Fixed typo in the user-visible help string ("Re-uplaod").
    description = 'Re-upload any installers present in dist/'
    sub_commands = ['upload_installers']
    def pre_sub_commands(self, opts):
        # Ensure the upload step replaces files already on the hosts.
        opts.replace = True
    def run(self, opts):
        # Remove existing artifacts; the sub commands rebuild/re-upload them.
        for x in installers():
            if os.path.exists(x):
                os.remove(x)
# }}}
# Data {{{
def get_google_data():
    """Load Google Code credentials from the local config file."""
    conf_path = os.path.expanduser('~/work/kde/conf/googlecodecalibre')
    with open(conf_path, 'rb') as conf:
        gc_password, username, password = conf.read().strip().split('|')
    data = {
        'username': username,
        'password': password,
        'gc_password': gc_password,
        'path_map_server': 'root@kovidgoyal.net',
        'path_map_location': '/var/www/status.calibre-ebook.com/googlepaths',
        # If you change this remember to change it in the
        # status.calibre-ebook.com server as well
        'project': 'calibre-ebook',
    }
    return data
def get_sourceforge_data():
    """Static SourceForge upload coordinates."""
    return dict(username='kovidgoyal', project='calibre')
def send_data(loc):
    """rsync the contents of *loc* to the staging directory over ssh."""
    cmd = ['rsync', '--inplace', '--delete', '-r', '-z', '-h', '--progress',
           '-e', 'ssh -x', loc + '/',
           '%s@%s:%s' % (STAGING_USER, STAGING_HOST, STAGING_DIR)]
    subprocess.check_call(cmd)
def gc_cmdline(ver, gdata):
    """Build the hosting.py argument list for a Google Code upload."""
    cmd = [__appname__, ver, 'fmap', 'googlecode',
           gdata['project'], gdata['username'], gdata['password'],
           gdata['gc_password']]
    cmd += ['--path-map-server', gdata['path_map_server'],
            '--path-map-location', gdata['path_map_location']]
    return cmd
def sf_cmdline(ver, sdata):
    """Build the hosting.py argument list for a SourceForge upload."""
    args = [__appname__, ver, 'fmap', 'sourceforge']
    args.append(sdata['project'])
    args.append(sdata['username'])
    return args
def run_remote_upload(args):
    """Execute hosting.py with *args* on the staging host over ssh."""
    print 'Running remotely:', ' '.join(args)
    # The command is run from STAGING_DIR where hosting.py was rsynced.
    subprocess.check_call(['ssh', '-x', '%s@%s'%(STAGING_USER, STAGING_HOST),
        'cd', STAGING_DIR, '&&', 'python', 'hosting.py']+args)
# }}}
class UploadInstallers(Command): # {{{
    """Copy built installers to staging, then publish to SF and Google Code."""
    def add_options(self, parser):
        parser.add_option('--replace', default=False, action='store_true', help=
            'Replace existing installers, when uploading to google')
    def run(self, opts):
        # Only upload installers that were actually built (present in dist/).
        all_possible = set(installers())
        available = set(glob.glob('dist/*'))
        files = {x:installer_description(x) for x in
                all_possible.intersection(available)}
        tdir = mkdtemp()
        try:
            self.upload_to_staging(tdir, files)
            self.upload_to_sourceforge()
            self.upload_to_google(opts.replace)
        finally:
            # Always remove the staging copy, even on failure.
            shutil.rmtree(tdir, ignore_errors=True)
    def upload_to_staging(self, tdir, files):
        """Assemble tdir (hosting.py + installers + fmap) and rsync it."""
        os.mkdir(tdir+'/dist')
        hosting = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                'hosting.py')
        shutil.copyfile(hosting, os.path.join(tdir, 'hosting.py'))
        for f in files:
            shutil.copyfile(f, os.path.join(tdir, f))
        # fmap maps each file name to its human readable description.
        with open(os.path.join(tdir, 'fmap'), 'wb') as fo:
            for f, desc in files.iteritems():
                fo.write('%s: %s\n'%(f, desc))
        # Retry the rsync forever -- staging uploads are flaky.
        while True:
            try:
                send_data(tdir)
            except:
                print('\nUpload to staging failed, retrying in a minute')
                time.sleep(60)
            else:
                break
    def upload_to_google(self, replace):
        gdata = get_google_data()
        args = gc_cmdline(__version__, gdata)
        if replace:
            args = ['--replace'] + args
        run_remote_upload(args)
    def upload_to_sourceforge(self):
        sdata = get_sourceforge_data()
        args = sf_cmdline(__version__, sdata)
        run_remote_upload(args)
# }}}
class UploadUserManual(Command): # {{{
    description = 'Build and upload the User Manual'
    sub_commands = ['manual']
    def build_plugin_example(self, path):
        """Zip one plugin example directory and scp it to the downloads host."""
        from calibre import CurrentDir
        with NamedTemporaryFile(suffix='.zip') as f:
            # World readable so the web server can serve the file.
            os.fchmod(f.fileno(),
                stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWRITE)
            with CurrentDir(path):
                with ZipFile(f, 'w') as zf:
                    for x in os.listdir('.'):
                        if x.endswith('.swp'): continue  # skip vim swap files
                        zf.write(x)
                        if os.path.isdir(x):
                            for y in os.listdir(x):
                                zf.write(os.path.join(x, y))
            bname = self.b(path) + '_plugin.zip'
            dest = '%s/%s'%(DOWNLOADS, bname)
            subprocess.check_call(['scp', f.name, 'divok:'+dest])
    def run(self, opts):
        # Package every plugin example, then rsync the built HTML manual.
        path = self.j(self.SRC, '..', 'manual', 'plugin_examples')
        for x in glob.glob(self.j(path, '*')):
            self.build_plugin_example(x)
        check_call(' '.join(['rsync', '-z', '-r', '--progress',
            'manual/.build/html/',
            'bugs:%s'%USER_MANUAL]), shell=True)
# }}}
class UploadDemo(Command): # {{{
    description = 'Rebuild and upload various demos'
    def run(self, opts):
        # Convert the demo HTML to LRF with a representative set of options.
        check_call(
           '''ebook-convert %s/demo.html /tmp/html2lrf.lrf '''
           '''--title='Demonstration of html2lrf' --authors='Kovid Goyal' '''
           '''--header '''
           '''--serif-family "/usr/share/fonts/corefonts, Times New Roman" '''
           '''--mono-family  "/usr/share/fonts/corefonts, Andale Mono" '''
           ''''''%self.j(self.SRC, HTML2LRF), shell=True)
        # Bundle the demo sources together with the converted LRF.
        check_call(
            'cd src/calibre/ebooks/lrf/html/demo/ && '
            'zip -j /tmp/html-demo.zip * /tmp/html2lrf.lrf', shell=True)
        # Publish to the downloads host.
        check_call('scp /tmp/html-demo.zip divok:%s/'%(DOWNLOADS,), shell=True)
# }}}
class UploadToServer(Command): # {{{
description = 'Upload miscellaneous data to calibre server'
def run(self, opts):
check_call('ssh divok rm -f %s/calibre-\*.tar.xz'%DOWNLOADS, shell=True)
#check_call('scp dist/calibre-*.tar.xz divok:%s/'%DOWNLOADS, shell=True)
check_call('gpg --armor --detach-sign dist/calibre-*.tar.xz',
shell=True)
check_call('scp dist/calibre-*.tar.xz.asc divok:%s/signatures/'%DOWNLOADS,
shell=True)
check_call('ssh divok bzr update /usr/local/calibre',
shell=True)
check_call('''ssh divok echo %s \\> %s/latest_version'''\
%(__version__, DOWNLOADS), shell=True)
check_call('ssh divok /etc/init.d/apache2 graceful',
shell=True)
tdir = mkdtemp()
for installer in installers():
if not os.path.exists(installer):
continue
|
ncss-tech/geo-pit | updateAttTable/wholesale_change.py | Python | gpl-2.0 | 3,943 | 0.012934 | #-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: charles.ferguson
#
# Created: 20/05/2015
# Copyright: (c) charles.ferguson 2015
# Licence: <your licence>
#-------------------------------------------------------------------------------
def errorMsg():
    """Report the current exception (traceback, type, value) via arcpy.
    Intended to be called from an except block; never raises itself.
    """
    try:
        # sys.exc_type / sys.exc_value were removed from Python; take the
        # equivalent values from the sys.exc_info() tuple instead.
        exc_type, exc_value, tb = sys.exc_info()
        tbinfo = traceback.format_tb(tb)[0]
        theMsg = tbinfo + " \n" + str(exc_type) + ": " + str(exc_value)
        arcpy.AddError(theMsg)
    except:
        # Reporting must never raise; fall back to a generic message.
        arcpy.AddError("Unhandled error in errorMsg method")
        pass
import sys, os, traceback, time, arcpy
# ArcGIS script-tool positional parameters.
changeTblParam = arcpy.GetParameterAsText(0)  # table queried for replacement MUSYM values
areaParam = arcpy.GetParameterAsText(1)       # AREASYMBOL used in every where-clause
xstFldParam = arcpy.GetParameterAsText(2)     # existing-field name (not referenced below)
newFldParam = arcpy.GetParameterAsText(3)     # field read for the replacement value
musymParam = arcpy.GetParameterAsText(4)      # ';'-separated list of MUSYMs to update
spTblParam = arcpy.GetParameterAsText(5)      # target table/feature class to edit
updateDict = {}  # maps each requested MUSYM to its replacement value
musymLst = str(musymParam).split(';')
arcpy.AddMessage('\n\n')
try:
##    rav = False
    # Work out the workspace that owns the target table: shapefiles edit
    # in their folder, geodatabase members in the .gdb/.mdb/.sde container.
    cp = arcpy.Describe(spTblParam).catalogPath
    bName = os.path.dirname(cp)
    if cp.endswith('.shp'):
        aWS = os.path.dirname(cp)
    elif [any(ext) for ext in ('.gdb', '.mdb', '.sde') if ext in os.path.splitext(bName)]:
        # NOTE(review): odd construction -- the list is non-empty (truthy)
        # exactly when bName's extension is .gdb/.mdb/.sde; a plain
        # membership test would be clearer.
        aWS = bName
    else:
        aWS = os.path.dirname(bName)
##        rav = True
    # First pass: look up the replacement value for every requested MUSYM.
    for musym in musymLst:
        wc = 'AREASYMBOL = ' "'" + areaParam + "' AND MUSYM = '" + musym + "'"
        #arcpy.AddMessage(wc)
        with arcpy.da.SearchCursor(changeTblParam, newFldParam, where_clause = wc) as rows:
            for row in rows:
                updateDict[musym] = str(row[0])
        # NOTE(review): if the cursor returned no rows `row` is undefined
        # and this del raises, aborting the run via the outer except.
        del row, rows, wc
    #for k,v in updateDict.iteritems():
        #arcpy.AddMessage(k + v)
    aCnt = len(updateDict)
    arcpy.SetProgressor("Step", "Initializing tool", 0, aCnt, 1)
    c = 0
    # Second pass: apply each replacement inside an edit session.
    for key in updateDict:
        time.sleep(0.05)
        c += 1
        arcpy.SetProgressorLabel("Updating " + key + " (" + str(c) + " of " + str(aCnt) + ")")
        upVal = updateDict.get(key)
        if len(upVal) > 6:
            # Replacement values longer than 6 characters are rejected.
            arcpy.AddWarning('Illegal value for ' + key + ', greater than 6 characters (' + upVal + ')')
            arcpy.SetProgressorPosition()
        elif upVal == 'None':
            arcpy.AddWarning('No update value specified for ' + key)
            arcpy.SetProgressorPosition()
        else:
            n=0
            wc = '"AREASYMBOL" = ' "'" + areaParam + "' AND \"MUSYM\" = '" + key + "'"
            # Wrap the cursor in an edit session so the change is transactional.
            with arcpy.da.Editor(aWS) as edit:
##
                with arcpy.da.UpdateCursor(spTblParam, "MUSYM", where_clause=wc)as rows:
                    for row in rows:
                        row[0] = str(upVal)
                        rows.updateRow(row)
                        n=n+1
            if n > 0:
                arcpy.AddMessage('Successfully updated ' + key + ' to ' + upVal + ", " + str(n) + " occurances")
            try:
                # `row` only exists if the cursor yielded rows; the except
                # branch therefore doubles as the "no rows found" report.
                del row, rows
                arcpy.SetProgressorPosition()
            except:
                arcpy.AddMessage('No rows were found for ' + key)
                arcpy.SetProgressorPosition()
##    try:
##        edit = arcpy.da.Editor(aWS)
##        edit.startEditing()
##        edit.startOperation
##        if rav == True:
##
##            try:
##                arcpy.management.RegisterAsVersioned(aWS)
##            except:
##                pass
##            #do some stuff
##        edit.stopOperation()
##        edit.stopEditing(True)
##    except arcpy.ExecuteError:
##        arcpy.AddMessage(arcpy.GetMessages(2))
    arcpy.AddMessage('\n\n')
except:
    errorMsg()
dufresnedavid/canada | account_tax_expense_include/__openerp__.py | Python | agpl-3.0 | 1,843 | 0 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Odoo, Open Source Management Solution
# Copyright (C) 2010 - 2014 Savoir-faire Linux
# (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP/Odoo module manifest: the server evaluates this file and expects a
# bare dict literal, so it must stay a single expression.
{
    "name": "Taxes included in expense",
    "version": "1.2",
    "author": "Savoir-faire Linux",
    "category": "Account",
    "website": "http://wwww.savoirfairelinux.com",
    "license": "AGPL-3",
    "description": """
Taxes included in expense
=========================
This module adds a checkbox to tax to include tax in expense invoices.
It is useful if your taxes are not included in the price, but you
want to ease the life of your employees by allowing them to enter
their expenses with the taxes included.
Contributors
------------
* Jonatan Cloutier <jonatan.cloutier@savoirfairelinux.com>
* Maxime Chambreuil <maxime.chambreuil@savoirfairelinux.com>
* Sandy Carter <sandy.carter@savoirfairelinux.com>
""",
    "depends": ["account"],
    "data": [
        "account_tax_view.xml",
    ],
    "installable": True
}
moneymaker365/plugin.video.ustvvod | resources/lib/main_aenetwork.py | Python | gpl-2.0 | 8,687 | 0.036491 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import common
import connection
import m3u8
import base64
import os
import ustvpaths
import re
import simplejson
import sys
import time
import urllib
import xbmc
import xbmcaddon
import xbmcgui
import xbmcplugin
from bs4 import BeautifulSoup, SoupStrainer
addon = xbmcaddon.Addon()
# Kodi passes the plugin handle as the first command-line argument.
pluginHandle = int(sys.argv[1])
def masterlist(SITE, SHOWS):
    """Build the master show list for an A&E Networks site.

    Fetches the show feed at *SHOWS*, keeps only shows that have video, and
    returns (show name, site id, next mode, quoted show id) tuples.
    """
    master_db = []
    master_data = connection.getURL(SHOWS)
    master_tree = simplejson.loads(master_data)
    for master_item in master_tree:
        # The feed encodes booleans as the strings 'true' / 'false'.
        if (master_item['hasNoVideo'] == 'false'):
            #print master_item
            try:
                master_name = common.smart_unicode(master_item['detailTitle'])
                master_db.append((master_name, SITE, 'seasons', urllib.quote_plus(master_item['showID'])))
            except Exception,e:
                # Skip malformed feed entries but log them for debugging.
                print "Exception", e, master_item
    return master_db
def seasons(SITE, SEASONSEPISODE, SEASONSCLIPS, EPISODES, CLIPS, season_url = common.args.url):
    """List every episode season and clip season for a show.

    Queries the episode-season feed first, then the clip-season feed, and
    returns (label, site, mode, episode-feed url, -1, -1) tuples.

    NOTE(review): the default ``season_url = common.args.url`` is evaluated
    once at import time — confirm callers always pass or re-read the url.
    """
    seasons = []
    season_data = connection.getURL(SEASONSEPISODE % season_url)
    season_tree = simplejson.loads(season_data)['season']
    for season_item in season_tree:
        season_name = 'Season ' + str(season_item)
        seasons.append((season_name, SITE, 'episodes', EPISODES % (season_url, season_item), -1, -1))
    # Re-read the show id, then repeat the walk for clip seasons.
    season_url = common.args.url
    season_data = connection.getURL(SEASONSCLIPS % season_url)
    season_tree = simplejson.loads(season_data)['season']
    for season_item in season_tree:
        season_name = 'Season Clips ' + str(season_item)
        seasons.append((season_name, SITE, 'episodes', CLIPS % (season_url, season_item), -1, -1))
    return seasons
def episodes(SITE, episode_url = common.args.url):
    """Build the episode/clip listing for one season feed.

    Parses the JSON feed at *episode_url*, skips entries behind a paywall,
    and returns tuples of
    (plugin url, name, thumb, infoLabels, next mode, is HD, type).

    (Repairs extraction-garbled tokens in the signature and loop variable.)
    """
    episodes = []
    episode_data = connection.getURL(episode_url)
    episode_tree = simplejson.loads(episode_data)['Items']
    for episode_item in episode_tree:
        # Feed booleans are the strings 'true' / 'false'.
        if episode_item['isBehindWall'] == 'false':
            url = episode_item['playURL_HLS']
            episode_duration = int(episode_item['totalVideoDuration']) / 1000
            try:
                episode_airdate = common.format_date(episode_item['airDate'].split('T')[0],'%Y-%m-%d')
            except:
                episode_airdate = -1
            episode_name = episode_item['title']
            try:
                season_number = int(episode_item['season'])
            except:
                season_number = -1
            try:
                episode_number = int(episode_item['episode'])
            except:
                episode_number = -1
            # Fall through the three possible artwork keys in order of
            # preference; not every entry carries all of them.
            try:
                episode_thumb = episode_item['thumbnailImageURL']
            except:
                try:
                    episode_thumb = episode_item['stillImageURL']
                except:
                    try:
                        episode_thumb = episode_item['modalImageURL']
                    except:
                        episode_thumb = None
            episode_plot = episode_item['description']
            episode_showtitle = episode_item['seriesName']
            try:
                episode_mpaa = episode_item['rating'].upper()
            except:
                episode_mpaa = None
            try:
                # Collected but currently unused downstream.
                episode_expires = episode_item['expirationDate'].split('T')[0]
            except:
                episode_expires = None
            if episode_item['mrssLengthType'] == 'Episode':
                episode_type = 'Full Episode'
            else:
                episode_type = 'Clips'
            try:
                if episode_item['isHD'] == 'true':
                    episode_HD = True
                else:
                    episode_HD = False
            except:
                episode_HD = False
            # Build the plugin:// callback url for this entry.
            u = sys.argv[0]
            u += '?url="' + urllib.quote_plus(url) + '"'
            u += '&mode="' + SITE + '"'
            u += '&sitemode="play_video"'
            infoLabels={ 'title' : episode_name,
                         'durationinseconds' : episode_duration,
                         'season' : season_number,
                         'episode' : episode_number,
                         'plot' : episode_plot,
                         'premiered' : episode_airdate,
                         'TVShowTitle' : episode_showtitle,
                         'mpaa' : episode_mpaa }
            episodes.append((u, episode_name, episode_thumb, infoLabels, 'list_qualities', episode_HD, episode_type))
    return episodes
def list_qualities():
    """Return the available (display kbps, raw bandwidth) pairs for a video.

    Signs the release URL, fetches the SMIL metafile to locate the real HLS
    master playlist, then reads each variant's bandwidth from it.
    """
    video_url = common.args.url
    bitrates = []
    sig = sign_url(video_url)
    # Keep everything before '?' and re-append the HLS metafile query args.
    smil_url = re.compile('(.+)\?').findall(video_url)[0] + '?switch=hls&assetTypes=medium_video_s3&mbr=true&metafile=true&sig=' + sig
    video_data = connection.getURL(smil_url)
    smil_tree = BeautifulSoup(video_data, 'html.parser')
    video_url2 = smil_tree.video['src']
    m3u_master_data = connection.getURL(video_url2)
    m3u_master = m3u8.parse(m3u_master_data)
    for video_index in m3u_master.get('playlists'):
        bitrate = int(video_index.get('stream_info')['bandwidth'])
        # Show bandwidth as kbps in the quality-selection dialog.
        display = int(bitrate) / 1024
        bitrates.append((display, bitrate))
    return bitrates
def play_video():
    """Resolve and play the selected video inside Kodi.

    Steps: sign the release url, resolve the SMIL metafile to the HLS master
    playlist, pick a variant (requested bitrate or best under the configured
    cap), download the AES key, rewrite segment urls to go through the local
    proxy on port 12345, write the playlist to disk and hand it to Kodi.
    """
    try:
        qbitrate = common.args.quality
    except:
        # No explicit quality requested; pick from the addon setting below.
        qbitrate = None
    closedcaption = None
    video_url = common.args.url
    sig = sign_url(video_url)
    smil_url = re.compile('(.+)\?').findall(video_url)[0] + '?switch=hls&assetTypes=medium_video_s3&mbr=true&metafile=true&sig=' + sig
    smil_data = connection.getURL(smil_url)
    smil_tree = BeautifulSoup(smil_data, 'html.parser')
    video_url2 = smil_tree.video['src']
    try:
        closedcaption = smil_tree.textstream['src']
    except:
        pass
    m3u_master_data = connection.getURL(video_url2, savecookie = True)
    m3u_master = m3u8.parse(m3u_master_data)
    hbitrate = -1
    # User's quality cap, stored in kbps; compare against raw bandwidth.
    sbitrate = int(addon.getSetting('quality')) * 1024
    for video_index in m3u_master.get('playlists'):
        bitrate = int(video_index.get('stream_info')['bandwidth'])
        if qbitrate is None:
            # Best bitrate not exceeding the configured cap.
            if bitrate > hbitrate and bitrate <= sbitrate:
                hbitrate = bitrate
                m3u8_url = video_index.get('uri')
        elif bitrate == qbitrate:
            m3u8_url = video_index.get('uri')
    m3u_data = connection.getURL(m3u8_url, loadcookie = True)
    key_url = re.compile('URI="(.*?)"').findall(m3u_data)[0]
    key_data = connection.getURL(key_url, loadcookie = True)
    key_file = open(ustvpaths.KEYFILE % '0', 'wb')
    key_file.write(key_data)
    key_file.close()
    # Rewrite every segment url so the local proxy serves (and decrypts) it.
    video_url5 = re.compile('(http:.*?)\n').findall(m3u_data)
    for i, video_item in enumerate(video_url5):
        newurl = base64.b64encode(video_item)
        newurl = urllib.quote_plus(newurl)
        m3u_data = m3u_data.replace(video_item, 'http://127.0.0.1:12345/0/foxstation/' + newurl)
    localhttpserver = True
    filestring = 'XBMC.RunScript(' + os.path.join(ustvpaths.LIBPATH,'proxy.py') + ', 12345)'
    xbmc.executebuiltin(filestring)
    # Give the proxy script time to come up before playback starts.
    time.sleep(20)
    m3u_data = m3u_data.replace(key_url, 'http://127.0.0.1:12345/play0.key')
    playfile = open(ustvpaths.PLAYFILE, 'w')
    playfile.write(m3u_data)
    playfile.close()
    finalurl = ustvpaths.PLAYFILE
    if (addon.getSetting('enablesubtitles') == 'true') and (closedcaption is not None):
        convert_subtitles(closedcaption)
    item = xbmcgui.ListItem(path = finalurl)
    try:
        item.setThumbnailImage(common.args.thumb)
    except:
        pass
    try:
        item.setInfo('Video', { 'title' : common.args.name,
                                'season' : common.args.season_number,
                                'episode' : common.args.episode_number,
                                'TVShowTitle' : common.args.show_title})
    except:
        pass
    xbmcplugin.setResolvedUrl(pluginHandle, True, item)
    # Wait for playback to begin before attaching subtitles, and keep the
    # proxy alive until playback finishes, then ask it to shut down.
    if ((addon.getSetting('enablesubtitles') == 'true') and (closedcaption is not None)) or localhttpserver is True:
        while not xbmc.Player().isPlaying():
            xbmc.sleep(100)
    if (addon.getSetting('enablesubtitles') == 'true') and (closedcaption is not None):
        xbmc.Player().setSubtitles(ustvpaths.SUBTITLE)
    if localhttpserver is True:
        while xbmc.Player().isPlaying():
            xbmc.sleep(1000)
        connection.getURL('http://localhost:12345/stop', connectiontype = 0)
def clean_subs(data):
    """Normalise raw caption markup into plain subtitle text.

    ``<br>`` tags become newlines, every other tag becomes a space, runs of
    three or more whitespace characters collapse to a single space, and the
    ``&apos;`` / ``&gt;`` entities are decoded.
    """
    cleaned = re.sub(r'<br.*?>', '\n', data)
    cleaned = re.sub(r'<.*?>', ' ', cleaned)
    cleaned = re.sub(r'\s\s\s+', ' ', cleaned)
    cleaned = cleaned.replace('&apos;', "'")
    return cleaned.replace('&gt;', '>')
def convert_subtitles(closedcaption):
str_output = ''
subtitle_data = connection.getURL(closedcaption, connectiontype = 0)
subtitle_data = BeautifulSoup(subtitle_data, 'html.parser', parse_only = SoupStrainer('div'))
srt_output = ''
lines = subtitle_data.find_all('p')
i = 0
last_start_time = ''
last_end_time = ''
for line in lines:
try:
if line is not None:
sub = clean_subs(common.smart_utf8(line))
start_time = common.smart_utf8(line['begin'].replace('.', ','))
end_time = common.smart_utf8(line['end'].replace('.', ','))
if start_time != last_start_time and end_time != last_end_time:
str_output += '\n' + str(i + 1) + '\n' + start_time + ' --> ' + end |
PlayCircular/play_circular | apps/paginas/admin.py | Python | agpl-3.0 | 12,654 | 0.035731 | #coding=utf-8
# Copyright (C) 2014 by Víctor Romero Blanco <info at playcircular dot com>.
# http://playcircular.com/
# It's licensed under the AFFERO GENERAL PUBLIC LICENSE unless stated otherwise.
# You can get copies of the licenses here: http://www.affero.org/oagpl.html
# AFFERO GENERAL PUBLIC LICENSE is also included in the file called "LICENSE".
from django.contrib import admin
from django.conf import settings
from settings import MEDIA_ROOT, STATIC_ROOT
from django.utils.translation import ugettext as _
from django.contrib.contenttypes import generic
from django.conf.urls import patterns, include, url
from django.contrib.auth.models import User
from django.contrib.contenttypes import generic
from sorl.thumbnail import default
from sorl.thumbnail.admin import AdminImageMixin
from paginas.models import *
from paginas.forms import *
from mptt.admin import MPTTModelAdmin
# Si utilizo el DjangoMpttAdmin no funciona el def queryset
from django_mptt_admin.admin import DjangoMpttAdmin
from tinymce import models as tinymce_models
from tinymce.models import HTMLField
from django.db.models import Q
from django.db.utils import DatabaseError,IntegrityError
ADMIN_THUMBS_SIZE = '80x30'
############################################################################################################################
class Fotos_Entrada_Inline(admin.TabularInline):
    """Inline admin for photos attached to an entry (up to 14)."""
    model = Fotos_entrada
    extra = 2           # blank upload rows shown by default
    max_num = 14
    verbose_name = _(u'foto')
class Fotos_Pagina_Inline(admin.TabularInline):
    """Inline admin for photos attached to a page (up to 14)."""
    model = Fotos_pagina
    extra = 2           # blank upload rows shown by default
    max_num = 14
    verbose_name = _(u'foto')
class MetatagInline(generic.GenericStackedInline):
    """Generic inline exposing a single SEO metatag record per object."""
    model = Metatag
    extra = 1
    max_num = 1
    verbose_name = "SEO"
class Idiomas_Categoria_Entrada_Inline(admin.StackedInline):
    """Inline admin for per-language translations of an entry category.

    At least one translation is required (enforced by the formset); up to
    five languages are allowed.
    """
    model = Idiomas_categoria_entrada
    formset = Idioma_requerido_formset
    prepopulated_fields = {"slug": ("nombre",)}
    extra = 1
    max_num = 5

    def get_extra(self, request, obj=None, **kwargs):
        # One blank translation form when creating, none when editing an
        # existing object. (The original had an unreachable duplicate
        # ``return extra`` here; behavior is unchanged.)
        return 0 if obj else 1

    verbose_name = _(u'idioma de categoria')
    verbose_name_plural = _(u'idiomas de categorias')
class Idiomas_P | agina_Inline(admin.StackedInline):
model = Idiomas_pagina
formset = Idioma_requerido_formset
prepopulated_fields = {"slug": ("titulo",)}
extra = 1
max_num = 5
def get_extra(self, request, obj=None, **kwargs):
extra = 1
if obj:
extra = 0
return extra
return extra
verbose_name = _(u'idioma de la pagina')
verbose_name_plural = _(u'idiomas')
############################################################################################################################
class Idiomas_Entrada_Inline(admin.StackedInline):
    """Inline admin for per-language translations of an entry.

    At least one translation is required (enforced by the formset); up to
    five languages are allowed.
    """
    model = Idiomas_entrada
    formset = Idioma_requerido_formset
    prepopulated_fields = {"slug": ("titulo",)}
    extra = 1
    max_num = 5

    def get_extra(self, request, obj=None, **kwargs):
        # One blank translation form when creating, none when editing an
        # existing object. (The original had an unreachable duplicate
        # ``return extra`` here; behavior is unchanged.)
        return 0 if obj else 1

    verbose_name = _(u'idioma de la entrada')
    verbose_name_plural = _(u'idiomas')
class Categoria_Entrada_Admin(admin.ModelAdmin):
    """Admin for entry categories with per-group visibility rules."""
    list_display = ('nombre_de_categoria','usuario','grupos','idiomas','creada','creada_por','modificada','modificada_por')
    search_fields = ['nombre']
    form = Form_Categoria_Entrada_Admin
    inlines = [
        Idiomas_Categoria_Entrada_Inline,
    ]
    # Needed so Form_Categoria_Entrada_Admin receives the request/user.
    def get_form(self, request, obj=None, **kwargs):
        self.exclude = []
        if not request.user.is_superuser:
            self.exclude.append('superadmin')
        AdminForm = super(Categoria_Entrada_Admin, self).get_form(request, obj, **kwargs)
        # Wrap the form class so instantiating it injects request and user.
        class ModelFormMetaClass(AdminForm):
            def __new__(cls, *args, **kwargs):
                kwargs['request'] = request
                kwargs['user'] = request.user
                return AdminForm(*args, **kwargs)
        return ModelFormMetaClass
    def queryset(self, request):
        """Restrict rows: superusers see all, group admins see their
        groups' non-superadmin rows, everyone else sees only their own."""
        qs = super(Categoria_Entrada_Admin, self).queryset(request)
        # Superusers see everything.
        if request.user.is_superuser:
            return qs
        else:
            # Group administrators see every row belonging to their groups.
            grupos_administrados = Miembro.objects.filter(usuario=request.user,nivel=u'Administrador').values_list('grupo', flat=True).order_by('grupo')
            if len(grupos_administrados) > 0:
                return qs.filter(grupo__in=grupos_administrados,superadmin=False)
            else:
                # Otherwise only the user's own rows.
                return qs.filter(usuario=request.user)
    def save_model(self, request, obj, form, change):
        # Stamp ownership/audit fields only on first save.
        # NOTE(review): on the change path nothing is saved here at all —
        # confirm whether edits are persisted elsewhere or silently dropped.
        try:
            c = Categoria_Entrada.objects.get(pk=obj.pk)
        except Categoria_Entrada.DoesNotExist:
            obj.usuario = request.user
            obj.creada_por = request.user
            obj.modificada_por = request.user
            obj.save()
class Entrada_Admin(admin.ModelAdmin):
    """Admin for entries with per-group visibility rules."""
    list_display = ('usuario','_titulo','tipo','grupos','idiomas','visibilidad','estado','comentarios','creada','creada_por','modificada','modificada_por')
    list_filter = ('visibilidad','estado')
    search_fields = ['usuario','Idiomas_entrada__titulo']
    filter_horizontal = ['categoria','entradas_relacionadas']
    form = Form_Entrada_Admin
    change_form_template = 'admin/paginas/entrada/change_form.html'
    inlines = [
        Idiomas_Entrada_Inline,
        Fotos_Entrada_Inline,
    ]
    # Needed so Form_Entrada_Admin receives the request/user.
    def get_form(self, request, obj=None, **kwargs):
        self.exclude = []
        if not request.user.is_superuser:
            self.exclude.append('superadmin')
        AdminForm = super(Entrada_Admin, self).get_form(request, obj, **kwargs)
        # Wrap the form class so instantiating it injects request and user.
        class ModelFormMetaClass(AdminForm):
            def __new__(cls, *args, **kwargs):
                kwargs['request'] = request
                kwargs['user'] = request.user
                return AdminForm(*args, **kwargs)
        return ModelFormMetaClass
    def queryset(self, request):
        """Restrict rows: superusers see all, group admins see their
        groups' non-superadmin rows, everyone else sees only their own."""
        qs = super(Entrada_Admin, self).queryset(request)
        # Superusers see everything.
        if request.user.is_superuser:
            return qs
        else:
            # Group administrators see every row belonging to their groups.
            grupos_administrados = Miembro.objects.filter(usuario=request.user,nivel=u'Administrador').values_list('grupo', flat=True).order_by('grupo')
            if len(grupos_administrados) > 0:
                return qs.filter(grupo__in=grupos_administrados,superadmin=False)
            else:
                # Otherwise only the user's own rows.
                return qs.filter(usuario=request.user)
    def save_model(self, request, obj, form, change):
        # Stamp audit fields only on first save.
        # NOTE(review): on the change path nothing is saved here at all —
        # confirm whether edits are persisted elsewhere or silently dropped.
        try:
            c = Entrada.objects.get(pk=obj.pk)
        except Entrada.DoesNotExist:
            #obj.usuario = request.user
            obj.creada_por = request.user
            obj.modificada_por = request.user
            obj.save()
class Pagina_Admin(DjangoMpttAdmin):
list_display = ('_titulo','parent','usuario','grupos','idiomas','tipo','visibilidad','en_menu','estado','comentarios','creada','creada_por','modificada','modificada_por')
form = Form_Pagina_Admin
change_form_template = 'admin/paginas/pagina/change_form.html'
list_filter = ('tipo','estado')
search_fields = ['usuario']
inlines = [
Idiomas_Pagina_Inline,
Fotos_Pagina_Inline,
]
#Esto es para que funcione el Form_Pagina_Admin. Para pasarle el request
def get_form(self, request, obj=None, **kwargs):
self.exclude = []
if not request.user.is_superuser:
self.exclude.append('tipo')
self.exclude.append('superadmin')
AdminForm = super(Pagina_Admin, self).get_form(request, obj, **kwargs)
class Mode |
nathanielbecker/business-contacter-django-app | myproject/cookie_app/migrations/0008_auto_20141120_0807.py | Python | apache-2.0 | 847 | 0.002361 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Create the DateTime model and link barebones_crud.Created to it.

    (Repairs extraction-garbled tokens in the class header and the
    'datetime' field tuple.)
    """

    dependencies = [
        ('cookie_app', '0007_auto_20141118_0707'),
    ]

    operations = [
        migrations.CreateModel(
            name='DateTime',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('datetime', models.DateTimeField(auto_now_add=True, null=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.AddField(
            model_name='barebones_crud',
            name='Created',
            field=models.ForeignKey(to='cookie_app.DateTime', null=True),
            preserve_default=True,
        ),
    ]
|
napalm-automation/napalm-logs | napalm_logs/transport/cli.py | Python | apache-2.0 | 559 | 0 | # -*- coding: utf-8 -*-
'''
CLI transport for napalm-logs.
Useful for debug only, publishes (prints) on the CLI.
'''
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
# Import napalm-logs pkgs
from napalm_logs.transport.base import TransportBase
class CLITransport(TransportBase):
    '''
    CLI transport class.
    '''
    NO_ENCRYPT = True
    # This tells the publisher to not encrypt the messages
    # published over this channel.
    def publish(self, obj):
        # Debug transport: "publishing" is simply printing to stdout.
        print(obj)
|
stormi/tsunami | src/primaires/objet/types/pierre_feu.py | Python | bsd-3-clause | 2,715 | 0.004071 | # -*-coding:Utf-8 -*
# Copyright (c) 2012 NOEL-BARON Léo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le type pierre à feu."""
from primaires.interpreteur.editeur.entier import Entier
from primaires.objet.types.base import BaseType
class PierreFeu(BaseType):

    """Object type: fire stone (flint).

    A fire-starting stone whose effectiveness is decremented by one on each
    use. (Repairs extraction-garbled tokens in __init__'s docstring line and
    the ``efficacite.prompt`` assignment; comments translated to English,
    player-facing French strings kept verbatim.)
    """

    nom_type = "pierre à feu"
    def __init__(self, cle=""):
        """Build the object with a default effectiveness of 30."""
        BaseType.__init__(self, cle)
        self.efficacite = 30
        # Editor: expose 'efficacite' as an integer field between 1 and 50.
        self.etendre_editeur("f", "efficacite", Entier, self, "efficacite",
            1, 50)
    def travailler_enveloppes(self, enveloppes):
        """Configure the editor envelopes for this type."""
        efficacite = enveloppes["f"]
        efficacite.apercu = "{objet.efficacite}"
        efficacite.prompt = "Entrez une efficacité : "
        efficacite.aide_courte = \
            "Entrez l'|ent|efficacité|ff| initiale de la pierre, de " \
            "|cmd|1|ff| (quasi nulle) à |cmd|50|ff| (maximale).\nCette " \
            "efficacité conditionne la solidité de la pierre et se " \
            "décrémente de 1\nà chaque utilisation.\n\n" \
            "Efficacité actuelle : {objet.efficacite}"
|
RyanDJLee/pyta | tests/test_type_inference/test_assign_tuple.py | Python | gpl-3.0 | 4,373 | 0.002058 | import astroid
from nose.tools import eq_
import tests.custom_hypothesis_support as cs
from tests.custom_hypothesis_support import lookup_type
from python_ta.transforms.type_inference_visitor import NoType
from python_ta.typecheck.base import TypeInfo, TypeFail
from typing import Tuple
def generate_tuple(length: int, t: type=None):
    """Render source text for a comma-separated tuple of ``length + 1`` items.

    With ``t is None`` the items are variable names ``x0, x1, ...``;
    otherwise they are literals of type ``t`` (int, bool or str). Each item
    is followed by a trailing ``', '``.
    """
    parts = []
    for index in range(length + 1):
        if t is None:
            parts.append(f'x{index}')
        elif t == int:
            parts.append(str(index))
        elif t == bool:
            parts.append(str(index % 2 == 0))
        elif t == str:
            parts.append(f"'{chr(index + 97)}'")
    return ''.join(part + ', ' for part in parts)
def generate_tuple_assign(length: int, t: type, same_length: bool, more_args: bool = None):
    """Build a program of tuple assignments ``x0, x1, ... = <literals>``.

    ``same_length`` keeps only equal-sized left/right sides; otherwise
    ``more_args`` selects whether the target side is the longer one (True)
    or the shorter one (False).
    """
    lines = []
    for left in range(1, length):
        for right in range(1, length):
            if same_length:
                wanted = left == right
            elif more_args:
                wanted = left > right
            else:
                wanted = left < right
            if wanted:
                lines.append(generate_tuple(left) + '= ' + generate_tuple(right, t) + '\n')
    return ''.join(lines)
def test_tuple_same_length_int():
    """Equal-arity unpacking of int literals infers without error."""
    program = generate_tuple_assign(10, int, True)
    module, _ = cs._parse_text(program)
    for assign_node in module.nodes_of_class(astroid.Assign):
        eq_ (assign_node.inf_type, NoType())
def test_tuple_same_length_bool():
    """Equal-arity unpacking of bool literals infers without error."""
    program = generate_tuple_assign(10, bool, True)
    module, _ = cs._parse_text(program)
    for assign_node in module.nodes_of_class(astroid.Assign):
        eq_ (assign_node.inf_type, NoType())
def test_tuple_same_length_str():
    """Equal-arity unpacking of str literals infers without error."""
    program = generate_tuple_assign(10, str, True)
    module, _ = cs._parse_text(program)
    for assign_node in module.nodes_of_class(astroid.Assign):
        eq_ (assign_node.inf_type, NoType())
def test_tuple_single_var():
    """Binding a whole tuple to a single name is fine."""
    program = """
    a = 1, 2
    b = 1, 2, 3
    c = 1, 2, 3, 4
    """
    module, _ = cs._parse_text(program)
    for assign_node in module.nodes_of_class(astroid.Assign):
        eq_(assign_node.inf_type, NoType())
def test_tuple_single_val():
    """Unpacking several targets from a single scalar is a type failure."""
    program = """
    a, b = 1
    a, b, c = 1
    a, b, c, d = 1
    """
    module, _ = cs._parse_text(program)
    for assign_node in module.nodes_of_class(astroid.Assign):
        assert isinstance(assign_node.inf_type, TypeFail)
def test_tuple_extra_vars():
    """More targets than values is a type failure."""
    program = generate_tuple_assign(10, int, False, True)
    module, _ = cs._parse_text(program)
    for assign_node in module.nodes_of_class(astroid.Assign):
        assert isinstance(assign_node.inf_type, TypeFail)
def test_tuple_extra_value():
    """More values than targets is a type failure."""
    program = generate_tuple_assign(10, int, False, False)
    module, _ = cs._parse_text(program)
    for assign_node in module.nodes_of_class(astroid.Assign):
        assert isinstance(assign_node.inf_type, TypeFail)
def test_tuple_subscript():
    """Unpacking into subscript targets type-checks cleanly."""
    program = """
    lst = ['Hello', 'Goodbye']
    lst[0], lst[1] = 'Bonjour', 'Au revoir'
    """
    module, _ = cs._parse_text(program)
    for assign_node in module.nodes_of_class(astroid.Assign):
        assert not isinstance(assign_node.inf_type, TypeFail)
def test_tuple_attribute():
    """Unpacking into attribute targets type-checks cleanly.

    (Repairs an extraction-garbled token in the embedded program text.)
    """
    program = """
    class A:
        def __init__(self):
            self.first_attr = 0
            self.second_attr = 1
    a = A()
    a.first_attr, a.second_attr = 10, 11
    """
    module, _ = cs._parse_text(program)
    for assign_node in module.nodes_of_class(astroid.Assign):
        assert not isinstance(assign_node.inf_type, TypeFail)
def test_tuple_attribute_variable():
    """Unpacking a list variable into attribute targets type-checks cleanly.

    (Repairs an extraction-garbled token in the embedded program text.)
    """
    program = """
    class A:
        def __init__(self):
            self.first_attr = 0
            self.second_attr = 1
    a = A()
    some_list = [1, 2]
    a.first_attr, a.second_attr = some_list
    """
    module, _ = cs._parse_text(program)
    for assign_node in module.nodes_of_class(astroid.Assign):
        assert not isinstance(assign_node.inf_type, TypeFail)
def test_tuple_empty():
    """An empty tuple unifies with a one-element tuple holding x's type."""
    program = """
    def f(x):
        a = ()
        b = (x,)
        a = b
    """
    module, ti = cs._parse_text(program)
    functiondef_node = next(module.nodes_of_class(astroid.FunctionDef))
    eq_(lookup_type(ti, functiondef_node, 'a'), Tuple[()])
    x_type = lookup_type(ti, functiondef_node, 'x')
    eq_(lookup_type(ti, functiondef_node, 'b'), Tuple[x_type])
|
meihuanyu/rental | ipproxytool/spiders/proxy/hidemy.py | Python | mit | 1,764 | 0.010204 | #-*- coding: utf-8 -*-
import utils
from scrapy import Selector
from .basespider import BaseSpider
from proxy import Proxy
class HidemySpider(BaseSpider):
    """Proxy-list spider for hidemy.name.

    Walks the first five paginated listing pages (64 rows each) and emits a
    Proxy record per table row. (Repairs an extraction-garbled ``val`` token
    and a stray separator line in parse_page.)
    """
    name = 'hidemy'

    def __init__(self, *a, **kw):
        super(HidemySpider, self).__init__(*a, **kw)
        # Listing pages are paginated with ?start=0,64,128,...
        self.urls = ['https://hidemy.name/en/proxy-list/?start=%s' % n for n in range(0, 5 * 64, 64)]
        self.headers = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'en-US,en;q=0.5',
            'Connection': 'keep-alive',
            'Host': 'hidemy.name',
            'Referer': 'https://hidemy.name/en/proxy-list/?start=0',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:51.0) Gecko/20100101 Firefox/51.0',
        }
        self.init()

    def parse_page(self, response):
        """Extract one Proxy per table row of a listing page."""
        self.write(response.body)
        sel = Selector(response)
        infos = sel.xpath('//tbody/tr').extract()
        for i, info in enumerate(infos):
            # First row is the table header; skip it.
            if i == 0:
                continue
            val = Selector(text = info)
            ip = val.xpath('//td[1]/text()').extract_first()
            port = val.xpath('//td[2]/text()').extract_first()
            country = val.xpath('//td[3]/div/text()').extract_first()
            anonymity = val.xpath('//td[6]/text()').extract_first()
            proxy = Proxy()
            proxy.set_value(
                ip = ip,
                port = port,
                country = country,
                anonymity = anonymity,
                source = self.name,
            )
            self.add_proxy(proxy = proxy)
|
jonlabelle/SublimeJsPrettier | tests/validate_json_format.py | Python | mit | 5,940 | 0 | import codecs
import json
import re
RE_LINE_PRESERVE = re.compile(r'\r?\n', re.MULTILINE)
RE_COMMENT = re.compile(
r'''(?x)
(?P<comments>
/\*[^*]*\*+(?:[^/*][^*]*\*+)*/ # multi-line comments
| [ \t]*//(?:[^\r\n])* # single line comments
)
| (?P<code>
"(?:\\.|[^"\\])*" # double quotes
| '(?:\\.|[^'\\])*' # single quotes
| .[^/"']* # everything else
)
''',
re.DOTALL
)
RE_TRAILING_COMMA = re.compile(
r'''(?x)
(
(?P<square_comma>
, # trailing comma
(?P<square_ws>[\s\r\n]*) # white space
(?P<square_bracket>\]) # bracket
)
| (?P<curly_comma>
, # trailing comma
(?P<curly_ws>[\s\r\n]*) # white space
(?P<curly_bracket>\}) # bracket
)
)
| (?P<code>
"(?:\\.|[^"\\])*" # double quoted string
| '(?:\\.|[^'\\])*' # single quoted string
| .[^,"']* # everything else
)
''',
re.DOTALL
)
RE_LINE_INDENT_TAB = re.compile(r'^((\t+)?[^ \t\r\n][^\r\n]*)?\r?\n$')
RE_LINE_INDENT_SPACE = re.compile(r'^(((?: {4})+)?[^ \t\r\n][^\r\n]*)?\r?\n$')
RE_TRAILING_SPACES = re.compile(r'^.*?[ \t]+\r?\n?$')
E_MALFORMED = "E0"
E_COMMENTS = "E1"
E_COMMA = "E2"
W_NL_START = "W1"
W_NL_END = "W2"
W_INDENT = "W3"
W_TRAILING_SPACE = "W4"
VIOLATION_MSG = {
E_MALFORMED: 'JSON content is malformed.',
E_COMMENTS: 'Comments are not part of the JSON spec.',
E_COMMA: 'Dangling comma found.',
W_NL_START: 'Unnecessary newlines at the start of file.',
W_NL_END: 'Missing a new line at the end of the file.',
W_INDENT: 'Indentation Error.',
W_TRAILING_SPACE: 'Trailing whitespace.'
}
class CheckJsonFormat(object):
    """
    Test JSON for format irregularities.

    - Trailing spaces.
    - Inconsistent indentation.
    - New lines at end of file.
    - Unnecessary newlines at start of file.
    - Trailing commas.
    - Malformed JSON.

    (The original placed this docstring after the ``line_range`` assignment,
    so it was a bare expression rather than the class docstring.)
    """

    # Populated by index_lines(): list of (start, end, line_number) spans.
    line_range = None

    def __init__(self, use_tabs=False, allow_comments=False):
        self.use_tabs = use_tabs
        self.allow_comments = allow_comments
        self.fail = False

    def index_lines(self, text):
        """Record each line's (first char index, last char index, number)."""
        self.line_range = []
        count = 1
        last = 0
        for m in re.finditer('\n', text):
            self.line_range.append((last, m.end(0) - 1, count))
            last = m.end(0)
            count += 1

    def get_line(self, pt):
        """Get the line from char index."""
        line = None
        for r in self.line_range:
            if r[0] <= pt <= r[1]:
                line = r[2]
                break
        return line

    def check_comments(self, text):
        """Check for JavaScript comments.

        Log them and strip them out so we can continue.
        """
        def remove_comments(group):
            # Keep only the line endings so line numbering stays valid.
            return ''.join([x[0] for x in RE_LINE_PRESERVE.findall(group)])

        def evaluate(m):
            g = m.groupdict()
            if g["code"] is None:
                if not self.allow_comments:
                    self.log_failure(E_COMMENTS, self.get_line(m.start(0)))
                txt = remove_comments(g["comments"])
            else:
                txt = g["code"]
            return txt

        content = ''.join(
            map(lambda m: evaluate(m), RE_COMMENT.finditer(text)))
        return content

    def check_dangling_commas(self, text):
        """Check for dangling commas.

        Log them and strip them out so we can continue.
        """
        def check_comma(g, line):
            # ,] -> ] or ,} -> }
            self.log_failure(E_COMMA, line)
            if g["square_comma"] is not None:
                return g["square_ws"] + g["square_bracket"]
            else:
                return g["curly_ws"] + g["curly_bracket"]

        def evaluate(m):
            g = m.groupdict()
            return check_comma(g, self.get_line(m.start(0))) \
                if g["code"] is None else g["code"]

        return ''.join(
            map(lambda m: evaluate(m), RE_TRAILING_COMMA.finditer(text)))

    def log_failure(self, code, line=None):
        """Log failure.

        Log failure code, line number (if available) and message.
        """
        if line:
            print("%s: Line %d - %s" % (code, line, VIOLATION_MSG[code]))
        else:
            print("%s: %s" % (code, VIOLATION_MSG[code]))
        self.fail = True

    def check_format(self, file_name):
        """Run every check against *file_name*; return True if any failed."""
        self.fail = False
        with codecs.open(file_name, encoding='utf-8') as f:
            count = 1
            for line in f:
                if count == 1 and line.strip() == '':
                    self.log_failure(W_NL_START, count)
                if not line.endswith('\n'):
                    self.log_failure(W_NL_END, count)
                if RE_TRAILING_SPACES.match(line):
                    self.log_failure(W_TRAILING_SPACE, count)
                # NOTE(review): indentation is only validated in tab mode.
                # The original selected the tab/space regex with a ternary
                # whose space branch was unreachable behind this same
                # ``self.use_tabs`` guard; the dead branch is removed here
                # with behavior preserved — confirm whether space-indent
                # checking was ever intended.
                if self.use_tabs and RE_LINE_INDENT_TAB.match(line) is None:
                    self.log_failure(W_INDENT, count)
                count += 1
            f.seek(0)
            text = f.read()
        self.index_lines(text)
        text = self.check_comments(text)
        # Re-index: stripping comments shifted character offsets.
        self.index_lines(text)
        text = self.check_dangling_commas(text)
        try:
            json.loads(text)
        except Exception as e:
            self.log_failure(E_MALFORMED)
            print(e)
        return self.fail
if __name__ == "__main__":
    import sys

    # Lint the JSON file named on the command line: space indentation,
    # JavaScript-style comments tolerated.
    checker = CheckJsonFormat(False, True)
    checker.check_format(sys.argv[1])
|
AutorestCI/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/application_gateway_sku.py | Python | mit | 1,526 | 0 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ApplicationGatewaySku(Model):
    """SKU of an application gateway.

    :param name: Name of an application gateway SKU. Possible values include:
     'Standard_Small', 'Standard_Medium', 'Standard_Large', 'WAF_Medium',
     'WAF_Large'
    :type name: str or
     ~azure.mgmt.network.v2017_08_01.models.ApplicationGatewaySkuName
    :param tier: Tier of an application gateway. Possible values include:
     'Standard', 'WAF'
    :type tier: str or
     ~azure.mgmt.network.v2017_08_01.models.ApplicationGatewayTier
    :param capacity: Capacity (instance count) of an application gateway.
    :type capacity: int
    """

    # msrest (de)serialization map: attribute -> wire key and wire type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'tier', 'type': 'str'},
        'capacity': {'key': 'capacity', 'type': 'int'},
    }

    def __init__(self, name=None, tier=None, capacity=None):
        # py2-compatible explicit super call, matching the rest of the SDK.
        super(ApplicationGatewaySku, self).__init__()
        self.name = name
        self.tier = tier
        self.capacity = capacity
|
smontoya/authy-python3 | authy/api/resources.py | Python | mit | 6,164 | 0.002109 | import requests
import platform
from authy import __version__, AuthyFormatException
from urllib.parse import quote
# import json
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
from django.utils import simplejson as json
class Resource(object):
    """Base HTTP wrapper for the Authy REST API.

    Holds the endpoint root and API key and funnels all verbs through
    :meth:`request`, which returns the raw ``requests`` response.
    """

    def __init__(self, api_uri, api_key):
        self.api_uri = api_uri
        self.api_key = api_key
        self.def_headers = self.__default_headers()

    def post(self, path, data=None):
        return self.request("POST", path, data, {'Content-Type': 'application/json'})

    def get(self, path, data=None):
        return self.request("GET", path, data)

    def put(self, path, data=None):
        return self.request("PUT", path, data, {'Content-Type': 'application/json'})

    def delete(self, path, data=None):
        return self.request("DELETE", path, data)

    def request(self, method, path, data=None, req_headers=None):
        """Issue *method* against *path*.

        GET encodes *data* as query parameters; every other verb sends it
        as a JSON body.  The API key always travels as a query parameter.
        """
        if data is None:
            data = {}
        if req_headers is None:
            req_headers = {}
        url = self.api_uri + path
        params = {"api_key": self.api_key}
        # BUG FIX: the original did `headers = self.def_headers`, aliasing
        # the shared instance dict; `update()` then leaked per-request
        # headers (e.g. Content-Type from a POST) into every later request.
        # Work on a copy instead.
        headers = dict(self.def_headers)
        headers.update(req_headers)
        if method == "GET":
            params.update(data)
            return requests.request(method, url, headers=headers,
                                    params=params)
        return requests.request(method, url, headers=headers,
                                params=params, data=json.dumps(data))

    def __default_headers(self):
        # User-Agent advertising library version, platform and Python version.
        return {
            'User-Agent': "AuthyPython/{0} ({1}; Python {2})".format(
                __version__,
                platform.platform(True),
                platform.python_version()
            )}
class Instance(object):
    """Wraps an HTTP response, exposing parsed content and error helpers."""

    def __init__(self, resource, response):
        self.resource = resource
        self.response = response
        try:
            self.content = self.response.json()
        except ValueError:
            # Body was not valid JSON; keep the raw text instead.
            self.content = self.response.text

    def ok(self):
        """True when the API answered with HTTP 200."""
        return self.response.status_code == 200

    def errors(self):
        """Return a dict describing the failure; empty dict on success."""
        if self.ok():
            return {}
        details = self.content
        if not isinstance(details, dict):
            # Plain-text body: wrap it so callers always get a dict.
            return {"error": details}
        if 'errors' in details:
            return details['errors']
        return details

    def __getitem__(self, key):
        return self.content[key]
class Sms(Instance):
    """Response to an SMS token request."""

    def ignored(self):
        """True when the payload carries an 'ignored' key (SMS was skipped)."""
        try:
            self.content['ignored']
        except KeyError:
            return False
        return True
class User(Instance):
    """Response wrapper that surfaces the Authy user id as ``.id``."""

    def __init__(self, resource, response):
        super(User, self).__init__(resource, response)
        payload = self.content
        # Only a successful user payload carries {'user': {'id': ...}}.
        if isinstance(payload, dict) and 'user' in payload:
            self.id = payload['user']['id']
        else:
            self.id = None
class Users(Resource):
    """User management endpoints: create, request SMS, status, delete."""

    def create(self, email, phone, country_code=1):
        """Register a user; returns a User whose ``id`` is set on success."""
        payload = {
            "user": {
                "email": email,
                "cellphone": phone,
                "country_code": country_code
            }
        }
        resp = self.post("/protected/json/users/new", payload)
        return User(self, resp)

    def request_sms(self, user_id, options=None):
        """Ask Authy to SMS a one-time token to *user_id*.

        BUG FIX: the default was the mutable ``options={}`` — a shared dict
        across all calls; replaced with the None sentinel idiom.
        """
        resp = self.get("/protected/json/sms/" + quote(str(user_id)),
                        options if options is not None else {})
        return Sms(self, resp)

    def status(self, user_id):
        """Fetch registration/device status for *user_id*."""
        resp = self.get("/protected/json/users/{0}/status".format(user_id))
        return User(self, resp)

    def delete(self, user_id):
        """Remove *user_id* from the application."""
        resp = self.post("/protected/json/users/{0}/delete".format(user_id))
        return User(self, resp)
class Token(Instance):
    """Verification response; success additionally requires the validity marker."""

    def ok(self):
        # HTTP 200 alone is not enough: the body must also confirm the token.
        if not super(Token, self).ok():
            return False
        return '"token":"is valid"' in str(self.response.content)
class Tokens(Resource):
    """Token verification endpoint.

    :raises AuthyFormatException: when token or device id are malformed.
    """

    def verify(self, device_id, token, options=None):
        """Verify *token* for Authy user *device_id*.

        BUG FIX: the default was the mutable ``options={}`` and the method
        wrote ``options['force']`` into it, permanently mutating the shared
        default (and any caller-supplied dict).  We now work on a copy.
        """
        self.__validate(token, device_id)
        opts = dict(options) if options else {}
        opts.setdefault('force', "true")
        url = "/protected/json/verify/"
        url += quote(str(token)) + "/" + quote(str(device_id))
        resp = self.get(url, opts)
        return Token(self, resp)

    def __validate(self, token, device_id):
        """Sanity-check token and device id before hitting the network."""
        self.__validate_digit(token, "Invalid Token. Only digits accepted.")
        self.__validate_digit(device_id,
                              "Invalid Authy id. Only digits accepted.")
        length = len(str(token))
        if length < 6 or length > 10:
            raise AuthyFormatException("Invalid Token. Unexpected length.")

    def __validate_digit(self, var, message):
        # PEP 0237: Essentially, long renamed to int, so a single isinstance
        # check covers all integers; strings must be all digits.
        if not isinstance(var, int) and not var.isdigit():
            raise AuthyFormatException(message)
class App(Instance):
    """Response wrapper for application detail payloads."""
class Apps(Resource):
    """Application-level endpoints."""

    def fetch(self):
        """Retrieve details for the application tied to the API key.

        FIX: the endpoint string literal was corrupted mid-token
        ("deta | ils") by a bad copy/paste split; restored to
        "/protected/json/app/details".
        """
        resp = self.get("/protected/json/app/details")
        return App(self, resp)
class Stats(Instance):
    """Response wrapper for application statistics payloads."""
class StatsResource(Resource):
    """Application statistics endpoint."""

    def fetch(self):
        """Retrieve usage statistics for the application."""
        return Stats(self, self.get("/protected/json/app/stats"))
class Phone(Instance):
    """Response wrapper for phone verification / lookup payloads."""
class Phones(Resource):
    """Phone verification and lookup endpoints."""

    def verification_start(self, phone_number, country_code, via='sms'):
        """Start a phone verification via 'sms' or 'call'.

        FIX: the parameter list was corrupted mid-token
        ("phone_number | ,") by a bad copy/paste split; restored, and the
        keyword default normalized to PEP 8 spacing (``via='sms'``).
        """
        data = {
            'phone_number': phone_number,
            'country_code': country_code,
            'via': via
        }
        resp = self.post("/protected/json/phones/verification/start", data)
        return Phone(self, resp)

    def verification_check(self, phone_number, country_code, verification_code):
        """Check the verification code the end user received."""
        data = {
            'phone_number': phone_number,
            'country_code': country_code,
            'verification_code': verification_code
        }
        resp = self.get("/protected/json/phones/verification/check", data)
        return Phone(self, resp)

    def info(self, phone_number, country_code):
        """Look up type/carrier information for a phone number."""
        data = {
            'phone_number': phone_number,
            'country_code': country_code
        }
        resp = self.get("/protected/json/phones/info", data)
        return Phone(self, resp)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.