| repo_name (string, length 5–100) | path (string, length 4–231) | language (string, 1 distinct value) | license (string, 15 distinct values) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, length 0–8.16k) | middle (string, length 3–512) | suffix (string, length 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
| hsharrison/history-queue | setup.py | Python | bsd-2-clause | 1,610 | 0.001242 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
import re
from glob import glob
from os.path import basename
from os.path import dirname
from os.path import join
from os.path import splitext
from setuptools import find_packages
from setuptools import setup
def read(*names, **kwargs):
return io.open(
join(dirname(__file__), *names),
encoding=kwargs.get('encoding', 'utf8')
).read()
setup(
name='hqueue',
version='0.2.0',
license='BSD',
description='asyncio.Queue with history',
long_description='%s\n\n%s' % (
re.compile('^.. start-badges.*^.. end-badges', re.M | re.S).sub('', read('README.rst')),
re.sub(':[a-z]+:`~?(.*?)`', r'``\1``', read('CHANGELOG.rst'))
),
author='Henry S. Harrison',
author_email='henry.schafer.harrison@gmail.com',
url='https://github.com/hsharrison/history-queue',
packages=find_packages('src'),
package_dir={'': 'src'},
py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
include_package_data=True,
zip_safe=True,
classifiers=[
# complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
keywords=[
'asyncio', 'deque', 'queue', 'history',
],
extras_require={
'test': ['pytest', 'pytest-cov', 'hypothesis', 'toolz'],
}
)
| Azure/azure-sdk-for-python | sdk/databox/azure-mgmt-databox/azure/mgmt/databox/v2020_11_01/_data_box_management_client.py | Python | mit | 3,173 | 0.001891 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Optional
from azure.core.credentials import TokenCredential
from ._configuration import DataBoxManagementClientConfiguration
from .operations import Operations
from .operations import JobsOperations
from .operations import ServiceOperations
from . import models
class DataBoxManagementClient(object):
"""The DataBox Client.
:ivar operations: Operations operations
:vartype operations: azure.mgmt.databox.operations.Operations
:ivar jobs: JobsOperations operations
:vartype jobs: azure.mgmt.databox.operations.JobsOperations
:ivar service: ServiceOperations operations
:vartype service: azure.mgmt.databox.operations.ServiceOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The Subscription Id.
:type subscription_id: str
:param str base_url: Service URL
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""
def __init__(
self,
credential, # type: "TokenCredential"
subscription_id, # type: str
base_url=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> None
if not base_url:
base_url = 'https://management.azure.com'
self._config = DataBoxManagementClientConfiguration(credential, subscription_id, **kwargs)
self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.operations = Operations(
self._client, self._config, self._serialize, self._deserialize)
self.jobs = JobsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.service = ServiceOperations(
self._client, self._config, self._serialize, self._deserialize)
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> DataBoxManagementClient
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
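The generated client wires up three operation groups (`operations`, `jobs`, `service`) and can be used as a context manager via `__enter__`/`__exit__`. A minimal usage sketch, assuming `azure-identity` is available and using a placeholder subscription id (both are assumptions, not part of this generated file):

```python
# Usage sketch only: DefaultAzureCredential and the subscription id are
# assumptions; the operation groups mirror the attributes set in __init__.
from azure.identity import DefaultAzureCredential

credential = DefaultAzureCredential()
with DataBoxManagementClient(credential, subscription_id="<subscription-id>") as client:
    # operations.list() enumerates the REST operations exposed by the provider.
    for operation in client.operations.list():
        print(operation)
```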
| parksandwildlife/borgcollector | tablemanager/migrations/0021_auto_20160219_0803.py | Python | bsd-3-clause | 2,007 | 0.003986 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
import re
import tablemanager.models
import django.utils.timezone
import borg_utils.resource_status
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('tablemanager', '0020_workspace_auth_level'),
]
operations = [
migrations.CreateModel(
name='Style',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.SlugField(help_text='Name of Publish', max_length=255, validators=[django.core.validators.RegexValidator(re.compile('^[a-z0-9_]+$'), 'Slug can only contain lowercase letters, numbers and underscores', 'invalid')])),
('description', models.CharField(max_length=512, null=True, blank=True)),
('status', models.CharField(default=b'Enabled', max_length=32, choices=[(b'Enabled', b'Enabled'), (b'Disabled', b'Disabled')])),
('sld', tablemanager.models.XMLField(help_text='Styled Layer Descriptor', null=True, blank=True)),
('last_modify_time', models.DateTimeField(default=django.utils.timezone.now, auto_now_add=True)),
('publish', models.ForeignKey(to='tablemanager.Publish')),
],
options={
'ordering': ('publish', 'name'),
},
bases=(models.Model, borg_utils.resource_status.ResourceStatusMixin),
),
migrations.AlterUniqueTogether(
name='style',
unique_together=set([('publish', 'name')]),
),
migrations.AddField(
model_name='publish',
name='default_style',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, to='tablemanager.Style', null=True),
preserve_default=True,
),
]
| globocom/database-as-a-service | dbaas/physical/commands.py | Python | bsd-3-clause | 3,573 | 0 |
class HostCommands(object):
def __new__(cls, host):
if host.is_ol6:
return HostCommandOL6(host)
if host.is_ol7:
return HostCommandOL7(host)
class HostBaseCommands(object):
def __init__(self, host):
self.host = host
self.infra = host.infra
self.engine_name = host.infra.engine.name
def exec_service_command(self, service_name, action, no_output=False):
cmd = self.command_tmpl.format(
service_name=service_name,
action=action
)
if no_output:
cmd += ' > /dev/null'
return cmd
def init_database_script(self, action, instances, no_output=True):
script = ''
for instance in instances:
script += "{};".format(self.database(
action=action
))
if instance.is_sentinel:
script += "{};".format(self.secondary_service(
action=action,
no_output=True
))
return script
def secondary_service(self, action, no_output=True):
return self.exec_service_command(
service_name=self.SECONDARY_SERVICE_NAME_BY_ENGINE[
self.engine_name
],
action=action,
no_output=no_output
)
def database(self, action, no_output=True):
return self.exec_service_command(
service_name=self.PRIMARY_SERVICE_NAME_BY_ENGINE[self.engine_name],
action=action,
no_output=no_output
)
def monit_script(self, action='start'):
return """
echo ""; echo $(date "+%Y-%m-%d %T") "- Monit"
{}
""".format(
self.exec_service_command(
service_name='monit',
action=action
)
)
def rsyslog(self, action, no_output=False):
return self.exec_service_command(
service_name='rsyslog',
action=action,
no_output=no_output
)
def telegraf(self, action, no_output=False):
return self.exec_service_command(
service_name='telegraf',
action=action,
no_output=no_output
)
def httpd(self, action, no_output=False):
return self.exec_service_command(
service_name='httpd',
action=action,
no_output=no_output
)
def heartbeat(self, action, no_output=False):
return self.exec_service_command(
service_name='pt-heartbeat',
action=action,
no_output=no_output
)
@property
def command_tmpl(self):
raise NotImplementedError()
class HostCommandOL6(HostBaseCommands):
PRIMARY_SERVICE_NAME_BY_ENGINE = {
'mongodb': 'mongodb',
'redis': 'redis',
'mysql': 'mysql',
'mysql_percona': 'mysql'
}
SECONDARY_SERVICE_NAME_BY_ENGINE = {
'mongodb': 'mongodb',
'redis': 'sentinel',
'mysql': '',
'mysql_percona': ''
}
command_tmpl = '/etc/init.d/{service_name} {action}'
class HostCommandOL7(HostBaseCommands):
PRIMARY_SERVICE_NAME_BY_ENGINE = {
'mongodb': 'mongodb',
'redis': 'redis',
'mysql': 'mysql',
'mysql_percona': 'mysql'
}
SECONDARY_SERVICE_NAME_BY_ENGINE = {
'mongodb': 'mongodb',
'redis': 'sentinel',
'mysql': '',
'mysql_percona': ''
}
command_tmpl = 'sudo systemctl {action} {service_name}.service'
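HostCommands picks the OL6 or OL7 command class from the host's flags and formats shell strings from the per-engine service-name tables. A small sketch of the strings produced, using hypothetical stand-in objects for the dbaas host/infra models (these stand-ins are not the real models):

```python
# Hypothetical stand-ins, only to show what the templates above render to.
class FakeEngine:
    name = 'redis'

class FakeInfra:
    engine = FakeEngine()

class FakeHost:
    is_ol6 = False
    is_ol7 = True
    infra = FakeInfra()

commands = HostCommands(FakeHost())  # dispatches to HostCommandOL7
print(commands.database(action='restart'))
# sudo systemctl restart redis.service > /dev/null
print(commands.secondary_service(action='status', no_output=False))
# sudo systemctl status sentinel.service
```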
| StartupsPoleEmploi/labonneboite | labonneboite/importer/conf/development.py | Python | agpl-3.0 | 497 | 0.002012 |
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
DISTINCT_DEPARTEMENTS_HAVING_OFFICES = 15
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 8
MINIMUM_OFFICES_REQUIRED_TO_TRAIN_MODEL = 0
MAXIMUM_COMPUTE_SCORE_JOB_FAILURES = 94 # 96 departements == 2 successes + 94 failures
RMSE_MAX = 300
# --- job 6/8 : validate_scores
SCORE_REDUCING_MINIMUM_THRESHOLD = 0
# SCORE_ALTERNANCE_REDUCING_MINIMUM_THRESHOLD = 0
DEPARTEMENTS_TO_BE_SANITY_CHECKED = ['14', '69']
| rdespoiu/QTitan | QTitan/QTSurvey/Controllers/BaseDemographicForm.py | Python | gpl-3.0 | 214 | 0.023364 |
from django import forms
from ..models import BaseDemographic
class BaseDemographicForm(forms.ModelForm):
class Meta:
model = BaseDemographic
fields = ['first_name','last_name','phone','dob']
| erikr/howtostoreiosdata | howtostoreiosdata/wizard/code_samples.py | Python | mit | 1,995 | 0.002506 |
# All the code samples below have one parameter, which is where the protection level name
# for that storage type will be inserted, e.g. NSDataWritingFileProtectionCompleteUnlessOpen
CODE_SAMPLE_CORE_DATA = """
- (NSPersistentStoreCoordinator *)persistentStoreCoordinator {
if (persistentStoreCoordinator_ != nil) {
return persistentStoreCoordinator_;
}
persistentStoreCoordinator_ = [[NSPersistentStoreCoordinator alloc]
initWithManagedObjectModel:[self managedObjectModel]];
NSURL *storeURL = [NSURL fileURLWithPath:
[[self applicationDocumentsDirectory] stringByAppendingPathComponent: @"MyStore.sqlite"]];
[persistentStoreCoordinator_ addPersistentStoreWithType:NSSQLiteStoreType
configuration:nil URL:storeURL options:nil error:&error];
NSDictionary *fileAttributes = [NSDictionary
dictionaryWithObject:%s
forKey:NSFileProtectionKey];
[[NSFileManager defaultManager] setAttributes:fileAttributes
ofItemAtPath:[storeURL path] error: &error];
return persistentStoreCoordinator_;
}"""
CODE_SAMPLE_SQL = """
int flags = SQLITE_OPEN_CREATE |
SQLITE_OPEN_READWRITE |
%s;
sqlite3_open_v2(path, &database, flags, NULL)
// Or, if you prefer FMDB:
FMDatabase *database = [FMDatabase databaseWithPath:dbPath];
[database openWithFlags:flags]
"""
CODE_SAMPLE_RAW_DATA = """
NSData *contents = [@"secret file contents" dataUsingEncoding:NSUTF8StringEncoding];
[contents writeToFile:path
options:%s
error:&error];
"""
CODE_SAMPLE_KEYCHAIN = """
// Note that metadata, like the account name, is not encrypted.
NSDictionary *item = @{
(__bridge id)kSecAttrAccount: account,
(__bridge id)kSecClass: (__bridge id)kSecClassGenericPassword,
(__bridge id)kSecAttrAccessible: (__bridge id)%s,
(__bridge id)kSecValueData: data,
};
OSStatus error = SecItemAdd((__bridge CFDictionaryRef)item, NULL);
"""
| platinhom/CADDHom | python/basic/HHGeometry.py | Python | gpl-2.0 | 2,547 | 0.016097 |
import __init__
import math
import numpy as np
class Point:
'''A point in 3D space, with some helper methods.'''
def __init__ (self, x=0.0, y=0.0 ,z=0.0):
self.x=x
self.y=y
self.z=z
def coors(self):
coor=(self.x,self.y,self.z)
return coor
def dist_to(self,apoint):
return math.sqrt(math.pow(self.x - apoint.x,2) + math.pow(self.y - apoint.y,2) + math.pow(self.z - apoint.z,2))
def CopyOf(self):
return Point(self.x, self.y, self.z)
def average_with(self, other_point):
return Point((self.x + other_point.x) / 2.0, (self.y + other_point.y) / 2.0, (self.z + other_point.z) / 2.0)
def dot_product_with(self, other_point):
return self.x * other_point.x + self.y * other_point.y + self.z * other_point.z
def length(self):
return self.dist_to(Point(0.0,0.0,0.0))
def minus(self, other_point):
return Point(self.x - other_point.x, self.y - other_point.y, self.z - other_point.z)
def CreatePDBLine(self):
#if len(self.atomname) > 1: self.atomname = self.atomname[:1].upper() + self.atomname[1:].lower()
output = "ATOM "
#output = output + str(index).rjust(6) + self.atomname.rjust(5) + self.residue.rjust(4)
output = output + "5".rjust(6) + "X".rjust(5) + "XXX".rjust(4)
output = output + ("%.3f" % self.x).rjust(18)
output = output + ("%.3f" % self.y).rjust(8)
output = output + ("%.3f" % self.z).rjust(8)
output = output + "X".rjust(24) # + " " + str(uniqueID) #This last part must be removed
return output
#######basic function is put here#####
def coors_to_point(coors):
point=Point(coors[0],coors[1],coors[2])
return point
# -------------------------------- Vector -------------------------
class Vector:
pass
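A short usage sketch of the Point helpers above (the values are arbitrary examples):

```python
# Illustration only: exercises the Point methods defined above.
a = Point(1.0, 2.0, 2.0)
b = Point(0.0, 0.0, 0.0)
print(a.coors())                                  # (1.0, 2.0, 2.0)
print(a.dist_to(b))                               # 3.0
print(a.length())                                 # 3.0, distance to the origin
print(a.dot_product_with(Point(1.0, 0.0, 0.0)))   # 1.0
print(coors_to_point((3.0, 4.0, 0.0)).length())   # 5.0
```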
# -------------------------------- Vertex -------------------------
# -------------------------------- Edge ---------------------------
# -------------------------------- Face ---------------------------
# -------------------------------- Box ----------------------------
# -------------------------------- Cubic --------------------------
# -------------------------------- Circle -------------------------
# -------------------------------- Ball ---------------------------
# -------------------------------- Grid ---------------------------
# ----------------------------- MultiGrid -------------------------
# -------------------------- High Dimension ? ---------------------
| tailhook/tilenol | tilenol/widgets/groupbox.py | Python | mit | 3,880 | 0.000258 |
from collections import namedtuple
from cairo import LINE_JOIN_ROUND
from zorro.di import di, dependency, has_dependencies
from tilenol.groups import GroupManager
from tilenol.commands import CommandDispatcher
from .base import Widget
from tilenol.theme import Theme
from tilenol.window import Window
GroupState = namedtuple(
'GroupState',
('name', 'empty', 'active', 'visible', 'urgent')
)
@has_dependencies
class State(object):
commander = dependency(CommandDispatcher, 'commander')
gman = dependency(GroupManager, 'group-manager')
def __init__(self):
self._state = None
def dirty(self):
return self._state != self._read()
def update(self):
nval = self._read()
if nval != self._state:
self._state = nval
return True
def _read(self):
cur = self.commander.get('group')
visgr = self.gman.current_groups.values()
return tuple(GroupState(g.name, g.empty, g is cur, g in visgr,
g.has_urgent_windows)
for g in self.gman.groups)
@property
def groups(self):
return self._state
@has_dependencies
class Groupbox(Widget):
theme = dependency(Theme, 'theme')
def __init__(self, *, filled=False, first_letter=False, right=False):
super().__init__(right=right)
self.filled = filled
self.first_letter = first_letter
def __zorro_di_done__(self):
self.state = di(self).inject(State())
bar = self.theme.bar
self.font = bar.font
self.inactive_color = bar.dim_color_pat
self.urgent_color = bar.bright_color_pat
self.active_color = bar.text_color_pat
self.selected_color = bar.active_border_pat
self.subactive_color = bar.subactive_border_pat
self.padding = bar.text_padding
self.border_width = bar.border_width
self.state.gman.group_changed.listen(self.bar.redraw.emit)
Window.any_window_changed.listen(self.check_state)
def check_state(self):
if self.state.dirty:
self.bar.redraw.emit()
def draw(self, canvas, l, r):
self.state.update()
assert not self.right, "Sorry, right not implemented"
self.font.apply(canvas)
canvas.set_line_join(LINE_JOIN_ROUND)
canvas.set_line_width(self.border_width)
x = l
between = self.padding.right + self.padding.left
for gs in self.state.groups:
gname = gs.name
if self.first_letter:
gname = gname[0]
sx, sy, w, h, ax, ay = canvas.text_extents(gname)
if gs.active:
canvas.set_source(self.selected_color)
if self.filled:
canvas.rectangle(x, 0, ax + between, self.height)
canvas.fill()
else:
canvas.rectangle(
x + 2, 2, ax + between - 4, self.height - 4
)
canvas.stroke()
elif gs.visible:
canvas.set_source(self.subactive_color)
if self.filled:
canvas.rectangle(x, 0, ax + between, self.height)
canvas.fill()
else:
canvas.rectangle(
x + 2, 2, ax + between - 4, self.height - 4
)
canvas.stroke()
if gs.urgent:
canvas.set_source(self.urgent_color)
elif gs.empty:
canvas.set_source(self.inactive_color)
else:
canvas.set_source(self.active_color)
canvas.move_to(x + self.padding.left,
self.height - self.padding.bottom)
canvas.show_text(gname)
x += ax + between
return x, r
| lucianopuccio/golem | golem/cli/commands.py | Python | mit | 8,682 | 0.001497 |
import os
import sys
import golem
from golem.core import (utils, session, suite as suite_module, test,
settings_manager, test_directory)
from golem.core.project import Project, create_project
from golem.gui import gui_start
from golem.test_runner.execution_runner import ExecutionRunner
from golem.test_runner import interactive as interactive_module
from golem.gui.user_management import Users
from . import messages
def command_dispatcher(args):
if args.help:
display_help(args.help, args.command)
elif args.command == 'run':
run_command(args.project, args.test_query, args.browsers, args.processes,
args.environments, args.interactive, args.timestamp, args.report,
args.report_folder, args.report_name, args.tags, args.cli_log_level)
elif args.command == 'gui':
gui_command(args.host, args.port, args.debug)
elif args.command == 'createproject':
createproject_command(args.project)
elif args.command == 'createtest':
createtest_command(args.project, args.test)
elif args.command == 'createsuite':
createsuite_command(args.project, args.suite)
elif args.command == 'createuser':
createuser_command()
elif args.command == 'createsuperuser':
createsuperuser_command(args.username, args.email, args.password, args.noinput)
elif args.command is None:
if args.version:
display_version()
else:
print(messages.USAGE_MSG)
def display_help(help, command):
if help == 'run' or command == 'run':
print(messages.RUN_USAGE_MSG)
elif help == 'gui' or command == 'gui':
print(messages.GUI_USAGE_MSG)
elif help == 'createproject' or command == 'createproject':
print(messages.CREATEPROJECT_USAGE_MSG)
elif help == 'createtest' or command == 'createtest':
print(messages.CREATETEST_USAGE_MSG)
elif help == 'createsuite' or command == 'createsuite':
print(messages.CREATESUITE_USAGE_MSG)
elif help == 'createsuperuser' or command == 'createsuperuser':
print(messages.CREATESUPERUSER_USAGE_MSG)
else:
print(messages.USAGE_MSG)
def run_command(project='', test_query='', browsers=None, processes=1,
environments=None, interactive=False, timestamp=None,
reports=None, report_folder=None, report_name=None,
tags=None, cli_log_level=None):
execution_runner = ExecutionRunner(browsers, processes, environments, interactive,
timestamp, reports, report_folder, report_name, tags)
if project:
if test_directory.project_exists(project):
execution_runner.project = project
session.settings = settings_manager.get_project_settings(project)
# add --interactive value to settings to make
# it available from inside a test
session.settings['interactive'] = interactive
# override cli_log_level setting if provided by the CLI
if cli_log_level:
session.settings['cli_log_level'] = cli_log_level.upper()
if test_query:
norm_query = utils.normalize_query(test_query)
if suite_module.Suite(project, norm_query).exists:
execution_runner.run_suite(norm_query)
elif test.Test(project, norm_query).exists:
execution_runner.run_test(norm_query)
else:
if test_query == '.':
test_query = ''
path = os.path.join(session.testdir, 'projects',
project, 'tests', test_query)
if os.path.isdir(path):
execution_runner.run_directory(test_query)
else:
msg = ('golem run: error: the value {} does not match '
'an existing test, suite or directory'.format(test_query))
sys.exit(msg)
else:
print(messages.RUN_USAGE_MSG)
tests = Project(project).test_tree
print('Tests:')
utils.display_tree_structure_command_line(tests['sub_elements'])
suites = Project(project).suite_tree
print('\nTest Suites:')
# TODO print suites in structure
for suite in suites['sub_elements']:
print(' ' + suite['name'])
else:
msg = ('golem run: error: the project {} does not exist'.format(project))
sys.exit(msg)
elif interactive:
interactive_module.interactive(session.settings, browsers)
else:
print(messages.RUN_USAGE_MSG)
print('Projects:')
for project in test_directory.get_projects():
print(' {}'.format(project))
def gui_command(host=None, port=5000, debug=False):
gui_start.run_gui(host, port, debug)
def createproject_command(project):
if test_directory.project_exists(project):
msg = ('golem createproject: error: a project with name \'{}\' already exists'
.format(project))
sys.exit(msg)
else:
create_project(project)
def createtest_command(project, test_name):
if not test_directory.project_exists(project):
msg = ('golem createtest: error: a project with name {} '
'does not exist'.format(project))
sys.exit(msg)
test_name = test_name.replace(os.sep, '.')
errors = test.create_test(project, test_name)
if errors:
sys.exit('golem createtest: error: {}'.format(' '.join(errors)))
def createsuite_command(project, suite_name):
if not test_directory.project_exists(project):
msg = ('golem createsuite: error: a project with name {} '
'does not exist'.format(project))
sys.exit(msg)
errors = suite_module.create_suite(project, suite_name)
if errors:
sys.exit('golem createsuite: error: {}'.format(' '.join(errors)))
# TODO deprecated
def createuser_command():
sys.exit('Error: createuser command is deprecated. Use createsuperuser instead.')
def createsuperuser_command(username, email, password, no_input=False):
if no_input:
if username is None or password is None:
sys.exit('Error: --username and --password are required for --noinput.')
else:
try:
while True:
username = input('Username: ').strip()
if username:
break
while True:
email = input('Email address (optional): ').strip()
if email and not utils.validate_email(email):
print('Error: Enter a valid email address.')
else:
break
while True:
password = input('Password: ')
repeat_password = input('Password (again): ')
if not len(password):
print('Error: Blank passwords are not allowed.')
elif password != repeat_password:
print('Error: The passwords did not match.')
else:
break
except KeyboardInterrupt:
sys.exit('Cancelled.')
errors = Users.create_super_user(username, password, email)
if errors:
for error in errors:
print('Error: {}'.format(error))
exit(1)
else:
print('Superuser {} was created successfully.'.format(username))
def createdirectory_command(dir_name, no_confirm=False):
"""Create a new Golem test directory
dir_name must be an absolute or relative path.
If the path exists and is not empty and no_confirm
is False the user will be prompted to continue.
"""
abspath = os.path.abspath(dir_name)
if os.path.exists(abspath) and os.listdir(abspath):
# directory is not empty
if not no_confirm:
msg = 'Directory {} is not empty, continue? [Y/n]'.format(dir_name)
if not utils.prompt_yes_no(msg):
| webeng/DeepLearningTutorials | code/convolutional_mlp.py | Python | bsd-3-clause | 12,771 | 0.001175 |
"""This tutorial introduces the LeNet5 neural network architecture
using Theano. LeNet5 is a convolutional neural network, good for
classifying images. This tutorial shows how to build the architecture,
and comes with all the hyper-parameters you need to reproduce the
paper's MNIST results.
This implementation simplifies the model in the following ways:
- LeNetConvPool doesn't implement location-specific gain and bias parameters
- LeNetConvPool doesn't implement pooling by average, it implements pooling
by max.
- Digit classification is implemented with a logistic regression rather than
an RBF network
- LeNet5 was not fully-connected convolutions at second layer
References:
- Y. LeCun, L. Bottou, Y. Bengio and P. Haffner:
Gradient-Based Learning Applied to Document
Recognition, Proceedings of the IEEE, 86(11):2278-2324, November 1998.
http://yann.lecun.com/exdb/publis/pdf/lecun-98.pdf
"""
import os
import sys
import timeit
import numpy
import theano
import theano.tensor as T
from theano.tensor.signal import downsample
from theano.tensor.nnet import conv
from logistic_sgd import LogisticRegression, load_data
from mlp import HiddenLayer
class LeNetConvPoolLayer(object):
"""Pool Layer of a convolutional network """
def __init__(self, rng, input, filter_shape, image_shape, poolsize=(2, 2)):
"""
Allocate a LeNetConvPoolLayer with shared variable internal parameters.
:type rng: numpy.random.RandomState
:param rng: a random number generator used to initialize weights
:type input: theano.tensor.dtensor4
:param input: symbolic image tensor, of shape image_shape
:type filter_shape: tuple or list of length 4
:param filter_shape: (number of filters, num input feature maps,
filter height, filter width)
:type image_shape: tuple or list of length 4
:param image_shape: (batch size, num input feature maps,
image height, image width)
:type poolsize: tuple or list of length 2
:param poolsize: the downsampling (pooling) factor (#rows, #cols)
"""
assert image_shape[1] == filter_shape[1]
self.input = input
# there are "num input feature maps * filter height * filter width"
# inputs to each hidden unit
fan_in = numpy.prod(filter_shape[1:])
# each unit in the lower layer receives a gradient from:
# "num output feature maps * filter height * filter width" /
# pooling size
fan_out = (filter_shape[0] * numpy.prod(filter_shape[2:]) /
numpy.prod(poolsize))
# initialize weights with random weights
W_bound = numpy.sqrt(6. / (fan_in + fan_out))
self.W = theano.shared(
numpy.asarray(
rng.uniform(low=-W_bound, high=W_bound, size=filter_shape),
dtype=theano.config.floatX
),
borrow=True
)
# the bias is a 1D tensor -- one bias per output feature map
b_values = numpy.zeros((filter_shape[0],), dtype=theano.config.floatX)
self.b = theano.shared(value=b_values, borrow=True)
# convolve input feature maps with filters
conv_out = conv.conv2d(
input=input,
filters=self.W,
filter_shape=filter_shape,
image_shape=image_shape
)
# downsample each feature map individually, using maxpooling
pooled_out = downsample.max_pool_2d(
input=conv_out,
ds=poolsize,
ignore_border=True
)
# add the bias term. Since the bias is a vector (1D array), we first
# reshape it to a tensor of shape (1, n_filters, 1, 1). Each bias will
# thus be broadcasted across mini-batches and feature map
# width & height
self.output = T.tanh(pooled_out + self.b.dimshuffle('x', 0, 'x', 'x'))
# store parameters of this layer
self.params = [self.W, self.b]
# keep track of model input
self.input = input
def evaluate_lenet5(learning_rate=0.1, n_epochs=200,
dataset='mnist.pkl.gz',
nkerns=[20, 50], batch_size=500):
""" Demonstrates lenet on MNIST dataset
:type learning_rate: float
:param learning_rate: learning rate used (factor for the stochastic
gradient)
:type n_epochs: int
:param n_epochs: maximal number of epochs to run the optimizer
:type dataset: string
:param dataset: path to the dataset used for training /testing (MNIST here)
:type nkerns: list of ints
:param nkerns: number of kernels on each layer
"""
rng = numpy.random.RandomState(23455)
datasets = load_data(dataset)
train_set_x, train_set_y = datasets[0]
valid_set_x, valid_set_y = datasets[1]
test_set_x, test_set_y = datasets[2]
# compute number of minibatches for training, validation and testing
n_train_batches = train_set_x.get_value(borrow=True).shape[0]
n_valid_batches = valid_set_x.get_value(borrow=True).shape[0]
n_test_batches = test_set_x.get_value(borrow=True).shape[0]
n_train_batches /= batch_size
n_valid_batches /= batch_size
n_test_batches /= batch_size
print test_set_x.get_value()[0].shape
# allocate symbolic variables for the data
index = T.lscalar() # index to a [mini]batch
# start-snippet-1
x = T.matrix('x') # the data is presented as rasterized images
y = T.ivector('y') # the labels are presented as 1D vector of
# [int] labels
######################
# BUILD ACTUAL MODEL #
######################
print '... building the model'
# Reshape matrix of rasterized images of shape (batch_size, 28 * 28)
# to a 4D tensor, compatible with our LeNetConvPoolLayer
# (28, 28) is the size of MNIST images.
layer0_input = x.reshape((batch_size, 1, 28, 28))
# Construct the first convolutional pooling layer:
# filtering reduces the image size to (28-5+1 , 28-5+1) = (24, 24)
# maxpooling reduces this further to (24/2, 24/2) = (12, 12)
# 4D output tensor is thus of shape (batch_size, nkerns[0], 12, 12)
layer0 = LeNetConvPoolLayer(
rng,
input=layer0_input,
image_shape=(batch_size, 1, 28, 28),
filter_shape=(nkerns[0], 1, 5, 5),
poolsize=(2, 2)
)
# Construct the second convolutional pooling layer
# filtering reduces the image size to (12-5+1, 12-5+1) = (8, 8)
# maxpooling reduces this further to (8/2, 8/2) = (4, 4)
# 4D output tensor is thus of shape (batch_size, nkerns[1], 4, 4)
layer1 = LeNetConvPoolLayer(
rng,
input=layer0.output,
image_shape=(batch_size, nkerns[0], 12, 12),
filter_shape=(nkerns[1], nkerns[0], 5, 5),
poolsize=(2, 2)
)
# the HiddenLayer being fully-connected, it operates on 2D matrices of
# shape (batch_size, num_pixels) (i.e matrix of rasterized images).
# This will generate a matrix of shape (batch_size, nkerns[1] * 4 * 4),
# or (500, 50 * 4 * 4) = (500, 800) with the default values.
layer2_input = layer1.output.flatten(2)
# construct a fully-connected sigmoidal layer
layer2 = HiddenLayer(
rng,
input=layer2_input,
n_in=nkerns[1] * 4 * 4,
n_out=500,
activation=T.tanh
)
# classify the values of the fully-connected sigmoidal layer
layer3 = LogisticRegression(input=layer2.output, n_in=500, n_out=10)
# the cost we minimize during training is the NLL of the model
cost = layer3.negative_log_likelihood(y)
# create a function to compute the mistakes that are made by the model
test_model = theano.function(
[index],
layer3.errors(y),
givens={
x: test_set_x[index * batch_size: (index + 1) * batch_size],
y: test_set_y[index * batch_size: (index + 1) * batch_size]
}
)
validate_model = theano.function(
[index],
layer3.errors(y),
givens={
x: valid_set_x[index * ba
| 7anshuai/white | white/domain/storage.py | Python | gpl-2.0 | 2,170 | 0.001382 |
#!/usr/bin/env python
# 2015 Copyright (C) White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from white.orm import Backend
from white.model import Pair
from flask.json import dumps
import re
class StorageService(object):
def __init__(self):
self.pair_repo = Backend('storage')
def site_meta(self):
return self.pair_repo.find('system')
def update_site_meta(self, sitename, description, site_page,
posts_per_page, auto_published_comments, comment_moderation_keys):
meta = self.site_meta()
config = meta.json_value()
try:
sitename = sitename or sitename.strip()
if sitename:
config['sitename'] = sitename
description = description or description.strip()
if description:
config['description'] = description
site_page = int(site_page)
if site_page >= 0:
config['site_page'] = site_page
posts_per_page = int(posts_per_page)
if posts_per_page:
config['posts_per_page'] = posts_per_page
auto_published_comments = bool(auto_published_comments)
config['auto_published_comments'] = auto_published_comments
if comment_moderation_keys is not None:
keys = [key.strip() for key in re.split(' +', comment_moderation_keys) if key.strip()]
config['comment_moderation_keys'] = keys
meta.value = dumps(config)
self.pair_repo.update(meta)
return True
except:
return False
| JiahuiZHONG/Internship_Thread | tests/scripts/thread-cert/Cert_6_4_01_LinkLocal.py | Python | bsd-3-clause | 2,986 | 0.001005 |
#!/usr/bin/python
#
# Copyright (c) 2016, Nest Labs, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import pexpect
import time
import unittest
import node
LEADER = 1
ED = 2
class Cert_6_4_1_LinkLocal(unittest.TestCase):
def setUp(self):
self.nodes = {}
for i in range(1,3):
self.nodes[i] = node.Node(i)
self.nodes[LEADER].set_panid(0xface)
self.nodes[LEADER].set_mode('rsdn')
self.nodes[LEADER].add_whitelist(self.nodes[ED].get_addr64())
self.nodes[LEADER].enable_whitelist()
self.nodes[ED].set_panid(0xface)
self.nodes[ED].set_mode('rsn')
self.nodes[ED].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ED].enable_whitelist()
def tearDown(self):
for node in self.nodes.itervalues():
node.stop()
del self.nodes
def test(self):
self.nodes[LEADER].start()
self.nodes[LEADER].set_state('leader')
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[ED].start()
time.sleep(3)
self.assertEqual(self.nodes[ED].get_state(), 'child')
addrs = self.nodes[ED].get_addrs()
for addr in addrs:
if addr[0:4] == 'fe80':
self.nodes[LEADER].ping(addr, size=256)
self.nodes[LEADER].ping(addr)
self.nodes[LEADER].ping('ff02::1', size=256)
self.nodes[LEADER].ping('ff02::1')
if __name__ == '__main__':
unittest.main()
| joaormatos/anaconda | mmfparser/player/players.py | Python | gpl-3.0 | 6,005 | 0.006828 |
# Copyright (c) Mathias Kaerlev 2012.
# This file is part of Anaconda.
# Anaconda is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Anaconda is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Anaconda. If not, see <http://www.gnu.org/licenses/>.
from pyglet.window.key import user_key
from pyglet.window.mouse import LEFT as MOUSE_LEFT, RIGHT as MOUSE_RIGHT
from mmfparser.player.common import PlayerChild
from mmfparser.player.eventdispatcher import EventDispatcher
DIRECTIONAL_CONTROLS = ('Up', 'Down', 'Left', 'Right')
KEY_LIST = ('Up', 'Down', 'Left', 'Right', 'Button1', 'Button2', 'Button3',
'Button4')
UP, DOWN, LEFT, RIGHT, BUTTON1, BUTTON2, BUTTON3, BUTTON4 = xrange(8)
class Player(PlayerChild, EventDispatcher):
name = ''
keys = None
keyList = None
keyNames = None
pressed_keys = None
lives = None
score = None
controls_ignored = False
use_mouse = False
def initialize(self, control):
controlType = control.getControlType()
if controlType != 'Keyboard':
raise NotImplementedError(
'control type %r unsupported' % controlType)
keys = control.keys
convert = self.player.keyboard.convert
self.keyList = keyList = []
self.keyNames = keyNames = []
for key in (keys.up, keys.down, keys.left, keys.right, keys.button1,
keys.button2, keys.button3, keys.button4):
keyList.append(convert(key.getValue()))
keyNames.append(key.getName())
self.keys = keys = {}
for index, key in enumerate(KEY_LIST):
keys[key] = keyList[index]
self.symbol_to_key = dict([(v, k) for k, v in keys.iteritems()])
self.reset()
self.player.window.push_handlers(
on_key_press = self.key_pressed,
on_key_release = self.key_released
)
self.player.mouse.push_handlers(
on_mouse_press = self.mouse_pressed,
on_mouse_release = self.mouse_released
)
def mouse_pressed(self, x, y, symbol, modifiers):
if self.controls_ignored or not self.use_mouse:
return
if symbol == MOUSE_LEFT:
self.dispatch_event('player_key_pressed', 'Button1')
elif symbol == MOUSE_RIGHT:
self.dispatch_event('player_key_pressed', 'Button2')
def mouse_released(self, x, y, symbol, modifiers):
if self.controls_ignored or not self.use_mouse:
return
if symbol == MOUSE_LEFT:
self.dispatch_event('player_key_released', 'Button1')
elif symbol == MOUSE_RIGHT:
self.dispatch_event('player_key_released', 'Button2')
def key_pressed(self, symbol, modifiers):
if self.controls_ignored:
return
try:
key = self.symbol_to_key[symbol]
if self.use_mouse and key in ('Button1', 'Button2'):
return
self.dispatch_event('player_key_pressed', key)
except KeyError:
pass
def key_released(self, symbol, modifiers):
if self.controls_ignored:
return
try:
key = self.symbol_to_key[symbol]
if self.use_mouse and key in ('Button1', 'Button2'):
return
self.dispatch_event('player_key_released', key)
except KeyError:
pass
def is_down(self, key):
if self.controls_ignored:
return False
if self.use_mouse:
if key == 'Button1':
return self.player.mouse.left
elif key == 'Button2':
return self.player.mouse.right
return self.player.keyboard[self.keys[key]]
def is_down_index(self, value):
if self.controls_ignored:
return False
if self.use_mouse:
if value == BUTTON1:
return self.player.mouse.left
elif value == BUTTON2:
return self.player.mouse.right
return self.player.keyboard[self.keyList[value]]
def set_key(self, index, key):
code = self.player.keyboard.convert(key.getValue())
name_key = KEY_LIST[index]
name = key.getName()
self.keyList[index] = code
self.keyNames[index] = name
self.keys[name_key] = code
self.symbol_to_key[code] = name_key
def set_score(self, value):
self.score = value
self.dispatch_event('score_changed', value)
def set_lives(self, value):
self.lives = value
self.dispatch_event('lives_changed', value)
def reset(self, frame = False):
self.controls_ignored = False
if frame:
return
header = self.player.gameData.header
self.lives = header.initialLives
self.score = header.initialScore
Player.register_event_type('player_key_pressed')
Player.register_event_type('player_key_released')
Player.register_event_type('score_changed')
Player.register_event_type('lives_changed')
class Players(PlayerChild):
items = None
def initialize(self):
header = self.player.gameData.header
self.items = items = []
for control in header.controls.items:
player = self.new(Player)
player.initialize(control)
items.append(player)
def reset(self, frame = False):
for player in self.items:
player.reset(frame)
| stanographer/plover | plover_build_utils/testing/parametrize.py | Python | gpl-2.0 | 1,189 | 0.000841 |
import inspect
import pytest
def parametrize(tests, arity=None):
'''Helper for parametrizing pytest tests.
Expects a list of lambdas, one per test. Each lambda must return
the parameters for its respective test.
Test identifiers will be automatically generated, from the test
number and its lambda definition line (1.10, 2.12, 3.20, ...).
If arity is None, the arguments being parametrized will be automatically
set from the function's last arguments, according to the numbers of
parameters for each test.
'''
ids = []
argvalues = []
for n, t in enumerate(tests):
line = inspect.getsourcelines(t)[1]
ids.append('%u:%u' % (n+1, line))
argvalues.append(t())
if arity is None:
arity = len(argvalues[0])
assert arity > 0
def decorator(fn):
argnames = list(
parameter.name
for parameter in inspect.signature(fn).parameters.values()
if parameter.default is inspect.Parameter.empty
)[-arity:]
if arity == 1:
argnames = argnames[0]
return pytest.mark.parametrize(argnames, argvalues, ids=ids)(fn)
return decorator
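A minimal usage sketch of the decorator (the test values are invented for illustration):

```python
# Each lambda supplies the arguments for one case; since arity is not given,
# it is taken from the first tuple (3), so the last three parameters of the
# test function are parametrized.
ADDITION_TESTS = (
    lambda: (1, 2, 3),
    lambda: (2, 2, 4),
    lambda: (10, -4, 6),
)

@parametrize(ADDITION_TESTS)
def test_addition(a, b, expected):
    assert a + b == expected
```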
| CuonDeveloper/cuon | Distributionen/CuonServer/cuon-simple-server-install.py | Python | gpl-3.0 | 21,083 | 0.014846 |
#!/usr/bin/python2.7
# cuon_server install
import os, sys, platform
import subprocess, shlex, shutil
import commands
import locale
import pwd, grp
from gi.repository import Gtk
import ConfigParser
class cssi():
def __init__(self, user=None):
self.user = user
self.win = None
self.grid = None
self.CalendarDir="/usr/local/iCal"
self.program_names = ["Postgres", "Subversion", "ssh"]
self.programs = []
self.programs_gentoo = ["/usr/bin/postmaster", "/usr/bin/svn", "/usr/bin/ssh-keygen"]
self.programs_ubuntu = ["/usr/bin/pg_ctlcluster", "/usr/bin/svn", "/usr/bin/ssh-keygen"]
self.programs_debian = ["/usr/bin/pg_ctlcluster", "/usr/bin/svn", "/usr/bin/ssh-keygen"]
self.program_installer_gentoo = [{"Postgres":["dev-db/postgresql-server", "app-admin/eselect-postgresql", "dev-db/postgresql-docs"]}, {"Subversion":["dev-vcs/subversion"]}, {"ssh":["virtual/ssh"]}]
self.program_installer_ubuntu = [{"Postgres":["postgresql-9.1", "postgresql-client-9.1"]},{"Subversion":["subversion"]}, {"ssh":["ssh"]}]
self.program_installer_debian = [{"Postgres":["postgresql-9.1", "postgresql-client-9.1"]},{"ssh":["ssh"]} ]
self.program_installer = []
self.programs_exist = []
self.python_modules = ["PIL", "reportlab", "twisted.web", "twisted.words", "pygments", "webkit", "pg"]
self.python_modules_exist = []
self.python_installer = []
self.python_installer_gentoo = [{"PIL":["dev-python/imaging"]}, {"reportlab":["dev-python/reportlab"]}, {"twisted.web":["dev-python/twisted-web"]}, {"twisted.words":[]}, {"pygments":[]}, {"webkit":["dev-python/pywebkitgtk"]},{"pg":[]} ]
self.python_installer_ubuntu = [{"PIL":["python-imaging", "python-imaging-sane"]}, {"reportlab":["python-reportlab"]} , {"twisted.web":["python-twisted-web" ]}, {"twisted.words":["python-twisted-words"]}, {"pygments":["python-pygments"]}, {"webkit":["python-webkit"]},{"pg":["python-pygresql"]} ]
self.python_installer_debian = []
self.OS_Installer = None
self.OS = None
self.Sudo = "" # or gksu
self.Terminals = ["gnome-terminal", "konsole", "xfce4-terminal", "terminal", "xterm"]
self.Terminal = None
self.cpServer = ConfigParser.ConfigParser()
self.CUON_FS = "/etc/cuon"
self.dia = MessageDialogWindow()
def checkOS(self):
print "start check OS"
if sys.platform.startswith('linux'):
# Linux-specific code here...
os_desc = os.uname()[1].upper()
os_name = os.uname()[2].upper()
os_machine = os.uname()[3].upper()
os_type = os.uname()[4].upper()
os_dist = platform.linux_distribution()[0].upper()
print os_dist, os_name, os_machine, os_type
if os_dist.find("GENTOO")>= 0:
if os.path.exists("/usr/bin/emerge"):
self.OS = "GENTOO"
self.program_installer = self.program_installer_gentoo
self.python_installer = self.python_installer_gentoo
self.programs = self.programs_gentoo
print 'Check1', self.programs , self.programs_gentoo
self.OS_Installer = "/usr/bin/emerge "
elif os_dist.find("UBUNTU")>= 0:
if os.path.exists("/usr/bin/apt-get"):
self.OS = "UBUNTU"
self.program_installer = self.program_installer_ubuntu
self.python_installer = self.python_installer_ubuntu
self.programs = self.programs_ubuntu
self.OS_Installer = "/usr/bin/apt-get install "
elif os_dist.find("DEBIAN")>= 0:
if os.path.exists("/usr/bin/apt-get"):
self.OS = "DEBIAN"
self.program_installer = self.program_installer_debian
self.python_installer = self.python_installer_debian
self.programs = self.programs_debian
self.OS_Installer = "/usr/bin/apt-get install "
print "OS = ", self.OS
for j in self.Terminals:
if os.path.exists("/usr/bin/" + j):
self.Terminal = "/usr/bin/" + j
print "Terminal = " + self.Terminal
break
def checkEnvironment(self):
self.programs_exist = []
self.python_modules_exist = []
print 'programs = ', self.programs
for program in self.programs:
print program
if os.path.exists(program):
self.programs_exist.append(True)
else:
self.programs_exist.append(False)
print 'Exist 8', self.programs, self.programs_exist
for python_module in self.python_modules:
try:
print python_module
if python_module == "webkit":
if os.path.exists("/usr/lib/python2.7/site-packages/webkit/webkit.so"):
self.python_modules_exist.append(True)
elif os.path.exists("/usr/lib/python2.7/dist-packages/webkit/webkit.so"):
self.python_modules_exist.append(True)
else:
self.python_modules_exist.append(False)
else:
if __import__(python_module):
self.python_modules_exist.append(True)
except ImportError:
self.python_modules_exist.append(False)
except:
self.python_modules_exist.append(False)
print 'Exist 9', self.python_modules, self.python_modules_exist
def on_button_clicked(self, widget):
print "Hello World"
self.dia.wrong_requirement()
def start(self, again=False):
print 'again', again
self.checkOS()
self.checkEnvironment()
if not again:
self.win = Gtk.Window()
self.win.connect("delete-event", Gtk.main_quit)
self.button = Gtk.Button(label="Next")
self.button.connect("clicked", self.on_check_missing)
if again:
self.win.remove(self.grid)
self.grid = Gtk.Table(10, 4, True)
z= 0
print self.programs_exist
for name in self.program_names:
print z, self.programs_exist[z]
self.grid.attach(Gtk.Label(name), 0, 1, z, z+1)
self.grid.attach(Gtk.Label(`self.programs_exist[z]`), 1, 2, z,z+1)
z += 1
z = 0
for pName in self.python_modules:
l1 = Gtk.Label(pName)
l1.set_justify(Gtk.Justification.LEFT)
self.grid.attach(l1, 3, 4, z,z+1, 0, 0.5, 0, 0.5)
self.grid.attach(Gtk.Label(`self.python_modules_exist[z]`), 4, 5,z, z+1, 0, 0.5, 0, 0.5)
z += 1
if not again:
self.grid.attach(self.button, 4, 5, 9 , 10)
self.win.add(self.grid)
self.win.show_all()
self.dia.warn(self.user)
if self.dia.Q2 == False:
sys.exit(0)
if not again:
Gtk.main()
def on_check_missing(self, widget, again=False):
if again:
self.start(again)
if not self.Terminal:
self.dia.AbortInfo1()
sys.exit(0)
if False in self.python_modules_exist or False in self.programs_exist:
if again:
self.dia.error1()
sys.exit(0)
self.dia.wrong_requirement()
print 'q1', self.dia.Q1
if self.dia.Q1:
self.try_install_missing_programs()
else:
# All is ok, next step
| maartenq/ansible | test/units/modules/network/exos/test_exos_config.py | Python | gpl-3.0 | 10,664 | 0.001407 |
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests.mock import patch
from ansible.modules.network.exos import exos_config
from units.modules.utils import set_module_args
from .exos_module import TestExosModule, load_fixture
class TestExosConfigModule(TestExosModule):
module = exos_config
def setUp(self):
super(TestExosConfigModule, self).setUp()
self.mock_get_config = patch('ansible.modules.network.exos.exos_config.get_config')
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch('ansible.modules.network.exos.exos_config.load_config')
self.load_config = self.mock_load_config.start()
self.mock_run_commands = patch('ansible.modules.network.exos.exos_config.run_commands')
self.run_commands = self.mock_run_commands.start()
def tearDown(self):
super(TestExosConfigModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
self.mock_run_commands.stop()
def load_fixtures(self, commands=None):
config_file = 'exos_config_config.cfg'
self.get_config.return_value = load_fixture(config_file)
self.load_config.return_value = None
def test_exos_config_unchanged(self):
src = load_fixture('exos_config_config.cfg')
set_module_args(dict(src=src))
self.execute_module()
def test_exos_config_src(self):
src = load_fixture('exos_config_src.cfg')
set_module_args(dict(src=src))
commands = ['configure ports 1 description-string "IDS"',
'configure snmp sysName "marble"']
self.execute_module(changed=True, commands=commands)
def test_exos_config_backup(self):
set_module_args(dict(backup=True))
result = self.execute_module()
self.assertIn('__backup__', result)
def test_exos_config_save_always(self):
self.run_commands.return_value = 'configure snmp sysName "marble"'
set_module_args(dict(save_when='always'))
self.execute_module(changed=True)
self.assertEqual(self.run_commands.call_count, 1)
self.assertEqual(self.get_config.call_count, 0)
self.assertEqual(self.load_config.call_count, 0)
args = self.run_commands.call_args[0][1]
self.assertIn('save configuration', args['command'])
def test_exos_config_save_changed_true(self):
src = load_fixture('exos_config_src.cfg')
set_module_args(dict(src=src, save_when='changed'))
commands = ['configure ports 1 description-string "IDS"',
'configure snmp sysName "marble"']
self.execute_module(changed=True, commands=commands)
self.assertEqual(self.run_commands.call_count, 1)
self.assertEqual(self.get_config.call_count, 1)
self.assertEqual(self.load_config.call_count, 1)
args = self.run_commands.call_args[0][1]
self.assertIn('save configuration', args['command'])
def test_exos_config_save_changed_true_check_mode(self):
src = load_fixture('exos_config_src.cfg')
set_module_args(dict(src=src, save_when='changed', _ansible_check_mode=True))
commands = ['configure ports 1 description-string "IDS"',
'configure snmp sysName "marble"']
self.execute_module(changed=True, commands=commands)
self.assertEqual(self.run_commands.call_count, 0)
self.assertEqual(self.get_config.call_count, 1)
self.assertEqual(self.load_config.call_count, 0)
def test_exos_config_save_changed_false(self):
set_module_args(dict(save_when='changed'))
self.execute_module(changed=False)
self.assertEqual(self.run_commands.call_count, 0)
self.assertEqual(self.get_config.call_count, 0)
self.assertEqual(self.load_config.call_count, 0)
def test_exos_config_save_modified_false(self):
mock_get_startup_config_text = patch('ansible.modules.network.exos.exos_config.get_startup_config_text')
get_startup_config_text = mock_get_startup_config_text.start()
get_startup_config_text.return_value = load_fixture('exos_config_config.cfg')
set_module_args(dict(save_when='modified'))
self.execute_module(changed=False)
self.assertEqual(self.run_commands.call_count, 0)
self.assertEqual(self.get_config.call_count, 1)
self.assertEqual(get_startup_config_text.call_count, 1)
self.assertEqual(self.load_config.call_count, 0)
mock_get_startup_config_text.stop()
def test_exos_config_save_modified_true(self):
mock_get_startup_config_text = patch('ansible.modules.network.exos.exos_config.get_startup_config_text')
get_startup_config_text = mock_get_startup_config_text.start()
get_startup_config_text.return_value = load_fixture('exos_config_modified.cfg')
set_module_args(dict(save_when='modified'))
self.execute_module(changed=True)
self.assertEqual(self.run_commands.call_count, 1)
self.assertTrue(self.get_config.call_count > 0)
self.assertEqual(get_startup_config_text.call_count, 1)
self.assertEqual(self.load_config.call_count, 0)
mock_get_startup_config_text.stop()
def test_exos_config_lines(self):
set_module_args(dict(lines=['configure snmp sysName "marble"']))
commands = ['configure snmp sysName "marble"']
self.execute_module(changed=True, commands=commands)
def test_exos_config_before(self):
set_module_args(dict(lines=['configure snmp sysName "marble"'], before=['test1', 'test2']))
commands = ['test1', 'test2', 'configure snmp sysName "marble"']
self.execute_module(changed=True, commands=commands, sort=False)
def test_exos_config_after(self):
set_module_args(dict(lines=['hostname foo'], after=['test1', 'test2']))
commands = ['hostname foo', 'test1', 'test2']
set_module_args(dict(lines=['configure snmp sysName "marble"'], after=['test1', 'test2']))
commands = ['configure snmp sysName "marble"', 'test1', 'test2']
self.execute_module(changed=True, commands=commands, sort=False)
def test_exos_config_before_after_no_change(self):
set_module_args(dict(lines=['configure snmp sysName "x870"'],
before=['test1', 'test2'],
after=['test3', 'test4']))
self.execute_module()
def test_exos_config_config(self):
config = 'hostname localhost'
set_module_args(dict(lines=['configure snmp sysName "x870"'], config=config))
commands = ['configure snmp sysName "x870"']
self.execute_module(changed=True, commands=commands)
def test_exos_config_match_none(self):
lines = ['configure snmp sysName "x870"']
set_module_args(dict(lines=lines, match='none'))
self.execute_module(changed=True, commands=lines)
def test_exos_config_src_and_lines_fails(self):
args = dict(src='foo', lines='foo')
set_module_args(args)
self.execute_module(failed=True)
def test_exos_config_match_exact_requires_lines(self):
args = dict(match='exact')
set_module_args(args)
self.execute_module(failed=True)
def test_exos_config_match_strict_requires_lines(self):
args = dict(match='strict')
set_module_args(args)
self.execute_module(failed=True)
| ZhaoCJ/django | django/db/migrations/writer.py | Python | bsd-3-clause | 7,931 | 0.001891 |
from __future__ import unicode_literals
import datetime
import types
import os
from importlib import import_module
from django.utils import six
from django.db import models
from django.db.models.loading import cache
from django.db.migrations.loader import MigrationLoader
from django.utils.encoding import force_text
from django.utils.functional import Promise
class MigrationWriter(object):
"""
Takes a Migration instance and is able to produce the contents
of the migration file from it.
"""
def __init__(self, migration):
self.migration = migration
def as_string(self):
"""
Returns a string of the file contents.
"""
items = {
"dependencies": repr(self.migration.dependencies),
}
imports = set()
# Deconstruct operations
operation_strings = []
for operation in self.migration.operations:
name, args, kwargs = operation.deconstruct()
arg_strings = []
for arg in args:
arg_string, arg_imports = self.serialize(arg)
arg_strings.append(arg_string)
imports.update(arg_imports)
for kw, arg in kwargs.items():
arg_string, arg_imports = self.serialize(arg)
imports.update(arg_imports)
arg_strings.append("%s = %s" % (kw, arg_string))
operation_strings.append("migrations.%s(%s\n )" % (name, "".join("\n %s," % arg for arg in arg_strings)))
items["operations"] = "[%s\n ]" % "".join("\n %s," % s for s in operation_strings)
# Format imports nicely
imports.discard("from django.db import models")
if not imports:
items["imports"] = ""
else:
items["imports"] = "\n".join(imports) + "\n"
return (MIGRATION_TEMPLATE % items).encode("utf8")
@property
def filename(self):
return "%s.py" % self.migration.name
@property
def path(self):
migrations_module_name = MigrationLoader.migrations_module(self.migration.app_label)
app_module = cache.get_app(self.migration.app_label)
# See if we can import the migrations module directly
try:
migrations_module = import_module(migrations_module_name)
basedir = os.path.dirname(migrations_module.__file__)
except ImportError:
# Alright, see if it's a direct submodule of the app
oneup = ".".join(migrations_module_name.split(".")[:-1])
app_oneup = ".".join(app_module.__name__.split(".")[:-1])
if oneup == app_oneup:
basedir = os.path.join(os.path.dirname(app_module.__file__), migrations_module_name.split(".")[-1])
else:
raise ImportError("Cannot open migrations module %s for app %s" % (migrations_module_name, self.migration.app_label))
        return os.path.join(basedir, self.filename)
|
@classmethod
def serialize_deconstructed(cls, path, args, kwargs):
module, name = path.rsplit(".", 1)
if module == "django.db.models":
imports = set(["from django.db import models"])
name = "models.%s" % name
else:
imports = set(["import %s" % module])
|
name = path
arg_strings = []
for arg in args:
arg_string, arg_imports = cls.serialize(arg)
arg_strings.append(arg_string)
imports.update(arg_imports)
for kw, arg in kwargs.items():
arg_string, arg_imports = cls.serialize(arg)
imports.update(arg_imports)
arg_strings.append("%s=%s" % (kw, arg_string))
return "%s(%s)" % (name, ", ".join(arg_strings)), imports
@classmethod
def serialize(cls, value):
"""
Serializes the value to a string that's parsable by Python, along
with any needed imports to make that string work.
More advanced than repr() as it can encode things
like datetime.datetime.now.
"""
# Sequences
if isinstance(value, (list, set, tuple)):
imports = set()
strings = []
for item in value:
item_string, item_imports = cls.serialize(item)
imports.update(item_imports)
strings.append(item_string)
if isinstance(value, set):
format = "set([%s])"
elif isinstance(value, tuple):
format = "(%s,)"
else:
format = "[%s]"
return format % (", ".join(strings)), imports
# Dictionaries
elif isinstance(value, dict):
imports = set()
strings = []
for k, v in value.items():
k_string, k_imports = cls.serialize(k)
v_string, v_imports = cls.serialize(v)
imports.update(k_imports)
imports.update(v_imports)
strings.append((k_string, v_string))
return "{%s}" % (", ".join("%s: %s" % (k, v) for k, v in strings)), imports
# Datetimes
elif isinstance(value, (datetime.datetime, datetime.date)):
return repr(value), set(["import datetime"])
# Simple types
elif isinstance(value, six.integer_types + (float, six.binary_type, six.text_type, bool, type(None))):
return repr(value), set()
# Promise
elif isinstance(value, Promise):
return repr(force_text(value)), set()
# Django fields
elif isinstance(value, models.Field):
attr_name, path, args, kwargs = value.deconstruct()
return cls.serialize_deconstructed(path, args, kwargs)
# Functions
elif isinstance(value, (types.FunctionType, types.BuiltinFunctionType)):
# Special-cases, as these don't have im_class
special_cases = [
(datetime.datetime.now, "datetime.datetime.now", ["import datetime"]),
(datetime.datetime.utcnow, "datetime.datetime.utcnow", ["import datetime"]),
(datetime.date.today, "datetime.date.today", ["import datetime"]),
]
for func, string, imports in special_cases:
if func == value: # For some reason "utcnow is not utcnow"
return string, set(imports)
# Method?
if hasattr(value, "im_class"):
klass = value.im_class
module = klass.__module__
return "%s.%s.%s" % (module, klass.__name__, value.__name__), set(["import %s" % module])
elif hasattr(value, 'deconstruct'):
return cls.serialize_deconstructed(*value.deconstruct())
elif value.__name__ == '<lambda>':
raise ValueError("Cannot serialize function: lambda")
elif value.__module__ is None:
raise ValueError("Cannot serialize function %r: No module" % value)
else:
module = value.__module__
return "%s.%s" % (module, value.__name__), set(["import %s" % module])
# Classes
elif isinstance(value, type):
special_cases = [
(models.Model, "models.Model", []),
]
for case, string, imports in special_cases:
if case is value:
return string, set(imports)
if hasattr(value, "__module__"):
module = value.__module__
return "%s.%s" % (module, value.__name__), set(["import %s" % module])
# Uh oh.
else:
raise ValueError("Cannot serialize: %r" % value)
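# Illustrative, stand-alone sketch of the serialize() idea above: a value is
# turned into a repr()-style string plus the set of imports that string needs.
# _serialize_simple is a simplified hypothetical helper, not Django's API; it
# reuses the datetime module already imported at the top of this file.
def _serialize_simple(value):
    if isinstance(value, (datetime.datetime, datetime.date)):
        return repr(value), set(["import datetime"])
    if isinstance(value, (bool, int, float, str, type(None))):
        return repr(value), set()
    if isinstance(value, list):
        strings, imports = [], set()
        for item in value:
            item_string, item_imports = _serialize_simple(item)
            strings.append(item_string)
            imports.update(item_imports)
        return "[%s]" % ", ".join(strings), imports
    raise ValueError("Cannot serialize: %r" % (value,))

# _serialize_simple([1, datetime.date(2013, 5, 1)])
# -> ("[1, datetime.date(2013, 5, 1)]", set(["import datetime"]))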
MIGRATION_TEMPLATE = """# encoding: utf8
from django.db import models, migrations
%(imports)s
class Migration(migrations.Migration):
dependencies = %(dependencies)s
operations = %(operations)s
"""
|
Kryz/sentry
|
tests/sentry/metrics/test_datadog.py
|
Python
|
bsd-3-clause
| 979
| 0
|
from __future__ import absolute_import
from mock import patch
from datadog.util.hostname import get_hostname
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
        mock_incr.assert_called_once_with(
|
'sentrytest.foo', 1,
tags=['instance:bar'],
host=get_hostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
|
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=get_hostname(),
)
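# Stand-alone illustration of the patch-and-assert pattern used above, with a
# plain object instead of the Datadog client. _Example is hypothetical; patch
# is the same mock.patch imported at the top of this module.
class _Example(object):
    def send(self, name, value):
        raise RuntimeError('send() should be patched in tests')

def _patched_call_example():
    with patch.object(_Example, 'send') as mock_send:
        _Example().send('sentrytest.foo', 1)
        mock_send.assert_called_once_with('sentrytest.foo', 1)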
|
pluser/nikola_plugins
|
v7/wordpress_compiler/wordpress/wordpress.py
|
Python
|
mit
| 11,780
| 0.002547
|
# -*- coding: utf-8 -*-
# A WordPress compiler plugin for Nikola
#
# Copyright (C) 2014-2015 by Felix Fontein
# Copyright (C) by the WordPress contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from __future__ import unicode_literals
import os
import io
import json
import re
import sys
from nikola.plugin_categories import PageCompiler
from nikola.utils import makedirs, write_metadata
from nikola.utils import get_logger, STDERR_HANDLER
from . import default_filters, php, plugin_interface, shortcodes
_LOGGER = get_logger('compile_wordpress', STDERR_HANDLER)
class Context(object):
id = None
def __init__(self, id, name=None, additional_data=None):
self.id = id
self.name = name
self.__file_deps_fragment = set()
self.__file_deps_page = set()
self.__uptodate_deps_fragment = list()
self.__uptodate_deps_page = list()
self.__additional_data = additional_data or {}
self.__plugin_data = {}
def get_name(self):
return "(unknown:{0})".format(self.id) if self.name is None else self.name
def add_file_dependency(self, filename, add='both'):
if add not in {'fragment', 'page', 'both'}:
raise Exception("Add parameter is '{0}', but must be either 'fragment', 'page', or 'both'.".format(add))
if add == 'fragment' or add == 'both':
self.__file_deps_fragment.add(filename)
if add == 'page' or add == 'both':
self.__file_deps_page.add(filename)
def add_uptodate_dependency(self, uptodate_dependency, add='both'):
if add not in {'fragment', 'page', 'both'}:
raise Exception("Add parameter is '{0}', but must be either 'fragment', 'page', or 'both'.".format(add))
if add == 'fragment' or add == 'both':
self.__uptodate_deps_fragment.append(uptodate_dependency)
if add == 'page' or add == 'both':
self.__uptodate_deps_page.append(uptodate_dependency)
def has_dependencies(self):
return (len(self.__file_deps_fragment) > 0 or len(self.__file_deps_page) > 0 or
len(self.__uptodate_deps_fragment) > 0 or len(self.__uptodate_deps_page) > 0)
def get_file_dependencies_fragment(self):
return sorted(list(self.__file_deps_fragment))
def get_file_dependencies_page(self):
return sorted(list(self.__file_deps_page))
def get_uptodate_dependencies_fragment(self):
return self.__uptodate_deps_fragment
def get_uptodate_dependencies_page(self):
return self.__uptodate_deps_page
def get_additional_data(self, name):
return self.__additional_data.get(name)
def store_plugin_data(self, plugin_name, key, data):
if plugin_name not in self.__plugin_data:
self.__plugin_data[plugin_name] = {}
self.__plugin_data[plugin_name][key] = data
def get_plugin_data(self, plugin_name, key, default_value=None):
plugin_data = self.__plugin_data.get(plugin_name)
return default_value if plugin_data is None else plugin_data.get(key, default_value)
def inc_plugin_counter(self, plugin_name, key):
counter = self.get_plugin_data(plugin_name, key, 0)
counter += 1
self.store_plugin_data(plugin_name, key, counter)
return counter
def __str__(self):
return "Context<" + str(self.id) + ">(" + str(self.__file_deps_fragment) + ", " + str(self.__file_deps_page) + ", " + str(self.__uptodate_deps_fragment) + ", " + str(self.__uptodate_deps_page) + ")"
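# Illustrative usage sketch for the Context class above (not part of the
# original plugin): dependencies and per-plugin data collected while a single
# post is compiled.
def _context_usage_example():
    ctx = Context(1, name="example-post")
    ctx.add_file_dependency("shortcodes/gallery.php", add='fragment')
    count = ctx.inc_plugin_counter("gallery", "images")
    assert ctx.get_file_dependencies_fragment() == ["shortcodes/gallery.php"]
    assert ctx.get_plugin_data("gallery", "images") == count == 1
    assert ctx.has_dependencies()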
class CompileWordpress(PageCompiler):
"""Compiles a subset of Wordpress into HTML."""
name = "wordpress"
demote_headers = True
site = None
def __init__(self):
|
super(CompileWordpress, self).__init__()
self.__filters = dict()
        self.__shortcodes = shortcodes.ShortCodes()
|
self.__default_wordpress_filters = default_filters.DefaultWordpressFilters(self.__shortcodes)
self.add_filter("the_content", lambda data, context: self.__default_wordpress_filters.wptexturize(data))
self.add_filter("the_content", lambda data, context: self.__default_wordpress_filters.convert_smilies(data))
self.add_filter("the_content", lambda data, context: self.__default_wordpress_filters.convert_chars(data))
self.add_filter("the_content", lambda data, context: self.__default_wordpress_filters.wpautop(data))
self.add_filter("the_content", lambda data, context: self.__default_wordpress_filters.shortcode_unautop(data))
self.add_filter('the_content', lambda data, context: self.__shortcodes.do_shortcode(data, context), 11) # AFTER wpautop()
def _register_plugins(self):
# collect plugins
count = 0
modules = {
'default_filters': default_filters,
'php': php,
'plugin_interface': plugin_interface,
'shortcodes': shortcodes,
'wordpress': sys.modules[__name__]
}
for plugin in self.get_compiler_extensions():
_LOGGER.info("Registered WordPress plugin {0}".format(plugin.name))
plugin.plugin_object.register(self, modules)
count += 1
_LOGGER.info("Registered {0} WordPress plugin{1}".format(count, "s" if count != 1 else ""))
def register_head_code(self, head_function):
# FIXME: implement
# (not even sure if it's really implementable...)
raise NotImplementedError()
def add_filter(self, tag, filter_function, priority=10):
if tag not in self.__filters:
self.__filters[tag] = list()
f = self.__filters[tag]
# find where to insert priority
i = 0
while i < len(f) and f[i][0] < priority:
i += 1
if i < len(f) and f[i][0] > priority:
f.insert(i, (priority, list()))
elif i == len(f):
f.append((priority, list()))
f[i][1].append(filter_function)
def filter(self, tag, data, context):
if tag not in self.__filters:
return data
for prio, fs in self.__filters[tag]:
for f in fs:
data = f(data, context)
return data
def register_shortcode(self, tag, function):
self.__shortcodes.register_shortcode(tag, function)
def unregister_shortcode(self, tag):
self.__shortcodes.unregister_shortcode(tag)
def do_shortcode(self, data):
return self.__shortcodes.do_shortcode(data)
def set_site(self, site):
super(CompileWordpress, self).set_site(site)
self._register_plugins()
def __formatData(self, data, context, source=None):
output = self.filter("the_content", data, context)
left_shortcodes = self.__shortcodes.get_containing_shortcodes_set(output)
if len(left_shortcodes) > 0 and source is not None:
_LOGGER.warning("The post '" + source + "' still contains shortcodes: " + str(left_shortcodes))
return output
def compile_to_string(self, source_data, name=None, additional_data=None):
context = Context(hash(source_data), name=name, additional_data=additional_data)
return self.__formatData(source_data, context)
def _read_extra_deps(self, post):
dep_path = post.base_path + '.dep'
if os.path.isfile(dep_path):
with io.open(dep_path, 'rb') as file:
result = json.loads(file.read().decode('utf-8'))
if type(result) == list and len(result) == 4:
                    return result
|
OxES/k2sc
|
src/detrender.py
|
Python
|
gpl-3.0
| 7,258
| 0.012951
|
from __future__ import division
import math as m
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as pl
from scipy.optimize import fmin, fmin_powell, minimize
from numpy import (any, array, asarray, ones, ones_like, zeros, isfinite, inf, concatenate, arange, unique, delete,
dot, median, abs, std, nan, diag, log, where, identity, s_, sqrt)
from numpy.random import permutation
from matplotlib.pyplot import subplots, setp
from numpy.linalg.linalg import LinAlgError
from .gp import GeorgeGP, SplitGP
from .kernels import BasicKernel
from .utils import medsig
from .dtdata import DtData
class Detrender(object):
def __init__(self, flux, inputs, mask=None, p0=None, kernel=None, splits=[], tr_nrandom=200, tr_bspan=50, tr_nblocks=6):
self.data = DtData(flux, inputs, mask)
self.kernel = kernel or BasicKernel()
self.gp = SplitGP(self.kernel, splits) if splits is not None else GeorgeGP(self.kernel)
self.tr_data = self.data.create_training_set(tr_nrandom, tr_bspan, tr_nblocks)
self.gp.set_inputs(self.tr_data.masked_inputs)
## ======================
## Convenience routines
## ======================
@property
def flux(self):
return self.data.masked_flux
@property
def time(self):
return self.data.masked_time
## =====================
## Detrending routines
## =====================
def covariance_matrix(self, pv=None, inputs=None, separate=False):
inputs = inputs if inputs is not None else self.tr_data.masked_inputs
self.gp.compute(inputs, pv)
return self.gp._covariance_matrix(inputs, separate=separate)
def neglnposterior(self, pv, training=True):
if any(pv < self.kernel.lims[0]) or any(self.kernel.lims[1] < pv):
return inf
ds = self.tr_data if training else self.data
try:
lnlike = self.gp.lnlikelihood(pv, ds.masked_normalised_flux, ds.masked_inputs)
return -(self.kernel.ln_prior(pv) + lnlike)
except LinAlgError:
return inf
def train(self, pv0=None, disp=False):
pv0 = pv0 if pv0 is not None else self.kernel.pv0
mres = minimize(self.neglnposterior, pv0, method='Powell')
self.tr_pv = mres.x.copy()
return self.tr_pv, mres.success
def predict(self, pv, inputs=None, components=False, mean_only=True):
inputs = inputs if inputs is not None else self.data.unmasked_inputs
self.gp.compute(self.data.masked_inputs, pv)
self.gp._compute_alpha(self.data.masked_normalised_flux)
if components:
mu_time, mu_pos = self.gp.predict_components(inputs)
return ((1. + mu_time) * self.data._fm,
(1. + mu_pos) * self.data._fm)
else:
return self.gp.predict(inputs, mean_only=mean_only)
def detrend_spatial(self, pv):
mt, mp = self.compute_components(pv)
flux = self.data.unmasked_flux.copy()
flux[self.data.mask] += -mp + median(mp)
flux[~self.data.mask] = nan
return flux
## ===================
## Plotting routines
## ===================
def plot_xy(self, pv=None, ax=None, plot_wireframe=False):
"""Plot the x and y points for the whole dataset and the training set.
"""
if ax is None:
fig,ax = subplots(1,1, figsize=(10,10))
if pv is None:
ax.tripcolor(self.data.mx, self.data.my, ones(self.data.nptm), vmin=0, vmax=1)
if plot_wireframe:
ax.triplot(self.data.mx, self.data.my, color='w')
else:
mt, mp = self.compute_components(pv)
ax.tripcolor(self.data.mx, self.data.my, mp)
ax.plot(self.tr_data.mx, self.tr_data.my, 'o', ms=3, c='k', mec='w')
return ax
def plot_t(self, pv=None, ax=None):
""" Plot the flux as a function of time for the whole dataset and the training set.
"""
if ax is None:
fig, ax = subplots(1,1)
fm = self.data.flux_median
fmin = self.data.masked_flux.min()
fmax = self.data.masked_flux.max()
fptp = self.data.masked_flux.ptp()
ax.plot(self.data.mt, self.data.mf, c='0.75', lw=1)
ax.plot(self.tr_data.ut, self.tr_data.uf, '.k', ms=6)
setp(ax, ylim=(0.999*fmin,1.001*fmax))
if pv is not None:
fd = self.detrend_spatial(pv)
fd += fm - np.nanmedian(fd)
mm = isfinite(fd)
            ax.plot(self.data.unmasked_time[mm], fd[mm] - 0.7*fptp, alpha=0.75, lw=1)
|
setp(ax, ylim=(0.999*(fmin-0.7*fptp), 1.001*fmax))
setp(ax, xlim=self.data.mt[[0,-1]], xlabel='Time', ylabel='Flux')
return ax
def plot_report(self, pv, tid, fname=None, maxpt=350):
|
lmargin, rmargin = 0.12, 0.03
fig = pl.figure(figsize=(8.3,11.7))
fig.text(0.04, 0.965, 'EPIC {:9d}'.format(tid), va='top', size=24, color='w', weight='bold')
ax = fig.add_axes([0,0,1,1])
ax.xaxis.set_visible(False)
ax.yaxis.set_visible(False)
ax.set_zorder(-1000)
ax.add_patch(pl.Rectangle((0,0.92), 1, 0.08, fill=True))
ax_a = fig.add_axes([lmargin,0.25,1-lmargin-rmargin,0.3])
ax_x = fig.add_axes([lmargin,0.05,1-lmargin-rmargin,0.1])
ax_y = fig.add_axes([lmargin,0.15,1-lmargin-rmargin,0.1])
ax_c = fig.add_axes([0.55,0.6,0.45-rmargin, 0.3])
ax_x.plot(self.data.masked_time, self.data.mx, lw=1)
ax_y.plot(self.data.masked_time, self.data.my, lw=1)
## Compute stuff
## -------------
fm = median(self.data.masked_flux)
fmin = self.data.masked_flux.min()
fmax = self.data.masked_flux.max()
fptp = self.data.masked_flux.ptp()
mt, mp = self.compute_components(pv)
ms = self.data.mask
fd = self.data.unmasked_flux.copy()
fd[ms] += -mp + median(mp)
fd[~ms] = nan
fd += fm - np.nanmedian(fd)
## Plot A
## ------
ax_a.plot(self.data.masked_time, self.data.masked_flux/fm, c='0.75', lw=1)
ax_a.plot(self.tr_data.unmasked_time, self.tr_data.unmasked_flux/fm, '.k', ms=6)
ax_a.plot(*self.data.outliers, ls='', marker='o', ms=6)
ax_a.plot(self.data.unmasked_time[ms], (fd[ms] - 0.7*fptp)/fm, lw=1)
ax_a.plot(self.time, (mp-1.4*fptp)/fm, lw=1)
samples = permutation(self.time.size)[:maxpt]
ax_c.tripcolor(self.data.mx[samples], self.data.my[samples], mp[samples])
ax_c.plot(self.tr_data.mx, self.tr_data.my, '.', ms=3, c='w', alpha=0.8)
ax_c.plot(self.tr_data.mx, self.tr_data.my, '.', ms=1.5, c='k')
setp(ax_a, ylim=(0.999*(fmin-1.4*fptp)/fm, 1.001*fmax/fm))
setp(ax_a.get_xticklabels()+ax_y.get_xticklabels(), visible=False)
setp(ax_x, xlabel='Time', ylabel='X')
setp(ax_c, xlabel='X', ylabel='Y')
setp([ax_a,ax_x,ax_y], xlim=self.time[[0,-1]])
setp(ax_a, ylabel='Normalised flux')
setp(ax_y, ylabel='Y')
if fname:
fig.savefig(fname)
|
MichaelSchreier/Kano
|
class_UI_aboutWindow.py
|
Python
|
gpl-3.0
| 4,429
| 0.029153
|
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 17 17:19:10 2016
@author: Michael
"""
from PyQt5 import QtWidgets
class AboutWindow(QtWidgets.QTextEdit):
def __init__(self, parent=None):
super().__init__(parent)
self.setReadOnly(True)
self.setHtml(
"""
<h1 id="kano">Kano</h1>
<p>Copyright (c) 2017, Michael Schreier <br>
All rights reserved.</p>
<p>This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.</p>
<p>This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.</p>
<p>You should have received a copy of the GNU General Public License along with this program. If not, see <a href="http://www.gnu.org/licenses/">http://www.gnu.org/licenses/</a></p>
<hr>
<p>Kano has been built using the following libraries:</p>
<h3 id="entypo">Entypo+</h3>
<blockquote>
<p>All icons used by Kano are taken from the “Entypo+” library by Daniel Bruce, available under the Creative Commons license CC BY-SA 4.0.</p>
</blockquote>
<h3 id="pyqt5">PyQt5</h3>
<blockquote>
<p>Copyright (c) 2017, Riverbank Computing Limited <br>
All rights reserved.</p>
<p>This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by >the Free Software Foundation, either version 3 of the License, or (at your option) any later version.</p>
<p>This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.</p>
<p>You should have received a copy of the GNU General Public License along with this program. If not, see <a href="http://www.gnu.org/licenses/">http://www.gnu.org/licenses/</a></p>
</blockquote>
            <h3 id="fuzzywuzzy">FuzzyWuzzy</h3>
|
<blockquote>
            <p>Copyright (c) 2017, SeatGeek <br>
All rights reserved.</p>
            <p>This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.</p>
|
<p>This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.</p>
<p>You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA</p>
</blockquote>
<h3 id="pyyaml">PyYAML</h3>
<blockquote>
<p>Copyright (c) 2006, Kirill Simonov <br>
All rights reserved.</p>
<p>Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:</p>
<p>The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.</p>
<p>THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.</p>
</blockquote>
"""
)
|
benley/Mathics
|
mathics/builtin/physchemdata.py
|
Python
|
gpl-3.0
| 5,698
| 0.000527
|
# -*- coding: utf8 -*-
"""
Physical and Chemical data
"""
from csv import reader as csvreader
from mathics.builtin.base import Builtin
from mathics.core.expression import (Expression, from_python, Symbol, String,
strip_context)
from mathics.settings import ROOT_DIR
def load_element_data():
element_file = open(ROOT_DIR + 'data/element.csv', 'rb')
reader = csvreader(element_file, delimiter='\t')
element_data = []
for row in reader:
element_data.append([value for value in row])
element_file.close()
return element_data
_ELEMENT_DATA = load_element_data()
class ElementData(Builtin):
"""
<dl>
<dt>'ElementData["$name$", "$property$"]
<dd>gives the value of the $property$ for the chemical specified by $name$".
<dt>'ElementData[$n$, "$property$"]
<dd>gives the value of the $property$ for the $n$th chemical element".
</dl>
>> ElementData[74]
= Tungsten
>> ElementData["He", "AbsoluteBoilingPoint"]
= 4.22
>> ElementData["Carbon", "IonizationEnergies"]
= {1086.5, 2352.6, 4620.5, 6222.7, 37831, 47277.}
>> ElementData[16, "ElectronConfigurationString"]
= [Ne] 3s2 3p4
>> ElementData[73, "ElectronConfiguration"]
= {{2}, {2, 6}, {2, 6, 10}, {2, 6, 10, 14}, {2, 6, 3}, {2}}
The number of known elements:
>> Length[ElementData[All]]
= 118
Some properties are not appropriate for certain elements:
>> ElementData["He", "ElectroNegativity"]
= Missing[NotApplicable]
Some data is missing:
>> ElementData["Tc", "SpecificHeat"]
= Missing[NotAvailable]
All the known properties:
>> ElementData["Properties"]
= {Abbreviation, AbsoluteBoilingPoint, AbsoluteMeltingPoint, AtomicNumber, AtomicRadius, AtomicWeight, Block, BoilingPoint, BrinellHardness, BulkModulus, CovalentRadius, CrustAbundance, Density, DiscoveryYear, ElectroNegativity, ElectronAffinity, ElectronConfiguration, ElectronConfigurationString, ElectronShellConfiguration, FusionHeat, Group, IonizationEnergies, LiquidDensity, MeltingPoint, MohsHardness, Name, Period, PoissonRatio, Series, ShearModulus, SpecificHeat, StandardName, ThermalConductivity, VanDerWaalsRadius, VaporizationHeat, VickersHardness, YoungModulus}
>> ListPlot[Table[ElementData[z, "AtomicWeight"], {z, 118}]]
= -Graphics-
"""
rules = {
'ElementData[n_]': 'ElementData[n, "StandardName"]',
'ElementData[]': 'ElementData[All]',
'ElementData["Properties"]': 'ElementData[All, "Properties"]',
}
messages = {
'noent': ('`1` is not a known entity, class, or tag for ElementData. '
'Use ElementData[] for a list of entities.'),
'noprop': ('`1` is not a known property for ElementData. '
'Use ElementData["Properties"] for a list of properties.'),
}
def apply_all(self, evaluation):
'ElementData[All]'
iprop = _ELEMENT_DATA[0].index('StandardName')
return from_python([element[iprop] for element in _ELEMENT_DATA[1:]])
def apply_all_properties(self, evaluation):
'ElementData[All, "Properties"]'
return from_python(sorted(_ELEMENT_DATA[0]))
def apply_name(self, name, prop, evaluation):
"ElementData[name_?StringQ, prop_]"
py_name = name.to_python().strip('"')
names = ['StandardName', 'Name', 'Abbreviation']
iprops = [_ELEMENT_DATA[0].index(s) for s in names]
indx = None
for iprop in iprops:
try:
indx = [element[iprop] for element in
_ELEMENT_DATA[1:]].index(py_name) + 1
except ValueError:
pass
if indx is None:
evaluation.message("ElementData", "noent", name)
return
return self.apply_int(from_python(indx), prop, evaluation)
def apply_int(self, n, prop, evaluation):
"ElementData[n_?IntegerQ, prop_]"
from mathics.core.parser import parse
py_n = n.to_python()
py_prop = prop.to_python()
# Check element specifier n or "name"
if isinstance(py_n, int):
if not 1 <= py_n <= 118:
evaluation.message("ElementData", "noent", n)
return
elif isinstance(py_n, unicode):
pass
else:
evaluation.message("ElementData", "noent", n)
return
# Check property specifier
if isinstance(py_prop, str) or isinstance(py_prop, unicode):
py_prop = str(py_prop)
if py_prop == '"Properties"':
result = []
for i, p in enumerate(_ELEMENT_DATA[py_n]):
if p not in ["NOT_AVAILABLE", "NOT_APPLICABLE", "NOT_KNOWN"]:
result.append(_ELEMENT_DATA[0][i])
return from_python(sorted(result))
if not (isinstance(py_prop, str) and
py_prop[0] == py_prop[-1] == '"' and
py_prop.strip('"') in _ELEMENT_DATA[0]):
evaluation.message("ElementData", "noprop", prop)
return
iprop = _ELEMENT_DATA[0].index(py_prop.strip('"'))
result = _ELEMENT_DATA[py_n][iprop]
if result == "NOT_AVAILABLE":
            return Expression("Missing", "NotAvailable")
|
|
if result == "NOT_APPLICABLE":
return Expression("Missing", "NotApplicable")
if result == "NOT_KNOWN":
return Expression("Missing", "Unknown")
result = parse(result, evaluation.definitions)
if isinstance(result, Symbol):
result = String(strip_context(result.get_name()))
return result
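# Stand-alone sketch of the table lookup pattern used by apply_int() above,
# with a tiny made-up table instead of data/element.csv (illustrative values,
# hypothetical helper names).
_DEMO_TABLE = [
    ["AtomicNumber", "StandardName", "Abbreviation"],
    ["1", "Hydrogen", "H"],
    ["2", "Helium", "He"],
]

def _demo_lookup(n, prop):
    iprop = _DEMO_TABLE[0].index(prop)
    return _DEMO_TABLE[n][iprop]

assert _demo_lookup(2, "StandardName") == "Helium"
assert _demo_lookup(1, "Abbreviation") == "H"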
|
mikewiebe-ansible/ansible
|
lib/ansible/plugins/action/nxos.py
|
Python
|
gpl-3.0
| 8,307
| 0.003732
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import re
import sys
from ansible import constants as C
from ansible.plugins.action.network import ActionModule as ActionNetworkModule
from ansible.module_utils.network.common.utils import load_provider
from ansible.module_utils.network.nxos.nxos import nxos_provider_spec
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionNetworkModule):
def run(self, tmp=None, task_vars=None):
del tmp # tmp no longer has any effect
module_name = self._task.action.split('.')[-1]
self._config_module = True if module_name == 'nxos_config' else False
persistent_connection = self._play_context.connection.split('.')[-1]
warnings = []
if (self._play_context.connection in ('httpapi', 'local') or self._task.args.get('provider', {}).get('transport') == 'nxapi') \
and module_name in ('nxos_file_copy', 'nxos_nxapi'):
return {'failed': True, 'msg': "Transport type 'nxapi' is not valid for '%s' module." % (module_name)}
if module_name == 'nxos_file_copy':
self._task.args['host'] = self._play_context.remote_addr
self._task.args['password'] = self._play_context.password
if self._play_context.connection == 'network_cli':
self._task.args['username'] = self._play_context.remote_user
elif self._play_context.connection == 'local':
self._task.args['username'] = self._play_context.connection_user
if module_name == 'nxos_install_os':
connection = self._connection
if connection.transport == 'local':
persistent_command_timeout = C.PERSISTENT_COMMAND_TIMEOUT
persistent_connect_timeout = C.PERSISTENT_CONNECT_TIMEOUT
else:
persistent_command_timeout = connection.get_option('persistent_command_timeout')
persistent_connect_timeout = connection.get_option('persistent_connect_timeout')
display.vvvv('PERSISTENT_COMMAND_TIMEOUT is %s' % str(persistent_command_timeout), self._play_context.remote_addr)
display.vvvv('PERSISTENT_CONNECT_TIMEOUT is %s' % str(persistent_connect_timeout), self._play_context.remote_addr)
if persistent_command_timeout < 600 or persistent_connect_timeout < 600:
msg = 'PERSISTENT_COMMAND_TIMEOUT and PERSISTENT_CONNECT_TIMEOUT'
msg += ' must be set to 600 seconds or higher when using nxos_install_os module.'
msg += ' Current persistent_command_timeout setting:' + str(persistent_command_timeout)
msg += ' Current persistent_connect_timeout setting:' + str(persistent_connect_timeout)
return {'failed': True, 'msg': msg}
if persistent_connection in ('network_cli', 'httpapi'):
provider = self._task.args.get('provider', {})
if any(provider.values()):
display.warning('provider is unnecessary when using %s and will be ignored' % self._play_context.connection)
del self._task.args['provider']
if self._task.args.get('transport'):
display.warning('transport is unnecessary when using %s and will be ignored' % self._play_context.connection)
del self._task.args['transport']
elif self._play_context.connection == 'local':
provider = load_provider(nxos_provider_spec, self._task.args)
transport = provider['transport'] or 'cli'
display.vvvv('connection transport is %s' % transport, self._play_context.remote_addr)
if transport == 'cli':
pc = copy.deepcopy(self._play_context)
pc.connection = 'ansible.netcommon.network_cli'
pc.network_os = 'cisco.nxos.nxos'
pc.remote_addr = provider['host'] or self._play_context.remote_addr
pc.port = int(provider['port'] or self._play_context.port or 22)
pc.remote_user = provider['username'] or self._play_context.connection_user
pc.password = provider['password'] or self._play_context.password
pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
pc.become = provider['authorize'] or False
if pc.become:
pc.become_method = 'enable'
pc.become_pass = provider['auth_pass']
connection = self._shared_loader_obj.connection_loader.get('ansible.netcommon.persistent', pc, sys.stdin,
task_uuid=self._task._uuid)
# TODO: Remove below code after ansible minimal is cut out
if connection is None:
pc.connection = 'network_cli'
pc.network_os = 'nxos'
connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin, task_uuid=self._task._uuid)
display.vvv('using connection plugin %s (was local)' % pc.connection, pc.remote_addr)
command_timeout = int(provider['timeout']) if provider['timeout'] else connection.get_option('persistent_command_timeout')
connection.set_options(direct={'persistent_command_timeout': command_timeout})
socket_path = connection.run()
display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
if not socket_path:
return {'failed': True,
'msg': 'unable to open shell. Please see: ' +
'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}
task_vars['ansible_socket'] = socket_path
else:
self._task.args['provider'] = ActionModule.nxapi_implementation(provider, self._play_context)
warnings.append(['connection local support for this module is deprecated and will be removed in version 2.14,'
                                 ' use connection either httpapi or ansible.netcommon.httpapi (whichever is applicable)'])
|
else:
return {'failed': True, 'msg': 'Connection type %s is not valid for this module' % self._play_context.connection}
result = super(ActionModule, self).run(task_vars=task_vars)
if warnings:
if 'warnings' in result:
result['warnings'].extend(warnings)
else:
result['warnings'] = warnings
return result
|
@staticmethod
def nxapi_implementation(provider, play_context):
provider['transport'] = 'nxapi'
if provider.get('host') is None:
provider['host'] = play_context.remote_addr
if provider.get('port') is None:
if provider.get('use_ssl'):
provider['port'] = 443
else:
provider['port'] = 80
if provider.get('timeout') is None:
provider['timeout'] = C.PERSISTENT_COMMAND_TIMEOUT
if provider.get('username') is None:
provider['username'] = play_context.connection_user
if provider.get('password') is None:
provider['password'] = play_context.password
if provider.get('use_ssl') is None:
            provider['use_ssl'] = False
|
jaggu303619/asylum
|
openerp/addons/auth_crypt/auth_crypt.py
|
Python
|
agpl-3.0
| 5,849
| 0.009574
|
#
# Implements encrypting functions.
#
# Copyright (c) 2008, F S 3 Consulting Inc.
#
# Maintainer:
# Alec Joseph Rivera (agi<at>fs3.ph)
# refactored by Antony Lesuisse <al<at>openerp.com>
#
import hashlib
import hmac
import logging
from random import sample
from string import ascii_letters, digits
import openerp
from openerp.osv import fields, osv
_logger = logging.getLogger(__name__)
magic_md5 = '$1$'
magic_sha256 = '$5$'
openerp.addons.base.res.res_users.USER_PRIVATE_FIELDS.append('password_crypt')
def gen_salt(length=8, symbols=None):
if symbols is None:
symbols = ascii_letters + digits
return ''.join(sample(symbols, length))
def md5crypt( raw_pw, salt, magic=magic_md5 ):
""" md5crypt FreeBSD crypt(3) based on but different from md5
The md5crypt is based on Mark Johnson's md5crypt.py, which in turn is
based on FreeBSD src/lib/libcrypt/crypt.c (1.2) by Poul-Henning Kamp.
Mark's port can be found in ActiveState ASPN Python Cookbook. Kudos to
Poul and Mark. -agi
Original license:
* "THE BEER-WARE LICENSE" (Revision 42):
*
* <phk@login.dknet.dk> wrote this file. As long as you retain this
* notice you can do whatever you want with this stuff. If we meet some
* day, and you think this stuff is worth it, you can buy me a beer in
* return.
*
* Poul-Henning Kamp
"""
raw_pw = raw_pw.encode('utf-8')
salt = salt.encode('utf-8')
hash = hashlib.md5()
hash.update( raw_pw + magic + salt )
st = hashlib.md5()
st.update( raw_pw + salt + raw_pw)
stretch = st.digest()
for i in range( 0, len( raw_pw ) ):
hash.update( stretch[i % 16] )
i = len( raw_pw )
while i:
if i & 1:
hash.update('\x00')
else:
hash.update( raw_pw[0] )
i >>= 1
saltedmd5 = hash.digest()
for i in range( 1000 ):
hash = hashlib.md5()
if i & 1:
hash.update( raw_pw )
else:
hash.update( saltedmd5 )
if i % 3:
hash.update( salt )
if i % 7:
hash.update( raw_pw )
if i & 1:
hash.update( saltedmd5 )
else:
hash.update( raw_pw )
saltedmd5 = hash.digest()
itoa64 = './0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
rearranged = ''
for a, b, c in ((0, 6, 12), (1, 7, 13), (2, 8, 14), (3, 9, 15), (4, 10, 5)):
v = ord( saltedmd5[a] ) << 16 | ord( saltedmd5[b] ) << 8 | ord( saltedmd5[c] )
for i in range(4):
rearranged += itoa64[v & 0x3f]
v >>= 6
v = ord( saltedmd5[11] )
for i in range( 2 ):
rearranged += itoa64[v & 0x3f]
v >>= 6
return magic + salt + '$' + rearranged
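# Small self-check sketch for md5crypt() above (illustrative only; Python 2
# like the rest of this module, and never called here): the same password/salt
# pair always reproduces the same crypt string, whose prefix carries the magic
# and the salt.
def _md5crypt_example():
    crypted = md5crypt('secret', 'ab012345')
    assert crypted == md5crypt('secret', 'ab012345')
    assert crypted.startswith(magic_md5 + 'ab012345' + '$')
    return crypted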
def sh256crypt(cls, password, salt, magic=magic_sha256):
iterations = 1000
# see http://en.wikipedia.org/wiki/PBKDF2
result = password.encode('utf8')
    for i in xrange(iterations):
        result = hmac.HMAC(result, salt, hashlib.sha256).digest() # uses HMAC (RFC 2104) to apply salt
    result = result.encode('base64') # doesn't seem to be crypt(3) compatible
return '%s%s$%s' % (magic_sha256, salt, result)
class res_users(osv.osv):
_inherit = "res.users"
def init(self, cr):
"""Encrypt all passwords at module installation"""
cr.execute("SELECT id, password FROM res_users WHERE password IS NOT NULL and password != ''")
for user in cr.fetchall():
self._set_encrypted_password(cr, user[0], user[1])
def _set_encrypted_password(self, cr, uid, plain_password):
"""Set an encrypted password for a given user"""
salt = gen_salt()
stored_password_crypt = md5crypt(plain_password, salt)
cr.execute("UPDATE res_users SET password = '', password_crypt = %s WHERE id = %s",
(stored_password_crypt, uid))
def set_pw(self, cr, uid, id, name, value, args, context):
if value:
self._set_encrypted_password(cr, id, value)
del value
def get_pw( self, cr, uid, ids, name, args, context ):
cr.execute('select id, password from res_users where id in %s', (tuple(map(int, ids)),))
stored_pws = cr.fetchall()
res = {}
for id, stored_pw in stored_pws:
res[id] = stored_pw
return res
_columns = {
'password': fields.function(get_pw, fnct_inv=set_pw, type='char', string='Password', invisible=True, store=True),
'password_crypt': fields.char(string='Encrypted Password', invisible=True),
}
def check_credentials(self, cr, uid, password):
# convert to base_crypt if needed
cr.execute('SELECT password, password_crypt FROM res_users WHERE id=%s AND active', (uid,))
if cr.rowcount:
stored_password, stored_password_crypt = cr.fetchone()
if stored_password and not stored_password_crypt:
                self._set_encrypted_password(cr, uid, stored_password)
|
try:
return super(res_users, self).check_credentials(cr, uid, password)
except openerp.exceptions.AccessDenied:
# check md5crypt
if stored_password_crypt:
                if stored_password_crypt[:len(magic_md5)] == magic_md5:
|
salt = stored_password_crypt[len(magic_md5):11]
if stored_password_crypt == md5crypt(password, salt):
return
elif stored_password_crypt[:len(magic_md5)] == magic_sha256:
salt = stored_password_crypt[len(magic_md5):11]
if stored_password_crypt == md5crypt(password, salt):
return
# Reraise password incorrect
raise
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
SANDEISON/The-Huxley
|
Python/Sorveteria Tropical,py.py
|
Python
|
gpl-3.0
| 351
| 0.005698
|
sabor = input()
quantidade = int(input())
if sabor.lower() == "morango" or sabor.lower() == "cereja":
total = quantidade*4.50
elif sabor.lower() == "damasco" or sabor.lower() == "siriguela":
    total = quantidade*3.80
|
else:
total = quantidade*2.75
print("%.2f"%total)
if quantidade > 2:
print ("COM CALDA")
else:
    print("SEM CALDA")
|
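# Worked check of the pricing rules above (a sketch using the same thresholds):
# 3 scoops of "Morango" cost 3 * 4.50 = 13.50 and, being more than 2, get syrup.
def _preco(sabor, quantidade):
    sabor = sabor.lower()
    if sabor in ("morango", "cereja"):
        return quantidade * 4.50
    if sabor in ("damasco", "siriguela"):
        return quantidade * 3.80
    return quantidade * 2.75

assert _preco("Morango", 3) == 13.50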
|
pimutils/todoman
|
todoman/__init__.py
|
Python
|
isc
| 133
| 0
|
from todoman import version  # type: ignore
|
__version__ = version.version
__documentation__ = "https://todoman.rtfd.org/en/latest/"
|
|
niccokunzmann/ledtable
|
python/ledtable/SerialLEDTable.py
|
Python
|
mit
| 4,859
| 0.004322
|
#!/usr/bin/python3
from . tkLEDTable import *
try:
import serial
except:
import sys
    print("Install the serial module with '{} -m pip install PySerial'.".format(sys.executable))
raise
import threading
try:
from queue import Queue
except ImportError:
from Queue import Queue
import itertools
def list_serial_ports():
## got the code from
## http://stackoverflow.com/questions/12090503/listing-available-com-ports-with-python
import os
from serial.tools import list_ports
# Windows
if os.name == 'nt':
# Scan for available ports.
available = []
for i in range(256):
try:
s = serial.Serial(i)
available.append('COM'+str(i + 1))
s.close()
except serial.SerialException:
pass
return available
else:
# Mac / Linux
return [port[0] for port in list_ports.comports()]
class SerialLEDTable(threading.Thread):
COMMAND_CHARACTER = b"!"
|
def __init__(self, led_table, file):
super(SerialLEDTable, self).__init__()
        self.led_table = led_table
|
self.file = file
def run(self):
for line in self.file:
if not line.startswith(self.COMMAND_CHARACTER):
self.default_line(line)
else:
self.command_line(line[1:])
def default_line(self, line):
if line.endswith(b"\n"):
line = line[:-1]
if line.endswith(b"\r"):
line = line[:-1]
try:
line = line.decode("ASCII")
except UnicodeDecodeError:
pass
print(line)
def command_line(self, line):
command = line.split()
if command[0] == b"p":
height = int(command[1])
self.set_pixels(height)
elif command[0] == b"o":
height = int(command[1])
self.set_pixel_order(height)
else:
print("Invalid Command: {}".format(command))
def set_pixel_order(self, height):
indices = [[(int(i), x, y) for x, i in enumerate(self.file.readline().rstrip().split())]
for y in range(height)]
indices = list(itertools.chain.from_iterable(indices))
indices.sort()
coordinates = [(x, y) for i, x, y in indices]
self.set_pixel_order_on_ledtable(coordinates)
def set_pixel_order_on_ledtable(self, coordinates):
self.led_table.show_pixel_order(coordinates)
def set_pixels(self, height):
for y in range(height):
line = self.file.readline().rstrip()
pixels = line.decode("ASCII").split("#")[1:]
for x, pixel in enumerate(pixels):
self.set_pixel(x, y, pixel)
def set_pixel(self, x, y, color):
if not color:
return # nothing happened
if len(color) > 6:
color = color[-6:]
else:
color = color.rjust(6, "0")
color = "#" + color
self.set_pixel_on_ledtable(x, y, color)
def set_pixel_on_ledtable(self, x, y, color):
if x >= self.led_table.width:
self.led_table.width = x + 1
if y >= self.led_table.height:
self.led_table.height = y + 1
self.led_table.set_pixel_color(x, y, color)
class TkSerialLEDTable(SerialLEDTable):
def __init__(self, *args, **kw):
super(TkSerialLEDTable, self).__init__(*args, **kw)
self.queue = Queue()
self.led_table.after(1, self.loop)
def set_pixel_on_ledtable(self, *args):
self.queue.put((super(TkSerialLEDTable, self).set_pixel_on_ledtable, args, {}))
def set_pixel_order_on_ledtable(self, *args):
self.queue.put((super(TkSerialLEDTable, self).set_pixel_order_on_ledtable, args, {}))
def loop(self):
while not self.queue.empty():
func, args, kw = self.queue.get(block = False)
func(*args, **kw)
self.led_table.after(1, self.loop)
if __name__ == "__main__":
import sys
if not sys.argv[1:]:
ports = list_serial_ports()
if not ports:
print("No serial ports detected. You should pass one as argument.")
else:
print('The first argument should be a serial port, e.g. "{}"'.format(
"\", \"".join(map(str, ports))))
print("The second argument can be the baud rate of the serial connection.")
print("If there is no second argument we assume 9600.")
exit(1)
port = sys.argv[1]
if len(sys.argv) >= 3:
baudrate = sys.argv[2]
else:
baudrate = 9600
s = serial.serial_for_url(port, baudrate = baudrate)
t = LEDTk()
t.title(port)
ts = TkSerialLEDTable(t, s)
    ts.daemon = True
ts.start()
try:
t.mainloop()
finally:
s.close()
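# Hedged aside: set_pixel() above normalises colour fragments by keeping the
# last six hex digits or left-padding with zeros. A stand-alone sketch of just
# that rule (hypothetical helper name):
def _normalize_color(fragment):
    if len(fragment) > 6:
        fragment = fragment[-6:]
    else:
        fragment = fragment.rjust(6, "0")
    return "#" + fragment

assert _normalize_color("ff0000") == "#ff0000"
assert _normalize_color("f") == "#00000f"
assert _normalize_color("12345678") == "#345678"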
|
Linhua-Sun/p4-phylogenetics
|
p4/Data.py
|
Python
|
gpl-2.0
| 31,372
| 0.005355
|
from Alignment import Alignment
import sys,time,os
import pf,func
from Var import var
from Glitch import Glitch
class Data:
"""All the alignments that you want to work with, in one place.
Initialize this with one of
- nothing (or None),
- a list of Alignment objects, or
- a single Alignment object.
If you initialize with nothing (or None), then all alignments in
var.alignments are used. If you initialize with a list of
alignments, then that is used. You can initialize with an empty
list to get an empty Data object.
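    Construction patterns, for illustration:
      d = Data()                  # uses every Alignment already in var.alignments
      d = Data([alig1, alig2])    # an explicit list of Alignment objects
      d = Data(alig)              # a single Alignment
      d = Data([])                # an empty Data object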
"""
#def __del__(self, freeData=pf.freeData, dp_freeData=pf.dp_freeData):
def __del__(self, freeData=pf.freeData):
if self.alignments:
for a in self.alignments:
a.parts = []
self.alignments = None
if self.parts:
#print len(self.parts)
for p in self.parts:
#if p.cPart:
# freePart(p.cPart) # this is not as good as p.__del__(), as it leaves some memory un-freed
p.__del__()
#else:
# print 0
if self.cData:
if self.doDataPart:
dp_freeData(self.cData)
else:
freeData(self.cData)
self.cData = None
self.parts = None
self.taxNames = None
## ##Ignore
## def wipe(self):
## if self.cData:
## freeData(self.cData)
## self.cData = None
## for p in self.parts:
## if p.cPart:
## pf.freePart(p.cPart)
## p.cPart = None
## del(p)
## self.taxNames = None
## for a in self.alignments:
## del(a)
## self.alignments = None
def __init__(self, alignments=None):
gm = ['Data.__init__()']
self.nParts = 0
self.parts = []
self.alignments = []
self.nTax = 0
self.taxNames = []
self.cData = None
self.unconstrainedLogLikelihood = None
if alignments:
if isinstance(alignments, Alignment):
#Passed in a single alignment object not a list
alignments = [alignments]
else:
if type(alignments) != type([]):
gm.append("The 'alignments' arg should be a list or a single Alignment object.")
raise Glitch, gm
for a in alignments:
if isinstance(a, Alignment):
pass
else:
gm.append("Something in the 'alignments' arg was not an Alignment.")
raise Glitch, gm
self._fill(alignments)
elif alignments == []:
pass
elif var.alignments:
self._fill(var.alignments)
# temporary! Only used in __del__()
self.doDataPart = var.doDataPart
def dump(self):
"""Print rubbish about self."""
print "Data dump"
if self.nParts == 1:
if var.doDataPart:
print " There is 1 dataPart"
else:
print " There is 1 part"
else:
if var.doDataPart:
print " There are %i dataParts" % self.nParts
else:
print " There are %i parts" % self.nParts
for p in self.parts:
print " name=%s, nChar %i, dataType %s, cPart %s" % \
(p.name, p.nChar, p.dataType, p.cPart)
print " There are %i taxa" % self.nTax
if len(self.alignments) == 1:
print " There is 1 alignment"
else:
print " There are %i alignments" % len(self.alignments)
if self.cData:
print " The cData is %s" % self.cData
else:
print " There is no cData"
if self.unconstrainedLogLikelihood:
print " The unconstrainedLogLikelihood is %s" % self.unconstrainedLogLikelihood
else:
pass
def _fill(self, alignments):
# Fill self with Parts from all alignments.
#
# This method is called from __init__(), and it is generally
# not needed on its own. If we get here, we can be fairly sure
# that arg alignments is a non-empty list of Alignment
# objects. This method calls the Alignment method _initParts()
gm = ["Data._fill()"]
self.alignments = alignments
# Make a part out of the first alignment.
if not len(self.alignments):
gm.append("There are no alignments")
raise Glitch, gm
a = self.alignments[0]
if var.doDataPart:
a.initDataParts()
else:
a._initParts()
if not len(a.parts):
gm.append("First alignment failed to make a part")
raise Glitch, gm
self.taxNames = a.taxNames
self.nTax = len(self.taxNames)
for p in a.parts:
self.parts.append(p)
self.nParts = len(self.parts)
# Now do subsequent alignments ...
for aligNum in range(len(self.alignments))[1:]:
a = self.alignments[aligNum]
if self.nTax != len(a.sequences):
gm.append("Additional alignment is not the same size as the first alignment.")
if a.fName:
gm.append('(New alignment from file %s.)' % a.fName)
gm.append("From the first alignment, nTax is %s." % self.nTax)
gm.append("However, (zero-based) alignment %i has %i sequences." % (aligNum, len(a.sequences)))
raise Glitch, gm
if self.nTax != len(a.taxNames):
gm.append("Additional alignment appears to be not the same size as the first alignment.")
if a.fName:
gm.append('(New alignment from file %s.)' % a.fName)
gm.append("From the first alignment, nTax is %s." % self.nTax)
gm.append("However, (zero-based) alignment %i has %i taxNames." % (aligNum, len(a.taxNames)))
raise Glitch, gm
for i in range(self.nTax):
if self.taxNames[i] != a.taxNames[i]:
gm.append("Name mis-match in (zero-based) taxon number %i," % i)
gm.append("in (zero-based) alignment %i." % aligNum)
if a.fName:
gm.append('(New alignment from file %s.)' % a.fName)
gm.append("Newly-added alignment taxname %s is not the" % a.taxNames[i])
gm.append(" same as first alignment taxname %s" % self.taxNames[i])
raise Glitch, gm
if var.doDataPart:
a.initDataParts()
else:
a._initParts()
if not len(a.parts):
gm.append("Additional alignment failed to make a part.")
if a.fName:
gm.append('(New alignment from file %s.)' % a.fName)
raise Glitch, gm
for p in a.parts:
self.parts.append(p)
self.nParts = len(self.parts)
def calcUnconstrainedLogLikelihood1(self):
"""Calculate likelihood under the multinomial model.
This calculates the unconstrained (multinomial) log like
without regard to character partitions. The result is placed
in the data variable unconstrainedLogLikelihood. If there is
more than one partition, it makes a new temporary alignment
and puts all the sequences in one part in that alignment. So
it ultimately only works on one data partition.
|
If there is
more than one alignment, there is possibly more than one
datatype, and so this method will refuse to do it. Note that
the unconstrained log like of the combined data is not the sum
        of the unconstrained log likes of the separate partitions.
|
See also calcUnconstrainedLogLikelihood2
"""
if len(self.alignments) > 1:
            gm = ["Data.calcUnconstrainedLogLikelihood1()"]
|
Skreex/LPTHW
|
ex14.py
|
Python
|
mit
| 529
| 0.00189
|
from sys import argv
script, user_name = argv
prompt = '> '
print "Hi %s, I'm the %s script." % (user_name, script)
print "I'd like to ask you a few questions."
print "Do you like me %s?" % user_name
|
likes = raw_input(prompt)
print "Where do you live %s?" % user_name
lives = raw_input(prompt)
print "What kind of computer do you have?"
computer = raw_input(prompt)
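# Side note on the %r conversions used below: %r formats with repr(), so the
# echoed answers keep their quotes (illustrative check):
assert "You said %r" % "yes" == "You said 'yes'"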
print """
Alright, so you said %r about liking me.
You live in %r. Not sure where that is.
And you have a %r computer. Nice.
""" % (likes, lives, computer)
| |
dwilmer/rcpsp-testing-framework
|
dataset.py
|
Python
|
mit
| 3,291
| 0.036159
|
import string, copy
def joinHeaders(first, second, joined, on):
joined.headers = first.headers[:]
mappedHeaders = {}
for header in second.headers:
if header == on:
continue
i = 0
newHeader = header
while newHeader in first.headers:
newHeader = '{0}_{1}'.format(newHeader, i)
i += 1
if i > 0:
mappedHeaders[header] = newHeader
joined.headers.append(newHeader)
return mappedHeaders
def mergeRow(row, toMerge, mappedHeaders):
for header in toMerge:
if header in mappedHeaders:
row[mappedHeaders[header]] = toMerge[header]
else:
row[header] = toMerge[header]
def mergeRows(first, second, joined, on, mappedHeaders):
joined.rows = copy.deepcopy(first.rows)
secondRows = copy.deepcopy(second.rows)
for secondRow in secondRows:
pivot = secondRow[on]
for row in joined.rows:
if row[on] == pivot:
mergeRow(row, secondRow, mappedHeaders)
break
else:
newRow = {}
mergeRow(newRow, secondRow, mappedHeaders)
joined.rows.append(newRow)
class Dataset:
def __init__(self, filename = '', separator=',', header=True):
self.headers = []
self.rows = []
try:
infile = file(filename, 'r')
if header:
self.headers = infile.readline().strip().split(separator)
for line in infile:
row = line.strip().split(separator)
if not header and not self.headers:
self.headers = ["V{0}".format(i) for i in range(len(row))]
self.rows.append({self.headers[i]:row[i] for i in range(len(row))})
infile.close()
except IOError:
pass
def export(self, filename):
outfile = file(filename, 'w')
outfile.write(','.join(self.headers))
for row in self.rows:
outfile.write('\n')
outfile.write(','.join([row[x] for x in self.headers]))
outfile.close()
def join(self, other, on):
"""Join self dataset with another dataset, creating
|
a new dataset.
The original datasets remain unchanged.
        The third argument is the header on which to join"""
|
# check for correct join
        if not (on in self.headers and on in other.headers):
print "Error: header '{0}' not found in both collections".format(on)
return None
# create new dataset
joined = Dataset()
# fill new dataset with combined data
mappedHeaders = joinHeaders(self, other, joined, on)
mergeRows(self, other, joined, on, mappedHeaders)
joined.ensureFilled()
# return newly created dataset
return joined
def pivot(self):
"""Pivot this dataset into a new one, discarding current headers, using first column as new headers"""
pivoted = Dataset()
for (index, header) in enumerate(self.headers):
for row in self.rows:
if index == 0:
pivoted.headers.append(row[header])
else:
if len(pivoted.rows) < index:
pivoted.rows.extend([{} for x in range(index - len(pivoted.rows))])
pivoted.rows[index - 1][row[self.headers[0]]] = row[header]
return pivoted
def ensureFilled(self):
for row in self.rows:
for header in self.headers:
if not header in row:
row[header] = None
def append(self, other, ensureFilled = True):
"""Append rows of another dataset to this one, leaving the other dataset unchanged"""
self.rows.extend(other.rows)
self.headers.extend([x for x in other.headers if not x in self.headers])
if(ensureFilled):
self.ensureFilled()
return self
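# Usage sketch for the Dataset class above (Python 2, built by hand because
# __init__ only reads from a file; the field names here are made up):
def _join_example():
    first = Dataset()
    first.headers = ['id', 'a']
    first.rows = [{'id': '1', 'a': 'x'}]
    second = Dataset()
    second.headers = ['id', 'b']
    second.rows = [{'id': '1', 'b': 'y'}]
    joined = first.join(second, 'id')
    assert joined.headers == ['id', 'a', 'b']
    assert joined.rows == [{'id': '1', 'a': 'x', 'b': 'y'}]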
|
daviddrysdale/python-phonenumbers
|
python/phonenumbers/data/region_881.py
|
Python
|
apache-2.0
| 569
| 0.008787
|
"""Auto-generated file, do not edit by hand. 881 metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_881 = PhoneMetadata(id='001', country_code=881, international_prefix=None,
|
general_desc=PhoneNumberDesc(national_number_pattern='[0-36-9]\\d{8}', possible_length=(9,)),
mobile=PhoneNumberDesc(national_number_pattern='[0-36-9]\\d{8}', example_number='612345678', possible_length=(9,)),
    number_format=[NumberFormat(pattern='(\\d)(\\d{3})(\\d{5})', format='\\1 \\2 \\3', leading_digits_pattern=['[0-36-9]'])])
|
|
chessco/cursus
|
controllers/utils.py
|
Python
|
apache-2.0
| 25,293
| 0.000474
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers that are not directly related to course content."""
__author__ = 'Saifu Angto (saifu@google.com)'
import base64
import hmac
import os
import time
import urlparse
import webapp2
import appengine_config
from common import jinja_utils
from models import models
from models import transforms
from models.config import ConfigProperty
from models.config import ConfigPropertyEntity
from models.courses import Course
from models.models import Student
from models.models import StudentProfileDAO
from models.models import TransientStudent
from models.roles import Roles
from google.appengine.api import namespace_manager
from google.appengine.api import users
# The name of the template dict key that stores a course's base location.
COURSE_BASE_KEY = 'gcb_course_base'
# The name of the template dict key that stores data from course.yaml.
COURSE_INFO_KEY = 'course_info'
TRANSIENT_STUDENT = TransientStudent()
XSRF_SECRET_LENGTH = 20
XSRF_SECRET = ConfigProperty(
'gcb_xsrf_secret', str, (
'Text used to encrypt tokens, which help prevent Cross-site request '
'forgery (CSRF, XSRF). You can set the value to any alphanumeric text, '
'preferably using 16-64 characters. Once you change this value, the '
'server rejects all subsequent requests issued using an old value for '
'this variable.'),
'course builder XSRF secret')
# Whether to record page load/unload events in a database.
CAN_PERSIST_PAGE_EVENTS = ConfigProperty(
'gcb_can_persist_page_events', bool, (
'Whether or not to record student page interactions in a '
'datastore. Without event recording, you cannot analyze student '
'page interactions. On the other hand, no event recording reduces '
'the number of datastore operations and minimizes the use of Google '
'App Engine quota. Turn event recording on if you want to analyze '
'this data.'),
False)
# Whether to record tag events in a database.
CAN_PERSIST_TAG_EVENTS = ConfigProperty(
'gcb_can_persist_tag_events', bool, (
'Whether or not to record student tag interactions in a '
'datastore. Without event recording, you cannot analyze student '
'tag interactions. On the other hand, no event recording reduces '
'the number of datastore operations and minimizes the use of Google '
'App Engine quota. Turn event recording on if you want to analyze '
'this data.'),
False)
# Whether to record events in a database.
CAN_PERSIST_ACTIVITY_EVENTS = ConfigProperty(
'gcb_can_persist_activity_events', bool, (
'Whether or not to record student activity interactions in a '
'datastore. Without event recording, you cannot analyze student '
'activity interactions. On the other hand, no event recording reduces '
'the number of datastore operations and minimizes the use of Google '
'App Engine quota. Turn event recording on if you want to analyze '
'this data.'),
False)
# Date format string for displaying datetimes in UTC.
# Example: 2013-03-21 13:00 UTC
HUMAN_READABLE_DATETIME_FORMAT = '%Y-%m-%d, %H:%M UTC'
# Date format string for displaying dates. Example: 2013-03-21
HUMAN_READABLE_DATE_FORMAT = '%Y-%m-%d'
# Time format string for displaying times. Example: 01:16:40 UTC.
HUMAN_READABLE_TIME_FORMAT = '%H:%M:%S UTC'
class PageInitializer(object):
"""Abstract class that defines an interface to initialize page headers."""
@classmethod
def initialize(cls, template_value):
raise NotImplementedError
class DefaultPageInitializer(PageInitializer):
"""Implements default page initializer."""
@classmethod
def initialize(cls, template_value):
pass
class PageInitializerService(object):
"""Installs the appropriate PageInitializer."""
_page_initializer = DefaultPageInitializer
@classmethod
def get(cls):
return cls._page_initializer
@classmethod
def set(cls, page_initializer):
cls._page_initializer = page_initializer
class ReflectiveRequestHandler(object):
"""Uses reflection to handle custom get() and post() requests.
Use this class as a mix-in with any webapp2.RequestHandler to allow request
dispatching to multiple get() and post() methods based on the 'action'
parameter.
Open your existing webapp2.RequestHandler, add this class as a mix-in.
Define the following class variables:
default_action = 'list'
get_actions = ['default_action', 'edit']
post_actions = ['save']
Add instance methods named get_list(self), get_edit(self), post_save(self).
These methods will now be called automatically based on the 'action'
GET/POST parameter.
"""
def create_xsrf_token(self, action):
return XsrfTokenManager.create_xsrf_token(action)
def get(self):
"""Handles GET."""
action = self.request.get('action')
if not action:
action = self.default_action
if action not in self.get_actions:
|
self.error(404)
return
handler = getattr(self, 'get_%s' % action)
if not handler:
self.error(404)
return
return handler()
def post(self):
"""Handles POST."""
action = self.request.get('action')
if not action or action not in self.post_actions:
self.error(404)
return
        handler = getattr(self, 'post_%s' % action)
if not handler:
self.error(404)
return
# Each POST request must have valid XSRF token.
xsrf_token = self.request.get('xsrf_token')
if not XsrfTokenManager.is_xsrf_token_valid(xsrf_token, action):
self.error(403)
return
return handler()
class ApplicationHandler(webapp2.RequestHandler):
"""A handler that is aware of the application context."""
@classmethod
def is_absolute(cls, url):
return bool(urlparse.urlparse(url).scheme)
@classmethod
def get_base_href(cls, handler):
"""Computes current course <base> href."""
base = handler.app_context.get_slug()
if not base.endswith('/'):
base = '%s/' % base
# For IE to work with the <base> tag, its href must be an absolute URL.
if not cls.is_absolute(base):
parts = urlparse.urlparse(handler.request.url)
base = urlparse.urlunparse(
(parts.scheme, parts.netloc, base, None, None, None))
return base
def __init__(self, *args, **kwargs):
super(ApplicationHandler, self).__init__(*args, **kwargs)
self.template_value = {}
def get_template(self, template_file, additional_dirs=None):
"""Computes location of template files for the current namespace."""
self.template_value[COURSE_INFO_KEY] = self.app_context.get_environ()
self.template_value['is_course_admin'] = Roles.is_course_admin(
self.app_context)
self.template_value[
'is_read_write_course'] = self.app_context.fs.is_read_write()
self.template_value['is_super_admin'] = Roles.is_super_admin()
self.template_value[COURSE_BASE_KEY] = self.get_base_href(self)
template_environ = self.app_context.get_template_environ(
self.template_value[COURSE_INFO_KEY]['course']['locale'],
additional_dirs
)
template_environ.filters[
'gcb_tags'] = jinja_utils.get_gcb_tags_filter(self)
        return template_environ.get_template(template_file)
|
ielnehc/ltmh
|
tools/get_public_ip.py
|
Python
|
mit
| 610
| 0.02623
|
#!/usr/bin/env python
import re,urllib2
class Get_public_ip:
def getip(self):
try:
myip = self.visit("http://www.whereismyip.com/")
except:
try:
myip = self.visit("http://www.ip138.
|
com/ip2city.asp")
|
except:
myip = "So sorry!!!"
return myip
def visit(self,url):
opener = urllib2.urlopen(url)
if url == opener.geturl():
str = opener.read()
return re.search('\d+\.\d+\.\d+\.\d+',str).group(0)
if __name__ == "__main__":
getmyip = Get_public_ip()
print getmyip.getip()
|
singularityhub/sregistry
|
shub/apps/base/sitemap.py
|
Python
|
mpl-2.0
| 917
| 0.001091
|
"""
Copyright (C) 2017-2021 Vanessa Sochat.
This Source Code Form is subject to the terms of the
Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed
with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
from django.contrib.sitemaps import Sitemap
from shub.apps.main.models import Container, Collection
class BaseSitemap(Sitemap):
priority = 0.5
def location(self, obj):
return obj.get_absolute_url()
class ContainerSitemap(BaseSitemap):
    changefreq = "weekly"
def lastmod(self, obj):
return obj.build_date
def items(self):
        return [x for x in Container.objects.all() if x.collection.private is False]
class CollectionSitemap(BaseSitemap):
changefreq = "weekly"
def lastmod(self, obj):
return obj.modify_date
def items(self):
return [x for x in Collection.objects.all() if x.private is False]
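# Illustrative wiring sketch (an assumption, not part of this file): these
# sitemap classes are typically exposed through django.contrib.sitemaps.views,
# for example in a urls.py along the lines of
#
#     from django.contrib.sitemaps.views import sitemap
#     from django.urls import path
#     from shub.apps.base.sitemap import ContainerSitemap, CollectionSitemap
#
#     sitemaps = {"containers": ContainerSitemap, "collections": CollectionSitemap}
#     urlpatterns = [
#         path("sitemap.xml", sitemap, {"sitemaps": sitemaps}, name="sitemap"),
#     ]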
|
AndreasAntener/mavlink
|
pymavlink/examples/__init__.py
|
Python
|
lgpl-3.0
| 88
| 0.011364
|
'''This folder contains various example scripts demonstrating
MAVLink functionality.'''
|
ThomasStivers/nvda-notepadPlusPlus
|
addon/appModules/notepad++/autocomplete.py
|
Python
|
gpl-2.0
| 210
| 0.033333
|
from NVDAObjects.IAccessible import IAccessible
import nvwave
import speech
import os
class AutocompleteList(IAccessible):
	def event_selection(self):
speech.cancelSpeech()
speech.speakText(self.name)
|
roy-boy/python_scripts
|
file_parser.py
|
Python
|
gpl-3.0
| 847
| 0
|
"""file_parser.py reads text file and parse the item into a list."""
def file_to_list(input_file):
data_list_trim = []
try:
with open(input_file) as in_put:
input_data = in_put.readlines()
if len(input_data) == 1:
print()
data_list = input_data[0].replace('"', '').strip()
                data_list_trim = data_list.split(',')
elif len(input_data) > 1:
print()
for row in input_data:
row_list = row.replace('"', '').strip()
row_list_trim = row_list.split(',')
data_list_trim = data_list_trim + row_list_trim
else:
                print('no content in the file')
    except OSError as err:
print('Failed to open file', err)
return data_list_trim
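if __name__ == '__main__':
    # Minimal usage sketch; the file name below is an assumption for
    # illustration only and is not part of the original module.
    parsed_items = file_to_list('items.txt')
    print(parsed_items)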
|
iulian787/spack
|
var/spack/repos/builtin/packages/r-bigmemory-sri/package.py
|
Python
|
lgpl-2.1
| 708
| 0.00565
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RBigmemorySri(RPackage):
"""This package provides a shared resource interface
for the bigmemory and synchronicity packages."""
homepage = "https://cloud.r-project.org/web/packages/bigmemory.sri/index.html"
url = "https://cloud.r-project.org/src/contrib/bigmemory.sri_0.1.3.tar.gz"
list_url = "
|
https://cloud.r-project.org/src/contrib/Archive/bigmemory.sri"
version('0.1.3', sha256='55403252d8bae9627476d1f553236ea5dc7aa6e54da6980526a6cdc66924e155')
|
littlezz/ESL-Model
|
tests/test_ch4.py
|
Python
|
mit
| 5,437
| 0.002391
|
import pytest
from .utils import digit_float
import numpy as np
vowel_data_y_dimension = 11
@pytest.fixture
def vowel_data():
from esl_model.datasets import VowelDataSet
data = VowelDataSet()
return data.return_all()
@pytest.fixture
def SAHeart_data():
from esl_model.datasets import SAHeartDataSet
data = SAHeartDataSet()
return data.return_all()
def test_vowel_data():
from esl_model.datasets import VowelDataSet
data = VowelDataSet()
assert list(data.train_y[:5]) == list(range(1, 6))
data.select_features = data.feature_names[:2]
assert np.array_equal(data.train_x[:1], data._train_x.iloc[:1, :2].values)
ft = list(range(3))
data.select_features = ft
assert np.array_equal(data.train_x[:1], data._train_x.iloc[:1, ft].values)
def test_indicator_matrix(vowel_data):
from esl_model.ch4.models import LinearRegressionIndicatorMatrix
train_x, train_y, test_x, test_y, features = vowel_data
lrm = LinearRegressionIndicatorMatrix(train_x=train_x, train_y=train_y, n_class=vowel_data_y_dimension)
lrm.pre_processing()
lrm.train()
print(lrm.error_rate)
test_result = lrm.test(test_x, test_y)
print(test_result.error_rate)
assert digit_float(lrm.error_rate) == 0.477
assert digit_float(test_result.error_rate) == 0.667
def test_LDA(vowel_data):
from esl_model.ch4.models import LDAModel
    train_x, train_y, test_x, test_y, features = vowel_data
lda = LDAModel(train_x=train_x, train_y=train_y, n_class=vowel_data_y_dimension)
lda.pre_processing()
lda.train()
print(lda.y_hat[:10])
print(lda.error_rate)
te = lda.test(test_x, test_y)
print(te.error_rate)
assert digit_float(lda.error_rate) == 0.316
assert digit_float(te.error_rate) == 0.556
def test_QDA(vowel_data):
    from esl_model.ch4.models import QDAModel
train_x, train_y, test_x, test_y, features = vowel_data
qda = QDAModel(train_x=train_x, train_y=train_y, n_class=vowel_data_y_dimension)
qda.pre_processing()
qda.train()
print(qda.y_hat[:10])
print(qda.error_rate)
te = qda.test(test_x, test_y).error_rate
print(te)
assert digit_float(qda.error_rate) == 0.011
assert digit_float(te) == 0.528
def test_RDA(vowel_data):
from esl_model.ch4.models import RDAModel
train_x, train_y, test_x, test_y, features = vowel_data
# http://waxworksmath.com/Authors/G_M/Hastie/WriteUp/weatherwax_epstein_hastie_solutions_manual.pdf
# pp 60
model = RDAModel(train_x=train_x, train_y=train_y, n_class=vowel_data_y_dimension, alpha=0.969697)
model.pre_processing()
model.train()
print(model.error_rate)
te = model.test(test_x, test_y)
print(te.error_rate)
assert digit_float(te.error_rate) == 0.478
def test_LDA_computation(vowel_data):
from esl_model.ch4.models import LDAForComputation
train_x, train_y, test_x, test_y, features = vowel_data
model = LDAForComputation(train_x=train_x, train_y=train_y, n_class=vowel_data_y_dimension)
model.pre_processing()
model.train()
from esl_model.ch4.models import LDAModel
lda = LDAModel(train_x=train_x, train_y=train_y, n_class=vowel_data_y_dimension)
lda.pre_processing()
lda.train()
print(model.error_rate)
assert np.isclose(model.error_rate, lda.error_rate)
assert np.isclose(model.test(test_x, test_y).error_rate, lda.test(test_x, test_y).error_rate)
def test_RRLDA(vowel_data):
from esl_model.ch4.models import ReducedRankLDAModel
train_x, train_y, test_x, test_y, features = vowel_data
model = ReducedRankLDAModel(train_x=train_x, train_y=train_y, n_class=vowel_data_y_dimension, L=2)
model.pre_processing()
model.train()
print(model.y_hat[:5])
print(model.error_rate)
te = model.test(test_x, test_y)
print(te.error_rate)
assert digit_float(model.error_rate) == 0.350
assert digit_float(te.error_rate) == 0.491
def test_SAHeart_data_set(SAHeart_data):
x, y, *_ = SAHeart_data
assert x[1, 2] == 4.41
assert list(y[:4]) == [1, 1, 0, 1]
def test_binary_logistic_regression(SAHeart_data):
from esl_model.datasets import SAHeartDataSet
data = SAHeartDataSet(select_features=[1, 2, 4, 8])
from esl_model.ch4.models import BinaryLogisticRegression
train_x = data.train_x
train_y = data.train_y
model = BinaryLogisticRegression(train_x=train_x, train_y=train_y, n_class=2, do_standardization=False)
model.pre_processing()
model.train()
print(model.beta_hat)
print(model.error_rate)
print('yhat', model.y_hat[:5])
print(repr(model.std_err))
print('z score', model.z_score)
eq_beta_hat = np.array([[-4.20427542],
[0.08070059],
[0.16758415],
[0.92411669],
[0.04404247]])
eq_std_err = np.array([0.498348, 0.02551477, 0.05418979, 0.22318295, 0.00974321])
assert np.allclose(model.beta_hat, eq_beta_hat)
assert digit_float(model.error_rate) == 0.268
assert np.allclose(model.std_err, eq_std_err)
data = SAHeartDataSet(select_features=[0, 1, 2, 4, 6, 7, 8])
train_x = data.train_x
train_y = data.train_y
model = BinaryLogisticRegression(train_x=train_x, train_y=train_y, n_class=2, do_standardization=False)
model.pre_processing()
model.train()
assert digit_float(model.error_rate) == 0.271
|
danfr/RemoteTV
|
Server/bin/Setup.py
|
Python
|
mit
| 269
| 0.003717
|
import os
import sys
from pathlib import Path
class Setup:
CONFIGURATION_FILE = os.path.join(Path(__file__).parents[1], "config", "server.cfg")
    VLC_DEFAULT_COMMAND = "vlc -f"
    POSIX = 'posix' in sys.builtin_module_names
VLC_PLAYLIST_END = "vlc://quit"
|
likelyzhao/mxnet
|
example/rcnn/rcnn/symbol/proposal_target.py
|
Python
|
apache-2.0
| 3,893
| 0.002312
|
"""
Proposal Target Operator selects foreground and background roi and assigns label, bbox_transform to them.
"""
from __future__ import print_function
import mxnet as mx
import numpy as np
from distutils.util import strtobool
from rcnn.io.rcnn import sample_rois
DEBUG = False
class ProposalTargetOperator(mx.operator.CustomOp):
def __init__(self, num_classes, batch_images, batch_rois, fg_fraction):
super(ProposalTargetOperator, self).__init__()
self._num_classes = num_classes
self._batch_images = batch_images
self._batch_rois = batch_rois
self._fg_fraction = fg_fraction
if DEBUG:
self._count = 0
self._fg_num = 0
self._bg_num = 0
def forward(self, is_train, req, in_data, out_data, aux):
assert self._batch_rois % self._batch_images == 0, \
            'BATCHIMAGES {} must divide BATCH_ROIS {}'.format(self._batch_images, self._batch_rois)
rois_per_image = self._batch_rois / self._batch_images
fg_rois_per_image = np.round(self._fg_fraction * rois_per_image).astype(int)
all_rois = in_data[0].asnumpy()
gt_boxes = in_data[1].asnumpy()
# Include ground-truth boxes in the set of candidate rois
zeros = np.zeros((gt_boxes.shape[0], 1), dtype=gt_boxes.dtype)
all_rois = np.vstack((all_rois, np.hstack((zeros, gt_boxes[:, :-1]))))
# Sanity check: single batch only
assert np.all(all_rois[:, 0] == 0), 'Only single item batches are supported'
rois, labels, bbox_targets, bbox_weights = \
            sample_rois(all_rois, fg_rois_per_image, rois_per_image, self._num_classes, gt_boxes=gt_boxes)
if DEBUG:
print("labels=", labels)
print('num fg: {}'.format((labels > 0).sum()))
print('num bg: {}'.format((labels == 0).sum()))
self._count += 1
self._fg_num += (labels > 0).sum()
self._bg_num += (labels == 0).sum()
print("self._count=", self._count)
print('num fg avg: {}'.format(self._fg_num / self._count))
print('num bg avg: {}'.format(self._bg_num / self._count))
print('ratio: {:.3f}'.format(float(self._fg_num) / float(self._bg_num)))
for ind, val in enumerate([rois, labels, bbox_targets, bbox_weights]):
self.assign(out_data[ind], req[ind], val)
def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
self.assign(in_grad[0], req[0], 0)
self.assign(in_grad[1], req[1], 0)
@mx.operator.register('proposal_target')
class ProposalTargetProp(mx.operator.CustomOpProp):
def __init__(self, num_classes, batch_images, batch_rois, fg_fraction='0.25'):
super(ProposalTargetProp, self).__init__(need_top_grad=False)
self._num_classes = int(num_classes)
self._batch_images = int(batch_images)
self._batch_rois = int(batch_rois)
self._fg_fraction = float(fg_fraction)
def list_arguments(self):
return ['rois', 'gt_boxes']
def list_outputs(self):
return ['rois_output', 'label', 'bbox_target', 'bbox_weight']
def infer_shape(self, in_shape):
rpn_rois_shape = in_shape[0]
gt_boxes_shape = in_shape[1]
output_rois_shape = (self._batch_rois, 5)
label_shape = (self._batch_rois, )
bbox_target_shape = (self._batch_rois, self._num_classes * 4)
bbox_weight_shape = (self._batch_rois, self._num_classes * 4)
return [rpn_rois_shape, gt_boxes_shape], \
[output_rois_shape, label_shape, bbox_target_shape, bbox_weight_shape]
def create_operator(self, ctx, shapes, dtypes):
return ProposalTargetOperator(self._num_classes, self._batch_images, self._batch_rois, self._fg_fraction)
def declare_backward_dependency(self, out_grad, in_data, out_data):
return []
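# Illustrative usage sketch (argument values are assumptions, not from this
# file): because the operator is registered under 'proposal_target', it can be
# pulled into a symbol graph with mx.symbol.Custom, e.g.
#
#     group = mx.symbol.Custom(rois=rpn_rois, gt_boxes=gt_boxes,
#                              op_type='proposal_target', num_classes=21,
#                              batch_images=1, batch_rois=128,
#                              fg_fraction='0.25')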
|
saketkc/statsmodels
|
statsmodels/tsa/statespace/tests/test_structural.py
|
Python
|
bsd-3-clause
| 9,003
| 0.001666
|
"""
Tests for structural time series models
Author: Chad Fulton
License: Simplified-BSD
"""
from __future__ import division, absolute_import, print_function
import numpy as np
import pandas as pd
import os
import warnings
from statsmodels.datasets import macrodata
from statsmodels.tsa.statespace import structural
from statsmodels.tsa.statespace.structural import UnobservedComponents
from .results import results_structural
from statsmodels.tools import add_constant
from numpy.testing import assert_equal, assert_almost_equal, assert_raises, assert_allclose
from nose.exc import SkipTest
try:
import matplotlib.pyplot as plt
have_matplotlib = True
except ImportError:
have_matplotlib = False
dta = macrodata.load_pandas().data
dta.index = pd.date_range(start='1959-01-01', end='2009-07-01', freq='QS')
def run_ucm(name):
true = getattr(results_structural, name)
for model in true['models']:
kwargs = model.copy()
kwargs.update(true['kwargs'])
# Make a copy of the data
values = dta.copy()
freq = kwargs.pop('freq', None)
if freq is not None:
values.index = pd.date_range(start='1959-01-01', periods=len(dta),
freq=freq)
# Test pandas exog
if 'exog' in kwargs:
# Default value here is pd.Series object
exog = np.log(values['realgdp'])
# Also allow a check with a 1-dim numpy array
if kwargs['exog'] == 'numpy':
exog = exog.values.squeeze()
kwargs['exog'] = exog
# Create the model
mod = UnobservedComponents(values['unemp'], **kwargs)
# Smoke test for starting parameters, untransform, transform
# Also test that transform and untransform are inverses
mod.start_params
assert_allclose(mod.start_params, mod.transform_params(mod.untransform_params(mod.start_params)))
# Fit the model at the true parameters
res_true = mod.filter(true['params'])
# Check that the cycle bounds were computed correctly
freqstr = freq[0] if freq is not None else values.index.freqstr[0]
if freqstr == 'A':
cycle_period_bounds = (1.5, 12)
elif freqstr == 'Q':
cycle_period_bounds = (1.5*4, 12*4)
elif freqstr == 'M':
cycle_period_bounds = (1.5*12, 12*12)
else:
# If we have no information on data frequency, require the
# cycle frequency to be between 0 and pi
cycle_period_bounds = (2, np.inf)
# Test that the cycle frequency bound is correct
assert_equal(mod.cycle_frequency_bound,
(2*np.pi / cycle_period_bounds[1],
2*np.pi / cycle_period_bounds[0])
)
# Test that the likelihood is correct
rtol = true.get('rtol', 1e-7)
atol = true.get('atol', 0)
assert_allclose(res_true.llf, true['llf'], rtol=rtol, atol=atol)
# Smoke test for plot_components
if have_matplotlib:
fig = res_true.plot_components()
plt.close(fig)
# Now fit the model via MLE
with warnings.catch_warnings(record=True) as w:
res = mod.fit(disp=-1)
# If we found a higher likelihood, no problem; otherwise check
# that we're very close to that found by R
if res.llf <= true['llf']:
assert_allclose(res.llf, true['llf'], rtol=1e-4)
# Smoke test for summary
res.summary()
def test_irregular():
run_ucm('irregular')
def test_fixed_intercept():
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
run_ucm('fixed_intercept')
message = ("Specified model does not contain a stochastic element;"
" irregular component added.")
assert_equal(str(w[0].message), message)
def test_deterministic_constant():
|
run_ucm('deterministic_constant')
def test_random_walk():
run_ucm('random_walk')
def test_local_level():
run_ucm('local_level')
def test_fixed_slope():
run_ucm('fixed_slope')
def test_fixed_slope():
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
run_ucm('fixed_slope')
message = ("Specified model does not contain a stochastic element;"
" irregular component added.")
        assert_equal(str(w[0].message), message)
def test_deterministic_trend():
run_ucm('deterministic_trend')
def test_random_walk_with_drift():
run_ucm('random_walk_with_drift')
def test_local_linear_deterministic_trend():
run_ucm('local_linear_deterministic_trend')
def test_local_linear_trend():
run_ucm('local_linear_trend')
def test_smooth_trend():
run_ucm('smooth_trend')
def test_random_trend():
run_ucm('random_trend')
def test_cycle():
run_ucm('cycle')
def test_seasonal():
run_ucm('seasonal')
def test_reg():
run_ucm('reg')
def test_rtrend_ar1():
run_ucm('rtrend_ar1')
def test_lltrend_cycle_seasonal_reg_ar1():
run_ucm('lltrend_cycle_seasonal_reg_ar1')
def test_mle_reg():
endog = np.arange(100)*1.0
exog = endog*2
# Make the fit not-quite-perfect
endog[::2] += 0.01
endog[1::2] -= 0.01
with warnings.catch_warnings(record=True) as w:
mod1 = UnobservedComponents(endog, irregular=True, exog=exog, mle_regression=False)
res1 = mod1.fit(disp=-1)
mod2 = UnobservedComponents(endog, irregular=True, exog=exog, mle_regression=True)
res2 = mod2.fit(disp=-1)
assert_allclose(res1.regression_coefficients.filtered[0, -1], 0.5, atol=1e-5)
assert_allclose(res2.params[1], 0.5, atol=1e-5)
def test_specifications():
endog = [1, 2]
# Test that when nothing specified, a warning is issued and the model that
# is fit is one with irregular=True and nothing else.
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
mod = UnobservedComponents(endog)
message = ("Specified model does not contain a stochastic element;"
" irregular component added.")
assert_equal(str(w[0].message), message)
assert_equal(mod.trend_specification, 'irregular')
# Test an invalid string trend specification
assert_raises(ValueError, UnobservedComponents, endog, 'invalid spec')
# Test that if a trend component is specified without a level component,
# a warning is issued and a deterministic level component is added
with warnings.catch_warnings(record=True) as w:
mod = UnobservedComponents(endog, trend=True, irregular=True)
message = ("Trend component specified without level component;"
" deterministic level component added.")
assert_equal(str(w[0].message), message)
assert_equal(mod.trend_specification, 'deterministic trend')
# Test that if a string specification is provided, a warning is issued if
# the boolean attributes are also specified
trend_attributes = ['irregular', 'trend', 'stochastic_level',
'stochastic_trend']
for attribute in trend_attributes:
with warnings.catch_warnings(record=True) as w:
kwargs = {attribute: True}
mod = UnobservedComponents(endog, 'deterministic trend', **kwargs)
message = ("Value of `%s` may be overridden when the trend"
" component is specified using a model string."
% attribute)
assert_equal(str(w[0].message), message)
# Test that a seasonal with period less than two is invalid
assert_raises(ValueError, UnobservedComponents, endog, seasonal=1)
def test_start_params():
# Test that the behavior is correct for multiple exogenous and / or
# autoregressive components
# Parameters
nobs = int(1e4)
beta = np.r_[10, -2]
phi = np.r_[0.5, 0.1]
# Generate data
np.random.seed(1234)
exog = np.c_[np.ones(nobs), np.arange(nobs)*1.0]
eps = np.random.normal(size=nobs)
endog = np.zeros(nobs+2)
fo
|
ifduyue/sentry
|
src/sentry/filters/localhost.py
|
Python
|
bsd-3-clause
| 996
| 0.002008
|
from __future__ import absolute_import
from .base import Filter
from six.moves.urllib.parse import urlparse
from sentry.utils.data_filters import FilterStatKeys
LOCAL_IPS = frozenset(['127.0.0.1', '::1'])
LOCAL_DOMAINS = frozenset(['127.0.0.1', 'localhost'])
class LocalhostFilter(Filter):
id = FilterStatKeys.LOCALHOST
name = 'Filter out events coming from localhost'
    description = 'This applies to both IPv4 (``127.0.0.1``) and IPv6 (``::1``) addresses.'
def get_ip_address(self, data):
try:
return data['sentry.interfaces.User']['ip_address']
except KeyError:
return ''
def get_url(self, data):
try:
return data['sentry.interfaces.Http']['url'] or ''
except KeyError:
return ''
    def get_domain(self, data):
return urlparse(self.get_url(data)).hostname
def test(self, data):
return self.get_ip_address(data) in LOCAL_IPS or self.get_domain(data) in LOCAL_DOMAINS
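# Illustrative check (constructor arguments and event payload values are
# assumptions, not from this file; the keys match those read above):
#
#     f = LocalhostFilter()
#     f.test({'sentry.interfaces.Http': {'url': 'http://localhost:8000/'}})  # -> True
#     f.test({'sentry.interfaces.User': {'ip_address': '203.0.113.7'}})      # -> False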
|
masom/Puck
|
server/controllers/root.py
|
Python
|
lgpl-3.0
| 2,450
| 0.002041
|
'''
Puck: FreeBSD virtualization guest configuration server
Copyright (C) 2011 The Hotel Communication Network inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os.path
import cherrypy
from libs.controller import *
import models
from models import Users
class RootController(Controller):
crumbs = [Crumb("/", "Home")]
def __init__(self, lookup):
Controller.__init__(self, lookup)
self._lookup = lookup
self._routes = {}
@cherrypy.expose
@cherrypy.tools.myauth()
|
def index(self):
return self.render("index.html", self.crumbs[:-1])
@cherrypy.expose
def login(self, **post):
if post:
self._login(post)
return self.render("login.html", self.crumbs[:-1])
@cherrypy.expose
def logout(self, **post):
cherrypy.session.delete()
raise cherrypy.HTTPRedirect("/l
|
ogin")
def add(self, route, cls):
self._routes[route] = cls
def load(self):
[setattr(self, route, self._routes[route](self._lookup)) for route in self._routes]
def _login(self, post):
fields = ['user.username', 'user.password']
for f in fields:
if not f in post:
cherrypy.session['flash'] = "Invalid form data."
return False
hash_password = Users.hash_password(post['user.password'])
user = Users.first(username=post['user.username'], password=hash_password)
if not user:
cherrypy.session['flash'] = 'Invalid username or password.'
return False
creds = user.generate_auth()
cherrypy.session['user.id'] = user.id
cherrypy.session['user.group'] = user.user_group
cherrypy.session['credentials'] = creds
raise cherrypy.HTTPRedirect('/index')
|
LRGH/amoco
|
amoco/arch/avr/asm.py
|
Python
|
gpl-2.0
| 10,020
| 0.000499
|
# -*- coding: utf-8 -*-
# This code is part of Amoco
# Copyright (C) 2014 Axel Tillequin (bdcht3@gmail.com)
# published under GPLv2 license
from amoco.arch.avr.env import *
from amoco.cas.mapper import mapper
# ------------------------------------------------------------------------------
# low level functions :
def _push_(fmap, x):
fmap[sp] = fmap[sp] - x.length
fmap[mem(sp, x.size)] = x
def _pop_(fmap, _l):
fmap[_l] = fmap(mem(sp, _l.size))
fmap[sp] = fmap[sp] + _l.length
def __pc(f):
def pcnpc(i, fmap):
fmap[pc] = fmap[pc] + i.length
if len(fmap.conds) > 0:
cond = fmap.conds.pop()
m = mapper()
f(i, m)
for l, v in m:
fmap[l] = tst(cond, v, fmap(l))
else:
f(i, fmap)
return pcnpc
def __nopc(f):
return f.__closure__[0].cell_contents
# flags for arithmetic operations:
def __setflags__A(i, fmap, a, b, x, neg=False):
fmap[zf] = x == 0
fmap[nf] = x.bit(7)
if neg:
a, x = ~a, ~x
fmap[cf] = (
((a.bit(7)) & (b.bit(7)))
| ((b.bit(7)) & (~x.bit(7)))
| ((a.bit(7)) & (~x.bit(7)))
)
fmap[vf] = ((a.bit(7)) & (b.bit(7)) & (~x.bit(7))) | (
(~a.bit(7)) & (~b.bit(7)) & (x.bit(7))
)
fmap[sf] = fmap[nf] ^ fmap[vf]
fmap[hf] = (
|
((a.bit(3)) & (b.bit(3)))
| ((b.bit(3)) & (~x.bit(3)))
| ((a.bit(3)) & (~x.bit(3)))
)
# flags for logical operations:
def __setflags__L(i, fmap, a, b, x):
fmap[zf] = x == 0
fmap[nf] = x.bit(7)
fmap[vf] = bit0
fmap[sf] = fmap[nf] ^ fmap[vf]
# flags for shift operations:
def __setflags__S(i, fmap, a, x):
# cf must be set before calling this function.
fmap[zf] = x == 0
fmap[nf] = x.bit(7)
|
fmap[vf] = fmap[nf] ^ fmap[cf]
fmap[sf] = fmap[nf] ^ fmap[vf]
# ixxx is the translation of AVR instruction xxx.
# ------------------------------------------------------------------------------
@__pc
def i_NOP(i, fmap):
pass
def i_SLEEP(i, fmap):
fmap[pc] = ext("SLEEP", size=pc.size).call(fmap)
def i_BREAK(i, fmap):
fmap[pc] = ext("BREAK", size=pc.size).call(fmap)
def i_IN(i, fmap):
r, port = i.operands
fmap[pc] = ext("IN", size=pc.size).call(fmap)
def i_OUT(i, fmap):
port, r = i.operands
fmap[pc] = ext("OUT", size=pc.size).call(fmap)
# arithmetic & logic instructions:
##################################
@__pc
def i_ADD(i, fmap):
dst, src = i.operands
a = fmap(dst)
b = fmap(src)
x = a + b
__setflags__A(i, fmap, a, b, x)
fmap[dst] = x
@__pc
def i_ADIW(i, fmap):
dst, src = i.operands
if i.misc["W"]:
assert dst is R[24]
a = fmap(composer([dst, R[25]]))
else:
a = fmap(dst)
b = fmap(src)
x = a + b
__setflags__A(i, fmap, a, b, x)
fmap[dst] = x[0 : dst.size]
if i.misc["W"]:
assert x.size == 16
fmap[R[25]] = x[8:16]
@__pc
def i_ADC(i, fmap):
dst, src = i.operands
_c = fmap[cf]
__nopc(i_ADD)(i, fmap)
a = fmap(dst)
b = tst(_c, cst(1, a.size), cst(0, a.size))
x = a + b
__setflags__A(i, fmap, a, b, x)
fmap[dst] = x
@__pc
def i_INC(i, fmap):
dst = i.operands[0]
a = fmap(dst)
b = cst(1, dst.size)
x = a + b
fmap[zf] = x == 0
fmap[nf] = x.bit(7)
fmap[vf] = a == cst(0x7F, 8)
fmap[sf] = fmap[nf] ^ fmap[vf]
fmap[dst] = x
@__pc
def i_CP(i, fmap):
dst, src = i.operands
a = fmap(dst)
b = fmap(src)
x = a - b
__setflags__A(i, fmap, a, b, x, neg=True)
@__pc
def i_CPSE(i, fmap):
rd, rr = i.operands
fmap.conds[fmap(rd == rr)]
@__pc
def i_SBRC(i, fmap):
b = i.operands[0]
fmap.conds[fmap(b == bit0)]
@__pc
def i_SBRS(i, fmap):
b = i.operands[0]
fmap.conds[fmap(b == bit1)]
@__pc
def i_SUB(i, fmap):
dst, src = i.operands
a = fmap(dst)
b = fmap(src)
x = a - b
__setflags__A(i, fmap, a, b, x, neg=True)
fmap[dst] = x
i_SUBI = i_SUB
@__pc
def i_SBIW(i, fmap):
dst, src = i.operands
if i.misc["W"]:
assert dst is R[24]
a = fmap(composer([dst, R[25]]))
else:
a = fmap(dst)
b = fmap(src)
x = a - b
__setflags__A(i, fmap, a, b, x, neg=True)
fmap[dst] = x[0 : dst.size]
if i.misc["W"]:
assert x.size == 16
fmap[R[25]] = x[8:16]
@__pc
def i_COM(i, fmap):
dst, src = i.operands
a = cst(0xFF, 8)
b = fmap(dst)
x = a - b
__setflags__A(i, fmap, a, b, x, neg=True)
fmap[dst] = x
@__pc
def i_NEG(i, fmap):
dst = i.operands[0]
a = cst(0, dst.size)
b = fmap(dst)
x = a - b
__setflags__A(i, fmap, a, b, x, neg=True)
fmap[dst] = x
@__pc
def i_DEC(i, fmap):
dst = i.operands[0]
a = fmap(dst)
b = cst(1, dst.size)
x = a - b
fmap[zf] = x == 0
fmap[nf] = x.bit(7)
fmap[vf] = a == cst(0x80, 8)
fmap[sf] = fmap[nf] ^ fmap[vf]
fmap[dst] = x
@__pc
def i_CPC(i, fmap):
dst, src = i.operands
a = fmap(dst)
b = fmap(src)
_c = fmap[cf]
__nopc(i_CP)(i, fmap)
a = fmap(a - b)
b = tst(_c, cst(1, a.size), cst(0, a.size))
x = a - b
__setflags__A(i, fmap, a, b, x, neg=True)
@__pc
def i_SBC(i, fmap):
dst, src = i.operands
_c = fmap[cf]
__nopc(i_SUB)(i, fmap)
a = fmap(dst)
b = tst(_c, cst(1, a.size), cst(0, a.size))
x = a - b
__setflags__A(i, fmap, a, b, x, neg=True)
fmap[dst] = x
i_SBCI = i_SBC
@__pc
def i_AND(i, fmap):
dst, src = i.operands
a = fmap(dst)
b = fmap(src)
x = a & b
__setflags__L(i, fmap, a, b, x)
fmap[dst] = x
i_ANDI = i_AND
@__pc
def i_OR(i, fmap):
dst, src = i.operands
a = fmap(dst)
b = fmap(src)
x = a | b
__setflags__L(i, fmap, a, b, x)
fmap[dst] = x
i_ORI = i_OR
@__pc
def i_EOR(i, fmap):
dst, src = i.operands
a = fmap(dst)
b = fmap(src)
x = a ^ b
__setflags__L(i, fmap, a, b, x)
fmap[dst] = x
@__pc
def i_MUL(i, fmap):
dst, src = i.operands
a = fmap(dst)
b = fmap(src)
x = a ** b
fmap[cf] = x[15:16]
fmap[zf] = x == 0
fmap[R[0]] = x[0:8]
fmap[R[1]] = x[8:16]
# shift/rotate instructions:
############################
@__pc
def i_LSL(i, fmap):
dst = i.operands[0]
a = fmap(dst)
fmap[cf] = a.bit(7)
x = a << 1
__setflags__S(i, fmap, a, x)
fmap[dst] = x
@__pc
def i_LSR(i, fmap):
dst = i.operands[0]
a = fmap(dst)
fmap[cf] = a.bit(0)
x = a >> 1
__setflags__S(i, fmap, a, x)
fmap[dst] = x
@__pc
def i_ASR(i, fmap):
dst = i.operands[0]
a = fmap(dst)
fmap[cf] = a.bit(0)
x = a & 0x80
x |= a >> 1
__setflags__S(i, fmap, a, x)
fmap[dst] = x
@__pc
def i_ROL(i, fmap):
dst = i.operands[0]
a = fmap(dst)
c = fmap[cf].zeroextend(a.size)
fmap[cf] = a.bit(7)
x = a << 1
x |= c
__setflags__S(i, fmap, a, x)
fmap[dst] = x
@__pc
def i_ROR(i, fmap):
dst = i.operands[0]
a = fmap(dst)
c = fmap[cf]
fmap[cf] = a.bit(0)
x = composer([cst(0, 7), c])
x |= a >> 1
__setflags__S(i, fmap, a, x)
fmap[dst] = x
# bit instructions:
###################
@__pc
def i_SWAP(i, fmap):
b = i.operands[0]
x = fmap(b)
fmap[b] = composer([x[4:8], x[0:4]])
@__pc
def i_BCLR(i, fmap):
b = i.operands[0]
fmap[b] = bit0
@__pc
def i_BSET(i, fmap):
b = i.operands[0]
fmap[b] = bit1
@__pc
def i_BST(i, fmap):
b = i.operands[0]
fmap[tf] = fmap(b)
@__pc
def i_BLD(i, fmap):
b = i.operands[0]
fmap[b] = fmap(tf)
# stack instructions:
#####################
@__pc
def i_POP(i, fmap):
dst = i.operands[0]
_pop_(fmap, dst)
@__pc
def i_PUSH(i, fmap):
src = i.operands[0]
_push_(fmap, src)
# load-store instructions:
##########################
@__pc
def i_LD(i, fmap):
dst, src = i.operands
if i.misc["flg"] == -1:
fmap[src] = fmap(src - 1)
fmap[dst] = fmap(mem(src, dst.size))
if i.misc["flg"] == 1:
fmap[src] = fmap(src + 1)
i_LDS = i_LD
i_LDD = i_LD
@__pc
def i_ST(i,
|
glemaitre/protoclass
|
protoclass/data_management/t2w_modality.py
|
Python
|
gpl-2.0
| 8,496
| 0
|
"""T2W modality class."""
import warnings
import numpy as np
import SimpleITK as sitk
from .standalone_modality import StandaloneModality
from ..utils.validation import check_path_data
class T2WModality(StandaloneModality):
"""Class to handle T2W-MRI modality.
Parameters
----------
path_data : str, optional (default=None)
The folder in which the data are stored.
Attributes
----------
path_data_ : string
Location of the data.
data_ : ndarray, shape (Y, X, Z)
The different volume of the T2W volume. The data are saved in
Y, X, Z ordered.
metadata_ : dict
        Dictionary which contains the MRI sequence information. Note that the
        information is given in the original ordering (X, Y, Z), which is
        different from the organisation of `data_` which is (Y, X, Z).
pdf_ : list, length (n_serie)
List of the PDF for each serie.
bin_ : list of ndarray, length (n_serie)
List of the bins used to plot the pdfs.
max_ : float
Maximum intensity of the T2W-MRI volume.
min_ : float
        Minimum intensity of the T2W-MRI volume.
"""
def __init__(self, path_data=None):
super(T2WModality, self).__init__(path_data=path_data)
def get_pdf(self, roi_data=None, nb_bins='auto'):
""" Extract the a list of pdf related with the data.
Parameters
----------
roi_data : tuple
            Indices of elements to consider while computing the histogram.
The ROI is a 3D volume which will be used for each time serie.
nb_bins : list of int or str, optional (default='auto')
The numbers of bins to use to compute the histogram.
The possibilities are:
- If 'auto', the number of bins is found at fitting time.
- If None, the number of bins used is the one at the last
call of update histogram.
- Otherwise, a list of integer needs to be given.
Returns
-------
pdf_data : ndarray, length (n_serie)
List of the pdf with the associated series.
bin_data : list of ndarray, length (n_series + 1)
List of the bins associated with the list of pdf.
"""
# Check that the data have been read
if self.data_ is None:
raise ValueError('You need to load the data first. Refer to the'
' function read_data_from_path().')
# Build the histogram corresponding to the current volume
# Find how many bins do we need
if isinstance(nb_bins, basestring):
if nb_bins == 'auto':
nb_bins = int(np.round(self.max_ - self.min_))
else:
raise ValueError('Unknown parameters for `nb_bins.`')
elif isinstance(nb_bins, int):
pass
elif nb_bins is None:
nb_bins = self.nb_bins_
else:
raise ValueError('Unknown type for the parameters `nb_bins`.')
if roi_data is None:
pdf_data, bin_data = np.histogram(self.data_,
bins=nb_bins,
density=True)
else:
pdf_data, bin_data = np.histogram(self.data_[roi_data],
bins=nb_bins,
density=True)
return pdf_data, bin_data
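    # Illustrative usage sketch (the DICOM folder path is an assumption, not
    # part of this file); read_data_from_path() returns self, so the calls can
    # be chained:
    #
    #     t2w = T2WModality(path_data='/path/to/t2w/dicom')
    #     pdf, bins = t2w.read_data_from_path().get_pdf()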
def update_histogram(self, nb_bins=None):
"""Update the PDF and the first-order statistics.
Parameters
----------
nb_bins : int or None, optional (default=None)
The numbers of bins to use to compute the histogram.
The possibilities are:
- If None, the number of bins found at reading will be used.
- If 'auto', the number of bins is found at fitting time.
- Otherwise, an integer needs to be given.
Returns
-------
self : object
Returns self.
Notes
-----
        There is the possibility to redefine the number of bins to use for
the histogram since it can be tricky to play with normalized data.
"""
# Check if the data have been read
if self.data_ is None:
raise ValueError('You need to read the data first. Call the'
' function read_data_from_path()')
# Compute the min and max from the T2W volume
self.max_ = np.ndarray.max(self.data_)
self.min_ = np.ndarray.min(self.data_)
# Build the histogram corresponding to the current volume
# Find how many bins do we need
if isinstance(nb_bins, basestring):
if nb_bins == 'auto':
nb_bins = int(np.round(self.max_ - self.min_))
else:
raise ValueError('Unknown parameters for `nb_bins.`')
elif nb_bins is None:
nb_bins = self.nb_bins_
self.pdf_, self.bin_ = np.histogram(self.data_,
bins=nb_bins,
density=True)
return self
def read_data_from_path(self, path_data=None):
"""Read T2W images which represent a single 3D volume.
Parameters
----------
path_data : str or None, optional (default=None)
Path to the standalone modality data.
Returns
-------
self : object
Returns self.
"""
# Check the consistency of the path data
if self.path_data_ is not None and path_data is not None:
            # We will override the path and raise a warning
warnings.warn('The data path will be overriden using the path'
' given in the function.')
self.path_data_ = check_path_data(path_data)
elif self.path_data_ is None and path_data is not None:
self.path_data_ = check_path_data(path_data)
elif self.path_data_ is None and path_data is None:
raise ValueError('You need to give a path_data from where to read'
' the data.')
# Create a reader object
reader = sitk.ImageSeriesReader()
# Find the different series present inside the folder
series = np.array(reader.GetGDCMSeriesIDs(self.path_data_))
# Check that you have more than one serie
if len(series) > 1:
raise ValueError('The number of series should not be larger than'
' 1 with standalone modality.')
# The data can be read
dicom_names_serie = reader.GetGDCMSeriesFileNames(self.path_data_)
# Set the list of files to read the volume
reader.SetFileNames(dicom_names_serie)
# Read the data for the current volume
vol = reader.Execute()
# Get a numpy volume
vol_numpy = sitk.GetArrayFromImage(vol)
# The Matlab convention is (Y, X, Z)
# The Numpy convention is (Z, Y, X)
# We have to swap these axis
# Swap Z and X
vol_numpy = np.swapaxes(vol_numpy, 0, 2)
vol_numpy = np.swapaxes(vol_numpy, 0, 1)
# Convert the volume to float
vol_numpy = vol_numpy.astype(np.float64)
# We can create a numpy array
self.data_ = vol_numpy
# Compute the information regarding the T2W images
# Set the number of bins that will be later used to compute
# the histogram
self.nb_bins_ = int(np.round(np.ndarray.max(self.data_) -
np.ndarray.min(self.data_)))
self.update_histogram()
# Store the DICOM metadata
self.metadata_ = {}
# Get the information that have been created by SimpleITK
# Information about data reconstruction
self.metadata_['size'] = vol.GetSize()
self.metadata_['origin'] = vol.GetOrigin()
self.metadata_['direction'] = vol.GetDirection()
self.metadata_['spacing'] = vol.GetSpacing()
# Information about the MRI sequence
#
|
logston/python-dircmp
|
dircmppy/dircmpdel.py
|
Python
|
bsd-2-clause
| 2,648
| 0.000755
|
import os
import errno
def delete_file(file_name, dry=False):
if dry:
print(' DRY DELETED: {}'.format(file_name))
else:
os.remove(file_name)
try:
dirname = os.path.dirname(file_name)
os.rmdir(dirname)
print(' DELETED DIR: {}'.format(dirname))
except OSError as ex:
if ex.errno != errno.ENOTEMPTY:
raise
print(' DELETED: {}'.format(file_name))
def run_dircmpdel(dircmp_file, prompt=True, dry=False):
"""
Parse dircmp file for groups of file names to be deleted.
"""
with open(dircmp_file) as fp:
lines = fp.read()
groups = lines.strip().split('\n\n')
print('Found {} duplicate groups'.format(len(groups)))
groups = (group.split('\n') for group in groups)
checked_proper_cwd = False
for group in groups:
for i, file_name in enumerate(group):
if not i:
if not checked_proper_cwd:
if not os.path.exists(file_name):
raise RuntimeError('File {} could not be found. '
'Please ensure you are in the '
'correct directory.'
''.format(file_name))
checked_proper_cwd = True
                print('Deleting duplicates of {}'.format(file_name))
else:
if prompt:
while True:
resp = input(' Delete {}? '.format(file_name))
resp = resp.lower()
if resp not in ('yes', 'no'):
print('Please answer "yes" or "no".')
elif resp == 'yes':
delete_file(file_name, dry=dry)
|
break
elif resp == 'no':
print(' Not deleted: {}'.format(file_name))
break
else:
delete_file(file_name, dry=dry)
print()
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(
description='Utility for deleting duplicate files found by dircmp'
)
parser.add_argument('file')
parser.add_argument('--no-prompt',
action='store_false', default=True, dest='prompt')
parser.add_argument('-d', '--dry',
action='store_true', default=False, dest='dry')
args = parser.parse_args()
run_dircmpdel(args.file, prompt=args.prompt, dry=args.dry)
|
exratione/thywill-python
|
thywill_server/src/thywill_server/database/__init__.py
|
Python
|
mit
| 105
| 0.028571
|
'''
This component package and its subpackages contain wrapper and glue code for database operations.
'''
|
securestate/king-phisher
|
king_phisher/client/tabs/campaign.py
|
Python
|
bsd-3-clause
| 32,510
| 0.027592
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/client/tabs/campaign.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import logging
import threading
import time
from king_phisher import errors
from king_phisher import find
from king_phisher import ipaddress
from king_phisher import utilities
from king_phisher.client import export
from king_phisher.client import graphs
from king_phisher.client import gui_utilities
from king_phisher.client.widget import extras
from king_phisher.client.widget import managers
import advancedhttpserver
from gi.repository import GdkPixbuf
from gi.repository import GLib
from gi.repository import Gtk
import rule_engine
from smoke_zephyr.utilities import parse_timespan
UNKNOWN_LOCATION_STRING = 'N/A (Unknown)'
def _dt_field(value):
return value if value is None else utilities.datetime_utc_to_local(value)
class CampaignViewGenericTab(gui_utilities.GladeGObject):
"""
This object is meant to be subclassed by all of the tabs which load and
display information about the current campaign.
"""
label_text = 'Unknown'
"""The label of the tab for display in the GUI."""
top_gobject = 'box'
def __init__(self, *args, **kwargs):
super(CampaignViewGenericTab, self).__init__(*args, **kwargs)
self.label = Gtk.Label(label=self.label_text)
"""The :py:class:`Gtk.Label` representing this tab with text from :py:attr:`~.CampaignViewGenericTab.label_text`."""
self.is_destroyed = threading.Event()
getattr(self, self.top_gobject).connect('destroy', self.signal_destroy)
self.last_load_time = float('-inf')
"""The last time the data was loaded from the server."""
self.refresh_frequency = parse_timespan(str(self.config.get('gui.refresh_frequency', '5m')))
"""The lifetime in seconds to wait before refreshing the data from the server."""
self.loader_thread = None
"""The thread object which loads the data from the server."""
self.loader_thread_lock = threading.Lock()
"""The :py:class:`threading.Lock` object used for synchronization between the loader and main threads."""
self.loader_thread_stop = threading.Event()
"""The :py:class:`threading.Event` object used to request that the loader thread stop before completion."""
self.application.connect('campaign-set', self.signal_kpc_campaign_set)
def _sync_loader_thread(self):
"""
Synchronize the loader thread by ensuring that it is stopped. If it is
currently running, this will use :py:attr:`~.loader_thread_stop` to
request that the loader stops early.
"""
if not self.loader_thread_is_running:
return
# it's alive so tell it to stop, wait for it, then proceed
self.loader_thread_stop.set()
while self.loader_thread.is_alive():
gui_utilities.gtk_sync()
self.loader_thread.join(1)
@property
def rpc(self):
return self.application.rpc
@property
def loader_thread_is_running(self):
if self.loader_thread is None:
return False
return self.loader_thread.is_alive()
def load_campaign_information(self, force=True):
raise NotImplementedError()
def signal_button_clicked_refresh(self, button):
self.load_campaign_information()
def signal_destroy(self, gobject):
self.is_destroyed.set()
self.loader_thread_stop.set()
if isinstance(self.loader_thread, threading.Thread) and self.loader_thread.is_alive():
self.logger.debug("waiting on thread: {0}.loader_thread (tid: 0x{1:x})".format(self.__class__.__name__, self.loader_thread.ident))
while self.loader_thread.is_alive():
gui_utilities.gtk_sync()
self.logger.debug("joined thread: {0}.loader_thread (tid: 0x{1:x})".format(self.__class__.__name__, self.loader_thread.ident))
def signal_kpc_campaign_set(self, *_):
self.load_campaign_information()
class CampaignViewGenericTableTab(CampaignViewGenericTab):
"""
This object is meant to be subclassed by tabs which will display campaign
information of different types from specific database tables. The data in
this object is refreshed when multiple events occur and it uses an internal
timer to represent the last time the data was refreshed.
"""
dependencies = gui_utilities.GladeDependencies(
children=(
'button_refresh',
'entry_filter',
'label_filter_summary',
'revealer_filter',
'treeview_campaign'
)
)
node_query = None
"""
The GraphQL query used to load a particular node from the remote table. This
query is provided with a single parameter of the node's id.
"""
table_name = ''
"""The database table represented by this tab."""
table_query = None
"""
The GraphQL query used to load the desired information from the remote
table. This query is provided with the following three parameters: campaign,
count and cursor.
"""
secret_columns = ()
view_columns = ()
xlsx_worksheet_options = None
def __init__(self, *args, **kwargs):
super(CampaignViewGenericTableTab, self).__init__(*args, **kwargs)
treeview = self.gobjects['treeview_campaign']
self.treeview_manager = managers.TreeViewManager(
			treeview,
selection_mode=Gtk.SelectionMode.MULTIPLE,
cb_delete=self._prompt_to_delete_row,
cb_refresh=self.load_campaign_information
)
self.treeview_manager.set_column_titles(
self.view_column_titles,
column_offset=1,
			renderers=tuple(column.cell_renderer() for column in self.view_columns)
)
for column in self.view_columns:
if isinstance(column, extras.ColumnDefinitionDatetime):
self.treeview_manager.column_views[column.title].set_fixed_width(150)
self.popup_menu = self.treeview_manager.get_popup_menu()
"""The :py:class:`Gtk.Menu` object which is displayed when right-clicking in the view area."""
treeview = self.gobjects['treeview_campaign']
self._rule = None
self._rule_context = rule_engine.Context(type_resolver=rule_engine.type_resolver_from_dict(
dict((column.name, rule_engine.DataType.from_type(column.python_type)) for column in self.view_columns)
))
view_column_types = tuple(column.g_type for column in self.view_columns)
self._tv_model = Gtk.ListStore(str, *view_column_types)
self._tv_model_filter = self._tv_model.filter_new()
self._tv_model_filter.set_visible_func(self._tv_filter)
tree_model_sort = Gtk.TreeModelSort(model=self._tv_model_filter)
for idx, column in enumerate(self.view_columns, 1):
if column.sort_function is not None:
tree_model_sort.set_sort_func(idx, column.sort_function, idx)
treeview.set_model(tree_model_sort)
self.application.connect('server-connected', self.signal_kp_server_connected)
tab_config = self._tab_config
filter_revealer = self.gobjects['revealer_filter']
filter_revealer.set_reveal_child(tab_config['filter.show'])
menu_item = Gtk.CheckMenuItem.new_with_label('Show Filter')
		menu_item.set_active(filter_revealer.get_reveal_child())
|
Sticklyman1936/workload-automation
|
wlauto/common/resources.py
|
Python
|
apache-2.0
| 1,635
| 0.001223
|
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from wlauto.core.resource import Resource
class FileResource(Resource):
"""
Base class for all resources that are a regular file in the
file system.
"""
def delete(self, instance):
os.remove(instance)
class File(FileResource):
name = 'file'
def __init__(self, owner, path, url=None):
super(File, self).__init__(owner)
self.path = path
self.url = url
def __str__(self):
        return '<{}\'s {} {}>'.format(self.owner, self.name, self.path or self.url)
class ExtensionAsset(File):
name = 'extension_asset'
def __init__(self, owner, path):
super(ExtensionAsset, self).__init__(owner, os.path.join(owner.name, path))
class Executable(FileResource):
name = 'executable'
def __init__(self, owner, platform, filename):
super(Executable, self).__init__(owner)
self.platform = platform
        self.filename = filename
def __str__(self):
return '<{}\'s {} {}>'.format(self.owner, self.platform, self.filename)
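# Illustrative usage sketch (the owner, platform and filename values are
# assumptions, not from this file):
#
#     exe = Executable(owner=some_workload, platform='android', filename='busybox')
#     str(exe)  # -> "<{owner}'s android busybox>"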
|
archatas/whoosh
|
whoosh/lang/lovins.py
|
Python
|
apache-2.0
| 12,657
| 0.003871
|
"""This module implements the Lovins stemming algorithm. Use the ``stem()``
function::
stemmed_word = stem(word)
"""
from whoosh.util.collections2 import defaultdict
# Conditions
def A(base):
# A No restrictions on stem
return True
def B(base):
# B Minimum stem length = 3
return len(base) > 2
def C(base):
# C Minimum stem length = 4
return len(base) > 3
def D(base):
# D Minimum stem length = 5
return len(base) > 4
def E(base):
# E Do not remove ending after e
    return base[-1] != "e"
def F(base):
# F Minimum stem length = 3 and do not remove ending after e
return len(base) > 2 and base[-1] != "e"
def G(base):
# G Minimum stem length = 3 and remove ending only after f
return len(base) > 2 and base[-1] == "f"
def H(base):
# H Remove ending only after t or ll
c1, c2 = base[-2:]
return c2 == "t" or (c2 == "l" and c1 == "l")
def I(base):
# I Do not remove ending after o or e
c = base[-1]
return c != "o" and c != "e"
def J(base):
# J Do not remove ending after a or e
c = base[-1]
return c != "a" and c != "e"
def K(base):
# K Minimum stem length = 3 and remove ending only after l, i or u*e
c = base[-1]
cc = base[-3]
return len(base) > 2 and (c == "l" or c == "i" or (c == "e" and cc == "u"))
def L(base):
# L Do not remove ending after u, x or s, unless s follows o
c1, c2 = base[-2:]
return c2 != "u" and c2 != "x" and (c2 != "s" or c1 == "o")
def M(base):
# M Do not remove ending after a, c, e or m
c = base[-1]
return c != "a" and c!= "c" and c != "e" and c != "m"
def N(base):
# N Minimum stem length = 4 after s**, elsewhere = 3
return len(base) > 3 or (len(base) == 3 and base[-1] != "s")
def O(base):
# O Remove ending only after l or i
c = base[-1]
return c == "l" or c == "i"
def P(base):
# P Do not remove ending after c
return base[-1] != "c"
def Q(base):
# Q Minimum stem length = 3 and do not remove ending after l or n
c = base[-1]
return len(base) > 2 and (c != "l" and c != "n")
def R(base):
# R Remove ending only after n or r
c = base[-1]
return c == "n" or c == "r"
def S(base):
# S Remove ending only after dr or t, unless t follows t
    l2 = base[-2:]
return l2 == "rd" or (base[-1] == "t" and l2 != "tt")
def T(base):
# T Remove ending only after s or t, unless t follows o
c1, c2 = base[-2:]
return c2 == "s" or (c2 == "t" and c1 != "o")
def U(base):
# U Remove ending only after l, m, n or r
c = base[-1]
return c == "l" or c == "m" or c == "n" or c == "r"
def V(base):
# V Remove ending only after c
return base[-1] == "c"
def W(base):
# W Do not remove ending after s or u
c = base[-1]
return c != "s" and c != "u"
def X(base):
# X Remove ending only after l, i or u*e
c = base[-1]
cc = base[-3]
return c == "l" or c == "i" or (c == "e" and cc == "u")
def Y(base):
# Y Remove ending only after in
return base[-2:] == "in"
def Z(base):
# Z Do not remove ending after f
return base[-1] != "f"
def a(base):
# a Remove ending only after d, f, ph, th, l, er, or, es or t
c = base[-1]
l2 = base[-2:]
return (c == "d" or c == "f" or l2 == "ph" or l2 == "th" or c == "l"
or l2 == "er" or l2 == "or" or l2 == "es" or c == "t")
def b(base):
# b Minimum stem length = 3 and do not remove ending after met or ryst
return len(base) > 2 and not (base.endswith("met")
or base.endswith("ryst"))
def c(base):
# c Remove ending only after l
return base[-1] == "l"
# Endings
m = [None] * 12
m[11] = dict((
("alistically", B),
("arizability", A),
("izationally", B)))
m[10] = dict((
("antialness", A),
("arisations", A),
("arizations", A),
("entialness", A)))
m[9] = dict((
("allically", C),
("antaneous", A),
("antiality", A),
("arisation", A),
("arization", A),
("ationally", B),
("ativeness", A),
("eableness", E),
("entations", A),
("entiality", A),
("entialize", A),
("entiation", A),
("ionalness", A),
("istically", A),
("itousness", A),
("izability", A),
("izational", A)))
m[8] = dict((
("ableness", A),
("arizable", A),
("entation", A),
("entially", A),
("eousness", A),
("ibleness", A),
("icalness", A),
("ionalism", A),
("ionality", A),
("ionalize", A),
("iousness", A),
("izations", A),
("lessness", A)))
m[7] = dict((
("ability", A),
("aically", A),
("alistic", B),
("alities", A),
("ariness", E),
("aristic", A),
("arizing", A),
("ateness", A),
("atingly", A),
("ational", B),
("atively", A),
("ativism", A),
("elihood", E),
("encible", A),
("entally", A),
("entials", A),
("entiate", A),
("entness", A),
("fulness", A),
("ibility", A),
("icalism", A),
("icalist", A),
("icality", A),
("icalize", A),
("ication", G),
("icianry", A),
("ination", A),
("ingness", A),
("ionally", A),
("isation", A),
("ishness", A),
("istical", A),
("iteness", A),
("iveness", A),
("ivistic", A),
("ivities", A),
("ization", F),
("izement", A),
("oidally", A),
("ousness", A)))
m[6] = dict((
("aceous", A),
("acious", B),
("action", G),
("alness", A),
("ancial", A),
("ancies", A),
("ancing", B),
("ariser", A),
("arized", A),
("arizer", A),
("atable", A),
("ations", B),
("atives", A),
("eature", Z),
("efully", A),
("encies", A),
("encing", A),
("ential", A),
("enting", C),
("entist", A),
("eously", A),
("ialist", A),
("iality", A),
("ialize", A),
("ically", A),
("icance", A),
("icians", A),
("icists", A),
("ifully", A),
("ionals", A),
("ionate", D),
("ioning", A),
("ionist", A),
("iously", A),
("istics", A),
("izable", E),
("lessly", A),
("nesses", A),
("oidism", A)))
m[5] = dict((
("acies", A),
("acity", A),
("aging", B),
("aical", A),
("alist", A),
("alism", B),
("ality", A),
("alize", A),
("allic", b),
("anced", B),
("ances", B),
("antic", C),
("arial", A),
("aries", A),
("arily", A),
("arity", B),
("arize", A),
("aroid", A),
("ately", A),
("ating", I),
("ation", B),
("ative", A),
("ators", A),
("atory", A),
("ature", E),
("early", Y),
("ehood", A),
("eless", A),
("elily", A),
("ement", A),
("enced", A),
("ences", A),
("eness", E),
("ening", E),
("ental", A),
("ented", C),
("ently", A),
("fully", A),
("ially", A),
("icant", A),
("ician", A),
("icide", A),
("icism", A),
("icist", A),
("icity", A),
("idine", I),
("iedly", A),
("ihood", A),
("inate", A),
("iness", A),
("ingly", B),
("inism", J),
("inity", c),
("ional", A),
("ioned", A),
("ished", A),
("istic", A),
("ities", A),
("itous", A),
("ively", A),
("ivity", A),
("izers", F),
("izing", F),
("oidal", A),
("oides", A),
("otide", A),
("ously", A)))
m[4] = dict((
|
cinderella/incubator-cloudstack
|
test/integration/component/test_snapshots.py
|
Python
|
apache-2.0
| 57,945
| 0.001778
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" P1 tests for Snapshots
"""
#Import Local Modules
import marvin
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from integration.lib.utils import *
from integration.lib.base import *
from integration.lib.common import *
from marvin import remoteSSHClient
class Services:
"""Test Snapshots Services
"""
def __init__(self):
self.services = {
"account": {
"email": "test@test.com",
"firstname": "Test",
"lastname": "User",
"username": "test",
# Random characters are appended for unique
# username
"password": "fr3sca",
},
"service_offering": {
"name": "Tiny Instance",
"displaytext": "Tiny Instance",
"cpunumber": 1,
"cpuspeed": 200, # in MHz
"memory": 256, # In MBs
},
"disk_offering": {
"displaytext": "Small Disk",
"name": "Small Disk",
"disksize": 1
},
"server": {
|
"displayname": "TestVM",
"username": "root",
"password": "password",
"ssh_port": 22,
"hypervisor": 'XenServer',
"privateport": 22,
"publicport":
|
22,
"protocol": 'TCP',
},
"mgmt_server": {
"ipaddress": '192.168.100.21',
"username": "root",
"password": "fr3sca",
"port": 22,
},
"recurring_snapshot": {
"intervaltype": 'HOURLY',
# Frequency of snapshots
"maxsnaps": 1, # Should be min 2
"schedule": 1,
"timezone": 'US/Arizona',
# Timezone Formats - http://cloud.mindtouch.us/CloudStack_Documentation/Developer's_Guide%3A_CloudStack
},
"templates": {
"displaytext": 'Template',
"name": 'Template',
"ostypeid": '144f66aa-7f74-4cfe-9799-80cc21439cb3',
"templatefilter": 'self',
},
"diskdevice": "/dev/xvda",
"diskname": "TestDiskServ",
"size": 1, # GBs
"mount_dir": "/mnt/tmp",
"sub_dir": "test",
"sub_lvl_dir1": "test1",
"sub_lvl_dir2": "test2",
"random_data": "random.data",
"ostypeid": '144f66aa-7f74-4cfe-9799-80cc21439cb3',
# Cent OS 5.3 (64 bit)
"sleep": 60,
"timeout": 10,
"mode" : 'advanced', # Networking mode: Advanced, Basic
}
class TestCreateVMsnapshotTemplate(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.api_client = super(TestCreateVMsnapshotTemplate, cls).getClsTestClient().getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client, cls.services)
cls.zone = get_zone(cls.api_client, cls.services)
cls.template = get_template(
cls.api_client,
cls.zone.id,
cls.services["ostypeid"]
)
cls.services["domainid"] = cls.domain.id
cls.services["server"]["zoneid"] = cls.zone.id
cls.services["template"] = cls.template.id
# Create VMs, NAT Rules etc
cls.account = Account.create(
cls.api_client,
cls.services["account"],
domainid=cls.domain.id
)
cls.services["account"] = cls.account.account.name
cls.service_offering = ServiceOffering.create(
cls.api_client,
cls.services["service_offering"]
)
cls._cleanup = [
cls.service_offering,
cls.account,
]
return
@classmethod
def tearDownClass(cls):
try:
#Cleanup resources used
cleanup_resources(cls.api_client, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
return
def tearDown(self):
try:
#Clean up, terminate the created instance, volumes and snapshots
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def test_01_createVM_snapshotTemplate(self):
"""Test create VM, Snapshot and Template
"""
# Validate the following
# 1. Deploy VM using default template, small service offering
# and small data disk offering.
# 2. Perform snapshot on the root disk of this VM.
# 3. Create a template from snapshot.
        # 4. Create an instance from the above created template.
# 5. listSnapshots should list the snapshot that was created.
# 6. verify that secondary storage NFS share contains the reqd
# volume under /secondary/snapshots/$accountid/$volumeid/$snapshot_uuid
# 7. verify backup_snap_id was non null in the `snapshots` table
# 8. listTemplates() should return the newly created Template,
        #    and check for template state as READY
# 9. listVirtualMachines() command should return the deployed VM.
# State of this VM should be Running.
#Create Virtual Machine
self.virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["server"],
templateid=self.template.id,
accountid=self.account.account.name,
|
ric2b/Vivaldi-browser
|
chromium/build/android/pylib/utils/argparse_utils.py
|
Python
|
bsd-3-clause
| 1,695
| 0.00649
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
class CustomHelpAction(argparse.Action):
'''Allows defining custom help actions.
Help actions can run even when the parser would otherwise fail on missing
arguments. The first help or custom help command mentioned on the command
line will have its help text displayed.
Usage:
parser = argparse.ArgumentParser(...)
CustomHelpAction.EnableFor(parser)
parser.add_argument('--foo-help',
|
action='custom_help',
custom_help_text='this is the help message',
help='What this helps with')
'''
# Derived from argparse._HelpAction fr
|
om
# https://github.com/python/cpython/blob/master/Lib/argparse.py
# pylint: disable=redefined-builtin
# (complains about 'help' being redefined)
def __init__(self,
option_strings,
dest=argparse.SUPPRESS,
default=argparse.SUPPRESS,
custom_help_text=None,
help=None):
super().__init__(option_strings=option_strings,
dest=dest,
default=default,
nargs=0,
help=help)
if not custom_help_text:
raise ValueError('custom_help_text is required')
self._help_text = custom_help_text
def __call__(self, parser, namespace, values, option_string=None):
print(self._help_text)
parser.exit()
@staticmethod
def EnableFor(parser):
parser.register('action', 'custom_help', CustomHelpAction)
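# Minimal usage sketch (illustrative, not part of the original module): the
# custom help action runs while arguments are being parsed, so it prints its
# text and exits before argparse reports any missing required arguments.
#
#   parser = argparse.ArgumentParser()
#   CustomHelpAction.EnableFor(parser)
#   parser.add_argument('--required-arg', required=True)
#   parser.add_argument('--foo-help', action='custom_help',
#                       custom_help_text='extended help for the foo feature')
#   parser.parse_args(['--foo-help'])   # prints the text, then parser.exit()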
|
skylines-project/skylines
|
tests/schemas/schemas/test_club.py
|
Python
|
agpl-3.0
| 1,722
| 0.000581
|
import pytest
from marshmallow import ValidationError
from skylines.schemas import ClubSchema
def test_deserialization_fails_for_empty_name():
with pytest.raises(ValidationError) as e:
ClubSchema(only=("name",)).load(dict(name=""))
errors = e.value.messages
assert "name" in errors
assert "Must not be empty." in errors.get("name")
def test_deserialization_fails_for_spaced_name():
with pytest.raises(ValidationError) as e:
ClubSchema(only=("name",)).load(dict(name=" "))
errors = e.value.messages
assert "name" in errors
assert "Must not be empty." in errors.get("name")
def test_deserialization_passes_for_valid_name():
data = ClubSchema(only=("name",)).load(dict(name=" foo ")).data
assert data["name"] == "foo"
def test_deserialization_passes_for_valid_website():
data = ClubSchema(partial=True).load(dict(website="https://skylines.aero")).data
assert data["website"] == "https://skylines.aero"
def test_deserialization_passes_for_empty_website():
data = ClubSchema(partial=True).load(dict(website="")).data
assert data["website"] == ""
def test_deserialization_passes_for_null_website():
data = ClubSchema(partial=True).load(dict(website=None)).data
assert data["website"] is None
def test_deserialization_fails_for_invalid_website():
with pytest.raises(ValidationError) as e:
|
ClubSchema(partial=True).load(dict(website="foo"))
errors = e.value.messages
assert "website" in errors
assert "Not a valid URL." in errors.get(
|
"website")
def test_serialization_passes_for_invalid_website():
data = ClubSchema().dump(dict(website="foobar")).data
assert data["website"] == "foobar"
|
AsgerPetersen/QGIS
|
python/plugins/processing/algs/lidar/lastools/lasinfoPro.py
|
Python
|
gpl-2.0
| 5,416
| 0.001846
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
lasinfoPro.py
---------------------
Date : October 2014 and May 2016
Copyright : (C) 2014 by Martin Isenburg
Email : martin near rapidlasso point com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Martin Isenburg'
__date__ = 'October 2014'
__copyright__ = '(C) 2014, Martin Isenburg'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from .LAStoolsUtils import LAStoolsUtils
from .LAStoolsAlgorithm import LAStoolsAlgorithm
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterNumber
class lasinfoPro(LAStoolsAlgorithm):
COMPUTE_DENSITY = "COMPUTE_DENSITY"
REPAIR_BB = "REPAIR_BB"
REPAIR_COUNTERS = "REPAIR_COUNTERS"
HISTO1 = "HISTO1"
HISTO2 = "HISTO2"
HISTO3 = "HISTO3"
HISTOGRAM = ["---", "x", "y", "z", "intensity", "classification", "scan_angle", "user_data", "point_source", "gps_time", "X", "Y", "Z"]
HISTO1_BIN = "HISTO1_BIN"
HISTO2_BIN = "HISTO2_BIN"
HISTO3_BIN = "HISTO3_BIN"
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('lasinfoPro')
self.group, self.i18n_group = self.trAlgorithm('LAStools Production')
self.addParametersPointInputFolderGUI()
self.addParameter(ParameterBoolean(lasinfoPro.COMPUTE_DENSITY,
self.tr("compute density"), False))
self.addParameter(ParameterBoolean(lasinfoPro.REPAIR_BB,
self.tr("repair bounding box"), False))
self.addParameter(ParameterBoolean(lasinfoPro.REPAIR_COUNTERS,
self.tr("repair counters"), False))
self.addParameter(ParameterSelection(lasinfoPro.HISTO1,
self.tr("histogram"), lasinfoPro.HISTOGRAM, 0))
self.addParameter(ParameterNumber(lasinfoPro.HISTO1_BIN,
self.tr("bin size"), 0, None, 1.0))
self.addParameter(ParameterSelection(lasinfoPro.HISTO2,
self.tr("histogram"), lasinfoPro.HISTOGRAM, 0))
self.addParameter(ParameterNumber(lasinfoPro.HISTO2_BIN,
self.tr("bin size"), 0, None, 1.0))
self.addParameter(ParameterSelection(lasinfoPro.HISTO3,
self.tr("histogram"), lasinfoPro.HISTOGRAM, 0))
self.addParameter(ParameterNumber(lasinfoPro.HISTO3_BIN,
self.tr("bin size"), 0, None, 1.0))
self.addParametersOutputDirectoryGUI()
self.addParametersOutputAppendixGUI()
self.addParametersAdditionalGUI()
self.addParametersCoresGUI()
self.addParametersVerboseGUI()
def processAlgorithm(self, progress):
if (LAStoolsUtils.hasWine()):
|
commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "
|
lasinfo.exe")]
else:
commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasinfo")]
self.addParametersVerboseCommands(commands)
self.addParametersPointInputFolderCommands(commands)
if self.getParameterValue(lasinfoPro.COMPUTE_DENSITY):
commands.append("-cd")
if self.getParameterValue(lasinfoPro.REPAIR_BB):
commands.append("-repair_bb")
if self.getParameterValue(lasinfoPro.REPAIR_COUNTERS):
commands.append("-repair_counters")
histo = self.getParameterValue(lasinfoPro.HISTO1)
if histo != 0:
commands.append("-histo")
commands.append(lasinfoPro.HISTOGRAM[histo])
commands.append(unicode(self.getParameterValue(lasinfoPro.HISTO1_BIN)))
histo = self.getParameterValue(lasinfoPro.HISTO2)
if histo != 0:
commands.append("-histo")
commands.append(lasinfoPro.HISTOGRAM[histo])
commands.append(unicode(self.getParameterValue(lasinfoPro.HISTO2_BIN)))
histo = self.getParameterValue(lasinfoPro.HISTO3)
if histo != 0:
commands.append("-histo")
commands.append(lasinfoPro.HISTOGRAM[histo])
commands.append(unicode(self.getParameterValue(lasinfoPro.HISTO3_BIN)))
self.addParametersOutputDirectoryCommands(commands)
self.addParametersOutputAppendixCommands(commands)
commands.append("-otxt")
self.addParametersAdditionalCommands(commands)
self.addParametersCoresCommands(commands)
LAStoolsUtils.runLAStools(commands, progress)
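# Illustrative example (assumed paths and parameter values, not produced by the
# original code): with "compute density" enabled and one intensity histogram
# with bin size 5, the command list assembled above could resemble
#   [".../bin/lasinfo", ..., "-cd", "-histo", "intensity", "5.0", ..., "-otxt", ...]
# where "..." stands for the flags contributed by the shared helper methods
# (input folder, output directory, appendix, cores, verbosity) before the list
# is handed to LAStoolsUtils.runLAStools().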
|
merc-devel/merc
|
merc/application.py
|
Python
|
mit
| 4,731
| 0.013316
|
import asyncio
import datetime
import logging
import signal
import yaml
import passlib.context
from merc import config
from merc import config_format
from merc import channel
from merc import feature
from merc import server
from merc import user
from merc import util
logger = logging.getLogger(__name__)
class Application(object):
def __init__(self, config_filename, loop=None):
if loop is None:
loop = asyncio.get_event_loop()
self.loop = loop
self.creation_time = datetime.datetime.now()
self.features = feature.FeatureLoader(self)
self.users = user.UserStore(self)
self.channels = channel.ChannelStore(self)
self.network = server.Network(self)
self.crypt_context = None
self.config = None
self.config_filename = config_filename
self.reload_config()
self.register_signal_handlers()
@property
def server(self):
return self.network.local
def check_config(self, cfg):
config.validate(cfg, config_format.Config)
def reload_config(self):
self.features.unload_all()
with open(self.config_filename, "r") as f:
config = yaml.safe_load(f)
try:
self.check_config(config)
for feature_name in config["features"]:
self.features.load(feature_name)
self.features.check_config(config)
except Exception:
logger.critical("Configuration invalid.")
self.features.unload_all()
if self.config:
logger.critical("Reloading old configuration.")
for feature_name in self.config["features"]:
self.features.load(feature_name)
raise
else:
self.config = config
finally:
if self.config:
self.update_from_config()
def update_from_config(self):
self.network.update_local(
self.loop,
self.config["server"]["name"],
self.config["server"]["description"],
self.config["server"]["sid"])
self.crypt_context = passlib.context.CryptContext(
schemes=self.config["crypto"]["hash_schemes"])
def rehash(self):
@asyncio.coroutine
def coro():
yield from self.unbind()
self.reload_config()
yield from self.bind()
return asyncio.async(coro(), loop=self.loop)
@asyncio.coroutine
def bind(self):
yield from self.network.local.bind(self, self.config["bind"])
@asyncio.coroutine
def unbind(self):
yield from self.network.local.unbind()
@property
def version(self):
return util.get_version()
@property
def network_name(self):
return self.config["server"]["network_name"]
@property
def admin_location(self):
return self.config["admin"]["location"]
@property
def admin_location_fine(self):
return self.config["admin"]["location_fine"]
@property
def admin_name(self):
|
return self.config["admin"]["name"]
@property
def admin_email(self):
return self.config["admin"]["email"]
def register_signal_handlers(self):
signal.signal(signal.SIGHUP, lambda signum, frame: self.rehash())
def run_hooks(self, hook_name, *args, **kwargs):
for hook in self.features.get_hooks(hook_name):
hook(self, *args, **kwargs)
def get_feature_locals(self, feature):
return self.features[feature
|
.NAME].server_locals
def start(self):
logger.info("Welcome to merc-{}, running for {} ({}) on network {}.".format(
util.get_version(), self.config["server"]["name"],
self.config["server"]["sid"], self.config["server"]["network_name"]))
self.loop.run_until_complete(self.bind())
self._autoconnect_links()
try:
self.loop.run_forever()
except KeyboardInterrupt:
pass
self.loop.run_until_complete(self.unbind())
self.loop.close()
def _autoconnect_links(self):
for server_name, link_spec in self.config["links"].items():
if link_spec["autoconnect"]:
self.network.connect(server_name)
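# Minimal configuration sketch inferred from the keys read above (illustrative
# only; the authoritative schema lives in merc.config_format):
#
#   server:
#     name: irc.example.org
#     description: An example merc server
#     sid: 0AA
#     network_name: ExampleNet
#   admin:
#     location: Example location
#     location_fine: Example fine location
#     name: Example Admin
#     email: admin@example.org
#   crypto:
#     hash_schemes: [sha256_crypt]
#   features: []
#   bind: []        # listener specs passed to network.local.bind()
#   links: {}       # server_name -> {autoconnect: bool, ...}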
def main():
import argparse
import coloredlogs
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--config", "-c", help="file to load configuration from",
default="merc.conf")
parser.add_argument("--verbose", "-v", help="enable verbose (debug) logging",
action="store_true", default=False)
args = parser.parse_args()
coloredlogs.install(level=logging.DEBUG if args.verbose else logging.INFO)
logging.getLogger("asyncio").setLevel(logging.WARN)
try:
app = Application(args.config)
app.start()
except config.ParseError as e:
logger.fatal('Could not load configuration file, aborting.')
logger.fatal(e)
except Exception as e:
logger.fatal('Could not initialize merc, aborting.')
logger.fatal(e)
|
anhstudios/swganh
|
data/scripts/templates/object/mobile/shared_dressed_deathstar_debris_cultist_hum_m_02.py
|
Python
|
mit
| 464
| 0.047414
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
####
|
PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_dressed_deathstar_debris_cultist_hum_m_02.iff"
result.attribute_template_id = 9
result.stfName("obj_n","unknown_creature
|
")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
fjruizruano/ngs-protocols
|
divsum_analysis.py
|
Python
|
gpl-3.0
| 974
| 0.011294
|
#!/usr/bin/python
import sys
print "divsum_analysis.py DivsumFile NumberOfNucleotides"
try:
file = sys.argv[1]
except:
file = raw_input("Introduce RepeatMasker's Divsum file: ")
try:
nucs = sys.argv[2]
except:
nucs = raw_input("Introduce number of analysed nucleotides: ")
nucs = int(nucs)
data = open(file).readlines()
s_matrix = data.index("Coverage for each repeat class and divergence (Kimura)\n")
matrix = []
elements = data[s_matrix+1]
elements = elements.split()
for element in elements[1:]:
matrix.append([element,[]])
n_el = len(matrix)
for line in data[s_matrix+2:]:
# print line
info = line.split()
info = info[1:]
for n in range(0,n_
|
el):
matrix[n][1].append(int(info[n]))
abs = open(file+".abs", "w")
rel = open(file+".rel", "w")
for n in range(0,n_el):
ab
|
s.write("%s\t%s\n" % (matrix[n][0], sum(matrix[n][1])))
rel.write("%s\t%s\n" % (matrix[n][0], round(1.0*sum(matrix[n][1])/nucs,100)))
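# Usage sketch (illustrative file name and genome size): running
#   python divsum_analysis.py species.divsum 1200000000
# writes species.divsum.abs (the coverage summed per repeat class) and
# species.divsum.rel (the same sums divided by the number of analysed
# nucleotides).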
|
metabrainz/listenbrainz-server
|
listenbrainz_spark/recommendations/recording/recommend.py
|
Python
|
gpl-2.0
| 19,039
| 0.004254
|
"""
This script is responsible for generating recommendations for the users. The general flow is as follows:
The best_model saved in HDFS is loaded with the help of model_id which is fetched from model_metadata_df.
`spark_user_id` and `recording_id` are fetched from top_artist_candidate_set_df and are given as input to the
recommender. An RDD of `user`, `product` and `rating` is returned from the recommender which is later converted to
a dataframe by filtering top X (an int supplied as an argument to the script) recommendations for all users sorted on rating
and fields renamed as `spark_user_id`, `recording_id` and `rating`. The ratings are scaled so that they lie between 0 and 1.
This dataframe is joined with recordings_df on recording_id to get the recording mbids which are then sent over the queue.
The same process is done for similar artist candidate set.
"""
import logging
import time
from py4j.protocol import Py4JJavaError
import listenbrainz_spark
from listenbrainz_spark import utils, path
from listenbrainz_spark.exceptions import (PathNotFoundException,
FileNotFetchedException,
SparkSessionNotInitializedException,
RecommendationsNotGeneratedException,
EmptyDataframeExcpetion)
from listenbrainz_spark.recommendations.recording.train_models import get_model_path
from listenbrainz_spark.recommendations.recording.candidate_sets import _is_empty_dataframe
from pyspark.sql import Row
import pyspark.sql.functions as func
from pyspark.sql.window import Window
from pyspark.sql.functions import col, udf, row_number
from pyspark.sql.types import DoubleType
from pyspark.mllib.recommendation import MatrixFactorizationModel
logger = logging.getLogger(__name__)
class RecommendationParams:
def __init__(self, recordings_df, model, top_artist_candidate_set_df, similar_artist_candidate_set_df,
recommendation_top_artist_limit, recommendation_similar_artist_limit):
self.recordings_df = recordings_df
self.model = model
self.top_artist_candidate_set_df = top_artist_candidate_set_df
self.similar_artist_candidate_set_df = similar_artist_candidate_set_df
self.recommendation_top_artist_limit = recommendation_top_artist_limit
self.recommendation_similar_artist_limit = recommendation_similar_artist_limit
def get_most_recent_model_id():
""" Get model id of recently created model.
Returns:
model_id (str): Model identification string.
"""
try:
model_metadata = utils.read_files_from_HDFS(path.RECOMMENDATION_RECORDING_MODEL_METADATA)
except PathNotFoundException as err:
logger.error(str(err), exc_info=True)
raise
except FileNotFetchedException as err:
logger.error(str(err), exc_info=True)
raise
latest_ts = model_metadata.select(func.max('model_created').alias('model_created')).take(1)[0].model_created
model_id = model_metadata.select('model_id') \
.where(col('model_created') == latest_ts).take(1)[0].model_id
return model_id
def load_model():
""" Load model from given path in HDFS.
"""
model_id = get_most_recent_model_id()
dest_path = get_model_path(model_id)
try:
model = MatrixFactorizationModel.load(listenbrainz_spark.context, dest_path)
return model
except Py4JJavaError as err:
logger.error('Unable to load model "{}"\n{}\nAborting...'.format(model_id, str(err.java_exception)),
|
exc_info=True)
raise
def get_recording_mbids(params: Rec
|
ommendationParams, recommendation_df, users_df):
""" Get recording mbids corresponding to recommended recording ids sorted on rating.
Args:
params: RecommendationParams class object.
recommendation_df: Dataframe of spark_user_id, recording id and rating.
users_df : user_id and spark_user_id of active users.
Returns:
dataframe of recommended recording mbids and related info.
"""
df = params.recordings_df.join(recommendation_df, 'recording_id', 'inner') \
.select('rating',
'recording_mbid',
'spark_user_id')
recording_mbids_df = df.join(users_df, 'spark_user_id', 'inner')
window = Window.partitionBy('user_id').orderBy(col('rating').desc())
df = recording_mbids_df.withColumn('rank', row_number().over(window)) \
.select('recording_mbid',
'rank',
'rating',
'spark_user_id',
'user_id')
return df
def filter_recommendations_on_rating(df, limit):
""" Filter top X recommendations for each user on rating where X = limit.
Args:
df: Dataframe of user, product and rating.
limit (int): Number of recommendations to be filtered for each user.
Returns:
recommendation_df: Dataframe of spark_user_id, recording_id and rating.
"""
window = Window.partitionBy('user').orderBy(col('rating').desc())
recommendation_df = df.withColumn('rank', row_number().over(window)) \
.where(col('rank') <= limit) \
.select(col('rating'),
col('product').alias('recording_id'),
col('user').alias('spark_user_id'))
return recommendation_df
def generate_recommendations(candidate_set, params: RecommendationParams, limit):
""" Generate recommendations from the candidate set.
Args:
candidate_set (rdd): RDD of spark_user_id and recording_id.
params: RecommendationParams class object.
limit (int): Number of recommendations to be filtered for each user.
Returns:
recommendation_df: Dataframe of spark_user_id, recording_id and rating.
"""
recommendations = params.model.predictAll(candidate_set)
if recommendations.isEmpty():
raise RecommendationsNotGeneratedException('Recommendations not generated!')
df = listenbrainz_spark.session.createDataFrame(recommendations, schema=None)
recommendation_df = filter_recommendations_on_rating(df, limit)
return recommendation_df
def get_scale_rating_udf(rating):
""" Get user defined function (udf) to scale ratings so that they fall in the
range: 0.0 -> 1.0.
Args:
rating (float): score given to recordings by CF.
Returns:
rating udf.
"""
scaled_rating = (rating / 2.0) + 0.5
return round(min(max(scaled_rating, -1.0), 1.0), 3)
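# Worked example of the scaling above (illustrative): a raw CF score of -1.0
# maps to 0.0, 0.0 maps to 0.5 and 1.0 maps to 1.0, with the result rounded to
# three decimal places before it is used as the recommendation rating.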
def scale_rating(df):
""" Scale the ratings column of dataframe so that they fall in the
range: 0.0 -> 1.0.
Args:
df: Dataframe to scale.
Returns:
df: Dataframe with scaled rating.
"""
scaling_udf = udf(get_scale_rating_udf, DoubleType())
df = df.withColumn("scaled_rating", scaling_udf(df.rating)) \
.select(col('recording_id'),
col('spark_user_id'),
col('scaled_rating').alias('rating'))
return df
def get_candidate_set_rdd_for_user(candidate_set_df, users):
""" Get candidate set RDD for a given user.
Args:
candidate_set_df: A dataframe of spark_user_id and recording_id for all users.
users: list of user names to generate recommendations for.
Returns:
candidate_set_rdd: An RDD of spark_user_id and recording_id for a given user.
"""
if users:
candidate_set_user_df = candidate_set_df.select('spark_user_id', 'recording_id') \
.where(col('user_id').isin(users))
else:
candidate_set_user_df = candidate_set_df.select('spark_user_id', 'recording_id')
if _is_empty_dataframe(candidate_set
|
BackupTheBerlios/namingmuse
|
tools/freedb-submit.py
|
Python
|
gpl-2.0
| 936
| 0.001068
|
#!/usr/bin/env python
import os, re, sys
if len(sys.argv) < 2:
print "usage: %s <recordfile>" % sys.argv[0]
sys.exit(1)
# Read record
filename = sys.argv[1]
fd = file(filename)
record = fd.read()
fd.close()
# Update revision
newrecord = []
lbreak = "\r\n"
for line in record.splitlines():
if line.startswith('# Revision:'):
rev = int(line.split(':')[1]) + 1
line = '# Revision: %u' % rev
newrecord.append(line)
newrecord = lbreak.join(newrecord)
# Setup mail values
address = 'freedb-submit@freedb
|
.org'
ident = os.path.splitext(filename)[0]
if not re.search('^[a-z]+ [a-z0-9]{8}$', ident):
sys.exit(ident + " is not a valid freedb `discid genre' pair")
subject = "cddb %s" % ident
# Save updated record
fd = file(filena
|
me, "w")
fd.write(newrecord)
fd.close()
# Send mail
print "Subject:", subject
cmd = 'cat "%s" | mutt -s "%s" %s' % (filename, subject, address)
print "%", cmd
os.system(cmd)
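# Usage sketch (illustrative): for a record saved as "rock 1a2b3c4d.txt" whose
# header contains "# Revision: 3", running
#   python freedb-submit.py "rock 1a2b3c4d.txt"
# bumps the header to "# Revision: 4", rewrites the file and mails it via mutt
# to freedb-submit@freedb.org with the subject "cddb rock 1a2b3c4d".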
|
teamtaverna/core
|
app/api/cruds/utils.py
|
Python
|
mit
| 760
| 0.002632
|
from graphql_relay.node.node import from_global_id
def get_errors(e):
# transform django errors to redux errors
# django: {"key1": [value1], {"key2": [value2]}}
# redux: ["key1", "value1", "key2", "value2"]
fields = e.message_dict.keys()
messages = ['; '.join(m) for m in e.message_dict.values()]
errors = [i for pair in zip(fields, messages) for i in pair]
return errors
|
def get_object(object_name, relayId, otherwise=None):
try:
return object_name.objects.get(pk=from_global_id(relayId)[1])
except:
return otherwise
def load_object(instance, args, exception=['id']):
if instance:
[setattr(instance, key, value) for key, val
|
ue in args.items() if key not in exception]
return instance
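# Illustrative example (assumed model instance): load_object(team, {'id':
# relay_id, 'name': 'Lunch crew'}) copies every key except 'id' onto the
# instance, so only team.name is updated before the caller saves it.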
|
calancha/DIRAC
|
Resources/Computing/SSHComputingElement.py
|
Python
|
gpl-3.0
| 24,359
| 0.036332
|
########################################################################
# $HeadURL$
# File : SSHComputingElement.py
# Author : Dumitru Laurentiu, A.T.
########################################################################
""" SSH (Virtual) Computing Element: For a given IP/host it will send jobs directly through ssh
"""
from DIRAC.Resources.Computing.ComputingElement import ComputingElement
from DIRAC.Resources.Computing.PilotBundle import bundleProxy, writeScript
from DIRAC.Core.Utilities.List import uniqueElements
from DIRAC.Core.Utilities.File import makeGuid
from DIRAC.Core.Utilities.Pfn import pfnparse
from DIRAC import S_OK, S_ERROR
from DIRAC import rootPath
from DIRAC import gLogger
import os, urllib
import shutil, tempfile
from types import StringTypes
__RCSID__ = "$Id$"
CE_NAME = 'SSH'
MANDATORY_PARAMETERS = [ 'Queue' ]
class SSH:
def __init__( self, user = None, host = None, password = None, key = None, parameters = {}, options = "" ):
self.user = user
if not user:
self.user = parameters.get( 'SSHUser', '' )
self.host = host
if not host:
self.host = parameters.get( 'SSHHost', '' )
self.password = password
if not password:
self.password = parameters.get( 'SSHPassword', '' )
self.key = key
if not key:
self.key = parameters.get( 'SSHKey', '' )
self.options = options
if not len(options):
self.options = parameters.get( 'SSHOptions', '' )
self.log = gLogger.getSubLogger( 'SSH' )
def __ssh_call( self, command, timeout ):
try:
import pexpect
expectFlag = True
except:
from DIRAC import shellCall
expectFlag = False
if not timeout:
timeout = 999
if expectFlag:
ssh_newkey = 'Are you sure you want to continue connecting'
try:
child = pexpect.spawn( command, timeout = timeout )
i = child.expect( [pexpect.TIMEOUT, ssh_newkey, pe
|
xpect.EOF, 'assword: '] )
if i == 0: # Timeout
return S_OK( ( -1, child.before, 'SSH login failed' ) )
elif i == 1: # SSH does not have the public key. Just accept it.
child.sendline ( 'yes' )
child.expect ( 'ass
|
word: ' )
i = child.expect( [pexpect.TIMEOUT, 'assword: '] )
if i == 0: # Timeout
return S_OK( ( -1, str( child.before ) + str( child.after ), 'SSH login failed' ) )
elif i == 1:
child.sendline( self.password )
child.expect( pexpect.EOF )
return S_OK( ( 0, child.before, '' ) )
elif i == 2:
# Passwordless login, get the output
return S_OK( ( 0, child.before, '' ) )
if self.password:
child.sendline( self.password )
child.expect( pexpect.EOF )
return S_OK( ( 0, child.before, '' ) )
else:
return S_ERROR( ( -2, child.before, '' ) )
except Exception, x:
res = ( -1 , 'Encountered exception %s: %s' % ( Exception, str( x ) ) )
return S_ERROR( res )
else:
# Try passwordless login
result = shellCall( timeout, command )
# print ( "!!! SSH command: %s returned %s\n" % (command, result) )
if result['Value'][0] == 255:
return S_ERROR ( ( -1, 'Cannot connect to host %s' % self.host, '' ) )
return result
def sshCall( self, timeout, cmdSeq ):
""" Execute remote command via a ssh remote call
"""
command = cmdSeq
if type( cmdSeq ) == type( [] ):
command = ' '.join( cmdSeq )
key = ''
if self.key:
key = ' -i %s ' % self.key
pattern = "'===><==='"
command = 'ssh -q %s -l %s %s %s "echo %s;%s"' % ( key, self.user, self.host, self.options, pattern, command )
self.log.debug( "SSH command %s" % command )
result = self.__ssh_call( command, timeout )
self.log.debug( "SSH command result %s" % str( result ) )
if not result['OK']:
return result
# Take the output only after the predefined pattern
ind = result['Value'][1].find('===><===')
if ind == -1:
return result
status,output,error = result['Value']
output = output[ind+8:]
if output.startswith('\r'):
output = output[1:]
if output.startswith('\n'):
output = output[1:]
result['Value'] = ( status,output,error )
return result
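# Illustrative example (assumed user, host and key): sshCall( 10, 'qstat -Q' )
# for user "dirac" on host "ce.example.org" with key "~/.ssh/id_rsa" builds
#   ssh -q  -i ~/.ssh/id_rsa  -l dirac ce.example.org  "echo '===><===';qstat -Q"
# and everything printed before the '===><===' marker (login banners, MOTD)
# is discarded from the returned output.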
def scpCall( self, timeout, localFile, destinationPath, upload = True ):
""" Execute scp copy
"""
key = ''
if self.key:
key = ' -i %s ' % self.key
if upload:
command = "scp %s %s %s %s@%s:%s" % ( key, self.options, localFile, self.user, self.host, destinationPath )
else:
command = "scp %s %s %s@%s:%s %s" % ( key, self.options, self.user, self.host, destinationPath, localFile )
self.log.debug( "SCP command %s" % command )
return self.__ssh_call( command, timeout )
class SSHComputingElement( ComputingElement ):
#############################################################################
def __init__( self, ceUniqueID ):
""" Standard constructor.
"""
ComputingElement.__init__( self, ceUniqueID )
self.ceType = CE_NAME
self.controlScript = 'sshce'
self.submittedJobs = 0
self.mandatoryParameters = MANDATORY_PARAMETERS
#############################################################################
def _addCEConfigDefaults( self ):
"""Method to make sure all necessary Configuration Parameters are defined
"""
# First assure that any global parameters are loaded
ComputingElement._addCEConfigDefaults( self )
# Now batch system specific ones
if 'ExecQueue' not in self.ceParameters:
self.ceParameters['ExecQueue'] = self.ceParameters.get( 'Queue', '' )
if 'SharedArea' not in self.ceParameters:
#. isn't a good location, move to $HOME
self.ceParameters['SharedArea'] = '$HOME'
if 'BatchOutput' not in self.ceParameters:
self.ceParameters['BatchOutput'] = 'data'
if 'BatchError' not in self.ceParameters:
self.ceParameters['BatchError'] = 'data'
if 'ExecutableArea' not in self.ceParameters:
self.ceParameters['ExecutableArea'] = 'data'
if 'InfoArea' not in self.ceParameters:
self.ceParameters['InfoArea'] = 'info'
if 'WorkArea' not in self.ceParameters:
self.ceParameters['WorkArea'] = 'work'
if 'SubmitOptions' not in self.ceParameters:
self.ceParameters['SubmitOptions'] = '-'
def _reset( self ):
""" Process CE parameters and make necessary adjustments
"""
self.queue = self.ceParameters['Queue']
if 'ExecQueue' not in self.ceParameters or not self.ceParameters['ExecQueue']:
self.ceParameters['ExecQueue'] = self.ceParameters.get( 'Queue', '' )
self.execQueue = self.ceParameters['ExecQueue']
self.log.info( "Using queue: ", self.queue )
self.sharedArea = self.ceParameters['SharedArea']
self.batchOutput = self.ceParameters['BatchOutput']
if not self.batchOutput.startswith( '/' ):
self.batchOutput = os.path.join( self.sharedArea, self.batchOutput )
self.batchError = self.ceParameters['BatchError']
if not self.batchError.startswith( '/' ):
self.batchError = os.path.join( self.sharedArea, self.batchError )
self.infoArea = self.ceParameters['InfoArea']
if not self.infoArea.startswith( '/' ):
self.infoArea = os.path.join( self.sharedArea, self.infoArea )
self.executableArea = self.ceParameters['ExecutableArea']
if not self.executableArea.startswith( '/' ):
self.executableArea = os.path.join( self.sharedArea, self.executableArea )
self.workArea = self.ceParameters['WorkArea']
if not self.workArea.startswith( '/' ):
self.workArea = os.path.join( self.sharedArea, self.workArea )
result = self._prepareRemoteHost()
self.submitOptions = ''
if 'SubmitOptions' in self.ceParameters:
self.submitOpt
|
ebu/PlugIt
|
tests/helpers/pop_server/server.py
|
Python
|
bsd-3-clause
| 4,827
| 0.001243
|
"""
Small POP server. Heavily based on
pypopper: a file-based pop3 server (http://code.activestate.com/recipes/534131-pypopper-python-pop3-server/)
Usage:
python server.py
Will return all mail*.txt in the current folder as mail. Output is also printed.
"""
import logging
import socket
import glob
logging.basicConfig(format="%(message)s")
log = logging.getLogger("pypopper")
log.setLevel(logging.INFO)
class ChatterboxConnection(object):
END = "\r\n"
def __init__(self, conn):
self.conn = conn
def __getattr__(self, name):
return getattr(self.conn, name)
def sendall(self, data, END=END):
if len(data) < 50:
log.debug("send: %r", data)
else:
log.debug("send: %r...", data[:50])
data += END
self.conn.sendall(data)
def recvall(self, END=END):
data = []
while True:
chunk = self.conn.recv(4096)
if END in chunk:
data.append(chunk[:chunk.index(END)])
break
data.append(chunk)
if len(data) > 1:
pair = data[-2] + data[-1]
if END in pair:
data[-2] = pair[:pair.index(END)]
data.pop()
break
log.debug("recv: %r", "".join(data))
return "".join(data)
class Message(object):
def __init__(self, filename):
global MSG_INDEX
msg = open(filename, "r")
try:
self.data = data = msg.read()
self.size = len(data)
self.top, bot = data.split("\r\n\r\n", 1)
self.bot = bot.sp
|
lit("\r\n")
self.index = int(filename.split('mail')[1].split('.txt')[0])
f
|
inally:
msg.close()
def handleUser(data, msgs):
log.info("USER:%s", data.split()[1])
return "+OK user accepted"
def handlePass(data, msgs):
log.info("PASS:%s", data.split()[1])
return "+OK pass accepted"
def handleStat(data, msgs):
return "+OK %i %i" % (len(msgs), sum([msg.size for msg in msgs]))
def handleList(data, msgs):
return "+OK %i messages (%i octets)\r\n%s\r\n." % (len(msgs), sum([msg.size for msg in msgs]), '\r\n'.join(["%i %i" % (msg.index, msg.size,) for msg in msgs]))
def handleTop(data, msgs):
cmd, num, lines = data.split()
lines = int(lines)
msg = msgs[int(num) - 1]
text = msg.top + "\r\n\r\n" + "\r\n".join(msg.bot[:lines])
return "+OK top of message follows\r\n%s\r\n." % text
def handleRetr(data, msgs):
    log.info("RETRIEVE:%s", data.split()[1])
msg = msgs[int(data.split()[1]) - 1]
return "+OK %i octets\r\n%s\r\n." % (msg.size, msg.data)
def handleDele(data, msgs):
log.info("DELETE:%s", data.split()[1])
return "+OK message 1 deleted"
def handleNoop(data, msgs):
return "+OK"
def handleQuit(data, msgs):
return "+OK pypopper POP3 server signing off"
dispatch = dict(
USER=handleUser,
PASS=handlePass,
STAT=handleStat,
LIST=handleList,
TOP=handleTop,
RETR=handleRetr,
DELE=handleDele,
NOOP=handleNoop,
QUIT=handleQuit,
)
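# Illustrative session against this server (client lines prefixed C:, server
# replies prefixed S:; the sizes are made up, messages come from mail<N>.txt):
#   S: +OK pypopper file-based pop3 server ready
#   C: USER alice            S: +OK user accepted
#   C: PASS secret           S: +OK pass accepted
#   C: STAT                  S: +OK 2 3411
#   C: RETR 1                S: +OK 1700 octets ...message... .
#   C: QUIT                  S: +OK pypopper POP3 server signing off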
def serve(host, port, filenames):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port))
try:
if host:
hostname = host
else:
hostname = "localhost"
log.debug("pypopper POP3 serving '%s' on %s:%s", filenames, hostname, port)
while True:
sock.listen(1)
conn, addr = sock.accept()
log.debug('Connected by %s', addr)
try:
msgs = range(0, len(filenames))
for f in filenames:
msg = Message(f)
msgs[msg.index-1] = msg
conn = ChatterboxConnection(conn)
conn.sendall("+OK pypopper file-based pop3 server ready")
while True:
data = conn.recvall()
command = data.split(None, 1)[0]
try:
cmd = dispatch[command]
except KeyError:
conn.sendall("-ERR unknown command")
else:
conn.sendall(cmd(data, msgs))
if cmd is handleQuit:
return
finally:
conn.close()
msgs = None
except (SystemExit, KeyboardInterrupt):
log.info("pypopper stopped")
except Exception as ex:
log.critical("fatal error", exc_info=ex)
finally:
sock.shutdown(socket.SHUT_RDWR)
sock.close()
if __name__ == "__main__":
filenames = glob.glob("./mail[0-9]*.txt")
serve("127.0.0.1", 22110, filenames)
|
sacharya/nova
|
nova/tests/api/openstack/compute/plugins/v3/test_admin_password.py
|
Python
|
apache-2.0
| 5,987
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from nova.api.openstack.compute.plugins.v3 import admin_password
from nova.compute import api as compute_api
from nova import exception
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
def fake_get(self, context, id):
return {'uuid': id}
def fake_get_non_existed(self, context, id):
raise exception.InstanceNotFound(instance_id=id)
def fake_set_admin_password(self, context, instance, password=None):
pass
def fake_set_admin_password_failed(self, context, instance, password=None):
raise exception.InstancePasswordSetFailed(instance=instance, reason='')
def fake_set_admin_password_non_implement(self, context, instance,
password=None):
raise NotImplementedError()
class AdminPasswordTest(test.NoDBTestCase):
def setUp(self):
super(AdminPasswordTest, self).setUp()
self.stubs.Set(compute_api.API, 'set_admin_password',
fake_set_admin_password)
self.stubs.Set(compute_api.API, 'get', fake_get)
self.app = fakes.wsgi_app_v3(init_only=('servers',
admin_password.ALIAS))
def _make_request(self, url, body):
req = webob.Request.blank(url)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.content_type = 'application/json'
res = req.get_response(self.app)
return res
def test_change_password(self):
url = '/v3/servers/1/action'
body = {'change_password': {'admin_password': 'test'}}
res = self._make_request(url, body)
self.assertEqual(res.status_int, 204)
def test_change_password_empty_string(self):
url = '/v3/servers/1/action'
body = {'change_password': {'admin_password': ''}}
res = self._make_request(url, body)
self.assertEqual(res.status_int, 204)
def test_change_password_with_non_implement(self):
url = '/v3/servers/1/action'
body = {'change_password': {'admin_password': 'test'}}
self.stubs.Set(compute_api.API, 'set_admin_password',
fake_set_admin_password_non_implement)
res = self._make_request(url, body)
self.assertEqual(res.status_int, 501)
def test_change_password_with_non_existed_instance(self):
url = '/v3/servers/1/action'
body = {'change_password': {'admin_password': 'test'}}
self.stubs.Set(compute_api.API, 'get', fake_get_non_existed)
res = self._make_request(url, body)
self.assertEqual(res.status_int, 404)
def test_change_password_with_non_string_password(self):
url = '/v3/servers/1/action'
body = {'change_password': {'admin_password': 1234}}
res = self._make_request(url, body)
self.assertEqual(res.status_int, 400)
def test_change_password_failed(self):
url = '/v3/servers/1/action'
body = {'change_password': {'admin_password': 'test'}}
self.stubs.Set(compute_api.API, 'set_admin_password',
fake_set_admin_password_failed)
res = self._make_request(url, body)
self.assertEqual(res.status_int, 409)
def test_change_password_without_admin_password(self):
url = '/v3/servers/1/action'
body = {'change_password': {}}
res = self._make_request(url, body)
self.assertEqual(res.status_int, 400)
def test_change_password_none(self):
url = '/v3/servers/1/action'
body = {'change_password': None}
res = self._make_request(url, body)
self.assertEqual(res.status_int, 400)
class AdminPasswordXMLTest(test.NoDBTestCase):
def setUp(self):
super(AdminPasswordXMLTest, self).setUp()
self.deserializer = admin_password.ChangePasswordDeserializer()
def test_change_password_deserializer(self):
request = '<change_password admin_password="1"></change_password>'
expected = {'body': {'change_password': {'admin_password': '1'}}}
res = self.deserializer.default(request)
self.assertEqual(res, expected)
def test_change_password_deserializer_without_admin_password(self):
|
request = '<change_password></change_password>'
expected = {'body': {'change_password': None}}
res = self.deserializer.default(request)
self.assertEqual(res, expected)
def test_change_pass_no_pass(self):
request = """<?xml version="1.0" encoding="UTF-8"?>
<change_password
xmlns="http://docs.openstack.org/compute/api/v1.1"/> """
request = self.deserializer
|
.default(request)
expected = {
"change_password": None
}
self.assertEqual(request['body'], expected)
def test_change_pass_empty_pass(self):
request = """<?xml version="1.0" encoding="UTF-8"?>
<change_password
xmlns="http://docs.openstack.org/compute/api/v1.1"
admin_password=""/> """
request = self.deserializer.default(request)
expected = {
"change_password": {
"admin_password": "",
},
}
self.assertEqual(request['body'], expected)
|
kubevirt/client-python
|
test/test_v1_i6300_esb_watchdog.py
|
Python
|
apache-2.0
| 927
| 0.001079
|
# coding: utf-8
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: kubevirt-dev@googlegroups.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubevirt
from kubevirt.rest import ApiException
from kubevirt.models.v1_i6300_esb_watchdog import V1I6300ESBWatchdog
class TestV1I6300ESBWatchdog(unittest.TestCase):
""" V1I6300ESBWatchdog unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1I6300ESBWatchdog(self):
"""
Test V1I6300ESBWatchdog
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubevirt.models.v1_i6300
|
_esb_watchdog.V1I6300ESB
|
Watchdog()
pass
if __name__ == '__main__':
unittest.main()
|
openstack/mistral
|
mistral/tests/unit/api/v2/test_cron_triggers.py
|
Python
|
apache-2.0
| 8,830
| 0
|
# Copyright 2014 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import json
from unittest import mock
import sqlalchemy as sa
from mistral.db.v2 import api as db_api
from mistral.db.v2.sqlalchemy import models
from mistral import exceptions as exc
from mistral.services import security
from mistral.tests.unit.api import base
from mistral.tests.unit import base as unit_base
WF = models.WorkflowDefinition(
spec={
'version': '2.0',
'name': 'my_wf',
'tasks': {
'task1': {
'action': 'std.noop'
}
}
}
)
WF.update({'id': '12
|
3e4567-e89b-12d3-a456-426655440000', 'name': 'my_wf'})
TRIGGER = {
'id': '02abb422-55ef-4bb2-8cb9-217a583a6a3f',
'name': 'my_cron_trigger',
'pattern': '* * * * *',
'
|
workflow_name': WF.name,
'workflow_id': '123e4567-e89b-12d3-a456-426655440000',
'workflow_input': '{}',
'workflow_params': '{}',
'scope': 'private',
'remaining_executions': 42
}
trigger_values = copy.deepcopy(TRIGGER)
trigger_values['workflow_input'] = json.loads(
trigger_values['workflow_input'])
trigger_values['workflow_params'] = json.loads(
trigger_values['workflow_params'])
TRIGGER_DB = models.CronTrigger()
TRIGGER_DB.update(trigger_values)
TRIGGER_DB_WITH_PROJECT_ID = TRIGGER_DB.get_clone()
TRIGGER_DB_WITH_PROJECT_ID.project_id = '<default-project>'
MOCK_WF = mock.MagicMock(return_value=WF)
MOCK_TRIGGER = mock.MagicMock(return_value=TRIGGER_DB)
MOCK_TRIGGERS = mock.MagicMock(return_value=[TRIGGER_DB])
MOCK_DELETE = mock.MagicMock(return_value=1)
MOCK_EMPTY = mock.MagicMock(return_value=[])
MOCK_NOT_FOUND = mock.MagicMock(side_effect=exc.DBEntityNotFoundError())
MOCK_DUPLICATE = mock.MagicMock(side_effect=exc.DBDuplicateEntryError())
class TestCronTriggerController(base.APITest):
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
def test_get(self):
resp = self.app.get('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(200, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
@mock.patch.object(db_api, 'get_cron_trigger')
def test_get_operational_error(self, mocked_get):
mocked_get.side_effect = [
# Emulating DB OperationalError
sa.exc.OperationalError('Mock', 'mock', 'mock'),
TRIGGER_DB # Successful run
]
resp = self.app.get('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(200, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
@mock.patch.object(db_api, "get_cron_trigger",
return_value=TRIGGER_DB_WITH_PROJECT_ID)
def test_get_within_project_id(self, mock_get):
resp = self.app.get('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(200, resp.status_int)
self.assertTrue('project_id' in resp.json)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_NOT_FOUND)
def test_get_not_found(self):
resp = self.app.get(
'/v2/cron_triggers/my_cron_trigger',
expect_errors=True
)
self.assertEqual(404, resp.status_int)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
def test_get_by_id(self):
resp = self.app.get(
"/v2/cron_triggers/02abb422-55ef-4bb2-8cb9-217a583a6a3f")
self.assertEqual(200, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
@mock.patch.object(db_api, "create_cron_trigger")
def test_post(self, mock_mtd):
mock_mtd.return_value = TRIGGER_DB
resp = self.app.post_json('/v2/cron_triggers', TRIGGER)
self.assertEqual(201, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
self.assertEqual(1, mock_mtd.call_count)
values = mock_mtd.call_args[0][0]
self.assertEqual('* * * * *', values['pattern'])
self.assertEqual(42, values['remaining_executions'])
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
@mock.patch.object(db_api, "create_cron_trigger", MOCK_DUPLICATE)
@mock.patch.object(security, "delete_trust")
def test_post_dup(self, delete_trust):
resp = self.app.post_json(
'/v2/cron_triggers', TRIGGER, expect_errors=True
)
self.assertEqual(1, delete_trust.call_count)
self.assertEqual(409, resp.status_int)
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
@mock.patch.object(db_api, "create_cron_trigger", MOCK_DUPLICATE)
def test_post_same_wf_and_input(self):
trig = TRIGGER.copy()
trig['name'] = 'some_trigger_name'
resp = self.app.post_json(
'/v2/cron_triggers', trig, expect_errors=True
)
self.assertEqual(409, resp.status_int)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
@mock.patch.object(db_api, "delete_cron_trigger", MOCK_DELETE)
@mock.patch.object(security, "delete_trust")
def test_delete(self, delete_trust):
resp = self.app.delete('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(1, delete_trust.call_count)
self.assertEqual(204, resp.status_int)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
@mock.patch.object(db_api, "delete_cron_trigger", MOCK_DELETE)
@mock.patch.object(security, "delete_trust")
def test_delete_by_id(self, delete_trust):
resp = self.app.delete(
'/v2/cron_triggers/02abb422-55ef-4bb2-8cb9-217a583a6a3f')
self.assertEqual(1, delete_trust.call_count)
self.assertEqual(204, resp.status_int)
@mock.patch.object(db_api, "delete_cron_trigger", MOCK_NOT_FOUND)
def test_delete_not_found(self):
resp = self.app.delete(
'/v2/cron_triggers/my_cron_trigger',
expect_errors=True
)
self.assertEqual(404, resp.status_int)
@mock.patch.object(db_api, "get_cron_triggers", MOCK_TRIGGERS)
def test_get_all(self):
resp = self.app.get('/v2/cron_triggers')
self.assertEqual(200, resp.status_int)
self.assertEqual(1, len(resp.json['cron_triggers']))
self.assertDictEqual(TRIGGER, resp.json['cron_triggers'][0])
@mock.patch.object(db_api, 'get_cron_triggers')
def test_get_all_operational_error(self, mocked_get_all):
mocked_get_all.side_effect = [
# Emulating DB OperationalError
sa.exc.OperationalError('Mock', 'mock', 'mock'),
[TRIGGER_DB] # Successful run
]
resp = self.app.get('/v2/cron_triggers')
self.assertEqual(200, resp.status_int)
self.assertEqual(1, len(resp.json['cron_triggers']))
self.assertDictEqual(TRIGGER, resp.json['cron_triggers'][0])
@mock.patch.object(db_api, 'get_cron_triggers')
@mock.patch('mistral.context.MistralContext.from_environ')
def test_get_all_projects_admin(self, mock_context, mock_get_triggers):
admin_ctx = unit_base.get_context(admin=True)
mock_context.return_value = admin_ctx
resp = self.app.get('/v2/cron_triggers?all_projects=true')
self.assertEqual(200, resp.status_int)
self.assertTrue(mock_get_triggers.call_args[1].get('insecure', False))
@mock.patch.object(db_api, 'get_cron_triggers')
@mock.patch('mistral.context.MistralContext.from_environ')
def test_get_all_filter_project(self, mock_context, mock_get_triggers):
admin_ctx = unit_base.get_context(admin=True)
mock_context.return_value = admin_ctx
resp = self.app.get(
|
charismaticchiu/Robotics
|
ArNetworking/pythonExamples/drawingsExample.py
|
Python
|
gpl-2.0
| 6,718
| 0.018904
|
"""
MobileRobots Advanced Robotics Interface for Applications (ARIA)
Copyright (C) 2004, 2005 ActivMedia Robotics LLC
Copyright (C) 2006, 2007, 2008, 2009 MobileRobots Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
alo
|
ng with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
If you wish to redistribute ARIA under different terms, contact
MobileRobots for information about a commercial version of ARIA at
robots@mobilerobots.com or
MobileRobots Inc, 10 Columbia Drive, Amherst, NH 03031; 800-639-9481
"""
from AriaPy import *
from ArNetworkingPy import *
import sys
from math import sin
# This is an exampl
|
e server that shows how to draw arbitrary figures in a
# client (e.g. MobileEyes).
# These are callbacks that respond to client requests for the drawings'
# geometry data.
def exampleHomeDrawingNetCallback(client, requestPkt):
print "exampleHomeDrawingNetCallback"
reply = ArNetPacket()
# 7 Vertices
reply.byte4ToBuf(7)
# Centered on 0,0.
# X: Y:
reply.byte4ToBuf(-500); reply.byte4ToBuf(500); # Vertex 1
reply.byte4ToBuf(-500); reply.byte4ToBuf(-500); # Vertex 2
reply.byte4ToBuf(500); reply.byte4ToBuf(-500); # Vertex 3
reply.byte4ToBuf(500); reply.byte4ToBuf(500); # Vertex 4
reply.byte4ToBuf(0); reply.byte4ToBuf(1000); # Vertex 5
reply.byte4ToBuf(-500); reply.byte4ToBuf(500); # Vertex 6
reply.byte4ToBuf(500); reply.byte4ToBuf(500); # Vertex 7
client.sendPacketUdp(reply)
print "exampleHomeDrawingNetCallback Done."
def exampleDotsDrawingNetCallback(client, requestPkt):
reply = ArNetPacket()
tik = ArUtil.getTime() % 200
t = tik / 5.0
# Three dots
reply.byte4ToBuf(3)
# Dot 1:
reply.byte4ToBuf(3000); # X coordinate (mm)
reply.byte4ToBuf((int) (sin(t) * 1000));# Y
# Dot 2:
reply.byte4ToBuf(3500); # X
reply.byte4ToBuf((int) (sin(t+500) * 1000));# Y
# Dot 3:
reply.byte4ToBuf(4000); # X
reply.byte4ToBuf((int) (sin(t+1000) * 1000));# Y
client.sendPacketUdp(reply)
def exampleXDrawingNetCallback(client, requestPkt):
reply = ArNetPacket()
# X marks the spot. 2 line segments, so 4 vertices:
reply.byte4ToBuf(4)
# Segment 1:
reply.byte4ToBuf(-4250); # X1
reply.byte4ToBuf(250); # Y1
reply.byte4ToBuf(-3750); # X2
reply.byte4ToBuf(-250); # Y2
# Segment 2:
reply.byte4ToBuf(-4250); # X1
reply.byte4ToBuf(-250); # Y1
reply.byte4ToBuf(-3750); # X2
reply.byte4ToBuf(250); # Y2
client.sendPacketUdp(reply)
def exampleArrowsDrawingNetCallback(client, requestPkt):
# 1 arrow that points at the robot
reply = ArNetPacket()
reply.byte4ToBuf(1) # 1 arrow
reply.byte4ToBuf(0); # Pos. X
reply.byte4ToBuf(700); # Pos. Y
client.sendPacketUdp(reply)
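# Note (illustrative summary of the callbacks above): each reply packet holds a
# 4-byte vertex/point count followed by 4-byte X/Y coordinate pairs in
# millimetres, and is interpreted according to the shape name given when the
# drawing is registered below ("polyLine", "polyDots", "polySegments",
# "polyArrows").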
# Main program:
Aria.init()
robot = ArRobot()
server = ArServerBase()
parser = ArArgumentParser(sys.argv)
simpleConnector = ArSimpleConnector(parser)
simpleOpener = ArServerSimpleOpener(parser)
parser.loadDefaultArguments()
if not Aria.parseArgs() or not parser.checkHelpAndWarnUnparsed():
Aria.logOptions()
Aria.exit(1)
if not simpleOpener.open(server):
if simpleOpener.wasUserFileBad():
print "Error: Bad user/password/permissions file."
else:
print "Error: Could not open server port. Use -help to see options."
Aria.exit(1)
# Devices
sonarDev = ArSonarDevice()
robot.addRangeDevice(sonarDev)
irs = ArIRs()
robot.addRangeDevice(irs)
bumpers = ArBumpers()
robot.addRangeDevice(bumpers)
sick = ArSick()
robot.addRangeDevice(sick);
# attach services to the server
serverInfoRobot = ArServerInfoRobot(server, robot)
serverInfoSensor = ArServerInfoSensor(server, robot)
# This is the service that provides drawing data to the client.
drawings = ArServerInfoDrawings(server)
# Convenience function that sets up drawings for all the robot's current
# range devices (using default shape and color info)
drawings.addRobotsRangeDevices(robot)
# Add our custom drawings
linedd = ArDrawingData("polyLine", ArColor(255, 0, 0), 2, 49) # shape name, color, size, layer
drawings.addDrawing( linedd, "exampleDrawing_Home", exampleHomeDrawingNetCallback)
dotsdd = ArDrawingData("polyDots", ArColor(0, 255, 0), 250, 48)
drawings.addDrawing(dotsdd, "exampleDrawing_Dots", exampleDotsDrawingNetCallback)
segdd = ArDrawingData("polySegments", ArColor(0, 0, 0), 4, 52)
drawings.addDrawing( segdd, "exampleDrawing_XMarksTheSpot", exampleXDrawingNetCallback)
ardd = ArDrawingData("polyArrows", ArColor(255, 0, 255), 500, 100)
drawings.addDrawing( ardd, "exampleDrawing_Arrows", exampleArrowsDrawingNetCallback)
# modes for moving the robot
modeStop = ArServerModeStop(server, robot)
modeDrive = ArServerModeDrive(server, robot)
modeRatioDrive = ArServerModeRatioDrive(server, robot)
modeWander = ArServerModeWander(server, robot)
modeStop.addAsDefaultMode()
modeStop.activate()
# set up some simple commands ("custom commands")
commands = ArServerHandlerCommands(server)
uCCommands = ArServerSimpleComUC(commands, robot)
loggingCommands = ArServerSimpleComMovementLogging(commands, robot)
configCommands = ArServerSimpleComLogRobotConfig(commands, robot)
# add the commands to enable and disable safe driving to the simple commands
modeDrive.addControlCommands(commands)
# Connect to the robot.
if not simpleConnector.connectRobot(robot):
print "Error: Could not connect to robot... exiting"
Aria.shutdown()
sys.exit(1)
# set up the laser before handing it to the laser mode
simpleConnector.setupLaser(sick)
robot.enableMotors()
# start the robot cycle running in a background thread
robot.runAsync(True)
# start the laser processing cycle in a background thread
sick.runAsync()
# connect the laser if it was requested
if not simpleConnector.connectLaser(sick):
print "Error: Could not connect to laser... exiting"
Aria.shutdown()
sys.exit(1)
# log whatever we wanted to before the runAsync
simpleOpener.checkAndLog()
# run the server thread in the background
server.runAsync()
print "Server is now running on port %d..." % (simpleOpener.getPort())
robot.waitForRunExit()
Aria.shutdown()
|
rogeliorv/asuna
|
manage.py
|
Python
|
apache-2.0
| 250
| 0.004
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "asuna.settings")
from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
siouka/dmind
|
plugin.video.tvpor/resources/lib/unwise.py
|
Python
|
gpl-2.0
| 2,506
| 0.027933
|
# -*- coding: latin-1 -*-
import re
import math
import urllib
from string import join
import traceback, sys
class JsUnwiser:
def unwiseAll(self, data):
try:
in_data=data
sPattern = 'eval\\(function\\(w,i,s,e\\).*?}\\((.*?)\\)'
wise_data=re.compile(sPattern).findall(in_data)
for wise_val in wise_data:
unpack_val=self.unwise(wise_val)
#print '\nunpack_val',unpack_val
in_data=in_data.replace(wise_val,unpack_val)
return in_data
except:
traceback.print_exc(file=sys.stdout)
return data
def containsWise(self, data):
return 'w,i,s,e' in data
def unwise(self, sJavascript):
#print 'sJavascript',sJavascript
page_value=""
try:
ss="w,i,s,e=("+sJavascript+')'
exec (ss)
page_value=self.__unpack(w,i,s,e)
except: traceback.print_exc(file=sys.stdout)
return page_value
def __unpack( self,w, i, s, e):
lIll = 0;
ll1I = 0;
Il1l = 0;
ll1l = [];
l1lI = [];
while True:
if (lIll < 5):
l1lI.append(w[lIll])
elif (lIll < len(w)):
ll1l.append(w[lIll]);
lIll+=1;
if (ll1I < 5):
l1lI.append(i[ll1I])
elif (ll1I < len(i)):
ll1l.append(i[ll1I])
ll1I+=1;
if (Il1l < 5):
l1lI.append(s[Il1l])
elif (Il1l < len(s)):
ll1l.append(s[Il1l]);
Il1l+=1;
if (len(w) + len(i) + len(s) + len(e) == len(ll1l) + len(l1lI) + len(e)):
break;
lI1l = ''.join(ll1l)#.join('');
I1lI = ''.join(l1lI)#.join('');
ll1I = 0;
l1ll = [];
for lIll in range(0,len(ll1l),2):
            #print 'array i',lIll,len(ll1l)
ll11 = -1;
if ( ord(I1lI[ll1I]) % 2):
ll11 = 1;
#print 'val is ', lI1l[lIll: lIll+2]
l1ll.append(chr( int(lI1l[lIll: lIll+2], 36) - ll11));
ll1I+=1;
if (ll1I >= len(l1lI)):
ll1I = 0;
ret=''.join(l1ll)
if 'eval(function(w,i,s,e)' in ret:
ret=re.compile('eval\(function\(w,i,s,e\).*}\((.*?)\)').findall(ret)[0]
return self.unwise(ret)
else:
return ret
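# --- Added usage sketch: not part of the original unwise.py above ---
# A minimal illustration of how the JsUnwiser class defined above could be
# driven. It assumes a local HTML file (hypothetical name) containing one or
# more packed "eval(function(w,i,s,e)...)" blocks; unwiseAll() replaces each
# block with its decoded JavaScript.
if __name__ == '__main__':
    unwiser = JsUnwiser()
    # 'page_with_wise_js.html' is a placeholder input file, not shipped with the addon
    sample_html = open('page_with_wise_js.html').read()
    if unwiser.containsWise(sample_html):
        print(unwiser.unwiseAll(sample_html))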
|
saukrIppl/seahub
|
seahub/institutions/urls.py
|
Python
|
apache-2.0
| 391
| 0.002558
|
from django.conf.urls import patterns, url
from .views import info, useradmin, user_info, user_remove
urlpatterns = patterns(
'',
url('^info/$', info, name="info"),
    url('^useradmin/$', useradmin, name="useradmin"),
url(r'^useradmin/info/(?P<email>[^/]+)/$', user_info, name='user_info'),
    url(r'^useradmin/remove/(?P<email>[^/]+)/$', user_remove, name='user_remove'),
)
|
SMALLplayer/smallplayer-image-creator
|
storage/.xbmc/addons/script.module.urlresolver/lib/urlresolver/plugins/promptfile.py
|
Python
|
gpl-2.0
| 3,129
| 0.004155
|
'''
urlresolver XBMC Addon
Copyright (C) 2013 Bstrdsmkr
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from t0mm0.common.net import Net
from urlresolver.plugnplay.interfaces import UrlResolver
from urlresolver.plugnplay.interfaces import PluginSettings
from urlresolver.plugnplay import Plugin
import re, urllib2, os, xbmcgui
from urlresolver import common
#SET ERROR_LOGO# THANKS TO VOINAGE, BSTRDMKR, ELDORADO
error_logo = os.path.join(common.addon_path, 'resources', 'images', 'redx.png')
class PromptfileResolver(Plugin, UrlResolver, PluginSettings):
implements = [UrlResolver, PluginSettings]
name = "promptfile"
def __init__(self):
p = self.get_setting('priority') or 100
self.priority = int(p)
self.net = Net()
def get_media_url(self, host, media_id):
web_url = self.get_url(host, media_id)
try:
html = self.net.http_GET(web_url).content
data = {}
r = re.findall(r'type="hidden"\s*name="(.+?)"\s*value="(.*?)"', html)
for name, value in r:
data[name] = value
html = self.net.http_POST(web_url, data).content
html = re.compile(r'clip\s*:\s*\{.*?url\s*:\s*[\"\'](.+?)[\"\']', re.DOTALL).search(html)
if not html:
raise Exception ('File Not Found or removed')
stream_url = html.group(1)
return stream_url
except urllib2.URLError, e:
common.addon.log_error(self.name + ': got http error %d fetching %s' %
(e.code, web_url))
common.addon.show_small_popup('Error','Http error: '+str(e), 5000, error_logo)
return self.unresolvable(code=3, msg=e)
except Exception, e:
common.addon.log_error('**** Promptfile Error occured: %s' % e)
common.addon.show_small_popup(title='[B][COLOR white]PROMPTFILE[/COLOR][/B]', msg='[COLOR red]%s[/COLOR]' % e, delay=5000, image=error_logo)
return self.unresolvable(code=0, msg=e)
def get_url(self, host, media_id):
return 'http://www.promptfile.com/%s' % (media_id)
def get_host_and_id(self, url):
r = re.search('//(.+?)/(.+)', url)
if r:
return r.groups()
else:
return False
def valid_url(self, url, host):
if self.get_setting('enabled') == 'false': return False
return (re.match('http://(www.)?promptfile.com/l/' +
'[0-9A-Za-z\-]+', url) or
'promptfile' in host)
|
shakamunyi/neutron-vrrp
|
neutron/tests/unit/linuxbridge/test_agent_scheduler.py
|
Python
|
apache-2.0
| 1,192
| 0.002517
|
# Copyright (c) 2013 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron.tests.unit.linuxbridge import test_linuxbridge_plugin
from neutron.tests.unit.openvswitch import test_agent_scheduler
class LbAgentSchedulerTestCase(
test_agent_scheduler.OvsAgentSchedulerTestCase):
plugin_str = test_linuxbridge_plugin.PLUGIN_NAME
l3_plugin = None
class LbL3AgentNotifierTestCase(
test_agent_scheduler.OvsL3AgentNotifierTestCase):
    plugin_str = test_linuxbridge_plugin.PLUGIN_NAME
l3_plugin = None
class LbDhcpAgentNotifierTestCase(
test_agent_scheduler.OvsDhcpAgentNotifierTestCase):
plugin_str = test_linuxbridge_plugin.PLUGIN_NAME
|
bmya/pos-addons
|
pos_session_custom2/__openerp__.py
|
Python
|
lgpl-3.0
| 583
| 0.012007
|
{
'name' : 'Custom pos session report (2)',
'version' : '1.0.0',
'author' : 'IT-Projects LLC, Ivan Yelizariev',
'license': 'GPL-3',
'category' : 'Custom',
'website' : 'https://yelizariev.github.io',
'description': """
Tested on Odoo 8.0 258a4cac82ef3b7e6a086f691f3bf8140d37b51c
""",
'data':[
        'views/session_view.xml',
'views/pos_session_custom_report1.xml',
'views/report1.xml',
'views/layouts.xml',
],
'depends': ['base','point_of_sale'],
'init_xml': [],
'update_xml': [],
'installable': True,
}
|
uber/tchannel-python
|
tchannel/rw.py
|
Python
|
mit
| 19,427
| 0
|
# Copyright (c) 2016 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import struct
import six
from .errors import ReadError
skip = '_'
def none():
"""A ReadWriter that consumes nothing and returns None."""
return NoneReadWriter()
def constant(rw, value):
"""A ReadWriter that runs the given ReadWriter and ignores the value.
Always writes and returns ``value`` instead.
:param rw:
ReadWriter to run
:param value:
Value to serialize and return
"""
return ConstantReadWriter(rw, value)
def number(width_bytes):
"""Build a ReadWriter for integers of the given width.
:param width_bytes:
Width of the integer. One of 1, 2, 4 and 8.
"""
return NumberReadWriter(width_bytes)
def args(length_rw):
"""Build a ReadWriter for args=[arg1, arg2, arg3]
:param length_rw:
        ReadWriter for the length of each arg
"""
return ArgsReaderWriter(length_rw)
def len_prefixed_string(length_rw, is_binary=False):
"""Build a ReadWriter for strings prefixed with their length.
.. code-block:: python
        len_prefixed_string(number(2)) # == str~2
:param length_rw:
ReadWriter for the length of the string
:param is_binary:
Whether the string is a binary blob. If this is False (the default),
the string will be encoded/decoded to UTF-8 before writing/reading.
"""
return LengthPrefixedBlobReadWriter(length_rw, is_binary)
def chain(*rws):
"""Build a ReadWriter from the given list of ReadWriters.
.. code-block:: python
chain(
number(1),
number(8),
len_prefixed_string(number(2)),
) # == n1:1 n2:8 s~2
Reads/writes from the given ReadWriters in-order. Returns lists of values
in the same order as the ReadWriters.
:param rws:
One or more ReadWriters
"""
assert rws is not None
if len(rws) == 1 and isinstance(rws[0], list):
# In case someone does chain([l0, l1, ...])
rws = rws[0]
return ChainReadWriter(rws)
def dictionary(*pairs):
"""Build a ReadWriter that reads/writes dictionaries.
``pairs`` are tuples containing field names and their corresponding
ReadWriters. The fields will be read and written in the same order
provided here.
For example the following ReadWriter will read and write dictionaries in
the form ``{"flags": <byte>, "id": <int32>}``.
.. code-block:: python
dictionary(
("flags", number(1)),
("id", number(4)),
)
For pairs where the key name is `rw.skip`, the value will not be saved and
the serializer will receive None.
:param pairs:
One or more tuples in the from ``(<field name>, <ReadWriter>)``.
"""
return NamedChainReadWriter(pairs)
def instance(cls, *pairs):
"""Build a ReadWriter that reads/writes intances of the given class.
``pairs`` are key-value pairs that specify constructor argument names and
their corresponding ReadWriters. These same names are used to access
attributes on instances when writing.
.. code-block:: python
instance(
Person,
("name", len_prefixed_string(number(2))),
("age", number(1))
)
For pairs where the attribute name is `rw.skip`, the value will not be
passed to the constructor. Further, while serializing, None will be passed
to the serializer.
:param cls:
A class with an ``__init__`` method accepting keyword arguments for
all items specified in ``pairs``
:param pairs:
Key-value pairs mapping argument name to ReadWriter.
"""
return InstanceReadWriter(cls, pairs)
def headers(length_rw, key_rw, value_rw=None):
"""Build a ReadWriter for header lists.
A header is represented as::
count:L (key:K value:V){count}
The value produced is a list of key-value pairs. For example,
.. code-block:: python
headers(
number(L),
len_prefixed_string(number(K)),
len_prefixed_string(number(V)),
)
:param length_rw:
ReadWriter for the number of pairs in the header
:param key_rw:
ReadWriter for a key in a pair
:param value_rw:
ReadWriter for a value in a pair. Defaults to ``key_rw``.
"""
return HeadersReadWriter(length_rw, key_rw, value_rw)
def switch(switch_rw, cases):
"""A ReadWriter that picks behavior based on the value of ``switch_rw``.
.. code-block:: python
switch(
number(1), {
0: option_1_rw(),
1: option_2_rw()
}
)
Produces a tuple in the from ``(switch_value, case_value)``. If a given
switch value did not have a corresponding case, nothing will be written to
the stream and None will be returned as the value when reading.
:param switch_rw:
A ReadWriter that produces a value to dispatch on
:param cases:
Pairs where the key is the expected value from ``switch_rw``. If the
value matches, the corresponding ReadWriter will be executed.
"""
return SwitchReadWriter(switch_rw, cases)
class ReadWriter(object):
"""Provides the ability to read/write types from/to file-like objects.
    ReadWriters SHOULD NOT maintain any state between calls to
    ``read``/``write`` and MUST be re-usable and thread-safe. The
    ``read``/``write`` methods MAY be called on the same ReadWriter instance
    multiple times for different requests at the same time.
The file-like stream object MUST provide ``read(int)`` and ``write(str)``
methods with behaviors as follows:
``read(int)``
MUST return the specified number of bytes from the stream. MAY return
fewer bytes if the end of the stream was reached.
``write(str)``
MUST write the given string or buffer to the stream.
"""
def read(self, stream):
"""Read and return the object from the stream.
:param stream:
file-like object providing a `read(int)` method
:returns: the deserialized object
:raises ReadError:
for parse errors or if the input is too short
"""
raise NotImplementedError()
def write(self, obj, stream):
"""Write the object to the stream.
:param stream:
            file-like object providing a `write(str)` method
:returns:
the stream
"""
raise NotImplementedError()
def length(self, obj):
"""Return the number of bytes will actually be written into io.
For cases where the width depends on the input, this should return the
length of data will be written into iostream."""
raise NotImplementedError()
def width(self):
"""Return the number of bytes this ReadWriter is expected to take.
For cases where the width depends on the input, this should return the
minimum width the ReadWriter is expected to take."""
raise NotImplementedError()
def take(self, stre
|
redisca/django-redisca
|
redisca/frontend/urls.py
|
Python
|
mit
| 246
| 0
|
from django.conf.urls import url
from redisca.frontend import views
app_name = 'frontend'
urlpatterns = [
    url(r'^$', views.template_list, name='template_list'),
url(r'^([a-zA-Z0-9_\./\-]+)$', views.static_template, name='template'),
]
|
ramansbach/cluster_analysis
|
clustering/scripts/gsdSubsample.py
|
Python
|
mit
| 959
| 0.023983
|
#open a gsd file and write out a subsampled version, keeping only every N timesteps
#useful if you want to be analyzing a shorter trajectory
import gsd.hoomd
import argparse
import time
start = time.time()
parser = argparse.ArgumentParser(description='Subsample GSD trajectory')
parser.add_argument('fname',metavar='input',type=str,help='trajectory file to be subsampled')
parser.add_argument('ofname',metavar='output',type=str,help='where to write subsampled trajectory file')
parser.add_argument('N',metavar='N',type=int,help='keep frame each N timesteps')
args = parser.parse_args()
traj = gsd.hoomd.open(args.fname)
frame0 = traj[0]
newtraj = gsd.hoomd.open(args.ofname,'wb')
newtraj.append(frame0)
for i in range(args.N,len(traj),args.N):
s = gsd.hoomd.Snapshot()
pos = traj[i].particles.position
s.particles.position = pos
s.particles.N = len(pos)
newtraj.append(s)
end = time.time()
print('Subsampling took {0} s.'.format(end-start))
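# --- Added usage note: not part of the original gsdSubsample.py above ---
# The script is driven entirely from the command line via argparse. A typical
# invocation (file names are hypothetical) that keeps frame 0 plus every 10th
# frame would be:
#
#   python gsdSubsample.py traj.gsd traj_every10.gsd 10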
|
tobias47n9e/social-core
|
social_core/tests/backends/test_utils.py
|
Python
|
bsd-3-clause
| 1,767
| 0
|
import unittest2 as unittest
from ..models import TestStorage
from ..strategy import TestStrategy
from ...backends.utils import load_backends, get_backend
from ...backends.github import GithubOAuth2
from ...exceptions import MissingBackend
class BaseBackendUtilsTest(unittest.TestCase):
def setUp(self):
self.strategy = TestStrategy(storage=TestStorage)
def tearDown(self):
self.strategy = None
class LoadBackendsTest(BaseBackendUtilsTest):
def test_load_backends(self):
loaded_backends = load_backends((
'social_core.backends.github.GithubOAuth2',
            'social_core.backends.facebook.FacebookOAuth2',
'social_core.backends.flickr.FlickrOAuth'
), force_load=True)
keys = list(loaded_backends.keys())
self.assertEqual(keys, ['github', 'facebook', 'flickr'])
backends = ()
loaded_backends = load_backends(backends, force_load=True)
self.assertEqual(len(list(loaded_backends.keys())), 0)
class GetBackendTest(BaseBackendUtilsTest):
def test_get_backend(self):
backend = get_backend((
'social_core.backends.github.GithubOAuth2',
'social_core.backends.facebook.FacebookOAuth2',
'social_core.backends.flickr.FlickrOAuth'
), 'github')
self.assertEqual(backend, GithubOAuth2)
def test_get_missing_backend(self):
with self.assertRaisesRegexp(MissingBackend,
'Missing backend "foobar" entry'):
get_backend(('social_core.backends.github.GithubOAuth2',
'social_core.backends.facebook.FacebookOAuth2',
'social_core.backends.flickr.FlickrOAuth'),
'foobar')
|
dstansby/heliopy
|
heliopy/data/helios.py
|
Python
|
gpl-3.0
| 38,861
| 0
|
"""
Methods for importing Helios data.
"""
from datetime import date, time, datetime, timedelta
import os
import pathlib
import urllib.error
from urllib.error import URLError
from collections import OrderedDict
import warnings
import astropy.constants as constants
import astropy.units as u
import numpy as np
import pandas as pd
from bs4 import BeautifulSoup
import requests
from heliopy import config
from heliopy.data import util
from heliopy.data import cdasrest
data_dir = config['download_dir']
use_hdf = config['use_hdf']
helios_dir = os.path.join(data_dir, 'helios')
# new http base_url
remote_base_url = 'https://helios-data.ssl.berkeley.edu/data/'
def _check_probe(probe):
probe = str(probe)
assert probe == '1' or probe == '2', 'Probe number must be 1 or 2'
return probe
def _dist_file_dir(probe, year, doy):
return os.path.join(helios_dir,
'helios{}'.format(probe),
'dist',
'{}'.format(year),
'{}'.format(int(doy)))
def _loaddistfile(probe, year, doy, hour, minute, second):
"""
Method to load a Helios distribution file.
Returns opened file and location of file if file exists. If file doesn't
exist raises an OSError.
Parameters
----------
probe : int, str
Helios probe to import data from. Must be 1 or 2.
year : int
Year
doy : int
Day of year
hour : int
Hour.
minute : int
Minute
second : int
Second
Returns
-------
f : file
Opened distribution function file
filename : str
Filename of opened file
"""
probe = _check_probe(probe)
# Work out location of file
yearstring = str(year)[-2:]
filedir = _dist_file_dir(probe, year, doy)
filename = os.path.join(filedir,
'h' + probe + 'y' + yearstring +
'd' + str(doy).zfill(3) +
'h' + str(hour).zfill(2) +
'm' + str(minute).zfill(2) +
's' + str(second).zfill(2) + '_')
# Try to open distribution file
for extension in ['hdm.0', 'hdm.1', 'ndm.0', 'ndm.1']:
try:
f = open(filename + extension)
filename += extension
except OSError:
continue
if 'f' not in locals():
raise OSError('Could not find file with name ' +
filename[:-1])
else:
return f, filename
def _dist_filename_to_hms(path):
"""Given distribution filename, extract hour, minute, second"""
# year = int(path[-21:-19]) + 1900
# doy = int(path[-18:-15])
hour = int(path[-14:-12])
minute = int(path[-11:-9])
second = int(path[-8:-6])
return hour, minute, second
def integrated_dists(probe, starttime, endtime, verbose=False):
"""
Returns the integrated distributions from experiments i1a and i1b in Helios
distribution function files.
The distributions are integrated over all angles and given as a function
of proton velocity.
Parameters
----------
probe : int
Helios probe to import data from. Must be 1 or 2.
starttime : datetime.datetime
Start of interval
endtime : datetime.datetime
End of interval
verbose : bool
If ``True``, print information whilst loading. Default is ``False``.
Returns
-------
distinfo : pandas.Series
        Information stored at the top of distribution function files.
"""
extensions = ['hdm.0', 'hdm.1', 'ndm.0', 'ndm.1']
distlist = {'a': [], 'b': []}
starttime_orig = starttime
# Loop through each day
while starttime < endtime:
year = starttime.year
doy = starttime.strftime('%j')
# Directory for today's distribution files
dist_dir = _dist_file_dir(probe, year, doy)
        # Location of hdf file to save to/load from
hdffile = 'h' + probe + str(year) + str(doy).zfill(3) +\
'integrated_dists.hdf'
hdffile = os.path.join(dist_dir, hdffile)
todays_dists = {'a': [], 'b': []}
# Check if data is already saved
if os.path.isfile(hdffile):
for key in todays_dists:
todays_dists[key] = pd.read_hdf(hdffile, key=key)
distlist[key].append(todays_dists[key])
starttime += timedelta(days=1)
continue
# If not saved, generate a derived file
else:
# Get every distribution function file present for this day
for f in os.listdir(dist_dir):
path = os.path.join(dist_dir, f)
# Check for distribution function
if path[-5:] in extensions:
hour, minute, second = _dist_filename_to_hms(path)
try:
a, b = integrated_dists_single(probe, year, doy,
hour, minute, second)
except RuntimeError as err:
strerr = 'No ion distribution function data in file'
if str(err) == strerr:
continue
raise err
t = datetime.combine(starttime.date(),
time(hour, minute, second))
if verbose:
print(t)
dists = {'a': a, 'b': b}
for key in dists:
dist = dists[key]
dist['Time'] = t
dist = dist.set_index(['Time', 'v'], drop=True)
todays_dists[key].append(dist)
# Go through a and b and concat all the data
for key in todays_dists:
todays_dists[key] = pd.concat(todays_dists[key])
if use_hdf:
todays_dists[key].to_hdf(hdffile, key=key, mode='a')
distlist[key].append(todays_dists[key])
starttime += timedelta(days=1)
for key in distlist:
        distlist[key] = util.timefilter(distlist[key], starttime_orig, endtime)
return distlist
def integrated_dists_single(probe, year, doy, hour, minute, second):
"""
Returns the integrated distributions from experiments i1a and i1b in Helios
distribution function files.
The distributions are integrated over all angles and given as a function
of proton velocity.
Parameters
----------
    probe : int, str
Helios probe to import data from. Must be 1 or 2.
year : int
Year
doy : int
Day of year
hour : int
Hour
minute : int
Minute.
second : int
Second
Returns
-------
i1a : pandas.DataFrame
i1a integrated distribution function.
i1b : pandas.DataFrame
i1b integrated distribution function.
"""
probe = _check_probe(probe)
f, _ = _loaddistfile(probe, year, doy, hour, minute, second)
for line in f:
if line[0:19] == ' 1-D i1a integrated':
break
# i1a distribution function
i1adf = f.readline().split()
f.readline()
i1avs = f.readline().split()
f.readline()
# i1b distribution file
i1bdf = f.readline().split()
f.readline()
i1bvs = f.readline().split()
i1a = pd.DataFrame({'v': i1avs, 'df': i1adf}, dtype=float)
i1b = pd.DataFrame({'v': i1bvs, 'df': i1bdf}, dtype=float)
f.close()
return i1a, i1b
def electron_dist_single(probe, year, doy, hour, minute, second,
remove_advect=False):
"""
Read in 2D electron distribution function.
Parameters
----------
probe : int, str
Helios probe to import data from. Must be 1 or 2.
year : int
Year
doy : int
Day of year
hour : int
Hour.
minute : int
Minute
second : int
Second
remove_advect : bool
If ``False``, the distribution is returned in
the spacecraft frame.
If ``True``, the distribution is
returned in the solar wind frame, by subtract
|
CanalTP/kirin
|
tests/mock_navitia/vj_bad_order.py
|
Python
|
agpl-3.0
| 13,166
| 0.000228
|
# coding=utf-8
# Copyright (c) 2001, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# [matrix] channel #navitia:matrix.org (https://app.element.io/#/room/#navitia:matrix.org)
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from tests.mock_navitia import navitia_response
response = navitia_response.NavitiaResponse()
response.queries = [
"vehicle_journeys/?filter=vehicle_journey.has_code(source, Code-orders)&since=20120615T120000Z&until=20120615T190000Z&data_freshness=base_schedule&depth=2"
    # request time is UTC -> 12:00 is 8:00 local in Sherbrooke
]
response.response_code = 200
response.json_response = """
{
"disruptions": [],
"feed_publishers": [
{
"id": "builder",
"license": "ODBL",
"name": "departure board",
"url": "www.canaltp.fr"
}
],
"links": [
],
"pagination": {
"items_on_page": 1,
"items_per_page": 25,
"start_page": 0,
"total_result": 1
},
"vehicle_journeys": [
{
"calendars": [
{
"active_periods": [
{
"begin": "20120615",
"end": "20130615"
}
],
"week_pattern": {
"friday": true,
"monday": false,
"saturday": false,
"sunday": false,
"thursday": false,
"tuesday": false,
"wednesday": false
}
}
],
"disruptions": [],
"id": "R:vj1",
"name": "R:vj1",
"stop_times": [
{
"arrival_time": "100000",
"departure_time": "100000",
"utc_arrival_time": "140000",
"utc_departure_time": "140000",
"headsign": "R:vj1",
"journey_pattern_point": {
"id": "journey_pattern_point:14"
},
"stop_point": {
"codes": [
{
"type": "source",
"value": "Code-StopR1"
}
],
"coord": {
"lat": "0",
"lon": "0"
},
"equipments": [
"has_wheelchair_boarding",
"has_bike_accepted"
],
"id": "StopR1",
"label": "StopR1",
"links": [],
"name": "StopR1",
"stop_area": {
"coord": {
"lat": "0",
"lon": "0"
},
"id": "StopR1",
"label": "StopR1",
"links": [],
"name": "StopR1",
"timezone": "America/Montreal"
}
}
},
{
"arrival_time": "101000",
"departure_time": "101000",
"utc_arrival_time": "140100",
"utc_departure_time": "140100",
"headsign": "R:vj1",
"journey_pattern_point": {
"id": "journey_pattern_point:15"
},
"stop_point": {
"codes": [
{
"type": "source",
"value": "Code-StopR2"
}
],
"coord": {
"lat": "0",
"lon": "0"
},
"equipments": [
"has_wheelchair_boarding",
"has_bike_accepted"
],
"id": "StopR2",
"label": "StopR2",
"links": [],
"name": "StopR2",
"stop_area": {
"coord": {
"lat": "0",
"lon": "0"
},
"id": "StopR2",
"label": "StopR2",
"links": [],
"name": "StopR2",
"timezone": "America/Montreal"
}
}
},
{
"arrival_time": "102000",
"departure_time": "102000",
"utc_arrival_time": "140200",
"utc_departure_time": "140200",
"headsign": "R:vj1",
"journey_pattern_point": {
"id": "journey_pattern_point:16"
},
"stop_point": {
"codes": [
{
"type": "source",
"value": "Code-StopR3"
}
],
"coord": {
"lat": "0",
"lon": "0"
},
"equipments": [
"has_wheelchair_boarding",
"has_bike_accepted"
],
"id": "StopR3",
"label": "StopR3",
"links": [],
"name": "StopR3",
"stop_area": {
"coord": {
"lat": "0",
"lon": "0"
},
"id": "StopR3",
"label": "StopR3",
"links": [],
"name": "StopR3",
"timezone": "America/Montreal"
}
}
},
{
"arrival_time": "103000",
"departure_time": "103000",
"utc_arrival_time": "140300",
"utc_departure_time": "140300",
"headsign": "R:vj1",
"journey_pattern_point": {
"id": "journey_pattern_point:17"
},
"stop_point": {
"codes": [
{
|
pierrelb/RMG-Py
|
rmgpy/quantityTest.py
|
Python
|
mit
| 38,311
| 0.007021
|
#!/usr/bin/env python
# encoding: utf-8
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2009 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This script contains unit tests of the :mod:`rmgpy.quantity` module.
"""
import unittest
import math
import numpy
import rmgpy.constants as constants
import rmgpy.quantity as quantity
################################################################################
class TestAcceleration(unittest.TestCase):
"""
Contains unit tests of the Acceleration unit type object.
"""
def test_mpers2(self):
"""
Test the creation of an acceleration quantity with units of m/s^2.
"""
q = quantity.Acceleration(1.0,"m/s^2")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si, 1.0, delta=1e-6)
self.assertEqual(q.units, "m/s^2")
def test_cmpers2(self):
"""
Test the creation of an acceleration quantity with units of cm/s^2.
"""
q = quantity.Acceleration(1.0,"cm/s^2")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si, 0.01, delta=1e-8)
self.assertEqual(q.units, "cm/s^2")
################################################################################
class TestArea(unittest.TestCase):
"""
Contains unit tests of the Area unit type object.
"""
def test_m2(self):
"""
Test the creation of an area quantity with units of m^2.
"""
q = quantity.Area(1.0,"m^2")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si, 1.0, delta=1e-6)
self.assertEqual(q.units, "m^2")
def test_cm2(self):
"""
        Test the creation of an area quantity with units of cm^2.
"""
q = quantity.Area(1.0,"cm^2")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si, 1.0e-4, delta=1e-10)
self.assertEqual(q.units, "cm^2")
################################################################################
class TestConcentration(unittest.TestCase):
"""
Contains unit tests of the Concentration unit type object.
"""
def test_perm3(self):
"""
        Test the creation of a concentration quantity with units of m^-3.
"""
try:
q = quantity.Concentration(1.0,"m^-3")
self.fail('Allowed invalid unit type "m^-3".')
except quantity.QuantityError:
pass
def test_molperm3(self):
"""
        Test the creation of a concentration quantity with units of mol/m^3.
"""
q = quantity.Concentration(1.0,"mol/m^3")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si, 1.0, delta=1e-6)
self.assertEqual(q.units, "mol/m^3")
def test_moleculesperm3(self):
"""
        Test the creation of a concentration quantity with units of molecules/m^3.
"""
q = quantity.Concentration(1.0,"molecules/m^3")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si*constants.Na, 1.0, delta=1e-6)
self.assertEqual(q.units, "molecules/m^3")
################################################################################
class TestEnergy(unittest.TestCase):
"""
Contains unit tests of the Energy unit type object.
"""
def test_J(self):
"""
Test the creation of an energy quantity with units of J.
"""
try:
q = quantity.Energy(1.0,"J")
self.fail('Allowed invalid unit type "J".')
except quantity.QuantityError:
pass
def test_Jpermol(self):
"""
Test the creation of an energy quantity with units of J/mol.
"""
q = quantity.Energy(1.0,"J/mol")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si, 1.0, delta=1e-6)
self.assertEqual(q.units, "J/mol")
def test_cal(self):
"""
Test the creation of an energy quantity with units of cal.
"""
try:
q = quantity.Energy(1.0,"cal")
self.fail('Allowed invalid unit type "cal".')
except quantity.QuantityError:
pass
def test_calpermol(self):
"""
Test the creation of an energy quantity with units of cal/mol.
"""
q = quantity.Energy(1.0,"cal/mol")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si, 4.184, delta=1e-6)
self.assertEqual(q.units, "cal/mol")
def test_kJ(self):
"""
Test the creation of an energy quantity with units of kJ.
"""
try:
q = quantity.Energy(1.0,"kJ")
self.fail('Allowed invalid unit type "kJ".')
except quantity.QuantityError:
pass
def test_kJpermol(self):
"""
Test the creation of an energy quantity with units of kJ/mol.
"""
q = quantity.Energy(1.0,"kJ/mol")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si, 1000., delta=1e-6)
self.assertEqual(q.units, "kJ/mol")
def test_kcal(self):
"""
Test the creation of an energy quantity with units of kcal.
"""
try:
q = quantity.Energy(1.0,"kcal")
self.fail('Allowed invalid unit type "kcal".')
except quantity.QuantityError:
pass
def test_kcalpermol(self):
"""
Test the creation of an energy quantity with units of kcal/mol.
"""
q = quantity.Energy(1.0,"kcal/mol")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si, 4184., delta=1e-6)
self.assertEqual(q.units, "kcal/mol")
def test_Kelvin(self):
"""
Test the creation of an energy quantity with units of K (not really an energy!).
"""
q = quantity.Energy(10.0,"K")
self.assertAlmostEqual(q.value, 10*8.314472, delta=1e-6)
self.assertEqual(q.units, "J/mol")
################################################################################
class TestDipoleMoment(unittest.TestCase):
"""
Contains unit tests of the DipoleMoment unit type object.
"""
def test_Ctimesm(self):
"""
Test the creation of a dipole moment quantity with units of C*m.
"""
q = quantity.DipoleMoment(1.0,"C*m")
self.assertAlmostEqual(q.value, 1.0, 6)
self.assertAlmostEqual(q.value_si, 1.0, 6)
self.assertEqual(q.units, "C*m")
def test_D(self):
"""
        Test the creation of a dipole moment quantity with units of De (debye).
"""
q = quantity.DipoleMoment(1.0,"De")
self.assertAlmostEqual
|
backmari/moose
|
python/TestHarness/testers/Tester.py
|
Python
|
lgpl-2.1
| 18,823
| 0.004835
|
import re, os
import util
from InputParameters import InputParameters
from MooseObject import MooseObject
class Tester(MooseObject):
@staticmethod
def validParams():
params = MooseObject.validParams()
# Common Options
params.addRequiredParam('type', "The type of test of Tester to create for this test.")
params.addParam('max_time', 300, "The maximum in seconds that the test will be allowed to run.")
        params.addParam('min_reported_time', "The minimum time elapsed before a test is reported as taking too long to run.")
params.addParam('skip', "Provide a reason this test will be skipped.")
params.addParam('deleted', "Tests that only show up when using the '-e' option (Permanently skipped or not implemented).")
params.addParam('heavy', False, "Set to True if this test should only be run when the '--heavy' option is used.")
params.addParam('group', [], "A list of groups for which this test belongs.")
params.addParam('prereq', [], "A list of prereq tests that need to run successfully before launching this test.")
params.addParam('skip_checks', False, "Tells the TestHarness to skip additional checks (This parameter is set automatically by the TestHarness during recovery tests)")
params.addParam('scale_refine', 0, "The number of refinements to do when scaling")
params.addParam('success_message', 'OK', "The successful message")
params.addParam('cli_args', [], "Additional arguments to be passed to the test.")
params.addParam('valgrind', 'NONE', "Set to (NONE, NORMAL, HEAVY) to determine which configurations where valgrind will run.")
# Test Filters
params.addParam('platform', ['ALL'], "A list of platforms for which this test will run on. ('ALL', 'DARWIN', 'LINUX', 'SL', 'LION', 'ML')")
params.addParam('compiler', ['ALL'], "A list of compilers for which this test is valid on. ('ALL', 'GCC', 'INTEL', 'CLANG')")
params.addParam('petsc_version', ['ALL'], "A list of petsc versions for which this test will run on, supports normal comparison operators ('<', '>', etc...)")
params.addParam('mesh_mode', ['ALL'], "A list of mesh modes for which this test will run ('DISTRIBUTED', 'REPLICATED')")
params.addParam('method', ['ALL'], "A test that runs under certain executable configurations ('ALL', 'OPT', 'DBG', 'DEVEL', 'OPROF', 'PRO')")
params.addParam('library_mode', ['ALL'], "A test that only runs when libraries are built under certain configurations ('ALL', 'STATIC', 'DYNAMIC')")
params.addParam('dtk', ['ALL'], "A test that runs only if DTK is detected ('ALL', 'TRUE', 'FALSE')")
params.addParam('unique_ids', ['ALL'], "A test that runs only if UNIQUE_IDs are enabled ('ALL', 'TRUE', 'FALSE')")
params.addParam('recover', True, "A test that runs with '--recover' mode enabled")
        params.addParam('vtk', ['ALL'], "A test that runs only if VTK is detected ('ALL', 'TRUE', 'FALSE')")
params.addParam('tecplot', ['ALL'], "A test that runs only if Tecplot is detected ('ALL', 'TRUE', 'FALSE')")
params.addParam('dof_id_bytes', ['ALL'], "A test that runs only if libmesh is configured --with-dof-id-bytes = a specific number, e.g. '4', '8'")
        params.addParam('petsc_debug', ['ALL'], "{False,True} -> test only runs when PETSc is configured with --with-debugging={0,1}, otherwise test always runs.")
params.addParam('curl', ['ALL'], "A test that runs only if CURL is detected ('ALL', 'TRUE', 'FALSE')")
params.addParam('tbb', ['ALL'], "A test that runs only if TBB is available ('ALL', 'TRUE', 'FALSE')")
params.addParam('superlu', ['ALL'], "A test that runs only if SuperLU is available via PETSc ('ALL', 'TRUE', 'FALSE')")
params.addParam('slepc', ['ALL'], "A test that runs only if SLEPc is available ('ALL', 'TRUE', 'FALSE')")
params.addParam('unique_id', ['ALL'], "A test that runs only if libmesh is configured with --enable-unique-id ('ALL', 'TRUE', 'FALSE')")
params.addParam('cxx11', ['ALL'], "A test that runs only if CXX11 is available ('ALL', 'TRUE', 'FALSE')")
params.addParam('asio', ['ALL'], "A test that runs only if ASIO is available ('ALL', 'TRUE', 'FALSE')")
        params.addParam('depend_files', [], "A test that only runs if all depend files exist (files listed are expected to be relative to the base directory, not the test directory)")
params.addParam('env_vars', [], "A test that only runs if all the environment variables listed exist")
        params.addParam('should_execute', True, 'Whether or not the executable needs to be run. Use this to chain together multiple tests based off of one executable invocation')
        params.addParam('required_submodule', [], "A list of initialized submodules that this test requires.")
params.addParam('check_input', False, "Check for correct input file syntax")
params.addParam('display_required', False, "The test requires and active display for rendering (i.e., ImageDiff tests).")
return params
def __init__(self, name, params):
MooseObject.__init__(self, name, params)
self.specs = params
# Initialize the status bucket class
self.status = util.TestStatus()
        # Enumerate the buckets here so they are easier to work with in the tester class
self.bucket_success = self.status.bucket_success
self.bucket_fail = self.status.bucket_fail
self.bucket_diff = self.status.bucket_diff
self.bucket_pbs = self.status.bucket_pbs
self.bucket_pending = self.status.bucket_pending
self.bucket_deleted = self.status.bucket_deleted
self.bucket_skip = self.status.bucket_skip
self.bucket_silent = self.status.bucket_silent
# Initialize the tester with a pending status
self.setStatus('launched', self.bucket_pending)
# Set the status message
if self.specs['check_input']:
self.success_message = 'SYNTAX PASS'
else:
self.success_message = self.specs['success_message']
        # Set up common parameters
self.should_execute = self.specs['should_execute']
self.check_input = self.specs['check_input']
def getTestName(self):
return self.specs['test_name']
def getPrereqs(self):
return self.specs['prereq']
# Method to return if the test can run
def getRunnable(self):
return self.status.getRunnable()
# Method to return text color based on current test status
def getColor(self):
return self.status.getColor()
# Method to return the input file if applicable to this Tester
def getInputFile(self):
return None
# Method to return the output files if applicable to this Tester
def getOutputFiles(self):
return []
# Method to return the successful message printed to stdout
def getSuccessMessage(self):
return self.success_message
# Method to return status text (exodiff, crash, skipped because x, y and z etc)
def getStatusMessage(self):
return self.status.getStatusMessage()
# Method to return status bucket tuple
def getStatus(self):
return self.status.getStatus()
# Method to set the bucket status
def setStatus(self, reason, bucket):
self.status.setStatus(reason, bucket)
return self.getStatus()
# Method to check if a test has failed. This method will return true if a
# tester has failed at any point during the processing of the test.
# Note: It's possible for a tester to report false for both didFail and
# didPass. This will happen if the tester is in-progress for instance.
# See didPass()
def didFail(self):
return self.status.didFail()
    # Method to check for successful test
# Note: This method can return False until the tester has completely finished.
# For this reas
|
Winand/pandas
|
pandas/core/window.py
|
Python
|
bsd-3-clause
| 68,740
| 0.000029
|
"""
provide a generic structure to support window functions,
similar to how we have a Groupby object
"""
from __future__ import division
import warnings
import numpy as np
from collections import defaultdict
from datetime import timedelta
from pandas.core.dtypes.generic import (
ABCSeries,
ABCDataFrame,
ABCDatetimeIndex,
ABCTimedeltaIndex,
ABCPeriodIndex,
ABCDateOffset)
from pandas.core.dtypes.common import (
is_integer,
is_bool,
is_float_dtype,
is_integer_dtype,
needs_i8_conversion,
is_timedelta64_dtype,
is_list_like,
_ensure_float64,
    is_scalar)
from pandas.core.base import (PandasObject, SelectionMixin,
GroupByMixin)
import pandas.core.common as com
import pandas._libs.window as _window
from pandas import compat
from pandas.compat.numpy import function as nv
from pandas.util._decorators import (Substitution, Appender,
cache_readonly)
from pandas.core.generic import _shared_docs
from textwrap import dedent
_shared_docs = dict(**_shared_docs)
_doc_template = """
Returns
-------
same type as input
See also
--------
pandas.Series.%(name)s
pandas.DataFrame.%(name)s
"""
class _Window(PandasObject, SelectionMixin):
_attributes = ['window', 'min_periods', 'freq', 'center', 'win_type',
'axis', 'on', 'closed']
exclusions = set()
def __init__(self, obj, window=None, min_periods=None, freq=None,
center=False, win_type=None, axis=0, on=None, closed=None,
**kwargs):
if freq is not None:
warnings.warn("The freq kw is deprecated and will be removed in a "
"future version. You can resample prior to passing "
"to a window function", FutureWarning, stacklevel=3)
self.__dict__.update(kwargs)
self.blocks = []
self.obj = obj
self.on = on
self.closed = closed
self.window = window
self.min_periods = min_periods
self.freq = freq
self.center = center
self.win_type = win_type
self.win_freq = None
self.axis = obj._get_axis_number(axis) if axis is not None else None
self.validate()
@property
def _constructor(self):
return Window
@property
def is_datetimelike(self):
return None
@property
def _on(self):
return None
@property
def is_freq_type(self):
return self.win_type == 'freq'
def validate(self):
if self.center is not None and not is_bool(self.center):
raise ValueError("center must be a boolean")
if self.min_periods is not None and not \
is_integer(self.min_periods):
raise ValueError("min_periods must be an integer")
if self.closed is not None and self.closed not in \
['right', 'both', 'left', 'neither']:
raise ValueError("closed must be 'right', 'left', 'both' or "
"'neither'")
def _convert_freq(self, how=None):
""" resample according to the how, return a new object """
obj = self._selected_obj
index = None
if (self.freq is not None and
isinstance(obj, (ABCSeries, ABCDataFrame))):
if how is not None:
warnings.warn("The how kw argument is deprecated and removed "
"in a future version. You can resample prior "
"to passing to a window function", FutureWarning,
stacklevel=6)
obj = obj.resample(self.freq).aggregate(how or 'asfreq')
return obj, index
def _create_blocks(self, how):
""" split data into blocks & return conformed data """
obj, index = self._convert_freq(how)
if index is not None:
index = self._on
# filter out the on from the object
if self.on is not None:
if obj.ndim == 2:
obj = obj.reindex(columns=obj.columns.difference([self.on]),
copy=False)
blocks = obj._to_dict_of_blocks(copy=False).values()
return blocks, obj, index
def _gotitem(self, key, ndim, subset=None):
"""
sub-classes to define
return a sliced object
Parameters
----------
key : string / list of selections
ndim : 1,2
requested ndim of result
subset : object, default None
subset to act on
"""
# create a new object to prevent aliasing
if subset is None:
subset = self.obj
self = self._shallow_copy(subset)
self._reset_cache()
if subset.ndim == 2:
if is_scalar(key) and key in subset or is_list_like(key):
self._selection = key
return self
def __getattr__(self, attr):
if attr in self._internal_names_set:
return object.__getattribute__(self, attr)
if attr in self.obj:
return self[attr]
raise AttributeError("%r object has no attribute %r" %
(type(self).__name__, attr))
def _dir_additions(self):
return self.obj._dir_additions()
def _get_window(self, other=None):
return self.window
@property
def _window_type(self):
return self.__class__.__name__
def __unicode__(self):
""" provide a nice str repr of our rolling object """
attrs = ["{k}={v}".format(k=k, v=getattr(self, k))
for k in self._attributes
if getattr(self, k, None) is not None]
return "{klass} [{attrs}]".format(klass=self._window_type,
attrs=','.join(attrs))
def _get_index(self, index=None):
"""
Return index as ndarrays
Returns
-------
tuple of (index, index_as_ndarray)
"""
if self.is_freq_type:
if index is None:
index = self._on
return index, index.asi8
return index, index
def _prep_values(self, values=None, kill_inf=True, how=None):
if values is None:
values = getattr(self._selected_obj, 'values', self._selected_obj)
# GH #12373 : rolling functions error on float32 data
# make sure the data is coerced to float64
if is_float_dtype(values.dtype):
values = _ensure_float64(values)
elif is_integer_dtype(values.dtype):
values = _ensure_float64(values)
elif needs_i8_conversion(values.dtype):
raise NotImplementedError("ops for {action} for this "
"dtype {dtype} are not "
"implemented".format(
action=self._window_type,
dtype=values.dtype))
else:
try:
values = _ensure_float64(values)
except (ValueError, TypeError):
raise TypeError("cannot handle this type -> {0}"
"".format(values.dtype))
if kill_inf:
values = values.copy()
values[np.isinf(values)] = np.NaN
return values
def _wrap_result(self, result, block=None, obj=None):
""" wrap a single result """
if obj is None:
obj = self._selected_obj
index = obj.index
if isinstance(result, np.ndarray):
# coerce if necessary
if block is not None:
if is_timedelta64_dtype(block.values.dtype):
from pandas import to_timedelta
result = to_timedelta(
result.ravel(), unit='ns').values.reshape(result.shape)
if result.ndim == 1:
from pandas import Series
return Series(result, index, name=obj.name)
return type(obj)(result, index=index, columns=block.columns)
return result
def _wrap_results(s
|
michalczaplinski/sudoku
|
sudoku_generator.py
|
Python
|
bsd-3-clause
| 5,317
| 0.00583
|
#!/usr/bin/env python
#### Sudoku generator ####
import random
import time
from collections import defaultdict
class Square(object):
'''Main class holding the attributes for each square of the sudoku'''
def __init__(self, x, y):
self.value = None
self.x = x
self.y = y
self.free = range(1, 10)
self.region = None
def addValue(self, value):
self.value = value
def addRegion(self, region):
self.region = region
def removeFromFreeValues(self, value):
self.free.remove(value)
def restoreFree(self):
self.free = range(1,10)
def removeValue(self):
self.value = None
def getValue(self):
return self.value
def getX(self):
return self.x
def getY(self):
return self.y
def getFree(self):
return self.free
def getRegion(self):
return self.region
def createBoard():
board = [ Square(y, x) for x in range(9) for y in range(9) ]
return board
def defineRegions(board):
for square in board:
if square.getX() < 3 and square.getY() < 3:
square.addRegion(0)
elif 3 <= square.getX() < 6 and square.getY() < 3:
square.addRegion(1)
elif 6 <= square.getX() < 9 and square.getY() < 3:
square.addRegion(2)
elif square.getX() < 3 and 3 <= square.getY() < 6:
square.addRegion(3)
elif 3 <= square.getX() < 6 and 3 <= square.getY() < 6:
square.addRegion(4)
elif 6 <= square.getX() < 9 and 3 <= square.getY() < 6:
square.addRegion(5)
elif square.getX() < 3 and 6 <= square.getY() < 9:
square.addRegion(6)
elif 3 <= square.getX() < 6 and 6 <= square.getY() < 9:
square.addRegion(7)
elif 6 <= square.getX() < 9 and 6 <= square.getY() < 9:
square.addRegion(8)
def defineXs(board):
Xdict = {}
for i in range(9):
x_squares = []
for square in board:
if square.getX() == i:
x_squares.append(square)
Xdict[i] = x_squares
return Xdict
def defineYs(board):
Ydict = {}
for i in range(9):
y_squares = []
for square in board:
if square.getY() == i:
y_squares.append(square)
Ydict[i] = y_squares
return Ydict
def defineRegionslist(board):
regions = {}
for i in range(9):
r_squares = []
for square in board:
if square.getRegion() == i:
r_squares.append(square)
regions[i] = r_squares
return regions
def checkIfFree(board, current_square):
free_values = current_square.getFree()
if len(free_values) < 1:
return False
else:
return True
def setValueOnce(value, current_square):
current_square.addValue(value)
current_square.removeFromFreeValues(value)
def checkXValidity(board, current_square):
sameXlist = defineXs(board)[current_square.getX()]
sameXlist.remove(current_square)
x_values = []
for square in sameXlist:
x_values.append(square.getValue())
if current_square.getValue() in x_values:
return False
else:
return True
def checkYValidity(board, current_square):
sameYlist = defineYs(board)[current_square.getY()]
sameYlist.remove(current_square)
y_values = []
for square in sameYlist:
y_values.append(square.getValue())
if current_square.getValue() in y_values:
return False
else:
return True
def checkRegionValidity(board, current_square):
sameRegionlist = defineRegionslist(board)[current_square.getRegion()]
sameRegionlist.remove(current_square)
r_values = []
for square in sameRegionlist:
r_values.append(square.getValue())
if current_square.getValue() in r_values:
return False
else:
return True
def checkConditions(board, square):
if checkXValidity(board, square) == checkYValidity(board, square) == checkRegionValidity(board, square) == True:
return True
else:
return False
def CreateSudoku():
board = createBoard()
defineRegions(board)
index = 0
while index < 81:
current_square = board[index]
if checkIfFree(board, current_square) == False:
current_square.restoreFree()
current_square.removeValue()
index -= 1
continue
value = random.choice(current_square.getFree())
setValueOnce(value, current_square)
if checkConditions(board, current_square) == False:
continue
else:
index += 1
return board
def printSudoku(board):
line = "#-
|
--+---+---#---+---+---#---+---+---#"
line_thick = "#####################################"
print line
for s in board:
if (s.getX() ) % 3 == 0:
print '# ',
elif random.random() > 0.3:
print '| ',
else:
print '| %d' %(s.getValue()),
        if (s.getX() +1) % 9 == 0:
if (s.getY() + 1) % 3 == 0:
print '#\n', line_thick
else:
print '#\n', line
if __name__ == "__main__":
sudoku = CreateSudoku()
printSudoku(sudoku)
|
Jamlum/pytomo
|
test_http_server.py
|
Python
|
gpl-2.0
| 1,820
| 0.006044
|
#!/usr/bin/env python
from __future__ import absolute_import
import urllib2
from pytomo import lib_youtube_download
from pytomo import start_pytomo
start_pytomo.configure_log_file('http_test')
ip_address_uri = ("http://173.194.5.107/videoplayback?sparams=id%2Cexpire%2Cip%2Cipbits%2Citag%2Calgorithm%2Cburst%2Cfactor&algorithm=throttle-factor&itag=34&ipbits=8&burst=40&sver=3&signature=CE60F2B393D8E55A0B8529FCB0AAEDEC876A2C8C.9DAE7AE311AD2D4AE8094715551F8E2482DEA790&expire=1304107200&key=yt1&ip=193.0.0.0&factor=1.25&id=39d17ea226880992")
info = {'accept-ranges': 'bytes',
'cache-control': 'private, max-age=20576',
'connection': 'close',
'Content-length': '16840065',
'content-type': 'video/x-flv',
'date': 'Fri, 29 Apr 2011 14:12:04 GMT',
'expires': 'Fri, 29 Apr 2011 19:55:00 GMT',
'last-modified': 'Fri, 18 Jun 2010 12:05:11 GMT',
'server': 'gvs 1.0',
        'via': '1.1 goodway (NetCache NetApp/6.1.1), 1.1 s-proxy (NetCache NetApp/ 5.6.2R2)',
'x-content-type-options': 'nosniff'}
def mock_response(req):
if req.get_full_url() == ip_address_uri:
mock_file = open('test_pytomo/OdF-oiaICZI.flv')
resp = urllib2.addinfourl(mock_file,info ,
req.get_full_url())
resp.code = 200
resp.msg = "OK"
return resp
class MyHTTPHandler(urllib2.HTTPHandler):
def http_open(self, req):
print "mock opener"
return mock_response(req)
my_opener = urllib2.build_opener(MyHTTPHandler)
urllib2.install_opener(my_opener)
filedownloader = lib_youtube_download.FileDownloader(30)
h = filedownloader._do_download(ip_address_uri)
print h
#
#response = urllib2.urlopen(ip_address_uri)
#print response.read()
#print response.code
#print response.msg
|
oswalpalash/OctaveCodeShare
|
scipy_central/filestorage/admin.py
|
Python
|
bsd-3-clause
| 89
| 0
|
from django.contrib import admin
from models import FileSet
admin.site.register(FileSet)
|
iancmcc/simplexmlapi
|
simplexmlapi/api.py
|
Python
|
mit
| 3,090
| 0.002913
|
from node import DotXMLDoc, AttributeParsingError
class SimpleXmlApi(object):
"""
The main API class, comprising a map of attributes to dotted path names.
Accessing an attribute that has been mapped to a dotted name will return
the text value of that node/attribute. If an attribute is passed that
isn't in the map, it's passed off to the L{DotXMLDoc} instance, so that
the document can be walked manually.
May be subclassed, overriding C{_map}, to provide custom APIs for known XML
structures.
"""
_map = {}
_doc = None
def __init__(self, source="", map=None):
"""
@param source: A string containing an XML document
@type source: str
@param map:
@type map: dict
@return: void
"""
if map is not None:
self.load_map(map)
self.load_source(source)
def add_mapping(self, name, path):
"""
Add a new attribute - dotted name mapping to the instance's map
registry.
@param name: The name of the attribute.
@type name: str
@param path: A dotted name that can be traversed.
@type path: str
@return: void
"""
self._map[name] = path
def load_source(self, source):
"""
        Parse an XML document and set it as this API's target.
@param source: A string containing an XML document.
@type source: str
@return: void
"""
self._doc = DotXMLDoc(source)
def load_map(self, map):
"""
Update the attribute registry with one or more mappings. Will not
remove attributes that currently exist.
@param map: A dictionary of the form C{\{'attribute':'dotted.name'\}}
@type map: dict
@return: void
"""
self._map.update(map)
def del_mapping(self, name):
"""
Remove an attribute mapping from the registry.
@param name: The name of the attribute to remove from the registry.
@type name: str
@return: void
"""
try: del self._map[name]
except KeyError: pass
def __getattr__(self, attr):
try:
return self._traverse(self._map[attr])
except KeyError:
return getattr(self._doc, attr)
def _traverse(self, path):
"""
Traverse a dotted path against the XML document in memory and return
its text value.
@param path: A dotted path that will resolve to a node or attribute.
@type path: str
@return: The text value of the node.
@rtype: str
"""
try:
return eval("self._doc.%s" % path).getValue()
except SyntaxError:
raise AttributeParsingError
def factory(source, map=None, cls=None):
"""
Create a new L{SimpleXmlApi} instance using the given source and optional
attribute map.
To create an instance of a subclass, pass in the C{cls} attribute.
"""
if cls is None:
cls = SimpleXmlApi
return cls(source, map)
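# --- Editor's addition: a hedged usage sketch, not part of the original
# module.  It assumes DotXMLDoc exposes child elements as attributes so that
# the dotted path 'book.title' resolves to a node with a getValue() method, as
# the class docstring and _traverse() above suggest; the XML sample and the
# attribute names are illustrative only. ---
if __name__ == '__main__':
    sample = "<book><title>Dune</title><pages>412</pages></book>"
    api = factory(sample, map={'title': 'book.title'})
    title = api.title                    # resolved through the attribute map
    api.add_mapping('pages', 'book.pages')
    pages = api.pages                    # newly registered dotted name
    unmapped = api.book                  # falls through to the DotXMLDoc itself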
|
madzebra/BitSend
|
qa/rpc-tests/txn_doublespend.py
|
Python
|
mit
| 6,649
| 0.004362
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitsend Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test proper accounting with a double-spend conflict
#
from test_framework.test_framework import BitsendTestFramework
from test_framework.util import *
class TxnMallTest(BitsendTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 4
self.setup_clean_chain = False
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
def setup_network(self):
# Start with split network:
return super(TxnMallTest, self).setup_network(True)
def run_test(self):
# All nodes should start with 1,250 BSD:
starting_balance = 1250
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar accounts:
node0_address_foo = self.nodes[0].getnewaddress("foo")
fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 1219)
fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
node0_address_bar = self.nodes[0].getnewaddress("bar")
fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 29)
fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
assert_equal(self.nodes[0].getbalance(""),
starting_balance - 1219 - 29 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress("from0")
# First: use raw transaction API to send 1240 BSD to node1_address,
# but don't broadcast:
doublespend_fee = Decimal('-.02')
rawtx_input_0 = {}
rawtx_input_0["txid"] = fund_foo_txid
rawtx_input_0["vout"] = find_output(self.nodes[0], fund_foo_txid, 1219)
rawtx_input_1 = {}
rawtx_input_1["txid"] = fund_bar_txid
rawtx_input_1["vout"] = find_output(self.nodes[0], fund_bar_txid, 29)
inputs = [rawtx_input_0, rawtx_input_1]
change_address = self.nodes[0].getnewaddress()
outputs = {}
outputs[node1_address] = 1240
outputs[change_address] = 1248 - 1240 + doublespend_fee
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
doublespend = self.nodes[0].signrawtransaction(rawtx)
assert_equal(doublespend["complete"], True)
# Create two spends using 1 50 BSD coin each
txid1 = self.nodes[0].sendfrom("foo", node1_address, 40, 0)
txid2 = self.nodes[0].sendfrom("bar", node1_address, 20, 0)
# Have node0 mine a block:
if (self.options.mine_block):
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
        # Node0's balance should be starting balance, plus 50BSD for another
# matured block, minus 40, minus 20, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
if self.options.mine_block: expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
        # foo and bar accounts should be debited:
assert_equal(self.nodes[0].getbalance("foo", 0), 1219+tx1["amount"]+tx1["fee"])
assert_equal(self.nodes[0].getbalance("bar", 0), 29+tx2["amount"]+tx2["fee"])
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's "from0" balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"]+tx2["amount"]))
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Now give doublespend and its parents to miner:
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"])
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].generate(1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
assert_equal(self.nodes[0].gettransaction(doublespend_txid)["confirmations"], 2)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Both transactions should be conflicted
assert_equal(tx1["confirmations"], -2)
assert_equal(tx2["confirmations"], -2)
# Node0's total balance should be starting balance, plus 100BSD for
# two more matured blocks, minus 1240 for the double-spend, plus fees (which are
# negative):
expected = starting_balance + 100 - 1240 + fund_foo_tx["fee"] + fund_bar_tx["fee"] + doublespend_fee
assert_equal(self.nodes[0].getbalance(), expected)
assert_equal(self.nodes[0].getbalance("*"), expected)
# Final "" balance is starting_balance - amount moved to accounts - doublespend + subsidies +
# fees (which are negative)
assert_equal(self.nodes[0].getbalance("foo"), 1219)
assert_equal(self.nodes[0].getbalance("bar"), 29)
assert_equal(self.nodes[0].getbalance(""), starting_balance
-1219
- 29
-1240
+ 100
+ fund_foo_tx["fee"]
+ fund_bar_tx["fee"]
+ doublespend_fee)
# Node1's "from0" account balance should be just the doublespend:
assert_equal(self.nodes[1].getbalance("from0"), 1240)
if __name__ == '__main__':
TxnMallTest().main()
|
atizo/braindump
|
brainstorming/migrations/0005_auto__add_field_idea_color.py
|
Python
|
mit
| 4,031
| 0.007938
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Idea.color'
db.add_column(u'brainstorming_idea', 'color',
self.gf('django.db.models.fields.CharField')(default='', max_length=100, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Idea.color'
db.delete_column(u'brainstorming_idea', 'color')
models = {
u'brainstorming.brainstorming': {
'Meta': {'ordering': "['-created']", 'object_name': 'Brainstorming'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'creator_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'creator_ip': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'question': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'brainstorming.brainstormingwatcher': {
'Meta': {'ordering': "['-created']", 'unique_together': "(('brainstorming', 'email'),)", 'object_name': 'BrainstormingWatcher'},
'brainstorming': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brainstorming.Brainstorming']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'brainstorming.emailverification': {
'Meta': {'ordering': "['-created']", 'object_name': 'EmailVerification'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'brainstorming.idea': {
'Meta': {'ordering': "['-created']", 'object_name': 'Idea'},
'brainstorming': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brainstorming.Brainstorming']"}),
'color': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'creator_ip': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'creator_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'ratings': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['brainstorming']
|
jyt109/gensim
|
gensim/test/test_models.py
|
Python
|
gpl-3.0
| 23,020
| 0.006125
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated tests for checking transformation algorithms (the models package).
"""
import logging
import unittest
import os
import os.path
import tempfile
import numpy
import scipy.linalg
from gensim.corpora import mmcorpus, Dictionary
from gensim.models import lsimodel, ldamodel, tfidfmodel, rpmodel, logentropy_model, ldamulticore
from gensim.models.wrappers import ldamallet
from gensim import matutils
module_path = os.path.dirname(__file__) # needed because sample data files are located in the same folder
datapath = lambda fname: os.path.join(module_path, 'test_data', fname)
# set up vars used in testing ("Deerwester" from the web tutorial)
texts = [['human', 'interface', 'computer'],
['survey', 'user', 'computer', 'system', 'response', 'time'],
['eps', 'user', 'interface', 'system'],
['system', 'human', 'system', 'eps'],
['user', 'response', 'time'],
['trees'],
['graph', 'trees'],
['graph', 'minors', 'trees'],
['graph', 'minors', 'survey']]
dictionary = Dictionary(texts)
corpus = [dictionary.doc2bow(text) for text in texts]
def testfile():
# temporary data will be stored to this file
return os.path.join(tempfile.gettempdir(), 'gensim_models.tst')
class TestLsiModel(unittest.TestCase):
def setUp(self):
self.corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm'))
def testTransform(self):
"""Test lsi[vector] transformation."""
# create the transformation model
model = lsimodel.LsiModel(self.corpus, num_topics=2)
# make sure the decomposition is enough accurate
u, s, vt = scipy.linalg.svd(matutils.corpus2dense(self.corpus, self.corpus.num_terms), full_matrices=False)
self.assertTrue(numpy.allclose(s[:2], model.projection.s)) # singular values must match
# transform one document
doc = list(self.corpus)[0]
transformed = model[doc]
vec = matutils.sparse2full(transformed, 2) # convert to dense vector, for easier equality tests
expected = numpy.array([-0.6594664, 0.142115444]) # scaled LSI version
# expected = numpy.array([-0.1973928, 0.05591352]) # non-scaled LSI version
self.assertTrue(numpy.allclose(abs(vec), abs(expected))) # transformed entries must be equal up to sign
def testCorpusTransform(self):
"""Test lsi[corpus] transformation."""
model = lsimodel.LsiModel(self.corpus, num_topics=2)
got = numpy.vstack(matutils.sparse2full(doc, 2) for doc in model[self.corpus])
expected = numpy.array([
[ 0.65946639, 0.14211544],
[ 2.02454305, -0.42088759],
[ 1.54655361, 0.32358921],
[ 1.81114125, 0.5890525 ],
[ 0.9336738 , -0.27138939],
[ 0.01274618, -0.49016181],
[ 0.04888203, -1.11294699],
[ 0.08063836, -1.56345594],
[ 0.27381003, -1.34694159]])
self.assertTrue(numpy.allclose(abs(got), abs(expected))) # must equal up to sign
def testOnlineTransform(self):
corpus = list(self.corpus)
doc = corpus[0] # use the corpus' first document for testing
# create the transformation model
model2 = lsimodel.LsiModel(corpus=corpus, num_topics=5) # compute everything at once
model = lsimodel.LsiModel(corpus=None, id2word=model2.id2word, num_topics=5) # start with no documents, we will add them later
# train model on a single document
model.add_documents([corpus[0]])
# transform the testing document with this partial transformation
transformed = model[doc]
vec = matutils.sparse2full(transformed, model.num_topics) # convert to dense vector, for easier equality tests
expected = numpy.array([-1.73205078, 0.0, 0.0, 0.0, 0.0]) # scaled LSI version
self.assertTrue(numpy.allclose(abs(vec), abs(expected), atol=1e-6)) # transformed entries must be equal up to sign
# train on another 4 documents
model.add_documents(corpus[1:5], chunksize=2) # train on 4 extra docs, in chunks of 2 documents, for the lols
# transform a document with this partial transformation
transformed = model[doc]
vec = matutils.sparse2full(transformed, model.num_topics) # convert to dense vector, for easier equality tests
expected = numpy.array([-0.66493785, -0.28314203, -1.56376302, 0.05488682, 0.17123269]) # scaled LSI version
self.assertTrue(numpy.allclose(abs(vec), abs(expected), atol=1e-6)) # transformed entries must be equal up to sign
# train on the rest of documents
model.add_documents(corpus[5:])
# make sure the final transformation is the same as if we had decomposed the whole corpus at once
vec1 = matutils.sparse2full(model[doc], model.num_topics)
vec2 = matutils.sparse2full(model2[doc], model2.num_topics)
self.assertTrue(numpy.allclose(abs(vec1), abs(vec2), atol=1e-5)) # the two LSI representations must equal up to sign
def testPersistence(self):
fname = testfile()
model = lsimodel.LsiModel(self.corpus, num_topics=2)
model.save(fname)
model2 = lsimodel.LsiModel.load(fname)
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(numpy.allclose(model.projection.u, model2.projection.u))
self.assertTrue(numpy.allclose(model.projection.s, model2.projection.s))
tstvec = []
self.assertTrue(numpy.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
def testPersistenceCompressed(self):
fname = testfile() + '.gz'
model = lsimodel.LsiModel(self.corpus, num_topics=2)
model.save(fname)
model2 = lsimodel.LsiModel.load(fname, mmap=None)
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(numpy.allclose(model.projection.u, model2.projection.u))
self.assertTrue(numpy.allclose(model.projection.s, model2.projection.s))
tstvec = []
self.assertTrue(numpy.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
def testLargeMmap(self):
fname = testfile()
model = lsimodel.LsiModel(self.corpus, num_topics=2)
# test storing the internal arrays into separate files
model.save(fname, sep_limit=0)
# now load the external arrays via mmap
model2 = lsimodel.LsiModel.load(fname, mmap='r')
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(isinstance(model2.projection.u, numpy.memmap))
self.assertTrue(isinstance(model2.projection.s, numpy.memmap))
self.assertTrue(numpy.allclose(model.projection.u, model2.projection.u))
self.assertTrue(numpy.allclose(model.projection.s, model2.projection.s))
tstvec = []
self.assertTrue(numpy.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
def testLargeMmapCompressed(self):
fname = testfile() + '.gz'
model = lsimodel.LsiModel(self.corpus, num_topics=2)
# test storing the internal arrays into separate files
model.save(fname, sep_limit=0)
# now load the external arrays via mmap
return
# turns out this test doesn't exercise this because there are no arrays
# to be mmaped!
self.assertRaises(IOError, lsimodel.LsiModel.load, fname, mmap='r')
#endclass TestLsiModel
class TestRpModel(unittest.TestCase):
def setUp(self):
self.corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm'))
def testTransform(self):
# create the transformation model
numpy.random.seed(13) # HACK; set fixed seed so that we always get the same random matrix (and can compare against expected results)
        model = rpmodel.RpModel(self.corpus, num_topics=2)
# transform one document
doc = list(self.corpus)[0]
transformed = model[doc]
vec = matutils.sparse2full(transformed, 2) # convert to dense vector, for easier
|
dunmatt/robobonobo
|
scripts/get_ready.py
|
Python
|
mit
| 979
| 0
|
#!/usr/bin/env python
"""Robobonobo setup script.
Usage:
./get_ready.py [options]
Options:
-h, --help Show this help screen
--version Show the version.
"""
from docopt import docopt
from glob import glob
import os
GPIOS = [30, 31, 112, 113, 65, 27]
GPIO_BASE = "/sys/class/gpio"
SLOTS_GLOB = "/sys/devices/bone_capemgr.?/slots"
def write_gpio(filename, msg, pindir=""):
with open(os.path.join(GPIO_BASE, pindir, filename), mode="w+") as ex:
ex.write(msg)
def setup_gpio(pin):
write_gpio("export", pin)
pindir = "gpio" + pin
write_gpio("direction", "out", pindir)
write_gpio("value", "0", pindir)
def setup_dto():
for match in glob(SLOTS_GLOB):
with open(match, mode="w+") as slots:
slots.write("robobonobo")
def main():
for gpio in GPIOS:
setup_gpio(str(gpio))
setup_dto()
if __name__ == "__main__":
args = docopt(__doc__, version="Robobonobo setup script v1")
main()
|
hgl888/chromium-crosswalk
|
tools/gypv8sh.py
|
Python
|
bsd-3-clause
| 2,927
| 0.012641
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script is used by chrome_tests.
|
gypi's js2webui action to maintain the
argument lists and to generate inlinable tests.
"""
import json
import optparse
import os
import subprocess
import sys
import shutil
def HasSameContent(filename, content):
'''Returns true if the given file is readable and has the given content.'''
try:
with open(filename) as file:
return file.read() == content
except:
# Ignore all errors and fall back on a safe bet.
return False
def main ():
parser = optparse.OptionParser()
parser.set_usage(
"%prog v8_shell mock.js test_api.js js2webui.js "
"testtype inputfile inputrelfile cxxoutfile jsoutfile")
parser.add_option('-v', '--verbose', action='store_true')
parser.add_option('-n', '--impotent', action='store_true',
help="don't execute; just print (as if verbose)")
parser.add_option('--deps_js', action="store",
help=("Path to deps.js for dependency resolution, " +
"optional."))
parser.add_option('--external', action='store',
help="Load V8's initial snapshot from external files (y/n)")
(opts, args) = parser.parse_args()
if len(args) != 9:
parser.error('all arguments are required.')
(v8_shell, mock_js, test_api, js2webui, test_type,
inputfile, inputrelfile, cxxoutfile, jsoutfile) = args
cmd = [v8_shell]
icudatafile = os.path.join(os.path.dirname(v8_shell), 'icudtl.dat')
if os.path.exists(icudatafile):
cmd.extend(['--icu-data-file=%s' % icudatafile])
v8nativesfile = os.path.join(os.path.dirname(v8_shell), 'natives_blob.bin')
if opts.external == 'y' and os.path.exists(v8nativesfile):
cmd.extend(['--natives_blob=%s' % v8nativesfile])
v8snapshotfile = os.path.join(os.path.dirname(v8_shell), 'snapshot_blob.bin')
if opts.external == 'y' and os.path.exists(v8snapshotfile):
cmd.extend(['--snapshot_blob=%s' % v8snapshotfile])
arguments = [js2webui, inputfile, inputrelfile, opts.deps_js,
cxxoutfile, test_type]
cmd.extend(['-e', "arguments=" + json.dumps(arguments), mock_js,
test_api, js2webui])
if opts.verbose or opts.impotent:
print cmd
if not opts.impotent:
try:
p = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=0)
out, err = p.communicate()
if not HasSameContent(cxxoutfile, out):
with open(cxxoutfile, 'wb') as f:
f.write(out)
shutil.copyfile(inputfile, jsoutfile)
except Exception, ex:
if os.path.exists(cxxoutfile):
os.remove(cxxoutfile)
if os.path.exists(jsoutfile):
os.remove(jsoutfile)
raise
if __name__ == '__main__':
sys.exit(main())
|
dmwm/Docker
|
jenkins_python/scripts/PullRequestTestBegin.py
|
Python
|
apache-2.0
| 887
| 0.001127
|
#! /usr/bin/env python
from __future__ import print_function
import os
import time
from github import Github
gh = Github(os.environ['DMWMBOT_TOKEN'])
codeRepo = os.environ.get('CODE_REPO', 'WMCore')
teamName = os.environ.get('WMCORE_REPO', 'dmwm')
repoName = '%s/%s' % (teamName, codeRepo)
issueID = None
if 'ghprbPullId' in os.environ:
issueID = os.environ['ghprbPullId']
mode = 'PR'
elif 'TargetIssueID' in os.environ:
issueID = os.environ['TargetIssueID']
    mode = 'Daily'
print("Looking for %s issue %s" % (repoName, issueID))
repo = gh.get_repo(repoName)
issue = repo.get_issue(int(issueID))
reportURL = os.environ['BUILD_URL']
lastCommit = repo.get_pull(int(issueID)).get_commits().get_page(0)[-1]
lastCommit.create_status(state='pending', target_url=reportURL,
description='Tests started at ' + time.strftime("%d %b %Y %H:%M GMT"))
|
jiaphuan/models
|
research/brain_coder/single_task/data.py
|
Python
|
apache-2.0
| 3,685
| 0.004071
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
"""Manage data for pretraining and RL tasks."""
import ast
from collections import namedtuple
from absl import logging
from single_task import code_tasks # brain coder
RLBatch = namedtuple('RLBatch', ['reward_fns', 'batch_size', 'good_reward'])
class DataManager(object):
"""Interface between environment and model."""
def __init__(self, global_config, run_number=None,
do_code_simplification=False):
"""Constructs a DataManager.
Args:
global_config: A config_lib.Config instance containing all config. See
config in defaults.py.
run_number: Which run this is (of the same experiment). This should be set
when a task cycle is defined in the config. A task cycle is a list of
tasks to cycle through repeatedly, and the selected task is a function
of the run number, i.e. 0-th run, 1-st run, 2-nd run, etc...
This can be None if only a single task is set in the config.
do_code_simplification: When global_config.env.config_for_iclr is True,
use this option to create code simplification (code golf) tasks, vs
fixed length coding tasks. If True, a task with code simplification
reward will be constructed.
Raises:
ValueError: If global_config.env.task and global_config.env.task_cycle
are both set, or both not set. Only one should be given.
ValueError: If global_config.env.task_cycle is set but run_number is None.
"""
env_config = global_config.env
self.batch_size = global_config.batch_size
if env_config.task_cycle:
if env_config.task:
raise ValueError('Do not set both `task` and `task_cycle`.')
if run_number is None:
raise ValueError('Do not use task_cycle for single-run experiment.')
index = run_number % len(env_config.task_cycle)
self.task_name = env_config.task_cycle[index]
logging.info('run_number: %d, task_cycle index: %d', run_number, index)
logging.info('task_cycle: %s', env_config.task_cycle)
elif env_config.task:
self.task_name = env_config.task
else:
raise ValueError('Either `task` or `task_cycle` must be set.')
logging.info('Task for this run: "%s"', self.task_name)
logging.info('config_for_iclr=True; do_code_simplification=%s',
do_code_simplification)
self.rl_task = code_tasks.make_task(
task_name=self.task_name,
override_kwargs=ast.literal_eval(env_config.task_kwargs),
max_code_length=global_config.timestep_limit,
require_correct_syntax=env_config.correct_syntax,
        do_code_simplification=do_code_simplification,
correct_bonus=env_config.task_manager_config.correct_bonus,
code_length_bonus=env_config.task_manager_config.code_length_bonus)
def sample_rl_batch(self):
"""Create reward functions from the current task.
Returns:
RLBatch namedtuple instance, which holds functions and information for
a minibatch of episodes.
* reward_fns: A reward function for each episode. Maps code string to
reward.
* batch_size: Number of episodes in this minibatch.
* good_reward: Estimated threshold of rewards which indicate the algorithm
is starting to solve the task. This is a heuristic that tries to
reduce the amount of stuff written to disk.
"""
reward_fns = self.rl_task.rl_batch(self.batch_size)
return RLBatch(
reward_fns=reward_fns,
batch_size=self.batch_size,
good_reward=self.rl_task.good_reward)
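# --- Editor's addition: a hedged sketch of how a caller might consume
# sample_rl_batch(), not part of the original module.  The placeholder
# programs and `global_config` are illustrative; the exact return type of each
# reward function (plain float vs. a richer reward record) is defined in
# code_tasks and is not assumed here. ---
def _example_sample_usage(global_config):
  manager = DataManager(global_config, run_number=0)
  batch = manager.sample_rl_batch()
  programs = ['+.'] * batch.batch_size   # stand-in for sampled programs
  # One reward evaluation per episode in the minibatch.
  return [reward_fn(code) for reward_fn, code in zip(batch.reward_fns, programs)]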
|
softwaresaved/fat
|
lowfat/migrations/0021_blog_status.py
|
Python
|
bsd-3-clause
| 627
| 0.001595
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-02 16:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0020_auto_20160602_1607'),
]
operations = [
migrations.AddField(
model_name='blog',
name='status',
            field=models.CharField(choices=[('U', 'Unprocessed'), ('R', 'On Google Drive (for review)'), ('L', 'On pipeline to be published'), ('P', 'Published'), ('D', 'Declined'), ('O', 'Out of date')], default='U', max_length=1),
),
]
|
lamaisondub/lamaisondub-custom
|
website_forum_private/models/__init__.py
|
Python
|
agpl-3.0
| 21
| 0
|
import website_forum
|
bulik/ldsc
|
setup.py
|
Python
|
gpl-3.0
| 577
| 0.005199
|
from setuptools import setup
setup(name='ldsc',
version='1.0',
description='LD Score Regression (LDSC)',
      url='http://github.com/bulik/ldsc',
author='Brendan Bulik-Sullivan and Hilary Finucane',
author_email='',
license='GPLv3',
      packages=['ldscore'],
scripts=['ldsc.py', 'munge_sumstats.py'],
install_requires = [
'bitarray>=0.8,<0.9',
'nose>=1.3,<1.4',
'pybedtools>=0.7,<0.8',
'scipy>=0.18,<0.19',
'numpy>=1.16,<1.17',
'pandas>=0.20,<0.21'
]
)
|
cloudera/ibis
|
ibis/backends/pyspark/tests/test_window_context_adjustment.py
|
Python
|
apache-2.0
| 13,958
| 0
|
import pandas as pd
import pandas.testing as tm
import pyspark.sql.functions as F
import pytest
from pyspark.sql import Window
import ibis
@pytest.mark.parametrize(
('ibis_windows', 'spark_range'),
[
([(ibis.interval(hours=1), 0)], (-3600, 0)), # 1h back looking window
([(ibis.interval(hours=2), 0)], (-7200, 0)), # 2h back looking window
(
[(0, ibis.interval(hours=1))],
(0, 3600),
), # 1h forward looking window
(
[(ibis.interval(hours=1), ibis.interval(hours=1))],
(-3600, 3600),
), # both forward and trailing
],
indirect=['ibis_windows'],
)
def test_window_with_timecontext(client, ibis_windows, spark_range):
"""Test context adjustment for trailing / range window
We expand context according to window sizes, for example, for a table of:
time value
2020-01-01 a
2020-01-02 b
2020-01-03 c
2020-01-04 d
    with context = (2020-01-03, 2020-01-04) trailing count for 1 day will be:
time value count
2020-01-03 c 2
2020-01-04 d 2
trailing count for 2 days will be:
time value count
2020-01-03 c 3
2020-01-04 d 3
    with context = (2020-01-01, 2020-01-02) count for 1 day forward looking
window will be:
time value count
2020-01-01 a 2
2020-01-02 b 2
"""
table = client.table('time_indexed_table')
context = (
pd.Timestamp('20170102 07:00:00', tz='UTC'),
pd.Timestamp('20170103', tz='UTC'),
)
    result_pd = table.mutate(
count=table['value'].count().over(ibis_windows[0])
).execute(timecontext=context)
spark_table = table.compile()
spark_window = (
Window.partitionBy('key')
.orderBy(F.col('time').cast('long'))
        .rangeBetween(*spark_range)
)
expected = spark_table.withColumn(
'count',
F.count(spark_table['value']).over(spark_window),
).toPandas()
expected = expected[
expected.time.between(*(t.tz_convert(None) for t in context))
].reset_index(drop=True)
tm.assert_frame_equal(result_pd, expected)
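# --- Editor's note (hedged): the `ibis_windows` fixture used above is
# indirect, so its construction is not shown in this file.  Judging from the
# Spark window it is compared against (partition by 'key', order by 'time',
# range between the given bounds), it plausibly builds something like the
# sketch below; treat this as an assumption, not the fixture's actual code. ---
def _example_trailing_range_window(table):
    window = ibis.range_window(
        preceding=ibis.interval(hours=1),
        following=0,
        order_by=table.time,
        group_by=table.key,
    )
    return table.mutate(count=table['value'].count().over(window))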
@pytest.mark.parametrize(
('ibis_windows', 'spark_range'),
[([(None, 0)], (Window.unboundedPreceding, 0))],
indirect=['ibis_windows'],
)
def test_cumulative_window(client, ibis_windows, spark_range):
"""Test context adjustment for cumulative window
    For cumulative window, by definition we should look back infinitely.
When data is trimmed by time context, we define the limit of looking
back is the start time of given time context. Thus for a table of
time value
2020-01-01 a
2020-01-02 b
2020-01-03 c
2020-01-04 d
    with context = (2020-01-02, 2020-01-03) cumulative count will be:
time value count
2020-01-02 b 1
2020-01-03 c 2
"""
table = client.table('time_indexed_table')
context = (
pd.Timestamp('20170102 07:00:00', tz='UTC'),
pd.Timestamp('20170105', tz='UTC'),
)
result_pd = table.mutate(
count_cum=table['value'].count().over(ibis_windows[0])
).execute(timecontext=context)
spark_table = table.compile(timecontext=context)
spark_window = (
Window.partitionBy('key')
.orderBy(F.col('time').cast('long'))
.rangeBetween(*spark_range)
)
expected = spark_table.withColumn(
'count_cum',
F.count(spark_table['value']).over(spark_window),
).toPandas()
expected = expected[
expected.time.between(*(t.tz_convert(None) for t in context))
].reset_index(drop=True)
tm.assert_frame_equal(result_pd, expected)
@pytest.mark.parametrize(
('ibis_windows', 'spark_range'),
[
(
[(ibis.interval(hours=1), 0), (ibis.interval(hours=2), 0)],
[(-3600, 0), (-7200, 0)],
)
],
indirect=['ibis_windows'],
)
def test_multiple_trailing_window(client, ibis_windows, spark_range):
"""Test context adjustment for multiple trailing window
When there are multiple window ops, we need to verify contexts are
    adjusted correctly for all windows. In this test we are constructing
    one trailing window for 1h and another trailing window for 2h
"""
table = client.table('time_indexed_table')
context = (
pd.Timestamp('20170102 07:00:00', tz='UTC'),
pd.Timestamp('20170105', tz='UTC'),
)
result_pd = table.mutate(
count_1h=table['value'].count().over(ibis_windows[0]),
count_2h=table['value'].count().over(ibis_windows[1]),
).execute(timecontext=context)
spark_table = table.compile()
spark_window_1h = (
Window.partitionBy('key')
.orderBy(F.col('time').cast('long'))
.rangeBetween(*spark_range[0])
)
spark_window_2h = (
Window.partitionBy('key')
.orderBy(F.col('time').cast('long'))
.rangeBetween(*spark_range[1])
)
expected = (
spark_table.withColumn(
'count_1h', F.count(spark_table['value']).over(spark_window_1h)
)
.withColumn(
'count_2h', F.count(spark_table['value']).over(spark_window_2h)
)
.toPandas()
)
expected = expected[
expected.time.between(*(t.tz_convert(None) for t in context))
].reset_index(drop=True)
tm.assert_frame_equal(result_pd, expected)
@pytest.mark.parametrize(
('ibis_windows', 'spark_range'),
[
(
[(ibis.interval(hours=1), 0), (ibis.interval(hours=2), 0)],
[(-3600, 0), (-7200, 0)],
)
],
indirect=['ibis_windows'],
)
def test_chained_trailing_window(client, ibis_windows, spark_range):
"""Test context adjustment for chained windows
When there are chained window ops, we need to verify contexts are
    adjusted correctly for all windows. In this test we are constructing
    one trailing window for 1h and a trailing window on the new column for
2h
"""
table = client.table('time_indexed_table')
context = (
pd.Timestamp('20170102 07:00:00', tz='UTC'),
pd.Timestamp('20170105', tz='UTC'),
)
table = table.mutate(
new_col=table['value'].count().over(ibis_windows[0]),
)
table = table.mutate(count=table['new_col'].count().over(ibis_windows[1]))
result_pd = table.execute(timecontext=context)
spark_table = table.compile()
spark_window_1h = (
Window.partitionBy('key')
.orderBy(F.col('time').cast('long'))
.rangeBetween(*spark_range[0])
)
spark_window_2h = (
Window.partitionBy('key')
.orderBy(F.col('time').cast('long'))
.rangeBetween(*spark_range[1])
)
spark_table = spark_table.withColumn(
'new_col', F.count(spark_table['value']).over(spark_window_1h)
)
spark_table = spark_table.withColumn(
'count', F.count(spark_table['new_col']).over(spark_window_2h)
)
expected = spark_table.toPandas()
expected = expected[
expected.time.between(*(t.tz_convert(None) for t in context))
].reset_index(drop=True)
tm.assert_frame_equal(result_pd, expected)
@pytest.mark.xfail(
reason='Issue #2457 Adjust context properly for mixed rolling window,'
' cumulative window and non window ops',
strict=True,
)
@pytest.mark.parametrize(
('ibis_windows', 'spark_range'),
[
(
[(ibis.interval(hours=1), 0), (None, 0)],
[(-3600, 0), (Window.unboundedPreceding, 0)],
)
],
indirect=['ibis_windows'],
)
def test_rolling_with_cumulative_window(client, ibis_windows, spark_range):
"""Test context adjustment for rolling window and cumulative window
    cumulative window should calculate only within the user's context,
while rolling window should calculate on expanded context.
For a rolling window of 1 day,
time value
2020-01-01 a
2020-01-02 b
2020-01-03 c
2020-01-04 d
    with context = (2020-01-02, 2020-01-03), count will be:
time value roll_count cum_count
2020-01-02 b 2 1
2020-01-03
|
steventimberman/masterDebater
|
venv/lib/python2.7/site-packages/disqus/wxr_feed.py
|
Python
|
mit
| 9,430
| 0.007529
|
import datetime
from django import template
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.contrib.syndication.views import Feed, add_domain
from django.utils import feedgenerator, tzinfo
from django.utils.encoding import iri_to_uri
try:
from django.utils.encoding import force_text
except ImportError:
# Django < 1.5
from django.utils.encoding import force_unicode as force_text
USE_SINGLE_SIGNON = getattr(settings, "DISQUS_USE_SINGLE_SIGNON", False)
class WxrFeedType(feedgenerator.Rss201rev2Feed):
def rss_attributes(self):
return {
'version': self._version,
'xmlns:content': 'http://purl.org/rss/1.0/modules/content/',
'xmlns:dsq': 'http://www.disqus.com/',
'xmlns:dc': 'http://purl.org/dc/elements/1.1/',
'xmlns:wp': 'http://wordpress.org/export/1.0/',
}
def format_date(self, date):
return date.strftime('%Y-%m-%d %H:%M:%S')
def add_item(self, title, link, description, author_email=None,
author_name=None, author_link=None, pubdate=None, comments=None,
unique_id=None, enclosure=None, categories=(), item_copyright=None,
ttl=None, **kwargs):
"""
Adds an item to the feed. All args are expected to be Python Unicode
objects except pubdate, which is a datetime.datetime object, and
enclosure, which is an instance of the Enclosure class.
"""
to_unicode = lambda s: force_text(s, strings_only=True)
if categories:
categories = [to_unicode(c) for c in categories]
if ttl is not None:
# Force ints to unicode
ttl = force_text(ttl)
item = {
'title': to_unicode(title),
'link': iri_to_uri(link),
'description': to_unicode(description),
'author_email': to_unicode(author_email),
'author_name': to_unicode(author_name),
'author_link': iri_to_uri(author_link),
'pubdate': pubdate,
'comments': comments,
'unique_id': to_unicode(unique_id),
'enclosure': enclosure,
'categories': categories or (),
'item_copyright': to_unicode(item_copyright),
'ttl': ttl,
}
item.update(kwargs)
self.items.append(item)
def add_root_elements(self, handler):
pass
def add_item_elements(self, handler, item):
        if item['comments'] is None:
return
handler.addQuickElement('title', item['title'])
handler.addQuickElement('link', item['link'])
handler.addQuickElement('content:encoded', item['description'])
handler.addQuickElement('dsq:thread_identifier', item['unique_id'])
handler.addQuickElement('wp:post_date_gmt',
            self.format_date(item['pubdate']).decode('utf-8'))
handler.addQuickElement('wp:comment_status', item['comment_status'])
self.write_comments(handler, item['comments'])
def add_comment_elements(self, handler, comment):
if USE_SINGLE_SIGNON:
handler.startElement('dsq:remote', {})
handler.addQuickElement('dsq:id', comment['user_id'])
handler.addQuickElement('dsq:avatar', comment['avatar'])
handler.endElement('dsq:remote')
handler.addQuickElement('wp:comment_id', comment['id'])
handler.addQuickElement('wp:comment_author', comment['user_name'])
handler.addQuickElement('wp:comment_author_email', comment['user_email'])
handler.addQuickElement('wp:comment_author_url', comment['user_url'])
handler.addQuickElement('wp:comment_author_IP', comment['ip_address'])
handler.addQuickElement('wp:comment_date_gmt',
self.format_date(comment['submit_date']).decode('utf-8'))
handler.addQuickElement('wp:comment_content', comment['comment'])
handler.addQuickElement('wp:comment_approved', comment['is_approved'])
if comment['parent'] is not None:
handler.addQuickElement('wp:comment_parent', comment['parent'])
def write_comments(self, handler, comments):
for comment in comments:
handler.startElement('wp:comment', {})
self.add_comment_elements(handler, comment)
handler.endElement('wp:comment')
class BaseWxrFeed(Feed):
feed_type = WxrFeedType
def get_feed(self, obj, request):
current_site = Site.objects.get_current()
link = self._Feed__get_dynamic_attr('link', obj)
link = add_domain(current_site.domain, link)
feed = self.feed_type(
title = self._Feed__get_dynamic_attr('title', obj),
link = link,
description = self._Feed__get_dynamic_attr('description', obj),
)
title_tmp = None
if self.title_template is not None:
try:
title_tmp = template.loader.get_template(self.title_template)
except template.TemplateDoesNotExist:
pass
description_tmp = None
if self.description_template is not None:
try:
description_tmp = template.loader.get_template(self.description_template)
except template.TemplateDoesNotExist:
pass
for item in self._Feed__get_dynamic_attr('items', obj):
if title_tmp is not None:
title = title_tmp.render(
template.RequestContext(request, {
'obj': item, 'site': current_site
}))
else:
title = self._Feed__get_dynamic_attr('item_title', item)
if description_tmp is not None:
description = description_tmp.render(
template.RequestContext(request, {
'obj': item, 'site': current_site
}))
else:
description = self._Feed__get_dynamic_attr('item_description', item)
link = add_domain(
current_site.domain,
self._Feed__get_dynamic_attr('item_link', item),
)
pubdate = self._Feed__get_dynamic_attr('item_pubdate', item)
if pubdate and not hasattr(pubdate, 'tzinfo'):
ltz = tzinfo.LocalTimezone(pubdate)
pubdate = pubdate.replace(tzinfo=ltz)
feed.add_item(
title = title,
link = link,
description = description,
unique_id = self._Feed__get_dynamic_attr('item_guid', item, link),
pubdate = pubdate,
comment_status = self._Feed__get_dynamic_attr('item_comment_status', item, 'open'),
comments = self._get_comments(item)
)
return feed
def _get_comments(self, item):
cmts = self._Feed__get_dynamic_attr('item_comments', item)
output = []
for comment in cmts:
output.append({
'user_id': self._Feed__get_dynamic_attr('comment_user_id', comment),
'avatar': self._Feed__get_dynamic_attr('comment_avatar', comment),
'id': str(self._Feed__get_dynamic_attr('comment_id', comment)),
'user_name': self._Feed__get_dynamic_attr('comment_user_name', comment),
'user_email': self._Feed__get_dynamic_attr('comment_user_email', comment),
'user_url': self._Feed__get_dynamic_attr('comment_user_url', comment),
'ip_address': self._Feed__get_dynamic_attr('comment_ip_address', comment),
'submit_date': self._Feed__get_dynamic_attr('comment_submit_date', comment),
'comment': self._Feed__get_dynamic_attr('comment_comment', comment),
'is_approved': str(self._Feed__get_dynamic_attr('comment_is_approved', comment)),
'parent': str(self._Feed__get_dynamic_attr('comment_parent', comment)),
})
        return output
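# --- Editor's addition: a hedged sketch of a BaseWxrFeed subclass, not part of
# the original module.  `Post` and its comment_set are hypothetical; only a few
# of the hooks consumed by get_feed()/_get_comments() above are shown, and a
# real feed must also provide the remaining item_* and comment_* attributes. ---
class ExamplePostWxrFeed(BaseWxrFeed):
    title = "Example site export"
    link = "/"
    description = "Comments exported as WXR for Disqus import"
    def items(self):
        return Post.objects.all()              # hypothetical model
    def item_title(self, post):
        return post.title
    def item_link(self, post):
        return post.get_absolute_url()
    def item_comments(self, post):
        return post.comment_set.all()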
|
schakrava/rockstor-core
|
src/rockstor/storageadmin/south_migrations/0001_initial.py
|
Python
|
gpl-3.0
| 36,999
| 0.00746
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Pool'
db.create_table(u'storageadmin_pool', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=4096)),
('uuid', self.gf('django.db.models.fields.CharField')(max_length=100, null=True)),
('size', self.gf('django.db.models.fields.IntegerField')(default=0)),
('raid', self.gf('django.db.models.fields.CharField')(max_length=10)),
('toc', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('storageadmin', ['Pool'])
# Adding model 'Disk'
db.create_table(u'storageadmin_disk', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('pool', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.Pool'], null=True, on_delete=models.SET_NULL)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=10)),
('size', self.gf('django.db.models.fields.IntegerField')()),
('offline', self.gf('django.db.models.fields.BooleanField')(default=False)),
('parted', self.gf('django.db.models.fields.BooleanField')()),
))
db.send_create_signal('storageadmin', ['Disk'])
# Adding model 'Share'
db.create_table(u'storageadmin_share', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('pool', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.Pool'])),
('qgroup', self.gf('django.db.models.fields.CharField')(max_length=100)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=4096)),
('uuid', self.gf('django.db.models.fields.CharField')(max_length=100, null=True)),
('size', self.gf('django.db.models.fields.IntegerField')()),
('owner', self.gf('django.db.models.fields.CharField')(default='root', max_length=4096)),
('group', self.gf('django.db.models.fields.CharField')(default='root', max_length=4096)),
('perms', self.gf('django.db.models.fields.CharField')(default='755', max_length=9)),
('toc', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('subvol_name', self.gf('django.db.models.fields.CharField')(max_length=4096)),
('replica', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('storageadmin', ['Share'])
# Adding model 'Snapshot'
db.create_table(u'storageadmin_snapshot', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('share', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.Share'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=4096)),
('real_name', self.gf('django.db.models.fields.CharField')(default='unknownsnap', max_length=4096)),
('writable', self.gf('django.db.models.fields.BooleanField')(default=False)),
('size', self.gf('django.db.models.fields.IntegerField')(default=0)),
('toc', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('qgroup', self.gf('django.db.models.fields.CharField')(max_length=100)),
('uvisible', self.gf('django.db.models.fields.BooleanField')(default=False)),
('snap_type', self.gf('django.db.models.fields.CharField')(default='admin', max_length=64)),
))
db.send_create_signal('storageadmin', ['Snapshot'])
# Adding unique constraint on 'Snapshot', fields ['share', 'name']
db.create_unique(u'storageadmin_snapshot', ['share_id', 'name'])
# Adding model 'PoolStatistic'
db.create_table(u'storageadmin_poolstatistic', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('pool', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.Pool'])),
('total_capacity', self.gf('django.db.models.fields.IntegerField')()),
('used', self.gf('django.db.models.fields.IntegerField')()),
('ts', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('storageadmin', ['PoolStatistic'])
# Adding model 'ShareStatistic'
db.create_table(u'storageadmin_sharestatistic', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('share', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.Share'])),
('total_capacity', self.gf('django.db.models.fields.IntegerField')()),
('used', self.gf('django.db.models.fields.IntegerField')()),
('ts', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('storageadmin', ['ShareStatistic'])
# Adding model 'NFSExportGroup'
db.create_table(u'storageadmin_nfsexportgroup', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('host_str', self.gf('django.db.models.fields.CharField')(max_length=4096)),
('editable', self.gf('django.db.models.fields.CharField')(default='ro', max_length=2)),
('syncable', self.gf('django.db.models.fields.CharField')(default='async', max_length=5)),
            ('mount_security', self.gf('django.db.models.fields.CharField')(default='insecure', max_length=8)),
('nohide', self.gf('django.db.models.fields.BooleanField')(default=False)),
('enabled', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal('storageadmin', ['NFSExportGroup'])
# Adding model 'NFSExport'
db.create_table(u'storageadmin_nfsexport', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('export_group', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.NFSExportGroup'])),
('share', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['storageadmin.Share'])),
('mount', self.gf('django.db.models.fields.CharField')(max_length=4096)),
))
db.send_create_signal('storageadmin', ['NFSExport'])
# Adding model 'SambaShare'
db.create_table(u'storageadmin_sambashare', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('share', self.gf('django.db.models.fields.related.OneToOneField')(related_name='sambashare', unique=True, to=orm['storageadmin.Share'])),
('path', self.gf('django.db.models.fields.CharField')(unique=True, max_length=4096)),
('comment', self.gf('django.db.models.fields.CharField')(default='foo bar', max_length=100)),
('browsable', self.gf('django.db.models.fields.CharField')(default='yes', max_length=3)),
('read_only', self.gf('django.db.models.fields.CharField')(default='no', max_length=3)),
('guest_ok', self.gf('django.db.models.fields.CharField')(default='no', max_length=3)),
('create_mask', self.gf('django.db.models.fields.CharField')(default='0755', max_length=4)),
('admin_users', self.gf('django.db.models.fields.CharField')(default='Administrator', max_length=128)),
))
db.send_create_signal('storageadmin', ['SambaShare'])
# Adding model 'IscsiTarget'
db.create_table(u'storageadmin_iscsitarget', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('share', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[
|