| repo_name (string, 5-100) | path (string, 4-231) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k) | middle (string, 3-512) | suffix (string, 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
mbohlool/client-python | kubernetes/test/test_rbac_authorization_v1alpha1_api.py | Python | apache-2.0 | 5,278 | 0.006631 |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.apis.rbac_authorization_v1alpha1_api import RbacAuthorizationV1alpha1Api
class TestRbacAuthorizationV1alpha1Api(unittest.TestCase):
""" RbacAuthorizationV1alpha1Api unit test stubs """
def setUp(self):
self.api = kubernetes.client.apis.rbac_authorization_v1alpha1_api.RbacAuthorizationV1alpha1Api()
def tearDown(self):
pass
def test_create_cluster_role(self):
"""
Test case for create_cluster_role
"""
pass
def test_create_cluster_role_binding(self):
"""
Test case for create_cluster_role_binding
"""
pass
def test_create_namespaced_role(self):
"""
Test case for create_namespaced_role
"""
pass
def test_create_namespaced_role_binding(self):
"""
Test case for create_namespaced_role_binding
"""
pass
def test_delete_cluster_role(self):
"""
Test case for delete_cluster_role
"""
pass
def test_delete_cluster_role_binding(self):
"""
Test case for delete_cluster_role_binding
"""
pass
def test_delete_collection_cluster_role(self):
"""
Test case for delete_collection_cluster_role
"""
pass
def test_delete_collection_cluster_role_binding(self):
"""
Test case for delete_collection_cluster_role_binding
"""
pass
def test_delete_collection_namespaced_role(self):
"""
Test case for delete_collection_namespaced_role
"""
pass
def test_delete_collection_namespaced_role_binding(self):
"""
Test case for delete_collection_namespaced_role_binding
"""
pass
def test_delete_namespaced_role(self):
"""
Test case for delete_namespaced_role
"""
pass
def test_delete_namespaced_role_binding(self):
"""
Test case for delete_namespaced_role_binding
"""
pass
def test_get_api_resources(self):
"""
Test case for get_api_resources
"""
pass
def test_list_cluster_role(self):
"""
Test case for list_cluster_role
"""
pass
def test_list_cluster_role_binding(self):
"""
Test case for list_cluster_role_binding
"""
pass
def test_list_namespaced_role(self):
"""
Test case for list_namespaced_role
"""
pass
def test_list_namespaced_role_binding(self):
"""
Test case for list_namespaced_role_binding
"""
pass
def test_list_role_binding_for_all_namespaces(self):
"""
Test case for list_role_binding_for_all_namespaces
"""
pass
def test_list_role_for_all_namespaces(self):
"""
Test case for list_role_for_all_namespaces
"""
pass
def test_patch_cluster_role(self):
"""
Test case for patch_cluster_role
"""
pass
def test_patch_cluster_role_binding(self):
"""
Test case for patch_cluster_role_binding
"""
pass
def test_patch_namespaced_role(self):
"""
Test case for patch_namespaced_role
"""
pass
def test_patch_namespaced_role_binding(self):
"""
Test case for patch_namespaced_role_binding
"""
pass
def test_read_cluster_role(self):
"""
Test case for read_cluster_role
"""
pass
def test_read_cluster_role_binding(self):
"""
Test case for read_cluster_role_binding
"""
pass
def test_read_namespaced_role(self):
"""
Test case for read_namespaced_role
"""
pass
def test_read_namespaced_role_binding(self):
"""
Test case for read_namespaced_role_binding
"""
pass
def test_replace_cluster_role(self):
"""
Test case for replace_cluster_role
"""
pass
def test_replace_cluster_role_binding(self):
"""
        Test case for replace_cluster_role_binding
"""
pass
def test_replace_namespaced_role(self):
"""
Test case for replace_namespaced_role
"""
pass
def test_replace_namespaced_role_binding(self):
"""
Test case for replace_namespaced_role_binding
"""
pass
if __name__ == '__main__':
unittest.main()
|
boegel/easybuild-easyblocks | easybuild/easyblocks/m/mymedialite.py | Python | gpl-2.0 | 2,788 | 0.002869 |
##
# Copyright 2009-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for MyMediaLite, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
from distutils.version import LooseVersion
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.run import run_cmd
class EB_MyMediaLite(ConfigureMake):
"""Support for building/installing MyMediaLite."""
def configure_step(self):
"""Custom configure step for MyMediaLite, using "make CONFIGURE_OPTIONS='...' configure"."""
if LooseVersion(self.version) < LooseVersion('3'):
cmd = "make CONFIGURE_OPTIONS='--prefix=%s' configure" % self.installdir
run_cmd(cmd, log_all=True, simple=True)
else:
self.cfg.update('installopts', "PREFIX=%s" % self.installdir)
def build_step(self):
"""Custom build step for MyMediaLite, using 'make all' in 'src' directory."""
cmd = "cd src && make all && cd .."
run_cmd(cmd, log_all=True, simple=True)
def sanity_check_step(self):
"""Custom sanity check for MyMediaLite."""
if LooseVersion(self.version) < LooseVersion('3'):
bin_files = ["bin/%s_prediction" % x for x in ['item', 'mapping_item', 'mapping_rating', 'rating']]
else:
bin_files = ["bin/item_recommendation", "bin/rating_based_ranking", "bin/rating_prediction"]
custom_paths = {
'files': bin_files,
'dirs': ["lib/mymedialite"],
}
super(EB_MyMediaLite, self).sanity_check_step(custom_paths=custom_paths)
|
luotao1/Paddle | python/paddle/fluid/tests/unittests/xpu/test_gaussian_random_op_xpu.py | Python | apache-2.0 | 1,364 | 0.006598 |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
sys.path.append("..")
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.op import Operator
from paddle.fluid.executor import Executor
from op_test import OpTest
from test_gaussian_random_op import TestGaussianRandomOp
paddle.enable_static()
class TestXPUGaussianRandomOp(TestGaussianRandomOp):
def test_check_output(self):
if paddle.is_compiled_with_xpu():
place = paddle.XPUPlace(0)
outs = self.calc_output(place)
outs = [np.array(out) for out in outs]
outs.sort(key=len)
self.verify_output(outs)
if __name__ == "__main__":
unittest.main()
|
adrianp/cartz | server/utils.py | Python | mit | 210 | 0 |
import random
import string
def random_string(n):
result = ''
    for _ in range(n):  # build a string of the requested length n
result += random.SystemRandom().choice(
            string.ascii_uppercase + string.digits)
return result
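# Usage sketch (with the length fix above): random_string(8) returns an
# 8-character string drawn from A-Z and 0-9, e.g. 'K7QZ2M9X' (value hypothetical).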
|
bennylope/django-simple-auth | simple_auth/urls.py | Python | bsd-2-clause | 156 | 0 |
from django.conf.urls import url
from .views import simple_password
urlpatterns = [
url(r'^$', view=simple_password, name="simple_auth_password"),
]
|
val314159/framist | fssvr/fs.py | Python | apache-2.0 | 1,267 | 0.037096 |
import os,json
from cgi import escape
def unescape(s):
    s = s.replace("&lt;", "<")
    s = s.replace("&gt;", ">")
    # this has to be last:
    s = s.replace("&amp;", "&")
    return s
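# Note: cgi.escape (imported above) was deprecated and removed in Python 3.8;
# on modern Python, html.escape/html.unescape cover both directions handled
# by escape() and this unescape() helper.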
class FilesystemMixin:
def h_fs_get(_,path,eltName=''):
        from stat import S_ISDIR
data = (escape(open(path).read())
if not S_ISDIR(os.stat(path).st_mode)
else [(p,S_ISDIR(os.stat(path+'/'+p).st_mode))
for p in os.listdir(path)])
_.ws.send(json.dumps({"method":"fs_get","result":[path,data,eltName]}))
pass
def h_fs_put(_,path,data):
f=open(path,'w')
for x in data: f.write(unescape(x))
        f.close()
pass
def h_fs_system(_,path,eltName='',cwd=None):
import subprocess as sp
import shlex
data=sp.Popen(shlex.split(path),cwd=cwd,stdout=sp.PIPE, stderr=sp.PIPE).communicate()
_.ws.send(json.dumps({"method":"fs_system","result":[path,data,eltName]}));
pass
def h_fs_mkdir (_,path): os.mkdir(path)
def h_fs_rmdir (_,path): os.rmdir(path)
def h_fs_touch (_,path): open(path,'w').close()
def h_fs_unlink(_,path): os.unlink(path)
pass
class FsApp(FilesystemMixin):
def __init__(_,ws):_.ws=ws
|
gokudomatic/cobiv | cobiv/modules/hud_components/progresshud/progresshud.py | Python | mit | 473 | 0.002114 |
import os
from kivy.lang import Builder
from kivy.properties import NumericProperty, StringProperty
from kivy.uix.anchorlayout import AnchorLayout
from cobiv.modules.core.hud import Hud
Builder.load_file(os.path.abspath(os.path.join(os.path.dirname(__file__), 'progresshud.kv')))
class ProgressHud(Hud, AnchorLayout):
    value = NumericProperty(0)
caption = StringProperty("")
def __init__(self, **kwargs):
super(ProgressHud, self).__init__(**kwargs)
|
schedulix/schedulix | src/examples/SimpleAccess3.py | Python | agpl-3.0 | 704 | 0.053977 |
import sdms
server = { 'HOST' : 'localhost',
'PORT' : '2506',
'USER' : 'SYSTEM',
'PASSWORD' : 'VerySecret' }
conn = sdms.SDMSConnectionOpenV2(server, server['USER'], server['PASSWORD'], "Simple Access Example")
try:
if 'ERROR' in conn:
print(str(conn))
exit(1)
except:
pass
stmt = "LIST SESSIONS;"
result = sdms.SDMSCommandWithSoc(conn, stmt)
if 'ERROR' in result:
    print(str(result['ERROR']))
else:
    for row in result['DATA']['TABLE']:
print("{0:3} {1:8} {2:32} {3:9} {4:15} {5:>15} {6}".format(\
str(row['THIS']), \
str(row['UID']), \
str(row['USER']), \
str(row['TYPE']), \
str(row['START']), \
str(row['IP']), \
str(row['INFORMATION'])))
conn.close()
|
gslab-econ/gslab_python | gslab_scons/tests/test_build_stata.py | Python | mit | 6,869 | 0.016305 |
#! /usr/bin/env python
import unittest
import sys
import os
import shutil
import mock
import subprocess
import re
# Import gslab_scons testing helper modules
import _test_helpers as helpers
import _side_effects as fx
# Ensure that Python can find and load the GSLab libraries
os.chdir(os.path.dirname(os.path.realpath(__file__)))
sys.path.append('../..')
import gslab_scons as gs
from gslab_scons._exception_classes import ExecCallError, BadExtensionError
from gslab_make import get_externals
from gslab_make.tests import nostderrout
# Define path to the builder for use in patching
path = 'gslab_scons.builders.build_stata'
class TestBuildStata(unittest.TestCase):
def setUp(self):
if not os.path.exists('./build/'):
os.mkdir('./build/')
@helpers.platform_patch('darwin', path)
@mock.patch('%s.misc.is_in_path' % path)
@mock.patch('%s.subprocess.check_output' % path)
def test_unix(self, mock_check, mock_path):
'''Test build_stata()'s standard behaviour on Unix machines'''
mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
        # Mock is_in_path() to find just one Stata executable
mock_path.side_effect = fx.make_stata_path_effect('stata-mp')
env = {'stata_executable' : None}
helpers.standard_test(self, gs.build_stata, 'do',
env = env, system_mock = mock_check)
@helpers.platform_patch('win32', path)
@mock.patch('%s.misc.is_in_path' % path)
@mock.patch('%s.subprocess.check_output' % path)
@mock.patch('%s.misc.is_64_windows' % path)
def test_windows(self, mock_is_64, mock_check, mock_path):
'''
Test that build_stata() behaves correctly on a Windows machine
when given appropriate inputs.
'''
mock_check.side_effect = fx.make_stata_side_effect('StataMP-64.exe')
mock_path.side_effect = fx.make_stata_path_effect('StataMP-64.exe')
mock_is_64.return_value = False
env = {'stata_executable' : None}
helpers.standard_test(self, gs.build_stata, 'do',
env = env, system_mock = mock_check)
@helpers.platform_patch('cygwin', path)
@mock.patch('%s.misc.is_in_path' % path)
@mock.patch('%s.subprocess.check_output' % path)
def test_other_platform(self, mock_check, mock_path):
'''
Test build_stata()'s standard behaviour on a non-Unix,
non-win32 machine.
'''
mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
mock_path.side_effect = fx.make_stata_path_effect('stata-mp')
# build_stata() will fail to define a command irrespective of
# whether a stata_executable is specified
env = {'stata_executable' : 'stata-mp'}
with self.assertRaises(NameError):
gs.build_stata(target = './test_output.txt',
source = './test_script.do',
env = env)
env = {'stata_executable' : None}
with self.assertRaises(NameError):
gs.build_stata(target = './test_output.txt',
source = './test_script.do',
env = env)
@helpers.platform_patch('darwin', path)
@mock.patch('%s.subprocess.check_output' % path)
def test_stata_executable_unix(self, mock_check):
mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
env = {'stata_executable': 'stata-mp'}
helpers.standard_test(self, gs.build_stata, 'do',
env = env, system_mock = mock_check)
@helpers.platform_patch('win32', path)
@mock.patch('%s.subprocess.check_output' % path)
def test_stata_executable_windows(self, mock_check):
mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
env = {'stata_executable': 'stata-mp'}
helpers.standard_test(self, gs.build_stata, 'do',
env = env, system_mock = mock_check)
@mock.patch('%s.subprocess.check_output' % path)
def test_cl_arg(self, mock_check):
mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
env = {'stata_executable' : None}
helpers.test_cl_args(self, gs.build_stata, mock_check, 'do',
env = env)
def test_bad_stata_executable(self):
env = {'stata_executable': 'bad_stata_executable'}
with self.assertRaises(ExecCallError):
gs.build_stata(target = './test_output.txt',
source = './test_script.do',
env = env)
@mock.patch('%s.misc.is_in_path' % path)
@mock.patch('%s.subprocess.check_output' % path)
def test_no_executable_in_path(self, mock_check, mock_path):
'''
Test build_stata()'s behaviour when there are no valid Stata
executables in the user's path variable
'''
# We mock the system to not find any executable in the path.
mock_check.side_effect = fx.make_stata_side_effect('')
mock_path.side_effect = fx.make_stata_path_effect('')
env = {'stata_executable': None}
with helpers.platform_patch('darwin', path), self.assertRaises(ExecCallError):
gs.build_stata(target = './test_output.txt',
source = './test_script.do',
env = env)
with helpers.platform_patch('win32', path), self.assertRaises(ExecCallError):
gs.build_stata(target = './test_output.txt',
source = './test_script.do',
env = env)
@mock.patch('%s.subprocess.check_output' % path)
def test_unavailable_executable(self, mock_check):
'''
Test build_stata()'s behaviour when a Stata executable that
isn't recognised is specified.
'''
mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
env = {'stata_executable' : 'stata-se'}
with self.assertRaises(ExecCallError):
gs.build_stata(target = './build/stata.dta',
source = './input/stata_test_script.do',
env = env)
    @mock.patch('%s.subprocess.check_output' % path)
def test_bad_extension(self, mock_check):
        mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
env = {'stata_executable': 'stata-mp'}
helpers.bad_extension(self, gs.build_stata,
good = 'test.do', env = env)
def tearDown(self):
if os.path.exists('./build/'):
shutil.rmtree('./build/')
if os.path.isfile('./test_output.txt'):
os.remove('./test_output.txt')
if __name__ == '__main__':
unittest.main()
|
MDU-PHL/meningotype | update_meningotype.py | Python | gpl-3.0 | 1,346 | 0 |
'''
Given a new mlst version or DB, update the container
'''
import pathlib
import click
import jinja2
import toml
import pendulum
import subprocess
import shlex
def load_template(name):
'''
Return the singularity recipe template as unicode text
'''
template = pathlib.Path(name).read_text()
return template
@click.command()
@click.option("--version", default=None)
@click.option("--mlst_version", default="latest")
@click.option("--author", default=None)
@click.option("-c", "--config", default="config.toml")
def update_meningotype_singularity(version, mlst_version, author, config):
'''
Use the config.toml, or override any of the options via the command line
'''
# load the params
config = toml.load(config)
if version is not None:
config['version'] = version
if mlst_version is not None:
config['mlst_version'] = mlst_version
if author is not None:
config['author'] = author
# load the template
loader = jinja2.FunctionLoader(load_template)
env = jinja2.Environment(loader=loader)
SINGULARITY_RECIPE = env.get_template("_singularity.j2").render(config)
# create global version
global_recipe = pathlib.Path("Singularity")
global_recipe.write_text(SINGULARITY_RECIPE)
if __name__ == "__main__":
update_meningotype_singularity()
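# Example invocation (hypothetical values; omitted options fall back to config.toml):
#   python update_meningotype.py --version 1.0 --mlst_version latest -c config.toml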
|
Architizer/mendel | mendel/migrations/0009_auto_20160623_1141.py | Python | agpl-3.0 | 1,091 | 0.001833 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-06-23 15:41
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mendel', '0008_auto_20160613_1911'),
]
operations = [
migrations.RenameField(
model_name='context',
old_name='keyword',
new_name='keyword_given',
),
migrations.RenameField(
model_name='review',
old_name='keyword',
new_name='keyword_given',
),
        migrations.AddField(
model_name='review',
name='keyword_proposed',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='keyword_proposed', to='mendel.Keyword'),
preserve_default=False,
),
migrations.AlterUniqueTogether(
name='review',
unique_together=set([('context', 'keyword_proposed', 'category', 'user', 'status')]),
),
]
|
dmnfarrell/peat | PEATSA/Core/__init__.py | Python | mit | 123 | 0.00813 |
'''Contains the Core classes of the PEATSA command line tool'''
import ProteinDesignTool, Data, Exceptions, PEATSAParallel
|
opencord/voltha | voltha/adapters/interface.py | Python | apache-2.0 | 20,438 | 0.00044 |
#
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Interface definition for Voltha Adapters
"""
from zope.interface import Interface
class IAdapterInterface(Interface):
"""
A Voltha adapter
"""
def start():
"""
        Called once after the adapter instance is loaded. Can be used for
        async initialization.
:return: (None or Deferred)
"""
def stop():
"""
Called once before adapter is unloaded. It can be used to perform
any cleanup after the adapter.
:return: (None or Deferred)
"""
def adapter_descriptor():
"""
Return the adapter descriptor object for this adapter.
:return: voltha.Adapter grpc object (see voltha/protos/adapter.proto),
with adapter-specific information and config extensions.
"""
def device_types():
"""
Return list of device types supported by the adapter.
:return: voltha.DeviceTypes protobuf object, with optional type
specific extensions.
"""
def health():
"""
Return a 3-state health status using the voltha.HealthStatus message.
:return: Deferred or direct return with voltha.HealthStatus message
"""
def change_master_state(master):
"""
        Called to indicate if plugin shall assume or lose master role. The
master role can be used to perform functions that must be performed
from a single point in the cluster. In single-node deployments of
        Voltha, the plugins are always in master role.
:param master: (bool) True to indicate the mastership needs to be
assumed; False to indicate that mastership needs to be abandoned.
:return: (Deferred) which is fired by the adapter when mastership is
assumed/dropped, respectively.
"""
def adopt_device(device):
"""
Make sure the adapter looks after given device. Called when a device
is provisioned top-down and needs to be activated by the adapter.
:param device: A voltha.Device object, with possible device-type
specific extensions. Such extensions shall be described as part of
the device type specification returned by device_types().
:return: (Deferred) Shall be fired to acknowledge device ownership.
"""
def reconcile_device(device):
"""
Make sure the adapter looks after given device. Called when this
device has changed ownership from another Voltha instance to
this one (typically, this occurs when the previous voltha
instance went down).
:param device: A voltha.Device object, with possible device-type
specific extensions. Such extensions shall be described as part of
the device type specification returned by device_types().
:return: (Deferred) Shall be fired to acknowledge device ownership.
"""
def abandon_device(device):
"""
        Make sure the adapter no longer looks after the device. This is called
if device ownership is taken over by another Voltha instance.
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge abandonment.
"""
def disable_device(device):
"""
This is called when a previously enabled device needs to be disabled
based on a NBI call.
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge disabling the device.
"""
def reenable_device(device):
"""
This is called when a previously disabled device needs to be enabled
based on a NBI call.
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge re-enabling the
device.
"""
def reboot_device(device):
"""
This is called to reboot a device based on a NBI call. The admin
state of the device will not change after the reboot
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge the reboot.
"""
def download_image(device, request):
"""
This is called to request downloading a specified image into
the standby partition of a device based on a NBI call.
This call is expected to be non-blocking.
:param device: A Voltha.Device object.
A Voltha.ImageDownload object.
:return: (Deferred) Shall be fired to acknowledge the download.
"""
def get_image_download_status(device, request):
"""
This is called to inquire about a requested image download
status based on a NBI call.
The adapter is expected to update the DownloadImage DB object
with the query result
:param device: A Voltha.Device object.
A Voltha.ImageDownload object.
:return: (Deferred) Shall be fired to acknowledge
"""
def cancel_image_download(device, request):
"""
This is called to cancel a requested image download
based on a NBI call. The admin state of the device will not
change after the download.
:param device: A Voltha.Device object.
A Voltha.ImageDownload object.
:return: (Deferred) Shall be fired to acknowledge
"""
def activate_image_update(device, request):
"""
This is called to activate a downloaded image from
a standby partition into active partition.
Depending on the device implementation, this call
may or may not cause device reboot.
If no reboot, then a reboot is required to make the
activated image running on device
This call is expected to be non-blocking.
:param device: A Voltha.Device object.
A Voltha.ImageDownload object.
:return: (Deferred) OperationResponse object.
"""
def revert_image_update(device, request):
"""
This is called to deactivate the specified image at
active partition, and revert to previous image at
standby partition.
Depending on the device implementation, this call
may or may not cause device reboot.
If no reboot, then a reboot is required to make the
previous image running on device
This call is expected to be non-blocking.
:param device: A Voltha.Device object.
A Voltha.ImageDownload object.
:return: (Deferred) OperationResponse object.
"""
def self_test_device(device):
"""
        This is called to self-test a device based on an NBI call.
:param device: A Voltha.Device object.
:return: Will return result of self test
"""
def delete_device(device):
"""
This is called to delete a device from the PON based on a NBI call.
If the device is an OLT then the whole PON will be deleted.
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge the deletion.
"""
def get_device_details(device):
"""
This is called to get additional device details based on a NBI call.
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge the retrieval of
additional details.
"""
def update_flows_bulk(device, flows, groups):
"""
Called after any flow table change, but only if the device supports
bulk mode, which is expr
|
tuxar-uk/Spectral-Harmonographs | harmonograph.py | Python | mit | 3,592 | 0.032294 |
#!/usr/bin/python
''' Spectral Harmonographs Copyright 2014 Alan Richmond (Tuxar.uk)
Trace of 4 decaying sine waves, 2 per axis (x & y)(i.e. 2-pendula), with rainbow colour.
I did this in Java some decades ago (Encyclogram; I no longer have the source);
this version is in Python, with PyGame.
It randomly generates a sequence of harmonographs. It's fast, and can be set to go
much faster (or slower) if you want.
Tip: set the display window to fullscreen. On KDE Ubuntu right-click on the title bar,
select More Actions -> Fullscreen
'''
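# The math traced below (a sketch of what the loop implements): each axis sums
# two sinusoids under a shared decay,
#   x(t) = d(t) * (ax1*sin(fx1*t + px1) + ax2*sin(fx2*t + px2))
#   y(t) = d(t) * (ay1*cos(fy1*t + py1) + ay2*cos(fy2*t + py2))
# where d(t) is multiplied by the decay factor dd at every time step.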
print "Quit: q key, Screenshot: spacebar"
import pygame, sys, random as r
from pygame.locals import *
from math import pi, sin, cos, exp
# EDIT THESE:
width,height=1280,720 # YouTube HD
width,height=1920,1080 # my left monitor
width,height=1280,1024 # my right monitor
#width,height=2560,1440 # YT channel art
dd=0.99995 # decay factor
dt=0.02 # time increment
speed=200 # yes, speed
hui=57*2 # Hue increment
hue,sat,val,aaa=0,100,100,0
sd=0.005 # frequency spread (from integer)
mx=4 # max range for amplitudes & frequencies
def check_event():
global save
for event in pygame.event.get():
if event.type == QUIT:
sys.exit()
elif event.type == KEYDOWN and event.key == K_q:
sys.exit()
elif event.type == KEYDOWN and event.key == K_SPACE:
save=True
print "Saving when finished..."
steps=0
pygame.init()
pygame.event.set_allowed([QUIT, KEYDOWN])
screen = pygame.display.set_mode((width,height),DOUBLEBUF)
screen.set_alpha(None)
fg=pygame.Color(0,0,0,0)
save=False
while True:
while True:
ax1, ax2 = r.randint(-mx,mx), r.randint(-mx,mx)
maxx=abs(ax1)+abs(ax2)
if maxx>0: break
xscale=width/(2*maxx)
while True:
ay1, ay2 = r.randint(0,mx), r.randint(0,mx)
maxy=abs(ay1)+abs(ay2)
if maxy>0: break
yscale=height/(2*maxy)
fx1, fx2 = r.randint(1,mx) + r.gauss(0,sd), r.randint(1,mx) + r.gauss(0,sd)
    fy1, fy2 = r.randint(1,mx) + r.gauss(0,sd), r.randint(1,mx) + r.gauss(0,sd)
px1, px2 = r.uniform(0,2*pi), r.uniform(0,2*pi)
py1, py2 = r.uniform(0,2*pi), r.uniform(0,2*pi)
print ax1,ax2,ay1,ay2
print fx1,fx2,fy1,fy2
print px1,px2,py1,py2
dec=1.0
    t=0.0        # angle for sin
first=True
while dec>0.015:
# calculate next x,y point along line
x = xscale * dec * (ax1*sin(t * fx1 + px1) + ax2*sin(t * fx2 + px2)) + width/2
y = yscale * dec * (ay1*cos(t * fy1 + py1) + ay2*cos(t * fy2 + py2)) + height/2
dec*=dd # decay
if not first: # ignore any complaint about prev_x,y being undefined
fg.hsva=(hue,sat,val,aaa)
hue = (hue + dt*hui) % 360 # cycle hue
pygame.draw.aaline(screen, fg, (x, y), (prev_x, prev_y), 1)
else:
first=False
prev_x = x # save x,y for next line segment start
prev_y = y
if steps%speed==0: pygame.display.update()
steps+=1
t+=dt # increment angle for sin
check_event()
if save:
pars='shg-{0}_{1}-{2}_{3}-{4}_{5}'.format(ax1,ax2,fx1,fx2,px1,px2)
pygame.image.save(screen, pars+'.jpg')
print "Saved as "+pars+'.jpg'
save=False
screen.fill((0,0,0))
|
tobykurien/rpi_lcars | app/ui/widgets/sprite.py | Python | mit | 3,393 | 0.006779 |
import pygame
from ui.utils.interpolator import Interpolator
class LcarsWidget(pygame.sprite.DirtySprite):
"""Base class for all widgets"""
def __init__(self, color, pos, size, handler=None):
pygame.sprite.DirtySprite.__init__(self)
        if self.image is None:
self.image = pygame.Surface(size).convert()
self.image.fill(color)
self.rect = self.image.get_rect()
self.rect.top = pos[0]
self.rect.left = pos[1]
self.size = (self.rect.width, self.rect.height)
self.long_pressed = False
self.pressed_time = 0
self.focussed = False
self.line = None
self.handler = handler
def update(self, screen):
if not self.visible:
return
        if self.line is not None:
self.line.next()
if self.rect.center == self.line.pos:
self.dirty = 0
            self.rect.center = self.line.pos
else:
self.dirty = 0
screen.blit(self.image, self.rect)
def handleEvent(self, event, clock):
handled = False
if not self.visible:
self.focussed = False
return handled
if event.type == pygame.MOUSEBUTTONDOWN:
self.pressed_time = pygame.time.get_ticks()
self.focussed = True
if event.type == pygame.MOUSEMOTION:
if (self.focussed and pygame.time.get_ticks() - self.pressed_time > 1000):
self.long_pressed = True
if self.groups()[0].UI_PLACEMENT_MODE:
self.rect.top = event.pos[1]
self.rect.left = event.pos[0]
self.dirty = 1
if event.type == pygame.MOUSEBUTTONUP:
if self.handler:
self.handler(self, event, clock)
handled = True
if self.focussed and self.long_pressed and self.groups()[0].UI_PLACEMENT_MODE:
print(event.pos[1], event.pos[0])
self.pressed_time = 0
self.long_pressed = False
self.focussed = False
return handled
def applyColour(self, colour):
"""Convert non-black areas of an image to specified colour"""
for x in range(0, self.size[0]):
for y in range(0, self.size[1]):
pixel = self.image.get_at((x, y)).r
if (pixel > 50):
self.image.set_at((x, y), colour)
class LcarsMoveToMouse(LcarsWidget):
"""For testing purposes - move a small square to last clicked position"""
def __init__(self, color):
self.image = None
LcarsWidget.__init__(self, color, (0,0), (10,10))
self.focussed = True
def handleEvent(self, event, clock):
if event.type == pygame.MOUSEBUTTONDOWN:
# move sprite to clicked location using interpolator
fps = clock.get_fps()
x, y = event.pos
self.line = Interpolator(
self.rect.center,
(x, y),
0.5, # duration of interpolation
fps, # current frames per second
1.0, # type of interpolation
0.5 # middle?
)
self.dirty = 1
|
bauerj/electrum-server | src/processor.py | Python | mit | 9,045 | 0.002211 |
#!/usr/bin/env python
# Copyright(C) 2011-2016 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import json
import Queue as queue
import socket
import threading
import time
import sys
from utils import random_string, timestr, print_log
from utils import logger
class Shared:
def __init__(self, config):
self.lock = threading.Lock()
self._stopped = False
self.config = config
self._paused = True
def paused(self):
with self.lock:
return self._paused
def pause(self):
with self.lock:
self._paused = True
def unpause(self):
with self.lock:
self._paused = False
def stop(self):
print_log("Stopping Stratum")
with self.lock:
self._stopped = True
def stopped(self):
with self.lock:
return self._stopped
class Processor(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.daemon = True
self.dispatcher = None
self.queue = queue.Queue()
def process(self, request):
pass
def add_request(self, session, request):
self.queue.put((session, request))
def push_response(self, session, response):
#print "response", response
self.dispatcher.request_dispatcher.push_response(session, response)
def close(self):
pass
def run(self):
while not self.shared.stopped():
try:
session, request = self.queue.get(True, timeout=1)
msg_id = request.get('id')
except:
continue
if session.stopped():
continue
try:
result = self.process(request)
self.push_response(session, {'id': msg_id, 'result': result})
except BaseException, e:
self.push_response(session, {'id': msg_id, 'error':str(e)})
except:
logger.error("process error", exc_info=True)
self.push_response(session, {'id': msg_id, 'error':'unknown error'})
self.close()
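# A minimal Processor subclass sketch (hypothetical, not from this file):
# run() calls process() and pushes the return value as the JSON-RPC result.
#   class EchoProcessor(Processor):
#       def process(self, request):
#           return request.get('params', [])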
class Dispatcher:
def __init__(self, config):
self.shared = Shared(config)
self.request_dispatcher = RequestDispatcher(self.shared)
self.request_dispatcher.start()
self.response_dispatcher = \
ResponseDispatcher(self.shared, self.request_dispatcher)
self.response_dispatcher.start()
def register(self, prefix, processor):
processor.dispatcher = self
processor.shared = self.shared
processor.start()
self.request_dispatcher.processors[prefix] = processor
class RequestDispatcher(threading.Thread):
def __init__(self, shared):
self.shared = shared
threading.Thread.__init__(self)
self.daemon = True
self.request_queue = queue.Queue()
self.response_queue = queue.Queue()
self.lock = threading.Lock()
self.idlock = threading.Lock()
self.sessions = {}
self.processors = {}
self.lastgc = 0
def push_response(self, session, item):
self.response_queue.put((session, item))
def pop_response(self):
return self.response_queue.get()
def push_request(self, session, item):
self.request_queue.put((session, item))
def pop_request(self):
return self.request_queue.get()
def get_session_by_address(self, address):
for x in self.sessions.values():
if x.address == address:
return x
def run(self):
if self.shared is None:
raise TypeError("self.shared not set in Processor")
while not self.shared.stopped():
session, request = self.pop_request()
try:
self.do_dispatch(session, request)
except:
logger.error('dispatch',exc_info=True)
self.collect_garbage()
self.stop()
def stop(self):
pass
def do_dispatch(self, session, request):
""" dispatch request to the relevant processor """
method = request['method']
params = request.get('params', [])
suffix = method.split('.')[-1]
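        # e.g. the Electrum method "blockchain.address.subscribe" splits into
        # prefix "blockchain" (selects the processor below) and
        # suffix "subscribe" (marks it as a subscription request)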
if session is not None:
if suffix == 'subscribe':
if not session.subscribe_to_service(method, params):
return
prefix = request['method'].split('.')[0]
try:
p = self.processors[prefix]
except:
print_log("error: no processor for", prefix)
return
p.add_request(session, request)
if method in ['server.version']:
try:
session.version = params[0]
session.protocol_version = float(params[1])
except:
pass
def get_sessions(self):
with self.lock:
r = self.sessions.values()
return r
def add_session(self, session):
key = session.key()
with self.lock:
self.sessions[key] = session
def remove_session(self, session):
key = session.key()
with self.lock:
del self.sessions[key]
def collect_garbage(self):
# only for HTTP sessions.
now = time.time()
if time.time() - self.lastgc < 60.0:
return
self.lastgc = now
for session in self.sessions.values():
if session.name == "HTTP" and (now - session.time) > session.timeout:
session.stop()
class Session:
def __init__(self, dispatcher):
self.dispatcher = dispatcher
self.bp = self.dispatcher.processors['blockchain']
self._stopped = False
self.lock = threading.Lock()
self.subscriptions = []
self.address = ''
self.name = ''
self.version = 'unknown'
self.protocol_version = 0.
self.time = time.time()
self.max_subscriptions = dispatcher.shared.config.getint('server', 'max_subscriptions')
threading.Timer(2, self.info).start()
def key(self):
return self.address
# Debugging method. Doesn't need to be threadsafe.
def info(self):
if self.subscriptions:
print_log("%4s" % self.name,
"%21s" % self.address,
"%4d" % len(self.subscriptions),
self.version)
def stop(self):
with self.lock:
if self._stopped:
return
self._stopped = True
self.shutdown()
self.dispatcher.remove_session(self)
self.stop_subscriptions()
def shutdown(self):
pass
def stopped(self):
with self.lock:
return self._stopped
def subscribe_to_service(self, method, params):
if self.stopped():
return False
if len(self.subscriptions) > self.max_subscriptions:
print_log("max subscriptions reached", self.address)
self.stop()
return False
# append to self.subscriptions only if this does not raise
self.bp.do_s
|
colloquium/spacewalk | client/tools/osad/test/simple-dispatcher.py | Python | gpl-2.0 | 1,087 | 0.00644 |
#
# Copyright (c) 2008 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import sys
import test_lib
class SimpleDispatcherClient(test_lib.SimpleClient):
pass
class SimpleDispatcherRunner(test_lib.SimpleRunner):
client_factory = SimpleDispatcherClient
_resource = 'DISPATCHER'
def fix_connection(self, client):
client.retrieve_roster()
client.send_presence()
def main():
d = SimpleDispatcherRunner('username1', 'password1', "DISPATCHER")
d.main()
if __name__ == '__main__':
sys.exit(main() or 0)
|
wkschwartz/django | tests/admin_checks/tests.py | Python | bsd-3-clause | 32,171 | 0.000591 |
from django import forms
from django.contrib import admin
from django.contrib.admin import AdminSite
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.middleware import AuthenticationMiddleware
from django.contrib.contenttypes.admin import GenericStackedInline
from django.contrib.messages.middleware import MessageMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
from django.core import checks
from django.test import SimpleTestCase, override_settings
from .models import (
Album, Author, Book, City, Influence, Song, State, TwoAlbumFKAndAnE,
)
class SongForm(forms.ModelForm):
pass
class ValidFields(admin.ModelAdmin):
form = SongForm
fields = ['title']
class ValidFormFieldsets(admin.ModelAdmin):
def get_form(self, request, obj=None, **kwargs):
class ExtraFieldForm(SongForm):
name = forms.CharField(max_length=50)
return ExtraFieldForm
fieldsets = (
(None, {
'fields': ('name',),
}),
)
class MyAdmin(admin.ModelAdmin):
def check(self, **kwargs):
return ['error!']
class AuthenticationMiddlewareSubclass(AuthenticationMiddleware):
pass
class MessageMiddlewareSubclass(MessageMiddleware):
pass
class ModelBackendSubclass(ModelBackend):
pass
class SessionMiddlewareSubclass(SessionMiddleware):
pass
@override_settings(
SILENCED_SYSTEM_CHECKS=['fields.W342'], # ForeignKey(unique=True)
INSTALLED_APPS=[
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'admin_checks',
],
)
class SystemChecksTestCase(SimpleTestCase):
def test_checks_are_performed(self):
admin.site.register(Song, MyAdmin)
try:
errors = checks.run_checks()
expected = ['error!']
self.assertEqual(errors, expected)
finally:
admin.site.unregister(Song)
@override_settings(INSTALLED_APPS=['django.contrib.admin'])
def test_apps_dependencies(self):
errors = admin.checks.check_dependencies()
expected = [
checks.Error(
"'django.contrib.contenttypes' must be in "
"INSTALLED_APPS in order to use the admin application.",
id="admin.E401",
),
checks.Error(
"'django.contrib.auth' must be in INSTALLED_APPS in order "
"to use the admin application.",
id='admin.E405',
),
checks.Error(
"'django.contrib.messages' must be in INSTALLED_APPS in order "
"to use the admin application.",
id='admin.E406',
),
]
self.assertEqual(errors, expected)
@override_settings(TEMPLATES=[])
def test_no_template_engines(self):
self.assertEqual(admin.checks.check_dependencies(), [
checks.Error(
"A 'django.template.backends.django.DjangoTemplates' "
"instance must be configured in TEMPLATES in order to use "
"the admin application.",
id='admin.E403',
)
])
@override_settings(
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [],
},
}],
)
def test_context_processor_dependencies(self):
expected = [
checks.Error(
"'django.contrib.auth.context_processors.auth' must be "
"enabled in DjangoTemplates (TEMPLATES) if using the default "
"auth backend in order to use the admin application.",
id='admin.E402',
),
checks.Error(
"'django.contrib.messages.context_processors.messages' must "
"be enabled in DjangoTemplates (TEMPLATES) in order to use "
"the admin application.",
id='admin.E404',
),
checks.Warning(
"'django.template.context_processors.request' must be enabled "
"in DjangoTemplates (TEMPLATES) in order to use the admin "
"navigation sidebar.",
id='admin.W411',
)
]
self.assertEqual(admin.checks.check_dependencies(), expected)
# The first error doesn't happen if
# 'django.contrib.auth.backends.ModelBackend' isn't in
# AUTHENTICATION_BACKENDS.
with self.settings(AUTHENTICATION_BACKENDS=[]):
self.assertEqual(admin.checks.check_dependencies(), expected[1:])
@override_settings(
AUTHENTICATION_BACKENDS=['admin_checks.tests.ModelBackendSubclass'],
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.request',
'django.contrib.messages.context_processors.messages',
],
},
}],
)
def test_context_processor_dependencies_model_backend_subclass(self):
self.assertEqual(admin.checks.check_dependencies(), [
checks.Error(
"'django.contrib.auth.context_processors.auth' must be "
"enabled in DjangoTemplates (TEMPLATES) if using the default "
"auth backend in order to use the admin application.",
id='admin.E402',
),
])
@override_settings(
TEMPLATES=[
{
'BACKEND': 'django.template.backends.dummy.TemplateStrings',
'DIRS': [],
'APP_DIRS': True,
},
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
],
)
def test_several_templates_backends(self):
self.assertEqual(admin.checks.check_dependencies(), [])
@override_settings(MIDDLEWARE=[])
def test_middleware_dependencies(self):
errors = admin.checks.check_dependencies()
expected = [
checks.Error(
"'django.contrib.auth.middleware.AuthenticationMiddleware' "
"must be in MIDDLEWARE in order to use the admin application.",
id='admin.E408',
),
checks.Error(
"'django.contrib.messages.middleware.MessageMiddleware' "
"must be in MIDDLEWARE in order to use the admin application.",
id='admin.E409',
),
checks.Error(
"'django.contrib.sessions.middleware.SessionMiddleware' "
"must be in MIDDLEWARE in order to use the admin application.",
hint=(
"Insert "
"'django.contrib.sessions.middleware.SessionMiddleware' "
"before "
"'django.contrib.auth.middleware.AuthenticationMiddleware'."
),
id='admin.E410',
),
]
self.assertEqual(errors, expected)
@override_settings(MIDDLEWARE=[
'admin_checks.tests.AuthenticationMiddlewareSubclass',
'admin_checks.tests.MessageMiddlewareSubclass',
'admin_checks.tests.SessionMiddlewareSubclass',
])
def test_middleware_subclasses(self):
self.assertEqual(admin.checks.check_dependencies(), [])
@override_settings(MIDDLEWARE=[
'django.contrib.does.not.Exist',
'django.contrib.auth.middleware.A
|
JacobMiki/Emotionnaise-python | test.py | Python | mit | 1,452 | 0.00551 |
import numpy as np
import cv2
from sys import argv
class Test:
def __init__(self, name, image):
self.image = image
self.name = name
self.list = []
def add(self, function):
self.list.append(function)
def run(self):
cv2.imshow(self.name, self.image)
for function in self.list:
cv2.waitKey()
self.image = function(self.image)
cv2.imshow(self.name, self.image)
cv2.waitKey()
def grayscale(image):
image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
return image
def median(image):
cv2.medianBlur(image, 9, image)
return image
def unsharp(image):
    image2 = cv2.GaussianBlur(image, (21,21), 21)
    # classic unsharp mask: weight the original up and subtract the blur
    image = cv2.addWeighted(image, 1.5, image2, -0.5, 0, image)
    return image
def harris(image):
    x33 = image.shape[1] / 3
x66 = image.shape[1] / 3 * 2
dst1 = cv2.goodFeaturesToTrack(image[:,:x33], 10, 0.1, 5)
mean1 = np.uint8(cv2.mean(dst1))
cv2.circle(image, (mean1[0], mean1[1]), 2, 255)
dst2 = cv2.goodFeaturesToTrack(image[:,x66:], 10, 0.1, 5)
dst2 += [x66, 0]
mean2 = np.uint8(cv2.mean(dst2))
cv2.circle(image, (mean2[0], mean2[1]), 2, 255)
return image
if __name__ == '__main__':
image = cv2.imread(argv[1])
test = Test('Test', image)
test.add(grayscale)
test.add(median)
test.add(harris)
test.run()
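# Run with an image path on the command line, e.g. (hypothetical file name):
#   python test.py face.png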
|
ffsdmad/af-web | cgi-bin/plugins/plugin_manager.py | Python | gpl-3.0 | 2,225 | 0.008539 |
#!/usr/bin/env python
# -*- coding: utf8 -*-
#
# autocomplit.py
#
# Copyright 2011 Basmanov Illya <ffsdmad@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import config
import ffcgi
from base import bd_sql
from xslt_proc import xml_xsl_proc
import os, sys, inspect
sys.stderr = sys.stdout
from user import user
#print "Content-Type: text/html; charset=utf8\n\n"
import libs
PLGPATH = "plugins2"
def make_plugin_xml(plg_name):
xml = ""
plg = libs.load_module_name(plg_name)
if plg:
for k in dir(plg):
if k[:2]!="__":
xml += "/"+
|
k
#xml += "<%s>%s</%s>"% (k, plg[k], k)
return xml
class plugin_manager():
def __init__(self):
xml = user[3]+"""<plugins>plugin_manager</plugins>"""
txml = ""
for root, dirs, files in os.walk(PLGPATH):
            for f in files:
if f[-3:]==".py" and f!="__init__.py":
plg_path = root + "/" + f
plg_name = plg_path.replace("/", ".")[len(PLGPATH)+1:-3]
txml += "<row><plg_name>%s</plg_name><plg_path>%s</plg_path><title>%s</title></row>"% (plg_name, plg_path, make_plugin_xml(plg_name) )
txml = "<plugin_manager>%s</plugin_manager>" % txml
xml = "<doc>%s</doc>"%(xml+txml)
xsl = "data/af-web.xsl"
libs.save_xml(xml, __file__ )
print xml_xsl_proc(xml,fxsl=xsl)
def main():
return 0
if __name__ == '__main__': main()
|
d3adc0d3/DeadBot | events.py | Python | mit | 509 | 0.033399 |
class Event:
MESSAGE = 'MESSAGE'
JOIN = 'JOIN'
LEAVE = 'LEAVE'
def __init__(self, _type, user, text):
self._type = _type
self.user = user
self.text = text
def __repr__(self):
return '[{}] {}: {}'.format(self._type, self.user, self.text)
def __str__(self):
return self.__repr__()
def message(user, text):
return Event(Event.MESSAGE, user, text)
def join(user):
return Event(Event.JOIN, user, 'joined the room')
def leave(user):
    return Event(Event.LEAVE, user, 'left the room')
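# Usage sketch: these factories build Event records with a human-readable repr,
# e.g. message('alice', 'hi') prints as "[MESSAGE] alice: hi".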
|
eduNEXT/edunext-platform | openedx/core/djangoapps/zendesk_proxy/urls.py | Python | agpl-3.0 | 412 | 0.004854 |
"""
Map urls to the relevant view handlers
"""
from django.conf.urls import url
from openedx.core.djangoapps.zendesk_proxy.v0.views import ZendeskPassthroughView as v0_view
from openedx.core.djangoapps.zendesk_proxy.v1.views import ZendeskPassthroughView as v1_view
urlpatterns = [
url(r'^v0$', v0_view.as_view(), name='zendesk_proxy_v0'),
url(r'^v1$', v1_view.as_view(), name='zendesk_proxy_v1'),
]
|
globalwordnet/OMW | omw/__init__.py | Python | mit | 35,734 | 0.007472 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import csv
from collections import (
defaultdict as dd,
OrderedDict as od
)
from math import log
import datetime
from flask import (
Flask,
render_template,
g,
request,
redirect,
    url_for,
send_from_directory,
flash,
jsonify,
make_response,
Markup,
Response
)
from flask_login import (
login_required,
login_user,
logout_user,
current_user
)
from packaging.version import Version
import gwadoc
import networkx as nx
## profiler
#from werkzeug.contrib.profiler import ProfilerMiddleware
from omw.utils.utils import fetch_sorted_meta_by_version
app = Flask(__name__)
# Common configuration settings go here
app.config['REMEMBER_COOKIE_DURATION'] = datetime.timedelta(minutes=30)
# Installation-specific settings go in omw_config.py
app.config.from_object('config')
# Load these only after creating and configuring the app object
from .common_login import *
from .common_sql import *
from .omw_sql import *
from .wn_syntax import *
import omw.cli
## profiler
#app.config['PROFILE'] = True
#app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
#app.run(debug = True)
################################################################################
# LOGIN
################################################################################
login_manager.init_app(app)
@app.route("/login", methods=["GET", "POST"])
def login():
""" This login function checks if the username & password
match the admin.db; if the authentication is successful,
it passes the id of the user into login_user() """
if request.method == "POST" and \
"username" in request.form and \
"password" in request.form:
username = request.form["username"]
password = request.form["password"]
user = User.get(username)
# If we found a user based on username then compare that the submitted
# password matches the password in the database. The password is stored
        # in a salted hash format, so you must hash the password before comparing it.
if user and hash_pass(password) == user.password:
login_user(user, remember=True)
# FIXME! Get this to work properly...
# return redirect(request.args.get("next") or url_for("index"))
return redirect(url_for("index"))
else:
flash(u"Invalid username, please try again.")
return render_template("login.html")
@app.route("/logout")
@login_required(role=0, group='open')
def logout():
logout_user()
return redirect(url_for("index"))
################################################################################
################################################################################
# SET UP CONNECTION WITH DATABASES
################################################################################
@app.before_request
def connect_dbs():
connect_admin()
connect_omw()
@app.teardown_appcontext
def teardown_dbs(exception):
db = g.pop('admin', None)
if db is not None:
db.close()
db = g.pop('omw', None)
if db is not None:
db.close()
################################################################################
# helper functions
################################################################################
def _get_cookie(name, default):
if name in request.cookies:
return request.cookies.get(name)
else:
return default
################################################################################
# AJAX REQUESTS
################################################################################
@app.route('/_thumb_up_id')
def thumb_up_id():
user = fetch_id_from_userid(current_user.id)
ili_id = request.args.get('ili_id', None)
rate = 1
r = rate_ili_id(ili_id, rate, user)
counts, up_who, down_who = f_rate_summary([ili_id])
html = """ <span style="color:green" title="Who voted up: {}">+{}</span><br>
<span style="color:red" title="Who voted down: {}">-{}</span>
""".format(up_who[int(ili_id)], counts[int(ili_id)]['up'],
down_who[int(ili_id)], counts[int(ili_id)]['down'])
return jsonify(result=html)
@app.route('/_thumb_down_id')
def thumb_down_id():
user = fetch_id_from_userid(current_user.id)
ili_id = request.args.get('ili_id', None)
rate = -1
r = rate_ili_id(ili_id, rate, user)
counts, up_who, down_who = f_rate_summary([ili_id])
html = """ <span style="color:green" title="Who voted up: {}">+{}</span><br>
<span style="color:red" title="Who voted down: {}">-{}</span>
""".format(up_who[int(ili_id)], counts[int(ili_id)]['up'],
down_who[int(ili_id)], counts[int(ili_id)]['down'])
return jsonify(result=html)
@app.route('/_comment_id')
def comment_id():
user = fetch_id_from_userid(current_user.id)
ili_id = request.args.get('ili_id', None)
comment = request.args.get('comment', None)
comment = str(Markup.escape(comment))
dbinsert = comment_ili_id(ili_id, comment, user)
return jsonify(result=dbinsert)
@app.route('/_detailed_id')
def detailed_id():
ili_id = request.args.get('ili_id', None)
rate_hist = fetch_rate_id([ili_id])
comm_hist = fetch_comment_id([ili_id])
users = fetch_allusers()
r_html = ""
for r, u, t in rate_hist[int(ili_id)]:
r_html += '{} ({} — {}): {} <br>'.format(
users[u]['full_name'], users[u]['userID'], t, r)
c_html = ""
for c, u, t in comm_hist[int(ili_id)]:
c_html += '{} ({} — {}): {} <br>'.format(
users[u]['full_name'], users[u]['userID'], t, c)
html = """
<td colspan="9">
<div style="width: 49%; float:left;">
<h6>Ratings</h6>
{}</div>
<div style="width: 49%; float:right;">
<h6>Comments</h6>
{}</div>
</td>""".format(r_html, c_html)
return jsonify(result=html)
@app.route('/_confirm_wn_upload')
def confirm_wn_upload_id():
"""
Ingest the uploaded wordnet into the database and return a report.
This happens when the user has confirmed they want to add a
validated wordnet.
"""
user = fetch_id_from_userid(current_user.id)
fn = request.args.get('fn', None)
report = ingest_wordnet(fn, user)
updateLabels()
return jsonify(result=report)
@app.route('/_add_new_project')
def add_new_project():
user = fetch_id_from_userid(current_user.id)
proj = request.args.get('proj_code', None)
proj = str(Markup.escape(proj))
if user and proj:
dbinsert = insert_new_project(proj, user)
return jsonify(result=dbinsert)
else:
return jsonify(result=False)
@app.route("/_load_lang_selector",methods=["GET"])
def omw_lang_selector():
selected_lang = int(_get_cookie('selected_lang', 1))
selected_lang2 = int(_get_cookie('selected_lang', 1))
lang_id, lang_code = fetch_langs()
html = '<select name="lang" style="font-size: 85%; width: 9em" required>'
for lid in lang_id.keys():
if selected_lang == lid:
html += """<option value="{}" selected>{}</option>
""".format(lid, lang_id[lid][1])
else:
html += """<option value="{}">{}</option>
""".format(lid, lang_id[lid][1])
html += '</select>'
html += '<select name="lang2" style="font-size: 85%; width: 9em" required>'
for lid in lang_id.keys():
if selected_lang2 == lid:
html += """<option value="{}" selected>{}</option>
""".format(lid, lang_id[lid][1])
else:
html += """<option value="{}">{}</option>
""".format(lid, lang_id[lid][1])
html += '</select>'
return jsonify(result=html)
@app.route('/_add_new_language')
def add_new_language():
user = fetch_id_from_userid(current_user.id)
bcp = request.args.get('bcp', None)
bcp = str(Markup.escape(bcp))
iso = request.args.get('iso', None)
iso = str(Markup.escape(iso))
name = request.args.get('name', None)
|
kparal/anaconda
|
pyanaconda/ui/gui/spokes/advstorage/iscsi.py
|
Python
|
gpl-2.0
| 20,233
| 0.003311
|
# iSCSI configuration dialog
#
# Copyright (C) 2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Chris Lumens <clumens@redhat.com>
#
from IPy import IP
from collections import namedtuple
import gi
gi.require_version("GLib", "2.0")
from gi.repository import GLib
from pyanaconda import constants
from pyanaconda.threads import threadMgr, AnacondaThread
from pyanaconda.ui.gui import GUIObject
from pyanaconda.ui.gui.utils import escape_markup
from pyanaconda.i18n import _
from pyanaconda import nm
from pyanaconda.regexes import ISCSI_IQN_NAME_REGEX, ISCSI_EUI_NAME_REGEX
__all__ = ["ISCSIDialog"]
STYLE_NONE = 0
STYLE_CHAP = 1
STYLE_REVERSE_CHAP = 2
Credentials = namedtuple("Credentials", ["style",
"targetIP", "initiator", "username",
"password", "rUsername", "rPassword"])
NodeStoreRow = namedtuple("NodeStoreRow", ["selected", "notLoggedIn", "name", "iface", "portal"])
def discover_no_credentials(builder):
return Credentials(STYLE_NONE,
builder.get_object("targetEntry").get_text(),
builder.get_object("initiatorEntry").get_text(),
"", "", "", "")
def discover_chap(builder):
return Credentials(STYLE_CHAP,
builder.get_object("targetEntry").get_text(),
builder.get_object("initiatorEntry").get_text(),
builder.get_object("chapUsernameEntry").get_text(),
builder.get_object("chapPasswordEntry").get_text(),
"", "")
def discover_reverse_chap(builder):
return Credentials(STYLE_REVERSE_CHAP,
builder.get_object("targetEntry").get_text(),
builder.get_object("initiatorEntry").get_text(),
builder.get_object("rchapUsernameEntry").get_text(),
builder.get_object("rchapPasswordEntry").get_text(),
builder.get_object("rchapReverseUsername").get_text(),
builder.get_object("rchapReversePassword").get_text())
# This list maps the current page from the authNotebook to a function to grab
# credentials out of the UI. This works as long as authNotebook keeps the
# filler page at the front.
discoverMap = [discover_no_credentials, discover_chap, discover_reverse_chap]
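# e.g. when authNotebook shows page 1 (CHAP), discoverMap[1] is discover_chap,
# which reads the CHAP username and password entries above.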
def login_no_credentials(builder):
return Credentials(STYLE_NONE,
"", "",
"", "", "", "")
def login_chap(builder):
return Credentials(STYLE_CHAP,
"", "",
builder.get_object("loginChapUsernameEntry").get_text(),
builder.get_object("loginChapPasswordEntry").get_text(),
"", "")
def login_reverse_chap(builder):
return Credentials(STYLE_REVERSE_CHAP,
"", "",
builder.get_object("loginRchapUsernameEntry").get_text(),
builder.get_object("loginRchapPasswordEntry").get_text(),
builder.get_object("loginRchapReverseUsername").get_text(),
builder.get_object("loginRchapReversePassword").get_text())
# And this list maps the current page from the loginAuthNotebook to a function
# to grab credentials out of the UI. This works as long as loginAuthNotebook
# keeps the filler page at the front, and we check to make sure "Use the
# credentials from discovery" is not selected first.
loginMap = [login_no_credentials, login_chap, login_reverse_chap]
def credentials_valid(credentials):
if credentials.style == STYLE_NONE:
return True
elif credentials.style == STYLE_CHAP:
return credentials.username.strip() != "" and credentials.password != ""
elif credentials.style == STYLE_REVERSE_CHAP:
return credentials.username.strip() != "" and credentials.password != "" and \
credentials.rUsername.strip() != "" and credentials.rPassword != ""
class ISCSIDialog(GUIObject):
"""
.. inheritance-diagram:: ISCSIDialog
:parts: 3
"""
builderObjects = ["iscsiDialog", "nodeStore", "nodeStoreFiltered"]
mainWidgetName = "iscsiDialog"
uiFile = "spokes/advstorage/iscsi.glade"
def __init__(self, data, storage):
GUIObject.__init__(self, data)
self.storage = storage
self.iscsi = self.storage.iscsi()
self._discoveryError = None
self._loginError = False
self._discoveredNodes = []
self._update_devicetree = False
self._authTypeCombo = self.builder.get_object("authTypeCombo")
self._authNotebook = self.builder.get_object("authNotebook")
        self._iscsiNotebook = self.builder.get_object("iscsiNotebook")
self._loginButton = self.builder.get_object("loginButton")
self._retryLoginButton = self.builder.get_object("retryLoginButton")
self._loginAuthTypeCombo = self.builder.get_object("loginAuthTypeCombo")
self._loginAuthNotebook = self.builder.get_object("loginAuthNotebook")
self._loginGrid = self.builder.get_object("loginGrid")
self._loginConditionNotebook = self.builder.get_object("loginConditionNotebook")
self._configureGrid = self.builder.get_object("configureGrid")
self._conditionNotebook = self.builder.get_object("conditionNotebook")
self._bindCheckbox = self.builder.get_object("bindCheckbutton")
self._startButton = self.builder.get_object("startButton")
self._okButton = self.builder.get_object("okButton")
self._cancelButton = self.builder.get_object("cancelButton")
self._retryButton = self.builder.get_object("retryButton")
self._initiatorEntry = self.builder.get_object("initiatorEntry")
self._store = self.builder.get_object("nodeStore")
self._storeFilter = self.builder.get_object("nodeStoreFiltered")
def refresh(self):
self._bindCheckbox.set_active(bool(self.iscsi.ifaces))
self._bindCheckbox.set_sensitive(self.iscsi.mode == "none")
self._authTypeCombo.set_active(0)
self._startButton.set_sensitive(True)
self._loginAuthTypeCombo.set_active(0)
self._storeFilter.set_visible_column(1)
self._initiatorEntry.set_text(self.iscsi.initiator)
self._initiatorEntry.set_sensitive(not self.iscsi.initiatorSet)
@property
def selectedNames(self):
return [itr[2] for itr in self._store if itr[0]]
def run(self):
rc = self.window.run()
self.window.destroy()
# We need to call this to get the device nodes to show up
# in our devicetree.
if self._update_devicetree:
self.storage.devicetree.populate()
return rc
##
## DISCOVERY
##
def on_auth_type_changed(self, widget, *args):
self._authNotebook.set_current_page(widget.get_active())
# When we change the notebook, we also need to reverify the credentials
# in order to set the Start button sensitivity.
self.on_discover_field_changed()
def _discover(self, credentials, bind):
# This needs to be in its own thread, not marked with gtk_action_* be
|
GreyRook/madmin
|
doc/conf.py
|
Python
|
apache-2.0
| 8,136
| 0.006268
|
# -*- coding: utf-8 -*-
#
# MAdmin documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 25 13:48:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MAdmin'
copyright = u'2013, Dominik Schacht'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.1 Alpha'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MAdmindoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto/manual]).
latex_documents = [
('index', 'MAdmin.tex', u'MAdmin Documentation',
u'Dominik Schacht', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'madmin', u'MAdmin Documentation',
[u'Dominik Schacht'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'MAdmin', u'MAdmin Documentation',
u'Dominik Schacht', 'MAdmin', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
SingularityHA/WebUI
|
infrastructure/migrations/0013_module_list_widget.py
|
Python
|
gpl-3.0
| 411
| 0.002433
|
# encoding: utf8
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('infrastructure', '0012_auto_20140209_0400'),
]
operations = [
migrations.AddField(
model_name='module_list',
            name='widget',
field=models.TextField(null=True, blank=True),
preserve_default=True,
),
]
|
repology/repology
|
repology/parsers/parsers/cran.py
|
Python
|
gpl-3.0
| 1,459
| 0.001371
|
# Copyright (C) 2017-2019 Dmitry Marakasov <amdmi3@amdmi3.ru>
#
# This file is part of repology
#
# repology is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# repology is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with repology. If not, see <http://www.gnu.org/licenses/>.
import re
from typing import Iterable
from repology.packagemaker import NameType, PackageFactory, PackageMaker
from repology.parsers import Parser
class CRANCheckSummaryParser(Parser):
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
with open(path, 'r', encoding='utf-8') as htmlfile:
for nline, line in enumerate(htmlfile, 1):
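                # Expected row shape (illustrative):
                # <tr> <td> <a href="pkg/index.html">pkgname</a> </td> <td> 1.2-3 </td>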
match = re.search('<tr> <td> <a href="[^"]+">([^<>]+)</a> </td> <td>[ ]*([^ <>]+)[ ]*</td>', line)
if match:
pkg = factory.begin('line {}'.format(nline))
pkg.add_name(match[1], NameType.CRAN_NAME)
pkg.set_version(match[2])
yield pkg
|
cbitstech/Purple-Robot-Django
|
migrations/0037_auto__add_field_purplerobotdevice_first_reading_timestamp.py
|
Python
|
gpl-3.0
| 10,672
| 0.007309
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'PurpleRobotDevice.first_reading_timestamp'
db.add_column(u'purple_robot_app_purplerobotdevice', 'first_reading_timestamp',
self.gf('django.db.models.fields.BigIntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'PurpleRobotDevice.first_reading_timestamp'
db.delete_column(u'purple_robot_app_purplerobotdevice', 'first_reading_timestamp')
models = {
u'purple_robot_app.purplerobotalert': {
'Meta': {'object_name': 'PurpleRobotAlert'},
'action_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'dismissed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'generated': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'manually_dismissed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'probe': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'severity': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tags': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'})
},
u'purple_robot_app.purplerobotconfiguration': {
'Meta': {'object_name': 'PurpleRobotConfiguration'},
'added': ('django.db.models.fields.DateTimeField', [], {}),
'contents': ('django.db.models.fields.TextField', [], {'max_length': '1048576'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '1024'})
},
u'purple_robot_app.purplerobotdevice': {
'Meta': {'object_name': 'PurpleRobotDevice'},
'config_last_fetched': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'config_last_user_agent': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
            'configuration': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'devices'", 'null': 'True', 'to': u"orm['purple_robot_app.PurpleRobotConfiguration']"}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '1048576', 'null': 'True', 'blank': 'True'}),
'device_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'devices'", 'null': 'True', 'to': u"orm['purple_robot_app.PurpleRobotDeviceGroup']"}),
'device_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256', 'db_index': 'True'}),
'first_reading_timestamp': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'hash_key': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mute_alerts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'performance_metadata': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'max_length': '1048576'}),
'test_device': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'purple_robot_app.purplerobotdevicegroup': {
'Meta': {'object_name': 'PurpleRobotDeviceGroup'},
'configuration': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'groups'", 'null': 'True', 'to': u"orm['purple_robot_app.PurpleRobotConfiguration']"}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '1048576', 'null': 'True', 'blank': 'True'}),
'group_id': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '256'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
},
u'purple_robot_app.purplerobotdevicenote': {
'Meta': {'object_name': 'PurpleRobotDeviceNote'},
'added': ('django.db.models.fields.DateTimeField', [], {}),
'device': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': u"orm['purple_robot_app.PurpleRobotDevice']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'max_length': '1024'})
},
u'purple_robot_app.purplerobotevent': {
'Meta': {'object_name': 'PurpleRobotEvent'},
'event': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logged': ('django.db.models.fields.DateTimeField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'payload': ('django.db.models.fields.TextField', [], {'max_length': '8388608', 'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'db_index': 'True'})
},
u'purple_robot_app.purplerobotexportjob': {
'Meta': {'object_name': 'PurpleRobotExportJob'},
'destination': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'export_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'probes': ('django.db.models.fields.TextField', [], {'max_length': '8196', 'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'state': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '512'}),
'users': ('django.db.models.fields.TextField', [], {'max_length': '8196', 'null': 'True', 'blank': 'True'})
},
u'purple_robot_app.purplerobotpayload': {
'Meta': {'object_name': 'PurpleRobotPayload'},
'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'errors': ('django.db.models.fields.TextField', [], {'max_length': '65536', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'payload': ('django.db.models.fields.TextField', [], {'max_length': '8388608'}),
'process_tags': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'db_index': 'True'})
},
u'purple_robot_app.purplerobotreading': {
'Meta': {'object_name': 'PurpleRobotReading', 'index_together': "[['probe', 'user_id'], ['logged', 'user_id'], ['probe', 'logged', 'user_id']]"},
'attac
|
darioml/fyp-public
|
data/make_pngs.py
|
Python
|
gpl-2.0
| 394
| 0.025381
|
from os import listdir
from os.path import isfile, join
import paer
mypath = 'aedat/'
onlyfiles = [ f for f in listdir(mypath) if isfile(join(mypath,f)) and f.endswith('.aedat')]
for file in onlyfiles:
ae = paer.aefile(mypath + str(file))
aed= paer.aedata(ae).downsample((16,16))
    paer.create_pngs(aed, '16x16_' + str(file) + '_',path='more_images/temp',step=3000, dim=(16,16))
|
synsun/robotframework
|
utest/running/test_handlers.py
|
Python
|
apache-2.0
| 13,048
| 0.00138
|
import unittest
import sys
import inspect
from robot.running.handlers import _PythonHandler, _JavaHandler, DynamicHandler
from robot import utils
from robot.utils.asserts import *
from robot.running.testlibraries import TestLibrary
from robot.running.dynamicmethods import (
GetKeywordArguments, GetKeywordDocumentation, RunKeyword)
from robot.errors import DataError
from classes import NameLibrary, DocLibrary, ArgInfoLibrary
from ArgumentsPython import ArgumentsPython
if utils.JYTHON:
import ArgumentsJava
def _get_handler_methods(lib):
    attrs = [getattr(lib, a) for a in dir(lib) if not a.startswith('_')]
return [a for a in attrs if inspect.ismethod(a)]
def _get_java_handler_methods(lib):
# This hack assumes that all java handlers used start with 'a_' -- easier
# than excluding 'equals' etc. otherwise
return [a for a in _get_handler_methods(lib) if a.__name__.startswith('a_') ]
class LibraryMock:
def __init__(self, name='MyLibrary', scope='GLOBAL'):
self.name = self.orig_name = name
self.scope = scope
class TestPythonHandler(unittest.TestCase):
def test_name(self):
for method in _get_handler_methods(NameLibrary()):
handler = _PythonHandler(LibraryMock('mylib'), method.__name__, method)
assert_equal(handler.name, method.__doc__)
assert_equal(handler.longname, 'mylib.'+method.__doc__)
def test_docs(self):
for method in _get_handler_methods(DocLibrary()):
handler = _PythonHandler(LibraryMock(), method.__name__, method)
assert_equal(handler.doc, method.expected_doc)
assert_equal(handler.shortdoc, method.expected_shortdoc)
def test_arguments(self):
for method in _get_handler_methods(ArgInfoLibrary()):
handler = _PythonHandler(LibraryMock(), method.__name__, method)
args = handler.arguments
argspec = (args.positional, args.defaults, args.varargs, args.kwargs)
expected = eval(method.__doc__)
assert_equal(argspec, expected, method.__name__)
def test_arg_limits(self):
for method in _get_handler_methods(ArgumentsPython()):
handler = _PythonHandler(LibraryMock(), method.__name__, method)
exp_mina, exp_maxa = eval(method.__doc__)
assert_equal(handler.arguments.minargs, exp_mina)
assert_equal(handler.arguments.maxargs, exp_maxa)
def test_getarginfo_getattr(self):
handlers = TestLibrary('classes.GetattrLibrary').handlers
assert_equal(len(handlers), 3)
for handler in handlers:
assert_true(handler.name in ['Foo','Bar','Zap'])
assert_equal(handler.arguments.minargs, 0)
assert_equal(handler.arguments.maxargs, sys.maxsize)
class TestDynamicHandlerCreation(unittest.TestCase):
def test_none_doc(self):
self._assert_doc(None, '')
def test_empty_doc(self):
self._assert_doc('')
def test_non_empty_doc(self):
self._assert_doc('This is some documentation')
def test_non_ascii_doc(self):
self._assert_doc(u'P\xe4iv\xe4\xe4')
if not utils.IRONPYTHON:
def test_with_utf8_doc(self):
doc = u'P\xe4iv\xe4\xe4'
self._assert_doc(doc.encode('UTF-8'), doc)
def test_invalid_doc_type(self):
self._assert_fails('Return value must be string.', doc=True)
def test_none_argspec(self):
self._assert_spec(None, maxargs=sys.maxsize, vararg='varargs', kwarg=False)
def test_none_argspec_when_kwargs_supported(self):
self._assert_spec(None, maxargs=sys.maxsize, vararg='varargs', kwarg='kwargs')
def test_empty_argspec(self):
self._assert_spec([])
def test_mandatory_args(self):
for argspec in [['arg'], ['arg1', 'arg2', 'arg3']]:
self._assert_spec(argspec, len(argspec), len(argspec), argspec)
def test_only_default_args(self):
self._assert_spec(['defarg1=value', 'defarg2=defvalue'], 0, 2,
['defarg1', 'defarg2'], ['value', 'defvalue'])
def test_default_value_may_contain_equal_sign(self):
self._assert_spec(['d=foo=bar'], 0, 1, ['d'], ['foo=bar'])
def test_varargs(self):
self._assert_spec(['*vararg'], 0, sys.maxsize, vararg='vararg')
def test_kwargs(self):
self._assert_spec(['**kwarg'], 0, 0, kwarg='kwarg')
def test_varargs_and_kwargs(self):
self._assert_spec(['*vararg', '**kwarg'],
0, sys.maxsize, vararg='vararg', kwarg='kwarg')
def test_integration(self):
self._assert_spec(['arg', 'default=value'], 1, 2,
['arg', 'default'], ['value'])
self._assert_spec(['arg', 'default=value', '*var'], 1, sys.maxsize,
['arg', 'default'], ['value'], 'var')
self._assert_spec(['arg', 'default=value', '**kw'], 1, 2,
['arg', 'default'], ['value'], None, 'kw')
self._assert_spec(['arg', 'default=value', '*var', '**kw'], 1, sys.maxsize,
['arg', 'default'], ['value'], 'var', 'kw')
def test_invalid_argspec_type(self):
for argspec in [True, [1, 2]]:
self._assert_fails("Return value must be list of strings.", argspec)
def test_mandatory_arg_after_default_arg(self):
for argspec in [['d=v', 'arg'], ['a', 'b', 'c=v', 'd']]:
self._assert_fails('Invalid argument specification: '
'Non-default argument after default arguments.',
argspec)
def test_positional_after_vararg(self):
for argspec in [['*foo', 'arg'], ['arg', '*var', 'arg'],
['a', 'b=d', '*var', 'c'], ['*var', '*vararg']]:
self._assert_fails('Invalid argument specification: '
'Positional argument after varargs.', argspec)
def test_kwarg_not_last(self):
for argspec in [['**foo', 'arg'], ['arg', '**kw', 'arg'],
['a', 'b=d', '**kw', 'c'], ['**kw', '*vararg'],
['**kw', '**kwarg']]:
self._assert_fails('Invalid argument specification: '
'Only last argument can be kwargs.', argspec)
def test_missing_kwargs_support(self):
self._assert_fails("Too few 'run_keyword' method parameters"
" for **kwargs support.",
['**kwargs'])
def _assert_doc(self, doc, expected=None):
expected = doc if expected is None else expected
assert_equal(self._create_handler(doc=doc).doc, expected)
def _assert_spec(self, argspec, minargs=0, maxargs=0, positional=[],
defaults=[], vararg=None, kwarg=None):
if kwarg is None:
kwargs_support_modes = [True, False]
elif kwarg is False:
kwargs_support_modes = [False]
kwarg = None
else:
kwargs_support_modes = [True]
for kwargs_support in kwargs_support_modes:
arguments = self._create_handler(argspec,
kwargs_support=kwargs_support
).arguments
assert_equal(arguments.minargs, minargs)
assert_equal(arguments.maxargs, maxargs)
assert_equal(arguments.positional, positional)
assert_equal(arguments.defaults, defaults)
assert_equal(arguments.varargs, vararg)
assert_equal(arguments.kwargs, kwarg)
def _assert_fails(self, error, argspec=None, doc=None):
assert_raises_with_msg(DataError, error,
self._create_handler, argspec, doc)
def _create_handler(self, argspec=None, doc=None, kwargs_support=False):
lib = LibraryMock('TEST CASE')
if kwargs_support:
lib.run_keyword = lambda name, args, kwargs: None
else:
lib.run_keyword = lambda name, args: None
lib.run_keyword.__name__ = 'run_keyword'
doc = GetKeywordDocumentation(lib)._handle_return_value(doc)
|
brosner/django-notification
|
notification/message.py
|
Python
|
mit
| 3,596
| 0.001947
|
from django.db.models import get_model
from django.utils.translation import ugettext
# a notice like "foo and bar are now friends" is stored in the database
# as "{auth.User.5} and {auth.User.7} are now friends".
#
# encode_object takes an object and turns it into "{app.Model.pk}" or
# "{app.Model.pk.msgid}" if named arguments are used in send()
# decode_object takes "{app.Model.pk}" and turns it into the object
#
# encode_message takes either ("%s and %s are now friends", [foo, bar]) or
# ("%(foo)s and %(bar)s are now friends", {'foo':foo, 'bar':bar}) and turns
# it into "{auth.User.5} and {auth.User.7} are now friends".
#
# decode_message takes "{auth.User.5} and {auth.User.7}" and converts it
# into a string using the given decode function to convert the object to
# string representation
#
# message_to_text and message_to_html use decode_message to produce a
# text and html version of the message respectively.
def encode_object(obj, name=None):
encoded = "%s.%s.%s" % (obj._meta.app_label, obj._meta.object_name, obj.pk)
if name:
encoded = "%s.%s" % (encoded, name)
return "{%s}" % encoded
def encode_message(message_template, objects):
if objects is None:
return message_template
if isinstance(objects, list) or isinstance(objects, tuple):
return message_template % tuple(encode_object(obj) for obj in objects)
if type(objects) is dict:
return message_template % dict((name, encode_object(obj, name)) for name, obj in objects.iteritems())
return ""
def decode_object(ref):
decoded = ref.split(".")
if len(decoded) == 4:
        app, name, pk, msgid = decoded
return get_model(app, name).objects.get(pk=pk), msgid
app, name, pk = decoded
return get_model(app, name).objects.get(pk=pk), None
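# e.g. decode_object("auth.User.5") -> (<User pk=5>, None) and
# decode_object("auth.User.5.foo") -> (<User pk=5>, "foo")
# (illustrative; the actual objects come from the database).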
class FormatException(Exception):
pass
def decode_message(message, decoder):
out = []
objects = []
mapping = {}
in_field = False
prev = 0
for index, ch in enumerate(message):
if not in_field:
if ch == "{":
in_field = True
if prev != index:
out.append(message[prev:index])
prev = index
elif ch == "}":
raise FormatException("unmatched }")
elif in_field:
if ch == "{":
raise FormatException("{ inside {}")
elif ch == "}":
in_field = False
obj, msgid = decoder(message[prev+1:index])
if msgid is None:
objects.append(obj)
out.append("%s")
else:
mapping[msgid] = obj
out.append("%("+msgid+")s")
prev = index + 1
if in_field:
raise FormatException("unmatched {")
if prev <= index:
out.append(message[prev:index+1])
result = "".join(out)
if mapping:
args = mapping
else:
args = tuple(objects)
return ugettext(result) % args
def message_to_text(message):
def decoder(ref):
obj, msgid = decode_object(ref)
return unicode(obj), msgid
return decode_message(message, decoder)
def message_to_html(message):
def decoder(ref):
obj, msgid = decode_object(ref)
if hasattr(obj, "get_absolute_url"): # don't fail silenty if get_absolute_url hasn't been defined
return u"""<a href="%s">%s</a>""" % (obj.get_absolute_url(), unicode(obj)), msgid
else:
return unicode(obj), msgid
return decode_message(message, decoder)
|
stuntgoat/conf-syrup
|
conf_syrup/network_type.py
|
Python
|
mit
| 307
| 0
|
from commands import getoutput
def NetworkFromPrefix(val):
"""
Return the network with the network prefix given.
"""
    ifaces = getoutput('hostname -I') # *nix only.
sifaces = ifaces.strip().split()
for iface in sifaces:
if iface.startswith(val):
return iface
|
yattom/crossword
|
crossword2.py
|
Python
|
mit
| 7,931
| 0.003404
|
# coding: utf-8
import re
from crossword import *
class Crossword2(Crossword):
def __init__(self):
self.grid = OpenGrid()
self.connected = {}
self.used_words = []
def copy(self):
copied = Crossword2()
copied.grid = self.grid.copy()
copied.connected = self.connected.copy()
copied.used_words = self.used_words[:]
return copied
def embed(self, pos, direction, word):
assert word not in self.used_words
super(Crossword2, self).embed(pos, direction, word)
self.used_words.append(word)
def all_disconnected_sequences(self):
'''
>>> c = Crossword2()
>>> c.embed((0, 0), HORIZONTAL, 'ANT')
>>> c.embed((0, 0), VERTICAL, 'ATOM')
>>> c.embed((1, 2), HORIZONTAL, 'IT')
>>> c.embed((3, 0), HORIZONTAL, 'MEET')
>>> c.dump()
_#____
#ANT#_
_T#IT#
_O____
#MEET#
_#____
>>> c.all_disconnected_sequences()
[((0, 2), 2, 'T'), ((1, 0), 2, 'T'), ((2, 0), 2, 'O'), ((0, 1), 1, 'N'), ((3, 1), 1, 'E'), ((0, 2), 1, 'TI'), ((0, 2), 1, 'TI.E'), ((3, 2), 1, 'E'), ((1, 3), 1, 'T'), ((1, 3), 1, 'T.T'), ((3, 3), 1, 'T')]
'''
sequences = []
for pos, direction, length in [((r, self.grid.colmin), HORIZONTAL, self.grid.width) for r in range(self.grid.rowmin, self.grid.rowmax + 1)] + [((self.grid.rowmin, c), VERTICAL, self.grid.height) for c in range(self.grid.colmin, self.grid.colmax + 1)]:
line = self.grid.get_word(pos, direction, length)
poslist = self.grid.poslist(pos, direction, length)
sequences += self.extract_sequences(line, poslist, direction)
return [(p, d, w) for (p, d, w) in sequences if not w.endswith('.')]
def extract_sequences(self, line, poslist, direction, idx=0, current_seq=None):
'''
>>> c = Crossword2()
>>> c.extract_sequences('ABC', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
[((0, 0), 2, 'ABC')]
>>> c.extract_sequences('_A_', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
[((0, 1), 2, 'A'), ((0, 1), 2, 'A.')]
>>> c.extract_sequences('A_C', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
[((0, 0), 2, 'A'), ((0, 0), 2, 'A.C'), ((0, 2), 2, 'C')]
>>> c.extract_sequences('A#C', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
[((0, 0), 2, 'A'), ((0, 2), 2, 'C')]
>>> c.extract_sequences('A_#B_C', [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0,5)], HORIZONTAL)
[((0, 0), 2, 'A'), ((0, 0), 2, 'A.'), ((0, 3), 2, 'B'), ((0, 3), 2, 'B.C'), ((0, 5), 2, 'C')]
>>> c.extract_sequences('A_B__C', [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0,5)], HORIZONTAL)
[((0, 0), 2, 'A'), ((0, 0), 2, 'A.B'), ((0, 2), 2, 'B'), ((0, 0), 2, 'A.B.'), ((0, 2), 2, 'B.'), ((0, 0), 2, 'A.B..C'), ((0, 2), 2, 'B..C'), ((0, 5), 2, 'C')]
'''
if not current_seq: current_seq = []
if idx >= len(line): return current_seq
c = line[idx]
pos = poslist[idx]
if c == FILLED:
return current_seq + self.extract_sequences(line, poslist, direction, idx + 1, [])
if c == EMPTY:
new_current_seq = [(p, d, s + '.') for (p, d, s) in current_seq]
return current_seq + self.extract_sequences(line, poslist, direction, idx + 1, new_current_seq)
if current_seq:
new_current_seq = [(p, d, s + c) for (p, d, s) in current_seq if not self.is_connected(poslist[idx - 1], pos)]
if any([s.endswith('.') for (p, d, s) in current_seq]):
new_current_seq.append((pos, direction, c))
return self.extract_sequences(line, poslist, direction, idx + 1, new_current_seq)
else:
new_current_seq = [(pos, direction, c)]
return self.extract_sequences(line, poslist, direction, idx + 1, new_current_seq)
def build_crossword2(words, monitor=False):
'''
>>> ans = list(build_crossword2(['ANT', 'ART', 'RAT']))
>>> ans[0].dump()
#ANT#
>>> ans[1].dump()
_#___
#ANT#
_R___
_T___
_#___
>>> ans[2].dump()
___#___
__#ANT#
___R___
#RAT#__
___#___
>>> ans[3].dump()
___#_
___R_
_#_A_
#ANT#
_R_#_
_T___
_#___
>>> ans[4].dump()
_#___
_R___
#ANT#
_T___
_#___
>>> ans[5].dump()
___#_
_#_A_
_R_R_
#ANT#
_T_#_
_#___
>>> ans[6].dump()
___#___
___R___
__#ANT#
#ART#__
___#___
>>> ans[7].dump()
___#_
___A_
___R_
#ANT#
___#_
>>> ans[8].dump()
___#__
_#RAT#
___R__
#ANT#_
___#__
>>> ans[9].dump()
___#_
_#_A_
_R_R_
#ANT#
_T_#_
_#___
>>> ans[10].dump()
___#___
___A___
__#RAT#
#ANT#__
___#___
>>> ans[11].dump()
___#_
___R_
___A_
#ANT#
___#_
>>> ans[12].dump()
___#__
_#ART#
___A__
#ANT#_
___#__
>>> ans[13].dump()
___#___
___R___
__#ART#
#ANT#__
___#___
>>> ans[14].dump()
___#_
___R_
_#_A_
#ANT#
_R_#_
_T___
_#___
>>> len(ans)
15
'''
crosswords = [Crossword2()]
crosswords[0].embed((0, 0), HORIZONTAL, words[0])
while True:
if not crosswords: break
crosswords = sorted(crosswords, key=lambda c: evaluate_crossword(c))
base = crosswords.pop(0)
if monitor:
print ('%d candidates...'%(len(crosswords)))
if isinstance(monitor, dict):
base.dump(empty=monitor['EMPTY'], filled=monitor['FILLED'])
else:
base.dump()
print ('')
try:
sequences = base.all_disconnected_sequences()
if is_valid_crossword(sequences):
yield base
candidates = generate_candidates(words, base, sequences)
crosswords += candidates
except ValueError:
# discard this base
pass
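# A candidate grid is valid when every disconnected sequence is either a
# single letter or still contains an empty cell ('.'), i.e. no unintended
# multi-letter word has been formed by adjacent entries.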
def is_valid_crossword(sequences):
return all([len(s) <= 1 or s.find('.') > -1 for _, _, s in sequences])
def generate_candidates(words, base, sequences):
fit_words = []
for sequence in sequences:
available_words = [w for w in words if w not in base.used_words]
fit_words_for_seq = [(p, d, w) for (p, d, w) in propose_words(sequence, available_words) if base.is_fit(p, d, w)]
_, _, s = sequence
if not fit_words_for_seq and len(s) > 1 and s.find('.') == -1:
# dead end; discard this base
raise ValueError('no candidates found')
fit_words += fit_words_for_seq
candidates = []
for p, d, w in fit_words:
copy = base.copy()
copy.embed(p, d, w)
candidates.append(copy)
return candidates
def propose_words(sequence, words):
(p, d, seq) = sequence
proposed_words = []
for word in words:
idx = 0
while True:
m = re.search(seq, word[idx:])
if not m: break
proposed_words.append((OpenGrid.pos_inc(p, -(m.start() + idx), d), d, word))
idx += m.start() + 1
return proposed_words
def evaluate_crossword(c):
# return -len(c.used_words)
    return (c.grid.width + c.grid.height) * 1.0 / len(c.used_words) ** 2
# return (c.grid.width * c.grid.height) * 1.0 / sum([len(w) for w in c.used_words])
def pickup_crosswords(words, dump_option=None, monitor=False):
best = 9999
for c in build_crossword2(words, monitor=monitor):
if evaluate_crossword(c) < best:
if dump_option:
c.dump(empty=dump_option['EMPTY'], filled=dump_option['FILLED'])
else:
c.dump()
best = evaluate_crossword(c)
print ('score: %f'%(best))
print ('')
if __name__ == '__main__':
import doctest
doctest.testmod()
|
stebenve86/poem_reader
|
utilities/add_poem.py
|
Python
|
apache-2.0
| 1,540
| 0.016234
|
#!/usr/bin/env python3
"""
Copyright 2015 Stefano Benvenuti <ste.benve86@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import json
import shutil
# helper function for reading a file content
def read_file(filename):
f = None
try:
f = open(filename)
content = json.load(f)
except Exception as e:
print("File \"%s\" cannot be opened or read: %s", filename, e)
sys.exit(1)
finally:
if f is not None:
f.close()
return content
if len(sys.argv) != 2:
print("""
USAGE: ./add_poem.py JSON_DELTA_FILE_PATH
""")
sys.exit(1)
conf_file = os.path.join("..","poems","poems.json")
# reads old configuration file and new content
content = read_file(conf_file)
new_content = read_file(sys.argv[1])
# merge the values
content.update(new_content)
# write file
shutil.copyfile(conf_file, conf_file + ".bak")
f = None
try:
f = open(conf_file,'w')
json.dump(content, f)
except Exception as e:
print("File \"%s\" cannot be opened or written: %s",
|
filename, e)
sys.exit(1)
finally:
if f is not None:
f.close()
|
apiaas/gae-base-app-with-drf
|
src/landing/urls.py
|
Python
|
gpl-3.0
| 145
| 0.006897
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
    url(r'^$', views.main_page, name='main_page'),
)
|
JonSteinn/Kattis-Solutions
|
src/Soft Passwords/Python 3/main.py
|
Python
|
gpl-3.0
| 297
| 0.013468
|
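# Prints 'Yes' when the two input passwords match exactly, when the first is
# the second with a single digit prepended or appended, or when the first is
# the second with the case of every letter swapped.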
print('Yes' if (lambda a,b : (lambda number: a == b or (a[1:] == b and a[0] in number and len(a)-1 == len(b)) or (a[:-1] == b and a[-1] in number and len(a)-1 == len(b)) or ''.join(x.upper() if x.islower() else x.lower() for x in b) == a)({str(i) for i in range(10)}))(input(), input()) else 'No')
|
mganeva/mantid
|
Testing/SystemTests/tests/analysis/DirectInelasticDiagnostic2.py
|
Python
|
gpl-3.0
| 3,610
| 0.001662
|
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
#pylint: disable=invalid-name,no-init
import os
from systemtesting import MantidSystemTest
from mantid.simpleapi import *
from mantid.kernel import PropertyManager
from mantid import config
def MAX_DBL():
import sys
return sys.float_info[0]/2
def getNamedParameter(ws, name):
return ws.getInstrument().getNumberParameter(name)[0]
class DirectInelasticDiagnostic2(MantidSystemTest):
saved_diag_file=''
def requiredMemoryMB(self):
"""Requires 4Gb"""
return 4000
def runTest(self):
red_man = PropertyManager()
red_man_name = "__dgs_reduction_properties"
pmds[red_man_name] = red_man
if 'detvan' in mtd:
detvan = mtd['detvan']
else:
detvan = Load('MAP17186.raw')
if 'sample' in mtd:
sample = mtd['sample']
else:
sample = Load('MAP17269.raw')
# Libisis values to check against
# All PropertyManager properties need to be set
red_man["LowCounts"] = 1e-10
red_man["HighCounts"] = 1e10
red_man["LowOutlier"] = 0.01
red_man["HighOutlier"] = 100.
red_man["ErrorBarCriterion"] = 0.0
red_man["MedianTestLow"] = 0.1
red_man["MedianTestHigh"] = 2.0
red_man["SamBkgMedianTestLow"] = 0.0
red_man["SamBkgMedianTestHigh"] = 1.5
red_man["SamBkgErrorbarCriterion"] = 3.3
red_man["RejectZeroBackground"] = True
# Things needed to run vanadium reduction
red_man["IncidentBeamNormalisation"] = "ToMonitor"
red_man["DetVanIntRangeUnits"] = "Energy"
# properties affecting diagnostics:
#reducer.wb_integr_range = [20,300]
red_man["DetVanIntRangeLow"] = 20.
red_man["DetVanIntRangeHigh"] = 300.
red_man["BackgroundCheck"] = True
red_man["BackgroundTofStart"]=12000.
red_man["BackgroundTofEnd"]=18000.
#reducer.bkgd_range=[12000,18000]
        diag_mask = DgsDiagnose(DetVanWorkspace=detvan, SampleWorkspace=sample,
ReductionProperties=red_man_name)
MaskDetectors(sample, MaskedWorkspace=diag_mask)
# Save the masked spectra numbers to a simple ASCII file for comparison
self.saved_diag_file = os.path.join(config['defaultsave.directory'],
'CurrentDirectInelasticDiag2.txt')
with open(self.saved_diag_file, 'w') as handle:
spectrumInfo = sample.spectrumInfo()
            for index in range(sample.getNumberHistograms()):
if spectrumInfo.isMasked(index):
spec_no = sample.getSpectrum(index).getSpectrumNo()
handle.write(str(spec_no) + '\n')
def cleanup(self):
if os.path.exists(self.saved_diag_file):
if self.succeeded():
os.remove(self.saved_diag_file)
else:
os.rename(self.saved_diag_file,
os.path.join(config['defaultsave.directory'],
'DirectInelasticDiag2-Mismatch.txt'))
def validateMethod(self):
return 'validateASCII'
def validate(self):
return (self.saved_diag_file, 'DirectInelasticDiagnostic.txt')
|
tectronics/pyafipws.web2py-app
|
languages/zh-tw.py
|
Python
|
agpl-3.0
| 8,854
| 0.02909
|
# coding: utf8
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"更新" 是選擇性的條件式, 格式就像 "欄位1=\'值\'". 但是 JOIN 的資料不可以使用 update 或是 delete"',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%s rows deleted': '已刪除 %s 筆',
'%s rows updated': '已更新 %s 筆',
'(something like "it-it")': '(格式類似 "zh-tw")',
'A new version of web2py is available': '新版的 web2py 已發行',
'A new version of web2py is available: %s': '新版的 web2py 已發行: %s',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': '注意: 登入管理帳號需要安全連線(HTTPS)或是在本機連線(localhost).',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': '注意: 因為在測試模式不保證多執行緒安全性,也就是說不可以同時執行多個測試案例',
'ATTENTION: you cannot edit the running application!': '注意:不可編輯正在執行的應用程式!',
'About': '關於',
'About application': '關於本應用程式',
'Admin is disabled because insecure channel': '管理功能(Admin)在不安全連線環境下自動關閉',
'Admin is disabled because unsecure channel': '管理功能(Admin)在不安全連線環境下自動關閉',
'Administrator Password:': '管理員密碼:',
'Are you sure you want to delete file "%s"?': '確定要刪除檔案"%s"?',
'Are you sure you want to uninstall application "%s"': '確定要移除應用程式 "%s"',
'Are you sure you want to uninstall application "%s"?': '確定要移除應用程式 "%s"',
'Asíncrona': 'Asíncrona',
'Authentication': '驗證',
'Available databases and tables': '可提供的資料庫和資料表',
'Ayuda': 'Ayuda',
'Cannot be empty': '不可空白',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': '無法編譯:應用程式中含有錯誤,請除錯後再試一次.',
'Change Password': '變更密碼',
'Check to delete': '打勾代表刪除',
'Check to delete:': '點選以示刪除:',
'Client IP': '客戶端網址(IP)',
'Comprobantes': 'Comprobantes',
'Configuración': 'Configuración',
'Configurar': 'Configurar',
'Consultas': 'Consultas',
'Controller': '控件',
'Controllers': '控件',
'Copyright': '版權所有',
'Cotización': 'Cotización',
'Create new application': '創建應用程式',
'Current request': '目前網路資料要求(request)',
'Current response': '目前網路資料回應(response)',
'Current session': '目前網路連線資訊(session)',
'DB Model': '資料庫模組',
'DESIGN': '設計',
'Database': '資料庫',
'Date and Time': '日期和時間',
'Delete': '刪除',
'Delete:': '刪除:',
'Deploy on Google App Engine': '配置到 Google App Engine',
'Description': '描述',
'Design for': '設計為了',
'Detalles': 'Detalles',
'E-mail': '電子郵件',
'EDIT': '編輯',
'Edit': '編輯',
'Edit Profile': '編輯設定檔',
'Edit This App': '編輯本應用程式',
'Edit application': '編輯應用程式',
'Edit current record': '編輯當前紀錄',
'Editing file': '編輯檔案',
'Editing file "%s"': '編輯檔案"%s"',
'Emisión': 'Emisión',
'Error logs for "%(app)s"': '"%(app)s"的錯誤紀錄',
'Estado (dummy)': 'Estado (dummy)',
'FacturaLibre': 'FacturaLibre',
'FacturaLibre. Aplicación en desarrollo': 'FacturaLibre. Aplicación en desarrollo',
'FacturaLibre. Aplicación web para factura electrónica': 'FacturaLibre. Aplicación web para factura electrónica',
'FacturaLibre: interfase alternativa': 'FacturaLibre: interfase alternativa',
'FacturaLibre: interfaz de usuario alternativa': 'FacturaLibre: interfaz de usuario alternativa',
'First name': '名',
'Functions with no doctests will result in [passed] tests.': '沒有 doctests 的函式會顯示 [passed].',
'Group ID': '群組編號',
'Hello World': '嗨! 世界',
'Import/Export': '匯入/匯出',
'Index': '索引',
'Información General': 'Información General',
'Información Técnica': 'Información Técnica',
'Inicio': 'Inicio',
'Installed applications': '已安裝應用程式',
'Internal State': '內部狀態',
'Invalid Query': '不合法的查詢',
'Invalid action': '不合法的動作(action)',
'Invalid email': '不合法的電子郵件',
'Language files (static strings) updated': '語言檔已更新',
'Languages': '各國語言',
'Last name': '姓',
'Last saved on:': '最後儲存時間:',
'Layout': '網頁配置',
'License for': '軟體版權為',
'Listar comprobantes.': 'Listar comprobantes.',
'Listar detalles': 'Listar detalles',
'Login': '登入',
'Login to the Administrative Interface': '登入到管理員介面',
'Logout': '登出',
'Lost Password': '密碼遺忘',
'Main Menu': '主選單',
'Menu Model': '選單模組(menu)',
'Models': '資料模組',
'Modules': '程式模組',
'NO': '否',
'Name': '名字',
'New Record': '新紀錄',
'No databases in this application': '這應用程式不含資料庫',
'Origin': '原文',
'Original/Translation': '原文/翻譯',
'Password': '密碼',
"Password fields don't match": '密碼欄不匹配',
'Peeking at file': '選擇檔案',
'Powered by': '基於以下技術構建:',
'Query:': '查詢:',
'Record ID': '紀錄編號',
'Register': '註冊',
'Registration key': '註冊金鑰',
'Remember me (for 30 days)': '記住我(30 天)',
'Reset Password key': '重設密碼',
'Resolve Conflict file': '解決衝突檔案',
'Role': '角色',
'Rows in table': '在資料表裏的資料',
'Rows selected': '筆資料被選擇',
'Saved file hash:': '檔案雜湊值已紀錄:',
'Secuencial': 'Secuencial',
'Servicios Web': 'Servicios Web',
'Static files': '靜態檔案',
'Stylesheet': '網頁風格檔',
'Submit': '傳送',
'Sure you want to delete this object?': '確定要刪除此物件?',
'Table name': '資料表名稱',
'Testing application': '測試中的應用程式',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"查詢"是一個像 "db.表1.欄位1==\'值\'" 的條件式. 以"db.表1.欄位1==db.表2.欄位2"方式則相當於執行 JOIN SQL.',
'There are no controllers': '沒有控件(controllers)',
'There are no models': '沒有資料庫模組(models)',
'There are no modules': '沒有程式模組(modules)',
'There are no static files': '沒有靜態檔案',
'There are no translators, only default language is supported': '沒有翻譯檔,只支援原始語言',
'There are no views': '沒有視圖',
'This is the %(filename)s template': '這是%(filename)s檔案的樣板(template)',
'Ticket': '問題單',
'Timestamp': '時間標記',
'Unable to check for upgrades': '無法做升級檢查',
'Unable to download': '無法下載',
'Unable to download app': '無法下載應用程式',
'Update:': '更新:',
'Upload existing application': '更新存在的應用程式',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': '使用下列方式來組合更複雜的條件式, (...)&(...) 代表同時存在的條件, (...)|(...) 代表擇一的條件, ~(...)則代表反向條件.',
'User %(id)s Logged-in': '使用者 %(id)s 已登入',
'User %(id)s Registered': '使用者 %(id)s 已註冊',
'User ID': '使用者編號',
'Verify Password': '驗證密碼',
'View': '視圖',
'Views': '視圖',
'WSBFE': 'WSBFE',
'WSFEX': 'WSFEX',
'WSFEv0': 'WSFEv0',
'WSFEv1': 'WSFEv1',
'WSMTXCA': 'WSMTXCA',
'Welcome %s': '歡迎 %s',
'Welcome to web2py': '歡迎使用 web2py',
'YES': '是',
'about': '關於',
'appadmin is disabled because insecure channel': '因為來自非安全通道,管理介面關閉',
'cache': '快取記憶體',
'change password': '變更密碼',
'click here for online examples': '點此處進入線上範例',
'click here for the administrative interface': '點此處進入管理介面',
'customize me!': '請調整我!',
'data uploaded': '資料已上傳',
'database': '資料庫',
'database %s select': '已選擇 %s 資料庫',
'db': 'db',
'design': '設計',
'done!': '完成!',
'edit profile': '編輯設定檔',
'export as csv file': '以逗號分隔檔(csv)格式匯出',
'insert new': '插入新資料',
'insert new %s': '插入新資料 %s',
'invalid request': '不合法的網路要求(request)',
'login': '登入',
'logout': '登出',
'new record inserted': '已插入新紀錄',
'next 100 rows': '往後 100 筆',
'or import from csv file': '或是從逗號分隔檔(CSV)匯入',
'previous 100 rows': '往前 100 筆',
'record': '紀錄',
'record does not exist': '紀錄不存在',
'record id': '紀錄編號',
'register': '註冊',
'selected': '已選擇',
'state': '狀態',
'table': '資料表',
'unable to parse csv file': '無法解析逗號分隔檔(csv)',
'Últ.ID': 'Últ.ID',
'Últ.Nro.Cbte.': 'Últ.Nro.Cbte.',
}
|
unpingco/mp4utils
|
mp4_cut.py
|
Python
|
mit
| 5,940
| 0.051852
|
from datetime import datetime, timedelta
from subprocess import PIPE, call, Popen
import tempfile, os, argparse, sys, re
def get_file_duration(infilename):
'''
    :param infilename: input mp4 filename
    :type infilename: str
    :returns: a datetime (anchored at 2017-01-01) whose time fields hold the file's duration
'''
cmd=['ffmpeg','-i',infilename]
p=Popen(cmd,stdout=PIPE,stderr=PIPE)
    output=p.stderr.read().decode('utf8')
match=re.search('Duration: (.*?)\.',output)
assert match
h,m,s= parse_ts(match.group(1))
return datetime(2017,1,1,h,m,s)
def parse_ts(instring):
'''
parse time notation
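    e.g. '11:44' -> (0, 11, 44) and '01:02:03' -> (1, 2, 3)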
'''
x=instring.split(':')
if len(x)==2:
x.insert(0,'0')
h,m,s = map(int,x)
return (h,m,s)
def format_ts(instring):
h,m,s=parse_ts(instring)
return '%02d:%02d:%02d'%(h,m,s)
def run_cmd_dt(start,end,infname,outfname):
assert isinstance(start,datetime)
assert isinstance(end,datetime)
start_time='%02d:%02d:%02d'%(start.hour,start.minute,start.second)
end_time='%02d:%02d:%02d'%(end.hour,end.minute,end.second)
run_cmd(start_time,end_time,infname,outfname)
def run_cmd(start='00:00:00',end='23:00:00',infname='foo.mp4',outfname='outfoo.mp4'):
'''
trigger call to `ffmpeg`
'''
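    # Illustrative resulting call:
    #   ffmpeg -ss 00:15:00 -t 00:02:34 -i foo.mp4 -acodec copy -vcodec copy outfoo.mp4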
duration = get_duration(start,end)
cmd=['ffmpeg','-ss',format_ts(start),'-t',duration,'-i',
infname,'-acodec','copy','-vcodec','copy',
outfname]
call(cmd,stdout=PIPE,stderr=None)
def get_duration(start='00:00:00',end=''):
'''
end can be negative if prefixed with `n` as in `n00:00:04`
which means four seconds from the end of the file.
'''
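    # For example, start '00:01:00' with end '00:03:30' yields '00:02:30';
    # end 'n00:00:04' measures up to four seconds before the end of the file.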
if end and not end.startswith('n'): #
he,me,se=parse_ts(end)
end_time=datetime(2017,1,1,he,me,se)
elif end.startswith('n'):
he,me,se=parse_ts(end[1:])
end_time=get_file_duration(args.infile)-timedelta(hours=he,minutes=me,seconds=se)
else:
end_time=get_file_duration(args.infile)
hs,ms,ss=parse_ts(start)
start_time=datetime(2017,1,1,hs,ms,ss)
duration=str(end_time - start_time)
if len(duration)==7: duration = '0'+duration
return duration
if __name__ == '__main__':
parse = argparse
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description='''Cut a section out of MP4 file and return it using ffmpeg
without re-encoding.
Example: extract from start to 00:11:44
% python mp4_cut.py -e 11:44 -i L.mp4 -o foo.mp4
Example: extract from 00:15:00 to 00:17:34
% python mp4_cut.py -s 15:00 -e 17:34 -i L.mp4 -o foo.mp4
You can also take the complement of the selected slice by using the
--invert flag
    % python mp4_cut.py --invert -s 15:00 -e 17:34 -i L.mp4 -o foo.mp4
The two complementary parts are joined to make the output file.''')
parser.add_argument("-i","--input-file",
dest="infile",
help='input file',
default='',
)
parser.add_argument("-o","--output-file",
dest="outfile",
help='output file',
default='',
)
parser.add_argument("-s","--start-time",
dest="start_time",
help='hh:mm:ss',
default='00:00:00',
)
parser.add_argument("-e","--end-time",
dest="end_time",
help='hh:mm:ss',
default='',
)
parser.add_argument("-c","--chunk_duration",
help='Divide into <n> chunks of this duration hh:mm:ss. Overrides other flags!',
default='',
)
parser.add_argument("--invert",
dest='invert',
default=False,
action='store_true',
help="return complement of indicated section")
args = parser.parse_args()
if args.chunk_duration:
# this over-rides other options
hc,mc,sc=parse_ts(args.chunk_duration)
start_time=datetime(2017,1,1,0,0,0)
end_time=datetime(2017,1,1,hc,mc,sc)
file_length = get_file_duration(args.infile)
dt = timedelta(hours=hc,minutes=mc,seconds=sc)
outfilename_head = args.outfile.replace('.mp4','')
n=0
while end_time < file_length:
run_cmd_dt(start_time,end_time,args.infile,'%s_%03d.mp4'%(outfilename_head,n))
start_time = end_time
end_time = start_time + dt
n += 1
sys.exit()
duration = get_duration(args.start_time,args.end_time)
if args.invert:
if args.start_time=='00:00:00': # tail section
duration = '23:00:00'
cmd=['ffmpeg','-ss',format_ts(args.end_time),'-t',duration,'-i',
args.infile,'-acodec','copy','-vcodec','copy',
args.outfile]
call(cmd,stdout=PIPE,stderr=None)
else: # middle section
start_time='00:00:00'
filename1=tempfile.mktemp('.mp4',dir=os.getcwd())
filename2=tempfile.mktemp('.mp4',dir=os.getcwd())
run_cmd(start_time,args.start_time,args.infile,filename1)
run_cmd(args.end_time,'23:00:00',args.infile,filename2)
fname= tempfile.mktemp(suffix='.txt',dir=os.getcwd())
with open(fname,'w') as fd:
fd.write('file '+os.path.split(filename1)[1]+'\n')
fd.write('file '+os.path.split(filename2)[1]+'\n')
# ffmpeg -safe 0 -f concat -i list.txt -c copy outfile.mp4
cmd=['ffmpeg','-safe','0','-f','concat','-i',fname,'-c','copy',args.outfile ]
call(cmd,stdout=PIPE,stderr=None)
for i in (filename1,filename2,fname):
os.unlink(i)
else:
run_cmd(args.start_time,args.end_time,args.infile,args.outfile)
|
krummas/cassandra
|
pylib/cqlshlib/test/run_cqlsh.py
|
Python
|
apache-2.0
| 12,156
| 0.002303
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: this testing tool is *nix specific
from __future__ import unicode_literals
import os
import sys
import re
import contextlib
import subprocess
import signal
import math
from time import time
from . import basecase
from os.path import join, normpath
def is_win():
return sys.platform in ("cygwin", "win32")
if is_win():
from .winpty import WinPty
DEFAULT_PREFIX = ''
else:
import pty
DEFAULT_PREFIX = os.linesep
DEFAULT_CQLSH_PROMPT = DEFAULT_PREFIX + '(\S+@)?cqlsh(:\S+)?> '
DEFAULT_CQLSH_TERM = 'xterm'
try:
Pattern = re._pattern_type
except AttributeError:
# Python 3.7+
Pattern = re.Pattern
def get_smm_sequence(term='xterm'):
"""
Return the set meta mode (smm) sequence, if any.
On more recent Linux systems, xterm emits the smm sequence
before each prompt.
"""
result = ''
if not is_win():
tput_proc = subprocess.Popen(['tput', '-T{}'.format(term), 'smm'], stdout=subprocess.PIPE)
tput_stdout = tput_proc.communicate()[0]
if (tput_stdout and (tput_stdout != b'')):
result = tput_stdout
if isinstance(result, bytes):
result = result.decode("utf-8")
return result
DEFAULT_SMM_SEQUENCE = get_smm_sequence()
cqlshlog = basecase.cqlshlog
def set_controlling_pty(master, slave):
os.setsid()
os.close(master)
for i in range(3):
os.dup2(slave, i)
if slave > 2:
os.close(slave)
os.close(os.open(os.ttyname(1), os.O_RDWR))
@contextlib.contextmanager
def raising_signal(signum, exc):
"""
    Within the wrapped context, the given signal will interrupt system
    calls and will raise the given exception class. The preexisting signal
handling will be reinstated on context exit.
"""
def raiser(signum, frames):
raise exc()
oldhandlr = signal.signal(signum, raiser)
try:
yield
finally:
signal.signal(signum, oldhandlr)
class TimeoutError(Exception):
pass
@contextlib.contextmanager
def timing_out_itimer(seconds):
if seconds is None:
yield
return
with raising_signal(signal.SIGALRM, TimeoutError):
oldval, oldint = signal.getitimer(signal.ITIMER_REAL)
if oldval != 0.0:
raise RuntimeError("ITIMER_REAL already in use")
signal.setitimer(signal.ITIMER_REAL, seconds)
try:
yield
finally:
signal.setitimer(signal.ITIMER_REAL, 0)
@contextlib.contextmanager
def timing_out_alarm(seconds):
if seconds is None:
yield
return
with raising_signal(signal.SIGALRM, TimeoutError):
oldval = signal.alarm(int(math.ceil(seconds)))
if oldval != 0:
signal.alarm(oldval)
raise RuntimeError("SIGALRM already in use")
try:
yield
finally:
signal.alarm(0)
if is_win():
try:
import eventlet
except ImportError as e:
sys.exit("evenlet library required to run cqlshlib tests on Windows")
def timing_out(seconds):
return eventlet.Timeout(seconds, TimeoutError)
else:
# setitimer is new in 2.6, but it's still worth supporting, for potentially
# faster tests because of sub-second resolution on timeouts.
if hasattr(signal, 'setitimer'):
timing_out = timing_out_itimer
else:
timing_out = timing_out_alarm
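# Usage sketch (hypothetical `runner` object): interrupt a blocking read that
# produces no data within ~2 seconds.
#   with timing_out(2.0):
#       data = runner.read(4096)   # raises TimeoutError via SIGALRM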
def noop(*a):
pass
class ProcRunner:
def __init__(self, path, tty=True, env=None, args=()):
self.exe_path = path
self.args = args
self.tty = bool(tty)
self.realtty = self.tty and not is_win()
if env is None:
env = {}
self.env = env
self.readbuf = ''
self.start_proc()
def start_proc(self):
preexec = noop
stdin = stdout = stderr = None
cqlshlog.info("Spawning %r subprocess with args: %r and env: %r"
% (self.exe_path, self.args, self.env))
if self.realtty:
masterfd, slavefd = pty.openpty()
preexec = (lambda: set_controlling_pty(masterfd, slavefd))
self.proc = subprocess.Popen((self.exe_path,) + tuple(self.args),
env=self.env, preexec_fn=preexec,
stdin=stdin, stdout=stdout, stderr=stderr,
close_fds=False)
os.close(slavefd)
self.childpty = masterfd
self.send = self.send_tty
self.read = self.read_tty
else:
stdin = stdout = subprocess.PIPE
stderr = subprocess.STDOUT
self.proc = subprocess.Popen((self.exe_path,) + tuple(self.args),
env=self.env, stdin=stdin, stdout=stdout,
stderr=stderr, bufsize=0, close_fds=False)
self.send = self.send_pipe
if self.tty:
self.winpty = WinPty(self.proc.stdout)
self.read = self.read_winpty
else:
self.read = self.read_pipe
def close(self):
cqlshlog.info("Closing %r subprocess." % (self.exe_path,))
if self.realtty:
os.close(self.childpty)
else:
self.proc.stdin.close()
cqlshlog.debug("Waiting for exit")
return self.proc.wait()
def send_tty(self, data):
if not isinstance(data, bytes):
data = data.encode("utf-8")
os.write(self.childpty, data)
def send_pipe(self, data):
self.proc.stdin.write(data)
def read_tty(self, blksize, timeout=None):
buf = os.read(self.childpty, blksize)
if isinstance(buf, bytes):
buf = buf.decode("utf-8")
return buf
def read_pipe(self, blksize, timeout=None):
buf = self.proc.stdout.read(blksize)
if isinstance(buf, bytes):
buf = buf.decode("utf-8")
return buf
def read_winpty(self, blksize, timeout=None):
buf = self.winpty.read(blksize, timeout)
if isinstance(buf, bytes):
buf = buf.decode("utf-8")
return buf
def read_until(self, until, blksize=4096, timeout=None,
flags=0, ptty_timeout=None, replace=[]):
if not isinstance(until, Pattern):
until = re.compile(until, flags)
cqlshlog.debug("Searching for %r" % (until.pattern,))
got = self.readbuf
self.readbuf = ''
with timing_out(timeout):
while True:
                val = self.read(blksize, ptty_timeout)
                for replace_target in replace:
                    if (replace_target != ''):
                        val = val.replace(replace_target, '')
cqlshlog.debug("read %r from subproc" % (val,))
if val == '':
raise EOFError("'until' pattern %r not found" % (until.pattern,))
got += val
m = until.search(got)
if m is not None:
self.readbuf = got[m.end():]
got = got[:m.end()]
return got
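    # Usage sketch (hypothetical runner instance): block until the cqlsh
    # prompt appears, or fail after 10 seconds.
    #   output = runner.read_until(DEFAULT_CQLSH_PROMPT, timeout=10)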
def read_lines(self, numlines, blksize=4096, timeout=None):
lines = []
with timing_out(timeout):
for n in range(numlines):
lines.append(self.read_until('\n', blksize=blksize))
        return lines
|
tvhong/one
|
src/game.py
|
Python
|
gpl-3.0
| 8,378
| 0.012175
|
import pygame, random, time, copy
from Piece import Piece
from pygame.locals import *
from gameconstants import *
# block direction constants
CMD_ROTATE_R, CMD_ROTATE_L, CMD_MOVE_R, CMD_MOVE_L = range(4)
OCCUPIED_S = '1'
OCCUPIED_F = '0'
BLANK = ' '
PENDING_MAX = 50 # max number of elements in pendings
PENDING_MIN = 4 # min number of elements in pendings before renewing
COL_STATIC = 1
COL_FALLING = 2
COL_NONE = 0
REVERSE_CMD = {CMD_ROTATE_R:CMD_ROTATE_L,
CMD_ROTATE_L:CMD_ROTATE_R,
CMD_MOVE_R :CMD_MOVE_L,
CMD_MOVE_L :CMD_MOVE_R}
logF = open('gamelog.txt','w')
def init():
global board, pendings, fallingPieces, staticPieces, softDroping
global currentPiece,nextPiece
global level, fallingTime, nextLevelScore, score
global delaying, lastDrop
board = [[BLANK]*BOARDCOLS for i in range(BOARDROWS)]
pendings = [(random.randrange(TYPES), random.randrange(4)) \
for i in range(PENDING_MAX)]
fallingPieces = []
staticPieces = []
nextPiece = None
currentPiece = None
delaying = False
lastDrop = 0
level = 1
fallingTime = _getFallingTime(level)
nextLevelScore = _getNextLvlScore(level)
score = 0
softDroping = False
update.oldTime = int(time.time() * 1000)
def update():
global fallingTime, score, nextLevelScore, fallingPieces
global currentPiece
global delaying,lastDrop
newTime = time.time()
lines = []
# time to move down
if (newTime - lastDrop)*1000 > fallingTime:
#print 'updating !!!!'
lastDrop = newTime
if currentPiece != None:
_moveDown(currentPiece)
# check if any line is eaten
lines = _removeEatenLines()
# print lines;
if len(lines) != 0:
                delaying = True
score += _calculateScore(lines)
if score >= nextLevelScore:
levelUp()
elif delaying:
            hardDrop()
delaying = False
elif currentPiece == None:
#print 'making a new piece !!!! so fun!!!'
currentPiece = _getNextPiece()
_addToBoard(currentPiece)
fallingPieces.append(currentPiece)
logF.write(_getStrBoard())
return lines
def levelUp():
global level, fallingTime, nextLevelScore
level += 1
fallingTime = _getFallingTime(level)
nextLevelScore = _getNextLvlScore(level)
def getPieces():
return fallingPieces + staticPieces
def getNextPiece ():
global nextPiece
return nextPiece
def rotateRight():
_movePiece(CMD_ROTATE_R)
def rotateLeft():
_movePiece(CMD_ROTATE_L)
def moveRight():
_movePiece(CMD_MOVE_R)
def moveLeft():
_movePiece(CMD_MOVE_L)
def softDrop():
global fallingTime, softDroping
if not softDroping:
softDroping = True
fallingTime /= 3
def stopSoftDrop():
global fallingTime, softDroping
if softDroping:
softDroping = False
fallingTime = _getFallingTime(level)
def hardDrop():
global fallingPieces,lastDrop
    while (len(fallingPieces) > 0):
        for piece in fallingPieces[:]:  # iterate a copy: _moveDown may remove pieces
            _moveDown(piece)
lastDrop = time.time()
def checkGameEnd():
for x in range(BOARDCOLS):
if board[PATTERNSIZE-1][x] == OCCUPIED_S:
return True
return False
def close():
logF.close()
def getProjection():
global board, currentPiece
projectPiece = None
if currentPiece != None:
projectPiece = copy.copy(currentPiece)
col = _checkCollision(projectPiece)
while col != COL_STATIC:
projectPiece.moveDown()
col = _checkCollision(projectPiece)
projectPiece.moveUp()
return projectPiece
########################################################################
### Game helper functions
########################################################################
def _getFallingTime(level):
    return 540 - level * 40  # TODO: need a better function
# 500, 460, 420, 380, 340 ...
def _getNextLvlScore(level):
    return level * 1000  # TODO: need a better function
def _removeEatenLines():
'''only check the static pieces'''
global board, staticPieces
eatenLines = []
for y in range(BOARDROWS):
eaten = True
for x in range(BOARDCOLS):
            if board[y][x] != OCCUPIED_S: eaten = False
if eaten:
eatenLines.append(y)
# clear the row in board
for x in range(BOARDCOLS): board[y][x] = BLANK
# clear the row in staticPieces
for p in staticPieces[:]:
ptop, pbot = p.split(y)
if pbot != p:
staticPieces.remove(p)
if ptop != None:
assert len(ptop.boxes)>0
fallingPieces.append(ptop)
_addToBoard(ptop,OCCUPIED_F)
if pbot != None:
assert len(pbot.boxes)>0
staticPieces.append(pbot)
_addToBoard(pbot,OCCUPIED_S)
return eatenLines
def _calculateScore(eatenLines):
global level
    n = len(eatenLines)
baseScore = 100
if n == 2: baseScore = 300
elif n == 3: baseScore = 500
elif n == 4: baseScore = 800
#TODO: consider combo?
return n * baseScore * level
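# Worked example (added comment): clearing 4 lines at level 2 scores
# 4 * 800 * 2 = 6400 points under the table above.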
def _checkCollision(piece):
'''return true if collide'''
#print 'checking collision!!!'
global board
assert piece != None
for x, y in piece.boxes:
if x>=BOARDCOLS or x<0 or y>=BOARDROWS or board[y][x] == OCCUPIED_S:
return COL_STATIC
for x, y in piece.boxes:
if board[y][x] == OCCUPIED_F:
return COL_FALLING
return COL_NONE
def _movePiece(command):
'''not for moveDown'''
global fallingPieces,currentPiece
if currentPiece == None: return # try to prune line eating case
p = currentPiece
_removeFromBoard(p)
if command == CMD_ROTATE_R:
p.rotateRight()
elif command == CMD_ROTATE_L:
p.rotateLeft()
elif command == CMD_MOVE_R:
p.moveRight()
elif command == CMD_MOVE_L:
p.moveLeft()
# reverse if the command is not possible
    if _checkCollision(p) != COL_NONE:  # _checkCollision returns COL_* codes, not booleans
if command == CMD_ROTATE_L:
p.rotateRight()
elif command == CMD_ROTATE_R:
p.rotateLeft()
elif command == CMD_MOVE_L:
p.moveRight()
elif command == CMD_MOVE_R:
p.moveLeft()
_addToBoard(p)
def _getNextPiece ():
global nextPiece
if nextPiece == None:
nextPiece = _generateNewPiece()
newPiece = nextPiece
nextPiece = _generateNewPiece()
return newPiece
def _generateNewPiece():
global pendings
# refill if needed
if (len(pendings) < PENDING_MIN):
pendings = pendings + [(random.randrange(TYPES),random.randrange(4)) \
for i in range(PENDING_MAX - PENDING_MIN)]
    pending = pendings.pop(0)
#print 'im the real new piece here! u imposters!'
return Piece(pending[0], (BOARDCOLS - PATTERNSIZE)/2, 0, pending[1])
'''
def _cmp(piece1, piece2):
# TODO: error here
y1 = piece1.boxes[len(piece1.boxes)-1][1] # get the lowest y
y2 = piece2.boxes[len(piece2.boxes)-1][1]
if (y1 > y2):
return 1
if (y1 < y2):
return -1
return 0
'''
def _moveDown (piece):
global board, fallingPieces, staticPieces, currentPiece
assert piece != None
_removeFromBoard(piece)
piece.moveDown()
col = _checkCollision(piece)
if col==COL_STATIC:
piece.moveUp()
fallingPieces.remove(piece)
staticPieces.append(piece)
_addToBoard(piece,OCCUPIED_S)
if piece == currentPiece:
currentPiece = None
else:
if col==COL_FALLING:
piece.moveUp()
_addToBoard(piece,OCCUPIED_F)
def _getStrBoard():
s = '\n---+---+---\n'
for y in range(BOARDROWS):
for x in range(BOARDCOLS):
s += str(board[y][x])
        s += '\n'
    return s
|
zr40/scc
|
debugger/cli.py
|
Python
|
mit
| 6,986
| 0.031492
|
import cmd
import json
try:
import readline
except ImportError:
pass
from lib.asmdecoder import AsmDecoder
from lib.uploadprogram import uploadProgram
def run(hardware):
cli = CommandLineInterface(hardware)
cli.printStatus()
cli.cmdloop()
class CommandLineInterface(cmd.Cmd):
def __init__(self, hardware, *args, **kwargs):
cmd.Cmd.__init__(self, *args, **kwargs)
self.hardware = hardware
self.asmdecoder = AsmDecoder(hardware)
self.running = False
self.codeAddressAliases = []
self.codeAddressAliasesDict = {}
self.memoryAddresses = {}
def printStatus(self):
reg = self.hardware.getRegisters()
pc = reg[12] << 8 | reg[13]
print
print ' A: %02X B: %02X SP: %02X' % (reg[0], reg[1], reg[18]<< 8 | reg[19])
print ' R0: %02X R2: %02X R4: %02X R6: %02X' % (reg[2], reg[4], reg[6], reg[8])
print ' R1: %02X R3: %02X R5: %02X R7: %02X' % (reg[3], reg[5], reg[7], reg[9])
print 'PSW: %02X (%s) DPTR: %04X' % (reg[14], self.parsePSW(reg[14]), reg[10] << 8 | reg[11])
print 'Unknown: %02X %02X %02X %02X' % (reg[15], reg[16], reg[17], reg[20])
self.asmdecoder.markVisited(pc)
print
print 'PC = %04X (%s)' % (pc, self.asmdecoder.getStringForAddress(pc))
address, mnemonic = self.asmdecoder.getMnemonic(pc)
self.printInstruction(address, mnemonic, pc, showAlias=False)
def parsePSW(self, psw):
if psw & 0b10000000:
cy = 'C'
else:
cy = '-'
if psw & 0b01000000:
ac = 'A'
else:
ac = '-'
if psw & 0b00100000:
f0 = '*'
else:
f0 = '-'
rs = (psw & 0b00011000) >> 3
if psw & 0b00000100:
ov = 'O'
else:
ov = '-'
if psw & 0b00000010:
f1 = '*'
else:
f1 = '-'
if psw & 0b00000001:
p = 'P'
else:
p = '-'
return '%s%s%s%s%s%s%s' % (cy, ac, f0, rs, ov, f1, p)
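    # Illustrative (added comment): parsePSW(0b10000101) returns 'C--0O-P',
    # i.e. carry, register bank 0, overflow and parity set.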
def printInstruction(self, address, mnemonic, pc, showAlias=True):
joined = ' '.join((mnemonic[0], ', '.join(mnemonic[1:])))
if address == pc:
marker = '-->'
else:
marker = ' '
if showAlias and address in self.codeAddressAliasesDict:
print ' (%s)' % self.codeAddressAliasesDict[address]
print '%s %04X: %s' % (marker, address, joined)
def do_list(self, line):
'''Shows the previous, current and next instructions located around the specified
address. (Default: program counter)'''
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
instructions = []
pc = self.hardware.getPC()
if line:
target = int(line, 16)
else:
target = self.hardware.getPC()
address = target - 1
for i in xrange(5):
address, mnemonic = self.asmdecoder.getMnemonic(address, direction=-1)
instructions.insert(0, (address, mnemonic))
address -= 1
address = target
for i in xrange(6):
address, mnemonic = self.asmdecoder.getMnemonic(address)
instructions.append((address, mnemonic))
address += 1
for address, mnemonic in instructions:
self.printInstruction(address, mnemonic, pc)
def do_show(self, line):
'''Shows contents of a variable.
Syntax: show <variable>'''
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
if line not in self.memoryAddresses:
print "Variable '%s' not found." % line
return
address = self.memoryAddresses[line]
if address[0] == 'internal':
mem = self.hardware.readDirect(address[1], 0x01)
elif address[0] == 'external':
mem = self.hardware.readExternal(address[1], 0x01)
print '%04X %02X' % (address[1], mem[0])
def do_mem(self, line):
'''Shows memory contents.
Syntax: mem <type> <address>
type can be one of: direct indirect external code (may be abbreviated)
mem shows a block of size 0x20 containing the specified address.'''
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
parts = [part for part in line.split(' ') if part]
if len(parts) != 2 or parts[0][0] not in ('d', 'i', 'e', 'c'):
print 'syntax: mem <type> <address>'
print 'type can be one of: direct indirect external code (may be abbreviated)'
            return
        address = int(parts[1], 16) & 0xffe0
if parts[0][0] == 'd':
mem = self.hardware.readDirect(address, 0x20)
elif parts[0][0] == 'i':
mem = self.hardware.readIndirect(address, 0x20)
elif parts[0][0] == 'e':
mem = self.hardware.readExternal(address, 0x20)
elif parts[0][0] == 'c':
mem = self.hardware.readCode(address, 0x20)
print ('%04X ' + ' %02X' * 8) % ((address, ) + tuple(mem[0:8]))
print ('%04X ' + ' %02X' * 8) % ((address + 8, ) + tuple(mem[8:16]))
print ('%04X ' + ' %02X' * 8) % ((address + 16, ) + tuple(mem[16:24]))
print ('%04X ' + ' %02X' * 8) % ((address + 24, ) + tuple(mem[24:32]))
def do_step(self, line):
'Executes the specified number of instructions. (Default: 1)'
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
steps = 1
if line:
steps = int(line)
while steps:
self.hardware.step()
steps -= 1
self.printStatus()
def do_load(self, line):
'''Uploads a program to the hardware.
Syntax: load <path-to-hexfile>'''
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
if line[-4:] == '.hex':
line = line[:-4]
try:
with open(line + '.hex') as inputFile:
prog = inputFile.read()
except IOError, e:
print "Error reading '%s.hex': %s" % (line, e)
return
uploadProgram(prog, self.hardware)
        self.do_loaddebug(line)
self.asmdecoder.invalidateCache()
self.asmdecoder.markVisited(self.hardware.getPC())
self.printStatus()
def do_loaddebug(self, line):
'''Loads debug information for a program.
Syntax: loaddebug <path-to-scdebugfile>'''
debugData = {'codeAddressAliases': None, 'memoryAddresses': None}
        if line[-8:] == '.scdebug':
            line = line[:-8]
        try:
            with open(line + '.scdebug') as inputFile:
                debugData = json.load(inputFile)
        except IOError, e:
            print "Error reading '%s.scdebug': %s" % (line, e)
            return
self.codeAddressAliases = debugData['codeAddressAliases']
self.asmdecoder.addressAliases = self.codeAddressAliases
self.codeAddressAliasesDict = dict(self.codeAddressAliases)
self.memoryAddresses = debugData['memoryAddresses']
def do_run(self, line):
'''Resumes execution of the program.
go disables all commands and enables stop.'''
if self.running:
            print 'Program is running. Stop execution to issue commands.'
            return
return
self.running = True
self.hardware.run()
def do_stop(self, line):
if not self.running:
print "Can't stop. Program is not running."
return
self.hardware.stop()
        self.running = False
self.printStatus()
def do_reset(self, line):
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
self.hardware.reset()
self.printStatus()
def do_exit(self, line):
'Quits the debugger.'
return True
def emptyline(self):
pass
def do_EOF(self, line):
print
return self.do_exit(line)
# def postcmd
|
UtrechtUniversity/yoda-ansible
|
library/irods_moduser.py
|
Python
|
gpl-3.0
| 1,822
| 0.001098
|
#!/usr/bin/python
# Copyright (c) 2017-2018 Utrecht University
# GNU General Public License v3.0
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'supported_by': 'community',
'status': ['preview']
}
from ansible.module_utils.basic import *
IRODSCLIENT_AVAILABLE = False
try:
from irods.session import iRODSSession
from irods.models import User
from irods.exception import UserDoesNotExist, iRODSException
except ImportError:
pass
else:
IRODSCLIENT_AVAILABLE = True
def get_session():
env_file = os.path.expanduser('~/.irods/irods_environment.json')
with open(env_file) as data_file:
ienv = json.load(data_file)
return (iRODSSession(irods_env_file=env_file), ienv)
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(default=None, required=True),
option=dict(default=None, required=True),
value=dict(default=None, required=True)
),
supports_check_mode=True)
name = module.params["name"]
option = module.params["option"]
value = module.params["value"]
if IRODSCLIENT_AVAILABLE:
try:
session, ienv = get_session()
except iRODSException:
module.fail_json(
msg="Could not establish irods connection. Please check ~/.irods/irods_environment.json"
)
else:
module.fail_json(msg="python-irodsclient needs to be installed")
changed = False
try:
        if not module.check_mode:
            resource = session.users.modify(name, option, value)
    except UserDoesNotExist:
module.fail_json(msg="User does not exist.")
else:
changed = True
module.exit_json(
changed=changed,
irods_environment=ienv)
if __name__ == '__main__':
main()
|
maxcutler/wp-xmlrpc-rest-wrapper
|
wp-rest.py
|
Python
|
mit
| 13,839
| 0.002457
|
from datetime import timedelta
from flask import Flask, json, helpers, request
from flask.views import MethodView
from wordpress_xmlrpc import Client
from wordpress_xmlrpc import methods as wp_methods
from wordpress_xmlrpc.methods import taxonomies as wp_taxonomies
app = Flask(__name__)
wp = Client('http://localhost/wptrunk/src/xmlrpc.php', 'maxcutler', 'maxcutler')
blog_time_zone = wp.call(wp_methods.options.GetOptions(['time_zone']))[0].value
tz_delta = timedelta(seconds = int(blog_time_zone) * 3600)
default_page_size = 10
def route_to_abs(route):
return request.url_root + route[1:]
@app.route('/')
def api_route():
response = {}
option_map = {
'blog_title': 'name',
'blog_tagline': 'description',
'home_url': 'URL',
}
options = wp.call(wp_methods.options.GetOptions(option_map.keys()))
for option in options:
key = option_map.get(option.name, None)
if key:
response[key] = option.value
resources = {}
post_types = wp.call(wp_methods.posts.GetPostTypes())
for name, post_type in post_types.items():
if name == 'attachment':
continue
endpoint_params = {}
if (name != 'post'):
endpoint_params['post_type'] = name
endpoint = route_to_abs(helpers.url_for(PostCollectionApi.name, **endpoint_params))
resources[name] = {
'versions': {
'v1': endpoint,
'latest': endpoint
},
'supports': ['GET', 'POST', 'DELETE'],
}
extra_resources = [
(UserApi.name, UserCollectionApi.name),
(FileApi.name, FileCollectionApi.name),
(ImageApi.name, ImageCollectionApi.name),
(VideoApi.name, VideoCollectionApi.name),
(AudioApi.name, AudioCollectionApi.name),
(TaxonomyApi.name, TaxonomyCollectionApi.name)
]
for singular, plural in extra_resources:
endpoint = route_to_abs(helpers.url_for(plural))
resources[singular] = {
'versions': {
'v1': endpoint,
'latest': endpoint
},
'supports': ['GET', 'POST', 'DELETE']
}
response['resources'] = resources
return json.jsonify(response)
class PostApi(MethodView):
name = 'post'
media_type = 'application/vnd.wordpress.post.v1'
@staticmethod
def from_xmlrpc_custom_field(field):
return {
'id': field['id'],
'key': field['key'],
'value': field['value'],
'_meta': {
'links': {
'self': '',
},
'supports': ['GET', 'PUT', 'DELETE']
}
}
@staticmethod
def from_xmlrpc(obj):
author = None
if (obj.user):
author = UserApi.from_xmlrpc(wp.call(wp_methods.users.GetUser(obj.user)))
return {
'id': obj.id,
'title': obj.title,
'status': obj.post_status,
'type': obj.post_type,
'link': obj.link,
'date': (obj.date + tz_delta).isoformat(),
'modified': (obj.date_modified + tz_delta).isoformat(),
'format': obj.post_format,
'slug': obj.slug,
'guid': obj.guid,
'excerpt': {
'raw': obj.excerpt
},
'content': {
'raw': obj.content
},
'author': author,
'comment_status': obj.comment_status,
'ping_status': obj.ping_status,
'sticky': obj.sticky,
'date_gmt': obj.date.isoformat(),
'modified_gmt': obj.date_modified.isoformat(),
'terms': map(TaxonomyTermApi.from_xmlrpc, obj.terms),
'metadata': map(PostApi.from_xmlrpc_custom_field, obj.custom_fields),
'_meta': {
'links': {
'self': route_to_abs(helpers.url_for(PostApi.name, id=obj.id)),
'comments': route_to_abs(helpers.url_for(CommentCollectionApi.name, parent_id=obj.id))
},
'supports': ['GET', 'PUT', 'DELETE'],
'media_type': PostApi.media_type
}
}
def get(self, id):
post = wp.call(wp_methods.posts.GetPost(id))
return json.jsonify(PostApi.from_xmlrpc(post))
class PostCollectionApi(MethodView):
name = 'posts'
def get(self):
page = int(request.values.get('page', 1))
post_type = request.values.get('post_type', 'post')
posts = wp.call(wp_methods.posts.GetPosts({
'number': default_page_size,
'offset': (page - 1) * default_page_size,
'post_type': post_type
}))
response = {}
response['items'] = map(PostApi.from_xmlrpc, posts)
meta = {
'supports': ['GET', 'POST']
}
links = {}
paging_params = {}
if (post_type != 'post'):
paging_params['post_type'] = post_type
if len(posts) == default_page_size:
links['next'] = route_to_abs(helpers.url_for(PostCollectionApi.name, page=page+1, **paging_params))
if page > 1:
params = {}
if (page > 2):
                params['page'] = page - 1  # previous page, not the next one
links['prev'] = route_to_abs(helpers.url_for(PostCollectionApi.name, **dict(paging_params, **params)))
meta['links'] = links
response['_meta'] = meta
return json.jsonify(response)
class CommentApi(MethodView):
name = 'comment'
media_type = 'application/vnd.wordpress.comment.v1'
@staticmethod
def from_xmlrpc(obj):
return {
'_meta': {
'media_type': CommentApi.media_type,
'supports': ['GET', 'PUT', 'DELETE'],
'links': {
'self': route_to_abs(helpers.url_for(CommentApi.name, parent_id=obj.post, id=obj.id))
}
},
'id': obj.id,
'date': obj.date_created.isoformat(),
'status': obj.status,
'content': obj.content,
'link': obj.link,
'author': obj.author,
'author_url': obj.author_url,
'author_email': obj.author_email,
'author_ip': obj.author_ip
}
def get(self, parent_id, id):
comment = wp.call(wp_methods.comments.GetComment(id))
return json.jsonify(CommentApi.from_xmlrpc(comment))
class CommentCollectionApi(MethodView):
name = 'comments'
def get(self, parent_id):
response = {}
page = int(request.values.get('page', 1))
comments = wp.call(wp_methods.comments.GetComments({
'post_id': parent_id,
'number': default_page_size,
'offset': (page - 1) * default_page_size
}))
response['items'] = map(CommentApi.from_xmlrpc, comments)
response['_meta'] = {
'supports': ['GET', 'POST'],
'links': {
'self': route_to_abs(helpers.url_for(CommentCollectionApi.name, parent_id=parent_id)),
'parent': route_to_abs(helpers.url_for(PostApi.name, id=parent_id))
}
}
return json.jsonify(response)
class UserApi(MethodView):
name = 'user'
media_type = 'application/vnd.wordpress.user.v1'
@staticmethod
def from_xmlrpc(obj):
return {
'_meta': {
'media_type': UserApi.media_type,
                'supports': ['GET'],
'links': {
'self': route_to_abs(helpers.url_for(UserApi.name, id=obj.id))
}
},
'id': obj.id,
'username': obj.username,
'nickname': obj.nickname,
'description': obj.bio,
'email': obj.email,
'url': obj.url
}
def get(self, id):
user = wp.call(wp_methods.users.GetUser(id))
return json.jsonify(UserApi.from_xmlrpc(user))
class UserCollectionApi(MethodView):
name = 'users'
def get(self):
page = int(request.values.get('page', 1))
|
titienmiami/mmc.repository
|
plugin.video.tvalacarta/servers/rtpa.py
|
Python
|
gpl-2.0
| 1,033
| 0.026163
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for rtpa
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def get_video_url( page_url , premium = False , user="" , password="", video_password="", page_data="" ):
logger.info("tvalacarta.servers.rtpa get_video_url(page_url='%s')" % page_url)
data = scrapertools.cache_page(page_url)
url = scrapertools.get_match(data,"'file'\: '([^']+)'")
video_urls = []
video_urls.append( [ "(mp4) [rtpa]" , url ] )
for video_url in video_urls:
logger.info("tvalacarta.servers.rtpa %s - %s" % (video_url[0],video_url[1]))
return video_urls
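# Illustrative input (assumed page fragment, not a real capture): a page
# containing "'file': 'http://example.org/video.mp4'" would make get_video_url
# return [["(mp4) [rtpa]", "http://example.org/video.mp4"]].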
# Finds videos from this server in the given text
def find_videos(data):
encontrados = set()
devuelve = []
return devuelve
|
automl/auto-sklearn
|
test/test_pipeline/components/classification/test_base.py
|
Python
|
bsd-3-clause
| 13,012
| 0.000999
|
from typing import Optional, Dict
import unittest
from autosklearn.pipeline.util import _test_classifier, \
_test_classifier_predict_proba, _test_classifier_iterative_fit
from autosklearn.pipeline.constants import SPARSE
import sklearn.metrics
import numpy as np
from test.test_pipeline.ignored_warnings import ignore_warnings, classifier_warnings
class BaseClassificationComponentTest(unittest.TestCase):
# Magic command to not run tests on base class
__test__ = False
res = None
module = None
sk_module = None
# Hyperparameter which is increased by iterative_fit
step_hyperparameter = None
def test_default_iris(self):
if self.__class__ == BaseClassificationComponentTest:
return
for i in range(2):
predictions, targets, n_calls = \
_test_classifier(dataset="iris",
classifier=self.module)
self.assertAlmostEqual(self.res["default_iris"],
sklearn.metrics.accuracy_score(targets,
predictions),
places=self.res.get(
"default_iris_places", 7))
if self.res.get("iris_n_calls"):
self.assertEqual(self.res["iris_n_calls"], n_calls)
def test_get_max_iter(self):
if self.__class__ == BaseClassificationComponentTest:
return
if not hasattr(self.module, 'iterative_fit'):
return
self.module.get_max_iter()
def test_default_iris_iterative_fit(self):
if self.__class__ == BaseClassificationComponentTest:
return
if not hasattr(self.module, 'iterative_fit'):
return
for i in range(2):
predictions, targets, classifier = \
_test_classifier_iterative_fit(dataset="iris",
classifier=self.module)
self.assertAlmostEqual(self.res["default_iris_iterative"],
sklearn.metrics.accuracy_score(targets,
predictions),
places=self.res.get(
"default_iris_iterative_places", 7))
if self.step_hyperparameter is not None:
self.assertEqual(
getattr(classifier.estimator, self.step_hyperparameter['name']),
self.res.get("iris_iterative_n_iter", self.step_hyperparameter['value'])
)
def test_default_iris_predict_proba(self):
if self.__class__ == BaseClassificationComponentTest:
return
for _ in range(2):
predictions, targets = _test_classifier_predict_proba(
dataset="iris", classifier=self.module
)
self.assertAlmostEqual(
self.res["default_iris_proba"],
                sklearn.metrics.log_loss(targets, predictions),
places=self.res.get("default_iris_proba_places", 7)
)
def test_default_iris_sparse(self):
        if self.__class__ == BaseClassificationComponentTest:
return
if SPARSE not in self.module.get_properties()["input"]:
return
for i in range(2):
predictions, targets, _ = \
_test_classifier(dataset="iris",
classifier=self.module,
sparse=True)
self.assertAlmostEqual(self.res["default_iris_sparse"],
sklearn.metrics.accuracy_score(targets,
predictions),
places=self.res.get(
"default_iris_sparse_places", 7))
def test_default_digits_binary(self):
if self.__class__ == BaseClassificationComponentTest:
return
for i in range(2):
predictions, targets, _ = \
_test_classifier(classifier=self.module,
dataset='digits', sparse=False,
make_binary=True)
self.assertAlmostEqual(self.res["default_digits_binary"],
sklearn.metrics.accuracy_score(
targets, predictions),
places=self.res.get(
"default_digits_binary_places", 7))
def test_default_digits(self):
if self.__class__ == BaseClassificationComponentTest:
return
for i in range(2):
predictions, targets, n_calls = \
_test_classifier(dataset="digits",
classifier=self.module)
self.assertAlmostEqual(self.res["default_digits"],
sklearn.metrics.accuracy_score(targets,
predictions),
places=self.res.get(
"default_digits_places", 7))
if self.res.get("digits_n_calls"):
self.assertEqual(self.res["digits_n_calls"], n_calls)
def test_default_digits_iterative_fit(self):
if self.__class__ == BaseClassificationComponentTest:
return
if not hasattr(self.module, 'iterative_fit'):
return
for i in range(2):
predictions, targets, classifier = \
_test_classifier_iterative_fit(dataset="digits",
classifier=self.module)
self.assertAlmostEqual(self.res["default_digits_iterative"],
sklearn.metrics.accuracy_score(targets,
predictions),
places=self.res.get(
"default_digits_iterative_places", 7))
if self.step_hyperparameter is not None:
self.assertEqual(
getattr(classifier.estimator, self.step_hyperparameter['name']),
self.res.get("digits_iterative_n_iter", self.step_hyperparameter['value'])
)
def test_default_digits_multilabel(self):
if self.__class__ == BaseClassificationComponentTest:
return
if not self.module.get_properties()["handles_multilabel"]:
return
for _ in range(2):
predictions, targets, _ = _test_classifier(
classifier=self.module, dataset='digits', make_multilabel=True
)
score = sklearn.metrics.precision_score(
targets, predictions, average='macro', zero_division=0
)
self.assertAlmostEqual(
self.res["default_digits_multilabel"], score,
places=self.res.get("default_digits_multilabel_places", 7)
)
def test_default_digits_multilabel_predict_proba(self):
if self.__class__ == BaseClassificationComponentTest:
return
if not self.module.get_properties()["handles_multilabel"]:
return
for i in range(2):
predictions, targets = \
_test_classifier_predict_proba(classifier=self.module,
make_multilabel=True)
            self.assertEqual(predictions.shape, (50, 3))
self.assertAlmostEqual(self.res["default_digits_multilabel_proba"],
sklearn.metrics.roc_auc_score(
targets, predictions, average='macro'),
places=self.res.get(
"default_digits_multilabel_proba_places", 7))
def test_target_algorithm_multioutput_multiclass_support(self):
        if self.__class__ == BaseClassificationComponentTest:
            return
|
GluuFederation/community-edition-setup
|
static/casa/scripts/casa-external_twilio_sms.py
|
Python
|
mit
| 6,740
| 0.004748
|
# This is a modified version of original twilio_sms Gluu's script to work with Casa
from java.util import Arrays
from javax.faces.application import FacesMessage
from org.gluu.jsf2.message import FacesMessages
from org.gluu.oxauth.security import Identity
from org.gluu.oxauth.service import UserService, AuthenticationService
from org.gluu.oxauth.util import ServerUtil
from org.gluu.model.custom.script.type.auth import PersonAuthenticationType
from org.gluu.service.cdi.util import CdiUtil
from org.gluu.util import StringHelper, ArrayHelper
from com.google.common.base import Joiner
from com.twilio import Twilio
import com.twilio.rest.api.v2010.account.Message as TwMessage
from com.twilio.type import PhoneNumber
import random
import sys
class PersonAuthentication(PersonAuthenticationType):
def __init__(self, currentTimeMillis):
self.currentTimeMillis = currentTimeMillis
def init(self, customScript, configurationAttributes):
print "Twilio SMS. Initialized"
return True
def destroy(self, configurationAttributes):
print "Twilio SMS. Destroyed successfully"
return True
def getApiVersion(self):
return 11
def getAuthenticationMethodClaims(self, configurationAttributes):
return None
def isValidAuthenticationMethod(self, usageType, configurationAttributes):
return True
def getAlternativeAuthenticationMethod(self, usageType, configurationAttributes):
return None
def authenticate(self, configurationAttributes, requestParameters, step):
print "TwilioSMS. Authenticate for Step %s" % str(step)
identity = CdiUtil.bean(Identity)
authenticationService = CdiUtil.bean(AuthenticationService)
user = authenticationService.getAuthenticatedUser()
if step == 1:
if user == None:
credentials = identity.getCredentials()
user_name = credentials.getUsername()
user_password = credentials.getPassword()
                if StringHelper.isNotEmptyString(user_name) and StringHelper.isNotEmptyString(user_password):
authenticationService.authenticate(user_name, user_password)
user = authenticationService.getAuthenticatedUser()
if user == None:
return False
#Attempt to send message now if user has only one mobile number
mobiles = user.getAttributeValues("mobile")
if mobiles == None:
return False
else:
code = random.randint(100000, 999999)
identity.setWorkingParameter("randCode", code)
sid = configurationAttributes.get("twilio_sid").getValue2()
token = configurationAttributes.get("twilio_token").getValue2()
self.from_no = configurationAttributes.get("from_number").getValue2()
Twilio.init(sid, token)
if mobiles.size() == 1:
self.sendMessage(code, mobiles.get(0))
else:
chopped = ""
for numb in mobiles:
l = len(numb)
chopped += "," + numb[max(0, l-4) : l]
#converting to comma-separated list (identity does not remember lists in 3.1.3)
identity.setWorkingParameter("numbers", Joiner.on(",").join(mobiles.toArray()))
identity.setWorkingParameter("choppedNos", chopped[1:])
return True
else:
if user == None:
return False
session_attributes = identity.getSessionId().getSessionAttributes()
code = session_attributes.get("randCode")
numbers = session_attributes.get("numbers")
if step == 2 and numbers != None:
#Means the selection number page was used
idx = ServerUtil.getFirstValue(requestParameters, "OtpSmsloginForm:indexOfNumber")
if idx != None and code != None:
sendToNumber = numbers.split(",")[int(idx)]
self.sendMessage(code, sendToNumber)
return True
else:
return False
success = False
form_passcode = ServerUtil.getFirstValue(requestParameters, "OtpSmsloginForm:passcode")
if form_passcode != None and code == form_passcode:
print "TwilioSMS. authenticate. 6-digit code matches with code sent via SMS"
success = True
else:
facesMessages = CdiUtil.bean(FacesMessages)
facesMessages.setKeepMessages()
facesMessages.clear()
facesMessages.add(FacesMessage.SEVERITY_ERROR, "Wrong code entered")
return success
def prepareForStep(self, configurationAttributes, requestParameters, step):
print "TwilioSMS. Prepare for Step %s" % str(step)
return True
def getExtraParametersForStep(self, configurationAttributes, step):
if step > 1:
return Arrays.asList("randCode", "numbers", "choppedNos")
return None
def getCountAuthenticationSteps(self, configurationAttributes):
print "TwilioSMS. getCountAuthenticationSteps called"
if CdiUtil.bean(Identity).getWorkingParameter("numbers") == None:
return 2
else:
return 3
def getPageForStep(self, configurationAttributes, step):
print "TwilioSMS. getPageForStep called %s" % step
print "numbers are %s" % CdiUtil.bean(Identity).getWorkingParameter("numbers")
defPage = "/casa/otp_sms.xhtml"
if step == 2:
if CdiUtil.bean(Identity).getWorkingParameter("numbers") == None:
return defPage
else:
return "/casa/otp_sms_prompt.xhtml"
elif step == 3:
return defPage
return ""
def logout(self, configurationAttributes, requestParameters):
return True
def hasEnrollments(self, configurationAttributes, user):
return user.getAttribute("mobile") != None
def sendMessage(self, code, numb):
try:
if numb[:1] != "+":
numb = "+" + numb
print "TwilioSMS. Sending SMS message (%s) to %s" % (code, numb)
msg = "%s is your passcode to access your account" % code
message = TwMessage.creator(PhoneNumber(numb), PhoneNumber(self.from_no), msg).create()
print "TwilioSMS. Message Sid: %s" % message.getSid()
except:
print "TwilioSMS. Error sending message", sys.exc_info()[1]
|
pinntech/flask-logex
|
runner.py
|
Python
|
mit
| 198
| 0
|
"""Runner for testing ap
|
p and blueprint logging individually"""
import subprocess
from tests.samples import app
if __name__ == '__main__':
    app.run()
subprocess.call(['rm', '-rf', 'logs'])
|
kayaman/jararaca
|
test/test_sns_projects.py
|
Python
|
mit
| 1,358
| 0.0081
|
import unittest
from default_test_case import DefaultTestCase
from sns_project import SnsProject
class TestSnsProjects(DefaultTestCase):
def test_project_with_defaults(self):
project = SnsProject.find(self.loaded_config.sns_projects, 'project_with_defaults')
        self.assertEqual(project.raw_message, False)
self.assertEqual(project.env, 'default_sns_env')
self.assertEqual(project.region, 'default_sns_region')
def test_project_with_custom_env(self):
project = SnsProject.find(self.loaded_config.sns_projects, 'project_with_custom_env')
self.assertEqual(project.raw_message, False)
self.assertEqual(project.env, 'custom_sns_env')
        self.assertEqual(project.region, 'default_sns_region')
def test_project_with_custom_raw_message(self):
project = SnsProject.find(self.loaded_config.sns_projects, 'project_with_custom_raw_message')
self.assertEqual(project.raw_message, True)
self.assertEqual(project.env, 'default_sns_env')
self.assertEqual(project.region, 'default_sns_region')
def test_project_with_custom_region(self):
project = SnsProject.find(self.loaded_config.sns_projects, 'project_with_custom_region')
self.assertEqual(project.raw_message, False)
self.assertEqual(project.env, 'default_sns_env')
self.assertEqual(project.region, 'custom_sns_region')
if __name__ == '__main__':
unittest.main()
|
ctuning/ck
|
ck/repo/module/soft/module.py
|
Python
|
bsd-3-clause
| 97,841
| 0.035261
|
#
# Collective Knowledge (checking and installing software)
#
# See CK LICENSE.txt for licensing details
# See CK COPYRIGHT.txt for copyright details
#
# Developer: Grigori Fursin, Grigori.Fursin@cTuning.org, http://fursin.net
#
cfg={} # Will be updated by CK (meta description of this module)
work={} # Will be updated by CK (temporal data)
ck=None # Will be updated by CK (initialized CK kernel)
# Local settings
env_install_path='CK_TOOLS'
env_search='CK_DIRS'
##############################################################################
# Initialize module
def init(i):
"""
Input: {}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
return {'return':0}
##############################################################################
# detect if given software is already installed and register it in the CK, or install it if a package exists
def detect(i):
"""
See "check" API
"""
return check(i)
##############################################################################
# detect soft (internal function - gradually outdated)
def internal_detect(i):
"""
Input: {
(host_os) - host OS (detect, if omitted)
(target_os) - target OS (detect, if omitted)
(target_device_id) - target device ID (detect, if omitted)
(data_uoa) or (uoa) - software UOA entry
or
(tags) - search UOA by tags (separated by comma)
(tool) - force this tool name
(env) - if !='', use this env string before calling compiler (to set up env)
(show) - if 'yes', show output
(force_version) - if !='', use this version
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
version_str - version as string
version_lst - version as list of strings
version_raw - raw list of strings (output of --version)
}
"""
import os
o=i.get('out','')
# Check host/target OS/CPU
hos=i.get('host_os','')
tos=i.get('target_os','')
tdid=i.get('target_device_id','')
r=ck.access({'action':'detect',
'module_uoa':cfg['module_deps']['platform.os'],
'host_os':hos,
'target_os':tos,
'target_device_id':tdid,
'skip_info_collection':'yes'})
if r['return']>0: return r
hos=r['host_os_uid']
hosx=r['host_os_uoa']
hosd=r['host_os_dict']
tos=r['os_uid']
tosx=r['os_uoa']
tosd=r['os_dict']
hplat=hosd['ck_name']
tplat=tosd['ck_name']
env=i.get('env','')
ubtr=hosd.get('use_bash_to_run','')
svarb=hosd.get('env_var_start','')
svarb1=hosd.get('env_var_extra1','')
svare=hosd.get('env_var_stop','')
svare1=hosd.get('env_var_extra2','')
sexe=hosd.get('set_executable','')
sbp=hosd.get('bin_prefix','')
envsep=hosd.get('env_separator','')
scall=hosd.get('env_call','')
sext=hosd.get('script_ext','')
# Check soft UOA
duoa=i.get('uoa','')
if duoa=='': duoa=i.get('data_uoa','')
if duoa=='':
# Search
tags=i.get('tags','')
if tags!='':
r=ck.access({'action':'search',
'module_uoa':work['self_module_uid'],
'tags':tags})
if r['return']>0: return r
l=r['lst']
if len(l)>0:
duid=l[0].get('data_uid')
duoa=duid
if duoa=='':
return {'return':1, 'error':'software entry was not found'}
# Load
r=ck.access({'action':'load',
'module_uoa':work['self_module_uid'],
'data_uoa':duoa})
    if r['return']>0: return r
d=r['dict']
p=r['path']
duoa=r['data_uoa']
duid=r['data_uid']
if o=='con':
x=duoa
if duid!=duoa: x+=' ('+duid+')'
        ck.out('Software description entry found: '+x)
# Check if customize script is redirected into another entry:
#
another_entry_with_customize_script=d.get('use_customize_script_from_another_entry', None)
if another_entry_with_customize_script:
r=ck.access({'action':'find',
'module_uoa': another_entry_with_customize_script.get('module_uoa', work['self_module_uid']),
'data_uoa': another_entry_with_customize_script.get('data_uoa','')
})
if r['return']>0: return r
customization_script_path = r['path']
else:
customization_script_path = p
cs=None
rx=ck.load_module_from_path({'path':customization_script_path, 'module_code_name':cfg['custom_script_name'], 'skip_init':'yes'})
if rx['return']==0:
cs=rx['code']
elif another_entry_with_customize_script or not rx['error'].startswith("can't find module code"):
return rx
# Checking name
cus=d.get('customize',{})
tool=i.get('tool','')
if tool=='':
if cus.get('soft_file_as_env','')!='':
tool=svarb+cus['soft_file_as_env']+svare
if cus.get('soft_file_not_tool','')!='yes':
ry=prepare_target_name({'host_os_dict':hosd,
'target_os_dict':tosd,
'cus':cus})
if ry['return']>0: return ry
tool=ry['tool']
# Preparing CMD
soft_version_cmd=cus.get('soft_version_cmd',{}).get(hplat,'')
if o=='con':
ck.out('')
ck.out('Prepared cmd: '+soft_version_cmd+' ...')
# Check version (via customized script) ...
ver=''
lst=[]
ii={'full_path':tool,
'bat':env,
'host_os_dict':hosd,
'target_os_dict':tosd,
'cmd':soft_version_cmd,
'use_locale':cus.get('use_locale_for_version',''),
'customize':cus,
'custom_script_obj':cs,
'data_uid': duid
}
if ck.cfg.get('minimize_soft_detect_output','')!='yes':
ii['out']=o
rx=get_version(ii)
if rx['return']==0:
ver=rx['version']
lst=rx['version_lst']
if ver=='':
return {'return':16, 'error':'version was not detected'}
# Split version
rx=split_version({'version':ver})
if rx['return']>0: return rx
sver=rx['version_split']
if i.get('show','')=='yes':
ck.out('Output:')
ck.out('')
for q in lst:
ck.out(' '+q)
if o=='con':
ck.out('')
ck.out('Version detected: '+ver)
return {'return':0, 'version_str':ver,
'version_lst':sver,
'version_raw':lst}
##############################################################################
# setup environment for a given software -
# it is a low level routine which ask you the exact path to the tool and its version
def setup(i):
"""
Input: {
(host_os) - host OS (detect, if omitted)
(target_os) - target OS (detect, if omitted)
(target_device_id) - target device ID (detect, if omitted)
(data_uoa) or (uoa) - soft configuration UOA
or
(tags) - search UOA by tags (separated by comma)
(soft_name) - use this user friendly name for environment entry
(soft_add_name) - add extra name to above name (such as anaconda)
(customize) - dict with custom parameters
(usually passed to customize script)
skip_add_dirs
skip_add_to_path
skip_add_to_bin
skip_add_to_ld_path
add_include_path
              skip_path        - skipping installation path (for
|
prasanna08/oppia-ml
|
core/classifiers/TextClassifier/text_classifier_test.py
|
Python
|
apache-2.0
| 2,185
| 0
|
# coding: utf-8
#
# Copyright 2017 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for text classifier."""
import json
import os
from core.classifiers.TextClassifier import TextClassifier
from core.tests import test_utils
import vmconf
def _load_training_data():
file_path = os.path.join(vmconf.DATASETS_DIR,
                             'string_classifier_data.json')
with open(file_path, 'r') as f:
training_data = json.loads(f.read())
return training_data
class TextClassifierTests(test_utils.GenericTestBase):
"""Tests for text classifier."""
def setUp(self):
super(TextClassifierTests, self).setUp()
self.clf = TextClassifier.TextClassifier()
self.training_data = _load_training_data()
    def test_that_text_classifier_works(self):
        """Test that entire classifier is working end-to-end."""
self.clf.train(self.training_data)
classifier_data = self.clf.to_dict()
self.clf.validate(classifier_data)
def test_text_classifier_performance(self):
"""Test the performance of the text classifier.
        This method measures and tests the run-time and the f1 score of the
        classifier. The run-time should be at most 2 seconds and the f1 score
        should be at least 0.85 for the test to pass.
"""
self.clf.train(self.training_data)
# The weighted f1 score for the test dataset should be at least 0.85.
self.assertGreaterEqual(self.clf.best_score, 0.85)
# The training phase for the test dataset should take less than 2 sec.
self.assertLessEqual(self.clf.exec_time, 2)
|
VShangxiao/tornado
|
tornado/auth.py
|
Python
|
apache-2.0
| 46,438
| 0.000538
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""This module contains implementations of various third-party
authentication schemes.
All the classes in this file are class mixins designed to be used with
the `tornado.web.RequestHandler` class. They are used in two ways:
* On a login handler, use methods such as ``authenticate_redirect()``,
``authorize_redirect()``, and ``get_authenticated_user()`` to
establish the user's identity and store authentication tokens to your
database and/or cookies.
* In non-login handlers, use methods such as ``facebook_request()``
or ``twitter_request()`` to use the authentication tokens to make
requests to the respective services.
They all take slightly different arguments due to the fact all these
services implement authentication and authorization slightly differently.
See the individual service classes below for complete documentation.
Example usage for Google OAuth:
.. testcode::
class GoogleOAuth2LoginHandler(tornado.web.RequestHandler,
tornado.auth.GoogleOAuth2Mixin):
@tornado.gen.coroutine
def get(self):
if self.get_argument('code', False):
user = yield self.get_authenticated_user(
redirect_uri='http://your.site.com/auth/google',
code=self.get_argument('code'))
# Save the user with e.g. set_secure_cookie
else:
yield self.authorize_redirect(
redirect_uri='http://your.site.com/auth/google',
client_id=self.settings['google_oauth']['key'],
scope=['profile', 'email'],
response_type='code',
extra_params={'approval_prompt': 'auto'})
.. testoutput::
:hide:
.. versionchanged:: 4.0
All of the callback interfaces in this module are now guaranteed
to run their callback with an argument of ``None`` on error.
Previously some functions would do this while others would simply
terminate the request on their own. This change also ensures that
errors are more consistently reported through the ``Future`` interfaces.
"""
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import functools
import hashlib
import hmac
import time
import uuid
from tornado.concurrent import TracebackFuture, return_future, chain_future
from tornado import gen
from tornado import httpclient
from tornado import escape
from tornado.httputil import url_concat
from tornado.log import gen_log
from tornado.stack_context import ExceptionStackContext
from tornado.util import u, unicode_type, ArgReplacer
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
import urllib.parse as urllib_parse # py3
except ImportError:
import urllib as urllib_parse # py2
try:
long # py2
except NameError:
long = int # py3
class AuthError(Exception):
pass
def _auth_future_to_callback(callback, future):
try:
result = future.result()
except AuthError as e:
gen_log.warning(str(e))
result = None
callback(result)
def _auth_return_future(f):
"""Similar to tornado.concurrent.return_future, but uses the auth
module's legacy callback interface.
Note that when using this decorator the ``callback`` parameter
inside the function will actually be a future.
"""
replacer = ArgReplacer(f, 'callback')
@functools.wraps(f)
def wrapper(*args, **kwargs):
future = TracebackFuture()
callback, args, kwargs = replacer.replace(future, args, kwargs)
if callback is not None:
future.add_done_callback(
functools.partial(_auth_future_to_callback, callback))
def handle_exception(typ, value, tb):
if future.done():
return False
else:
future.set_exc_info((typ, value, tb))
return True
with ExceptionStackContext(handle_exception):
f(*args, **kwargs)
return future
return wrapper
class OpenIdMixin(object):
"""Abstract implementation of OpenID and Attribute Exchange.
Class attributes:
* ``_OPENID_ENDPOINT``: the identity provider's URI.
"""
@return_future
def authenticate_redirect(self, callback_uri=None,
ax_attrs=["name", "email", "language", "username"],
callback=None):
"""Redirects to the authentication URL for this service.
After authentication, the service will redirect back to the given
callback URI with additional parameters including ``openid.mode``.
We request the given attributes for the authenticated user by
default (name, email, language, and username). If you don't need
all those attributes for your app, you can request fewer with
the ax_attrs keyword argument.
.. versionchanged:: 3.1
Returns a `.Future` and takes an optional callback. These are
not strictly necessary as this method is synchronous,
but they are supplied for consistency with
`OAuthMixin.authorize_redirect`.
"""
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args))
callback()
@_auth_return_future
def get_authenticated_user(self, callback, http_client=None):
"""Fetches the authenticated user data upon redirect.
This method should be called by the handler that receives the
redirect from the `authenticate_redirect()` method (which is
often the same as the one that calls it; in that case you would
call `get_authenticated_user` if the ``openid.mode`` parameter
is present and `authenticate_redirect` if it is not).
The result of this method will generally be used to set a cookie.
"""
        # Verify the OpenID response via direct request to the OP
        args = dict((k, v[-1]) for k, v in self.request.arguments.items())
args["openid.mode"] = u("check_authentication")
url = self._OPENID_ENDPOINT
if http_client is None:
http_client = self.get_auth_http_client()
http_client.fetch(url, functools.partial(
self._on_authentication_verified, callback),
method="POST", body=urllib_parse.urlencode(args))
def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None):
url = urlparse.urljoin(self.request.full_url(), callback_uri)
args = {
"openid.ns": "http://specs.openid.net/auth/2.0",
"openid.claimed_id":
"http://specs.openid.net/auth/2.0/identifier_select",
"openid.identity":
"http://specs.openid.net/auth/2.0/identifier_select",
"openid.return_to": url,
"openid.realm": urlparse.urljoin(url, '/'),
"openid.mode": "checkid_setup",
}
if ax_attrs:
args.update({
"openid.ns.ax": "http://openid.net/srv/ax/1.0",
"openid.ax.mode": "fetch_request",
})
ax_attrs = set(ax_attrs)
required = []
if "name" in ax_attrs:
ax_attrs -= set(["name", "firstname", "fullname", "lastname"])
required += ["firstname", "fullname", "lastname"]
args.update({
"openid.ax
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractMobileSuitZetaGundamNovelsTranslation.py
|
Python
|
bsd-3-clause
| 405
| 0.024691
|
def extractMobileSuitZetaGundamNovelsTranslation(item):
"""
Parser for 'Mobile Suit Zeta Gundam Novels Translation'
"""
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or 'preview' in item['title'].lower():
return None
if 'WATTT' in item['tags']:
		return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
return False
|
kzys/buildbot
|
buildbot/process/properties.py
|
Python
|
gpl-2.0
| 5,112
| 0.005869
|
import re
import weakref
from buildbot import util
class Properties(util.ComparableMixin):
"""
I represent a set of properties that can be interpolated into various
strings in buildsteps.
    @ivar properties: dictionary mapping property names to tuples
        (value, source), where source is a string identifying the source
of the property.
Objects of this class can be read like a dictionary -- in this case,
only the property value is returned.
As a special case, a property value of None is returned as an empty
string when used as a mapping.
"""
compare_attrs = ('properties',)
def __init__(self, **kwargs):
"""
@param kwargs: initial property values (for testing)
"""
self.properties = {}
self.pmap = PropertyMap(self)
if kwargs: self.update(kwargs, "TEST")
def __getstate__(self):
d = self.__dict__.copy()
del d['pmap']
return d
def __setstate__(self, d):
self.__dict__ = d
self.pmap = PropertyMap(self)
def __contains__(self, name):
return name in self.properties
def __getitem__(self, name):
"""Just get the value for this property."""
rv = self.properties[name][0]
return rv
def has_key(self, name):
return self.properties.has_key(name)
def getProperty(self, name, default=None):
"""Get the value for the given property."""
return self.properties.get(name, (default,))[0]
def getPropertySource(self, name):
return self.properties[name][1]
def asList(self):
"""Return the properties as a sorted list of (name, value, source)"""
l = [ (k, v[0], v[1]) for k,v in self.properties.items() ]
l.sort()
return l
def __repr__(self):
return repr(dict([ (k,v[0]) for k,v in self.properties.iteritems() ]))
def setProperty(self, name, value, source):
self.properties[name] = (value, source)
def update(self, dict, source):
"""Update this object from a dictionary, with an explicit source specified."""
for k, v in dict.items():
self.properties[k] = (v, source)
def updateFromProperties(self, other):
"""Update this object based on another object; the other object's """
self.properties.update(other.properties)
def render(self, value):
"""
Return a variant of value that has any WithProperties objects
substituted. This recurses into Python's compound data types.
"""
# we use isinstance to detect Python's standard data types, and call
# this function recursively for the values in those types
if isinstance(value, (str, unicode)):
return value
elif isinstance(value, WithProperties):
return value.render(self.pmap)
elif isinstance(value, list):
return [ self.render(e) for e in value ]
elif isinstance(value, tuple):
return tuple([ self.render(e) for e in value ])
elif isinstance(value, dict):
return dict([ (self.render(k), self.render(v)) for k,v in value.iteritems() ])
else:
return value
class PropertyMap:
"""
Privately-used mapping object to implement WithProperties' substitutions,
including the rendering of None as ''.
"""
colon_minus_re = re.compile(r"(.*):-(.*)")
colon_plus_re = re.compile(r"(.*):\+(.*)")
def __init__(self, properties):
# use weakref here to avoid a reference loop
self.properties = weakref.ref(properties)
def __getitem__(self, key):
properties = self.properties()
assert properties is not None
# %(prop:-repl)s
        # if prop exists, use it; otherwise, use repl
mo = self.colon_minus_re.match(key)
if mo:
prop, repl = mo.group(1,2)
if properties.has_key(prop):
rv = properties[prop]
else:
rv = repl
else:
# %(prop:+repl)s
# if prop exists, use repl; otherwise, an empty string
mo = self.colon_plus_re.match(key)
if mo:
prop, repl = mo.group(1,2)
if properties.has_key(prop):
rv = repl
else:
rv = ''
else:
rv = properties[key]
# translate 'None' to an empty string
if rv is None: rv = ''
return rv
class WithProperties(util.ComparableMixin):
"""
This is a marker class, used fairly widely to indicate that we
want to interpolate build properties.
"""
compare_attrs = ('fmtstring', 'args')
def __init__(self, fmtstring, *args):
self.fmtstring = fmtstring
self.args = args
def render(self, pmap):
if self.args:
strings = []
for name in self.args:
strings.append(pmap[name])
s = self.fmtstring % tuple(strings)
else:
s = self.fmtstring % pmap
return s
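# Illustrative sketch (not part of the original module): given
#     props = Properties()
#     props.setProperty('branch', 'trunk', 'Scheduler')
# rendering WithProperties('rev-%(branch)s-%(got_revision:-none)s') via
# props.render(...) yields 'rev-trunk-none': PropertyMap substitutes 'trunk'
# for branch and falls back to the ':-' default for the missing got_revision.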
|
varses/awsch
|
lantz/drivers/rgblasersystems/__init__.py
|
Python
|
bsd-3-clause
| 414
| 0
|
# -*- coding: utf-8 -*-
"""
    lantz.drivers.rgblasersystems
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:company: RGB Lasersysteme GmbH.
:description: Lasers and Lasers Systems.
:website: http://www.rgb-laser.com/
----
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from .minilasevo import MiniLasEvo
__all__ = ['MiniLasEvo']
|
birsoyo/conan
|
conans/test/build_helpers/cmake_test.py
|
Python
|
mit
| 41,766
| 0.002275
|
import os
import shutil
import stat
import sys
import unittest
import platform
from collections import namedtuple
from conans import tools
from conans.model.conan_file import ConanFile
from conans.model.ref import ConanFileReference
from conans.model.build_info import CppInfo, DepsCppInfo
from conans.model.settings import Settings
from conans.client.conf import default_settings_yml
from conans.client.build.cmake import CMake
from conans.test.utils.tools import TestBufferConanOutput
from conans.tools import cpu_count
from conans.util.files import save, load
from conans.test.utils.test_files import temp_folder
from conans.model.options import Options, PackageOptions
from conans.errors import ConanException
class CMakeTest(unittest.TestCase):
def setUp(self):
self.tempdir = temp_folder(path_with_spaces=False)
self.tempdir2 = temp_folder(path_with_spaces=False)
def tearDown(self):
shutil.rmtree(self.tempdir)
shutil.rmtree(self.tempdir2)
def config_patch_test(self):
conan_file = ConanFileMock()
conan_file.name = "MyPkg"
conan_file.settings = Settings()
conan_file.source_folder = os.path.join(self.tempdir, "src")
conan_file.build_folder = os.path.join(self.tempdir, "build")
conan_file.package_folder = os.path.join(self.tempdir, "pkg")
conan_file.deps_cpp_info = DepsCppInfo()
msg = "FOLDER: " + conan_file.package_folder
for folder in (conan_file.build_folder, conan_file.package_folder):
save(os.path.join(folder, "file1.cmake"), "Nothing")
            save(os.path.join(folder, "file2"), msg)
save(os.path.join(folder, "file3.txt"), msg)
save(os.path.join(folder, "file3.cmake"), msg)
            save(os.path.join(folder, "sub", "file3.cmake"), msg)
cmake = CMake(conan_file, generator="Unix Makefiles")
cmake.patch_config_paths()
for folder in (conan_file.build_folder, conan_file.package_folder):
self.assertEqual("Nothing", load(os.path.join(folder, "file1.cmake")))
self.assertEqual(msg, load(os.path.join(folder, "file2")))
self.assertEqual(msg, load(os.path.join(folder, "file3.txt")))
self.assertEqual("FOLDER: ${CONAN_MYPKG_ROOT}",
load(os.path.join(folder, "file3.cmake")))
self.assertEqual("FOLDER: ${CONAN_MYPKG_ROOT}",
load(os.path.join(folder, "sub", "file3.cmake")))
def config_patch_deps_test(self):
conan_file = ConanFileMock()
conan_file.name = "MyPkg"
conan_file.settings = Settings()
conan_file.source_folder = os.path.join(self.tempdir, "src")
conan_file.build_folder = os.path.join(self.tempdir, "build")
conan_file.package_folder = os.path.join(self.tempdir, "pkg")
conan_file.deps_cpp_info = DepsCppInfo()
ref = ConanFileReference.loads("MyPkg1/0.1@user/channel")
cpp_info = CppInfo(self.tempdir2)
conan_file.deps_cpp_info.update(cpp_info, ref.name)
self.tempdir = temp_folder(path_with_spaces=False)
self.assertEqual(list(conan_file.deps_cpp_info.deps), ['MyPkg1'])
self.assertEqual(conan_file.deps_cpp_info['MyPkg1'].rootpath,
self.tempdir2)
msg = "FOLDER: " + self.tempdir2
for folder in (conan_file.build_folder, conan_file.package_folder):
save(os.path.join(folder, "file1.cmake"), "Nothing")
save(os.path.join(folder, "file2"), msg)
save(os.path.join(folder, "file3.txt"), msg)
save(os.path.join(folder, "file3.cmake"), msg)
save(os.path.join(folder, "sub", "file3.cmake"), msg)
cmake = CMake(conan_file, generator="Unix Makefiles")
cmake.patch_config_paths()
for folder in (conan_file.build_folder, conan_file.package_folder):
self.assertEqual("Nothing", load(os.path.join(folder, "file1.cmake")))
self.assertEqual(msg, load(os.path.join(folder, "file2")))
self.assertEqual(msg, load(os.path.join(folder, "file3.txt")))
self.assertEqual("FOLDER: ${CONAN_MYPKG1_ROOT}",
load(os.path.join(folder, "file3.cmake")))
self.assertEqual("FOLDER: ${CONAN_MYPKG1_ROOT}",
load(os.path.join(folder, "sub", "file3.cmake")))
def partial_build_test(self):
conan_file = ConanFileMock()
conan_file.settings = Settings()
conan_file.should_configure = False
conan_file.should_build = False
conan_file.should_install = False
cmake = CMake(conan_file, generator="Unix Makefiles")
cmake.configure()
self.assertIsNone(conan_file.command)
cmake.build()
self.assertIsNone(conan_file.command)
cmake.install()
self.assertIsNone(conan_file.command)
conan_file.name = None
cmake.patch_config_paths()
cmake.test()
self.assertIsNone(conan_file.command)
def cmake_generator_test(self):
conan_file = ConanFileMock()
conan_file.settings = Settings()
with tools.environment_append({"CONAN_CMAKE_GENERATOR": "My CMake Generator"}):
cmake = CMake(conan_file)
self.assertIn('-G "My CMake Generator"', cmake.command_line)
def cmake_fpic_test(self):
settings = Settings.loads(default_settings_yml)
settings.os = "Linux"
settings.compiler = "gcc"
settings.compiler.version = "6.3"
settings.arch = "x86"
def assert_fpic(the_settings, input_shared, input_fpic, expected_option):
options = []
values = {}
if input_shared is not None:
options.append('"shared": [True, False]')
values["shared"] = input_shared
if input_fpic is not None:
options.append('"fPIC": [True, False]')
values["fPIC"] = input_fpic
conan_file = ConanFileMock(options='{%s}' % ", ".join(options),
options_values=values)
conan_file.settings = the_settings
cmake = CMake(conan_file)
cmake.configure()
if expected_option is not None:
self.assertEquals(cmake.definitions["CONAN_CMAKE_POSITION_INDEPENDENT_CODE"],
expected_option)
else:
self.assertNotIn("CONAN_CMAKE_POSITION_INDEPENDENT_CODE", cmake.definitions)
# Test shared=False and fpic=False
assert_fpic(settings, input_shared=False, input_fpic=False, expected_option="OFF")
# Test shared=True and fpic=False
assert_fpic(settings, input_shared=True, input_fpic=False, expected_option="ON")
# Test shared=True and fpic=True
assert_fpic(settings, input_shared=True, input_fpic=True, expected_option="ON")
# Test shared not defined and fpic=True
assert_fpic(settings, input_shared=None, input_fpic=True, expected_option="ON")
# Test shared not defined and fpic not defined
assert_fpic(settings, input_shared=None, input_fpic=None, expected_option=None)
# Test shared True and fpic not defined
assert_fpic(settings, input_shared=True, input_fpic=None, expected_option=None)
# Test nothing in Windows
settings = Settings.loads(default_settings_yml)
settings.os = "Windows"
settings.compiler = "Visual Studio"
settings.compiler.version = "15"
settings.arch = "x86_64"
assert_fpic(settings, input_shared=True, input_fpic=True, expected_option=None)
def cmake_make_program_test(self):
settings = Settings.loads(default_settings_yml)
settings.os = "Linux"
settings.compiler = "gcc"
settings.compiler.version = "6.3"
settings.arch = "x86"
settings.build_type = "Release"
conan_file = ConanFileMock()
conan_file.settings = settings
conan_file.source_folder = os.path.join(self.tempdir, "my_cache_source_folder")
conan_file.build_fold
|
tangentlabs/django-fancypages
|
fancypages/dashboard/views.py
|
Python
|
bsd-3-clause
| 1,888
| 0
|
from django.views import generic
from django.db.models import get_model
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from . import forms
from ..utils import get_page_model, get_node_model
PageNode = get_node_model()
FancyPage = get_page_model()
ContentBlock = get_model('fancypages', 'ContentBlock')
Container = get_model('fancypages', 'Container')
TabBlock = get_model('fancypages', 'TabBlock')
OrderedContainer = get_model('fancypages', 'OrderedContainer')
class PageListView(generic.TemplateView):
template_name = "fancypages/dashboard/page_list.html"
class PageCreateView(generic.CreateView):
model = FancyPage
form_class = forms.PageNodeForm
template_name = "fancypages/dashboard/page_update.html"
def get_form_kwargs(self):
kwargs = super(PageCreateView, self).get_form_kwargs()
kwargs.update(self.kwargs)
return kwargs
def get_context_data(self, **kwargs):
        ctx = super(PageCreateView, self).get_context_data(**kwargs)
ctx['title'] = _("Create new page")
return ctx
def get_success_url(self):
        return reverse('fp-dashboard:page-list')
class PageUpdateView(generic.UpdateView):
model = FancyPage
form_class = forms.PageNodeForm
context_object_name = 'fancypage'
template_name = "fancypages/dashboard/page_update.html"
def get_context_data(self, **kwargs):
ctx = super(PageUpdateView, self).get_context_data(**kwargs)
ctx['title'] = _("Update page")
return ctx
def get_success_url(self):
return reverse('fp-dashboard:page-list')
class PageDeleteView(generic.DeleteView):
model = FancyPage
context_object_name = 'fancypage'
template_name = "fancypages/dashboard/page_delete.html"
def get_success_url(self):
return reverse('fp-dashboard:page-list')
|
nuagenetworks/vspk-python
|
vspk/v5_0/nuctranslationmap.py
|
Python
|
bsd-3-clause
| 9,274
| 0.009165
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUCTranslationMap(NURESTObject):
""" Represents a CTranslationMap in the VSD
Notes:
1:1 mapping of customer private IPs in customer domain to customer alias (public) IPs in provider domain and N:1 mapping to customer alias SPAT IP in the provider domain.
"""
__rest_name__ = "ctranslationmap"
__resource_name__ = "ctranslationmaps"
## Constants
CONST_MAPPING_TYPE_PAT = "PAT"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_MAPPING_TYPE_NAT = "NAT"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a CTranslationMap instance
Notes:
                You can specify all parameters while calling this method.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> ctranslationmap = NUCTranslationMap(id=u'xxxx-xxx-xxx-xxx', name=u'CTranslationMap')
>>> ctranslationmap = NUCTranslationMap(data=my_dict)
"""
super(NUCTranslationMap, self).__init__()
# Read/Write Attributes
self._mapping_type = None
self._last_updated_by = None
self._entity_scope = None
self._associated_domain_id = None
self._customer_alias_ip = None
self._customer_ip = None
self._external_id = None
self.expose_attribute(local_name="mapping_type", remote_name="mappingType", attribute_type=str, is_required=True, is_unique=False, choices=[u'NAT', u'PAT'])
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="associated_domain_id", remote_name="associatedDomainID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="customer_alias_ip", remote_name="customerAliasIP", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="customer_ip", remote_name="customerIP", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def mapping_type(self):
""" Get mapping_type value.
Notes:
NAT for 1:1 mapping or PAT for *:1 mappings.
This attribute is named `mappingType` in VSD API.
"""
return self._mapping_type
@mapping_type.setter
def mapping_type(self, value):
""" Set mapping_type value.
Notes:
NAT for 1:1 mapping or PAT for *:1 mappings.
This attribute is named `mappingType` in VSD API.
"""
self._mapping_type = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def associated_domain_id(self):
""" Get associated_domain_id value.
Notes:
Domain associated to this address mapping.
This attribute is named `associatedDomainID` in VSD API.
"""
return self._associated_domain_id
@associated_domain_id.setter
def associated_domain_id(self, value):
""" Set associated_domain_id value.
Notes:
Domain associated to this address mapping.
This attribute is named `associatedDomainID` in VSD API.
"""
self._associated_domain_id = value
@property
def customer_alias_ip(self):
""" Get customer_alias_ip value.
Notes:
Customer public IP in the provider domain.
This attribute is named `customerAliasIP` in VSD API.
"""
return self._customer_alias_ip
@customer_alias_ip.setter
def customer_alias_ip(self, value):
""" Set customer_alias_ip value.
Notes:
Customer public IP in the provider domain.
This attribute is named `customerAliasIP` in VSD API.
"""
self._customer_alias_ip = value
@property
def customer_ip(self):
""" Get customer_ip value.
Notes:
                Customer private IP in the customer domain.
                This attribute is named `customerIP` in VSD API.
        """
        return self._customer_ip
|
pinterest/teletraan
|
deploy-board/deploy_board/webapp/helpers/placements_helper.py
|
Python
|
apache-2.0
| 1,467
| 0.003408
|
# Copyright 2016 Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
from deploy_board.webapp.helpers.rodimus_client import RodimusClient
rodimus_client = RodimusClient()
def create_placement(request, placement_info):
return rodimus_client.post("/placements", request.teletraan_user_id.token, data=placement_info)
def get_all(request, index, size):
params = [('pageIndex', index), ('pageSize', size)]
return rodimus_client.get("/placements", request.teletraan_user_id.token, params=params)
def get_by_provider_and_cell_name(request, provider, cell_name):
if cell_name:
return rodimus_client.get("/placements/cell/%s" % cell_name, request.teletraan_user_id.token)
return rodimus_client.get("/placements/provider/%s" % provider, request.teletraan_user_id.token)
def get_by_id(request, placement_id):
return rodimus_client.get("/placements/%s" % placement_id, request.teletraan_user_id.token)
|
arantebillywilson/python-snippets
|
py2/lpthw/ex13.py
|
Python
|
mit
| 440
| 0.002273
|
#!/usr/bin/env python
#
# ex13.py
#
# Author: Billy Wilson Arante
# Created: 2016/04/26 PHT
#
from sys import argv
def main():
"""Exercise 13: Parameters, Unpacking, Va
|
riables"""
script, first, second, third = argv
print "The script is called:", script
print "Your first variable is:", first
print "Your second variable is:", second
print "Your third variable is:", third
if __name__ == "__main__":
main()
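# Illustrative invocation (hypothetical arguments): the script requires exactly
# three command-line arguments besides its own name, e.g.
#     $ python ex13.py apple banana cherry
# which argv unpacks into script, first, second and third above.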
|
MashSoftware/place-ui
|
mash_place_ui/__init__.py
|
Python
|
mit
| 578
| 0.00519
|
# flake8: noqa
from flask import Flask
from flask_assets import Environment, Bundle
from flask_compress import Compress
from flask_cache import Cache
from flask_wtf.csrf import CsrfProtect
app = Flask(__name__)
#App config
app.config.from_pyfile('config.py')
# Flask Assets
assets = Environment(app)
css = Bundle('css/custom.css', filters='cssmin', output='css/custom.min.css')
assets.register('custom_css', css)
# Flask Compress
Compress(app)
# Flask Cache
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
# CSRF Protection
CsrfProtect(app)
import mash_place_ui.views
|
Alex-Jaeger/CodeBin
|
src/editor.py
|
Python
|
apache-2.0
| 6,836
| 0.010532
|
import cgi
print "Content-Type: text/html\n"
form = cgi.FieldStorage()
print """
<!DOCTYPE html>
<html lang="en">
<head>
<title>CodeBin</title>
<link rel="stylesheet" type="text/css" href="./css/editor.css"/>
<link rel="stylesheet" rel="stylesheet" type="text/css" media="screen" href="http://openfontlibrary.org/face/hans-kendrick"/>
<link rel="stylesheet" href="./bin/icons/font-awesome-4.0.3/css/font-awesome.min.css"/>
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.9.0/jquery.min.js" type="text/javascript"></script>
<script src="./js/skulpt.min.js" type="text/javascript"></script>
<script src="./js/skulpt-stdlib.js" type="text/javascript"></script>
<script type="text/javascript">
function Sound(source,volume,loop)
{
this.source=source;
this.volume=volume;
this.loop=loop;
var son;
this.son=son;
this.finish=false;
this.stop=function()
{
document.body.removeChild(this.son);
}
this.start=function()
{
if(this.finish)return false;
this.son=document.createElement("embed");
this.son.setAttribute("src",this.source);
this.son.setAttribute("hidden","true");
this.son.setAttribute("volume",this.volume);
this.son.setAttribute("autostart","true");
this.son.setAttribute("loop",this.loop);
document.body.appendChild(this.son);
}
this.remove=function()
{
document.body.removeChild(this.son);
this.finish=true;
}
this.init=function(volume,loop)
{
this.finish=false;
this.volume=volume;
this.loop=loop;
}
}
//Konami Code Implementation
if (window.addEventListener) {
var keys = [];
var konami = "38,38,40,40,37,39,37,39,66,65";
var r_konami = "65,66,39,37,39,37,40,40,38,38";
var index=0;
window.addEventListener("keydown", function(e){
keys.push(e.keyCode);
if (keys.toString().indexOf(konami) >= 0) {
var bg=["./bin/img/bunny-fail.gif","./bin/img/tab.gif","./bin/img/laughing.gif","./bin/img/beer.gif","./bin/img/ugh.gif","./bin/img/energy.gif"];
var bg_file=bg[index];
document.body.style.backgroundImage="url("+bg_file+")";
if(index>5) {
index=0;
}
else {
index++;
}
keys = [];
};
}, true);
};
function startRickRoll() {
alert("Turn up your volume, You just got RickRolled!");
var rickroll = new Sound("bin/mp3/rickroll.mp3",100,true);
rickroll.start();
}
function loadtext()
{
"""
try:
print """
var xmlhttp=new XMLHttpRequest();
xmlhttp.onreadystatechange=function()
{
if (xmlhttp.readyState==4 && xmlhttp.status==200)
{
var text=xmlhttp.responseText;
var textArray=text.split("\\r\\n");
text="";
for (var x=0;x<textArray.length;x++)
{
text+=textArray[x];
text+="\\n";
}
editor.getSession().setValue(text);
}
else if (xmlhttp.status==404)
{
                editor.getSession().setValue('An error occurred.');
}
}
xmlhttp.open("POST","backend.py",true);
xmlhttp.setRequestHeader("Content-type","application/x-www-form-urlencoded");
xmlhttp.send("pick=1&hash="+'%s');
"""%(str(form['hash'].value))
except:
print """editor.getSession().setValue('print \"Hello World\"');"""
print """
}
</script>
</head>
<body onload="loadtext()">
<script type="text/javascript">
// output functions are configurable. This one just appends some text
// to a pre element.
function outf(text) {
var mypre = document.getElementById("output");
mypre.innerHTML = mypre.innerHTML + text;
}
function builtinRead(x) {
if (Sk.builtinFiles === undefined || Sk.builtinFiles["files"][x] === undefined)
throw "File not found: '" + x + "'";
return Sk.builtinFiles["files"][x];
}
// Here's everything you need to run a python program in skulpt
// grab the code from your textarea
// get a reference to your pre element for output
// configure the output function
// call Sk.importMainWithBody()
function runit() {
var prog = editor.getSession().getValue();
var mypre = document.getElementById("output");
mypre.innerHTML = '';
Sk.canvas = "mycanvas";
Sk.pre = "output";
Sk.configure({output:outf, read:builtinRead});
eval(Sk.importMainWithBody("<stdin>",false,prog));
}
function savetext()
{
var xmlhttp=new XMLHttpRequest();
"""
try:
print """
xmlhttp.open("POST","backend.py",true);
xmlhttp.setRequestHeader("Content-type","application/x-www-form-urlencoded");
xmlhttp.send("pick=2&code="+editor.getSession().getValue()+"&hash=%s");
"""%(str(form['hash'].value))
except:
print """
var newHash="";
xmlhttp.onreadystatechange=function()
{
if (xmlhttp.readyState==4 && xmlhttp.status==200)
{
newHash=xmlhttp.responseText;
window.location=document.URL+"?hash="+newHash;
}
else if (xmlhttp.status==404)
{
editor.getSession().setValue("An error occured.");
}
}
xmlhttp.open("POST","backend.py",true);
xmlhttp.setRequestHeader("Content-type","application/x-www-form-urlencoded");
xmlhttp.send("pick=3&code="+editor.getSession().getValue());
"""
print """
}
</script>
<div id="header-content">
<a class="easter-egg" href="#" onclick="startRickRoll()"><h1 id="logo">CodeBin (This site is pretty ghetto. Use at your own risk.)</h1></a>
</div>
<div id="body-content">
<form id="body-form">
<div id="editor"></div>
<div id="output">
<canvas id="mycanvas" ></mycanvas>
</div>
<button id="run-button" type="button" onclick="savetext(); runit();">Run</button>
</form>
</div>
<div id="footer-content">
<p>CodeBin Copyright 2014 Written By <a class="easter-egg" href="http://bit.ly/1eUpyT1">Alex Jaeger</a> and <a class="easter-egg" href="./bin/img/cat.jpg">Dylan Johnson</a>. Documentation and Support provided by the <a class="easter-egg" href="./bin/img/california-condor.jpg">Condor</a></p>
</div>
<script src="./bin/libraries/ace-builds-master/src-noconflict/ace.js" type="text/javascript" charset="utf-8"></script>
<script>
var editor=ace.edit("editor");
editor.setTheme("ace/theme/tomorrow_night");
editor.getSession().setMode("ace/mode/python");
editor.setFontSize(12);
</script>
</body>
</html>
"""
|
kevinarpe/kevinarpe-rambutan3
|
tests/check_args/annotation/test_NUMBER.py
|
Python
|
gpl-3.0
| 362
| 0
|
from rambutan3.check_args.annotation.NUMBER import NUMBER
def test():
assert not NUMBER.matches("abc")
assert not NUMBER.matches(True)
    assert NUMBER.matches(-1.234)
    assert NUMBER.matches(-1)
assert NUMBER.matches(0)
assert NUMBER.matches(0.234)
assert NUMBER.matches(1)
assert NUMBER.matches(1.234)
assert NUMBER.matches(2)
|
rohe/pysaml2-3
|
tools/parse_xsd2.py
|
Python
|
bsd-2-clause
| 68,704
| 0.005618
|
#!/usr/bin/env python
import re
import time
import getopt
import imp
import sys
import types
import errno
__version__ = 0.5
from xml.etree import cElementTree as ElementTree
INDENT = 4*" "
DEBUG = False
XMLSCHEMA = "http://www.w3.org/2001/XMLSchema"
XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
CLASS_PROP = [("c_children", ".copy()"),
("c_attributes", ".copy()"),
("c_child_order", "[:]"),
("c_cardinality", ".copy()")]
BASE_ELEMENT = ["text", "extension_elements", "extension_attributes"]
class MissingPrerequisite(Exception):
pass
def sd_copy(arg):
try:
return arg.copy()
except AttributeError:
return {}
# ------------------------------------------------------------------------
def class_pyify(ref):
return ref.replace("-","_")
PROTECTED_KEYWORDS = ["import", "def", "if", "else", "return", "for",
"while", "not", "try", "except", "in"]
def def_init(imports, attributes):
indent = INDENT+INDENT
indent3 = INDENT+INDENT+INDENT
line = ["%sdef __init__(self," % INDENT]
for elem in attributes:
if elem[0] in PROTECTED_KEYWORDS:
_name = elem[0] +"_"
else:
_name = elem[0]
if elem[2]:
line.append("%s%s='%s'," % (indent3, _name, elem[2]))
else:
line.append("%s%s=%s," % (indent3, _name, elem[2]))
for _, elems in list(imports.items()):
for elem in elems:
if elem in PROTECTED_KEYWORDS:
_name = elem +"_"
else:
_name = elem
line.append("%s%s=None," % (indent3, _name))
line.append("%stext=None," % indent3)
line.append("%sextension_elements=None," % indent3)
line.append("%sextension_attributes=None," % indent3)
line.append("%s):" % indent)
return line
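# Illustrative sketch (not part of the original tool): a call such as
#     def_init({}, [("name", "name", None)])
# would emit the following generated source lines:
#     def __init__(self,
#             name=None,
#             text=None,
#             extension_elements=None,
#             extension_attributes=None,
#         ):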
def base_init(imports):
line = []
indent4 = INDENT+INDENT+INDENT+INDENT
if not imports:
line.append("%sSamlBase.__init__(self, " % (INDENT+INDENT))
for attr in BASE_ELEMENT:
if attr in PROTECTED_KEYWORDS:
_name = attr + "_"
else:
_name = attr
line.append("%s%s=%s," % (indent4, _name, _name))
line.append("%s)" % indent4)
else:
        # TODO have to keep apart which properties come from which superior
for sup, elems in list(imports.items()):
line.append("%s%s.__init__(self, " % (INDENT+INDENT, sup))
lattr = elems[:]
lattr.extend(BASE_ELEMENT)
for attr in lattr:
if attr in PROTECTED_KEYWORDS:
_name = attr + "_"
else:
_name = attr
line.append("%s%s=%s," % (indent4, _name, _name))
line.append("%s)" % indent4)
return line
def initialize(attributes):
indent = INDENT+INDENT
line = []
for prop, val, _default in attributes:
if prop in PROTECTED_KEYWORDS:
_name = prop +"_"
else:
_name = prop
if val in PROTECTED_KEYWORDS:
_vname = val +"_"
else:
_vname = val
line.append("%sself.%s=%s" % (indent, _name, _vname))
return line
def _mod_typ(prop):
try:
(mod, typ) = prop.type
except ValueError:
typ = prop.type
mod = None
except TypeError: # No type property
try:
(mod, typ) = prop.ref
except ValueError:
if prop.class_name:
typ = prop.class_name
else:
typ = prop.ref
mod = None
return mod, typ
def _mod_cname(prop, cdict):
if hasattr(prop, "scoped"):
cname = prop.class_name
mod = None
else:
(mod, typ) = _mod_typ(prop)
if not mod:
try:
                cname = cdict[typ].class_name
            except KeyError:
                cname = cdict[class_pyify(typ)].class_name
else:
cname = typ
return mod, cname
def leading_uppercase(string):
try:
return string[0].upper()+string[1:]
except IndexError:
return string
except TypeError:
return ""
def leading_lowercase(string):
try:
return string[0].lower()+string[1:]
except IndexError:
return string
except TypeError:
return ""
def rm_duplicates(properties):
keys = []
clist = []
for prop in properties:
if prop.name in keys:
continue
else:
clist.append(prop)
keys.append(prop.name)
return clist
# def rm_duplicates(lista):
# res = []
# for item in lista:
# if item not in res:
# res.append(item)
# return res
def klass_namn(obj):
if obj.class_name:
return obj.class_name
else:
return obj.name
class PyObj(object):
def __init__(self, name=None, pyname=None, root=None):
self.name = name
self.done = False
self.local = True
self.root = root
self.superior = []
self.value_type = ""
self.properties = ([], [])
self.abstract = False
self.class_name = ""
if pyname:
self.pyname = pyname
elif name:
self.pyname = pyify(name)
else:
self.pyname = name
self.type = None
def child_spec(self, target_namespace, prop, mod, typ, lista):
if mod:
namesp = external_namespace(self.root.modul[mod])
pkey = '{%s}%s' % (namesp, prop.name)
typ = "%s.%s" % (mod, typ)
else:
pkey = '{%s}%s' % (target_namespace, prop.name)
if lista:
return "c_children['%s'] = ('%s', [%s])" % (
pkey, prop.pyname, typ)
else:
return "c_children['%s'] = ('%s', %s)" % (
pkey, prop.pyname, typ)
def knamn(self, sup, cdict):
cname = cdict[sup].class_name
if not cname:
(namesp, tag) = cdict[sup].name.split('.')
if namesp:
ctag = self.root.modul[namesp].factory(tag).__class__.__name__
cname = '%s.%s' % (namesp, ctag)
else:
cname = tag + "_"
return cname
def _do_properties(self, line, cdict, ignore, target_namespace):
args = []
child = []
try:
(own, inh) = self.properties
except AttributeError:
(own, inh) = ([], [])
for prop in own:
if isinstance(prop, PyAttribute):
line.append("%sc_attributes['%s'] = %s" % (INDENT,
prop.name, prop.spec()))
if prop.fixed:
args.append((prop.pyname, prop.fixed, None))
else:
if prop.default:
args.append((prop.pyname, prop.pyname, prop.default))
else:
args.append((prop.pyname, prop.pyname, None))
elif isinstance(prop, PyElement):
(mod, cname) = _mod_cname(prop, cdict)
if prop.max == "unbounded":
lista = True
pmax = 0 # just has to be different from 1
else:
pmax = int(prop.max)
lista = False
if prop.name in ignore:
pass
else:
line.append("%s%s" % (INDENT, self.child_spec(
target_namespace, prop,
mod, cname,
lista)))
pmin = int(getattr(prop, 'min', 1))
if pmax == 1 and pmin == 1:
pass
elif prop.max == "unbounded":
|
SaschaMester/delicium
|
tools/telemetry/telemetry/web_perf/metrics/smoothness_unittest.py
|
Python
|
bsd-3-clause
| 13,607
| 0.003087
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.internal.results import page_test_results
from telemetry.page import page as page_module
from telemetry.web_perf.metrics import rendering_stats
from telemetry.web_perf.metrics import smoothness
class _MockRenderingStats(object):
stats = ['refresh_period', 'frame_timestamps', 'frame_times', 'paint_times',
'painted_pixel_counts', 'record_times',
'recorded_pixel_counts', 'approximated_pixel_percentages',
'checkerboarded_pixel_percentages', 'input_event_latency',
'frame_queueing_durations', 'scroll_update_latency',
'gesture_scroll_update_latency']
def __init__(self, **kwargs):
self.errors = {}
for stat in self.stats:
value = kwargs[stat] if stat in kwargs else None
setattr(self, stat, value)
#pylint: disable=W0212
class SmoothnessMetricUnitTest(unittest.TestCase):
def setUp(self):
self.metric = smoothness.SmoothnessMetric()
self.page = page_module.Page('file://blank.html')
self.good_timestamps = [[10, 20], [30, 40, 50]]
self.not_enough_frames_timestamps = [[10], [20, 30, 40]]
def testPopulateResultsFromStats(self):
stats = _MockRenderingStats()
for stat in _MockRenderingStats.stats:
# Just set fake data for all of the relevant arrays of stats typically
# found in a RenderingStats object.
setattr(stats, stat, [[10, 20], [30, 40, 50]])
results = page_test_results.PageTestResults()
results.WillRunPage(self.page)
self.metric._PopulateResultsFromStats(results, stats, False)
current_page_run = results.current_page_run
self.assertTrue(current_page_run.ok)
expected_values_count = 12
self.assertEquals(expected_values_count, len(current_page_run.values))
def testHasEnoughFrames(self):
# This list will pass since every sub-array has at least 2 frames.
has_enough_frames = self.metric._HasEnoughFrames(self.good_timestamps)
self.assertTrue(has_enough_frames)
def testHasEnoughFramesWithNotEnoughFrames(self):
# This list will fail since the first sub-array only has a single frame.
has_enough_frames = self.metric._HasEnoughFrames(
self.not_enough_frames_timestamps)
self.assertFalse(has_enough_frames)
def testComputeSurfaceFlingerMetricNoJank(self):
stats = _MockRenderingStats(refresh_period=10,
frame_timestamps=[[10, 20], [130, 140, 150]],
frame_times=[[10], [10, 10]])
avg_surface_fps, jank_count, max_frame_delay, frame_lengths = (
self.metric._ComputeSurfaceFlingerMetric(self.page, stats))
self.assertEquals([1, 1, 1], frame_lengths.values)
self.assertEquals(1, max_frame_delay.value)
self.assertEquals(0, jank_count.value)
self.assertEquals(100, avg_surface_fps.value)
def testComputeSurfaceFlingerMetricJank(self):
stats = _MockRenderingStats(
refresh_period=10,
frame_timestamps=[[10, 20, 50], [130, 140, 150, 170, 180]],
frame_times=[[10, 30], [10, 10, 20, 10]])
avg_surface_fps, jank_count, max_frame_delay, frame_lengths = (
self.metric._ComputeSurfaceFlingerMetric(self.page, stats))
self.assertEquals([1, 3, 1, 1, 2, 1], frame_lengths.values)
self.assertEquals(3, max_frame_delay.value)
self.assertEquals(2, jank_count.value)
self.assertEquals(67, avg_surface_fps.value)
def testComputeFrameTimeMetricWithNotEnoughFrames(self):
stats = _MockRenderingStats(
refresh_period=10,
frame_timestamps=self.not_enough_frames_timestamps,
frame_times=[[10, 20], [30, 40, 50]])
avg_surface_fps, jank_count, max_frame_delay, frame_lengths = (
self.metric._ComputeSurfaceFlingerMetric(self.page, stats))
self.assertEquals(None, avg_surface_fps.value)
self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
avg_surface_fps.none_value_reason)
self.assertEquals(None, jank_count.value)
self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
jank_count.none_value_reason)
self.assertEquals(None, max_frame_delay.value)
self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
max_frame_delay.none_value_reason)
self.assertEquals(None, frame_lengths.values)
self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
frame_lengths.none_value_reason)
def testComputeLatencyMetric(self):
stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
input_event_latency=[[10, 20], [30, 40, 50]])
# pylint: disable=unbalanced-tuple-unpacking
mean_value, discrepancy_value = self.metric._ComputeLatencyMetric(
self.page, stats, 'input_event_latency', stats.input_event_latency)
self.assertEquals(30, mean_value.value)
self.assertEquals(60, discrepancy_value.value)
def testComputeLatencyMetricWithMissingData(self):
stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
input_event_latency=[[], []])
value = self.metric._ComputeLatencyMetric(
self.page, stats, 'input_event_latency', stats.input_event_latency)
self.assertEquals((), value)
def testComputeLatencyMetricWithNotEnoughFrames(self):
stats = _MockRenderingStats(
frame_timestamps=self.not_enough_frames_timestamps,
input_event_latency=[[], []])
# pylint: disable=unbalanced-tuple-unpacking
mean_value, discrepancy_value = self.metric._ComputeLatencyMetric(
self.page, stats, 'input_event_latency', stats.input_event_latency)
self.assertEquals(None, mean_value.value)
self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
mean_value.none_value_reason)
self.assertEquals(None, discrepancy_value.value)
self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
discrepancy_value.none_value_reason)
def testComputeGestureScrollUpdateLatency(self):
stats = _MockRenderingStats(
frame_timestamps=self.good_timestamps,
gesture_scroll_update_latency=[[10, 20], [30, 40, 50]])
gesture_value = self.metric._ComputeFirstGestureScrollUpdateLatency(
self.page, stats)[0]
self.assertEquals(10, gesture_value.value)
def testComputeGestureScrollUpdateLatencyWithMissingData(self):
stats = _MockRenderingStats(
frame_timestamps=self.good_timestamps,
        gesture_scroll_update_latency=[[], []])
value = self.metric._ComputeFirstGestureScrollUpdateLatency(
self.page, stats)
self.assertEquals((), value)
  def testComputeGestureScrollUpdateLatencyWithNotEnoughFrames(self):
stats = _MockRenderingStats(
frame_timestamps=self.not_enough_frames_timestamps,
gesture_scroll_update_latency=[[10, 20], [30, 40, 50]])
gesture_value = self.metric._ComputeFirstGestureScrollUpdateLatency(
self.page, stats)[0]
self.assertEquals(None, gesture_value.value)
self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
gesture_value.none_value_reason)
def testComputeQueueingDuration(self):
stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
frame_queueing_durations=[[10, 20], [30, 40]])
list_of_scalar_values = self.metric._ComputeQueueingDuration(self.page,
stats)
self.assertEquals([10, 20, 30, 40], list_of_scalar_values.values)
def testComputeQueueingDurationWithMissingData(self):
stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
frame_queueing_durations=[[], []])
list_of_scalar_values = self.metric._ComputeQueueingDuration(
self.page, stats)
self.assertEquals(None, list_of_scalar_values.values)
self.assertEquals('No frame queueing durations recorded.',
list_of_scalar_values.none_value_reason)
def testComputeQueueingDurationWi
|
imec-myhdl/pycontrol-gui
|
BlockEditor/libraries/library_can/Epos_AD.py
|
Python
|
lgpl-2.1
| 283
| 0.024735
|
# cell definition
# name = 'Epos_AD'
# libname = 'can'
inp = 0
outp = 1
parameters = dict() #parametriseerbare cell
properties = {'Device ID': ' 0x01', 'Channel [0/1]': ' 0', 'name': 'epos_areadBlk'} #voor netlisten
#view variables:
iconSource = 'AD'
views = {'icon':iconSource}
|
nanocell/lsync
|
python/boto/cloudformation/connection.py
|
Python
|
gpl-3.0
| 15,766
| 0.001395
|
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
try:
import simplejson as json
except:
import json
import boto
from boto.cloudformation.stack import Stack, StackSummary, StackEvent
from boto.cloudformation.stack import StackResource, StackResourceSummary
from boto.cloudformation.template import Template
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
class CloudFormationConnection(AWSQueryConnection):
"""
A Connection to the CloudFormation Service.
"""
APIVersion = boto.config.get('Boto', 'cfn_version', '2010-05-15')
DefaultRegionName = boto.config.get('Boto', 'cfn_region_name', 'us-east-1')
DefaultRegionEndpoint = boto.config.get('Boto', 'cfn_region_endpoint',
'cloudformation.us-east-1.amazonaws.com')
valid_states = ("CREATE_IN_PROGRESS", "CREATE_FAILED", "CREATE_COMPLETE",
"ROLLBACK_IN_PROGRESS", "ROLLBACK_FAILED", "ROLLBACK_COMPLETE",
"DELETE_IN_PROGRESS", "DELETE_FAILED", "DELETE_COMPLETE")
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, debug=0,
https_connection_factory=None, region=None, path='/',
converter=None, security_token=None, validate_certs=True):
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint, CloudFormationConnection)
self.region = region
AWSQueryConnection.__init__(self, aws_access_key_id,
aws_secret_access_key,
is_secure, port, proxy, proxy_port,
proxy_user, proxy_pass,
self.region.endpoint, debug,
https_connection_factory, path,
security_token,
                                    validate_certs=validate_certs)
def _required_auth_capability(self):
return ['hmac-v4']
def encode_bool(self, v):
v = bool(v)
return {True: "true", False: "false"}[v]
def _build_create_or_update_params(self, stack_name, template_body,
template_url, parameters,
notification_arns, disable_rollback,
timeout_in_minutes, capabilities, tags):
"""
Helper that creates JSON parameters needed by a Stack Create or
Stack Update call.
:type stack_name: string
        :param stack_name: The name of the Stack, must be unique among running
Stacks
:type template_body: string
:param template_body: The template body (JSON string)
:type template_url: string
:param template_url: An S3 URL of a stored template JSON document. If
both the template_body and template_url are
specified, the template_body takes precedence
:type parameters: list of tuples
:param parameters: A list of (key, value) pairs for template input
parameters.
:type notification_arns: list of strings
:param notification_arns: A list of SNS topics to send Stack event
notifications to.
:type disable_rollback: bool
:param disable_rollback: Indicates whether or not to rollback on
failure.
:type timeout_in_minutes: int
:param timeout_in_minutes: Maximum amount of time to let the Stack
spend creating itself. If this timeout is exceeded,
the Stack will enter the CREATE_FAILED state.
:type capabilities: list
:param capabilities: The list of capabilities you want to allow in
the stack. Currently, the only valid capability is
'CAPABILITY_IAM'.
:type tags: dict
:param tags: A dictionary of (key, value) pairs of tags to
associate with this stack.
:rtype: dict
:return: JSON parameters represented as a Python dict.
"""
params = {'ContentType': "JSON", 'StackName': stack_name,
'DisableRollback': self.encode_bool(disable_rollback)}
if template_body:
params['TemplateBody'] = template_body
if template_url:
params['TemplateURL'] = template_url
if template_body and template_url:
boto.log.warning("If both TemplateBody and TemplateURL are"
" specified, only TemplateBody will be honored by the API")
if len(parameters) > 0:
for i, (key, value) in enumerate(parameters):
params['Parameters.member.%d.ParameterKey' % (i + 1)] = key
params['Parameters.member.%d.ParameterValue' % (i + 1)] = value
if capabilities:
for i, value in enumerate(capabilities):
params['Capabilities.member.%d' % (i + 1)] = value
if tags:
for i, (key, value) in enumerate(tags.items()):
params['Tags.member.%d.Key' % (i + 1)] = key
params['Tags.member.%d.Value' % (i + 1)] = value
if len(notification_arns) > 0:
self.build_list_params(params, notification_arns,
"NotificationARNs.member")
if timeout_in_minutes:
params['TimeoutInMinutes'] = int(timeout_in_minutes)
return params
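    # Illustrative sketch (not part of boto): a hypothetical call like
    #     self._build_create_or_update_params('mystack', body, None,
    #                                         [('KeyName', 'mykey')], [],
    #                                         False, None, None, None)
    # returns the flattened AWS query parameters:
    #     {'ContentType': 'JSON', 'StackName': 'mystack',
    #      'DisableRollback': 'false', 'TemplateBody': body,
    #      'Parameters.member.1.ParameterKey': 'KeyName',
    #      'Parameters.member.1.ParameterValue': 'mykey'}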
def create_stack(self, stack_name, template_body=None, template_url=None,
parameters=[], notification_arns=[], disable_rollback=False,
timeout_in_minutes=None, capabilities=None, tags=None):
"""
Creates a CloudFormation Stack as specified by the template.
:type stack_name: string
        :param stack_name: The name of the Stack, must be unique among running
Stacks
:type template_body: string
:param template_body: The template body (JSON string)
:type template_url: string
:param template_url: An S3 URL of a stored template JSON document. If
both the template_body and template_url are
specified, the template_body takes precedence
:type parameters: list of tuples
:param parameters: A list of (key, value) pairs for template input
parameters.
:type notification_arns: list of strings
:param notification_arns: A list of SNS topics to send Stack event
notifications to.
:type disable_rollback: bool
:param disable_rollback: Indicates whether or not to rollback on
failure.
:type timeout_in_minutes: int
:param timeout_in_minutes: Maximum amount of time to let the Stack
spend creating itself. If this timeout is exceeded,
the Stack will enter the CREATE_FAILED state.
:type capabilities: list
:param capabilities: The list of capabilit
|
uttamk/katas
|
roman_numerals/python/roman_numerals/__init__.py
|
Python
|
mit
| 98
| 0.010204
|
single_numeral_to_decimal_map = {"I": 1, "V": 5, "X": 10, "L": 50, "C": 100, "D": 500, "M": 1000}
|
geodashio/geodash-server
|
geodashserver/apps.py
|
Python
|
bsd-3-clause
| 143
| 0
|
from django.apps import AppConfig
class GeoDashServerDjangoConfig(AppConfig):
name = 'geodashserver'
verbose_name = "GeoDash Server"
|
jeremiahyan/odoo
|
odoo/models.py
|
Python
|
gpl-3.0
| 303,080
| 0.002221
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
"""
Object Relational Mapping module:
* Hierarchical structure
* Constraints consistency and validation
* Object metadata depends on its status
* Optimised processing by complex query (multiple actions at once)
* Default field values
* Permissions optimisation
* Persistent object: DB postgresql
* Data conversion
* Multi-level caching system
* Two different inheritance mechanisms
* Rich set of field types:
- classical (varchar, integer, boolean, ...)
- relational (one2many, many2one, many2many)
- functional
"""
import collections
import contextlib
import datetime
import dateutil
import fnmatch
import functools
import inspect
import itertools
import io
import logging
import operator
import pytz
import re
import uuid
import warnings
from collections import defaultdict, OrderedDict
from collections.abc import MutableMapping
from contextlib import closing
from inspect import getmembers, currentframe
from operator import attrgetter, itemgetter
import babel.dates
import dateutil.relativedelta
import psycopg2, psycopg2.extensions
from lxml import etree
from lxml.builder import E
import odoo
from . import SUPERUSER_ID
from . import api
from . import tools
from .exceptions import AccessError, MissingError, ValidationError, UserError
from .osv.query import Query
from .tools import frozendict, lazy_classproperty, ormcache, \
LastOrderedSet, OrderedSet, ReversedIterable, \
groupby, discardattr, partition
from .tools.config import config
from .tools.func import frame_codeinfo
from .tools.misc import CountingStream, clean_context, DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT, get_lang
from .tools.translate import _
from .tools import date_utils
from .tools import populate
from .tools import unique
from .tools.lru import LRU
_logger = logging.getLogger(__name__)
_unlink = logging.getLogger(__name__ + '.unlink')
regex_order = re.compile(r'^(\s*([a-z0-9:_]+|"[a-z0-9:_]+")(\s+(desc|asc))?\s*(,|$))+(?<!,)$', re.I)
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
regex_pg_name = re.compile(r'^[a-z_][a-z0-9_$]*$', re.I)
regex_field_agg = re.compile(r'(\w+)(?::(\w+)(?:\((\w+)\))?)?')
AUTOINIT_RECALCULATE_STORED_FIELDS = 1000
def check_object_name(name):
""" Check if the given name is a valid model name.
        The _name attribute in osv and osv_memory objects is subject to
        some restrictions. This function returns True or False depending on
        whether the given name is allowed.
        TODO: this is an approximation. The goal in this approximation
        is to disallow uppercase characters (in some places, we quote
        table/column names and in others not, which leads to this kind
of errors:
psycopg2.ProgrammingError: relation "xxx" does not exist).
The same restriction should apply to both osv and osv_memory
objects for consistency.
"""
if regex_object_name.match(name) is None:
return False
return True
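# Illustrative examples (not in the original source): the regex accepts only
# lowercase names built from [a-z0-9_.], e.g.
#     check_object_name('res.partner')   # True
#     check_object_name('Res.Partner')   # False (uppercase is disallowed)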
def raise_on_invalid_object_name(name):
if not check_object_name(name):
msg = "The _name attribute %s is not valid." % name
raise ValueError(msg)
def check_pg_name(name):
""" Check whether the given name is a valid PostgreSQL identifier name. """
if not regex_pg_name.match(name):
raise ValidationError("Invalid characters in table name %r" %
|
name)
if len(name) > 63:
raise ValidationError("Table name %r is too long" % name)
# match private methods, to prevent their remote invocation
regex_private = re.compile(r'^(_.*|init)$')
def check_method_name(name):
""" Raise an ``AccessError`` if ``name`` is a private method name. """
if regex_private.match(name):
raise AccessError(_('Private methods (such as %s) cannot be called remotely.') % (name,))
def fix_import_export_id_paths(fieldname):
"""
    Fixes the id fields in imports and exports, and splits field paths
on '/'.
:param str fieldname: name of the field to import/export
:return: split field name
:rtype: list of str
"""
fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id)
return fixed_external_id.split('/')
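# Illustrative examples (not in the original source), tracing the two
# substitutions above:
#     fix_import_export_id_paths('parent_id.id')   # -> ['parent_id', '.id']
#     fix_import_export_id_paths('country:id')     # -> ['country', 'id']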
def trigger_tree_merge(node1, node2):
""" Merge two trigger trees. """
for key, val in node2.items():
if key is None:
node1.setdefault(None, OrderedSet())
node1[None].update(val)
else:
node1.setdefault(key, {})
trigger_tree_merge(node1[key], node2[key])
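# Illustrative sketch (not in the original source): merging
#     node1 = {None: OrderedSet(['a'])}
#     node2 = {None: OrderedSet(['b']), 'field': {None: OrderedSet(['c'])}}
# with trigger_tree_merge(node1, node2) leaves node1 as
#     {None: OrderedSet(['a', 'b']), 'field': {None: OrderedSet(['c'])}}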
class MetaModel(api.Meta):
""" The metaclass of all model classes.
Its main purpose is to register the models per module.
"""
module_to_models = defaultdict(list)
def __new__(meta, name, bases, attrs):
# this prevents assignment of non-fields on recordsets
attrs.setdefault('__slots__', ())
# this collects the fields defined on the class (via Field.__set_name__())
attrs.setdefault('_field_definitions', [])
if attrs.get('_register', True):
# determine '_module'
if '_module' not in attrs:
module = attrs['__module__']
assert module.startswith('odoo.addons.'), \
f"Invalid import of {module}.{name}, it should start with 'odoo.addons'."
attrs['_module'] = module.split('.')[2]
# determine model '_name' and normalize '_inherits'
inherit = attrs.get('_inherit', ())
if isinstance(inherit, str):
inherit = attrs['_inherit'] = [inherit]
if '_name' not in attrs:
attrs['_name'] = inherit[0] if len(inherit) == 1 else name
return super().__new__(meta, name, bases, attrs)
def __init__(self, name, bases, attrs):
super().__init__(name, bases, attrs)
if '__init__' in attrs and len(inspect.signature(attrs['__init__']).parameters) != 4:
_logger.warning("The method %s.__init__ doesn't match the new signature in module %s", name, attrs.get('__module__'))
if not attrs.get('_register', True):
return
# Remember which models to instantiate for this module.
if self._module:
self.module_to_models[self._module].append(self)
if not self._abstract and self._name not in self._inherit:
# this class defines a model: add magic fields
def add(name, field):
setattr(self, name, field)
field.__set_name__(self, name)
def add_default(name, field):
if name not in attrs:
setattr(self, name, field)
field.__set_name__(self, name)
add('id', fields.Id(automatic=True))
add(self.CONCURRENCY_CHECK_FIELD, fields.Datetime(
string='Last Modified on', automatic=True,
compute='_compute_concurrency_field', compute_sudo=False))
add_default('display_name', fields.Char(
string='Display Name', automatic=True, compute='_compute_display_name'))
if attrs.get('_log_access', self._auto):
add_default('create_uid', fields.Many2one(
'res.users', string='Created by', automatic=True, readonly=True))
add_default('create_date', fields.Datetime(
string='Created on', automatic=True, readonly=True))
add_default('write_uid', fields.Many2one(
'res.users', string='Last Updated by', automatic=True, readonly=True))
add_default('write_date', fields.Datetime(
string='Last Updated on', automatic=True, readonly=True))
class NewId(object):
""" Pseudo-ids for new records, encapsulating an optional origin id (actual
record id) and an optional reference (any value).
"""
__slots__ = ['origin', 'ref']
def __init__(self, origin=None, ref=None):
        self.origin = origin
        self.ref = ref
|
bikong2/django
|
tests/auth_tests/test_views.py
|
Python
|
bsd-3-clause
| 45,028
| 0.001621
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import itertools
import os
import re
from importlib import import_module
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import REDIRECT_FIELD_NAME, SESSION_KEY
from django.contrib.auth.forms import (
AuthenticationForm, PasswordChangeForm, SetPasswordForm,
)
from django.contrib.auth.models import User
from django.contrib.auth.tests.custom_user import CustomUser
from django.contrib.auth.views import login as login_view, redirect_to_login
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.core.urlresolvers import NoReverseMatch, reverse, reverse_lazy
from django.db import connection
from django.http import HttpRequest, QueryDict
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import (
TestCase, ignore_warnings, modify_settings, override_settings,
)
from django.test.utils import patch_logger
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text
from django.utils.http import urlquote
from django.utils.six.moves.urllib.parse import ParseResult, urlparse
from django.utils.translation import LANGUAGE_SESSION_KEY
from .models import UUIDUser
from .settings import AUTH_TEMPLATES
@override_settings(
LANGUAGES=[
('en', 'English'),
],
LANGUAGE_CODE='en',
TEMPLATES=AUTH_TEMPLATES,
USE_TZ=False,
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='auth_tests.urls',
)
class AuthViewsTestCase(TestCase):
"""
    Helper base class for all the following test cases.
"""
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='testclient',
first_name='Test', last_name='Client', email='testclient@example.com', is_staff=False, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u2 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='inactive',
first_name='Inactive', last_name='User', email='testclient2@example.com', is_staff=False, is_active=False,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u3 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='staff',
first_name='Staff', last_name='Member', email='staffmember@example.com', is_staff=True, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u4 = User.objects.create(
password='', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
username='empty_password', first_name='Empty', last_name='Password', email='empty_password@example.com',
is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u5 = User.objects.create(
password='$', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
            username='unmanageable_password', first_name='Unmanageable', last_name='Password',
email='unmanageable_password@example.com', is_staff=False, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u6 = User.objects.create(
password='foo$bar', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
username='unknown_password', first_name='Unknown', last_name='Password',
            email='unknown_password@example.com', is_staff=False, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
def login(self, username='testclient', password='password'):
response = self.client.post('/login/', {
'username': username,
'password': password,
})
self.assertIn(SESSION_KEY, self.client.session)
return response
def logout(self):
response = self.client.get('/admin/logout/')
self.assertEqual(response.status_code, 200)
self.assertNotIn(SESSION_KEY, self.client.session)
def assertFormError(self, response, error):
"""Assert that error is found in response.context['form'] errors"""
form_errors = list(itertools.chain(*response.context['form'].errors.values()))
self.assertIn(force_text(error), form_errors)
def assertURLEqual(self, url, expected, parse_qs=False):
"""
Given two URLs, make sure all their components (the ones given by
urlparse) are equal, only comparing components that are present in both
URLs.
If `parse_qs` is True, then the querystrings are parsed with QueryDict.
This is useful if you don't want the order of parameters to matter.
Otherwise, the query strings are compared as-is.
"""
fields = ParseResult._fields
for attr, x, y in zip(fields, urlparse(url), urlparse(expected)):
if parse_qs and attr == 'query':
x, y = QueryDict(x), QueryDict(y)
if x and y and x != y:
self.fail("%r != %r (%s doesn't match)" % (url, expected, attr))
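    # Hypothetical usage: with parse_qs=True these compare equal because
    # query-parameter order is ignored:
    #   self.assertURLEqual('/login/?next=/a&x=1', '/login/?x=1&next=/a',
    #                       parse_qs=True)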
@override_settings(ROOT_URLCONF='django.contrib.auth.urls')
class AuthViewNamedURLTests(AuthViewsTestCase):
def test_named_urls(self):
"Named URLs should be reversible"
expected_named_urls = [
('login', [], {}),
('logout', [], {}),
('password_change', [], {}),
('password_change_done', [], {}),
('password_reset', [], {}),
('password_reset_done', [], {}),
('password_reset_confirm', [], {
'uidb64': 'aaaaaaa',
'token': '1111-aaaaa',
}),
('password_reset_complete', [], {}),
]
for name, args, kwargs in expected_named_urls:
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name)
class PasswordResetTest(AuthViewsTestCase):
def test_email_not_found(self):
"""If the provided email is not registered, don't raise any error but
also don't send any email."""
response = self.client.get('/password_reset/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("http://", mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
# optional multipart text/html email has been added. Make sure original,
# default functionality is 100% the same
self.assertFalse(mail.outbox[0].message().is_multipart())
def test_extra_email_context(self):
"""
extra_email_context should be available in the email template context.
"""
response = self.client.post(
'/password_reset_extra_email_context/',
{'email': 'staffmember@example.com'},
)
self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
|
xiaoyexu/xCRM
|
crm/admin.py
|
Python
|
gpl-3.0
| 12,452
| 0.002008
|
# -*- coding: UTF-8 -*-
from django.contrib import admin
from .models import *
class UserAdmin(admin.ModelAdmin):
list_display = ('nickName', 'realName')
admin.site.register(User, UserAdmin)
class UserLoginStatusAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
admin.site.register(UserLoginStatus, UserLoginStatusAdmin)
class UserLoginAdmin(admin.ModelAdmin):
list_display = (
'username', 'user', 'userbp', 'password', 'passwordEncrypted', 'status', 'failureCount', 'lastLoginAt',
'pulseAt')
admin.site.register(UserLogin, UserLoginAdmin)
class AuthObjectAdmin(admin.ModelAdmin):
list_display = ('authObject', 'create', 'read', 'update', 'delete')
admin.site.register(AuthObject, AuthObjectAdmin)
class AuthObjectTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
admin.site.register(AuthObjectType, AuthObjectTypeAdmin)
class UserRoleTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
admin.site.register(UserRoleType, UserRoleTypeAdmin)
class UserProfileTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
admin.site.register(UserProfileType, UserProfileTypeAdmin)
class BPAdmin(admin.ModelAdmin):
list_display = (
'id', 'type', 'partnerNo', 'firstName', 'middleName', 'lastName', 'name1', 'name2', 'name3', 'name4', 'title',
'mobile', 'email', 'valid',
'deleteFlag')
search_fields = (
'partnerNo', 'firstName', 'middleName', 'lastName', 'name1', 'name2', 'name3', 'name4', 'title', 'mobile',
'email')
admin.site.register(BP, BPAdmin)
class BPBaseTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
class BPTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'baseType', 'description', 'assignmentBlock')
admin.site.register(BPType, BPTypeAdmin)
class BPRelTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description', 'descAtoB', 'descBtoA')
admin.site.register(BPRelType, BPRelTypeAdmin)
class BPRelationAdmin(admin.ModelAdmin):
list_display = ('bpA', 'relation', 'bpB', 'comments', 'valid')
admin.site.register(BPRelation, BPRelationAdmin)
class BPCustomizedAdmin(admin.ModelAdmin):
list_display = (
'bp', 'boolAttribute1', 'boolAttribute2', 'boolAttribute3', 'empResp', 'legalPerson', 'actualPerson',
'corpStructure', 'corpLiscense', 'file1', 'file2', 'imgFile1', 'imgFile2')
admin.site.register(BPCustomized, BPCustomizedAdmin)
class AddressTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
admin.site.register(AddressType, AddressTypeAdmin)
class DistrictTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
admin.site.register(DistrictType, DistrictTypeAdmin)
class AddressAdmin(admin.ModelAdmin):
list_display = (
'type', 'district', 'address1', 'address2', 'address3', 'address4', 'phone1', 'contact1', 'phone2', 'contact2')
admin.site.register(Address, AddressAdmin)
class OrderTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'baseType', 'description', 'assignmentBlock')
admin.site.register(OrderType, OrderTypeAdmin)
class OrderBaseTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
admin.site.register(OrderBaseType, OrderBaseTypeAdmin)
class OrderRelTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description', 'descAtoB', 'descBtoA')
admin.site.register(OrderRelType, OrderRelTypeAdmin)
class OrderRelationAdmin(admin.ModelAdmin):
list_display = ('orderA', 'relation', 'orderB', 'comments', 'valid')
admin.site.register(OrderRelation, OrderRelationAdmin)
class PFTypeAdmin(admin.ModelAdmin):
    list_display = ('orderType', 'key', 'description')
admin.site.register(PFType, PFTypeAdmin)
class PriorityTypeAdmin(admin.ModelAdmin):
list_display = ('orderType', 'key', 'description', 'sortOrder')
admin.site.register(PriorityType, PriorityTypeAdmin)
class StatusTypeAdmin(admin.ModelAdmin):
list_display = ('orderType', 'key', 'description', 'sortOrder')
admin.site.register(StatusType, StatusTypeAdmin)
class OrderExtSelectionFieldTypeAdmin(admin.ModelAdmin):
list_display = ('orderType', 'fieldKey', 'key', 'description', 'sortOrder')
admin.site.register(OrderExtSelectionFieldType, OrderExtSelectionFieldTypeAdmin)
class TextTypeAdmin(admin.ModelAdmin):
list_display = ('orderType', 'key', 'description')
admin.site.register(TextType, TextTypeAdmin)
class BPTextTypeAdmin(admin.ModelAdmin):
list_display = ('bpType', 'key', 'description')
admin.site.register(BPTextType, BPTextTypeAdmin)
class OrderAdmin(admin.ModelAdmin):
list_display = (
'id', 'type', 'description', 'createdBy', 'createdAt', 'updatedBy', 'updatedAt', 'priority', 'status',
'deleteFlag')
search_fields = ('id', 'description')
admin.site.register(Order, OrderAdmin)
class OrderCustomizedAdmin(admin.ModelAdmin):
list_display = (
'order', 'travelAmount', 'amount', 'stage', 'goLiveDate', 'file1', 'file2', 'imgFile1', 'imgFile2')
admin.site.register(OrderCustomized, OrderCustomizedAdmin)
class OrderMultipleValueFieldAdmin(admin.ModelAdmin):
list_display = ('id', 'order', 'field', 'charValue1', 'charValue2')
admin.site.register(OrderMultipleValueField, OrderMultipleValueFieldAdmin)
class OrderPFAdmin(admin.ModelAdmin):
list_display = ('order', 'pf', 'bp', 'relatedOrder', 'main')
admin.site.register(OrderPF, OrderPFAdmin)
class OrderTextAdmin(admin.ModelAdmin):
list_display = ('type', 'order', 'createdBy', 'createdAt', 'content')
search_fields = ('content',)
admin.site.register(OrderText, OrderTextAdmin)
class BPTextAdmin(admin.ModelAdmin):
list_display = ('type', 'bp', 'createdBy', 'createdAt', 'content')
search_fields = ('content',)
admin.site.register(BPText, BPTextAdmin)
class OrderExtFieldTypeAdmin(admin.ModelAdmin):
list_display = ('orderType', 'key', 'description')
admin.site.register(OrderExtFieldType, OrderExtFieldTypeAdmin)
class OrderExtFieldAdmin(admin.ModelAdmin):
list_display = ('type', 'originalOrder', 'value', 'relatedBp', 'relatedOrder', 'relatedSelection')
admin.site.register(OrderExtField, OrderExtFieldAdmin)
class SiteLanguageAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
admin.site.register(SiteLanguage, SiteLanguageAdmin)
class SiteAppTypeAdmin(admin.ModelAdmin):
list_display = ('appId', 'description')
admin.site.register(SiteAppType, SiteAppTypeAdmin)
class SitePhraseAdmin(admin.ModelAdmin):
list_display = ('phraseId', 'app', 'phraseLan', 'content', 'bigContent')
search_fields = ('phraseId', 'content', 'bigContent')
admin.site.register(SitePhrase, SitePhraseAdmin)
class SiteMenuItemAdmin(admin.ModelAdmin):
list_display = ('role', 'parentMenuId', 'phraseId', 'appId', 'pageApp', 'sortOrder', 'valid')
admin.site.register(SiteMenuItem, SiteMenuItemAdmin)
class FieldTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
admin.site.register(FieldType, FieldTypeAdmin)
class OrderFieldDefAdmin(admin.ModelAdmin):
list_display = (
'orderType', 'fieldKey', 'attributeType', 'fieldType', 'valueType', 'storeType', 'storeColumn', 'storeKey')
admin.site.register(OrderFieldDef, OrderFieldDefAdmin)
class BPFieldDefAdmin(admin.ModelAdmin):
list_display = (
'bpType', 'fieldKey', 'attributeType', 'fieldType', 'valueType', 'storeType', 'storeColumn', 'storeKey')
admin.site.register(BPFieldDef, BPFieldDefAdmin)
class UserSavedSearchFavoriteAdmin(admin.ModelAdmin):
list_display = ('userlogin', 'type', 'name', 'sortOrder', 'property', 'operation', 'low', 'high')
admin.site.register(UserSavedSearchFavorite, UserSavedSearchFavoriteAdmin)
class OrderBEDefAdmin(admin.ModelAdmin):
list_display = ('orderType', 'businessEntity')
admin.site.register(OrderBEDef, OrderBEDefAdmin)
class BPBEDefAdmin(admin.ModelAdmin):
list_display = ('bpType', 'businessEntity')
admin.site.register(BPBEDef, BPBEDefAdmin)
class ViewTypeAdmin(admin.ModelAdmin):
list_display = ('key', 'description')
admin.site.register(ViewType, ViewTypeAdmin)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/layout/_computed.py
|
Python
|
mit
| 395
| 0
|
import _plotly_utils.basevalidators
class ComputedValidator(_plotly_utils.basevalidators.AnyValidator):
def __init__(self, plotly_name="computed", parent_name="layout", **kwargs):
super(ComputedValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)
|
rajrohith/blobstore
|
samples/file/__init__.py
|
Python
|
apache-2.0
| 902
| 0.003333
|
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from .sas_usage import FileSasSamples
from .share_usage import ShareSamples
from .directory_usage import DirectorySamples
from .file_usage import FileSamples
|
wallnerryan/quantum_migrate
|
quantum/plugins/cisco/nexus/cisco_nexus_plugin_v2.py
|
Python
|
apache-2.0
| 9,634
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, Cisco Systems, Inc.
# @author: Edgar Magana, Cisco Systems, Inc.
# @author: Arvind Somya, Cisco Systems, Inc. (asomya@cisco.com)
#
"""
Plugin for the Nexus OS driver
"""
import logging
from quantum.common import exceptions as exc
from quantum.openstack.common import importutils
from quantum.plugins.cisco.common import cisco_constants as const
from quantum.plugins.cisco.common import cisco_credentials_v2 as cred
from quantum.plugins.cisco.common import cisco_exceptions as excep
from quantum.plugins.cisco.db import network_db_v2 as cdb
from quantum.plugins.cisco.db import nexus_db_v2 as nxos_db
from quantum.plugins.cisco.l2device_plugin_base import L2DevicePluginBase
from quantum.plugins.cisco.nexus import cisco_nexus_configuration as conf
LOG = logging.getLogger(__name__)
class NexusPlugin(L2DevicePluginBase):
"""
    Nexus Plugin Main Class
"""
_networks = {}
def __init__(self):
"""
Extracts the configuration parameters from the configuration file
"""
self._client = importutils.import_object(conf.NEXUS_DRIVER)
LOG.debug(_("Loaded driver %s"), conf.NEXUS_DRIVER)
self._nexus_switches = conf.NEXUS_DETAILS
self.credentials = {}
def get_credential(self, nexus_ip):
if nexus_ip not in self.credentials:
_nexus_username = cred.Store.get_username(nexus_ip)
_nexus_password = cred.Store.get_password(nexus_ip)
self.credentials[nexus_ip] = {
'username': _nexus_username,
'password': _nexus_password
}
return self.credentials[nexus_ip]
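    # Illustrative caching behaviour (hypothetical IP): the first call to
    # get_credential('10.0.0.5') reads the credential store; later calls
    # return the memoized {'username': ..., 'password': ...} dict.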
def get_all_networks(self, tenant_id):
"""
Returns a dictionary containing all
<network_uuid, network_name> for
the specified tenant.
"""
LOG.debug(_("NexusPlugin:get_all_networks() called"))
return self._networks.values()
def create_network(self, tenant_id, net_name, net_id, vlan_name, vlan_id,
host, instance):
"""
Create a VLAN in the appropriate switch/port,
        and configure the appropriate interfaces
for this VLAN
"""
LOG.debug(_("NexusPlugin:create_network() called")
|
)
# Grab the switch IP and port for this host
switch_ip = ''
port_id = ''
for switch in self._nexus_switches.keys():
for hostname in self._nexus_switches[switch].keys():
if str(hostname) == str(host):
switch_ip = switch
port_id = self._nexus_switches[switch][hostname]['ports']
# Check if this network is already in the DB
binding = nxos_db.get_port_vlan_switch_binding(
port_id, vlan_id, switch_ip)
if not binding:
_nexus_ip = switch_ip
_nexus_ports = (port_id,)
_nexus_ssh_port = \
self._nexus_switches[switch_ip]['ssh_port']['ssh_port']
_nexus_creds = self.get_credential(_nexus_ip)
_nexus_username = _nexus_creds['username']
_nexus_password = _nexus_creds['password']
# Check for vlan/switch binding
vbinding = nxos_db.get_nexusvlan_binding(vlan_id, switch_ip)
if not vbinding:
# Create vlan and trunk vlan on the port
self._client.create_vlan(
vlan_name, str(vlan_id), _nexus_ip,
_nexus_username, _nexus_password,
_nexus_ports, _nexus_ssh_port, vlan_id)
else:
# Only trunk vlan on the port
man = self._client.nxos_connect(_nexus_ip,
int(_nexus_ssh_port),
_nexus_username,
_nexus_password)
self._client.enable_vlan_on_trunk_int(man,
port_id,
vlan_id)
nxos_db.add_nexusport_binding(port_id, str(vlan_id),
switch_ip, instance)
new_net_dict = {const.NET_ID: net_id,
const.NET_NAME: net_name,
const.NET_PORTS: {},
const.NET_VLAN_NAME: vlan_name,
const.NET_VLAN_ID: vlan_id}
self._networks[net_id] = new_net_dict
return new_net_dict
def delete_network(self, tenant_id, net_id, **kwargs):
"""
Deletes the VLAN in all switches, and removes the VLAN configuration
from the relevant interfaces
"""
LOG.debug(_("NexusPlugin:delete_network() called"))
def get_network_details(self, tenant_id, net_id, **kwargs):
"""
Returns the details of a particular network
"""
LOG.debug(_("NexusPlugin:get_network_details() called"))
network = self._get_network(tenant_id, net_id)
return network
def update_network(self, tenant_id, net_id, **kwargs):
"""
Updates the properties of a particular
Virtual Network.
"""
LOG.debug(_("NexusPlugin:update_network() called"))
def get_all_ports(self, tenant_id, net_id, **kwargs):
"""
This is probably not applicable to the Nexus plugin.
Delete if not required.
"""
LOG.debug(_("NexusPlugin:get_all_ports() called"))
def create_port(self, tenant_id, net_id, port_state, port_id, **kwargs):
"""
This is probably not applicable to the Nexus plugin.
Delete if not required.
"""
LOG.debug(_("NexusPlugin:create_port() called"))
def delete_port(self, device_id, vlan_id):
"""
Delete port bindings from the database and scan
whether the network is still required on
the interfaces trunked
"""
LOG.debug(_("NexusPlugin:delete_port() called"))
# Delete DB row for this port
row = nxos_db.get_nexusvm_binding(vlan_id, device_id)
if row:
nxos_db.remove_nexusport_binding(row['port_id'], row['vlan_id'],
row['switch_ip'],
row['instance_id'])
# Check for any other bindings with the same vlan_id and switch_ip
bindings = nxos_db.get_nexusvlan_binding(
row['vlan_id'], row['switch_ip'])
if not bindings:
# Delete this vlan from this switch
_nexus_ip = row['switch_ip']
_nexus_ports = (row['port_id'],)
_nexus_ssh_port = \
self._nexus_switches[_nexus_ip]['ssh_port']['ssh_port']
_nexus_creds = self.get_credential(_nexus_ip)
_nexus_username = _nexus_creds['username']
_nexus_password = _nexus_creds['password']
self._client.delete_vlan(
str(row['vlan_id']), _nexus_ip,
_nexus_username, _nexus_password,
_nexus_ports, _nexus_ssh_port)
return row['instance_id']
def update_port(self, tenant_id, net_id, port_id, port_state, **kwargs):
"""
This is probably not applicable to the Nexus plugin.
Delete if not required.
"""
LOG.debug(_("NexusPlugin:update_port() called"))
d
|
harunyasar/rabbitmq_playground
|
default_exchange_receiver.py
|
Python
|
gpl-3.0
| 779
| 0.002567
|
from sender import *
import threading
QUEUE_NAME = 'event_queue'
class CompetingReceiver(object):
def __init__(self):
self.connection = Connection().initialize()
def receive(self):
self.connection.channel.queue_declare(QUEUE_NAME, False, False, False, None)
self.connection.channel.basic_consume(self.connection.callback, QUEUE_NAME, True)
self.connection.channel.start_consuming()
if __name__ == '__main__':
    connection1 = CompetingReceiver()
connection2 = CompetingReceiver()
    t1 = threading.Thread(target=connection1.receive)  # pass the bound method; calling it here would block before the thread starts
    t2 = threading.Thread(target=connection2.receive)
t1.start()
t2.start()
t1.join()
t2.join()
connection1.connection.destroy()
connection2.connection.destroy()
|
teoreteetik/api-snippets
|
lookups/lookup-get-basic-example-1/lookup-get-basic-example-1.6.x.py
|
Python
|
mit
| 406
| 0
|
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACCOUNT_SID"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
number = client.lookups.phone_numbers("+15108675309").fetch(type="carrier")
print(number.carrier['type'])
print(number.carrier['name'])
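# Illustrative output (not from a real lookup): the two prints above emit the
# line type and the carrier name, e.g. "mobile" and "T-Mobile USA, Inc.".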
|
xskh2007/zjump
|
dbtool/test.py
|
Python
|
gpl-2.0
| 722
| 0.009772
|
# -*- coding: UTF-8 -*-
# Source: compiled from the "Crazy Ant" blog, www.crazyant.net
import MySQLdb as mdb
import sys
#get the database connection object
#con = mdb.connect('192.168.2.117', 'root', 'zzjr#2015', 'disconf')
con = mdb.connect('localhost', 'root', '', 'jumpserver')
with con:
    #get an ordinary query cursor
cur = con.cursor()
cur.execute("select * from juser_user")
rows = cur.fetchall()
    #get the cursor's description metadata
desc = cur.description
print 'cur.description:',desc
    #print the table header, i.e. the field names
for i in desc:
print i[0]
print "%s %3s" %
|
(desc[0][0], desc[1][0])
# print rows[2][11].decode('ascii').encode('utf-8')
print rows[2][11]
|
rspavel/spack
|
var/spack/repos/builtin/packages/gdk-pixbuf/package.py
|
Python
|
lgpl-2.1
| 3,882
| 0.002061
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class GdkPixbuf(Package):
"""The Gdk Pixbuf is a toolkit for image loading and pixel buffer
manipulation. It is used by GTK+ 2 and GTK+ 3 to load and
manipulate images. In the past it was distributed as part of
    GTK+ 2 but it was split off into a separate package in
preparation for the change to GTK+ 3."""
homepage = "https://developer.gnome.org/gdk-pixbuf/"
url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/2.40/gdk-pixbuf-2.40.0.tar.xz"
list_url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/"
list_depth = 1
version('2.40.0', sha256='1582595099537ca8ff3b99c6804350b4c058bb8ad67411bbaae024ee7cead4e6')
version('2.38.2', sha256='73fa651ec0d89d73dd3070b129ce2203a66171dfc0bd2caa3570a9c93d2d0781')
version('2.38.0', sha256='dd50973c7757bcde15de6bcd3a6d462a445efd552604ae6435a0532fbbadae47')
version('2.31.2', sha256='9e467ed09894c802499fb2399cd9a89ed21c81700ce8f27f970a833efb1e47aa')
variant('x11', default=False, description="Enable X11 support")
depends_on('meson@0.46.0:', type='build', when='@2.37.92:')
depends_on('meson@0.45.0:', type='build', when='@2.37.0:')
depends_on('ninja', type='build', when='@2.37.0:')
depends_on('shared-mime-info', type='build', when='@2.36.8: platform=linux')
depends_on('shared-mime-info', type='build', when='@2.36.8: platform=cray')
depends_on('pkgconfig', type='build')
# Building the man pages requires libxslt and the Docbook stylesheets
depends_on('libxslt', type='build')
depends_on('docbook-xsl', type='build')
depends_on('gettext')
depends_on('glib@2.38.0:')
depends_on('jpeg')
depends_on('libpng')
depends_on('zlib')
depends_on('libtiff')
depends_on('gobject-introspection')
depends_on('libx11', when='+x11')
# Replace the docbook stylesheet URL with the one that our
# docbook-xsl package uses/recognizes.
patch('docbook-cdn.patch')
def url_for_version(self, version):
url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/{0}/gdk-pixbuf-{1}.tar.xz"
return url.format(version.up_to(2), version)
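    # For instance (illustrative): for version 2.40.0, version.up_to(2) is
    # '2.40', yielding .../gdk-pixbuf/2.40/gdk-pixbuf-2.40.0.tar.xz.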
def setup_dependent_build_environment(self, env, dependent_spec):
env.prepend_path("XDG_DATA_DIRS", self.prefix.share)
def setup_dependent_run_environment(self, env, dependent_spec):
env.prepend_path("XDG_DATA_DIRS", self.prefix.share)
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
meson_args = std_meson_args
meson_args += ['-Dx11={0}'.format('+x11' in spec)]
meson('..', *meson_args)
ninja('-v')
if self.run_tests:
ninja('test')
ninja('install')
def configure_args(self):
args = []
# disable building of gtk-doc files following #9771
args.append('--disable-gtk-doc-html')
true = which('true')
args.append('GTKDOC_CHECK={0}'.format(true))
args.append('GTKDOC_CHECK_PATH={0}'.format(true))
args.append('GTKDOC_MKPDF={0}'.format(true))
args.append('GTKDOC_REBASE={0}'.format(true))
return args
@when('@:2.36')
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix), *self.configure_args())
make()
if self.run_tests:
make('check')
make('install')
if self.run_tests:
make('installcheck')
def setup_build_environment(self, env):
# The "post-install.sh" script uses gdk-pixbuf-query-loaders,
# which was installed earlier.
env.prepend_path('PATH', self.prefix.bin)
|
xenigmax/seqan
|
util/bin/demo_checker.py
|
Python
|
bsd-3-clause
| 4,665
| 0.005145
|
#!/usr/bin/env python2
"""Demo checker script.
Given a demo .cpp file PATH.cpp we can make it a small test if there is a file
PATH.cpp.stdout and/or PATH.cpp.stderr. The test is implemented using this
script.
The script is called with the options --binary-path and one or both of
--stdout-path and --stderr-path. The demo is executed and the test succeeds
if the exit code is 0 and the standard/error output is the same as in the
.stdout/.stderr file. If there is output and the file is missing then this is
a failure as well.
"""
__author__ = """Manuel Holtgrewe <manuel.holtgrewe@fu-berlin.de>
Temesgen H. Dadi <temesgen.dadi@fu-berlin.de>
"""
import argparse
import difflib
import subprocess
import sys
import re
def t(s):
"""Force Windows line endings to Unix line endings."""
return s.replace("\r\n", "\n")
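# e.g. (illustrative) t('line1\r\nline2') == 'line1\nline2'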
def fuzzyEqual(pattern, text):
"""checks if the expected output is eqal to the actualoutput using a reqex
use the literal [VAR] if the part of the output is not expected to be the same all the time.
"""
if len(pattern) != len(text):
print >> sys.stderr, 'Number of lines differ. Expected output has %s lines whereas actual has %s lines.' % (len(pattern), len(text))
return False
for i in range(len(pattern)):
T = text[i]
P = pattern[i]
if T == P :
continue
else :
if '[VAR]' not in P:
print >> sys.stderr, 'Line %s is different between expected and actual outputs.' % (i)
return False
else:
P = (re.escape(P)).replace('\\[VAR\\]', "[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?")
r = re.compile(P)
if re.match(r, T) == None:
print >> sys.stderr, 'Line %s is different (REGEX) between expected and actual outputs.' % (i)
return False
return True
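# Hypothetical use of the [VAR] wildcard above: the pattern line
# 'elapsed [VAR] s' matches the actual line 'elapsed 0.25 s' because [VAR]
# stands in for one (optionally signed or scientific) number, but it does not
# match 'elapsed fast s'.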
def loadExpected(args):
"""Load the expected file contents."""
out, err = '', ''
    if args.stdout_path:
with open(args.stdout_path, 'rb') as f:
out = f.read()
if args.stderr_path:
with open(args.stderr_path, 'rb') as f:
err = f.read()
return t(out.strip()).split('\n'), t(err.strip()).split('\n')
def runDemo(args):
cmd = [args.binary_path]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdoutbuff, stderrbuff = p.communicate()
return t(stdoutbuff.strip()).split('\n'), t(stderrbuff.strip()).split('\n'), p.returncode
def main():
"""Program entry point."""
parser = argparse.ArgumentParser(description='Run SeqAn demos as apps.')
    parser.add_argument('--binary-path', dest='binary_path', required=True,
help='Path to the demo binary to execute.')
parser.add_argument('--stdout-path', dest='stdout_path',
help='Path to standard out file to compare to.',
default=None)
parser.add_argument('--stderr-path', dest='stderr_path',
help='Path to standard error file to compare to.',
default=None)
args = parser.parse_args()
print >>sys.stderr, 'Running %s.' % args.binary_path
actual_out, actual_err, ret = runDemo(args)
if ret != 0:
print >>sys.stderr, 'ERROR: Return code of %s was %s.' % (args.binary_path, ret)
return 1
else:
print >>sys.stderr, 'Return code was %s.' % ret
print >>sys.stderr, 'Loading files "%s", "%s".' % (args.stdout_path, args.stderr_path)
expected_out, expected_err = loadExpected(args)
is_stdout_as_expected = fuzzyEqual(expected_out, actual_out)
is_stderr_as_expected = fuzzyEqual(expected_err, actual_err)
if not is_stdout_as_expected:
print >>sys.stderr, 'The standard output was not as expected!'
l = difflib.context_diff(expected_out, actual_out,
fromfile='expected', tofile='actual')
print >>sys.stderr, '\n'.join(l)
else:
print >>sys.stderr, 'Standard output was as expected.'
if not is_stderr_as_expected:
print >>sys.stderr, 'The standard error was not as expected!'
l = difflib.context_diff(expected_err, actual_err,
fromfile='expected', tofile='actual')
print >>sys.stderr, '\n'.join(l)
else:
print >>sys.stderr, 'Standard error was as expected.'
# here we used not because we need return-code 0 (False) if test is successful
return not (is_stdout_as_expected and is_stderr_as_expected)
if __name__ == '__main__':
sys.exit(main())
|
ratnania/pyccel
|
pyccel/codegen/__init__.py
|
Python
|
mit
| 125
| 0.024
|
# -*- coding: UTF-8 -*-
from .codegen import *
from .cmake import *
from .printing import *
from .utilities import *
|
agoose77/hivesystem
|
dragonfly/logic/toggle.py
|
Python
|
bsd-2-clause
| 568
| 0.001761
|
import bee
from bee.segments import *
class toggle(bee.worker):
inp = antenna("push", "trigger")
on = variable("bool")
parameter(on, False)
state = output("pull", "bool")
connect(on, state)
true = output("push", "trigger")
trig_true = triggerfunc(true)
false = output("push", "trigger")
    trig_false = triggerfunc(false)
@modifier
def m_trig(self):
if self.on:
self.on = False
self.trig_false()
else:
self.on = True
self.trig_true()
trigger(inp, m_trig)
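# Illustrative behaviour (assumed wiring): starting from the default
# on=False, the first push on "inp" sets on=True and fires the "true" output;
# the next push sets on=False and fires "false", alternating thereafter.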
|
SnowWalkerJ/quantlib
|
quant/data/wind/tables/cbondbalancesheet.py
|
Python
|
gpl-3.0
| 14,897
| 0.019439
|
from ....common.db.sql import VARCHAR, Numeric as NUMBER, DateTime as DATETIME, Column, BaseModel, CLOB, DATE
VARCHAR2 = VARCHAR
class CBondBalanceSheet(BaseModel):
"""
4.158 中国债券发行主体资产负债表
Attributes
----------
object_id: VARCHAR2(100)
对象ID
s_info_compcode: VARCHAR2(40)
公司ID
ann_dt: VARCHAR2(8)
公告日期
report_period: VARCHAR2(8)
报告期
statement_type: VARCHAR2(10)
报表类型 报表类型:408001000:合并报表408004000:合并报表(调整)408005000:合并报表(更正前)408006000:母公司报表408009000:母公司报表(调整)408010000:母公司报表(更正前)
crncy_code: VARCHAR2(10)
货币代码 CNY
monetary_cap: NUMBER(20,4)
货币资金
tradable_fin_assets: NUMBER(20,4)
交易性金融资产
notes_rcv: NUMBER(20,4)
应收票据
acct_rcv: NUMBER(20,4)
应收账款
oth_rcv: NUMBER(20,4)
其他应收款
prepay: NUMBER(20,4)
预付款项
dvd_rcv: NUMBER(20,4)
应收股利
int_rcv: NUMBER(20,4)
应收利息
inventories: NUMBER(20,4)
存货
consumptive_bio_assets: NUMBER(20,4)
消耗性生物资产
deferred_exp: NUMBER(20,4)
待摊费用
non_cur_assets_due_within_1y: NUMBER(20,4)
一年内到期的非流动资产
settle_rsrv: NUMBER(20,4)
结算备付金
loans_to_oth_banks: NUMBER(20,4)
拆出资金
prem_rcv: NUMBER(20,4)
应收保费
rcv_from_reinsurer: NUMBER(20,4)
应收分保账款
rcv_from_ceded_insur_cont_rsrv: NUMBER(20,4)
应收分保合同准备金
red_monetary_cap_for_sale: NUMBER(20,4)
买入返售金融资产
oth_cur_assets: NUMBER(20,4)
其他流动资产
tot_cur_assets: NUMBER(20,4)
流动资产合计
fin_assets_avail_for_sale: NUMBER(20,4)
可供出售金融资产
held_to_mty_invest: NUMBER(20,4)
持有至到期投资
long_term_eqy_invest: NUMBER(20,4)
长期股权投资
invest_real_estate: NUMBER(20,4)
投资性房地产
time_deposits: NUMBER(20,4)
定期存款
oth_assets: NUMBER(20,4)
其他资产
long_term_rec: NUMBER(20,4)
长期应收款
fix_assets: NUMBER(20,4)
固定资产
const_in_prog: NUMBER(20,4)
在建工程
proj_matl: NUMBER(20,4)
工程物资
fix_assets_disp: NUMBER(20,4)
固定资产清理
productive_bio_assets: NUMBER(20,4)
生产性生物资产
oil_and_natural_gas_assets: NUMBER(20,4)
油气资产
intang_assets: NUMBER(20,4)
无形资产
    r_and_d_costs: NUMBER(20,4)
开发支出
goodwill: NUMBER(20,4)
商誉
long_term_deferred_exp: NUMBER(20,4)
长期待摊费用
deferred_tax_assets: NUMBER(20,4)
递延所得税资产
loans_and_adv_granted: NUMBER(20,4)
发放贷款及垫款
oth_non_cur_assets: NUMBER(20,4)
        其他非流动资产
tot_non_cur_assets: NUMBER(20,4)
非流动资产合计
cash_deposits_central_bank: NUMBER(20,4)
现金及存放中央银行款项
asset_dep_oth_banks_fin_inst: NUMBER(20,4)
存放同业和其它金融机构款项
precious_metals: NUMBER(20,4)
贵金属
derivative_fin_assets: NUMBER(20,4)
衍生金融资产
agency_bus_assets: NUMBER(20,4)
代理业务资产
subr_rec: NUMBER(20,4)
应收代位追偿款
rcv_ceded_unearned_prem_rsrv: NUMBER(20,4)
应收分保未到期责任准备金
rcv_ceded_claim_rsrv: NUMBER(20,4)
应收分保未决赔款准备金
rcv_ceded_life_insur_rsrv: NUMBER(20,4)
应收分保寿险责任准备金
rcv_ceded_lt_health_insur_rsrv: NUMBER(20,4)
应收分保长期健康险责任准备金
mrgn_paid: NUMBER(20,4)
存出保证金
insured_pledge_loan: NUMBER(20,4)
保户质押贷款
cap_mrgn_paid: NUMBER(20,4)
存出资本保证金
independent_acct_assets: NUMBER(20,4)
独立账户资产
clients_cap_deposit: NUMBER(20,4)
客户资金存款
clients_rsrv_settle: NUMBER(20,4)
客户备付金
incl_seat_fees_exchange: NUMBER(20,4)
其中:交易席位费
rcv_invest: NUMBER(20,4)
应收款项类投资
tot_assets: NUMBER(20,4)
资产总计
st_borrow: NUMBER(20,4)
短期借款
borrow_central_bank: NUMBER(20,4)
向中央银行借款
deposit_received_ib_deposits: NUMBER(20,4)
吸收存款及同业存放
loans_oth_banks: NUMBER(20,4)
拆入资金
tradable_fin_liab: NUMBER(20,4)
交易性金融负债
notes_payable: NUMBER(20,4)
应付票据
acct_payable: NUMBER(20,4)
应付账款
adv_from_cust: NUMBER(20,4)
预收款项
fund_sales_fin_assets_rp: NUMBER(20,4)
卖出回购金融资产款
handling_charges_comm_payable: NUMBER(20,4)
应付手续费及佣金
empl_ben_payable: NUMBER(20,4)
应付职工薪酬
taxes_surcharges_payable: NUMBER(20,4)
应交税费
int_payable: NUMBER(20,4)
应付利息
dvd_payable: NUMBER(20,4)
应付股利
oth_payable: NUMBER(20,4)
其他应付款
acc_exp: NUMBER(20,4)
预提费用
deferred_inc: NUMBER(20,4)
递延收益
st_bonds_payable: NUMBER(20,4)
应付短期债券
payable_to_reinsurer: NUMBER(20,4)
应付分保账款
rsrv_insur_cont: NUMBER(20,4)
保险合同准备金
acting_trading_sec: NUMBER(20,4)
代理买卖证券款
acting_uw_sec: NUMBER(20,4)
代理承销证券款
non_cur_liab_due_within_1y: NUMBER(20,4)
一年内到期的非流动负债
oth_cur_liab: NUMBER(20,4)
其他流动负债
tot_cur_liab: NUMBER(20,4)
流动负债合计
lt_borrow: NUMBER(20,4)
长期借款
bonds_payable: NUMBER(20,4)
应付债券
lt_payable: NUMBER(20,4)
长期应付款
specific_item_payable: NUMBER(20,4)
专项应付款
provisions: NUMBER(20,4)
预计负债
deferred_tax_liab: NUMBER(20,4)
递延所得税负债
deferred_inc_non_cur_liab: NUMBER(20,4)
递延收益-非流动负债
oth_non_cur_liab: NUMBER(20,4)
其他非流动负债
tot_non_cur_liab: NUMBER(20,4)
非流动负债合计
liab_dep_oth_banks_fin_inst: NUMBER(20,4)
同业和其它金融机构存放款项
derivative_fin_liab: NUMBER(20,4)
衍生金融负债
cust_bank_dep: NUMBER(20,4)
吸收存款
agency_bus_liab: NUMBER(20,4)
代理业务负债
oth_liab: NUMBER(20,4)
其他负债
prem_received_adv: NUMBER(20,4)
预收保费
deposit_received: NUMBER(20,4)
存入保证金
insured_deposit_invest: NUMBER(20,4)
保户储金及投资款
unearned_prem_rsrv: NUMBER(20,4)
未到期责任准备金
out_loss_rsrv: NUMBER(20,4)
未决赔款准备金
life_insur_rsrv: NUMBER(20,4)
寿险责任准备金
lt_health_insur_v: NUMBER(20,4)
长期健康险责任准备金
independent_acct_liab: NUMBER(20,4)
独立账户负债
incl_pledge_loan: NUMBER(20,4)
其中:质押借款
claims_payable: NUMBER(20,4)
应付赔付款
dvd_payable_insured: NUMBER(20,4)
应付保单红利
tot_liab: NUMBER(20,4)
负债合计
cap_stk: NUMBER(20,4)
股本
cap_rsrv: NUMBER(20,4)
资本公积金
special_rsrv: NUMBER(20,4)
专项储备
surplus_rsrv: NUMBER(20,4)
盈余公积金
undistributed_profit: NUMBER(20,4)
未分配利润
less_tsy_stk: NUMBER(20,4)
减:库存股
prov_nom_risks: NUMBER(20,4)
一般风险准备
cnvd_diff_foreign_curr_stat: NUMBER(20,4)
外币报表折算差额
unconfirmed_invest_loss: NUMBER(20,4)
未确认的投资损失
minority_int: NUMBER(20,4)
少数股东权益
tot_shrhldr_eqy_excl_min_int: NUMBER(20,4)
股东权益合计(不含少数股东权益)
tot_shrhldr_eqy_incl_min_int: NUMBER(20,4)
股东权益合计(含少数股东权益)
tot_liab_shrhldr_eqy: NUMBER(20,4)
负债及股东权益总计
opdate: DATETIME
opdate
opmode: VARCHAR(1)
opmode
"""
__tablename__ = "CBondBalanceSheet"
object_id = Column(VARCHAR2(100), primary_key=True)
s_info_compcode = Column(VARCHAR2(40))
ann_dt = Column(VARCHAR2(8))
report_period = Column(VARCHAR2(8))
statement_type = Column(VARCHAR2(10))
crncy_code = Column(VARCHAR2(10))
monetary_cap = Column(NUMBER(20,4))
tradable_fin_assets = Column(NUMBER(20,4))
notes_rcv = Column(NUMBER(20,4))
acct_rcv = Column(NUMBER(20,4))
oth_rcv = Column(NUMBER(20,4))
prepay = Column(NUMBER(20,4))
dvd_rcv = Column(NUMBER(20,4))
int_rcv = Column(NUMBER(20,4))
inventories = Column(NUMBER(20,4))
consumptive_bio_assets = Column(NUMBER(20,4))
deferred_exp = Column(NUMBER(20,4))
non_cur_assets_due_within_1y = Column(NUMBER(20,4))
settle_rsrv = Column(NUMBER(20,4))
    loans_to_oth_banks = Column(NUMBER(20,4))
|
pjurik2/pykarma
|
feeds/rss.py
|
Python
|
mit
| 3,205
| 0.005304
|
import os, sys
import random
import time
import feedparser
import itertools
import HTMLParser
from feed import Feed
if os.getcwd().rstrip(os.sep).endswith('feeds'):
os.chdir('..')
sys.path.insert(0, os.getcwd())
from gui_client import new_rpc
import web
import reddit
class RSSFeed(Feed):
def __init__(self):
self.title = 'RSS Feed'
self.streams = []
self.wait_range = (60, 70)
self.max_error_wait = 600
self.max_subs = 0
self.urls = set()
def configure(self):
pass
def watch(self, new_streams=None):
self.configure()
self.web = web.Web()
try:
self.rpc = new_rpc(self.title)
except:
self.rpc = None
print 'Warning: Running without RPC'
if new_streams is None:
new_streams = []
streams = self.streams + new_streams
for url in itertools.cycle(streams):
print url
self.check_feed(url)
time.sleep(random.randint(*self.wait_range))
def check_feed(self, url):
for fail_count in itertools.count():
try:
datad = feedparser.parse(url)
except:
print 'Parse error for', url
time.sleep(min(2 ** fail_count, self.max_error_wait))
else:
break
try:
posts = datad['items']
except:
print 'No items field for', url
posts = []
for post in posts:
self.check_post(post)
def check_post(self, post):
if ('link' not in post):
return False
url = self.url_pre_filter(post['link'])
try:
req = self.web.get(url)
            url = req.geturl()
except:
print 'URL retrieval error for ', url
            return False
url = self.url_post_filter(url)
if (url in self.urls) or not url.startswith('http://'):
return False
self.urls.add(url)
feed_title = self.default_title_filter(post.get('title', ''))
page_title = self.default_title_filter(self.web.title(req))
title = self.title_filter(page_title, feed_title)
if self.rpc is not None:
subreddit = self.rpc.get_title_subreddit(title)
keywords = self.rpc.get_title_keywords(title)
if self.rpc.get_link_posted_count(url, title) <= self.max_subs:
stats = self.rpc.get_learned_stats(title, keywords)
self.rpc.gui_link_add(self.title, title, url, subreddit, keywords, **stats)
try:
req.close()
except:
pass
print title
print url
def url_pre_filter(self, url):
return url
def url_post_filter(self, url):
return url
def default_title_filter(self, title):
h = HTMLParser.HTMLParser()
return h.unescape(title)
def title_filter(self, page_title, feed_title):
return page_title
if __name__ == '__main__':
f = RSSFeed()
f.watch(['http://www.physorg.com/rss-feed/'])
|
stormi/tsunami
|
src/secondaires/cuisine/commandes/recettes/__init__.py
|
Python
|
bsd-3-clause
| 2,392
| 0.003349
|
# -*-coding:Utf-8 -*
# Copyright (c) 2012 NOEL-BARON Léo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'recettes' et ses sous-commandes.
Dans ce fichier se trouve la commande même.
"""
from primaires.interpreteur.commande.commande import Commande
from .editer import PrmEditer
from .lister import PrmLister
from .supprimer import PrmSupprimer
class CmdRecettes(Commande):
"""Commande 'recettes'.
"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "recettes", "recipes")
self.groupe = "administrateur"
self.aide_courte = "manipulation des recettes"
self.aide_longue = \
""
def ajouter_parametres(self):
"""Ajout des paramètres"""
self.ajouter_parametre(PrmEditer())
self.ajouter_parametre(PrmLister())
self.ajouter_parametre(PrmSupprimer())
|
cyanfish/heltour
|
heltour/tournament/migrations/0146_league_description.py
|
Python
|
mit
| 414
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-02-23 22:12
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
        ('tournament', '0145_auto_20170211_1825'),
]
operations = [
migrations.AddField(
model_name='league',
name='description',
field=models.TextField(blank=True),
),
]
|
asiviero/brbeerindex
|
beerindex/spiders/beerspider.py
|
Python
|
lgpl-2.1
| 4,667
| 0.012642
|
from scrapy.spiders import Spider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request
from scrapy.conf import settings
from beerindex.items import BeerindexItem
import logging
import lxml.html
from urlparse import urlparse
import re
class BeerSpider(Spider):
name = "beerspider"
beer_sites = {
'www.wbeer.com.br':
{
"start_url" : 'https://www.wbeer.com.br/browse.ep?cID=103354',
"next_link" : '.paginacao li.prox a::attr(href)',
"product_link" : '.catalogo-lista .lista .informacoes a::attr("href")',
"xpath_title" : "//span[@itemprop='name']//text()",
"xpath_price" : "//div[@class='preco-por']//text()",
"xpath_style
|
" : "
|
//div[@class='resumo']//span[@class='nome-tipo']//text()"
},
'www.emporioveredas.com.br' : {
"start_url" : 'http://www.emporioveredas.com.br/cervejas-importadas.html',
"next_link" : '.pager a.next::attr(href)',
"product_link" : '.products-grid a.product-image ::attr("href")',
"xpath_title" : "//h1[@itemprop='name']//text()",
"xpath_price" : "//div[@class='product-shop']//span[@itemprop='price']//text()",
"xpath_style" : "//table[@id='product-attribute-specs-table']//tr[contains(.,'Estilo')]//td[last()]//text()"
},
'www.mundodascervejas.com' : {
"start_url" : 'http://www.mundodascervejas.com/buscar?q=cerveja',
"next_link" : '.topo .pagination a[rel="next"]::attr("href")',
"product_link" : '#listagemProdutos a.produto-sobrepor::attr("href")',
"xpath_title" : "//h1[@itemprop='name']//text()",
"xpath_price" : "//div[@class='principal']//div[contains(@class,'preco-produto')]//strong[contains(@class,'preco-promocional')]//text()",
"xpath_style" : "//div[@id='descricao']//table//tr[contains(.,'Estilo')]//td[last()]//text()"
},
'www.clubeer.com.br': {
"start_url" : 'http://www.clubeer.com.br/loja',
"next_link" : '#pagination li.current + li a::attr("href")',
"product_link" : '.minhascervejas li .areaborder > a:first-child::attr("href")',
"xpath_title" : "//h1[@itemprop='name']//text()",
"xpath_price" : "//div[@id='principal']//div[contains(@class,'areaprecos')]//span[@itemprop='price']//text()",
"xpath_style" : "//div[contains(@class,'areaprodutoinfoscontent')]//ul[contains(.,'ESTILO')]//li[position()=2]//text()"
},
'www.clubedomalte.com.br': {
"start_url" : 'http://www.clubedomalte.com.br/pais',
"next_link" : '.paginacao li.pg:last-child a::attr("href")',
"product_link" : '.mainBar .spotContent > a:first-child::attr("href")',
"xpath_title" : "//h1[@itemprop='name']//text()",
"xpath_price" : "//div[contains(@class,'interna')]//div[contains(@class,'preco')]//*[@itemprop='price']//text()",
"xpath_style" : "//div[contains(@class,'areaprodutoinfoscontent')]//ul[contains(.,'ESTILO')]//li[position()=2]//text()"
}
}
def domain_from_url(self,url):
parsed = urlparse(url)
return parsed.netloc
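    # e.g. (illustrative) domain_from_url('https://www.wbeer.com.br/browse.ep')
    # returns 'www.wbeer.com.br', the key used to pick selectors in beer_sites.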
#allowed_domains = ["www.cervejastore.com.br"]
# start_urls = ['http://www.mundodascervejas.com/buscar?q=cerveja']
# start_urls = ["http://www.emporioveredas.com.br/cervejas-importadas.html"]
start_urls = [beer_sites[store]["start_url"] for store in beer_sites]
def parse(self,response):
domain = self.domain_from_url(response.url)
for url in response.css(self.beer_sites[domain]["next_link"]).extract():
request = Request(response.urljoin(url.strip()), self.parse)
yield request
titles = response.css(self.beer_sites[domain]["product_link"]).extract()
for title in titles:
yield Request(response.urljoin(title), self.parse_product)
def parse_product(self,response):
domain = self.domain_from_url(response.url)
item = BeerindexItem()
item["name"] = response.xpath(self.beer_sites[domain]["xpath_title"]).extract_first()
item["style"] = response.xpath(self.beer_sites[domain]["xpath_style"]).extract_first()
item["link"] = response.url
item["price"] = "".join(response.xpath(self.beer_sites[domain]["xpath_price"]).extract())
item["price"] = re.sub(r"\s+", "", item["price"], flags=re.UNICODE)
item["price"] = re.sub(r"[^\d,\.+]", "", item["price"], flags=re.UNICODE)
item["price"] = re.sub(r",", ".", item["price"], flags=re.UNICODE)
yield item
|
Makeystreet/makeystreet
|
woot/apps/catalog/migrations/0100_auto__add_field_space_kind__add_field_space_logo__add_field_space_lati.py
|
Python
|
apache-2.0
| 50,619
| 0.007645
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Space.kind'
db.add_column(u'catalog_space', 'kind',
self.gf('django.db.models.fields.CharField')(default='', max_length=200),
keep_default=False)
# Adding field 'Space.logo'
db.add_column(u'catalog_space', 'logo',
self.gf('django.db.models.fields.URLField')(max_length=400, null=True, blank=True),
keep_default=False)
# Adding field 'Space.latitude'
db.add_column(u'catalog_space', 'latitude',
self.gf('django.db.models.fields.IntegerField')(default=0, null=True, blank=True),
keep_default=False)
# Adding field 'Space.longitude'
db.add_column(u'catalog_space', 'longitude',
                      self.gf('django.db.models.fields.IntegerField')(default=0, null=True, blank=True),
                      keep_default=False)
def backwards(self, orm):
# Deleting field 'Space.kind'
db.delete_column(u'catalog_space', 'kind')
# Deleting field 'Space.logo'
db.delete_column(u'catalog_space', 'logo')
# Deleting field 'Space.latitude'
db.delete_column(u'catalog_space', 'latitude')
# Deleting field 'Space.longitude'
db.delete_column(u'catalog_space', 'longitude')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalog.cfistoreitem': {
'Meta': {'object_name': 'CfiStoreItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'item': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.Product']", 'unique': 'True'}),
'likers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'cfi_store_item_likes'", 'symmetrical': 'False', 'through': "orm['catalog.LikeCfiStoreItem']", 'to': u"orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.comment': {
'Meta': {'object_name': 'Comment'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.documentation': {
'Meta': {'object_name': 'Documentation'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.emailcollect': {
'Meta': {'object_name': 'EmailCollect'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'catalog.image': {
'Meta': {'object_name': 'Image'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.Comment']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'large_url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'small_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'images'", 'null': 'True', 'to': u"orm['auth.User']"})
},
'catalog.like': {
'Meta': {'object_name': 'Like'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
|
tannoa2/RackHD
|
test/tests/switch/test_rackhd11_switch_pollers.py
|
Python
|
apache-2.0
| 12,932
| 0.004098
|
'''
Copyright 2016, EMC, Inc.
Author(s):
FIT test script template
'''
import fit_path # NOQA: unused import
import sys
import subprocess
import pprint
import fit_common
import test_api_utils
# LOCAL
NODELIST = []
def get_switches():
    # returns a list of node IDs for all nodes of type 'switch' in the catalog,
    # or only the user-specified nodeid if one was given on the command line
nodelist = []
# check if user specified a single nodeid to run against
# user must know the nodeid and any check for a valid nodeid is skipped
nodeid = fit_common.fitargs()['nodeid']
if nodeid != 'None':
nodelist.append(nodeid)
else:
catalog = fit_common.rackhdapi('/api/1.1/nodes')
for nodeentry in catalog['json']:
if nodeentry['type'] == 'switch':
nodelist.append(nodeentry['id'])
return nodelist
NODELIST = get_switches()
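# NODELIST is populated at import time so the skipIf decorator below can see
# whether any switches exist before the test class is defined.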
def get_rackhd_nodetype(nodeid):
nodetype = ""
# get the node info
mondata = fit_common.rackhdapi("/api/1.1/nodes/" + nodeid)
if mondata['status'] != 200:
print "Incorrect HTTP return code on nodeid, expected 200, received: {}".format(mondata['status'])
else:
# get the sku id contained in the node
sku = mondata['json'].get("sku")
if sku:
skudata = fit_common.rackhdapi("/api/1.1/skus/" + sku)
if skudata['status'] != 200:
print "Incorrect HTTP return code on sku, expected 200, received: {}".format(skudata['status'])
else:
nodetype = mondata['json'].get("name")
else:
nodetype = mondata['json'].get("name")
print "nodeid {} did not return a valid sku in get_rackhd_nodetype".format(nodeid)
return nodetype
from nose.plugins.attrib import attr
@attr(all=True, regression=True, smoke=True)
@fit_common.unittest.skipIf(NODELIST == [],"No switches defined, skipping test.")
class rackhd11_switch_pollers(fit_common.unittest.TestCase):
def test_get_id_pollers(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display the poller data per node."
print "\t{0}".format(msg)
for node in NODELIST:
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code")
for item in mon_data['json']:
# check required fields
self.assertGreater(item['pollInterval'], 0, 'pollInterval field error')
                for subitem in ['node', 'config', 'createdAt', 'id', 'name']:
self.assertIn(subitem, item, subitem + ' field error')
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poll_data = fit_common.rackhdapi("/api/1.1/pollers/" + poller_id)
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
print fit_common.json.dumps(poll_data['json'], indent=4)
def test_verify_poller_headers(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Verify header data reported on the poller"
print "\t{0}".format(msg)
for node in NODELIST:
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code")
nodetype = get_rackhd_nodetype(node)
if fit_common.VERBOSITY >= 2:
print "\nNode: {} Type: {}".format(node, nodetype)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poller_data = test_api_utils.get_poller_data_by_id(poller_id)
if fit_common.VERBOSITY >= 3:
print "\nPoller: " + poller + " ID: " + str(poller_id)
print fit_common.json.dumps(poller_data, indent=4)
def test_verify_poller_data(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Check number of polls being kept for poller ID"
print "\t{0}".format(msg)
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
nodetype = get_rackhd_nodetype(node)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poller_data = test_api_utils.get_poller_data_by_id(poller_id)
poll_len = len(poller_data)
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
print "Number of polls for "+ str(poller_id) + ": " + str(len(poller_data))
self.assertLessEqual(poll_len, 10, 'Number of cached polls should not exceed 10')
def test_get_current_poller_data(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display most current data from poller"
print "\t{0}".format(msg)
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
nodetype = get_rackhd_nodetype(node)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
monurl = "/api/1.1/pollers/" + str(poller_id) + "/data/current"
mondata = fit_common.rackhdapi(url_cmd=monurl)
if fit_common.VERBOSITY >= 2:
print fit_common.json.dumps(mondata, indent=4)
def test_get_poller_status_timestamp(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display status and ti
|
mestamp from current poll"
print "\t{0}".format(msg)
for node in NODELIST:
            if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
nodetype = get_rackhd_nodetype(node)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
monurl = "/api/1.1/pollers/" + str(poller_id) + "/data/current"
mondata = fit_common.rackhdapi(url_cmd=monurl)
print "Return status", mondata['status']
if mondata['status'] == 200:
if fit_common.VERBOSITY >= 2:
print "Timestamp:", mondata['json'][0]['timestamp']
print fit_common.json.dumps(mondata['json'][0], indent=4)
def test_verify_poller_error_counter(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Check for Poller Errors"
print "\t{0}".format(msg)
errorlist = []
for node in NODELIST:
mon_data = fit_common.rackhdapi("/
|
teeple/pns_server
|
work/install/Python-2.7.4/Lib/test/test_setcomps.py
|
Python
|
gpl-2.0
| 3,847
| 0.00104
|
doctests = """
########### Tests mostly copied from test_listcomps.py ############
Test simple loop with conditional
>>> sum({i*i for i in range(100) if i&1 == 1})
166650
Test simple case
>>> {2*y + x + 1 for x in (0,) for y in (1,)}
set([3])
Test simple nesting
>>> list(sorted({(i,j) for i in range(3) for j in range(4)}))
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
Test nesting with the inner expression dependent on the outer
>>> list(sorted({(i,j) for i in range(4) for j in range(i)}))
[(1, 0), (2, 0), (2, 1), (3, 0), (3, 1), (3, 2)]
Make sure the induction variable is not exposed
>>> i = 20
>>> sum({i*i for i in range(100)})
328350
>>> i
20
Verify that syntax errors are raised for setcomps used as lvalues
>>> {y for y in (1,2)} = 10 # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: ...
>>> {y for y in (1,2)} += 10 # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: ...
Make a nested set comprehension that acts like set(range())
>>> def srange(n):
... return {i for i in range(n)}
>>> list(sorted(srange(10)))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Same again, only as a lambda expression instead of a function definition
>>> lrange = lambda n: {i for i in range(n)}
>>> list(sorted(lrange(10)))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Generators can call other generators:
>>> def grange(n):
... for x in {i for i in range(n)}:
... yield x
>>> list(sorted(grange(5)))
[0, 1, 2, 3, 4]
Make sure that None is a valid return value
>>> {None for i in range(10)}
set([None])
########### Tests for various scoping corner cases ############
Return lambdas that use the iteration variable as a default argument
>>> items = {(lambda i=i: i) for i in range(5)}
>>> {x() for x in items} == set(range(5))
True
Same again, only this time as a closure variable
>>> items = {(lambda: i) for i in range(5)}
>>> {x() for x in items}
set([4])
Another way to test that the iteration variable is local to the list comp
>>> items = {(lambda: i) for i in range(5)}
>>> i = 20
>>> {x() for x in items}
set([4])
And confirm that a closure can jump over the list comp scope
>>> items = {(lambda: y) for i in range(5)}
>>> y = 2
>>> {x() for x in items}
set([2])
We also repeat each of the above scoping tests inside a function
>>> def test_func():
... items = {(lambda i=i: i) for i in range(5)}
... return {x() for x in items}
>>> test_func() == set(range(5))
True
>>> def test_func():
... items = {(lambda: i) for i in range(5)}
... return {x() for x in items}
>>> test_func()
set([4])
>>> def test_func():
... items = {(lambda: i) for i in range(5)}
... i = 20
... return {x() for x in items}
>>> test_func()
set([4])
>>> def test_func():
... items = {(lambda: y) for i in range(5)}
... y = 2
... return {x() for x in items}
>>> test_func()
set([2])
"""
__test__ = {'doctests' : doctests}
def test_main(verbose=None):
import sys
from test import test_support
from test import test_setcomps
test_support.run_doctest(test_setcomps, verbose)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in range(len(counts)):
test_support.run_doctest(test_setcomps, verbose)
gc.collect()
counts[i] = sys.gettotalrefcount()
print(counts)
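        # A steadily growing sequence of counts would indicate a reference
        # leak; sys.gettotalrefcount() exists only in debug builds of Python.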
if __name__ == "__main__":
test_main(verbose=True)
|
maciekzdaleka/lab11
|
write-aws-queue.py
|
Python
|
mit
| 1,195
| 0.009205
|
# This script writes a message to an existing AWS SQS queue
#
# Author - Paul Doyle Nov 2015
#
#
import boto.sqs
import boto.sqs.queue
from boto.sqs.message import Message
from boto.sqs.connection import SQSConnection
from boto.exception import SQSError
import sys
import urllib2
# Get the keys from a specific url and then use them to connect to AWS Service
response = urllib2.urlopen('http://ec2-52-30-7-5.eu-west-1.compute.amazonaws.com:81/key')
html=response.read()
result = html.split(':')
#print (result[0])
#print (result[1])
access_key_id = result[0]
secret_access_key = result[1]
#print (access_key_id,secret_access_key)
# Set up a connection to the AWS service.
conn = boto.sqs.connect_to_region("eu-west-1", aws_access_key_id=access_key_id, aws_secret_access_key=secret_access_key)
student_number = 'C13470112'
#conn.delete_queue(sys.argv[1])
queue_name = student_number+sys.argv[1]
# Get the queue with the given name
rs = conn.get_queue(queue_name)
#rs = conn.get_all_queues()
print "adding message...."
rs.set_message_class(Message)
m = Message()
m.set_body(sys.argv[2])
rs.write(m)
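# Example invocation (illustrative; argv[1] is the queue-name suffix and
# argv[2] the message body):
#   python write-aws-queue.py myqueue "hello world"
# writes "hello world" to the queue named C13470112myqueue.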
|
wohllab/milkyway_proteomics
|
galaxy_milkyway_files/tools/wohl-proteomics/wohl_skyline/msstats_plots_wrapper.py
|
Python
|
mit
| 7,436
| 0.023534
|
import os, sys, re
import optparse
import shutil
import pandas
import numpy
import gc
import subprocess
#####################################
#This is a script to combine the output reports from
#Skyline, in preparation for MSstats! Let's get started.
#
#VERSION 0.70A
version="0.70A"
#DATE: 10/11/2016
date="10/11/2016"
#####################################
print "-----------------------------------------------------------------------"
print "Welcome to the MSstats wrapper for Galaxy, Wohlschlegel Lab UCLA"
print "Written by William Barshop"
print "Version: ",version
print "Date: ",date
basedir=os.getcwd()
####################################
#Argument parsing! So much fun!
#We'll use OptParse even though some
#people really rave about argparse...
#
#
# NB: With Optparse, if an option is
# not specified, it will take a
# value of None
####################################
parser = optparse.OptionParser()
parser.add_option("--experiment_file",action="store",type="string",dest="experiment_file")
parser.add_option("--folder",action="store",type="string",dest="operation_folder",default=".")
parser.add_option("--msstats-image-RData",action="store",type="string",dest="image_RData")
parser.add_option("--msstats-comparison-csv",action="store",type="string",dest="comparison_csv")
################# OUTPUTS ################################
parser.add_option("--comparisonPlotOutput",action="store",type="string",dest="comparisonPlotOutput")
parser.add_option("--heatmapOutput",action="store",type="string",dest="heatmapOutput")
parser.add_option("--volcanoPlotOutput",action="store",type="string",dest="volcanoPlotOutput")
parser.add_option("--RScriptOutput",action="store",type="string",dest="RScriptOutput")
################## BELOW THIS ARE PLOTTING OPTIONS ############################## These are actually all going to be moved into a separate tool
#general options
parser.add_option("--significance",action="store",type="float",dest="significance") # For the volcano plots...
parser.add_option("--FCthreshold",action="store",type="float",dest="FCthreshold") # FC threshold For the volcano plots...
parser.add_option("--ylimUp",action="store",type="float",dest="ylimUp") # ylimUp threshold for the plots
parser.add_option("--ylimDown",action="store",type="float",dest="ylimDown") # ylimDown threshold for plots
parser.add_option("--xlimUp",action="store",type="float",dest="xlimUp") # xlimUp threshold for Volcano plots
parser.add_option("--autoAxes",action="store_true",dest="autoAxes")
parser.add_option("--xAxisSize",action="store",type="int",dest="xAxisSize")
parser.add_option("--yAxisSize",action="store",type="int",dest="yAxisSize")
parser.add_option("--width",action="store",type="int",dest="width",default=10)
parser.add_option("--height",action="store",type="int",dest="height",default=10)
#HeatMap
parser.add_option("--numProtein",action="store",type="int",dest="numProtein",default=180) # Number of proteins per heatmap... Max is 180
parser.add_option("--clustering",action="store",type="string",dest="clustering",default="protein") # clustering type for heatmap... Can be "protein", "comparison", "both"
#VolcanoPlot
parser.add_option("--dotSize",action="store",type="int",dest="dotSize",default=3)#volcanoplot
parser.add_option("--textSize",action="store",type="int",dest="textSize",default=4)#volcanoplot
parser.add_option("--proteinName",action="store_true",dest="proteinName") # On volcano plot, draw protein names?
parser.add_option("--legendSize",action="store",type="int",dest="legendSize",default=7)
(options,args) = parser.parse_args()
if options.autoAxes:
xlimUp="FALSE"
ylimUp="FALSE"
ylimDown="FALSE"
else:
xlimUp=options.xlimUp
ylimUp=options.ylimUp
ylimDown=options.ylimDown
if options.proteinName:
proteinName="TRUE"
else:
proteinName="FALSE"
print "Now we're going to prepare the R script for MSstats graphing..."
#Let's start by reading in the experiment structure.
group_information = pandas.read_csv(options.experiment_file,sep='\t')
comparison_df = pandas.read_csv(options.comparison_csv)
with open("MSstats_Script.R",'wb') as script_writer:
script_writer.write("library(MSstats)\n")
script_writer.write("setwd(\""+str(basedir)+"\")\n") #We're going to set the current directory...
script_writer.write("load(\""+str(options.image_RData)+"\")\n")
#script_writer.write("comparisonResult<-read.csv(\""+str(options.comparison_csv)+"\")\n") #We will load in the input CSV file! (In this case by absolute path, though that's not necessary...)
#script_writer.write("write.csv(comparisonResult$ComparisonResult,file=\"comparisonResult_output.csv\")\n")
#OKAY! So, now we're going to write out the plots... This may take a bit...
    # So, first, let's check if we can output a heatmap (number of comparisons > 2)
if len(comparison_df['Label'].unique().tolist())>=2:
#script_writer.write("groupComparisonPlots(data=comparisonResult$ComparisonResult,type=\"Heatmap\", logBase.pvalue=2, sig="+str(options.significance)+", FCcutoff="+str(options.FCthreshold)+",ylimUp="+str(ylimUp)+",ylimDown="+str(ylimDown)+",xlimUp="+str(xlimUp)+",x.axis.size="+str(options.xAxisSize)+",y.axis.size="+str(options.yAxisSize)+",numProtein="+str(options.numProtein)+",clustering=\""+options.clustering+"\",width="+str(options.width)+",height="+str(options.height)+")\n") #add width, height, address
script_writer.write("groupComparisonPlots(data=comparisonResult$ComparisonResult,type=\"Heatmap\", logBase.pvalue=2,x.axis.size="+str(options.xAxisSize)+",y.axis.size="+str(options.yAxisSize)+",numProtein="+str(options.numProtein)+",clustering=\""+options.clustering+"\",width="+str(options.width)+",height="+str(options.height)+")\n") #add width, height, address
#pass
script_writer.write("groupComparisonPlots(data=comparisonResult$ComparisonResult,ProteinName=\""+proteinName+"\",type=\"VolcanoPlot\", logBase.pvalue=2, sig="+str(options.significance)+", FCcutoff="+str(options.FCthreshold)+",ylimUp="+str(ylimUp)+",ylimDown="+str(ylimDown)+",xlimUp="+str(xlimUp)+",x.axis.size="+str(options.xAxisSize)+",dot.size="+str(options.dotSize)+",text.size="+str(options.textSize)+",legend.size="+str(options.legendSize)+",width="+str(options.width)+",height="+str(options.height)+",which.Comparison=\"all\")\n")
script_writer.write("groupComparisonPlots(data=comparisonResult$ComparisonResult,type=\"ComparisonPlot\", sig="+str(options.significance)+",x.axis.size="+str(options.xAxisSize)+",dot.size="+str(options.dotSize)+",legend.size="+str(options.legendSize)+",width="+str(options.width)+",height="+str(options.height)+",which.Comparison=\"all\")\n")
#OKAY.... The R Script has been written!
#We're going to execute the R script now!
print "Copying RScript back to Galaxy..."
shutil.copy('MSstats_Script.R',options.RScriptOutput)
subprocess.check_call(['Rscript', 'MSstats_Script.R'],shell=False,stderr=sys.stdout.fileno())
print "Moving files to final output locations...."
#print os.listdir(os.getcwd())
#shutil.copy('TMP_dataProcess_output.csv',options.processedOutput)
#shutil.copy('comparisonResult_output.csv',options.comparisonOutput)
shutil.copy('VolcanoPlot.pdf',options.volcanoPlotOutput)
if len(comparison_df['Label'].unique().tolist())>2:
shutil.copy('Heatmap.pdf',options.heatmapOutput)
shutil.copy('ComparisonPlot.pdf',options.comparisonPlotOutput)
print "All done!"
|
googledatalab/pydatalab
|
solutionbox/ml_workbench/tensorflow/setup.py
|
Python
|
apache-2.0
| 1,189
| 0.003364
|
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
# This setup file is used when running cloud training or cloud dataflow jobs.
from setuptools import setup, find_packages
setup(
name='trainer',
version='1.0.0',
packages=find_packages(),
description='Google Cloud Datalab helper sub-package',
author='Google',
author_email='google-cloud-datalab-feedback@googlegroups.com',
keywords=[
],
license="Apache Software License",
long_description="""
""",
install_requires=[
'tensorflow==1.15.2',
'protobuf==3.1.0',
'pillow==6.2.0', # ML Engine does not have PIL installed
],
package_data={
},
data_files=[],
)
|
hajgato/easybuild-easyblocks
|
setup.py
|
Python
|
gpl-2.0
| 3,586
| 0.012828
|
##
# Copyright 2012-2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
This script can be used to install easybuild-easyblocks, e.g. using:
easy_install --user .
or
python setup.py --prefix=$HOME/easybuild
@author: Kenneth Hoste (Ghent University)
"""
import os
import re
import sys
from distutils import log
sys.path.append('easybuild')
from easyblocks import VERSION
API_VERSION = str(VERSION).split('.')[0]
suff = ''
rc_regexp = re.compile("^.*(rc[0-9]*)$")
res = rc_regexp.search(str(VERSION))
if res:
suff = res.group(1)
dev_regexp = re.compile("^.*[0-9]dev$")
if dev_regexp.match(str(VERSION)):
suff = 'dev'
API_VERSION += suff
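# Illustrative values: VERSION '1.8.2' yields API_VERSION '1',
# '1.8.2rc1' yields '1rc1', and '1.8.2dev' yields '1dev'.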
# log levels: 0 = WARN (default), 1 = INFO, 2 = DEBUG
log.set_verbosity(1)
try:
from setuptools import setup
log.info("Installing with setuptools.setup...")
except ImportError, err:
log.info("Failed to import setuptools.setup, so falling back to distutils.setup")
from distutils.core import setup
# Utility function to read README file
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
log.info("Installing version %s (required versions: API >= %s)" % (VERSION, API_VERSION))
setup(
name = "easybuild-easyblocks",
version = str(VERSION),
author = "EasyBuild community",
author_email = "easybuild@lists.ugent.be",
description = """Python modules which implement support for installing particular (groups of) software packages with EasyBuild.""",
license = "GPLv2",
keywords = "software build building installation installing compilation HPC scientific",
url = "http://hpcugent.github.com/easybuild",
packages = ["easybuild", "easybuild.easyblocks", "easybuild.easyblocks.generic"],
package_dir = {"easybuild.easyblocks": "easybuild/easyblocks"},
package_data = {'easybuild.easyblocks': ["[a-z0-9]/*.py"]},
long_description = read("README.rst"),
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2.4",
"Topic :: Software Development :: Build Tools",
],
platforms = "Linux",
provides = ["easybuild", "easybuild.easyblocks", "easybuild.easyblocks.generic"],
install_requires = ["easybuild-framework >= %s" % API_VERSION],
zip_safe = False,
)
|
amlyj/pythonStudy
|
2.7/rpc/RPyC/demo.py
|
Python
|
mit
| 1,415
| 0.002126
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 17-8-13 1:31 PM
# @Author : Tom.Lee
# @CopyRight : 2016-2017
# @File : demo.py
# @Product : PyCharm
# @Docs :
# @Source :
import rpyc
from rpyc.utils.server import ThreadedServer
class MyService(rpyc.Service):
data = []
def exposed_save_data(self, d):
self.data.append(d)
def exposed_get_data(self):
return self.data
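    # Note: 'data' is a class attribute, so it is shared across all client
    # connections to this service.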
class MyClient(object):
@classmethod
def conn(cls):
connections = rpyc.connect('localhost', 15111)
connections.root.save_data(123)
        print connections.root.get_data()
if __name__ == '__main__':
import threading
import time
server = ThreadedServer(MyService, port=15111)
client = MyClient()
def start():
print '*************************************'
print '*************************************'
print '*****************RpyC****************'
print '************ **************'
print '*************************************'
print '***************start server**********'
print '*************************************'
print '*************************************\n\n'
server.start()
threading.Thread(target=start).start()
client.conn()
time.sleep(5)
server.close()
print 'service stop.'
|
Show-Me-the-Code/python
|
Drake-Z/0010/0010.py
|
Python
|
mit
| 1,049
| 0.01164
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'Problem 0010: use Python to generate a letter CAPTCHA image like the one shown'
from PIL import Image, ImageDraw, ImageFont, ImageFilter
import random
# Random letter:
def rndChar():
    return chr(random.randint(65, 90))
# Random color 1 (used for the background pixels):
def rndColor():
return (random.randint(64, 255), random.randint(64, 255), random.randint(64, 255))
# Random color 2 (used for the text):
def rndColor2():
return (random.randint(32, 127), random.randint(32, 127), random.randint(32, 127))
# 240 x 60:
width = 60 * 4
height = 60
image = Image.new('RGB', (width, height), (255, 255, 255))
# Create a Font object:
font = ImageFont.truetype(r'C:\Windows\Fonts\Arial.ttf', 36)  # raw string keeps the Windows path intact
# Create a Draw object:
draw = ImageDraw.Draw(image)
# Fill every pixel:
for x in range(width):
for y in range(height):
draw.point((x, y), fill=rndColor())
# Draw the text:
for t in range(4):
draw.text((60 * t + 10, 10), rndChar(), font=font, fill=rndColor2())
# Blur:
image = image.filter(ImageFilter.BLUR)
image.save(r'0010\code.jpg', 'jpeg')  # raw string keeps the backslash literal
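# Note: the font path above is Windows-specific; on Linux one would point
# ImageFont.truetype at a font such as
# '/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf' (path is illustrative
# and distribution-dependent).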
|
harish2rb/pyGeoNet
|
test/test_pygeonet_processing.py
|
Python
|
gpl-3.0
| 4,709
| 0.031004
|
# pyGeoNet_readGeotiff
#import sys
#import os
from osgeo import gdal
#from string import *
import numpy as np
from time import clock
import pygeonet_defaults as defaults
import pygeonet_prepare as Parameters
from math import modf, floor
#from scipy.stats.mstats import mquantiles
def read_dem_from_geotiff(demFileName,demFilePath):
# Open the GeoTIFF format DEM
fullFilePath = demFilePath + demFileName
#fullFilePath = "G:\\HarishLaptop_Backup\\TI102782W0E\\PythonScripts\\pyGeoNet1.0\\data\\skunk.tif"
print fullFilePath
ary = []
ds = gdal.Open(fullFilePath, gdal.GA_ReadOnly)
geotransform = ds.GetGeoTransform()
'''
print 'Driver: ', ds.GetDriver().ShortName,'/', \
ds.GetDriver().LongName
print 'Size is ',ds.RasterXSize,'x',ds.RasterYSize, \
'x',ds.RasterCount
print 'Projection is ',ds.GetProjection()
if not geotransform is None:
print 'Origin = (',geotransform[0], ',',geotransform[3],')'
print 'Pixel Size = (',geotransform[1], ',',geotransform[5],')'
'''
ary = ds.GetRasterBand(1).ReadAsArray()
#Parameters.geospatialReferenceArray
#Parameters.geoReferencingMatrix
#Parameters.geoBoundingBox
Parameters.demPixelScale = geotransform[1]
Parameters.xLowerLeftCoord = geotransform[0]
Parameters.yLowerLeftCoord = geotransform[3]
return ary
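# Illustrative usage (file names are hypothetical):
#   dem = read_dem_from_geotiff("skunk.tif", "data/")
# Note that geotransform[0] and geotransform[3] are GDAL's *top*-left corner
# coordinates, despite the LowerLeft naming above.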
def quantile(x, q, qtype = 7, issorted = False):
"""
Args:
x - input data
q - quantile
qtype - algorithm
issorted- True if x already sorted.
Compute quantiles from input array x given q.For median,
specify q=0.5.
References:
http://reference.wolfram.com/mathematica/ref/Quantile.html
http://wiki.r-project.org/rwiki/doku.php?id=rdoc:stats:quantile
Author:
Ernesto P.Adorio Ph.D.
UP Extension Program in Pampanga, Clark Field.
"""
if not issorted:
y = sorted(x)
else:
y = x
if not (1 <= qtype <= 9):
return None # error!
# Parameters for the Hyndman and Fan algorithm
abcd = [(0, 0, 1, 0), # inverse empirical distrib.function., R type 1
(0.5, 0, 1, 0), # similar to type 1, averaged, R type 2
(0.5, 0, 0, 0), # nearest order statistic,(SAS) R type 3
(0, 0, 0, 1), # California linear interpolation, R type 4
(0.5, 0, 0, 1), # hydrologists method, R type 5
(0, 1, 0, 1), # mean-based estimate(Weibull method), (SPSS,Minitab), type 6
(1, -1, 0, 1), # mode-based method,(S, S-Plus), R type 7
(1.0/3, 1.0/3, 0, 1), # median-unbiased , R type 8
(3/8.0, 0.25, 0, 1) # normal-unbiased, R type 9.
]
a, b, c, d = abcd[qtype-1]
n = len(x)
g, j = modf( a + (n+b) * q -1)
if j < 0:
return y[0]
elif j >= n:
return y[n-1] # oct. 8, 2010 y[n]???!! uncaught off by 1 error!!!
j = int(floor(j))
if g == 0:
return y[j]
else:
return y[j] + (y[j+1]- y[j])* (c + d * g)
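# Illustrative checks for the default qtype=7 (the R/S-Plus convention):
#   quantile([1, 2, 3, 4, 5], 0.5) -> 3
#   quantile([1, 2, 3, 4], 0.5)    -> 2.5  (interpolates the middle pair)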
def main():
#demFileName = "skunk.tif"
#demFilePath = "G:\\HarishLaptop_Backup\\TI102782W0E\\PythonScripts\\pyGeoNet1.0\\data\\"
print "Reading input file path :",Parameters.demDataFilePath
print "Reading input file :",Parameters.demFileName
rawDemArray = read_dem_from_geotiff(Parameters.demFileName,Parameters.demDataFilePath)
nanDemArray=rawDemArray
nanDemArray[nanDemArray < defaults.demNanFlag]= np.NAN
Parameters.minDemValue= np.min(nanDemArray[:])
Parameters.maxDemValue= np.max(nanDemArray[:])
    # Area of analysis
Parameters.xDemSize=np.size(rawDemArray,0)
    Parameters.yDemSize=np.size(rawDemArray,1)
# Calculate pixel length scale and assume square
Parameters.maxLowerLeftCoord = np.max([Parameters.xDemSize, Parameters.yDemSize])
print 'DTM size: ',Parameters.xDemSize, 'x' ,Parameters.yDemSize
#-----------------------------------------------------------------------------
# Compute slope magnitude for raw and filtered DEMs
print 'Computing slope of raw DTM'
    # np.gradient returns one array per axis; combine them into a slope magnitude
    gradientY, gradientX = np.gradient(nanDemArray, Parameters.demPixelScale)
    slopeMagnitudeDemArray = np.sqrt(gradientX**2 + gradientY**2)
print slopeMagnitudeDemArray
# Computation of the threshold lambda used in Perona-Malik nonlinear
# filtering. The value of lambda (=edgeThresholdValue) is given by the 90th
# quantile of the absolute value of the gradient.
    print 'Computing lambda = q-q-based nonlinear filtering threshold'
mult = Parameters.xDemSize * Parameters.yDemSize
print np.size(slopeMagnitudeDemArray,0)
edgeThresholdValue = quantile(np.reshape(slopeMagnitudeDemArray,mult),defaults.demSmoothingQuantile)
print edgeThresholdValue
if __name__ == '__main__':
t0 = clock()
main()
t1 = clock()
print "time taken to complete the script is::",t1-t0," seconds"
print "script complete"
|
abrenaut/waybackscraper
|
waybackscraper/exceptions.py
|
Python
|
mit
| 82
| 0
|
# -*- coding: utf-8 -*-
class ScrapeError(Exception):
"""scraping Failed"""
|
uclouvain/osis
|
assessments/templatetags/score_display.py
|
Python
|
agpl-3.0
| 1,668
| 0.0018
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2021 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django import template
register = template.Library()
@register.filter
def score_display(value, decimal_option):
if value is None or str(value) == '-':
return ""
else:
try:
if decimal_option:
return "{0:.1f}".format(value)
else:
return "{0:.0f}".format(value)
except:
return value
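# Template usage (illustrative; variable names are hypothetical):
#   {{ enrollment.score|score_display:True }}   renders 12.5 as "12.5"
#   {{ enrollment.score|score_display:False }}  renders 12.4 as "12"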
|
bigown/SOpt
|
Python/OOP/Static.py
|
Python
|
mit
| 117
| 0.017094
|
class Car:
def beep():
print('Beep')
car = Car()
Car.beep()
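# In Python 3, Car.beep() works because calling through the class passes no
# instance; car.beep() would raise TypeError since beep() takes no arguments
# but Python passes the instance implicitly. (In Python 2, even Car.beep()
# fails, with an unbound-method error.)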
#https://pt.stackoverflow.com/q/482008/101
|