repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
BackupTheBerlios/kimchi | src/ui/mvc/delegate/KTableDelegate.py | Python | bsd-3-clause | 2,652 | 0.00905 | # coding: utf-8
'''
Copyright (c) 2010, Alexandru Dancu
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the project nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN A | NY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
from PyQt4.QtCore import | *
from PyQt4.QtGui import *
from properties import NEW_TABLE
class KTableDelegate(QItemDelegate):
def __init__(self, parent = None):
super(KTableDelegate, self).__init__(parent)
# def createEditor(self, parent, option, index):
#
# editor = QLineEdit(parent)
# self.connect(editor, SIGNAL("returnPressed()"),
# self.commitAndCloseEditor)
#
# return editor
def commitAndCloseEditor(self):
editor = self.sender()
if isinstance(editor, (QTextEdit, QLineEdit)):
self.emit(SIGNAL("commitData(QWidget*)"), editor)
self.emit(SIGNAL("closeEditor(QWidget*)"), editor)
def setEditorData(self, editor, index):
text = index.model().data(index, Qt.DisplayRole).toString()
editor.setText(text)
def setModelData(self, editor, model, index):
text = unicode(editor.text())
if not text:
text = NEW_TABLE
model.setData(index, QVariant(text))
|
rbaravalle/imfractal | imfractal/Algorithm/MFS_3D.py | Python | bsd-3-clause | 11,780 | 0.00798 | """
Copyright (c) 2013 Rodrigo Baravalle
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with th | e distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR B | E LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from Algorithm import *
import numpy as np
from math import log10
import scipy.signal
import scipy.io as sio
from scipy.stats import norm
class MFS_3D (Algorithm):
"""
:3D implementation of MFS through holder exponents f(alpha)
:version: 1.0
:author: Rodrigo Baravalle
"""
def __init__(self):
pass
def setDef(self, ind, f, ite, filename, file_mask, params):
# parameters: ind -> determines how many levels are used when computing the density
# choose 1 for using directly the image measurement im or
# >= 6 for computing the density of im (quite stable for >=5)
# f ----> determines the dimension of MFS vector
# ite ---> determines how many levels are used when computing MFS for each
self.ind_num = ind # number of pixels for averaging
self.f_num = f # window
self.ite_num = ite
self.filename = filename
self.file_mask = file_mask
self.params = params
def gauss_kern(self,size_x, size_y, size_z):
""" Returns a normalized 3D gauss kernel array for convolutions """
m = np.float32(size_x)
n = np.float32(size_y)
o = np.float32(size_z)
sigma = 2; # ???
if(size_x <= 3): sigma = 1.5;
if(size_x == 5): sigma = 2.5;
z, y, x = np.mgrid[-(m-1)/2:(m-1)/2+1, -(n-1)/2:(n-1)/2+1, -(o-1)/2:(o-1)/2+1]
b = 2*(sigma**2)
square = lambda i : i**2
fm = lambda i: map(square, i)
x2 = map(fm, x)
y2 = map(fm, y)
z2 = map(fm, z)
g = np.sum([x2, y2, z2], axis=0).astype(np.float32)
g = np.exp(g).astype(np.float32)
return g / g.sum()
def determine_threshold(self, arr):
# compute histogram of values
bins = range(np.min(arr), np.max(arr) + 1)
h = np.histogram(arr, bins=bins)
threshold = np.min(arr)
# get x% of mass -> threshold
assert (len(arr.shape) == 3)
total_pixels = arr.shape[0] * arr.shape[1] * arr.shape[2]
for i in range(len(bins) + 1):
# compute sum of h(x) from x = 0 to x = i
partial_sum_vector = np.cumsum(h[0][: (i + 1)])
partial_sum = partial_sum_vector[len(partial_sum_vector) - 1]
percentage = (float)(partial_sum) / (float)(total_pixels)
if percentage > 0.75:
threshold = np.min(arr) + i
break
return threshold
def openMatlab(self, name, filename, greyscale):
import scipy.io as sio
arr = np.array(sio.loadmat(filename)[name]).astype(np.int32)
if greyscale:
return arr
if name == "S":
threshold = self.determine_threshold(arr)
arr = arr > threshold
a_v = arr.cumsum()
print "Amount of white pixels: ", a_v[len(a_v) - 1]
# debug - to see the spongious structure
# plt.imshow((arr[:,:,50]), cmap=plt.gray())
# plt.show()
return arr
def gradient(self, data):
Nx, Ny, Nz = data.shape
basic_fx = np.array([[-1, 0, 1], [0, 0, 0], [0, 0, 0]])
basic_fy = basic_fx.T
basic_fxy = [[-1, 0, 0], [0, 0, 0], [0, 0, 1]]
basic_fyx = [[0, 0, -1], [0, 0, 0], [1, 0, 0]]
fx = np.float32(0.5) * np.array([basic_fx, basic_fx, basic_fx])
fy = np.float32(0.5) * np.array([basic_fy, basic_fy, basic_fy])
fxy = np.float32(0.5) * np.array([basic_fxy, basic_fxy, basic_fxy])
fyx = np.float32(0.5) * np.array([basic_fyx, basic_fyx, basic_fyx])
a = scipy.signal.convolve(data, fx, mode="full")
Nx, Ny, Nz = a.shape
a = a[0:Nx - 2, 1:Ny - 1, 1:Nz - 1] # fix me, check z indices!
b = scipy.signal.convolve(data, fy, mode="full")
Nx, Ny, Nz = b.shape
b = b[1:Nx - 1, 0:Ny - 2, 1:Nz - 1]
c = scipy.signal.convolve(data, fxy, mode="full")
Nx, Ny, Nz = c.shape
c = c[1:Nx - 1, 1:Ny - 1, 1:Nz - 1]
d = scipy.signal.convolve(data, fyx, mode="full")
Nx, Ny, Nz = d.shape
d = d[1:Nx - 1, 1:Ny - 1, 1:Nz - 1]
data = a ** 2 + b ** 2 + c ** 2 + d ** 2
data = np.sqrt(data)
data = np.floor(data)
return data
def laplacian(self, data): # MFS of Laplacion
# 3d, octave:
# f1 = fspecial3('gaussian', 5, 1);
# f2 = -ones(3,3,3);
# f2(2,2,2) = 26;
# f = convn(f1, f2);
laplacian_kernel = np.load('exps/data/laplacian_kernel.npy')
print "SHAPES: !"
print laplacian_kernel.shape
print data.shape
a = scipy.signal.convolve(data, laplacian_kernel, mode="full")
Nx, Ny, Nz = a.shape
a = a[3:Nx - 3, 3:Ny - 3, 3:Nz - 3]
a = np.floor((a < 0).choose(a, 0))
return a
def getFDs(self, data = []):
"""
@param string filename : volume location
@param string file_mask : mask volume location
@return [float] : 3D multi fractal dimentions
@author: Rodrigo Baravalle. Code ported from Matlab and extended to 3D
"""
if len(data) == 0:
# data is a 3D grayscale volume
data = self.openMatlab('S', self.filename, True)
data_mask = self.openMatlab('M', self.file_mask, True)
# Masking
data = data * (data_mask > 0)
# Other multifractal measures
if self.params['gradient'] == True:
data = self.gradient(data)
else:
if self.params['laplacian'] == True:
print "laplacian!"
data = self.laplacian(data)
#Using [0..255] to denote the intensity profile of the image
grayscale_box = [0, 255]
#sigmoid function
#data = norm.cdf(data, loc=200.0, scale=100.0);
#Preprocessing: default intensity value of image ranges from 0 to 255
if abs(data).max()< 1:
data = data * grayscale_box[1]
else:
# put every value into [0, 255]
data = (data - data.min()) * 255 / (data.max() - data.min())
#######################
#DEBUG
print data.max(), data.min(), data.sum()
### Estimating density function of the volume
### by solving least squares for D in the equation
### log10(bw) = D*log10(c) + b
r = 1.0 / max(data.shape)
c = np.dot(range(1, self.ind_num+1), r)
c = map(lambda i: log10(i), c)
bw = np.zeros((self.ind_num, data.shape[0], data.shape[1], data.shape[2])).astype(np.float32)
bw[0] = data + 1
|
stvstnfrd/edx-platform | lms/djangoapps/courseware/testutils.py | Python | agpl-3.0 | 11,543 | 0.002512 | """
Common test utilities for courseware functionality
"""
from abc import ABCMeta, abstractmethod
from datetime import datetime, timedelta
import ddt
import six
from mock import patch
from six.moves.urllib.parse import urlencode
from lms.djangoapps.courseware.field_overrides import OverrideModulestoreFieldData
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.features.course_experience.url_helpers import get_legacy_courseware_url
from common.djangoapps.student.tests.factories import AdminFactory, CourseEnrollmentFactory, UserFactory
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls
@ddt.ddt
class RenderXBlockTestMixin(six.with_metaclass(ABCMeta, object)):
"""
Mixin for testing the courseware.render_xblock function.
It can be used for testing any higher-level endpoint that calls this method.
"""
# DOM elements that appear in the LMS Courseware,
# but are excluded from the xBlock-only rendering.
COURSEWARE_CHROME_HTML_ELEMENTS = [
'<ol class="tabs course-tabs"',
'<footer id="footer-openedx"',
'<div class="window-wrap"',
'<div class="preview-menu"',
'<div class="container"',
]
# DOM elements that appear in an xBlock,
# but are excluded from the xBlock-only rendering.
XBLOCK_REMOVED_HTML_ELEMENTS = [
'<div class="wrap-instructor-info"',
]
# DOM elements that appear in the LMS Courseware, but are excluded from the
# xBlock-only rendering, and are specific to a particular block.
BLOCK_SPECIFIC_CHROME_HTML_ELEMENTS = {
# Although bookmarks were removed from all chromeless views of the
# verti | cal, it is LTI specifically that must never include them.
'vertical_block': ['<div class="bookmark-button-wrapper"'],
'html_block': [],
| }
def setUp(self):
"""
Clear out the block to be requested/tested before each test.
"""
super(RenderXBlockTestMixin, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
# to adjust the block to be tested, update block_name_to_be_tested before calling setup_course.
self.block_name_to_be_tested = 'html_block'
@abstractmethod
def get_response(self, usage_key, url_encoded_params=None):
"""
Abstract method to get the response from the endpoint that is being tested.
Arguments:
usage_key: The course block usage key. This ensures that the positive and negative tests stay in sync.
url_encoded_params: URL encoded parameters that should be appended to the requested URL.
"""
pass # pragma: no cover # lint-amnesty, pylint: disable=unnecessary-pass
def login(self):
"""
Logs in the test user.
"""
self.client.login(username=self.user.username, password='test')
def course_options(self):
"""
Options to configure the test course. Intended to be overridden by
subclasses.
"""
return {
'start': datetime.now() - timedelta(days=1)
}
def setup_course(self, default_store=None):
"""
Helper method to create the course.
"""
if not default_store:
default_store = self.store.default_modulestore.get_modulestore_type()
with self.store.default_store(default_store):
self.course = CourseFactory.create(**self.course_options())
chapter = ItemFactory.create(parent=self.course, category='chapter')
self.vertical_block = ItemFactory.create(
parent_location=chapter.location,
category='vertical',
display_name="Vertical"
)
self.html_block = ItemFactory.create(
parent=self.vertical_block,
category='html',
data="<p>Test HTML Content<p>"
)
self.problem_block = ItemFactory.create(
parent=self.vertical_block,
category='problem',
display_name='Problem'
)
CourseOverview.load_from_module_store(self.course.id)
# block_name_to_be_tested can be `html_block` or `vertical_block`.
# These attributes help ensure the positive and negative tests are in sync.
self.block_to_be_tested = getattr(self, self.block_name_to_be_tested)
self.block_specific_chrome_html_elements = self.BLOCK_SPECIFIC_CHROME_HTML_ELEMENTS[
self.block_name_to_be_tested
]
def setup_user(self, admin=False, enroll=False, login=False):
"""
Helper method to create the user.
"""
self.user = AdminFactory() if admin else UserFactory()
if enroll:
CourseEnrollmentFactory(user=self.user, course_id=self.course.id)
if login:
self.login()
def verify_response(self, expected_response_code=200, url_params=None):
"""
Helper method that calls the endpoint, verifies the expected response code, and returns the response.
Arguments:
expected_response_code: The expected response code.
url_params: URL parameters that will be encoded and passed to the request.
"""
if url_params:
url_params = urlencode(url_params)
response = self.get_response(self.block_to_be_tested.location, url_params)
if expected_response_code == 200:
self.assertContains(response, self.html_block.data, status_code=expected_response_code)
unexpected_elements = self.block_specific_chrome_html_elements
unexpected_elements += self.COURSEWARE_CHROME_HTML_ELEMENTS + self.XBLOCK_REMOVED_HTML_ELEMENTS
for chrome_element in unexpected_elements:
self.assertNotContains(response, chrome_element)
else:
self.assertNotContains(response, self.html_block.data, status_code=expected_response_code)
return response
@ddt.data(
('vertical_block', ModuleStoreEnum.Type.mongo, 13),
('vertical_block', ModuleStoreEnum.Type.split, 6),
('html_block', ModuleStoreEnum.Type.mongo, 14),
('html_block', ModuleStoreEnum.Type.split, 6),
)
@ddt.unpack
def test_courseware_html(self, block_name, default_store, mongo_calls):
"""
To verify that the removal of courseware chrome elements is working,
we include this test here to make sure the chrome elements that should
be removed actually exist in the full courseware page.
If this test fails, it's probably because the HTML template for courseware
has changed and COURSEWARE_CHROME_HTML_ELEMENTS needs to be updated.
"""
with self.store.default_store(default_store):
self.block_name_to_be_tested = block_name
self.setup_course(default_store)
self.setup_user(admin=True, enroll=True, login=True)
with check_mongo_calls(mongo_calls):
url = get_legacy_courseware_url(self.course.id, self.block_to_be_tested.location)
response = self.client.get(url)
expected_elements = self.block_specific_chrome_html_elements + self.COURSEWARE_CHROME_HTML_ELEMENTS
for chrome_element in expected_elements:
self.assertContains(response, chrome_element)
@ddt.data(
(ModuleStoreEnum.Type.mongo, 5),
(ModuleStoreEnum.Type.split, 5),
)
@ddt.unpack
def test_success_enrolled_staff(self, default_store, mongo_calls):
with self.store.default_store(default_store):
if default_store is ModuleStoreEnum.Type.mongo:
mongo_calls = self.get_success_enrolled_staff_mongo_count()
self.setup_course(default_store)
self.setup_user(admin=True, enroll=True, login=True)
# The 5 mongoDB calls include calls for
# Old Mongo:
# (1) fill_in_run
|
googleapis/python-channel | google/cloud/channel_v1/types/entitlements.py | Python | apache-2.0 | 11,801 | 0.001271 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.channel_v1.types import common
from google.cloud.channel_v1.types import offers
from google.cloud.channel_v1.types import products
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.channel.v1",
manifest={
"Entitlement",
"Parameter",
"AssociationInfo",
"ProvisionedService",
"CommitmentSettings",
"RenewalSettings",
"TrialSettings",
"TransferableSku",
"TransferEligibility",
},
)
class Entitlement(proto.Message):
r"""An entitlement is a representation of a customer's ability to
use a service.
Attributes:
name (str):
Output only. Resource name of an entitlement in the form:
accounts/{account_id}/customers/{customer_id}/entitlements/{entitlement_id}.
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time at which the
entitlement is created.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time at which the
entitlement is updated.
offer (str):
Required. The offer resource name for which the entitlement
is to be created. Takes the form:
accounts/{account_id}/offers/{offer_id}.
commitment_settings (google.cloud.channel_v1.types.CommitmentSettings):
Commitment settings for a commitment-based
Offer. Required for commitment based offers.
provisioning_state (google.cloud.channel_v1.types.Entitlement.ProvisioningState):
Output only. Current provisioning state of
the entitlement.
provisioned_service (google.cloud.channel_v1.types.ProvisionedService):
Output only. Service provisioning details for
the entitlement.
suspension_reasons (Sequence[google.cloud.channel_v1.types.Entitlement.SuspensionReason]):
Output only. Enumerable of all current
suspension reasons for an entitlement.
purchase_order_id (str):
Optional. This purchase order (PO)
information is for resellers to use for their
company tracking usage. If a purchaseOrderId
value is given, it appears in the API responses
and shows up in the invoice. The property
accepts up to 80 plain text characters.
trial_settings (google.cloud.channel_v1.types.TrialSettings):
Output only. Settings for trial offers.
association_info (google.cloud.channel_v1.types.AssociationInfo):
Association information to other
entitlements.
parameters (Sequence[google.cloud.channel_v1.types.Parameter]):
Extended entitlement parameters. When creating an
entitlement, valid parameter names and values are defined in
the
[Offer.parameter_definitions][google.cloud.channel.v1.Offer.parameter_definitions].
The response may include the following output-only
Parameters:
- assigned_units: The number of licenses assigned to users.
- max_units: The maximum assignable units for a flexible
offer.
- num_units: The total commitment for commitment-based
offers.
"""
class ProvisioningState(proto.Enum):
r"""Indicates the current provisioning state of the entitlement."""
PROVISIONING_STATE_UNSPECIFIED = 0
ACTIVE = 1
SUSPENDED = 5
class SuspensionReason(proto.Enum):
r"""Suspension reason for an entitlement if
[provisioning_state][google.cloud.channel.v1.Entitlement.provisioning_state]
= SUSPENDED.
"""
SUSPENSION_REASON_UNSPECIFIED = 0
RESELLER_INITIATED = 1
TRIAL_ENDED = 2
RENEWAL_WITH_TYPE_CANCEL = 3
PENDING_TOS_ACCEPTANCE = 4
OTHER = 100
name = proto.Field(proto.STRING, number=1,)
create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,)
update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,)
offer = proto.Field(proto.STRING, number=8,)
commitment_settings = proto.Field(
proto.MESSAGE, number=12, message="CommitmentSettings",
)
provisioning_state = proto.Field(proto.ENUM, number=13, enum=ProvisioningState,)
provisioned_service = proto.Field(
proto.MESSAGE, number=16, message="ProvisionedService",
)
suspension_reasons = proto.RepeatedField(
proto.ENUM, number=18, enum=SuspensionReason,
)
purchase_order_id = proto.Field(proto.STRING, number=19,)
trial_settings = proto.Field(proto.MESSAGE, number=21, message="TrialSettings",)
association_info = proto.Field(proto.MESSAGE, number=23, message="AssociationInfo",)
parameters = proto.RepeatedField(proto.MESSAGE, number=26, message="Parameter",)
class Parameter(proto.Message):
r"""Definition for extended entitlement parameters.
Attributes:
name (str):
Name of the parameter.
value (google.cloud.channel_v1.types.Value):
Value of the parameter.
editable (bool):
Output only. Specifies whether this parameter is allowed to
be changed. For example, for a Google Workspace Business
Starter entitlement in commitment plan, num_units is
editable when entitlement is active.
"""
name = proto.Field(proto.STRING, number=1,)
value = proto.Field(proto.MESSAGE, number=2, message=common.Value,)
editable = proto.Field(proto.BOOL, number=3,)
class AssociationInfo(proto.Message):
r"""Association links that an entitlement has to other
entitlements.
Attributes:
base_entitlement (str):
The name of the base entitlement, for which
this entitlement is an add-on.
"""
base_entitlement = proto.Field(proto.STRING, number=1,)
class ProvisionedService(proto.Message):
r"""Service provisioned for an entitlement.
Attributes:
provisioning_id (str):
Output only. Provision | ing ID of the
entitlement. For Google Workspace, this is the
underlying Subscription ID. For Google Cloud
Platform, this is the Billing Account ID of the
billing subaccount.".
product_id (str):
Output only. The product pertaining to the
provisioning resource as specifie | d in the Offer.
sku_id (str):
Output only. The SKU pertaining to the
provisioning resource as specified in the Offer.
"""
provisioning_id = proto.Field(proto.STRING, number=1,)
product_id = proto.Field(proto.STRING, number=2,)
sku_id = proto.Field(proto.STRING, number=3,)
class CommitmentSettings(proto.Message):
r"""Commitment settings for commitment-based offers.
Attributes:
start_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Commitment start timestamp.
end_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Commitment end timestamp.
renewal_settings (google.cloud.channel_v1.types.RenewalSettings):
Optional. Renewal settings applicable for a
commitment-based Offer.
"""
start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
end_time = proto.Field(proto.MESSAGE, number=2, message=time |
google/fuzzbench | docs/reference/benchmarks.py | Python | apache-2.0 | 6,523 | 0.000307 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code for generating the table in benchmarks.md."""
import collections
import multiprocessing
import os
from pathlib import Path
import re
import subprocess
import sys
import tarfile
import zipfile
from common import benchmark_utils
from common import filesystem
from common import fuzzer_utils as common_fuzzer_utils
from common import oss_fuzz
from common import utils
from fuzzers import utils as fuzzer_utils
BUILD_ARCHIVE_EXTENSION = '.tar.gz'
COVERAGE_BUILD_PREFIX = 'coverage-build-'
LEN_COVERAGE_BUILD_PREFIX = len(COVERAGE_BUILD_PREFIX)
GUARDS_REGEX = re.compile(rb'INFO:.*\((?P<num_guards>\d+) guards\).*')
ONE_MB = 1024**2
BENCHMARK_INFO_FIELDS = ['benchmark', 'target', 'dict', 'seeds', 'guards', 'MB']
BenchmarkInfo = collections.namedtuple('BenchmarkInfo', BENCHMARK_INFO_FIELDS)
def get_benchmark_infos(builds_dir):
"""Get BenchmarkInfo for each benchmark that has a build in
builds_dir."""
build_paths = [
os.path.join(builds_dir, path)
for path in os.listdir(builds_dir)
if path.endswith(BUILD_ARCHIVE_EXTENSION)
]
pool = multiprocessing.Pool()
return pool.map(get_benchmark_info, build_paths)
def get_real_benchmark_name(benchmark):
"""The method we use to infer benchmark names from coverage builds
doesn't quite work because the project name is used in OSS-Fuzz
builds instead. This function figures out the actual benchmark based on
the project name."""
benchmarks_dir = os.path.join(utils.ROOT_DIR, 'benchmarks')
real_benchmarks = os.listdir(benchmarks_dir)
if benchmark in real_benchmarks:
return benchmark
for real_benchmark in real_benchmarks:
if not os.path.isdir(os.path.join(benchmarks_dir, real_benchmark)):
continue
if not benchmark_utils.is_oss_fuzz(real_benchmark):
continue
config = oss_fuzz.get_config(real_benchmark)
if config['project'] == benchmark:
return real_benchmark
return None
def count_oss_fuzz_seeds(fuzz_target_path):
"""Count the number of seeds in the OSS-Fuzz seed archive for
|fuzze_target_path|."""
zip_file_name = fuzz_target_path + '_seed_corpus.zip'
if not os.path.exists(zip_file_name):
return 0
with zipfile.ZipFile(zip_file_name) as zip_file:
return len([
filename for filename in zip_file.namelist()
if not filename.endswith('/')
])
def count_standard_seeds(seeds_dir):
"""Count the number of seeds for a standard benchmark."""
return len([p for p in Path(seeds_dir).glob('**/*') if p.is_file()])
def get_seed_count(benchmark_path, fuzz_target_path):
"""Count the number of seeds for a benchmark."""
standard_seeds_dir = os.path.join(benchmark_path, 'seeds')
if os.path.exists(standard_seeds_dir):
return count_standard_seeds(standard_seeds_dir)
return count_oss_fuzz_seeds(fuzz_target_path)
def get_num_guards(fuzz_target_path):
"""Returns the number of guards in |fuzz_target_path|."""
result = subprocess.run([fuzz_target_path, '-runs=0'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
check=True)
output = result.stdout
match = GUARDS_REGEX.search(output)
assert match, 'Couldn\'t determine guards for ' + fuzz_target_path
return int(match.groupdict()['num_guards'])
def get_binary_size_mb(fuzz_target_path):
"""Returns the size of |fuzz_target_path| in MB, rounded to two
decimal places."""
size = os.path.getsize(fuzz_target_path)
return round(size / ONE_MB, 2)
def get_fuzz_target(benchmark, benchmark_path):
"""Returns the fuzz target and its path for |benchmark|."""
if benchmark_utils.is_oss_fuzz(benchmark):
fuzz_target = oss_fuzz.get_config(benchmark)['fuzz_target']
else:
fuzz_target = common_fuzzer_utils.DEFAULT_FUZZ_TARGET_NAME
fuzz_target_path = common_fuzzer_utils.get_fuzz_target_binary(
benchmark_path, fuzz_target)
assert fuzz_target_path, 'Couldn\'t find fuzz target for ' + benchmark
return fuzz_target, fuzz_target_path
def get_benchmark_info(build_path):
"""Get BenchmarkInfo for the benchmark in |build_path|."""
basename = os.path.basename(build_path)
benchmark = basename[len(COVERAGE_B | UILD_PREFIX
):-len(BUILD_ARCHIVE_EXTENSION)]
benchmark = get_real_benchmark_name(benchmark)
parent_dir = os.path.dirname(build_path)
benchmark_path = os.path.join(parent_dir, benchmark)
filesystem.create_directory(benchmark_path)
with tarfile.open(build_path) as tar_file:
tar_file.extractall(benchmark_path)
fuzz_target, fuzz_target_path = get_fuzz_target(benchmark, be | nchmark_path)
has_dictionary = bool(fuzzer_utils.get_dictionary_path(fuzz_target_path))
seeds = get_seed_count(benchmark_path, fuzz_target_path)
num_guards = get_num_guards(fuzz_target_path)
size = get_binary_size_mb(fuzz_target_path)
return BenchmarkInfo(benchmark, fuzz_target, has_dictionary, seeds,
num_guards, size)
def infos_to_markdown_table(benchmark_infos):
"""Conver a list of BenchmarkInfos into a markdown table and
return the result."""
markdown = ''
for benchmark_info in sorted(benchmark_infos,
key=lambda info: info.benchmark):
markdown += '|{}|{}|{}|{}|{}|{}|\n'.format(*benchmark_info)
return markdown
def main():
    """Print a markdown table with important data on each
    benchmark."""
    # Exactly one argument is expected: the coverage builds directory.
    if len(sys.argv) != 2:
        print('Usage {} <coverage_builds_directory>'.format(sys.argv[0]))
        return 1
    infos = get_benchmark_infos(sys.argv[1])
    print(infos_to_markdown_table(infos))
    print(BENCHMARK_INFO_FIELDS)
    return 0
if __name__ == '__main__':
    # Exit with main()'s return code so failures are visible to the shell.
    sys.exit(main())
|
xiang12835/python_web | py2_web2py/web2py/applications/admin/languages/bg.py | Python | apache-2.0 | 35,738 | 0.020631 | # -*- coding: utf-8 -*-
{
'!langcode!': 'bg',
'!langname!': 'Български',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
'"User Exception" debug mode. ': '"User Exception" debug mode. ',
'%s': '%s',
'%s %%{row} deleted': '%s записите бяха изтрити',
'%s %%{row} updated': '%s записите бяха обновени',
'%s selected': '%s selected',
'%s students registered': '%s students registered',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(requires internet access)': '(requires internet access)',
'(requires internet access, experimental)': '(requires internet access, experimental)',
'(something like "it-it")': '(something like "it-it")',
'(version %s)': '(version %s)',
'?': '?',
'@markmin\x01Searching: **%s** %%{file}': 'Searching: **%s** files',
'A new version of web2py is available': 'A new version of web2py is available',
'A new version of web2py is available: %s': 'A new version of web2py is available: %s',
'Abort': 'Abort',
'About': 'about',
'About application': 'About application',
'Accept Terms': 'Accept Terms',
'Add breakpoint': 'Add breakpoint',
'additional code for your application': 'additional code for your application',
'Additional code for your application': 'Additional code for your application',
'Admin design page': 'Admin design page',
'admin disabled because no admin password': 'admin disabled because no admin password',
'admin disabled because not supported on google app engine': 'admin disabled because not supported on google apps engine',
'admin disabled because too many invalid login attempts': 'admin disabled because too many invalid login attempts',
'admin disabled because unable to access password file': 'admin disabled because unable to access password file',
'Admin is disabled because insecure channel': 'Admin is disabled because insecure channel',
'Admin is disabled because unsecure channel': 'Admin is disabled because unsecure channel',
'Admin language': 'Admin language',
'Admin versioning page': 'Admin versioning page',
'administrative interface': 'administrative interface',
'Administrator Password:': 'Administrator Password:',
'and rename it (required):': 'and rename it (required):',
'and rename it:': 'and rename it:',
'App does not exist or you are not authorized': 'App does not exist or you are not authorized',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'appadmin is disabled because insecure channel',
'Application': 'Application',
'application "%s" uninstalled': 'application "%s" uninstalled',
'Application cannot be generated in demo mode': 'Application cannot be generated in demo mode',
'application compiled': 'application compiled',
'Application exists already': 'Application exists already',
'application is compiled and cannot be designed': 'application is compiled and cannot be designed',
'Application name:': 'Application name:',
'Application updated via git pull': 'Application updated via git pull',
'are not used': 'are not used',
'are not used yet': 'are not used yet',
'Are you sure you want to delete file "%s"?': 'Are you sure you want to delete file "%s"?',
'Are you sure you want to delete plugin "%s"?': 'Are you sure you want to delete plugin "%s"?',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Are you sure you want to uninstall application "%s"': 'Are you sure you want to uninstall application "%s"',
'Are you sure you want to uninstall application "%s"?': 'Are you sure you want to uninstall application "%s"?',
'Are you sure you want to upgrade web2py now?': 'Are you sure you want to upgrade web2py now?',
'Are you sure?': 'Are you sure?',
'arguments': 'arguments',
'at char %s': 'at char %s',
'at line %s': 'at line %s',
'ATTENTION:': 'ATTENTION:',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.',
'ATTENTION: you cannot edit the running application!': 'ATTENTION: you cannot edit the running application!',
'Autocomplete Python Code': 'Autocomplete Python Code',
'Available databases and tables': 'Available databases and tables',
'Available Databases and Tables': 'Available Databases and Tables',
'back': 'back',
'Back to the plugins list': 'Back to the plugins list',
'Back to wizard': 'Back to wizard',
'Basics': 'Basics',
'Begin': 'Begin',
'breakpoint': 'breakpoint',
'Breakpoints': 'Breakpoints',
'breakpoints': 'breakpoints',
'Bulk Register': 'Bulk Register',
'Bulk Student Registration': 'Bulk Student Registration',
'Cache': 'Cache',
'cache': 'cache',
'Cache Cleared': 'Cache Cleared',
'Cache Keys': 'Cache Keys',
'cache, errors and sessions cleaned': 'cache, errors and sessions cleaned',
'can be a git repo': 'can be a git repo',
'Cancel': 'Cancel',
'Cannot be empty': 'Cannot be empty',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Cannot compile: there are errors in your app. Debug it, correct errors and try again.',
'Cannot compile: there are errors in your app:': 'Cannot compile: there are errors in your app:',
'cannot create file': 'cannot create file',
'cannot upload file "%(filename)s"': 'cannot upload file "%(filename)s"',
'Change Admin Password': 'Change Admin Password',
'Change admin password': 'change admin password',
'change editor settings': 'change editor settings',
'Changelog': 'Changelog',
'check all': 'check all',
'Check for upgrades': 'check for upgrades',
'Check to d | elete': 'Check to delete',
'Checking for upgrades...': 'Checking for upgrades...',
'Clean': 'clean',
'Clear': 'Clear',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'click her | e for online examples': 'щракни тук за онлайн примери',
'click here for the administrative interface': 'щракни тук за административния интерфейс',
'Click row to expand traceback': 'Click row to expand traceback',
'Click row to view a ticket': 'Click row to view a ticket',
'click to check for upgrades': 'click to check for upgrades',
'code': 'code',
'Code listing': 'Code listing',
'collapse/expand all': 'collapse/expand all',
'Command': 'Command',
'Comment:': 'Comment:',
'Commit': 'Commit',
'Commit form': 'Commit form',
'Committed files': 'Committed files',
'Compile': 'compile',
'Compile (all or nothing)': 'Compile (all or nothing)',
'Compile (skip failed views)': 'Compile (skip failed views)',
'compiled application removed': 'compiled application removed',
'Condition': 'Condition',
'continue': 'continue',
'Controllers': 'Controllers',
'controllers': 'controllers',
'Count': 'Count',
'Create': 'create',
'create file with filename:': 'create file with filename:',
'create new application:': 'create new application:',
'Create new simple application': 'Create new simple application',
'Create/Upload': 'Create/Upload',
'created by': 'created by',
'Created by:': 'Created by:',
'Created On': 'Created On',
'Created on:': 'Created on:',
'crontab': 'crontab',
'Current request': 'Current request',
'Current response': 'Current response',
'Current session': 'Current session',
'currently running': 'currently running',
'currently saved or': 'currently saved or',
'data uploaded': 'данните бяха качени',
'Database': 'Database',
'database': 'database',
'Database %s select': 'Database %s select',
'database %s select': 'database %s select',
'Database administration': 'Database administration',
'database administration': 'database administration',
'Database Administration (appadmin)': 'Database Administration (appadmin)',
'Date and Time': 'Date and Time',
'db': 'дб',
'Debug': 'Debug',
'defines tables': 'defines tables',
'Delete': 'Delete',
'delete': 'delete',
'delete all checked': 'delete all checked',
'delete plugin': 'delete plugin',
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you |
mruffalo/sysv_ipc | extras/memory_leak_tests.py | Python | bsd-3-clause | 20,971 | 0.000763 | # Python modules
import gc
import os
import subprocess
import random
import re
import sys
# My module
import sysv_ipc
# Flags to skip a whole family of tests while debugging a specific IPC type.
SKIP_SEMAPHORE_TESTS = False
SKIP_SHARED_MEMORY_TESTS = False
SKIP_MESSAGE_QUEUE_TESTS = False
# TEST_COUNT = 10
# Iterations per test; large enough that a real per-iteration leak becomes
# visible in the before/after memory snapshots.
TEST_COUNT = 1024 * 100
PY_MAJOR_VERSION = sys.version_info[0]
# ps output looks like this:
#   RSZ      VSZ
#   944    75964
# The pattern must be a raw string: \s, \S and \d are regex escapes, and in a
# plain string literal they trigger invalid-escape warnings on modern Pythons.
ps_output_regex = re.compile(r"""
    ^
    \s*    # whitespace before first heading
    \S*    # first heading (e.g. RSZ)
    \s+    # whitespace between headings
    \S*    # second heading (e.g VSZ)
    \s+    # newline and whitespace before first numeric value
    (\d+)  # first value
    \s+    # whitespace between values
    (\d+)  # second value
    \s*    # trailing whitespace if any
    $
    """, re.MULTILINE | re.VERBOSE)
# On OS X, Ubuntu and OpenSolaris, both create/destroy tests show some growth
# in rsz and vsz. (e.g. 3248 versus 3240 -- I guess these are measured
# in kilobytes?) When I increased the number of iterations by a factor of 10,
# the delta didn't change, which makes me think it isn't an actual leak
# but just some memory consumed under normal circumstances.
def random_string(length):
    """Return a random lowercase ASCII string of the given length.

    Draws characters with replacement, so any length is supported; the
    previous random.sample() implementation raised ValueError for
    length > 26 and never repeated a character.
    """
    alphabet = "abcdefghijklmnopqrstuvwxyz"
    return ''.join(random.choice(alphabet) for _ in range(length))
def print_mem_before():
    """Print the current RSS/VSZ snapshot before a test runs."""
    rss, vsz = get_memory_usage()
    print("Memory usage before, RSS = %d, VSZ = %d" % (rss, vsz))
def print_mem_after():
    """Force a GC pass, report uncollectable garbage, then print memory usage."""
    gc.collect()
    # gc.garbage holds objects the collector found but could not free.
    if gc.garbage:
        print("Leftover garbage:" + str(gc.garbage))
    else:
        print("Python's GC reports no leftover garbage")
    print("Memory usage after, RSS = %d, VSZ = %d" % get_memory_usage())
def get_memory_usage():
    """Return (rss, vsz) of the current process as integers, via `ps`."""
    # `ps` has lots of format options that vary from OS to OS, and some of
    # those options have aliases (e.g. vsz, vsize). The ones used below
    # appear to be the most portable.
    proc = subprocess.Popen(["ps", "-p", str(os.getpid()), "-o", "rss,vsz"],
                            stdout=subprocess.PIPE)
    output = proc.communicate()[0]
    # Output looks like this:
    #   RSZ      VSZ
    #   944    75964
    if PY_MAJOR_VERSION > 2:
        output = output.decode(sys.getfilesystemencoding())
    groups = ps_output_regex.match(output).groups()
    return int(groups[0]), int(groups[1])
# Assert manual control over the garbage collector
gc.disable()
if SKIP_SEMAPHORE_TESTS:
print("Skipping semaphore tests")
else:
print("Running semaphore create/destroy test...")
print_mem_before()
for i in range(1, TEST_COUNT):
sem = sysv_ipc.Semaphore(None, sysv_ipc.IPC_CREX)
sem.remove()
print_mem_after()
print("Running semaphore acquire/release test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
sem.release()
sem.acquire()
sem.remove()
print_mem_after()
print("Running semaphore Z test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
sem.Z()
sem.remove()
print_mem_after()
if sysv_ipc.SEMAPHORE_TIMEOUT_SUPPORTED:
print("Running semaphore acquire timeout test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
try:
sem.acquire(.001)
except sysv_ipc.BusyError:
pass
sem.remove()
print_mem_after()
else:
print("Skipping semaphore acquire timeout test (not supported on this platform)")
if sysv_ipc.SEMAPHORE_TIMEOUT_SUPPORTED:
print("Running semaphore Z timeout test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
# Release the semaphore to make the value is non-zero so that .Z()
# has to wait for the timeout.
sem.release()
for i in range(1, TEST_COUNT):
try:
sem.Z(.001)
except sysv_ipc.BusyError:
pass
sem.remove()
print_mem_after()
else:
print("Skipping semaphore Z timeout test (not supported on this platform)")
print("Running semaphore key read test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
foo = sem.key
sem.remove()
print_mem_after()
print("Running semaphore id read test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
foo = sem.id
sem.remove()
print_mem_after()
print("Running semaphore value read test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
foo = sem.value
sem.remove()
print_mem_after()
print("Running semaphore value write test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
value = random.randint(0, sysv_ipc.SEMAPHORE_VALUE_MAX)
sem.value = value
assert(sem.value == value)
sem.remove()
print_mem_after()
print("Running semaphore undo read test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
foo = sem.undo
sem.remove()
print_mem_after()
print("Running semaphore undo write test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
undo = random.randint(0, 1)
sem.undo = undo
assert(sem.undo == undo)
sem.remove()
print_mem_after()
print("Running semaphore block read test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
foo = sem.block
sem.remove()
print_mem_after()
print("Running semaphore block write test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
block = random.randint(0, 1)
sem.block = block
assert(sem.block == block)
sem.remove()
print_mem_after()
print("Running semaphore mode read test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
foo = sem.mode
sem.remove()
print_mem_after()
print("Running semaphore mode write test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
# octal 600 = decimal 384
sem.mode = 384
sem.remove()
print_mem_after()
print("Running semaphore uid read test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
foo = sem.uid
sem.remove()
print_mem_after()
print("Running semaphore uid write test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
uid = sem.uid
for i in range(1, TEST_COUNT):
sem.uid = uid
sem.remove()
print_mem_after()
print("Running semaphore gid read test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
foo = sem.gid
sem.remove()
print_mem_after()
print("Running semaphore gid write test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
gid = sem.gid
for i in range(1, TEST_COUNT):
sem.gid = gid
sem.remove()
print_mem_after()
print("Running semaphore cuid read test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
foo = sem.cuid
sem.remove()
print_mem_after()
print("Running semaphore cgid read test...")
print_mem_before()
sem = sysv_ipc.Semaphore(42, sysv_ipc.IPC_CREX)
for i in range(1, TEST_COUNT):
foo = sem.cgid
sem.remove()
print_mem_after()
print("Running semaphore last_pid read test...")
print_mem_before()
|
miguelsdc/nao_robot | nao_driver/scripts/nao_behaviors.py | Python | bsd-3-clause | 5,440 | 0.0125 | #!/usr/bin/env python
#
# ROS node to control NAO's built-in and user-installed behaviors using NaoQI
# Tested with NaoQI: 1.12
#
# Copyright (c) 2012, 2013 Miguel Sarabia
# Imperial College London
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# # Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# # Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# # Neither the name of the Imperial College London nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import threading
import roslib
roslib.load_manifest('nao_driver')
import rospy
import actionlib
from nao_driver import *
from nao_msgs.msg import(
RunBehaviorAction,
RunBehaviorResult
)
from nao_msgs.srv import (
GetInstalledBehaviors,
GetInstalledBehaviorsResponse,
)
class NaoBehaviors(NaoNode):
    """ROS node exposing NaoQI's ALBehaviorManager.

    Publishes a 'get_installed_behaviors' service and a 'run_behavior'
    actionlib server; a running behavior can be preempted, which stops
    it on the robot.
    """
    #This should be treated as a constant
    NODE_NAME = "nao_behaviors"
    def __init__( self ):
        #Initialisation
        NaoNode.__init__( self )
        rospy.init_node( self.NODE_NAME )
        #We need this variable to be able to call stop behavior when preempted
        self.behavior = None
        # Guards self.behavior across the actionlib execute/preempt threads.
        self.lock = threading.RLock()
        #Proxy for listingBehaviors and stopping them
        self.behaviorProxy = self.getProxy( "ALBehaviorManager" )
        # Register ROS services
        self.getInstalledBehaviorsService = rospy.Service(
            "get_installed_behaviors",
            GetInstalledBehaviors,
            self.getInstalledBehaviors
            )
        #Prepare and start actionlib server
        self.actionlibServer = actionlib.SimpleActionServer(
            "run_behavior",
            RunBehaviorAction,
            self.runBehavior,
            False
            )
        self.actionlibServer.register_preempt_callback( self.stopBehavior )
        self.actionlibServer.start()
    def getInstalledBehaviors( self, request ):
        """Service handler: list the behaviors installed on the robot."""
        result = self.behaviorProxy.getInstalledBehaviors()
        return GetInstalledBehaviorsResponse( result )
    def runBehavior( self, request ):
        """Actionlib execute callback: run the named behavior to completion."""
        #Note this function is executed from a different thread
        rospy.logdebug(
            "Execution of behavior: '{}' requested".format(request.behavior))
        #Check requested behavior is installed
        if not request.behavior in self.behaviorProxy.getInstalledBehaviors():
            error_msg = "Behavior '{}' not installed".format(request.behavior)
            self.actionlibServer.set_aborted(text = error_msg)
            rospy.logdebug(error_msg)
            return
        with self.lock:
            # Check first if we're already preempted, and return if so
            if self.actionlibServer.is_preempt_requested():
                self.actionlibServer.set_preempted()
                rospy.logdebug("Behavior execution preempted before it started")
                return
            #Save name of behavior to be run
            self.behavior = request.behavior
            #Execute behavior (on another thread so we can release lock)
            taskID = self.behaviorProxy.post.runBehavior( self.behavior )
        # Wait for task to complete (or be preempted); the lock is released
        # here so stopBehavior() can run in the meantime.
        rospy.logdebug("Waiting for behavior execution to complete")
        self.behaviorProxy.wait( taskID, 0 )
        #Evaluate results
        with self.lock:
            self.behavior = None
            # If preempted, report so
            if self.actionlibServer.is_preempt_requested() :
                self.actionlibServer.set_preempted()
                rospy.logdebug("Behavior execution preempted")
            # Otherwise, set as succeeded
            else:
                self.actionlibServer.set_succeeded()
                rospy.logdebug("Behavior execution succeeded")
    def stopBehavior( self ):
        """Preempt callback: stop the currently running behavior, if any."""
        with self.lock:
            if self.behavior and self.actionlibServer.is_active() :
                self.behaviorProxy.stopBehavior( self.behavior )
if __name__ == '__main__':
    # Create the node and block in rospy.spin() until ROS shuts down.
    behaviors_node = NaoBehaviors()
    rospy.loginfo(behaviors_node.NODE_NAME + " running...")
    rospy.spin()
    rospy.loginfo(behaviors_node.NODE_NAME + " stopped.")
    exit(0)
|
def count(S, m, n):
    """Return the number of ways to make amount n from the first m coin
    denominations in S (unlimited supply of each coin).

    Uses a 1-D DP table (O(n) space instead of the previous O(n*m) 2-D
    table); also returns a correct 0/1 for m == 0 instead of raising
    IndexError on table[n][-1].
    """
    # ways[a] == number of ways to form amount a with the coins seen so far.
    ways = [0] * (n + 1)
    ways[0] = 1  # one way to make 0: use no coins
    for j in range(m):
        coin = S[j]
        # Forward iteration allows each coin to be reused any number of times.
        for amount in range(coin, n + 1):
            ways[amount] += ways[amount - coin]
    return ways[n]
# Read "n m" (target amount and number of coin kinds) from the first line.
n,m = [int(a) for a in raw_input().strip().split(' ')]
# Read the m coin denominations from the second line.
s = [int(a) for a in raw_input().strip().split(' ')]
print count(s, m, n)
sgammon/libcloud | libcloud/compute/drivers/gandi.py | Python | apache-2.0 | 20,171 | 0.00005 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Gandi driver for compute
"""
import sys
from datetime import datetime
from libcloud.common.gandi import BaseGandiDriver, GandiException,\
NetworkInterface, IPAddress, Disk
from libcloud.compute.base import StorageVolume
from libcloud.compute.types import NodeState, Provider
from libcloud.compute.base import Node, NodeDriver
from libcloud.compute.base import NodeSize, NodeImage, NodeLocation
# Mapping from Gandi API vm state strings to libcloud NodeState values.
# Unlisted states fall back to NodeState.UNKNOWN at the lookup site.
NODE_STATE_MAP = {
    'running': NodeState.RUNNING,
    'halted': NodeState.TERMINATED,
    'paused': NodeState.TERMINATED,
    'locked': NodeState.TERMINATED,
    'being_created': NodeState.PENDING,
    'invalid': NodeState.UNKNOWN,
    'legally_locked': NodeState.PENDING,
    'deleted': NodeState.TERMINATED
}
# Flat hourly price (USD) applied to every node size.
NODE_PRICE_HOURLY_USD = 0.02
# Predefined instance sizes. Units appear to be: memory in MB, disk in GB,
# bandwidth in kbit/s -- presumed from typical Gandi usage; confirm against
# the hosting API documentation.
INSTANCE_TYPES = {
    'small': {
        'id': 'small',
        'name': 'Small instance',
        'cpu': 1,
        'memory': 256,
        'disk': 3,
        'bandwidth': 10240,
    },
    'medium': {
        'id': 'medium',
        'name': 'Medium instance',
        'cpu': 1,
        'memory': 1024,
        'disk': 20,
        'bandwidth': 10240,
    },
    'large': {
        'id': 'large',
        'name': 'Large instance',
        'cpu': 2,
        'memory': 2048,
        'disk': 50,
        'bandwidth': 10240,
    },
    'x-large': {
        'id': 'x-large',
        'name': 'Extra Large instance',
        'cpu': 4,
        'memory': 4096,
        'disk': 100,
        'bandwidth': 10240,
    },
}
class GandiNodeDriver(BaseGandiDriver, NodeDriver):
"""
Gandi node driver
"""
api_name = 'gandi'
friendly_name = 'Gandi.net'
website = 'http://www.gandi.net/'
country = 'FR'
type = Provider.GANDI
# TODO : which features to enable ?
features = {}
    def __init__(self, *args, **kwargs):
        """
        @inherits: :class:`NodeDriver.__init__`
        """
        # NOTE(review): super(BaseGandiDriver, self) skips BaseGandiDriver in
        # the MRO and dispatches straight to its parent -- this is only
        # equivalent to the usual super(GandiNodeDriver, self) call if
        # BaseGandiDriver defines no __init__ of its own; confirm.
        super(BaseGandiDriver, self).__init__(*args, **kwargs)
def _resource_info(self, type, id):
try:
obj = self.connection.request('hosting.%s.info' % type, int(id))
return obj.object
except Exception:
e = sys.exc_info()[1]
raise GandiException(1003, e)
return None
    def _node_info(self, id):
        """Shortcut for _resource_info() on a 'vm' resource."""
        return self._resource_info('vm', id)
    def _volume_info(self, id):
        """Shortcut for _resource_info() on a 'disk' resource."""
        return self._resource_info('disk', id)
# Generic methods for driver
def _to_node(self, vm):
return Node(
id=vm['id'],
name=vm['hostname'],
state=NODE_STATE_MAP.get(
vm['state'],
NodeState.UNKNOWN
),
public_ips=vm.get('ips', []),
private_ips=[],
driver=self,
extra={
'ai_active': vm.get('ai_active'),
'datacenter_id': vm.get('datacenter_id'),
| 'description': vm.get('description')
}
)
def _to_nodes(self, vms):
return [self._to_node(v) for v in vms]
def _to_volume(self, disk):
extra = {'can_snapshot': disk['can_snapshot']}
return StorageVolume(
id=disk['id'],
name=disk['name'],
size=int(disk['size']),
driver=self,
extra=extra)
def _to_volumes(self, disks):
return [self._t | o_volume(d) for d in disks]
def list_nodes(self):
vms = self.connection.request('hosting.vm.list').object
ips = self.connection.request('hosting.ip.list').object
for vm in vms:
vm['ips'] = []
for ip in ips:
if vm['ifaces_id'][0] == ip['iface_id']:
ip = ip.get('ip', None)
if ip:
vm['ips'].append(ip)
nodes = self._to_nodes(vms)
return nodes
def reboot_node(self, node):
op = self.connection.request('hosting.vm.reboot', int(node.id))
self._wait_operation(op.object['id'])
vm = self._node_info(int(node.id))
if vm['state'] == 'running':
return True
return False
def destroy_node(self, node):
vm = self._node_info(node.id)
if vm['state'] == 'running':
# Send vm_stop and wait for accomplish
op_stop = self.connection.request('hosting.vm.stop', int(node.id))
if not self._wait_operation(op_stop.object['id']):
raise GandiException(1010, 'vm.stop failed')
# Delete
op = self.connection.request('hosting.vm.delete', int(node.id))
if self._wait_operation(op.object['id']):
return True
return False
    def deploy_node(self, **kwargs):
        """
        deploy_node is not implemented for gandi driver

        :raises NotImplementedError: always
        """
        raise NotImplementedError(
            'deploy_node not implemented for gandi driver')
def create_node(self, **kwargs):
"""
Create a new Gandi node
:keyword name: String with a name for this new node (required)
:type name: ``str``
:keyword image: OS Image to boot on node. (required)
:type image: :class:`NodeImage`
:keyword location: Which data center to create a node in. If empty,
undefined behavior will be selected. (optional)
:type location: :class:`NodeLocation`
:keyword size: The size of resources allocated to this node.
(required)
:type size: :class:`NodeSize`
:keyword login: user name to create for login on machine (required)
:type login: ``str``
:keyword password: password for user that'll be created (required)
:type password: ``str``
:keyword inet_family: version of ip to use, default 4 (optional)
:type inet_family: ``int``
:rtype: :class:`Node`
"""
if kwargs.get('login') is None or kwargs.get('password') is None:
raise GandiException(
1020, 'login and password must be defined for node creation')
location = kwargs.get('location')
if location and isinstance(location, NodeLocation):
dc_id = int(location.id)
else:
raise GandiException(
1021, 'location must be a subclass of NodeLocation')
size = kwargs.get('size')
if not size and not isinstance(size, NodeSize):
raise GandiException(
1022, 'size must be a subclass of NodeSize')
# If size name is in INSTANCE_TYPE we use new rating model
instance = INSTANCE_TYPES.get(size.id)
cores = instance['cpu'] if instance else int(size.id)
src_disk_id = int(kwargs['image'].id)
disk_spec = {
'datacenter_id': dc_id,
'name': 'disk_%s' % kwargs['name']
}
vm_spec = {
'datacenter_id': dc_id,
'hostname': kwargs['name'],
'login': kwargs['login'],
'password': kwargs['password'], # TODO : use NodeAuthPassword
'memory': int(size.ram),
'cores': cores,
'bandwidth': int(size.bandwidth),
'ip_version': kwargs.get('inet_family', 4),
}
# Call create_from helper api. Return 3 operations : disk_create,
# iface_cr |
Ebag333/Pyfa | eos/effects/weaponupgradescpuneedbonuspostpercentcpulocationshipmodulesrequiringmissilelauncheroperation.py | Python | gpl-3.0 | 517 | 0.003868 | # weaponUpgradesCpuNeedBonusPostPercentCpuLocatio | nShipModu | lesRequiringMissileLauncherOperation
#
# Used by:
# Implants named like: Zainou 'Gnome' Launcher CPU Efficiency LE (6 of 6)
# Skill: Weapon Upgrades
type = "passive"
def handler(fit, container, context):
    """Apply the CPU-need bonus to modules requiring Missile Launcher Operation.

    When applied as a skill the bonus scales with the skill level;
    otherwise (e.g. implants) it applies once at level 1.
    """
    skill_level = container.level if "skill" in context else 1
    cpu_bonus = container.getModifiedItemAttr("cpuNeedBonus") * skill_level
    fit.modules.filteredItemBoost(
        lambda mod: mod.item.requiresSkill("Missile Launcher Operation"),
        "cpu", cpu_bonus)
|
overfl0/Bulletproof-Arma-Launcher | src/utils/requests_wrapper.py | Python | gpl-3.0 | 2,188 | 0.001828 | # Bulletproof Arma Launcher
# Copyright (C) 2017 Lukasz Taczuk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
from __future__ import unicode_literals
import requests
from kivy.logger import Logger
class DownloadException(Exception):
    """Raised when downloading data from the server fails for any reason."""
    pass
def download_url(*args, **kwargs):
    """Fetch a URL via _download_url, retrying up to three times.

    The exception from the final attempt is re-raised unchanged.
    """
    retries_total = 3
    for retries_left in reversed(range(retries_total)):
        try:
            return _download_url(*args, **kwargs)
        except Exception as ex:
            if retries_left <= 0:
                raise
            Logger.error('download_url: retrying the download after receiving an exception: {}'.format(
                repr(ex)))
def _download_url(domain, *args, **kwargs):
    """
    Helper function that adds our error handling to requests.get.

    :param domain: human-readable domain used in error messages; a generic
                   placeholder is substituted when falsy
    :raises DownloadException: on any network-level failure
    """
    if not domain:
        domain = "the domain"
    try:
        res = requests.get(*args, **kwargs)
    except requests.exceptions.ConnectionError as ex:
        # Dig out the low-level errno (11004 == WSAHOST_NOT_FOUND, a Windows
        # DNS failure). Only the fragile introspection is guarded here: the
        # previous code raised the DNS-specific DownloadException *inside*
        # the same try block, so its own 'except Exception' swallowed it and
        # the DNS message could never reach the caller.
        try:
            reason_errno = ex.message.reason.errno
        except Exception:
            reason_errno = None
        if reason_errno == 11004:
            raise DownloadException('Could not resolve {}. Check your DNS settings.'.format(domain))
        raise DownloadException('Could not connect to the server.')
    except requests.exceptions.Timeout:
        raise DownloadException('The server timed out while downloading data from the server.')
    except requests.exceptions.RequestException:
        raise DownloadException('Could not download data from the server.')
    return res
|
mrosenstihl/PulsePrograms | make_clean.py | Python | bsd-2-clause | 679 | 0.041237 | #!/usr/bin/env python
import os
print "Cleaning directory %s"%(os.path.realpath('.'))
rubbish_filetypes = ('h5','hdf','.dat','.pyc', '.png', '.pdf', '.tar.gz')
rubb | ish_startnames = ('job','logdata','Amplitude','Real','spool','pool')
choosing = raw_input("Continue [yes/anykey_for_NO] ?")
if choosing == 'yes':
print "Cleaning directory"
for root,dir,files in os.walk('.'):
if dir != ".bzr":
print dir
for file in files:
print file,
if file.endswith(rubbish_filetypes) or f | ile.startswith(rubbish_startnames):
delete_file = os.path.join(root,file)
os.remove(delete_file)
print "...delete"
else:
print "...skipped"
print "finnished"
|
rven/odoo | addons/sale_management/models/sale_order.py | Python | agpl-3.0 | 12,391 | 0.00347 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import timedelta
from odoo import api, fields, mo | dels, _
from odoo.exceptions import UserError, ValidationError
class SaleOrder(models.Model):
_inherit = 'sale.order'
@api.model
def default_get(self, fields_list):
defa | ult_vals = super(SaleOrder, self).default_get(fields_list)
if "sale_order_template_id" in fields_list and not default_vals.get("sale_order_template_id"):
company_id = default_vals.get('company_id', False)
company = self.env["res.company"].browse(company_id) if company_id else self.env.company
default_vals['sale_order_template_id'] = company.sale_order_template_id.id
return default_vals
sale_order_template_id = fields.Many2one(
'sale.order.template', 'Quotation Template',
readonly=True, check_company=True,
states={'draft': [('readonly', False)], 'sent': [('readonly', False)]},
domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
sale_order_option_ids = fields.One2many(
'sale.order.option', 'order_id', 'Optional Products Lines',
copy=True, readonly=True,
states={'draft': [('readonly', False)], 'sent': [('readonly', False)]})
    @api.constrains('company_id', 'sale_order_option_ids')
    def _check_optional_product_company_id(self):
        """Forbid optional products that belong to a different company."""
        for order in self:
            # Companies of all optional products; company-less products are
            # allowed everywhere and drop out of this recordset.
            companies = order.sale_order_option_ids.product_id.company_id
            if companies and companies != order.company_id:
                bad_products = order.sale_order_option_ids.product_id.filtered(lambda p: p.company_id and p.company_id != order.company_id)
                raise ValidationError(_(
                    "Your quotation contains products from company %(product_company)s whereas your quotation belongs to company %(quote_company)s. \n Please change the company of your quotation or remove the products from other companies (%(bad_products)s).",
                    product_company=', '.join(companies.mapped('display_name')),
                    quote_company=order.company_id.display_name,
                    bad_products=', '.join(bad_products.mapped('display_name')),
                ))
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
if self.sale_order_template_id and self.sale_order_template_id.number_of_days > 0:
default = dict(default or {})
default['validity_date'] = fields.Date.context_today(self) + timedelta(self.sale_order_template_id.number_of_days)
return super(SaleOrder, self).copy(default=default)
@api.onchange('partner_id')
def onchange_partner_id(self):
super(SaleOrder, self).onchange_partner_id()
template = self.sale_order_template_id.with_context(lang=self.partner_id.lang)
self.note = template.note or self.note
def _compute_line_data_for_template_change(self, line):
return {
'display_type': line.display_type,
'name': line.name,
'state': 'draft',
}
def _compute_option_data_for_template_change(self, option):
price = option.product_id.lst_price
discount = 0
if self.pricelist_id:
pricelist_price = self.pricelist_id.with_context(uom=option.uom_id.id).get_product_price(option.product_id, 1, False)
if self.pricelist_id.discount_policy == 'without_discount' and price:
discount = max(0, (price - pricelist_price) * 100 / price)
else:
price = pricelist_price
return {
'product_id': option.product_id.id,
'name': option.name,
'quantity': option.quantity,
'uom_id': option.uom_id.id,
'price_unit': price,
'discount': discount
}
def update_prices(self):
self.ensure_one()
res = super().update_prices()
for line in self.sale_order_option_ids:
line.price_unit = self.pricelist_id.get_product_price(line.product_id, line.quantity, self.partner_id, uom_id=line.uom_id.id)
return res
@api.onchange('sale_order_template_id')
def onchange_sale_order_template_id(self):
if not self.sale_order_template_id:
self.require_signature = self._get_default_require_signature()
self.require_payment = self._get_default_require_payment()
return
template = self.sale_order_template_id.with_context(lang=self.partner_id.lang)
# --- first, process the list of products from the template
order_lines = [(5, 0, 0)]
for line in template.sale_order_template_line_ids:
data = self._compute_line_data_for_template_change(line)
if line.product_id:
price = line.product_id.lst_price
discount = 0
if self.pricelist_id:
pricelist_price = self.pricelist_id.with_context(uom=line.product_uom_id.id).get_product_price(line.product_id, 1, False)
if self.pricelist_id.discount_policy == 'without_discount' and price:
discount = max(0, (price - pricelist_price) * 100 / price)
else:
price = pricelist_price
data.update({
'price_unit': price,
'discount': discount,
'product_uom_qty': line.product_uom_qty,
'product_id': line.product_id.id,
'product_uom': line.product_uom_id.id,
'customer_lead': self._get_customer_lead(line.product_id.product_tmpl_id),
})
order_lines.append((0, 0, data))
self.order_line = order_lines
self.order_line._compute_tax_id()
# then, process the list of optional products from the template
option_lines = [(5, 0, 0)]
for option in template.sale_order_template_option_ids:
data = self._compute_option_data_for_template_change(option)
option_lines.append((0, 0, data))
self.sale_order_option_ids = option_lines
if template.number_of_days > 0:
self.validity_date = fields.Date.context_today(self) + timedelta(template.number_of_days)
self.require_signature = template.require_signature
self.require_payment = template.require_payment
if template.note:
self.note = template.note
def action_confirm(self):
res = super(SaleOrder, self).action_confirm()
for order in self:
if order.sale_order_template_id and order.sale_order_template_id.mail_template_id:
self.sale_order_template_id.mail_template_id.send_mail(order.id)
return res
def get_access_action(self, access_uid=None):
""" Instead of the classic form view, redirect to the online quote if it exists. """
self.ensure_one()
user = access_uid and self.env['res.users'].sudo().browse(access_uid) or self.env.user
if not self.sale_order_template_id or (not user.share and not self.env.context.get('force_website')):
return super(SaleOrder, self).get_access_action(access_uid)
return {
'type': 'ir.actions.act_url',
'url': self.get_portal_url(),
'target': 'self',
'res_id': self.id,
}
class SaleOrderLine(models.Model):
_inherit = "sale.order.line"
_description = "Sales Order Line"
sale_order_option_ids = fields.One2many('sale.order.option', 'line_id', 'Optional Products Lines')
# Take the description on the order template if the product is present in it
@api.onchange('product_id')
def product_id_change(self):
domain = super(SaleOrderLine, self).product_id_change()
if self.product_id and self.order_id.sale_order_template_id:
for line in self.order_id.sale_order_template_id.sale_order_template_line_ids:
if line.product_id == self.product_id:
self.name = line.with_context( |
fdemian/Morpheus | api/LoadOptions.py | Python | bsd-2-clause | 3,767 | 0.00876 | from tornado.options import define, options
def load_options(config_file):
# General application settings
define('port', type=int, group='application', help='Port to run the application from.')
define('compress_response', type=bool, group='application', help='Whether or not to compress the response.')
define('notifications_enabled', type=bool, group='application', help='Whether or not to enable notifications.')
# Security options
define('serve_https', type=bool, group='application', help='Whether to serve the application via HTTPS or not.')
define('ssl_cert', type=str, group='application', help='Path to the SSL certificate.')
define('ssl_key', type=str, group='application', help='Path to the SSL key.')
define('cookie_secret', type=str, group='application', help='Cookie signing secret.')
# Mail settings
define('from_address', type=str, group='application', help='Address to send the confirmation mail from.')
define(' | mail_template', type=str, group='application', help='Locat | ion of the mail template (relative to /static).')
define('mail_subject', type=str, group='application', help='Subject of the confirmation mail.')
define('mail_host', type=str, group='application', help='Host used to send emails.')
define('mail_port', type=int, group='application', help='Port used to send emails.')
# JWT settings.
define('jwt_secret', type=str, group='application', help='Secret to use when encoding JWT Token.')
define('jwt_algorithm', type=str, group='application', help='Algorithm to use when encoding JWT Token.')
define('jwt_expiration_seconds', type=int, group='application', help='Time before the JWT token expires (seconds).')
# Oauth settings
define('facebook_api_key', type=str, group='application', help='Facebook API key.')
define('facebook_api_secret', type=str, group='application', help='Facebook API key.')
define('facebook_redirect_url', type=str, group='application', help='Facebook redirect URL.')
define('facebook_icon_url', type=str, group='application', help='Icon URL.')
define('google_oauth_key', type=str, group='application', help='Google Access Token.')
define('google_oauth_secret', type=str, group='application', help='Google secret.')
define('google_discovery_url', type=str, group='application', help='Discovery document for google.')
define('google_redirect_url', type=str, group='application', help='Facebook redirect URL.')
define('google_icon_url', type=str, group='application', help='Icon URL.')
define('github_client_id', type=str, group='application', help='Github client.')
define('github_client_secret', type=str, group='application', help='Github secrets.')
define('github_redirect_url', type=str, group='application', help='Facebook redirect URL.')
define('github_icon_url', type=str, group='application', help='Icon URL.')
define('yahoo_client_id', type=str, group='application', help='Facebook redirect URL.')
define('yahoo_client_secret', type=str, group='application', help='Facebook redirect URL.')
define('yahoo_redirect_url', type=str, group='application', help='Facebook redirect URL.')
define('yahoo_icon_url', type=str, group='application', help='Icon URL.')
# define('twitter_consumer_key', type=str, group='application', help='Twitter API key.')
# define('twitter_consumer_secret', type=str, group='application', help='Twitter API secret.')
# define('twitter_access_token', type=str, group='application', help='Twitter Access Token.')
# define('twitter_access_token_secret', type=str, group='application', help='Twitter Access Token secret.')
options.parse_config_file(config_file)
return options.group_dict('application')
|
AndrewNeo/hybridius | forms.py | Python | mit | 1,328 | 0.024849 | from wtforms import Form, BooleanField, TextField, PasswordField, validators
import re
illegal_shortcode_names = ["admin"]
legal_shortcode_regex = "^[a-zA-Z0-9\.\_\-\~\!\$\&\'\(\)\*\,\;\=]*$"
def shortcode_validator(form, field):
if field.data is not None:
if (field.data in illegal_shortcode_names):
raise validators.ValidationError("Can't use a protected name.")
matchObject = re.match(legal_shortcode_regex, field.data)
if not matchObject:
raise validators.ValidationError("Can't use illegal characters.")
class AddForm(Form):
is_random = BooleanField("Shortcode is random", default=True)
shortcode = TextField("Custom shortcode", [
validators.Length(max=20),
| shortcode_validator
])
target_url = TextField("Destination | ", [
validators.Required(),
validators.Length(max=1024, message="Max limit 1024 characters.")
])
class LoginForm(Form):
username = TextField("Username", [validators.Required()])
password = PasswordField("Password", [validators.Required()])
login_validator = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if not self.login_validator(self.username.data, self.password.data):
self.username.errors.append("Invalid login.")
return False
return True
|
milankl/swm | calc/process/var_subset_last.py | Python | gpl-3.0 | 1,070 | 0.018692 | ## READ VARIABLE FROM SEVERAL NCFILES and store subset of it as NPY
from __future__ import print_function
path = '/network/aopp/cirrus/pred/kloewer/swm_bf_cntrl/data/'
#path = '/network/aopp/cirrus/pred/kloewer/swm_back_ronew/'
import os; os.chdir(path) # change working directory
import numpy as np
from netCDF4 import Dataset
# OPTIONS
runfolder = [0,6]
s = 40 # read s-th last time step
for r in runfolder:
print(('Stor | e last time step from run %i') % r)
## read data
runpath = path+'run%04i' % r
ncu = Dataset(runpath+'/u.nc')
u = ncu['u'][-s,:,:]
ncu.close()
print('u read.')
np.save(runpath+'/u_last.npy',u)
del u
ncv = Dataset(runpath+'/v.nc')
v = ncv['v'][-s,:,:] |
ncv.close()
print('v read.')
np.save(runpath+'/v_last.npy',v)
del v
nceta = Dataset(runpath+'/eta.nc')
eta = nceta['eta'][-s,:,:]
#time = nceta['t'][::sub] # in seconds
#t = time / 3600. / 24. # in days
nceta.close()
print('eta read.')
np.save(runpath+'/eta_last.npy',eta)
del eta
|
hplustree/trove | trove/common/wsgi.py | Python | apache-2.0 | 23,631 | 0.000296 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Wsgi helper utilities for trove"""
import math
import re
import time
import traceback
import uuid
import eventlet.wsgi
import jsonschema
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_service import service
from oslo_utils import encodeutils
import paste.urlmap
import webob
import webob.dec
import webob.exc
from trove.common import base_wsgi
from trove.common import cfg
from trove.common import context as rd_context
from trove.common import exception
from trove.common.i18n import _
from trove.common import pastedeploy
from trove.common import utils
CONTEXT_KEY = 'trove.context'
Router = base_wsgi.Router
Debug = base_wsgi.Debug
Middleware = base_wsgi.Middleware
JSONDictSerializer = base_wsgi.JSONDictSerializer
RequestDeserializer = base_wsgi.RequestDeserializer
CONF = cfg.CONF
# Raise the default from 8192 to accommodate large tokens
eventlet.wsgi.MAX_HEADER_LINE = CONF.max_header_line
eventlet.patcher.monkey_patch(all=False, socket=True)
LOG = logging.getLogger('trove.common.wsgi')
def versioned_urlmap(*args, **kwargs):
urlmap = paste.urlmap.urlmap_factory(*args, **kwargs)
return VersionedURLMap(urlmap)
def launch(app_name, port, paste_config_file, data={},
host='0.0.0.0', backlog=128, threads=1000, workers=None):
"""Launches a wsgi server based on the passed in paste_config_file.
Launch provides a easy way to create a paste app from the config
file and launch it via the service launcher. It takes care of
all of the plumbing. The only caveat is that the paste_config_file
must be a file that paste.deploy can find and handle. There is
a helper method in cfg.py that finds files.
Example:
conf_file = CONF.find_file(CONF.api_paste_config)
launcher = wsgi.launch('myapp', CONF.bind_port, conf_file)
launcher.wait()
"""
LOG.debug("Trove started on %s", host)
app = pastedeploy.paste_deploy_app(paste_config_file, app_name, data)
server = base_wsgi.Service(app, port, host=host,
backlog=backlog, threads=threads)
return service.launch(CONF, server, workers)
# Note: taken from Nova
def serializers(**serializers):
"""Attaches serializers to a method.
This decorator associates a dictionary of serializers with a
method. Note that the function attributes are directly
manipulated; the method is not wrapped.
"""
def decorator(func):
if not hasattr(func, 'wsgi_serializers'):
func.wsgi_serializers = {}
func.wsgi_serializers.update(serializers)
return func
return decorator
class TroveMiddleware(Middleware):
# Note: taken from nova
@classmethod
def factory(cls, global_config, **local_config):
"""Used for paste app factories in paste.deploy config files.
Any local configuration (that is, values under the [filter:APPNAME]
section of the paste config) will be passed into the `__init__` method
as kwargs.
A hypothetical configuration would look like:
[filter:analytics]
redis_host = 127.0.0.1
paste.filter_factory = nova.api.analytics:Analytics.factory
which would result in a call to the `Analytics` class as
import nova.api.analytics
analytics.Analytics(app_from_paste, redis_host='127.0.0.1')
You could of course re-implement the `factory` method in subclasses,
but using the kwarg passing it shouldn't be necessary.
"""
def _factory(app):
return cls(app, **local_config)
return _factory
class VersionedURLMap(object):
def __init__(self, urlmap):
self.urlmap = urlmap
def __call__(self, environ, start_response):
req = Request(environ)
if req.url_version is None and req.accept_version is not None:
version = "/v" + req.accept_version
http_exc = webob.exc.HTTPNotAcceptable(_("version not supported"))
app = self.urlmap.get(version, Fault(http_exc))
else:
app = self.urlmap
return app(environ, start_response)
class Router(base_wsgi.Router):
# Original router did not allow for serialization of the 404 error.
# To fix this the _dispatch was modified to use Fault() objects.
@staticmethod
@webob.dec.wsgify
def _dispatch(req):
"""
Called by self._router after matching the incoming request to a route
and putting the information into req.environ. Either returns 404
or the routed WSGI app's response.
"""
match = req.environ['wsgiorg.routing_args'][1]
if not match:
return Fault(webob.exc.HTTPNotFound())
app = match['controller']
return app
class Request(base_wsgi.Request):
@property
def params(self):
return utils.stringify_keys(super(Request, self).params)
def best_match_content_type(self, supported_content_types=None):
"""Determine the most acceptable content-type.
Based on the query extension then the Accept header.
"""
parts = self.path.rsplit('.', 1)
if len(parts) > 1:
format = parts[1]
if format in ['json']:
return 'application/{0}'.format(parts[1])
ctypes = {
'application/vnd.openstack.trove+json': "application/json",
'application/json': "application/json",
}
bm = self.accept.best_match(ctypes.keys())
return ctypes.get(bm, 'application/json')
@utils.cached_property
def accept_version(self):
accept_header = self.headers.get('ACCEPT', "")
accept_version_re = re.compile(".*?application/vnd.openstack.trove"
"(\+.+?)?;"
"version=(?P<version_no>\d+\.?\d*)")
match = accept_version_re.search(accept_header)
return match.group("version_no") if match else None
@utils.cached_property
def url_version(self):
versioned_url_re = re.compile("/v(?P<version_no>\d+\.?\d*)")
match = versioned_url_re.search(self.path)
return match.group("version_no") if match else None
class Result(object):
"""A result whose serialization is compatible with JSON."""
def __init__(self, data, status=200):
self._ | data = data
| self.status = status
def data(self, serialization_type):
"""Return an appropriate serialized type for the body.
serialization_type is not used presently, but may be
in the future, so it stays.
"""
if hasattr(self._data, "data_for_json"):
return self._data.data_for_json()
return self._data
class Resource(base_wsgi.Resource):
def __init__(self, controller, deserializer, serializer,
exception_map=None):
exception_map = exception_map or {}
self.model_exception_map = self._invert_dict_list(exception_map)
super(Resource, self).__init__(controller, deserializer, serializer)
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, request):
return super(Resource, self).__call__(request)
def execute_action(self, action, request, **action_args):
if getattr(self.controller, action, None) is None:
return Fault(webob.exc.HTTPNotFound())
try:
self.controller.validate_request(action, action_args)
result = super(Resou |
Crosse/vcard4 | vcard4/parameters/RFCParameters.py | Python | bsd-2-clause | 413 | 0.002421 | """
This module provides vCard parameters that are | defined by the vCard 4.0
RFC.
"""
from vcard4.parameters import BaseParameter
class Language(BaseParameter):
"""
A LANGUAGE | parameter.
Example:
ROLE;LANGUAGE=tr:hoca
"""
def __init__(self, language):
super(Language, self).__init__('LANGUAGE', language)
def __repr__(self):
return 'Language(%r)' % self.value
|
Four-Stooges/Server | public/resources/scripts/latestupload.py | Python | mit | 392 | 0.012755 | import sys
from pymong | o import MongoClient
# Connecting to the mongo client
client = MongoClient('localhost',27017)
# Connecting to the database
db = client['rescueHomeless']
# Connecting to the required collection
collection = db['userDB']
userEmail | = sys.argv[1]
result = collection.find({'email':userEmail})
pIDs = result['personIDs']
if len(pIDs)==0:
exit(1)
print(pIDs.pop())
exit(0) |
krux/adspygoogle | examples/adspygoogle/dfp/v201204/inventory_service/create_ad_units.py | Python | apache-2.0 | 2,765 | 0.008318 | #!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new ad units.
To determine which ad units exist, run get_all_ad_units.py
Tags: InventoryService.createAdUnits
"""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.common import Utils
def main(client, parent_id):
# Initialize appropriate service.
inventory_service = client.GetService('InventoryService', version='v201204')
# Create ad unit size.
ad_unit_size = {
'size': {
'width': '300',
'height': '250'
},
'environmentType': 'BROWSER'
}
# Create ad unit objects.
web_ad_unit = {
'name': 'Web_ad_unit_%s' % Utils.GetUniqueName(),
'parentId': parent_id,
'description': 'Web ad unit description.',
'targetWindow': 'BLANK',
'targetPlatform': 'WEB',
| 'adUnitSizes': [ad_unit_size]
}
mobile_ad_unit = {
'name': 'Mobile_ad_unit_%s' % Utils.GetUniqueName(),
'parentId': parent_id,
'description': 'Mobile ad unit description.',
'targetWindow': 'BLANK',
'targetPlatform': 'MOBILE',
'adUnitSizes': [ad_unit_size]
}
# Add ad units.
ad_units = inventory_service.CreateAdUnits([web_ad_unit, mobile_ad_unit])
# Display results.
for ad_unit in ad_units:
pri | nt ('Ad unit with ID \'%s\', name \'%s\', and target platform \'%s\' '
'was created.' % (ad_unit['id'], ad_unit['name'],
ad_unit['targetPlatform']))
if __name__ == '__main__':
# Initialize client object.
dfp_client = DfpClient(path=os.path.join('..', '..', '..', '..', '..'))
# Get the Network Service.
network_service = dfp_client.GetService('NetworkService', version='v201204')
# Set the parent ad unit's ID for all ad units to be created under.
parent_id = network_service.GetCurrentNetwork()[0]['effectiveRootAdUnitId']
main(dfp_client, parent_id)
|
rohitranjan1991/home-assistant | homeassistant/components/zha/siren.py | Python | mit | 5,710 | 0.000876 | """Support for ZHA sirens."""
from __future__ import annotations
import functools
from typing import Any
from zigpy.zcl.clusters.security import IasWd as WD
from homeassistant.components.siren import (
ATTR_DURATION,
SUPPORT_DURATION,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SirenEntity,
)
from homeassistant.components.siren.const import (
ATTR_TONE,
ATTR_VOLUME_LEVEL,
SUPPORT_TONES,
SUPPORT_VOLUME_SET,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_call_later
from .core import discovery
from .core.channels.security import IasWd
from .core.const import (
CHANNEL_IAS_WD,
DATA_ZHA,
SIGNAL_ADD_ENTITIES,
WARNING_DEVICE_MODE_BURGLAR,
WARNING_DEVICE_MODE_EMERGENCY,
WARNING_DEVICE_MODE_EMERGENCY_PANIC,
WARNING_DEVICE_MODE_FIRE,
WARNING_DEVICE_MODE_FIRE_PANIC,
WARNING_DEVICE_MODE_POLICE_PANIC,
WARNING_DEVICE_MODE_STOP,
WARNING_DEVICE_SOUND_HIGH,
WARNING_DEVICE_STROBE_HIGH,
WARNING_DEVICE_STROBE_NO,
Strobe,
)
from .core.registries import ZHA_ENTITIES
from .core.typing import ChannelType, ZhaDeviceType
from .entity import ZhaEntity
MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.SIREN)
DEFAULT_DURATION = 5 # seconds
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> | None:
"""Set up the Zigbee Home Automation siren from config entry."""
entities_to_create = hass.data[DATA_ZHA][Platform.SIREN]
unsub = async_dispatcher_connect(
hass,
SIGNAL_ADD_ENTITIES,
functools.partial(
discovery.async_add_entities,
async_add_entities,
en | tities_to_create,
update_before_add=False,
),
)
config_entry.async_on_unload(unsub)
@MULTI_MATCH(channel_names=CHANNEL_IAS_WD)
class ZHASiren(ZhaEntity, SirenEntity):
"""Representation of a ZHA siren."""
def __init__(
self,
unique_id: str,
zha_device: ZhaDeviceType,
channels: list[ChannelType],
**kwargs,
) -> None:
"""Init this siren."""
self._attr_supported_features = (
SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_DURATION
| SUPPORT_VOLUME_SET
| SUPPORT_TONES
)
self._attr_available_tones: list[int | str] | dict[int, str] | None = {
WARNING_DEVICE_MODE_BURGLAR: "Burglar",
WARNING_DEVICE_MODE_FIRE: "Fire",
WARNING_DEVICE_MODE_EMERGENCY: "Emergency",
WARNING_DEVICE_MODE_POLICE_PANIC: "Police Panic",
WARNING_DEVICE_MODE_FIRE_PANIC: "Fire Panic",
WARNING_DEVICE_MODE_EMERGENCY_PANIC: "Emergency Panic",
}
super().__init__(unique_id, zha_device, channels, **kwargs)
self._channel: IasWd = channels[0]
self._attr_is_on: bool = False
self._off_listener = None
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on siren."""
if self._off_listener:
self._off_listener()
self._off_listener = None
tone_cache = self._channel.data_cache.get(WD.Warning.WarningMode.__name__)
siren_tone = (
tone_cache.value
if tone_cache is not None
else WARNING_DEVICE_MODE_EMERGENCY
)
siren_duration = DEFAULT_DURATION
level_cache = self._channel.data_cache.get(WD.Warning.SirenLevel.__name__)
siren_level = (
level_cache.value if level_cache is not None else WARNING_DEVICE_SOUND_HIGH
)
strobe_cache = self._channel.data_cache.get(Strobe.__name__)
should_strobe = (
strobe_cache.value if strobe_cache is not None else Strobe.No_Strobe
)
strobe_level_cache = self._channel.data_cache.get(WD.StrobeLevel.__name__)
strobe_level = (
strobe_level_cache.value
if strobe_level_cache is not None
else WARNING_DEVICE_STROBE_HIGH
)
if (duration := kwargs.get(ATTR_DURATION)) is not None:
siren_duration = duration
if (tone := kwargs.get(ATTR_TONE)) is not None:
siren_tone = tone
if (level := kwargs.get(ATTR_VOLUME_LEVEL)) is not None:
siren_level = int(level)
await self._channel.issue_start_warning(
mode=siren_tone,
warning_duration=siren_duration,
siren_level=siren_level,
strobe=should_strobe,
strobe_duty_cycle=50 if should_strobe else 0,
strobe_intensity=strobe_level,
)
self._attr_is_on = True
self._off_listener = async_call_later(
self._zha_device.hass, siren_duration, self.async_set_off
)
self.async_write_ha_state()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off siren."""
await self._channel.issue_start_warning(
mode=WARNING_DEVICE_MODE_STOP, strobe=WARNING_DEVICE_STROBE_NO
)
self._attr_is_on = False
self.async_write_ha_state()
@callback
def async_set_off(self, _) -> None:
"""Set is_on to False and write HA state."""
self._attr_is_on = False
if self._off_listener:
self._off_listener()
self._off_listener = None
self.async_write_ha_state()
|
dana-i2cat/felix | vt_manager/src/python/vt_manager/communication/sfa/methods/reset_slice.py | Python | apache-2.0 | 1,080 | 0.007407 | from vt_manager.communication.sfa.util.xrn import urn_to_hrn
from vt_manager.communication.sfa.util.method import Method
from vt_manager.communication.sfa.util.parameter import Parameter, Mixed
class reset_slice(Method):
"""
Reset the specified slice
@param cred credential string specifying the rights of the caller
@param xrn human reada | ble name of slice to instantiate (hrn or urn)
@return 1 is successful, faults otherwise
"""
interfaces = ['aggregate', 'slicemgr', 'component']
accepts = [
Parameter(str, "Credential string"),
Parameter(str, "Human readable name of slice to instantiate (hrn or urn)"),
Mixed(Parameter(str, "Human readable name of the original caller"),
Parameter(None, "Origin hrn not specified"))
]
returns | = Parameter(int, "1 if successful")
def call(self, cred, xrn, origin_hrn=None):
hrn, type = urn_to_hrn(xrn)
self.api.auth.check(cred, 'resetslice', hrn)
self.api.manager.reset_slice (self.api, xrn)
return 1
|
chuck211991/django-pyodbc | tests/basic/models.py | Python | bsd-3-clause | 15,295 | 0.001896 | # coding: utf-8
"""
1. Bare-bones model
This is a basic model with only two non-primary-key fields.
"""
# Python 2.3 doesn't have set as a builtin
try:
set
except NameError:
from sets import Set as set
# Python 2.3 doesn't have sorted()
try:
sorted
except NameError:
from django.utils.itercompat import sorted
from django.db import models
class Article(models.Model):
    """Bare-bones article model exercised by the basic model doctests."""
    # Default is exercised by tests that create an Article without a headline.
    headline = models.CharField(max_length=100, default='Default headline')
    pub_date = models.DateTimeField()

    class Meta:
        # Deterministic ordering so queryset repr comparisons are stable.
        ordering = ('pub_date','headline')

    def __unicode__(self):
        return self.headline
__test__ = {'API_TESTS': """
# No articles are in the system yet.
>>> Article.objects.all()
[]
# Create | an Article.
>>> from datetime import datetime
>>> a = Article(id=None, headline='Area man programs in Python', pub_date=datetime(2005, 7, 28))
# Save it into the database. You have to call save() explicitly.
>>> a.save()
# Now it has an ID. Note it's a long integer, as designated by the trailing "L".
>>> a.id
1L
# Models have a pk property that is an alias for the primary key attribute (by
# default, the 'id' attribute).
>>> a.pk
1L
# Access database columns via Python attributes.
>>> a.headline
'Area man programs in Python'
>>> a.pub_date
datetime.datetime(2005, 7, 28, 0, 0)
# Change values by changing the attributes, then calling save().
>>> a.headline = 'Area woman programs in Python'
>>> a.save()
# Article.objects.all() returns all the articles in the database.
>>> Article.objects.all()
[<Article: Area woman programs in Python>]
# Django provides a rich database lookup API.
>>> Article.objects.get(id__exact=1)
<Article: Area woman programs in Python>
>>> Article.objects.get(headline__startswith='Area woman')
<Article: Area woman programs in Python>
>>> Article.objects.get(pub_date__year=2005)
<Article: Area woman programs in Python>
>>> Article.objects.get(pub_date__year=2005, pub_date__month=7)
<Article: Area woman programs in Python>
>>> Article.objects.get(pub_date__year=2005, pub_date__month=7, pub_date__day=28)
<Article: Area woman programs in Python>
>>> Article.objects.get(pub_date__week_day=5)
<Article: Area woman programs in Python>
# The "__exact" lookup type can be omitted, as a shortcut.
>>> Article.objects.get(id=1)
<Article: Area woman programs in Python>
>>> Article.objects.get(headline='Area woman programs in Python')
<Article: Area woman programs in Python>
>>> Article.objects.filter(pub_date__year=2005)
[<Article: Area woman programs in Python>]
>>> Article.objects.filter(pub_date__year=2004)
[]
>>> Article.objects.filter(pub_date__year=2005, pub_date__month=7)
[<Article: Area woman programs in Python>]
>>> Article.objects.filter(pub_date__week_day=5)
[<Article: Area woman programs in Python>]
>>> Article.objects.filter(pub_date__week_day=6)
[]
# Django raises an Article.DoesNotExist exception for get() if the parameters
# don't match any object.
>>> Article.objects.get(id__exact=2)
Traceback (most recent call last):
...
DoesNotExist: Article matching query does not exist.
>>> Article.objects.get(pub_date__year=2005, pub_date__month=8)
Traceback (most recent call last):
...
DoesNotExist: Article matching query does not exist.
>>> Article.objects.get(pub_date__week_day=6)
Traceback (most recent call last):
...
DoesNotExist: Article matching query does not exist.
# Lookup by a primary key is the most common case, so Django provides a
# shortcut for primary-key exact lookups.
# The following is identical to articles.get(id=1).
>>> Article.objects.get(pk=1)
<Article: Area woman programs in Python>
# pk can be used as a shortcut for the primary key name in any query
>>> Article.objects.filter(pk__in=[1])
[<Article: Area woman programs in Python>]
# Model instances of the same type and same ID are considered equal.
>>> a = Article.objects.get(pk=1)
>>> b = Article.objects.get(pk=1)
>>> a == b
True
# You can initialize a model instance using positional arguments, which should
# match the field order as defined in the model.
>>> a2 = Article(None, 'Second article', datetime(2005, 7, 29))
>>> a2.save()
>>> a2.id
2L
>>> a2.headline
'Second article'
>>> a2.pub_date
datetime.datetime(2005, 7, 29, 0, 0)
# ...or, you can use keyword arguments.
>>> a3 = Article(id=None, headline='Third article', pub_date=datetime(2005, 7, 30))
>>> a3.save()
>>> a3.id
3L
>>> a3.headline
'Third article'
>>> a3.pub_date
datetime.datetime(2005, 7, 30, 0, 0)
# You can also mix and match position and keyword arguments, but be sure not to
# duplicate field information.
>>> a4 = Article(None, 'Fourth article', pub_date=datetime(2005, 7, 31))
>>> a4.save()
>>> a4.headline
'Fourth article'
# Don't use invalid keyword arguments.
>>> a5 = Article(id=None, headline='Invalid', pub_date=datetime(2005, 7, 31), foo='bar')
Traceback (most recent call last):
...
TypeError: 'foo' is an invalid keyword argument for this function
# You can leave off the value for an AutoField when creating an object, because
# it'll get filled in automatically when you save().
>>> a5 = Article(headline='Article 6', pub_date=datetime(2005, 7, 31))
>>> a5.save()
>>> a5.id
5L
>>> a5.headline
'Article 6'
# If you leave off a field with "default" set, Django will use the default.
>>> a6 = Article(pub_date=datetime(2005, 7, 31))
>>> a6.save()
>>> a6.headline
u'Default headline'
# For DateTimeFields, Django saves as much precision (in seconds) as you
# give it.
>>> a7 = Article(headline='Article 7', pub_date=datetime(2005, 7, 31, 12, 30))
>>> a7.save()
>>> Article.objects.get(id__exact=7).pub_date
datetime.datetime(2005, 7, 31, 12, 30)
>>> a8 = Article(headline='Article 8', pub_date=datetime(2005, 7, 31, 12, 30, 45))
>>> a8.save()
>>> Article.objects.get(id__exact=8).pub_date
datetime.datetime(2005, 7, 31, 12, 30, 45)
>>> a8.id
8L
# Saving an object again doesn't create a new object -- it just saves the old one.
>>> a8.save()
>>> a8.id
8L
>>> a8.headline = 'Updated article 8'
>>> a8.save()
>>> a8.id
8L
>>> a7 == a8
False
>>> a8 == Article.objects.get(id__exact=8)
True
>>> a7 != a8
True
>>> Article.objects.get(id__exact=8) != Article.objects.get(id__exact=7)
True
>>> Article.objects.get(id__exact=8) == Article.objects.get(id__exact=7)
False
# dates() returns a list of available dates of the given scope for the given field.
>>> Article.objects.dates('pub_date', 'year')
[datetime.datetime(2005, 1, 1, 0, 0)]
>>> Article.objects.dates('pub_date', 'month')
[datetime.datetime(2005, 7, 1, 0, 0)]
>>> Article.objects.dates('pub_date', 'day')
[datetime.datetime(2005, 7, 28, 0, 0), datetime.datetime(2005, 7, 29, 0, 0), datetime.datetime(2005, 7, 30, 0, 0), datetime.datetime(2005, 7, 31, 0, 0)]
>>> Article.objects.dates('pub_date', 'day', order='ASC')
[datetime.datetime(2005, 7, 28, 0, 0), datetime.datetime(2005, 7, 29, 0, 0), datetime.datetime(2005, 7, 30, 0, 0), datetime.datetime(2005, 7, 31, 0, 0)]
>>> Article.objects.dates('pub_date', 'day', order='DESC')
[datetime.datetime(2005, 7, 31, 0, 0), datetime.datetime(2005, 7, 30, 0, 0), datetime.datetime(2005, 7, 29, 0, 0), datetime.datetime(2005, 7, 28, 0, 0)]
# dates() requires valid arguments.
>>> Article.objects.dates()
Traceback (most recent call last):
...
TypeError: dates() takes at least 3 arguments (1 given)
>>> Article.objects.dates('invalid_field', 'year')
Traceback (most recent call last):
...
FieldDoesNotExist: Article has no field named 'invalid_field'
>>> Article.objects.dates('pub_date', 'bad_kind')
Traceback (most recent call last):
...
AssertionError: 'kind' must be one of 'year', 'month' or 'day'.
>>> Article.objects.dates('pub_date', 'year', order='bad order')
Traceback (most recent call last):
...
AssertionError: 'order' must be either 'ASC' or 'DESC'.
# Use iterator() with dates() to return a generator that lazily requests each
# result one at a time, to save memory.
>>> for a in Article.objects.dates('pub_date', 'day', order='DESC').iterator():
... print repr(a)
datetime.datetime(2005, 7, 31, 0, 0)
datetime.datetime(2005, 7, 30, 0, 0)
datetime.datetime(2005, 7, 29, 0, 0)
datetime.datetime(2005, 7, 28, 0, 0)
# You can combine queries with & and |. |
HubSpot/vitess | test/cluster/sandbox/sandbox_utils.py | Python | apache-2.0 | 1,777 | 0.009004 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Sandbox util functions."""
import datetime
import os
import random
def create_log_file(log_dir, filename):
    """Create a timestamped log file and point a stable symlink at it.

    For a log called init.INFO this creates e.g.
    init.INFO.20170101-120000.100000 and (re)points the non-timestamped
    init.INFO symlink in log_dir at the new file.

    Args:
      log_dir: string, Base path for logs.
      filename: string, The base name of the log file.

    Returns:
      The opened (write-mode) file handle.
    """
    stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S.%f')
    link_path = os.path.join(log_dir, filename)
    target_path = '{0}.{1}'.format(link_path, stamp)
    if os.path.islink(link_path):
        os.remove(link_path)
    os.symlink(target_path, link_path)
    return open(target_path, 'w')
def generate_random_name():
    """Return a random '<adjective><animal>' name built from the word lists."""
    def _load_words(path):
        # Strip whitespace and drop blank lines from the word list.
        with open(path, 'r') as handle:
            return [line.strip() for line in handle if line.strip()]
    adjectives = _load_words('naming/adjectives.txt')
    animals = _load_words('naming/animals.txt')
    return '%s%s' % (random.choice(adjectives), random.choice(animals))
|
pjdelport/feincms | feincms/module/medialibrary/modeladmins.py | Python | bsd-3-clause | 8,526 | 0.004926 | # ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
import os
from django import forms
from django.conf import settings as django_settings
from django.contrib import admin
from django.contrib import messages
from django.contrib.auth.decorators import permission_required
from django.contrib.sites.models import Site
from django.core.files.images import get_image_dimensions
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.template.defaultfilters import filesizeformat
from django.utils.safestring import mark_safe
from django.utils.translation import ungettext, ugettext_lazy as _
from django.views.decorators.csrf import csrf_protect
from ...translations import admin_translationinline, lookup_translations
from .models import Category, MediaFileTranslation
from .forms import MediaCategoryAdminForm, MediaFileAdminForm
from .thumbnail import admin_thumbnail
# -----------------------------------------------------------------------
class CategoryAdmin(admin.ModelAdmin):
    """Django admin configuration for media library categories."""
    form = MediaCategoryAdminForm  # project form class, imported from .forms
    list_display = ['path']  # 'path' is an attribute on Category (see .models)
    list_filter = ['parent']
    list_per_page = 25
    search_fields = ['title']
    prepopulated_fields = { 'slug': ('title',), }  # slug auto-filled from title in the admin UI
#-------------------------------------------------------------------------
def assign_category(modeladmin, request, queryset):
    """Admin action: add the selected media files to a chosen category.

    The first invocation renders an intermediate page with a category
    chooser; submitting that page with 'apply' performs the assignment,
    while 'cancel' returns to the change list unchanged.
    """
    class AddCategoryForm(forms.Form):
        # Carries the selected media-file ids across the confirmation page.
        _selected_action = forms.CharField(widget=forms.MultipleHiddenInput)
        category = forms.ModelChoiceField(Category.objects.all())
    form = None
    if 'apply' in request.POST:
        form = AddCategoryForm(request.POST)
        if form.is_valid():
            category = form.cleaned_data['category']
            count = 0
            for mediafile in queryset:
                category.mediafile_set.add(mediafile)
                count += 1
            # Pluralized, translated success message.
            message = ungettext('Successfully added %(count)d media file to %(category)s.',
                'Successfully added %(count)d media files to %(category)s.',
                count) % {'count':count, 'category':category}
            modeladmin.message_user(request, message)
            # Redirect so a browser refresh does not re-run the action.
            return HttpResponseRedirect(request.get_full_path())
    if 'cancel' in request.POST:
        return HttpResponseRedirect(request.get_full_path())
    if not form:
        # First call: present an unbound form, preserving the admin checkbox
        # selection. (An invalid 'apply' submission falls through above with
        # its bound form so errors are re-rendered.)
        form = AddCategoryForm(initial={
            '_selected_action': request.POST.getlist(admin.ACTION_CHECKBOX_NAME),
        })
    return render_to_response('admin/medialibrary/add_to_category.html', {
        'mediafiles': queryset,
        'category_form': form,
    }, context_instance=RequestContext(request))
assign_category.short_description = _('Add selected media files to category')
#-------------------------------------------------------------------------
def save_as_zipfile(modeladmin, request, queryset):
    """Admin action: export the selected media files as a ZIP archive.

    On success, redirects the browser to the generated archive under
    MEDIA_URL; on failure, shows an error message and stays on the list.
    """
    from .zip import export_zipfile
    site = Site.objects.get_current()
    try:
        zip_name = export_zipfile(site, queryset)
        messages.info(request, _("ZIP file exported as %s") % zip_name)
    except Exception as e:
        # 'as e' instead of the py2-only ', e' form (valid from Python 2.6).
        messages.error(request, _("ZIP file export failed: %s") % str(e))
        return
    return HttpResponseRedirect(os.path.join(django_settings.MEDIA_URL, zip_name))
save_as_zipfile.short_description = _('Export selected media files as zip file')
# ------------------------------------------------------------------------
class MediaFileAdmin(admin.ModelAdmin):
    """Django admin configuration for media files.

    Adds thumbnail/file-info change list columns, translation inlines,
    the category/ZIP bulk actions defined above, and a ZIP bulk-upload
    view (see get_urls / bulk_upload).
    """
    form = MediaFileAdminForm
    save_on_top = True
    date_hierarchy = 'created'
    # Inline editing of the file's MediaFileTranslation rows.
    inlines = [admin_translationinline(MediaFileTranslation)]
    list_display = ['admin_thumbnail', '__unicode__', 'file_info', 'formatted_created']
    list_display_links = ['__unicode__']
    list_filter = ['type', 'categories']
    list_per_page = 25
    search_fields = ['copyright', 'file', 'translations__caption']
    filter_horizontal = ("categories",)
    # Bulk actions defined at module level in this file.
    actions = [assign_category, save_as_zipfile]
    def get_urls(self):
        """Prepend the bulk-upload URL to the default admin URLs."""
        # Local import of the old-style django.conf.urls API (patterns/url).
        from django.conf.urls import patterns, include, url
        urls = super(MediaFileAdmin, self).get_urls()
        my_urls = patterns('',
            url(r'^mediafile-bulk-upload/$', self.admin_site.admin_view(MediaFileAdmin.bulk_upload), {}, name='mediafile_bulk_upload')
        )
        # Custom URLs first, so they match before the admin's catch-all patterns.
        return my_urls + urls
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
extra_context['categories'] = Catego | ry.objects.order_by('title')
return super(MediaFileAdmin, self).changelist_view(request, extra_context=extra_context)
def admin_thumbnail(self, obj | ):
image = admin_thumbnail(obj)
if image:
return mark_safe(u"""
<a href="%(url)s" target="_blank">
<img src="%(image)s" alt="" />
</a>""" % {
'url': obj.file.url,
'image': image,})
return ''
admin_thumbnail.short_description = _('Preview')
admin_thumbnail.allow_tags = True
def formatted_file_size(self, obj):
return filesizeformat(obj.file_size)
formatted_file_size.short_description = _("file size")
formatted_file_size.admin_order_field = 'file_size'
def formatted_created(self, obj):
return obj.created.strftime("%Y-%m-%d")
formatted_created.short_description = _("created")
formatted_created.admin_order_field = 'created'
def file_type(self, obj):
t = obj.filetypes_dict[obj.type]
if obj.type == 'image':
# get_image_dimensions is expensive / slow if the storage is not local filesystem (indicated by availability the path property)
try:
obj.file.path
except NotImplementedError:
return t
try:
d = get_image_dimensions(obj.file.file)
if d:
t += " %d×%d" % ( d[0], d[1] )
except (IOError, ValueError), e:
t += " (%s)" % e.strerror
return t
file_type.admin_order_field = 'type'
file_type.short_description = _('file type')
file_type.allow_tags = True
    def file_info(self, obj):
        """
        Method for showing the file name in admin.

        Note: This also includes a hidden field that can be used to extract
        the file name later on, this can be used to access the file name from
        JS, like for example a TinyMCE connector shim.
        """
        # Local import to avoid a circular import at module load time --
        # TODO confirm; feincms.utils is not imported at the top of this file.
        from feincms.utils import shorten_string
        # Hidden input carries the raw storage path; visible text shows a
        # shortened basename plus type and size columns.
        return u'<input type="hidden" class="medialibrary_file_path" name="_media_path_%d" value="%s" id="_refkey_%d" /> %s <br />%s, %s' % (
            obj.id,
            obj.file.name,
            obj.id,
            shorten_string(os.path.basename(obj.file.name), max_length=40),
            self.file_type(obj),
            self.formatted_file_size(obj),
        )
    file_info.admin_order_field = 'file'
    file_info.short_description = _('file info')
    file_info.allow_tags = True
@staticmethod
@csrf_protect
@permission_required('medialibrary.add_mediafile')
def bulk_upload(request):
from .zip import import_zipfile
if request.method == 'POST' and 'data' in request.FILES:
try:
count = import_zipfile(request.POST.get('category'), request.POST.get('overwrite', False), request.FILES['data'])
messages.info(request, _("%d files imported") % count)
except Exception, e:
messages.error(request, _("ZIP import failed: %s") % str(e))
else:
messages.error(request, _("No input file given"))
return HttpResponseRedirect(reverse('admin:medialibrary_mediafile_changelist'))
def queryset(self, request): |
whutch/atria | cwmud/libs/miniboa.py | Python | mit | 35,233 | 0 | # -*- coding: utf-8 -*- line endings: unix -*-
"""A bare-bones cross-platform Telnet server."""
# miniboa.py
# Copyright 2009 Jim Storch
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Changes made by pR0Ps.CM[at]gmail[dot]com on 18/07/2012
# -Updated for use with Python 3.x
# -Repackaged into a single file to simplify distribution
# -Other misc fixes and changes
# Report any bugs in this implementation to me (email above)
# Further changes made by Will Hutcheson (will@whutch.com)
# for the Clockwork MUD Server (https://github.com/whutch/cwmud)
import socket
import select
import sys
import re
import time
from ..core.logs import get_logger
logging = get_logger("miniboa")
# Telnet Notes
# ============
#
# (See RFC 854 for more information)
#
# Negotiating a Local Option
# --------------------------
#
# Side A begins with:
#
# "IAC WILL/WONT XX" Meaning "I would like to [use|not use] option XX."
#
# Side B replies with either:
#
# "IAC DO XX" Meaning "OK, you may use option XX."
# "IAC DONT XX" Meaning "No, you cannot use option XX."
#
#
# Negotiating a Remote Option
# ----------------------------
#
# Side A begins with:
#
# "IAC DO/DONT XX" Meaning "I would like YOU to [use|not use] option XX."
#
# Side B replies with either:
#
# "IAC WILL XX" Meaning "I will begin using option XX"
# "IAC WONT XX" Meaning "I will not begin using option XX"
#
#
# The syntax is designed so that if both parties receive simultaneous requests
# for the same option, each will see the other's request as a positive
# acknowledgement of it's own.
#
# If a party receives a request to enter a mode that it is already in, the
# request should not be acknowledged.
UNKNOWN = -1
# Cap sockets to 500 on Windows because WinSock can only process 512 at time
# Cap sockets to 1000 on Linux because you can only have 1024 file descriptors
MAX_CONNECTIONS = 500 if sys.platform == 'win32' else 1000
PARA_BREAK = re.compile(r"(\n\s*\n)", re.MULTILINE)
ANSI_CODES = {
'^k': '\x1b[22;30m', # black
'^K': '\x1b[1;30m', # bright black (grey)
'^r': '\x1b[22;31m', # red
'^R': '\x1b[1;31m', # bright red
'^g': '\x1b[22;32m', # green
'^G': '\x1b[1;32m', # bright green
'^y': '\x1b[22;33m', # yellow
'^Y': '\x1b[1;33m', # bright yellow
'^b': '\x1b[22;34m', # blue
'^B': '\x1b[1;34m', # bright blue
'^m': '\x1b[22;35m', # magenta
'^M': '\x1b[1;35m', # bright magenta
'^c': '\x1b[22;36m', # cyan
'^C': '\x1b[1;36m', # bright cyan
'^w': '\x1b[22;37m', # white
'^W': '\x1b[1;37m', # bright white
'^0': '\x1b[40m', # black background
'^1': '\x1b[41m', # red background
'^2': '\x1b[42m', # green background
'^3': '\x1b[43m', # yellow background
'^4': '\x1b[44m', # blue background
'^5': '\x1b[45m', # magenta background
'^6': '\x1b[46m', # cyan background
'^d': '\x1b[39m', # default (should be white on black)
'^I': '\x1b[7m', # inverse text on
'^i': '\x1b[27m', # inverse text off
'^~': '\x1b[0m', # reset all
'^U': '\x1b[4m', # underline on
'^u': '\x1b[24m', # underline off
'^!': '\x1b[1m', # bold on
'^.': '\x1b[22m', # bold off
'^s': '\x1b[2J', # clear screen
'^l': '\x1b[2K', # clear to end of line
}
# Telnet commands
SE = chr(240) # End of sub-negotiation parameters
NOP = chr(241) # No operation
DATMK = chr(242) # Data stream portion of a sync.
BREAK = chr(243) # NVT Character BRK
IP = chr(244) # Interrupt Process
AO = chr(245) # Abort Output
AYT = chr(246) # Are you there
EC = chr(247) # Erase Character
EL = chr(248) # Erase Line
GA = chr(249) # The Go Ahead Signal
SB = chr(250) # Sub-option to follow
WILL = chr(251) # Will; request or confirm option begin
WONT = chr(252) # | Wont; deny option request
DO = chr(253) # Do = Request or confirm remote option
DONT = chr(254) # Don't = Demand or confirm option halt
IAC = chr(255) # Interpret as Command
SEND = chr(1) # Sub-process negotiation SEND command
IS = chr(0) # Sub-process negotiation I | S command
# Telnet options
BINARY = chr(0) # Transmit Binary
ECHO = chr(1) # Echo characters back to sender
RECON = chr(2) # Reconnection
SGA = chr(3) # Suppress Go-Ahead
TTYPE = chr(24) # Terminal Type
NAWS = chr(31) # Negotiate About Window Size
LINEMODE = chr(34) # Line Mode
_COMMAND_NAMES = {
SE: "SE",
NOP: "NOP",
DATMK: "DATMK",
BREAK: "BREAK",
IP: "IP",
AO: "AO",
AYT: "AYT",
EC: "EC",
EL: "EL",
GA: "GA",
SB: "SB",
WILL: "WILL",
WONT: "WONT",
DO: "DO",
DONT: "DONT",
IAC: "IAC",
SEND: "SEND",
IS: "IS",
}
_OPTION_NAMES = {
BINARY: "BINARY",
ECHO: "ECHO",
RECON: "RECON",
SGA: "SGA",
TTYPE: "TTYPE",
NAWS: "NAWS",
LINEMODE: "LINEMODE",
}
class ConnectionLost(Exception):
"""Custom exception to signal a lost connection to the Telnet Server."""
pass
def strip_caret_codes(text):
    """Strip out any caret codes from a string.

    :param str text: The text to strip the codes from
    :returns str: The clean text
    """
    # Protect literal '^^' escapes so their '^' survives token removal.
    text = text.replace('^^', '\x00')
    # Iterate the keys directly; the mapped ANSI sequences are irrelevant here.
    for token in ANSI_CODES:
        text = text.replace(token, '')
    return text.replace('\x00', '^')
def colorize(text, ansi=True):
    """Replace caret tokens with ANSI sequences, or strip them out.

    :param str text: The text to process
    :param bool ansi: True to emit ANSI codes, False to strip the tokens
    :returns str: The processed text
    """
    if not ansi:
        return strip_caret_codes(text)
    # Protect literal '^^' escapes so their '^' survives token substitution.
    escaped = text.replace('^^', '\x00')
    for token, code in ANSI_CODES.items():
        escaped = escaped.replace(token, code)
    return escaped.replace('\x00', '^')
def word_wrap(text, columns=80, indent=4, padding=2):
    """Given a block of text, break it into a list of lines wrapped to length.

    Paragraphs (runs separated by blank lines, per PARA_BREAK) are wrapped
    independently; whitespace-only paragraphs are dropped.

    :param str text: The text to wrap
    :param int columns: The number of columns to wrap the text to
    :param int indent: The number of spaces to indent additionally lines to
    :param int padding: The number of columns to pad each side with
    :returns list: A list of wrapped strings
    """
    paragraphs = PARA_BREAK.split(text)
    lines = []
    # Reserve right-hand padding by shrinking the usable width.
    columns -= padding
    for para in paragraphs:
        if para.isspace():
            continue
        line = ' ' * indent
        for word in para.split():
            if (len(line) + 1 + len(word)) > columns:
                # Current line is full: emit it and start a padded new line.
                lines.append(line)
                line = ' ' * padding
                line += word
            else:
                # NOTE(review): a paragraph's first word is joined with a
                # leading space, so opening lines carry indent+1 spaces --
                # pre-existing quirk, kept as-is.
                line += ' ' + word
        if not line.isspace():
            lines.append(line)
    return lines
class TelnetOption(object):
    """Simple class used to track the status of an extended Telnet option.

    Both negotiation directions start as UNKNOWN (-1) until WILL/WONT or
    DO/DONT exchanges settle them.
    """
    def __init__(self):
        self.local_option = UNKNOWN  # Local state of an option
        self.remote_option = UNKNOWN  # Remote state of an option
        self.reply_pending = False  # Are we expecting a reply?
class TelnetClient(object):
"""A client connection via Telnet."""
def __init__(self, sock, addr_tup):
"""Create a new client connection.
:param socket.socket sock: The socket of the new connection
:param tuple addr_tup: A tuple of (address, port number)
"""
self.protocol = 'telnet'
self.active = True # Turns False when the connection is lost
self.sock = sock # The connection's socket
self.fileno = sock.fileno() # The socket's file descriptor
self.address = addr_tup[0] # The client's remote TCP/IP address
|
longde123/MultiversePlatform | client/Scripts/AnimationState.py | Python | mit | 2,968 | 0.012803 | #
# The Multiverse Platform is made available under the MIT License.
#
# Copyright (c) 2012 The Multiverse Foundation
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
#
import ClientAPI
class AnimationState:
    """Scripting-side wrapper around an engine animation state.

    Attribute reads and writes are routed through the class-level
    _getters/_setters tables below, so only names listed there can be
    accessed on instances; everything else raises AttributeError.
    """
    #
    # Constructor
    #
    def __init__(self):
        # Direct construction is unsupported; instances are made via the
        # _ExistingAnimationState subclass defined later in this file.
        assert False
    #
    # Property Getters
    #
    def _get_Time(self):
        # Playback time of the wrapped engine state -- presumably the Axiom
        # engine object; confirm against the client API.
        return self._state.State.Time
    def __getattr__(self, attrname):
        # Only invoked for names not found normally; dispatch through the
        # getter table (Python 2 raise syntax preserved).
        if attrname in self._getters:
            return self._getters[attrname](self)
        else:
            raise AttributeError, attrname
    #
    # Property Setters
    #
    def __setattr__(self, attrname, value):
        # Intercepts ALL instance attribute writes; only names in _setters
        # (currently none) are allowed, so subclasses must store state by
        # writing directly into __dict__.
        if attrname in self._setters:
            self._setters[attrname](self, value)
        else:
            raise AttributeError, attrname
    _getters = { 'Time' : _get_Time }
    _setters = { }
    #
    # Methods
    #
    def AddTime(self, t):
        # Advance the animation by t; returns whatever the engine returns.
        return self._state.AddTime(t)
    def RegisterTimeEventHandler(self, time, handler):
        # handler(state, triggerTime) fires when playback reaches 'time';
        # the wrapper keeps 'self' as the state passed to the handler.
        AnimationStateEventWrapper(self, handler, time)
#
# This class is just another way of making an AnimationState, with a different constructor,
# since we don't have constructor overloading within a single class. This should only
# be used internally by the API.
#
class _ExistingAnimationState(AnimationState):
    """AnimationState wrapping an already-existing engine state.

    Exists only to provide an alternate constructor (the base class forbids
    direct construction); intended for internal API use only.
    """
    #
    # Constructor
    #
    def __init__(self, state):
        # Write through __dict__ to bypass AnimationState.__setattr__, which
        # rejects any attribute name not listed in _setters.
        self.__dict__['_state'] = state
    def __setattr__(self, attrname, value):
        # Plain delegation to the base class; adds no behavior but is kept
        # to preserve the original interface.
        AnimationState.__setattr__(self, attrname, value)
class AnimationStateEventWrapper:
    """Adapts a scripting-level time-event handler to the engine callback.

    Registers Handler with the wrapped engine state; when it fires, the
    user's handler is called with the scripting-level AnimationState (not
    the raw engine state) plus the trigger time.
    """
    def __init__(self, state, handler, triggerTime):
        self.animState = state
        self.realHandler = handler
        state._state.RegisterTimeEventHandler(triggerTime, self.Handler)
    def Handler(self, axiomState, triggerTime):
        # Drop the engine-side state argument; forward the wrapper instead.
        self.realHandler(self.animState, triggerTime)
|
tdent/pycbc | pycbc/waveform/bank.py | Python | gpl-3.0 | 39,353 | 0.002338 | # Copyright (C) 2012 Alex Nitz, Josh Willis, Andrew Miller
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# =============================================================================
#
# Preamble
#
# =============================================================================
#
"""
This module provides classes that describe banks of waveforms
"""
import types
import logging
import os.path
import h5py
from copy import copy
import numpy as np
from ligo.lw import table, lsctables, utils as ligolw_utils
import pycbc.waveform
import pycbc.pnutils
import pycbc.waveform.compress
from pycbc import DYN_RANGE_FAC
from pycbc.types import FrequencySeries, zeros
import pycbc.io
from pycbc.io.ligolw import LIGOLWContentHandler
import hashlib
def sigma_cached(self, psd):
    """ Cache sigma calculate for use in tandem with the FilterBank class

    Computes and memoizes sigmasq (the matched-filter normalization) of
    this template against ``psd``, keyed on ``id(psd)``.  NOTE(review):
    ``self`` is a template frequency series this function gets bound onto
    elsewhere, not an instance of a class defined in this module.
    """
    # Lazily attach a small bounded cache to the template object itself.
    if not hasattr(self, '_sigmasq'):
        from pycbc.opt import LimitedSizeDict
        self._sigmasq = LimitedSizeDict(size_limit=2**5)
    key = id(psd)
    # Reverse bookkeeping on the psd recording which templates have cached
    # against it.  NOTE(review): appears to guard against stale cache hits
    # when an id() is reused by a new psd object -- confirm.
    if not hasattr(psd, '_sigma_cached_key'):
        psd._sigma_cached_key = {}
    if key not in self._sigmasq or id(self) not in psd._sigma_cached_key:
        psd._sigma_cached_key[id(self)] = True
        # If possible, we precalculate the sigmasq vector for all possible waveforms
        if pycbc.waveform.waveform_norm_exists(self.approximant):
            if not hasattr(psd, 'sigmasq_vec'):
                psd.sigmasq_vec = {}
            if self.approximant not in psd.sigmasq_vec:
                psd.sigmasq_vec[self.approximant] = \
                    pycbc.waveform.get_waveform_filter_norm(
                        self.approximant,
                        psd,
                        len(psd),
                        psd.delta_f,
                        self.min_f_lower
                    )
            if not hasattr(self, 'sigma_scale'):
                # Get an amplitude normalization (mass dependant constant norm)
                amp_norm = pycbc.waveform.get_template_amplitude_norm(
                    self.params, approximant=self.approximant)
                amp_norm = 1 if amp_norm is None else amp_norm
                self.sigma_scale = (DYN_RANGE_FAC * amp_norm) ** 2.0
            # Difference of the cumulative norm vector between the template's
            # frequency cutoffs gives its sigmasq.
            curr_sigmasq = psd.sigmasq_vec[self.approximant]
            kmin = int(self.f_lower / psd.delta_f)
            self._sigmasq[key] = self.sigma_scale * \
                (curr_sigmasq[self.end_idx-1] - curr_sigmasq[kmin])
        else:
            # No precomputed norm for this approximant: integrate the
            # template's squared magnitude against 1/psd directly.
            if not hasattr(self, 'sigma_view'):
                from pycbc.filter.matchedfilter import get_cutoff_indices
                N = (len(self) -1) * 2
                kmin, kmax = get_cutoff_indices(
                    self.min_f_lower or self.f_lower, self.end_frequency,
                    self.delta_f, N)
                self.sslice = slice(kmin, kmax)
                self.sigma_view = self[self.sslice].squared_norm() * 4.0 * self.delta_f
            if not hasattr(psd, 'invsqrt'):
                # NOTE(review): named 'invsqrt' but computed as 1/psd (no
                # square root) -- confirm whether name or formula is intended.
                psd.invsqrt = 1.0 / psd
            self._sigmasq[key] = self.sigma_view.inner(psd.invsqrt[self.sslice])
    return self._sigmasq[key]
# helper function for parsing approximant strings
def boolargs_from_apprxstr(approximant_strs):
    """Convert approximant specification string(s) into condition tuples.

    Each input string has the form ``APPRX[:COND]``; the result is a list
    of tuples suitable for ``FieldArray.parse_boolargs``.  A bare string
    (not in a list) is treated as a one-element list.

    Parameters
    ----------
    approximant_strs : (list of) string(s)
        The ``APPRX:COND`` strings to parse; the last entry may omit the
        conditional, which is equivalent to specifying ':else'.

    Returns
    -------
    boolargs : list
        Tuples of the form ``(approximant, condition)`` (or just
        ``(approximant,)`` when no condition was given).
    """
    if not isinstance(approximant_strs, list):
        return [tuple(approximant_strs.split(':'))]
    return [tuple(spec.split(':')) for spec in approximant_strs]
def add_approximant_arg(parser, default=None, help=None):
    """Adds an approximant argument to the given parser.

    Parameters
    ----------
    parser : ArgumentParser
        The argument parser to add the argument to.
    default : {None, str}
        Specify a default for the approximant argument. Defaults to None.
    help : {None, str}
        Provide a custom help message. If None, will use a descriptive message
        on how to specify the approximant.
    """
    if help is None:
        # (Fixed in the user-facing text: doubled "be be" and "remaning";
        # also dropped a redundant str() wrapper around the literal.)
        help = ("The approximant(s) to use. Multiple approximants to use "
             "in different regions may be provided. If multiple "
             "approximants are provided, every one but the last must be "
             "followed by a conditional statement defining where that "
             "approximant should be used. Conditionals can be any boolean "
             "test understood by numpy. For example, 'Apprx:(mtotal > 4) & "
             "(mchirp <= 5)' would use approximant 'Apprx' where total mass "
             "is > 4 and chirp mass is <= 5. "
             "Conditionals are applied in order, with each successive one "
             "only applied to regions not covered by previous arguments. "
             "For example, `'TaylorF2:mtotal < 4' 'IMRPhenomD:mchirp < 3'` "
             "would result in IMRPhenomD being used where chirp mass is < 3 "
             "and total mass is >= 4. The last approximant given may use "
             "'else' as the conditional or include no conditional. In either "
             "case, this will cause the last approximant to be used in any "
             "remaining regions after all the previous conditionals have been "
             "applied. For the full list of possible parameters to apply "
             "conditionals to, see WaveformArray.default_fields(). Math "
             "operations may also be used on parameters; syntax is python, "
             "with any operation recognized by numpy.")
    parser.add_argument("--approximant", nargs='+', type=str, default=default,
                        metavar='APPRX[:COND]',
                        help=help)
def parse_approximant_arg(approximant_arg, warray):
    """Given an approximant arg (see add_approximant_arg) and a field
    array, figures out what approximant to use for each template in the array.
    Parameters
    ----------
    approximant_arg : list
        The approximant argument to parse. Should be the thing returned by
        ArgumentParser when parsing the argument added by add_approximant_arg.
    warray : FieldArray
        The array to parse. Must be an instance of a FieldArray, or a class
        that inherits from FieldArray.
    Returns
    -------
    array
        A numpy array listing the approximants to use for each element in
        the warray.
    """
    # Split the CLI strings into (approximant, condition) tuples, then let
    # the field array evaluate each condition against its own columns.
    # parse_boolargs returns (values, leftover-mask); only values are needed.
    return warray.parse_boolargs(boolargs_from_apprxstr(approximant_arg))[0]
def tuple_to_hash(tuple_to_be_hashed):
"""
Return a hash for a numpy array, avoids native (unsafe) python3 hash function
Parameters
----------
tuple_to_be_hashed: tuple
The tuple which is being hashed
Must be convertible to a numpy array
Returns
-------
int
an integer representation of the hashed arra |
TADT1909/PythonProjects | ImageToText.py | Python | mit | 1,698 | 0.043401 | #! python3
# Python 3 Image to text
# 20.07.17 fixed bugs
# require | to install pillow, numpy
# pip install pillow
# pip install numpy
# -*- coding: UTF-8 -*-
from PIL import Image
import numpy as np
import random
import math
def convert(num) :
    """Map an 8-bit grayscale value (0-255) to a randomly chosen
    ASCII-art glyph of matching visual density.

    Values outside 0-255 fall back to a plain space, as before.
    """
    # Bands ordered by exclusive upper bound; each band offers 7 glyphs of
    # similar weight and one is picked uniformly (same distribution as the
    # original randint(0, 6) indexing).
    bands = (
        (40, ['.', ' ', '`', ' ', ' ', '`', ' ']),
        (90, ['.', ',', ':', ' ', '\'', ' ', '`']),
        (130, ['a', 's', 'c', 'v', 'x', 'n', 'i']),
        (170, ['I', 'L', 'I', 'L', 'J', 'I', 'J']),
        (200, ['Q', 'W', 'E', 'R', 'Y', 'U', 'O']),
        (230, ['$', '#', '%', '$', '&', '@', '@']),
        # Bug fix: the original used range(230, 255), so the brightest
        # pixel value 255 fell through and rendered as a space.
        (256, ['■', '■', '■', '■', '■', '■', '■']),
    )
    if 0 <= num <= 255:
        for upper, chars in bands:
            if num < upper:
                return random.choice(chars)
    return ' '
size = 200 #need to customize -- output width in characters
filename = ""
filename = input("File Name? ")
# 'L' mode: single-channel 8-bit grayscale.
image = Image.open(filename).convert('L')
width, height = image.size
# NOTE(review): new height = size * width / height inverts the usual
# aspect-ratio formula (size * height / width) -- confirm this squash is
# intended to compensate for non-square character cells.
height = int(math.floor(size*width /height))
width = size
image = image.resize((width, height),Image.ANTIALIAS)
image.load()
# Pixel matrix indexed [row][col], consistent with the i<height, j<width
# loops below.
data = np.asarray( image, dtype="int32" )
out = [["" for x in range(width)] for y in range(height)]
for i in range(height) :
    for j in range(width) :
        out[i][j] = convert(data[i][j])
# Write the character grid row by row to image.txt.
with open("image.txt",'w', encoding='utf-8') as f:
    string = ""
    for i in range(height) :
        for j in range(width) :
            string += out[i][j]
        string += " \n"
    f.write(string)
sonofeft/XYmath | xymath/newtGreg2.py | Python | gpl-3.0 | 5,126 | 0.02419 | from __future__ import absolute_import
from __future__ import division
from builtins import zip
from builtins import map
from builtins import range
from builtins import object
from past.utils import old_div
import bisect
class quadNG(object): # quadratic Newton-Gregory Interpolation
    ''' quadratic Newton-Gregory Interpolation
    assume a formula of:
    f = a + b(x-x0) + c(x-x0)(x-x1)
    '''
    def __init__(self, xInp, yInp):
        # make sure that x values are monotonically increasing
        x = list(map(float, xInp))
        y = list(map(float, yInp))
        c = list(zip(x,y))
        c.sort()
        x = []
        y = []
        aLast = None
        for aa,bb in c:
            # simply drop multivalued x points (keep first occurrence)
            if aa != aLast:
                x.append(aa)
                y.append(bb)
                aLast = aa
        self.x = x
        self.y = y
        # start making quadratic constant arrays from difference tables
        self.a = []
        self.b = []
        self.c = []
        # dif1: first divided differences (slopes between neighbours)
        dif1 = []
        for i in range( len(x) - 1 ):
            dif1.append( old_div((y[i+1]-y[i]), (x[i+1]-x[i])) )
        # dif2: second divided differences over each triple of points
        dif2 = []
        for i in range( len(x) - 2 ):
            dif2.append( old_div((dif1[i+1]-dif1[i]), (x[i+2]-x[i])) )
        for i in range( len(x)-2):
            self.a.append( y[i] )
            self.b.append( dif1[i] )
            self.c.append( dif2[i] )
        # highest usable interval index
        self.iMax = len(self.a) - 1
    def __call__(self, xval=0.0):
        # Allow the interpolator to be used as a plain function: q(x).
        return self.getValue( xval )
    def getIndex(self, xval=0.0):
        '''Override this for computed index version'''
        # binary search for the interval containing xval, clamped to range
        i = bisect.bisect_left(self.x, xval) - 1
        if i<0:
            return 0
        elif i>self.iMax:
            return self.iMax
        return i
    def getValue(self, xval=0.0):
        # Clamp outside the tabulated range to the end values.
        xval = float( xval )
        if xval > self.x[-1]:
            return self.y[-1]
        if xval < self.x[0]:
            return self.y[0]
        i = self.getIndex(xval)
        dx = xval - self.x[i]
        dx2 = xval-self.x[i+1]
        # f = a + (x-x0)*(b + (x-x1)*c)
        return self.a[i] + dx*( self.b[i] + dx2*self.c[i] )
    def deriv(self, xval=0.0):
        # First derivative: b + c*(2x - x0 - x1).
        # NOTE(review): dx/dx2 below are computed but unused; deriv is also
        # not clamped outside the data range the way getValue is -- confirm.
        xval = float( xval )
        i = self.getIndex(xval)
        dx = xval - self.x[i]
        dx2 = xval-self.x[i+1]
        return self.b[i] + self.c[i]*(2.*xval - self.x[i] - self.x[i+1])
    def deriv2nd(self, xval=0.0):
        # Second derivative of the quadratic is the constant 2c.
        xval = float( xval )
        i = self.getIndex(xval)
        return 2.0*self.c[i]
class quadNG_compInd(quadNG): # quadratic Newton-Gregory Interpolation
    ''' quadratic Newton-Gregory Interpolation WITH Computed Index
    assume a formula of:
    f = a + b(x-x0) + c(x-x0)(x-x1)
    '''
    def __init__(self, xBeg, xEnd, yInp):
        '''Assume that yInp is in order.
        Create x array to correspond
        '''
        self.Npts = len(yInp)
        self.Nintervals = self.Npts-1
        self.h = old_div((xEnd-xBeg), float( self.Nintervals )) # constant step size
        self.xBeg = xBeg
        self.xEnd = xEnd
        # Build the evenly spaced x grid matching yInp.
        xL = []
        for i in range( self.Nintervals ):
            xval = xBeg + float(i) * self.h
            xL.append( xval )
        xL.append( xEnd )
        quadNG.__init__(self, xL, yInp)
        self.iMax = len(self.a)-1
    def getIndex(self, xval=0.0):
        '''Overrides parent object to compute index
        Assume that the computation below is faster than bisect.bisect_left
        (Becomes more important for longer lists)
        profiling for lists of about 200 entries was about 20% faster
        '''
        # Direct O(1) index computation, valid because the grid is uniform.
        i = int( old_div((xval-self.xBeg), self.h) )
        if i<0:
            return 0
        elif i>self.iMax:
            return self.iMax
        return i
if __name__ == "__main__": #Self Test
    import sys
    # do rapid laser example from PPT file
    x = [2.,4.25,5.25,7.81,9.2,10.6]
    y = [7.2,7.1,6.,5.,3.5,5.]
    # Toggle between the computed-index and bisect-based interpolators.
    if 1:
        q = quadNG_compInd(2.0, 10.6, y)
    else:
        q = quadNG( x, y)
    # Sample value, 1st and 2nd derivative over [0, 12] for charting.
    rs = [ ['x','f(x)cubic','fd1(x)','fd2(x)'] ]
    Npts = 100
    for i in range(Npts+1):
        xval = 12.*i/Npts
        rs.append( [xval, q(xval), q.deriv(xval), q.deriv2nd(xval)] )
    # Chart the results in Excel via the project's xlChart wrapper.
    from . import xlChart
    xl = xlChart.xlChart()
    xl.xlApp.DisplayAlerts = 0 # Allow Quick Close without Save Message
    # Chart 1 / Sheet 1
    xl.makeChart(rs, title="Rapid Laser Data",nCurves = 3,
        chartName="quadNG",
        sheetName="quadNGData")
    # Overlay the raw knot points as a separate series.
    rs = [ ['x','y'] ]
    for i in range(len(q.x)):
        rs.append( [q.x[i], q.y[i]] )
    xl.makeDataSheet( rs, sheetName="DataSheet")
    xl.addNewSeriesToCurrentSheetChart( xColumn=1, yColumn=2)
    xl.setMarkerSize( NSeries=4, size=10)
    xl.setSeriesColor( NSeries=4, red=255, green=0, blue=0)
    xl.setLineThickness( NSeries=4, thickness=0)
    xl.focusChart(1)
    sys.exit()
|
reuterbal/photobooth | photobooth/worker/PictureMailer.py | Python | agpl-3.0 | 3,515 | 0 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Photobooth - a flexible photo booth software
# Copyright (C) 2018 Balthasar Reuter <photobooth at re - web dot eu>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.utils import formatdate
from email import encoders
from pathlib import Path
from .WorkerTask import WorkerTask
def send_mail(send_from, send_to, subject, message, picture, filename,
              server, port, is_auth, username, password, is_tls):
    """Build a MIME email with *picture* attached and submit it via SMTP.

    Based on https://stackoverflow.com/a/16509278

    Args:
        send_from (str): from name
        send_to (str): to name
        subject (str): message title
        message (str): message body
        picture (jpg byte_data): ByteIO data of the JPG picture
        filename (str): Filename of picture
        server (str): mail server host name
        port (int): port number
        is_auth (bool): server requires authentication
        username (str): server auth username
        password (str): server auth password
        is_tls (bool): use TLS mode
    """
    mail = MIMEMultipart()
    for header, value in (('From', send_from), ('To', send_to),
                          ('Date', formatdate(localtime=True)),
                          ('Subject', subject)):
        mail[header] = value
    mail.attach(MIMEText(message))

    # Attach the picture as a base64-encoded binary part.
    attachment = MIMEBase('application', "octet-stream")
    attachment.set_payload(picture.getbuffer())
    encoders.encode_base64(attachment)
    attachment.add_header('Content-Disposition',
                          'attachment; filename="{}"'.format(filename))
    mail.attach(attachment)

    # Deliver: optional STARTTLS and login, then send and close.
    connection = smtplib.SMTP(server, port)
    if is_tls:
        connection.starttls()
    if is_auth:
        connection.login(username, password)
    connection.sendmail(send_from, send_to, mail.as_string())
    connection.quit()
class PictureMailer(WorkerTask):

    def __init__(self, config):
        super().__init__()
        # Pull all mail settings from the [Mailer] config section once.
        section = 'Mailer'
        self._sender = config.get(section, 'sender')
        self._recipient = config.get(section, 'recipient')
        self._subject = config.get(section, 'subject')
        self._message = config.get(section, 'message')
        self._server = config.get(section, 'server')
        self._port = config.getInt(section, 'port')
        self._is_auth = config.getBool(section, 'use_auth')
        self._user = config.get(section, 'user')
        self._password = config.get(section, 'password')
        self._is_tls = config.getBool(section, 'use_tls')

    def do(self, picture, filename):
        """Email *picture* (named by *filename*) to the configured recipient."""
        logging.info('Sending picture to %s', self._recipient)
        send_mail(self._sender, self._recipient, self._subject, self._message,
                  picture, Path(filename).name, self._server, self._port,
                  self._is_auth, self._user, self._password, self._is_tls)
|
aboganas/frappe | frappe/utils/jinja.py | Python | mit | 5,056 | 0.028877 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
def get_jenv():
	"""Return the request-local Jinja environment, building it on first use."""
	import frappe
	if not getattr(frappe.local, 'jenv', None):
		from jinja2 import Environment, DebugUndefined
		# frappe will be loaded last, so app templates will get precedence
		jenv = Environment(loader = get_jloader(),
			undefined=DebugUndefined)
		set_filters(jenv)
		# Expose the whitelisted helper functions as template globals.
		jenv.globals.update(get_allowed_functions_for_jenv())
		frappe.local.jenv = jenv
	return frappe.local.jenv
def get_template(path):
	"""Load and return the Jinja template found at *path* via the shared loader."""
	return get_jenv().get_template(path)
def validate_template(html):
	"""Throws exception if there is a syntax error in the Jinja Template"""
	import frappe
	from jinja2 import TemplateSyntaxError
	jenv = get_jenv()
	try:
		# Compiling the template is enough to surface syntax errors.
		jenv.from_string(html)
	except TemplateSyntaxError as e:
		# `except X as e` is valid on Python 2.6+ and required on Python 3;
		# the previous `except X, e` form is a SyntaxError under Python 3.
		frappe.msgprint('Line {}: {}'.format(e.lineno, e.message))
		frappe.throw(frappe._("Syntax error in template"))
def render_template(template, context, is_path=None):
	'''Render a template using Jinja

	:param template: path or HTML containing the jinja template
	:param context: dict of properties to pass to the template
	:param is_path: (optional) assert that the `template` parameter is a path'''
	jenv = get_jenv()
	# Treat the input as a template *path* when the caller says so, or when
	# it looks like one: an app-relative "templates/" path, or a single-line
	# string ending in ".html".
	looks_like_path = (is_path
		or template.startswith("templates/")
		or (template.endswith('.html') and '\n' not in template))
	if looks_like_path:
		return jenv.get_template(template).render(context)
	return jenv.from_string(template).render(context)
def get_allowed_functions_for_jenv():
	"""Build the whitelist of frappe helpers exposed to Jinja templates."""
	import os
	import frappe
	import frappe.utils
	import frappe.utils.data
	from frappe.utils.autodoc import automodule, get_version
	from frappe.model.document import get_controller
	from frappe.website.utils import get_shade
	from frappe.modules import scrub
	import mimetypes
	# Expose every public callable from frappe.utils.data as `frappe.utils.*`.
	datautils = {}
	for key, obj in frappe.utils.data.__dict__.items():
		if key.startswith("_"):
			# ignore
			continue
		if hasattr(obj, "__call__"):
			# only allow functions
			datautils[key] = obj
	# Drop the cache-busting "_" parameter browsers append to requests.
	if "_" in getattr(frappe.local, 'form_dict', {}):
		del frappe.local.form_dict["_"]
	out = {
		# make available limited methods of frappe
		"frappe": {
			"_": frappe._,
			"get_url": frappe.utils.get_url,
			"format_value": frappe.format_value,
			"format_date": frappe.utils.data.global_date_format,
			"form_dict": getattr(frappe.local, 'form_dict', {}),
			"local": frappe.local,
			"get_hooks": frappe.get_hooks,
			"get_meta": frappe.get_meta,
			"get_doc": frappe.get_doc,
			"get_list": frappe.get_list,
			"get_all": frappe.get_all,
			"utils": datautils,
			"user": getattr(frappe.local, "session", None) and frappe.local.session.user or "Guest",
			"get_fullname": frappe.utils.get_fullname,
			"get_gravatar": frappe.utils.get_gravatar_url,
			"full_name": getattr(frappe.local, "session", None) and frappe.local.session.data.full_name or "Guest",
			"render_template": frappe.render_template
		},
		"autodoc": {
			"get_version": get_version,
			"automodule": automodule,
			"get_controller": get_controller
		},
		"_": frappe._,
		"get_shade": get_shade,
		"scrub": scrub,
		"guess_mimetype": mimetypes.guess_type,
		"dev_server": 1 if os.environ.get('DEV_SERVER', False) else 0
	}
	# Database-backed helpers are skipped while rendering setup help, where
	# no site/database context is available.
	if not frappe.flags.in_setup_help:
		out['get_visible_columns'] = frappe.get_attr("frappe.www.print.get_visible_columns")
		out['frappe']['date_format'] = frappe.db.get_default("date_format") or "yyyy-mm-dd"
		out['frappe']["db"] = {
			"get_value": frappe.db.get_value,
			"get_default": frappe.db.get_default,
		}
	return out
def get_jloader():
	"""Return the request-local Jinja loader, creating it on first use."""
	import frappe
	if not getattr(frappe.local, 'jloader', None):
		from jinja2 import ChoiceLoader, PackageLoader, PrefixLoader
		if frappe.local.flags.in_setup_help:
			apps = ['frappe']
		else:
			apps = frappe.local.flags.web_pages_apps or frappe.get_installed_apps(sort=True)
		# Reverse so that later-installed apps win; frappe always last.
		apps.reverse()
		if not "frappe" in apps:
			apps.append('frappe')
		frappe.local.jloader = ChoiceLoader(
			# search for something like app/templates/...
			[PrefixLoader(dict(
				(app, PackageLoader(app, ".")) for app in apps
			))]
			# search for something like templates/...
			+ [PackageLoader(app, ".") for app in apps]
		)
	return frappe.local.jloader
def set_filters(jenv):
	"""Register the standard frappe filters (and app-defined ones) on *jenv*."""
	import frappe
	from frappe.utils import global_date_format, cint, cstr, flt, markdown
	from frappe.website.utils import get_shade, abs_url
	jenv.filters["global_date_format"] = global_date_format
	jenv.filters["markdown"] = markdown
	jenv.filters["json"] = frappe.as_json
	jenv.filters["get_shade"] = get_shade
	jenv.filters["len"] = len
	jenv.filters["int"] = cint
	jenv.filters["str"] = cstr
	jenv.filters["flt"] = flt
	jenv.filters["abs_url"] = abs_url
	# App hooks are unavailable while rendering setup help.
	if frappe.flags.in_setup_help: return
	# load jenv_filters from hooks.py; each entry is "filter_name:dotted.path"
	for app in frappe.get_installed_apps():
		for jenv_filter in (frappe.get_hooks(app_name=app).jenv_filter or []):
			filter_name, filter_function = jenv_filter.split(":")
			jenv.filters[filter_name] = frappe.get_attr(filter_function)
|
FedoraScientific/salome-smesh | src/Tools/blocFissure/gmu/sortFaces.py | Python | lgpl-2.1 | 497 | 0.018109 | # -*- coding: utf-8 -*-
import logging
from geomsmesh import geompy
# -------------------------------------------------------------------- | ---------
# --- sort faces by surface area
def sortFaces(facesToSort):
    """Sort faces by surface area, ascending.

    Returns (sorted faces, smallest area, largest area).
    """
    logging.info('start')
    # Decorate-sort-undecorate: the enumerate index breaks area ties so the
    # geompy face objects themselves are never compared.
    decorated = sorted(
        (geompy.BasicProperties(face)[1], idx, face)
        for idx, face in enumerate(facesToSort))
    facesSorted = [face for (area, idx, face) in decorated]
    return facesSorted, decorated[0][0], decorated[-1][0]
|
YaniLozanov/Software-University | Python/PyCharm/03.Logical checks/09.Password Guess.py | Python | mit | 348 | 0 | # Problem:
# Write a program that reads a password (a single line of any text) and
# checks whether it matches the phrase "s3cr3t!P@ssw0rd".
# If it matches, print "Welcome".
# Otherwise, print "Wrong password!"
# Read the attempt and compare it against the expected secret phrase.
password = input()
print("Welcome" if password == "s3cr3t!P@ssw0rd" else "Wrong password!")
|
18F/regulations-site | regulations/tests/views_preamble_tests.py | Python | cc0-1.0 | 10,884 | 0 | # -*- coding: utf-8 -*-
from mock import patch
from unittest import TestCase
from datetime import date, timedelta
from django.http import Http404
from django.test import RequestFactory, override_settings
from fr_notices.navigation import make_preamble_nav
from regulations.generator.layers import diff_applier
from regulations.views import preamble
from regulations.views.preamble import CommentState
class PreambleViewTests(TestCase):
    """Tests for the preamble views."""
    # Minimal preamble tree shared by all tests: section '1-c' with two
    # children ('1-c-i', '1-c-x') and a sibling section '1-1'.
    _mock_preamble = dict(text='1', label=['1'], node_type='', children=[
        dict(text='2', label=['1', 'c'], node_type='', children=[
            dict(text='3', label=['1', 'c', 'i'], node_type='', children=[]),
            dict(text='4', label=['1', 'c', 'x'], node_type='', children=[])
        ]),
        dict(text='5', label=['1', '1'], node_type='', children=[])
    ])
    def test_find_subtree(self):
        """When a node is present in a tree, we should be able to find it.
        When it is not, we should get None"""
        root = self._mock_preamble
        fn = preamble.find_subtree
        # Every labelled node in the fixture is reachable by its full label.
        self.assertEqual(fn(root, ['1'])['text'], '1')
        self.assertEqual(fn(root, ['1', 'c'])['text'], '2')
        self.assertEqual(fn(root, ['1', 'c', 'i'])['text'], '3')
        self.assertEqual(fn(root, ['1', 'c', 'x'])['text'], '4')
        self.assertEqual(fn(root, ['1', '1'])['text'], '5')
        # Labels not in the fixture (wrong root, missing child, too deep)
        # all yield None.
        self.assertIsNone(fn(root, ['2']))
        self.assertIsNone(fn(root, ['1', '2']))
        self.assertIsNone(fn(root, ['1', 'c', 'r']))
        self.assertIsNone(fn(root, ['1', 'c', 'i', 'r']))
    @patch('fr_notices.navigation.CFRChangeBuilder')
    @patch('regulations.generator.generator.api_reader')
    @patch('regulations.views.preamble.ApiReader')
    def test_get_integration(self, ApiReader, api_reader, CFRChangeBuilder):
        """Verify that the contexts are built correctly before being sent to
        the template. AJAX/partial=true requests should only get the inner
        context (i.e. no UI-related context)"""
        ApiReader.return_value.preamble.return_value = self._mock_preamble
        api_reader.ApiReader.return_value.layer.return_value = {
            '1-c-x': ['something']
        }
        view = preamble.PreambleView.as_view()
        path = '/preamble/1/c/x?layers=meta'
        # Full (non-AJAX) request: node data plus UI context such as the TOC.
        response = view(RequestFactory().get(path), paragraphs='1/c/x')
        self.assertEqual(
            response.context_data['sub_context']['node']['text'], '4')
        self.assertEqual(
            response.context_data['sub_context']['node']['children'], [])
        # layer data is present
        self.assertEqual(
            response.context_data['sub_context']['node']['meta'], 'something')
        self.assertEqual(
            response.context_data['preamble_toc'],
            make_preamble_nav(self._mock_preamble['children']),
        )
        self.assertNotIn('node', response.context_data)
        # partial=true query param: only the inner sub_context is built.
        response = view(RequestFactory().get(path + '&partial=true'),
                        paragraphs='1/c/x')
        self.assertIn('sub_context', response.context_data)
        self.assertEqual(
            response.context_data['sub_context']['node']['text'],
            '4',
        )
        # AJAX header behaves the same as partial=true.
        request = RequestFactory().get(
            path, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        response = view(request, paragraphs='1/c/x')
        self.assertIn('sub_context', response.context_data)
        self.assertEqual(
            response.context_data['sub_context']['node']['text'],
            '4',
        )
    @override_settings(
        PREAMBLE_INTRO={'1': {'meta': {
            'publication_date': '2001-01-01',
            # Closing date is tomorrow, so the comment period must be OPEN.
            'comments_close': (date.today() + timedelta(days=1)).isoformat()
        }}})
    @patch('regulations.views.preamble.ApiReader')
    def test_comments_open_from_settings(self, ApiReader):
        """
        Mock the PREAMBLE_INTRO data from settings for this test of the
        comments being open.
        """
        _, meta, _ = preamble.notice_data('1')
        assert meta['comment_state'] == CommentState.OPEN
    def _setup_mock_response(self, ApiReader, **kwargs):
        """Mock the ApiReader response, replacing meta data fields with
        kwargs"""
        ApiReader.return_value.preamble.return_value = self._mock_preamble
        # Realistic notice payload; individual tests override dates via kwargs.
        notice = {
            "action": "Proposed rule",
            "agencies": ["Environmental Protection Agency"],
            "cfr_title": 40,
            "cfr_parts": ["300"],
            "comments_close": "2011-09-09",
            "dockets": ["EPA-HQ-SFUND-2010-1086",
                        "FRL-9925-69-OLEM"],
            "primary_agency": "Environmental Protection Agency",
            "title": ("Addition of a Subsurface Intrusion Component to the "
                      "Hazard Ranking System"),
            "publication_date": "2011-02-02",
            "regulatory_id_numbers": ["2050-AG67"],
        }
        notice.update(kwargs)
        ApiReader.return_value.notice.return_value = notice
    @patch('regulations.views.preamble.ApiReader')
    def test_comments_open(self, ApiReader):
        # Comments close in the future -> period is OPEN.
        future = date.today() + timedelta(days=10)
        self._setup_mock_response(ApiReader, comments_close=future.isoformat())
        _, meta, _ = preamble.notice_data('1')
        assert meta['comment_state'] == CommentState.OPEN
    @patch('regulations.views.preamble.ApiReader')
    def test_comments_prepub(self, ApiReader):
        # Publication date in the future -> period is PREPUB.
        future = date.today() + timedelta(days=10)
        self._setup_mock_response(ApiReader,
                                  publication_date=future.isoformat())
        _, meta, _ = preamble.notice_data('1')
        assert meta['comment_state'] == CommentState.PREPUB
    @patch('regulations.views.preamble.ApiReader')
    def test_comments_closed(self, ApiReader):
        # Default fixture dates are all in the past -> period is CLOSED.
        self._setup_mock_response(ApiReader)
        _, meta, _ = preamble.notice_data('1')
        assert meta['comment_state'] == CommentState.CLOSED
    @patch('fr_notices.navigation.CFRChangeBuilder')
    @patch('regulations.generator.generator.api_reader')
    @patch('regulations.views.preamble.ApiReader')
    def test_get_top_level_redirect(self, ApiReader, api_reader,
                                    CFRChangeBuilder):
        # Requesting the document root redirects to its first subsection.
        ApiReader.return_value.preamble.return_value = self._mock_preamble
        api_reader.ApiReader.return_value.layer.return_value = {
            '1-c-x': ['something']
        }
        view = preamble.PreambleView.as_view()
        path = '/preamble/1'
        response = view(RequestFactory().get(path), paragraphs='1')
        assert response.status_code == 302
        assert response.get('Location') == '/preamble/1/c'
    @patch('regulations.views.preamble.ApiReader')
    def test_get_404(self, ApiReader):
        """When a requested doc is not present, we should return a 404"""
        ApiReader.return_value.preamble.return_value = None
        view = preamble.PreambleView.as_view()
        self.assertRaises(Http404, view,
                          RequestFactory().get('/preamble/1/c/x'),
                          paragraphs='1/c/x')
    @patch('regulations.views.preamble.ApiReader')
    def test_get_subtree_404(self, ApiReader):
        """When a requested _subtree_ is not present, we should 404"""
        # The document exists, but the '1/not/here' label does not.
        ApiReader.return_value.preamble.return_value = self._mock_preamble
        view = preamble.PreambleView.as_view()
        self.assertRaises(Http404, view,
                          RequestFactory().get('/preamble/1/not/here'),
                          paragraphs='1/not/here')
@patch('regulations.views.preamble.ApiReader')
def test_notice_data(self, ApiReader):
"""We should try to fetch data corresponding to both the Preamble and
the Notice"""
ApiReader.return_value.preamble.return_value = self._mock_preamble
ApiReader.return_value.notice.return_value = {
'publication_date': '2002-02-02',
'comments_close': '2003-03-03',
'cfr_title': 21, 'cfr_parts': ['123']}
for doc_id in ('123_456', '123-456'):
preamble_, meta, notice = preamble.notice_data(doc_id)
self.assertEqual(preamble_, self._mock_preamble)
|
ducksboard/libsaas | libsaas/filters/auth.py | Python | mit | 6,734 | 0 | import base64
from hashlib import sha1
import hmac
import time
import random
from libsaas import http, port
class BasicAuth(object):
    """
    Adds a Basic authentication header to each request.
    """
    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __call__(self, request):
        # RFC2617 nominally restricts username/password to *TEXT, but real
        # services (e.g. Zendesk) accept arbitrary unicode in both. So: join
        # the credentials as text, base64-encode the bytes, and keep the
        # resulting base64 as latin-1 text for maximum compatibility.
        credentials = port.to_u('{0}:{1}').format(port.to_u(self.username),
                                                  port.to_u(self.password))
        token = port.to_u(base64.b64encode(port.to_b(credentials)), 'latin-1')
        request.headers['Authorization'] = 'Basic {0}'.format(token)
class OAuthRFC5849(object):
    """
    Signs each request according to RFC5849.
    Only supports header-based authentication and only uses HMAC-SHA1.
    The oauth_token and oauth_token_secret parameters can be None. This is
    useful for making Temporary Credentials requests (section 2.1 of the RFC).
    """
    def __init__(self, oauth_token, oauth_token_secret, key, secret):
        self.oauth_token = oauth_token
        self.oauth_token_secret = oauth_token_secret
        self.key = key
        self.secret = secret
    def __call__(self, request):
        # Sign the request and attach the result as an Authorization header.
        nonce = self.generate_nonce()
        timestamp = self.generate_timestamp()
        base = self.get_base_string(request, nonce, timestamp)
        # Signing key = encoded consumer secret '&' encoded token secret
        # (token secret may be absent for temporary-credential requests).
        key = self.encode(self.secret) + '&'
        if self.oauth_token_secret:
            key += self.encode(self.oauth_token_secret)
        digest = hmac.new(port.to_b(key), port.to_b(base), sha1).digest()
        signature = self.encode(base64.b64encode(digest))
        params = self.oauth_params(nonce, timestamp, signature)
        # Note: `key` inside the generator expression is scoped to it and
        # does not clobber the signing key above.
        auth = ','.join('{0}="{1}"'.format(key, val)
                        for key, val in sorted(params))
        header = 'OAuth ' + auth
        if request.headers is None:
            request.headers = {}
        request.headers['Authorization'] = header
    def use_request_params(self, request):
        """
        Whether request parameters should be included in the signature base
        string. For RFC5849 OAuth, and under the assumptions libsaas makes,
        they are included if request params are not a blob.
        """
        if request.params is None:
            return False
        if isinstance(request.params, (port.text_type, port.binary_type)):
            return False
        return True
    def get_base_string(self, request, nonce, timestamp):
        # if there are query string params, remove them from the basic string,
        # as encode_parameters already took care of including them
        parsed = port.urlparse(request.uri)
        uri = port.urlunparse((parsed.scheme, parsed.netloc, parsed.path,
                               parsed.params, '', parsed.fragment))
        # base string = METHOD & encoded-uri & encoded-sorted-params
        return (self.encode(request.method) + '&' +
                self.encode(uri) + '&' +
                self.normalized_params(request, nonce, timestamp))
    def normalized_params(self, request, nonce, timestamp):
        params = ()
        # check for query string parameters
        params += self.encode_qs_params(request)
        # if request parameters are to be included, encode them
        if self.use_request_params(request):
            params += self.encode_request_params(request)
        # add OAuth parameters, like oauth_token, oauth_nonce etc
        params += self.oauth_params(nonce, timestamp)
        # sort them the way RFC5849 requires
        normalized = '&'.join(sorted((key + '=' + value
                                      for key, value in params)))
        # and encode the resulting string
        return self.encode(normalized)
    def encode_qs_params(self, request):
        # If there is a query string, split it out and include in the
        # parameters. In typical libsaas usage there will never be a query
        # string, since parameters should be passed as Request.params, but just
        # in case someone tried, check it.
        query = port.urlparse(request.uri).query
        params = port.parse_qsl(query, True)
        return tuple((self.encode(key), self.encode(val))
                     for key, val in params)
    def encode_request_params(self, request):
        # if params are a dict, make it into a sequence
        params = request.params
        try:
            params = tuple(params.items())
        except AttributeError:
            pass
        # encode keys and values
        return tuple((self.encode(key), self.encode(val))
                     for key, val in params)
    def generate_nonce(self):
        # 64 bits of randomness, decimal-encoded.
        return str(random.getrandbits(64))
    def generate_timestamp(self):
        # Seconds since the epoch, per RFC5849 section 3.3.
        return str(int(time.time()))
    def oauth_params(self, nonce, timestamp, signature=None):
        # The oauth_* protocol parameters; token/signature only if available.
        params = (('oauth_nonce', nonce), ('oauth_timestamp', timestamp),
                  ('oauth_consumer_key', self.key),
                  ('oauth_signature_method', 'HMAC-SHA1'))
        if self.oauth_token:
            params += (('oauth_token', self.oauth_token), )
        if signature:
            params += (('oauth_signature', signature), )
        return params
    def encode(self, val):
        # RFC5849 says that ~ should not be quoted, but / should
        return port.quote(port.to_b(val), safe='~')
class OAuth1a(OAuthRFC5849):
    """
    Signs each request according to OAuth Core 1.0 Revision A.
    """
    def use_request_params(self, request):
        """
        OAuth 1.0a only mentions POST requests, so for instance PUT bodies,
        even it their content-type is application/x-www-form-urlencoded won't
        be part of the signature base string.
        """
        if request.params is None:
            return False
        # GET parameters get appended to the URL, so they're used
        if request.method.upper() in http.URLENCODE_METHODS:
            return True
        # Only POST bodies are signed; other methods (PUT, PATCH...) are not.
        if request.method.upper() != 'POST':
            return False
        # A pre-serialized (blob) body is never signed.
        if isinstance(request.params, (port.text_type, port.binary_type)):
            return False
        return True
# Default `OAuth` alias points at the RFC5849 implementation
# (the sane version of the spec).
OAuth = OAuthRFC5849
|
MontrealCorpusTools/polyglot-server | iscan/templatetags/extra_tags.py | Python | mit | 253 | 0.003953 | from django imp | ort template
from django.conf import settings
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag
@stringfilter
def get_settings_val(setting):
    # Template tag: look up *setting* by name on django.conf.settings.
    return getattr(settings, setting)
|
jesusaurus/openstack-tests | swift/swiftTest.py | Python | apache-2.0 | 9,677 | 0.002273 | #!/usr/bin/env python
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
#python libs
import os
import csv
import hashlib
from datetime import datetime
#swift libs
from swiftclient import client as swift
class SwiftServiceTest(object):
def __init__(self, username=None, password=None, tenant=None,
auth_url=None, auth_ver='2.0', swift_url=None, debug=False):
self.username = username
self.password = password
self.tenant = tenant
self.auth_url = auth_url
self.swift_url = swift_url
self.auth_ver = auth_ver
self.debug = debug
self.token = None
self.http_conn = None
    def connect(self, force=False):
        """Authenticate and open a swift HTTP connection.

        No-op when a connection already exists, unless *force* is True.
        """
        if self.http_conn is not None and not force:
            return
        swift_url, self.token = swift.get_auth(auth_url=self.auth_url,
                                               user=self.username,
                                               key=self.password,
                                               auth_version=self.auth_ver,
                                               tenant_name=self.tenant)
        if self.debug:
            print(self.auth_url)
            print(self.token)
            print(self.swift_url)
            print(swift_url)
            # NOTE(review): bare `print` is a no-op reference to the function
            # under Python 3 -- probably meant `print()` for a blank line.
            print
        # Warn (but proceed with the configured URL) if the auth service
        # returned a different storage endpoint.
        if not swift_url == self.swift_url:
            print("Different swift_url returned from swift")
        self.http_conn = swift.http_connection(self.swift_url)
        if self.debug:
            print(self.http_conn)
            print
    def get_account(self, deep=True):
        """Fetch account metadata and container listing.

        When *deep* is True, also list the objects of every container.
        """
        if not self.http_conn:
            self.connect()
        account_info = swift.head_account(url=self.swift_url,
                                          token=self.token,
                                          http_conn=self.http_conn)
        account_head, containers = swift.get_account(url=self.swift_url,
                                                     token=self.token,
                                                     http_conn=self.http_conn)
        if self.debug:
            print(account_info)
            print(account_head)
            for container in containers:
                print(container)
            print
        if deep:
            self.get_containers(containers)
    def get_containers(self, containers):
        """Fetch each container's info and object listing (debug prints only)."""
        for container in containers:
            info, objects = swift.get_container(url=self.swift_url,
                                                token=self.token,
                                                http_conn=self.http_conn,
                                                container=container['name'])
            if self.debug:
                print(container['name'])
                print(info)
                for obj in objects:
                    print(obj)
                print
    def create_container(self, name, headers=None):
        """Create container *name*, optionally with metadata *headers*."""
        # Lazily authenticate on first use.
        if not self.http_conn:
            self.connect()
        swift.put_container(url=self.swift_url, token=self.token,
                            http_conn=self.http_conn, container=name,
                            headers=headers)
        if self.debug:
            print("Container {0} created".format(name))
    def find_container(self, name):
        """Return (headers, object-listing) for container *name*."""
        if not self.http_conn:
            self.connect()
        retval = swift.get_container(url=self.swift_url, token=self.token,
                                     http_conn=self.http_conn, container=name)
        if self.debug:
            print(retval)
        return retval
    def modify_container(self, name, headers):
        """Update the metadata *headers* of container *name*."""
        if not self.http_conn:
            self.connect()
        swift.post_container(url=self.swift_url, token=self.token,
                             http_conn=self.http_conn, container=name,
                             headers=headers)
        if self.debug:
            print("Container {0} modified".format(name))
    def delete_container(self, name):
        """Delete container *name* (must already be empty)."""
        if not self.http_conn:
            self.connect()
        swift.delete_container(url=self.swift_url, token=self.token,
                               http_conn=self.http_conn, container=name)
        if self.debug:
            print("Container {0} deleted".format(name))
def create_object(self, cname, oname, contents, length=None):
swift.put_object(url=self.swift_url, token=self.token,
http_conn=self.http_conn, container=cname,
name=oname, contents=contents, content_length=length)
def get_object(self, cname, oname):
return swift.get_object(url=self.swift_url, token=self.token,
http_conn=self.http_conn, container=cname,
name=oname)
def delete_object(self, cname, oname):
swift.delete_object(url=self.swift_url, token=self.token,
http_conn=self.http_conn, container=cname,
name=oname)
def test_api(self, test_name):
print("Checking API")
self.connect()
self.get_account()
self.create_container(test_name,
headers={'X-Container-Meta-Foo': 'Foo'})
self.modify_container(test_name,
headers={'X-Container-Meta-Foo': 'Bar'})
self.find_container(test_name)
self.delete_container(test_name)
self.get_account()
    def stress_test(self, test_name, count=10, size=2**20):
        """Create ``count`` containers with ``count`` random objects each,
        verify each object's SHA-1 against container metadata, delete
        everything, and write create/delete timings to a CSV file.

        NOTE(review): this looks like Python 2 code — /dev/urandom is opened
        in text mode and the result fed to hashlib.sha1, and the CSV file is
        opened 'w+b'; on Python 3 both would need binary/text fixes. Confirm
        the target interpreter before changing.
        """
        print("Creating and deleting {0} containers".format(count))
        self.connect()
        start = datetime.now()
        with open('/dev/urandom') as dev_rand:
            for i in range(count):
                name = '{0}{1}'.format(test_name,i)
                self.create_container(name)
                # NOTE(review): the inner loop reuses ``i``, shadowing the
                # outer index; harmless here because the outer value is not
                # read again until the next iteration.
                for i in range(count):
                    obj = 'obj{0}'.format(i)
                    if self.debug:
                        print(name,obj)
                    contents = dev_rand.read(size)
                    # Store the object's SHA-1 as container metadata so the
                    # delete phase can verify integrity.
                    sha = hashlib.sha1(contents).hexdigest()
                    header='X-Container-Meta-{0}'.format(obj)
                    headers={header: sha}
                    self.create_object(cname=name, oname=obj,
                                       contents=contents, length=size)
                    self.modify_container(name=name, headers=headers)
        create_time = datetime.now() - start
        start = datetime.now()
        for i in range(count):
            name = '{0}{1}'.format(test_name,i)
            cont = self.find_container(name)
            for i in range(count):
                obj = 'obj{0}'.format(i)
                headers, contents = self.get_object(cname=name, oname=obj)
                sha = hashlib.sha1(contents).hexdigest()
                # Response headers come back lower-cased.
                header = 'x-container-meta-{0}'.format(obj)
                if cont[0][header] != sha:
                    print
                    print('Bad SHA')
                    print
                    raise ValueError
                self.delete_object(cname=name, oname=obj)
            self.delete_container(name)
        delete_time = datetime.now() - start
        # Persist the two phase timings (in minutes) for later analysis.
        name = 'stress-{0}-{1}-{2}-times.csv'.format(test_name, count, size)
        with open(name, 'w+b') as csvfile:
            output = csv.writer(csvfile)
            output.writerow(['Create time', 'Delete time'])
            output.writerow([create_time.seconds / 60.0,
                             delete_time.seconds / 60.0])
def test_suite(self, test_name):
self.test_api(test_name)
self.stress_test( |
mhnatiuk/phd_sociology_of_religion | scrapper/build/pyOpenSSL/examples/simple/client.py | Python | gpl-2.0 | 1,260 | 0.003968 | # -*- coding: latin-1 -*-
#
# Copyright (C) AB Strakt
# Copyright (C) Jean-Paul Calderone
# See LICENSE for details. |
"""
Simple SSL client, using blocking I/O
"""
f | rom OpenSSL import SSL
import sys, os, select, socket
def verify_cb(conn, cert, errnum, depth, ok):
    """Certificate-verification callback for SSL.Context.set_verify.

    Returning a truthy value accepts the certificate at this depth
    (Python 2 example code).
    """
    # This obviously has to be updated
    print 'Got certificate: %s' % cert.get_subject()
    return ok
# --- Script body: connect to HOST:PORT over SSL and relay stdin lines ---
if len(sys.argv) < 3:
    print 'Usage: python[2] client.py HOST PORT'
    sys.exit(1)
# Certificates are looked up relative to the script's own directory.
dir = os.path.dirname(sys.argv[0])
if dir == '':
    dir = os.curdir
# Initialize context
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.set_verify(SSL.VERIFY_PEER, verify_cb) # Demand a certificate
ctx.use_privatekey_file (os.path.join(dir, 'client.pkey'))
ctx.use_certificate_file(os.path.join(dir, 'client.cert'))
ctx.load_verify_locations(os.path.join(dir, 'CA.cert'))
# Set up client
sock = SSL.Connection(ctx, socket.socket(socket.AF_INET, socket.SOCK_STREAM))
sock.connect((sys.argv[1], int(sys.argv[2])))
# Echo loop: send each stdin line, print whatever the server replies.
while 1:
    line = sys.stdin.readline()
    if line == '':
        break
    try:
        sock.send(line)
        sys.stdout.write(sock.recv(1024))
        sys.stdout.flush()
    except SSL.Error:
        print 'Connection died unexpectedly'
        break
sock.shutdown()
sock.close()
|
dongfangyixi/HypeNet-tensorflow | model.py | Python | apache-2.0 | 9,884 | 0.030763 | #coding:utf-8
import tensorflow as tf
from TFCommon.Layers import EmbeddingLayer
from TFCommon.RNNCell import LSTMCell
class HypeNet(object):
def __init__(self,word_num,word_embd_dim,pos_num,pos_embd_dim,
parse_num,parse_embd_dim,LSTM_dim,x_vector_dim,y_vector_dim,classification_dim):
self.word_num=word_num
self.word_embd_dim=word_embd_dim
self.pos_num=pos_num
self.pos_embd_dim=pos_embd_dim
self.parse_num=parse_num
self.parse_embd_dim=parse_embd_dim
#self.direct_embd_dim=direct_embd_dim
self.LSTM_dim=LSTM_dim
self.x_vector_dim=x_vector_dim
self.y_vector_dim=y_vector_dim
self.classification_dim=classification_dim
self.global_step=tf.Variable(0,name='g_s',trainable=False)
self.class_num=2
self.lr=0.001
self.saver=tf.train.Saver(tf.global_variables())
#self.x_seq_len=x_seq_len
#self.y_seq_len=y_seq_len
#word input: a list of path with variable length path_num contain elem of shape (seq_len,batch_size,word_idx)
#postag input: same
#parse input: same
#direct input: same except last dim (batch_size,seq_len,direct_dim=3)
#path_count: shape (batch_size,path_num)
def _build(self,input_word,input_pos,input_parse,input_direct,path_count,input_x_phrase,input_y_phrase,train_flag):
with tf.variable_scope('PATH_LOOP'):
#pathnum=5
word_embd_layer=EmbeddingLayer(self.word_num,self.word_embd_dim,reuse=None)
i_w_pathnum,i_w_seqlen,i_w_batchsize=input_word.get_shape()
i_pos_pathnum,i_pos_seqlen,i_pos_batchsize=input_pos.get_shape()
i_parse_pathnum,i_parse_seqlen,i_parse_batchsize=input_parse.get_shape()
inp | ut_direct=tf.expand_dims(input_direct,-1)
i_d_pathnum,i_d_seqlen,i_d_batchsize,i_d_dim=input_direct.get_shape()
word_init=tf.zeros(shape=(i_w_seqlen,i_w_batchsize),dtype=tf.int32)
pos_init=tf.zeros(shape=(i_pos_seqlen,i_pos_batchsize),dtype=tf.int32)
parse_init=tf.zeros(shape=(i_parse_seqlen,i_parse_batchsize),dtype=tf.int32)
| direct_init=tf.zeros(shape=(i_d_seqlen,i_d_batchsize,i_d_dim),dtype=tf.float32)
weighted_sum=tf.zeros(shape=(i_w_batchsize,self.LSTM_dim),dtype=tf.float32)
if train_flag:
cell=tf.nn.rnn_cell.DropoutWrapper(LSTMCell(self.LSTM_dim),input_keep_prob=0.7,output_keep_prob=0.7)
else:
cell=LSTMCell(self.LSTM_dim)
count_sum=tf.reduce_sum(path_count,axis=0)
samble=tf.reduce_sum(path_count,axis=-1)
#print 'samble',samble.get_shape()
#result=tf.eu
cond=lambda x,*_: tf.not_equal(samble[x],0.)#not samble[x]==0. #x<input_word.get_shape()[0]
def _body(path_idx,
input_word_path,
input_pos_path,
input_parse_path,
input_direct_path,
weighted_sum):
#print 'w_s',weighted_sum.get_shape()
input_word_path=tf.nn.embedding_lookup(input_word,path_idx)
input_pos_path=tf.nn.embedding_lookup(input_pos,path_idx)
input_parse_path=tf.nn.embedding_lookup(input_parse,path_idx)
input_direct_path=tf.nn.embedding_lookup(input_direct,path_idx)
#print 'word:',input_word_path.get_shape()
#print 'input_direct_path:',input_direct_path.get_shape()
count=tf.nn.embedding_lookup(path_count,path_idx)
path_idx+=1
with tf.variable_scope('encoder'):
word_embd=word_embd_layer(input_word_path,scope='word_embd')
pos_embd=EmbeddingLayer(self.pos_num,self.pos_embd_dim)(input_pos_path,scope='pos_embd')
parse_embd=EmbeddingLayer(self.parse_num,self.parse_embd_dim)(input_parse_path,scope='parse_embd')
print word_embd.get_shape()
print pos_embd.get_shape()
print parse_embd.get_shape()
print input_direct_path.get_shape()
feature_vec=tf.concat(values=[word_embd,pos_embd,parse_embd,input_direct_path],axis=-1,name='concat')#shape ([path_num],seq_len,batch_size,sum(dim))
path_encoder,_=tf.nn.dynamic_rnn(cell,feature_vec,dtype=tf.float32)# shape batch_size,LSTM_dim
print count
print 'path_encoder',path_encoder.get_shape()
weighted_sum=tf.add(weighted_sum,path_encoder[-1]*tf.expand_dims(count,-1)) #if the path is padded set path_count[path_idx]=0
print 'w_s2',weighted_sum.get_shape()
return path_idx,input_word_path,input_pos_path,input_parse_path,input_direct_path,weighted_sum
_,_,_,_,_,weighted_sum=tf.while_loop(cond,_body,loop_vars=[0,word_init,pos_init,parse_init,direct_init,weighted_sum])
weighted_avg=weighted_sum/tf.expand_dims(count_sum,-1)
with tf.variable_scope('x_phrase'):
x_embd=word_embd_layer(input_x_phrase,scope='word_embd')
if train_flag:
x_vector,_=tf.nn.dynamic_rnn(
tf.nn.rnn_cell.DropoutWrapper(
LSTMCell(self.x_vector_dim),
input_keep_prob=0.7,
output_keep_prob=0.7),
x_embd,
dtype=tf.float32)
else:
x_vector,_=tf.nn.dynamic_rnn(
LSTMCell(self.x_vector_dim),
x_embd,
dtype=tf.float32)
#x_vector=word_embd_layer(input_x_phrase,scope='word_embd')
x_vector=tf.transpose(x_vector,[1,0,2])[-1]
print 'x_vector:',x_vector.get_shape()
with tf.variable_scope('y_phrase'):
y_embd=word_embd_layer(input_y_phrase,scope='word_embd')
if train_flag:
y_vector,_=tf.nn.dynamic_rnn(
tf.nn.rnn_cell.DropoutWrapper(
LSTMCell(self.y_vector_dim),
input_keep_prob=0.7,
output_keep_prob=0.7),
y_embd,
dtype=tf.float32)
else:
y_vector,_=tf.nn.dynamic_rnn(
LSTMCell(self.y_vector_dim),
y_embd,
dtype=tf.float32)
#y_vector=word_embd_layer(input_y_phrase,scope='word_embd')
y_vector=tf.transpose(y_vector,[1,0,2])[-1]
with tf.variable_scope('classification'):
feature=tf.concat((x_vector,weighted_avg,y_vector),axis=-1)
feature_dim=feature.get_shape()[1]
W=tf.get_variable(name='cl_W',shape=(feature_dim,self.class_num))
b=tf.get_variable(name='cl_b',shape=(self.class_num))
score=tf.matmul(feature,W)+b
#print 'score',score.get_shape()
return score,samble
def calculate_loss(self,labels,logits):
loss=tf.nn.softmax_cross_entropy_with_logits(labels=labels,logits=logits)
return tf.reduce_mean(loss)
def back_prop(self):
with tf.variable_scope('backprop'):
self.opt=tf.train.AdamOptimizer(self.lr)
self.upd=self.opt.minimize(self.loss,global_step=self.global_step)
return self.upd
def summary(self,prefix):
collection=[]
collection.append(tf.summary.scalar('%s/loss'%prefix,self.loss))
merged=tf.summary.merge(collection)
return merged
def build(self,seq_len,batch_size,x_seq_len,y_seq_len,train_flag=True):
#seq_len=10
#batch_size=8
#x_p_len=3
#y_p_len=4
with tf.variable_scope('INPUTS') |
gvlproject/python-genomespaceclient | genomespaceclient/__init__.py | Python | mit | 162 | 0 | from .client import GSDataFormat # noqa
from .client import GSFileMetadata  # noqa
from .client import GenomeSpaceClient  # noqa
from .shell import main  # noqa
|
saymedia/seosuite-dashboard-api | api/admin.py | Python | mit | 187 | 0.005348 | from django.contrib import admin
from api.models import (
CrawlUrls,
CrawlLinks,
)
# Register your models here.
admin.site.register(CrawlUrls)
admin.site.register(CrawlLinks)
voyagersearch/voyager-py | processing/locale/make_mo_files.py | Python | apache-2.0 | 1,085 | 0.002765 | # (C) Copyright 2014 Voyager Search
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed | to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import glob
def make_mo_files():
    """Compile every .po catalog under LC_MESSAGES/ into a .mo file.

    Uses the msgfmt helper shipped in CPython's "tools/i18n" directory
    (Windows layout, next to the interpreter); silently does nothing when
    the helper or a catalog file is unavailable.
    """
    po_files = glob.glob(os.path.join(os.path.dirname(__file__), 'LC_MESSAGES', '*.po'))
    try:
        sys.path.append(os.path.join(os.path.dirname(sys.executable), "tools", "i18n"))
        import msgfmt
        for po_file in po_files:
            msgfmt.make(po_file, po_file.replace('.po', '.mo'))
    except (IOError, ImportError):
        # Best-effort: a missing helper or unreadable catalog is not fatal.
        pass
# Allow running this module directly as a build step.
if __name__ == '__main__':
    make_mo_files()
|
JudoWill/glue | glue/utils/qt/tests/test_qmessagebox_widget.py | Python | bsd-3-clause | 357 | 0 | from .. import QMessageBoxPatched as QMessageBox
from ....qt import get_qapp
from ....external.qt import | QtGu | i
def test_main():
    """Exercise the patched QMessageBox: copying the detailed text must
    land on the application clipboard."""
    app = get_qapp()
    box = QMessageBox(QMessageBox.Critical, "Error", "An error occurred")
    box.setDetailedText("Spam")
    box.select_all()
    box.copy_detailed()
    assert app.clipboard().text() == "Spam"
    app.quit()
|
googlestadia/renderdoc | util/test/tests/Vulkan/VK_CBuffer_Zoo.py | Python | mit | 31,170 | 0.004588 | import rdtest
import renderdoc as rd
class VK_CBuffer_Zoo(rdtest.TestCase):
def get_capture(self):
return rdtest.run_and_capture("demos_x64", "VK_CBuffer_Zoo", 5)
def check_capture(self):
draw = self.find_draw("Draw")
self.check(draw is not None)
self.controller.SetFrameEvent(draw.eventId, False)
# Make an output so we can pick pixels
out: rd.ReplayOutput = self.controller.CreateOutput(rd.CreateHeadlessWindowingData(), rd.ReplayOutputType.Texture)
self.check(out is not None)
out.SetDimensions(100, 100)
pipe: rd.PipeState = self.controller.GetPipelineState()
stage = rd.ShaderStage.Pixel
# Verify that the GLSL draw is first
disasm = self.controller.DisassembleShader(pipe.GetGraphicsPipelineObject(), pipe.GetShaderReflection(stage),
'')
self.check('GLSL' in disasm)
cbuf: rd.BoundCBuffer = pipe.GetConstantBuffer(stage, 0, 0)
var_check = rdtest.ConstantBufferChecker(
self.controller.GetCBufferVariableContents(pipe.GetShader(stage),
pipe.GetShaderEntryPoint(stage), 0,
cbuf.resourceId, cbuf.byteOffset))
# For more detailed reference for the below checks, see the commented definition of the cbuffer
# in the shader source code in the demo itself
# vec4 a;
var_check.check('a').cols(4).rows(1).value([0.0, 1.0, 2.0, 3.0])
# vec3 b;
var_check.check('b').cols(3).rows(1).value([4.0, 5.0, 6.0])
# vec2 c; vec2 d;
var_check.check('c').cols(2).rows(1).value([8.0, 9.0])
var_check.check('d').cols(2).rows(1).value([10.0, 11.0])
# float e; vec3 f;
var_check.check('e').cols(1).rows(1).value([12.0])
var_check.check('f').cols(3).rows(1).value([16.0, 17.0, 18.0])
# vec4 dummy0;
var_check.check('dummy0')
# float j; vec2 k;
var_check.check('j').cols(1).rows(1).value([24.0])
var_check.check('k').cols(2).rows(1).value([26.0, 27.0])
# vec2 l; float m;
var_check.check('l').cols(2).rows(1).value([28.0, 29.0])
var_check.check('m').cols(1).rows(1).value([30.0])
# float n[4];
var_check.check('n').cols(0).rows(0).arraySize(4).members({
0: lambda x: x.cols(1).rows(1).value([32.0]),
1: lambda x: x.cols(1).rows(1).value([36.0]),
2: lambda x: x.cols(1).rows(1).value([40.0]),
3: lambda x: x.cols(1).rows(1).value([44.0]),
})
# vec4 dummy1;
var_check.check('dummy1')
# float o[4];
var_check.check('o').cols(0).rows(0).arraySize(4).members({
0: lambda x: x.cols(1).rows(1).value([52.0]),
1: lambda x: x.cols(1).rows(1).value([56.0]),
2: lambda x: x.cols(1).rows(1).value([60.0]),
3: lambda x: x.cols(1).rows(1).value([64.0]),
})
# float p;
var_check.check('p').cols(1).rows(1).value([68.0])
# vec4 dummy2;
var_check.check('dummy2')
# column_major vec4x4 q;
var_check.check('q').cols(4).rows(4).column_major().value([76.0, 80.0, 84.0, 88.0,
77.0, 81.0, 85.0, 89.0,
78.0, 82.0, 86.0, 90.0,
79.0, 83.0, 87.0, 91.0])
# row_major vec4x4 r;
var_check.check('r').cols(4).rows(4).row_major().value([92.0, 93.0, 94.0, 95.0,
96.0, 97.0, 98.0, 99.0,
100.0, 101.0, 102.0, 103.0])
# column_major vec4x3 s;
var_check.check('s').cols(4).rows(3).column_major().value([108.0, 112.0, 116.0, 120.0,
109.0, 113.0, 117.0, 121.0,
110.0, 114.0, 118.0, 122.0])
# vec4 dummy3;
var_check.check('dummy3')
# row_major vec4x3 t;
var_check.check('t').cols(4).rows(3).row_major().value([128.0, 129.0, 130.0, 131.0,
132.0, 133.0, 134.0, 135.0,
| 136.0, 137.0, 138.0, 139.0])
# vec4 dummy4;
var_check.check('dummy4')
# | column_major vec2x3 u;
var_check.check('u').cols(3).rows(2).column_major().value([144.0, 148.0, 152.0,
145.0, 149.0, 153.0])
# vec4 dummy5;
var_check.check('dummy5')
# row_major vec3x2 v;
var_check.check('v').cols(3).rows(2).row_major().value([160.0, 161.0, 162.0,
164.0, 165.0, 166.0])
# vec4 dummy6;
var_check.check('dummy6')
# column_major vec3x2 w;
var_check.check('w').cols(2).rows(2).column_major().value([172.0, 176.0,
173.0, 177.0])
# vec4 dummy7;
var_check.check('dummy7')
# row_major vec3x2 x;
var_check.check('x').cols(2).rows(2).row_major().value([184.0, 185.0,
188.0, 189.0])
# vec4 dummy8;
var_check.check('dummy8')
# row_major vec2x2 y;
var_check.check('y').cols(2).rows(2).row_major().value([196.0, 197.0,
200.0, 201.0])
# float z;
var_check.check('z').cols(1).rows(1).value([204.0])
# vec4 dummy9;
var_check.check('dummy9')
# vec4 multiarray[3][2];
var_check.check('multiarray').cols(0).rows(0).arraySize(3).members({
0: lambda x: x.cols(0).rows(0).arraySize(2).members({
0: lambda y: y.cols(4).rows(1).value([228.0, 229.0, 230.0, 231.0]),
1: lambda y: y.cols(4).rows(1).value([232.0, 233.0, 234.0, 235.0]),
}),
1: lambda x: x.cols(0).rows(0).arraySize(2).members({
0: lambda y: y.cols(4).rows(1).value([236.0, 237.0, 238.0, 239.0]),
1: lambda y: y.cols(4).rows(1).value([240.0, 241.0, 242.0, 243.0]),
}),
2: lambda x: x.cols(0).rows(0).arraySize(2).members({
0: lambda y: y.cols(4).rows(1).value([244.0, 245.0, 246.0, 247.0]),
1: lambda y: y.cols(4).rows(1).value([248.0, 249.0, 250.0, 251.0]),
}),
})
# struct vec3_1 { vec3 a; float b; };
# struct nested { vec3_1 a; vec4 b[4]; vec3_1 c[4]; };
# nested structa[2];
var_check.check('structa').cols(0).rows(0).arraySize(2).members({
# structa[0]
0: lambda s: s.cols(0).rows(0).structSize(3).members({
'a': lambda x: x.cols(0).rows(0).structSize(2).members({
'a': lambda y: y.cols(3).rows(1).value([252.0, 253.0, 254.0]),
'b': lambda y: y.cols(1).rows(1).value([255.0]),
}),
'b': lambda x: x.cols(0).rows(0).arraySize(4).members({
0: lambda y: y.cols(4).rows(1).value([256.0, 257.0, 258.0, 259.0]),
1: lambda y: y.cols(4).rows(1).value([260.0, 261.0, 262.0, 263.0]),
2: lambda y: y.cols(4).rows(1).value([264.0, 265.0, 266.0, 267.0]),
3: lambda y: y.cols(4).rows(1).value([268.0, 269.0, 270.0, 271.0]),
}),
'c': lambda x: x.cols(0).rows(0).arraySize(4).members({
0: lambda y: y.cols(0).rows(0).structSize(2).members({
'a': lambda z: z.cols(3).rows(1).value([272.0, 273.0, 274.0]),
'b': lambda z: z.cols(1).rows(1).value([275.0]),
|
antoinecarme/pyaf | tests/artificial/transf_Difference/trend_MovingMedian/cycle_5/ar_/test_artificial_1024_Difference_MovingMedian_5__20.py | Python | bsd-3-clause | 269 | 0.085502 | import pyaf.Bench.TS_datasets a | s tsds
import tests.artificial.process_artificial_dataset as art
# Generate and evaluate one artificial dataset configuration.
art.process_dataset(N=1024, FREQ='D', seed=0, trendtype="MovingMedian",
                    cycle_length=5, transform="Difference", sigma=0.0,
                    exog_count=20, ar_order=0)
alexkasko/krakatau-java | krakatau-lib/src/main/resources/Lib/Krakatau/error.py | Python | gpl-3.0 | 429 | 0.002331 | class ClassLoaderError(Exception):
    def __init__(self, typen=None, data=""):
        # typen: optional short error-category string; data: detail payload.
        self.type = typen
        self.data = data
        # Python 2 code: falls back to ``unicode(data)`` when no type given.
        message = u"\n{}: {}".format(typen, data) if typen else unicode(data)
        super(ClassLoaderError, self).__init__(message)
class VerificationError(Exception):
    """Raised when verification fails; ``data`` carries optional context."""
    def __init__(self, message, data=None):
        super(VerificationError, self).__init__(message)
        self.data = data
|
thomasowenmclean/tei_transformer | tests/test_tags.py | Python | gpl-2.0 | 362 | 0.022099 | import unittest
class TestTeiTag(unittest.TestCase):
    """Placeholder test case for the TeiTag transformer."""
    pass
class TestFmtTag(unittest.TestCase):
    """Placeholder: FmtTag tests are not implemented yet."""
class TestRendTag(unittest.TestCase):
    """Placeholder: RendTag tests are not implemented yet."""
class TestDeleteMe(unittest.TestCase):
    """Placeholder: DeleteMe tests are not implemented yet."""
class TestDontTouchMe(unittest.TestCase):
    """Placeholder: DontTouchMe tests are not implemented yet."""
class TestReplaceMeWText(unittest.TestCase):
    """Placeholder: ReplaceMeWText tests are not implemented yet."""
class TestUnwrapMe(unittest.TestCase):
    """Placeholder test case for the UnwrapMe transformer."""
    pass
windelbouwman/ppci-mirror | ppci/build/buildtasks.py | Python | bsd-2-clause | 7,591 | 0 |
"""
Defines task classes that can compile, link etc..
Task can depend upon one another.
These task are wrappers around the functions provided in the buildfunctions
module
"""
from .tasks import Task, TaskError, register_task
from ..utils.reporting import HtmlReportGenerator, DummyReportGenerator
from .. import api
from ..lang.tools.common import ParserException
from ..common import CompilerError
@register_task
class EmptyTask(Task):
    """A task with no effect at all."""
    def run(self):
        """Deliberately do nothing."""
@register_task
class EchoTask(Task):
    """Task that prints its configured 'message' argument."""
    def run(self):
        print(self.get_argument('message'))
@register_task
class PropertyTask(Task):
    """Task that assigns the 'value' argument to project property 'name'."""
    def run(self):
        args = self.arguments
        self.target.project.set_property(args['name'], args['value'])
@register_task
class BuildTask(Task):
    """Task that builds another build description file (build.xml)."""
    def run(self):
        project_file = self.relpath(self.get_argument('file'))
        api.construct(project_file)
class O | utputtingTask(Task):
""" Base task for tasks that create an object file """
def store_object(self, obj):
""" Store the object in the specified file """
output_filename = self.relpath(self.get_argument('output'))
self.ensure_path(output_filename)
with open(output_filename, 'wt', encoding='utf8') as output_file:
obj.save(output_file)
@register_task
class AssembleTask(OutputtingTask):
    """Task that runs the assembler over the source and stores the
    result in an object file."""
    def run(self):
        arch = self.get_argument('arch')
        source = self.relpath(self.get_argument('source'))
        # Optional flag, defaulting to False — same pattern the other
        # compile tasks in this file use.
        debug = bool(self.get_argument('debug', default=False))
        try:
            obj = api.asm(source, arch, debug=debug)
        except ParserException as err:
            raise TaskError('Error during assembly:' + str(err))
        except CompilerError as err:
            raise TaskError('Error during assembly:' + str(err))
        except OSError as err:
            raise TaskError('Error:' + str(err))
        self.store_object(obj)
        self.logger.debug('Assembling finished')
@register_task
class C3CompileTask(OutputtingTask):
    """ Task that compiles C3 source for some target into an object file """
    def run(self):
        arch = self.get_argument('arch')
        sources = self.open_file_set(self.arguments['sources'])
        # 'includes' is optional.
        if 'includes' in self.arguments:
            includes = self.open_file_set(self.arguments['includes'])
        else:
            includes = []
        if 'report' in self.arguments:
            report_file = self.relpath(self.arguments['report'])
            # NOTE(review): the opened file handle is handed to the reporter;
            # presumably the reporter's context manager closes it — confirm.
            reporter = HtmlReportGenerator(
                open(report_file, 'wt', encoding='utf8')
            )
        else:
            reporter = DummyReportGenerator()
        debug = bool(self.get_argument('debug', default=False))
        opt = int(self.get_argument('optimize', default='0'))
        with reporter:
            obj = api.c3c(
                sources, includes, arch, opt_level=opt,
                reporter=reporter, debug=debug)
        self.store_object(obj)
@register_task
class CCompileTask(OutputtingTask):
    """ Task that compiles C code for some target into an object file """
    def run(self):
        arch = self.get_argument('arch')
        sources = self.open_file_set(self.arguments['sources'])
        # 'includes' is optional.
        if 'includes' in self.arguments:
            includes = self.open_file_set(self.arguments['includes'])
        else:
            includes = []
        if 'report' in self.arguments:
            report_file = self.relpath(self.arguments['report'])
            # NOTE(review): the opened file handle is handed to the reporter;
            # presumably the reporter's context manager closes it — confirm.
            reporter = HtmlReportGenerator(
                open(report_file, 'wt', encoding='utf8')
            )
        else:
            reporter = DummyReportGenerator()
        debug = bool(self.get_argument('debug', default=False))
        opt = int(self.get_argument('optimize', default='0'))
        coptions = api.COptions()
        coptions.add_include_paths(includes)
        with reporter:
            # Compile each translation unit, then partially link them
            # into a single object.
            objs = []
            for source in sources:
                with open(source, 'r') as f:
                    obj = api.cc(
                        f, arch, coptions=coptions, opt_level=opt,
                        reporter=reporter, debug=debug)
                objs.append(obj)
            obj = api.link(
                objs, partial_link=True, reporter=reporter, debug=debug)
        self.store_object(obj)
@register_task
class PascalCompileTask(OutputtingTask):
    """ Task that compiles pascal code for some target into an object file """
    def run(self):
        arch = self.get_argument('arch')
        sources = self.open_file_set(self.arguments['sources'])
        if 'report' in self.arguments:
            report_file = self.relpath(self.arguments['report'])
            # NOTE(review): the opened file handle is handed to the reporter;
            # presumably the reporter's context manager closes it — confirm.
            reporter = HtmlReportGenerator(
                open(report_file, 'wt', encoding='utf8')
            )
        else:
            reporter = DummyReportGenerator()
        debug = bool(self.get_argument('debug', default=False))
        opt = int(self.get_argument('optimize', default='0'))
        with reporter:
            # Compile, then partially link the single object.
            obj = api.pascal(sources, arch, opt_level=opt, reporter=reporter)
            obj = api.link(
                (obj,), partial_link=True, reporter=reporter, debug=debug)
        self.store_object(obj)
@register_task
class WasmCompileTask(OutputtingTask):
    """ Task that compiles a wasm module into an object file """
    def run(self):
        arch = self.get_argument('arch')
        source = self.open_file_set(self.arguments['source'])
        opt = int(self.get_argument('optimize', default='0'))
        if 'report' in self.arguments:
            report_file = self.relpath(self.arguments['report'])
            # NOTE(review): the opened file handle is handed to the reporter;
            # presumably the reporter's context manager closes it — confirm.
            reporter = HtmlReportGenerator(
                open(report_file, 'wt', encoding='utf8')
            )
        else:
            reporter = DummyReportGenerator()
        # Only the first file of the set is compiled.
        self.logger.debug('loading %s', source[0])
        with reporter:
            with open(source[0], 'rb') as f:
                obj = api.wasmcompile(
                    f, arch, opt_level=opt, reporter=reporter)
        self.store_object(obj)
@register_task
class LinkTask(OutputtingTask):
    """Link a collection of object files into one object."""
    def run(self):
        layout = None
        if 'layout' in self.arguments:
            layout = self.relpath(self.get_argument('layout'))
        objects = self.open_file_set(self.get_argument('objects'))
        debug = bool(self.get_argument('debug', default=False))
        partial = bool(self.get_argument('partial', default=False))
        try:
            obj = api.link(
                objects, layout=layout, use_runtime=True,
                partial_link=partial, debug=debug)
        except CompilerError as err:
            raise TaskError(err.msg)
        self.store_object(obj)
@register_task
class ObjCopyTask(Task):
    """Binary move parts of object code."""
    def run(self):
        src_object = self.relpath(self.get_argument('objectfile'))
        image = self.get_argument('imagename')
        fmt = self.get_argument('format')
        destination = self.relpath(self.get_argument('output'))
        api.objcopy(src_object, image, fmt, destination)
|
andrepuschmann/dutycycleviz | dutycycleviz.py | Python | gpl-3.0 | 5,638 | 0.013125 | #!/usr/bin/env python
#
# dutycycleviz.py
#
# Copyright (C) 2013, Andre Puschmann <andre.puschmann@tu-ilmenau.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WI | THOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABI | LITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import numpy as np
import pylab as pl
import scipy.special as ss
from random import Random,expovariate,uniform
def kumaraswamy(a, b, x):
    """Kumaraswamy(a, b) probability density evaluated at x in [0, 1]."""
    return a * b * x ** (a - 1) * (1 - x ** a) ** (b - 1)
def kumaraswamy_cdf(a, b, x):
    """Kumaraswamy(a, b) cumulative distribution function at x.

    F(x) = 1 - (1 - x**a)**b.  The previous code returned
    (1 - x**a) - (1 - x**a)**b, which is not the CDF (for a = b = 1 it
    gave 0 everywhere instead of x); the sibling ``kumaraswamy_cdf_invers``
    is the inverse of the correct form implemented here.
    """
    survival = (1 - x ** a) ** b
    return 1 - survival
def kumaraswamy_cdf_invers(a, b, u):
    """Quantile function (inverse CDF) of Kumaraswamy(a, b) at u in [0, 1]."""
    return (1 - (1 - u) ** (1 / b)) ** (1 / a)
def plot_kumaraswamy(a, b, desc='Kumaraswamy'):
    """Plot the Kumaraswamy(a, b) density over a 1000-point grid on [0, 1]."""
    grid = np.mgrid[0:1:1000j]
    xs = [x for x in grid]
    ys = [kumaraswamy(a, b, x) for x in xs]
    pl.plot(xs, ys, label="%s (a=%.2f, b=%.2f)" %(desc, a, b))
def beta(a, b, mew):
    """Beta(a, b) probability density evaluated at mew in [0, 1]."""
    norm = ss.gamma(a + b) / (ss.gamma(a) * ss.gamma(b))
    return norm * mew ** (a - 1) * (1 - mew) ** (b - 1)
def plot_beta(a, b):
    """Plot the Beta(a, b) density over a 100-point grid on [0, 1]."""
    grid = np.mgrid[0:1:100j]
    xs = [m for m in grid]
    ys = [beta(a, b, m) for m in xs]
    pl.plot(xs, ys, label="Beta: a=%f, b=%f" %(a,b))
# mu is location
# sigma is scale
# xi is shape
# U is input
# X = mu + sigma(U^-xi - 1) / xi (http://en.wikipedia.org/wiki/Generalized_Pareto_distribution)
def gpd_cdf_invers(mu, sigma, xi, U):
    """Inverse CDF of the Generalized Pareto Distribution.

    X = mu + sigma * (U**-xi - 1) / xi  (location mu, scale sigma, shape xi).
    """
    return mu + sigma * (U ** (-xi) - 1) / xi
# based on invers CDF of the Kumaraswamy distribution found
# here: http://www.johndcook.com/blog/2009/11/24/kumaraswamy-distribution/
def plot_activity(a, b, N=10):
    """Plot a piecewise-constant activity level: one Kumaraswamy(a, b)
    sample, held for N time steps, repeated 100 times.

    Python 2 code (xrange / print statements).
    """
    start = 0
    end = N * 100
    Ly = []
    Lx = []
    samples = xrange(start, end, N)
    print samples
    for x in samples:
        # Draw one activity level via inverse-CDF sampling ...
        r = uniform(0, 1)
        #print "r: %.2f" % (r)
        hui = kumaraswamy_cdf_invers(a, b, r)
        # ... and hold it constant for the next N time steps.
        holds = xrange(0, N, 1)
        for hold in holds:
            print "y: %i" % (x+hold)
            Lx.append(x+hold)
            Ly.append(hui)
    pl.plot(Lx, Ly, label="CDF: a=%f, b=%f" %(a,b))
def plot_gpd_activity(busy_mu, busy_sigma, busy_xi, idle_mu, idle_sigma, idle_xi, desc="Random"):
    """Plot 40 alternating busy/idle periods whose lengths are drawn from
    two Generalized Pareto Distributions, and label the curve with the
    resulting duty cycle.

    Python 2 code (xrange).  Returns the matplotlib line list from pl.plot.
    """
    Ly = []
    Lx = []
    current_time = 0
    total_busy_time = 0
    iterations = xrange(0,40)
    for i in iterations:
        # start a new busy period
        Lx.append(current_time)
        Ly.append(1)
        # get length of next busy period
        u = uniform(0, 1)
        next_busy = gpd_cdf_invers(busy_mu, busy_sigma, busy_xi, u)
        #print "next_busy: %f" % next_busy
        current_time += next_busy
        total_busy_time += next_busy
        Lx.append(current_time)
        Ly.append(1)
        # start a new idle period
        Lx.append(current_time)
        Ly.append(0)
        # compute length for next idle period
        u = uniform(0, 1)
        next_idle = gpd_cdf_invers(idle_mu, idle_sigma, idle_xi, u)
        #print "next_idle: %f" % next_idle
        current_time += next_idle
        Lx.append(current_time)
        Ly.append(0)
    # compute duty_cycle for current example
    duty_cycle = total_busy_time / current_time
    #print "total_busy_time: %f" % total_busy_time
    #print "current_time: %f" % current_time
    #print "duty_cycle: %f" % duty_cycle
    #plt.axis( [0, 10, 0, 6])
    pl.ylim(-0.1, 1.1)
    pl.legend()
    return pl.plot(Lx, Ly, label="%s, DC=%.2f" % (desc, duty_cycle))
def main():
    """Draw five stacked subplots of GPD busy/idle activity traces, one
    per load level (parameters are per-level GPD fits)."""
    #plot_beta(0.1, 0.1)
    #plot_beta(1, 1)
    #plot_beta(2, 3)
    #plot_beta(8, 4)
    #plot_kumaraswamy(0.66, 20.8, "Sporadic use") # L1 case
    #plot_kumaraswamy(0.17, 0.35, "Intermittent use, high-load after low-load periods") # M1 case
    #plot_kumaraswamy(11.49, 0.38, "Used most of the time") # H1 case
    #plot_kumaraswamy(2.44, 317.31, "Regularly use, low activity") # L2 case
    #plot_kumaraswamy(8.60, 1581.54, "Weak oscillations around mean") # M2 case
    #plot_kumaraswamy(19.81, 10.59, "Constant itensive use") # H2 case
    #pl.xlim(0.0, 1.0)
    #pl.ylim(0.0, 20.0)
    #plot_activity(0.17, 0.35, 10)
    #plot_activity(19.81, 10.59)
    pl.figure( 1 )
    ax1 = pl.subplot( 5, 1, 1 ) # 2 rows, 1 column, figure 1
    # for low load level
    plot_gpd_activity(3.5150, 1.6960, 0.0285, 3.61, 38.3633, 0.2125, "Very low")
    ax2 = pl.subplot( 5, 1, 2 )
    plot_gpd_activity(3.5150, 2.6240, 0.1884, 3.578, 10.9356, 0.1784, "Low")
    ax3 = pl.subplot( 5, 1, 3 )
    plot_gpd_activity(3.5150, 5.1483, 0.1978, 3.5160, 4.6583, 0.2156, "Medium")
    ax4 = pl.subplot( 5, 1, 4 )
    plot_gpd_activity(3.5470, 10.7968, 0.1929, 3.5310, 2.6272, 0.2119, "High")
    ax5 = pl.subplot( 5, 1, 5 )
    plot_gpd_activity(3.5940, 52.8611, 0.2377, 3.5160, 1.6609, 0.0068, "Very high")
    # Have a single legend for each subplot
    ax1.legend()
    ax2.legend()
    ax3.legend()
    ax4.legend()
    ax5.legend()
    pl.show()
# Script entry point.
if __name__ == "__main__":
    main()
|
akosyakov/intellij-community | python/testData/mover/multiLineSelection_afterDown.py | Python | apache-2.0 | 95 | 0.063158 | class Test | (object):
def q(self):
c = 3
<selection>a = 1
b = 2
<caret>< | /selection> |
facebook/fbthrift | thrift/compiler/test/fixtures/namespace/gen-py3lite/my/namespacing/test/hsmodule/lite_clients.py | Python | apache-2.0 | 3,331 | 0.003903 | #
# Autogenerated by Thrift
#
# DO NOT EDIT
# @generated
#
import typing as _typing
import py3lite_module_root.apache.thrift.metadata.lite_types as _fbthrift_metadata
import folly.i | obuf as _fbthrift_iobuf
from thrift.py3lite.client import (
AsyncClient as _fbthrift_py3lite_AsyncClient,
SyncClient as _fbthrift_py3lite_SyncClient,
Client as _fbthrift_py3lite_Client,
)
import thrift.py3lite.exceptions as _fbthrift_py3lite_exceptions
import thrift.py3lite.types as _fbthrift_py3lite_types
import py3lite_modu | le_root.my.namespacing.test.hsmodule.lite_types
import py3lite_module_root.my.namespacing.test.hsmodule.lite_metadata
class HsTestService(_fbthrift_py3lite_Client["HsTestService.Async", "HsTestService.Sync"]):
    # Generated py3lite client for the thrift service "hsmodule.HsTestService".
    # NOTE: this file is @generated by the thrift compiler -- regenerate it
    # rather than editing by hand.
    @staticmethod
    def __get_thrift_name__() -> str:
        # Fully qualified "<thrift module>.<service>" name.
        return "hsmodule.HsTestService"
    @staticmethod
    def __get_metadata__() -> _fbthrift_metadata.ThriftMetadata:
        # Service metadata produced by the generated lite_metadata module.
        return py3lite_module_root.my.namespacing.test.hsmodule.lite_metadata.gen_metadata_service_HsTestService()
    class Async(_fbthrift_py3lite_AsyncClient):
        # Awaitable flavor of the client.
        @staticmethod
        def __get_thrift_name__() -> str:
            return "hsmodule.HsTestService"
        @staticmethod
        def __get_metadata__() -> _fbthrift_metadata.ThriftMetadata:
            return py3lite_module_root.my.namespacing.test.hsmodule.lite_metadata.gen_metadata_service_HsTestService()
        async def init(
            self,
            int1: int
        ) -> int:
            # Invoke the service method "init" and unwrap its result struct.
            resp = await self._send_request(
                "HsTestService",
                "init",
                py3lite_module_root.my.namespacing.test.hsmodule.lite_types._fbthrift_HsTestService_init_args(
                    int1=int1,),
                py3lite_module_root.my.namespacing.test.hsmodule.lite_types._fbthrift_HsTestService_init_result,
            )
            # shortcut to success path for non-void returns
            if resp.success is not None:
                return resp.success
            # No result and no declared exception: surface MISSING_RESULT.
            raise _fbthrift_py3lite_exceptions.ApplicationError(
                _fbthrift_py3lite_exceptions.ApplicationErrorType.MISSING_RESULT,
                "Empty Response",
            )
    class Sync(_fbthrift_py3lite_SyncClient):
        # Blocking flavor of the client; mirrors Async.init.
        @staticmethod
        def __get_thrift_name__() -> str:
            return "hsmodule.HsTestService"
        @staticmethod
        def __get_metadata__() -> _fbthrift_metadata.ThriftMetadata:
            return py3lite_module_root.my.namespacing.test.hsmodule.lite_metadata.gen_metadata_service_HsTestService()
        def init(
            self,
            int1: int
        ) -> int:
            resp = self._send_request(
                "HsTestService",
                "init",
                py3lite_module_root.my.namespacing.test.hsmodule.lite_types._fbthrift_HsTestService_init_args(
                    int1=int1,),
                py3lite_module_root.my.namespacing.test.hsmodule.lite_types._fbthrift_HsTestService_init_result,
            )
            # shortcut to success path for non-void returns
            if resp.success is not None:
                return resp.success
            raise _fbthrift_py3lite_exceptions.ApplicationError(
                _fbthrift_py3lite_exceptions.ApplicationErrorType.MISSING_RESULT,
                "Empty Response",
            )
|
clovemfeng/studydemo | python_code/chapter11/page422.py | Python | gpl-2.0 | 1,263 | 0.003959 |
from find_it import find_closest
from tm2secs2tm import time2secs, secs2time, format_time
def find_nearest_time(look_for, target_data):
    """Return the time string in *target_data* closest to *look_for*.

    Both the needle and the candidates are converted to seconds for the
    comparison, and the winning value is converted back to a time string.
    """
    candidate_secs = [time2secs(value) for value in target_data]
    winner = find_closest(time2secs(look_for), candidate_secs)
    return secs2time(winner)
# Build a nested mapping from the CSV pace table:
#   {distance_label: {formatted_time: column_heading}}
row_data = {}
with open('PaceData.csv') as paces:
    # First line holds the column headings; drop the leading row-label cell.
    column_headings = paces.readline().strip().split(',')
    column_headings.pop(0)
    for each_line in paces:
        row = each_line.strip().split(',')
        row_label = row.pop(0)
        inner_dict = {}
        # Map each time in this row back to its column heading.
        for i in range(len(column_headings)):
            inner_dict[format_time(row[i])] = column_headings[i]
        row_data[row_label] = inner_dict
distance_run = input('Enter the distance attempted: ')
recorded_time = input('Enter the recorded time: ')
predicted_distance = input('Enter the distance you want a prediction for: ')
# Find the table time nearest the recorded time, then use its column heading
# to look up the matching time for the requested distance.
closest_time = find_nearest_time(format_time(recorded_time), row_data[distance_run])
closest_column_heading = row_data[distance_run][closest_time]
prediction = [k for k in row_data[predicted_distance].keys()
              if row_data[predicted_distance][k] == closest_column_heading]
# NOTE(review): "predicited" is a typo in this user-facing output string.
print('The predicited time running ' + predicted_distance + ' is: ' + prediction[0] + '.')
|
AdamWill/anaconda | pyanaconda/ui/gui/spokes/lib/resize.py | Python | gpl-2.0 | 21,784 | 0.002158 | # Disk resizing dialog
#
# Copyright (C) 2012-2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from collections import namedtuple
import gi
gi.require_version("Gdk", "3.0")
gi.require_version("Gtk", "3.0")
from gi.repository import Gdk, Gtk
from pyanaconda.i18n import _, C_, N_, P_
from pyanaconda.ui.gui import GUIObject
from pyanaconda.ui.gui.utils import blockedHandler, escape_markup, timed_action
from blivet.size import Size
from blivet.formats.fs import FS
__all__ = ["ResizeDialog"]
DEVICE_ID_COL = 0
DESCRIPTION_COL = 1
FILESYSTEM_COL = 2
RECLAIMABLE_COL = 3
ACTION_COL = 4
EDITABLE_COL = 5
TYPE_COL = 6
TOOLTIP_COL = 7
RESIZE_TARGET_COL = 8
NAME_COL = 9
TY_NORMAL = 0
TY_FREE_SPACE = 1
TY_PROTECTED = 2
PartStoreRow = namedtuple("PartStoreRow", ["id", "desc", "fs", "reclaimable",
"action", "editable", "ty",
"tooltip", "target", "name"])
PRESERVE = N_("Preserve")
SHRINK = N_("Shrink")
DELETE = N_("Delete")
NOTHING = ""
class ResizeDialog(GUIObject):
    # Glade widget ids this dialog instantiates, the toplevel widget name,
    # and the UI definition file (relative to the glade search path).
    builderObjects = ["actionStore", "diskStore", "resizeDialog", "resizeAdjustment"]
    mainWidgetName = "resizeDialog"
    uiFile = "spokes/lib/resize.glade"
    def __init__(self, data, storage, payload):
        """Set up the resize dialog.

        :param data: kickstart data handle (passed through to GUIObject)
        :param storage: blivet storage model whose space is reclaimed
        :param payload: install payload, used for the required-space estimate
        """
        GUIObject.__init__(self, data)
        self.storage = storage
        self.payload = payload
        # Running totals displayed in the dialog labels.
        self._initialFreeSpace = Size(0)
        self._selectedReclaimableSpace = Size(0)
        self._actionStore = self.builder.get_object("actionStore")
        self._diskStore = self.builder.get_object("diskStore")
        self._selection = self.builder.get_object("diskView-selection")
        self._view = self.builder.get_object("diskView")
        # NOTE(review): duplicate assignment -- _diskStore was already
        # fetched four lines up.
        self._diskStore = self.builder.get_object("diskStore")
        self._reclaimable_label = self.builder.get_object("reclaimableSpaceLabel")
        self._selected_label = self.builder.get_object("selectedSpaceLabel")
        self._required_label = self.builder.get_object("requiredSpaceLabel")
        # Show how much space the chosen payload needs on the worst-case
        # (largest overhead) filesystem.
        markup = _("Installation requires a total of <b>%s</b> for system data.")
        required_dev_size = self.payload.requiredDeviceSize(FS.biggest_overhead_FS())
        self._required_label.set_markup(markup % escape_markup(str(required_dev_size)))
        self._reclaimDescLabel = self.builder.get_object("reclaimDescLabel")
        self._resizeButton = self.builder.get_object("resizeButton")
        self._preserveButton = self.builder.get_object("preserveButton")
        self._shrinkButton = self.builder.get_object("shrinkButton")
        self._deleteButton = self.builder.get_object("deleteButton")
        self._resizeSlider = self.builder.get_object("resizeSlider")
def _description(self, part):
# First, try to find the partition in some known Root. If we find
# it, return the mountpoint as the description.
for root in self.storage.roots:
for (mount, device) in root.mounts.items():
if device == part:
return "%s (%s)" % (mount, root.name)
# Otherwise, fall back on increasingly vague information.
if not part.isleaf:
return part.children[0].name
if getattr(part.format, "label", None):
return part.format.label
elif getattr(part.format, "name", None):
return part.format.name
else:
return ""
def _get_tooltip(self, device):
if device.protected:
return _("This device contains the installation source.")
else:
return None
def populate(self, disks):
totalDisks = 0
totalReclaimableSpace = Size(0)
self._initialFreeSpace = Size(0)
self._selectedReclaimableSpace = Size(0)
canShrinkSomething = False
free_space = self.storage.get_free_space(disks=disks)
for disk in disks:
# First add the disk itself.
editable = not disk.protected
if disk.partitioned and disk.format.supported:
fstype = ""
diskReclaimableSpace = Size(0)
else:
fstype = disk.format.name
diskReclaimableSpace = disk.size
itr = self._diskStore.append(None, [disk.id,
"%s %s" % (disk.size.human_readable(max_places=1), disk.description),
fstype,
"<span foreground='grey' style='italic'>%s total</span>",
_(PRESERVE),
editable,
TY_NORMAL,
self._get_tooltip(disk),
int(disk.size),
disk.name])
if disk.partitioned and disk.format.supported:
# Then add all its partitions.
for dev in disk.children:
if dev.is_extended and disk.format.logical_partitions:
continue
# Devices that are not resizable are still deletable.
if dev.resizable:
freeSize = dev.size - dev.min_size
resizeString = _("%(freeSize)s of %(devSize)s") \
% {"freeSize": freeSize.human_readable(max_places=1), "devSize": dev.size.human_readable(max_places=1)}
if not dev.protected:
canShrinkSomething = True
else:
freeSize = dev.size
resizeString = "<span foreground='grey'>%s</span>" % \
escape_markup(_("Not resizeable"))
if dev.protected:
ty = TY_PROTECTED
else:
ty = TY_NORMAL
self._diskStore.append(itr, [dev.id,
self._description(dev),
dev.format.name,
resizeString,
_(PRESERVE),
not dev.protected,
ty,
self._get_tooltip(dev),
int(dev.size),
dev.name])
diskReclaimableSpace += freeSize
# And then add another uneditable line that lists how much space is
# already free in the disk.
diskFree = free_space[disk.name][0]
if diskFree >= Size("1MiB"):
freeSpaceString = "<span foreground='grey' style='italic'>%s</span>" % \
escape_markup(_("Free space"))
self._diskStore.append(itr, [disk.id,
freeSpaceString,
"",
|
macosforge/ccs-calendarserver | txdav/xml/rfc6578.py | Python | apache-2.0 | 4,056 | 0.001233 | ##
# Copyright (c) 2009-2017 Apple Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
##
"""
RFC 6578 (Collection Synchronization for WebDAV) XML Elements
This module provides XML element definitions for use with WebDAV
Synchronization.
See RFC 6578: http://www.ietf.org/rfc/rfc6578.txt
"""
__all__ = []
from txdav.xml.base import WebDAVElement, WebDAVTextElement, dav_namespace
from txdav.xml.element import registerElement, registerElementClass
from txdav.xml.rfc2518 import MultiStatus
@registerElement
@registerElementClass
class SyncCollection (WebDAVElement):
    """
    DAV report used to retrieve specific calendar component items via
    their URIs.
    """
    name = "sync-collection"

    # To allow for an empty element in a supported-report-set property we need
    # to relax the child restrictions
    allowed_children = {
        (dav_namespace, "sync-token"): (0, 1), # When used in the REPORT this is required
        (dav_namespace, "sync-level"): (0, 1), # When used in the REPORT this is required
        (dav_namespace, "prop"): (0, 1),
    }

    def __init__(self, *children, **attributes):
        """Parse the report's child elements into convenience attributes.

        Sets self.sync_token, self.sync_level, self.sync_limit and
        self.property (None when the corresponding child is absent).
        Raises ValueError if more than one DAV:prop child is present.
        """
        super(SyncCollection, self).__init__(*children, **attributes)

        self.property = None
        self.sync_token = None
        self.sync_level = None
        self.sync_limit = None

        for child in self.children:
            qname = child.qname()

            if qname == (dav_namespace, "sync-token"):
                self.sync_token = str(child)

            elif qname == (dav_namespace, "sync-level"):
                self.sync_level = str(child)

            elif qname == (dav_namespace, "limit"):
                if len(child.children) == 1 and child.children[0].qname() == (dav_namespace, "nresults"):
                    try:
                        self.sync_limit = int(str(child.children[0]))
                    except (TypeError, ValueError):
                        # BUG FIX: int() raises ValueError for non-numeric
                        # text; the original caught only TypeError, so a
                        # malformed DAV:nresults crashed the parse instead
                        # of being ignored.
                        pass

            elif qname == (dav_namespace, "prop"):
                if self.property is not None:
                    raise ValueError("Only one of DAV:prop allowed")
                self.property = child
@registerElement
@registerElementClass
class SyncToken (WebDAVTextElement):
    """
    Synchronization token used in report and as a property.
    """
    name = "sync-token"
    # NOTE(review): hidden/protected semantics are defined by the
    # WebDAVTextElement base class -- presumably "not listed in allprop"
    # and "not client-writable"; confirm against txdav.xml.base.
    hidden = True
    protected = True
@registerElement
@registerElementClass
class SyncLevel (WebDAVTextElement):
    """
    Synchronization level used in report.
    """
    name = "sync-level"
@registerElement
@registerElementClass
class Limit (WebDAVElement):
    """
    Synchronization limit in report.

    Carries a single DAV:nresults child giving the maximum number of
    results the client wants returned.
    """
    name = "limit"
    allowed_children = {
        (dav_namespace, "nresults"): (1, 1), # When used in the REPORT this is required
    }
@registerElement
@registerElementClass
class NResults (WebDAVTextElement):
    """
    Synchronization numerical limit.
    """
    name = "nresults"
# Extend MultiStatus, to add sync-token
# (RFC 6578 adds DAV:sync-token as an optional child of DAV:multistatus.)
MultiStatus.allowed_children[(dav_namespace, "sync-token")] = (0, 1)
|
qvicksilver/ansible | lib/ansible/utils/plugins.py | Python | gpl-3.0 | 8,792 | 0.003071 | # (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import os.path
import sys
import glob
import imp
from ansible import constants as C
from ansible import errors
# Module-level caches shared by every PluginLoader instance, keyed by the
# loader's class_name so loaders of the same type reuse previous results.
MODULE_CACHE = {}        # class_name -> {plugin path: loaded python module}
PATH_CACHE = {}          # class_name -> ordered list of search paths (or None)
PLUGIN_PATH_CACHE = {}   # class_name -> {plugin file name: full path}
# Play basedirs pushed via push_basedir(); newest first.
_basedirs = []
def push_basedir(basedir):
    """Prepend *basedir* to the plugin search roots (deduplicated).

    The path is resolved with realpath first so the same directory is never
    registered twice under different spellings.
    """
    resolved = os.path.realpath(basedir)
    if resolved not in _basedirs:
        _basedirs.insert(0, resolved)
class PluginLoader(object):
    '''
    PluginLoader loads plugins from the configured plugin directories.

    It searches for plugins by iterating through the combined list of
    play basedirs, configured paths, and the python path.
    The first match is used.
    '''

    def __init__(self, class_name, package, config, subdir, aliases=None):
        """Create a loader for one plugin type.

        class_name -- class each plugin module must define ('' for modules)
        package    -- python package also searched for plugins
        config     -- os.pathsep-separated configured search path (or None)
        subdir     -- plugin subdirectory name under each basedir
        aliases    -- optional {requested name: real name} mapping
        """
        self.class_name = class_name
        self.package = package
        self.config = config
        self.subdir = subdir
        # BUG FIX: the original used a mutable ``aliases={}`` default, which
        # is evaluated once and shared by every instance created without an
        # explicit mapping; use a None sentinel instead.
        self.aliases = aliases if aliases is not None else {}

        if not class_name in MODULE_CACHE:
            MODULE_CACHE[class_name] = {}
        if not class_name in PATH_CACHE:
            PATH_CACHE[class_name] = None
        if not class_name in PLUGIN_PATH_CACHE:
            PLUGIN_PATH_CACHE[class_name] = {}

        # Caches are shared per class_name (see module-level dicts).
        self._module_cache = MODULE_CACHE[class_name]
        self._paths = PATH_CACHE[class_name]
        self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]

        self._extra_dirs = []

    def print_paths(self):
        ''' Returns a string suitable for printing of the search path '''
        # Uses a list to get the order right
        ret = []
        for i in self._get_paths():
            if i not in ret:
                ret.append(i)
        return os.pathsep.join(ret)

    def _all_directories(self, dir):
        ''' Return dir plus every package subdirectory beneath it. '''
        results = []
        results.append(dir)
        for root, subdirs, files in os.walk(dir):
            if '__init__.py' in files:
                for x in subdirs:
                    results.append(os.path.join(root,x))
        return results

    def _get_package_paths(self):
        ''' Gets the path of a Python package '''
        paths = []
        if not self.package:
            return []
        if not hasattr(self, 'package_path'):
            # Import once and remember where the package lives on disk.
            m = __import__(self.package)
            parts = self.package.split('.')[1:]
            self.package_path = os.path.join(os.path.dirname(m.__file__), *parts)
            paths.extend(self._all_directories(self.package_path))
            return paths
        else:
            return [ self.package_path ]

    def _get_paths(self):
        ''' Return a list of paths to search for plugins in '''

        if self._paths is not None:
            return self._paths

        ret = []
        ret += self._extra_dirs

        # Search under every pushed play basedir.
        for basedir in _basedirs:
            fullpath = os.path.realpath(os.path.join(basedir, self.subdir))
            if os.path.isdir(fullpath):
                files = glob.glob("%s/*" % fullpath)

                # allow directories to be two levels deep
                files2 = glob.glob("%s/*/*" % fullpath)

                if files2 is not None:
                    files.extend(files2)

                for file in files:
                    if os.path.isdir(file) and file not in ret:
                        ret.append(file)
                if fullpath not in ret:
                    ret.append(fullpath)

        # look in any configured plugin paths, allow one level deep for subcategories
        if self.config is not None:
            configured_paths = self.config.split(os.pathsep)
            for path in configured_paths:
                path = os.path.realpath(os.path.expanduser(path))
                contents = glob.glob("%s/*" % path)
                for c in contents:
                    if os.path.isdir(c) and c not in ret:
                        ret.append(c)
                if path not in ret:
                    ret.append(path)

        # look for any plugins installed in the package subtree
        ret.extend(self._get_package_paths())
        # BUG FIX: the original called _get_package_paths() a second time here
        # and bound the result to an unused local, repeating the directory
        # walk for nothing; the redundant call has been removed.

        self._paths = ret

        return ret

    def add_directory(self, directory, with_subdir=False):
        ''' Adds an additional directory to the search path '''
        directory = os.path.realpath(directory)

        if directory is not None:
            if with_subdir:
                directory = os.path.join(directory, self.subdir)
            if directory not in self._extra_dirs:
                self._extra_dirs.append(directory)

    def find_plugin(self, name, suffixes=None, transport=''):
        ''' Find a plugin named name '''
        if not suffixes:
            if self.class_name:
                suffixes = ['.py']
            else:
                # Module plugins: PowerShell for the winrm transport,
                # otherwise python or extensionless scripts.
                if transport == 'winrm':
                    suffixes = ['.ps1', '']
                else:
                    suffixes = ['.py', '']

        for suffix in suffixes:
            full_name = '%s%s' % (name, suffix)
            if full_name in self._plugin_path_cache:
                return self._plugin_path_cache[full_name]

            for i in self._get_paths():
                path = os.path.join(i, full_name)
                if os.path.isfile(path):
                    self._plugin_path_cache[full_name] = path
                    return path

        return None

    def has_plugin(self, name):
        ''' Checks if a plugin named name exists '''
        return self.find_plugin(name) is not None

    __contains__ = has_plugin

    def get(self, name, *args, **kwargs):
        ''' instantiates a plugin of the given name using arguments '''
        if name in self.aliases:
            name = self.aliases[name]
        path = self.find_plugin(name)
        if path is None:
            return None
        if path not in self._module_cache:
            self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
        return getattr(self._module_cache[path], self.class_name)(*args, **kwargs)

    def all(self, *args, **kwargs):
        ''' instantiates all plugins with the same arguments '''
        for i in self._get_paths():
            matches = glob.glob(os.path.join(i, "*.py"))
            matches.sort()
            for path in matches:
                name, ext = os.path.splitext(os.path.basename(path))
                # Files starting with "_" (e.g. __init__.py) are not plugins.
                if name.startswith("_"):
                    continue
                if path not in self._module_cache:
                    self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
                yield getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
# Loader singletons, one per plugin type.  Positional arguments are:
#   (class to load, python package, configured search path, plugin subdir)
action_loader = PluginLoader(
    'ActionModule',
    'ansible.runner.action_plugins',
    C.DEFAULT_ACTION_PLUGIN_PATH,
    'action_plugins'
)
cache_loader = PluginLoader(
    'CacheModule',
    'ansible.cache',
    C.DEFAULT_CACHE_PLUGIN_PATH,
    'cache_plugins'
)
callback_loader = PluginLoader(
    'CallbackModule',
    'ansible.callback_plugins',
    C.DEFAULT_CALLBACK_PLUGIN_PATH,
    'callback_plugins'
)
connection_loader = PluginLoader(
    'Connection',
    'ansible.runner.connection_plugins',
    C.DEFAULT_CONNECTION_PLUGIN_PATH,
    'connection_plugins',
    # 'paramiko' is an alias for the paramiko_ssh connection plugin.
    aliases={'paramiko': 'paramiko_ssh'}
)
shell_loader = PluginLoader(
    'ShellModule',
    'ansible.runner.shell_plugins',
    # NOTE(review): this positional argument is the *config* search path;
    # passing the literal 'shell_plugins' here looks suspicious -- confirm
    # it is intended rather than a C.DEFAULT_* constant.
    'shell_plugins',
    'shell_plugins',
)
module_finder = PluginLoader(
'',
'ansible.modules',
C.DEFAULT_MODULE_PATH,
'library' |
2014c2g5/2014cadp | wsgi/local_data/brython_programs/list1.py | Python | gpl-3.0 | 609 | 0.007678 | 資料 = [1, 2, 3, 4, 5]
'''
program: list1.py
'''
print(資料[:3])
print(資料[2:])
print(資料[1:2])
a = [3, 5, 7, 11, 13]
for x in a:
if x == 7:
print('list contains 7')
break
print(list | (range(10)))
for 索引 in range(-5, 6, 2):
print(索引)
squares = [ x*x for x | in range(0, 11) ]
print(squares)
a = [10, 'sage', 3.14159]
b = a[:]
#list.pop([i]) 取出 list 中索引值為 i 的元素,預設是最後一個
print(b.pop())
print(a)
數列 = [0]*10
print(數列)
'''
delete 用法
'''
a = [1, 2, 3, 4]
print("刪除之前:", a)
del a[:2]
print("刪除之後:", a) |
reinbach/django-machina | machina/apps/forum_member/migrations/0001_initial.py | Python | bsd-3-clause | 1,410 | 0.004255 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import machina.models.fields
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the ForumProfile model with
    # a one-to-one link to the configured (swappable) user model.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='ForumProfile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('avatar', machina.models.fields.ExtendedImageField(upload_to='machina/avatar_images', null=True, verbose_name='Avatar', blank=True)),
                ('signature', machina.models.fields.MarkupTextField(max_length=255, no_rendered_field=True, null=True, verbose_name='Signature', blank=True)),
                ('posts_count', models.PositiveIntegerField(default=0, verbose_name='Total posts', blank=True)),
                # Rendered HTML companion for the markup signature field.
                ('_signature_rendered', models.TextField(null=True, editable=False, blank=True)),
                ('user', models.OneToOneField(related_name='forum_profile', verbose_name='User', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
            ],
            options={
                'abstract': False,
                'verbose_name': 'Forum profile',
                'verbose_name_plural': 'Forum profiles',
            },
        ),
    ]
|
ml-slac/deep-jets | train.py | Python | mit | 2,806 | 0.010335 | import numpy as np
from keras.layers import containers
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, MaxoutDense, Activation
from keras.optimizers import SGD, RMSprop, Adagrad, Adam
from keras.regularizers import l2
from keras.callbacks import EarlyStopping
# import matplotlib.pyplot as plt
# import matplotlib.animation as animation
# from matplotlib.colors import LinearSegmentedColormap
# from matplotlib.colors import LogNorm
# class DrawWeights(keras.callbacks.Callback):
# def __init__(self, figsize, layer_id=0, param_id=0, weight_slice=(slice(None), 0)):
# self.layer_id = layer_id
# self.param_id = param_id
# self.weight_slice = weight_slice
# # Initialize the figure and axis
# self.fig = plt.figure(figsize=figsize)
# self.ax = self.fig.add_subplot(1, 1, 1)
# def on_train_begin(self):
# self.imgs = []
# def on_batch_end(self, batch, indices, loss, accuracy):
# # Get a snapshot of the weight matrix every 5 batches
# if batch % 5 == 0:
# # Access the full weight matrix
#             weights = self.model.layers[self.layer_id].params[self.param_id].get_value()
# # Create the frame and add it to the animation
# img = self.ax.imshow(weights[self.weight_slice], interpolation='nearest')
# self.imgs.append(img)
# def on_train_end(self):
# # Once the training has ended, display the animation
# anim = animation.ArtistAnimation(self.fig, self.imgs, interval=10, blit=False)
# plt.show()
# Load the pre-split training/test sets and the per-sample training weights.
train = np.load('./wprime800_QCD200-600_train.npz')
test = np.load('./wprime800_QCD200-600_test.npz')
weights = np.load('./wprime800_QCD200-600_train_weights.npz')['weights']

# -- build the model
# Fully-connected binary classifier: 625 inputs -> 500 -> 256 -> 128 -> 64
# -> 25 -> 1 sigmoid output, with L2 weight regularization and dropout.
dl = Sequential()
dl.add(Dense(625, 500, W_regularizer=l2(0.0001)))
dl.add(Activation('relu'))
dl.add(Dropout(0.1))
dl.add(Dense(500, 256, W_regularizer=l2(0.0001)))
dl.add(Activation('relu'))
dl.add(Dropout(0.1))
dl.add(Dense(256, 128, W_regularizer=l2(0.0001)))
dl.add(Activation('relu'))
dl.add(Dropout(0.1))
dl.add(Dense(128, 64, W_regularizer=l2(0.0001)))
dl.add(Activation('tanh'))
dl.add(Dropout(0.1))
dl.add(Dense(64, 25))
dl.add(Activation('tanh'))
dl.add(Dropout(0.1))
dl.add(Dense(25, 1))
dl.add(Activation('sigmoid'))

dl.compile(loss='binary_crossentropy', optimizer=Adam(), class_mode='binary')

# -- train!  Early stopping halts after 2 epochs without validation improvement.
dl.fit(train['X'], train['y'], validation_data=(test['X'], test['y']),
    batch_size=256,
    nb_epoch=100,
    callbacks=[
        EarlyStopping(verbose=True, patience=2)
    ],
    sample_weight=weights,
    show_accuracy=True,
    verbose=2)

# Persist the architecture as YAML and the learned weights as HDF5.
# BUG FIX: the file must be opened for *writing* -- the original used the
# default read mode, so f.write() raised io.UnsupportedOperation.
with open('deepjets.yaml', 'w') as f:
    f.write(dl.to_yaml())
dl.save_weights('deepjets.h5')
|
mescobal/geined | geined.py | Python | gpl-3.0 | 11,692 | 0.007216 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Menu principal del sistema GEINED"""
import cgitb ; cgitb.enable()
import cgi
import htm
import subprocess
import pagina
def principal():
    """Render the main menu page (CGI output on stdout)."""
    pag = pagina.Pagina("Menu principal", 10)
    print(htm.h1("Menú principal"))
    # 3x3 grid of menu cells; htm.celda_menu(label, url, icon) results are
    # concatenated into table rows.
    print(htm.table(
        htm.tr(
            htm.celda_menu("Recepción", "geined.py?accion=recepcion",
                           "recepcion.png") +
            htm.celda_menu("Académico", "geined.py?accion=academico",
                           "academico.png") +
            htm.celda_menu("Administración", "geined.py?accion=administracion",
                           "administracion.png")) +
        htm.tr(
            htm.celda_menu("Dirección", "geined.py?accion=direccion",
                           "direccion.png") +
            htm.celda_menu("Usuario", "datos_usuario.py", "usuario.png") +
            htm.celda_menu("Sistema", "geined.py?accion=sistema",
                           "sistema.png")) +
        htm.tr(
            htm.celda_menu("Desarrollo", "des.py?accion=listado", "bug.png") +
            htm.celda_menu("Ayuda", "ayuda.html", "ayuda.png") +
            htm.celda_menu("Salir", "logout.php", "salir.png")),
        clase="tabla_menu"))
    version = "Desconocida"
    try:
        # Ask bazaar for the working-tree revision number.
        p = subprocess.Popen(["bzr", "revno"], shell=False,
                             stdout=subprocess.PIPE)
        v = p.communicate()
        version = str(v[0])
    # Python 2 except syntax (this script is py2); OSError is raised when
    # the bzr binary is missing.
    except OSError, e:
        version = "Desconocida. Error: " + str(e)
    print('<div class="pie_pagina">')
    print("Revisión Nº " + \
          version + \
          " - " + \
          "Software Libre, hecho con Software Libre " + \
          "<img src='./img/pingu.png' border='0' width='48' height='48'>")
    print("</div>")
    pag.fin()
def recepcion():
    """Render the reception menu page."""
    pag = pagina.Pagina("Recepcion", 10)
    print(htm.encabezado("Recepción", "Principal",
                         "geined.py?accion=principal"))
    print('<table class="tabla_menu">')
    # Three rows of menu cells; empty htm.td("") cells pad the last row.
    print(htm.tr(
        htm.celda_menu("Clientes", "cli.py", "clientes.png") +
        htm.celda_menu("Inscripción", "alu_inscripcion1.py",
                       "inscripcion.png") +
        htm.celda_menu("Caja", "caja.php", "caja.png")) +
        htm.tr(
        htm.celda_menu("Comprobantes", "geined.py?accion=comprobantes",
                       "comprobantes.png") +
        htm.celda_menu("Mensaje al pie", "menpie.php", "pie.png") +
        htm.celda_menu("Deudores", "deudores.php", "clientes.png")) +
        htm.tr(
        htm.celda_menu("Llamadas", "lla.php", "llamadas.png") +
        htm.td("") +
        htm.td("")))
    print("</table>")
    pag.fin()
def academico():
    """Render the academic-coordination menu page."""
    pag = pagina.Pagina("Coordinación académica", 10)
    print(htm.encabezado("Académico", "Principal",
                         "geined.py?accion=principal"))
    # BUG FIX: the opening <table> tag was emitted without its closing '>'
    # (compare with the correct tag in recepcion()), producing invalid HTML.
    print("<table class='tabla_menu'>")
    print(htm.tr(
        htm.celda_menu("Docentes", "doc.py?accion=listado",
                       "docentes.png") +
        htm.celda_menu("Cursos", "cur.php?accion=listado", "cursos.png") +
        htm.celda_menu("Alumnos", "alu.py?accion=listado", "alumnos.png")) +
        htm.tr(
        htm.celda_menu("Drop-outs", "dro.py", "dropout.png") +
        htm.celda_menu("Interesados", "int.py", "emblem-people.png") +
        htm.td("")))
    print('</table>')
    pag.fin()
def administracion():
    """Render the administration menu page."""
    pag = pagina.Pagina("Tareas administrativas", 10)
    print(htm.encabezado("Administración", "Principal",
                         "geined.py?accion=principal"))
    # BUG FIX: missing '>' on the opening <table> tag (cf. recepcion()).
    print("<table class='tabla_menu'>")
    print(htm.tr(
        htm.celda_menu("Sueldos", "geined.py?accion=sueldos",
                       "calendario.png") +
        htm.celda_menu("Contabilidad", "geined.py?accion=contabilidad",
                       "contabilidad.png") +
        htm.celda_menu("Stock", "geined.py?accion=stock", "stock.png")) +
        htm.tr(
        htm.celda_menu("Fee", "fee.py", "fee.png") +
        htm.celda_menu("Inventario", "inventario.py", "inventario.png") +
        htm.td("")))
    print('</table>')
    pag.fin()
def sueldos():
    """Render the payroll-system menu page."""
    page = pagina.Pagina("Sistema de sueldos", 10)
    print(htm.encabezado("Sueldos", "Administración",
                         "geined.py?accion=administracion"))
    # (url, label) pairs for each menu entry, in display order.
    enlaces = (
        ("var.py?accion=listado", "Variables del sistema de sueldos"),
        ("sal.py?accion=listado", "Valores salariales"),
        ("liq.py?accion=listado", "Liquidación de sueldos"),
        ("calc_ant_ret.py?accion=seleccionar",
         "Cálculo retroactivo de antigüedad"),
    )
    items = [htm.li(htm.a(url, texto)) for (url, texto) in enlaces]
    cuerpo = items[0]
    for item in items[1:]:
        cuerpo = cuerpo + item
    print(htm.ul(cuerpo))
    page.fin()
def contabilidad():
    """Render the accounting-system menu page."""
    page = pagina.Pagina("Sistema de contabilidad", 10)
    print(htm.encabezado("Contabilidad", "Administración",
                         "geined.py?accion=administracion"))
    # (url, label) pairs for each menu entry, in display order.
    enlaces = (
        ("cuentas.py?accion=listado", "Plan de cuentas"),
        ("transacciones.py?accion=listado&inicio=0", "Transacciones"),
        ("por_rubro.py?accion=listado", "Listado por rubro"),
        ("mantenim.py?accion=mantenimiento", "Mantenimiento"),
        ("conciliacion.py?accion=listado", "Conciliaciones"),
    )
    items = [htm.li(htm.a(url, texto)) for (url, texto) in enlaces]
    cuerpo = items[0]
    for item in items[1:]:
        cuerpo = cuerpo + item
    print(htm.ul(cuerpo))
    page.fin()
def stock():
    """Render the stock-system menu page."""
    page = pagina.Pagina("Sistema de stock", 10)
    print(htm.encabezado("Stock", "Administración",
                         "geined.py?accion=administracion"))
    # Single-entry menu.
    enlace = htm.a("biecam.py?accion=listado", "Bienes de cambio")
    print(htm.ul(htm.li(enlace)))
    page.fin()
def financiero():
    """Render the financial-administration menu page."""
    page = pagina.Pagina("Administración Financiera", 10)
    print(htm.encabezado("Administración financiera", "Administración",
                         "geined.py?accion=direccion"))
    # (url, label) pairs for each menu entry, in display order.
    enlaces = (
        ("consolidado.py?accion=listado", "Consolidado anual"),
        ("balcom.py?accion=listado", "Balance comparativo"),
        ("inf_fin.py?accion=menu", "Informe financiero"),
        ("cur_rent.py", "Rentabilidad de cursos"),
        ("bc_suc.php", "Balance comparativo por sucursal"),
        ("111018_bancos.py", "Cobranzas a depositar"),
        ("evolucion.py", "Evolución financiera"),
    )
    items = [htm.li(htm.a(url, texto)) for (url, texto) in enlaces]
    cuerpo = items[0]
    for item in items[1:]:
        cuerpo = cuerpo + item
    print(htm.ul(cuerpo))
    page.fin()
def direccion():
    """Render the management (dirección) menu page."""
    page = pagina.Pagina("Dirección", 10)
    print(htm.encabezado("Dirección", "Principal",
                         "geined.py?accion=principal"))
    # (url, label) pairs for each menu entry, in display order.
    enlaces = (
        ("empleados.py?accion=listado", "Empleados"),
        ("tcu.py?accion=listado", "Tipos de cursos"),
        ("bancos.php?accion=listado", "Bancos"),
        ("geined.py?accion=financiero", "Administración financiera"),
        ("dir_111018.php", "Depósitos MN"),
    )
    items = [htm.li(htm.a(url, texto)) for (url, texto) in enlaces]
    cuerpo = items[0]
    for item in items[1:]:
        cuerpo = cuerpo + item
    print(htm.ul(cuerpo))
    page.fin()
def sistema():
    """Render the system-administration menu page."""
    page = pagina.Pagina("Administración del sistema", 10)
    print(htm.encabezado("Sistema", "Principal", "geined.py?accion=principal"))
    # (url, label) pairs for each menu entry, in display order.
    enlaces = (
        ("usu.py?accion=listado", "Usuarios"),
        ("ccl.py?accion=listado", "Categorías de clientes"),
        ("cem.php?accion=listado", "Categorías de empleados"),
        ("dep.php?accion=listado", "Depósitos"),
        ("pro.php?accion=listado", "Proveedores"),
        ("upload.php", "Subir archivos"),
        ("download.php", "Bajar archivos"),
        ("prod.php?accion=listado", "Productos"),
        ("codss.php?accion=listado", "Códigos de Seguridad Social"),
    )
    items = [htm.li(htm.a(url, texto)) for (url, texto) in enlaces]
    cuerpo = items[0]
    for item in items[1:]:
        cuerpo = cuerpo + item
    print(htm.ul(cuerpo))
    page.fin()
def comprobantes():
"""Menu de comprobantes"""
pag = pagi |
live-clones/dolfin-adjoint | tests_dolfin/hessian_identity_list/hessian_identity_list.py | Python | lgpl-3.0 | 1,279 | 0.003127 | from dolfin import *
from dolfin_adjoint import *
# Cache matrix factorizations so repeated adjoint/Hessian solves are cheap.
parameters["adjoint"]["cache_factorizations"] = True
mesh = UnitSquareMesh(3, 3)
# "R" (Real) element of degree 0: a single global degree of freedom.
V = FunctionSpace(mesh, "R", 0)
test = TestFunction(V)
trial = TrialFunction(V)
def main(m):
    """Solve the (annotated) forward problem for control m and return u.

    First solves the nonlinear equation u*u = m, then a linear solve whose
    left- and right-hand sides both depend on that u.
    """
    u = interpolate(Constant(0.1), V, name="Solution")
    F = inner(u*u, test)*dx - inner(m, test)*dx
    solve(F == 0, u)
    F = inner(sin(u)*u*u*trial, test)*dx - inner(u**4, test)*dx
    solve(lhs(F) == rhs(F), u)
    return u
if __name__ == "__main__":
    m = interpolate(Constant(2.13), V, name="Parameter1")
    u = main(m)
    # Stop recording operations: the forward model is fully annotated now.
    parameters["adjoint"]["stop_annotating"] = True
    J = Functional((inner(u, u))**3*dx + inner(m, m)*dx, name="NormSquared")
    Jm = assemble(inner(u, u)**3*dx + inner(m, m)*dx)
    controls = [Control(m)]
    dJdm = compute_gradient(J, controls, forget=None)
    HJm = hessian(J, controls, warn=False)
    def Jhat(m):
        # Recompute the functional for a perturbed control; used by the
        # Taylor remainder convergence test below.
        m = m[0]  # the control is a list of length one, so Jhat has to
                  # accept a list as well
        u = main(m)
        return assemble(inner(u, u)**3*dx + inner(m, m)*dx)
    direction = [interpolate(Constant(0.1), V)]
    minconv = taylor_test(Jhat, controls, Jm, dJdm, HJm=HJm,
                          perturbation_direction=direction)
    # With an exact Hessian the Taylor remainder should converge at third
    # order; require comfortably better than second order.
    assert minconv > 2.9
|
Rafiot/logbook | scripts/make-release.py | Python | bsd-3-clause | 4,069 | 0.000983 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
make-release
~~~~~~~~~~~~
Helper script that performs a release. Does pretty much everything
automatically for us.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
import os
import re
from datetime import datetime, date
from subprocess import Popen, PIPE
# Matches ordinal day suffixes ("1st", "2nd", "3rd", "4th", ...) so dates
# can be normalized before strptime parsing in parse_date().
_date_clean_re = re.compile(r'(\d+)(st|nd|rd|th)')
def parse_changelog():
    """Extract (version, release datetime, codename) from the CHANGES file.

    Returns None (implicitly) when no properly formatted release section
    is found.
    """
    with open('CHANGES') as f:
        lineiter = iter(f)
        for line in lineiter:
            match = re.search('^Version\s+(.*)', line.strip())
            if match is None:
                continue
            version = match.group(1).strip()
            # The heading must be underlined with exactly as many dashes as
            # the heading is long.  (next() replaces the py2-only
            # lineiter.next(); the unused "length" local was dropped.)
            if next(lineiter).count('-') != len(match.group(0)):
                continue
            # Skip blank lines up to the release-info line.
            while 1:
                change_info = next(lineiter).strip()
                if change_info:
                    break

            # BUG FIX: the (?i) inline flag must lead the pattern -- a
            # trailing position has been a hard error since Python 3.11.
            match = re.search(r'(?i)released on (\w+\s+\d+\w+\s+\d+)'
                              r'(?:, codename (.*))?', change_info)
            if match is None:
                continue

            datestr, codename = match.groups()
            return version, parse_date(datestr), codename
def bump_version(version):
    """Return *version* with its last numeric component incremented.

    Aborts via fail() when any component is not an integer.
    """
    try:
        # BUG FIX: a list comprehension instead of map() -- on Python 3,
        # map() returns an iterator, so the parts[-1] index below crashed.
        parts = [int(part) for part in version.split('.')]
    except ValueError:
        fail('Current version is not numeric')
    parts[-1] += 1
    return '.'.join(str(part) for part in parts)
def parse_date(string):
    """Parse a changelog date such as "March 1st 2011" into a datetime."""
    cleaned = _date_clean_re.sub(r'\1', string)
    return datetime.strptime(cleaned, '%B %d %Y')
def set_filename_version(filename, version_number, pattern):
    """Rewrite the ``<pattern> = '...'`` assignment in *filename*.

    Replaces the quoted value with *version_number*; aborts via fail()
    (which exits) when no matching assignment is found.
    """
    changed = []

    def inject_version(match):
        # Keep the text around the old version string, swap only the middle.
        before, old, after = match.groups()
        changed.append(True)
        return before + version_number + after

    with open(filename) as f:
        # Global regex flags must lead the pattern; the previous trailing
        # inline '(?sm)' is a re.error on Python 3.11+, so pass the same
        # flags through the flags= argument instead.
        contents = re.sub(r"^(\s*%s\s*=\s*')(.+?)(')" % pattern,
                          inject_version, f.read(),
                          flags=re.MULTILINE | re.DOTALL)

    if not changed:
        fail('Could not find %s in %s', pattern, filename)

    with open(filename, 'w') as f:
        f.write(contents)
def set_init_version(version):
    """Stamp *version* into logbook/__init__.py's ``__version__``."""
    info('Setting __init__.py version to %s', version)
    set_filename_version('logbook/__init__.py', version, '__version__')
def set_setup_version(version):
    """Stamp *version* into setup.py's ``version`` keyword."""
    info('Setting setup.py version to %s', version)
    set_filename_version('setup.py', version, 'version')
def build_and_upload():
    # Build the source distribution and upload it to PyPI; blocks until done.
    Popen([sys.executable, 'setup.py', 'release', 'sdist', 'upload']).wait()
def fail(message, *args):
    """Write a printf-style error message to stderr and exit with status 1."""
    # sys.stderr.write is valid on Python 2 and 3 alike; the previous
    # `print >> sys.stderr` chevron form is a SyntaxError under Python 3.
    sys.stderr.write('Error: ' + message % args + '\n')
    sys.exit(1)
def info(message, *args):
    """Write a printf-style progress message to stderr."""
    # sys.stderr.write works on both Python 2 and 3 (print >> is 2-only).
    sys.stderr.write(message % args + '\n')
def get_git_tags():
    """Return the set of existing git tag names.

    NOTE(review): on Python 3 communicate() yields bytes, so this would be
    a set of byte strings; the script targets Python 2 where it is str.
    """
    return set(Popen(['git', 'tag'], stdout=PIPE).communicate()[0].splitlines())
def git_is_clean():
    # `git diff --quiet` exits 0 only when there are no unstaged changes.
    return Popen(['git', 'diff', '--quiet']).wait() == 0
def make_git_commit(message, *args):
    # Commit all tracked changes with the %-interpolated message.
    message = message % args
    Popen(['git', 'commit', '-am', message]).wait()
def make_git_tag(tag):
    # Create a lightweight (unsigned) tag for this release.
    info('Tagging "%s"', tag)
    Popen(['git', 'tag', tag]).wait()
def main():
    """Drive the whole release: sanity checks, version bump, tag, upload."""
    # Operate from the repository root (this script lives in scripts/).
    | os.chdir(os.path.join(os.path.dirname(__file__), '..'))
    rv = parse_changelog()
    if rv is None:
        fail('Could not parse changelog')
    version, release_date, codename = rv
    # After the release the tree moves straight to the next -dev version.
    dev_version = bump_version(version) + '-dev'
    info('Releasing %s (codename %s, release date %s)',
         version, codename, release_date.strftime('%d/%m/%Y'))
    # Refuse to release twice, on the wrong day, or from a dirty tree.
    tags = get_git_tags()
    if version in tags:
        fail('Version "%s" is already tagged', version)
    if release | _date.date() != date.today():
        fail('Release date is not today (%s != %s)' % (release_date.date(), date.today()))
    if not git_is_clean():
        fail('You have uncommitted changes in git')
    set_init_version(version)
    set_setup_version(version)
    make_git_commit('Bump version number to %s', version)
    make_git_tag(version)
    build_and_upload()
    # Reopen development on the next version.
    set_init_version(dev_version)
    set_setup_version(dev_version)
# Script entry point: only perform a release when executed directly.
if __name__ == '__main__':
    main()
|
dubourg/openturns | python/test/t_KernelMixture_std.py | Python | gpl-3.0 | 4,057 | 0.000986 | #! /usr/bin/env python
# Regression test: a KernelMixture built from a Normal kernel over three
# centres should agree with the reference Mixture of three Normal
# distributions (same PDF/CDF/moments, printed for comparison).
from __future__ import print_function
from openturns import *
TESTPREAMBLE()
RandomGenerator.SetSeed(0)
try:
    # Instantiate the component Normal distributions
    dimension = 3
    meanPoint = NumericalPoint(dimension, 1.0)
    meanPoint[0] = 0.5
    meanPoint[1] = -0.5
    sigma = NumericalPoint(dimension, 1.0)
    sigma[0] = 2.0
    sigma[1] = 3.0
    sample = NumericalSample(0, dimension)
    # Create a collection of distribution
    aCollection = DistributionCollection()
    aCollection.add(Normal(meanPoint, sigma, IdentityMatrix(dimension)))
    sample.add(meanPoint)
    meanPoint += NumericalPoint(meanPoint.getDimension(), 1.0)
    aCollection.add(Normal(meanPoint, sigma, IdentityMatrix(dimension)))
    sample.add(meanPoint)
    meanPoint += NumericalPoint(meanPoint.getDimension(), 1.0)
    aCollection.add(Normal(meanPoint, sigma, IdentityMatrix(dimension)))
    sample.add(meanPoint)
    # Instantiate the kernel mixture under test
    distribution = KernelMixture(Normal(), sigma, sample)
    print("Distribution ", repr(distribution))
    print("Distribution ", distribution)
    distributionRef = Mixture(aCollection)
    # Is this distribution elliptical ?
    print("Elliptical = ", distribution.isElliptical())
    # Is this distribution continuous ?
    print("Continuous = ", distribution.isContinuous())
    # Test for realization of distribution
    oneRealization = distribution.getRealization()
    print("oneRealization=", repr(oneRealization))
    # Test for sampling
    size = 100
    oneSample = distribution.getSample(size)
    print("oneSample first=", repr(
        oneSample[0]), " last=", repr(oneSample[size | - 1]))
    print("mean=", repr(oneSample.computeMean()))
    print("covariance=", repr(oneSample.computeCovariance()))
    # Define a point
    point = NumericalPoint(distribution.getDimension(), 1.0)
    print("Point= ", repr(point))
    # Show PDF and CDF of point
    eps = 1e-5
    # derivative of PDF with regards its arguments
    DDF = distribution.computeDDF(point)
    print("ddf =", repr(DDF))
    print("ddf (ref)=", repr(distributionRef.computeDDF(point)))
    # by the finite difference technique (central differences, step eps)
    ddfFD = NumericalPoint(dimension)
    for i in range(dimension):
        left = NumericalPoint(point)
        left[i] += eps
        right = NumericalPoint(point)
        right[i] -= eps
        ddfFD[i] = (distribution.computePDF(left) -
                    distribution.computePDF(right)) / (2.0 * eps)
    print("ddf (FD)=", repr(ddfFD))
    # PDF value
    LPDF = distribution.computeLogPDF(point)
    print("log pdf=%.6f" % LPDF)
    PDF = distribution.computePDF(point)
    print("pdf =%.6f" % PDF)
    print("pdf (ref)=%.6f" % distributionRef.computePDF(point))
    # by the finite difference technique from CDF (1-D only)
    if (dimension == 1):
        print("pdf (FD)=%.6f" % ((distribution.computeCDF(point + NumericalPoint(1, eps)) -
                                  distribution.computeCDF(point + NumericalPoint(1, -eps))) / (2.0 * eps)))
    # derivative of the PDF with regards the parameters of the distribution
    CDF = distribution.computeCDF(point)
    print("cdf=%.6f" % CDF)
    CCDF = distribution.computeComplementaryCDF(point)
    print("ccdf=%.6f" % CCDF)
    print("cdf (ref)=%.6f" % distributionRef.computeCDF(point))
    # quantile
    quantile = distribution.computeQuantile(0.95)
    print("quantile=", repr(quantile))
    print("quantile (ref)=", repr(distributionRef.computeQuantile(0.95)))
    print("cdf(quantile)=%.6f" % distribution.computeCDF(quantile))
    mean = distribution.getMean()
    print("mean=", repr(mean))
    print("mean (ref)=", repr(distributionRef.getMean()))
    covariance = distribution.getCovariance()
    print("covariance=", repr(covariance))
    print("covariance (ref)=", repr(distributionRef.getCovariance()))
    #parameters = distribution.getParametersCollection()
    # print "parameters=" , parameters
except:
    # NOTE(review): intentional catch-all -- the test reports any failure
    # instead of crashing the surrounding test harness.
    import sys
    print("t_KernelMixture_std.py", sys.exc_info()[0], sys.exc_info()[1])
|
ryanwitt/django-liberace | liberace/systems/debian.py | Python | bsd-2-clause | 554 | 0.00722 | from fabric.api import *
# Keep a handle on fabric's settings() before the local settings() below
# shadows the name.
settings_fabric = settings
def identify(env):
    """Return True when the remote host looks like a Debian system."""
    if 'linux' in env.uname.lower():
        with settings_fabric(warn_only=True):
            # Cache lsb_release output on the env so repeated calls skip
            # the remote round-trip.
            env.lsb_release = env.lsb_release or run('lsb_release -d').lower()
        if 'debian' in env.lsb_release:
            return True
def settings(env):
    """Record the Debian (Apache2) webserver defaults on the fabric env."""
    defaults = (
        ('webserver_config_dir', '/etc/apache2/sites-enabled/'),
        ('webserver_restart_cmd', '/etc/init.d/apache2 restart'),
        ('webserver_user', 'www-data'),
    )
    for attr, value in defaults:
        setattr(env, attr, value)
def install_requirements(env):
    """Package installation is not implemented for Debian systems yet."""
    raise NotImplementedError()
prasadtalasila/IRCLogParser | lib/slack/nickTracker.py | Python | gpl-3.0 | 2,925 | 0.003419 | import re
import lib.slack.config as config
import lib.slack.util as util
def nick_tracker(log_dict):
    """
    Tracks all nicks and identifies nicks which point to the same user
    Args:
        log_dict(dictionary): with key as dateTime.date object and value as {"data":datalist,"channel_name":channels name}
    Returns:
        nicks(list): all nicks
        nick_same_list(list): list of lists with each list corresponding to nicks of same user
    """
    nicks = [] # list of all the nicknames
    # Pre-allocated alias buckets; xrange makes this module Python 2 only.
    nick_same_list = [[] for i in xrange(config.MAX_EXPECTED_DIFF_NICKS)]
    # Getting all the nicknames in a list
    def nick_append(nick, nicks):
        # Helper: append only unseen nicks, keeping `nicks` duplicate-free.
        if nick not in nicks:
            nicks.append(nick)
        return nicks
    for day_content_all_channels in log_dict.values():
        # traverse over data of different channels for that day
        for day_content in day_content_all_channels:
            day_ | logs = day_content["log_data"]
            for day_log in day_logs:
                # use regex to get the string between <> and appended it to the nicks list
                if(util.check_if_msg_line (day_log)):
                    | m = re.search(r"\<(.*?)\>", day_log)
                    nick = util.correctLastCharCR(m.group(0)[1:-1])
                    nicks = nick_append(nick, nicks)
            ''' Forming list of lists for avoiding nickname duplicacy '''
            for line in day_logs:
                if("Nick change:" in line):
                    # NOTE(review): assumes the log line has the shape
                    # "... Nick change: <old> -> <new> ..." so that tokens
                    # 3 and 5 are the old/new nicks -- confirm with parser.
                    old_nick = line.split()[3]
                    new_nick = line.split()[5]
                    nicks = nick_append(old_nick, nicks)
                    nicks = nick_append(new_nick, nicks)
                    # Merge the pair into the first bucket that already
                    # contains either nick, else start a new bucket.
                    for i in xrange(config.MAX_EXPECTED_DIFF_NICKS):
                        if old_nick in nick_same_list[i] or new_nick in nick_same_list[i]:
                            if old_nick not in nick_same_list[i]:
                                nick_same_list[i].append(old_nick)
                            if new_nick not in nick_same_list[i]:
                                nick_same_list[i].append(new_nick)
                            break
                        if not nick_same_list[i]:
                            nick_same_list[i].append(old_nick)
                            nick_same_list[i].append(new_nick)
                            break
    # Give every remaining nick its own singleton bucket.
    for nick in nicks:
        for index in xrange(config.MAX_EXPECTED_DIFF_NICKS):
            if nick in nick_same_list[index]:
                break
            if not nick_same_list[index]:
                nick_same_list[index].append(nick)
                break
    if config.DEBUGGER:
        print "========> 30 on {} nicks".format(len(nicks))
        print nicks[:30]
        print "========> 30 on {} nick_same_list".format(len(nick_same_list))
        print nick_same_list[:30]
    return [nicks, nick_same_list]
ArcherSys/ArcherSys | Lib/test/test_bool.py | Python | mit | 36,233 | 0.004554 | <<<<<<< HEAD
<<<<<<< HEAD
# Test properties of bool promised by PEP 285
import unittest
from test import support
import os
class BoolTest(unittest.TestCase):
    # Each test below checks one facet of the bool behaviour promised by
    # PEP 285.
    def test_subclass(self):
        # bool is deliberately final: True and False must stay singletons.
        try:
            class C(bool):
                pass
        except TypeError:
            pass
        else:
            self.fail("bool should not be subclassable")
        self.assertRaises(TypeError, int.__new__, bool, 0)
    def test_print(self):
        # print() must emit the bool reprs, not 0/1.
        try:
            fo = open(support.TESTFN, "w")
            print(False, True, file=fo)
            fo.close()
            fo = open(support.TESTFN, "r")
            self.assertEqual(fo.read(), 'False True\n')
        finally:
            fo.close()
            os.remove(support.TESTFN)
    def test_repr(self):
        # repr() round-trips through eval().
        self.assertEqual(repr(False), 'False')
        self.assertEqual(repr(True), 'True')
        self.assertEqual(eval(repr(False)), False)
        self.assertEqual(eval(repr(True)), True)
    def test_str(self):
        self.assertEqual(str(False), 'False')
        self.assertEqual(str(True), 'True')
    def test_int(self):
        # Conversions produce plain ints, never the bool singletons.
        self.assertEqual(int(False), 0)
        self.assertIsNot(int(False), False)
        self.assertEqual(int(True), 1)
        self.assertIsNot(int(True), True)
    def test_float(self):
        self.assertEqual(float(False), 0.0)
        self.assertIsNot(float(False), False)
        self.assertEqual(float(True), 1.0)
        self.assertIsNot(float(True), True)
    def test_math(self):
        # Arithmetic on bools promotes to plain int; comparison, identity,
        # membership and `not` produce the True/False singletons; & | ^
        # between two bools stay bool, but mixed with int they become int.
        self.assertEqual(+False, 0)
        self.assertIsNot(+False, False)
        self.assertEqual(-False, 0)
        self.assertIsNot(-False, False)
        self.assertEqual(abs(False), 0)
        self.assertIsNot(abs(False), False)
        self.assertEqual(+True, 1)
        self.assertIsNot(+True, True)
        self.assertEqual(-True, -1)
        self.assertEqual(abs(True), 1)
        self.assertIsNot(abs(True), True)
        self.assertEqual(~False, -1)
        self.assertEqual(~True, -2)
        self.assertEqual(False+2, 2)
        self.assertEqual(True+2, 3)
        self.assertEq | ual(2+False, 2)
        self.assertEqual(2+True, 3)
        self.assertEqual(False+False, 0)
        self.assertIsNot(False+False, False)
        self.assertEqual(False+True, 1)
        self.assertIsNot(False+True, True)
        self.assertEqual(True+False, 1)
        self.assertIsNot(True+False, True)
        self.assertEqual(True+True, 2)
        self.assertEqual(True-True, 0)
        self.assertIsNot(True-True, False)
        self.asse | rtEqual(False-False, 0)
        self.assertIsNot(False-False, False)
        self.assertEqual(True-False, 1)
        self.assertIsNot(True-False, True)
        self.assertEqual(False-True, -1)
        self.assertEqual(True*1, 1)
        self.assertEqual(False*1, 0)
        self.assertIsNot(False*1, False)
        self.assertEqual(True/1, 1)
        self.assertIsNot(True/1, True)
        self.assertEqual(False/1, 0)
        self.assertIsNot(False/1, False)
        for b in False, True:
            for i in 0, 1, 2:
                self.assertEqual(b**i, int(b)**i)
                self.assertIsNot(b**i, bool(int(b)**i))
        for a in False, True:
            for b in False, True:
                self.assertIs(a&b, bool(int(a)&int(b)))
                self.assertIs(a|b, bool(int(a)|int(b)))
                self.assertIs(a^b, bool(int(a)^int(b)))
                self.assertEqual(a&int(b), int(a)&int(b))
                self.assertIsNot(a&int(b), bool(int(a)&int(b)))
                self.assertEqual(a|int(b), int(a)|int(b))
                self.assertIsNot(a|int(b), bool(int(a)|int(b)))
                self.assertEqual(a^int(b), int(a)^int(b))
                self.assertIsNot(a^int(b), bool(int(a)^int(b)))
                self.assertEqual(int(a)&b, int(a)&int(b))
                self.assertIsNot(int(a)&b, bool(int(a)&int(b)))
                self.assertEqual(int(a)|b, int(a)|int(b))
                self.assertIsNot(int(a)|b, bool(int(a)|int(b)))
                self.assertEqual(int(a)^b, int(a)^int(b))
                self.assertIsNot(int(a)^b, bool(int(a)^int(b)))
        self.assertIs(1==1, True)
        self.assertIs(1==0, False)
        self.assertIs(0<1, True)
        self.assertIs(1<0, False)
        self.assertIs(0<=0, True)
        self.assertIs(1<=0, False)
        self.assertIs(1>0, True)
        self.assertIs(1>1, False)
        self.assertIs(1>=1, True)
        self.assertIs(0>=1, False)
        self.assertIs(0!=1, True)
        self.assertIs(0!=0, False)
        x = [1]
        self.assertIs(x is x, True)
        self.assertIs(x is not x, False)
        self.assertIs(1 in x, True)
        self.assertIs(0 in x, False)
        self.assertIs(1 not in x, False)
        self.assertIs(0 not in x, True)
        x = {1: 2}
        self.assertIs(x is x, True)
        self.assertIs(x is not x, False)
        self.assertIs(1 in x, True)
        self.assertIs(0 in x, False)
        self.assertIs(1 not in x, False)
        self.assertIs(0 not in x, True)
        self.assertIs(not True, False)
        self.assertIs(not False, True)
    def test_convert(self):
        # bool() conversions return the singletons; extra args are rejected.
        self.assertRaises(TypeError, bool, 42, 42)
        self.assertIs(bool(10), True)
        self.assertIs(bool(1), True)
        self.assertIs(bool(-1), True)
        self.assertIs(bool(0), False)
        self.assertIs(bool("hello"), True)
        self.assertIs(bool(""), False)
        self.assertIs(bool(), False)
    def test_format(self):
        # %-formatting treats bools as their int values.
        self.assertEqual("%d" % False, "0")
        self.assertEqual("%d" % True, "1")
        self.assertEqual("%x" % False, "0")
        self.assertEqual("%x" % True, "1")
    def test_hasattr(self):
        self.assertIs(hasattr([], "append"), True)
        self.assertIs(hasattr([], "wobble"), False)
    def test_callable(self):
        self.assertIs(callable(len), True)
        self.assertIs(callable(1), False)
    def test_isinstance(self):
        # bool is an int subtype, but ints are not bools.
        self.assertIs(isinstance(True, bool), True)
        self.assertIs(isinstance(False, bool), True)
        self.assertIs(isinstance(True, int), True)
        self.assertIs(isinstance(False, int), True)
        self.assertIs(isinstance(1, bool), False)
        self.assertIs(isinstance(0, bool), False)
    def test_issubclass(self):
        self.assertIs(issubclass(bool, int), True)
        self.assertIs(issubclass(int, bool), False)
    def test_contains(self):
        self.assertIs(1 in {}, False)
        self.assertIs(1 in {1:1}, True)
    def test_string(self):
        # String predicate methods must return real bools.
        self.assertIs("xyz".endswith("z"), True)
        self.assertIs("xyz".endswith("x"), False)
        self.assertIs("xyz0123".isalnum(), True)
        self.assertIs("@#$%".isalnum(), False)
        self.assertIs("xyz".isalpha(), True)
        self.assertIs("@#$%".isalpha(), False)
        self.assertIs("0123".isdigit(), True)
        self.assertIs("xyz".isdigit(), False)
        self.assertIs("xyz".islower(), True)
        self.assertIs("XYZ".islower(), False)
        self.assertIs("0123".isdecimal(), True)
        self.assertIs("xyz".isdecimal(), False)
        self.assertIs("0123".isnumeric(), True)
        self.assertIs("xyz".isnumeric(), False)
        self.assertIs(" ".isspace(), True)
        self.assertIs("\xa0".isspace(), True)
        self.assertIs("\u3000".isspace(), True)
        self.assertIs("XYZ".isspace(), False)
        self.assertIs("X".istitle(), True)
        self.assertIs("x".istitle(), False)
        self.assertIs("XYZ".isupper(), True)
        self.assertIs("xyz".isupper(), False)
        self.assertIs("xyz".startswith("x"), True)
        self.assertIs("xyz".startswith("z"), False)
    def test_boolean(self):
        # Bitwise ops: bool op int -> int, bool op bool -> bool.
        self.assertEqual(True & 1, 1)
        self.assertNotIsInstance(True & 1, bool)
        self.assertIs(True & True, True)
        self.assertEqual(True | 1, 1)
        self.assertNotIsInstance(True | 1, bool)
        self.assertIs(True | True, True)
        self.assertEqual(True ^ 1, 0)
        self.assertNotIsInstance(True ^ 1, bool)
        self.assertIs(True ^ True, False)
def test_fileclosed(self):
try:
f = open(support.TESTFN, "w")
self |
tudennis/LeetCode---kamyu104-11-24-2015 | Python/beautiful-arrangement-ii.py | Python | mit | 1,326 | 0 | # Time: O(n)
# Space: O(1)
# Given two integers n and k,
# you need to construct a list which contains n different positive integers
# ranging from 1 to n and obeys the following requirement:
# Suppose this list is [a1, a2, a3, ... , an],
# then the list [|a1 - a2|, |a2 - a3|, |a3 - a4|, ... , |an-1 - an|] has
# exactly k distinct integers.
#
# If there are multiple | answers, print any of them.
#
# Example 1:
# Input: n = 3, k = 1
# Output: [1, 2, 3]
# Explanation: The [1, 2, 3] has three different positive integers ranging
# from 1 to 3, and the [1, 1] has exactly 1 distinct integer: 1.
#
# Example 2:
# Input: n = 3, k = 2
# Output: [1, 3, 2]
# Explanation: The [1, 3, 2] has three different positive integers ranging
# from 1 to 3,
# and the [2, 1] has exactly 2 distinct integers: 1 and 2.
#
# Note:
# The n and k are in the range 1 <= k < n <= 10^4.
class Solution(object):
    """Builds a permutation of 1..n whose adjacent absolute differences
    take exactly k distinct values (LeetCode 667)."""

    def constructArray(self, n, k):
        """
        :type n: int
        :type k: int
        :rtype: List[int]
        """
        lo, hi = 1, n
        answer = []
        while lo <= hi:
            # While k > 1, alternate between the two ends to produce a
            # fresh gap size each step; once k reaches 1 the remaining
            # values are emitted in order (all further gaps are 1).
            if k % 2 == 1:
                answer.append(lo)
                lo += 1
            else:
                answer.append(hi)
                hi -= 1
            if k > 1:
                k -= 1
        return answer
|
ivmech/iviny-scope | lib/xlsxwriter/test/comparison/test_chart_axis14.py | Python | gpl-3.0 | 2,805 | 0 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013, John McNamara, jmcnamara@cpan.org
#
import unittest
import os
from ...workbook import Workbook
from ..helperfunctions import _compare_xlsx_files
class TestCompareXLSXFiles(unittest.TestCase):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """
    def setUp(self):
        self.maxDiff = None
        filename = 'chart_axis14.xlsx'
        test_dir = 'xlsxwriter/test/comparison/'
        # File produced by this test vs. the Excel-generated reference.
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename
        self.ignore_files = []
        # Number-format codes may legitimately differ from Excel's output.
        self.ignore_elements = {'xl/charts/chart1.xml': ['<c:formatCode']}
    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file."""
        filename = self.got_filename
        ####################################################
        workbook = Workbook(filename)
        worksheet = workbook.add_worksheet()
        chart = workbook.add_chart({'type': 'stock'})
        date_format = workbook.add_format({'num_format': 14})
        # Fixed axis ids keep the generated XML byte-comparable.
        chart.axis_ids = [43814272, 54517760]
        # Column 0: serial dates; columns 1-3: high/low/close values.
        data = [
            [39083, 39084, 39085, 39086, 39087],
            [27.2, 25.03, 19.05, 20.34, 18.5],
            [23.49, 19.55, 15.12, 17.84, 16.34],
            [25.45, 23.05, 17.32, 20.45, 17.34],
        ]
        for row in range(5):
            worksheet.write(row, 0, data[0][row], date_format)
            worksheet.write(row, 1, data[1][row])
            worksheet.write(row, 2, data[2][row])
            worksheet.write(row, 3, data[3][row])
        worksheet.set_column('A:D', 11)
        chart.add_series({
            'categories': '=Sheet1!$A$1:$A$5',
            'values': '=Sheet1!$B$1:$B$5',
        })
        chart.add_series({
            'categories': '=Sheet1!$A$1:$A$5',
            'values': '=Sheet1!$C$1:$C$5',
        })
        chart.add_series({
            'categories': '=She | et1!$A$1:$A$5',
            ' | values': '=Sheet1!$D$1:$D$5',
        })
        # Explicit axis limits are the feature under test here.
        chart.set_y_axis({'min': 0, 'max': 30})
        chart.set_x_axis({'min': 39083, 'max': 39087})
        worksheet.insert_chart('E9', chart)
        workbook.close()
        ####################################################
        got, exp = _compare_xlsx_files(self.got_filename,
                                       self.exp_filename,
                                       self.ignore_files,
                                       self.ignore_elements)
        self.assertEqual(got, exp)
    def tearDown(self):
        # Cleanup.
        if os.path.exists(self.got_filename):
            os.remove(self.got_filename)
if __name__ == '__main__':
    unittest.main()
|
99cloud/keystone_register | openstack_dashboard/register/register.py | Python | apache-2.0 | 6,455 | 0.007126 | '''
Created on Nov 2, 2012
@author: maodouzi
'''
import logging
from keystoneclient.v2_0 import client as keystone_client
from novaclient.v1_1 import client as nova_client
from cinderclient.v1 import client as cinder_client
from keystoneclient.exceptions import BadRequest
from openstack_dashboard.local.local_settings import OPENSTACK_HOST
LOG = logging.getLogger(__name__)
DEFAULT_ROLE = None
# Keystone's default member role name.
MEMBER_ROLE = "_member_"
# Keystone admin API endpoint (v2.0) on the configured OpenStack host.
ENDPOINT_URL = "http://%s:35357/v2.0" % OPENSTACK_HOST
ERR_MSG = {"accountExist": "Account already exist"
           }
# Decorate every message as "ERROR: <text> !".
ERR_MSG = {key:"ERROR: %s !" % value for key, value in ERR_MSG.items()}
class RequestException(Exception):
    """Raised when an account-registration request cannot be fulfilled."""

    # Fallback text used when an empty message is supplied.  Python 3's
    # Exception no longer has a `message` attribute (Python 2's was ''),
    # so declare the default here to keep `self.__class__.message` from
    # raising AttributeError.
    message = ""

    def __init__(self, message=None):
        self.message = str(message) or self.__class__.message

    def __str__(self):
        return self.message
class RequestClient(object):
    """Creates (or rolls back) a Keystone tenant+user pair for a signup
    request and applies the requested nova/cinder quotas.

    NOTE(review): targets the legacy v2 keystone/nova/cinder client APIs.
    """
    def __init__(self, username, password, email, phoneNum, realName, corpName,
                 applyReason, quota, token, adminTenant, adminUser, adminPasswd,
                 endpoint=ENDPOINT_URL):
        self.token = token
        self.endpoint = endpoint
        # Admin-token connection to keystone for tenant/user/role calls.
        self.conn = keystone_client.Client(token=self.token, endpoint=self.endpoint)
        self._fetchInfo()
        self.novaConn = nova_client.Client(username=adminUser,
                                           api_key=adminPasswd,
                                           project_id=adminTenant,
                                           auth_url=endpoint)
        self.cinderConn = cinder_client.Client(username=adminUser,
                                               api_key=adminPasswd,
                                               project_id=adminTenant,
                                               auth_url=endpoint)
        self.quota = quota
        self.username = username
        self.password = password
        self.email = email
        self.realName = realName
        self.phoneNum = phoneNum
        self.corpName = corpName
        self.applyReason = applyReason
        # Applicant details are packed into the tenant description field.
        self.description = "==".join((self.email, self.phoneNum, self.realName, self.corpName, self.applyReason))
        if self._isAccountExist():
            raise RequestException(ERR_MSG["accountExist"])
    def createAccount(self):
        # Any failure rolls back whatever was created so far.
        try:
            self._createTenant()
            self._updateQuota()
            self._createUser()
            self._addRole()
        except Exception as e:
            self.deleteAccount()
            raise RequestException(e)
    def deleteAccount(self):
        self._deleteTenant()
        self._del | eteUser()
    def _checkRequestArgs(self):
        return self._isRequestValid() and (not self._isAccountExist())
    def _fetchInfo(self):
        # Refresh cached name->id maps; called after every mutation.
        | try:
            self.tenantList = self.conn.tenants.list()
            self.userList = self.conn.users.list()
            self.roleList = self.conn.roles.list()
            self.tenantDict = {str(item.name):str(item.id) for item in self.tenantList}
            self.userDict = {str(item.name):str(item.id) for item in self.userList}
            self.memberRoleId = [str(item.id) for item in self.roleList
                                 if str(item.name) == MEMBER_ROLE][0]
            # During the first call (from __init__, before self.username
            # is assigned) skip the per-account lookups.
            try:
                self.username
            except AttributeError:
                pass
            else:
                self.tenantId = self.tenantDict.get(self.username, False)
                self.userId = self.userDict.get(self.username, False)
                if self.tenantId and self.userId:
                    self.boundRoleList = self.conn.roles.roles_for_user(user=self.userId,
                                                                        tenant=self.tenantId)
                    self.boundRoleDict = {str(item.name):str(item.id) for item in self.boundRoleList}
                else:
                    self.boundRoleDict = {}
        except BadRequest as e:
            LOG.debug(e)
            raise RequestException(e)
        except IndexError as e:
            # Raised by the [0] above when the member role is missing.
            LOG.debug(e)
            raise RequestException("No role named %s" % MEMBER_ROLE)
    def _isRequestValid(self):
        # Placeholder: request-argument validation not implemented yet.
        return True
    def _isAccountExist(self):
        return self._isTenantNameExist() or self._isUserNameExist()
    def _isTenantNameExist(self):
        return self.username in self.tenantDict
    def _isUserNameExist(self):
        return self.username in self.userDict
    def _isBound2Role(self):
        return MEMBER_ROLE in self.boundRoleDict
    def _createTenant(self):
        if not self._isTenantNameExist():
            self.conn.tenants.create(tenant_name=self.username,
                                     description=self.description,
                                     enabled=True)
            self._fetchInfo()
    def _deleteTenant(self):
        if self._isTenantNameExist():
            self.conn.tenants.delete(tenant=self.tenantId)
            self._fetchInfo()
    def _createUser(self):
        self._createTenant()
        if not self._isUserNameExist():
            # The user starts disabled; an admin enables it after review.
            self.conn.users.create(name=self.username,
                                   password=self.password,
                                   email=self.email,
                                   tenant_id=self.tenantId,
                                   enabled=False)
            self._fetchInfo()
    def _deleteUser(self):
        if self._isUserNameExist():
            self.conn.users.delete(user=self.userId)
            self._fetchInfo()
    def _addRole(self):
        if not self._isBound2Role():
            self.conn.roles.add_user_role(self.userId, self.memberRoleId, self.tenantId)
            self._fetchInfo()
    def _getQuota(self):
        quotaDict = {}
        quotaDict["nova"] = self.novaConn.quotas.get(tenant_id=self.tenantId)
        quotaDict["cinder"] = self.cinderConn.quotas.get(tenant_id=self.tenantId)
        return quotaDict
    def _updateQuota(self):
        # Volume quotas belong to cinder; strip them from the nova update.
        nova_quota = self.quota.copy()
        del nova_quota["volumes"]
        del nova_quota["gigabytes"]
        self.novaConn.quotas.update(tenant_id=self.tenantId, **nova_quota)
        self.cinderConn.quotas.update(tenant_id=self.tenantId,
                                      volumes=self.quota["volumes"],
                                      gigabytes=self.quota["gigabytes"]
                                      )
|
byung-u/ProjectEuler | Problem_100_199/euler_100.py | Python | mit | 1,335 | 0.004498 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Problem 100
If a box contains twenty-one coloured discs, composed of fifteen blue discs and six red discs, and two discs were taken at random, it can be seen that the probability of taking two blue discs, P(BB) = (15/21)×(14/20) = 1/2.
The next such arrangement, for which there is exactly 50% chance of taking two blue discs at random, is a box containing eighty-five blue discs and thirty-five red discs.
By finding the first arrangement to contain over 10**12 = 1,000,000,000,000 discs in total, determine the number of blue discs that the box would contain.
'''
from itertools import count
from math import sqrt, ceil
# https://oeis.org/A001542
def get_nominator(n):
    """Return the n-th term of OEIS A001542 (0, 2, 12, 70, 408, ...).

    Uses the exact integer recurrence a(n) = 6*a(n-1) - a(n-2) rather
    than the floating-point Binet-style formula with ceil(), which loses
    precision (and can round the wrong way) once (3 + 2*sqrt(2))**n
    exceeds 2**53.
    """
    a, b = 0, 2
    for _ in range(n):
        a, b = b, 6 * b - a
    return a
def p100_use_diophantine():
    """Print the blue-disc count via the Diophantine recurrence on
    (blue, total) pairs, e.g. (15, 21) -> (85, 120) -> ..."""
    limit = 10 ** 12
    blue, total = 1, 1
    while total < limit:
        blue, total = 3 * blue + 2 * total - 2, 4 * blue + 3 * total - 3
    print(blue)
# Actually Diophantine pairs.. https://oeis.org/A011900
def p100():  # Answer: 756872327473, 0.01s
    """Print the first arrangement product exceeding the 10**12 bound."""
    limit = 10 ** 12
    blue = 1
    for step in count(1):
        # Consecutive A001542 terms each apply twice, hence step // 2.
        delta = get_nominator(step // 2)
        product = blue * (blue + delta)
        blue = blue + delta
        # The two factors' ratio is near sqrt(2) ~ 1.414 (15/21, 85/120...).
        if product * 1.414 > limit:
            print(product)
            break
    return
p100()
|
grnet/e-science | orka/setup.py | Python | agpl-3.0 | 676 | 0.028107 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
# Bootstrap setuptools if it is missing, then register the orka CLI.
import ez_setup
ez_setup.use_setuptools()
from os.path import dirname, abspath, join
from setuptools import | setup
# NOTE(review): BASE_DIR is computed but never used below -- confirm
# whether it can be dropped.
BASE_DIR = join(dirname(abspath(__file__)), 'orka/orka.py')
import orka
requires = ['kamaki','paramiko','requests','PyYAML']
# setup
setup(
    name = "orka",
    packages = ["orka"],
    # starts from this main
    entry_points = {
        "console_scripts": ['orka = orka.orka:main']
    },
    version = orka.__version__,
    description = "Python command line application for creating and deleting Hadoop clusters in ~okeanos.",
    | install_requires = requires
)
|
shirou/ansible | test/units/TestModuleUtilsBasic.py | Python | gpl-3.0 | 12,253 | 0.003101 | import os
import tempfile
import unittest
from nose.tools import raises
from nose.tools import timed
from ansible import errors
from ansible.module_common import ModuleReplacer
from ansible.utils import md5 as utils_md5
TEST_MODULE_DATA = """
from ansible.module_utils.basic import *
def get_module():
return AnsibleModule(
argument_spec = dict(),
supports_check_mode = True,
no_log = True,
)
get_module()
"""
class TestModuleUtilsBasic(unittest.TestCase):
    """Exercises AnsibleModule.run_command() on a generated test module."""
    def cleanup_temp_file(self, fd, path):
        # Best-effort removal; the file may already be gone.
        try:
            os.close(fd)
            os.remove(path)
        except:
            pass
    def cleanup_temp_dir(self, path):
        try:
            os.rmdir(path)
        except:
            pass
    def setUp(self):
        # create a temporary file for the test module
        # we're about to generate
        self.tmp_fd, self.tmp_path = tempfile.mkstemp()
        os.write(self.tmp_fd, TEST_MODULE_DATA)
        # template the module code and eval it
        module_data, module_style, shebang = ModuleReplacer().modify_module(self.tmp_path, {}, "", {})
        d = {}
        exec(module_data, d, d)
        self.module = d['get_module']()
        # module_utils/basic.py screws with CWD, let's save it and reset
        self.cwd = os.getcwd()
    def tearDown(self):
        self.cleanup_temp_file(self.tmp_fd, self.tmp_path)
        # Reset CWD back to what it was before basic.py changed it
        os.chdir(self.cwd)
    #################################################################################
    # run_command() tests
    # test run_command with a string command
    def test_run_command_string(self):
        (rc, out, err) = self.module.run_command("/bin/echo -n 'foo bar'")
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar')
        (rc, out, err) = self.module.run_command("/bin/echo -n 'foo bar'", use_unsafe_shell=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar')
    # test run_command with an array of args (with both use_unsafe_shell=True|False)
    def test_run_command_args(self):
        (rc, out, err) = self.module.run_command(['/bin/echo', '-n', "foo bar"])
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar')
        (rc, out, err) = self.module.run_command(['/bin/echo', '-n', "foo bar"], use_unsafe_shell=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar')
    # test run_command with leading environment variables
    @raises(SystemExit)
    def test_run_command_string_with_env_variables(self):
        self.module.run_command('FOO=bar /bin/echo -n "foo bar"')
    @raises(SystemExit)
    def test_run_command_args_with_env_variables(self):
        self.module.run_command(['FOO=bar', '/bin/echo', '-n', 'foo bar'])
    def test_run_command_string_unsafe_with_env_variables(self):
        (rc, out, err) = self.module.run_command('FOO=bar /bin/echo -n "foo bar"', use_unsafe_shell=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar')
    # test run_command with a command pipe (with both use_unsafe_shell=True|False)
    def test_run_command_string_unsafe_with_pipe(self):
        (rc, out, err) = self.module.run_command('echo "foo bar" | cat', use_unsafe_shell=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar\n')
    # test run_command with a shell redirect in (with both use_unsafe_shell=True|False)
    def test_run_command_string_unsafe_with_redirect_in(self):
        (rc, out, err) = self.module.run_command('cat << EOF\nfoo bar\nEOF', use_unsafe_shell=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar\n')
    # test run_command with a shell redirect out (with both use_unsafe_shell=True|False)
    def test_run_command_string_unsafe_with_redirect_out(self):
        tmp_fd, tmp_path = tempfile.mkstemp()
        | try:
            | (rc, out, err) = self.module.run_command('echo "foo bar" > %s' % tmp_path, use_unsafe_shell=True)
            self.assertEqual(rc, 0)
            self.assertTrue(os.path.exists(tmp_path))
            md5sum = utils_md5(tmp_path)
            self.assertEqual(md5sum, '5ceaa7ed396ccb8e959c02753cb4bd18')
        except:
            raise
        finally:
            self.cleanup_temp_file(tmp_fd, tmp_path)
    # test run_command with a double shell redirect out (append) (with both use_unsafe_shell=True|False)
    def test_run_command_string_unsafe_with_double_redirect_out(self):
        tmp_fd, tmp_path = tempfile.mkstemp()
        try:
            (rc, out, err) = self.module.run_command('echo "foo bar" >> %s' % tmp_path, use_unsafe_shell=True)
            self.assertEqual(rc, 0)
            self.assertTrue(os.path.exists(tmp_path))
            md5sum = utils_md5(tmp_path)
            self.assertEqual(md5sum, '5ceaa7ed396ccb8e959c02753cb4bd18')
        except:
            raise
        finally:
            self.cleanup_temp_file(tmp_fd, tmp_path)
    # test run_command with data
    def test_run_command_string_with_data(self):
        (rc, out, err) = self.module.run_command('cat', data='foo bar')
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'foo bar\n')
    # test run_command with binary data
    def test_run_command_string_with_binary_data(self):
        (rc, out, err) = self.module.run_command('cat', data='\x41\x42\x43\x44', binary_data=True)
        self.assertEqual(rc, 0)
        self.assertEqual(out, 'ABCD')
    # test run_command with a cwd set
    def test_run_command_string_with_cwd(self):
        tmp_path = tempfile.mkdtemp()
        try:
            (rc, out, err) = self.module.run_command('pwd', cwd=tmp_path)
            self.assertEqual(rc, 0)
            self.assertTrue(os.path.exists(tmp_path))
            self.assertEqual(out.strip(), os.path.realpath(tmp_path))
        except:
            raise
        finally:
            self.cleanup_temp_dir(tmp_path)
class TestModuleUtilsBasicHelpers(unittest.TestCase):
    ''' Test some implementation details of AnsibleModule
    Some pieces of AnsibleModule are implementation details but they have
    potential cornercases that we need to check.  Go ahead and test at
    this level that the functions are behaving even though their API may
    change and we'd have to rewrite these tests so that we know that we
    need to check for those problems in any rewrite.
    In the future we might want to restructure higher level code to be
    friendlier to unittests so that we can test at the level that the public
    is interacting with the APIs.
    '''
    # Fixture sizes / secrets embedded in the generated hostvars data.
    MANY_RECORDS = 7000
    URL_SECRET = 'http://username:pas:word@foo.com/data'
    SSH_SECRET = 'username:pas:word@foo.com/data'
    def cleanup_temp_file(self, fd, path):
        # Best-effort removal; the file may already be gone.
        try:
            os.close(fd)
            os.remove(path)
        except:
            pass
    def cleanup_temp_dir(self, path):
        try:
            os.rmdir(path)
        except:
            pass
    def _gen_data(self, records, per_rec, top_level, secret_text):
        # Build a hostvars-shaped dict with `records`-1 hosts; optionally
        # embed the secret per host record and/or at the top level.
        hostvars = {'hostvars': {}}
        for i in range(1, records, 1):
            host_facts = {'host%s' % i:
                          {'pstack':
                           {'running': '875.1',
                            'symlinked': '880.0',
                            'tars': [],
                            'versions': ['885.0']},
                           }}
            if per_rec:
                host_facts['host%s' % i]['secret'] = secret_text
            hostvars['hostvars'].update(host_facts)
        if top_level:
            hostvars['secret'] = secret_text
        return hostvars
def setUp(self):
self.many_url = repr(self._gen_data(self.MANY_RECORDS, True, True,
self.URL_SECRET))
self.many_ssh = repr(self._gen_data(self.MANY_RECORDS, True, True,
self.SSH_SECRET))
self.one_url = repr(self._gen_data(self.MANY_RECORDS, False, True,
self.URL_SECRET))
self.one_ssh = repr(self._gen_data(self.MANY_RECORDS, False, True,
self.SSH_SECRET))
self.zero_secrets = repr(self._gen_dat |
Guidobelix/pyload | module/plugins/hoster/AndroidfilehostCom.py | Python | gpl-3.0 | 2,454 | 0.014262 | # -*- coding: utf-8 -*
#
# Test links:
# https://www.androidfilehost.com/?fid=95916177934518197
import re
from module.plugins.internal.SimpleHoster import SimpleHoster
class AndroidfilehostCom(SimpleHoster):
__name__ = "AndroidfilehostCom"
__type__ = "hoster"
__version__ = "0.05"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?androidfilehost\.com/\?fid=\d+'
__config__ = [("activated" , "bool", "Activated" , True),
("use_premium" , "bool", "Use premium account if available" , True),
("fallback" , "bool", "Fallback to free download if premium fails" , True),
("chk_filesize", "bool", "Check file size" , True),
("max_wait" , "int" , "Reconnect if waiting time is greater than minutes", 10 )]
__description__ = """Androidfilehost.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("zapp-brannigan", "fuerst.reinje@web.de")]
NAME_PATTERN = r'<br />(?P<N>.*?)</h1>'
SIZE_PATTERN = r'<h4>size</h4>\s*<p>(?P<S>[\d.,]+)(?P | <U>[\w^_]+)</p>'
HASHSUM_PATTERN = r'<h4>(?P<H>.* | ?)</h4>\s*<p><code>(?P<D>.*?)</code></p>'
OFFLINE_PATTERN = r'404 not found'
WAIT_PATTERN = r'users must wait <strong>(\d+) secs'
def setup(self):
    """Per-download transfer settings for this hoster."""
    self.multiDL = True          # parallel downloads are allowed
    self.resume_download = True  # interrupted downloads can be resumed
    self.chunk_limit = 1         # download each file in a single chunk
def handle_free(self, pyfile):
wait = re.search(self.WAIT_PATTERN, self.data)
self.log_debug("Waiting time: %s seconds" % wait.group(1))
fid = re.search(r'id="fid" value="(\d+)" />', self.data).group(1)
self.log_debug("FID: %s" % fid)
html = self.load("https://www.androidfilehost.com/libs/otf/mirrors.otf.php",
post={'submit': 'submit',
'action': 'getdownloadmirrors',
'fid' : fid})
self.link = re.findall('"url":"(.*?)"', html)[0].replace("\\", "")
mirror_host = self.link.split("/")[2]
self.log_debug("Mirror Host: %s" % mirror_host)
html = self.load("https://www.androidfilehost.com/libs/otf/stats.otf.php",
get={'fid' : fid,
'w' : 'download',
'mirror': mirror_host})
|
release-engineering/product-definition-center | pdc/apps/componentbranch/serializers.py | Python | mit | 11,180 | 0.000537 | #
# Copyright (c) 2017 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from rest_framework import serializers
from django.conf import settings as django_settings
import re
from datetime import datetime
import six
from pdc.apps.common.fields import ChoiceSlugField
from pdc.apps.component.models import ReleaseComponentType, GlobalComponent
from pdc.apps.componentbranch.models import (
ComponentBranch, SLA, SLAToComponentBranch)
from pdc.apps.common.serializers import StrictSerializerMixin
def is_branch_active(branch):
    """
    Checks to see if the branch is active by seeing if there are valid SLAs
    tied to the branch
    :param branch: a ComponentBranch object
    :return: a boolean
    """
    today = datetime.utcnow().date()
    # Active means at least one attached SLA has not yet passed its EOL date.
    return any(sla.eol >= today for sla in branch.slas.all())
class BranchNameField(serializers.Field):
    """Serializer field enforcing the component-branch naming policy."""

    doc_format = "string"

    @staticmethod
    def bad_branch_name(branch_name):
        """Return a truthy value when *branch_name* matches the configured
        blacklist regex (and a blacklist is configured at all).

        :param branch_name: string representing the branch name
        :return: boolean
        """
        blacklist = django_settings.COMPONENT_BRANCH_NAME_BLACKLIST_REGEX
        return blacklist and re.match(blacklist, branch_name)

    def to_representation(self, obj):
        """Branch names serialize as-is (plain strings)."""
        return obj

    def to_internal_value(self, data):
        """Validate *data* as a branch name and return it.

        Enforces: must be a text string, at most 300 characters, and must
        not collide with the configured blacklist regex.
        """
        if not isinstance(data, six.text_type):
            raise serializers.ValidationError(
                'A string was not supplied. The type was "{0}".'.format(
                    type(data).__name__))
        if len(data) > 300:
            raise serializers.ValidationError(
                'The string must be less than 300 characters')
        if self.bad_branch_name(data):
            raise serializers.ValidationError(
                'The branch name is not allowed based on the regex "{0}"'
                .format(django_settings.COMPONENT_BRANCH_NAME_BLACKLIST_REGEX))
        return data
class SLASerializer(StrictSerializerMixin, serializers.ModelSerializer):
    """Serializer for the SLA model."""

    class Meta:
        model = SLA
        fields = ('id', 'name', 'description')

    def update(self, instance, validated_data):
        """Apply an update, rejecting renames: SLA names are immutable."""
        renamed = ('name' in validated_data
                   and validated_data['name'] != instance.name)
        if renamed:
            error_msg = 'You may not modify the SLA\'s name due to policy'
            raise serializers.ValidationError({'name': [error_msg]})
        return super(SLASerializer, self).update(instance, validated_data)
class SLAToComponentBranchSerializerForComponentBranch(
        serializers.ModelSerializer):
    """
    A serializer for the SLAToComponentBranch model to be used in the
    ComponentBranch serializer
    """
    # Read-only: SLA assignments are managed through their own endpoint,
    # not through the nested representation on a branch.
    sla = ChoiceSlugField(slug_field='name', read_only=True)
    eol = serializers.DateField(read_only=True)

    class Meta:
        model = SLAToComponentBranch
        fields = ('id', 'sla', 'eol')
class ComponentBranchSerializer(StrictSerializerMixin,
                                serializers.ModelSerializer):
    """
    A serializer for the ComponentBranch model
    """
    name = BranchNameField()
    global_component = serializers.SlugRelatedField(
        slug_field='name', queryset=GlobalComponent.objects.all())
    type = ChoiceSlugField(
        slug_field='name', queryset=ReleaseComponentType.objects.all())
    critical_path = serializers.BooleanField(default=False)
    # SLAs are exposed read-only here; they are created/modified through the
    # SLAToComponentBranch endpoint.
    slas = SLAToComponentBranchSerializerForComponentBranch(
        many=True, read_only=True)
    active = serializers.SerializerMethodField('is_active')

    def is_active(self, branch):
        """
        Calls the is_branch_active function to determine if the branch is still
        active
        :param branch: a ComponentBranch object
        :return: a boolean
        """
        return is_branch_active(branch)

    class Meta:
        model = ComponentBranch
        fields = ('id', 'global_component', 'name', 'slas', 'type', 'active',
                  'critical_path')

    def update(self, instance, validated_data):
        """
        Override the update function to not allow a user to modify the branch
        name
        """
        # Branch renames are forbidden by policy; reject any change attempt.
        if 'name' in validated_data and instance.name != validated_data['name']:
            raise serializers.ValidationError({
                'name': ['You may not modify the branch\'s name due to policy']
            })
        return super(ComponentBranchSerializer, self).update(
            instance, validated_data)
class ComponentBranchSerializerWithoutSLA(serializers.Serializer):
    """
    A serializer for the ComponentBranch model to be used in the
    SLAToComponentBranch serializer
    """
    id = serializers.IntegerField(read_only=True)
    name = BranchNameField()
    global_component = serializers.SlugRelatedField(
        slug_field='name', queryset=GlobalComponent.objects.all())
    type = ChoiceSlugField(
        slug_field='name', queryset=ReleaseComponentType.objects.all())
    # Unlike ComponentBranchSerializer, no default here: absence means
    # "leave unchanged" in this nested context.
    critical_path = serializers.BooleanField(required=False)
    active = serializers.SerializerMethodField('is_active')

    def is_active(self, branch):
        """
        Calls the is_branch_active function to determine if the branch is still
        active
        :param branch: a ComponentBranch object
        :return: a boolean
        """
        return is_branch_active(branch)
class SLAToComponentBranchSerializer(StrictSerializerMixin,
serializers.Serializer):
"""
A serializer for the SLAToComponentBranch model that allows branch creation
"""
id = serializers.IntegerField(read_only=True)
sla = ChoiceSlugField(slug_field='name', queryset=SLA.objects.all())
branch = ComponentBranchSerializerWithoutSLA()
eol = serializers.DateField()
def create(self, validated_data):
"""
Creates the SLAToComponentBranch entry based on the serialized data
"""
branch_component_type_name = validated_data['branch']['type']
component_type = ReleaseComponentType.objects.filter(
name=branch_component_type_name).first()
if not component_type:
error_msg = (
'The specified ReleaseComponentType "{0}" does not exist'
.format(branch_component_type_name))
raise serializers.ValidationError({'branch.type': [error_msg]})
branch_global_component_name = \
validated_data['branch']['global_component']
branch_global_component = GlobalComponent.objects.filter(
name=branch_global_component_name).first()
if not branch_global_component:
error_msg = ('The specified GlobalComponent "{0}" does not exist'
.format(branch_global_component_name))
raise serializers.ValidationError(
{'branch.global_component': [error_msg]})
branch_name = validated_data['branch']['name']
branch_critical_path = validated_data['branch'].get('critical_path')
branch = ComponentBranch.objects.filter(
name=branch_name,
type=component_type.id,
global_component=branch_global_component.id).first()
if branch:
|
ack8006/Python-mode-klen | pymode/libs/pylama/lint/extensions.py | Python | lgpl-3.0 | 738 | 0 | """ Load extensions. """
from os import listdir, path as op

CURDIR = op.dirname(__file__)

# Registry of linter instances, keyed by short name (prefix stripped).
LINTERS = dict()
PREFIX = 'pylama_'

try:
    from importlib import import_module
except ImportError:
    # Interpreter predates stdlib importlib; use the copy bundled with pylama.
    from ..libs.importlib import import_module

# Discover bundled linters: every sibling directory named 'pylama_<name>'
# is expected to expose a ``Linter`` class.
for p in listdir(CURDIR):
    if p.startswith(PREFIX) and op.isdir(op.join(CURDIR, p)):
        name = p[len(PREFIX):]
        try:
            module = import_module('.lint.%s%s' % (PREFIX, name), 'pylama')
            LINTERS[name] = getattr(module, 'Linter')()
        except ImportError:
            # A linter's own dependencies are unavailable; skip it silently.
            continue

# Also pick up third-party linters registered via setuptools entry points.
try:
    from pkg_resources import iter_entry_points
    for entry in iter_entry_points('pylama.linter'):
        LINTERS[entry.name] = entry.load()()
except ImportError:
    pass
|
ricomoss/python-april-2014 | class7/to_battle/script.py | Python | mit | 380 | 0.005263 | #!/ | usr/bin/env python
import os.path
import sys

# Make the package root (one directory above this script) importable so the
# ``to_battle`` package can be found when running this file directly.
MODULE_ROOT = os.path.join(os.path.dirname(__file__), '..')
sys.path.insert(0, MODULE_ROOT)

from to_battle.player import Hero, Villain
from to_battle.battle import Battle

if __name__ == '__main__':
    # Pit a hero against a villain and run the fight to completion.
    player1 = Hero(name='Rico')
    player2 = Villain(name='Thanos')
    battle = Battle(player1, player2)
    battle.do_battle()
|
eqrx/mauzr | mauzr/platform/cpython/__init__.py | Python | agpl-3.0 | 5,160 | 0 | """ Bootstrap the mauzr agent on cpython systems. """
import contextlib
import threading
import logging
import _thread
__author__ = "Alexander Sowitzki"
class Core:
    """ Manage program components on cpython platforms.

    The core can either be started directly by calling :func:`run` or
    by using it as a context manager. The first case blocks, the second case
    spawns a thread that is running the scheduler.

    :param suit: Suit this agent belongs to.
    :type suit: str
    :param agent: Name of the agent.
    :type agent: str
    :param instance: Instance of this agent. May be None
    :type instance: str
    :param parser: Argparse instance to use. If None, a new one will be used.
    :type parser: argparse.ArgumentParser
    """

    def __init__(self, suit, agent, instance=None, parser=None):
        self._contexts = []
        # None: not started yet; False: run() drives the scheduler inline;
        # a Thread object: scheduler runs in that background thread.
        self.scheduler_thread = None
        self._stack = contextlib.ExitStack()
        self.shutdown_event = threading.Event()
        self.scheduler = None
        self.mqtt = None
        self.telegram = None
        self.config = None
        self._setup_config(suit, agent, instance, parser)
        self._setup_logging()
        self._setup_scheduler()
        # MQTT is optional and only wired up when present in the config.
        if "mqtt" in self.config:
            self._setup_mqtt()

    @staticmethod
    def on_failure():
        """ Call when an unrecoverable failure happens.

        Shuts the program down.
        """

        # Raises KeyboardInterrupt in the main thread, unwinding run().
        _thread.interrupt_main()

    def _setup_config(self, suit, agent, instance, parser):
        # Load and parse the agent configuration (import deferred to keep
        # module import light).
        from mauzr.platform.cpython.config import Config
        self.config = Config(suit, agent, instance, parser)
        self.config.parse()

    def _setup_logging(self):
        """ Setup logging. """

        # Log level comes from config; defaults to "info".
        level = self.config.get("log_level", "info").upper()
        logging.basicConfig(level=logging.getLevelName(level),
                            format="{levelname} {asctime} {name}: {message}",
                            style="{")

    @staticmethod
    def logger(name):
        """ Create a logger instance.

        :param name: Name of the caller that wants to receive the logger.
        :type name: str
        :returns: Logger instance.
        :rtype: logging.Logger
        """

        return logging.getLogger(name)

    def add_context(self, context):
        """ Add a context to be managed by this core.

        :param context: Unit to be added
        :type context: object
        """

        self._contexts.append(context)

    def shutdown(self):
        """Ask the agent to shut down. """

        self.shutdown_event.set()

    def run(self):
        """ Setup modules and units and run the scheduler.

        Block until shutdown is requested.
        """

        # False marks "scheduler runs inline", so __enter__ skips spawning
        # the scheduler thread.
        self.scheduler_thread = False
        with self:
            with contextlib.suppress(KeyboardInterrupt):
                self.scheduler.run()

    def _run_scheduler(self):
        # Dispatch run.
        try:
            self.scheduler.run()
        except Exception:
            # Interrupt main thread
            self.on_failure()
            raise

    def __enter__(self):
        # Start thread for scheduler if not called by run,
        # start modules and units.
        try:
            for subject in self._contexts:
                self._stack.enter_context(subject)
            if self.scheduler_thread is not False:
                thread = threading.Thread(target=self._run_scheduler,
                                          name="scheduler")
                self.scheduler_thread = thread
                self.scheduler_thread.start()
            return self
        except Exception:
            # Roll back any contexts already entered on partial failure.
            self._stack.close()
            raise

    def __exit__(self, *exc_details):
        # Stop scheduler thread if existing, stop modules and units.
        self.shutdown_event.set()
        if self.scheduler_thread is not False:
            self.scheduler_thread.join()
        return self._stack.close()

    def _setup_mqtt(self, cfgbase="mqtt", **kwargs):
        """ Setup the MQTT manager and client.

        See :class:`mauzr.platform.mqtt.Manager` and
        :class:`mauzr.platform.cpython.mqtt.Client`.
        Keyword arguments given to this function are passed to both
        constructors.

        :param cfgbase: Configuration entry for this unit.
        :type cfgbase: str
        :param kwargs: Keyword arguments that will be merged into the config.
        :type kwargs: dict
        """

        from mauzr.platform.cpython.mqtt import Client
        from mauzr.platform.mqtt import Manager
        # Manager and client are cross-linked so each can reach the other.
        self.mqtt = Manager(self, cfgbase, **kwargs)
        mqtt = Client(self, cfgbase, **kwargs)
        mqtt.manager = self.mqtt
        self.mqtt.mqtt = mqtt

    def _setup_scheduler(self):
        # Setup scheduler.
        from mauzr.platform.cpython.scheduler import Scheduler
        self.scheduler = Scheduler(self.shutdown_event)

    def setup_telegram(self, *args, **kwargs):
        """ Setup a telegram bot.

        See :class:`mauzr.platform.cpython.telegram.Bot`.
        """

        from mauzr.platform.cpython.telegrambot import Bot
        self.telegram = Bot(self, *args, **kwargs)
|
d-mittal/pystruct | examples/plot_letters.py | Python | bsd-2-clause | 3,493 | 0 | """
===============================
OCR Letter sequence recognition
===============================
This example illustrates the use of a chain CRF for optical character
recognition. The example is taken from Taskar et al "Max-margin markov random
fields".
Each example consists of a handwritten word, that was presegmented into
characters. Each character is represented as a 16x8 binary image. The task is
to classify the image into one of the 26 characters a-z. The first letter of
every word was omitted as it was capitalized and the task only considers
lowercase letters.
We compare classification using a standard linear SVM that classifies
each letter individually with a chain CRF that can exploit correlations
between neighboring letters (the correlation is particularly strong
as the same words are used during training and testing).
The first figures shows the segmented letters of four words from the test set.
In set are the ground truth (green), the prediction using SVM (blue) and the
prediction using a chain CRF (red).
The second figure shows the pairwise potentials learned by the chain CRF.
The strongest patterns are "y after l" and "n after i".
There are obvious extensions that both methods could benefit from, such as
window features or non-linea | r kernels. This example is more meant to give a
demonstration | of the CRF than to show its superiority.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.svm import LinearSVC
from pystruct.datasets import load_letters
from pystruct.models import ChainCRF
from pystruct.learners import FrankWolfeSSVM
abc = "abcdefghijklmnopqrstuvwxyz"

letters = load_letters()
X, y, folds = letters['data'], letters['labels'], letters['folds']
# we convert the lists to object arrays, as that makes slicing much more
# convenient
X, y = np.array(X), np.array(y)
# Fold 1 is used for training, everything else for testing.
X_train, X_test = X[folds == 1], X[folds != 1]
y_train, y_test = y[folds == 1], y[folds != 1]

# Train linear SVM
svm = LinearSVC(dual=False, C=.1)
# flatten input: the SVM classifies letters independently, so word structure
# is discarded by stacking all letters into one big matrix
svm.fit(np.vstack(X_train), np.hstack(y_train))

# Train linear chain CRF (operates on whole word sequences)
model = ChainCRF()
ssvm = FrankWolfeSSVM(model=model, C=.1, max_iter=11)
ssvm.fit(X_train, y_train)

print("Test score with chain CRF: %f" % ssvm.score(X_test, y_test))
print("Test score with linear SVM: %f" % svm.score(np.vstack(X_test),
                                                   np.hstack(y_test)))
# plot some word sequences
n_words = 4
rnd = np.random.RandomState(1)
selected = rnd.randint(len(y_test), size=n_words)
max_word_len = max([len(y_) for y_ in y_test[selected]])
fig, axes = plt.subplots(n_words, max_word_len, figsize=(10, 10))
fig.subplots_adjust(wspace=0)
for ind, axes_row in zip(selected, axes):
    y_pred_svm = svm.predict(X_test[ind])
    y_pred_chain = ssvm.predict([X_test[ind]])[0]
    for i, (a, image, y_true, y_svm, y_chain) in enumerate(
            zip(axes_row, X_test[ind], y_test[ind], y_pred_svm, y_pred_chain)):
        # Each letter is a 16x8 binary image (see module docstring).
        a.matshow(image.reshape(16, 8), cmap=plt.cm.Greys)
        # Overlay: ground truth (green), SVM prediction (blue), CRF (red).
        a.text(0, 3, abc[y_true], color="#00AA00", size=25)
        a.text(0, 14, abc[y_svm], color="#5555FF", size=25)
        a.text(5, 14, abc[y_chain], color="#FF5555", size=25)
        a.set_xticks(())
        a.set_yticks(())
    # Blank out unused axes for words shorter than the longest one.
    for ii in range(i + 1, max_word_len):
        axes_row[ii].set_visible(False)
# The learned pairwise transition matrix is 26x26 (one row/column per
# letter), so all 26 ticks must be labeled. The original arange(25) left
# the final 'z' row/column unlabeled and raises a ValueError on modern
# matplotlib, where tick and label counts must match.
plt.matshow(ssvm.w[26 * 8 * 16:].reshape(26, 26))
plt.title("Transition parameters of the chain CRF.")
plt.xticks(np.arange(26), abc)
plt.yticks(np.arange(26), abc)
plt.show()
|
DailyActie/Surrogate-Model | 01-codes/scikit-learn-master/examples/ensemble/plot_adaboost_regression.py | Python | mit | 1,530 | 0.002614 | """
======================================
Decision Tree Regression with AdaBoost
======================================
A decision tree is boosted using the AdaBoost.R2 [1] algorithm on a 1D
sinusoidal dataset with a small amount of Gaussian noise.
299 boosts (300 decision trees) is compared with a single decision tree
regressor. As the number of boosts is increased the regressor can fit more
detail.
.. [1] H. Drucker, "Improving Regressors using Boosting Techniques", 1997.
"""
print(__doc__)
# Author: Noel Dawe <noel.dawe@gmail.com>
#
# License: BSD 3 clause
# importing necessary libraries
import matplotlib.pyplot as plt
import numpy as np
from sklearn.ensemble import AdaBoostRegressor
from sklearn.tree import DecisionTreeRegressor
# Create the dataset: a noisy two-frequency sinusoid over [0, 6].
# Fixed seed keeps the example reproducible.
rng = np.random.RandomState(1)
X = np.linspace(0, 6, 100)[:, np.newaxis]
y = np.sin(X).ravel() + np.sin(6 * X).ravel() + rng.normal(0, 0.1, X.shape[0])

# Fit regression model: a single depth-4 tree vs. 300 boosted depth-4 trees.
regr_1 = DecisionTreeRegressor(max_depth=4)
regr_2 = AdaBoostRegressor(DecisionTreeRegressor(max_depth=4),
                           n_estimators=300, random_state=rng)
regr_1.fit(X, y)
regr_2.fit(X, y)

# Predict on the training grid (this example only visualizes the fit).
y_1 = regr_1.predict(X)
y_2 = regr_2.predict(X)

# Plot the results
plt.figure()
plt.scatter(X, y, c="k", label="training samples")
plt.plot(X, y_1, c="g", label="n_estimators=1", linewidth=2)
plt.plot(X, y_2, c="r", label="n_estimators=300", linewidth=2)
plt.xlabel("data")
plt.ylabel("target")
plt.title("Boosted Decision Tree Regression")
plt.legend()
plt.show()
|
ProstoKSI/distributed-queue | distributed_queue/tests/test_serializers.py | Python | mit | 556 | 0.005396 | import unittest
from distributed_queue.serializers import BaseSerializer, JsonSerializer
class TestSerializers(unittest.TestCase):
    """Tests for the distributed_queue serializer classes."""

    def test_base_serializer(self):
        # The abstract base must refuse both directions of conversion.
        with self.assertRaises(NotImplementedError):
            BaseSerializer.dumps({})
        with self.assertRaises(NotImplementedError):
            BaseSerializer.loads("{}")

    def test_json_serializer(self):
        # A dumps/loads round trip must reproduce the original object.
        original = {"a": 1, "b": [1, 2, "3"]}
        encoded = JsonSerializer.dumps(original)
        self.assertEqual(original, JsonSerializer.loads(encoded))
|
pedohorse/hpaste | python3.7libs/hpaste/hpastecollectionwidget.py | Python | lgpl-3.0 | 12,806 | 0.003904 | import hou
from PySide2.QtCore import Slot, QSortFilterProxyModel, QRegExp, Qt
from PySide2.QtWidgets import QInputDialog, QMessageBox
from . import hpaste
from .hcollections.collectionwidget import CollectionWidget
from .hcollections.collectionbase import CollectionSyncError, CollectionItem
from .hcollections.githubcollection import GithubCollection
from .hcollections.QDoubleInputDialog import QDoubleInputDialog
from .logger import defaultLogger as log
from urllib import error # just for exception catching
# TODO: implement some kind of collection rescan
from .githubauthorizator import GithubAuthorizator
class HPasteCollectionWidget(object):
class __HPasteCollectionWidget(CollectionWidget):
def __init__(self, parent=None):
    """Collection browser specialized for HPaste node snippets."""
    super(HPasteCollectionWidget.__HPasteCollectionWidget, self).__init__(parent, metadataExposedKeys=('raw_url', 'nettype'))
    # Hide the metadata columns (1-4); only the name column stays visible.
    for x in range(1, 5):
        self.ui.mainView.horizontalHeader().hideSection(x)
    self.__nepane = None
    self.__netType = ''
    # Proxy that filters items by the 'nettype' metadata column (index 4);
    # starts wide open ("*") until a network editor is set.
    self.__nettypeFilter = QSortFilterProxyModel(self)
    self.__nettypeFilter.setFilterKeyColumn(4)
    self.__nettypeFilter.setFilterRegExp(QRegExp("*", Qt.CaseInsensitive, QRegExp.Wildcard))
    self.appendFilter(self.__nettypeFilter)
    self.accepted.connect(self.doOnAccept)
    # Re-entrancy guard for _authCallback.
    self.__insideAuthCallback = False
    # self.setProperty("houdiniStyle", True)
    ss = "QTableView{border : 0px solid; gridline-color: rgb(48,48,48)}"
    ss += "QHeaderView::section{border-style: none; border-bottom: 0px; border-right: 0px;}"
    self.setStyleSheet(ss)
    self.__savedNetworkViewPos = None
def setNetworkEditor(self, pane):
    """Remember the target network editor and restrict the item list to
    snippets matching that editor's network type.

    :param pane: a hou.NetworkEditor, or anything else to clear the target.
    """
    if not isinstance(pane, hou.NetworkEditor):
        pane = None
    self.__nepane = pane  # save to position pasted nodes in it
    if pane is None:
        # BUG FIX: the original called pane.cursorPosition() before this
        # None check, so the pane-is-None branch below could never be
        # reached — it crashed with AttributeError instead.
        self.__savedNetworkViewPos = None
        nettype = '*'
        self.__netType = ''  # Used to create new snippet types
    else:
        self.__savedNetworkViewPos = pane.cursorPosition()
        nettype = hpaste.getChildContext(pane.pwd(), hou.applicationVersion())
        self.__netType = nettype
    self.__nettypeFilter.setFilterRegExp(QRegExp(nettype, Qt.CaseInsensitive, QRegExp.Wildcard))
@Slot(object)
def doOnAccept(self, item):
    """Paste the chosen collection item into the saved network editor."""
    if item is None:
        return
    try:
        # Clear the selection first so the pasted nodes end up selected.
        try:  # >h16
            hou.clearAllSelected()
        except:  # <=h15.5
            hou.node("/obj").setSelected(False, clear_all_selected=True)
        hpaste.stringToNodes(item.content(), ne=self.__nepane, override_network_position=self.__savedNetworkViewPos)
    except RuntimeWarning as e:
        # Non-fatal issues during paste are only logged, not shown as errors.
        log('Warnings encountered during load:\n%s' % str(e), 2)
    except Exception as e:
        hou.ui.displayMessage("could not paste: %s" % str(e), severity=hou.severityType.Warning)
def _addItem(self, collection):
    """Create a new collection item from the current Houdini selection."""
    # Please, dont throw from here!
    try:
        nodes = hou.selectedItems()
    except:
        # hou.selectedItems() is unavailable in older Houdini builds
        # (presumably pre-16, mirroring the version split in doOnAccept).
        nodes = hou.selectedNodes()
    if len(nodes) == 0:
        QMessageBox.warning(self, 'not created', 'selection is empty, nothing to add')
        return
    # Keep prompting until the user cancels or supplies a non-empty name.
    while True:
        # btn,(name,desc) = (0,('1','2'))#hou.ui.readMultiInput('enter some information about new item',('name','description'),buttons=('Ok','Cancel'))
        name, desc, public, good = QDoubleInputDialog.getDoubleTextCheckbox(self, 'adding a new item to %s' % collection.name(), 'enter new item details', 'name', 'description', 'public', '', 'a snippet', False)
        if not good:
            return
        if len(name) > 0:
            break;  # validity check
    try:
        # print(name)
        # print(desc)
        # print(hpaste.nodesToString(nodes))
        self.model().addItemToCollection(collection, name, desc, hpaste.nodesToString(nodes), public, metadata={'nettype': self.__netType})
    except CollectionSyncError as e:
        QMessageBox.critical(self, 'something went wrong!', 'Server error occured: %s' % str(e))
def _changeAccess(self, index):
    """Let the user switch an item between private and public access."""
    item = index.internalPointer()
    # Dialog pre-selects the item's current access level.
    text, good = QInputDialog.getItem(None, 'modify item access', 'choose new access type:', ['private', 'public'], current=item.access() == CollectionItem.AccessType.public, editable=False)
    if not good:
        return
    newaccess = CollectionItem.AccessType.public if text == 'public' else CollectionItem.AccessType.private
    if newaccess == item.access():
        return  # nothing changed, avoid a pointless server round trip
    item.setAccess(newaccess)
def _replaceContent(self, index):
    """Overwrite an item's content with the current Houdini selection,
    after explicit user confirmation (the operation is irreversible)."""
    try:
        nodes = hou.selectedItems()
    except:
        # Fallback for Houdini builds without hou.selectedItems().
        nodes = hou.selectedNodes()
    if len(nodes) == 0:
        QMessageBox.warning(self, 'cannot replace', 'selection is empty')
        return
    item = index.internalPointer()
    good = QMessageBox.warning(self, 'sure?', 'confirm that you want to replace the content of selected item "%s". This operation can not be undone.' % item.name(), QMessageBox.Ok | QMessageBox.Cancel) == QMessageBox.Ok
    if not good:
        return
    try:
        item.setContent(hpaste.nodesToString(nodes))
    except CollectionSyncError as e:
        QMessageBox.critical(self, 'something went wrong!', 'Server error occured: %s' % str(e))
def _itemInfo(self, index):
    """Show a read-only summary dialog for the selected item."""
    item = index.internalPointer()
    accesstext = 'public' if item.access() == CollectionItem.AccessType.public else 'private'
    readonlytext = 'readonly' if item.readonly() else 'editable'
    info = 'name: %s\n%s\naccess: %s\n%s\n\ncollection id: %s\n\nmetadata:\n' % (item.name(), item.description(), accesstext, readonlytext, item.id())
    # Append all metadata key/value pairs, one per line.
    info += '\n'.join(('%s : %s' % (key, item.metadata()[key]) for key in item.metadata()))
    QMessageBox.information(self, 'item information', info)
def _renameItem(self, index):
    """Edit an item's name and description via a two-field dialog."""
    item = index.internalPointer()
    oldname = item.name()
    olddesc = item.description()
    newname, newdesc, good = QDoubleInputDialog.getDoubleText(self, 'modify item info', 'Enter new item name and description', 'name', 'description', oldname, olddesc)
    if not good:
        return
    # Only push fields that actually changed to avoid needless syncs.
    if newname != oldname:
        item.setName(newname)
    if newdesc != olddesc:
        item.setDescription(newdesc)
def _removeIcon(self, index):
    """Remove the item's icon after confirmation (irreversible)."""
    ok = QMessageBox.warning(self, 'sure?', 'confirm removing Icon. This operation can not be undone.', QMessageBox.Ok | QMessageBox.Cancel) == QMessageBox.Ok
    if ok:
        super(HPasteCollectionWidget.__HPasteCollectionWidget, self)._removeIcon(index)
def _confirmRemove(self, index):
    """Ask the user to confirm item deletion; return True when confirmed."""
    return QMessageBox.warning(self, 'sure?', 'confirm removing the item from collection. This operation can not be undone.', QMessageBox.Ok | QMessageBox.Cancel) == QMessageBox.Ok
# a callback for authoriser
def _authCallback(self, callbackinfo):
auth, public, action = callbackinfo
if self.__insideAuthCallback: return # prevent looping
self.__insideAuthCallback = True
try:
if action == 0 or (action == 2 and not auth['enabled']):
good = self.removeCollection(auth['user'])
if not good: # means something went wrong during removal attempt - probably async collection syncing problem. Try later
if public:
GithubAuthorizator.setPublicCollsctionEnabled(auth['user'], True)
|
thobbs/cassandra-dtest | compression_test.py | Python | apache-2.0 | 7,536 | 0.003583 | import os
from assertions import assert_crc_check_chance_equal
from scrub_test import TestHelper
from tools import since
class TestCompression(TestHelper):
    """SSTable compression options exercised through CQL (CASSANDRA-8384)."""

    def _get_compression_type(self, file):
        # Sniff the compressor from the first two bytes of an sstable file.
        # NOTE(review): ``.encode('hex')`` is Python-2-only; under Python 3
        # this would need binascii.hexlify — confirm the intended runtime.
        # (The parameter name also shadows the py2 builtin ``file``.)
        types = {
            '0010': 'NONE',
            '789c': 'DEFLATE'
        }
        with open(file, 'rb') as fh:
            file_start = fh.read(2)
        return types.get(file_start.encode('hex'), 'UNKNOWN')

    @since("3.0")
    def disable_compression_cql_test(self):
        """
        @jira_ticket CASSANDRA-8384
        using new cql create table syntax to disable compression
        """
        cluster = self.cluster
        cluster.populate(1).start(wait_for_binary_proto=True)
        [node] = cluster.nodelist()
        session = self.patient_cql_connection(node)
        self.create_ks(session, 'ks', 1)
        session.execute("create table disabled_compression_table (id uuid PRIMARY KEY ) WITH compression = {'enabled': false};")
        # Schema metadata is cached client-side; refresh before inspecting.
        session.cluster.refresh_schema_metadata()
        meta = session.cluster.metadata.keyspaces['ks'].tables['disabled_compression_table']
        self.assertEqual('false', meta.options['compression']['enabled'])
        for n in range(0, 100):
            session.execute("insert into disabled_compression_table (id) values (uuid());")
        sstables = self.flush('disabled_compression_table')
        sstable_paths = self.get_table_paths('disabled_compression_table')
        # Verify on disk that the flushed sstable really is uncompressed.
        found = False
        for sstable_path in sstable_paths:
            sstable = os.path.join(sstable_path, sstables['disabled_compression_table'][1])
            if os.path.exists(sstable):
                self.assertEqual('NONE', self._get_compression_type(sstable))
                found = True
        self.assertTrue(found)

    @since("3.0")
    def compression_cql_options_test(self):
        """
        @jira_ticket CASSANDRA-8384
        using new cql create table syntax to configure compression
        """
        cluster = self.cluster
        cluster.populate(1).start(wait_for_binary_proto=True)
        [node] = cluster.nodelist()
        session = self.patient_cql_connection(node)
        self.create_ks(session, 'ks', 1)
        session.execute("""
            create table compression_opts_table
            (id uuid PRIMARY KEY )
            WITH compression = {
                'class': 'DeflateCompressor',
                'chunk_length_in_kb': 256
            }
            AND crc_check_chance = 0.25;
        """)
        session.cluster.refresh_schema_metadata()
        meta = session.cluster.metadata.keyspaces['ks'].tables['compression_opts_table']
        self.assertEqual('org.apache.cassandra.io.compress.DeflateCompressor', meta.options['compression']['class'])
        self.assertEqual('256', meta.options['compression']['chunk_length_in_kb'])
        assert_crc_check_chance_equal(session, "compression_opts_table", 0.25)
        # Setting crc_check_chance via the top-level option must NOT warn.
        warn = node.grep_log("The option crc_check_chance was deprecated as a compression option.")
        self.assertEqual(len(warn), 0)
        # ...but setting it inside the compression map is deprecated and must.
        session.execute("""
            alter table compression_opts_table
            WITH compression = {
                'class': 'DeflateCompressor',
                'chunk_length_in_kb': 256,
                'crc_check_chance': 0.6
            }
        """)
        warn = node.grep_log("The option crc_check_chance was deprecated as a compression option.")
        self.assertEqual(len(warn), 1)
        # check metadata again after crc_check_chance_update
        session.cluster.refresh_schema_metadata()
        meta = session.cluster.metadata.keyspaces['ks'].tables['compression_opts_table']
        self.assertEqual('org.apache.cassandra.io.compress.DeflateCompressor', meta.options['compression']['class'])
        self.assertEqual('256', meta.options['compression']['chunk_length_in_kb'])
        assert_crc_check_chance_equal(session, "compression_opts_table", 0.6)
        for n in range(0, 100):
            session.execute("insert into compression_opts_table (id) values (uuid());")
        sstables = self.flush('compression_opts_table')
        sstable_paths = self.get_table_paths('compression_opts_table')
        # Verify on disk that the flushed sstable is DEFLATE-compressed.
        found = False
        for sstable_path in sstable_paths:
            sstable = os.path.join(sstable_path, sstables['compression_opts_table'][1])
            if os.path.exists(sstable):
                self.assertEqual('DEFLATE', self._get_compression_type(sstable))
                found = True
        self.assertTrue(found)

    @since("3.0")
    def compression_cql_disabled_with_alter_test(self):
        """
        @jira_ticket CASSANDRA-8384
        starting with compression enabled then disabling it
        """
        cluster = self.cluster
        cluster.populate(1).start(wait_for_binary_proto=True)
        [node] = cluster.nodelist()
        session = self.patient_cql_connection(node)
        self.create_ks(session, 'ks', 1)
        session.execute("""
            create table start_enabled_compression_table
            (id uuid PRIMARY KEY )
            WITH compression = {
                'class': 'SnappyCompressor',
                'chunk_length_in_kb': 256
            }
            AND crc_check_chance = 0.25;
        """)
        meta = session.cluster.metadata.keyspaces['ks'].tables['start_enabled_compression_table']
        self.assertEqual('org.apache.cassandra.io.compress.SnappyCompressor', meta.options['compression']['class'])
        self.assertEqual('256', meta.options['compression']['chunk_length_in_kb'])
        assert_crc_check_chance_equal(session, "start_enabled_compression_table", 0.25)
        session.execute("alter table start_enabled_compression_table with compression = {'enabled': false};")
        session.cluster.refresh_schema_metadata()
        meta = session.cluster.metadata.keyspaces['ks'].tables['start_enabled_compression_table']
        self.assertEqual('false', meta.options['compression']['enabled'])

    @since("3.0")
    def compression_cql_enabled_with_alter_test(self):
        """
        @jira_ticket CASSANDRA-8384
        starting with compression disabled and enabling it
        """
        cluster = self.cluster
        cluster.populate(1).start(wait_for_binary_proto=True)
        [node] = cluster.nodelist()
        session = self.patient_cql_connection(node)
        self.create_ks(session, 'ks', 1)
        session.execute("create table start_disabled_compression_table (id uuid PRIMARY KEY ) WITH compression = {'enabled': false};")
        meta = session.cluster.metadata.keyspaces['ks'].tables['start_disabled_compression_table']
        self.assertEqual('false', meta.options['compression']['enabled'])
        session.execute("""alter table start_disabled_compression_table
            WITH compression = {
                'class': 'SnappyCompressor',
                'chunk_length_in_kb': 256
            } AND crc_check_chance = 0.25;""")
        session.cluster.refresh_schema_metadata()
        meta = session.cluster.metadata.keyspaces['ks'].tables['start_disabled_compression_table']
        self.assertEqual('org.apache.cassandra.io.compress.SnappyCompressor', meta.options['compression']['class'])
        self.assertEqual('256', meta.options['compression']['chunk_length_in_kb'])
        assert_crc_check_chance_equal(session, "start_disabled_compression_table", 0.25)
|
MridulS/BinPy | BinPy/examples/source/Gates/NOT.py | Python | bsd-3-clause | 731 | 0.00684 |
# coding: utf-8

# Worked examples for the BinPy NOT gate class.

from __future__ import print_function
from BinPy.Gates import *

# Build a NOT gate whose input starts at 0.
not_gate = NOT(0)

# Output of the NOT gate
print(not_gate.output())

# Input is changed to 1
not_gate.setInput(1)

# Inspect the current input states
print(not_gate.getInputStates())

# New output of the NOT gate
print(not_gate.output())

# Using Connectors as the input lines: wire the first gate's output into a
# Connector, then feed that Connector to a second NOT gate.
link = Connector()
not_gate.setOutput(link)
chained_gate = NOT(link)

# Output of the chained gate
print(chained_gate.output())

# Information about the first gate instance
print(not_gate)
|
josenavas/qiime | scripts/clean_raxml_parsimony_tree.py | Python | gpl-2.0 | 3,356 | 0.001788 | #!/usr/bin/env python
# File created on 10 Nov 2011
from __future__ import division
__author__ = "Jesse Stombaugh"
__copyright__ = "Copyright 2011, The QIIME project"
__credits__ = ["Jesse Stombaugh"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Jesse Stombaugh"
__email__ = "jesse.stombaugh@colorado.edu"
from qiime.util import parse_command_line_parameters, make_option
from cogent.parse.tree import DndParser
from cogent.core.tree import PhyloNode
from qiime.clean_raxml_parsimony_tree import decorate_numtips, decorate_depth,\
get_insert_dict, drop_duplicate_nodes
# Valid values for the -s/--scoring_method option.
scoring_methods = ['depth', 'numtips']

# QIIME script interface definition: usage examples, required and optional
# options consumed by parse_command_line_parameters() in main().
script_info = {}
script_info['brief_description'] = "Remove duplicate tips from Raxml Tree"
script_info[
    'script_description'] = "This script allows the user to remove specific duplicate tips from a Raxml tree."
script_info['script_usage'] = []
script_info['script_usage'].append(
    ("Example (depth):",
     "For this case the user can pass in input Raxml tree, duplicate tips, and define an output filepath. When using the depth option, only the deepest replicate is kept. ",
     " %prog -i raxml_v730_final_placement.tre -t 6 -o raxml_v730_final_placement_depth.tre"))
script_info['script_usage'].append(
    ("Example (numtips):",
     "For this case the user can pass in input Raxml tree, duplicate tips, and define an output filepath. When using the numtips option, the replicate with the fewest siblings is kept. ",
     " %prog -i raxml_v730_final_placement.tre -t 6 -o raxml_v730_final_placement_numtips.tre -s numtips"))
script_info['output_description'] = ""
script_info['required_options'] = [
    make_option(
        '-i',
        '--input_tree',
        type="existing_filepath",
        help='the input raxml parsimony tree'),
    make_option(
        '-t',
        '--tips_to_keep',
        type="string",
        help='the input tips to score and retain (comma-separated list)'),
    make_option(
        '-o',
        '--output_fp',
        type="new_filepath",
        help='the output filepath'),
]
script_info['optional_options'] = [
    make_option(
        '-s',
        '--scoring_method',
        type="choice",
        help='the scoring method either depth or numtips [default: %default]',
        default='depth',
        choices=scoring_methods),
]
script_info['version'] = __version__
def main():
    """Drop duplicate inserted tips from a RAxML parsimony tree.

    Scores every node by the chosen method, keeps the best-scoring replicate
    of each requested tip, and writes the cleaned tree as newick.
    """
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    # Command-line options.
    tree_path = opts.input_tree
    keep_tips = opts.tips_to_keep.split(',')
    method = opts.scoring_method

    # Parse the input newick tree.
    tree = DndParser(open(tree_path, 'U'), constructor=PhyloNode)

    # Decorate scores onto the tree (either by depth or by number of
    # children) -- the parser restricts `method` to these two choices.
    if method == 'depth':
        scored_tree = decorate_depth(tree)
    elif method == 'numtips':
        scored_tree = decorate_numtips(tree)

    # Locate the nodes for the inserted sequences, then drop all but the
    # best-scoring duplicate of each.
    insert_nodes = get_insert_dict(scored_tree, set(keep_tips))
    cleaned_tree = drop_duplicate_nodes(scored_tree, insert_nodes)

    # Write out the resulting tree.
    out_file = open(opts.output_fp, 'w')
    out_file.write(cleaned_tree.getNewick(with_distances=True))
    out_file.close()


if __name__ == "__main__":
    main()
|
bruecksen/isimip | isi_mip/climatemodels/migrations/0047_auto_20170118_1428.py | Python | mit | 45,674 | 0.003416 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-18 13:28
from __future__ import unicode_literals
from django.db import migrations, models
# Help text shared by a subset of the altered fields.
_CALIBRATION_HELP = 'Methods for model calibration and validation'

# (field name, verbose name, attach the shared calibration help text?)
_ALTERED_TEXT_FIELDS = (
    ('calibrated_values', 'Calibrated values', False),
    ('co2_effects', 'CO2 Effects', True),
    ('criteria_for_evaluation', 'Criteria for evaluation (validation)', False),
    ('crop_cultivars', 'Crop cultivars', False),
    ('crop_phenology', 'Crop phenology', True),
    ('crop_residue', 'Crop residue', False),
    ('crops', 'Crops', False),
    ('evapo_transpiration', 'Evapo-transpiration', True),
    ('fertilizer_application', 'Fertilizer application', False),
    ('initial_crop_residue', 'Initial crop residue', False),
    ('initial_soil_C_and_OM', 'Initial soil C and OM', False),
    ('initial_soil_nitrate_and_ammonia', 'Initial soil nitrate and ammonia', False),
    ('initial_soil_water', 'Initial soil water', False),
    ('irrigation', 'Irrigation', False),
    ('land_coverage', 'Land cover', False),
    ('lead_area_development', 'Lead area development', True),
    ('light_interception', 'Light interception', True),
    ('light_utilization', 'Light utilization', True),
    ('output_variable_and_dataset', 'Output variable and dataset for calibration validation', False),
    ('parameters_number_and_description', 'Parameters, number and description', False),
    ('planting_date_decision', 'Planting date decision', False),
    ('planting_density', 'Planting density', False),
    ('root_distribution_over_depth', 'Root distribution over depth', True),
    ('soil_CN_modeling', 'Soil CN modeling', True),
    ('spatial_scale_of_calibration_validation', 'Spatial scale of calibration/validation', False),
    ('stresses_involved', 'Stresses involved', True),
    ('temporal_scale_of_calibration_validation', 'Temporal scale of calibration/validation', False),
    ('type_of_heat_stress', 'Type of heat stress', True),
    ('type_of_water_stress', 'Type of water stress', True),
    ('water_dynamics', 'Water dynamics', True),
    ('yield_formation', 'Yield formation', True),
)


def _text_field(verbose_name, with_help):
    """Build the nullable/blank TextField shared by every AlterField below."""
    kwargs = {'blank': True, 'default': '', 'null': True,
              'verbose_name': verbose_name}
    if with_help:
        kwargs['help_text'] = _CALIBRATION_HELP
    return models.TextField(**kwargs)


class Migration(migrations.Migration):
    """Convert the listed 'agriculture' columns to optional TextFields."""

    dependencies = [
        ('climatemodels', '0046_auto_20170117_1034'),
    ]

    # Every operation is the same AlterField on the 'agriculture' model,
    # so generate them from the table above instead of spelling each out.
    operations = [
        migrations.AlterField(
            model_name='agriculture',
            name=field_name,
            field=_text_field(verbose_name, with_help),
        )
        for (field_name, verbose_name, with_help) in _ALTERED_TEXT_FIELDS
    ]
zephiro/django-boilerplate | {{cookiecutter.repo_name}}/{{cookiecutter.project_name}}/manage.py | Python | mit | 250 | 0 | #!/usr/bin/env python
import os
import sys


def _run():
    """Point Django at this project's settings, then dispatch the CLI."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.project")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    _run()
|
itsallvoodoo/csci-school | CSCI220/Week 08 - MAR05-09/prog6_7.py | Python | apache-2.0 | 489 | 0.00409 | # prog6_7.py
# This program calculates total wages in a week given hours worked and pay rate
# <Chad Hobbs>


def compute_wages(hours, rate):
    """Return gross weekly pay; hours beyond 40 earn time-and-a-half."""
    if hours > 40:
        return 40 * rate + (hours - 40) * rate * 1.5
    return hours * rate


def main():  # Main program
    """Prompt for hours worked and pay rate, then print the week's wages."""
    # float() replaces eval(): eval executes arbitrary expressions typed by
    # the user, which is unsafe and unnecessary for numeric input.
    hrs = float(input("How many hours have been worked this week?: "))
    rate = float(input("What is the pay rate for this employee?: "))
    wages = compute_wages(hrs, rate)
    print()
    print("The wages for this week is ${0:0.2f}.".format(wages))


# Guard so importing this module (e.g. for testing) does not prompt for input.
if __name__ == "__main__":
    main()
|
Kiddinglife/iamhungry | iamhungry/app/views.py | Python | lgpl-3.0 | 3,665 | 0.024557 | """
Definition of views.
"""
from django.shortcuts import render
from django.http import HttpRequest
from django.template import RequestContext
from datetime import datetime |
def _render_simple(request, template, extra_context):
    """Render *template* with the per-request context shared by every view.

    Every view in this module differs only in the template name and a couple
    of context values; the current year is always included.
    """
    assert isinstance(request, HttpRequest)
    context = {'year': datetime.now().year}
    context.update(extra_context)
    return render(request,
                  template,
                  context_instance=RequestContext(request, context))


# Context used by every placeholder view below.
_PLACEHOLDER_CONTEXT = {
    'title': 'About',
    'message': 'Your application description page.',
}


def home(request):
    """Renders the home page."""
    return _render_simple(request, 'app/index.html', {'title': 'Home Page'})


def contact(request):
    """Renders the contact page."""
    return _render_simple(request, 'app/contact.html',
                          {'title': 'Contact',
                           'message': 'Your contact page.'})


def about(request):
    """Renders the about page."""
    return _render_simple(request, 'app/about.html', _PLACEHOLDER_CONTEXT)


# NOTE(review): all views below rendered the About template with the About
# context in the original code -- they look like placeholders awaiting real
# templates. That behavior is preserved deliberately.

def faq(request):
    """Placeholder: currently renders the about page."""
    return _render_simple(request, 'app/about.html', _PLACEHOLDER_CONTEXT)


def signup(request):
    """Placeholder: currently renders the about page."""
    return _render_simple(request, 'app/about.html', _PLACEHOLDER_CONTEXT)


def blog(request):
    """Placeholder: currently renders the about page."""
    return _render_simple(request, 'app/about.html', _PLACEHOLDER_CONTEXT)


def termofuse(request):
    """Placeholder: currently renders the about page."""
    return _render_simple(request, 'app/about.html', _PLACEHOLDER_CONTEXT)


def addyourrestaurant(request):
    """Placeholder: currently renders the about page."""
    return _render_simple(request, 'app/about.html', _PLACEHOLDER_CONTEXT)


def career(request):
    """Placeholder: currently renders the about page."""
    return _render_simple(request, 'app/about.html', _PLACEHOLDER_CONTEXT)


def privacy(request):
    """Placeholder: currently renders the about page."""
    return _render_simple(request, 'app/about.html', _PLACEHOLDER_CONTEXT)
asottile/pushmanager | pushmanager/tests/test_template_pushes.py | Python | apache-2.0 | 835 | 0.007186 | import testify as T
from pushmanager.testing.testservlet import TemplateTestCase
class PushesTemplateTest(TemplateTestCase):
    """Template rendering tests for the pushes list page."""

    authenticated = True
    pushes_page = 'pushes.html'
    new_push_page = 'new-push.html'

    def render_pushes_page(self, page_title='Pushes', pushes=None,
                           pushes_per_page=50, last_push=None):
        """Render pushes.html with sensible defaults for every argument.

        ``pushes`` defaults to a fresh empty list on each call; the original
        used a mutable default argument (``pushes=[]``), which is shared
        between calls and a classic Python pitfall.
        """
        return self.render_etree(
            self.pushes_page,
            page_title=page_title,
            pushes=[] if pushes is None else pushes,
            rpp=pushes_per_page,
            last_push=last_push,
        )

    def test_include_new_push(self):
        """The rendered pushes page must contain exactly one push-info form."""
        tree = self.render_pushes_page()
        matching_forms = [form for form in tree.iter('form')
                          if form.attrib['id'] == 'push-info-form']
        T.assert_equal(len(matching_forms), 1)


if __name__ == '__main__':
    T.run()
|
grahamking/lintswitch | lintswitch/main.py | Python | gpl-3.0 | 4,151 | 0 | """ lintswitch lints your code in the background.
http://github.com/grahamking/lintswitch
"""
import sys
import socket
import logging
import os
import os.path
import argparse
from threading import Thread
try:
# python 3
from queue import Queue
except ImportError:
# python 2
from Queue import Queue
from lintswitch import checkers, emitters, http_server
DESC = 'Linting server - https://github.com/grahamking/lintswitch'
LOG = logging.getLogger(__name__)
def main():
    """Start here.

    Parses command-line arguments, configures logging, starts the checker
    worker thread and the HTTP result server, then listens for lint requests
    from editor plugins until interrupted. Returns a process exit code.
    """
    parser = make_parser()
    args = parser.parse_args()
    if args.version:
        from lintswitch import __version__
        print(__version__)
        return 0
    log_params = {'level': args.loglevel}
    if args.logfile:
        log_params['filename'] = args.logfile
    logging.basicConfig(**log_params)  # pylint: disable=W0142
    LOG.debug('lintswitch start')
    work_queue = Queue()
    # Daemon threads: they must not keep the process alive after Ctrl-C.
    check_proc = Thread(target=worker,
                        args=(work_queue, args))
    check_proc.daemon = True
    check_proc.start()
    server = Thread(target=http_server.http_server,
                    args=(args.httpport,))
    server.daemon = True
    server.start()
    # Listen for connections from vim (or other) plugin
    listener = socket.socket()
    listener.bind(('127.0.0.1', args.lintport))
    listener.listen(10)
    try:
        main_loop(listener, work_queue)
    except KeyboardInterrupt:
        listener.close()
        print('Bye')
    return 0
def make_parser():
    """Build the command-line argument parser for the lintswitch daemon.

    Defaults are shown in --help automatically via the formatter class.
    """
    parser = argparse.ArgumentParser(
        description=DESC,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    add = parser.add_argument
    add('-v', '--version',
        action='store_true',
        help='Print version info and quit')
    add('--loglevel',
        default='DEBUG',
        choices=['DEBUG', 'INFO', 'WARN', 'ERROR', 'FATAL'],
        help='One of DEBUG, INFO, WARN, ERROR or FATAL')
    add('--logfile',
        default=None,
        help='Full path of log file. Defaults to stdout.')
    add('--lintport',
        type=int,
        default=4008,
        help='Port to listen for lint requests')
    add('--httpport',
        type=int,
        default=8008,
        help='Port for web browser interface')
    add('--pymetrics_warn',
        type=int,
        default=5,
        help='Cyclomatic complexity considered a warning, per function')
    add('--pymetrics_error',
        type=int,
        default=10,
        help='Cyclomatic complexity considered an error, per function')
    return parser
def main_loop(listener, work_queue):
    """Wait for connections and process them.

    @param listener: a socket.socket, open and listening.

    Each client is expected to send one payload (a filename) and close;
    the payload is handed to the checker thread via work_queue.
    """
    while True:
        conn, _ = listener.accept()
        # Read until the client closes its end of the connection.
        data = conn.makefile().read()
        conn.close()
        work_queue.put(data)
def worker(work_queue, args):
    """Takes filename from queue, checks them and displays (emit) result.

    Runs forever (intended as a daemon thread). Blank queue entries and
    files that produce no check result are skipped silently.
    """
    while 1:
        # Blocks until main_loop enqueues the next filename.
        filename = work_queue.get()
        filename = filename.strip()
        if not filename:
            continue
        check_result = checkers.check(filename, args)
        if not check_result:
            continue
        errors, warnings, summaries = check_result
        html = emitters.emit(filename, errors, warnings, summaries)
        # Publish the rendered HTML and wake every waiting HTTP client.
        http_server.SHARED_CONDITION.acquire()
        http_server.SHARED_RESULT = html
        http_server.SHARED_CONDITION.notifyAll()
        http_server.SHARED_CONDITION.release()
def find(name):
    """Return the full path of *name* found on the system PATH, or None.

    Unlike shutil.which, this tests only for existence, not executability,
    preserving the original behavior.
    """
    for directory in syspath():
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return None


def syspath():
    """OS path as array of strings.

    Defaults to '' when PATH is unset: the original called .split(':') on
    None in that case and raised AttributeError.
    """
    return os.getenv('PATH', '').split(':')


if __name__ == '__main__':
    # Propagate main()'s return code as the process exit status.
    sys.exit(main())
|
vadosl/photorganizer | photorganizer/photo/migrations/0001_initial.py | Python | mit | 8,572 | 0.007116 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Album'
db.create_table(u'photo_album', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=60)),
('public', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal(u'photo', ['Album'])
# Adding model 'Tag'
db.create_table(u'photo_tag', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('tag', self.gf('django.db.models.fields.CharField')(max_length=50)),
))
db.send_create_signal(u'photo | ', ['Tag'])
# Adding model 'Image | '
db.create_table(u'photo_image', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=60, null=True, blank=True)),
('image', self.gf('django.db.models.fields.files.FileField')(max_length=100)),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('rating', self.gf('django.db.models.fields.IntegerField')(default=50)),
('width', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('height', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('thumbnail2', self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True)),
))
db.send_create_signal(u'photo', ['Image'])
# Adding M2M table for field tags on 'Image'
m2m_table_name = db.shorten_name(u'photo_image_tags')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('image', models.ForeignKey(orm[u'photo.image'], null=False)),
('tag', models.ForeignKey(orm[u'photo.tag'], null=False))
))
db.create_unique(m2m_table_name, ['image_id', 'tag_id'])
# Adding M2M table for field albums on 'Image'
m2m_table_name = db.shorten_name(u'photo_image_albums')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('image', models.ForeignKey(orm[u'photo.image'], null=False)),
('album', models.ForeignKey(orm[u'photo.album'], null=False))
))
db.create_unique(m2m_table_name, ['image_id', 'album_id'])
def backwards(self, orm):
# Deleting model 'Album'
db.delete_table(u'photo_album')
# Deleting model 'Tag'
db.delete_table(u'photo_tag')
# Deleting model 'Image'
db.delete_table(u'photo_image')
# Removing M2M table for field tags on 'Image'
db.delete_table(db.shorten_name(u'photo_image_tags'))
# Removing M2M table for field albums on 'Image'
db.delete_table(db.shorten_name(u'photo_image_albums'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'photo.album': {
'Meta': {'object_name': 'Album'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '60'})
},
u'photo.image': {
'Meta': {'object_name': 'Image'},
'albums': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['photo.Album']", 'symmetrical': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['photo.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
'thumbnail2': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
|
spbguru/repo1 | nupic/support/__init__.py | Python | gpl-3.0 | 27,136 | 0.011829 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
## @file
Internal package.
Package containing modules that are used internally by Numenta Python
tools and plugins to extend standard library functionality.
These modules should NOT be used by client applications.
The following modules are included:
nupic.support.paths
Module containing filesystem path manipulation utilities.
nupic.support.serialization
Module containing Python object serialization (pickling and unpickling) and
versioning utilities.
nupic.support.compress
Module containing Python object encoding and compression utilities.
nupic.support.processes
Module containing operating system process management utilities and wrappers.
nupic.support.output
Module containing operating system interprocess communication utilities and
wrappers.
nupic.support.diff
Module containing file difference calculation wrappers.
nupic.support.vision
Temporary location for vision framework before the move to nupic.vision.
nupic.support.deprecate
Contains the deprecate decorator used for automatic handling of deprecated
methods.
nupic.support.memchecker
Contains the MemChecker class, for checking physical memory and monitoring
memory usage.
nupic.support.imagesearch
Contains functions for searching for images on the web and downloading them.
"""
from __future__ import with_statement
# Standard imports
import os
import sys
import inspect
import logging
import logging.config
import logging.handlers
from platform import python_version
import struct
from StringIO import StringIO
import time
import traceback
from configuration import Configuration
from nu | pic.support.fshelpers import makeDirectoryFromAbsolutePath
# Local imports
#############################################################################
def getCallerInfo(depth=2):
  """Utility function to get information about function callers

  Returns the tuple (function/method name, filename, class). The class is
  None when the caller is a plain function rather than an object method; it
  is inferred from the class of the frame's first argument (usually 'self').

  depth: how far back in the callstack to go to extract the caller info
  """
  frame = sys._getframe(depth)
  func_name = frame.f_code.co_name
  source_file = frame.f_code.co_filename
  caller_class = None
  arg_info = inspect.getargvalues(frame)
  if len(arg_info[0]) > 0:
    first_arg = arg_info[0][0]  # potentially the 'self' arg if its a method
    caller_class = arg_info[3][first_arg].__class__.__name__
  return (func_name, source_file, caller_class)
#############################################################################
def title(s=None, additional='', stream=sys.stdout, frame='-'):
  """Utility function to display nice titles

  It automatically extracts the name of the function/method it is called from
  and you can add additional text. title() will then print the name
  of the function/method and the additional text surrounded by two lines
  of dashes. If you don't want the name of the function, you can provide
  alternative text (regardless of the additional text)

  @param s - text to display, uses the function name and arguments by default
  @param additional - extra text to display (not needed if s is not None)
  @param stream - the stream to print to. By default goes to standard output
  @param frame - the character used for the over and under line. Default is '-'
    NOTE(review): `frame` is accepted but never used below -- the frame is
    always drawn with '-'.

  Examples:
    def foo():
      title()
    displays:
      ---
      foo
      ---
  """
  if s is None:
    callable_name, file_name, class_name = getCallerInfo(2)
    s = callable_name
    if class_name is not None:
      # NOTE(review): method_name is assigned but never used.
      method_name = s
      s = class_name + '.' + callable_name
  # Frame width matches the longest line of the combined text.
  lines = (s + additional).split('\n')
  length = max(len(line) for line in lines)
  # Python 2 print-to-stream syntax; this module targets Python 2.
  print >> stream, '-' * length
  print >> stream, s + additional
  print >> stream, '-' * length
#############################################################################
def bringToFront(title):
    """Bring a top-level window with a given title
    to the front on Windows.

    Does nothing on non-Windows platforms. Raises Exception when no
    window with that exact title exists.
    """
    if sys.platform != 'win32':
        return
    import ctypes
    user32 = ctypes.windll.user32
    window_handle = user32.FindWindowA(None, title)
    if window_handle == 0:
        raise Exception('There is no window titled: "%s"' % title)
    user32.SetForegroundWindow(window_handle)
#############################################################################
def getUserDocumentsPath():
    """
    Find the user's "Documents" directory (OS X), "My Documents" directory
    (Windows), or home directory (Unix).

    Returns the path as a string, or None on Unix when HOME is unset.
    """
    # OS X and Windows code from:
    # http://www.blueskyonmars.com/2005/08/05
    # /finding-a-users-my-documents-folder-on-windows/
    # Alternate Windows code from:
    # http://bugs.python.org/issue1763
    if sys.platform.startswith('win'):
        if sys.platform.startswith('win32'):
            # Try the primary method on 32-bit windows
            try:
                from win32com.shell import shell
                alt = False
            except ImportError:
                try:
                    import ctypes
                    dll = ctypes.windll.shell32
                    alt = True
                except Exception:
                    # BUGFIX: was a bare ``except:``, which would also
                    # swallow SystemExit/KeyboardInterrupt.
                    raise Exception("Could not find 'My Documents'")
        else:
            # Use the alternate method on 64-bit Windows.
            # BUGFIX: ``ctypes``/``dll`` were never bound on this path in
            # the original, which made the dll call below raise NameError.
            import ctypes
            dll = ctypes.windll.shell32
            alt = True
        if not alt:
            # Primary method using win32com
            df = shell.SHGetDesktopFolder()
            pidl = df.ParseDisplayName(0, None,
                        "::{450d8fba-ad25-11d0-98a8-0800361b1103}")[1]
            path = shell.SHGetPathFromIDList(pidl)
        else:
            # Alternate method using ctypes rather than win32com.
            # 0x0005 is CSIDL_PERSONAL, i.e. "My Documents".
            buf = ctypes.create_string_buffer(300)
            dll.SHGetSpecialFolderPathA(None, buf, 0x0005, False)
            path = buf.value
    elif sys.platform.startswith('darwin'):
        # NOTE(review): the Carbon modules are unavailable on Python 3 and
        # modern macOS; presumably this targets legacy systems — confirm.
        from Carbon import Folder, Folders
        folderref = Folder.FSFindFolder(Folders.kUserDomain,
                                        Folders.kDocumentsFolderType,
                                        False)
        path = folderref.as_pathname()
    else:
        path = os.getenv('HOME')
    return path
#############################################################################
def getArgumentDescriptions(f):
"""
Get the arguments, default values, and argument descriptions for a function.
Returns a list of tuples: (argName, argDescription, defaultValue). If an
argument has no default value, the tuple is only two elements long (as None
cannot be used, since it could be a default value itself).
Parses the argument descriptions out of the function docstring, using a
format something lke this:
[junk]
argument_name: description...
description...
description...
[junk]
[more arguments]
It will find an argument as long as the exact argument name starts the line.
It will then strip a trailing colon, if present, then strip the rest of the
line and use it to start the description. It will then strip and append any
subsequent lines with a greater indent level than the original argument name.
"""
# Get the argument names and default values
argspec = insp |
mypaint/mypaint | gui/drawwindow.py | Python | gpl-2.0 | 34,612 | 0.000751 | # -*- coding: utf-8 -*-
#
# This file is part of MyPaint.
# Copyright (C) 2007-2019 by the MyPaint Development Team.
# Copyright (C) 2007-2014 by Martin Renold <martinxyz@gmx.ch>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Main drawing window.
Painting is done in tileddrawwidget.py.
"""
## Imports
from __future__ import division, print_function
import os
import os.path
import webbrowser
from warnings import warn
import logging
import math
import xml.etree.ElementTree as ET
from lib.gibindings import Gtk
from lib.gibindings import Gdk
from . import compatibility
from . import historypopup
from . import stategroup
from . import colorpicker # noqa: F401 (registration of GObject classes)
from . import windowing # noqa: F401 (registration of GObject classes)
from . import toolbar
from . import dialogs
from . import layermodes # noqa: F401 (registration of GObject classes)
from . import quickchoice
import gui.viewmanip # noqa: F401 (registration of GObject classes)
import gui.layermanip # noqa: F401 (registration of GObject classes)
import gui.brushmanip # noqa: F401
from lib.color import HSVColor
from . import uicolor
import gui.picker
import gui.footer
from . import brushselectionwindow # noqa: F401 (registration)
from .overlays import LastPaintPosOverlay
from .overlays import ScaleOverlay
from .framewindow import FrameOverlay
from .symmetry import SymmetryOverlay
import gui.tileddrawwidget
import gui.displayfilter
import gui.meta
import lib.xml
import lib.glib
from lib.gettext import | gettext as _
from lib.gettext import C_
logger = logging.getLogger(__name__)
## Module constants
BRUSHPACK_URI = 'https://github.com/mypaint/mypaint/wiki/Brush-Packages'
## Class definitions
class DrawWindow (Gtk.Window):
"""Main drawing window"""
## Class configuration
__gtype_name__ = 'MyPaintDrawWindow'
_MODE_ICON_TEMPLATE = "<b>{name}</b>\n{description}"
#: Constructor callables and canned args for named quick chooser |
#: instances. Used by _get_quick_chooser().
_QUICK_CHOOSER_CONSTRUCT_INFO = {
"BrushChooserPopup": (
quickchoice.BrushChooserPopup, [],
),
"ColorChooserPopup": (
quickchoice.ColorChooserPopup, [],
),
"ColorChooserPopupFastSubset": (
quickchoice.ColorChooserPopup, ["fast_subset", True],
),
}
## Initialization and lifecycle
    def __init__(self):
        """Set up the main window and defer app-dependent setup to realize.

        Only wiring that is safe before the application is fully built
        happens here; everything else runs in ``_realize_cb``.
        """
        super(DrawWindow, self).__init__()
        # Imported here rather than at module level, presumably to avoid
        # an import cycle with gui.application — confirm.
        import gui.application
        app = gui.application.get_app()
        self.app = app
        self.app.kbm.add_window(self)
        # Window handling
        self._updating_toggled_item = False
        self.is_fullscreen = False
        # Enable drag & drop (file URI lists and colour payloads)
        drag_targets = [
            Gtk.TargetEntry.new("text/uri-list", 0, 1),
            Gtk.TargetEntry.new("application/x-color", 0, 2)
        ]
        drag_flags = (
            Gtk.DestDefaults.MOTION |
            Gtk.DestDefaults.HIGHLIGHT |
            Gtk.DestDefaults.DROP)
        drag_actions = Gdk.DragAction.DEFAULT | Gdk.DragAction.COPY
        self.drag_dest_set(drag_flags, drag_targets, drag_actions)
        # Connect events
        self.connect('delete-event', self.quit_cb)
        self.connect("drag-data-received", self._drag_data_received_cb)
        self.connect("window-state-event", self.window_state_event_cb)
        self.connect("button-press-event", self._button_press_cb)
        # Deferred setup: _realize_cb runs once, guarded by this flag.
        self._done_realize = False
        self.connect("realize", self._realize_cb)
        self.app.filehandler.current_file_observers.append(self.update_title)
        # Named quick chooser instances, created lazily.
        self._quick_choosers = {}
        # Park the focus on the main tdw rather than on the toolbar. Default
        # activation doesn't really mean much for MyPaint's main window, so
        # it's safe to do this and it looks better.
        # self.main_widget.set_can_default(True)
        # self.main_widget.set_can_focus(True)
        # self.main_widget.grab_focus()
    def _button_press_cb(self, window, event):
        # Any click in the window drops keyboard focus from whatever
        # widget currently holds it.
        windowing.clear_focus(window)
    def _realize_cb(self, drawwindow):
        """One-shot deferred setup, run on the first "realize" signal.

        Anything that needs ``self.app`` to be fully initialized lives
        here instead of ``__init__``.
        """
        # Deferred setup: anything that needs to be done when self.app is fully
        # initialized.
        if self._done_realize:
            return
        self._done_realize = True
        doc = self.app.doc
        tdw = doc.tdw
        assert tdw is self.app.builder.get_object("app_canvas")
        # Canvas overlays: document frame and symmetry axis indicators.
        tdw.display_overlays.append(FrameOverlay(doc))
        tdw.display_overlays.append(SymmetryOverlay(doc))
        self.update_overlays()
        self._init_actions()
        kbm = self.app.kbm
        kbm.add_extra_key('Menu', 'ShowPopupMenu')
        kbm.add_extra_key('Tab', 'FullscreenAutohide')
        self._init_stategroups()
        self._init_menubar()
        self._init_toolbars()
        topbar = self.app.builder.get_object("app_topbar")
        topbar.menubar = self.menubar
        topbar.toolbar1 = self._toolbar1
        topbar.toolbar2 = self._toolbar2
        # Workspace setup
        ws = self.app.workspace
        ws.tool_widget_added += self.app_workspace_tool_widget_added_cb
        ws.tool_widget_removed += self.app_workspace_tool_widget_removed_cb
        # Footer bar updates
        self.app.brush.observers.append(self._update_footer_color_widgets)
        tdw.transformation_updated += self._update_footer_scale_label
        doc.modes.changed += self._modestack_changed_cb
        # Status bar context used to report the active input mode.
        context_id = self.app.statusbar.get_context_id("active-mode")
        self._active_mode_context_id = context_id
        self._update_status_bar_mode_widgets(doc.modes.top)
        mode_img = self.app.builder.get_object("app_current_mode_icon")
        mode_img.connect("query-tooltip", self._mode_icon_query_tooltip_cb)
        mode_img.set_has_tooltip(True)
        # Update picker action sensitivity whenever layers come and go.
        layerstack = doc.model.layer_stack
        layerstack.layer_inserted += self._update_layer_pick_action
        layerstack.layer_deleted += self._update_layer_pick_action
    def _init_actions(self):
        """Attach extra state to the actions declared in resources.xml."""
        # Actions are defined in resources.xml.
        # all we need to do here is connect some extra state management.
        ag = self.action_group = self.app.builder.get_object("WindowActions")
        self.update_fullscreen_action()
        # Set initial toggle state from user prefs
        ag.get_action("ToggleScaleFeedback").set_active(
            self.app.preferences.get("ui.feedback.scale", False))
        ag.get_action("ToggleLastPosFeedback").set_active(
            self.app.preferences.get("ui.feedback.last_pos", False))
        # Keyboard handling: let the keyboard manager own every action's
        # accelerator.
        for action in self.action_group.list_actions():
            self.app.kbm.takeover_action(action)
    def _init_stategroups(self):
        """Create popup states (currently just the colour history popup)."""
        sg = stategroup.StateGroup()
        p2s = sg.create_popup_state
        hist = p2s(historypopup.HistoryPopup(self.app, self.app.doc.model))
        self.popup_states = {
            'ColorHistoryPopup': hist,
        }
        # Popup hides itself 600ms after the pointer leaves it.
        hist.autoleave_timeout = 0.600
        self.history_popup_state = hist
        # Label each popup state after its triggering action.
        for action_name, popup_state in self.popup_states.items():
            label = self.app.find_action(action_name).get_label()
            popup_state.label = label
    def _init_menubar(self):
        """Load menu.xml into the UIManager and clone it as the popup menu."""
        # Load Menubar, duplicate into self.popupmenu
        ui_dir = os.path.dirname(os.path.abspath(__file__))
        menupath = os.path.join(ui_dir, 'menu.xml')
        with open(menupath) as fp:
            menubar_xml = fp.read()
        self.app.ui_manager.add_ui_from_string(menubar_xml)
        # The canvas context menu is a clone of the menubar definition.
        self.popupmenu = self._clone_menu(
            menubar_xml,
            'PopupMenu',
            self.app.doc.tdw,
        )
        self.menubar = self.app.ui_manager.get_widget('/Menubar')
def _init_toolbars(self):
self._toolbar_manager = toolbar.ToolbarManager(self)
self._toolbar1 = self._toolbar_manag |
the-xkcd-community/the-red-spider-project | src/xkcd-search.py | Python | mit | 1,201 | 0.029975 | #!/usr/bin/env python2
# Copyright 2012 Neil Forrester
# Licensed under the Red Spider Project License.
# See the License.txt that shipped with your copy of this software for details.
import re
import argparse
import os
import sys
import codecs
xf = __import__('xkcd-fetch')
if __name__ == "__main__":
# set up command line arguments
parser = argparse.ArgumentParser(description = 'Searches cached xkcd comics, will not download new ones.')
parser.add_argument('regex',
metavar = 'REGEX',
help = 'A python regular expression')
args = parser.parse_args()
# if files don't exist, exit
if not os.path.exists(xf.comic_data_path):
sys.stderr.write("No comic data found.\n" +
"Is RED_SPIDER_ROOT set?\n" +
"Have you run xkcd-fetch yet?\n")
sys.exit(1)
regex = re.compile(args.regex)
# read | the cache from the file
comics = xf.read_cache()
# search the comics
for num in comics.keys():
if any(map(regex.search, [comics[num].comic_title,
comics[num].title_text,
comics[num].transcript,
com | ics[num].news])):
print num
|
10printhello/Blank-Heroku-Django-App | backoffice/backoffice/apps/pages/urls.py | Python | gpl-2.0 | 217 | 0.009217 | from dja | ngo.conf.urls import patterns, include, url
from apps.pages import views
# See: https://docs.djangoproject.com/en/dev/topics/http/urls/
ur | lpatterns = patterns('',
url(r'^$', views.home, name='home'),
)
|
thomasaarholt/hyperspy | hyperspy/tests/io/test_edax.py | Python | gpl-3.0 | 19,779 | 0.001466 | import gc
import hash | lib
import os
import os.path
import tempfile
import zipfile
import numpy as np
import pytest
import requests
from hyperspy import signals
from hyperspy.io import load
# Location of the zipped EDAX test data, and a scratch directory that
# setup_module() extracts it into.
MY_PATH = os.path.dirname(__file__)
ZIPF = os.path.join(MY_PATH, "edax_files.zip")
TMP_DIR = tempfile.TemporaryDirectory()
TEST_FILES_OK = os.path.isfile(ZIPF)
REASON = ""
SHA256SUM = "e217c71efbd208da4b52e9cf483443f9da2175f2924a96447ed393086fe32008"
# The test files are not included in HyperSpy v1.4 because their file size is 36.5MB
# taking the HyperSpy source distribution file size above PyPI's 60MB limit.
# As a temporary solution, we attempt to download the test files from GitHub
# and skip the tests if the download fails.
if not TEST_FILES_OK:
    try:
        r = requests.get(
            "https://github.com/hyperspy/hyperspy/blob/e7a323a3bb9b237c24bd9267d2cc4fcb31bb99f3/hyperspy/tests/io/edax_files.zip?raw=true")
        SHA256SUM_GOT = hashlib.sha256(r.content).hexdigest()
        if SHA256SUM_GOT == SHA256SUM:
            with open(ZIPF, 'wb') as f:
                f.write(r.content)
            TEST_FILES_OK = True
        else:
            # BUGFIX: the original applied "%" to SHA256SUM alone
            # ('"..." % SHA256SUM, SHA256SUM_GOT'), which raises
            # TypeError (two placeholders, one argument); that error was
            # then caught below and misreported as a download failure.
            REASON = (
                "wrong sha256sum of downloaded file. Expected: %s, got: %s"
                % (SHA256SUM, SHA256SUM_GOT))
    except BaseException as e:
        REASON = "download of EDAX test files failed: %s" % e
def setup_module():
    """Extract the EDAX test archive into the temp dir, once per module."""
    if TEST_FILES_OK:
        with zipfile.ZipFile(ZIPF, 'r') as zipped:
            zipped.extractall(TMP_DIR.name)
pytestmark = pytest.mark.skipif(not TEST_FILES_OK,
reason=REASON)
def teardown_module():
    """Remove the temporary directory holding the extracted test files."""
    TMP_DIR.cleanup()
class TestSpcSpectrum_v061_xrf:
    """Reader tests for an EDAX .spc file (header v0.61, XRF data)."""

    @classmethod
    def setup_class(cls):
        # Load the same file twice: with defaults, and with the full spc
        # header exposed via load_all_spc=True.
        cls.spc = load(os.path.join(TMP_DIR.name, "spc0_61-ipr333_xrf.spc"))
        cls.spc_loadAll = load(os.path.join(TMP_DIR.name,
                                            "spc0_61-ipr333_xrf.spc"),
                               load_all_spc=True)

    @classmethod
    def teardown_class(cls):
        del cls.spc, cls.spc_loadAll
        gc.collect()

    def test_data(self):
        """Check dtype, shape and a sample of channel counts."""
        # test datatype
        assert np.uint32 == TestSpcSpectrum_v061_xrf.spc.data.dtype
        # test data shape
        assert (4000,) == TestSpcSpectrum_v061_xrf.spc.data.shape
        # test 40 datapoints
        assert (
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 319, 504, 639, 924,
             1081, 1326, 1470, 1727, 1983, 2123, 2278, 2509, 2586, 2639,
             2681, 2833, 2696, 2704, 2812, 2745, 2709, 2647, 2608, 2620,
             2571, 2669] == TestSpcSpectrum_v061_xrf.spc.data[:40].tolist())

    def test_parameters(self):
        """Check the metadata tree mapped from the spc header."""
        elements = TestSpcSpectrum_v061_xrf.spc.metadata.as_dictionary()[
            'Sample']['elements']
        sem_dict = TestSpcSpectrum_v061_xrf.spc.metadata.as_dictionary()[
            'Acquisition_instrument']['SEM']  # this will eventually need to
                                              # be changed when XRF-specific
                                              # features are added
        eds_dict = sem_dict['Detector']['EDS']
        signal_dict = TestSpcSpectrum_v061_xrf.spc.metadata.as_dictionary()[
            'Signal']
        # Testing SEM parameters
        np.testing.assert_allclose(30, sem_dict['beam_energy'])
        np.testing.assert_allclose(0, sem_dict['Stage']['tilt_alpha'])
        # Testing EDS parameters
        np.testing.assert_allclose(45, eds_dict['azimuth_angle'])
        np.testing.assert_allclose(35, eds_dict['elevation_angle'])
        np.testing.assert_allclose(137.92946, eds_dict['energy_resolution_MnKa'],
                                   atol=1E-5)
        np.testing.assert_allclose(2561.0, eds_dict['live_time'], atol=1E-6)
        # Testing elements
        assert ({'Al', 'Ca', 'Cl', 'Cr', 'Fe', 'K', 'Mg', 'Mn', 'Si', 'Y'} ==
                set(elements))
        # Testing HyperSpy parameters
        assert 'EDS_SEM' == signal_dict['signal_type']
        assert isinstance(TestSpcSpectrum_v061_xrf.spc, signals.EDSSEMSpectrum)

    def test_axes(self):
        """Check the single energy axis (keV, 10 eV channels)."""
        spc_ax_manager = {'axis-0': {'_type': 'UniformDataAxis',
                                     'name': 'Energy',
                                     'navigate': False,
                                     'is_binned': True,
                                     'offset': 0.0,
                                     'scale': 0.01,
                                     'size': 4000,
                                     'units': 'keV'}}
        assert (spc_ax_manager ==
                TestSpcSpectrum_v061_xrf.spc.axes_manager.as_dictionary())

    def test_load_all_spc(self):
        """Spot-check raw header fields exposed by load_all_spc=True."""
        spc_header = TestSpcSpectrum_v061_xrf.spc_loadAll.original_metadata[
            'spc_header']
        np.testing.assert_allclose(4, spc_header['analysisType'])
        np.testing.assert_allclose(4, spc_header['analyzerType'])
        np.testing.assert_allclose(2013, spc_header['collectDateYear'])
        np.testing.assert_allclose(9, spc_header['collectDateMon'])
        np.testing.assert_allclose(26, spc_header['collectDateDay'])
        np.testing.assert_equal(b'Garnet1.', spc_header['fileName'].view('|S8')[0])
        np.testing.assert_allclose(45, spc_header['xRayTubeZ'])
class TestSpcSpectrum_v070_eds:
    @classmethod
    def setup_class(cls):
        # Load the EDS test file twice: with defaults, and with the full
        # spc header exposed via load_all_spc=True.
        cls.spc = load(os.path.join(TMP_DIR.name, "single_spect.spc"))
        cls.spc_loadAll = load(os.path.join(TMP_DIR.name,
                                            "single_spect.spc"),
                               load_all_spc=True)
    @classmethod
    def teardown_class(cls):
        # Drop the loaded signals and force a collection to release file data.
        del cls.spc, cls.spc_loadAll
        gc.collect()
    def test_data(self):
        """Check dtype, shape and the first 20 channel counts."""
        # test datatype
        assert np.uint32 == TestSpcSpectrum_v070_eds.spc.data.dtype
        # test data shape
        assert (4096,) == TestSpcSpectrum_v070_eds.spc.data.shape
        # test 1st 20 datapoints
        assert (
            [0, 0, 0, 0, 0, 0, 1, 2, 3, 3, 10, 4, 10, 10, 45, 87, 146, 236,
             312, 342] == TestSpcSpectrum_v070_eds.spc.data[:20].tolist())
    def test_parameters(self):
        """Check the metadata tree mapped from the spc header."""
        elements = TestSpcSpectrum_v070_eds.spc.metadata.as_dictionary()[
            'Sample']['elements']
        sem_dict = TestSpcSpectrum_v070_eds.spc.metadata.as_dictionary()[
            'Acquisition_instrument']['SEM']
        eds_dict = sem_dict['Detector']['EDS']
        signal_dict = TestSpcSpectrum_v070_eds.spc.metadata.as_dictionary()[
            'Signal']
        # Testing SEM parameters
        np.testing.assert_allclose(22, sem_dict['beam_energy'])
        np.testing.assert_allclose(0, sem_dict['Stage']['tilt_alpha'])
        # Testing EDS parameters
        np.testing.assert_allclose(0, eds_dict['azimuth_angle'])
        np.testing.assert_allclose(34, eds_dict['elevation_angle'])
        np.testing.assert_allclose(129.31299, eds_dict['energy_resolution_MnKa'],
                                   atol=1E-5)
        np.testing.assert_allclose(50.000004, eds_dict['live_time'], atol=1E-6)
        # Testing elements
        assert ({'Al', 'C', 'Ce', 'Cu', 'F', 'Ho', 'Mg', 'O'} ==
                set(elements))
        # Testing HyperSpy parameters
        assert 'EDS_SEM' == signal_dict['signal_type']
        assert isinstance(TestSpcSpectrum_v070_eds.spc, signals.EDSSEMSpectrum)
    def test_axes(self):
        """Check the single energy axis (keV, 10 eV channels)."""
        spc_ax_manager = {'axis-0': {'_type': 'UniformDataAxis',
                                     'name': 'Energy',
                                     'navigate': False,
                                     'is_binned': True,
                                     'offset': 0.0,
                                     'scale': 0.01,
                                     'size': 4096,
                                     'units': 'keV'}}
        assert (spc_ax_manager ==
                TestSpcSpectrum_v070_eds.spc.axes_manager.as_dictionary())
def test_load_all_spc(self):
spc_header = TestSpcSpectrum_v070_eds.spc_loadAll.original_metadata[
'spc_header']
np.testing.assert_allclose(4, spc_header['analysisType'])
np.testing.assert_allclose(5, spc_header['analyzerType'])
np.testing.assert_allclose(2016, spc_header['collectDateYear'])
|
elyezer/robottelo | tests/foreman/ui/test_oscapcontent.py | Python | gpl-3.0 | 6,070 | 0 | """Tests for Oscapcontent
:Requirement: Oscapcontent
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: UI
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import unittest2
from fauxfactory import gen_string
from nailgun import entities
from robottelo.config import settings
from robottelo.constants import OSCAP_DEFAULT_CONTENT
from robottelo.datafactory import invalid_values_list, valid_data_list
from robottelo.decorators import (
skip_if_bug_open,
skip_if_not_set,
tier1,
tier2,
)
from robottelo.helpers import get_data_file
from robottelo.test import UITestCase
from robottelo.ui.factory import make_oscapcontent
from robottelo.ui.locators import common_locators
from robottelo.ui.session import Session
class OpenScapContentTestCase(UITestCase):
    """Implements Oscap Content tests in UI."""

    @classmethod
    @skip_if_not_set('oscap')
    def setUpClass(cls):
        # Shared fixtures: the SCAP content file path and a fresh
        # organization that the capsule/proxy is associated with.
        super(OpenScapContentTestCase, cls).setUpClass()
        path = settings.oscap.content_path
        cls.content_path = get_data_file(path)
        org = entities.Organization(name=gen_string('alpha')).create()
        cls.org_name = org.name
        proxy = entities.SmartProxy().search(
            query={
                u'search': u'name={0}'.format(
                    settings.server.hostname)
            }
        )[0]
        # NOTE(review): the organization is assigned locally but no
        # .update() call follows; presumably persisted elsewhere — confirm.
        proxy.organization = [org]

    @tier1
    def test_positive_create(self):
        """Create OpenScap content.

        :id: 6580cffa-da37-40d5-affa-cfb1ff27c545

        :Steps:

            1. Create an openscap content.
            2. Provide all the appropriate parameters.

        :expectedresults: Whether creating content for OpenScap is successful.

        :CaseImportance: Critical
        """
        with Session(self.browser) as session:
            for content_name in valid_data_list():
                with self.subTest(content_name):
                    make_oscapcontent(
                        session,
                        name=content_name,
                        content_path=self.content_path,
                        content_org=self.org_name,
                    )
                    self.assertIsNotNone(
                        self.oscapcontent.search(content_name))

    @skip_if_bug_open('bugzilla', 1289571)
    @tier1
    def test_negative_create_with_invalid_name(self):
        """Create OpenScap content with negative values

        :id: 8ce0e8b4-396a-43cd-8cbe-fb60fcf853b0

        :Steps:

            1. Create an openscap content.
            2. Provide all the appropriate parameters.

        :expectedresults: Creating content for OpenScap is not successful.

        :BZ: 1289571

        :CaseImportance: Critical
        """
        with Session(self.browser) as session:
            for content_name in invalid_values_list(interface='ui'):
                with self.subTest(content_name):
                    make_oscapcontent(
                        session,
                        name=content_name,
                        content_path=self.content_path,
                        content_org=self.org_name,
                    )
                    # The UI is expected to reject the name with an error.
                    self.assertIsNotNone(session.nav.wait_until_element(
                        common_locators['haserror']))

    @tier1
    @unittest2.skip('oscap contents are not installed by default.'
                    'Installer needs to be fixed')
    def test_positive_default(self):
        """Check whether OpenScap content exists by default.

        :id: 0beca127-8294-4d85-bace-b9170215c0cd

        :Steps:

            1. Set Org as Any Org.
            2. Navigate to oscap Content page.

        :expectedresults: Whether oscap content exists by default.

        :CaseImportance: Critical
        """
        # see BZ 1336374
        with Session(self.browser):
            self.assertIsNotNone(self.oscapcontent.search(
                OSCAP_DEFAULT_CONTENT['rhel7_content']))
            self.assertIsNotNone(self.oscapcontent.search(
                OSCAP_DEFAULT_CONTENT['rhel6_content']))

    @tier2
    def test_positive_update(self):
        """Update OpenScap content.

        :id: 9870555d-0b60-41ab-a481-81d4d3f78fec

        :Steps:

            1. Create an openscap content.
            2. Provide all the appropriate parameters.
            3. Update the openscap content, here the Org.

        :expectedresults: Whether creating content for OpenScap is successful.

        :CaseLevel: Integration
        """
        org = entities.Organization(name=gen_string('alpha')).create()
        content_name = gen_string('alpha')
        with Session(self.browser) as session:
            make_oscapcontent(
                session,
                name=content_name,
                content_path=self.content_path,
                content_org=self.org_name,
            )
            # Reassign the content to the new org, then verify it is
            # visible from that org's context.
            self.oscapcontent.update(content_name, content_org=org.name)
            session.nav.go_to_select_org(org.name)
            self.assertIsNotNone(
                self.oscapcontent.search(content_name))

    @tier1
    def test_positive_delete(self):
        """Create OpenScap content and then delete it.

        :id: 8eade129-5666-4e90-ba3e-f0c51a3090ce

        :Steps:

            1. Create an openscap content.
            2. Provide all the appropriate parameters.
            3. Delete the openscap content.

        :expectedresults: Deleting content for OpenScap is successful.

        :CaseImportance: Critical
        """
        with Session(self.browser) as session:
            for content_name in valid_data_list():
                with self.subTest(content_name):
                    make_oscapcontent(
                        session,
                        name=content_name,
                        content_path=self.content_path,
                        content_org=self.org_name,
                    )
                    self.assertIsNotNone(
                        self.oscapcontent.search(content_name))
                    self.oscapcontent.delete(content_name)
|
abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/translations/utilities/kde_po_importer.py | Python | agpl-3.0 | 3,250 | 0.000308 | # Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Import module for legacy KDE .po files.
This is an extension of standard gettext PO files.
You can read more about this file format from:
* http://l10n.kde.org/docs/translation-howto/gui-peculiarities.html
* http://docs.kde.org/development/en/kdesdk/kbabel/kbabel-pluralforms.html
* http://websvn.kde.org/branches/KDE/3.5/kdelibs/kdecore/klocale.cpp
"""
__metaclass__ = type
__all__ = [
'KdePOImporter'
]
from zope.interface import implements
from lp.translations.interfaces.translationfileformat import (
TranslationFileFormat,
)
from lp.translations.interfaces.translationimporter import (
ITranslationFormatImporter,
)
from lp.translations.utilities.gettext_po_importer import GettextPOImporter
class KdePOImporter(GettextPOImporter):
    """Support class for importing KDE .po files.

    Legacy KDE gettext files encode plurals and message context inside
    the msgid itself, using "_n: " and "_: " prefixes; this importer
    unpacks those into proper plural/context fields after standard
    gettext parsing.
    """

    implements(ITranslationFormatImporter)

    def getFormat(self, file_contents):
        """See `ITranslationFormatImporter`."""
        # XXX DaniloSegan 20070904: I first tried using POParser()
        # to check if the file is a legacy KDE PO file or not, but
        # that is too slow in some cases like tarball uploads (processing
        # of all PO files in a tarball is done in the same transaction,
        # and with extremely big PO files, this will be too slow). Thus,
        # a heuristic verified to be correct on all PO files from
        # Ubuntu language packs.
        if ('msgid "_n: ' in file_contents or
            'msgid ""\n"_n: ' in file_contents or
            'msgid "_: ' in file_contents or
            'msgid ""\n"_: ' in file_contents):
            return TranslationFileFormat.KDEPO
        else:
            return TranslationFileFormat.PO

    # Higher priority than the plain gettext importer, so KDE files are
    # claimed by this class first.
    priority = 10

    content_type = 'application/x-po'

    def parse(self, translation_import_queue_entry):
        """See `ITranslationFormatImporter`."""
        # Parse as ordinary gettext first, then rewrite KDE-isms in place.
        translation_file = GettextPOImporter.parse(
            self, translation_import_queue_entry)

        plural_prefix = u'_n: '
        context_prefix = u'_: '

        for message in translation_file.messages:
            msgid = message.msgid_singular
            if msgid.startswith(plural_prefix) and '\n' in msgid:
                # This is a KDE plural form: "_n: singular\nplural", with
                # translations likewise newline-separated in msgstr[0].
                singular, plural = msgid[len(plural_prefix):].split('\n')

                message.msgid_singular = singular
                message.msgid_plural = plural
                msgstrs = message._translations
                if len(msgstrs) > 0:
                    message._translations = msgstrs[0].split('\n')
                self.internal_format = TranslationFileFormat.KDEPO
            elif msgid.startswith(context_prefix) and '\n' in msgid:
                # This is a KDE context message: "_: context\nmsgid".
                message.context, message.msgid_singular = (
                    msgid[len(context_prefix):].split('\n', 1))
                self.internal_format = TranslationFileFormat.KDEPO
            else:
                # Other messages are left as they are parsed by
                # GettextPOImporter
                pass

        return translation_file
|
pypa/warehouse | warehouse/migrations/versions/34b18e18775c_add_last_totp_value_to_user.py | Python | apache-2.0 | 940 | 0.001064 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is | distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
add last totp value to user
Revision ID: 34b18e18775c
Revises: 0ac2f506ef2e
Create Date: 2019-08-15 21:28:47.621282
"""
import sqlalchemy as sa
from alembic import op
revision | = "34b18e18775c"
down_revision = "0ac2f506ef2e"
def upgrade():
    """Add the nullable ``last_totp_value`` column to ``users``."""
    op.add_column("users", sa.Column("last_totp_value", sa.String(), nullable=True))
def downgrade():
    """Drop the ``last_totp_value`` column from ``users``."""
    op.drop_column("users", "last_totp_value")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.