| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (1 class) | license (15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars) |
|---|---|---|---|---|---|---|---|---|
| kishkaru/python-driver | tests/integration/long/test_ssl.py | Python | apache-2.0 | 10,956 | 0.003468 |
# Copyright 2013-2016 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import unittest2 as unittest
except ImportError:
import unittest
import os, sys, traceback, logging, ssl
from cassandra.cluster import Cluster, NoHostAvailable
from cassandra import ConsistencyLevel
from cassandra.query import SimpleStatement
from tests.integration import use_singledc, PROTOCOL_VERSION, get_cluster, remove_cluster
log = logging.getLogger(__name__)
DEFAULT_PASSWORD = "pythondriver"
# Server keystore trust store locations
SERVER_KEYSTORE_PATH = "tests/integration/long/ssl/.keystore"
SERVER_TRUSTSTORE_PATH = "tests/integration/long/ssl/.truststore"
# Client specific keys/certs
CLIENT_CA_CERTS = 'tests/integration/long/ssl/cassandra.pem'
DRIVER_KEYFILE = "tests/integration/long/ssl/driver.key"
DRIVER_CERTFILE = "tests/integration/long/ssl/driver.pem"
DRIVER_CERTFILE_BAD = "tests/integration/long/ssl/python_driver_bad.pem"
def setup_cluster_ssl(client_auth=False):
"""
We need some custom setup for this module. This will start the ccm cluster with basic
ssl connectivity, and enable client authentication if needed.
"""
use_singledc(start=False)
ccm_cluster = get_cluster()
ccm_cluster.stop()
# Fetch the absolute path to the keystore for ccm.
abs_path_server_keystore_path = os.path.abspath(SERVER_KEYSTORE_PATH)
# Configure ccm to use ssl.
config_options = {'client_encryption_options': {'enabled': True,
'keystore': abs_path_server_keystore_path,
'keystore_password': DEFAULT_PASSWORD}}
if client_auth:
abs_path_server_truststore_path = os.path.abspath(SERVER_TRUSTSTORE_PATH)
client_encryption_options = config_options['client_encryption_options']
client_encryption_options['require_client_auth'] = True
client_encryption_options['truststore'] = abs_path_server_truststore_path
client_encryption_options['truststore_password'] = DEFAULT_PASSWORD
ccm_cluster.set_configuration_options(config_options)
ccm_cluster.start(wait_for_binary_proto=True, wait_other_notice=True)
def teardown_module():
"""
The rest of the tests don't need ssl enabled, so remove the cluster to avoid interfering with other tests.
"""
ccm_cluster = get_cluster()
ccm_cluster.stop()
remove_cluster()
def validate_ssl_options(ssl_options):
# Attempt to connect, retrying a few times in case the node is not ready yet.
tries = 0
while True:
if tries > 5:
raise RuntimeError("Failed to connect to SSL cluster after 5 attempts")
try:
cluster = Cluster(protocol_version=PROTOCOL_VERSION, ssl_options=ssl_options)
session = cluster.connect()
break
except Exception:
ex_type, ex, tb = sys.exc_info()
log.warning("{0}: {1} Backtrace: {2}".format(ex_type.__name__, ex, traceback.extract_tb(tb)))
del tb
tries += 1
# attempt a few simple commands.
create_keyspace = """CREATE KEYSPACE ssltest
WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '3'}
"""
statement = SimpleStatement(create_keyspace)
statement.consistency_level = ConsistencyLevel.THREE
session.execute(statement)
drop_keyspace = "DROP KEYSPACE ssltest"
statement = SimpleStatement(drop_keyspace)
statement.consistency_level = ConsistencyLevel.ANY
session.execute(statement)
cluster.shutdown()
class SSLConnectionTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
setup_cluster_ssl()
def test_can_connect_with_ssl_ca(self):
"""
Test to validate that we are able to connect to a cluster using ssl.
test_can_connect_with_ssl_ca performs a simple sanity check to ensure that we can connect to a cluster with ssl
authentication via a simple server-side shared certificate authority. The client is able to validate the identity
of the server; however, with this method the server can't trust the client unless additional authentication
has been provided.
@since 2.6.0
@jira_ticket PYTHON-332
@expected_result The client can connect via SSL and perform some basic operations
@test_category connection:ssl
"""
# find absolute path to client CA_CERTS
abs_path_ca_cert_path = os.path.abspath(CLIENT_CA_CERTS)
ssl_options = {'ca_certs': abs_path_ca_cert_path,
'ssl_version': ssl.PROTOCOL_TLSv1}
validate_ssl_options(ssl_options=ssl_options)
def test_can_connect_with_ssl_ca_host_match(self):
"""
Test to validate that we are able to connect to a cluster using ssl, and host matching
test_can_connect_with_ssl_ca_host_match performs a simple sanity check to ensure that we can connect to a cluster with ssl
authentication via simple server-side shared certificate authority. It also validates that the host ip matches what is expected
@since 3.3
@jira_ticket PYTHON-296
@expected_result The client can connect via SSL and perform some basic operations, with check_hostname specified
@test_category connection:ssl
"""
# find absolute path to client CA_CERTS
abs_path_ca_cert_path = os.path.abspath(CLIENT_CA_CERTS)
ssl_options = {'ca_certs': abs_path_ca_cert_path,
'ssl_version': ssl.PROTOCOL_TLSv1,
'cert_reqs': ssl.CERT_REQUIRED,
'check_hostname': True}
validate_ssl_options(ssl_options=ssl_options)
class SSLConnectionAuthTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
setup_cluster_ssl(client_auth=True)
def test_can_connect_with_ssl_client_auth(self):
"""
Test to validate that we can connect to a C* cluster that has client_auth enabled.
This test will set up and use a C* cluster that has client authentication enabled. It will then attempt
to connect using valid client keys and certs (that are in the server's truststore), and attempt to perform some
basic operations
@since 2.7.0
@expected_result The client can connect via SSL and perform some basic operations
@test_category connection:ssl
"""
# Need to get absolute paths for certs/key
abs_path_ca_cert_path = os.path.abspath(CLIENT_CA_CERTS)
abs_driver_keyfile = os.path.abspath(DRIVER_KEYFILE)
abs_driver_certfile = os.path.abspath(DRIVER_CERTFILE)
ssl_options = {'ca_certs': abs_path_ca_cert_path,
'ssl_version': ssl.PROTOCOL_TLSv1,
'keyfile': abs_driver_keyfile,
'certfile': abs_driver_certfile}
validate_ssl_options(ssl_options)
def test_can_connect_with_ssl_client_auth_host_name(self):
"""
Test to validate that we can connect to a C* cluster that has client_auth enabled, and host matching
This test will set up and use a C* cluster that has client authentication enabled. It will then attempt
to connect using valid client keys and certs (that are in the server's truststore), and attempt to perform some
basic operations, with check_hostname specified
@jira_ticket PYTHON-296
@since 3.3
@expected_result The client can connect via SSL and perform some basic operations
@test_category connection:ssl
"""
| lneuhaus/pyrpl | pyrpl/widgets/module_widgets/na_widget.py | Python | gpl-3.0 | 15,889 | 0.004217 |
"""
The network analyzer records the coherent response of the signal at the port
:code:`input` to a sinusoidal excitation of variable frequency sent to the
output selected in :code:`output_direct`.
.. note:: If :code:`output_direct='off'`, another module's input can be set
to :code:`networkanalyzer` to test its response to a frequency sweep.
* :attr:`~pyrpl.software_modules.network_analyzer.NetworkAnalyzer.amplitude`
sets the amplitude of the sinusoidal excitation in Volts.
* :attr:`~pyrpl.software_modules.network_analyzer.NetworkAnalyzer.start_freq`/:attr:`~pyrpl.software_modules.network_analyzer.NetworkAnalyzer.stop_freq`
define the frequency range over which a transfer function is recorded.
Swapping the values of :code:`start_freq` and :code:`stop_freq` reverses the
direction of the frequency sweep. Setting :code:`stop_freq = start_freq`
enables the "zero-span" mode, where the coherent response at a constant
frequency is recorded as a function of time.
* :attr:`~pyrpl.software_modules.network_analyzer.NetworkAnalyzer.points`
defines the number of frequency points in the recorded transfer function.
* :attr:`~pyrpl.software_modules.network_analyzer.NetworkAnalyzer.rbw` is
the cutoff frequency of the low-pass filter after demodulation. Furthermore,
the time :math:`\\tau` spent to record each point is
:math:`\\tau=\\texttt{average_per_point} / \\texttt{rbw}`.
* :attr:`~pyrpl.software_modules.network_analyzer.NetworkAnalyzer.average_per_point`:
Each point is averaged inside the FPGA before being retrieved by the
client computer that runs PyRPL. You should increase this parameter or
decrease :code:`rbw` if the communication time between the Red Pitaya and
the client computer limits the acquisition speed.
* :attr:`~pyrpl.software_modules.network_analyzer.NetworkAnalyzer.acbandwidth`
is the cutoff frequency of a high-pass filter applied to the input before
demodulation. A setting of zero disables the high-pass filter.
* :attr:`~pyrpl.software_modules.network_analyzer.NetworkAnalyzer.logscale`
enables the use of a logarithmic scale for the frequency axis, resulting in
a logarithmic distribution of the frequency points as well.
* :attr:`~pyrpl.software_modules.network_analyzer.NetworkAnalyzer.infer_open_loop_tf`
applies the transformation :math:`T \\rightarrow \\frac{T}{1+T}` to the displayed
transfer function to correct for the effect of a closed feedback loop
(not implemented at the moment).
"""
from .base_module_widget import ModuleWidget
from .acquisition_module_widget import AcquisitionModuleWidget
from qtpy import QtCore, QtWidgets
import pyqtgraph as pg
from time import time
import numpy as np
import sys
class NaWidget(AcquisitionModuleWidget):
"""
Network Analyzer Tab.
"""
# It would be a good idea to change this number dynamically as the curve
# becomes more and more expensive to display.
starting_update_rate = 0.2
CHUNK_SIZE = 500
def init_gui(self):
"""
Sets up the gui
"""
#self.main_layout = QtWidgets.QVBoxLayout()
self.init_main_layout(orientation="vertical")
self.init_attribute_layout()
self.button_layout = QtWidgets.QHBoxLayout()
#self.setLayout(self.main_layout)
self.setWindowTitle("NA")
self.win = pg.GraphicsWindow(title="Magnitude")
self.label_benchmark = pg.LabelItem(justify='right')
self.win.addItem(self.label_benchmark, row=1,col=0)
self._last_benchmark_value = np.nan
self.win_phase = pg.GraphicsWindow(title="Phase")
self.plot_item = self.win.addPlot(row=1, col=0, title="Magnitude (dB)")
self.plot_item_phase = self.win_phase.addPlot(row=1, col=0,
title="Phase (deg)")
self.plot_item_phase.setXLink(self.plot_item)
self.button_single = QtWidgets.QPushButton("Run single")
self.button_single.my_label = "Single"
self.button_continuous = QtWidgets.QPushButton("Run continuous")
self.button_continuous.my_label = "Continuous"
self.button_stop = QtWidgets.QPushButton('Stop')
self.button_save = QtWidgets.QPushButton("Save curve")
self.chunks = [] #self.plot_item.plot(pen='y')
self.chunks_phase = []
self.main_layout.addWidget(self.win)
self.main_layout.addWidget(self.win_phase)
aws = self.attribute_widgets
self.attribute_layout.removeWidget(aws["trace_average"])
self.attribute_layout.removeWidget(aws["curve_name"])
#self.button_layout.addWidget(aws["trace_average"])
#self.button_layout.addWidget(aws["curve_name"])
super(NaWidget, self).init_gui()
#self.button_layout.addWidget(self.button_single)
#self.button_layout.addWidget(self.button_continuous)
#self.button_layout.addWidget(self.button_stop)
#self.button_layout.addWidget(self.button_save)
#self.main_layout.addLayout(self.button_layout)
#self.button_single.clicked.connect(self.run_single_clicked)
#self.button_continuous.clicked.connect(self.run_continuous_clicked)
#self.button_stop.clicked.connect(self.button_stop_clicked)
#self.button_save.clicked.connect(self.save_clicked)
self.arrow = pg.ArrowItem()
self.arrow.setVisible(False)
self.arrow_phase = pg.ArrowItem()
self.arrow_phase.setVisible(False)
self.plot_item.addItem(self.arrow)
self.plot_item_phase.addItem(self.arrow_phase)
self.last_updated_point = 0
self.last_updated_time = 0
#self.display_state(self.module.running_state)
self.update_running_buttons()
self.update_period = self.starting_update_rate # also modified in clear_curve.
# Not sure why the stretch factors in button_layout are not good by
# default...
#self.button_layout.setStretchFactor(self.button_single, 1)
#self.button_layout.setStretchFactor(self.button_continuous, 1)
#self.button_layout.setStretchFactor(self.button_stop, 1)
#self.button_layout.setStretchFactor(self.button_save, 1)
self.x_log_toggled() # Set the axis in logscale if it has to be
def autoscale(self):
    """
    Set the x-axis range to the current frequency span.
    """
self.plot_item.setRange(xRange=[self.module.start_freq, self.module.stop_freq])
self.plot_item_phase.setRange(xRange=[self.module.start_freq, self.module.stop_freq])
def clear_curve(self):
"""
Clear all chunks
"""
self.update_period = self.starting_update_rate  # reset the update period to its starting value
while True:
try:
chunk = self.chunks.pop()
chunk_phase = self.chunks_phase.pop()
chunk.clear()
chunk_phase.clear()
except IndexError:
break
self.label_benchmark.setText("")
def x_log_toggled(self):
"""
Toggle logarithmic scaling of the x-axis.
"""
log_mod = self.module.logscale
self.plot_item.setLogMode(x=log_mod, y=None) # this seems also needed
self.plot_item_phase.setLogMode(x=log_mod, y=None)
for chunk, chunk_phase in zip(self.chunks, self.chunks_phase):
chunk.setLogMode(xMode=log_mod, yMode=None)
chunk_phase.setLogMode(xMode=log_mod, yMode=None)
def scan_finished(self):
"""
When running continuously, the number of averages needs to be redisplayed.
"""
self.update_current_average()
self.update_point(self.module.points-1, force=True) # make sure all points in the scan are updated
def set_benchmark_text(self, text):
self.label_benchmark.setText(text)
def update_point(self, index, force=False):
"""
To speed things up, the curves are plotted by chunks of
self.CHUNK_SIZE points. All points between last_updated_point and
index will be redrawn.
""
|
| venthur/pyff | src/lib/P300VisualElement/TestIt.py | Python | gpl-2.0 | 1,957 | 0.005621 |
# TestIt.py
# Copyright (C) 2009 Matthias Treder
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Use TestIt to test your new visual elements.
TestIt opens a test screen, displays the element, and cycles through
its states.
"""
import sys
import pygame
import Textrow
bgcolor = 0, 0, 0
screenSize = 500, 500
""" Import & define your element here"""
#import Hexagon,math
#e = Hexagon.Hexagon(color=(255,255,255),radius=60,text="ABCD",edgecolor=(255,255,255),textcolor=(10,10,100),textsize=10,colorkey=(0,0,0),antialias=True)
text = "LUXUS"
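# Hedged example (not in the original file): the element `e` used below must
# be defined here. A Textrow element consistent with the `import Textrow`
# above might look like this; the constructor arguments are assumptions.
#e = Textrow.Textrow(text=text, textsize=60, color=(255, 255, 255), colorkey=(0, 0, 0))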
""" Init pygame, open screen etc """
pygame.init()
screen = pygame.display.set_mode(screenSize)
background = pygame.Surface(screenSize)
background.fill(bgcolor)
screen.blit(background, [0, 0])
pygame.display.update()
""" Loop between the states and pause in between """
width, height = screenSize
e.pos = (width / 2, height / 2)
e.refresh()
e.update(0)
pos = 0
while True:
screen.blit(background, [0, 0])
screen.blit(e.image, e.rect)
pygame.display.flip()
e.update()
pygame.time.delay(400)
e.highlight = [pos]
e.refresh()
pos = (pos + 1) % len(text)
for event in pygame.event.get():
if event.type in (pygame.KEYDOWN, pygame.MOUSEBUTTONDOWN):
sys.exit(0)
break
| lexdene/sniper-jinja | setup.py | Python | gpl-3.0 | 1,172 | 0 |
from setuptools import setup
with open('README.rst') as f:
LONG_DESCRIPTION = f.read()
with open('pip-req.d/install.txt') as f:
install_requires = []
for line in f:
if '#' in line:
# remove comment
line = line[:line.index('#')]
line = line.strip()
if line:
install_requires.append(line)
print(install_requires)
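# Hedged illustration (hypothetical file content, not shown in the original):
# a line such as "sniper>=0.1  # core framework" in pip-req.d/install.txt
# would contribute "sniper>=0.1" to install_requires.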
setup(
name="sniper-jinja",
version="0.0
|
.2",
description="a jinja2 plugin for sniper",
long_description=LONG_DESCRIPTION,
url="https://github.com/lexdene/sniper-jinja",
license='GPLv3',
author="Elephant Liu",
author_email="lexdene@gmail.com",
packages=['sniper_jinja'],
platforms=['any'],
install_requires=install_requires,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
| lionleaf/dwitter | dwitter/templatetags/insert_magic_links.py | Python | apache-2.0 | 2,048 | 0.003418 |
import re
from django import template
register = template.Library()
def user_dweet_to_link(m):
text = m.group('text')
dweet_id = m.group('dweet_id')
username = m.group('username')
if username is None:
url = 'd/' + dweet_id
else:
url = 'u/' + username
result = '<a href="/{0}">{0}</a>'.format(url)
return text.replace(url, result)
def hashtag_to_link(m):
text = m.group('text')
hashtag = m.group('hashtag')
url = 'h/' + hashtag
tag = '#' + hashtag
result = '<a href="/{0}">{1}</a>'.format(url, tag)
return text.replace(tag, result)
@register.filter(is_safe=True)
def insert_magic_links(text):
text = re.sub(
r'(?:^|(?<=\s))' # start of string or whitespace
r'/?' # optional /
r'(?P<text>' # capture original pattern
r'[^a-zA-Z\d]?d/(?P<dweet_id>\d+)[^a-zA-Z]?' # dweet reference
r'|'  # or
r'[^a-zA-Z\d]?u/(?P<username>[\w.@+-]+)[^a-zA-Z\d]?)' # user reference
r'(?=$|\s|#)', # end of string, whitespace or hashtag
user_dweet_to_link,
text
)
text = re.sub(
r'(?P<text>' # capture original pattern
# (?:^|\s) # go to the start of the line or the next space
# (?=\S*#) # lookahead to see is there a '#' after a bunch of non-whitespace characters?
# (?!\S*:) # lookahead to check aren't any ':' characters there
# (otherwise it's likely an anchor)
# \S*# # skip any of the characters leading up to the '#'
# then do the hashtag grouping that was there before:
# (?P<hashtag>[_a-zA-Z][_a-zA-Z\d]*)
r'(?:^|\s)(?=\S*#)(?!\S*:)\S*#(?P<hashtag>[_a-zA-Z][_a-zA-Z\d]*))',
hashtag_to_link,
text
)
return text
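# Hedged usage sketch (not part of the original file); the expected output is
# inferred from the patterns above, not verified against the real filter:
#
#     >>> insert_magic_links('see d/123 by u/alice #fun')
#     'see <a href="/d/123">d/123</a> by <a href="/u/alice">u/alice</a> <a href="/h/fun">#fun</a>'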
| ray-project/ray | python/ray/tune/suggest/_mock.py | Python | apache-2.0 | 1,752 | 0 |
from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
def __init__(self, **kwargs):
self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs)
def suggest(self, trial_id: str):
if not self.stall:
self.live_trials[trial_id] = 1
return {"test_var
|
iable": 2}
return None
def on_trial_result(self, trial_id: str, result: Dict):
self.counter["result"] += 1
self.results += [result]
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id]
def _process_result(self, result: Dict):
self.final_results += [result]
class _MockSuggestionAlgorithm(SearchGenerator):
def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher)
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
@property
def results(self) -> List[Dict]:
return self.searcher.results
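if __name__ == "__main__":
    # Hedged demo (not part of the original file): exercising the mock
    # searcher's bookkeeping directly. Assumes ray[tune] is importable.
    searcher = _MockSearcher()
    print(searcher.suggest("t1"))                   # {'test_variable': 2}
    searcher.on_trial_result("t1", {"loss": 0.5})
    searcher.on_trial_complete("t1", result={"loss": 0.4})
    print(searcher.counter)                         # {'result': 1, 'complete': 1}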
| bufferx/stormed-amqp | stormed/method/codegen/queue.py | Python | mit | 3,261 | 0.028212 |
from stormed.util import WithFields
class Declare(WithFields):
_name = "queue.declare"
_class_id = 50
_method_id = 10
_sync = True
_content = False
_fields = [
('ticket' , 'short'),
('queue' , 'shortstr'),
('passive' , 'bit'),
('durable' , 'bit'),
('exclusive' , 'bit'),
('auto_delete' , 'bit'),
('nowait' , 'bit'),
('arguments' , 'table'),
]
class DeclareOk(WithFields):
_name = "queue.declare-ok"
_class_id = 50
_method_id = 11
_sync = False
_content = False
_fields = [
('queue' , 'shortstr'),
('message_count' , 'long'),
('consumer_count' , 'long'),
]
class Bind(WithFields):
_name = "queue.bind"
_class_id = 50
_method_id = 20
_sync = True
_content = False
_fields = [
('ticket' , 'short'),
('queue' , 'shortstr'),
('exchange' , 'shortstr'),
('routing_key' , 'shortstr'),
('nowait' , 'bit'),
('arguments' , 'table'),
]
class BindOk(WithFields):
_name = "q
|
ueue.bind-ok"
_class_id = 50
_method_id = 21
_sync = False
_content = False
_fields = [
]
class Purge(WithFields):
_name = "queue.purge"
_class_id = 50
_method_id = 30
_sync = True
_content = False
_fields = [
('ticket' , 'short'),
('queue' , 'shortstr'),
('nowait' , 'bit'),
]
class PurgeOk(WithFields):
_name = "queue.purge-ok"
_class_id = 50
_method_id = 31
_sync = False
_content = False
_fields = [
('message_count' , 'long'),
]
class Delete(WithFields):
_name = "queue.delete"
_class_id = 50
_method_id = 40
_sync = True
_content = False
_fields = [
('ticket' , 'short'),
('queue' , 'shortstr'),
('if_unused' , 'bit'),
('if_empty' , 'bit'),
('nowait' , 'bit'),
]
class DeleteOk(WithFields):
_name = "queue.delete-ok"
_class_id = 50
_method_id = 41
_sync = False
_content = False
_fields = [
('message_count' , 'long'),
]
class Unbind(WithFields):
_name = "queue.unbind"
_class_id = 50
_method_id = 50
_sync = True
_content = False
_fields = [
('ticket' , 'short'),
('queue' , 'shortstr'),
('exchange' , 'shortstr'),
('routing_key' , 'shortstr'),
('arguments' , 'table'),
]
class UnbindOk(WithFields):
_name = "queue.unbind-ok"
_class_id = 50
_method_id = 51
_sync = False
_content = False
_fields = [
]
id2method = {
10: Declare,
11: DeclareOk,
20: Bind,
21: BindOk,
30: Purge,
31: PurgeOk,
40: Delete,
41: DeleteOk,
50: Unbind,
51: UnbindOk,
}
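if __name__ == "__main__":
    # Hedged demo (not part of the original file): AMQP frames carry a
    # (class_id, method_id) pair; this module maps method ids to classes for
    # class id 50 (queue). Assumes stormed is importable.
    method_cls = id2method[11]
    print(method_cls._name)                          # queue.declare-ok
    print([name for name, _ in method_cls._fields])  # field names of the method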
| meteorfox/PerfKitBenchmarker | perfkitbenchmarker/linux_benchmarks/hpcc_benchmark.py | Python | apache-2.0 | 7,898 | 0.00899 |
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs HPC Challenge.
Homepage: http://icl.cs.utk.edu/hpcc/
Most of the configuration of HPC Challenge revolves around HPL; the rest of
HPCC piggybacks on the HPL configuration.
Homepage: http://www.netlib.org/benchmark/hpl/
HPL requires a BLAS library (Basic Linear Algebra Subprograms)
OpenBlas: http://www.openblas.net/
HPL also requires a MPI (Message Passing Interface) Library
OpenMPI: http://www.open-mpi.org/
MPI needs to be configured:
Configuring MPI:
http://techtinkering.com/2009/12/02/setting-up-a-beowulf-cluster-using-open-mpi-on-linux/
Once HPL is built the configuration file must be created:
Configuring HPL.dat:
http://www.advancedclustering.com/faq/how-do-i-tune-my-hpldat-file.html
http://www.netlib.org/benchmark/hpl/faqs.html
"""
import logging
import math
import re
from perfkitbenchmarker import configs
from perfkitbenchmarker import data
from perfkitbenchmarker import flags
from perfkitbenchmarker import regex_util
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import hpcc
FLAGS = flags.FLAGS
HPCCINF_FILE = 'hpccinf.txt'
MACHINEFILE = 'machinefile'
BLOCK_SIZE = 192
STREAM_METRICS = ['Copy', 'Scale', 'Add', 'Triad']
BENCHMARK_NAME = 'hpcc'
BENCHMARK_CONFIG = """
hpcc:
description: Runs HPCC. Specify the number of VMs with --num_vms
vm_groups:
default:
vm_spec: *default_single_core
vm_count: null
"""
flags.DEFINE_integer('memory_size_mb',
None,
'The amount of memory in MB on each machine to use. By '
'default it will use the entire system\'s memory.')
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def CheckPrerequisites():
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
data.ResourcePath(HPCCINF_FILE)
def CreateMachineFile(vms):
"""Create a file with the IP of each machine in the cluster on its own line.
Args:
vms: The list of vms which will be in the cluster.
"""
with vm_util.NamedTemporaryFile() as machine_file:
master_vm = vms[0]
machine_file.write('localhost slots=%d\n' % master_vm.num_cpus)
for vm in vms[1:]:
machine_file.write('%s slots=%d\n' % (vm.internal_ip,
vm.num_cpus))
machine_file.close()
master_vm.PushFile(machine_file.name, MACHINEFILE)
def CreateHpccinf(vm, benchmark_spec):
"""Creates the HPCC input file."""
num_vms = len(benchmark_spec.vms)
if FLAGS.memory_size_mb:
total_memory = FLAGS.memory_size_mb * 1024 * 1024 * num_vms
else:
# Sum of Free, Cached, Buffers in kb
stdout, _ = vm.RemoteCommand("""
awk '
BEGIN {total =0}
/MemFree:/ {total += $2}
/Cached:/ {total += $2}
/Buffers:/ {total += $2}
END {print total}
' /proc/meminfo
""")
available_memory = int(stdout)
total_memory = available_memory * 1024 * num_vms
total_cpus = vm.num_cpus * num_vms
block_size = BLOCK_SIZE
# Finds a problem size that will fit in memory and is a multiple of the
# block size.
base_problem_size = math.sqrt(total_memory * .1)
blocks = int(base_problem_size / block_size)
blocks = blocks if (blocks % 2) == 0 else blocks - 1
problem_size = block_size * blocks
# Makes the grid as 'square' as possible, with rows < columns
sqrt_cpus = int(math.sqrt(total_cpus)) + 1
num_rows = 0
num_columns = 0
for i in reversed(range(sqrt_cpus)):
if total_cpus % i == 0:
num_rows = i
num_columns = total_cpus / i
break
file_path = data.ResourcePath(HPCCINF_FILE)
vm.PushFile(file_path, HPCCINF_FILE)
sed_cmd = (('sed -i -e "s/problem_size/%s/" -e "s/block_size/%s/" '
'-e "s/rows/%s/" -e "s/columns/%s/" %s') %
(problem_size, block_size, num_rows, num_columns, HPCCINF_FILE))
vm.RemoteCommand(sed_cmd)
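# Worked example of the sizing math above (illustrative numbers, not from the
# original file): with two VMs reporting ~7.5 GB of available memory each,
# total_memory ~= 1.5e10 bytes; base_problem_size = sqrt(0.1 * 1.5e10) ~= 38730;
# blocks = int(38730 / 192) = 201, rounded down to the even 200, so
# problem_size = 192 * 200 = 38400.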
def PrepareHpcc(vm):
"""Builds HPCC on a single vm."""
logging.info('Building HPCC on %s', vm)
vm.Install('hpcc')
def Prepare(benchmark_spec):
"""Install HPCC on the target vms.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
master_vm = vms[0]
PrepareHpcc(master_vm)
CreateHpccinf(master_vm, benchmark_spec)
CreateMachineFile(vms)
master_vm.RemoteCommand('cp %s/hpcc hpcc' % hpcc.HPCC_DIR)
for vm in vms[1:]:
vm.Install('fortran')
master_vm.MoveFile(vm, 'hpcc', 'hpcc')
master_vm.MoveFile(vm, '/usr/bin/orted', 'orted')
vm.RemoteCommand('sudo mv orted /usr/bin/orted')
def ParseOutput(hpcc_output, benchmark_spec):
"""Parses the output from HPCC.
Args:
hpcc_output: A string containing the text of hpccoutf.txt.
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of samples to be published (in the same format as Run() returns).
"""
results = []
metadata = dict()
match = re.search('HPLMaxProcs=([0-9]*)', hpcc_output)
metadata['num_cpus'] = match.group(1)
metadata['num_machines'] = len(benchmark_spec.vms)
metadata['memory_size_mb'] = FLAGS.memory_size_mb
value = regex_util.ExtractFloat('HPL_Tflops=([0-9]*\\.[0-9]*)', hpcc_output)
results.append(sample.Sample('HPL Throughput', value, 'Tflops', metadata))
value = regex_util.ExtractFloat('SingleRandomAccess_GUPs=([0-9]*\\.[0-9]*)',
hpcc_output)
results.append(sample.Sample('Random Access Throughput', value,
'GigaUpdates/sec'))
for metric in STREAM_METRICS:
regex = 'SingleSTREAM_%s=([0-9]*\\.[0-9]*)' % metric
value = regex_util.ExtractFloat(regex, hpcc_output)
results.append(sample.Sample('STREAM %s Throughput' % metric, value,
'GB/s'))
value = regex_util.ExtractFloat(r'PTRANS_GBs=([0-9]*\.[0-9]*)', hpcc_output)
results.append(sample.Sample('PTRANS Throughput', value, 'GB/s', metadata))
return results
def Run(benchmark_spec):
"""Run HPCC on the cluster.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
vms = benchmark_spec.vms
master_vm = vms[0]
num_processes = len(vms) * master_vm.num_cpus
mpi_cmd = ('mpirun -np %s -machinefile %s --mca orte_rsh_agent '
'"ssh -o StrictHostKeyChecking=no" ./hpcc' %
(num_processes, MACHINEFILE))
master_vm.RobustRemoteCommand(mpi_cmd)
logging.info('HPCC Results:')
stdout, _ = master_vm.RemoteCommand('cat hpccoutf.txt', should_log=True)
return ParseOutput(stdout, benchmark_spec)
def Cleanup(benchmark_spec):
"""Cleanup HPCC on the cluster.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
master_vm = vms[0]
master_vm.RemoveFile('hpcc*')
master_vm.RemoveFile(MACHINEFILE)
for vm in vms[1:]:
vm.RemoveFile('hpcc')
vm.RemoveFile('/usr/bin/orted')
| mathiasertl/django-ca | ca/django_ca/acme/messages.py | Python | gpl-3.0 | 2,539 | 0.005908 |
# This file is part of django-ca (https://github.com/mathiasertl/django-ca).
#
# django-ca is free software: you can redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# django-ca is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along with django-ca. If not, see
# <http://www.gnu.org/licenses/>.
"""Specialized variants of ACME message classes."""
import josepy as jose
from acme import fields
from acme import messages
identifiers_decoder = messages.Order._fields["identifiers"].fdec # pylint: disable=no-member; false positive
class Order(messages.Order):
"""An object describing an ACME order.
This class adds the not_before/not_after field to :py:class:`acme:acme.messages.Order`.
"""
not_before = fields.RFC3339Field("notBefore", omitempty=True)
not_after = fields.RFC3339Field("notAfter", omitempty=True)
class NewOrder(messages.ResourceBody):
"""An object describing a new order.
This class differs from :py:class:`acme:acme.messages.NewOrder` in that the fields for this message are
the subset of fields described for the ``newOrder`` resource in RFC 8555, section 7.4. Unlike in the ACME
class, the `identifiers` field is mandatory, while the `not_before` and `not_after` fields are added.
.. seealso:: `RFC 8555, section 7.4 <https://tools.ietf.org/html/rfc8555#section-7.4>`__
"""
resource_type = messages.NewOrder.resource_type
identifiers = jose.json_util.Field("identifiers", omitempty=False, decoder=identifiers_decoder)
not_before = fields.RFC3339Field("notBefore", omitempty=True)
not_after = fields.RFC3339Field("notAfter", omitempty=True)
class CertificateRequest(messages.ResourceBody):
"""ACME message expected when finalizing an order.
This class differs from :py:class:`acme:acme.messages.CertificateRequest` in that it does not set the
resource type.
.. seealso:: `RFC 8555, section 7.4 <https://tools.ietf.org/html/rfc8555#section-7.4>`__
"""
resource_type = messages.CertificateRequest.resource_type
csr = jose.json_util.Field("csr", decoder=jose.json_util.decode_csr, encoder=jose.json_util.encode_csr)
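if __name__ == "__main__":
    # Hedged sketch (not part of the original file): parsing a newOrder payload
    # (RFC 8555, section 7.4) with the subclass above. The payload is made up;
    # assumes josepy and acme are importable.
    payload = {"identifiers": [{"type": "dns", "value": "example.com"}]}
    order = NewOrder.from_json(payload)
    print(order.identifiers)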
| RegulatoryGenomicsUPF/pyicoteo | utest/testUtils.py | Python | gpl-3.0 | 566 | 0.012367 |
import unittest
from pyicoteolib.utils import DualSortedReader
from pyicoteolib.core import BED
class TestUtils(unittest.TestCase):
def test_dual_reader(self):
reader = DualSortedReader("test_files/mini_sorted.bed", "test_files/mini_sorted2.bed", BED, False, False)
merged_file = open("test_files/mini_sorted_merged.bed")
for line in reader:
if line:
self.assertEqual(line, next(merged_file))
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestUtils))
return suite
| engdan77/edoAutoHomeMobile | twisted/lore/test/test_lore.py | Python | mit | 42,212 | 0.002203 |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
# ++ single anchor added to individual output file
# ++ two anchors added to individual output file
# ++ anchors added to individual output files
# ++ entry added to index
# ++ index entry pointing to correct file and anchor
# ++ multiple entries added to index
# ++ multiple index entries pointing to correct files and anchors
# __ all of above for files in deep directory structure
#
# ++ group index entries by indexed term
# ++ sort index entries by indexed term
# __ hierarchical index entries (e.g. language!programming)
#
# ++ add parameter for what the index filename should be
# ++ add (default) ability to NOT index (if index not specified)
#
# ++ put actual index filename into INDEX link (if any) in the template
# __ make index links RELATIVE!
# __ make index pay attention to the outputdir!
#
# __ make index look nice
#
# ++ add section numbers to headers in lore output
# ++ make text of index entry links be chapter numbers
# ++ make text of index entry links be section numbers
#
# __ put all of our test files someplace neat and tidy
#
import os, shutil, errno, time
from StringIO import StringIO
from xml.dom import minidom as dom
from twisted.trial import unittest
from twisted.python.filepath import FilePath
from twisted.lore import tree, process, indexer, numberer, htmlbook, default
from twisted.lore.default import factory
from twisted.lore.latex import LatexSpitter
from twisted.python.util import sibpath
from twisted.lore.scripts import lore
from twisted.web import domhelpers
from twisted.test.testutils import XMLAssertionMixin
def sp(originalFileName):
return sibpath(__file__, originalFileName)
options = {"template" : sp("template.tpl"), 'baseurl': '%s', 'ext': '.xhtml'}
d = options
class RemoveBlanksTests(unittest.TestCase):
"""
Tests for L{tree._removeLeadingBlankLines} and
L{tree._removeLeadingTrailingBlankLines}.
"""
def setUp(self):
self.inputString = '\n\n\n\nfoo\nbar\n\n\n'
def test_removeLeadingBlankLines(self):
"""
L{tree._removeLeadingBlankLines} removes leading blank lines from a string and returns a list containing the remaining characters.
"""
result = tree._removeLeadingBlankLines(self.inputString)
self.assertEqual(result,
['f', 'o', 'o', '\n', 'b', 'a', 'r', '\n', '\n', '\n'])
def test_removeLeadingTrailingBlankLines(self):
"""
L{tree._removeLeadingTrailingBlankLines} removes leading and trailing
blank lines from a string and returns a string with all lines joined.
"""
result = tree._removeLeadingTrailingBlankLines(self.inputString)
self.assertEqual(result, 'foo\nbar\n')
class TestFactory(unittest.TestCase, XMLAssertionMixin):
file = sp('simple.html')
linkrel = ""
def assertEqualFiles1(self, exp, act):
if (exp == act): return True
fact = open(act)
self.assertEqualsFile(exp, fact.read())
def assertEqualFiles(self, exp, act):
if (exp == act): return True
fact = open(sp(act))
self.assertEqualsFile(exp, fact.read())
def assertEqualsFile(self, exp, act):
expected = open(sp(exp)).read()
self.assertEqual(expected, act)
def makeTemp(self, *filenames):
tmp = self.mktemp()
os.mkdir(tmp)
for filename in filenames:
tmpFile = os.path.join(tmp, filename)
shutil.copyfile(sp(filename), tmpFile)
return tmp
########################################
def setUp(self):
indexer.reset()
numberer.reset()
def testProcessingFunctionFactory(self):
base = FilePath(self.mktemp())
base.makedirs()
simple = base.child('simple.html')
FilePath(__file__).sibling('simple.html').copyTo(simple)
htmlGenerator = factory.generate_html(options)
htmlGenerator(simple.path, self.linkrel)
self.assertXMLEqual(
"""\
<?xml version="1.0" ?><!DOCTYPE html PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN' 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
<head><title>Twisted Documentation: My Test Lore Input</title></head>
<body bgcolor="white">
<h1 class="title">My Test Lore Input</h1>
<div class="content">
<span/>
<p>A Body.</p>
</div>
<a href="index.xhtml">Index</a>
</body>
</html>""",
simple.sibling('simple.xhtml').getContent())
def testProcessingFunctionFactoryWithFilenameGenerator(self):
base = FilePath(self.mktemp())
base.makedirs()
def filenameGenerator(originalFileName, outputExtension):
name = os.path.splitext(FilePath(originalFileName).basename())[0]
return base.child(name + outputExtension).path
htmlGenerator = factory.generate_html(options, filenameGenerator)
htmlGenerator(self.file, self.linkrel)
self.assertXMLEqual(
"""\
<?xml version="1.0" ?><!DOCTYPE html PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN' 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
<head><title>Twisted Documentation: My Test Lore Input</title></head>
<body bgcolor="white">
<h1 class="title">My Test Lore Input</h1>
<div class="content">
<span/>
<p>A Body.</p>
</div>
<a href="index.xhtml">Index</a>
</body>
</html>""",
base.child("simple.xhtml").getContent())
def test_doFile(self):
base = FilePath(self.mktemp())
base.makedirs()
simple = base.child('simple.html')
FilePath(__file__).sibling('simple.html').copyTo(simple)
templ = dom.parse(open(d['template']))
tree.doFile(simple.path, self.linkrel, d['ext'], d['baseurl'], templ, d)
self.assertXMLEqual(
"""\
<?xml version="1.0" ?><!DOCTYPE html PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN' 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
<head><title>Twisted Documentation: My Test Lore Input</title></head>
<body bgcolor="white">
<h1 class="title">My Test Lore Input</h1>
<div class="content">
<span/>
<p>A Body.</p>
</div>
<a href="index.xhtml">Index</a>
</body>
</html>""",
base.child("simple.xhtml").getContent())
def test_doFile_withFilenameGenerator(self):
base = FilePath(self.mktemp())
base.makedirs()
def filenameGenerator(originalFileName, outputExtension):
name = os.path.splitext(FilePath(originalFileName).basename())[0]
return base.child(name + outputExtension).path
templ = dom.parse(open(d['template']))
tree.doFile(self.file, self.linkrel, d['ext'], d['baseurl'], templ, d, filenameGenerator)
self.assertXMLEqual(
"""\
<?xml version="1.0" ?><!DOCTYPE html PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN' 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
<head><title>Twisted Documentation: My Test Lore Input</title></head>
<body bgcolor="white">
<h1 class="title">My Test Lore Input</h1>
<div class="content">
<span/>
<p>A Body.</p>
</div>
<a href="index.xhtml">Index</a>
</body>
</html>""",
base.child("simple.xhtml").getContent())
def test_munge(self):
indexer.setIndexFilename("lore_index_file.html")
doc = dom.parse(open(self.file))
node = dom.parse(open(d['template']))
tree.munge(doc, node, self.linkrel,
os.path.dirname(self.file),
self.file,
d['ext'], d['baseurl'], d)
self.assertXMLEqual(
"""\
<?xml version="1.0" ?><!DOCTYPE html PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN' 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
<head><title>Twisted Documentation: My Test Lore Input</title></head>
<body bgcolor="white">
<h1 class="title">My Test Lor
| openstates/openstates.org | dashboards/urls.py | Python | mit | 685 | 0.00292 |
from django.urls import path, re_path
from .views import (
user_overview,
api_overview,
dq_overview,
dq_overview_session,
dqr_listing,
)
from utils.common import states
# Only allow valid state abbreviations
state_abbrs = [s.abbr.lower() for s in states]
state_abbr_pattern = r"({})".format("|".join(state_abbrs))
urlpatterns = [
path("users/", user_overview),
path("api/", api_overview),
path("dq_dashboard/", dqr_listing),
re_path(r"^dq_overview/(?P<state>{})/$".format(state_abbr_pattern), dq_overview),
re_path(
r"^dq_overview/(?P<state>{})/(?P<session>[-\w ]+)/$".format(state_abbr_pattern),
dq_overview_session,
),
]
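# Hedged illustration (not part of the original file): with the patterns above,
# /dq_overview/nc/ resolves to dq_overview and /dq_overview/nc/2019/ resolves
# to dq_overview_session, assuming "nc" is among the valid state abbreviations.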
| hazelnusse/sympy-old | sympy/core/numbers.py | Python | bsd-3-clause | 44,106 | 0.004285 |
from basic import Atom, SingletonMeta, S, Basic
from decorators import _sympifyit
from cache import Memoizer, MemoizerArg
import sympy.mpmath as mpmath
import sympy.mpmath.libmpf as mlib
import sympy.mpmath.libmpc as mlibc
from sympy.mpmath.libelefun import mpf_pow, mpf_pi, mpf_e, phi_fixed
import decimal
rnd = mlib.round_nearest
# TODO: we should use the warnings module
_errdict = {"divide": False}
def seterr(divide=False):
"""
Should sympy raise an exception on 0/0 or return a nan?
divide == True .... raise an exception
divide == False ... return nan
"""
_errdict["divide"] = divide
# (a,b) -> gcd(a,b)
_gcdcache = {}
# TODO caching with decorator, but not to degrade performance
def igcd(a, b):
"""Computes integer greates common divisor of two numbers.
The algorithm is based on the well known Euclid's algorithm. To
improve speed, igcd() has its own caching mechanizm implemented.
"""
try:
return _gcdcache[(a,b)]
except KeyError:
if a and b:
if b < 0:
b = -b
while b:
a, b = b, a % b
else:
a = abs(a or b)
_gcdcache[(a,b)] = a
return a
def ilcm(a, b):
"""Computes integer least common multiple of two numbers. """
if a == 0 and b == 0:
return 0
else:
return a * b // igcd(a, b)
def igcdex(a, b):
"""Returns x, y, g such that g = x*a + y*b = gcd(a, b).
>>> igcdex(2, 3)
(-1, 1, 1)
>>> igcdex(10, 12)
(-1, 1, 2)
>>> x, y, g = igcdex(100, 2004)
>>> x, y, g
(-20, 1, 4)
>>> x*100 + y*2004
4
"""
if (not a) and (not b):
return (0, 1, 0)
if not a:
return (0, b//abs(b), abs(b))
if not b:
return (a//abs(a), 0, abs(a))
if a < 0:
a, x_sign = -a, -1
else:
x_sign = 1
if b < 0:
b, y_sign = -b, -1
else:
y_sign = 1
x, y, r, s = 1, 0, 0, 1
while b:
(c, q) = (a % b, a // b)
(a, b, r, s, x, y) = (b, c, x-q*r, y-q*s, r, s)
return (x*x_sign, y*y_sign, a)
@Memoizer((int, long), return_value_converter = lambda d: d.copy())
def factor_trial_division(n):
"""
Factor any integer into a product of primes, 0, 1, and -1.
Returns a dictionary {<prime: exponent>}.
"""
if not n:
return {0:1}
factors = {}
if n < 0:
factors[-1] = 1
n = -n
if n==1:
factors[1] = 1
return factors
d = 2
while n % d == 0:
try:
factors[d] += 1
except KeyError:
factors[d] = 1
n //= d
d = 3
while n > 1 and d*d <= n:
if n % d:
d += 2
else:
try:
factors[d] += 1
except KeyError:
factors[d] = 1
n //= d
if n>1:
try:
factors[n] += 1
except KeyError:
factors[n] = 1
return factors
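# Hedged illustration (not in the original file), following the logic above:
# factor_trial_division(360) -> {2: 3, 3: 2, 5: 1}
# factor_trial_division(-12) -> {-1: 1, 2: 2, 3: 1}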
class Number(Atom):
"""
Represents any kind of number in sympy.
Floating point numbers are represented by the Real class.
Integer numbers (of any size), together with rational numbers (again, there
is no limit on their size) are represented by the Rational class.
If you want to represent, for example, 1+sqrt(2), then you need to write:
Rational(1) + sqrt(Rational(2))
"""
is_commutative = True
is_comparable = True
is_bounded = True
is_finite = True
__slots__ = []
# Used to make max(x._prec, y._prec) return x._prec when only x is a float
_prec = -1
is_Number = True
def __new__(cls, *obj):
if len(obj)==1: obj=obj[0]
if isinstance(obj, (int, long)):
return Integer(obj)
if isinstance(obj,tuple) and len(obj)==2:
return Rational(*obj)
if isinstance(obj, (str,float,mpmath.mpf,decimal.Decimal)):
return Real(obj)
if isinstance(obj, Number):
return obj
raise TypeError("expected str|int|long|float|Decimal|Number object but got %r" % (obj))
def _as_mpf_val(self, prec):
"""Evaluate to mpf tuple accurate to at least prec bits"""
raise NotImplementedError('%s needs ._as_mpf_val() method' % \
(self.__class__.__name__))
def _eval_evalf(self, prec):
return Real._new(self._as_mpf_val(prec), prec)
def _as_mpf_op(self, prec):
prec = max(prec, self._prec)
return self._as_mpf_val(prec), prec
def __float__(self):
return mlib.to_float(self._as_mpf_val(53))
def _eval_derivative(self, s):
return S.Zero
def _eval_conjugate(self):
return self
def _eval_order(self, *symbols):
# Order(5, x, y) -> Order(1,x,y)
return C.Order(S.One, *symbols)
def __eq__(self, other):
raise NotImplementedError('%s needs .__eq__() method' % (self.__class__.__name__))
def __ne__(self, other):
raise NotImplementedError('%s needs .__ne__() method' % (self.__class__.__name__))
def __lt__(self, other):
raise NotImplementedError('%s needs .__lt__() method' % (self.__class__.__name__))
def __le__(self, other):
raise NotImplementedError('%s needs .__le__() method' % (self.__class__.__name__))
def __gt__(self, other):
return _sympify(other).__lt__(self)
def __ge__(self, other):
return _sympify(other).__le__(self)
def as_coeff_terms(self, x=None):
# a -> c * t
return self, tuple()
class Real(Number):
"""
Represents a floating point number. It is capable of representing
arbitrary-precision floating-point numbers
Usage:
======
Real(3.5) .... 3.5 (the 3.5 was converted from a python float)
Real("3.0000000000000005")
Notes:
======
- Real(x) with x being a Python int/long will return Integer(x)
"""
is_real = True
is_irrational = False
is_integer = False
__slots__ = ['_mpf_', '_prec']
# mpz can't be pickled
def __getnewargs__(self):
return (mlib.to_pickable(self._mpf_),)
def __getstate__(self):
d = Basic.__getstate__(self).copy()
del d["_mpf_"]
return mlib.to_pickable(self._mpf_), d
def __setstate__(self, state):
_mpf_, d = state
_mpf_ = mlib.from_pickable(_mpf_)
self._mpf_ = _mpf_
Basic.__setstate__(self, d)
is_Real = True
def floor(self):
return C.Integer(int(mlib.to_int(mlib.mpf_floor(self._mpf_, self._prec))))
def ceiling(self):
return C.Integer(int(mlib.to_int(mlib.mpf_ceil(self._mpf_, self._prec))))
@property
def num(self):
return mpmath.mpf(self._mpf_)
def _as_mpf_val(self, prec):
return self._mpf_
def _as_mpf_op(self, prec):
return self._mpf_, max(prec, self._prec)
def __new__(cls, num, prec=15):
prec = mpmath.settings.dps_to_prec(prec)
if isinstance(num, (int, long)):
return Integer(num)
if isinstance(num, (str, decimal.Decimal)):
_mpf_ = mlib.from_str(str(num), prec, rnd)
elif isinstance(num, tuple) and len(num) == 4:
_mpf_ = num
else:
_mpf_ = mpmath.mpf(num)._mpf_
if not num:
return C.Zero()
obj = Basic.__new__(cls)
obj._mpf_ = _mpf_
obj._prec = prec
return obj
@classmethod
def _new(cls, _mpf_, _prec):
if _mpf_ == mlib.fzero:
return S.Zero
obj = Basic.__new__(cls)
obj._mpf_ = _mpf_
obj._prec = _prec
return obj
def _hashable_content(self):
return (self._mpf_, self._prec)
def _eval_is_positive(self):
return self.num > 0
def _eval_is_negative(self):
return self.num < 0
def __neg__(self):
return Real._new(mlib.mpf_neg(self._mpf_), self._prec)
@_sympifyit('other', NotImplemented)
def __mul__(self, other):
if isinstance(other, Number):
rhs, prec = other._as_mpf_op(self._prec)
return Real._new(mlib.mpf_mul(self._mpf_, rhs
| freedomtan/tensorflow | tensorflow/python/framework/sparse_tensor.py | Python | apache-2.0 | 17,827 | 0.005441 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Sparse tensors."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
from tensorflow.python import pywrap_tensorflow # pylint: disable=unused-import
from tensorflow.python import _pywrap_utils
from tensorflow.python import tf2
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import type_spec
from tensorflow.python.ops import gen_sparse_ops
from tensorflow.python.types import internal
from tensorflow.python.util.tf_export import tf_export
# pylint: disable=protected-access
_eval_using_default_session = ops._eval_using_default_session
_override_helper = ops._override_helper
# pylint: enable=protected-access
@tf_export("sparse.SparseTensor", "SparseTensor")
class SparseTensor(internal.NativeObject, composite_tensor.CompositeTensor):
"""Represents a sparse tensor.
TensorFlow represents a sparse tensor as three separate dense tensors:
`indices`, `values`, and `dense_shape`. In Python, the three tensors are
collected into a `SparseTensor` class for ease of use. If you have separate
`indices`, `values`, and `dense_shape` tensors, wrap them in a `SparseTensor`
object before passing to the ops below.
Concretely, the sparse tensor `SparseTensor(indices, values, dense_shape)`
comprises the following components, where `N` and `ndims` are the number
of values and number of dimensions in the `SparseTensor`, respectively:
* `indices`: A 2-D int64 tensor of shape `[N, ndims]`, which specifies the
indices of the elements in the sparse tensor that contain nonzero values
(elements are zero-indexed). For example, `indices=[[1,3], [2,4]]` specifies
that the elements with indexes of [1,3] and [2,4] have nonzero values.
* `values`: A 1-D tensor of any type and shape `[N]`, which supplies the
values for each element in `indices`. For example, given `indices=[[1,3],
[2,4]]`, the parameter `values=[18, 3.6]` specifies that element [1,3] of
the sparse tensor has a value of 18, and element [2,4] of the tensor has a
value of 3.6.
* `dense_shape`: A 1-D int64 tensor of shape `[ndims]`, which specifies the
dense_shape of the sparse tensor. Takes a list indicating the number of
elements in each dimension. For example, `dense_shape=[3,6]` specifies a
two-dimensional 3x6 tensor, `dense_shape=[2,3,4]` specifies a
three-dimensional 2x3x4 tensor, and `dense_shape=[9]` specifies a
one-dimensional tensor with 9 elements.
The corresponding dense tensor satisfies:
```python
dense.shape = dense_shape
dense[tuple(indices[i])] = values[i]
```
By convention, `indices` should be sorted in row-major order (or equivalently
lexicographic order on the tuples `indices[i]`). This is not enforced when
`SparseTensor` objects are constructed, but most ops assume correct ordering.
If the ordering of sparse tensor `st` is wrong, a fixed version can be
obtained by calling `tf.sparse.reorder(st)`.
Example: The sparse tensor
```python
SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], dense_shape=[3, 4])
```
represents the dense tensor
```python
[[1, 0, 0, 0]
[0, 0, 2, 0]
[0, 0, 0, 0]]
```
"""
@classmethod
def from_value(cls, sparse_tensor_value):
if not is_sparse(sparse_tensor_value):
raise TypeError("Neither a SparseTensor nor SparseTensorValue: %s." %
sparse_tensor_value)
return SparseTensor(
indices=sparse_tensor_value.indices,
values=sparse_tensor_value.values,
dense_shape=sparse_tensor_value.dense_shape)
def __init__(self, indices, values, dense_shape):
"""Creates a `SparseTensor`.
Args:
indices: A 2-D int64 tensor of shape `[N, ndims]`.
values: A 1-D tensor of any type and shape `[N]`.
dense_shape: A 1-D int64 tensor of shape `[ndims]`.
Raises:
ValueError: When building an eager SparseTensor if `dense_shape` is
unknown or contains unknown elements (None or -1).
"""
with ops.name_scope(None, "SparseTensor", [indices, values, dense_shape]):
indices = ops.convert_to_tensor(
indices, name="indices", dtype=dtypes.int64)
# TODO(touts): Consider adding mutable_values() when 'values'
# is a VariableOp and updating users of SparseTensor.
values = ops.convert_to_tensor(values, name="values")
dense_shape = ops.convert_to_tensor(
dense_shape, name="dense_shape", dtype=dtypes.int64)
dense_shape_default = tensor_util.constant_value_as_shape(dense_shape)
self._indices = indices
self._values = values
self._dense_shape = dense_shape
self._dense_shape_default = dense_shape_default
indices_shape = indices.shape.with_rank(2)
values_shape = values.shape.with_rank(1)
dense_shape_shape = dense_shape.shape.with_rank(1)
# Assert number of rows in indices match the number of elements in values.
indices_shape.dims[0].assert_is_compatible_with(values_shape.dims[0])
# Assert number of columns in indices matches the number of elements in
# dense_shape.
indices_shape.dims[1].assert_is_compatible_with(dense_shape_shape.dims[0])
def get_shape(self):
"""Get the `TensorShape` representing the shape of the dense tensor.
Returns:
A `TensorShape` object.
"""
return self._dense_shape_default
@property
def indices(self):
"""The indices of non-zero values in the represented dense tensor.
Returns:
A 2-D Tensor of int64 with dense_shape `[N, ndims]`, where `N` is the
number of non-zero values in the tensor, and `ndims` is the rank.
"""
return self._indices
@property
def values(self):
"""The non-zero values in the represented dense tensor.
Returns:
A 1-D Tensor of any data type.
"""
return self._values
def with_values(self, new_values):
"""Returns a copy of `self` with `values` replaced by `new_values`.
This method produces a new `SparseTensor` that has the same nonzero
`indices` and same `dense_shape`, but updated values.
Args:
new_values: The values of the new `SparseTensor`. Needs to have the same
shape as the current `.values` `Tensor`. May have a different type than
the current `values`.
Returns:
A `SparseTensor` with identical indices and shape but updated values.
Example usage:
>>> st = tf.sparse.from_dense([[1, 0, 2, 0], [3, 0, 0, 4]])
>>> tf.sparse.to_dense(st.with_values([10, 20, 30, 40])) # 4 nonzero values
<tf.Tensor: shape=(2, 4), dtype=int32, numpy=
array([[10, 0, 20, 0],
[30, 0, 0, 40]], dtype=int32)>
"""
return SparseTensor(self._indices, new_values, self._dense_shape)
@property
def op(self):
"""The `Operation` that produces `values` as an output."""
return self._values.op
@property
def dtype(self):
"""The `DType` of elements in this tensor."""
return self._values.dtype
@property
def dense_shape(self):
"""A 1-D Tensor of int64 representing the shape of the dense tensor."""
return self._dense_shape
| rockdreamer/redmine-docs-to-drive | model.py | Python | mit | 12,096 | 0.003142 |
# coding: utf-8
from sqlalchemy import Column, DateTime, Index, Integer, String, Text, text, \
ForeignKey
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
metadata = Base.metadata
class RedmineToDriveMapping(Base):
__tablename__ = 'redmine_to_drive_mapping'
__table_args__ = (
Index('index_mapping_on_redmine_id_and_type', 'mapping_type', 'redmine_id', unique=True),
)
id = Column(Integer, primary_key=True)
mapping_type = Column(String(255))
drive_id = Column(String(255))
redmine_id = Column(Integer)
last_update = Column(DateTime)
__mapper_args__ = {
'polymorphic_on': mapping_type,
'polymorphic_identity': 'redmine_to_drive_mapping',
'with_polymorphic': '*'
}
class RedmineDmsfFolderToDriveMapping(RedmineToDriveMapping):
__mapper_args__ = {'polymorphic_identity': 'dmsf_folder'}
class RedmineDmsfFileToDriveMapping(RedmineToDriveMapping):
__mapper_args__ = {'polymorphic_identity': 'dmsf_file'}
class RedmineDmsfFileRevisionToDriveMapping(RedmineToDriveMapping):
__mapper_args__ = {'polymorphic_identity': 'dmsf_file_revision'}
class RedmineBasedirToDriveMapping(RedmineToDriveMapping):
__mapper_args__ = {'polymorphic_identity': 'basedir'}
class RedmineProjectToDriveMapping(RedmineToDriveMapping):
__mapper_args__ = {'polymorphic_identity': 'project'}
class RedmineProjectDocumentsToDriveMapping(RedmineToDriveMapping):
__mapper_args__ = {'polymorphic_identity': 'project_docs'}
class RedmineProjectDmsfToDriveMapping(RedmineToDriveMapping):
    __mapper_args__ = {'polymorphic_identity': 'project_dmsf'}
class RedmineDocumentToDriveMapping(RedmineToDriveMapping):
__mapper_args__ = {'polymorphic_identity': 'document'}
class RedmineDocumentAttachmentToDriveMapping(RedmineToDriveMapping):
__mapper_args__ = {'polymorphic_identity': 'document_attachment'}
class Attachment(Base):
    __tablename__ = 'attachments'
id = Column(Integer, primary_key=True)
container_type = Column(String(30))
container_id = Column(Integer)
filename = Column(String(255), nullable=False, server_default=text("''"))
disk_filename = Column(String(255), nullable=False, server_default=text("''"))
filesize = Column(Integer, nullable=False, server_default=text("'0'"))
content_type = Column(String(255), server_default=text("''"))
digest = Column(String(40), nullable=False, server_default=text("''"))
downloads = Column(Integer, nullable=False, server_default=text("'0'"))
author_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True, server_default=text("'0'"))
created_on = Column(DateTime, index=True)
description = Column(String(255))
disk_directory = Column(String(255))
__mapper_args__ = {'polymorphic_on': container_type}
class DocumentAttachment(Attachment):
__mapper_args__ = {'polymorphic_identity': 'Document'}
document = relationship("Document",
backref="attachments",
primaryjoin="Document.id == DocumentAttachment.container_id",
foreign_keys='DocumentAttachment.container_id')
drive = relationship("RedmineDocumentAttachmentToDriveMapping",
backref="attachment",
primaryjoin="RedmineDocumentAttachmentToDriveMapping.redmine_id == DocumentAttachment.id",
foreign_keys='RedmineDocumentAttachmentToDriveMapping.redmine_id')
class DmsfFileRevision(Base):
__tablename__ = 'dmsf_file_revisions'
id = Column(Integer, primary_key=True)
dmsf_file_id = Column(Integer, ForeignKey('dmsf_files.id'), nullable=False)
source_dmsf_file_revision_id = Column(Integer, ForeignKey('dmsf_file_revisions.id'))
name = Column(String(255), nullable=False)
dmsf_folder_id = Column(Integer, ForeignKey('dmsf_folders.id'))
disk_filename = Column(String(255), nullable=False)
size = Column(Integer)
mime_type = Column(String(255))
title = Column(String(255))
description = Column(Text)
workflow = Column(Integer)
major_version = Column(Integer, nullable=False)
minor_version = Column(Integer, nullable=False)
comment = Column(Text)
deleted = Column(Integer, nullable=False, server_default=text("'0'"))
deleted_by_user_id = Column(Integer)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
created_at = Column(DateTime)
updated_at = Column(DateTime)
project_id = Column(Integer, ForeignKey('projects.id'), nullable=False)
drive = relationship("RedmineDmsfFileRevisionToDriveMapping",
backref="file_revision",
primaryjoin="RedmineDmsfFileRevisionToDriveMapping.redmine_id == DmsfFileRevision.id",
foreign_keys='RedmineDmsfFileRevisionToDriveMapping.redmine_id')
class DmsfFile(Base):
__tablename__ = 'dmsf_files'
id = Column(Integer, primary_key=True)
project_id = Column(Integer, ForeignKey('projects.id'), nullable=False)
dmsf_folder_id = Column(Integer, ForeignKey('dmsf_folders.id'))
name = Column(String(255), nullable=False)
notification = Column(Integer, nullable=False, server_default=text("'0'"))
deleted = Column(Integer, nullable=False, server_default=text("'0'"))
deleted_by_user_id = Column(Integer, ForeignKey('users.id'))
created_at = Column(DateTime)
updated_at = Column(DateTime)
revisions = relationship('DmsfFileRevision', backref='dmsf_file',
order_by='DmsfFileRevision.major_version, DmsfFileRevision.minor_version')
drive = relationship("RedmineDmsfFileToDriveMapping",
backref="file",
primaryjoin="RedmineDmsfFileToDriveMapping.redmine_id == DmsfFile.id",
foreign_keys='RedmineDmsfFileToDriveMapping.redmine_id')
class DmsfFolder(Base):
__tablename__ = 'dmsf_folders'
id = Column(Integer, primary_key=True)
project_id = Column(Integer, ForeignKey('projects.id'), nullable=False)
dmsf_folder_id = Column(Integer, ForeignKey('dmsf_folders.id'))
title = Column(String(255), nullable=False)
description = Column(Text)
notification = Column(Integer, nullable=False, server_default=text("'0'"))
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
created_at = Column(DateTime)
updated_at = Column(DateTime)
child_folders = relationship("DmsfFolder", backref=backref('parent', remote_side=[id]))
files = relationship('DmsfFile', backref=backref('folder', remote_side=[id]), order_by='DmsfFile.id')
revisions = relationship('DmsfFileRevision', backref=backref('folder', remote_side=[id]),
order_by='DmsfFileRevision.id')
drive = relationship("RedmineDmsfFolderToDriveMapping",
backref="file_revision",
primaryjoin="RedmineDmsfFolderToDriveMapping.redmine_id == DmsfFolder.id",
foreign_keys='RedmineDmsfFolderToDriveMapping.redmine_id')
class Document(Base):
__tablename__ = 'documents'
id = Column(Integer, primary_key=True)
project_id = Column(Integer, ForeignKey('projects.id'), nullable=False, index=True, server_default=text("'0'"))
category_id = Column(Integer, nullable=False, index=True, server_default=text("'0'"))
title = Column(String(60), nullable=False, server_default=text("''"))
description = Column(Text)
created_on = Column(DateTime, index=True)
drive = relationship("RedmineDocumentToDriveMapping",
backref="document",
primaryjoin="RedmineDocumentToDriveMapping.redmine_id == Document.id",
foreign_keys='RedmineDocumentToDriveMapping.redmine_id')
class MemberRole(Base):
__tablename__ = 'member_roles'
id = Column(Integer, primary_key=True)
member_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True)
role_id = Column(Integer, nullable=False, index=True)
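# --- Hedged usage sketch (not part of the original file): querying the
# polymorphic mapping table; the SQLite URL below is hypothetical.
if __name__ == '__main__':
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine('sqlite:///redmine_to_drive.db')  # hypothetical path
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    # polymorphic_identity makes this filter on mapping_type == 'dmsf_folder'.
    print(session.query(RedmineDmsfFolderToDriveMapping).count())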
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractGhosttouchWordpressCom.py
|
Python
|
bsd-3-clause
| 560
| 0.033929
|
def extractGhosttouchWordpressCom(item):
'''
Parser for 'ghosttouch.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
			return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
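# --- Hedged usage sketch (not part of the original file). The helpers
# extractVolChapterFragmentPostfix and buildReleaseMessageWithType are assumed
# to be provided by the surrounding framework; the item dict is invented.
if __name__ == '__main__':
    item = {'title': 'PRC Chapter 12', 'tags': ['PRC']}
    print(extractGhosttouchWordpressCom(item))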
|
sethshill/final
|
build/lib.linux-armv7l-2.7/bibliopixel/log.py
|
Python
|
mit
| 1,041
| 0.000961
|
import logging, sys
from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL
class InfoFilter(logging.Filter):
def filter(self, rec):
return rec.levelno in (logging.DEBUG, logging.INFO)
def _new_custom_logger(name='BiblioPixel',
fmt='%(levelname)s - %(module)s - %(message)s'):
logger = logging.getLogger(name)
formatter = logging.Formatter(fmt=fmt)
if len(logger.handlers) == 0:
logger.setLevel(logging.INFO)
h1 = logging.StreamHandler(sys.stdout)
h1.setLevel(logging.DEBUG)
h1.addFilter(InfoFilter())
h1.setFormatter(formatter)
h2 = logging.StreamHandler(sys.stderr)
h2.setLevel(logging.WARNING)
        h2.setFormatter(formatter)
logger.addHandler(h1)
logger.addHandler(h2)
return logger
logger = _new_custom_logger()
setLogLevel = logger.setLevel
debug, info, warning, error, critical, exception = (
logger.debug, logger.info, logger.warning, logger.error, logger.critical,
logger.exception)
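# --- Hedged usage sketch (not part of the original file): INFO and DEBUG go
# to stdout through h1 (via InfoFilter), WARNING and above to stderr via h2.
if __name__ == '__main__':
    info('starting up with %d pixels', 240)
    warning('brightness clamped')
    setLogLevel(DEBUG)
    debug('debug output now visible')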
|
ClearCorp/odoo-costa-rica
|
TODO-9.0/l10n_cr_account_trial_balance_currency_report/__openerp__.py
|
Python
|
agpl-3.0
| 2,013
| 0.002981
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Trial Balance Currency Report',
'version': '1.0',
'author': 'ClearCorp',
'category': 'Finance',
'description': """
Trial Balance Currency Report.
==============================
Create the Trial Balance Currency report
Configuration:
1. Configure the account types that will appear in the wizard.
   This is done in Accounting -> Financial Reports -> Account Reports: in the
   field "Base Catalog Account Type", create a new Base Catalog Account Type
   with code "TRIBACU".
""",
'website': "http://clearcorp.co.cr",
'depends': ['account_report_lib',
'report',
'account',
],
'data': [
'report/report.xml',
'wizard/l10n_cr_account_trial_balance_wizard_view.xml',
'report_menus.xml',
'views/report_trial_balance_currency.xml',
],
'active': False,
'installable': True,
'license': 'AGPL-3',
}
|
leifos/pagefetch
|
pagefetch_project/pagefetch/game_leaderboards.py
|
Python
|
mit
| 5,894
| 0.005769
|
__author__ = 'leif'
from game_models import HighScore, UserProfile
from ifind.common.utils import encode_string_to_url
from django.contrib.auth.models import User
from django.db.models import Max, Sum, Avg
# ranking based on highest total score (top x players)
# ranking of players based on level/xp
# top players in each category
# top schools ranked by average total_score of players
# boys vs girls based on average total_score
# and breakdown by age / boys vs girls
# add in school, gender, age, into UserProfile
# ranking based on the last 30 days / highest scores
class GameLeaderboard(object):
def __init__(self, top=20):
self.top = top
def get_leaderboard(self):
pass
def highscore_to_list(self, highscores):
"""
:param highscores: list of rows from HighScore
        :return: a formatted list of dicts with rank, username, score and, when available, category name and URL
"""
leaders = list()
for i, hs in enumerate(highscores):
entry = {'rank': i+1, 'username': hs.user.username, 'score': hs.highest_score}
if hs.category:
print hs.category
entry['category'] = hs.category.name
entry['category_url'] = encode_string_to_url(hs.category.name)
print entry
leaders.append(entry)
return leaders
class CatHighScoresLeaderboard(GameLeaderboard):
def get_leaderboard(self):
hs = HighScore.objects.all().order_by('-highest_score')[:self.top]
        return self.highscore_to_list(hs)
def __str__(self):
return 'Highest Category Scores'
class HighScoresLeaderboard(GameLeaderboard):
def get_leaderboard(self):
highscores = HighScore.objects.values('user').annotate(highest_score=Sum('highest_score')).order_by('-highest_score')[:self.top]
leaders = list()
for i, hs in enumerate(highscores):
username = User.objects.get(id=hs['user'])
entry = {'rank': i+1, 'username': username, 'score': hs['highest_score']}
leaders.append(entry)
return leaders
def __str__(self):
return 'Highest Overall Scores'
class SchoolLeaderboard(GameLeaderboard):
def get_leaderboard(self):
        users = UserProfile.objects.all()
schools = {}
score_list=[]
for user in users:
if user.school != "":
if user.school not in schools:
user_score = self._get_user_score_sum(user)
#list is [total_score_of_users,num_of_users]
schools[user.school] = [user_score,1]
else:
schools[user.school][0] += self._get_user_score_sum(user)
schools[user.school][1] += 1
for school, values in schools.iteritems():
dummy_user = User(username=school)
score_list.append(HighScore(user=dummy_user, highest_score=values[0]/values[1] ,category=None ))
score_list.sort(key=lambda x: x.highest_score, reverse=True)
return self.highscore_to_list(score_list)
def _get_user_score_sum(self, user):
hs = HighScore.objects.filter(user=user.user)
user_score = 0
for sc in hs:
user_score += sc.highest_score
return user_score
def __str__(self):
return 'School High Scores'
class AgeLeaderboard(GameLeaderboard):
def get_leaderboard(self):
users = UserProfile.objects.all()
age_groups = {}
score_list=[]
for user in users:
if user.age is not None:
if user.age not in age_groups:
user_score = self._get_user_score_sum(user)
#list is [total_score_of_users,num_of_users]
age_groups[user.age] = [user_score,1]
else:
age_groups[user.age][0] += self._get_user_score_sum(user)
age_groups[user.age][1] += 1
for age_group, values in age_groups.iteritems():
dummy_user = User(username=age_group)
score_list.append(HighScore(user=dummy_user, highest_score=values[0]/values[1] ,category=None ))
score_list.sort(key=lambda x: x.highest_score, reverse=True)
return self.highscore_to_list(score_list)
def _get_user_score_sum(self, user):
hs = HighScore.objects.filter(user=user.user)
user_score = 0
for sc in hs:
user_score += sc.highest_score
return user_score
def __str__(self):
return 'High Scores by age group'
class GenderLeaderboard(GameLeaderboard):
def get_leaderboard(self):
users = UserProfile.objects.all()
gender_groups = {}
score_list=[]
for user in users:
if user.gender != '':
if user.gender not in gender_groups:
user_score = self._get_user_score_sum(user)
#list is [total_score_of_users,num_of_users]
gender_groups[user.gender] = [user_score,1]
else:
gender_groups[user.gender][0] += self._get_user_score_sum(user)
gender_groups[user.gender][1] += 1
for gender_group, values in gender_groups.iteritems():
dummy_user = User(username=gender_group)
score_list.append(HighScore(user=dummy_user, highest_score=values[0]/values[1] ,category=None ))
score_list.sort(key=lambda x: x.highest_score, reverse=True)
return self.highscore_to_list(score_list)
def _get_user_score_sum(self, user):
hs = HighScore.objects.filter(user=user.user)
user_score = 0
for sc in hs:
user_score += sc.highest_score
return user_score
def __str__(self):
return 'High Scores by gender group'
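# --- Hedged usage sketch (not part of the original file); requires a
# configured Django environment with populated HighScore/UserProfile tables.
def print_all_leaderboards():
    for board_cls in (HighScoresLeaderboard, SchoolLeaderboard,
                      AgeLeaderboard, GenderLeaderboard):
        board = board_cls(top=10)
        print board
        print board.get_leaderboard()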
|
yandex/mastermind
|
src/python-mastermind/src/mastermind/errors.py
|
Python
|
gpl-2.0
| 508
| 0.003937
|
class MastermindError(Exception):
@property
def code(self):
return MASTERMIND_ERROR_CODES[type(self)]
@staticmethod
def make_error(code, msg):
if code not in MASTERMIND_ERROR_CLS:
            raise ValueError('Unknown error code {}'.format(code))
return MASTERMIND_ERROR_CLS[code](msg)
GENERAL_ERROR_CODE = 1024
MASTERMIND_ERROR_CODES = {
MastermindError: GENERAL_ERROR_CODE
}
MASTERMIND_ERROR_CLS = dict((v, k) for k, v in MASTERMIND_ERROR_CODES.iteritems())
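# --- Hedged usage sketch (not part of the original file): the two registries
# above map codes and classes to each other in both directions.
if __name__ == '__main__':
    err = MastermindError.make_error(GENERAL_ERROR_CODE, 'backend unavailable')
    assert isinstance(err, MastermindError)
    assert err.code == GENERAL_ERROR_CODE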
|
bqbn/addons-server
|
src/olympia/promoted/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 2,466
| 0.004461
|
# Generated by Django 2.2.14 on 2020-07-09 10:37
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import olympia.amo.models
class Migration(migrations.Migration):
initial = True
dependencies = [
('versions', '0008_auto_20200625_1114'),
('addons', '0014_remove_addon_view_source'),
]
operations = [
migrations.CreateModel(
name='PromotedApproval',
fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(blank=True, default=django.utils.timezone.now, editable=False)),
                ('modified', models.DateTimeField(auto_now=True)),
('group_id', models.SmallIntegerField(choices=[(1, 'Recommended'), (2, 'Verified - Tier 1'), (3, 'Verified - Tier 2'), (4, 'Line'), (5, 'Spotlight'), (6, 'Strategic')], null=True)),
('version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='promoted_approvals', to='versions.Version')),
],
bases=(olympia.amo.models.SearchMixin, olympia.amo.models.SaveUpdateMixin, models.Model),
),
migrations.CreateModel(
name='PromotedAddon',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(blank=True, default=django.utils.timezone.now, editable=False)),
('modified', models.DateTimeField(auto_now=True)),
('group_id', models.SmallIntegerField(choices=[(0, 'Not Promoted'), (1, 'Recommended'), (2, 'Verified - Tier 1'), (3, 'Verified - Tier 2'), (4, 'Line'), (5, 'Spotlight'), (6, 'Strategic')], default=0)),
('addon', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='addons.Addon')),
],
options={
'get_latest_by': 'created',
'abstract': False,
'base_manager_name': 'objects',
},
bases=(olympia.amo.models.SearchMixin, olympia.amo.models.SaveUpdateMixin, models.Model),
),
migrations.AddConstraint(
model_name='promotedapproval',
constraint=models.UniqueConstraint(fields=('group_id', 'version'), name='unique_promoted_version'),
),
]
|
banesullivan/ParaViewGeophysics
|
examples/filters-general/normalize-array.py
|
Python
|
bsd-3-clause
| 1,235
| 0
|
"""
Normalize Array
~~~~~~~~~~~~~~~
This example will demonstrate how to perform a normalization or any custom
mathematical operation on a single data array for an input data set.
# This filter allows the user to select an array from the input data set to be
normalized. The filter will append another array to that data set for the
output. The user can specify how they want to rename the array, can choose a
multiplier, and can choose from two types of common normalizations:
Feature Scaling and Standard Score.
This example demos :class:`PVGeo.filters.NormalizeArray`
"""
import numpy as np
import pyvista
from pyvista import examples
import PVGeo
from PVGeo.filters import NormalizeArray
###############################################################################
# Create some input data. This can be any `vtkDataObject`
mesh = examples.load_uniform()
title = 'Spatial Point Data'
mesh.plot(scalars=title)
###############################################################################
# Apply the filter
f = NormalizeArray(normalization='feature_scale', new_name='foo')
output = f.apply(mesh, title)
print(output)
###############################################################################
output.plot(scalars='foo')
|
lukecwik/incubator-beam
|
sdks/python/apache_beam/examples/dataframe/wordcount_test.py
|
Python
|
apache-2.0
| 2,150
| 0.005581
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Test for the wordcount example."""
# pytype: skip-file
import collections
import logging
import re
import tempfile
import unittest
from apache_beam.examples.dataframe import wordcount
from apache_beam.testing.util import open_shards
class WordCountTest(unittest.TestCase):
SAMPLE_TEXT = """
a
a b
a b c
loooooonger words
"""
def create_temp_file(self, contents):
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(contents.encode('utf-8'))
return f.name
def test_basics(self):
temp_path = self.create_temp_file(self.SAMPLE_TEXT)
expected_words = collections.defaultdict(int)
for word in re.findall(r'[\w]+', self.SAMPLE_TEXT):
expected_words[word] += 1
wordcount.run(['--input=%s*' % temp_path, '--output=%s.result' % temp_path])
# Parse result file and compare.
results = []
with open_shards(temp_path + '.result-*') as result_file:
for line in result_file:
match = re.search(r'(\S+),([0-9]+)', line)
if match is not None:
results.append((match.group(1), int(match.group(2))))
elif line.strip():
self.assertEqual(line.strip(), 'word,count')
self.assertEqual(sorted(results), sorted(expected_words.items()))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
|
mkhl/haskell.sugar
|
setup.py
|
Python
|
mit
| 1,606
| 0.056145
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
import py2app
import os, glob
def data_files(dirname = 'xml'):
"""Figure out the sugar structure and massage it into data_files format."""
def data_file(component):
return (os.path.join('..', '..', component),
glob.glob(os.path.join(dirname, component, '*')))
base = ['LICENSE', 'README.mdown', 'Languages.xml']
result = [(os.path.join('..', '..'), base)]
dirs = set([
'CodeSenseLibraries',
'CodeSenseProviders',
'ContextualSettings',
'FileActions',
'Itemizers',
'PlaceholderThemes',
'Syntaxes',
'SyntaxInjections',
        'TextActions',
])
for subdir in dirs.intersection(set(os.listdir(dirname))):
result.append(data_file(subdir))
return result
def sources(dirname):
"""Find (re)source files for our bundle and massage them into data_files format."""
    return glob.glob(os.path.join(dirname, '*'))
setup(
name = 'Haskell',
version = '0.0',
author = u'Martin Kühl',
author_email = 'martin.kuehl@gmail.com',
url = 'http://github.com/mkhl/haskell.sugar',
description = 'A Sugar adding sweet Haskell support for Espresso',
data_files = sources('res') + data_files('xml'),
plugin = sources('src'),
options = dict(
py2app = dict(
extension = '.sugar',
semi_standalone = True,
site_packages = True,
plist = dict(
CFBundleVersion = '0.0',
CFBundleIdentifier = 'org.purl.net.mkhl.haskell',
CFBundleGetInfoString = u'Haskell sugar for Espresso v0.0α',
NSHumanReadableCopyright = u'(c) 2009 Martin Kühl. Released under the MIT license.',
),
),
),
)
|
fuhaha/tensorflow_study
|
docs/chapter6/distributed_mnist.py
|
Python
|
apache-2.0
| 4,760
| 0.004915
|
import math
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
# Flags for defining the tf.train.ClusterSpec
tf.app.flags.DEFINE_string("ps_hosts", "",
"Comma-separated list of hostname:port pairs")
tf.app.flags.DEFINE_string("worker_hosts", "",
"Comma-separated list of hostname:port pairs")
# Flags for defining the tf.train.Server
tf.app.flags.DEFINE_string("job_name", "", "One of 'ps', 'worker'")
tf.app.flags.DEFINE_integer("task_index", 0, "Index of task within the job")
tf.app.flags.DEFINE_integer("hidden_units", 100,
"Number of units in the hidden layer of the NN")
tf.app.flags.DEFINE_string("data_dir", "/tmp/mnist-data",
"Directory for storing mnist data")
tf.app.flags.DEFINE_integer("batch_size", 100, "Training batch size")
FLAGS = tf.app.flags.FLAGS
IMAGE_PIXELS = 28
def build_model(device):
with tf.device(device):
        # To pin work to a specific GPU, use worker_device="/job:worker/task:%d/gpu:0".
        # Note: when running under CUDA_VISIBLE_DEVICES, that GPU's device id must be 0.
# with tf.device(tf.train.replica_device_setter(
# worker_device="/job:worker/task:%d/gpu:0" % FLAGS.task_index,
# cluster=cluster)):
# Variables of the hidden layer
hid_w = tf.Variable(
tf.truncated_normal([IMAGE_PIXELS * IMAGE_PIXELS, FLAGS.hidden_units],
stddev=1.0 / IMAGE_PIXELS), name="hid_w")
hid_b = tf.Variable(tf.zeros([FLAGS.hidden_units]), name="hid_b")
# Variables of the softmax layer
sm_w = tf.Variable(
tf.truncated_normal([FLAGS.hidden_units, 10],
stddev=1.0 / math.sqrt(FLAGS.hidden_units)),
name="sm_w")
sm_b = tf.Variable(tf.zeros([10]), name="sm_b")
x = tf.placeholder(tf.float32, [None, IMAGE_PIXELS * IMAGE_PIXELS])
y_ = tf.placeholder(tf.float32, [None, 10])
hid_lin = tf.nn.xw_plus_b(x, hid_w, hid_b)
hid = tf.nn.relu(hid_lin)
y = tf.nn.softmax(tf.nn.xw_plus_b(hid, sm_w, sm_b))
loss = -tf.reduce_sum(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0)))
global_step = tf.Variable(0)
return loss, global_step, x, y_
def main(_):
ps_hosts = FLAGS.ps_hosts.split(",")
worker_hosts = FLAGS.worker_hosts.split(",")
# Create a cluster from the parameter server and worker hosts.
cluster = tf.train.ClusterSpec({"ps": ps_hosts, "worker": worker_hosts})
# Create and start a server for the local task.
server = tf.train.Server(cluster,
job_name=FLAGS.job_name,
task_index=FLAGS.task_index)
if FLAGS.job_name == "ps":
server.join()
elif FLAGS.job_name == "worker":
# devices = tf.train.replica_device_setter(worker_device="/job:worker/task:%d/gpu:0" % FLAGS.task_index, cluster=cluster)
devices = tf.train.replica_device_setter(worker_device="/job:worker/task:%d" % FLAGS.task_index, cluster=cluster)
loss, global_step, x, y_ = build_model(devices)
train_op = tf.train.AdagradOptimizer(0.01).minimize(loss, global_step=global_step)
saver = tf.train.Saver()
summary_op = tf.merge_all_summaries()
init_op = tf.global_variables_initializer()
# Create a "supervisor", which oversees the training process.
sv = tf.train.Supervisor(is_chief=(FLAGS.task_index == 0),
logdir="/tmp/train_logs",
init_op=init_op,
summary_op=summary_op,
saver=saver,
global_step=global_step,
save_model_secs=600)
mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)
# The supervisor takes care of session initialization, restoring from
# a checkpoint, and closing when done or an error occurs.
config = tf.ConfigProto(allow_soft_placement=True)
with sv.managed_session(server.target, config=config) as sess:
# Loop until the supervisor shuts down or 1000000 steps have completed.
step = 0
while not sv.should_stop() and step < 1000000:
# Run a training step asynchronously.
# See `tf.train.SyncReplicasOptimizer` for additional details on how to
# perform *synchronous* training.
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batch_size)
train_feed = {x: batch_xs, y_: batch_ys}
_, step = sess.run([train_op, global_step], feed_dict=train_feed)
if step % 100 == 0:
          print("Done step %d" % step)
# Ask for all the services to stop.
sv.stop()
if __name__ == "__main__":
tf.app.run()
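# --- Hedged launch sketch (not part of the original file): one process per
# cluster member, e.g. (hostnames and ports are invented):
#   python distributed_mnist.py --ps_hosts=ps0:2222 \
#       --worker_hosts=worker0:2222,worker1:2222 --job_name=ps --task_index=0
#   python distributed_mnist.py --ps_hosts=ps0:2222 \
#       --worker_hosts=worker0:2222,worker1:2222 --job_name=worker --task_index=0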
|
tensorflow/tfx
|
tfx/components/base/base_driver.py
|
Python
|
apache-2.0
| 999
| 0.001001
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stub for pre-rename `tfx.dsl.components.base.base_driver`."""
from tfx.dsl.components.base import base_driver
from tfx.utils import deprecation_utils
BaseDriver = deprecation_utils.deprecated_alias( # pylint: disable=invalid-name
deprecated_name='tfx.components.base.base_driver.BaseDriver',
name='tfx.dsl.components.base.base_driver.BaseDriver',
func_or_class=base_driver.BaseDriver)
|
pombredanne/func
|
test/unittest/test_func_transmit.py
|
Python
|
gpl-2.0
| 8,415
| 0.010458
|
#!/usr/bin/python
##
## Copyright 2008, Various
## Adrian Likins <alikins@redhat.com>
##
## This software may be freely redistributed under the terms of the GNU
## general public license.
##
import os
import socket
import subprocess
import time
import unittest
import simplejson
import func.utils
from func import yaml
from func import jobthing
def structToYaml(data):
# takes a data structure, serializes it to
# yaml
buf = yaml.dump(data)
return buf
def structToJSON(data):
    # Takes a data structure for the test
    # and serializes it using json
    serialized = simplejson.dumps(data)
return serialized
class BaseTest(object):
# assume we are talking to localhost
# th = socket.gethostname()
th = socket.getfqdn()
nforks=1
async=False
ft_cmd = "func-transmit"
# just so we can change it easy later
def _serialize(self, data):
raise NotImplementedError
def _deserialize(self, buf):
raise NotImplementedError
def _call_async(self, data):
data['async'] = True
data['nforks'] = 4
job_id = self._call(data)
no_answer = True
while (no_answer):
out = self._call({'clients': '*',
'method':'job_status',
'parameters': job_id})
if out[0] == jobthing.JOB_ID_FINISHED:
no_answer = False
else:
time.sleep(.25)
result = out[1]
return result
def _call(self, data):
f = self._serialize(data)
p = subprocess.Popen(self.ft_cmd, shell=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
output = p.communicate(input=f)
return self._deserialize(output[0])
def call(self, data):
if self.async:
return self._call_async(data)
return self._call(data)
def __init__(self):
pass
# we do this all over the place...
def assert_on_fault(self, result):
assert func.utils.is_error(result[self.th]) == False
# assert type(result[self.th]) != xmlrpclib.Fault
class YamlBaseTest(BaseTest):
    # I'd add the "yaml" attr here for nosetests to find, but it doesn't
    # seem to find it unless the class is a test class directly
ft_cmd = "func-transmit --yaml"
def _serialize(self, data):
buf = yaml.dump(data)
return buf
def _deserialize(self, buf):
data = yaml.load(buf).next()
return data
class JSONBaseTest(BaseTest):
ft_cmd = "func-transmit --json"
def _serialize(self, data):
buf = simplejson.dumps(data)
return buf
def _deserialize(self, buf):
data = simplejson.loads(buf)
return data
class ListMinion(object):
def test_list_minions(self):
out = self.call({'clients': '*',
'method': 'list_minions'})
def test_list_minions_no_match(self):
out = self.call({'clients': 'somerandom-name-that-shouldnt-be-a_real_host_name',
'method': 'list_minions'})
assert out == []
def test_list_minions_group_name(self):
out = self.call({'clients': '@test',
'method': 'list_minions'})
def test_list_minions_no_clients(self):
out = self.call({'method': 'list_minions'})
class ListMinionAsync(ListMinion):
async = True
class TestListMinionYaml(YamlBaseTest, ListMinion):
yaml = True
def __init__(self):
super(TestListMinionYaml, self).__init__()
class TestListMinionJSON(JSONBaseTest, ListMinion):
json = True
def __init__(self):
super(TestListMinionJSON, self).__init__()
# list_minions is a convenience call for func_transmit, and doesn't
# really make any sense to call async
#class TestListMinionYamlAsync(YamlBaseTest, ListMinionAsync):
# yaml = True
# async = True
# def __init__(self):
#        super(TestListMinionYamlAsync, self).__init__()
#class TestListMinionJSONAsync(JSONBaseTest, ListMinionAsync):
# json = True
# async = True
# def __init__(self):
# super(TestListMinionJSONAsync, self).__init__()
class ClientGlob(object):
def _test_add(self, client):
result = self.call({'clients': client,
'method': 'add',
'module': 'test',
'parameters': [1,2]})
self.assert_on_fault(result)
return result
def test_single_client(self):
result = self._test_add(self.th)
def test_glob_client(self):
result = self._test_add("*")
def test_glob_list(self):
result = self._test_add([self.th, self.th])
def test_glob_string_list(self):
result = self._test_add("%s;*" % self.th)
# note, needs a /etc/func/group setup with the proper groups defined
# need to figure out a good way to test this... -akl
def test_group(self):
result = self._test_add("@test")
# def test_group_and_glob(self):
# result = self._test_add("@test;*")
# def test_list_of_groups(self):
# result = self._test_add(["@test", "@test2"])
# def test_string_list_of_groups(self):
# result = self._test_add("@test;@test2")
# run all the same tests, but run them asynchronously
class ClientGlobAsync(ClientGlob):
async = True
class TestClientGlobYaml(YamlBaseTest, ClientGlob):
yaml = True
def __init__(self):
super(TestClientGlobYaml, self).__init__()
class TestClientGlobJSON(JSONBaseTest, ClientGlob):
json = True
def __init__(self):
super(TestClientGlobJSON, self).__init__()
class TestClientGlobYamlAsync(YamlBaseTest, ClientGlobAsync):
yaml = True
async = True
def __init__(self):
super(TestClientGlobYamlAsync, self).__init__()
class TestClientGlobJSONAsync(JSONBaseTest, ClientGlobAsync):
json = True
async = True
def __init__(self):
super(TestClientGlobJSONAsync, self).__init__()
# why the weird T_est name? because nosetests doesn't seem to reliably
# respect the __test__ attribute, and these modules aren't meant to be
# invoked as test classes themselves, only as bases for other tests
class T_estTest(object):
__test__ = False
def _echo_test(self, data):
result = self.call({'clients':'*',
'method': 'echo',
'module': 'test',
'parameters': [data]})
self.assert_on_fault(result)
assert result[self.th] == data
def test_add(self):
result = self.call({'clients':'*',
'method': 'add',
'module': 'test',
'parameters': [1,2]})
assert result[self.th] == 3
def test_echo_int(self):
self._echo_test(37)
def test_echo_array(self):
self._echo_test([1,2,"three", "fore", "V"])
def test_echo_hash(self):
self._echo_test({'one':1, 'two':2, 'three': 3, 'four':"IV"})
def test_echo_float(self):
self._echo_test(1.0)
# NOTE/FIXME: the big float tests fail for yaml and json
def test_echo_big_float(self):
self._echo_test(123121232.23)
def test_echo_bigger_float(self):
self._echo_test(234234234234234234234.234234234234234)
def test_echo_little_float(self):
self._echo_test(0.0000000000000000000000000000000000037)
    # Note/FIXME: these tests currently fail for YAML
def test_echo_boolean_true(self):
self._echo_test(True)
def test_echo_boolean_false(self):
self._echo_test(False)
class T_estTestAsync(T_estTest):
__test__ = False
async = True
class TestTestYaml(YamlBaseTest, T_estTest):
yaml = True
def __init__(self):
super(YamlBaseTest, self).__init__()
class TestTestJSON(JSONBaseTest, T_estTest):
json = True
def __init__(self):
super(JSONBaseTest,self).__init__()
class TestTestAsyncJSON(JSONBaseTest, T_estTestAsync):
json = True
async = True
    def __init__(self):
        super(JSONBaseTest, self).__init__()
|
weaver-viii/subuser
|
logic/subuserlib/classes/gitRepository.py
|
Python
|
lgpl-3.0
| 6,303
| 0.013803
|
#!/usr/bin/env python
# This file should be compatible with both Python 2 and 3.
# If it is not, please file a bug report.
"""
This is a Class which allows one to manipulate a git repository.
"""
#external imports
import os
import tempfile
#internal imports
import subuserlib.subprocessExtras as subprocessExtras
from subuserlib.classes.fileStructure import FileStructure
class GitRepository():
def __init__(self,path):
self.__path = path
def getPath(self):
return self.__path
def run(self,args):
"""
Run git with the given command line arguments.
"""
return subprocessExtras.call(["git"]+args,cwd=self.getPath())
def runCollectOutput(self,args):
"""
Run git with the given command line arguments and return a tuple with (returncode,output).
"""
return subprocessExtras.callCollectOutput(["git"]+args,cwd=self.getPath())
def getFileStructureAtCommit(self,commit):
"""
Get a ``FileStructure`` object which relates to the given git commit.
"""
return GitFileStructure(self,commit)
def commit(self,message):
"""
Run git commit with the given commit message.
"""
try:
tempFile = tempfile.NamedTemporaryFile("w",encoding="utf-8")
except TypeError: # Older versions of python have broken tempfile implementation for which you cannot set the encoding.
tempFile = tempfile.NamedTemporaryFile("w")
message = message.encode('ascii', 'ignore').decode('ascii')
with tempFile as tempFile:
tempFile.write(message)
tempFile.flush()
return self.run(["commit","--file",tempFile.name])
def checkout(self,commit,files=[]):
"""
Run git checkout
"""
self.run(["checkout",commit]+files)
class GitFileStructure(FileStructure):
def __init__(self,gitRepository,commit):
"""
Initialize the file structure.
Here we setup test stuff:
    >>> import subuserlib.subprocessExtras
>>> subuserlib.subprocessExtras.call(["git","init"],cwd="/home/travis/hashtest")
0
>>> subuserlib.subprocessExtras.call(["git","add","."],cwd="/home/travis/hashtest")
0
>>> subuserlib.subprocessExtras.call(["git","commit","-m","Initial commit"],cwd="/home/travis/hashtest")
0
"""
self.__gitRepository = gitRepository
self.__commit = commit
def getCommit(self):
return self.__commit
def getRepository(self):
return self.__gitRepository
  def lsTree(self, subfolder, extraArgs=[]):
"""
Returns a list of tuples of the form:
(mode,type,hash,path)
    corresponding to the items found in the subfolder.
"""
if not subfolder.endswith("/"):
subfolder += "/"
if subfolder == "/":
subfolder = "./"
(returncode,output) = self.getRepository().runCollectOutput(["ls-tree"]+extraArgs+[self.getCommit(),subfolder])
if returncode != 0:
      return [] # Intentional: it is simpler to just return [] here than to check everywhere else whether the repository is properly initialized.
lines = output.splitlines()
items = []
for line in lines:
mode,objectType,rest = line.split(" ",2)
objectHash,path = rest.split("\t",1)
items.append((mode,objectType,objectHash,path))
return items
def ls(self, subfolder, extraArgs=[]):
"""
Returns a list of file and folder paths.
Paths are relative to the repository as a whole.
>>> from subuserlib.classes.gitRepository import GitRepository
>>> gitRepository = GitRepository("/home/travis/hashtest")
>>> fileStructure = gitRepository.getFileStructureAtCommit("master")
>>> print(",".join(fileStructure.ls("./")))
bar,blah
"""
items = self.lsTree(subfolder,extraArgs)
paths = []
for item in items:
paths.append(item[3])
return paths
def lsFiles(self,subfolder):
"""
Returns a list of paths to files in the subfolder.
Paths are relative to the repository as a whole.
>>> from subuserlib.classes.gitRepository import GitRepository
>>> gitRepository = GitRepository("/home/travis/hashtest")
>>> fileStructure = gitRepository.getFileStructureAtCommit("master")
>>> print(",".join(fileStructure.lsFiles("./")))
blah
"""
return list(set(self.ls(subfolder)) - set(self.lsFolders(subfolder)))
def lsFolders(self,subfolder):
"""
Returns a list of paths to folders in the subfolder.
Paths are relative to the repository as a whole.
>>> from subuserlib.classes.gitRepository import GitRepository
>>> gitRepository = GitRepository("/home/travis/hashtest")
>>> fileStructure = gitRepository.getFileStructureAtCommit("master")
>>> print(",".join(fileStructure.lsFolders("./")))
bar
"""
return self.ls(subfolder,extraArgs=["-d"])
def exists(self,path):
"""
>>> from subuserlib.classes.gitRepository import GitRepository
>>> gitRepository = GitRepository("/home/travis/hashtest")
>>> fileStructure = gitRepository.getFileStructureAtCommit("master")
>>> fileStructure.exists("./blah")
True
>>> fileStructure.exists("./non-existant")
False
"""
try:
self.read(path)
return True
except OSError:
return False
def read(self,path):
"""
Returns the contents of the given file at the given commit.
>>> from subuserlib.classes.gitRepository import GitRepository
>>> gitRepository = GitRepository("/home/travis/hashtest")
>>> fileStructure = gitRepository.getFileStructureAtCommit("master")
>>> print(fileStructure.read("./blah"))
blahblah
<BLANKLINE>
"""
(errorcode,content) = self.getRepository().runCollectOutput(["show",self.getCommit()+":"+path])
if errorcode != 0:
raise OSError("Git show exited with error "+str(errorcode)+". File does not exist.")
return content
def getMode(self,path):
"""
>>> from subuserlib.classes.gitRepository import GitRepository
>>> gitRepository = GitRepository("/home/travis/hashtest")
>>> fileStructure = gitRepository.getFileStructureAtCommit("master")
>>> print(fileStructure.getModeString("./blah"))
100644
"""
allObjects = self.lsTree("./",extraArgs=["-r"])
for treeObject in allObjects:
if os.path.normpath(treeObject[3]) == os.path.normpath(path):
return int(treeObject[0],8)
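# --- Hedged usage sketch (not part of the original file); the path below is
# hypothetical and must already be a git repository with at least one commit.
if __name__ == "__main__":
  repo = GitRepository("/tmp/example-repo")
  fileStructure = repo.getFileStructureAtCommit("master")
  print(",".join(fileStructure.ls("./")))
  print(fileStructure.exists("./README"))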
|
wasit7/tutorials
|
django/Pieng/myclass/myquiz/models.py
|
Python
|
mit
| 647
| 0.034003
|
from __future__ import unicode_literals
from django.db import models
# Create your models here.
class Questions(models.Model):
# Fields
CHOICES = (
('a', 'a.'),('b', 'b.'),('c', 'c.'),('d', 'd.'),
)
created = models.DateTimeField(auto_now_add=True, editable=False)
last_updated = models.DateTimeField(auto_now=True, editable=False)
question = models.TextField(max_length=100)
a = models.CharField(max_length=30)
b = models.CharField(max_length=30)
c = models.CharField(max_length=30)
d = models.CharField(max_length=30)
anwser = models.CharField(max_length=1,choices=CHOICES)
    def __unicode__(self):
return u'%s' % self.question
|
neo1691/subliminal
|
subliminal/subtitle.py
|
Python
|
mit
| 8,191
| 0.001953
|
# -*- coding: utf-8 -*-
import codecs
import logging
import os
import chardet
import pysrt
from .video import Episode, Movie
from .utils import sanitize, sanitize_release_group
logger = logging.getLogger(__name__)
#: Subtitle extensions
SUBTITLE_EXTENSIONS = ('.srt', '.sub', '.smi', '.txt', '.ssa', '.ass', '.mpl')
class Subtitle(object):
"""Base class for subtitle.
:param language: language of the subtitle.
:type language: :class:`~babelfish.language.Language`
:param bool hearing_impaired: whether or not the subtitle is hearing impaired.
:param page_link: URL of the web page from which the subtitle can be downloaded.
:type page_link: str
:param encoding: Text encoding of the subtitle.
:type encoding: str
"""
#: Name of the provider that returns that class of subtitle
provider_name = ''
def __init__(self, language, hearing_impaired=False, page_link=None, encoding=None):
#: Language of the subtitle
self.language = language
#: Whether or not the subtitle is hearing impaired
self.hearing_impaired = hearing_impaired
#: URL of the web page from which the subtitle can be downloaded
self.page_link = page_link
#: Content as bytes
self.content = None
#: Encoding to decode with when accessing :attr:`text`
self.encoding = None
# validate the encoding
if encoding:
try:
self.encoding = codecs.lookup(encoding).name
except (TypeError, LookupError):
logger.debug('Unsupported encoding %s', encoding)
@property
def id(self):
"""Unique identifier of the subtitle"""
raise NotImplementedError
@property
def text(self):
"""Content as string
If :attr:`encoding` is None, the encoding is guessed with :meth:`guess_encoding`
"""
if not self.content:
return
if self.encoding:
return self.content.decode(self.encoding, errors='replace')
return self.content.decode(self.guess_encoding(), errors='replace')
def is_valid(self):
"""Check if a :attr:`text` is a valid SubRip format.
:return: whether or not the subtitle is valid.
:rtype: bool
"""
if not self.text:
return False
try:
pysrt.from_string(self.text, error_handling=pysrt.ERROR_RAISE)
except pysrt.Error as e:
if e.args[0] < 80:
return False
return True
def guess_encoding(self):
"""Guess encoding using the language, falling back on chardet.
:return: the guessed encoding.
:rtype: str
"""
logger.info('Guessing encoding for language %s', self.language)
# always try utf-8 first
encodings = ['utf-8']
# add language-specific encodings
if self.language.alpha3 == 'zho':
encodings.extend(['gb18030', 'big5'])
elif self.language.alpha3 == 'jpn':
encodings.append('shift-jis')
elif self.language.alpha3 == 'ara':
encodings.append('windows-1256')
elif self.language.alpha3 == 'heb':
encodings.append('windows-1255')
elif self.language.alpha3 == 'tur':
encodings.extend(['iso-8859-9', 'windows-1254'])
elif self.language.alpha3 == 'pol':
# Eastern European Group 1
encodings.extend(['windows-1250'])
elif self.language.alpha3 == 'bul':
# Eastern European Group 2
encodings.extend(['windows-1251'])
else:
# Western European (windows-1252)
encodings.append('latin-1')
# try to decode
logger.debug('Trying encodings %r', encodings)
for encoding in encodings:
try:
self.content.decode(encoding)
except UnicodeDecodeError:
pass
else:
logger.info('Guessed encoding %s', encoding)
return encoding
logger.warning('Could not guess encoding from language')
# fallback on chardet
encoding = chardet.detect(self.content)['encoding']
logger.info('Chardet found encoding %s', encoding)
return encoding
def get_matches(self, video):
"""Get the matches against the `video`.
        :param video: the video to get the matches with.
:type video: :class:`~subliminal.video.Video`
:return: matches of the subtitle.
:rtype: set
"""
raise NotImplementedError
def __hash__(self):
return hash(self.provider_name + '-' + self.id)
def __repr__(self):
return '<%s %r [%s]>' % (self.__class__.__name__, self.id, self.language)
def get_subtitle_path(video_path, language=None, extension='.srt'):
    """Get the subtitle path using the `video_path` and `language`.
:param str video_path: path to the video.
:param language: language of the subtitle to put in the path.
:type language: :class:`~babelfish.language.Language`
:param str extension: extension of the subtitle.
:return: path of the subtitle.
:rtype: str
"""
subtitle_root = os.path.splitext(video_path)[0]
if language:
subtitle_root += '.' + str(language)
return subtitle_root + extension
def guess_matches(video, guess, partial=False):
"""Get matches between a `video` and a `guess`.
If a guess is `partial`, the absence information won't be counted as a match.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param guess: the guess.
:type guess: dict
:param bool partial: whether or not the guess is partial.
:return: matches between the `video` and the `guess`.
:rtype: set
"""
matches = set()
if isinstance(video, Episode):
# series
if video.series and 'title' in guess and sanitize(guess['title']) == sanitize(video.series):
matches.add('series')
# title
if video.title and 'episode_title' in guess and sanitize(guess['episode_title']) == sanitize(video.title):
matches.add('title')
# season
if video.season and 'season' in guess and guess['season'] == video.season:
matches.add('season')
# episode
if video.episode and 'episode' in guess and guess['episode'] == video.episode:
matches.add('episode')
# year
if video.year and 'year' in guess and guess['year'] == video.year:
matches.add('year')
# count "no year" as an information
if not partial and video.original_series and 'year' not in guess:
matches.add('year')
elif isinstance(video, Movie):
# year
if video.year and 'year' in guess and guess['year'] == video.year:
matches.add('year')
# title
if video.title and 'title' in guess and sanitize(guess['title']) == sanitize(video.title):
matches.add('title')
# release_group
if (video.release_group and 'release_group' in guess and
sanitize_release_group(guess['release_group']) == sanitize_release_group(video.release_group)):
matches.add('release_group')
# resolution
if video.resolution and 'screen_size' in guess and guess['screen_size'] == video.resolution:
matches.add('resolution')
# format
if video.format and 'format' in guess and guess['format'].lower() == video.format.lower():
matches.add('format')
# video_codec
if video.video_codec and 'video_codec' in guess and guess['video_codec'] == video.video_codec:
matches.add('video_codec')
# audio_codec
if video.audio_codec and 'audio_codec' in guess and guess['audio_codec'] == video.audio_codec:
matches.add('audio_codec')
return matches
def fix_line_ending(content):
"""Fix line ending of `content` by changing it to \n.
:param bytes content: content of the subtitle.
:return: the content with fixed line endings.
:rtype: bytes
"""
    return content.replace(b'\r\n', b'\n').replace(b'\r', b'\n')
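# --- Hedged usage sketch (not part of the original file): both helpers are
# pure functions; babelfish is a subliminal dependency.
if __name__ == '__main__':
    from babelfish import Language
    print(get_subtitle_path('/videos/show.s01e01.mkv', Language('eng')))
    # -> something like /videos/show.s01e01.en.srt (exact code depends on babelfish)
    print(fix_line_ending(b'Hello\r\nWorld\r\n'))  # b'Hello\nWorld\n'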
|
cloudify-cosmo/cloudify-manager
|
tests/integration_tests_plugins/mock_labels/setup.py
|
Python
|
apache-2.0
| 99
| 0
|
from setuptools import setup
setup(name='mock_labels', version='0.0.1', packages=['mock_labels'])
|
Juniper/neutron
|
neutron/tests/unit/ml2/drivers/cisco/nexus/test_cisco_mech.py
|
Python
|
apache-2.0
| 24,807
| 0.000322
|
# Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import mock
import webob.exc as wexc
from neutron.api.v2 import base
from neutron.common import constants as n_const
from neutron import context
from neutron.extensions import portbindings
from neutron.manager import NeutronManager
from neutron.openstack.common import log as logging
from neutron.plugins.ml2 import config as ml2_config
from neutron.plugins.ml2.drivers.cisco.nexus import config as cisco_config
from neutron.plugins.ml2.drivers.cisco.nexus import exceptions as c_exc
from neutron.plugins.ml2.drivers.cisco.nexus import mech_cisco_nexus
from neutron.plugins.ml2.drivers.cisco.nexus import nexus_network_driver
from neutron.plugins.ml2.drivers import type_vlan as vlan_config
from neutron.tests.unit import test_db_plugin
LOG = logging.getLogger(__name__)
ML2_PLUGIN = 'neutron.plugins.ml2.plugin.Ml2Plugin'
PHYS_NET = 'physnet1'
COMP_HOST_NAME = 'testhost'
COMP_HOST_NAME_2 = 'testhost_2'
VLAN_START = 1000
VLAN_END = 1100
NEXUS_IP_ADDR = '1.1.1.1'
NETWORK_NAME = 'test_network'
NETWORK_NAME_2 = 'test_network_2'
NEXUS_INTERFACE = '1/1'
NEXUS_INTERFACE_2 = '1/2'
CIDR_1 = '10.0.0.0/24'
CIDR_2 = '10.0.1.0/24'
DEVICE_ID_1 = '11111111-1111-1111-1111-111111111111'
DEVICE_ID_2 = '22222222-2222-2222-2222-222222222222'
DEVICE_OWNER = 'compute:None'
class CiscoML2MechanismTestCase(test_db_plugin.NeutronDbPluginV2TestCase):
def setUp(self):
"""Configure for end-to-end neutron testing using a mock ncclient.
This setup includes:
- Configure the ML2 plugin to use VLANs in the range of 1000-1100.
- Configure the Cisco mechanism driver to use an imaginary switch
at NEXUS_IP_ADDR.
- Create a mock NETCONF client (ncclient) for the Cisco mechanism
driver
"""
self.addCleanup(mock.patch.stopall)
# Configure the ML2 mechanism drivers and network types
ml2_opts = {
'mechanism_drivers': ['cisco_nexus'],
'tenant_network_types': ['vlan'],
}
for opt, val in ml2_opts.items():
ml2_config.cfg.CONF.set_override(opt, val, 'ml2')
self.addCleanup(ml2_config.cfg.CONF.reset)
# Configure the ML2 VLAN parameters
phys_vrange = ':'.join([PHYS_NET, str(VLAN_START), str(VLAN_END)])
vlan_config.cfg.CONF.set_override('network_vlan_ranges',
[phys_vrange],
'ml2_type_vlan')
self.addCleanup(vlan_config.cfg.CONF.reset)
# Configure the Cisco Nexus mechanism driver
nexus_config = {
(NEXUS_IP_ADDR, 'username'): 'admin',
(NEXUS_IP_ADDR, 'password'): 'mySecretPassword',
(NEXUS_IP_ADDR, 'ssh_port'): 22,
(NEXUS_IP_ADDR, COMP_HOST_NAME): NEXUS_INTERFACE,
(NEXUS_IP_ADDR, COMP_HOST_NAME_2): NEXUS_INTERFACE_2}
nexus_patch = mock.patch.dict(
cisco_config.ML2MechCiscoConfig.nexus_dict,
nexus_config)
nexus_patch.start()
self.addCleanup(nexus_patch.stop)
# The NETCONF client module is not included in the DevStack
# distribution, so mock this module for unit testing.
self.mock_ncclient = mock.Mock()
mock.patch.object(nexus_network_driver.CiscoNexusDriver,
'_import_ncclient',
return_value=self.mock_ncclient).start()
# Mock port values for 'status' and 'binding:segmentation_id'
mock_status = mock.patch.object(
mech_cisco_nexus.CiscoNexusMechanismDriver,
'_is_status_active').start()
mock_status.return_value = n_const.PORT_STATUS_ACTIVE
def _mock_get_vlanid(context):
network = context.network.current
if network['name'] == NETWORK_NAME:
return VLAN_START
else:
return VLAN_START + 1
mock_vlanid = mock.patch.object(
mech_cisco_nexus.CiscoNexusMechanismDriver,
'_get_vlanid').start()
mock_vlanid.side_effect = _mock_get_vlanid
super(CiscoML2MechanismTestCase, self).setUp(ML2_PLUGIN)
self.port_create_status = 'DOWN'
@contextlib.contextmanager
def _patch_ncclient(self, attr, value):
"""Configure an attribute on the mock ncclient module.
This method can be used to inject errors by setting a side effect
or a return value for an ncclient method.
:param attr: ncclient attribute (typically method) to be configured.
:param value: Value to be configured on the attribute.
"""
# Configure attribute.
config = {attr: value}
self.mock_ncclient.configure_mock(**config)
# Continue testing
yield
# Unconfigure attribute
config = {attr: None}
self.mock_ncclient.configure_mock(**config)
def _is_in_nexus_cfg(self, words):
"""Check if any config sent to Nexus contains all words in a list."""
for call in (self.mock_ncclient.connect.return_value.
edit_config.mock_calls):
configlet = call[2]['config']
if all(word in configlet for word in words):
return True
return False
def _is_in_last_nexus_cfg(self, words):
"""Confirm last config sent to Nexus contains specified keywords."""
last_cfg = (self.mock_ncclient.connect.return_value.
edit_config.mock_calls[-1][2]['config'])
return all(word in last_cfg for word in words)
def _is_vlan_configured(self, vlan_creation_expected=True,
add_keyword_expected=False):
vlan_created = self._is_in_nexus_cfg(['vlan', 'vlan-name'])
add_appears = self._is_in_last_nexus_cfg(['add'])
return (self._is_in_last_nexus_cfg(['allowed', 'vlan']) and
vlan_created == vlan_creation_expected and
add_appears == add_keyword_expected)
def _is_vlan_unconfigured(self, vlan_deletion_expected=True):
vlan_deleted = self._is_in_last_nexus_cfg(
['no', 'vlan', 'vlan-id-create-delete'])
return (self._is_in_nexus_cfg(['allowed', 'vlan', 'remove']) and
vlan_deleted == vlan_deletion_expected)
class TestCiscoBasicGet(CiscoML2MechanismTestCase,
test_db_plugin.TestBasicGet):
pass
class TestCiscoV2HTTPResponse(CiscoML2MechanismTestCase,
test_db_plugin.TestV2HTTPResponse):
pass
class TestCiscoPortsV2(CiscoML2MechanismTestCase,
test_db_plugin.TestPortsV2):
@contextlib.contextmanager
def _create_resources(self, name=NETWORK_NAME, cidr=CIDR_1,
device_id=DEVICE_ID_1,
host_id=COMP_HOST_NAME):
"""Create network, subnet, and port resources for test cases.
Create a network, subnet, port and then update the port, yield the
result, then delete the port, subnet and network.
:param name: Name of network to be created.
:param cidr: cidr address of subnetwork to be created.
:param device_id: Device ID to use for port to be created/updated.
:param host_id: Host ID to use for port create/update.
"""
with self.network(name=name) as network:
with self.subnet(network=network, cidr=cidr) as subnet:
with self.port(subnet=subnet, cidr=cidr) as port:
data = {'port': {portbindings.HOST_ID: host_id,
|
beeva-fernandocerezal/rasa_nlu
|
rasa_nlu/__init__.py
|
Python
|
apache-2.0
| 297
| 0
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import rasa_nlu.version
logging.getLogger(__name__).addHandler(logging.NullHandler())
__version__ = rasa_nlu.version.__version__
|
openstack/neutron-vpnaas
|
neutron_vpnaas/services/vpn/service_drivers/driver_validator.py
|
Python
|
apache-2.0
| 1,000
| 0
|
# Copyright 2017 Eayun, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
class VpnDriverValidator(object):
"""Driver-specific validation routines for VPN resources."""
def __init__(self, driver):
self.driver = driver
@property
def l3_plugin(self):
return self.driver.l3_plugin
def validate_ipsec_site_connection(self, context, ipsec_sitecon):
"""Driver can override this for its additional validations."""
pass
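# --- Hedged sketch (not part of the original file) of a driver-specific
# subclass overriding the hook; the MTU check is invented for illustration.
class ExampleDriverValidator(VpnDriverValidator):
    def validate_ipsec_site_connection(self, context, ipsec_sitecon):
        mtu = ipsec_sitecon.get('mtu')
        if mtu is not None and mtu < 68:
            raise ValueError('MTU %s is below the IPv4 minimum' % mtu)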
|
artus40/maraudes_project
|
notes/forms.py
|
Python
|
gpl-3.0
| 2,575
| 0.001554
|
import datetime
from .models import Note, Sujet
from utilisateurs.models import Professionnel
from django import forms
from django_select2.forms import Select2Widget
# NOTES
class NoteForm(forms.ModelForm):
""" Generic Note form """
class Meta:
model = Note
fields = ['sujet', 'text', 'created_by', 'created_date', 'created_time']
        widgets = {
'sujet': Select2Widget(),
'text': forms.Textarea(
attrs={'rows': 4}
),
}
class SimpleNoteForm(forms.ModelForm):
""" Simple note with only 'sujet' and 'text' fields.
Usefull with children of 'Note' that defines all 'note_*'
special methods.
"""
class Meta(NoteForm.Meta):
fields = ['sujet', 'text']
class UserNoteForm(NoteForm):
""" Form that sets 'created_by' with current user id.
It requires 'request' object at initialization
"""
class Meta(NoteForm.Meta):
fields = ['sujet', 'text', 'created_date', 'created_time']
def __init__(self, **kwargs):
request = kwargs.pop('request')
super().__init__(**kwargs)
try:
self.author = Professionnel.objects.get(pk=request.user.pk)
except Professionnel.DoesNotExist:
msg = "%s should not have been initiated with '%s' user" % (self, request.user)
raise RuntimeError(msg)
def save(self, commit=True):
instance = super().save(commit=False)
instance.created_by = self.author
if commit:
instance.save()
return instance
class AutoNoteForm(UserNoteForm):
class Meta(UserNoteForm.Meta):
fields = ['text']
def __init__(self, **kwargs):
self.sujet = kwargs.pop('sujet')
super().__init__(**kwargs)
def save(self, commit=True):
inst = super().save(commit=False)
inst.sujet = self.sujet
if commit:
inst.save()
return inst
# SUJETS
current_year = datetime.date.today().year
YEAR_CHOICE = tuple(range(current_year - 2, current_year + 8))
class SujetCreateForm(forms.ModelForm):
class Meta:
model = Sujet
fields = ['nom', 'surnom', 'prenom', 'genre', 'premiere_rencontre']
widgets = {
'premiere_rencontre': forms.SelectDateWidget(
empty_label=("Année", "Mois", "Jour"),
years=YEAR_CHOICE,
),
}
class SelectSujetForm(forms.Form):
sujet = forms.ModelChoiceField(queryset=Sujet.objects.all(), widget=Select2Widget)
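# --- Hedged usage sketch (assumed view code, not part of this module):
# UserNoteForm must receive the current request so it can resolve the
# Professionnel author before saving; the URL and template names below are
# assumptions.
from django.shortcuts import redirect, render
def create_note(request):
    form = UserNoteForm(request=request, data=request.POST or None)
    if request.method == 'POST' and form.is_valid():
        form.save()  # fills in 'created_by' from request.user
        return redirect('note-list')
    return render(request, 'notes/note_form.html', {'form': form})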
|
appop/bitcoin
|
src/test/bctest.py
|
Python
|
mit
| 4,554
| 0.005051
|
# Copyright 2014 BitPay Inc.
# Copyright 2016 The nealcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from __future__ import division,print_function,unicode_literals
import subprocess
import os
import json
import sys
import binascii
import difflib
import logging
def parse_output(a, fmt):
"""Parse the output according to specified format.
Raise an error if the output can't be parsed."""
if fmt == 'json': # json: compare parsed data
return json.loads(a)
elif fmt == 'hex': # hex: parse and compare binary data
return binascii.a2b_hex(a.strip())
else:
raise NotImplementedError("Don't know how to compare %s" % fmt)
def bctest(testDir, testObj, exeext):
"""Runs a single test, comparing output and RC to expected output and RC.
Raises an error if input can't be read, executable fails, or output/RC
are not as expected. Error is caught by bctester() and reported.
"""
# Get the exec names and arguments
execprog = testObj['exec'] + exeext
execargs = testObj['args']
execrun = [execprog] + execargs
# Read the input data (if there is any)
stdinCfg = None
inputData = None
if "input" in testObj:
filename = testDir + "/" + testObj['input']
inputData = open(filename).read()
stdinCfg = subprocess.PIPE
# Read the expected output data (if there is any)
outputFn = None
outputData = None
if "output_cmp" in testObj:
outputFn = testObj['output_cmp']
outputType = os.path.splitext(outputFn)[1][1:] # output type from file extension (determines how to compare)
        try:
outputData = open(testDir + "/" + outputFn).read()
except:
logging.error("Output file " + outputFn + " can not be opened")
raise
if not outputData:
logging.error("Output data missing for " + outputFn)
raise Exception
# Run the test
    proc = subprocess.Popen(execrun, stdin=stdinCfg, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
try:
outs = proc.communicate(input=inputData)
except OSError:
logging.error("OSError, Failed to execute " + execprog)
raise
if outputData:
# Parse command output and expected output
try:
a_parsed = parse_output(outs[0], outputType)
except Exception as e:
logging.error('Error parsing command output as %s: %s' % (outputType,e))
raise
try:
b_parsed = parse_output(outputData, outputType)
except Exception as e:
logging.error('Error parsing expected output %s as %s: %s' % (outputFn,outputType,e))
raise
# Compare data
if a_parsed != b_parsed:
logging.error("Output data mismatch for " + outputFn + " (format " + outputType + ")")
raise Exception
# Compare formatting
if outs[0] != outputData:
error_message = "Output formatting mismatch for " + outputFn + ":\n"
error_message += "".join(difflib.context_diff(outputData.splitlines(True),
outs[0].splitlines(True),
fromfile=outputFn,
tofile="returned"))
logging.error(error_message)
raise Exception
# Compare the return code to the expected return code
wantRC = 0
if "return_code" in testObj:
wantRC = testObj['return_code']
if proc.returncode != wantRC:
logging.error("Return code mismatch for " + outputFn)
raise Exception
def bctester(testDir, input_basename, buildenv):
""" Loads and parses the input file, runs all tests and reports results"""
input_filename = testDir + "/" + input_basename
raw_data = open(input_filename).read()
input_data = json.loads(raw_data)
failed_testcases = []
for testObj in input_data:
try:
bctest(testDir, testObj, buildenv.exeext)
logging.info("PASSED: " + testObj["description"])
except:
logging.info("FAILED: " + testObj["description"])
failed_testcases.append(testObj["description"])
if failed_testcases:
logging.error("FAILED TESTCASES: [" + ", ".join(failed_testcases) + "]")
sys.exit(1)
else:
sys.exit(0)
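# --- Hedged illustration (not part of the original script): bctester()
# expects <testDir>/<input_basename> to hold a JSON array of test objects;
# the exec name, args, and compare file below are assumptions.
EXAMPLE_TESTS = [
    {"exec": "./bitcoin-tx", "args": ["-json", "0100..."],
     "output_cmp": "txcreate1.json", "return_code": 0,
     "description": "decode a raw transaction to JSON"},
]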
|
maka-io/ooi-ui
|
ooiui/core/app/__init__.py
|
Python
|
mit
| 651
| 0.003072
|
#!/usr/bin/env python
'''
ooiui.core.app.science
Defines the application for the Science UI
'''
import os
from flask import Flask
from flask.ext.cache import Cache
from flask_environments import Environments
app = Flask(__name__, static_url_path='', template_folder='../../templates', static_folder='../../static')
env = Environments(app, default_env='DEVELOPMENT')
basedir = 'ooiui/config'
if os.path.exists(os.path.join(basedir, 'config_local.yml')):
env.from_yaml(os.path.join(basedir, 'config_local.yml'))
else:
env.from_yaml(os.path.join(basedir, 'config.yml'))
cache = Cache(app, config={'CACHE_TYPE': app.config['CACHE_TYPE']})
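# --- Hedged sketch (assumed keys, not part of this module): the YAML files
# under ooiui/config are expected to carry per-environment sections that
# flask_environments loads, minimally the cache backend, e.g.
#     DEVELOPMENT:
#         CACHE_TYPE: 'simple'
#     PRODUCTION:
#         CACHE_TYPE: 'redis'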
|
jtconnor/mec2
|
setup.py
|
Python
|
gpl-2.0
| 3,761
| 0
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='mec2',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='0.0.2',
description='Query information about the current ec2 instance',
long_description=long_description,
# The project's main homepage.
url='https://github.com/jtconnor/mec2',
# Author details
author='James Connor',
author_email='jtconnor@gmail.com',
# Choose your license
license='GPL 2',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2.7',
],
# What does your project relate to?
keywords='aws ec2',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
# py_modules=["my_module"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=['boto'],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
# extras_require={
# 'dev': ['check-manifest'],
    #     'test': ['coverage'],
# },
# If there are data files included in your packages that need to be
    # installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
# package_data={
# 'sample': ['package_data.dat'],
# },
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
# data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'mec2=mec2.main:main',
],
},
)
|
qstokkink/py-ipv8
|
ipv8/attestation/wallet/irmaexact/__init__.py
|
Python
|
lgpl-3.0
| 360
| 0.002778
|
from __future__ import division
import os
from functools import reduce
def secure_randint(bitspace):
    delbits = 8 - (bitspace % 8)
bytez = os.urandom(bitspace // 8)
if delbits > 0:
b = (os.urandom(1)[0] & (0xFF >> delbits)).to_bytes(1, 'big', signed=False)
bytez = b + bytez
    return reduce(lambda a, b: (a << 8) + b, bytez, 0)
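# --- Hedged usage sketch (not part of the original module): secure_randint(n)
# returns a uniformly random non-negative integer of at most n bits.
if __name__ == "__main__":
    r = secure_randint(256)
    assert 0 <= r < 2 ** 256  # fits in the requested bitspace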
|
ChristophKirst/ClearMap
|
docs/build/latex/imageanalysis-7.py
|
Python
|
gpl-3.0
| 749
| 0.045394
|
import os
import ClearMap.Settings as settings
filename = os.path.join(settings.ClearMapPath, 'Test/Data/ImageAnalysis/cfos-substack.tif');
import ClearMap.Visualization.Plot as plt
import ClearMap.IO as io
data = io.readData(filename, z = (0,26));
import ClearMap.ImageProcessing.BackgroundRemoval as bgr
dataBGR = bgr.removeBackground(data.astype('float'), size=(3,3), verbose = True);
from ClearMap.ImageProcessing.Filter.DoGFilter import filterDoG
dataDoG = filterDoG(dataBGR, size=(8,8,4), verbose = True);
from ClearMap.ImageProcessing.MaximaDetection import findExtendedMaxima
dataMax = findExtendedMaxima(dataDoG, hMax = None, verbose = True, threshold = 10);
plt.plotOverlayLabel(dataDoG / dataDoG.max(), dataMax.astype('int'), z = (10,16))
|
AnzenSolutions/ASWCP-Web
|
plugins/handlers/forgotpassword/forgotpassword.py
|
Python
|
mpl-2.0
| 1,694
| 0.008855
|
from plugins.bases.handlers import HandlersBase
class forgotpassword(HandlersBase):
WEB_PATH = r"/forgotpassword"
STORE_ATTRS = True
STORE_UNREF = True
OPTS = {}
PAGE_TITLE = "Reset Password"
CSS_FILES = ["forgotpassword"]
def get(self):
self.show("forgotpassword", action="", msg_type="")
def post(self):
un = self.get_argument("user", "")
email = self.get_argument("email", "")
action = self.get_argument("action", "")
if action == "":
try:
sql = self.db.users.get((self.db.users.username == un) & (self.db.users.email == email))
self.show("forgotpassword", action="newpass", msg_type="", email=email)
except:
self.show("forgotpassword", msg_type="error", action="", msg="Invalid username and/or email provided.")
elif action == "newpass":
pw1 = self.get_argument("pw1", "")
pw2 = self.get_argument("pw2", "")
if (pw1 != "") and (pw2 != "") and (pw1 == pw2):
pw = self.text2hash(pw1)
if self.db.users.update(pw=pw).where(self.db.users.email == email).execute() == 1:
self.redirect("/login")
else:
                    self.show("forgotpassword", msg_type="error", msg="Issue updating account's password. Please try again.")
else:
self.show("forgotpassword", msg_type="error", msg="Passwords did not match or where left empty. Please try again.")
else:
self.show("forgotpassword", msg_type="error", msg="Unknown action requested.")
|
DK-Git/script.mdm166a
|
resources/lib/extraicons.py
|
Python
|
gpl-2.0
| 2,739
| 0.004016
|
'''
XBMC LCDproc addon
Copyright (C) 2012 Team XBMC
Extra icon defines/enums
Copyright (C) 2012 Daniel 'herrnst' Scheller
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
# enum snippet from http://stackoverflow.com/a/1695250 - thanks!
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
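# --- Hedged usage sketch (not part of the original file): enum() assigns
# consecutive integers to the positional names, with keyword overrides, e.g.
#     Playback = enum('STOPPED', 'PLAYING', PAUSED=10)
#     Playback.STOPPED == 0, Playback.PLAYING == 1, Playback.PAUSED == 10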
LCD_EXTRABARS_MAX = 4
LCD_EXTRAICONS = enum(
'LCD_EXTRAICON_NONE',
'LCD_EXTRAICON_PLAYING',
'LCD_EXTRAICON_PAUSE',
'LCD_EXTRAICON_MOVIE',
'LCD_EXTRAICON_MUSIC',
'LCD_EXTRAICON_WEATHER',
'LCD_EXTRAICON_TV',
'LCD_EXTRAICON_PHOTO',
'LCD_EXTRAICON_WEBCASTING',
'LCD_EXTRAICON_MUTE',
'LCD_EXTRAICON_AT',
'LCD_EXTRAICON_REPEAT',
'LCD_EXTRAICON_SHUFFLE',
'LCD_EXTRAICON_ALARM',
'LCD_EXTRAICON_RECORD',
'LCD_EXTRAICON_VOLUME',
'LCD_EXTRAICON_TIME',
'LCD_EXTRAICON_SPDIF',
'LCD_EXTRAICON_DISC_IN',
'LCD_EXTRAICON_SCR1',
'LCD_EXTRAICON_SCR2',
'LCD_EXTRAICON_RESOLUTION_SD',
'LCD_EXTRAICON_RESOLUTION_HD',
'LCD_EXTRAICON_VCODEC_MPEG',
'LCD_EXTRAICON_VCODEC_DIVX',
'LCD_EXTRAICON_VCODEC_XVID',
'LCD_EXTRAICON_VCODEC_WMV',
'LCD_EXTRAICON_ACODEC_MPEG',
'LCD_EXTRAICON_ACODEC_AC3',
'LCD_EXTRAICON_ACODEC_DTS',
'LCD_EXTRAICON_ACODEC_VWMA', # e.g. iMON has video-WMA AND audio-WMA...
'LCD_EXTRAICON_ACODEC_MP3',
'LCD_EXTRAICON_ACODEC_OGG',
'LCD_EXTRAICON_ACODEC_AWMA', # see ACODEC_VWMA
'LCD_EXTRAICON_ACODEC_WAV',
'LCD_EXTRAICON_OUTSOURCE',
'LCD_EXTRAICON_OUTFIT',
'LCD_EXTRAICON_OUT_2_0',
'LCD_EXTRAICON_OUT_5_1',
'LCD_EXTRAICON_OUT_7_1',
'LCD_EXTRAICON_MAX'
)
LCD_EXTRAICONCATEGORIES = enum(
'LCD_ICONCAT_MODES',
'LCD_ICONCAT_OUTSCALE',
'LCD_ICONCAT_CODECS',
'LCD_ICONCAT_VIDEOCODECS',
'LCD_ICONCAT_AUDIOCODECS',
'LCD_ICONCAT_AUDIOCHANNELS'
)
|
Vvucinic/Wander
|
venv_2_7/lib/python2.7/site-packages/IPython/core/usage.py
|
Python
|
artistic-2.0
| 23,364
| 0.001198
|
# -*- coding: utf-8 -*-
"""Usage information for the main IPython applications.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
import sys
from IPython.core import release
cl_usage = """\
=========
IPython
=========
Tools for Interactive Computing in Python
=========================================
A Python shell with automatic history (input and output), dynamic object
introspection, easier configuration, command completion, access to the
system shell and more. IPython can also be embedded in running programs.
Usage
ipython [subcommand] [options] [-c cmd | -m mod | file] [--] [arg] ...
If invoked with no options, it executes the file and exits, passing the
remaining arguments to the script, just as if you had specified the same
command with python. You may need to specify `--` before args to be passed
to the script, to prevent IPython from attempting to parse them. If you
specify the option `-i` before the filename, it will enter an interactive
IPython session after running the script, rather than exiting. Files ending
in .py will be treated as normal Python, but files ending in .ipy can
contain special IPython syntax (magic commands, shell expansions, etc.).
Almost all configuration in IPython is available via the command-line. Do
`ipython --help-all` to see all available options. For persistent
configuration, look into your `ipython_config.py` configuration file for
details.
This file is typically installed in the `IPYTHONDIR` directory, and there
is a separate configuration directory for each profile. The default profile
directory will be located in $IPYTHONDIR/profile_default. IPYTHONDIR
defaults to `$HOME/.ipython`. For Windows users, $HOME resolves to
C:\\Users\\YourUserName in most instances.
To initialize a profile with the default configuration file, do::
$> ipython profile create
and start editing `IPYTHONDIR/profile_default/ipython_config.py`
In IPython's documentation, we will refer to this directory as
`IPYTHONDIR`, you can change its default location by creating an
environment variable with this name and setting it to the desired path.
For more information, see the manual available in HTML and PDF in your
installation, or online at http://ipython.org/documentation.html.
"""
interactive_usage = """
IPython -- An enhanced Interactive Python
=========================================
IPython offers a combination of convenient shell features, special commands
and a history mechanism for both input (command history) and output (results
caching, similar to Mathematica). It is intended to be a fully compatible
replacement for the standard Python interpreter, while offering vastly
improved functionality and flexibility.
At your system command line, type 'ipython -h' to see the command line
options available. This document only describes interactive features.
MAIN FEATURES
-------------
* Access to the standard Python help. As of Python 2.1, a help system is
available with access to object docstrings and the Python manuals. Simply
type 'help' (no quotes) to access it.
* Magic commands: type %magic for information on the magic subsystem.
* System command aliases, via the %alias command or the configuration file(s).
* Dynamic object information:
Typing ?word or word? prints detailed information about an object. If
certain strings in the object are too long (docstrings, code, etc.) they get
snipped in the center for brevity.
Typing ??word or word?? gives access to the full information without
snipping long strings. Long strings are sent to the screen through the less
pager if longer than the screen, printed otherwise.
The ?/?? system gives access to the full source code for any object (if
available), shows function prototypes and other useful information.
If you just want to see an object's docstring, type '%pdoc object' (without
quotes, and without % if you have automagic on).
* Completion in the local namespace, by typing TAB at the prompt.
At any time, hitting tab will complete any available python commands or
variable names, and show you a list of the possible completions if there's
no unambiguous one. It will also complete filenames in the current directory.
This feature requires the readline and rlcomplete modules, so it won't work
if your Python lacks readline support (such as under Windows).
* Search previous command history in two ways (also requires readline):
- Start typing, and then use Ctrl-p (previous,up) and Ctrl-n (next,down) to
search through only the history items that match what you've typed so
far. If you use Ctrl-p/Ctrl-n at a blank prompt, they just behave like
normal arrow keys.
- Hit Ctrl-r: opens a search prompt. Begin typing and the system searches
your history for lines that match what you've typed so far, completing as
much as it can.
- %hist: search history by index (this does *not* require readline).
* Persistent command history across sessions.
* Logging of input with the ability to save and restore a working session.
* System escape with !. Typing !ls will run 'ls' in the current directory.
* The reload command does a 'deep' reload of a module: changes made to the
module since you imported will actually be available without having to exit.
* Verbose and colored exception traceback printouts. See the magic xmode and
xcolor functions for details (just type %magic).
* Input caching system:
IPython offers numbered prompts (In/Out) with input and output caching. All
input is saved and can be retrieved as variables (besides the usual arrow
key recall).
The following GLOBAL variables always exist (so don't overwrite them!):
_i: stores previous input.
_ii: next previous.
_iii: next-next previous.
_ih : a list of all input _ih[n] is the input from line n.
Additionally, global variables named _i<n> are dynamically created (<n>
being the prompt counter), such that _i<n> == _ih[<n>]
For example, what you typed at prompt 14 is available as _i14 and _ih[14].
You can create macros which contain multiple input lines from this history,
for later re-execution, with the %macro function.
The history function %hist allows you to see any part of your input history
by printing a range of the _i variables. Note that inputs which contain
magic functions (%) appear in the history with a prepended comment. This is
because they aren't really valid Python code, so you can't exec them.
* Output caching system:
For output that is returned from actions, a system similar to the input
cache exists but using _ instead of _i. Only actions that produce a result
(NOT assignments, for example) are cached. If you are familiar with
Mathematica, IPython's _ variables behave exactly like Mathematica's %
variables.
The following GLOBAL variables always exist (so don't overwrite them!):
_ (one underscore): previous output.
__ (two underscores): next previous.
___ (three underscores): next-next previous.
Global variables named _<n> are dynamically created (<n> being the prompt
counter), such that the result of output <n> is always available as _<n>.
Finally, a global dictionary named _oh exists with entries for all lines
which generated output.
* Directory history:
Your history of visited directories is kept in the global list _dh, and the
magic %cd command can be used to go to any entry in that list.
* Auto-parentheses and auto-quotes (adapted from Nathan Gray's LazyPython)
1. Auto-parentheses
Callable objects (i.e. functions, methods, etc) can be invoked like
this (notice the commas between the arguments)::
In [1]: callable_ob ar
|
xuru/pyvisdk
|
pyvisdk/enums/perf_format.py
|
Python
|
mit
| 209
| 0
|
########################################
# Automatically generated, do not edit.
########################################
from pyvisdk.thirdparty import Enum
PerfFormat = Enum(
    'csv',
'normal',
)
|
tsl143/addons-server
|
src/olympia/landfill/translations.py
|
Python
|
bsd-3-clause
| 364
| 0
|
# -*- coding: utf-8 -*-
def generate_translations(item):
"""Generate French and Spanish translations for the given `item`."""
fr_prefix = u'(français) '
es_prefix = u'(español) '
oldname = unicode(item.name)
item.name = {'en': oldname,
'fr': fr_prefix + oldname,
'es': es_prefix + oldname}
    item.save()
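# --- Hedged usage sketch (Python 2, not part of the original module): the
# model name below is an assumption; any saved instance with a translated
# 'name' field works.
#     addon = Addon.objects.get(pk=some_pk)
#     generate_translations(addon)  # 'name' now carries en/fr/es variants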
|
12AngryMen/votca-scripts
|
lib/Carlstuff/ctp/cube2xyz.py
|
Python
|
apache-2.0
| 3,451
| 0.066068
|
from __future__ import division
import numpy as np
import sys as sys
class Atom(object):
def __init__(self, Z, pos):
self.Z = Z
self.pos = pos
self.VdW = 8 # a.u.
def DistFrom(self, pos):
return np.dot(self.pos - pos, self.pos - pos)**0.5
class ScalarField(object):
def __init__(self, atoms, scalarfield, C0, Cx, Cy, Cz, Nx, Ny, Nz):
self.atoms = atoms
self.field = scalarfield
self.C0 = C0
self.Cx = Cx
self.Cy = Cy
self.Cz = Cz
self.Nx = Nx
self.Ny = Ny
self.Nz = Nz
def write_xyz(self, outfile):
outt = open(outfile, 'w')
slot = 0
for i in range(self.Nx):
for j in range(self.Ny):
for k in range(self.Nz):
xyz = self.C0 + i*self.Cx + j*self.Cy + k*self.Cz
outt.write('%4.7f %4.7f %4.7f %4.7f \n' % (xyz[0], xyz[1], xyz[2], self.field[slot]))
slot += 1
def testPos(self, pos):
for atom in self.atoms:
if atom.DistFrom(pos) < atom.VdW:
return False
return True
def Restrict_Outside_VdW(self):
Field_Outside_VdW = []
Outside = []
Idcs = []
idx = -1
for i in range(self.Nx):
for j in range(self.Ny):
for k in range(self.Nz):
idx += 1
pos = self.C0 + i*self.Cx + j*self.Cy + k*self.Cz
#if np.dot(pos,pos)**0.5 > 10:
if self.testPos(pos):
Field_Outside_VdW.append(self.field[idx])
Outside.append(pos)
Idcs.append(idx)
else:
pass
PrintProgress(i)
self.field = Field_Outside_VdW
return Outside, Idcs
def Restrict_Outside_VdW_Idcs(self, idcs):
Field_Outside_VdW = []
for i in idcs:
Field_Outside_VdW.append(self.field[i])
self.field = np.array(Field_Outside_VdW)
def Printer(data):
sys.stdout.write("\r"+data.__str__())
sys.stdout.flush()
def PrintProgress(i):
progress = 'Voxel x # ' + str(i)
sys.stdout.write("\r"+progress.__str__())
sys.stdout.flush()
def ArrayFromCube(infile):
print "Reading file", infile,
atoms = []
scalarfield = []
C0 = None
Cx = None
Cy = None
Cz = None
Nx = None
Ny = None
Nz = None
lineCount = 0
atomCount = 0
intt = open(infile, 'r')
for ln in intt.readlines():
lineCount += 1
ln = ln.split()
# Empty line?
if ln == []:
continue
# Header line?
elif lineCount <= 2:
continue
# Cube origin
elif lineCount == 3:
atomCount = int(ln[0])
C0 = np.array( [float(ln[1]), float(ln[2]), float(ln[3])] )
# Cube X Y Z
elif lineCount == 4:
Nx = int(ln[0])
Cx = np.array( [float(ln[1]), float(ln[2]), float(ln[3])] )
elif lineCount == 5:
Ny = int(ln[0])
Cy = np.array( [float(ln[1]), float(ln[2]), float(ln[3])] )
        elif lineCount == 6:
Nz = int(ln[0])
Cz = np.array( [float(ln[1]), float(ln[2]), float(ln[3])] )
# Atom line
elif len(ln) == 5:
Z = int(ln[0])
pos = np.array( [float(ln[2]), float(ln[3]), float(ln[4])] )
atoms.append( Atom(Z,pos) )
# Scalar-field line
else:
for item in ln:
item = float(item)
scalarfield.append(item)
    scalarfield = np.array(scalarfield)
print ":", len(scalarfield), "grid points."
print "... Atoms ", len(atoms)
print "... Voxel numbers ", Nx, Ny, Nz, " TOTAL ", Nx*Ny*Nz
print "... Cube origin, axes ", C0, Cx, Cy, Cz
return ScalarField(atoms, scalarfield, C0, Cx, Cy, Cz, Nx, Ny, Nz)
field = ArrayFromCube('DCV_0_esp_gdma.cube')
field.write_xyz('esp_0.xyz')
|
TeamEOS/external_chromium_org
|
chrome/common/extensions/docs/server2/document_renderer.py
|
Python
|
bsd-3-clause
| 4,116
| 0.003158
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
from document_parser import ParseDocument
from third_party.json_schema_compiler.model import UnixName
class DocumentRenderer(object):
'''Performs document-level rendering such as the title, references,
and table of contents: pulling that data out of the document, then
replacing the $(title), $(ref:...) and $(table_of_contents) tokens with them.
This can be thought of as a parallel to TemplateRenderer; while
TemplateRenderer is responsible for interpreting templates and rendering files
within the template engine, DocumentRenderer is responsible for interpreting
higher-level document concepts like the title and TOC, then performing string
replacement for them. The syntax for this replacement is $(...) where ... is
the concept. Currently title and table_of_contents are supported.
'''
def __init__(self, table_of_contents_renderer, ref_resolver):
self._table_of_contents_renderer = table_of_contents_renderer
self._ref_resolver = ref_resolver
def _RenderLinks(self, document, path):
''' Replaces all $(ref:...) references in |document| with html links.
References have two forms:
$(ref:api.node) - Replaces the reference with a link to node on the
API page. The title is set to the name of the node.
$(ref:api.node Title) - Same as the previous form, but title is set
to "Title".
'''
START_REF = '$(ref:'
END_REF = ')'
MAX_REF_LENGTH = 256
new_document = []
# Keeps track of position within |document|
cursor_index = 0
start_ref_index = document.find(START_REF)
while start_ref_index != -1:
end_ref_index = document.find(END_REF, start_ref_index)
if (end_ref_index == -1 or
end_ref_index - start_ref_index > MAX_REF_LENGTH):
end_ref_index = document.find(' ', start_ref_index)
logging.error('%s:%s has no terminating ) at line %s' % (
path,
document[start_ref_index:end_ref_index],
document.count('\n', 0, end_ref_index)))
new_document.append(document[cursor_index:end_ref_index + 1])
else:
ref = document[start_ref_index:end_ref_index]
ref_parts = ref[len(START_REF):].split(None, 1)
# Guess the api name from the html name, replacing '_' with '.' (e.g.
# if the page is app_window.html, guess the api name is app.window)
api_name = os.path.splitext(os.path.basename(path))[0].replace('_', '.')
title = ref_parts[0] if len(ref_parts) == 1 else ref_parts[1]
ref_dict = self._ref_resolver.SafeGetLink(ref_parts[0],
namespace=api_name,
title=title)
new_document.append(document[cursor_index:start_ref_index])
        new_document.append('<a href=%s>%s</a>' % (ref_dict['href'],
ref_dict['text']))
cursor_index = end_ref_index + 1
start_ref_index = document.find(START_REF, cursor_index)
new_document.append(document[cursor_index:])
return ''.join(new_document)
  def Render(self, document, path, render_title=False):
# Render links first so that parsing and later replacements aren't
# affected by $(ref...) substitutions
document = self._RenderLinks(document, path)
parsed_document = ParseDocument(document, expect_title=render_title)
toc_text, toc_warnings = self._table_of_contents_renderer.Render(
parsed_document.sections)
# Only 1 title and 1 table of contents substitution allowed; in the common
# case, save necessarily running over the entire file.
if parsed_document.title:
document = document.replace('$(title)', parsed_document.title, 1)
return (document.replace('$(table_of_contents)', toc_text, 1),
parsed_document.warnings + toc_warnings)
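# --- Hedged illustration (not part of the original module): given a page
# named app_window.html containing
#     See $(ref:app.window.create Create a window) for details.
# _RenderLinks() guesses the 'app.window' namespace from the file name,
# resolves the reference through the injected ref_resolver, and emits roughly
#     See <a href=...>Create a window</a> for details.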
|
chmouel/python-swiftclient
|
swiftclient/openstack/common/setup.py
|
Python
|
apache-2.0
| 12,438
| 0.000241
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utilities with minimum-depends for use in setup.py
"""
import datetime
import os
import re
import subprocess
import sys
from setuptools.command import sdist
def parse_mailmap(mailmap='.mailmap'):
mapping = {}
if os.path.exists(mailmap):
fp = open(mailmap, 'r')
for l in fp:
l = l.strip()
if not l.startswith('#') and ' ' in l:
canonical_email, alias = l.split(' ')
mapping[alias] = canonical_email
return mapping
def canonicalize_emails(changelog, mapping):
"""Takes in a string and an email alias mapping and replaces all
instances of the aliases in the string with their real email.
"""
for alias, email in mapping.iteritems():
changelog = changelog.replace(alias, email)
return changelog
# Get requirements from the first file that exists
def get_reqs_from_files(requirements_files):
reqs_in = []
for requirements_file in requirements_files:
if os.path.exists(requirements_file):
return open(requirements_file, 'r').read().split('\n')
return []
def parse_requirements(requirements_files=['requirements.txt',
'tools/pip-requires']):
requirements = []
for line in get_reqs_from_files(requirements_files):
# For the requirements list, we need to inject only the portion
# after egg= so that distutils knows the package it's looking for
# such as:
# -e git://github.com/openstack/nova/master#egg=nova
if re.match(r'\s*-e\s+', line):
requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1',
line))
# such as:
# http://github.com/openstack/nova/zipball/master#egg=nova
elif re.match(r'\s*https?:', line):
requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1',
line))
# -f lines are for index locations, and don't get used here
elif re.match(r'\s*-f\s+', line):
pass
# argparse is part of the standard library starting with 2.7
# adding it to the requirements list screws distro installs
elif line == 'argparse' and sys.version_info >= (2, 7):
pass
else:
requirements.append(line)
return requirements
def parse_dependency_links(requirements_files=['requirements.txt',
'tools/pip-requires']):
dependency_links = []
# dependency_links inject alternate locations to find packages listed
# in requirements
for line in get_reqs_from_files(requirements_files):
# skip comments and blank lines
if re.match(r'(\s*#)|(\s*$)', line):
continue
# lines with -e or -f need the whole line, minus the flag
if re.match(r'\s*-[ef]\s+', line):
dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line))
# lines that are only urls can go in unmolested
elif re.match(r'\s*https?:', line):
dependency_links.append(line)
return dependency_links
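# --- Hedged illustration (not part of the original module): for a line like
#     -e git://github.com/openstack/nova/master#egg=nova
# parse_requirements() yields the bare package name 'nova', while
# parse_dependency_links() yields the same line with the '-e ' flag stripped.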
def write_requirements():
venv = os.environ.get('VIRTUAL_ENV', None)
if venv is not None:
with open("req
|
uirements.txt", "w") as req_file:
output = subprocess.Popen(["pip", "-E", venv, "freeze", "-l"],
stdout=subprocess.PIPE)
requirements = output.communicate()[0].strip()
req_file.write(requirements)
def _run_shell_command(cmd):
output = subprocess.Popen(["/bin/sh", "-c", cmd],
stdout=subprocess.PIPE)
out = output.communicate()
if len(out) == 0:
return None
if len(out[0].strip()) == 0:
return None
return out[0].strip()
def _get_git_next_version_suffix(branch_name):
datestamp = datetime.datetime.now().strftime('%Y%m%d')
if branch_name == 'milestone-proposed':
revno_prefix = "r"
else:
revno_prefix = ""
_run_shell_command("git fetch origin +refs/meta/*:refs/remotes/meta/*")
milestone_cmd = "git show meta/openstack/release:%s" % branch_name
milestonever = _run_shell_command(milestone_cmd)
if not milestonever:
milestonever = ""
post_version = _get_git_post_version()
revno = post_version.split(".")[-1]
return "%s~%s.%s%s" % (milestonever, datestamp, revno_prefix, revno)
def _get_git_current_tag():
return _run_shell_command("git tag --contains HEAD")
def _get_git_tag_info():
return _run_shell_command("git describe --tags")
def _get_git_post_version():
current_tag = _get_git_current_tag()
if current_tag is not None:
return current_tag
else:
tag_info = _get_git_tag_info()
if tag_info is None:
base_version = "0.0"
cmd = "git --no-pager log --oneline"
out = _run_shell_command(cmd)
revno = len(out.split("\n"))
else:
tag_infos = tag_info.split("-")
base_version = "-".join(tag_infos[:-2])
revno = tag_infos[-2]
return "%s.%s" % (base_version, revno)
def write_git_changelog():
"""Write a changelog based on the git changelog."""
if os.path.isdir('.git'):
git_log_cmd = 'git log --stat'
changelog = _run_shell_command(git_log_cmd)
mailmap = parse_mailmap()
with open("ChangeLog", "w") as changelog_file:
changelog_file.write(canonicalize_emails(changelog, mailmap))
def generate_authors():
"""Create AUTHORS file using git commits."""
jenkins_email = 'jenkins@review.openstack.org'
old_authors = 'AUTHORS.in'
new_authors = 'AUTHORS'
if os.path.isdir('.git'):
# don't include jenkins email address in AUTHORS file
git_log_cmd = ("git log --format='%aN <%aE>' | sort -u | "
"grep -v " + jenkins_email)
changelog = _run_shell_command(git_log_cmd)
mailmap = parse_mailmap()
with open(new_authors, 'w') as new_authors_fh:
new_authors_fh.write(canonicalize_emails(changelog, mailmap))
if os.path.exists(old_authors):
with open(old_authors, "r") as old_authors_fh:
new_authors_fh.write('\n' + old_authors_fh.read())
_rst_template = """%(heading)s
%(underline)s
.. automodule:: %(module)s
:members:
:undoc-members:
:show-inheritance:
"""
def read_versioninfo(project):
"""Read the versioninfo file. If it doesn't exist, we're in a github
zipball, and there's really know way to know what version we really
are, but that should be ok, because the utility of that should be
just about nil if this code path is in use in the first place."""
versioninfo_path = os.path.join(project, 'versioninfo')
if os.path.exists(versioninfo_path):
with open(versioninfo_path, 'r') as vinfo:
version = vinfo.read().strip()
else:
version = "0.0.0"
return version
def write_versioninfo(project, version):
"""Write a simple file containing the version of the package."""
open(os.path.join(project, 'versioninfo'), 'w').write("%s\n" % version)
def get_cmdclass():
"""Return dict of commands to run from setup.py."""
cmdclass = dict()
def _find_modules(arg, dirname, files):
for filename in files:
if filename.endswith('.py') and filename != '__init__.py':
arg["%s.%s" % (dirna
|
z-uo/imagemash
|
plugins/plugin_rotate.py
|
Python
|
gpl-3.0
| 11,247
| 0.003379
|
#!/usr/bin/python3
#-*- coding: utf-8 -*-
#
#Copyright pops (pops451@gmail.com), 2010-2011
#
#This file is part of imagemash.
#
# imagemash is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# imagemash is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with imagemash. If not, see <http://www.gnu.org/licenses/>.
import sys
import os
from PyQt4 import QtGui
from PyQt4 import QtCore
import math
from plugsubclass import Viewer
from plugsubclass import Painting
from plugsubclass import Line
### plugin infos #######################################################
NAME = "rotation"
MOD_NAME = "plugin_rotate"
DESCRIPTION = "rotation libre"
AUTHOR = "pops"
VERSION = 0.1
########################################################################
class ExecDialog(QtGui.QDialog):
def __init__(self, images, args=None, code="", parent=None):
QtGui.QDialog.__init__(self, parent)
self.setWindowTitle("rotation")
self.parent = parent
self.codeBefore = code
### widget #####################################################
### image ###
self.imW = QtGui.QComboBox(self)
self.images = []
if images:
for i, v in enumerate(images):
f = os.path.split(v)[1]
self.imW.addItem(f)
self.images.append(v)
### zoom buttons ###
self.zoomInW = QtGui.QToolButton()
self.zoomInW.setAutoRaise(True)
self.zoomInW.setIcon(QtGui.QIcon(QtGui.QPixmap("icons/black_zoom_in.svg")))
self.zoomOutW = QtGui.QToolButton()
self.zoomOutW.setAutoRaise(True)
self.zoomOutW.setIcon(QtGui.QIcon(QtGui.QPixmap("icons/black_zoom_out.svg")))
self.zoomOneW = QtGui.QToolButton()
self.zoomOneW.setAutoRaise(True)
self.zoomOneW.setIcon(QtGui.QIcon(QtGui.QPixmap("icons/black_zoom_one.svg")))
        ### color ###
if args:
self.color = args["color"]
else:
self.color = QtGui.QColor(0, 0, 0)
self.colorIcon = QtGui.QPixmap(22, 22)
self.colorIcon.fill(self.color)
self.colorW = QtGui.QToolButton(self)
self.colorW.setAutoRaise(True)
self.colorW.setIcon(QtGui.QIcon(self.colorIcon))
### action ###
self.actionW = QtGui.QComboBox(self)
self.actionW.addItem("tracer horizontal")
self.actionW.addItem("tracer vertical")
self.actionW.addItem("entrer angle")
### rotate 90, 180, 370 ###
self.rotate90W = QtGui.QToolButton()
self.rotate90W.setAutoRaise(True)
self.rotate90W.setIcon(QtGui.QIcon(QtGui.QPixmap("icons/90.svg")))
self.rotate270W = QtGui.QToolButton()
self.rotate270W.setAutoRaise(True)
self.rotate270W.setIcon(QtGui.QIcon(QtGui.QPixmap("icons/-90.svg")))
self.rotate180W = QtGui.QToolButton()
self.rotate180W.setAutoRaise(True)
self.rotate180W.setIcon(QtGui.QIcon(QtGui.QPixmap("icons/180.svg")))
### labels info ###
self.angle = 0
self.degreL = QtGui.QLabel("angle :")
self.degreW = QtGui.QLineEdit("0")
self.degreW.setValidator(QtGui.QIntValidator(self.degreW))
self.degreW.setDisabled(True)
### reset ###
self.resetW = QtGui.QPushButton("reset")
        ### preview ###
self.apercuW = QtGui.QPushButton("apercu")
### viewer ###
self.painting = Painting(self)
self.painting.set_fig(Line(self.painting), self.color)
self.viewer = Viewer(self)
self.viewer.setWidget(self.painting)
### apply, undo ###
self.okW = QtGui.QPushButton('apply', self)
self.undoW = QtGui.QPushButton('undo', self)
        ### function ###################################################
if images:
self.im_changed(self.images[0])
        ### connections ###############################################
self.imW.activated[str].connect(self.im_changed)
self.rotate90W.clicked.connect(self.rot90)
        self.rotate180W.clicked.connect(self.rot180)
self.rotate270W.clicked.connect(self.rot270)
self.colorW.clicked.connect(self.color_clicked)
self.actionW.activated[str].connect(self.action_changed)
self.degreW.textChanged.connect(self.degre_changed)
self.apercuW.clicked.connect(self.apercu_clicked)
self.resetW.clicked.connect(self.reset_clicked)
self.okW.clicked.connect(self.ok_clicked)
self.undoW.clicked.connect(self.undo_clicked)
self.zoomInW.clicked.connect(self.zoom_in)
self.viewer.zoomIn.connect(self.zoom_in)
self.zoomOutW.clicked.connect(self.zoom_out)
self.viewer.zoomOut.connect(self.zoom_out)
self.zoomOneW.clicked.connect(self.zoom_one)
### args #######################################################
if args:
self.actionW.setCurrentIndex(args["action"])
self.action_changed()
self.painting.zoomN = args["zoom"]
self.angle = args["angle"]
self.apercu_clicked(self.angle)
else:
pass
### layout #####################################################
toolBox = QtGui.QHBoxLayout()
toolBox.addWidget(self.zoomInW)
toolBox.addWidget(self.zoomOutW)
toolBox.addWidget(self.zoomOneW)
toolBox.addWidget(self.colorW)
toolBox.addWidget(self.rotate90W)
toolBox.addWidget(self.rotate270W)
toolBox.addWidget(self.rotate180W)
toolBox.addStretch(0)
grid = QtGui.QGridLayout()
grid.setSpacing(2)
grid.addWidget(self.imW, 0, 0)
grid.addLayout(toolBox, 1, 0, 1, 2)
grid.addWidget(self.actionW, 0, 1)
grid.addWidget(self.degreL, 0, 2)
grid.addWidget(self.degreW, 0, 3)
grid.addWidget(self.apercuW, 1, 3)
grid.addWidget(self.resetW, 1, 2)
### ok, undo ###
okBox = QtGui.QHBoxLayout()
okBox.addStretch(0)
okBox.addWidget(self.okW)
okBox.addWidget(self.undoW)
### layout ###
layout = QtGui.QVBoxLayout()
layout.setSpacing(2)
layout.addLayout(grid)
layout.addWidget(self.viewer)
layout.addLayout(okBox)
self.setLayout(layout)
self.exec_()
def zoom_in(self):
self.painting.zoom(2.0)
def zoom_out(self):
self.painting.zoom(0.5)
def zoom_one(self):
self.painting.zoom(0)
def im_changed(self, text):
im = self.images[self.imW.currentIndex()]
self.painting.change_image(im, self.codeBefore)
def color_clicked(self):
color = QtGui.QColorDialog.getColor(self.color)
if color.isValid():
self.color = color
self.colorIcon.fill(self.color)
self.colorW.setIcon(QtGui.QIcon(self.colorIcon))
self.painting.color = self.color
self.painting.draw()
def rot90(self):
self.apercu_clicked(self.angle + 90)
def rot180(self):
self.apercu_clicked(self.angle + 180)
def rot270(self):
self.apercu_clicked(self.angle + 270)
def action_changed(self, text=None):
if self.actionW.currentIndex() == 0:
self.degreW.setDisabled(True)
self.painting.fig.setDisabled(False)
elif self.actionW.currentIndex() == 1:
self.degreW.setDisabled(True)
self.painting.fig.setDisabled(False)
elif self.actionW.currentIndex() == 2:
self.degreW.setDisabled(False)
self.painting.fig.setDisabled(True)
self.painting.draw()
def apercu_clicked(self, angle=False):
an
|
Boussadia/weboob
|
modules/creditcooperatif/backend.py
|
Python
|
agpl-3.0
| 2,921
| 0.001713
|
# -*- coding: utf-8 -*-
# Copyright(C) 2012 Kevin Pouget
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.capabilities.bank import ICapBank, AccountNotFound
from weboob.tools.backend import BaseBackend, BackendConfig
from weboob.tools.value import ValueBackendPassword, Value
from .perso.browser import CreditCooperatif as CreditCooperatifPerso
from .pro.browser import CreditCooperatif as CreditCooperatifPro
__all__ = ['CreditCooperatifBackend']
class CreditCooperatifBackend(BaseBackend, ICapBank):
NAME = 'creditcooperatif'
MAINTAINER = u'Kevin Pouget'
EMAIL = 'weboob@kevin.pouget.me'
VERSION = '0.i'
DESCRIPTION = u'Crédit Coopératif'
LICENSE = 'AGPLv3+'
auth_type = {'particular': "Interface Particuliers",
'weak' : "Code confidentiel (pro)",
'strong': "Sesame (pro)"}
CONFIG = BackendConfig(Value('auth_type', label='Type de compte', choices=auth_type, default="particular"),
ValueBackendPassword('login', label='Code utilisateur', masked=False),
ValueBackendPassword('password', label='Code confidentiel ou code PIN'))
def create_default_browser(self):
if self.config['auth_type'].get() == 'particular':
self.BROWSER = CreditCooperatifPerso
return self.create_browser(self.config['login'].get(),
self.config['password'].get())
else:
self.BROWSER = CreditCooperatifPro
return self.create_browser(self.config['login'].get(),
self.config['password'].get(),
strong_auth=self.config['auth_type'].get() == "strong")
def iter_accounts(self):
with self.browser:
return self.browser.get_accounts_list()
def get_account(self, _id):
with self.browser:
account = self.browser.get_account(_id)
if account:
return account
else:
raise AccountNotFound()
def iter_history(self, account):
with self.browser:
return self.browser.get_history(account)
def iter_coming(self, account):
with self.browser:
return self.browser.get_coming(account)
|
HyperloopTeam/FullOpenMDAO
|
lib/python2.7/site-packages/openmdao.lib-0.13.0-py2.7.egg/openmdao/lib/drivers/test/test_opt_conmin.py
|
Python
|
gpl-2.0
| 27,188
| 0.001802
|
"""
Test the CONMIN optimizer component
"""
import unittest
import numpy
# pylint: disable=F0401,E0611
from openmdao.main.api import Assembly, Component, VariableTree, set_as_top, Driver
from openmdao.main.datatypes.api import Float, Array, Str, VarTree
from openmdao.lib.casehandlers.api import ListCaseRecorder
from openmdao.main.interfaces import IHasParameters, implements
from openmdao.main.hasparameters import HasParameters
from openmdao.util.decorators import add_delegate
from openmdao.lib.drivers.conmindriver import CONMINdriver
from openmdao.util.testutil import assert_rel_error
class OptRosenSuzukiComponent(Component):
""" From the CONMIN User's Manual:
EXAMPLE 1 - CONSTRAINED ROSEN-SUZUKI FUNCTION. NO GRADIENT INFORMATION.
MINIMIZE OBJ = X(1)**2 - 5*X(1) + X(2)**2 - 5*X(2) +
2*X(3)**2 - 21*X(3) + X(4)**2 + 7*X(4) + 50
Subject to:
G(1) = X(1)**2 + X(1) + X(2)**2 - X(2) +
X(3)**2 + X(3) + X(4)**2 - X(4) - 8 .LE.0
G(2) = X(1)**2 - X(1) + 2*X(2)**2 + X(3)**2 +
2*X(4)**2 - X(4) - 10 .LE.0
G(3) = 2*X(1)**2 + 2*X(1) + X(2)**2 - X(2) +
X(3)**2 - X(4) - 5 .LE.0
This problem is solved beginning with an initial X-vector of
X = (1.0, 1.0, 1.0, 1.0)
The optimum design is known to be
OBJ = 6.000
and the corresponding X-vector is
X = (0.0, 1.0, 2.0, -1.0)
"""
x = Array(iotype='in', low=-10, high=99)
g = Array([1., 1., 1.], iotype='out')
result = Float(iotype='out')
obj_string = Str(iotype='out')
opt_objective = Float(iotype='out')
# pylint: disable=C0103
def __init__(self):
super(OptRosenSuzukiComponent, self).__init__()
self.x = numpy.array([1., 1., 1., 1.], dtype=float)
self.result = 0.
self.opt_objective = 6.*10.0
self.opt_design_vars = [0., 1., 2., -1.]
def execute(self):
"""calculate the new objective value"""
x = self.x
self.result = (x[0]**2 - 5.*x[0] + x[1]**2 - 5.*x[1] +
2.*x[2]**2 - 21.*x[2] + x[3]**2 + 7.*x[3] + 50)
self.obj_string = "Bad"
#print "rosen", self.x
self.g[0] = (x[0]**2 + x[0] + x[1]**2 - x[1] +
x[2]**2 + x[2] + x[3]**2 - x[3] - 8)
self.g[1] = (x[0]**2 - x[0] + 2*x[1]**2 + x[2]**2 +
2*x[3]**2 - x[3] - 10)
self.g[2] = (2*x[0]**2 + 2*x[0] + x[1]**2 - x[1] +
x[2]**2 - x[3] - 5)
#print self.x, self.g
class RosenSuzuki2D(Component):
""" RosenSuzuki with 2D input. """
x = Array(iotype='in', low=-10, high=99)
result = Float(iotype='out')
opt_objective = Float(iotype='out')
# pylint: disable=C0103
def __init__(self):
super(RosenSuzuki2D, self).__init__()
self.x = numpy.array([[1., 1.], [1., 1.]], dtype=float)
self.result = 0.
self.opt_objective = 6.*10.0
self.opt_design_vars = [0., 1., 2., -1.]
def execute(self):
"""calculate the new objective value"""
self.result = (self.x[0][0]**2 - 5.*self.x[0][0] +
self.x[0][1]**2 - 5.*self.x[0][1] +
2.*self.x[1][0]**2 - 21.*self.x[1][0] +
self.x[1][1]**2 + 7.*self.x[1][1] + 50)
class RosenSuzukiMixed(Component):
""" RosenSuzuki with mixed scalar and 1D inputs. """
x0 = Float(iotype='in', low=-10, high=99)
x12 = Array(iotype='in', low=-10, high=99)
x3 = Float(iotype='in', low=-10, high=99)
result = Float(iotype='out')
opt_objective = Float(iotype='out')
# pylint: disable=C0103
def __init__(self):
super(RosenSuzukiMixed, self).__init__()
self.x0 = 1.
self.x12 = numpy.array([1., 1.], dtype=float)
self.x3 = 1.
self.result = 0.
self.opt_objective = 6.*10.0
self.opt_design_vars = [0., 1., 2., -1.]
def execute(self):
"""calculate the new objective value"""
self.result = (self.x0**2 - 5.*self.x0 +
self.x12[0]**2 - 5.*self.x12[0] +
2.*self.x12[1]**2 - 21.*self.x12[1] +
self.x3**2 + 7.*self.x3 + 50)
class CONMINdriverTestCase(unittest.TestCase):
"""test CONMIN optimizer component"""
def setUp(self):
self.top = set_as_top(Assembly())
self.top.add('driver', CONMINdriver())
self.top.add('comp', OptRosenSuzukiComponent())
self.top.driver.workflow.add('comp')
self.top.driver.iprint = 0
self.top.driver.itmax = 30
def test_opt1(self):
# Run with scalar parameters, scalar constraints, and OpenMDAO gradient.
        self.top.driver.add_objective('10*comp.result')
# pylint: disable=C0301
map(self.top.driver.add_parameter,
['comp.x[0]', 'comp.x[1]', 'comp.x[2]', 'comp.x[3]'])
map(self.top.driver.add_constraint, [
'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
'2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'])
self.top.recorders = [ListCaseRecorder()]
self.top.driver.iprint = 0
self.top.run()
# pylint: disable=E1101
assert_rel_error(self, self.top.comp.opt_objective,
self.top.driver.eval_objective(), 0.01)
assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
1 + self.top.comp.x[0], 0.05)
assert_rel_error(self, self.top.comp.opt_design_vars[1],
self.top.comp.x[1], 0.06)
assert_rel_error(self, self.top.comp.opt_design_vars[2],
self.top.comp.x[2], 0.06)
assert_rel_error(self, self.top.comp.opt_design_vars[3],
self.top.comp.x[3], 0.05)
cases = self.top.recorders[0].get_iterator()
end_case = cases[-1]
self.assertEqual(self.top.comp.x[1],
end_case.get_input('comp.x[1]'))
self.assertEqual(10*self.top.comp.result,
end_case.get_output('_pseudo_0.out0'))
def test_opt1_a(self):
# Run with scalar parameters, 1D constraint, and OpenMDAO gradient.
self.top.driver.add_objective('10*comp.result')
# pylint: disable=C0301
map(self.top.driver.add_parameter,
['comp.x[0]', 'comp.x[1]', 'comp.x[2]', 'comp.x[3]'])
self.top.driver.add_constraint('comp.g <= 0')
self.top.driver.iprint = 0
self.top.run()
# pylint: disable=E1101
assert_rel_error(self, self.top.comp.opt_objective,
self.top.driver.eval_objective(), 0.01)
assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
1 + self.top.comp.x[0], 0.06)
assert_rel_error(self, self.top.comp.opt_design_vars[1],
self.top.comp.x[1], 0.06)
assert_rel_error(self, self.top.comp.opt_design_vars[2],
self.top.comp.x[2], 0.06)
assert_rel_error(self, self.top.comp.opt_design_vars[3],
self.top.comp.x[3], 0.05)
def test_opt1_with_CONMIN_gradient(self):
# Note: all other tests use OpenMDAO gradient
self.top.driver.add_objective('10*comp.result')
self.top.driver.add_parameter('comp.x[0]', fd_step=.00001)
self.top.driver.add_parameter('comp.x[1]', fd_step=.00001)
self.top.driver.add_parameter('comp.x[2]', fd_step=.00001)
self.top.driver.add_parameter('comp.x[3]', fd_step=.00001)
# pylint: disable=C0301
map(self.top.driver.add_constraint, [
'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
'2*comp.x[0]**2+2*comp.
|
vecnet/simulation-manager
|
sim_manager/tests/constants.py
|
Python
|
mpl-2.0
| 649
| 0.001541
|
# This file is part of the Simulation Manager project for VecNet.
# For copyright and licensing information about this project, see the
# NOTICE.txt and LICENSE.md files in its top-level directory; they are
# available at https://github.com/vecnet/simulation-manager
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License (MPL), version 2.0. If a copy of the MPL was not distributed
# with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
from path import path
# Root directory for tests that need to make directories where they can write output files
TEST_OUTPUT_ROOT = path(__file__).dirname() / 'output'
|
joshsamara/game-website
|
core/views/base.py
|
Python
|
mit
| 691
| 0
|
"""Base views and view utilities."""
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
class LoginRequiredMixin(object):
    """Ensures that the user is authenticated before accessing the view."""
@classmethod
def as_view(cls, **initkwargs):
"""Make views that inherit this require login."""
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)
class Home(View):
"""Default index page handler."""
def get(self, request, *args, **kwargs):
"""Simply render the index page."""
return render(request, "index.html")
|
pmisik/buildbot
|
master/buildbot/scripts/sendchange.py
|
Python
|
gpl-2.0
| 2,058
| 0.000486
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import sys
import traceback
from twisted.internet import defer
from buildbot.clients import sendchange as sendchange_client
from buildbot.util import in_reactor
@in_reactor
@defer.inlineCallbacks
def sendchange(config):
encoding = config.get('encoding', 'utf8')
who = config.get('who')
auth = config.get('auth')
master = config.get('master')
branch = config.get('branch')
category = config.get('category')
revision = config.get('revision')
properties = config.get('properties', {})
repository = config.get('repository', '')
vc = config.get('vc', None)
project = config.get('project', '')
revlink = config.get('revlink', '')
when = config.get('when')
comments = config.get('comments')
files = config.get('files', ())
codebase = config.get('codebase', None)
s = sendchange_client.Sender(master, auth, encoding=encoding)
try:
yield s.send(branch, revision, comments, files, who=who,
                     category=category, when=when, properties=properties,
repository=repository, vc=vc, project=project, revlink=revlink,
codebase=codebase)
except Exception:
print("change not sent:")
traceback.print_exc(file=sys.stdout)
return 1
else:
print("change sent successfully
|
")
return 0
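# --- Hedged illustration (not part of the original module): sendchange()
# consumes a plain dict, normally assembled by the CLI option parser; every
# value below is an assumption.
#     sendchange({'master': 'localhost:9990', 'auth': ('user', 'passwd'),
#                 'who': 'alice', 'branch': 'main', 'revision': 'abc123',
#                 'comments': 'fix build', 'files': ('src/foo.c',)})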
|
quantifiedcode-bot/invenio-deposit
|
invenio_deposit/autocomplete_utils.py
|
Python
|
gpl-2.0
| 3,068
| 0
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Utility functions for field autocomplete feature."""
from invenio.utils.orcid import OrcidSearch
from invenio.utils.sherpa_romeo import SherpaRomeoSearch
def kb_autocomplete(name, mapper=None):
"""Create an autocomplete function from knowledge base.
:param name: Name of knowledge base
    :param mapper: Function that will map a knowledge base entry to an
        autocomplete entry.
"""
def inner(dummy_form, dummy_field, term, limit=50):
from invenio_knowledge.api import get_kb_mappings
result = get_kb_mappings(name, '', term, limit=limit)[:limit]
return map(mapper, result) if mapper is not None else result
return inner
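# Usage sketch (added; not from the original module). The factory is meant to
# be bound to a form field, but the returned closure can also be called
# directly; 'journals' is a hypothetical knowledge base name.
#
#     complete = kb_autocomplete('journals',
#                                mapper=lambda entry: {'value': entry['value']})
#     suggestions = complete(None, None, 'Nature', limit=10)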
def kb_dynamic_autocomplete(name, mapper=None):
"""Create an autocomplete function from dynamic knowledge base.
:param name: Name of knowledge base
    :param mapper: Function that will map a knowledge base entry to an
        autocomplete entry.
"""
    def inner(dummy_form, dummy_field, term, limit=50):
from invenio_knowledge.api import get_kbd_values
result = get_kbd_values(name, searchwith=term)[:limit]
return map(mapper, result) if mapper is not None else result
return inner
def sherpa_romeo_publishers(dummy_form, dummy_field, term, limit=50):
"""Autocomplete publishers from SHERPA/RoMEO service."""
if term:
sherpa_romeo = SherpaRomeoSearch()
publishers = sherpa_romeo.search_publisher(term)
if publishers is None:
return []
return map(lambda x: {'value': x}, publishers[:limit])
return []
def sherpa_romeo_journals(dummy_form, dummy_field, term, limit=50):
"""Search SHERPA/RoMEO for journal name."""
if term:
        # SherpaRomeoSearch doesn't like unicode
if isinstance(term, unicode):
term = term.encode('utf8')
s = SherpaRomeoSearch()
journals = s.search_journal(term)
if journals is not None:
return map(lambda x: {'value': x}, journals[:limit])
return []
def orcid_authors(dummy_form, dummy_field, term, limit=50):
"""Autocomplete authors from ORCID service."""
if term:
orcid = OrcidSearch()
orcid.search_authors(term)
return orcid.get_authors_names()
return []
|
dvirsky/govsearch
|
scraper/scraper/spiders/resolutions.py
|
Python
|
bsd-3-clause
| 2,802
| 0.003569
|
# -*- coding: utf-8 -*-
import scrapy
from scraper.items import ResolutionItem
class ResolutionSpider(scrapy.Spider):
name = "resolutions"
allowed_domains = ["www.pmo.gov.il"]
start_urls = ["http://www.pmo.gov.il/Secretary/GovDecisions/Pages/default.aspx"]
def should_retry(self, response):
"""Sometimes body uses anti-scraping tricks.
e.g. body is:
<html><body><script>document.cookie='yyyyyyy=ea850ff3yyyyyyy_ea850ff3; path=/';window.location.href=window.location.href;</script></body></html>
Retrying usually yields a correct response.
"""
if not response.body.startswith('<html><body><script>'):
return False
self.logger.debug('anti-scraping trick for url %s', response.url)
new_request = response.request.copy()
new_request.dont_filter = True # don't de-duplicate the url for retrying
return new_request
def parse(self, response):
"""Parse pages containing links to government resolutions."""
# check if response was bad
new_request = self.should_retry(response)
# retry if so
if new_request:
yield new_request
return
# parse specific resolutions found in current page
for sel in response.xpath("//div[@id='GDSR']/div/a/@href"):
            yield scrapy.Request(sel.extract(), callback=self.parse_resolution)
# parse next pages
for sel in response.xpath("//a[@class='PMM-resultsPagingNumber']/@href"):
url = response.urljoin(sel.extract())
yield scrapy.Request(url)
def parse_resolution(self, response):
"""Scrape relevant fields in specific resolution response."""
# check if response was bad
new_request = self.should_retry(response)
# retry if so
if new_request:
yield new_request
return
try:
yield ResolutionItem(
url=response.url,
date=response.xpath("/html/head/meta[@name='EventDate']/@content").extract(),
resolution_number=response.xpath("//*[@id='aspnetForm']/@action").extract(),
gov=response.xpath("/html/head/meta[@name='Subjects']/@content").extract(),
title=response.xpath("//h1[@class='mainTitle']//text()").extract(),
subject=response.xpath("//div[@id='ctl00_PlaceHolderMain_GovXParagraph1Panel']//text()[not(ancestor::h3)]").extract(),
body=response.xpath("//*[@id='ctl00_PlaceHolderMain_GovXParagraph2Panel']//text()[not(ancestor::h3)]").extract(),
)
except AttributeError:
self.logger.error('bad body in response for url %s and body %s',
response.url, response.body)
|
miyataken999/weblate
|
weblate/trans/fonts.py
|
Python
|
gpl-3.0
| 49,812
| 0
|
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
'''
Font handling wrapper.
'''
from weblate import appsettings
from PIL import ImageFont
import os.path
# List of chars in base DejaVu font, otherwise we use DroidSansFallback
BASE_CHARS = frozenset((
0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe,
0xf, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a,
0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26,
0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32,
0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e,
0x3f, 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a,
0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56,
0x57, 0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f, 0x60, 0x61, 0x62,
0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e,
0x6f, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a,
0x7b, 0x7c, 0x7d, 0x7e, 0x7f, 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86,
0x87, 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, 0x90, 0x91, 0x92,
0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e,
    0x9f, 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa,
0xab, 0xac, 0xad, 0xae, 0xaf, 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
0xb7, 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf, 0xc0, 0xc1, 0xc2,
0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce,
0xcf, 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
    0xdb, 0xdc, 0xdd, 0xde, 0xdf, 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6,
0xe7, 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef, 0xf0, 0xf1, 0xf2,
0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe,
0xff, 0x100, 0x101, 0x102, 0x103, 0x104, 0x105, 0x106, 0x107, 0x108, 0x109,
0x10a, 0x10b, 0x10c, 0x10d, 0x10e, 0x10f, 0x110, 0x111, 0x112, 0x113,
0x114, 0x115, 0x116, 0x117, 0x118, 0x119, 0x11a, 0x11b, 0x11c, 0x11d,
0x11e, 0x11f, 0x120, 0x121, 0x122, 0x123, 0x124, 0x125, 0x126, 0x127,
0x128, 0x129, 0x12a, 0x12b, 0x12c, 0x12d, 0x12e, 0x12f, 0x130, 0x131,
0x132, 0x133, 0x134, 0x135, 0x136, 0x137, 0x138, 0x139, 0x13a, 0x13b,
0x13c, 0x13d, 0x13e, 0x13f, 0x140, 0x141, 0x142, 0x143, 0x144, 0x145,
0x146, 0x147, 0x148, 0x149, 0x14a, 0x14b, 0x14c, 0x14d, 0x14e, 0x14f,
0x150, 0x151, 0x152, 0x153, 0x154, 0x155, 0x156, 0x157, 0x158, 0x159,
0x15a, 0x15b, 0x15c, 0x15d, 0x15e, 0x15f, 0x160, 0x161, 0x162, 0x163,
0x164, 0x165, 0x166, 0x167, 0x168, 0x169, 0x16a, 0x16b, 0x16c, 0x16d,
0x16e, 0x16f, 0x170, 0x171, 0x172, 0x173, 0x174, 0x175, 0x176, 0x177,
0x178, 0x179, 0x17a, 0x17b, 0x17c, 0x17d, 0x17e, 0x17f, 0x180, 0x181,
0x182, 0x183, 0x184, 0x185, 0x186, 0x187, 0x188, 0x189, 0x18a, 0x18b,
0x18c, 0x18d, 0x18e, 0x18f, 0x190, 0x191, 0x192, 0x193, 0x194, 0x195,
0x196, 0x197, 0x198, 0x199, 0x19a, 0x19b, 0x19c, 0x19d, 0x19e, 0x19f,
0x1a0, 0x1a1, 0x1a2, 0x1a3, 0x1a4, 0x1a5, 0x1a6, 0x1a7, 0x1a8, 0x1a9,
0x1aa, 0x1ab, 0x1ac, 0x1ad, 0x1ae, 0x1af, 0x1b0, 0x1b1, 0x1b2, 0x1b3,
0x1b4, 0x1b5, 0x1b6, 0x1b7, 0x1b8, 0x1b9, 0x1ba, 0x1bb, 0x1bc, 0x1bd,
0x1be, 0x1bf, 0x1c0, 0x1c1, 0x1c2, 0x1c3, 0x1c4, 0x1c5, 0x1c6, 0x1c7,
0x1c8, 0x1c9, 0x1ca, 0x1cb, 0x1cc, 0x1cd, 0x1ce, 0x1cf, 0x1d0, 0x1d1,
0x1d2, 0x1d3, 0x1d4, 0x1d5, 0x1d6, 0x1d7, 0x1d8, 0x1d9, 0x1da, 0x1db,
0x1dc, 0x1dd, 0x1de, 0x1df, 0x1e0, 0x1e1, 0x1e2, 0x1e3, 0x1e4, 0x1e5,
0x1e6, 0x1e7, 0x1e8, 0x1e9, 0x1ea, 0x1eb, 0x1ec, 0x1ed, 0x1ee, 0x1ef,
0x1f0, 0x1f1, 0x1f2, 0x1f3, 0x1f4, 0x1f5, 0x1f6, 0x1f7, 0x1f8, 0x1f9,
0x1fa, 0x1fb, 0x1fc, 0x1fd, 0x1fe, 0x1ff, 0x200, 0x201, 0x202, 0x203,
0x204, 0x205, 0x206, 0x207, 0x208, 0x209, 0x20a, 0x20b, 0x20c, 0x20d,
0x20e, 0x20f, 0x210, 0x211, 0x212, 0x213, 0x214, 0x215, 0x216, 0x217,
0x218, 0x219, 0x21a, 0x21b, 0x21c, 0x21d, 0x21e, 0x21f, 0x220, 0x221,
0x222, 0x223, 0x224, 0x225, 0x226, 0x227, 0x228, 0x229, 0x22a, 0x22b,
0x22c, 0x22d, 0x22e, 0x22f, 0x230, 0x231, 0x232, 0x233, 0x234, 0x235,
0x236, 0x237, 0x238, 0x239, 0x23a, 0x23b, 0x23c, 0x23d, 0x23e, 0x23f,
0x240, 0x241, 0x242, 0x243, 0x244, 0x245, 0x246, 0x247, 0x248, 0x249,
0x24a, 0x24b, 0x24c, 0x24d, 0x24e, 0x24f, 0x250, 0x251, 0x252, 0x253,
0x254, 0x255, 0x256, 0x257, 0x258, 0x259, 0x25a, 0x25b, 0x25c, 0x25d,
0x25e, 0x25f, 0x260, 0x261, 0x262, 0x263, 0x264, 0x265, 0x266, 0x267,
0x268, 0x269, 0x26a, 0x26b, 0x26c, 0x26d, 0x26e, 0x26f, 0x270, 0x271,
0x272, 0x273, 0x274, 0x275, 0x276, 0x277, 0x278, 0x279, 0x27a, 0x27b,
0x27c, 0x27d, 0x27e, 0x27f, 0x280, 0x281, 0x282, 0x283, 0x284, 0x285,
0x286, 0x287, 0x288, 0x289, 0x28a, 0x28b, 0x28c, 0x28d, 0x28e, 0x28f,
0x290, 0x291, 0x292, 0x293, 0x294, 0x295, 0x296, 0x297, 0x298, 0x299,
0x29a, 0x29b, 0x29c, 0x29d, 0x29e, 0x29f, 0x2a0, 0x2a1, 0x2a2, 0x2a3,
0x2a4, 0x2a5, 0x2a6, 0x2a7, 0x2a8, 0x2a9, 0x2aa, 0x2ab, 0x2ac, 0x2ad,
0x2ae, 0x2af, 0x2b0, 0x2b1, 0x2b2, 0x2b3, 0x2b4, 0x2b5, 0x2b6, 0x2b7,
0x2b8, 0x2b9, 0x2ba, 0x2bb, 0x2bc, 0x2bd, 0x2be, 0x2bf, 0x2c0, 0x2c1,
0x2c2, 0x2c3, 0x2c4, 0x2c5, 0x2c6, 0x2c7, 0x2c8, 0x2c9, 0x2ca, 0x2cb,
0x2cc, 0x2cd, 0x2ce, 0x2cf, 0x2d0, 0x2d1, 0x2d2, 0x2d3, 0x2d4, 0x2d5,
0x2d6, 0x2d7, 0x2d8, 0x2d9, 0x2da, 0x2db, 0x2dc, 0x2dd, 0x2de, 0x2df,
0x2e0, 0x2e1, 0x2e2, 0x2e3, 0x2e4, 0x2e5, 0x2e6, 0x2e7, 0x2e8, 0x2e9,
0x2ec, 0x2ed, 0x2ee, 0x2f3, 0x2f7, 0x300, 0x301, 0x302, 0x303, 0x304,
0x305, 0x306, 0x307, 0x308, 0x309, 0x30a, 0x30b, 0x30c, 0x30d, 0x30e,
0x30f, 0x310, 0x311, 0x312, 0x313, 0x314, 0x315, 0x316, 0x317, 0x318,
0x319, 0x31a, 0x31b, 0x31c, 0x31d, 0x31e, 0x31f, 0x320, 0x321, 0x322,
0x323, 0x324, 0x325, 0x326, 0x327, 0x328, 0x329, 0x32a, 0x32b, 0x32c,
0x32d, 0x32e, 0x32f, 0x330, 0x331, 0x332, 0x333, 0x334, 0x335, 0x336,
0x337, 0x338, 0x339, 0x33a, 0x33b, 0x33c, 0x33d, 0x33e, 0x33f, 0x340,
0x341, 0x342, 0x343, 0x344, 0x345, 0x346, 0x347, 0x348, 0x349, 0x34a,
0x34b, 0x34c, 0x34d, 0x34e, 0x34f, 0x351, 0x352, 0x353, 0x357, 0x358,
0x35a, 0x35c, 0x35d, 0x35e, 0x35f, 0x360, 0x361, 0x362, 0x370, 0x371,
0x372, 0x373, 0x374, 0x375, 0x376, 0x377, 0x37a, 0x37b, 0x37c, 0x37d,
0x37e, 0x384, 0x385, 0x386, 0x387, 0x388, 0x389, 0x38a, 0x38c, 0x38e,
0x38f, 0x390, 0x391, 0x392, 0x393, 0x394, 0x395, 0x396, 0x397, 0x398,
0x399, 0x39a, 0x39b, 0x39c, 0x39d, 0x39e, 0x39f, 0x3a0, 0x3a1, 0x3a3,
0x3a4, 0x3a5, 0x3a6, 0x3a7, 0x3a8, 0x3a9, 0x3aa, 0x3ab, 0x3ac, 0x3ad,
0x3ae, 0x3af, 0x3b0, 0x3b1, 0x3b2, 0x3b3, 0x3b4, 0x3b5, 0x3b6, 0x3b7,
0x3b8, 0x3b9, 0x3ba, 0x3bb, 0x3bc, 0x3bd, 0x3be, 0x3bf, 0x3c0, 0x3c1,
0x3c2, 0x3c3, 0x3c4, 0x3c5, 0x3c6, 0x3c7, 0x3c8, 0x3c9, 0x3ca, 0x3cb,
0x3cc, 0x3cd, 0x3ce, 0x3cf, 0x3d0, 0x3d1, 0x3d2, 0x3d3, 0x3d4, 0x3d5,
0x3d6, 0x3d7, 0x3d8, 0x3d9, 0x3da, 0x3db, 0x3dc, 0x3dd, 0x3de, 0x3df,
0x3e0, 0x3e1, 0x3e2, 0x3e3, 0x3e4, 0x3e5, 0x3e6, 0x3e7, 0x3e8, 0x3e9,
0x3ea, 0x3eb, 0x3ec, 0x3ed, 0x3ee, 0x3ef, 0x3f0, 0x3f1, 0x3f2, 0x3f3,
0x3f4, 0x3f5, 0x3f6, 0x3f7, 0x3f8, 0x3f9, 0x3fa, 0x3fb, 0x3fc, 0x3fd,
0x3fe, 0x3ff, 0x400, 0x401, 0x402, 0x403, 0x404, 0x405, 0x406, 0x407,
0x408, 0x409, 0x40a, 0x40b, 0x40c, 0x40d, 0x40e, 0x40f, 0x410, 0x411,
0x412, 0x413, 0x414, 0x415, 0x416, 0x417, 0x418, 0x419, 0x41a, 0x41b,
0x41c, 0x41d, 0x41e, 0x41f, 0x420, 0x421,
|
pinguinkiste/bioconda-recipes
|
recipes/peptide-shaker/peptide-shaker.py
|
Python
|
mit
| 3,271
| 0.000917
|
#!/usr/bin/env python
#
# Wrapper script for Java Conda packages that ensures that the java runtime
# is invoked with the right options. Adapted from the bash script (http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128).
#
# Program Parameters
#
import os
import subprocess
import sys
import shutil
from os import access
from os import getenv
from os import X_OK
jar_file = 'PeptideShaker-1.15.1.jar'
default_jvm_mem_opts = ['-Xms512m', '-Xmx1g']
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
"""Return the symlink-resolved, canonicalized directory-portion of path."""
return os.path.dirname(os.path.realpath(path))
def java_executable():
"""Return the executable name of the Java interpreter."""
java_home = getenv('JAVA_HOME')
java_bin = os.path.join('bin', 'java')
if java_home and access(os.path.join(java_home, java_bin), X_OK):
return os.path.join(java_home, java_bin)
else:
return 'java'
def jvm_opts(argv):
"""Construct list of Java arguments based on our argument list.
The argument list passed in argv must not include the script name.
    The return value is a 4-tuple of the form:
    (memory_options, prop_options, passthrough_options, exec_dir),
    where the first three are lists of strings and exec_dir is a path or None.
"""
mem_opts = []
prop_opts = []
pass_args = []
exec_dir = None
for arg in argv:
if arg.startswith('-D'):
prop_opts.append(arg)
elif arg.startswith('-XX'):
prop_opts.append(arg)
elif arg.startswith('-Xm'):
mem_opts.append(arg)
elif arg.startswith('--exec_dir='):
exec_dir = arg.split('=')[1].strip('"').strip("'")
            if not os.path.exists(exec_dir):
                shutil.copytree(real_dirname(sys.argv[0]), exec_dir, symlinks=False, ignore=None)
else:
pass_args.append(arg)
# In the original shell script the test coded below read:
# if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
# To reproduce the behaviour of the above shell code fragment
    # it is important to explicitly check for equality with None
# in the second condition, so a null envar value counts as True!
if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
mem_opts = default_jvm_mem_opts
return (mem_opts, prop_opts, pass_args, exec_dir)
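# Worked example (comment added for clarity): given
# argv = ['-Xmx2g', '-Dfoo=bar', 'input.mgf'], the loop above yields
# mem_opts=['-Xmx2g'], prop_opts=['-Dfoo=bar'], pass_args=['input.mgf'] and
# exec_dir=None. The default -Xms/-Xmx values are applied only when no -Xm*
# flag was passed and _JAVA_OPTIONS is unset.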
def main():
java = java_executable()
"""
PeptideShaker updates files relative to the path of the jar file.
In a multiuser setting, the option --exec_dir="exec_dir"
can be used as the location for the peptide-shaker distribution.
    If the exec_dir does not exist,
we copy the jar file, lib, and resources to the exec_dir directory.
"""
(mem_opts, prop_opts, pass_args, exec_dir) = jvm_opts(sys.argv[1:])
jar_dir = exec_dir if exec_dir else real_dirname(sys.argv[0])
if pass_args != [] and pass_args[0].startswith('eu'):
jar_arg = '-cp'
else:
jar_arg = '-jar'
jar_path = os.path.join(jar_dir, jar_file)
java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()
|
CharlesShang/TFFRCNN
|
lib/networks/VGGnet_test.py
|
Python
|
mit
| 2,855
| 0.001401
|
import tensorflow as tf
from .network import Network
from ..fast_rcnn.config import cfg
class VGGnet_test(Network):
def __init__(self, trainable=True):
self.inputs = []
self.data = tf.placeholder(tf.float32, shape=[None, None, None, 3])
self.im_info = tf.placeholder(tf.float32, shape=[None, 3])
self.keep_prob = tf.placeholder(tf.float32)
self.layers = dict({'data': self.data, 'im_info': self.im_info})
self.trainable = trainable
self.setup()
def setup(self):
# n_classes = 21
n_classes = cfg.NCLASSES
# anchor_scales = [8, 16, 32]
anchor_scales = cfg.ANCHOR_SCALES
_feat_stride = [16, ]
(self.feed('data')
.conv(3, 3, 64, 1, 1, name='conv1_1', trainable=False)
.conv(3, 3, 64, 1, 1, name='conv1_2', trainable=False)
.max_pool(2, 2, 2, 2, padding='VALID', name='pool1')
.conv(3, 3, 128, 1, 1, name='conv2_1', trainable=False)
.conv(3, 3, 128, 1, 1, name='conv2_2', trainable=False)
.max_pool(2, 2, 2, 2, padding='VALID', name='pool2')
.conv(3, 3, 256, 1, 1, name='conv3_1')
.conv(3, 3, 256, 1, 1, name='conv3_2')
.conv(3, 3, 256, 1, 1, name='conv3_3')
.max_pool(2, 2, 2, 2, padding='VALID', name='pool3')
.conv(3, 3, 512, 1, 1, name='conv4_1')
.conv(3, 3, 512, 1, 1, name='conv4_2')
.conv(3, 3, 512, 1, 1, name='conv4_3')
.max_pool(2, 2, 2, 2, padding='VALID', name='pool4')
.conv(3, 3, 512, 1, 1, name='conv5_1')
.conv(3, 3, 512, 1, 1, name='conv5_2')
.conv(3, 3, 512, 1, 1, name='conv5_3'))
(self.feed('conv5_3')
.conv(3, 3, 512, 1, 1, name='rpn_conv/3x3')
.conv(1, 1, len(anchor_scales) * 3 * 2, 1, 1, padding='VALID', relu=False, name='rpn_cls_score'))
(self.feed('rpn_conv/3x3')
             .conv(1, 1, len(anchor_scales) * 3 * 4, 1, 1, padding='VALID', relu=False, name='rpn_bbox_pred'))
# shape is (1, H, W, Ax2) -> (1, H, WxA, 2)
(self.feed('rpn_cls_score')
.spatial_reshape_layer(2, name='rpn_cls_score_reshape')
.spatial_softmax(name='rpn_cls_prob'))
# shape is (1, H, WxA, 2) -> (1, H, W, Ax2)
(self.feed('rpn_cls_prob')
.spatial_reshape_layer(len(anchor_scales) * 3 * 2, name='rpn_cls_prob_reshape'))
(self.feed('rpn_cls_prob_reshape', 'rpn_bbox_pred', 'im_info')
.proposal_layer(_feat_stride, anchor_scales, 'TEST', name='rois'))
(self.feed('conv5_3', 'rois')
.roi_pool(7, 7, 1.0 / 16, name='pool_5')
.fc(4096, name='fc6')
.fc(4096, name='fc7')
.fc(n_classes, relu=False, name='cls_score')
.softmax(name='cls_prob'))
(self.feed('fc7')
.fc(n_classes * 4, relu=False, name='bbox_pred'))
|
DailyActie/Surrogate-Model
|
01-codes/scipy-master/scipy/sparse/linalg/isolve/tests/test_lsmr.py
|
Python
|
mit
| 4,984
| 0.000201
|
"""
Copyright (C) 2010 David Fong and Michael Saunders
Distributed under the same license as Scipy
Testing Code for LSMR.
03 Jun 2010: First version release with lsmr.py
David Chin-lung Fong clfong@stanford.edu
Institute for Computational and Mathematical Engineering
Stanford University
Michael Saunders saunders@stanford.edu
Systems Optimization Laboratory
Dept of MS&E, Stanford University.
"""
from __future__ import division, print_function, absolute_import
from numpy import array, arange, eye, zeros, ones, sqrt, transpose, hstack
from numpy.linalg import norm
from numpy.testing import run_module_suite, assert_almost_equal
from scipy.sparse import coo_matrix
from scipy.sparse.linalg import lsmr
from scipy.sparse.linalg.interface import aslinearoperator
class TestLSMR:
def setUp(self):
self.n = 10
self.m = 10
def assertCompatibleSystem(self, A, xtrue):
Afun = aslinearoperator(A)
b = Afun.matvec(xtrue)
x = lsmr(A, b)[0]
assert_almost_equal(norm(x - xtrue), 0, decimal=5)
def testIdentityACase1(self):
A = eye(self.n)
xtrue = zeros((self.n, 1))
self.assertCompatibleSystem(A, xtrue)
def testIdentityACase2(self):
A = eye(self.n)
xtrue = ones((self.n, 1))
self.assertCompatibleSystem(A, xtrue)
def testIdentityACase3(self):
A = eye(self.n)
xtrue = transpose(arange(self.n, 0, -1))
self.assertCompatibleSystem(A, xtrue)
def testBidiagonalA(self):
A = lowerBidiagonalMatrix(20, self.n)
xtrue = transpose(arange(self.n, 0, -1))
self.assertCompatibleSystem(A, xtrue)
def testScalarB(self):
A = array([[1.0, 2.0]])
b = 3.0
x = lsmr(A, b)[0]
        assert_almost_equal(norm(A.dot(x) - b), 0)
def testColumnB(self):
A = eye(self.n)
b = ones((self.n, 1))
x = lsmr(A, b)[0]
assert_almost_equal(norm(A.dot(x) - b.ravel()), 0)
class TestLSMRReturns:
def setUp(self):
self.n = 10
self.A = lowerBidiagonalMatrix(20, self.n)
self.xtrue = transpose(arange(self.n, 0, -1))
self.Afun = aslinearoperator(self.A)
self.b = self.Afun.matvec(self.xtrue)
self.returnValues = lsmr(self.A, self.b)
def testNormr(self):
x, istop, itn, normr, normar, normA, condA, normx = self.returnValues
assert_almost_equal(normr, norm(self.b - self.Afun.matvec(x)))
def testNormar(self):
x, istop, itn, normr, normar, normA, condA, normx = self.returnValues
assert_almost_equal(normar,
norm(self.Afun.rmatvec(self.b - self.Afun.matvec(x))))
def testNormx(self):
x, istop, itn, normr, normar, normA, condA, normx = self.returnValues
assert_almost_equal(normx, norm(x))
def lowerBidiagonalMatrix(m, n):
# This is a simple example for testing LSMR.
# It uses the leading m*n submatrix from
# A = [ 1
# 1 2
# 2 3
# 3 4
# ...
# n ]
# suitably padded by zeros.
#
# 04 Jun 2010: First version for distribution with lsmr.py
if m <= n:
row = hstack((arange(m, dtype=int),
arange(1, m, dtype=int)))
col = hstack((arange(m, dtype=int),
arange(m - 1, dtype=int)))
data = hstack((arange(1, m + 1, dtype=float),
arange(1, m, dtype=float)))
return coo_matrix((data, (row, col)), shape=(m, n))
else:
row = hstack((arange(n, dtype=int),
arange(1, n + 1, dtype=int)))
col = hstack((arange(n, dtype=int),
arange(n, dtype=int)))
data = hstack((arange(1, n + 1, dtype=float),
arange(1, n + 1, dtype=float)))
return coo_matrix((data, (row, col)), shape=(m, n))
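# Worked example (comment added for clarity): for m=4, n=3 the m > n branch
# above builds row=[0,1,2,1,2,3], col=[0,1,2,0,1,2], data=[1,2,3,1,2,3],
# i.e. the dense matrix
#     [[1 0 0]
#      [1 2 0]
#      [0 2 3]
#      [0 0 3]]
# which matches the pattern sketched at the top of this function.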
def lsmrtest(m, n, damp):
"""Verbose testing of lsmr"""
A = lowerBidiagonalMatrix(m, n)
xtrue = arange(n, 0, -1, dtype=float)
Afun = aslinearoperator(A)
b = Afun.matvec(xtrue)
atol = 1.0e-7
btol = 1.0e-7
conlim = 1.0e+10
itnlim = 10 * n
show = 1
x, istop, itn, normr, normar, norma, conda, normx \
= lsmr(A, b, damp, atol, btol, conlim, itnlim, show)
j1 = min(n, 5)
j2 = max(n - 4, 1)
print(' ')
print('First elements of x:')
str = ['%10.4f' % (xi) for xi in x[0:j1]]
print(''.join(str))
print(' ')
print('Last elements of x:')
str = ['%10.4f' % (xi) for xi in x[j2 - 1:]]
print(''.join(str))
r = b - Afun.matvec(x)
r2 = sqrt(norm(r) ** 2 + (damp * norm(x)) ** 2)
print(' ')
str = 'normr (est.) %17.10e' % (normr)
str2 = 'normr (true) %17.10e' % (r2)
print(str)
print(str2)
print(' ')
if __name__ == "__main__":
# Comment out the next line to run unit tests only
lsmrtest(20, 10, 0)
run_module_suite()
|
sandeepkrjha/pgmpy
|
pgmpy/sampling/__init__.py
|
Python
|
mit
| 693
| 0.002886
|
from .base import (BaseGradLogPDF, GradLogPDFGaussian, LeapFrog,
ModifiedEuler, BaseSimulateHamiltonianDynamics, _return_samples)
from .HMC import HamiltonianMC, HamiltonianMCDA
from .NUTS import NoUTurnSampler, NoUTurnSamplerDA
from .Sampling import GibbsSampling, BayesianModelSampling
__all__ = ['LeapFrog',
'ModifiedEuler',
'BaseSimulateHamiltonianDynamics',
'BaseGradLogPDF',
'GradLogPDFGaussian',
'_return_samples',
'HamiltonianMC',
'HamiltonianMCDA',
'NoUTurnSampler',
'NoUTurnSamplerDA',
'BayesianModelSampling',
'GibbsSampling']
|
Distrotech/reportlab
|
src/reportlab/__init__.py
|
Python
|
bsd-3-clause
| 1,314
| 0.022831
|
#Copyright ReportLab Europe Ltd. 2000-2015
#see license.txt for license details
__doc__="""The Reportlab PDF generation library."""
Version = "3.2.13"
__version__=Version
__date__='20151210'
import sys, os, imp
if sys.version_info[0:2]!=(2, 7) and sys.version_info<(3, 3):
    raise ImportError("""reportlab requires Python 2.7+ or 3.3+; 3.0-3.2 are not supported.""")
#define these early in reportlab's life
isPy3 = sys.version_info[0]==3
if isPy3:
def cmp(a,b):
return -1 if a<b else (1 if a>b else 0)
import builtins
builtins.cmp = cmp
builtins.xrange = range
del cmp, builtins
else:
from future_builtins import ascii
import __builtin__
__builtin__.ascii = ascii
del ascii, __builtin__
#try to use dynamic modifications from
#reportlab.local_rl_mods.py
#reportlab_mods.py or ~/.reportlab_mods
try:
import reportlab.local_rl_mods
except ImportError:
pass
def _fake_import(fn,name):
if os.path.isfile(fn):
with open(fn,'rb') as f:
imp.load_source(name,fn,f)
try:
import reportlab_mods #application specific modifications can be anywhere on python path
except ImportError:
try:
_fake_import(os.path.expanduser(os.path.join('~','.reportlab_mods')),'reportlab_mods')
except (ImportError,KeyError):
pass
|
soylentdeen/cuddly-weasel
|
MusicMaker/Output/plotter.py
|
Python
|
mit
| 708
| 0.00565
|
import Moog960
import numpy
import SpectralTools
import matplotlib.pyplot as pyplot
fig = pyplot.figure(0)
ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
ax.clear()
syntheticScore = Moog960.Score(suffix='', observed = '../../Theremin/TWHydra.fits')
models = syntheticScore.getLabels(keySignature='CONVOLVED')
observed = syntheticScore.getLabels(keySignature='OBSERVED')
for model in models:
parameters = model.parameters
label = "T=%d log g=%.1f B=%.1f" % (parameters["TEFF"], parameters["LOGG"], parameters["BFIELD"])
model.Spectrum.plot(ax=ax, label=label)
for obs in observed:
# Account for wavelength shift
obs.Spectrum.wl -= 5.5
obs.Spectrum.plot(ax=ax)
ax.legend()
fig.show()
|
ASPLes/turbulence
|
tools/tbc-ctl/tbc-setup-mod-radmin.py
|
Python
|
lgpl-2.1
| 4,910
| 0.016497
|
#!/usr/bin/python
import os
import commands
import termios
import sys
def enable_echo(fd, enabled):
(iflag, oflag, cflag, lflag, ispeed, ospeed, cc) = termios.tcgetattr (fd)
if enabled:
lflag |= termios.ECHO
else:
lflag &= ~termios.ECHO
new_attr = [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]
termios.tcsetattr(fd, termios.TCSANOW, new_attr)
return
def gen_password (passvalue):
import hashlib
value = hashlib.md5 (passvalue).hexdigest ().upper()
hash_len = len (value) * 1.5 -1
iterator = 2
while iterator < hash_len:
value = value[:iterator] + ":" + value[iterator:]
iterator += 3
return value
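# Worked example (comment added for clarity): the loop above inserts a colon
# after every second hex digit of the 32-character uppercased MD5 digest,
# producing a 47-character string, e.g.
#
#     gen_password("secret")
#     # -> '5E:BE:22:94:EC:D0:E0:F0:8E:AB:76:90:D2:A6:EE:69'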
# track if we should reboot
should_reboot = False
if os.geteuid () != 0:
print ("ERROR: only root can run this tool")
sys.exit (-1)
# get base instalation
(status, output) = commands.getstatusoutput ("turbulence --conf-location | grep SYSCONFDIR")
if status:
print ("ERROR: turbulence get location command failed with status %d, error: %s" % (status, output))
sys.exit (-1)
output = output.replace ("SYSCONFDIR:", "")
baseconfdir = output.replace (" ", "")
print ("INFO: found base configuration at %s/turbulence" % baseconfdir)
# check if radmin directory exists
if not os.path.exists ("%s/turbulence/radmin" % baseconfdir):
print ("INFO: creating directory: %s/turbulence/radmin" % baseconfdir)
os.mkdir ("%s/turbulence/radmin" % baseconfdir)
# ensure permissions are right
(status, output) = commands.getstatusoutput ("chmod o-rwx %s/turbulence/radmin" % baseconfdir)
if status:
print ("ERROR: failed to ensure permissions inside %s/turbulence/radmin directory" % baseconfdir)
sys.exit (-1)
# create radmin.conf
if not os.path.exists ("%s/turbulence/profile.d/radmin.conf" % baseconfdir):
import hashlib
import random
# build serverName
serverName = hashlib.md5 (str(random.random ())).hexdigest ()
print ("INFO: creating %s/turbulence/profile.d/radmin.conf" % baseconfdir)
open ("%s/turbulence/profile.d/radmin.conf" % baseconfdir, "w").write ("<!-- profile path to load mod-radmin from localhost -->\n\
<path-def server-name='%s' \n\
src='127.0.0.1' \n\
path-name='local radmin' \n\
work-dir='%s/turbulence/radmin'>\n\
<if-success profile='http://iana.org/beep/SASL/.*' connmark='sasl:is:authenticated' >\n\
<allow profile='urn:aspl.es:beep:profiles:radmin-ctl' />\n\
</if-success>\n\
</path-def>" % (serverName, baseconfdir))
# ensure permissions are right
(status, output) = commands.getstatusoutput ("chmod o-rwx %s/turbulence/profile.d/radmin.conf" % baseconfdir)
if status:
print ("ERROR: failed to ensure permissions for %s/turbulence/profile.d/radmin.conf file" % baseconfdir)
sys.exit (-1)
# flag we have to reboot
should_reboot = True
# create sasl.conf
if not os.path.exists ("%s/turbulence/radmin/sasl.conf" % baseconfdir):
print ("INFO: creating %s/turbulence/radmin/sasl.conf" % baseconfdir)
open ("%s/turbulence/radmin/sasl.conf" % baseconfdir, "w").write ('<mod-sasl>\n\
<auth-db remote-admins="remote-admins.xml" \n\
remote="no" \n\
format="md5" \n\
location="auth-db.xml" \n\
type="xml" />\n\
<method-allowed>\n\
<method value="plain" />\n\
</method-allowed>\n\
<login-options>\n\
<max-allowed-tries value="3" action="drop"/>\n\
<accounts-disabled action="drop" />\n\
</login-options>\n\
</mod-sasl>')
# create auth-db.xml
if not os.path.exists ("%s/turbulence/radmin/auth-db.xml" % baseconfdir):
print ("No database found, creating one. For this, we need a user and a password")
user = raw_input ("Auth login to create: " ).strip ()
enable_echo (1, False)
password = raw_input ("Type password: " ).strip ()
enable_echo (1, True)
print ""
# gen password
password = gen_password (password)
print ("INFO: creating %s/turbulence/radmin/auth-db.xml" % baseconfdir)
open ("%s/turbulence/radmin/auth-db.xml" % baseconfdir, "w").write ("<sasl-auth-db>\n\
<auth user_id='%s' password='%s' disabled='no'/>\n\
</sasl-auth-db>" % (user, password))
# try to enable module if not
if not os.path.exists ("%s/turbulence/mods-enabled/mod_radmin.xml" % baseconfdir):
print ("INFO: enabling mod-radmin module")
    (status, output) = commands.getstatusoutput ("ln -s %s/turbulence/mods-available/mod_radmin.xml %s/turbulence/mods-enabled/mod_radmin.xml" % (baseconfdir, baseconfdir))
if status:
print ("INFO: failed to enable module, ln command failed: %s" % output)
sys.exit (-1)
# flag you should reboot
    should_reboot = True
print ("INFO: configuration done!")
if should_reboot:
print ("INFO: you must reboot your turbulence server to make changes effective")
|
hankcs/HanLP
|
hanlp/pretrained/pos.py
|
Python
|
apache-2.0
| 2,166
| 0.007849
|
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-12-29 01:57
from hanlp_common.constant import HANLP_URL
CTB5_POS_RNN = HANLP_URL + 'pos/ctb5_pos_rnn_20200113_235925.zip'
'An old school BiLSTM tagging model trained on CTB5.'
CTB5_POS_RNN_FASTTEXT_ZH = HANLP_URL + 'pos/ctb5_pos_rnn_fasttext_20191230_202639.zip'
'An old school BiLSTM tagging model with FastText (:cite:`bojanowski2017enriching`) embeddings trained on CTB5.'
CTB9_POS_ALBERT_BASE = HANLP_URL + 'pos/ctb9_albert_base_20211228_163935.zip'
'ALBERT model (:cite:`Lan2020ALBERT:`) trained on CTB9 (:cite:`https://doi.org/10.35111/gvd0-xk91`). This is a TF component.'
CTB9_POS_ELECTRA_SMALL_TF = HANLP_URL + 'pos/pos_ctb_electra_small_20211227_121341.zip'
'Electra small model (:cite:`clark2020electra`) trained on CTB9 (:cite:`https://doi.org/10.35111/gvd0-xk91`). Accuracy = `96.75`. This is a TF component.'
CTB9_POS_ELECTRA_SMALL = HANLP_URL + 'pos/pos_ctb_electra_small_20220215_111944.zip'
'Electra small model (:cite:`clark2020electra`) trained on CTB9 (:cite:`https://doi.org/10.35111/gvd0-xk91`). Accuracy = `96.26`.'
CTB9_POS_RADICAL_ELECTRA_SMALL = HANLP_URL + 'pos/pos_ctb_radical_electra_small_20220215_111932.zip'
'Electra small model (:cite:`clark2020electra`) with radical embeddings (:cite:`he2018dual`) trained on CTB9 (:cite:`https://doi.org/10.35111/gvd0-xk91`). Accuracy = `96.14`.'
C863_POS_ELECTRA_SMALL = HANLP_URL + 'pos/pos_863_electra_small_20220217_101958.zip'
'Electra small model (:cite:`clark2020electra`) trained on Chinese 863 corpus. Accuracy = `95.19`.'
PKU_POS_ELECTRA_SMALL = HANLP_URL + 'pos/pos_pku_electra_small_20220217_142436.zip'
'Electra small model (:cite:`clark2020electra`) trained on Chinese PKU corpus. Accuracy = `97.55`.'
PKU98_POS_ELECTRA_SMALL = HANLP_URL + 'pos/pos_pku_electra_small_20210808_125158.zip'
'Electra small model (:cite:`clark2020electra`) trained on CTB9 (:cite:`https://doi.org/10.35111/gvd0-xk91`). Accuracy = `97.60`.'
PTB_POS_RNN_FASTTEXT_EN = HANLP_URL + 'pos/ptb_pos_rnn_fasttext_20200103_145337.zip'
'An old school BiLSTM tagging model with FastText (:cite:`bojanowski2017enriching`) embeddings trained on PTB.'
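# Usage sketch (added; not part of this module). These identifiers are meant
# to be passed to hanlp.load; the token list below is just an illustration.
#
#     import hanlp
#     pos = hanlp.load(CTB9_POS_ELECTRA_SMALL)
#     pos(['商品', '和', '服务'])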
ALL = {}
|
ajstarna/RicochetRobots
|
Brobot/Move.py
|
Python
|
bsd-2-clause
| 1,936
| 0.035124
|
import random
import numpy as np
from copy import deepcopy
class Move:
''' the move data structure. field for colour of robot and direction of move. '''
intToColourConversion = ["BLUE", "RED", "GREEN", "YELLOW"] # index with robot number to get colour as string
def __init__(self, colour, direction):
self.colour = colour # this int corresponds to the colour ints shown in the Board class
self.direction = direction # this string is NORTH, SOUTH, EAST, or WEST
def __str__(self):
return "Colo
|
ur = {0} and direction = {1}".format(self.intToColourConversion[self.colour], self.d
|
irection)
class AllMoves:
    ''' this class holds all possible moves that can be made in Ricochet Robots '''
def __init__(self):
self.moveSet = self.createMoveSet() # a np array
def createMoveSet(self):
''' creates the move set with all possible colours and directions '''
moveSet = []
for colour in xrange(4):
for direction in ["NORTH", "SOUTH", "EAST", "WEST"]:
moveSet.append(Move(colour,direction))
return np.array(moveSet)
def getRandomMove(self):
''' return a random move from the moveSet '''
return deepcopy(random.sample(self.moveSet, 1)[0])
def getMoveAtIndex(self, index):
''' return the move at this index '''
return self.moveSet[index]
def getOppositeMove(self, moveInt):
''' given a move as integer, returns the opposite move as integer.
opposite is the same colour in the opposite direction. '''
if moveInt is None: # no move has been made yet, so there is no opposite. just return -1
return -1
# nice modular trick for finding opposite move
if moveInt % 2 == 0:
return moveInt + 1
else:
return moveInt - 1
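    # Worked example (comment added for clarity): createMoveSet enumerates
    # directions as NORTH, SOUTH, EAST, WEST for each colour, so opposite
    # directions always land on adjacent even/odd indices. Index 2 is
    # BLUE/EAST, and getOppositeMove(2) returns 3 (BLUE/WEST);
    # getOppositeMove(3) returns 2 again.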
def printMoveSequence(self, sequence):
''' given a sequence of moves (as ints) prints them out in human-readable format '''
count = 1
for moveInt in sequence:
print("Move {0}: {1}".format(count, self.getMoveAtIndex(moveInt)))
count += 1
|
sema/django-2012
|
website/mosaicportfolio/templatetags/htmlsanitizer.py
|
Python
|
mit
| 564
| 0.010638
|
import bleach
from django import template
from django.conf import settings
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
register = template.Library()
@stringfilter
def sanitize_html(value):
if isinstance(value, basestring):
value = bleach.clean(value,
tags=settings.HTML_SANITIZER_ALLOWED_TAGS,
attributes=settings.HTML_SANITIZER_ALLOWED_ATTR,
strip=True)
return mark_safe(value)
return value
register.filter('sanitize_html', sanitize_html)
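# Template usage sketch (added; not from the original module). With
# HTML_SANITIZER_ALLOWED_TAGS/ATTR defined in settings, the filter is applied
# like any other; `comment.body` is a hypothetical context variable.
#
#     {% load htmlsanitizer %}
#     {{ comment.body|sanitize_html }}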
|
zlatozar/pytak
|
pytak/tests/fakeapi/GetInformationAboutYourself.py
|
Python
|
bsd-3-clause
| 166
| 0.006024
|
from pytak.call import REST
class GetInformationAboutYourself(REST):
def fill_call_data(self):
self.uri = "/api/muad/rest/users/@me[?query_parameters]"
|
citrix-openstack-build/nova
|
nova/db/sqlalchemy/migrate_repo/versions/153_instance_type_in_system_metadata.py
|
Python
|
apache-2.0
| 2,116
| 0
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, select, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
instances = Table('instances', meta, autoload=True)
instance_types = Table('instance_types', meta, autoload=True)
sys_meta = Table('instance_system_metadata', meta, autoload=True)
# Taken from nova/compute/api.py
instance_type_props = ['id', 'name', 'memory_mb', 'vcpus',
'root_gb', 'ephemeral_gb', 'flavorid',
'swap', 'rxtx_factor', 'vcpu_weight']
select_columns = [instances.c.uuid]
select_columns += [getattr(instance_types.c, name)
for name in instance_type_props]
q = select(select_columns, from_obj=instances.join(
instance_types,
instances.c.instance_type_id == instance_types.c.id)).where(
instances.c.deleted == 0)
i = sys_meta.insert()
for values in q.execute():
insert_rows = []
for index in range(0, len(instance_type_props)):
value = values[index + 1]
insert_rows.append({
"key": "instance_type_%s" % instance_type_props[index],
"value": None if value is None else str(value),
"instance_uuid": values[0],
})
i.execute(insert_rows)
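    # Example of the effect (comment added for clarity): for an instance whose
    # flavor has memory_mb=2048, the loop above inserts a system-metadata row
    #     {"key": "instance_type_memory_mb", "value": "2048",
    #      "instance_uuid": <the instance's uuid>}
    # and likewise one row per property in instance_type_props.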
def downgrade(migration_engine):
# This migration only touches data, and only metadata at that. No need
# to go through and delete old metadata items.
pass
|
Crystal-SDS/dashboard
|
crystal_dashboard/dashboards/crystal/controllers/instances/models.py
|
Python
|
gpl-3.0
| 319
| 0.003135
|
class Instance:
"""
Instances class models.
"""
    def __init__(self, instance_id, controller, parameters, description, status):
self.id = instance_id
self.controller = controller
self.parameters = parameters
self.description = description
self.status = status
|
dkamotsky/program-y
|
src/programy/parser/exceptions.py
|
Python
|
mit
| 3,289
| 0.006081
|
"""
Copyright (c) 2016 Keith Sterling
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
class ParserException(Exception):
def __init__(self, message, filename=None, xml_exception=None, xml_element=None):
Exception.__init__(self, message)
self._message = message
self._filename = filename
self._xml_exception = xml_exception
self._xml_element = xml_element
@property
def message(self):
return self._message
@message.setter
def message(self, message):
self._message = message
@property
def filename(self):
return self._filename
@filename.setter
def filename(self, filename):
self._filename = filename
@property
def xml_exception(self):
return self._xml_exception
@xml_exception.setter
def xml_exception(self, xml_exception):
self._xml_exception = xml_exception
@property
def xml_element(self):
return self._xml_element
@xml_element.setter
def xml_element(self, xml_element):
self._xml_element = xml_element
def format_message(self):
msg = self._message
if self._filename is not None:
msg += " in [%s]" % self._filename
if self._xml_exception is not None:
if str(self._xml_exception):
msg += self._xml_exception
else:
msg += " at [line(%d), column(%d)]" % (self._xml_exception.position[0],
self._xml_exception.position[1])
if self._xml_element is not None:
if hasattr(self._xml_element, '_end_line_number') and hasattr(self._xml_element, '_end_column_number'):
msg += " at [line(%d), column(%d)]" % (self._xml_element._end_line_number,
self._xml_element._end_column_number)
return msg
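    # Example (comment added for clarity):
    #     ParserException("invalid tag", filename="basic.aiml").format_message()
    # returns "invalid tag in [basic.aiml]"; line/column details are appended
    # when an XML exception or element is attached.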
class DuplicateGrammarException(ParserException):
def __init__(self, message, filename=None, xml_exception=None, xml_element=None):
ParserException.__init__(self, message, filename=filename, xml_exception=xml_exception, xml_element=xml_element)
class MatcherException(Exception):
def __init__(self, message):
        Exception.__init__(self, message)
self.message = message
|
wscullin/spack
|
var/spack/repos/builtin/packages/xkill/package.py
|
Python
|
lgpl-2.1
| 1,898
| 0.000527
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Xkill(AutotoolsPackage):
"""xkill is a utility for forcing the X server to close connections to
clients. This program is very dangerous, but is useful for aborting
programs that have displayed undesired windows on a user's screen."""
homepage = "http://cgit.freedesktop.org/xorg/a
|
pp/xkill"
url = "https://www.x.org/archive/individual/app/xkill-1.0.4.tar.gz"
version('1.0.4', 'b04c15bfd0b619f1e4ff3e44607e738d')
depends_on('libx11')
depends_on('libxmu')
depends_on('xproto@7.0.22:', type='build')
depends_on('pkg-config@0.9.0:', type='build')
depends_on('util-macros', type='build')
|
nedbat/coveragepy
|
tests/test_html.py
|
Python
|
apache-2.0
| 44,058
| 0.00084
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Tests that HTML generation is awesome."""
import datetime
import glob
import json
import os
import os.path
import re
import sys
from unittest import mock
import pytest
import coverage
from coverage import env
from coverage.exceptions import NoDataError, NotPython, NoSource
from coverage.files import abs_file, flat_rootname
import coverage.html
from coverage.report import get_analysis_to_report
from tests.coveragetest import CoverageTest, TESTS_DIR
from tests.goldtest import gold_path
from tests.goldtest import compare, contains, contains_rx, doesnt_contain, contains_any
from tests.helpers import assert_coverage_warnings, change_dir
class HtmlTestHelpers(CoverageTest):
"""Methods that help with HTML tests."""
def create_initial_files(self):
"""Create the source files we need to run these tests."""
self.make_file("main_file.py", """\
import helper1, helper2
helper1.func1(12)
helper2.func2(12)
""")
self.make_file("helper1.py", """\
def func1(x):
if x % 2:
print("odd")
""")
self.make_file("helper2.py", """\
def func2(x):
print("x is %d" % x)
""")
def run_coverage(self, covargs=None, htmlargs=None):
"""Run coverage.py on main_file.py, and create an HTML report."""
self.clean_local_file_imports()
cov = coverage.Coverage(**(covargs or {}))
self.start_import_stop(cov, "main_file")
return cov.html_report(**(htmlargs or {}))
def get_html_report_content(self, module):
"""Return the content of the HTML report for `module`."""
filename = flat_rootname(module) + ".html"
filename = os.path.join("htmlcov", filename)
with open(filename) as f:
return f.read()
def get_html_index_content(self):
"""Return the content of index.html.
Timestamps are replaced with a placeholder so that clocks don't matter.
"""
with open("htmlcov/index.html") as f:
index = f.read()
index = re.sub(
r"created at \d{4}-\d{2}-\d{2} \d{2}:\d{2} \+\d{4}",
r"created at YYYY-MM-DD HH:MM +ZZZZ",
index,
)
index = re.sub(
r"created at \d{4}-\d{2}-\d{2} \d{2}:\d{2}",
r"created at YYYY-MM-DD HH:MM",
index,
)
return index
def assert_correct_timestamp(self, html):
"""Extract the timestamp from `html`, and assert it is recent."""
timestamp_pat = r"created at (\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2})"
m = re.search(timestamp_pat, html)
assert m, "Didn't find a timestamp!"
timestamp = datetime.datetime(*map(int, m.groups()))
# The timestamp only records the minute, so the delta could be from
# 12:00 to 12:01:59, or two minutes.
self.assert_recent_datetime(
timestamp,
seconds=120,
msg=f"Timestamp is wrong: {timestamp}",
)
class FileWriteTracker:
"""A fake object to track how `open` is used to write files."""
def __init__(self, written):
self.written = written
def open(self, filename, mode="r"):
"""Be just like `open`, but write written file names to `self.written`."""
if mode.startswith("w"):
self.written.add(filename.replace('\\', '/'))
return open(filename, mode)
class HtmlDeltaTest(HtmlTestHelpers, CoverageTest):
"""Tests of the HTML delta speed-ups."""
def setUp(self):
super().setUp()
# At least one of our tests monkey-patches the version of coverage.py,
# so grab it here to restore it later.
self.real_coverage_version = coverage.__version__
self.addCleanup(setattr, coverage, "__version__", self.real_coverage_version)
self.files_written = None
def run_coverage(self, covargs=None, htmlargs=None):
"""Run coverage in-process for the delta tests.
For the delta tests, we always want `source=.` and we want to track
which files are written. `self.files_written` will be the file names
that were opened for writing in html.py.
"""
covargs = covargs or {}
covargs['source'] = "."
self.files_written = set()
mock_open = FileWriteTracker(self.files_written).open
with mock.patch("coverage.html.open", mock_open):
return super().run_coverage(covargs=covargs, htmlargs=htmlargs)
def assert_htmlcov_files_exist(self):
"""Assert that all the expected htmlcov files exist."""
self.assert_exists("htmlcov/index.html")
self.assert_exists("htmlcov/main_file_py.html")
self.assert_exists("htmlcov/helper1_py.html")
self.assert_exists("htmlcov/helper2_py.html")
self.assert_exists("htmlcov/style.css")
self.assert_exists("htmlcov/coverage_html.js")
self.assert_exists("htmlcov/.gitignore")
def test_html_created(self):
# Test basic HTML generation: files should be created.
self.create_initial_files()
self.run_coverage()
self.assert_htmlcov_files_exist()
def test_html_delta_from_source_change(self):
# HTML generation can create only the files that have changed.
# In this case, helper1 changes because its source is different.
self.create_initial_files()
self.run_coverage()
index1 = self.get_html_index_content()
# Now change a file (but only in a comment) and do it again.
self.make_file("helper1.py", """\
def func1(x): # A nice function
if x % 2:
print("odd")
""")
self.run_coverage()
# Only the changed files should have been created.
self.assert_htmlcov_files_exist()
assert "htmlcov/index.html" in self.files_written
assert "htmlcov/helper1_py.html" in self.files_written
assert "htmlcov/helper2_py.html" not in self.files_written
assert "htmlcov/main_file_py.html" not in self.files_written
# Because the source change was only a comment, the index is the same.
index2 = self.get_html_index_content()
assert index1 == index2
def test_html_delta_from_coverage_change(self):
# HTML generation can create only the files that have changed.
# In this case, helper1 changes because its coverage is different.
self.create_initial_files()
self.run_coverage()
# Now change a file and do it again. main_file is different, and calls
# helper1 differently.
self.make_file("main_file.py", """\
import helper1, helper2
helper1.func1(23)
helper2.func2(23)
""")
self.run_coverage()
# Only the changed files should have been created.
self.assert_htmlcov_files_exist()
assert "htmlcov/index.html" in self.files_written
assert "htmlcov/helper1_py.html" in self.files_written
assert "htmlcov/helper2_py.html" not in self.files_written
assert "htmlcov/main_file_py.html" in self.files_written
def test_html_delta_from_settings_change(self):
# HTML generation can create only the files that have changed.
# In this case, everything changes because the coverage.py settings
# have changed.
self.create_initial_files()
self.run_coverage(covargs=dict(omit=[]))
index1 = self.get_html_index_content()
self.run_coverage(covargs=dict(omit=['xyzzy*']))
# All the files have been reported again.
self.assert_htmlcov_files_exist()
assert "htmlcov/index.html" in self.files_written
assert "htmlcov/helper1_py.html" in self.files_written
assert "htmlcov/helper2_py.html" in self.files_written
assert "htmlcov/main_file_py.html" in self.files_written
        index2 = self.get_html_index_content()
|
sdlarsen1/CMPUT291
|
prjcode/Doctor.py
|
Python
|
gpl-3.0
| 9,331
| 0.014575
|
import os
from LogInOut import logout
def doctorCommands(cursor, conn, staff_id):
loggedOut = False
while not loggedOut:
os.system("clear")
choice = int(raw_input('''Type integer value of desired task:
1. Get patient chart info.
2. Record a symptom.
3. Record a diagnosis.
4. Prescribe medication.
5. Log out\n'''))
if choice == 1:
getPatientChartInfo(cursor)
raw_input("Press Enter to go back to menu.") # return to menu
elif choice == 2:
recordSymptom(cursor, conn, staff_id)
raw_input("Press Enter to go back to menu.") # return to menu
elif choice == 3:
recordDiagnosis(cursor, conn, staff_id)
raw_input("Press Enter to go back to menu.") # return to menu
elif choice == 4:
recordMedication(cursor, conn, staff_id)
raw_input("Press Enter to go back to menu.") # return to menu
else:
loggedOut = logout()
def getPatientChartInfo(cursor): # doctor 1, nurse 3
while 1:
hcno = raw_input("Enter Patient Health Care Number >").lower()
cursor.execute('''
select chart_id, edate
from charts
where hcno = ?
order by adate;
''', (hcno,))
charts = cursor.fetchall()
if len(charts) < 1:
print "Patient #%s Does Not Exist" %hcno
else:
break
print "Patient Charts for %s\nChart ID|Chart Status" %hcno
for row in charts:
if row[1] == None:
status = "Open"
else:
status = "Closed"
print "%-8s|%s" %(row[0],(status),)
chartNotSelected = True
while chartNotSelected:
chart_id = raw_input("Select Chart Number >")
for row in charts:
if chart_id == row[0]:
chartNotSelected = False
break
if chartNotSelected:
print "Patient Chart #%s Does Not Exist" %chart_id
cursor.execute('''
select staff_id, obs_date, symptom
from symptoms
where chart_id = ?
order by obs_date;
''', (chart_id,))
symptoms = cursor.fetchall()
cursor.execute('''
select staff_id, ddate, diagnosis
from diagnoses
where chart_id = ?
order by ddate;
''', (chart_id,))
diagnoses = cursor.fetchall()
cursor.execute('''
select staff_id, mdate, start_med, end_med, amount, drug_name
from medications
where chart_id = ?
order by mdate;
''', (chart_id,))
meds = cursor.fetchall()
print "Chart #%s for Patient #%s" %(chart_id ,hcno)
print "Symptoms\nStaff ID|Observation Date |Symptom"
for row in symptoms:
print "%-8s|%-19s|%s" %(row[0],row[1],row[2])
print "----------------------------------------------"
print "Diagnosis\nStaff ID|Diagnosis Date |Diagnosis"
for row in diagnoses:
print "%-8s|%-19s|%s" %(row[0],row[1],row[2])
print "----------------------------------------------"
print "Medications\nStaff ID|Precsription Date |Med Start Date |Med End Date |Amount per day|Drug Name"
for row in meds:
print "%-8s|%-19s|%-19s|%-19s|%-14s|%s" %(row[0],row[1],row[2],row[3],row[4],row[5])
print "----------------------------------------------"
def recordSymptom(cursor, conn, staff_id): # doctor 2, nurse 4
cursor.execute('''
select hcno, chart_id
from charts
where edate is Null;
''')
patientCharts = cursor.fetchall()
chartNotSelected = True
patientNotSelected = True
while patientNotSelected:
hcno = raw_input("Enter Patient Health Care Number >")
for row in patientCharts:
if hcno == row[0]:
patientNotSelected = False
break
if patientNotSelected:
print "Patient #%s does not have an open chart" % hcno
choice = raw_input("Enter 'quit' to exit task or enter anything to try another Health care number >").lower()
if choice == 'quit':
return False
while chartNotSelected:
chart_id = raw_input("Enter Patients Chart Number >")
if (hcno, chart_id) in patientCharts:
chartNotSelected = False
else:
print "Patient #%s does not have a chart #%s that is open" % (hcno, chart_id)
choice = raw_input("Enter 'quit' to exit task or enter anything to try another chart number >").lower()
if choice == 'quit':
return False
symptom = raw_input("Enter Patient Symptom >")
cursor.execute('''
insert into symptoms values (?,?,?,datetime('now'),?);
''', (hcno, chart_id, staff_id, symptom,))
conn.commit()
def recordDiagnosis(cursor, conn, staff_id): # doctor 3
cursor.execute('''
select hcno, chart_id
from charts
where edate is Null;
''')
patientCharts = cursor.fetchall()
chartNotSelected = True
patientNotSelected = True
while patientNotSelected:
hcno = raw_input("Enter Patient Health Care Number >")
for row in patientCharts:
if hcno == row[0]:
patientNotSelected = False
break
if patientNotSelected:
print "Patient #%s does not have an open chart" %hcno
choice = raw_input("Enter 'quit' to exit task or enter anything to try another Health care number >").lower()
if choice == 'quit':
return False
while chartNotSelected:
chart_id = raw_input("Enter Patients Chart Number >")
if (hcno,chart_id) in patientCharts:
chartNotSelected = False
else:
print "Patient #%s does not have a chart #%s that is open" %(hcno, chart_id)
choice = raw_input("Enter 'quit' to exit task or enter anything to try another chart number >").lower()
if choice == 'quit':
return False
diagnosis = raw_input("Enter Diagnosis >")
cursor.execute('''
insert into diagnoses values (?,?,?,datetime('now'),?);
''', (hcno, chart_id, staff_id, diagnosis, ))
conn.commit()
def recordMedication(cursor, conn, staff_id): # doctor 4
cursor.execute('''
select hcno, chart_id
from charts
where edate is Null;
''')
patientCharts = cursor.fetchall()
chartNotSelected = True
patientNotSelected = True
while patientNotSelected:
hcno = raw_input("Enter Patient Health Care Number >")
for row in patientCharts:
if hcno == row[0]:
patientNotSelected = False
break
if patientNotSelected:
print "Patient #%s does not have an open chart" %hcno
choice = raw_input("Enter 'quit' to exit task or enter anything to try another Health care number >").lower()
if choice == 'quit':
return False
while chartNotSelected:
chart_id = raw_input("Enter Patients Chart Nu
|
mber >")
if (hcno,chart_id) in patientCharts:
chartNotSelected = False
else:
print "Patient #%s does not have a chart #%s that is open" %(hcno,chart_id)
choice = raw_input("Enter 'quit' to exit task or enter anything to try another chart n
|
umber >").lower()
if choice == 'quit':
return False
medication = raw_input("Enter Drug Name >").lower()
cursor.execute('''
select lower(drug_name)
from reportedallergies
where hcno = ?
;
''', (hcno,))
    # fetchall() returns 1-tuples; flatten them so the membership test below works
    directAllergies = [row[0] for row in cursor.fetchall()]
cursor.execute('''
select lower(r.drug_name), lower(i.canbe_alg)
from reportedallergies r, inferredallergies i
where r.hcno = ?
and r.drug_name = i.alg
''', (hcno,))
inferredAllergies = cursor.fetchall()
if medication in directAllergies:
print "Warning Patient is allergic to %s" %medication
# override = raw_input("Do you wish to procede")
for row in inferredAllergies:
if medication == row[1]:
print "Warning Patient is allergic to %s and therefore could be allergic to %s" %(row[0], row[1])
# maybe select a new med
break
|
exa-analytics/exatomic
|
exatomic/exa/util/tests/test_utility.py
|
Python
|
apache-2.0
| 964
| 0.001037
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2022, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
from os import path
from tempfile import mkdtemp
from exatomic.exa.util.utility import datetime_header, mkp, convert_bytes, get_internal_modules
def test_get_internal_modules():
lst = get_internal_modules()
assert len(lst) > 0
assert lst[0].__name__.startswith("exatomic")
def test_convert_bytes():
a, b = convert_bytes(2049)
assert a >= 2.0
assert b == "KiB"
a, b = convert_bytes(10000000)
assert a >= 9.5367
assert b == "MiB"
a, b = convert_bytes(10000000000)
assert a >= 9.3132
assert b == "GiB"
a, b = convert_bytes(10000000000000)
assert a >= 9.0949
assert b == "TiB"
def test_mkp():
dir_ = mkdtemp()
pth = path.join(dir_, "tmp")
mkp(pth)
assert path.exists(pth)
def test_datetime_header():
assert isinstance(datetime_header(), str)
|
apirobot/shmitter
|
backend/shmitter/users/validators.py
|
Python
|
mit
| 377
| 0
|
from django.core import validators
from django.utils.deconstruct import deconstructible
from django.utils.translation import gettext_lazy as _
@deconstructible
class UsernameValidator(validators.RegexValidator):
regex = r'^[\w.]+$'
message = _(
        'Enter a valid username. This value may contain only letters, '
        'numbers, underscores and periods.'
)
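# A minimal usage sketch (assumed, not part of the original file):
#
#   validate_username = UsernameValidator()
#   validate_username('jane.doe_42')   # passes: letters, digits, '_' and '.' all match [\w.]
#   validate_username('jane doe')      # raises ValidationError: spaces are not allowed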
|
fr33jc/bang-unstable
|
setup.py
|
Python
|
gpl-3.0
| 751
| 0.001332
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from bang import VERSION
import os.path
ETC = os.path.join(os.path.dirname(__file__), 'etc')
with open(os.path.join(ETC, 'requirements.pip')) as f:
reqs = [l.strip() for l in f if '://' not in l]
reqs.append('distribute')
setup(
name='bang',
version=VERSION,
author='fr33jc',
author_email='fr33jc@gmail.com',
packages=find_packages(exclude=['tests']),
package_data={'bang': ['bang.wav']},
license='GPLv3',
description='Server and cloud resource deployment automation',
platforms='POSIX',
url='https://github.com/fr33jc/bang',
install_requires=reqs,
scripts=['bin/bang'],
)
|
open-synergy/opensynid-fleet
|
fleet_work_order/reports/fleet_work_order_analysis.py
|
Python
|
agpl-3.0
| 3,619
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2018 OpenSynergy Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import models, fields
from openerp import tools
class FleetWorkOrderAnalysis(models.Model):
_name = "fleet.work_order_analysis"
_description = "Fleet Work Order Analysis"
_auto = False
type_id = fields.Many2one(
string="Type",
comodel_name="fleet.work.order.type",
)
vehicle_id = fields.Many2one(
string="Vehicle",
comodel_name="fleet.vehicle",
)
driver_id = fields.Many2one(
string="Driver",
comodel_name="res.partner",
)
co_driver_id = fields.Many2one(
string="Co-Driver",
comodel_name="res.partner",
)
date_start = fields.Datetime(
string="ETD",
)
date_end = fields.Datetime(
string="ETA",
)
real_date_depart = fields.Datetime(
string="RTD",
)
real_date_arrive = fields.Datetime(
string="RTA",
)
odometer = fields.Float(
string="Odoometer",
)
start_location_id = fields.Many2one(
string="Start Location",
comodel_name="res.partner",
)
end_location_id = fields.Many2one(
string="End Location",
comodel_name="res.partner",
)
distance = fields.Float(
string="Distance",
)
state = fields.Selection(
string="State",
selection=[
("draft", "Draft"),
("confirmed", "Confirmed"),
("depart", "Depart"),
("arrive", "Arrive"),
("cancelled", "Cancelled"),
],
)
def _select(self):
select_str = """
SELECT row_number() OVER() as id,
a.type_id AS type_id,
a.vehicle_id AS vehicle_id,
a.driver_id AS driver_id,
a.co_driver_id AS co_driver_id,
a.date_start AS date_start,
a.date_end AS date_end,
a.real_date_depart AS real_date_depart,
a.real_date_arrive AS real_date_arrive,
a.start_location_id AS start_location_id,
a.end_location_id AS end_location_id,
a.state AS state,
SUM(a.end_odometer - a.start_odometer) AS odometer,
SUM(a.distance) AS distance
"""
return select_str
def _from(self):
from_str = """
fleet_work_order AS a
"""
return from_str
def _where(self):
where_str = """
"""
return where_str
def _join(self):
join_str = """
"""
return join_str
def _group_by(self):
group_str = """
GROUP BY a.type_id,
a.vehicle_id,
a.driver_id,
a.co_driver_id,
a.date_start,
a.date_end,
a.real_date_depart,
a.real_date_arrive,
a.start_location_id,
a.end_location_id,
a.state
"""
return group_str
def init(self, cr):
tools.drop_view_if_exists(cr, self._table)
# pylint: disable=locally-disabled, sql-injection
cr.execute("""CREATE or REPLACE VIEW %s as (
%s
FROM %s
%s
%s
%s
)""" % (
self._table,
self._select(),
self._from(),
self._join(),
self._where(),
self._group_by()
))
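        # Schematically (a sketch, not the verbatim output), the executed
        # statement is:
        #   CREATE or REPLACE VIEW fleet_work_order_analysis as (
        #       SELECT row_number() OVER() as id, a.type_id, ...,
        #              SUM(a.distance) AS distance
        #       FROM fleet_work_order AS a
        #       GROUP BY a.type_id, a.vehicle_id, ..., a.state
        #   )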
|
custode/reviewboard
|
reviewboard/webapi/resources/draft_file_attachment.py
|
Python
|
mit
| 4,019
| 0
|
from __future__ import unicode_literals
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from djblets.util.decorators import augment_method_from
from djblets.webapi.decorators import webapi_login_required
from djblets.webapi.responses import WebAPIResponsePaginated
from reviewboard.webapi.base import WebAPIResource
from reviewboard.webapi.decorators import webapi_check_local_site
from reviewboard.webapi.resources import resources
from reviewboard.webapi.resources.base_file_attachment import \
BaseFileAttachmentResource
class DraftFileAttachmentResource(BaseFileAttachmentResource):
"""Provides information on new file attachments being added to a draft of
a review request.
These are files that will be shown once the pending review request
draft is published.
"""
added_in = '1.6'
name = 'draft_file_attachment'
uri_name = 'file-attachments'
model_parent_key = 'drafts'
allowed_methods = ('GET', 'DELETE', 'POST', 'PUT',)
def get_queryset(self, request, review_request_id, *args, **kwargs):
try:
draft = resources.review_request_draft.get_object(
request, review_request_id=review_request_id, *args, **kwargs)
inactive_ids = \
draft.inactive_file_attachments.values_list('pk', flat=True)
q = Q(review_request=review_request_id) | Q(drafts=draft)
query = self.model.objects.filter(q)
query = query.exclude(pk__in=inactive_ids)
return query
except ObjectDoesNotExist:
return self.model.objects.none()
def serialize_caption_field(self, obj, **kwargs):
return obj.draft_caption or obj.caption
@webapi_check_local_site
@webapi_login_required
@augment_method_from(BaseFileAttachmentResource)
def get(self, *args, **kwargs):
pass
@webapi_check_local_site
@webapi_login_required
@augment_method_from(BaseFileAttachmentResource)
def delete(self, *args, **kwargs):
"""Deletes the file attachment from the draft.
This will remove the file attachment from the draft review request.
        This cannot be undone.
        This can be used to remove old files that were previously
shown, as well as newly added files that were part of the
draft.
Instead of a payload response on success, this will return :http:`204`.
"""
pass
@webapi_check_local_site
@webapi_login_required
@augment_method_from(WebAPIResource)
def get_list(self, *args, **kwargs):
"""Returns a list of draft files.
Each file attachment in this list is an uploaded file attachment that
will be shown in the final review request. These may include newly
        added file attachments or files that were already part of the
existing review request. In the latter case, existing files
are shown so that their captions can be added.
"""
pass
def _get_list_impl(self, request, *args, **kwargs):
"""Returns the list of files on this draft.
This is a specialized version of the standard get_list function
that uses this resource to serialize the children, in order to
guarantee that we'll be able to identify them as files that are
part of the draft.
"""
return WebAPIResponsePaginated(
request,
queryset=self._get_queryset(request, is_list=True,
*args, **kwargs),
results_key=self.list_result_key,
serialize_object_func=lambda obj: self.serialize_object(
obj, request=request, *args, **kwargs),
extra_data={
'links': self.get_links(self.list_child_resources,
request=request, *args, **kwargs),
},
**self.build_response_args(request))
draft_file_attachment_resource = DraftFileAttachmentResource()
|
CadeiraCuidadora/UMISS-backend
|
umiss_project/manage.py
|
Python
|
gpl-3.0
| 811
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "umiss_project.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
        raise
    execute_from_command_line(sys.argv)
|
googleapis/python-aiplatform
|
samples/generated_samples/aiplatform_v1beta1_generated_featurestore_service_update_entity_type_async.py
|
Python
|
apache-2.0
| 1,513
| 0.001322
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for UpdateEntityType
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_v1beta1_generated_FeaturestoreService_UpdateEntityType_async]
from google.cloud import aiplatform_v1beta1
async def sample_update_entity_type():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.UpdateEntityTypeRequest(
)
# Make the request
response = await client.update_entity_type(request=request)
# Handle the response
print(response)
# [END aiplatform_v1beta1_generated_FeaturestoreService_UpdateEntityType_async]
|
Fuchida/Archive
|
albme-py/script.py
|
Python
|
mit
| 150
| 0.02
|
def sum(*args):
total = 0
for number in args:
if isinstance(number, int):
            total += number
    return total
print(sum(1,5))
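# Expected output: 6. Non-int arguments are skipped by the isinstance check,
# so e.g. sum(1, 5, 'x') would also print 6. (Note that this function shadows
# the builtin sum.)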
|
petteraas/SoCo
|
examples/plugins/socoplugins.py
|
Python
|
mit
| 1,032
| 0
|
#!/usr/bin/env python
# This illustrates how to use SoCo plugins
# an example plugin is provided in soco.plugins.example.ExamplePlugin
import time
from soco import SoCo
from soco.plugins import SoCoPlugin
def main():
speakers = [speaker.ip_address for speaker in SoCo.discover()]
if not speakers:
print("no speakers found, exiting.")
return
    soco = SoCo(speakers[0])
# get a plugin by name (eg from a config file)
myplugin = SoCoPlugin.from_name(
"soco.plugins.example.ExamplePlugin", soco, "some user"
)
# do something with your plugin
print("Testing", myplugin.name)
myplugin.music_plugin_play()
time.sleep(5)
# create a plugin by normal instantiation
    from soco.plugins.example import ExamplePlugin
# create a new plugin, pass the soco instance to it
myplugin = ExamplePlugin(soco, "a user")
print("Testing", myplugin.name)
# do something with your plugin
myplugin.music_plugin_stop()
if __name__ == "__main__":
main()
|
richardliaw/ray
|
python/ray/tune/suggest/nevergrad.py
|
Python
|
apache-2.0
| 12,589
| 0
|
import logging
import pickle
from typing import Dict, Optional, Union, List, Sequence
from ray.tune.result import DEFAULT_METRIC
from ray.tune.sample import Categorical, Domain, Float, Integer, LogUniform, \
Quantized
from ray.tune.suggest.suggestion import UNRESOLVED_SEARCH_SPACE, \
UNDEFINED_METRIC_MODE, UNDEFINED_SEARCH_SPACE
from ray.tune.suggest.variant_generator import parse_spec_vars
from ray.tune.utils import flatten_dict
from ray.tune.utils.util import unflatten_dict
try:
import nevergrad as ng
from nevergrad.optimization import Optimizer
from nevergrad.optimization.base import ConfiguredOptimizer
Parameter = ng.p.Parameter
except ImportError:
ng = None
Optimizer = None
ConfiguredOptimizer = None
Parameter = None
from ray.tune.suggest import Searcher
logger = logging.getLogger(__name__)
class NevergradSearch(Searcher):
"""Uses Nevergrad to optimize hyperparameters.
Nevergrad is an open source tool from Facebook for derivative free
optimization. More info can be found at:
https://github.com/facebookresearch/nevergrad.
You will need to install Nevergrad via the following command:
.. code-block:: bash
$ pip install nevergrad
Parameters:
optimizer (nevergrad.optimization.Optimizer|class): Optimizer provided
from Nevergrad. Alter
space (list|nevergrad.parameter.Parameter): Nevergrad parametrization
to be passed to optimizer on instantiation, or list of parameter
names if you passed an optimizer object.
metric (str): The training result objective value attribute. If None
but a mode was passed, the anonymous metric `_metric` will be used
per default.
mode (str): One of {min, max}. Determines whether objective is
minimizing or maximizing the metric attribute.
points_to_evaluate (list): Initial parameter suggestions to be run
first. This is for when you already have some good parameters
            you want Nevergrad to run first to help the algorithm
            make better suggestions for future parameters. Needs to be
            a list of dicts of Nevergrad-named variables.
use_early_stopped_trials: Deprecated.
max_concurrent: Deprecated.
Tune automatically converts search spaces to Nevergrad's format:
.. code-block:: python
import nevergrad as ng
config = {
"width": tune.uniform(0, 20),
"height": tune.uniform(-100, 100),
"activation": tune.choice(["relu", "tanh"])
}
current_best_params = [{
"width": 10,
"height": 0,
"activation": relu",
}]
ng_search = NevergradSearch(
optimizer=ng.optimizers.OnePlusOne,
metric="mean_loss",
mode="min",
points_to_evaluate=current_best_params)
run(my_trainable, config=config, search_alg=ng_search)
If you would like to pass the search space manually, the code would
look like this:
.. code-block:: python
import nevergrad as ng
space = ng.p.Dict(
width=ng.p.Scalar(lower=0, upper=20),
height=ng.p.Scalar(lower=-100, upper=100),
activation=ng.p.Choice(choices=["relu", "tanh"])
)
ng_search = NevergradSearch(
optimizer=ng.optimizers.OnePlusOne,
space=space,
metric="mean_loss",
mode="min")
run(my_trainable, search_alg=ng_search)
"""
def __init__(self,
optimizer: Union[None, Optimizer, ConfiguredOptimizer] = None,
space: Optional[Union[Dict, Parameter]] = None,
metric: Optional[str] = None,
mode: Optional[str] = None,
max_concurrent: Optional[int] = None,
points_to_evaluate: Optional[List[Dict]] = None,
**kwargs):
assert ng is not None, """Nevergrad must be installed!
You can install Nevergrad with the command:
`pip install nevergrad`."""
if mode:
assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."
super(NevergradSearch, self).__init__(
metric=metric, mode=mode, max_concurrent=max_concurrent, **kwargs)
self._space = None
self._opt_factory = None
self._nevergrad_opt = None
if points_to_evaluate is None:
self._points_to_evaluate = None
elif not isinstance(points_to_evaluate, Sequence):
raise ValueError(
f"Invalid object type passed for `points_to_evaluate`: "
"{type(points_to_evaluate)}. "
f"Please pass a list of points (dictionaries) instead.")
else:
self._points_to_evaluate = list(points_to_evaluate)
if isinstance(space, dict) and space:
resolved_vars, domain_vars, grid_vars = parse_spec_vars(space)
if domain_vars or grid_vars:
logger.warning(
UNRESOLVED_SEARCH_SPACE.format(
par="space", cls=type(self)))
space = self.convert_search_space(space)
if isinstance(optimizer, Optimizer):
            if space is not None and not isinstance(space, list):
raise ValueError(
"If you pass a configured optimizer to Nevergrad, either "
"pass a list of parameter names or None as the `space` "
"parameter.")
self._parameters = space
self._nevergrad_opt = optimizer
elif isinstance(optimizer, ConfiguredOptimizer):
self._opt_factory = optimizer
self._parameters = None
self._space = space
else:
raise ValueError(
"The `optimizer` argument passed to NevergradSearch must be "
"either an `Optimizer` or a `ConfiguredOptimizer`.")
self._live_trial_mapping = {}
self.max_concurrent = max_concurrent
if self._nevergrad_opt or self._space:
self._setup_nevergrad()
def _setup_nevergrad(self):
if self._opt_factory:
self._nevergrad_opt = self._opt_factory(self._space)
# nevergrad.tell internally minimizes, so "max" => -1
if self._mode == "max":
self._metric_op = -1.
elif self._mode == "min":
self._metric_op = 1.
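        # Sketch of the intended effect (the remainder of this file is
        # truncated here): a reported metric value v is presumably handed to
        # the optimizer as self._metric_op * v, so that maximization becomes
        # the minimization Nevergrad performs internally.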
if self._metric is None and self._mode:
# If only a mode was passed, use anonymous metric
self._metric = DEFAULT_METRIC
if hasattr(self._nevergrad_opt, "instrumentation"): # added in v0.2.0
if self._nevergrad_opt.instrumentation.kwargs:
if self._nevergrad_opt.instrumentation.args:
raise ValueError(
"Instrumented optimizers should use kwargs only")
if self._parameters is not None:
raise ValueError("Instrumented optimizers should provide "
"None as parameter_names")
else:
                if self._parameters is None:
raise ValueError("Non-instrumented optimizers should have "
"a list of parameter_names")
if len(self._nevergrad_opt.instrumentation.args) != 1:
raise ValueError(
"Instrumented optimizers should use kwargs only")
if self._parameters is not None and \
self._nevergrad_opt.dimension != len(self._parameters):
raise ValueError("len(parameters_names) must match optimizer "
"dimension for non-instrumented optimizers")
def set_search_properties(self, metric: Optional[str], mode: Optional[str],
config: Dict) -> bool:
if self._nevergrad_opt or self._space:
return False
space = self.convert_search_space(config)
self._space = space
if metric:
self._metric = metric
|
shrinidhi666/rbhus
|
etc/blend_lin/default.py
|
Python
|
gpl-3.0
| 5,828
| 0.025566
|
#!/usr/bin/env python2
#-*- coding: utf-8 -*-
__author__ = "Shrinidhi Rao"
__license__ = "GPL"
__email__ = "shrinidhi666@gmail.com"
import os
import sys
import pwd
import time
import socket
import subprocess
import simplejson
rbhus_main_path = os.sep.join(os.path.abspath(__file__).split(os.sep)[0:-3])
sys.path.append(rbhus_main_path)
import rbhus.utilsPipe
import rbhus.renderPlugin
taskId = os.environ['rbhus_taskId']
frameId = os.environ['rbhus_frameId']
frames = os.environ['rbhus_frames']
user = os.environ['rbhus_user']
fileName = os.environ['rbhus_fileName']
btCmd = os.environ['rbhus_btCmd'] #Should be run by the server .. WTF!!!!!!!!!
fileType = os.environ['rbhus_fileType']
renderer = os.environ['rbhus_renderer']
minRam = os.environ['rbhus_minRam']
maxRam = os.environ['rbhus_maxRam']
outDir = os.environ['rbhus_outDir']
outName = os.environ['rbhus_outName']
logBase = os.environ['rbhus_logBase']
framePad = os.environ['rbhus_pad']
atCmd = os.environ['rbhus_atCmd'] #Should be run by the server .. WTF!!!!!!!!!
bfCmd = os.environ['rbhus_bfCmd']
afCmd = os.environ['rbhus_afCmd']
rThreads = os.environ['rbhus_threads']
renExtArgs = os.environ['rbhus_renExtArgs']
layer = os.environ['rbhus_layer']
pad = os.environ['rbhus_pad']
imType = os.environ['rbhus_imageType']
washMyButt = os.environ['rbhus_washmybutt']
runScript = os.environ['rbhus_runScript']
camera = os.environ['rbhus_camera']
res = os.environ['rbhus_resolution']
RENDERCMD = "/usr/local/bin/blender -noaudio -b \"" + str(fileName) +"\""
if("rbhus_renExtEnv" in rbhus.renderPlugin.env):
extEnv = rbhus.renderPlugin.env['rbhus_renExtEnv']
if(extEnv != "default"):
extEnvDict = simplejson.loads(extEnv)
if("exe" in extEnvDict):
RENDERCMD = extEnvDict['exe'] +" -noaudio -b \"" + str(fileName) + "\""
if(renExtArgs == "None"):
renExtArgs = ""
RENDER_CMD = ""
outputN = "/"+ "/".join(runScript.split("/")[0:-1]) + "/" +"outputNodes.py"
layerScF = "/tmp/"+ str(taskId) +"_"+ str(frameId) +"_layer.py"
cameraF = "/tmp/"+ str(taskId) +"_"+ str(frameId) +"_camera.py"
resF = "/tmp/"+ str(taskId) +"_"+ str(frameId) +"_res.py"
defaultF = "/tmp/"+ str(taskId) +"_"+ str(frameId) +"_defF.py"
wbd = open(washMyButt,"w")
wbd.writelines(layerScF +"\n\r")
wbd.writelines(cameraF +"\n\r")
wbd.writelines(resF +"\n\r")
wbd.writelines(defaultF +"\n\r")
wbd.flush()
fRs = " -f ".join(frames.split(","))
fr = " -f "+ fRs
outFile = "default"
if(outDir != "default"):
outFile = outDir.rstrip("/") + "/"
if(outName != "default"):
  outFile = outFile.rstrip("/") + "/" + ".".join(outName.split(".")[0:-1]) + "_" + "".rjust(int(pad),"#") + "." + outName.split(".")[-1]
outputNoutF = "/tmp/"+ str(taskId) +"_"+ str(frameId) +"_outputNodes.py"
wbd.writelines(outputNoutF +"\n\r")
wbd.flush()
wbd.close()
try:
os.system("cp -a "+ outputN +" "+ outputNoutF +" >& /dev/null")
except:
pass
try:
os.system("sed -i 's/re
|
nameOutputDir/"+"\\/".join(outDir.split("/")) +"/' "+ outputNoutF +" >& /dev/null")
except:
pass
if(layer != "default"):
try:
os.system("sed -i 's/renameRenderLayer/"+ layer +"/' "+ outputNoutF +" >& /dev/null")
except:
pass
if(renderer != "default"):
RENDERCMD = RENDERCMD +" -E "+ renderer
RENDERCMD = RENDERCMD +" -t "+ rThreads
defaultScripts = "import bpy\nbpy.context.scene.render.use_save_buffers = False\nbpy.context.scene.render.use_overwrite = True\nbpy.context.scene.render.use_simplify = False\nbpy.context.scene.render.fps = 24\nbpy.context.scene.render.fps_base = 1\nbpy.context.scene.render.use_single_layer = False\nbpy.context.scene.render.use_stamp = False"
if((imType.find("PNG") >=0) or (imType.find("EXR") >=0)):
defaultScripts = defaultScripts +"\nbpy.context.scene.render.image_settings.color_depth = '16'"
if(imType == "EXR"):
defaultScripts = defaultScripts + "\nbpy.context.scene.render.image_settings.exr_codec = 'ZIPS'"
if(imType == "PNG-RGB"):
defaultScripts = defaultScripts +"\nbpy.context.scene.render.image_settings.color_mode = 'RGB'"
imType = "PNG"
elif(imType == "PNG-RGBA"):
defaultScripts = defaultScripts + "\nbpy.context.scene.render.image_settings.color_mode = 'RGBA'"
imType = "PNG"
if(renderer == "CYCLES"):
defaultScripts = defaultScripts + "\nbpy.context.scene.cycles.device = 'CPU'"
dF = open(defaultF,"w")
dF.writelines(defaultScripts)
dF.flush()
dF.close()
RENDERCMD = RENDERCMD +" --python "+ defaultF
if(layer != "default"):
layerScript = "import bpy\nfor x in bpy.context.scene.render.layers:\n bpy.context.scene.render.layers[x.name].use = False\n\n"
lay = layer.split(",")
for l in lay:
if(l):
layerScript = layerScript + "\nbpy.context.scene.render.layers[\'"+ l +"\'].use = True\n"
f = open(layerScF,"w")
f.writelines(layerScript)
f.flush()
f.close()
RENDERCMD = RENDERCMD +" --python "+ layerScF
if(camera != "default"):
cameraScript = "import bpy\nbpy.context.scene.camera = bpy.data.objects[\""+ camera + "\"]"
c = open(cameraF,"w")
c.writelines(cameraScript)
c.flush()
c.close()
RENDERCMD = RENDERCMD +" --python "+ cameraF
if(res != "default"):
resScript = "import bpy\nbpy.context.scene.render.resolution_x = "+ res.split("x")[0] +"\nbpy.context.scene.render.resolution_y = "+ res.split("x")[1] +"\nbpy.context.scene.render.resolution_percentage = 100"
r = open(resF,"w")
r.writelines(resScript)
r.flush()
r.close()
RENDERCMD = RENDERCMD +" --python "+ resF
RENDERCMD = RENDERCMD +" --python "+ outputNoutF
if(imType.find("default") < 0):
RENDERCMD = RENDERCMD +" -F "+ imType
if(outFile.find("default") < 0):
RENDERCMD = RENDERCMD +" -o "+ outFile
RENDERCMD = RENDERCMD + fr
os.system("chmod 777 {0} {1} {2} {3} {4} >& /dev/null".format(layerScF,cameraF,resF,defaultF,outputNoutF))
print(RENDERCMD)
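# Illustrative shape of the assembled command (hypothetical paths and values):
#   /usr/local/bin/blender -noaudio -b "/jobs/scene.blend" -E CYCLES -t 8 \
#       --python /tmp/42_7_defF.py --python /tmp/42_7_outputNodes.py \
#       -F PNG -o /renders/shot_#### -f 1 -f 2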
rbhus.renderPlugin.sendCmd(RENDERCMD)
sys.exit(0)
|
looooo/pivy
|
examples/SoPyScript/glow.py
|
Python
|
isc
| 1,768
| 0.007919
|
from __future__ import print_function
# make the handler of the color field to call chicken_mcnuggets()
# instead of the default set handle_color() function
handler_registry['color'] = 'chicken_mcnuggets'
def chicken_mcnuggets():
# print color.getValue().getValue()
pass
# Initialize the color Packer (required of any property node that
# uses an SoColorPacker to set diffuse color or transparency:
colorPacker = SoColorPacker()
transpValue = floatp()
def doAction(action):
global transpValue
if not brightness.isIgnored() and not SoOverrideElement.getEmissiveColorOverride(action.getState()):
emissiveColor = color.getValue() * brightness.getValue()
# print 'doAction():', color.getValue().getValue()
# Use the Lazy element to set emissive color.
# Note that this will not actually send the color to GL.
SoLazyElement.setEmissive(action.getState(), emissiveColor)
# To send transparency we again check ignore flag and override element.
    if not transparency.isIgnored() and not SoOverrideElement.getTransparencyOverride(action.getState()):
# keep a copy of the transparency that we are putting in the state:
        transpValue.assign(transparency.getValue())
# The color packer must be provided when the transparency is set,
# so that the transparency will be merged with current diffuse color
# in the state:
SoLazyElement.setTransparency(action.getState(), self, 1, transpValue, colorPacker)
def GLRender(action):
action.setTransparencyType(SoGLRenderAction.SORTED_OBJECT_BLEND)
doAction(action)
def callback(action):
doAction(action)
wa = SoWriteAction()
wa.apply(self)
print(handler_registry)
print('== Glow script loaded ==')
|
ivanhorvath/openshift-tools
|
scripts/monitoring/cron-send-cpu-mem-stats.py
|
Python
|
apache-2.0
| 3,773
| 0.007951
|
#!/usr/bin/env python2
'''
Simple monitoring script to collect per process cpu percentage
and mem usage in bytes (vms or virt and rss)
usage:
cron-send-cpu-mem-stats process_name openshift.whatever.zabbix.key
or
cron-send-cpu-mem-stats 'something parameter more params' openshift.something.parameter.more.params
The script will attach .cpu and .mem.{vms|rss} to the end of the zabbix key name for the values
Future enhancement can be to add multiple instances, that would add pid to the key, but those
would have to be dynamic items in zabbix
'''
# vim: expandtab:tabstop=4:shiftwidth=4
# Disabling invalid-name because pylint doesn't like the naming convention we have.
# pylint: disable=invalid-name
import argparse
import psutil
# Reason: disable pylint import-error because our libs aren't loaded on jenkins.
# Status: temporary until we start testing in a container where our stuff is installed.
# pylint: disable=import-error
from openshift_tools.monitoring.metric_sender import MetricSender
def parse_args():
""" parse the args from the cli """
parser = argparse.ArgumentParser(description='CPU and Memory per process stats collector')
parser.add_argument('--debug', action='store_true', default=None, help='Debug?')
parser.add_argument('process_str', help='The process command line string to match')
parser.add_argument('zabbix_key_prefix', help='Prefix for the key that will be sent \
to zabbix with this data, will get a .cpu and .mem suffix')
return parser.parse_args()
def main():
""" Main function to run the check """
argz = parse_args()
proc_parts = argz.process_str.split()
zagg_data = {}
for proc in psutil.process_iter():
try:
if proc_parts[0] == proc.name():
proc.dict = proc.as_dict(['cmdline', 'memory_info'])
cmdline = proc.dict['cmdline']
if len(proc_parts) > 1 and len(cmdline) > 1:
part_count = len(proc_parts[1:])
# This call might be confusing, (I know I will be in 2 weeks) so quick explanation:
# if the process name matches above, it will check the rest of the strings
# against the /proc/<pid>/cmdline contents, order shouldn't matter since all have to match
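                    # Illustrative (hypothetical values): for process_str
                    # 'java -jar app.jar', proc_parts[1:] is ['-jar', 'app.jar'],
                    # and a cmdline of ['java', '-jar', 'app.jar', '--port=80']
                    # matches because {'-jar', 'app.jar'} is fully contained in
                    # set(cmdline[1:3]).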
if len(set(proc_parts[1:]).intersection(set(cmdline[1:1+part_count]))) != part_count:
continue
if argz.debug:
print cmdline
cpu_percent = '{0:.2f}'.format(proc.cpu_percent(interval=0.5))
mem_vms = '{0}'.format(getattr(proc.dict['memory_info'], 'vms'))
mem_rss = '{0}'.format(getattr(proc.dict['memory_info'], 'rss'))
zagg_data = {'{0}.cpu'.format(argz.zabbix_key_prefix) : cpu_percent,
'{0}.mem.vms'.format(argz.zabbix_key_prefix) : mem_vms,
'{0}.mem.rss'.format(argz.zabbix_key_prefix) : mem_rss}
except psutil.NoSuchProcess:
pass
if argz.debug:
try:
print 'Process ({0}) is using {1} CPU and {2} {3} memory'.format(argz.process_str,
cpu_percent,
mem_vms,
mem_rss)
print 'Zagg will receive: {0}'.format(zagg_data)
except NameError as ex:
print 'No values: {0}'.format(ex)
if zagg_data:
ms = MetricSender(debug=argz.debug)
ms.add_metric(zagg_data)
ms.send_metrics()
if __name__ == '__main__':
main()
|
stefanwebb/tensorflow-models
|
tensorflow_models/initializations.py
|
Python
|
mit
| 1,768
| 0.010747
|
# MIT License
#
# Copyright (c) 2017, Stefan Webb. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
def xavier_init(fan_in, fan_out, constant=1):
""" Xavier initialization of network weights"""
# https://stackoverflow.com/questions/33640581/how-to-do-xavier-initialization-on-tensorflow
low = -constant*np.sqrt(6.0/(fan_in + fan_out))
high = constant*np.sqrt(6.0/(fan_in + fan_out))
return tf.random_uniform((fan_in, fan_out), minval=low, maxval=high, dtype=tf.float32)
def xavier_std(fan_in, fan_out, constant=1):
    return constant * np.sqrt(3.0 / (fan_in + fan_out))  # np.sqrt: math is never imported in this module
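# A minimal usage sketch (illustrative, not part of the original module):
#
#   W = tf.Variable(xavier_init(784, 256))  # uniform in +/- sqrt(6/(784+256)) ~ 0.076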
|
CyberPoint/libpgm
|
tests/run_unit_tests.py
|
Python
|
bsd-3-clause
| 14,090
| 0.004116
|
'''
A module that conducts unit tests on all top-level methods within each class.
Created on Jun 20, 2012
@author: ccabot
'''
import unittest
import sys
# add to PYTHONPATH
sys.path.append("../")
from libpgm.dictionary import Dictionary
from libpgm.graphskeleton import GraphSkeleton
from libpgm.orderedskeleton import OrderedSkeleton
from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork
from libpgm.hybayesiannetwork import HyBayesianNetwork
from libpgm.nodedata import NodeData
from libpgm.tablecpdfactor import TableCPDFactor
from libpgm.sampleaggregator import SampleAggregator
from libpgm.tablecpdfactorization import TableCPDFactorization
from libpgm.lgbayesiannetwork import LGBayesianNetwork
from libpgm.dyndiscbayesiannetwork import DynDiscBayesianNetwork
from libpgm.pgmlearner import PGMLearner
class TestNodeData(unittest.TestCase):
def setUp(self):
self.nd = NodeData()
def test_entriestoinstances(self):
self.nd.load("unittesthdict.txt")
self.nd.entriestoinstances()
result = self.nd.nodes["Intelligence"].choose([])
self.assertTrue(result == 'low' or result == 'high')
class TestGraphSkeleton(unittest.TestCase):
def setUp(self):
self.instance = GraphSkeleton()
self.instance.V = [1,2,3,4,5]
self.instance.E = [[5,1],[1,2]]
def test_getparents(self):
        self.assertEqual(self.instance.getparents(1), [5])
self.assertEqual(self.instance.getparents(4), [])
def test_getchildren(self):
        self.assertEqual(self.instance.getchildren(5), [1])
self.assertEqual(self.instance.getchildren(4), [])
def test_toporder(self):
self.instance.toporder()
self.assertTrue(self.instance.V.index(5)<self.instance.V.index(1))
self.assertTrue(self.instance.V.index(5)<self.instance.V.index(2))
class TestOrderedSkeleton(unittest.TestCase):
def setUp(self):
self.os = OrderedSkeleton()
self.os.load("unittestdict.txt")
self.gs = GraphSkeleton()
self.gs.load("unittestdict.txt")
def test_constructor(self):
self.assertNotEqual(self.os.V, self.gs.V)
self.gs.toporder()
self.assertEqual(self.os.V, self.gs.V)
class TestDiscreteBayesianNetwork(unittest.TestCase):
def setUp(self):
skel = GraphSkeleton()
skel.load("unittestdict.txt")
skel.toporder()
nodedata = NodeData()
nodedata.load("unittestdict.txt")
self.instance = DiscreteBayesianNetwork(skel, nodedata)
def test_randomsample(self):
randomsample = self.instance.randomsample(5)
self.assertTrue(randomsample[0]["Difficulty"] == 'easy' or randomsample[0]["Difficulty"] == 'hard')
for key in randomsample[0].keys():
self.assertTrue(randomsample[0][key] != "default")
def test_randomsamplewithevidence(self):
evidence = dict(Difficulty='easy')
randomsample = self.instance.randomsample(10, evidence)
for entry in randomsample:
self.assertEqual(entry["Difficulty"], 'easy')
class TestLGBayesianNetwork(unittest.TestCase):
def setUp(self):
nodedata = NodeData()
nodedata.load("unittestlgdict.txt")
skel = GraphSkeleton()
skel.load("unittestdict.txt")
skel.toporder()
self.lgb = LGBayesianNetwork(skel, nodedata)
def test_randomsample(self):
seq = self.lgb.randomsample(1)
ctr = 0
for entry in seq[0].keys():
self.assertTrue(seq[0][entry], float)
ctr = ctr + 1
self.assertEqual(ctr, 5)
class TestTableCPDFactor(unittest.TestCase):
def setUp(self):
skel = GraphSkeleton()
skel.load("unittestdict.txt")
skel.toporder()
nodedata = NodeData()
nodedata.load("unittestdict.txt")
self.instance = DiscreteBayesianNetwork(skel, nodedata)
self.factor = TableCPDFactor("Grade", self.instance)
self.factor2 = TableCPDFactor("Letter", self.instance)
def test_constructor(self):
product = 1
for var in self.factor.card:
product *= var
self.assertTrue(len(self.factor.vals) == product)
for i in range(1, len(self.factor.scope)):
self.assertTrue(self.factor.stride[self.factor.scope[i]] == self.factor.stride[self.factor.scope[i-1]] * self.factor.card[i-1])
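        # i.e. values are laid out with the first scope variable varying
        # fastest: stride[scope[0]] == 1, and each later variable's stride is
        # the previous variable's stride times the previous cardinality.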
def test_multiplyfactor(self):
self.factor.multiplyfactor(self.factor2)
a = [0.03, 0.16000000000000003, 0.297, 0.09000000000000001, 0.032, 0.0198, 0.005000000000000001, 0.1, 0.693, 0.05, 0.12, 0.198, 0.27, 0.24, 0.003, 0.81, 0.048, 0.0002, 0.045000000000000005, 0.15, 0.006999999999999999, 0.45, 0.18, 0.002]
b = [3, 2, 2, 2]
c = ['Grade', 'Intelligence', 'Difficulty', 'Letter']
d = {'Grade': 1, 'Intelligence': 3, 'Letter': 12, 'Difficulty': 6}
self.assertEqual(self.factor.vals, a)
self.assertEqual(self.factor.card, b)
self.assertEqual(self.factor.scope, c)
self.assertEqual(self.factor.stride, d)
def test_sumout(self):
self.factor.sumout("Difficulty")
a = [0.35, 0.65, 1.0, 1.4, 0.38, 0.22]
b = [3, 2]
c = ['Grade', 'Intelligence']
d = {'Grade': 1, 'Intelligence': 3}
self.assertEqual(self.factor.vals, a)
self.assertEqual(self.factor.card, b)
self.assertEqual(self.factor.scope, c)
self.assertEqual(self.factor.stride, d)
def test_reducefactor(self):
self.factor.reducefactor("Difficulty", 'easy')
a = [0.3, 0.4, 0.3, 0.9, 0.08, 0.02]
b = [3, 2]
c = ['Grade', 'Intelligence']
d = {'Grade': 1, 'Intelligence': 3}
self.assertEqual(self.factor.vals, a)
self.assertEqual(self.factor.card, b)
self.assertEqual(self.factor.scope, c)
self.assertEqual(self.factor.stride, d)
def test_copy(self):
copy = self.factor.copy()
self.assertTrue((copy is self.factor) == False)
self.assertEqual(copy.vals, self.factor.vals)
self.assertEqual(copy.card, self.factor.card)
self.assertEqual(copy.scope, self.factor.scope)
self.assertEqual(copy.stride, self.factor.stride)
class TestTableCPDFactorization(unittest.TestCase):
def setUp(self):
skel = GraphSkeleton()
skel.load("unittestdict.txt")
skel.toporder()
nodedata = NodeData()
nodedata.load("unittestdict.txt")
self.bn = DiscreteBayesianNetwork(skel, nodedata)
self.fn = TableCPDFactorization(self.bn)
def test_constructor(self):
self.assertTrue(len(self.fn.originalfactorlist) == 5)
for x in range(5):
self.assertTrue(isinstance(self.fn.originalfactorlist[x], TableCPDFactor))
def test_refresh(self):
evidence = dict(Letter='weak')
query = dict(Intelligence=['high'])
result1 = self.fn.specificquery(query, evidence)
self.fn.refresh()
result2 = self.fn.specificquery(query, evidence)
self.assertEqual(result1, result2)
def test_sumproducteliminatevar(self):
self.fn.refresh()
self.fn.sumproducteliminatevar("Difficulty")
yes = 0
for x in range(len(self.fn.factorlist)):
if (self.fn.factorlist[x].scope == ['Grade', 'Intelligence']):
yes += 1
index = x
self.assertTrue(yes == 1)
exp = [0.2, 0.33999999999999997, 0.45999999999999996, 0.74, 0.16799999999999998, 0.09200000000000001]
for x in range(6):
self.assertTrue(abs(self.fn.factorlist[index].vals[x] - exp[x]) < .01)
def test_sumproductve(self):
input = ["Difficulty", "Grade", "Intelligence", "SAT"]
self.fn.refresh()
self.fn.sumproductve(input)
exp = [.498, .502]
for x in range(2):
self.assertTrue(abs(self.fn.factorlist.vals[x] - exp[x]) < .01)
def test_condprobve(self):
evidence = dict(Grade='C', SAT='highscore')
query = dict(Intelligence='high')
self.fn.refresh()
self.fn.condprobve(query, evidence)
exp = [.422, .578]
        for x in range(2):
            self.assertTrue(abs(self.fn.factorlist.vals[x] - exp[x]) < .01)
|
zhiwliu/openshift-ansible
|
roles/openshift_health_checker/openshift_checks/docker_image_availability.py
|
Python
|
apache-2.0
| 13,335
| 0.003975
|
"""Check that required Docker images are available."""
import re
from pipes import quote
from ansible.module_utils import six
from openshift_checks import OpenShiftCheck
from openshift_checks.mixins import DockerHostMixin
NODE_IMAGE_SUFFIXES = ["haproxy-router", "docker-registry", "deployer", "pod"]
DEPLOYMENT_IMAGE_INFO = {
"origin": {
"namespace": "openshift",
"name": "origin",
"registry_console_prefix": "cockpit/",
"registry_console_basename": "kubernetes",
"registry_console_default_version": "latest",
},
"openshift-enterprise": {
"namespace": "openshift3",
"name": "ose",
"registry_console_prefix": "openshift3/",
"registry_console_basename": "registry-console",
"registry_console_default_version": "${short_version}",
},
}
class DockerImageAvailability(DockerHostMixin, OpenShiftCheck):
"""Check that required Docker images are available.
Determine docker images that an install would require and check that they
are either present in the host's docker index, or available for the host to pull
with known registries as defined in our inventory file (or defaults).
"""
name = "docker_image_availability"
tags = ["preflight"]
# we use python-docker-py to check local docker for images, and skopeo
# to look for images available remotely without waiting to pull them.
dependencies = ["python-docker-py", "skopeo"]
# command for checking if remote registries have an image, without docker pull
skopeo_command = "timeout 10 skopeo inspect --tls-verify={tls} {creds} docker://{registry}/{image}"
skopeo_example_command = "skopeo inspect [--tls-verify=false] [--creds=<user>:<pass>] docker://<registry>/<image>"
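    # A filled-in instance of skopeo_command (hypothetical registry and image):
    #   timeout 10 skopeo inspect --tls-verify=true docker://registry.example.com/openshift3/ose:v3.7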
def __init__(self, *args, **kwargs):
super(DockerImageAvailability, self).__init__(*args, **kwargs)
self.registries = dict(
# set of registries that need to be checked insecurely (note: not accounting for CIDR entries)
insecure=set(self.ensure_list("openshift_docker_insecure_registries")),
# set of registries that should never be queried even if given in the image
blocked=set(self.ensure_list("openshift_docker_blocked_registries")),
)
# ordered list of registries (according to inventory vars) that docker will try for unscoped images
regs = self.ensure_list("openshift_docker_additional_registries")
# currently one of these registries is added whether the user wants it or not.
deployment_type = self.get_var("openshift_deployment_type")
if deployment_type == "origin" and "docker.io" not in regs:
regs.append("docker.io")
elif deployment_type == 'openshift-enterprise' and "re
|
gistry.access.redhat.com" not in regs:
regs.append("registry.access.redhat.com")
self.registries["configured"] = regs
# for the oreg_url registry there may be credentials specified
components = self.get_var("oreg_url", default="").split('/')
self.registries["oreg"] = "" if len(components) < 3 else components[0]
# Retrieve and template registry credentials, if provided
self.skopeo_command_creds = ""
oreg_auth_user = self.get_var('oreg_auth_user', default='')
oreg_auth_password = self.get_var('oreg_auth_password', default='')
if oreg_auth_user != '' and oreg_auth_password != '':
if self._templar is not None:
oreg_auth_user = self._templar.template(oreg_auth_user)
oreg_auth_password = self._templar.template(oreg_auth_password)
self.skopeo_command_creds = "--creds={}:{}".format(quote(oreg_auth_user), quote(oreg_auth_password))
# record whether we could reach a registry or not (and remember results)
self.reachable_registries = {}
def is_active(self):
"""Skip hosts with unsupported deployment types."""
deployment_type = self.get_var("openshift_deployment_type")
has_valid_deployment_type = deployment_type in DEPLOYMENT_IMAGE_INFO
return super(DockerImageAvailability, self).is_active() and has_valid_deployment_type
def run(self):
msg, failed = self.ensure_dependencies()
if failed:
return {
"failed": True,
"msg": "Some dependencies are required in order to check Docker image availability.\n" + msg
}
required_images = self.required_images()
missing_images = set(required_images) - set(self.local_images(required_images))
# exit early if all images were found locally
if not missing_images:
return {}
available_images = self.available_images(missing_images)
unavailable_images = set(missing_images) - set(available_images)
if unavailable_images:
unreachable = [reg for reg, reachable in self.reachable_registries.items() if not reachable]
unreachable_msg = "Failed connecting to: {}\n".format(", ".join(unreachable))
blocked_msg = "Blocked registries: {}\n".format(", ".join(self.registries["blocked"]))
msg = (
"One or more required container images are not available:\n {missing}\n"
"Checked with: {cmd}\n"
"Default registries searched: {registries}\n"
"{blocked}"
"{unreachable}"
).format(
missing=",\n ".join(sorted(unavailable_images)),
cmd=self.skopeo_example_command,
registries=", ".join(self.registries["configured"]),
blocked=blocked_msg if self.registries["blocked"] else "",
unreachable=unreachable_msg if unreachable else "",
)
return dict(failed=True, msg=msg)
return {}
def required_images(self):
"""
Determine which images we expect to need for this host.
Returns: a set of required images like 'openshift/origin:v3.6'
The thorny issue of determining the image names from the variables is under consideration
via https://github.com/openshift/openshift-ansible/issues/4415
For now we operate as follows:
* For containerized components (master, node, ...) we look at the deployment type and
use openshift/origin or openshift3/ose as the base for those component images. The
version is openshift_image_tag as determined by the openshift_version role.
* For OpenShift-managed infrastructure (router, registry...) we use oreg_url if
it is defined; otherwise we again use the base that depends on the deployment type.
Registry is not included in constructed images. It may be in oreg_url or etcd image.
"""
required = set()
deployment_type = self.get_var("openshift_deployment_type")
host_groups = self.get_var("group_names")
# containerized etcd may not have openshift_image_tag, see bz 1466622
image_tag = self.get_var("openshift_image_tag", default="latest")
image_info = DEPLOYMENT_IMAGE_INFO[deployment_type]
# template for images that run on top of OpenShift
image_url = "{}/{}-{}:{}".format(image_info["namespace"], image_info["name"], "${component}", "${version}")
image_url = self.get_var("oreg_url", default="") or image_url
if 'oo_nodes_to_config' in host_groups:
for suffix in NODE_IMAGE_SUFFIXES:
required.add(image_url.replace("${component}", suffix).replace("${version}", image_tag))
if self.get_var("osm_use_cockpit", default=True, convert=bool):
required.add(self._registry_console_image(image_tag, image_info))
# images for containerized components
if self.get_var("openshift_is_containerized"):
components = set()
if 'oo_nodes_to_config' in host_groups:
components.update(["node", "openvswitch"])
if 'oo_masters_to_config' in host_groups: # name is "origin" or "ose"
components.add(image_info["name"
|
susemeee/Chunsabot-framework
|
chunsabot/modules/weather.py
|
Python
|
mit
| 3,474
| 0.00489
|
# -*- coding: utf-8 -*-
from chunsabot.botlogic import brain
from chunsabot.database import Database, Cache
from bs4 import BeautifulSoup as Soup
import requests
#from threading import Thread
from multiprocessing.pool import ThreadPool
class Weather:
cWEATHER_GET_URL = Database.load_config("weather_url_all")
pool = ThreadPool(processes=1)
no_such_region = u"해당 지역의 정보가 존재하지 않습니다."
province_map = {}
@staticmethod
def parse_all():
xml = requests.get(Weather.cWEATHER_GET_URL).text
result = {}
weather = Soup(xml)
#channel
weather = weather.find('channel')
#<pubDate>2014년 02월 21일 (금)요일 18:00</pubDate>
published_date = weather.find('pubdate').text
#header->item->desc
weather = weather.find('item').find('description')
#<wf><![CDATA[기압골의 영향으로 26~27....]]></wf>
whole_description = weather.find('header').find('wf').text
all_locations = weather.find('body').findAll('location')
        result[None] = whole_description.replace('<br />', '\r\n').replace('<br/>', '\r\n')
result[u'전국'] = result[None]
for loca in all_locations:
# mapping province text to city
Weather.province_map[loca.find('province').text] = loca.find('city').text
region = loca.find('city').text
data = loca.findAll('data')
res = []
weather_location_header = u"{0} 지역의 날씨 정보입니다.\r\n({1} 발표)".format(region, published_date)
weather_location_footer = u"data by 기상청 RSS 서비스"
            res.append(weather_location_header)
i = 2
for d in data:
                res.append(Weather.prettify(i, d))
i += 1
if i > 6: break
res.append(weather_location_footer)
res = "\r\n".join(res)
result[region] = res
return result
@staticmethod
def prettify(i, data):
apm = [u'오전', u'오후']
day = u"[{0}일 후 {1}예보]".format(i/2, apm[i%2])
tmx = data.find('tmx').text
tmn = data.find('tmn').text
wf = data.find('wf').text
text = u"{0}, 기온 {1}°C ~ {2}°C".format(wf, tmn, tmx)
return u"{0}\r\n{1}".format(day, text)
@brain.route(u"날씨")
def async_view(msg, extras):
def info():
return u"날씨 : 전국 날씨에 관한 간략한 설명 또는 지역별 날씨를 볼 수 있습니다. \r\n예) .날씨 전국 .날씨 경기도"
if not msg:
return info()
else:
region = msg
#using SQLite from another thread causes a program to crash
#should use other DataModel (ex. Mamcached) later
info_all = Cache.load(Weather.cWEATHER_GET_URL)
if not info_all:
info_all = Weather.pool.apply_async(Weather.parse_all, ()).get(timeout=300)
new_cache = True
else:
new_cache = False
#new_cache returns parsed info
if new_cache:
Cache.save(Weather.cWEATHER_GET_URL, info_all)
try:
# Inaccurate
for tu in Weather.province_map.items():
if region in tu[0]:
region = tu[1]
info = info_all[region]
return info
# Sockets.write(room_id, info)
except KeyError:
return Weather.no_such_region
# Sockets.write(room_id, Weather.no_such_region)
|
jayhetee/coveragepy
|
coverage/control.py
|
Python
|
apache-2.0
| 44,134
| 0.000159
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""Core control stuff for coverage.py."""
import atexit
import inspect
import os
import platform
import sys
import traceback
from coverage import env, files
from coverage.annotate import AnnotateReporter
from coverage.backward import string_class, iitems
from coverage.collector import Collector
from coverage.config import CoverageConfig
from coverage.data import CoverageData, CoverageDataFiles
from coverage.debug import DebugControl
from coverage.files import TreeMatcher, FnmatchMatcher
from coverage.files import PathAliases, find_python_files, prep_patterns
from coverage.files import ModuleMatcher, abs_file
from coverage.html import HtmlReporter
from coverage.misc import CoverageException, bool_or_none, join_regex
from coverage.misc import file_be_gone
from coverage.monkey import patch_multiprocessing
from coverage.plugin import FileReporter
from coverage.plugin_support import Plugins
from coverage.python import PythonFileReporter
from coverage.results import Analysis, Numbers
from coverage.summary import SummaryReporter
from coverage.xmlreport import XmlReporter
# Pypy has some unusual stuff in the "stdlib". Consider those locations
# when deciding where the stdlib is.
try:
import _structseq
except ImportError:
_structseq = None
class Coverage(object):
"""Programmatic access to coverage.py.
To use::
from coverage import Coverage
cov = Coverage()
cov.start()
#.. call your code ..
cov.stop()
cov.html_report(directory='covhtml')
"""
def __init__(
self, data_file=None, data_suffix=None, cover_pylib=None,
auto_data=False, timid=None, branch=None, config_file=True,
source=None, omit=None, include=None, debug=None,
concurrency=None,
):
"""
`data_file` is the base name of the data file to use, defaulting to
".coverage". `data_suffix` is appended (with a dot) to `data_file` to
create the final file name. If `data_suffix` is simply True, then a
suffix is created with the machine and process identity included.
`cover_pylib` is a boolean determining whether Python code installed
with the Python interpreter is measured. This includes the Python
standard library and any packages installed with the interpreter.
If `auto_data` is true, then any existing data file will be read when
coverage measurement starts, and data will be saved automatically when
measurement stops.
If `timid` is true, then a slower and simpler trace function will be
used. This is important for some environments where manipulation of
tracing functions breaks the faster trace function.
If `branch` is true, then branch coverage will be measured in addition
to the usual statement coverage.
`config_file` determines what configuration file to read:
* If it is ".coveragerc", it is interpreted as if it were True,
for backward compatibility.
* If it is a string, it is the name of the file to read. If the
file can't be read, it is an error.
* If it is True, then a few standard files names are tried
(".coveragerc", "setup.cfg"). It is not an error for these files
to not be found.
* If it is False, then no configuration file is read.
`source` is a list of file paths or package names. Only code located
in the trees indicated by the file paths or package names will be
measured.
`include` and `omit` are lists of filename patterns. Files that match
`include` will be measured, files that match `omit` will not. Each
will also accept a single string argument.
`debug` is a list of strings indicating what debugging information is
desired.
`concurrency` is a string indicating the concurrency library being used
in the measured code. Without this, coverage.py will get incorrect
results. Valid strings are "greenlet", "eventlet", "gevent", or
"thread" (the default).
.. versionadded:: 4.0
The `concurrency` parameter.
"""
# Build our configuration from a number of sources:
# 1: defaults:
self.config = CoverageConfig()
# 2: from the rcfile, .coveragerc or setup.cfg file:
if config_file:
did_read_rc = False
# Some API users were specifying ".coveragerc" to mean the same as
# True, so make it so.
if config_file == ".coveragerc":
config_file = True
specified_file = (config_file is not True)
if not specified_file:
config_file = ".coveragerc"
did_read_rc = self.config.from_file(config_file)
if not did_read_rc:
if specified_file:
raise CoverageException(
"Couldn't read '%s' as a config file" % config_file
)
self.config.from_file("setup.cfg", section_prefix="coverage:")
# 3: from environment variables:
env_data_file = os.environ.get('COVERAGE_FILE')
if env_data_file:
self.config.data_file = env_data_file
debugs = os.environ.get('COVERAGE_DEBUG')
if debugs:
self.config.debug.extend(debugs.split(","))
# 4: from constructor arguments:
self.config.from_args(
data_file=data_file, cover_pylib=cover_pylib, timid=timid,
branch=branch, parallel=bool_or_none(data_suffix),
source=source, omit=omit, include=include, debug=debug,
concurrency=concurrency,
)
self._debug_file = None
self._auto_data = auto_data
self._data_suffix = data_suffix
# The matchers for _should_trace.
self.source_match = None
self.source_pkgs_match = None
self.pylib_match = self.cover_match = None
self.include_match = self.omit_match = None
# Is it ok for no data to be collected?
self._warn_no_data = True
self._warn_unimported_source = True
# A record of all the warnings that have been issued.
self._warnings = []
# Other instance attributes, set later.
self.omit = self.include = self.source = None
self.source_pkgs = None
self.data = self.data_files = self.collector = None
self.plugins = None
self.pylib_dirs = self.cover_dirs = None
self.data_suffix = self.run_suffix = None
self._exclude_re = None
self.debug = None
# State machine variables:
# Have we initialized everything?
self._inited = False
# Have we started collecting and not stopped it?
self._started = False
# Have we measured some data and not harvested it?
self._measured = False
def _init(self):
"""Set all the initial state.
This is called by the public methods to initialize state. This lets us
construct a :class:`Coverage` object, then tweak its state before this
function is called.
"""
if self._inited:
return
# Create and configure the debugging controller.
if self._debug_file is None:
self._debug_file = sys.stderr
self.debug = DebugControl(self.config.debug, self._debug_file)
# Load plugins
self.plugins = Plugins.load_plugins(self.config.plugins, self.config, self.debug)
# _exclude_re is a dict that maps exclusion list names to compiled
# regexes.
self._exclude_re = {}
self._exclude_regex_stale()
files.set_relative_directory()
# The source argument can be directories or package names.
self.source = []
self.source_pkgs = []
for src in self.config.source or []:
if os.path.exists(src):
|
justinvanwinkle/flexiconfig
|
flexiconfig/__init__.py
|
Python
|
bsd-2-clause
| 419
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from simplejson import load
class Configuration(dict):
def __init__(self, conf_fns):
self.conf_fns = conf_fns
for conf_fn in self.conf_fns:
abs_conf_fn = os.path.abspath(conf_fn)
if os.path.exists(abs_conf_fn):
with open(abs_conf_fn) as f:
self.update(load(f))
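# A minimal usage sketch (hypothetical file names):
#
#   config = Configuration(['defaults.json', 'local.json'])
#   # dict.update means later files win: keys in local.json override defaults.json
#   debug = config.get('debug', False)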
|
alfredhq/alfred-coordinator
|
setup.py
|
Python
|
isc
| 530
| 0
|
from setuptools import setup
setup(
    name='alfred-coordinator',
version='0.1.dev',
license='ISC',
description='Streams tasks from listener to workers.',
url='https://github.com/alfredhq/alfred-coordinator',
author='Alfred Developers',
author_email='team@alfredhq.com',
py_modules=['alfred_coordinator'],
install_requires=[
'PyYAML',
'pyzmq',
],
entry_points={
'console_scripts': [
'alfred-coordinator = alfred_coordinator:main',
],
},
)
|
Torben-D/open62541
|
tools/pyUANamespace/ua_builtin_types.py
|
Python
|
lgpl-3.0
| 59,096
| 0.01391
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###
### Author: Chris Iatrou (ichrispa@core-vector.net)
### Version: rev 13
###
### This program was created for educational purposes and has been
### contributed to the open62541 project by the author. All licensing
### terms for this source is inherited by the terms and conditions
### specified for by the open62541 project (see the projects readme
### file for more information on the LGPL terms and restrictions).
###
### This program is not meant to be used in a production environment. The
### author is not liable for any complications arising due to the use of
### this program.
###
import sys
import xml.dom.minidom as dom
from ua_constants import *
import logging
from time import strftime, strptime
from open62541_MacroHelper import open62541_MacroHelper
logger = logging.getLogger(__name__)
def getNextElementNode(xmlvalue):
if xmlvalue == None:
return None
xmlvalue = xmlvalue.nextSibling
while not xmlvalue == None and not xmlvalue.nodeType == xmlvalue.ELEMENT_NODE:
xmlvalue = xmlvalue.nextSibling
return xmlvalue
if sys.version_info[0] >= 3:
# strings are already parsed to unicode
def unicode(s):
return s
class opcua_value_t():
value = None
name = None
__alias__ = None
__binTypeId__ = 0
stringRepresentation = ""
knownTypes = []
parent = None
def __init__(self, parent):
self.value = None
self.parent = parent
self.stringRepresentation = ""
self.setStringReprentation()
self.__binTypeId__ = 0
self.setNumericRepresentation()
self.__alias__ = None
self.knownTypes = ['boolean', 'int32', 'uint32', 'int16', 'uint16', \
'int64', 'uint64', 'byte', 'sbyte', 'float', 'double', \
'string', 'bytestring', 'localizedtext', 'statuscode', \
'diagnosticinfo', 'nodeid', 'guid', 'datetime', \
'qualifiedname', 'expandednodeid', 'xmlelement']
self.dataType = None
self.encodingRule = []
def getValueFieldByAlias(self, fieldname):
if not isinstance(self.value, list):
return None
if not isinstance(self.value[0], opcua_value_t):
return None
for val in self.value:
if val.alias() == fieldname:
return val.value
return None
def setEncodingRule(self, encoding):
self.encodingRule = encoding
def getEncodingRule(self):
return self.encodingRule
def alias(self, data=None):
if not data == None:
self.__alias__ = data
return self.__alias__
def isBuiltinByString(self, string):
if str(string).lower() in self.knownTypes:
return True
return False
def value(self, data=None):
if not data==None:
self.__value__ = data
return self.__value__
def getTypeByString(self, stringName, encodingRule):
stringName = str(stringName.lower())
if stringName == 'boolean':
t = opcua_BuiltinType_boolean_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'int32':
t = opcua_BuiltinType_int32_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'uint32':
t = opcua_BuiltinType_uint32_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'int16':
t = opcua_BuiltinType_int16_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'uint16':
t = opcua_BuiltinType_uint16_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'int64':
t = opcua_BuiltinType_int64_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'uint64':
t = opcua_BuiltinType_uint64_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'byte':
t = opcua_BuiltinType_byte_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'sbyte':
t = opcua_BuiltinType_sbyte_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'float':
t = opcua_BuiltinType_float_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'double':
t = opcua_BuiltinType_double_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'string':
t = opcua_BuiltinType_string_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'bytestring':
t = opcua_BuiltinType_bytestring_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'localizedtext':
t = opcua_BuiltinType_localizedtext_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'statuscode':
t = opcua_BuiltinType_statuscode_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'diagnosticinfo':
t = opcua_BuiltinType_diagnosticinfo_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'nodeid':
t = opcua_BuiltinType_nodeid_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'guid':
t = opcua_BuiltinType_guid_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'datetime':
t = opcua_BuiltinType_datetime_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'qualifiedname':
t = opcua_BuiltinType_qualifiedname_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'expandednodeid':
t = opcua_BuiltinType_expandednodeid_t(self.parent)
t.setEncodingRule(encodingRule)
elif stringName == 'xmlelement':
t = opcua_BuiltinType_xmlelement_t(self.parent)
t.setEncodingRule(encodingRule)
else:
logger.debug("No class representing stringName " + stringName + " was found. Cannot create builtinType.")
return None
return t
def parseXML(self, xmlvalue):
logger.debug("parsing xmlvalue for " + self.parent.browseName() + " (" + str(self.parent.id()) + ") according to " + str(self.parent.dataType().target().getEncoding()))
if not "value" in xmlvalue.tagName.lower():
logger.error("Expected <Value> , but found " + xmlvalue.tagName + " instead. Value will not be parsed.")
return
if len(xmlvalue.childNodes) == 0:
logger.error("Expected childnodes for value, but none where found... Value will not be parsed.")
return
for n in xmlvalue.childNodes:
if n.nodeType == n.ELEMENT_NODE:
xmlvalue = n
break
if "ListOf" in xmlvalue.tagName:
self.value = []
for el in xmlvalue.childNodes:
if not el.nodeType == el.ELEMENT_NODE:
continue
self.value.append(self.__parseXMLSingleValue(el))
else:
self.value = [self.__parseXMLSingleValue(xmlvalue)]
logger.debug( "Parsed Value: " + str(self.value))
def __parseXMLSingleValue(self, xmlvalue, alias=None, encodingPart=None):
# Parse an encoding list such as enc = [[Int32], ['Duration', ['DateTime']]],
# returning a possibly aliased variable or list of variables.
# Keep track of aliases, as ['Duration', ['Hawaii', ['UtcTime', ['DateTime']]]]
# will be of type DateTime, but tagged as <Duration>2013-04-10 12:00 UTC</Duration>,
# and not as <Duration><Hawaii><UtcTime><String>2013-04-10 12:00 UTC</String>...
# Encoding may be partially handed down (iterative call). Only resort to
# type definition if we are not given a specific encoding to match
if encodingPart == None:
enc = self.parent.dataType().target().getEncoding()
else:
enc = encodingPart
# Check the structure of the encoding list to determine if a type is to be
# returned or we need to descend further checking aliases or multipart types
# such as extension Objects.
if len(enc) == 1:
# 0: ['BuiltinType'] either builtin type
# 1: [ [ 'Alias', [...], n] ] or single alias for possible multipart
if isinstance(enc[0], str):
# 0: 'BuiltinType'
if alias != None:
if not xmlvalue.tagName == alias:
logger.error("Expected XML element with tag " + alias + " but found " + xmlvalue.tagName + " instead")
return None
else:
t = self.getTypeByString(enc[0]
|
Karel-van-de-Plassche/bokeh
|
bokeh/core/property/containers.py
|
Python
|
bsd-3-clause
| 14,792
| 0.001149
|
''' Provide special versions of list and dict, that can automatically notify
about changes when used for property values.
Mutations to these values are detected, and the properties owning the
collection are notified of the changes. Consider the following model
definition:
.. code-block:: python
class SomeModel(Model):
options = List(String)
If we have an instance of this model, ``m``, then we can set the entire
value of the ``options`` property at once:
.. code-block:: python
m.options = ["foo", "bar"]
When we do this in the context of a Bokeh server application that is being
viewed in a browser, this change is automatically noticed, and the
corresponding BokehJS property in the browser is synchronized, possibly
causing some change in the visual state of the application in the browser.
But it is also desirable that changes *inside* the ``options`` list also
be detected. That is, the following kinds of operations should also be
automatically synchronized between BokehJS and a Bokeh server:
.. code-block:: python
m.options.append("baz")
m.options[2] = "quux"
m.options.insert(0, "bar")
The classes in this module provide this functionality.
.. note::
These classes form part of the very low-level machinery that implements
the Bokeh model and property system. It is unlikely that any of these
classes or their methods will be applicable to any standard usage or to
anyone who is not directly developing on Bokeh's own infrastructure.
'''
from __future__ import absolute_import, print_function
from ...util.dependencies import import_optional
pd = import_optional('pandas')
def notify_owner(func):
''' A decorator for mutating methods of property container classes
that notifies owners of the property container about mutating changes.
Args:
func (callable) : the container method to wrap in a notification
Returns:
wrapped method
Examples:
A ``__setitem__`` could be wrapped like this:
.. code-block:: python
# x[i] = y
@notify_owner
def __setitem__(self, i, y):
return super(PropertyValueDict, self).__setitem__(i, y)
The returned wrapped method will have a docstring indicating what
original method it is wrapping.
'''
def wrapper(self, *args, **kwargs):
old = self._saved_copy()
result = func(self, *args, **kwargs)
self._notify_owners(old)
return result
wrapper.__doc__ = "Container method ``%s`` instrumented to notify property owners" % func.__name__
return wrapper
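# Editor-added sketch (_NotifyOwnerDemo is invented, not Bokeh code): it shows
# the snapshot -> mutate -> notify sequence that notify_owner imposes on a
# wrapped mutator, using the same _saved_copy/_notify_owners hook names.
class _NotifyOwnerDemo(list):
    def _saved_copy(self):
        return list(self)
    def _notify_owners(self, old):
        print("mutated: %r -> %r" % (old, list(self)))
    @notify_owner
    def append(self, obj):
        return list.append(self, obj)

if __name__ == '__main__':
    _d = _NotifyOwnerDemo([1])
    _d.append(2)   # prints: mutated: [1] -> [1, 2]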
class PropertyValueContainer(object):
''' A base class for property container classes that support change
notifications on mutating operations.
This class maintains an internal list of property owners, and also
provides a private mechanism for methods wrapped with
:func:`~bokeh.core.property_containers.notify_owners` to update
those owners when mutating changes occur.
'''
def __init__(self, *args, **kwargs):
self._owners = set()
super(PropertyValueContainer, self).__init__(*args, **kwargs)
def _register_owner(self, owner, descriptor):
self._owners.add((owner, descriptor))
def _unregister_owner(self, owner, descriptor):
self._owners.discard((owner, descriptor))
def _notify_owners(self, old, hint=None):
for (owner, descriptor) in self._owners:
descriptor._notify_mutated(owner, old, hint=hint)
def _saved_copy(self):
raise RuntimeError("Subtypes must implement this to make a backup copy")
class PropertyValueList(PropertyValueContainer, list):
''' A list property value container that supports change notifications on
mutating operations.
When a Bokeh model has a ``List`` property, the ``PropertyValueLists`` are
transparently created to wrap those values. These ``PropertyValueList``
values are subject to normal property validation. If the property type
``foo = List(Str)`` then attempting to set ``x.foo[0] = 10`` will raise
an error.
Instances of ``PropertyValueList`` can be explicitly created by passing
any object that the standard list initializer accepts, for example:
.. code-block:: python
>>> PropertyValueList([10, 20])
[10, 20]
>>> PropertyValueList((10, 20))
[10, 20]
The following mutating operations on lists automatically trigger
notifications:
.. code-block:: python
del x[y]
del x[i:j]
x += y
x *= y
x[i] = y
x[i:j] = y
x.append
x.extend
x.insert
x.pop
x.remove
x.reverse
x.sort
'''
def __init__(self, *args, **kwargs):
return super(PropertyValueList, self).__init__(*args, **kwargs)
def _saved_copy(self):
return list(self)
# delete x[y]
@notify_owner
def __delitem__(self, y):
return super(PropertyValueList, self).__delitem__(y)
# delete x[i:j]
@notify_owner
def __delslice__(self, i, j):
# Note: this is different py2 vs py3, py3 calls __delitem__ with a
# slice index, and does not have this method at all
return super(PropertyValueList, self).__delslice__(i, j)
# x += y
@notify_owner
def __iadd__(self, y):
return super(PropertyValueList, self).__iadd__(y)
# x *= y
@notify_owner
def __imul__(self, y):
return super(PropertyValueList, self).__imul__(y)
# x[i] = y
@notify_owner
def __setitem__(self, i, y):
return super(PropertyValueList, self).__setitem__(i, y)
# x[i:j] = y
@notify_owner
def __setslice__(self, i, j, y):
# Note: this is different py2 vs py3, py3 calls __setitem__ with a
# slice index, and does not have this method at all
return super(PropertyValueList, self).__setslice__(i, j, y)
@notify_owner
def append(self, obj):
return super(PropertyValueList, self).append(obj)
@notify_owner
def extend(self, iterable):
return super(PropertyValueList, self).extend(iterable)
@notify_owner
def insert(self, index, obj):
return super(PropertyValueList, self).insert(index, obj)
@notify_owner
def pop(self, index=-1):
return super(PropertyValueList, self).pop(index)
@notify_owner
def remove(self, obj):
return super(PropertyValueList, self).remove(obj)
@notify_owner
def reverse(self):
return super(PropertyValueList, self).reverse()
@notify_owner
def sort(self, **kwargs):
return super(PropertyValueList, self).sort(**kwargs)
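# Editor-added demo (_EchoDescriptor is a stand-in; Bokeh's real descriptors
# live elsewhere): once an owner is registered, every wrapped mutator above
# reports the pre-mutation snapshot to that owner's descriptor.
class _EchoDescriptor(object):
    def _notify_mutated(self, owner, old, hint=None):
        print("value before mutation: %r" % (old,))

if __name__ == '__main__':
    _pvl = PropertyValueList([10, 20])
    _pvl._register_owner(object(), _EchoDescriptor())
    _pvl.append(30)   # prints: value before mutation: [10, 20]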
class PropertyValueDict(PropertyValueContainer, dict):
''' A dict property value container that supports change notifications on
mutating operations.
When a Bokeh model has a ``Dict`` property, ``PropertyValueDict`` containers
are transparently created to wrap those values. These ``PropertyValueDict``
values are subject to normal property validation. If the property type
``foo = Dict(Str, Str)`` then attempting to set ``x.foo['bar'] = 10`` will
raise an error.
Instances of ``PropertyValueDict`` can be explicitly created by passing
any object that the standard dict initializer accepts, for example:
.. code-block:: python
>>> PropertyValueDict(dict(a=10, b=20))
{'a': 10, 'b': 20}
>>> PropertyValueDict(a=10, b=20)
{'a': 10, 'b': 20}
>>> PropertyValueDict([('a', 10), ['b', 20]])
{'a': 10, 'b': 20}
The following mutating operations on dicts automatically trigger
notifications:
.. code-block:: python
del x[y]
x[i] = y
x.clear
x.pop
x.popitem
x.setdefault
x.update
'''
def __init__(self, *args, **kwargs):
return super(PropertyValueDict, self).__init__(*args, **kwargs)
def _saved_copy(self):
return dict(self)
# delete x[y]
@notify_owner
def __delitem__(self, y):
return super(PropertyValueDict, self).__delitem__(y)
# x[i] = y
@notify_owner
|
cropr/bjk2017
|
cd_subscription/migrations/0005_auto_20161125_1908.py
|
Python
|
apache-2.0
| 2,446
| 0.003679
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cd_subscription', '0004_auto_20161125_1901'),
]
operations = [
migrations.AlterField(
model_name='cdsubscription',
name='badgelength',
field=models.IntegerField(blank=True, default=0, verbose_name='Length badge image'),
),
migrations.AlterField(
model_name='cdsubscription',
name='emailparent',
field=models.EmailField(blank=True, max_length=40, default='', verbose_name='Email parent'),
),
migrations.AlterField(
model_name='cdsubscription',
name='emailplayer',
field=models.EmailField(blank=True, max_length=40, default='', verbose_name='Email player'),
),
migrations.AlterField(
model_name='cdsubscription',
name='fullnameattendant',
field=models.CharField(blank=True, max_length=50, default='', verbose_name='Full name responsible on site'),
),
migrations.AlterField(
model_name='cdsubscription',
name='fullnameparent',
field=models.CharField(blank=True, max_length=50, default='', verbose_name='Full name parent'),
),
migrations.AlterField(
model_name='cdsubscription',
name='mobileattendant',
field=models.CharField(blank=True, max_length=15, default='', verbose_name='GSM number responsible on site'),
),
migrations.AlterField(
model_name='cdsubscription',
name='mobileparent',
field=models.CharField(blank=True, max_length=15, default='', verbose_name='GSM parent'),
),
migrations.AlterField(
model_name='cdsubscription',
name='mobileplayer',
field=models.CharField(blank=True, max_length=15, default='', verbose_name='GSM player'),
),
migrations.AlterField(
model_name='cdsubscription',
name='payamount',
field=models.IntegerField(blank=True, default=0, verbose_name='Amount to pay'),
),
migrations.AlterField(
model_name='cdsubscription',
name='paydate',
field=models.DateField(null=True, verbose_name='Payment date'),
),
]
|
creative-workflow/pi-setup
|
lib/piservices/piservice.py
|
Python
|
mit
| 5,056
| 0.013252
|
import os, helper, config, glob, loader
from fabops import FabricTaskOperator
from policies import PiServicePolicies
from remote.integration import RemoteFileHelper
from initd import InitScript, InitScriptWithTemplate, AutogeneratedInitScript
pisetup_root_folder_on_pi = '/home/pi/pi-setup'
piservice_standard_commands = ['install', 'uninstall', 'deploy', 'info', 'installed']
piservice_managed_standard_commands = ['start', 'stop', 'restart', 'enable_autostart', 'disable_autostart']
class PiService(FabricTaskOperator, PiServicePolicies):
default_attributes = {
'source_files' : ['./*'],
'exclude_source_files' : ['.*\.pyc', '\.git', '\.DS_Store'],
'commands' : [],
'managed_service' : True,
'init_script' : False,
'apt_get_install' : False,
'copy_init_script' : False,
'target_init_script' : None #it is the name of the service
}
def __init__(self):
self._init_defaults()
FabricTaskOperator.__init__(self, self.local_path, self.remote_path)
self._init_service()
self._init_initscript()
def _init_defaults(self):
self.default_attributes.update({
'remote_path' : os.path.realpath(pisetup_root_folder_on_pi+'/services/'+self.name),
'local_path' : os.path.realpath(loader.services_path+'/'+self.name),
'local_pi_setup_path' : os.path.realpath(os.path.dirname(__file__)+'/../../'),
'remote_pi_setup_path': pisetup_root_folder_on_pi
})
for key, value in self.default_attributes.iteritems():
if not hasattr(self, key):
self.__dict__[key] = value
def _init_service(self):
self.config = {}
if config.get() and config.get('services').has_key(self.name):
self.config = config.get('services')[self.name]
self.remote = RemoteFileHelper(self)
self.commands.extend(piservice_standard_commands)
if self.managed_service:
self.commands.extend(piservice_managed_standard_commands)
def _init_initscript(self):
if self.init_script:
if self.init_script == 'installed':
self.init_script = InitScript(self)
elif self.init_script == 'auto':
self.init_script = AutogeneratedInitScript(self)
elif isinstance(self.init_script, basestring):
self.init_script = InitScriptWithTemplate(self, self.init_script)
def install(self, deploy=True):
"""setup the the pi-setup-service"""
if self.is_local():
return
self.api.run('mkdir -p '+self.remote_path)
if self.apt_get_install:
#set the correct terminal, so no notices like "Unable to initialise frontend: Dialog" etc
self.run('export TERM=linux && sudo apt-get -y install '+(' '.join(self.apt_get_install)))
PiService.deploy(self, restart=False)
def deploy(self, restart=True):
"""copy service files to remote service folder"""
self.api.run('mkdir -p '+self.remote_path)
if not self.name == 'setup':
self.service('setup').deploy()
self._copy_src_files()
if self.init_script:
self.init_script.copy()
if restart and not restart in ('0', 'false') and self.init_script:
with self.api.settings(warn_only=True):
self.restart()
def uninstall(self):
"""removes service files from remote service folder"""
self.check_remote_or_exit()
self.check_installed_or_exit()
self.stop()
if self.init_script:
self.init_script.delete()
self.api.run('sudo rm -Rf %s' % self.remote_path)
def _copy_src_files(self):
files = []
for source_file in self.source_files:
for source_file_with_path in glob.glob(self.local_path+'/'+source_file):
files.extend(
helper.collect_files(source_file_with_path))
self.zip_files_and_copy(
helper.filter_files(
files,
self.exclude_source_files),
self.remote_path)
def service(self, name):
return loader.instance(name)
def info(self):
"""print config of this service"""
print self.config
def start(self):
"""start the the service"""
if not self.init_script: return
self.check_installed_or_exit()
self.init_script.start()
def stop(self):
"""stop the the service"""
if not self.init_script: return
self.check_installed_or_exit()
self.init_script.stop()
def restart(self):
"""restart the the service"""
if not self.init_script: return
self.check_installed_or_exit()
self.init_script.restart()
def enable_autostart(self):
"""enable automatic start of this service"""
if not self.init_script: return
self.check_installed_or_exit()
self.init_script.enable_autostart()
def disable_autostart(self):
"""disable automatic start of this service"""
if not self.init_script: return
self.check_installed_or_exit()
self.init_script.disable_autostart()
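# Editor-added hypothetical subclass (service name and package list are
# invented): a concrete service declares its name plus any overrides of
# default_attributes; PiService.__init__ then derives paths, commands and
# the init script for it.
class ExampleService(PiService):
    name = 'example'                  # invented; drives remote/local paths
    apt_get_install = ['htop']        # invented apt package list
    init_script = 'auto'              # request an AutogeneratedInitScript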
|
ImmobilienScout24/afp-core
|
src/unittest/python/aws_federation_proxy_mocks.py
|
Python
|
apache-2.0
| 323
| 0.003096
|
""" Mock classes for use in aws_feder
|
ation_proxy_tests """
from __future__ import print_function, absolute_import, unicode_literals, division
from aws_federation_proxy.aws_federation_proxy import AWSFederationProxy
class MockAWSFederationProxyForInitTest(AWSFederationProxy):
def _setup_provider(self):
pass
|
AdamantLife/alcustoms
|
alcustoms/Programs/SQLExplorer.py
|
Python
|
gpl-3.0
| 16,142
| 0.019081
|
import pathlib
import pprint
import shutil
from alcustoms import sql
from alcustoms.tkinter import advancedtkinter, smarttkinter, style
import tkinter as tk
from tkinter import filedialog, messagebox, scrolledtext
ttk = smarttkinter.ttk.ttk
from alcustoms.methods import minimalist_pprint_pformat
######################################################################
"""
Utility Functions
"""
######################################################################
## Show More Placeholder
SHOWMORE = "(*Show More*)"
## No Rows in Table Placeholder
NOROW = "(*No Rows*)"
## Number of rows to populate the table tree with each time SHOWMORE is clicked
ROWBATCH = 10
def configure_tags(widget):
""" Universal color-scheme for tag-based Widgets """
for (tag,fg) in [("column","blue"),
("constraint","red"),
("table","green"),]:
widget.tag_configure(tag,foreground=fg)
for (tag,font) in [("title",("times",14,'bold'),),
("showmore",("times",12,'italic')),
]:
widget.tag_configure(tag, font = font)
def get_tags(obj,*tags):
""" Returns tags appropriate for the given object.
Additional tags can be passed as positional arguments which
will be included.
"""
tags = list(tags)
if isinstance(obj,sql.Table):
tags.append("table")
elif isinstance(obj,sql.Column):
tags.append("column")
elif isinstance(obj,sql.Constraint):
tags.append("constraint")
return tags
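# Editor-added sketch: get_tags appends a type tag to whatever extra tags are
# passed in; a sql.Table instance would additionally contribute "table".
if __name__ == '__main__':
    print(get_tags(None, "title", "showmore"))   # prints: ['title', 'showmore']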
######################################################################
"""
Panes
"""
######################################################################
class LoadDatabasePane(smarttkinter.Pane):
""" Landing page for loading a new database or quitting the app """
def __init__(self,parent,padding = 10, **kw):
super().__init__(parent, padding = padding, **kw)
ttk.Label(self, text="SQL Explorer", style = "Title.TLabel")
self.loadbutton = ttk.Button(self,text="Load Database")
self.quitbutton = ttk.Button(self,text="Quit")
smarttkinter.masspack(self)
######## Explorer App Primary Screen
class TogglePane(smarttkinter.Pane):
""" A Toolbar along the top of the screen which toggles panes on and off """
_gmkwargs = dict(side='top', fill="x", expand = True)
def __init__(self,parent,**kw):
super().__init__(parent,**kw)
self.homebutton = ttk.Button(self,text = "Close DB")
smarttkinter.masspack(self,side='left')
class TreePane(smarttkinter.Pane):
""" A Tree Widget displaying the Tables/Views, Columns, and Records of a Database """
def __init__(self, parent, **kw):
super().__init__(parent,**kw)
self.tree = smarttkinter.ttk.SmartTreeview(self, show = "tree")
self.tree.pack(fill='both',expand = True)
class DescriptionPane(smarttkinter.Pane):
def __init__(self,parent,**kw):
super().__init__(parent,**kw)
self.box = scrolledtext.ScrolledText(self, state = "disabled")
self.box.pack(fill='both', expand = True)
class SQLPane(smarttkinter.Pane):
def __init__(self,parent,**kw):
super().__init__(parent,**kw)
self.box = scrolledtext.ScrolledText(self)
ttk.Label(self, text = "Replacements")
self.replacements = scrolledtext.ScrolledText(self, height = 4)
f = smarttkinter.ttk.SmartFrame(self)
self.executebutton = ttk.Button(f,text="Execute")
smarttkinter.masspack(f, side = 'left')
smarttkinter.masspack(self)
self.box.pack_configure(fill = 'both',expand = True)
######################################################################
"""
CONTROLLERS
"""
######################################################################
class Main(advancedtkinter.SequencingManager):
def cleanup(self):
super().cleanup()
self.parentpane.destroy()
def loadmain(self):
self.clearchild()
child = self.newchild(LoadDatabaseController)
child.show()
def loadexplorer(self,db):
if not isinstance(db,sql.Database):
messagebox.showerror("Invalid DB","Explorer recieved an Invalid Database Object")
return self.loadmain()
self.newchildmanager(ExplorerManager, db, eventmanager = None)
class LoadDatabaseController(advancedtkinter.Controller):
def __init__(self,pane = LoadDatabasePane,**kw):
super().__init__(pane = pane,**kw)
def startup(self):
super().startup()
p = self.pane
p.loadbutton.configure(command = self.loaddb)
p.quitbutton.configure(command = self.parent.cleanup)
def loaddb(self):
file = filedialog.askopenfilename()
if not file:
return
file = pathlib.Path(file).resolve()
if not file.exists():
messagebox.showerror("Invalid File", "File does not exist")
return
try:
file = shutil.copy(str(file),str(pathlib.Path.cwd().resolve() / "temp"))
except Exception as e:
messagebox.showerror("Copy Failure",f"Failed to create work-copy of database:\n{e}")
return
try:
db = sql.Database(file, row_factory = sql.dict_factory)
except:
messagebox.showerror("Invalid File", "Could not load Database")
return
return self.parent.loadexplorer(db)
class ExplorerManager(advancedtkinter.MultiManager):
def __init__(self,parentpane, db, *args,**kw):
super().__init__(parentpane, *args,**kw)
self.db = db
self.eventmanager.registerevent("<db_update>","type")
self.loadall()
def loadall(self):
p = self.parentpane
try:
toggle = self.addchild(ToggleController)
toggle.pane.homebutton.configure(command = self.parent.loadmain)
toggle.show()
left = tk.PanedWindow(self.parentpane)
left.pack(side='top',fill = 'both', expand = True)
child = self.addchild(TreeController, parentpane = left)
left.add(child.pane)
toggle.register(child)
top = tk.PanedWindow(left,orient = "vertical")
left.add(top)
child = self.addchild(DescriptionController, parentpane = top)
top.add(child.pane)
toggle.register(child)
child = self.addchild(SQLController, parentpane = top)
top.add(child.pane)
toggle.register(child)
except Exception as e:
import traceback
traceback.print_exc()
messagebox.showerror("Explorer Error",f"Failed to load the Explorer:{e}")
return self.parent.loadmain()
class ToggleController(advancedtkinter.Controller):
""" Bar along the top of the screen which toggles windows on and off """
def __init__(self,pane = TogglePane,**kw):
super().__init__(pane = pane, **kw)
self.counter = 1
def register(self,pane):
""" Adds a pane to the toggle list gui """
if hasattr(pane,"TOGGLENAME"):
name = pane.TOGGLENAME
else:
name = f"Pane {self.counter}"
self.counter += 1
def toggle(checkbox):
self.toggle(pane, not checkbox.get())
smarttkinter.Checkbox(self.pane, text = name, indicatoron = False, initialvalue = True, callback = toggle).pack(side='left',padx = 2)
def toggle(self,pane, display):
if display:
pane.show()
else: pane.hide()
class TreeController(advancedtkinter.Controller):
""" Sidebar to the Left which displays the Database Structure """
TOGGLENAME = "DB Tree"
def __init__(self, pane = TreePane, **kw):
super().__init__(pane = pane, **kw)
self.pane.pack_forget()
self.tree = self.p