| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
anak10thn/graphics-dojo-qt5
|
snapscroll/snapscroll.py
|
Python
|
gpl-2.0
| 3,660
| 0.007104
|
#############################################################################
##
## Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
## Contact: Qt Software Information (qt-info@nokia.com)
##
## This file is part of the Graphics Dojo project on Qt Labs.
##
## This file may be used under the terms of the GNU General Public
## License version 2.0 or 3.0 as published by the Free Software Foundation
## and appearing in the file LICENSE.GPL included in the packaging of
## this file. Please review the following information to ensure GNU
## General Public Licensing requirements will be met:
## http://www.fsf.org/licensing/licenses/info/GPLv2.html and
## http://www.gnu.org/copyleft/gpl.html.
##
## If you are unsure which license is appropriate for your use, please
## contact the sales department at qt-sales@nokia.com.
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
########################
|
#####################################################
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtWebKit import *
if QT_VERSION < 0x0040500:
sys.stderr.write("You need Qt 4.5 or newer to run this example.\n")
sys.exit(1)
SNAP_THRESHOLD = 10
class SnapView(QWebView):
def __init__(self):
QWebView.__init__(self)
self.snapEnabled = True
self.setWindowTitle(self.tr("Snap-scrolling is ON"))
# rects hit by the line, in ma
|
in frame's view coordinate
def hitBoundingRects(self, line):
hitRects = []
points = 8
delta = QPoint(line.dx() / points, line.dy() / points)
point = line.p1()
i = 0
while i < points - 1:
point += delta
hit = self.page().mainFrame().hitTestContent(point)
if not hit.boundingRect().isEmpty():
hitRects.append(hit.boundingRect())
i += 1
return hitRects
def keyPressEvent(self, event):
# toggle snapping
if event.key() == Qt.Key_F3:
self.snapEnabled = not self.snapEnabled
if self.snapEnabled:
self.setWindowTitle(self.tr("Snap-scrolling is ON"))
else:
self.setWindowTitle(self.tr("Snap-scrolling is OFF"))
event.accept()
return
# no snapping? do not bother...
if not self.snapEnabled:
QWebView.keyPressEvent(self, event)
return
previousOffset = self.page().mainFrame().scrollPosition()
QWebView.keyPressEvent(self, event)
if not event.isAccepted():
return
if event.key() == Qt.Key_Down:
ofs = self.page().mainFrame().scrollPosition()
jump = ofs.y() - previousOffset.y()
if jump == 0:
return
jump += SNAP_THRESHOLD
rects = self.hitBoundingRects(QLine(1, 1, self.width() - 1, 1))
i = 0
while i < len(rects):
j = rects[i].top() - previousOffset.y()
if j > SNAP_THRESHOLD and j < jump:
jump = j
i += 1
self.page().mainFrame().setScrollPosition(previousOffset + QPoint(0, jump))
if __name__ == "__main__":
app = QApplication(sys.argv)
view = SnapView()
view.load(QUrl("http://news.bbc.co.uk/text_only.stm"))
view.resize(320, 500)
view.show()
QMessageBox.information(view, "Hint", "Use F3 to toggle snapping on and off")
sys.exit(app.exec_())
|
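The snap rule in `keyPressEvent` above is self-contained enough to lift out and check without Qt. Below is a minimal, dependency-free sketch of that jump-selection logic; the function name `snap_jump` and the sample coordinates are illustrative, not from the original file.

```python
# Pure-Python restatement of the snap rule above: take the raw scroll jump,
# widen it by SNAP_THRESHOLD, then snap to the nearest element top that lies
# beyond the threshold.
SNAP_THRESHOLD = 10

def snap_jump(previous_y, new_y, rect_tops):
    jump = new_y - previous_y
    if jump == 0:
        return 0
    jump += SNAP_THRESHOLD
    for top in rect_tops:
        j = top - previous_y
        if SNAP_THRESHOLD < j < jump:
            jump = j
    return jump

# Scrolling from y=100 to y=140 with element tops at 115 and 160 snaps to
# the element at 115: a jump of 15, the smallest one beyond the threshold.
assert snap_jump(100, 140, [115, 160]) == 15
```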
wdv4758h/ZipPy
|
lib-python/3/pickletools.py
|
Python
|
bsd-3-clause
| 79,093
| 0.000582
|
'''"Executable documentation" for the pickle module.
Extensive comments about the pickle protocols and pickle-machine opcodes
can be found here. Some functions meant for external use:
genops(pickle)
Generate all the opcodes in a pickle, as (opcode, arg, position) triples.
dis(pickle, out=None, memo=None, indentlevel=4)
Print a symbolic disassembly of a pickle.
'''
import codecs
import pickle
import re
__all__ = ['dis', 'genops', 'optimize']
bytes_types = pickle.bytes_types
# Other ideas:
#
# - A pickle verifier: read a pickle and check it exhaustivel
|
y for
# well-formedness. dis() does a lot of this already.
#
# - A protocol identifier: examine a pickle and return its protocol number
# (== the highest .proto attr value among all the opcodes in the pickle).
# dis() already prints this info at the end.
#
# - A pickle optimizer: for example, tuple-building code is sometimes more
# elaborate than necessary, catering for the possibility that the tuple
#
|
is recursive. Or lots of times a PUT is generated that's never accessed
# by a later GET.
"""
"A pickle" is a program for a virtual pickle machine (PM, but more accurately
called an unpickling machine). It's a sequence of opcodes, interpreted by the
PM, building an arbitrarily complex Python object.
For the most part, the PM is very simple: there are no looping, testing, or
conditional instructions, no arithmetic and no function calls. Opcodes are
executed once each, from first to last, until a STOP opcode is reached.
The PM has two data areas, "the stack" and "the memo".
Many opcodes push Python objects onto the stack; e.g., INT pushes a Python
integer object on the stack, whose value is gotten from a decimal string
literal immediately following the INT opcode in the pickle bytestream. Other
opcodes take Python objects off the stack. The result of unpickling is
whatever object is left on the stack when the final STOP opcode is executed.
The memo is simply an array of objects, or it can be implemented as a dict
mapping little integers to objects. The memo serves as the PM's "long term
memory", and the little integers indexing the memo are akin to variable
names. Some opcodes pop a stack object into the memo at a given index,
and others push a memo object at a given index onto the stack again.
At heart, that's all the PM has. Subtleties arise for these reasons:
+ Object identity. Objects can be arbitrarily complex, and subobjects
may be shared (for example, the list [a, a] refers to the same object a
twice). It can be vital that unpickling recreate an isomorphic object
graph, faithfully reproducing sharing.
+ Recursive objects. For example, after "L = []; L.append(L)", L is a
list, and L[0] is the same list. This is related to the object identity
point, and some sequences of pickle opcodes are subtle in order to
get the right result in all cases.
+ Things pickle doesn't know everything about. Examples of things pickle
does know everything about are Python's builtin scalar and container
types, like ints and tuples. They generally have opcodes dedicated to
them. For things like module references and instances of user-defined
classes, pickle's knowledge is limited. Historically, many enhancements
have been made to the pickle protocol in order to do a better (faster,
and/or more compact) job on those.
+ Backward compatibility and micro-optimization. As explained below,
pickle opcodes never go away, not even when better ways to do a thing
get invented. The repertoire of the PM just keeps growing over time.
For example, protocol 0 had two opcodes for building Python integers (INT
and LONG), protocol 1 added three more for more-efficient pickling of short
integers, and protocol 2 added two more for more-efficient pickling of
long integers (before protocol 2, the only ways to pickle a Python long
took time quadratic in the number of digits, for both pickling and
unpickling). "Opcode bloat" isn't so much a subtlety as a source of
wearying complication.
Pickle protocols:
For compatibility, the meaning of a pickle opcode never changes. Instead new
pickle opcodes get added, and each version's unpickler can handle all the
pickle opcodes in all protocol versions to date. So old pickles continue to
be readable forever. The pickler can generally be told to restrict itself to
the subset of opcodes available under previous protocol versions too, so that
users can create pickles under the current version readable by older
versions. However, a pickle does not contain its version number embedded
within it. If an older unpickler tries to read a pickle using a later
protocol, the result is most likely an exception due to seeing an unknown (in
the older unpickler) opcode.
The original pickle used what's now called "protocol 0", and what was called
"text mode" before Python 2.3. The entire pickle bytestream is made up of
printable 7-bit ASCII characters, plus the newline character, in protocol 0.
That's why it was called text mode. Protocol 0 is small and elegant, but
sometimes painfully inefficient.
The second major set of additions is now called "protocol 1", and was called
"binary mode" before Python 2.3. This added many opcodes with arguments
consisting of arbitrary bytes, including NUL bytes and unprintable "high bit"
bytes. Binary mode pickles can be substantially smaller than equivalent
text mode pickles, and sometimes faster too; e.g., BININT represents a 4-byte
int as 4 bytes following the opcode, which is cheaper to unpickle than the
(perhaps) 11-character decimal string attached to INT. Protocol 1 also added
a number of opcodes that operate on many stack elements at once (like APPENDS
and SETITEMS), and "shortcut" opcodes (like EMPTY_DICT and EMPTY_TUPLE).
The third major set of additions came in Python 2.3, and is called "protocol
2". This added:
- A better way to pickle instances of new-style classes (NEWOBJ).
- A way for a pickle to identify its protocol (PROTO).
- Time- and space- efficient pickling of long ints (LONG{1,4}).
- Shortcuts for small tuples (TUPLE{1,2,3}).
- Dedicated opcodes for bools (NEWTRUE, NEWFALSE).
- The "extension registry", a vector of popular objects that can be pushed
efficiently by index (EXT{1,2,4}). This is akin to the memo and GET, but
the registry contents are predefined (there's nothing akin to the memo's
PUT).
Another independent change with Python 2.3 is the abandonment of any
pretense that it might be safe to load pickles received from untrusted
parties -- no sufficient security analysis has been done to guarantee
this and there isn't a use case that warrants the expense of such an
analysis.
To this end, all tests for __safe_for_unpickling__ or for
copyreg.safe_constructors are removed from the unpickling code.
References to these variables in the descriptions below are to be seen
as describing unpickling in Python 2.2 and before.
"""
# Meta-rule: Descriptions are stored in instances of descriptor objects,
# with plain constructors. No meta-language is defined from which
# descriptors could be constructed. If you want, e.g., XML, write a little
# program to generate XML from the objects.
##############################################################################
# Some pickle opcodes have an argument, following the opcode in the
# bytestream. An argument is of a specific type, described by an instance
# of ArgumentDescriptor. These are not to be confused with arguments taken
# off the stack -- ArgumentDescriptor applies only to arguments embedded in
# the opcode stream, immediately following an opcode.
# Represents the number of bytes consumed by an argument delimited by the
# next newline character.
UP_TO_NEWLINE = -1
# Represents the number of bytes consumed by a two-argument opcode where
# the first argument gives the number of bytes in the second argument.
TAKEN_FROM_ARGUMENT1 = -2 # num bytes is 1-byte unsigned int
TAKEN_FROM_ARGUMENT4 = -3 # num bytes is 4-byte signed little-endian int
class ArgumentDescriptor(object):
__slots__ = (
# name of descriptor record, also a module global name; a st
|
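This sample is cut off at the column's length limit, but the two entry points its docstring names, `genops` and `dis`, are standard-library APIs, so a short self-contained demonstration is safe to give; the pickled list is arbitrary.

```python
# Disassemble a protocol-0 pickle of a small list, then walk the same
# opcodes programmatically as (opcode, arg, position) triples.
import pickle
import pickletools

data = pickle.dumps([1, 2, 2], protocol=0)
pickletools.dis(data)   # symbolic disassembly; the last line reports the
                        # highest protocol among the opcodes (0 here)
for opcode, arg, pos in pickletools.genops(data):
    print(pos, opcode.name, repr(arg))
```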
easytaxibr/redash
|
tests/handlers/test_authentication.py
|
Python
|
bsd-2-clause
| 2,187
| 0.003201
|
from tests import BaseTestCase
import mock
import time
from redash.models import User
from redash.authentication.account import invite_token
from tests.handlers import get_request, post_request
class TestInvite(BaseTestCase):
def test_expired_invite_token(self):
real_now = time.time()
with mock.patch('time.time') as patched_time:
patched_time.return_value = real_now - (7 * 24 * 3600) - 10
token = invit
|
e_token(self.factory.user)
response = get_request('/invite/{}'.format(token), org=self.factory.org)
self.assertEqual(response.status_code, 400)
def test_invalid_invite_token(self):
response = get_request('/invite/badtoken', org=self.factory.org)
self.assertEqual(response.stat
|
us_code, 400)
def test_valid_token(self):
token = invite_token(self.factory.user)
response = get_request('/invite/{}'.format(token), org=self.factory.org)
self.assertEqual(response.status_code, 200)
def test_already_active_user(self):
pass
class TestInvitePost(BaseTestCase):
def test_empty_password(self):
token = invite_token(self.factory.user)
response = post_request('/invite/{}'.format(token), data={'password': ''}, org=self.factory.org)
self.assertEqual(response.status_code, 400)
def test_invalid_password(self):
token = invite_token(self.factory.user)
response = post_request('/invite/{}'.format(token), data={'password': '1234'}, org=self.factory.org)
self.assertEqual(response.status_code, 400)
def test_bad_token(self):
response = post_request('/invite/{}'.format('jdsnfkjdsnfkj'), data={'password': '1234'}, org=self.factory.org)
self.assertEqual(response.status_code, 400)
def test_already_active_user(self):
pass
def test_valid_password(self):
token = invite_token(self.factory.user)
password = 'test1234'
response = post_request('/invite/{}'.format(token), data={'password': password}, org=self.factory.org)
self.assertEqual(response.status_code, 302)
user = User.get_by_id(self.factory.user.id)
self.assertTrue(user.verify_password(password))
|
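The expiry test above depends on an ordering subtlety: the real clock has to be read before `time.time` is patched, otherwise the arithmetic builds on the mock itself. A standalone sketch of the safe pattern, independent of redash (the seven-day window mirrors the test's arithmetic):

```python
# Freeze time.time() roughly seven days in the past, "issue" a timestamp,
# then confirm it reads as expired against the real clock.
import time
from unittest import mock

EXPIRY_SECONDS = 7 * 24 * 3600
real_now = time.time()                 # capture before patching

with mock.patch('time.time') as patched_time:
    patched_time.return_value = real_now - EXPIRY_SECONDS - 10
    issued_at = time.time()            # reads as 7 days (and 10 s) old

assert real_now - issued_at > EXPIRY_SECONDS
```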
bmwiedemann/linuxcnc-mirror
|
lib/python/pyngcgui.py
|
Python
|
lgpl-2.1
| 128,675
| 0.015924
|
#!/usr/bin/env python
# Notes:
# 1) ini file items:
# NGCGUI_PREAMBLE
# NGCGUI_SUBFILE
# NGCGUI_POSTAMBLE
# NGCGUI_OPTIONS
# nonew disallow new tabs
# noremove disallow removal of tabs
# noauto don't automatically send result file
# noexpand (supported by ngcgui, not by pyngcgui)
# nom2 (no m2 terminator (use %))
# 2) To make pyngcgui embedded fit in small screen:
# Try:
# max_parms=10|20|30 (will reject otherwise valid subfiles)
# image_width=240
# reduce subroutine parm name lengths and/or comment string length
#------------------------------------------------------------------------------
# Copyright: 2013-4
# Author: Dewey Garrett <dgarrett@panix.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#------------------------------------------------------------------------------
""" python classes to implement an ngcgui-like application
These ini file items are compatible with ngcgui.tcl:
[DISPLAY]NGCGUI_PREAMBLE single specifier
[DISPLAY]NGCGUI_POSTAMBLE single specifier
[DISPLAY]NGCGUI_SUBFILE multiples allowed, use "" for Custom tab
[DISPLAY]NGCGUI_OPTIONS
noremove disallow tabpage removal
nonew disallow tabpage creation
noiframe don't show image in tabpage
noauto don't automatically send result file
[DISPLAY]PROGRAM_PREFIX subroutine path: start
[RS274NGC]SUBROUTINE_PATH subroutine path: middle
[WIZARD]WIZARD_ROOT subroutine path: end
[DISPLAY]NGCGUI_FONT not used
[DISPLAY]TKPKG not applicable
"""
from types import * # IntType etc
import os
import sys
import re
import gtk
import getopt
import datetime
import subprocess
import linuxcnc
import hashlib
import gobject
import glob
import shutil
import popupkeyboard
import exceptions # for debug printing
import traceback # for debug printing
import hal # notused except for debug
from gladevcp import hal_actions
g_ui_dir = linuxcnc.SHARE + "/linuxcnc"
# determine if glade interface designer is running
# in order to prevent connection of most signals
g_is_glade = False
if ( ('glade' in sys.argv[0])
and ('gladevcp' not in sys.argv[0])):
for d in os.environ['PATH'].split(':'):
f = os.path.join(d,sys.argv[0])
if ( os.path.isfile(f)
and os.access(f, os.X_OK)):
g_is_glade = True
break
g_alive = not g_is_glade
import gettext
LOCALEDIR = linuxcnc.SHARE + "/locale"
gettext.install("linuxcnc", localedir=LOCALEDIR, unicode=True)
try:
import pygtk
pygtk.require('2.0')
except ImportError,msg:
print('import pygtk failed: %s' % msg)
pass
#------------------------------------------------------------------------------
g_debug = False
g_verbose = False
g_nom2 = False # True for no m2 terminator (use %)
g_strict = False # enforce additional subfile formatting requirements
g_tmode = 0 # for development
g_entry_height = 20 # default parm entry height
# (override for popupkeyboard)
g_big_height = 35 # increased parm entry height value
g_image_width = 320 # image size
g_image_height = 240 # image size
g_check_interval = 2 # periodic check (seconds)
g_label_id = 0 # subroutine labels modifier when expanding in place
g_progname = os.path.splitext(os.path.basename(__file__))[0]
g_dtfmt = "%y%m%d:%H.%M.%S"
g_stat = None # linuxcnc.stat object
g_popkbd = None # PopupKeyboard object
g_candidate_files = None # CandidateFiles object
g_send_function = None # function object f(fname) return True for success
g_tab_controls_loc ='top' # 'top' | 'bottom'
g_keyboardfile = os.path.join(g_ui_dir,'popupkeyboard.ui')
g_control_font = None
g_font_users = []
g_auto_file_ct = 1
INTERP_SUB_PARAMS = 30 # (1-based) conform to:
# src/emc/rs274ngc/interp_internal.hh:#define INTERP_SUB_PARAMS 30
g_max_parm = INTERP_SUB_PARAMS
g_max_msg_len = 500 # limit popup msg len for errant gcmc input
g_gcmc_exe = None
g_gcmc_funcname = 'tmpgcmc'
g_gcmc_id = 0
black_color = gtk.gdk.color_parse('black')
white_color = gtk.gdk.color_parse('white')
error_color = gtk.gdk.color_parse('red')
green_color = gtk.gdk.color_parse('green')
blue_color = gtk.gdk.color_parse('blue')
yellow_color = gtk.gdk.color_parse('yellow')
purple_color = gtk.gdk.color_parse('purple')
feature_color = gtk.gdk.color_parse('lightslategray')
label_normal_color = gtk.gdk.color_parse('lightsteelblue2')
label_active_color = gtk.gdk.color_parse('ivory2')
base_entry_color = gtk.gdk.color_parse('azure1')
fg_created_color = gtk.gdk.color_parse('palegreen')
fg_multiple_color = gtk.gdk.color_parse('cyan')
fg_normal_color = black_color
bg_dvalue_color = gtk.gdk.color_parse('darkseagreen2')
#------------------------------------------------------------------------------
def exception_show(ename,detail,src=''):
print('\n%s:' % src )
print('Exception: %s' % ename )
print(' detail: %s' % detail )
if type(detail) == exceptions.ValueError:
for x in detail:
if type(x) in (StringType, UnicodeType):
print('detail(s):',x)
else:
for y in x:
print('detail(d):',y,)
elif type(detail) == StringType:
print('detail(s):',detail)
elif type(detail) == ListType:
for x in detail:
print('detail(l):',x)
else:
print(ename,detail)
if g_debug:
|
#print(sys.exc_info())
print( traceback.format_exc())
def save_a_copy(fname,archive_dir='/tmp/old_ngc'):
if fname is None:
return
try:
if not os.path.exists(archive_dir):
os.mkdir(archive_dir)
shutil.copyfile(fname
,os.path.join(archive_dir,dt() + '_' + os.path.basename(fname)))
except IOError,msg:
print(_('save_a_copy: IOError copying file to %
|
s') % archive_dir)
print(msg)
except Exception, detail:
exception_show(Exception,detail,src='save_a_copy')
print(traceback.format_exc())
sys.exit(1)
def get_linuxcnc_ini_file():
ps = subprocess.Popen('ps -C linuxcncsvr --no-header -o args'.split(),
stdout=subprocess.PIPE
)
p,e = ps.communicate()
if ps.returncode:
print(_('get_linuxcnc_ini_file: stdout= %s') % p)
print(_('get_linuxcnc_ini_file: stderr= %s') % e)
return None
ans = p.split()[p.split().index('-ini')+1]
return ans
def dummy_send(filename):
return False # always fail
def default_send(filename):
import gladevcp.hal_filechooser
try:
s = linuxcnc.stat().poll()
except:
user_message(mtype=gtk.MESSAGE_ERROR
,title=_('linuxcnc not running')
,msg = _('cannot send, linuxcnc not running'))
return False
try:
fchooser = gladevcp.hal_filechooser.EMC_Action_Open()
fchooser._hal_init()
fchooser._load_file(filename)
return True
except:
return False
def send_to_axis(filename): # return True for success
# NB: file with errors may hang in axis gui
s = subprocess.Popen(['axis-remote',filename]
|
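`get_linuxcnc_ini_file()` above locates the running LinuxCNC instance by asking `ps` for the `linuxcncsvr` command line and taking the token after `-ini`. The extraction is one line of list indexing; a hedged illustration on a made-up argument string:

```python
# The token that follows '-ini' in the server's argument list is the path
# to the active ini file (the path below is a placeholder).
p = 'linuxcncsvr -ini /home/user/configs/sim/axis.ini'
ans = p.split()[p.split().index('-ini') + 1]
assert ans == '/home/user/configs/sim/axis.ini'
```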
dymkowsk/mantid
|
Framework/PythonInterface/test/python/mantid/kernel/PythonPluginsTest.py
|
Python
|
gpl-3.0
| 1,990
| 0.002513
|
from __future__ import (absolute_import, division, print_function)
import unittest
import os
import shutil
import sys
import mantid.kernel.plugins as plugins
from mantid.api import AlgorithmFactory, AlgorithmManager
__TESTALG__ = \
"""from mantid.api import PythonAlgorithm, AlgorithmFactory
class TestPyAlg(PythonAlgorithm):
def PyInit(self):
pass
def PyExec(self):
pass
AlgorithmFactory.subscribe(TestPyAlg)
"""
class PythonPluginsTest(unittest.TestCase):
def setUp(self):
# Make a test directory and test plugin
self._testdir = os.path.join(os.getcwd(), 'PythonPluginsTest_TmpDir')
try:
os.mkdir(self._testdir)
except OSError:
pass # Already exists, maybe it was not removed when a test failed?
filename = os.path.join(self._testdir, 'TestPyAlg.py')
if not os.path.exists(filename):
plugin = open(filename, 'w')
plugin.write(__TESTALG__)
plugin.clos
|
e()
def tearDown(self):
try:
shutil.rmtree(self._testdir)
except shutil.Error:
pass
def test_loading_python_algorithm_increases_registered_algs_by_one(self):
loaded = plugins.load(self._testdir)
self.assertTrue(len(loaded) >
|
0)
expected_name = 'TestPyAlg'
# Has the name appeared in the module dictionary?
self.assertTrue(expected_name in sys.modules)
# Do we have the registered algorithm
algs = AlgorithmFactory.getRegisteredAlgorithms(True)
self.assertTrue(expected_name in algs)
# Can it be created?
try:
test_alg = AlgorithmManager.createUnmanaged(expected_name)
self.assertEquals(expected_name, test_alg.name())
self.assertEquals(1, test_alg.version())
except RuntimeError as exc:
self.fail("Failed to create plugin algorithm from the manager: '%s' " %s)
if __name__ == '__main__':
unittest.main()
|
staslev/incubator-beam
|
sdks/python/apache_beam/metrics/metric_test.py
|
Python
|
apache-2.0
| 5,205
| 0.003842
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from apache_beam.metrics.cells import DistributionData
from apache_beam.metrics.execution import MetricKey
from apache_beam.metrics.execution import MetricsContainer
from apache_beam.metrics.execution import MetricsEnvironment
from apache_beam.metrics.metric import MetricResults
from apache_beam.metrics.metric import Metrics
from apache_beam.metrics.metric import MetricsFilter
from apache_beam.metrics.metricbase import MetricName
class NameTest(unittest.TestCase):
def test_basic_metric_name(self):
name = MetricName('namespace1', 'name1')
self.assertEqual(name.namespace, 'namespace1')
self.assertEqual(name.name, 'name1')
self.assertEqual(name, MetricName('namespace1', 'name1'))
key = MetricKey('step1', name)
self.assertEqual(key.step, 'step1')
self.assertEqual(key.metric.namespace, 'namespace1')
self.assertEqual(key.metric.name, 'name1')
self.assertEqual(key, MetricKey('step1', MetricName('namespace1', 'name1')))
class MetricResultsTest(unittest.TestCase):
def test_metric_filter_namespace_matching(self):
filter = MetricsFilter().with_namespace('ns1')
name = MetricName('ns1', 'name1')
key = MetricKey('step1', name)
self.assertTrue(MetricResults.matches(filter, key))
def test_metric_filter_name_matching(self):
filter = MetricsFilter().with_name('name1').with_namespace('ns1')
name = MetricName('ns1', 'name1')
key = MetricKey('step1', name)
self.assertTrue(MetricResults.matches(filter, key))
filter = MetricsFilter().with_name('name1')
name = MetricName('ns1', 'name1')
key = MetricKey('step1', name)
self.assertTrue(MetricResults.matches(filter, key))
def test_metric_filter_step_matching(self):
filter = MetricsFilter().with_step('Top1/Outer1/Inner1')
name = MetricName('ns1', 'name1')
key = MetricKey('Top1/Outer1/Inner1', name)
self.assertTrue(MetricResults.matches(filter, key))
filter = MetricsFilter().with_step('step1')
name = MetricName('ns1', 'name1')
key = MetricKey('step1', name)
self.assertTrue(MetricResults.matches(filter, key))
filter = MetricsFilter().with_step('Top1/Outer1')
name = MetricName('ns1', 'name1')
key = MetricKey('Top1/Outer1/Inner1', name)
self.assertTrue(MetricResults.matches(filter, key))
filter = MetricsFilter().with_step('Top1/Inner1')
name = MetricName('ns1', 'name1')
key = MetricKey('Top1/Outer1/Inner1', name)
self.assertFalse(MetricResults.matches(filter, key))
class MetricsTest(unittest.TestCase):
def test_get_namespace_class(self):
class MyClass(object):
pass
self.assertEqual('{}.{}'.format(MyClass.__module__, MyClass.__name__),
Metrics.get_namespace(MyClass))
def test_get_namespace_string(self):
namespace = 'MyNamespace'
self.assertEqual(namespace, Metrics.get_namespace(namespace))
def test_get_namespace_error(self):
with self.assertRaises(ValueError):
Metrics.get_namespace(object())
def test_counter_empty_name(self):
with self.assertRaises(ValueError):
Metrics.counter("namespace", "")
def test_counter_empty_namespace(self):
with self.assertRaises(ValueError):
Metrics.counter("", "names")
def test_distribution_empty_name(self):
with self.assertRaises(ValueError):
Metrics.distribution("namespace", "")
def test_distribution_empty_namespace(self):
with self.assertRaises(ValueError):
Metrics.distribution("", "names")
def test_create_counter_distribution(self):
MetricsEnvironment.set_current_container(MetricsContainer('mystep'))
counter_ns = 'aCounterNamespace'
distro_ns = 'aDistributionNamespace'
|
name = 'a_name'
counter = Metrics.counter(counter_ns, name)
distro = Metrics.distribution(distro_ns, name)
counter.inc(10)
counter.dec(3)
distro.update(10)
distro.update(2)
self.assertTrue(isinstance(counter, Metrics.DelegatingCounter))
self.assertTrue(isinstance(distro, Metrics.DelegatingDistribution))
del distro
del counter
container = MetricsEnvironment.current_container()
self.assert
|
Equal(
container.counters[MetricName(counter_ns, name)].get_cumulative(),
7)
self.assertEqual(
container.distributions[MetricName(distro_ns, name)].get_cumulative(),
DistributionData(12, 2, 2, 10))
if __name__ == '__main__':
unittest.main()
|
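The final assertion in `test_create_counter_distribution` pins down what `DistributionData(12, 2, 2, 10)` encodes after updates of 10 and 2: reading the arithmetic back, the fields are (sum, count, min, max). A small stand-in class with the same bookkeeping, purely for illustration and not Beam's implementation:

```python
# Minimal distribution cell: track sum, count, min and max across updates.
class DistroSketch(object):
    def __init__(self):
        self.sum, self.count = 0, 0
        self.min, self.max = None, None

    def update(self, value):
        self.sum += value
        self.count += 1
        self.min = value if self.min is None else min(self.min, value)
        self.max = value if self.max is None else max(self.max, value)

d = DistroSketch()
d.update(10)
d.update(2)
assert (d.sum, d.count, d.min, d.max) == (12, 2, 2, 10)
```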
hforge/ikaaro
|
scripts/icms-stop.py
|
Python
|
gpl-3.0
| 1,970
| 0
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright (C) 2005-2007 Juan David Ibáñez Palomar <jdavid@itaapy.com>
# Copyright (C) 2007 Sylvain Taverne <sylvain@itaapy.com>
# Copyright (C) 2008 David Versmisse <david.versmisse@itaapy.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Import from the Standard Library
from sys import exit
from optparse import OptionParser
# Import from itools
import itools
# Import from ikaaro
from ikaaro.server import stop_server
if __name__ == '__main__':
# The command line parser
usage = '%prog TARGET [TARGET]*'
version = 'itools %s' % itools.__version__
description = ('Stops the web server that is publishing the TARGET'
' ikaaro instance (if it is running). Accepts'
' several TARGETs at once, to stop several servers.')
parser = OptionParser(usage, version=version, description=description)
parser.add_option(
'--force', action="store_true", default=False,
help="Emits SIGTERM instead of SIGINT signal.")
op
|
tions, args = parser.parse_args()
if len(args) == 0:
parser.error('incorrect number of arguments')
# Action!
for target in args:
try:
stop_server(target)
except LookupError:
print('Error: {} instance does n
|
ot exist'.format(target))
exit(1)
# Ok
exit(0)
|
python27/AlgorithmSolution
|
ProjectEuler/51_100/Problem#74.py
|
Python
|
agpl-3.0
| 411
| 0.007299
|
import math
def digitFactorialSum(n):
return sum([math.factorial(i
|
nt(x)) for x in str(n)])
def repeatedLength(n):
repeatedList = []
while n not in repeatedList:
repeatedList.append(n)
n = digitFactorialSum(n)
return len(repeatedList)
if __name__ == "__main__":
cnt = 0
for i in range(1, 1000000):
if repeatedLength(i)
|
== 60:
cnt += 1
print cnt
|
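A quick sanity check of the helper above, restated in Python 3 syntax with known values: 145 is a fixed point of the digit-factorial map, and 69 starts the long chain the problem is built around.

```python
import math

def digit_factorial_sum(n):
    return sum(math.factorial(int(x)) for x in str(n))

# 1! + 4! + 5! = 1 + 24 + 120 = 145, so the chain from 145 has length 1.
assert digit_factorial_sum(145) == 145
# First step of the 69-chain: 6! + 9! = 720 + 362880 = 363600.
assert digit_factorial_sum(69) == 363600
```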
Bargetor/chestnut
|
bargetor/notifiction/__init.py
|
Python
|
gpl-2.0
| 163
| 0.006135
|
class NotifyCenter(object):
"""docstring for NotifyCenter"""
def __init__(self, arg):
sup
|
er(NotifyCent
|
er, self).__init__()
self.arg = arg
|
asimshankar/tensorflow
|
tensorflow/contrib/distribute/python/parameter_server_strategy.py
|
Python
|
apache-2.0
| 21,809
| 0.006098
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes implementing a multi-worker ps DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.contrib.distribute.python import mirrored_strategy
from tensorflow.python.distribute import cross_device_ops as cross_device_ops_lib
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import multi_worker_util
from tensorflow.python.distribute import values
from tensorflow.python.eager import context
from tensorflow.python.framework import device as tf_device
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import device_setter
from tensorflow.python.util import nest
_LOCAL_CPU = "/device:CPU:0"
_LOCAL_GPU_0 = "/device:GPU:0"
# TODO(yuefengz): maybe cache variables on local CPU.
# TODO(yuefengz): we may want to set session options to disallow communication
# between workers.
class ParameterServerStrategy(distribute_lib.DistributionStrategy):
"""A parameter server DistributionStrategy.
This strategy class works for both local training and between-graph replicated
training for multiple workers. If `cluster_spec` is specified, either passed
in to __init__() method or parsed from the
["TF_CONFIG" environment
variable](https://www.tensorflow.org/api_docs/python/tf/estimator/RunConfig),
variables and updates to those variables are assigned to parameter servers and
other operations are assigned to workers. If `cluster_spec` is not set, it
becomes local training where variables are assigned to local CPU or the only
GPU. When each worker has more than one GPU, operations will be replicated on
these GPUs. In both cases, operations are replicated but variables are not and
these workers share a common view for which parameter server a variable is
assigned to.
This class assumes between-graph replication will be used and works on a graph
for a particular worker. Note that each graph and worker is independent.
This means that while each worker will synchronously compute a single gradient
update across all GPUs, updates between workers proceed asynchronously.
Operations that occur only on the first replica (such as incrementing the
global step), will occur on the first replica *of every worker*.
It is expected to call `call_for_each_replica(fn, ...)` for any
operations which potentially can be replicated across replicas (i.e. multiple
GPUs) even if there is only CPU or one GPU. When defining the `fn`, extra
caution needs to be taken:
1) Always use `tf.get_variable` instead of `tf.Variable` which is not able
to refer to the same variable on different replicas.
2) It is generally not recommended to open a device scope under the strategy's
scope. A device scope (i.e. calling `tf.device`) will be merged with or
override the device for operations but will not change the device for
variables.
3) It is also not recommended to open a colocation scope (i.e. calling
`tf.colocate_with`) under the strategy's scope. For colocating variables,
use `distribution.colocate_vars_with` instead. Colocation of ops will possibly
create conflicts of device assignment.
"""
def __init__(self, num_gpus_per_worker=0):
"""Initializes this strategy.
Args:
num_gpus_per_worker: number of local GPUs or GPUs per worker, the default
is 0 meaning CPU only.
Raises:
ValueError: if `cluster_spec` is given but `task_type` or `task_id` is
not.
"""
super(ParameterServerStrategy, self).__init__(
ParameterServerExtended(self, num_gpus_per_worker))
class ParameterServerExtended(distribute_lib.DistributionStrategyExtended):
"""Implementation of ParameterServerStrategy."""
def __init__(self, container_strategy, num_gpus_per_worker):
super(ParameterServerExtended, self).__init__(container_strategy)
self._num_gpus_per_worker = num_gpus_per_worker
self._initialize_local(num_gpus_per_worker)
# We typically don't n
|
eed to do all-reduce in this strategy.
self._cross_device_ops = (
cross_device_ops_lib.ReductionToOneDeviceCrossDeviceOps(
reduce_to_device=_LOCAL_CPU))
def _initialize_multi_
|
worker(self, num_gpus_per_worker, cluster_spec,
task_type, task_id):
"""Initialize devices for multiple workers.
It creates variable devices and compute devices. Variables and operations
will be assigned to them respectively. We have one compute device per
replica. The variable device is a device function or device string. The
default variable device assigns variables to parameter servers in a
round-robin fashion.
Args:
num_gpus_per_worker: number of local GPUs or GPUs per worker.
cluster_spec: a dict, ClusterDef or ClusterSpec object specifying the
cluster configurations.
task_type: the current task type.
task_id: the current task id.
Raises:
ValueError: if the cluster_spec doesn't have ps jobs.
"""
assert cluster_spec
if not task_type or task_id is None:
raise ValueError("When `cluster_spec` is given, you must also specify "
"`task_type` and `task_id`")
cluster_spec = multi_worker_util.normalize_cluster_spec(cluster_spec)
self._task_type = task_type
self._task_id = task_id
self._worker_device = "/job:%s/task:%d" % (self._task_type, self._task_id)
# Define compute devices which is a list of device strings and one for each
# replica. When there are GPUs, replicate operations on these GPUs.
# Otherwise, place operations on CPU.
if num_gpus_per_worker > 0:
self._compute_devices = tuple(
"%s/device:GPU:%d" % (self._worker_device, i)
for i in range(num_gpus_per_worker)
)
else:
self._compute_devices = (self._worker_device,)
self._compute_devices = tuple(
map(device_util.resolve, self._compute_devices))
self._canonical_compute_device_set = set(self._compute_devices)
# In distributed mode, place variables on ps jobs in a round-robin fashion.
# Note that devices returned from `replica_device_setter` are not
# canonical and therefore we don't canonicalize all variable devices to
# make them consistent.
# TODO(yuefengz): support passing a strategy object to control variable
# assignment.
# TODO(yuefengz): merge the logic of replica_device_setter into this
# class.
num_ps_replicas = len(cluster_spec.as_dict().get("ps", []))
if num_ps_replicas == 0:
raise ValueError("The cluster spec needs to have `ps` jobs.")
self._variable_device = device_setter.replica_device_setter(
ps_tasks=num_ps_replicas,
worker_device=self._worker_device,
merge_devices=True,
cluster=cluster_spec)
# The `_parameter_devices` is needed for the `parameter_devices` property
# and is a list of all variable devices. Here parameter devices are all
# tasks of the "ps" job.
self._parameter_devices = tuple(map("/job:ps/task:{}".format,
range(num_ps_replicas)))
# Add a default device so that ops without specified devices will not end up
# on othe
|
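The class docstring above prescribes a usage pattern: create variables with `tf.get_variable` and route replicated work through `call_for_each_replica`. Below is a sketch of local (no `cluster_spec`) usage under those rules. It targets the contrib-era TF 1.x API this file belongs to; the import path and the exact call site (`strategy.extended.call_for_each_replica`) are assumptions that shifted between releases.

```python
import tensorflow as tf
from tensorflow.contrib.distribute.python.parameter_server_strategy import (
    ParameterServerStrategy)

# Local training: variables land on the local CPU (or the only GPU).
strategy = ParameterServerStrategy(num_gpus_per_worker=0)

def step_fn():
    # tf.get_variable, not tf.Variable, so every replica resolves to the
    # same parameter (point 1 of the docstring's cautions).
    v = tf.get_variable('w', shape=[], initializer=tf.zeros_initializer())
    return v.assign_add(1.0)

with strategy.scope():
    update_op = strategy.extended.call_for_each_replica(step_fn)
```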
josanly/python-module-project
|
resources/setup.template.py
|
Python
|
gpl-3.0
| 419
| 0.002387
|
from setuptools import setup
from sample import __version__, __author__
setup(name='sample',
version=__version__,
description='an example of a P
|
ython module',
url='http://github.com/josanly/python-module-project',
author=__author__,
author_email='josso.adrien@gmail.com',
license='GP
|
L v3.0',
packages=['sample'],
python_requires='>=PYTHON_VERSION',
zip_safe=False)
|
rgerkin/neuroConstruct
|
pythonNeuroML/nCUtils/ncutils.py
|
Python
|
gpl-2.0
| 66,510
| 0.014705
|
# -*- coding: utf-8 -*-
#
#
# File to preform some standard tasks on a neuroConstruct project
#
# Author: Padraig Gleeson
#
# This file has been developed as part of the neuroConstruct project
# This work has been funded by the Medical Research Council and the
# Wellcome Trust
#
#
import sys
import time
import subprocess
from java.io import File
from ucl.physiol.neuroconstruct.cell.utils import CellTopologyHelper
from ucl.physiol.neuroconstruct.cell.compartmentalisation import GenesisCompartmentalisation
from ucl.physiol.neuroconstruct.cell.compartmentalisation import OriginalCompartmentalisation
from ucl.physiol.neuroconstruct.gui.plotter import PlotManager
from ucl.physiol.neuroconstruct.gui.plotter import PlotCanvas
from ucl.physiol.neuroconstruct.dataset import DataSet
from ucl.physiol.neuroconstruct.neuron import NeuronFileManager
from ucl.physiol.neuroconstruct.neuron.NeuronSettings import DataSaveFormat
from ucl.physiol.neuroconstruct.nmodleditor.processes import ProcessManager
from ucl.physiol.neuroconstruct.neuroml import NeuroMLConstants
from ucl.physiol.neuroconstruct.neuroml import LemsConstants
from ucl.physiol.neuroconstruct.project import SimPlot
from ucl.physiol.neuroconstruct.project import ProjectManager
from ucl.physiol.neuroconstruct.simulation import SimulationData
from ucl.physiol.neuroconstruct.simulation import SpikeAnalyser
from ucl.physiol.neuroconstruct.utils.units import UnitConverter
from ucl.physiol.neuroconstruct.utils import NumberGenerator
from ucl.physiol.neuroconstruct.hpc.mpi import MpiSettings
from ucl.physiol.neuroconstruct.pynn.PynnFileManager import PynnSimulator
from ucl.physiol.neuroconstruct.neuroml import NeuroMLFileManager
def loadMepFile(mepFile, scale=1):
# Load an OMV mep file, see https://github.com/OpenSourceBrain/osb-model-validation
spike_times = {}
mep_file = open(mepFile)
exp_name = ""
for line in mep_file:
line = line.strip()
if line.startswith('system:'):
pass
elif line.startswith('expected:'):
pass
elif line.startswith('spike times: ['):
times = line[14:-1].split(',')
tt = []
for time in times:
tt.append(float(time.strip())*scale)
spike_times[exp_name] = tt
else:
exp_name = line[:-1]
return spike_times
def generateNeuroML2(projFile,
simConfigs,
neuroConstructSeed = 1234,
seed = 1234,
verbose = True):
projectManager = ProjectManager()
project = projectManager.loadProject(projFile)
nmlfm = NeuroMLFileManager(project)
genDir = File(projFile.getParentFile(), "generatedNeuroML2")
if verbose: print("Generating NeuroML 2 files for project %s, sim configs: %s, into %s"%(project.getProjectName(), str(simConfigs), genDir.getAbsolutePath()))
for simConfigName in simConfigs:
projectManager.doGenerate(simConfigName, neuroConstructSeed)
while projectManager.isGenerating():
if verbose: print("Waiting for the project to be generated with Simulation Configuration: "+simConfigName)
time.sleep(5)
simConfig = project.simConfigInfo.getSimConfig(simConfigName)
nmlfm.generateNeuroMLFiles(simConfig,
NeuroMLConstants.NeuroMLVersion.getLatestVersion(),
LemsConstants.LemsOption.LEMS_WITHOUT_EXECUTE_MODEL,
OriginalCompartmentalisation(),
seed,
False,
True,
genDir,
"GENESIS Physiological Units",
False)
info = "These files are not the source files for the model, they ha
|
ve been generated from the source of the model in the neuroConstruct directory.\n"+ \
"These have been added to provide examples of valid NeuroML files for testing applications & the OSB website and may be removed at any time."
readme = open(genDir.getAbsolutePath()+'/README--GENERA
|
TED-FILES', 'w')
readme.write(info)
readme.close()
def generateNeuroML1(projFile,
simConfigs,
neuroConstructSeed = 1234,
seed = 1234,
verbose = True):
projectManager = ProjectManager()
project = projectManager.loadProject(projFile)
nmlfm = NeuroMLFileManager(project)
genDir = File(projFile.getParentFile(), "generatedNeuroML")
if verbose: print("Generating NeuroML v1.x files for project %s, sim configs: %s, into %s"%(project.getProjectName(), str(simConfigs), genDir.getAbsolutePath()))
for simConfigName in simConfigs:
projectManager.doGenerate(simConfigName, neuroConstructSeed)
while projectManager.isGenerating():
if verbose: print("Waiting for the project to be generated with Simulation Configuration: "+simConfigName)
time.sleep(5)
simConfig = project.simConfigInfo.getSimConfig(simConfigName)
nmlfm.generateNeuroMLFiles(simConfig,
NeuroMLConstants.NeuroMLVersion.NEUROML_VERSION_1,
LemsConstants.LemsOption.LEMS_WITHOUT_EXECUTE_MODEL,
OriginalCompartmentalisation(),
seed,
False,
True,
genDir,
"GENESIS Physiological Units",
False)
info = "These files are not the source files for the model, they have been generated from the source of the model in the neuroConstruct directory.\n"+ \
"These have been added to provide examples of valid NeuroML files for testing applications & the OSB website and may be removed at any time."
readme = open(genDir.getAbsolutePath()+'/README--GENERATED-FILES', 'w')
readme.write(info)
readme.close()
def getUnusedSimRef(project, simRefPrefix="P_Sim_"):
index = 0
while File( "%s/simulations/%s%i"%(project.getProjectMainDirectory().getCanonicalPath(), simRefPrefix,index)).exists():
index = index+1
simRef = "%s%i"%(simRefPrefix,index)
return simRef
def generateAndRunGenesis(project,
projectManager,
simConfig,
simRef,
simulatorSeed,
verbose=True,
quitAfterRun=False,
runInBackground=False,
units=-1,
symmetricComps=None):
prefix = "--- GENESIS gen: "
if verbose: print prefix+"Going to generate GENESIS files for: "+simRef
if runInBackground:
project.genesisSettings.setNoConsole()
if units == UnitConverter.GENESIS_SI_UNITS or units == UnitConverter.GENESIS_PHYSIOLOGICAL_UNITS:
project.genesisSettings.setUnitSystemToUse(units) # else leave it as the units set in the proj
project.genesisSettings.setMooseCompatMode(False)
if symmetricComps is not None:
project.genesisSettings.setSymmetricCompartments(symmetricComps)
project.genesisFileManager.setQuitAfterRun(quitAfterRun)
compartmentalisation = GenesisCompartmentalisation()
project.genesisFileManager.generateTheGenesisFiles(simConfig,
None,
|
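`loadMepFile()` above implies a simple line format for OMV .mep files: an experiment name terminated by ':' followed by its 'spike times: [...]' list. A self-contained restatement of just the parsing rules, checked on an invented two-line fixture:

```python
# Parse (name, spike-times) pairs using the same slicing as loadMepFile:
# [14:-1] strips the 'spike times: [' prefix and the closing ']'.
def parse_mep_lines(lines, scale=1):
    spike_times, exp_name = {}, ""
    for line in lines:
        line = line.strip()
        if line.startswith('system:') or line.startswith('expected:'):
            continue
        elif line.startswith('spike times: ['):
            spike_times[exp_name] = [float(t) * scale
                                     for t in line[14:-1].split(',')]
        else:
            exp_name = line[:-1]      # drop the trailing ':'
    return spike_times

assert parse_mep_lines(['exp0:', 'spike times: [10.0, 20.5]']) == \
    {'exp0': [10.0, 20.5]}
```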
ashang/calibre
|
src/calibre/gui2/store/web_store_dialog.py
|
Python
|
gpl-3.0
| 1,721
| 0.001743
|
# -*- coding: utf-8 -*-
from __future__ import (unicode_literals, division, absolute_import, print_function)
__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'
from PyQt5.Qt import QDialog, QUrl
from calibre import url_slash_cleaner
from calibre.gui2.store.web_store_dialog_ui import Ui_Dialog
cl
|
ass WebStoreDialog(QDialog, Ui_Dialog):
def __init__(self, gui, base_url, parent=None, detail_url=None, create_browser=None):
QDialog.__init__(self, parent=parent)
self.setupUi(self)
|
self.gui = gui
self.base_url = base_url
self.view.set_gui(self.gui)
self.view.create_browser = create_browser
self.view.loadStarted.connect(self.load_started)
self.view.loadProgress.connect(self.load_progress)
self.view.loadFinished.connect(self.load_finished)
self.home.clicked.connect(self.go_home)
self.reload.clicked.connect(self.view.reload)
self.back.clicked.connect(self.view.back)
self.go_home(detail_url=detail_url)
def set_tags(self, tags):
self.view.set_tags(tags)
def load_started(self):
self.progress.setValue(0)
def load_progress(self, val):
self.progress.setValue(val)
def load_finished(self, ok=True):
self.progress.setValue(100)
def go_home(self, checked=False, detail_url=None):
if detail_url:
url = detail_url
else:
url = self.base_url
# Reduce redundant /'s because some stores
# (Feedbooks) and server frameworks (cherrypy)
# choke on them.
url = url_slash_cleaner(url)
self.view.load(QUrl(url))
|
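The comment in `go_home` explains why redundant slashes must be collapsed before loading. calibre ships `url_slash_cleaner` for this; the regex below is a hedged stand-in with the behavior that comment implies, not calibre's actual implementation.

```python
import re

def collapse_slashes(url):
    # Collapse runs of '/' everywhere except the '//' after the scheme.
    return re.sub(r'(?<!:)/{2,}', '/', url)

assert (collapse_slashes('http://example.com//store///book')
        == 'http://example.com/store/book')
```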
jelmer/pydoctor
|
pydoctor/astbuilder.py
|
Python
|
isc
| 17,654
| 0.002322
|
"""Convert ASTs into L{pydoctor.model.Documentable} instances."""
from pydoctor import model, ast_pp
from compiler import visitor, transformer, ast
import symbol, token
class str_with_orig(str):
"""Hack to allow recovery of the literal that gave rise to a docstring in an AST.
We do this to allow the users to edit the original form of the docstring in the
editing server defined in the L{server} module.
@ivar orig: The literal that gave rise to this constant in the AST.
"""
pass
class MyTransformer(transformer.Transformer):
"""Custom transformer that creates Nodes with L{str_with_orig} instances for docstrings."""
def get_docstring(self, node, n=None):
"""Override C{transformer.Transformer.get_docstring} to return a L{str_with_orig} object."""
if n is None:
n = node[0]
node = node[1:]
if n == symbol.suite:
if len(node) == 1:
return self.get_docstring(node[0])
for sub in node:
if sub[0] == symbol.stmt:
return self.get_docstring(sub)
return None
if n == symbol.file_input:
for sub in node:
if sub[0] == symbol.stmt:
return self.get_docstring(sub)
return None
if n == symbol.atom:
if node[0][0] == token.STRING:
s = ''
for t in node:
s = s + eval(t[1])
r = str_with_orig(s)
r.orig = ''.join(t[1] for t in node)
r.linenumber = node[0][2]
return r
return None
if n == symbol.stmt or n == symbol.simple_stmt \
or n == symbol.small_stmt:
return self.get_docstring(node[0])
if n in transformer._doc_nodes and len(node) == 1:
return self.get_docstring(node[0])
return None
def parseFile(path):
"""Duplicate of L{compiler.parseFile} that uses L{MyTransformer}."""
f = open(path, "U")
src = f.read() + "\n"
f.close()
return parse(src)
def parse(buf):
"""Duplicate of L{compiler.parse} that uses L{MyTransformer}."""
return MyTransformer().parsesuite(buf)
def node2dottedname(node):
parts = []
while isinstance(node, ast.Getattr):
parts.append(node.attrname)
node = node.expr
if isinstance(node, ast.Name):
parts.append(node.name)
else:
return None
parts.reverse()
return parts
class ModuleVistor(object):
def __init__(self, builder, module):
s
|
elf.builder = builder
self.system = builder.system
self.module = module
def default(self, node):
for child in node.getChildNodes():
self.visit(child)
def visitModule(self, node):
assert self.module.docstring is None
self.module.docstring = node.doc
self.builder.push(self.module)
self.default(node)
self.builder.pop(self.module)
def visitClass(self, node):
rawbases = []
bases = []
|
baseobjects = []
for n in node.bases:
str_base = ast_pp.pp(n)
rawbases.append(str_base)
full_name = self.builder.current.expandName(str_base)
bases.append(full_name)
baseobj = self.system.objForFullName(full_name)
if not isinstance(baseobj, model.Class):
baseobj = None
baseobjects.append(baseobj)
cls = self.builder.pushClass(node.name, node.doc)
cls.decorators = []
cls.rawbases = rawbases
cls.bases = bases
cls.baseobjects = baseobjects
def node2data(node):
dotted_name = node2dottedname(node)
if dotted_name is None:
return None
dotted_name = '.'.join(dotted_name)
full_name = self.builder.current.expandName(dotted_name)
obj = self.system.objForFullName(full_name)
return (dotted_name, full_name, obj)
if node.decorators:
for decnode in node.decorators:
if isinstance(decnode, ast.CallFunc):
args = []
for arg in decnode.args:
args.append(node2data(arg))
base = node2data(decnode.node)
else:
base = node2data(decnode)
args = None
cls.decorators.append((base, args))
if node.lineno is not None:
cls.linenumber = node.lineno
if cls.parentMod.sourceHref:
cls.sourceHref = cls.parentMod.sourceHref + '#L' + \
str(cls.linenumber)
for b in cls.baseobjects:
if b is not None:
b.subclasses.append(cls)
self.default(node)
self.builder.popClass()
def visitFrom(self, node):
if not isinstance(self.builder.current, model.CanContainImportsDocumentable):
self.warning("processing import statement in odd context")
return
modname = self.builder.expandModname(node.modname)
mod = self.system.getProcessedModule(modname)
if mod is not None:
assert mod.state in [model.PROCESSING, model.PROCESSED]
expandName = mod.expandName
else:
expandName = lambda name: modname + '.' + name
_localNameToFullName = self.builder.current._localNameToFullName_map
for fromname, asname in node.names:
if fromname == '*':
if mod is None:
self.builder.warning("import * from unknown", modname)
return
self.builder.warning("import *", modname)
if mod.all is not None:
names = mod.all
else:
names = mod.contents.keys() + mod._localNameToFullName_map.keys()
names = [k for k in names if not k.startswith('_')]
for n in names:
_localNameToFullName[n] = expandName(n)
return
if asname is None:
asname = fromname
if isinstance(self.builder.current, model.Module) and \
self.builder.current.all is not None and \
asname in self.builder.current.all and \
modname in self.system.allobjects:
mod = self.system.allobjects[modname]
if isinstance(mod, model.Module) and \
fromname in mod.contents and \
(mod.all is None or fromname not in mod.all):
self.system.msg(
"astbuilder",
"moving %r into %r"
% (mod.contents[fromname].fullName(),
self.builder.current.fullName()))
ob = mod.contents[fromname]
ob.reparent(self.builder.current, asname)
continue
if isinstance(
self.system.objForFullName(modname), model.Package):
self.system.getProcessedModule(modname + '.' + fromname)
_localNameToFullName[asname] = expandName(fromname)
def visitImport(self, node):
"""Process an import statement.
The grammar for the statement is roughly:
mod_as := DOTTEDNAME ['as' NAME]
import_stmt := 'import' mod_as (',' mod_as)*
and this is translated into a node which is an instance of Import with
an attribute 'names', which is in turn a list of 2-tuples
(dotted_name, as_name) where as_name is None if there was no 'as foo'
part of the statement.
"""
if not isinstance(self.builder.current, model.CanContainImportsDocumentable):
self.warning("processing import statement in odd context")
return
_localNameToFullName = self.builder.current._localNameToFullName_map
for fromname, asname in node.names:
fullname = self.builder.expandModname(fromname)
mod = self.system.getProcess
|
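The `visitImport` docstring above describes the statement's shape in grammar terms; concretely, `import a.b as c, d` arrives as a `names` list of `(dotted_name, as_name)` pairs, with `None` standing in for a missing as-name. A tiny illustration of the normalization the builder then applies (names invented):

```python
# 'import a.b as c, d'  ->  [('a.b', 'c'), ('d', None)]
names = [('a.b', 'c'), ('d', None)]
for fromname, asname in names:
    if asname is None:
        asname = fromname           # no 'as foo' part: bind the dotted name
    print('%s is bound to %s' % (asname, fromname))
```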
schinke/solid-fortnight-ba
|
flask/migrations/versions/7ab3e266f711_.py
|
Python
|
mit
| 5,512
| 0.007438
|
"""empty message
Revision ID: 7ab3e266f711
Revises: 0152d9c6e677
Create Date: 2016-08-09 20:14:58.552655
"""
# revision identifiers, used by Alembic.
revision = '7ab3e266f711'
down_revision = '0152d9c6e677'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(u'co2_id_fkey', 'co2', type_='foreignkey')
op.create_foreign_key(None, 'co2', 'scivalue', ['id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
op.drop_constraint(u'density_id_fkey', 'density', type_='foreignkey')
op.create_foreign_key(None, 'density', 'scivalue', ['id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
op.drop_constraint(u'edb_product_id_fkey', 'edb_product', type_='foreignkey')
op.create_foreign_key(None, 'edb_product', 'product', ['id'], ['id'], ondelete='CASCADE')
op.drop_constraint(u'foodwaste_id_fkey', 'foodwaste', type_='foreignkey')
op.create_foreign_key(None, 'foodwaste', 'scivalue', ['id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
op.drop_constraint(u'location_prod_association_product_id_fkey', 'location_prod_association', type_='foreignkey')
op.create_foreign_key(None, 'location_prod_association', 'product', ['product_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
op.drop_constraint(u'prod_allergene_association_id_fkey', 'prod_allergene_association', type_='foreignkey')
op.create_foreign_key(None, 'prod_allergene_association', 'scivalue', ['id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
op.drop_constraint(u'prod_nutrient_association_id_fkey', 'prod_nutrient_association', type_='foreignkey')
op.create_foreign_key(None, 'prod_nutrient_association', 'scivalue', ['id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
op.drop_constraint(u'prod_process_association_id_fkey', 'prod_process_association', type_='foreignkey')
op.create_foreign_key(None, 'prod_process_association', 'scivalue', ['id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
op.drop_constraint(u'prod_process_co2_association_id_fkey', 'prod_process_co2_association', type_='foreignkey')
op.create_foreign_key(None, 'prod_process_co2_association', 'scivalue', ['id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
op.drop_constraint(u'scivalue_product_id_fkey', 'scivalue', type_='foreignkey')
op.create_foreign_key(None, 'scivalue', 'product', ['product_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
|
op.drop_constraint(u'synonym_prod_association_product_id_fkey', 'synonym_prod_association', type_='foreignkey')
op.create_foreign_key(None, 'synonym_prod_association', 'product', ['product_id'], ['id'], ondelete='CASCADE')
op.drop_constraint(u'template_id_fkey', 'template', type_='foreignkey')
op.create_foreign_key(None, 'template', 'product', ['id'], ['id'], ondelete='CASCADE')
op.drop_constraint(u'unit_weigh
|
t_id_fkey', 'unit_weight', type_='foreignkey')
op.create_foreign_key(None, 'unit_weight', 'scivalue', ['id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'unit_weight', type_='foreignkey')
op.create_foreign_key(u'unit_weight_id_fkey', 'unit_weight', 'scivalue', ['id'], ['id'])
op.drop_constraint(None, 'template', type_='foreignkey')
op.create_foreign_key(u'template_id_fkey', 'template', 'product', ['id'], ['id'])
op.drop_constraint(None, 'synonym_prod_association', type_='foreignkey')
op.create_foreign_key(u'synonym_prod_association_product_id_fkey', 'synonym_prod_association', 'product', ['product_id'], ['id'])
op.drop_constraint(None, 'scivalue', type_='foreignkey')
op.create_foreign_key(u'scivalue_product_id_fkey', 'scivalue', 'product', ['product_id'], ['id'])
op.drop_constraint(None, 'prod_process_co2_association', type_='foreignkey')
op.create_foreign_key(u'prod_process_co2_association_id_fkey', 'prod_process_co2_association', 'scivalue', ['id'], ['id'])
op.drop_constraint(None, 'prod_process_association', type_='foreignkey')
op.create_foreign_key(u'prod_process_association_id_fkey', 'prod_process_association', 'scivalue', ['id'], ['id'])
op.drop_constraint(None, 'prod_nutrient_association', type_='foreignkey')
op.create_foreign_key(u'prod_nutrient_association_id_fkey', 'prod_nutrient_association', 'scivalue', ['id'], ['id'])
op.drop_constraint(None, 'prod_allergene_association', type_='foreignkey')
op.create_foreign_key(u'prod_allergene_association_id_fkey', 'prod_allergene_association', 'scivalue', ['id'], ['id'])
op.drop_constraint(None, 'location_prod_association', type_='foreignkey')
op.create_foreign_key(u'location_prod_association_product_id_fkey', 'location_prod_association', 'product', ['product_id'], ['id'])
op.drop_constraint(None, 'foodwaste', type_='foreignkey')
op.create_foreign_key(u'foodwaste_id_fkey', 'foodwaste', 'scivalue', ['id'], ['id'])
op.drop_constraint(None, 'edb_product', type_='foreignkey')
op.create_foreign_key(u'edb_product_id_fkey', 'edb_product', 'product', ['id'], ['id'])
op.drop_constraint(None, 'density', type_='foreignkey')
op.create_foreign_key(u'density_id_fkey', 'density', 'scivalue', ['id'], ['id'])
op.drop_constraint(None, 'co2', type_='foreignkey')
op.create_foreign_key(u'co2_id_fkey', 'co2', 'scivalue', ['id'], ['id'])
### end Alembic commands ###
|
mscherer/ansible-modules-extras
|
packaging/language/composer.py
|
Python
|
gpl-3.0
| 6,165
| 0.008597
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Dimitrios Tydeas Mengidis <tydeas.dr@gmail.com>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: composer
author: Dimitrios Tydeas Mengidis
short_description: Dependency Manager for PHP
version_added: "1.6"
description:
    - Composer is a tool for dependency management in PHP. It allows you to declare the dependent libraries your project needs and installs them in your project for you.
options:
command:
version_added: "1.8"
description:
- Composer command like "install", "update" and so on
required: false
default: install
working_dir:
description:
- Directory of your project ( see --working-dir )
required: true
default: null
aliases: [ "working-dir" ]
prefer_source:
description:
- Forces installation from package sources when possible ( see --prefer-source )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "prefer-source" ]
prefer_dist:
description:
- Forces installation from package dist even for dev versions ( see --prefer-dist )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "prefer-dist" ]
no_dev:
description:
- Disables installation of require-dev packages ( see --no-dev )
required: false
default: "yes"
choices: [ "yes", "no" ]
aliases: [ "no-dev" ]
no_scripts:
description:
- Skips the execution of all scripts defined in composer.json ( see --no-scripts )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "no-scripts" ]
no_plugins:
description:
- Disables all plugins ( see --no-plugins )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "no-plugins" ]
optimize_autoloader:
description:
- Optimize autoloader during autoloader dump ( see --optimize-autoloader ). Convert PSR-0/4 autoloading to classmap to get a faster autoloader. This is recommended especially for production, but can take a bit of time to run so it is currently not done by default.
required: false
default: "yes"
choices: [ "yes", "no" ]
aliases: [ "optimize-autoloader" ]
requirements:
- php
- composer installed in bin path (recommended /usr/local/bin)
notes:
- Default options that are always appended in each execution are --no-ansi, --no-progress, and --no-interaction
'''
EXAMPLES = '''
# Downloads and installs all the libs and dependencies outlined in the /path/to/project/composer.lock
- composer: command=install working_dir=/path/to/project
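# A hedged second example (paths hypothetical): update while keeping dev
# packages and preferring source checkouts, per the options documented above
- composer: command=update working_dir=/path/to/project no_dev=no prefer_source=yes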
'''
import os
import re
def parse_out(string):
    return re.sub(r"\s+", " ", string).strip()
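# Hedged note: composer prints "Nothing to install or update" when the
# lockfile is already satisfied, so its absence is treated as a change below.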
def has_changed(string):
    return "Nothing to install or update" not in string
def composer_install(module, command, options):
php_path = module.get_bin_path("php", True, ["/usr/local/bin"])
composer_path = module.get_bin_path("composer", True, ["/usr/local/bin"])
cmd = "%s %s %s %s" % (php_path, composer_path, command, " ".join(options))
return module.run_command(cmd)
def main():
module = AnsibleModule(
argument_spec = dict(
command = dict(default="install", type="str", required=False),
working_dir = dict(aliases=["working-dir"], required=True),
prefer_source = dict(default="no", type="bool", aliases=["prefer-source"]),
prefer_dist = dict(default="no", type="bool", aliases=["prefer-dist"]),
no_dev = dict(default="yes", type="bool", aliases=["no-dev"]),
no_scripts = dict(default="no", type="bool", aliases=["no-scripts"]),
no_plugins = dict(default="no", type="bool", aliases=["no-plugins"]),
            optimize_autoloader = dict(default="yes", type="bool", aliases=["optimize-autoloader"]),
),
supports_check_mode=True
)
options = []
# Default options
options.append('--no-ansi')
options.append('--no-progress')
options.append('--no-interaction')
    options.extend(['--working-dir', os.path.abspath(module.params['working_dir'])])
# Get composer command with fallback to default
command = module.params['command']
# Prepare options
if module.params['prefer_source']:
options.append('--prefer-source')
if module.params['prefer_dist']:
options.append('--prefer-dist')
if module.params['no_dev']:
options.append('--no-dev')
if module.params['no_scripts']:
options.append('--no-scripts')
if module.params['no_plugins']:
options.append('--no-plugins')
if module.params['optimize_autoloader']:
options.append('--optimize-autoloader')
if module.check_mode:
options.append('--dry-run')
rc, out, err = composer_install(module, command, options)
if rc != 0:
output = parse_out(err)
module.fail_json(msg=output)
else:
# Composer version > 1.0.0-alpha9 now use stderr for standard notification messages
output = parse_out(out + err)
module.exit_json(changed=has_changed(output), msg=output)
# import module snippets
from ansible.module_utils.basic import *
main()
|
airbnb/airflow
|
airflow/contrib/operators/imap_attachment_to_s3_operator.py
|
Python
|
apache-2.0
| 1,222
| 0.001637
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License")
|
; you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`.",
DeprecationWarning,
stacklevel=2,
)
|
mxmzdlv/pybigquery
|
tests/unit/test_dialect_types.py
|
Python
|
mit
| 1,632
| 0.004902
|
# Copyright (c) 2017 The sqlalchemy-bigquery Authors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import importlib
# https://docs.sqlalchemy.org/en/13/core/type_basics.html#vendor-specific-types
def test_types_import():
"""Demonstrate behavior of importing types independent of any other import."""
dialect_module = importlib.import_module("sqlalchemy_bigquery")
_types_module = importlib.import_module("sqlalchemy_bigquery._types")
custom_types = getattr(_types_module, "_type_map")
for type_name, type_value in custom_types.items():
assert getattr(dialect_module, type_name) == type_value
|
ikoula/cloudstack
|
tools/marvin/marvin/sandbox/demo/simulator/testcase/test_vm_life_cycle.py
|
Python
|
gpl-2.0
| 20,302
| 0.003546
|
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" BVT tests for Virtual Machine Life Cycle
"""
#Import Local Modules
import marvin
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.sshClient import SshClient
from testcase.libs.utils import *
from testcase.libs.base import *
from testcase.libs.common import *
#Import System modules
import time
class Services:
"""Test VM Life Cycle Services
"""
def __init__(self):
self.services = {
"disk_offering":{
"displaytext": "Small",
"name": "Small",
"disksize": 1
},
"account": {
"email": "test@test.com",
"firstname": "Test",
"lastname": "User",
"username": "test",
# Random characters are appended in create account to
# ensure unique username generated each time
"password": "password",
},
"small":
# Create a small virtual machine instance with disk offering
{
"displayname": "testserver",
"username": "root", # VM creds for SSH
"password": "password",
"ssh_port": 22,
"hypervisor": 'XenServer',
"privateport": 22,
"publicport": 22,
"protocol": 'TCP',
},
"medium": # Create a medium virtual machine instance
{
"displayname": "testserver",
"username": "root",
"password": "password",
"ssh_port": 22,
"hypervisor": 'XenServer',
"privateport": 22,
"publicport": 22,
"protocol": 'TCP',
},
"service_offerings":
{
"tiny":
{
"name": "Tiny Instance",
"displaytext": "Tiny Instance",
"cpunumber": 1,
"cpuspeed": 100, # in MHz
"memory": 64, # In MBs
},
"small":
{
                        # Small service offering ID used to change VM
# service offering from medium to small
"name": "Small Instance",
"displaytext": "Small Instance",
"cpunumber": 1,
"cpuspeed": 500,
"memory": 256
},
"medium":
{
                        # Medium service offering ID used to
# change VM service offering from small to medium
"name": "Medium Instance",
"displaytext": "Medium Instance",
"cpunumber": 1,
"cpuspeed": 1000,
"memory": 1024
}
},
"sleep": 60,
"timeout": 10,
#Migrate VM to hostid
"ostypeid": 'ccde7156-9b8b-4fb9-bf08-530dedf4dc61',
# CentOS 5.3 (64-bit)
"mode":'advanced',
}
class TestDeployVM(cloudstackTestCase):
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.services = Services().services
# Get Zone, Domain and templates
domain = get_domain(self.apiclient, self.services)
zone = get_zone(self.apiclient, self.services)
template = get_template(
self.apiclient,
zone.id,
self.services["ostypeid"]
)
# Set Zones and disk offerings
self.services["small"]["zoneid"] = zone.id
self.services["small"]["template"] = template.id
self.services["medium"]["zoneid"] = zone.id
self.services["medium"]["template"] = template.id
# Create Account, VMs, NAT Rules etc
self.account = Account.create(
self.apiclient,
self.services["account"],
domainid=domain.id
)
self.service_offering = ServiceOffering.create(
self.apiclient,
self.services["service_offerings"]["tiny"]
)
# Cleanup
self.cleanup = [
self.service_offering,
self.account
]
def test_deploy_vm(self):
"""Test Deploy Virtual Machine
"""
# Validate the following:
# 1. Virtual Machine is accessible via SSH
# 2. listVirtualMachines returns accurate information
# 3. The Cloud Database contains the valid information
self.virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["small"],
accountid=self.account.account.name,
domainid=self.account.account.domainid,
serviceofferingid=self.service_offering.id
)
list_vm_response = list_virtual_machines(
self.apiclient,
id=self.virtual_machine.id
)
self.debug(
"Verify listVirtualMachines response for virtual machine: %s" \
% self.virtual_machine.id
)
self.assertEqual(
isinstance(list_vm_response, list),
True,
"Check list response returns a valid list"
)
self.assertNotEqual(
len(list_vm_response),
0,
"Check VM available in List Virtual Machines"
)
vm_response = list_vm_response[0]
self.assertEqual(
vm_response.id,
self.virtual_machine.id,
"Check virtual machine id in listVirtualMachines"
)
self.assertEqual(
vm_response.displayname,
self.virtual_machine.displayname,
"Check virtual machine displayname in listVirtualMachines"
)
return
def tearDown(self):
try:
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
self.debug("Warning! Exception in tearDown: %s" % e)
class TestVMLifeCycle(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.api_client = super(TestVMLifeCycle, cls).getClsTestClient().getApiClient()
        cls.services = Services().services
|
adsabs/ADSfulltext
|
adsft/entitydefs.py
|
Python
|
gpl-3.0
| 48,476
| 0.000041
|
"""
Contains the relevant conversions of HTML and LaTeX entities that will not be
correctly converted elsewhere. This is taken from adsabs/adsdata authored by
J. Luker.
"""
__author__ = 'J. Luker'
__maintainer__ = 'J. Elliott'
__copyright__ = 'Copyright 2015'
__version__ = '1.0'
__email__ = 'ads@cfa.harvard.edu'
__status__ = 'Production'
__credit__ = ['V. Sudilovsky', 'A. Accomazzi', 'J. Luker']
__license__ = 'GPLv3'
import re
entitydefs = {
'nsqsupe': u'\u22e3',
'Pcy': u'\u041f',
'xharr': u'\u27f7',
'HumpDownHump': u'\u224e',
'asymp': u'\u2248',
'otimes': u'\u2297',
'Zopf': u'\u2124',
'bkarow': u'\u290d',
'lessapprox': u'\u2a85',
'angmsd': u'\u2221',
'gimel': u'\u2137',
'dollar': u'$',
'mstpos': u'\u223e',
'rsquor': u'\u2019',
    'boxminus': u'\u229f',
'ThinSpace': u'\u2009',
'equivDD': u'\u2a78',
'pertenk': u'\u2031',
'Gt': u'\u226b',
'gscr': u'\u210a',
'Backslash': u'\u2216',
'Gg': u'\u22d9',
'nparallel': u'\u2226',
'quatint': u'\u2a16',
'Igr': u'\u0399',
'iinfin': u'\u29dc',
'nsubseteqq': u'\u2ac5\u0338',
'yacy': u'\u044f',
'cularr': u'\u21b6',
'nges': u'\u2a7e\u0338',
'ngeq': u'\u2271',
'rangle': u'\u232a',
'lparlt': u'\u2993',
'Scaron': u'\u0160',
'solbar': u'\u233f',
'elsdot': u'\u2a97',
'LessFullEqual': u'\u2266',
'lbbrk': u'\u3014',
'Cacute': u'\u0106',
'npolint': u'\u2a14',
'THORN': u'\u00de',
'ngsim': u'\u2275',
'equals': u'=',
'eqslantgtr': u'\u2a96',
'vltri': u'\u22b2',
'robrk': u'\u301b',
'cuepr': u'\u22de',
'nrightarrow': u'\u219b',
'glj': u'\u2aa4',
'gla': u'\u2aa5',
'Rcaron': u'\u0158',
'ohgr': u'\u03c9',
'permil': u'\u2030',
'angmsdac': u'\u29aa',
'angmsdab': u'\u29a9',
'angmsdaa': u'\u29a8',
'uharl': u'\u21bf',
'angmsdag': u'\u29ae',
'angmsdaf': u'\u29ad',
'Agrave': u'\u00c0',
'angmsdad': u'\u29ab',
'angmsdah': u'\u29af',
'rceil': u'\u2309',
'angrtvb': u'\u22be',
'rppolint': u'\u2a12',
'divide': u'\u00f7',
'omacr': u'\u014d',
'circleddash': u'\u229d',
'notinE': u'\u22f9\u0338',
'Ncy': u'\u041d',
'lesdotor': u'\u2a83',
'Star': u'\u22c6',
'Mellintrf': u'\u2133',
'therefore': u'\u2234',
'KHcy': u'\u0425',
'barwed': u'\u2305',
'gvertneqq': u'\u2269\ufe00',
'Jcy': u'\u0419',
'phone': u'\u260e',
'ssetmn': u'\u2216',
'excl': u'!',
'parsim': u'\u2af3',
'centerdot': u'\u00b7',
'nwarr': u'\u2196',
'nvle': u'\u2264\u20d2',
'mu': u'\u03bc',
'mp': u'\u2213',
'OverBracket': u'\u23b4',
'Barwed': u'\u2306',
'bsemi': u'\u204f',
'idigr': u'\u03ca',
'Ll': u'\u22d8',
'cong': u'\u2245',
'rpar': u')',
'Lt': u'\u226a',
'NotSuperset': u'\u2283\u20d2',
'topcir': u'\u2af1',
'smte': u'\u2aac',
'LeftDownVector': u'\u21c3',
'eng': u'\u014b',
'heartsuit': u'\u2665',
'roplus': u'\u2a2e',
'zigrarr': u'\u21dd',
'lobrk': u'\u301a',
'nharr': u'\u21ae',
'xnis': u'\u22fb',
'Hcirc': u'\u0124',
'Uarrocir': u'\u2949',
'lcedil': u'\u013c',
'lat': u'\u2aab',
'incare': u'\u2105',
'lap': u'\u2a85',
'parallel': u'\u2225',
'xhArr': u'\u27fa',
'tritime': u'\u2a3b',
'SubsetEqual': u'\u2286',
'order': u'\u2134',
'PlusMinus': u'\u00b1',
'approxeq': u'\u224a',
'varr': u'\u2195',
'ograve': u'\u00f2',
'becaus': u'\u2235',
'kappav': u'\u03f0',
'iprod': u'\u2a3c',
'otilde': u'\u00f5',
'njcy': u'\u045a',
'upharpoonleft': u'\u21bf',
'Odblac': u'\u0150',
'RightArrowBar': u'\u21e5',
'Rfr': u'\u211c',
'rbrack': u']',
'UnderBrace': u'\ufe38',
'napid': u'\u224b\u0338',
'gescc': u'\u2aa9',
'iukcy': u'\u0456',
'xrArr': u'\u27f9',
'Jukcy': u'\u0404',
'bsime': u'\u22cd',
'Cayleys': u'\u212d',
'leqq': u'\u2266',
'nwArr': u'\u21d6',
'rrarr': u'\u21c9',
'UpTee': u'\u22a5',
'nvDash': u'\u22ad',
'bigodot': u'\u2a00',
'searr': u'\u2198',
'looparrowleft': u'\u21ab',
'xgr': u'\u03be',
'Tstrok': u'\u0166',
'lcub': u'{',
'smt': u'\u2aaa',
'rx': u'\u211e',
'simplus': u'\u2a24',
'uplus': u'\u228e',
'smallsetminus': u'\u2216',
'notniva': u'\u220c',
'dotsquare': u'\u22a1',
'notnivc': u'\u22fd',
'notnivb': u'\u22fe',
'ijlig': u'\u0133',
'Egr': u'\u0395',
'infin': u'\u221e',
'DoubleRightTee': u'\u22a8',
'sqcaps': u'\u2293\ufe00',
'NotTildeTilde': u'\u2249',
'lsimg': u'\u2a8f',
'aogon': u'\u0105',
'GreaterLess': u'\u2277',
'nparsl': u'\u2afd\u20e5',
'ange': u'\u29a4',
'lneq': u'\u2a87',
'Escr': u'\u2130',
'Tilde': u'\u223c',
'Ugr': u'\u03a5',
'Kcedil': u'\u0136',
'rang': u'\u232a',
'hellip': u'\u2026',
'scedil': u'\u015f',
'mldr': u'\u2026',
'lthree': u'\u22cb',
'efDot': u'\u2252',
'top': u'\u22a4',
'ZeroWidthSpace': u'\u200b',
'thickapprox': u'\u2248',
'Ifr': u'\u2111',
'Aacgr': u'\u0386',
'Coproduct': u'\u2210',
'Rarr': u'\u21a0',
'bbrk': u'\u23b5',
'minusdu': u'\u2a2a',
'kjcy': u'\u045c',
'llarr': u'\u21c7',
'rthree': u'\u22cc',
'target': u'\u2316',
'thksim': u'\u223c',
'ltrPar': u'\u2996',
'nearhk': u'\u2924',
'udigr': u'\u03cb',
'minus': u'\u2212',
'tcaron': u'\u0165',
'YUcy': u'\u042e',
'SucceedsTilde': u'\u227f',
'xrarr': u'\u27f6',
'cwint': u'\u2231',
'subsim': u'\u2ac7',
'ImaginaryI': u'\u2148',
'NotLessEqual': u'\u2270',
'Uacute': u'\u00da',
'Dgr': u'\u0394',
'vBar': u'\u2ae8',
'ubrcy': u'\u045e',
'reals': u'\u211d',
'Omega': u'\u03a9',
'LessTilde': u'\u2272',
'YAcy': u'\u042f',
'gnapprox': u'\u2a8a',
'ldquo': u'\u201c',
'drcorn': u'\u231f',
'fnof': u'\u0192',
'cupbrcap': u'\u2a48',
'grave': u'`',
'Tgr': u'\u03a4',
'lopar': u'\u2985',
'nhArr': u'\u21ce',
'wedgeq': u'\u2259',
'gvnE': u'\u2269\ufe00',
'odsold': u'\u29bc',
'dot': u'\u02d9',
'Rightarrow': u'\u21d2',
'emptyv': u'\u2205',
'Rgr': u'\u03a1',
'Union': u'\u22c3',
'lnapprox': u'\u2a89',
'boxDl': u'\u2556',
'nlarr': u'\u219a',
'Atilde': u'\u00c3',
'radic': u'\u221a',
'frac78': u'\u215e',
'boxDr': u'\u2553',
'phgr': u'\u03c6',
'swnwar': u'\u292a',
'nLeftarrow': u'\u21cd',
'vArr': u'\u21d5',
'yen': u'\u00a5',
'hoarr': u'\u21ff',
'ocy': u'\u043e',
'Eacute': u'\u00c9',
'xotime': u'\u2a02',
'rtrie': u'\u22b5',
'mapstoup': u'\u21a5',
'xlArr': u'\u27f8',
'dd': u'\u2146',
'sup': u'\u2283',
'nesim': u'\u2242\u0338',
'mapstoleft': u'\u21a4',
'circeq': u'\u2257',
'subseteq': u'\u2286',
'strns': u'\u00af',
'OHgr': u'\u03a9',
'nLl': u'\u22d8\u0338',
'OHacgr': u'\u038f',
'ulcrop': u'\u230f',
'Proportion': u'\u2237',
'Dstrok': u'\u0110',
'num': u'#',
'ddotseq': u'\u2a77',
'lescc': u'\u2aa8',
'bigvee': u'\u22c1',
'iexcl': u'\u00a1',
'circledcirc': u'\u229a',
'seArr': u'\u21d8',
'gneqq': u'\u2269',
'atilde': u'\u00e3',
'Nopf': u'\u2115',
'CircleMinus': u'\u2296',
'nesear': u'\u2928',
'squf': u'\u25aa',
'lrarr': u'\u21c6',
'capand': u'\u2a44',
'glE': u'\u2a92',
'ccaps': u'\u2a4d',
'Bgr': u'\u0392',
'wr': u'\u2240',
'wp': u'\u2118',
'zacute': u'\u017a',
'Hacek': u'\u02c7',
'vprop': u'\u221d',
'backcong': u'\u224c',
'rpargt': u'\u2994',
'ffilig': u'\ufb03',
'zhcy': u'\u0436',
'plustwo': u'\u2a27',
'ncap': u'\u2a43',
'RightVectorBar': u'\u2953',
'ohacgr': u'\u03ce',
'and': u'\u2227',
'Egrave': u'\u00c8',
'DiacriticalDot': u'\u02d9',
'nGtv': u'\u226b\u0338',
'igrave': u'\u00ec',
'nvlArr': u'\u2902',
'ETH': u'\u00d0',
'sqsupset': u'\u2290',
'esim': u'\u2242',
'intcal': u'\u22ba',
'lAtail': u'\u291b',
'tint': u'\u222d',
    'lurdshar': u'\u294a',
}
|
onecoolx/picasso
|
tools/gyp/pylib/gyp/generator/ninja_test.py
|
Python
|
bsd-3-clause
| 1,909
| 0.004191
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the ninja.py file. """
import sys
import unittest
import gyp.generator.ninja as ninja
class TestPrefixesAndSuffixes(unittest.TestCase):
def test_BinaryNamesWindows(self):
# These cannot run on non-Windows as they require a VS installation to
# correctly handle variable expansion.
if sys.platform.startswith("win"):
writer = ninja.NinjaWriter(
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win"
)
spec = {"target_name": "wee"}
self.assertTrue(
writer.ComputeOutputFileName(spec, "executable").endswith(".exe")
)
self.assertTrue(
writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll")
)
self.assertTrue(
writer.ComputeOutputFileName(spec, "static_library").endswith(".lib")
)
    def test_BinaryNamesLinux(self):
writer = ninja.NinjaWriter(
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux"
)
spec = {"target_name": "wee"}
self.assertTrue("." not in writer.ComputeOutputFileName(spec, "executable"))
self.assertTrue(
writer.ComputeOutputFileName(spec, "shared_library").startswith("lib")
)
self.assertTrue(
writer.ComputeOutputFileName(spec, "static_library").startswith("lib")
)
self.assertTrue(
writer.ComputeOutputFileName(spec, "shared_library").endswith(".so")
)
self.assertTrue(
writer.ComputeOutputFileName(spec, "static_library").endswith(".a")
)
if __name__ == "__main__":
unittest.main()
|
dbrattli/RxPY
|
rx/internal/basic.py
|
Python
|
apache-2.0
| 498
| 0
|
from datetime import datetime
# Defaults
def noop(*args, **kw):
"""No operation. Returns nothing"""
pass
def identity(x):
"""Returns argument x"""
return x
def default_now():
return datetime.utcnow()
def default_comparer(x, y):
return x == y
def default_sub_comparer(x, y):
return x - y
def default_key_serializer(x):
return str(x)
def default_error(err):
if isinstance(err, BaseException):
raise err
else:
        raise Exception(err)
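# Hedged usage sketch of the defaults above:
#   default_comparer(1, 1)      -> True
#   default_key_serializer(42)  -> '42'
#   default_error(ValueError('boom')) re-raises the given instance, while a
#   plain message is wrapped in a new Exception instead.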
|
uwcirg/true_nth_usa_portal
|
portal/models/group.py
|
Python
|
bsd-3-clause
| 1,907
| 0
|
"""Group module
Groups are intended to cluster users together for logical reasons,
such as a list of users to whom patient notifications apply.
Groups should not be used to grant or restrict access - see `Role`.
"""
import re
from sqlalchemy import UniqueConstraint
from werkzeug.exceptions import BadRequest
from ..database import db
class Group(db.Model):
"""SQLAlchemy class for `groups` table"""
__tablename__ = 'groups'
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(50), unique=True)
description = db.Column(db.Text)
def __str__(self):
return "Group {}".format(self.name)
def as_json(self):
return {'name': self.name, 'description': self.description}
@classmethod
def from_json(cls, data):
instance = cls()
instance.name = cls.validate_name(data['name'])
instance.description = data['description']
return instance
@staticmethod
def validate_name(name):
"""Only accept lowercase letters and underscores in name
:returns: the name if valid
:raises BadRequest: on error
"""
if re.match(r'^[a-z][a-z0-9_]*$', name):
return name
raise BadRequest(
"Group name may only contain lowercase letters and underscores")
class UserGroup(db.Model):
__tablename__ = 'user_groups'
id = db.Column(db.Integer(), primary_key=True)
user_id = db.Column(
        db.Integer(), db.ForeignKey('users.id', ondelete='CASCADE'),
nullable=False)
group_id = db.Column(
db.Integer(), db.ForeignKey('groups.id', ondelete='CASCADE'),
nullable=False)
__table_args__ = (UniqueConstraint(
'user_id', 'group_id', name='_user_group'),)
def __str__(self):
"""Print friendly format for logging, etc."""
return "UserGroup {0.user_id}:{0.group_id}".format(self)
|
mit-dci/lit
|
test/itest_break.py
|
Python
|
mit
| 337
| 0.005935
|
import testlib
import test_combinators
fee = 20
initialsend = 200000
capacity = 1000000
def forward(env):
lit1 = env.lits[0]
lit2 = env.lits[1]
test_combinators.run_break_test(env, lit1, lit2, lit1)
def reverse(env):
lit1 = env.lits[0]
lit2 = env.lits[1]
    test_combinators.run_break_test(env, lit1, lit2, lit2)
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247971765/PyQt4/QtCore/QTextStream.py
|
Python
|
gpl-2.0
| 7,395
| 0.010007
|
# encoding: utf-8
# module PyQt4.QtCore
# from /usr/lib/python3/dist-packages/PyQt4/QtCore.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import sip as __sip
class QTextStream(): # skipped bases: <class 'sip.simplewrapper'>
"""
QTextStream()
QTextStream(QIODevice)
QTextStream(QByteArray, QIODevice.OpenMode mode=QIODevice.ReadWrite)
"""
def atEnd(self): # real signature unknown; restored from __doc__
""" QTextStream.atEnd() -> bool """
return False
def autoDetectUnicode(self): # real signature unknown; restored from __doc__
""" QTextStream.autoDetectUnicode() -> bool """
return False
def codec(self): # real signature unknown; restored from __doc__
""" QTextStream.codec() -> QTextCodec """
return QTextCodec
def device(self): # real signature unknown; restored from __doc__
""" QTextStream.device() -> QIODevice """
return QIODevice
def fieldAlignment(self): # real signature unknown; restored from __doc__
""" QTextStream.fieldAlignment() -> QTextStream.FieldAlignment """
pass
def fieldWidth(self): # real signature unknown; restored from __doc__
""" QTextStream.fieldWidth() -> i
|
nt """
return 0
def flush(self): # real signature unknown; restored from __doc__
""" QTextStream.flush() """
pass
def generateByteOrderMark(self): # real signature unknown; restored from __doc__
""" QTextStream.generateByteOrderMark() -> bool """
return False
def integerBase(self): # real signature unknown; restored from __doc__
""" QTextStream.integerBase() -> int """
return 0
def locale(self): # real signature unknown; restored from __doc__
""" QTextStream.locale() -> QLocale """
return QLocale
def numberFlags(self): # real signature unknown; restored from __doc__
""" QTextStream.numberFlags() -> QTextStream.NumberFlags """
pass
def padChar(self): # real signature unknown; restored from __doc__
""" QTextStream.padChar() -> str """
return ""
def pos(self): # real signature unknown; restored from __doc__
""" QTextStream.pos() -> int """
return 0
def read(self, p_int): # real signature unknown; restored from __doc__
""" QTextStream.read(int) -> str """
return ""
def readAll(self): # real signature unknown; restored from __doc__
""" QTextStream.readAll() -> str """
return ""
def readLine(self, int_maxLength=0): # real signature unknown; restored from __doc__
""" QTextStream.readLine(int maxLength=0) -> str """
return ""
def realNumberNotation(self): # real signature unknown; restored from __doc__
""" QTextStream.realNumberNotation() -> QTextStream.RealNumberNotation """
pass
def realNumberPrecision(self): # real signature unknown; restored from __doc__
""" QTextStream.realNumberPrecision() -> int """
return 0
def reset(self): # real signature unknown; restored from __doc__
""" QTextStream.reset() """
pass
def resetStatus(self): # real signature unknown; restored from __doc__
""" QTextStream.resetStatus() """
pass
def seek(self, p_int): # real signature unknown; restored from __doc__
""" QTextStream.seek(int) -> bool """
return False
def setAutoDetectUnicode(self, bool): # real signature unknown; restored from __doc__
""" QTextStream.setAutoDetectUnicode(bool) """
pass
def setCodec(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
"""
QTextStream.setCodec(QTextCodec)
QTextStream.setCodec(str)
"""
pass
def setDevice(self, QIODevice): # real signature unknown; restored from __doc__
""" QTextStream.setDevice(QIODevice) """
pass
def setFieldAlignment(self, QTextStream_FieldAlignment): # real signature unknown; restored from __doc__
""" QTextStream.setFieldAlignment(QTextStream.FieldAlignment) """
pass
def setFieldWidth(self, p_int): # real signature unknown; restored from __doc__
""" QTextStream.setFieldWidth(int) """
pass
def setGenerateByteOrderMark(self, bool): # real signature unknown; restored from __doc__
""" QTextStream.setGenerateByteOrderMark(bool) """
pass
def setIntegerBase(self, p_int): # real signature unknown; restored from __doc__
""" QTextStream.setIntegerBase(int) """
pass
def setLocale(self, QLocale): # real signature unknown; restored from __doc__
""" QTextStream.setLocale(QLocale) """
pass
def setNumberFlags(self, QTextStream_NumberFlags): # real signature unknown; restored from __doc__
""" QTextStream.setNumberFlags(QTextStream.NumberFlags) """
pass
def setPadChar(self, p_str): # real signature unknown; restored from __doc__
""" QTextStream.setPadChar(str) """
pass
def setRealNumberNotation(self, QTextStream_RealNumberNotation): # real signature unknown; restored from __doc__
""" QTextStream.setRealNumberNotation(QTextStream.RealNumberNotation) """
pass
def setRealNumberPrecision(self, p_int): # real signature unknown; restored from __doc__
""" QTextStream.setRealNumberPrecision(int) """
pass
def setStatus(self, QTextStream_Status): # real signature unknown; restored from __doc__
""" QTextStream.setStatus(QTextStream.Status) """
pass
def setString(self, *args, **kwargs): # real signature unknown
pass
def skipWhiteSpace(self): # real signature unknown; restored from __doc__
""" QTextStream.skipWhiteSpace() """
pass
def status(self): # real signature unknown; restored from __doc__
""" QTextStream.status() -> QTextStream.Status """
pass
def string(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
pass
def __lshift__(self, *args, **kwargs): # real signature unknown
""" Return self<<value. """
pass
def __rlshift__(self, *args, **kwargs): # real signature unknown
""" Return value<<self. """
pass
def __rrshift__(self, *args, **kwargs): # real signature unknown
""" Return value>>self. """
pass
def __rshift__(self, *args, **kwargs): # real signature unknown
""" Return self>>value. """
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
AlignAccountingStyle = 3
AlignCenter = 2
AlignLeft = 0
AlignRight = 1
FieldAlignment = None # (!) real value is ''
FixedNotation = 1
ForcePoint = 2
ForceSign = 4
NumberFlag = None # (!) real value is ''
NumberFlags = None # (!) real value is ''
Ok = 0
ReadCorruptData = 2
ReadPastEnd = 1
RealNumberNotation = None # (!) real value is ''
ScientificNotation = 2
ShowBase = 1
SmartNotation = 0
Status = None # (!) real value is ''
UppercaseBase = 8
UppercaseDigits = 16
WriteFailed = 3
|
vahtras/amy
|
workshops/migrations/0119_auto_20161023_1413.py
|
Python
|
mit
| 1,244
| 0.001608
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-10-23 19:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0118_auto_20160922_0009'),
]
operations = [
migrations.AlterField(
model_name='event',
name='invoice_status',
            field=models.CharField(choices=[('unknown', 'Unknown'), ('invoiced', 'Invoice requested'), ('not-invoiced', 'Invoice not requested'), ('na-historic', 'Not applicable for historical reasons'), ('na-member', 'Not applicable because of membership'), ('na-self-org', 'Not applicable because self-organized'), ('na-waiver', 'Not applicable because waiver granted'), ('na-other', 'Not applicable because other arrangements made'), ('paid', 'Paid')], default='not-invoiced', max_length=40, verbose_name='Invoice status'),
),
migrations.AlterField(
model_name='invoicerequest',
name='status',
field=models.CharField(choices=[('not-invoiced', 'Invoice not requested'), ('sent', 'Sent out'), ('paid', 'Paid')], default='not-invoiced', max_length=40, verbose_name='Invoice status'),
),
]
|
mitschabaude/nanopores
|
nanopores/physics/default.py
|
Python
|
mit
| 792
| 0.013889
|
import dolfin
from nanopores.physics.params_physical import *
def lscale(geo):
# TODO: "lscale" is confusing since it is actually 1 over the length scale
try:
return geo.parameter("lscale")
    except Exception:
try:
return geo.parameter("nm")/nm
        except Exception:
return 1e9
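# Hedged note: with geometry lengths parameterized in nm, lscale evaluates to
# 1e9, so the grad/div wrappers below rescale derivatives accordingly.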
def grad(lscale):
def grad0(u):
return dolfin.Constant(lscale)*dolfin.nabla_grad(u)
return grad0
def div(lscale):
def div0(u):
return dolfin.Constant(lscale)*dolfin.transpose(dolfin.nabla_div(u))
return div0
def dim(geo):
return geo.mesh.topology().dim()
cyl = False
def r2pi(cyl):
return dolfin.Expression("2*pi*x[0]", degree=1) if cyl else dolfin.Constant(1.)
def invscale(lscale):
return lambda i: dolfin.Constant(1./lscale**i)
|
benneely/duke-data-service-dredd
|
dredd/dredd_scripts/21_auth_provider.py
|
Python
|
gpl-3.0
| 1,178
| 0.006791
|
import dredd_hooks as hooks
import imp
import os
import json
import uuid
#if you want to import another module for use in this workflow
utils = imp.load_source("utils", os.path.join(os.getcwd(), 'utils.py'))
###############################################################################
###############################################################################
# Tags
###############################################################################
###############################################################################
@hooks.before("Auth Providers > Auth Providers collection > List auth providers")
@hooks.before("Auth Providers > Auth Provider instance > View auth provider")
@hooks.before("Auth Provider Affiliates > NOT_IMPL_NEW List auth provi
|
der affiliates > NOT_IMPL_NEW List auth provider affiliates
|
")
@hooks.before("Auth Provider Affiliates > NOT_IMPL_NEW View auth provider affiliate > NOT_IMPL_NEW View auth provider affiliate")
@hooks.before("Auth Provider Affiliates > NOT_IMPL_NEW Create user account for affiliate > NOT_IMPL_NEW Create user account for affiliate")
def skippy21_1(transaction):
utils.skip_this_endpoint(transaction)
|
ge0rgi/cinder
|
cinder/volume/drivers/dell/dell_storagecenter_common.py
|
Python
|
apache-2.0
| 83,512
| 0
|
# Copyright 2016 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import eventlet
from oslo_config import cfg
from oslo_config import types
from oslo_log import log as logging
from oslo_utils import excutils
import six
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder.objects import fields
from cinder.volume import driver
from cinder.volume.drivers.dell import dell_storagecenter_api
from cinder.volume.drivers.san.san import san_opts
from cinder.volume import volume_types
common_opts = [
cfg.IntOpt('dell_sc_ssn',
default=64702,
help='Storage Center System Serial Number'),
cfg.PortOpt('dell_sc_api_port',
default=3033,
help='Dell API port'),
cfg.StrOpt('dell_sc_server_folder',
default='openstack',
help='Name of the server folder to use on the Storage Center'),
cfg.StrOpt('dell_sc_volume_folder',
default='openstack',
help='Name of the volume folder to use on the Storage Center'),
cfg.BoolOpt('dell_sc_verify_cert',
default=False,
help='Enable HTTPS SC certificate verification'),
cfg.StrOpt('secondary_san_ip',
default='',
help='IP address of secondary DSM controller'),
cfg.StrOpt('secondary_san_login',
default='Admin',
help='Secondary DSM user name'),
cfg.StrOpt('secondary_san_password',
default='',
help='Secondary DSM user password name',
secret=True),
cfg.PortOpt('secondary_sc_api_port',
default=3033,
help='Secondary Dell API port'),
cfg.MultiOpt('excluded_domain_ip',
item_type=types.IPAddress(),
default=None,
help='Domain IP to be excluded from iSCSI returns.'),
cfg.StrOpt('dell_server_os',
default='Red Hat Linux 6.x',
help='Server OS type to use when creating a new server on the '
'Storage Center.')
]
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.register_opts(common_opts)
class DellCommonDriver(driver.ManageableVD,
driver.ManageableSnapshotsVD,
driver.BaseVD):
def __init__(self, *args, **kwargs):
super(DellCommonDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(common_opts)
self.configuration.append_config_values(san_opts)
self.backend_name =\
self.configuration.safe_get('volume_backend_name') or 'Dell'
self.backends = self.configuration.safe_get('replication_device')
self.replication_enabled = True if self.backends else False
self.is_direct_connect = False
self.active_backend_id = kwargs.get('active_backend_id', None)
self.failed_over = True if self.active_backend_id else False
LOG.info(_LI('Loading %(name)s: Failover state is %(state)r'),
{'name': self.backend_name,
'state': self.failed_over})
self.storage_protocol = 'iSCSI'
self.failback_timeout = 60
def _bytes_to_gb(self, spacestring):
"""Space is returned in a string like ...
7.38197504E8 Bytes
Need to split that apart and convert to GB.
:returns: gbs in int form
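        For example, '7.38197504E8 Bytes' is 0.6875 GB, which the int()
        truncation below reports as 0.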
"""
try:
n = spacestring.split(' ', 1)
fgbs = float(n[0]) / 1073741824.0
igbs = int(fgbs)
return igbs
except Exception:
# If any of that blew up it isn't in the format we
            # thought, so eat our error and return None
return None
def do_setup(self, context):
"""One time driver setup.
Called once by the manager after the driver is loaded.
        Sets up clients, checks licenses, and sets up protocol-
specific helpers.
"""
self._client = dell_storagecenter_api.StorageCenterApiHelper(
self.configuration, self.active_backend_id, self.storage_protocol)
def check_for_setup_error(self):
"""Validates the configuration information."""
with self._client.open_connection() as api:
api.find_sc()
self.is_direct_connect = api.is_direct_connect
if self.is_direct_connect and self.replication_enabled:
msg = _('Dell Cinder driver configuration error replication '
'not supported with direct connect.')
raise exception.InvalidHost(reason=msg)
# If we are a healthy replicated system make sure our backend
# is alive.
if self.replication_enabled and not self.failed_over:
# Check that our replication destinations are available.
for backend in self.backends:
replssn = backend['target_device_id']
try:
# Just do a find_sc on it. If it raises we catch
# that and raise with a correct exception.
api.find_sc(int(replssn))
except exception.VolumeBackendAPIException:
msg = _('Dell Cinder driver configuration error '
'replication_device %s not found') % replssn
raise exception.InvalidHost(reason=msg)
def _get_volume_extra_specs(self, obj):
"""Gets extra specs for the given object."""
type_id = obj.get('volume_type_id')
if type_id:
return volume_types.get_volume_type_extra_specs(type_id)
return {}
def _add_volume_to_consistency_group(self, api, scvolume, volume):
"""Just a helper to add a volume to a consistency group.
        :param api: Dell SC API object.
:param scvolume: Dell SC Volume object.
:param volume: Cinder Volume object.
:returns: Nothing.
"""
if scvolume and volume.get('consistencygroup_id'):
profile = api.find_replay_profile(
volume.get('consistencygroup_id'))
if profile:
api.update_cg_volumes(profile, [volume])
def _get_replication_specs(self, specs):
"""Checks if we can do replication.
Need the extra spec set and we have to be talking to EM.
        :param specs: Cinder Volume or snapshot extra specs.
:return: rinfo dict.
"""
rinfo = {'enabled': False, 'sync': False,
                 'live': False, 'active': False,
'autofailover': False}
# Repl does not work with direct connect.
if not self.is_direct_connect:
if (not self.failed_over and
specs.get('replication_enabled') == '<is> True'):
rinfo['enabled'] = True
if specs.get('replication_type') == '<in> sync':
rinfo['sync'] = True
if specs.get('replication:livevolume') == '<is> True':
rinfo['live'] = True
if specs.get('replication:livevolume:autofailover') == '<is> True':
rinfo['autofailover'] = True
if specs.get('replication:activereplay') == '<is> True':
rinfo['active'] = True
# Some quick checks.
if rinfo['enabled']:
replication_target_count = len(self.backends)
msg = None
if replication_target_count == 0:
msg = _(
'Replication setup failure: replication has been '
|
arpruss/raspberryjam-pe
|
p2/scripts3/pysanka.py
|
Python
|
mit
| 4,144
| 0.017133
|
#
# Code by Alexander Pruss and under the MIT license
#
#
# pysanka.py [filename [height [oval|N]]]
# oval: wrap an oval image onto an egg
# N: wrap a rectangular image onto an egg N times (N is an integer)
#
# Yeah, these arguments are a mess!
from mine import *
import colors
import sys
import os
from PIL import Image
from random import uniform
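# Hedged reading of egg()'s parameters (from the formula linked inside): h is
# the egg height in blocks, a appears to scale the width, and b and c shape
# the taper and asymmetry of the profile; sphere=True yields a plain sphere.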
def egg(block=block.GOLD_BLOCK, h=40, a=2.5, b=1, c=0.1, sphere=False):
def radius(y):
        if y < 0 or y >= h:
return 0
if sphere:
return sqrt((h/2.)**2 - (y-h/2.)**2)
l = y / float(h-1)
        # Formula from: http://www.jolyon.co.uk/myresearch/image-analysis/egg-shape-modelling/
return h*a*exp((-0.5*l*l+c*l-.5*c*c)/(b*b))*sqrt(1-l)*sqrt(l)/(pi*b)
for y in range(0,h):
r = radius(y)
minimumr = min(r-2,radius(y-1),radius(y+1))
for x in range(-h,h+1):
for z in range(-h,h+1):
myr = sqrt(x*x + z*z)
if myr <= r and minimumr <= myr:
if x==0 and z==0:
theta = 0
else:
theta = atan2(z,x)+pi/2
yield (x,y,z,block,theta % (2*pi))
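# Hedged note: egg() yields one (x, y, z, block, theta) tuple per shell block,
# where theta is the azimuth that callers map to image coordinates.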
def getPixel(image, x, y, dither=None):
rgb = image.getpixel(( image.size[0]-1-floor( x * image.size[0] ), image.size[1]-1-floor( y * image.size[1] ) ))
if dither is not None:
tweaked = ( rgb[0] + uniform(-dither,dither), rgb[1] + uniform(-dither,dither), rgb[2] + uniform(-dither,dither) )
return colors.rgbToBlock(tweaked)[0]
return colors.rgbToBlock(rgb)[0]
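# Hedged note on getPixel above: a non-None dither adds uniform RGB noise of
# the given amplitude before block matching, breaking up banding in gradients.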
if __name__ == '__main__':
mc = Minecraft()
if len(sys.argv) > 1:
filename = sys.argv[1]
if not os.path.isfile(filename):
filename = os.path.dirname(os.path.realpath(sys.argv[0])) + "/" + filename
else:
filename = os.path.dirname(os.path.realpath(sys.argv[0])) + "/" + "pysanka.jpg"
if len(sys.argv) > 2:
height = int(sys.argv[2])
else:
height = 100
oval = False
sphereWrap = False
if len(sys.argv) > 3:
if sys.argv[3] == "oval":
oval = True
elif sys.argv[3] == "sphere":
sphereWrap = True
else:
repeat = int(sys.argv[3])
else:
repeat = 2
pos = mc.player.getPos()
if oval:
image = Image.open(filename).convert('RGBA')
first = None
last = None
start = [None] * image.size[1]
stop = [None] * image.size[1]
for y in range(image.size[1]):
for x in range(image.size[0]):
_,_,_,alpha = image.getpixel((x,y))
if alpha == 255:
start[y] = x
break
for x in range(image.size[0]-1,-1,-1):
_,_,_,alpha = image.getpixel((x,y))
if alpha == 255:
stop[y] = x
break
if start[y] is not None:
if first is None:
first = y
last = y
assert first is not None
for (x,y,z,block,theta) in egg(h=height,block=None):
imageY = first + int(float(height-1-y)/height*(last-first+1))
if imageY < first:
imageY = first
if imageY > last:
imageY = last
imageX = start[imageY]+ int((0.5 - 0.5 * sin(theta)) * (stop[imageY]-start[imageY]))
if imageX < start[imageY]:
imageX = start[imageY]
if imageX > stop[imageY]:
imageX = stop[imageY]
mc.setBlock(x+pos.x,y+pos.y,z+pos.z, getPixel(image, imageX, imageY))
else:
image = Image.open(filename).convert('RGB')
for (x,y,z,block,theta) in egg(h=height,block=None):
mc.setBlock(x+pos.x,y+pos.y,z+pos.z,getPixel(image, (theta * repeat / (2*pi)) % 1, y / float(height), dither=20))
|
Sound-Colour-Space/sound-colour-space
|
website/apps/museum/migrations/0033_collection_tags.py
|
Python
|
mit
| 594
| 0.001684
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-11-04 17:41
from __future__ import unicode_literals
from django.db import migrations
import taggit.managers
class Migration(migrations.Migration):
    dependencies = [
('museum', '0032_auto_20161104_1839'),
]
operations = [
migrations.AddField(
model_name='collection',
name='tags',
            field=taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='museum.TaggedObject', to='museum.Keyword', verbose_name='Tags'),
),
]
|
sonnyhu/scikit-learn
|
sklearn/tree/tree.py
|
Python
|
bsd-3-clause
| 41,818
| 0.000072
|
"""
This module gathers tree-based methods, including decision, regression and
randomized trees. Single and multi-output problems are both handled.
"""
# Authors: Gilles Louppe <g.louppe@gmail.com>
# Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Brian Holt <bdholt1@gmail.com>
# Noel Dawe <noel@dawe.me>
# Satrajit Gosh <satrajit.ghosh@gmail.com>
# Joly Arnaud <arnaud.v.joly@gmail.com>
# Fares Hedayati <fares.hedayati@gmail.com>
# Nelson Liu <nelson@nelsonliu.me>
#
# License: BSD 3 clause
from __future__ import division
import numbers
from abc import ABCMeta
from abc import abstractmethod
from math import ceil
import numpy as np
from scipy.sparse import issparse
from ..base import BaseEstimator
from ..base import ClassifierMixin
from ..base import RegressorMixin
from ..externals import six
from ..feature_selection.from_model import _LearntSelectorMixin
from ..utils import check_array
from ..utils import check_random_state
from ..utils import compute_sample_weight
from ..utils.multiclass import check_classification_targets
from ..exceptions import NotFittedError
from ._criterion import Criterion
from ._splitter import Splitter
from ._tree import DepthFirstTreeBuilder
from ._tree import BestFirstTreeBuilder
from ._tree import Tree
from . import _tree, _splitter, _criterion
__all__ = ["DecisionTreeClassifier",
"DecisionTreeRegressor",
"ExtraTreeClassifier",
"ExtraTreeRegressor"]
# =============================================================================
# Types and constants
# =============================================================================
DTYPE = _tree.DTYPE
DOUBLE = _tree.DOUBLE
CRITERIA_CLF = {"gini": _criterion.Gini, "entropy": _criterion.Entropy}
CRITERIA_REG = {"mse": _criterion.MSE, "friedman_mse": _criterion.FriedmanMSE,
"mae": _criterion.MAE}
DENSE_SPLITTERS = {"best": _splitter.BestSplitter,
"random": _splitter.RandomSplitter}
SPARSE_SPLITTERS = {"best": _splitter.BestSparseSplitter,
"random": _splitter.RandomSparseSplitter}
# =============================================================================
# Base decision tree
# =============================================================================
class BaseDecisionTree(six.with_metaclass(ABCMeta, BaseEstimator,
_LearntSelectorMixin)):
"""Base class for decision trees.
Warning: This class should not be used directly.
Use derived classes instead.
"""
@abstractmethod
def __init__(self,
criterion,
splitter,
max_depth,
min_samples_split,
min_samples_leaf,
min_weight_fraction_leaf,
max_features,
max_leaf_nodes,
random_state,
min_impurity_split,
class_weight=None,
presort=False):
self.criterion = criterion
self.splitter = splitter
self.max_depth = max_depth
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.max_features = max_features
self.random_state = random_state
self.max_leaf_nodes = max_leaf_nodes
self.min_impurity_split = min_impurity_split
self.class_weight = class_weight
self.presort = presort
self.n_features_ = None
self.n_outputs_ = None
self.classes_ = None
self.n_classes_ = None
self.tree_ = None
self.max_features_ = None
def fit(self, X, y, sample_weight=None, check_input=True,
X_idx_sorted=None):
"""Build a decision tree from the training set (X, y).
Parameters
----------
X : array-like or sparse matrix, shape = [n_samples, n_features]
The training input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csc_matrix``.
y : array-like, shape = [n_samples] or [n_samples, n_outputs]
The target values (class labels in classification, real numbers in
regression). In the regression case, use ``dtype=np.float64`` and
``order='C'`` for maximum efficiency.
sample_weight : array-like, shape = [n_samples] or None
Sample weights. If None, then samples are equally weighted. Splits
that would create child nodes with net zero or negative weight are
ignored while searching for a split in each node. In the case of
classification, splits are also ignored if they would result in any
single class carrying a negative weight in either child node.
check_input : boolean, (default=True)
Allow to bypass several input checking.
            Don't use this parameter unless you know what you're doing.
X_idx_sorted : array-like, shape = [n_samples, n_features], optional
            The indexes of the sorted training input samples. If many trees
are grown on the same dataset, this allows the ordering to be
cached between trees. If None, the data will be sorted here.
            Don't use this parameter unless you know what you're doing.
Returns
-------
self : object
Returns self.
"""
random_state = check_random_state(self.random_state)
if check_input:
X = check_array(X, dtype=DTYPE, accept_sparse="csc")
y = check_array(y, ensure_2d=False, dtype=None)
if issparse(X):
X.sort_indices()
if X.indices.dtype != np.intc or X.indptr.dtype != np.intc:
raise ValueError("No support for np.int64 index based "
"sparse matrices")
# Determine output settings
n_samples, self.n_features_ = X.shape
is_classification = isinstance(self, ClassifierMixin)
y = np.atleast_1d(y)
expanded_class_weight = None
if y.ndim == 1:
            # reshape is necessary to preserve data contiguity; indexing with
            # [:, np.newaxis] would not preserve it.
y = np.reshape(y, (-1, 1))
self.n_outputs_ = y.shape[1]
if is_classification:
check_classification_targets(y)
y = np.copy(y)
self.classes_ = []
self.n_classes_ = []
if self.class_weight is not None:
y_original = np.copy(y)
y_encoded = np.zeros(y.shape, dtype=np.int)
for k in range(self.n_outputs_):
classes_k, y_encoded[:, k] = np.unique(y[:, k],
return_inverse=True)
self.classes_.append(classes_k)
self.n_classes_.append(classes_k.shape[0])
y = y_encoded
if self.class_weight is not None:
expanded_class_weight = compute_sample_weight(
self.class_weight, y_original)
else:
self.classes_ = [None] * self.n_outputs_
self.n_classes_ = [1] * self.n_outputs_
self.n_classes_ = np.array(self.n_classes_, dtype=np.intp)
if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous:
y = np.ascontiguousarray(y, dtype=DOUBLE)
# Check parameters
max_depth = ((2 ** 31) - 1 if self.max_depth is None
else self.max_depth)
max_leaf_nodes = (-1 if self.max_leaf_nodes is None
else self.max_leaf_nodes)
if isinstance(self.min_samples_leaf, (numbers.Integral, np.integer)):
min_samples_leaf = self.min_samples_leaf
else: # float
min_samples_leaf = int(ceil(self.min_samples_leaf * n_samples))
if isinstance(self.min_samples_split, (numbers.Integral, np.integer)):
            min_samples_split = self.min_samples_split
|
clouserw/olympia
|
apps/search/forms.py
|
Python
|
bsd-3-clause
| 5,239
| 0.000191
|
from django import forms
from django.conf import settings
from django.forms.util import ErrorDict
import happyforms
from tower import ugettext_lazy as _lazy
import amo
from amo import helpers
from search.utils import floor_version
collection_sort_by = (
('weekly', _lazy(u'Most popular this week')),
('monthly', _lazy(u'Most popular this month')),
('all', _lazy(u'Most popular all time')),
('rating', _lazy(u'Highest Rated')),
('created', _lazy(u'Newest')),
('updated', _lazy(u'Recently Updated')),
('name', _lazy(u'Name')),
)
PER_PAGE = 20
SEARCH_CHOICES = (
('all', _lazy(u'search for add-ons')),
('collections', _lazy(u'search for collections')),
('themes', _lazy(u'search for themes')),
('apps', _lazy(u'search for apps'))
)
class SimpleSearchForm(forms.Form):
"""Powers the search box on every page."""
q = forms.CharField(required=False)
cat = forms.CharField(required=False, widget=forms.HiddenInput)
appver = forms.CharField(required=False, widget=forms.HiddenInput)
platform = forms.CharField(required=False, widget=forms.HiddenInput)
choices = dict(SEARCH_CHOICES)
    def clean_cat(self):
return self.data.get('cat', 'all')
def placeholder(self, txt=None):
if settings.APP_PREVIEW:
return self.choices['apps']
return self.choices.get(txt or self.clean_cat(), self.choices['all'])
class SecondarySearchForm(forms.Form):
q = forms.CharField(widget=forms.HiddenInput, required=False)
cat = forms.CharField(widget=forms.HiddenInput)
    pp = forms.CharField(widget=forms.HiddenInput, required=False)
sort = forms.ChoiceField(label=_lazy(u'Sort By'), required=False,
choices=collection_sort_by, initial='weekly')
page = forms.IntegerField(widget=forms.HiddenInput, required=False)
def clean_pp(self):
try:
return int(self.cleaned_data.get('pp'))
except TypeError:
return PER_PAGE
def clean(self):
d = self.cleaned_data
if not d.get('pp'):
d['pp'] = PER_PAGE
return d
def full_clean(self):
"""
Cleans all of self.data and populates self._errors and
self.cleaned_data.
Does not remove cleaned_data if there are errors.
"""
self._errors = ErrorDict()
if not self.is_bound: # Stop further processing.
return
self.cleaned_data = {}
# If the form is permitted to be empty, and none of the form data
# has changed from the initial data, short circuit any validation.
if self.empty_permitted and not self.has_changed():
return
self._clean_fields()
self._clean_form()
SORT_CHOICES = (
(None, _lazy(u'Relevance')),
('users', _lazy(u'Most Users')),
('rating', _lazy(u'Top Rated')),
('created', _lazy(u'Newest')),
# --
('name', _lazy(u'Name')),
('downloads', _lazy(u'Weekly Downloads')),
#('price', helpers.loc(u'Price')),
('updated', _lazy(u'Recently Updated')),
('hotness', _lazy(u'Up & Coming')),
)
APP_SORT_CHOICES = (
(None, _lazy(u'Relevance')),
('downloads', _lazy(u'Weekly Downloads')),
('rating', _lazy(u'Top Rated')),
('price', helpers.loc(u'Price')),
# --
('name', _lazy(u'Name')),
('created', _lazy(u'Newest')),
)
class ESSearchForm(happyforms.Form):
q = forms.CharField(required=False)
tag = forms.CharField(required=False)
platform = forms.CharField(required=False)
appver = forms.CharField(required=False)
atype = forms.TypedChoiceField(required=False, coerce=int,
choices=amo.ADDON_TYPES.iteritems())
cat = forms.CharField(required=False)
price = forms.CharField(required=False)
sort = forms.CharField(required=False)
def __init__(self, *args, **kw):
self.addon_type = kw.pop('type', None)
super(ESSearchForm, self).__init__(*args, **kw)
self.sort_choices = SORT_CHOICES
def clean_appver(self):
return floor_version(self.cleaned_data.get('appver'))
def clean_sort(self):
sort = self.cleaned_data.get('sort')
return sort if sort in dict(self.sort_choices) else None
def clean_cat(self):
cat = self.cleaned_data.get('cat')
if ',' in cat:
try:
self.cleaned_data['atype'], cat = map(int, cat.split(','))
except ValueError:
return None
else:
try:
return int(cat)
            except ValueError:
                return None
        return cat
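    # Hedged example of clean_cat above: cat='1,12' stores atype=1 and
    # returns category 12, while a non-numeric value returns None.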
def full_clean(self):
"""
Cleans self.data and populates self._errors and self.cleaned_data.
Does not remove cleaned_data if there are errors.
"""
self._errors = ErrorDict()
if not self.is_bound: # Stop further processing.
return
self.cleaned_data = {}
# If the form is permitted to be empty, and none of the form data
# has changed from the initial data, short circuit any validation.
if self.empty_permitted and not self.has_changed():
return
self._clean_fields()
self._clean_form()
|
bigswitch/nova
|
nova/tests/unit/scheduler/test_scheduler.py
|
Python
|
apache-2.0
| 11,503
| 0.000435
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler
"""
import mock
from nova import context
from nova import objects
from nova.scheduler import caching_scheduler
from nova.scheduler import chance
from nova.scheduler import filter_scheduler
from nova.scheduler import host_manager
from nova.scheduler import ironic_host_manager
from nova.scheduler import manager
from nova import servicegroup
from nova import test
from nova.tests.unit import fake_server_actions
from nova.tests.unit.scheduler import fakes
class SchedulerManagerInitTestCase(test.NoDBTestCase):
"""Test case for scheduler manager initiation."""
manager_cls = manager.SchedulerManager
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(host_manager.HostManager, '_init_aggregates')
def test_init_using_default_schedulerdriver(self,
mock_init_agg,
mock_init_inst):
driver = self.manager_cls().driver
self.assertIsInstance(driver, filter_scheduler.FilterScheduler)
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(host_manager.HostManager, '_init_aggregates')
def test_init_using_chance_schedulerdriver(self,
mock_init_agg,
mock_init_inst):
self.flags(scheduler_driver='chance_scheduler')
driver = self.manager_cls().driver
self.assertIsInstance(driver, chance.ChanceScheduler)
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(host_manager.HostManager, '_init_aggregates')
def test_init_using_caching_schedulerdriver(self,
mock_init_agg,
mock_init_inst):
self.flags(scheduler_driver='caching_scheduler')
driver = self.manager_cls().driver
self.assertIsInstance(driver, caching_scheduler.CachingScheduler)
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(host_manager.HostManager, '_init_aggregates')
def test_init_nonexist_schedulerdriver(self,
mock_init_agg,
mock_init_inst):
self.flags(scheduler_driver='nonexist_scheduler')
self.assertRaises(RuntimeError, self.manager_cls)
# NOTE(Yingxin): Loading full class path is deprecated and should be
# removed in the N release.
@mock.patch.object(manager.LOG, 'warning')
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(host_manager.HostManager, '_init_aggregates')
def test_init_using_classpath_to_schedulerdriver(self,
mock_init_agg,
mock_init_inst,
mock_warning):
self.flags(
scheduler_driver=
'nova.scheduler.chance.ChanceScheduler')
driver = self.manager_cls().driver
self.assertIsInstance(driver, chance.ChanceScheduler)
warn_args, kwargs = mock_warning.call_args
self.assertIn("DEPRECATED", warn_args[0])
class SchedulerManagerTestCase(test.NoDBTestCase):
"""Test case for scheduler manager."""
manager_cls = manager.SchedulerManager
driver_cls = fakes.FakeScheduler
driver_plugin_name = 'fake_scheduler'
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(host_manager.HostManager, '_init_aggregates')
def setUp(self, mock_init_agg, mock_init_inst):
super(SchedulerManagerTestCase, self).setUp()
        self.flags(scheduler_driver=self.driver_plugin_name)
with mock.patch.object(host_manager.HostManager, '_init_aggregates'):
self.manager = self.manager_cls()
self.context = context.RequestContext('fake_user', 'fake_project')
self.topic = 'fake_topic'
self.fake_args = (1, 2, 3)
self.fake_kwargs = {'cat': 'meow', 'dog': 'woof'}
fake_server_actions.stub_out_action_events(self.stubs)
def test_1_correct_init(self):
# Correct scheduler driver
manager = self.manager
self.assertIsInstance(manager.driver, self.driver_cls)
def test_select_destination(self):
fake_spec = objects.RequestSpec()
with mock.patch.object(self.manager.driver, 'select_destinations'
) as select_destinations:
self.manager.select_destinations(None, spec_obj=fake_spec)
select_destinations.assert_called_once_with(None, fake_spec)
# TODO(sbauza): Remove that test once the API v4 is removed
@mock.patch.object(objects.RequestSpec, 'from_primitives')
def test_select_destination_with_old_client(self, from_primitives):
fake_spec = objects.RequestSpec()
from_primitives.return_value = fake_spec
with mock.patch.object(self.manager.driver, 'select_destinations'
) as select_destinations:
self.manager.select_destinations(None, request_spec='fake_spec',
filter_properties='fake_props')
select_destinations.assert_called_once_with(None, fake_spec)
def test_update_aggregates(self):
with mock.patch.object(self.manager.driver.host_manager,
'update_aggregates'
) as update_aggregates:
self.manager.update_aggregates(None, aggregates='agg')
update_aggregates.assert_called_once_with('agg')
def test_delete_aggregate(self):
with mock.patch.object(self.manager.driver.host_manager,
'delete_aggregate'
) as delete_aggregate:
self.manager.delete_aggregate(None, aggregate='agg')
delete_aggregate.assert_called_once_with('agg')
def test_update_instance_info(self):
with mock.patch.object(self.manager.driver.host_manager,
'update_instance_info') as mock_update:
self.manager.update_instance_info(mock.sentinel.context,
mock.sentinel.host_name,
mock.sentinel.instance_info)
mock_update.assert_called_once_with(mock.sentinel.context,
mock.sentinel.host_name,
mock.sentinel.instance_info)
def test_delete_instance_info(self):
with mock.patch.object(self.manager.driver.host_manager,
'delete_instance_info') as mock_delete:
self.manager.delete_instance_info(mock.sentinel.context,
mock.sentinel.host_name,
mock.sentinel.instance_uuid)
mock_delete.assert_called_once_with(mock.sentinel.context,
mock.sentinel.host_name,
mock.sentinel.instance_uuid)
def test_sync_instance_info(self):
with mock.patch.object(self.manager.driver.host_manager,
'sync_instance_info') as mock_sync:
|
danijar/invoicepad
|
invoicepad/invoicepad/urls.py
|
Python
|
gpl-3.0
| 1,168
| 0.011986
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.views import serve
from django.views.decorators.cache import never_cache
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'apps.user.views.index', name='index'),
url(r'^login/$', 'apps.user.views.login', name='login'),
url(r'^logout/$', 'apps.user.views.logout', name='logout'),
    url(r'^user/$', 'apps.user.views.user', name='user'),
url(r'^customer/((?P<id>[0-9]+)/)?$', 'apps.customer.views.customer', name='customer'),
url(r'^project/((?P<id>[0-9]+)/((?P<foreign>[a-z]+)/)?)?$', 'apps.project.views.project', name='project'),
url(r'^time/((?P<id>[0-9]+)/)?$', 'apps.project.views.time', name='time'),
url(r'^invoice/((?P<id>[0-9]+)/)?$', 'apps.invoice.views.invoice', name='invoice'),
)
# Serve all media files publically
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Skip cache for development
if settings.DEBUG:
urlpatterns += patterns('', url(r'^static/(?P<path>.*)$', never_cache(serve)))
|
bhylak/trello_things3_sync
|
tasks/sync_task.py
|
Python
|
mit
| 2,101
| 0.002856
|
from task import Task
class SyncTask(Task):
def __init__(self, *remotes):
'''Init this task with all of the remote tasks'''
super(SyncTask, self).__init__()
self.remote_tasks = []
for arg in remotes:
print arg
self.remote_tasks.append(arg)
for task in self.remote_tasks:
print task.name
self.update()
def update_from(self, task):
'''Use attributes: Takes all of the attributes from a different task and assigns them to self.'''
self.description = task.description
self.name = task.name
self.lastModifiedDate = task.lastModifiedDate
# todo: fill out rest of attributes
def sync_changes(self):
for remote in self.remote_tasks:
remote.set_attributes(self)
remote.push_changes()
def update(self, fetch_latest=False):
# todo: updating each task from remote (trello) may be more costly then together as a list
if fetch_latest:
for remote in self.remote_tasks:
remote.update()
latest_update = self
for remote_task in self.remote_tasks:
if remote_task.modified_later_than(latest_update):
latest_update = remote_task
if latest_update is not self:
print "New modification from {source}".format(source=type(latest_update._remote_source).__name__)
self.update_from(latest_update)
self.sync_changes()
def reset_updated(self):
'''resets the updated flag on all of the remote tasks'''
        for task in self.remote_tasks:
            task.updated = False
def has_remote(self, remote_task):
# todo should compare by uid of card/task/whatever
return remote_task in self.remote_tasks
def has_stale_remotes(self):
'''returns a bool indicating if any of the remotes are stale since the last update'''
for task in self.remote_tasks:
if not task.updated:
return True
self.reset_updated()
return False
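# Minimal usage sketch (things_task and trello_task are hypothetical Task
# subclasses): SyncTask adopts whichever remote changed last and pushes
# that state back to the others.
#
#     task = SyncTask(things_task, trello_task)
#     task.update(fetch_latest=True)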
|
Splawik/pytigon
|
pytigon/appdata/plugins/standard/autocomplete/__init__.py
|
Python
|
lgpl-3.0
| 4,308
| 0.001857
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#Pytigon - wxpython and django application framework
#author: "Slawomir Cholaj (slawomir.cholaj@gmail.com)"
#copyright: "Copyright (C) ????/2012 Slawomir Cholaj"
#license: "LGPL 3.0"
#version: "0.1a"
import wx
from autocomplete import TextCtrlAutoComplete
from pytigon_lib.schtools import schjson
from pytigon_gui.guictrl.ctrl import SchBaseCtrl
import pytigon_gui.guictrl.ctrl
class DbDict(object):
def __init__(self, href):
self.href = href
self.tab = ['']
def filter(self, parent, f):
http = wx.GetApp().get_http(parent)
response = http.get(self, str(self.href), {'query': f.encode('utf-8')})
s = response.str()
try:
self.tab = schjson.loads(s)
except:
self.tab = []
self.tab2 = []
for pos in self.tab:
self.tab2.append((pos['value'], ))
self.tab = self.tab2
def __iter__(self):
for x in self.tab:
yield x
def __getitem__(self, id):
if id < len(self.tab):
return self.tab[id]
else:
return None
def __len__(self):
return len(self.tab)
def __contains__(self, x):
if x in self.tab:
return True
else:
return False
class Autocomplete(TextCtrlAutoComplete, SchBaseCtrl):
def __init__(self, parent, **kwds):
SchBaseCtrl.__init__(self, parent, kwds)
self.dynamic_choices = DbDict(self.src)
if 'style' in kwds:
style = kwds['style']
style = style | wx.TE_MULTILINE | wx.TE_PROCESS_ENTER
kwds['style'] = style
else:
kwds['style'] = wx.TE_MULTILINE | wx.TE_PROCESS_ENTER
kwds['choices'] = self.dynamic_choices
TextCtrlAutoComplete.__init__(self, parent, colNames=('label', 'value'), **kwds)
self.SetEntryCallback(self.set_dynamic_choices)
self.SetMatchFunction(self.match)
if 'data' in self.param:
self.SetValue(self.param['data'].encode('utf-8'))
def SetValue(self, value):
if value.__class__ == str:
return TextCtrlAutoComplete.SetValue(self, value.decode('utf-8'))
else:
return TextCtrlAutoComplete.SetValue(self, value)
def on_key_down(self, event):
kc = event.GetKeyCode()
if kc in (wx.WXK_LEFT, wx.WXK_RIGHT):
event.Skip()
else:
super(Autocomplete, self).onKeyDown(event)
def match(self, text, choice):
t = text.lower()
c = choice.lower()
if c.startswith(t):
return True
if c.startswith(r'http://'):
c = c[7:]
if c.startswith(t):
return True
if c.startswith('www.'):
c = c[4:]
return c.startswith(t)
def set_dynamic_choices(self):
ctrl = self
text = ctrl.GetValue().lower()
self.dynamic_choices.filter(self.GetParent(), text)
if len(self.dynamic_choices) > 1:
ctrl.SetMultipleChoices(self.dynamic_choices)
else:
if len(self.dynamic_choices) > 0:
ctrl.SetChoices(self.dynamic_choices[0])
def _set_value_from_selected(self):
x = TextCtrlAutoComplete._setValueFromSelected(self)
return x
def _set_value_from_selected2(self):
sel = self.dropdownlistbox.GetFirstSelected()
if sel > -1:
if self._colFetch != -1:
col = self._colFetch
else:
col = self._colSearch
itemtext = self.dropdownlistbox.GetItem(sel, col).GetText()
self.SetValue(itemtext)
def init_plugin(app, mainframe, desktop, mgr, menubar, toolbar, accel):
pytigon_gui.guictrl.ctrl.AUTOCOMPLETE = Autocomplete
|
paulovn/artifact-manager
|
test/__init__.py
|
Python
|
gpl-2.0
| 50
| 0.04
|
import imp

am = imp.load_source( 'am', 'artifact-manager' )
|
hideoussquid/aureus-12-bitcore
|
qa/rpc-tests/getblocktemplate_proposals.py
|
Python
|
mit
| 6,330
| 0.005055
|
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Aureus Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import AureusTestFramework
from test_framework.util import *
from binascii import a2b_hex, b2a_hex
from hashlib import sha256
from struct import pack
def check_array_result(object_array, to_match, expected):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
"""
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0:
raise AssertionError("No objects matched %s"%(str(to_match)))
def b2x(b):
return b2a_hex(b).decode('ascii')
# NOTE: This does not work for signed numbers (set the high bit) or zero (use b'\0')
def encodeUNum(n):
s = bytearray(b'\1')
while n > 127:
s[0] += 1
s.append(n % 256)
n //= 256
s.append(n)
return bytes(s)
def varlenEncode(n):
if n < 0xfd:
return pack('<B', n)
if n <= 0xffff:
return b'\xfd' + pack('<H', n)
if n <= 0xffffffff:
return b'\xfe' + pack('<L', n)
return b'\xff' + pack('<Q', n)
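# varlenEncode implements Bitcoin's CompactSize encoding, e.g.:
#   varlenEncode(0xfc)    == b'\xfc'
#   varlenEncode(0xfd)    == b'\xfd\xfd\x00'
#   varlenEncode(0x10000) == b'\xfe\x00\x00\x01\x00'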
def dblsha(b):
return sha256(sha256(b).digest()).digest()
def genmrklroot(leaflist):
cur = leaflist
while len(cur) > 1:
n = []
if len(cur) & 1:
cur.append(cur[-1])
for i in range(0, len(cur), 2):
n.append(dblsha(cur[i] + cur[i+1]))
cur = n
return cur[0]
def template_to_bytes(tmpl, txlist):
blkver = pack('<L', tmpl['version'])
mrklroot = genmrklroot(list(dblsha(a) for a in txlist))
timestamp = pack('<L', tmpl['curtime'])
nonce = b'\0\0\0\0'
blk = blkver + a2b_hex(tmpl['previousblockhash'])[::-1] + mrklroot + timestamp + a2b_hex(tmpl['bits'])[::-1] + nonce
blk += varlenEncode(len(txlist))
for tx in txlist:
blk += tx
return blk
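# The serialized block is the 80-byte header (version 4B, prev_hash 32B
# little-endian, merkle_root 32B, time 4B, bits 4B little-endian, nonce 4B)
# followed by a CompactSize tx count and the raw transactions.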
def template_to_hex(tmpl, txlist):
return b2x(template_to_bytes(tmpl, txlist))
def assert_template(node, tmpl, txlist, expect):
rsp = node.getblocktemplate({'data':template_to_hex(tmpl, txlist),'mode':'proposal'})
if rsp != expect:
raise AssertionError('unexpected: %s' % (rsp,))
class GetBlockTemplateProposalTest(AureusTestFramework):
'''
Test block proposals with getblocktemplate.
'''
def run_test(self):
node = self.nodes[0]
node.generate(1) # Mine a block to leave initial block download
tmpl = node.getblocktemplate()
if 'coinbasetxn' not in tmpl:
rawcoinbase = encodeUNum(tmpl['height'])
rawcoinbase += b'\x01-'
hexcoinbase = b2x(rawcoinbase)
            hexoutval = b2x(pack('<Q', tmpl['coinbasevalue']))
tmpl['coinbasetxn'] = {'data': '01000000' + '01' + '0000000000000000000000000000000000000000000000000000000000000000ffffffff' + ('%02x' % (len(rawcoinbase),)) + hexcoinbase + 'fffffffe' + '01' + hexoutval + '00' + '00000000'}
txlist = list(bytearray(a2b_hex(a['data'])) for a in (tmpl['coinbasetxn'],) + tuple(tmpl['transactions']))
# Test 0: Capability advertised
assert('proposal' in tmpl['capabilities'])
# NOTE: This test currently FAILS (regtest mode doesn't enforce block height in coinbase)
## Test 1: Bad height in coinbase
#txlist[0][4+1+36+1+1] += 1
#assert_template(node, tmpl, txlist, 'FIXME')
#txlist[0][4+1+36+1+1] -= 1
# Test 2: Bad input hash for gen tx
txlist[0][4+1] += 1
assert_template(node, tmpl, txlist, 'bad-cb-missing')
txlist[0][4+1] -= 1
# Test 3: Truncated final tx
lastbyte = txlist[-1].pop()
try:
assert_template(node, tmpl, txlist, 'n/a')
except JSONRPCException:
pass # Expected
txlist[-1].append(lastbyte)
# Test 4: Add an invalid tx to the end (duplicate of gen tx)
txlist.append(txlist[0])
assert_template(node, tmpl, txlist, 'bad-txns-duplicate')
txlist.pop()
# Test 5: Add an invalid tx to the end (non-duplicate)
txlist.append(bytearray(txlist[0]))
txlist[-1][4+1] = b'\xff'
assert_template(node, tmpl, txlist, 'bad-txns-inputs-missingorspent')
txlist.pop()
# Test 6: Future tx lock time
txlist[0][-4:] = b'\xff\xff\xff\xff'
assert_template(node, tmpl, txlist, 'bad-txns-nonfinal')
txlist[0][-4:] = b'\0\0\0\0'
# Test 7: Bad tx count
txlist.append(b'')
try:
assert_template(node, tmpl, txlist, 'n/a')
except JSONRPCException:
pass # Expected
txlist.pop()
# Test 8: Bad bits
realbits = tmpl['bits']
tmpl['bits'] = '1c0000ff' # impossible in the real world
assert_template(node, tmpl, txlist, 'bad-diffbits')
tmpl['bits'] = realbits
# Test 9: Bad merkle root
rawtmpl = template_to_bytes(tmpl, txlist)
rawtmpl[4+32] = (rawtmpl[4+32] + 1) % 0x100
rsp = node.getblocktemplate({'data':b2x(rawtmpl),'mode':'proposal'})
if rsp != 'bad-txnmrklroot':
raise AssertionError('unexpected: %s' % (rsp,))
# Test 10: Bad timestamps
realtime = tmpl['curtime']
tmpl['curtime'] = 0x7fffffff
assert_template(node, tmpl, txlist, 'time-too-new')
tmpl['curtime'] = 0
assert_template(node, tmpl, txlist, 'time-too-old')
tmpl['curtime'] = realtime
# Test 11: Valid block
assert_template(node, tmpl, txlist, None)
# Test 12: Orphan block
tmpl['previousblockhash'] = 'ff00' * 16
assert_template(node, tmpl, txlist, 'inconclusive-not-best-prevblk')
if __name__ == '__main__':
GetBlockTemplateProposalTest().main()
|
jeanpm/pof
|
methods/hc.py
|
Python
|
gpl-2.0
| 234
| 0.008547
|
# -*- coding: utf-8 -*-
|
"""
Created on Wed Jun 24 12:05:24 2015
@author: jean
"""
def hill_climbing(neighborhood, x):
y = neighborhood.randomNeighbor(x)
|
if y is not None and y.isBetterThan(x):
return y
return x
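# Usage sketch; `neighborhood` and `x0` stand in for the caller's own
# objects, which must expose randomNeighbor(x) and isBetterThan(other):
#
#     x = x0
#     for _ in range(1000):
#         x = hill_climbing(neighborhood, x)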
|
Sorsly/subtle
|
google-cloud-sdk/lib/surface/spanner/databases/create.py
|
Python
|
mit
| 2,377
| 0.002945
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for spanner databases cr
|
eate."""
from googlecloudsdk.api_lib.spanner import database_operations
from googlecloudsdk.api_lib.spanner import databases
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.spanner import flags
class Create(base.CreateCommand):
"""Cloud Spanner databases create command."""
@staticmethod
def Args(parser):
"""Args is called by calliope to gather arguments for this command.
Please add arguments in alphabetical order except for no- or a clear-
pair for that argument which can follow the argument itself.
Args:
parser: An argparse parser that you can use to add arguments that go
on the command line after this command. Positional arguments are
allowed.
"""
flags.Instance(positional=False).AddToParser(parser)
flags.Database().AddToParser(parser)
flags.Ddl(help_text='Semi-colon separated DDL (data definition language) '
'statements to run inside the '
'newly created database. If there is an error in any statement, '
'the database is not created. Full DDL specification is at '
'https://cloud.google.com/spanner/docs/data-definition-language'
).AddToParser(parser)
base.ASYNC_FLAG.AddToParser(parser)
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
Returns:
Some value that we want to have printed later.
"""
op = databases.Create(
args.instance, args.database, flags.FixDdl(args.ddl or []))
if args.async:
return op
return database_operations.Await(op, 'Creating database')
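# Invoked from the CLI roughly as follows (flag names mirror the Args
# declarations above; the DDL value is illustrative):
#
#   gcloud spanner databases create example-db --instance=example-instance \
#       --ddl='CREATE TABLE mytable (id INT64) PRIMARY KEY (id)' [--async]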
|
microcom/odoo-product-configurator
|
product_configurator_use_default_pricelist/models/product.py
|
Python
|
agpl-3.0
| 2,350
| 0.001277
|
# -*- coding: utf-8 -*-
from odoo.exceptions import ValidationError
from odoo import models, api, _
class ProductProduct(models.Model):
_inherit = 'product.product'
_rec_name = 'config_name'
"""
Copy the function from product_configurator to show price using price list.
To Fix :
- Extra price For Attribute value
- Extra price For Custom value.
"""
@api.multi
def _compute_product_price_extra(self):
"""Compute price of configurable products as sum
of products related to attribute values picked"""
products = self.filtered(lambda x: not x.config_ok)
pricelist = self.env.user.partner_id.property_product_pricelist
configurable_products = self - products
if products:
prices = super(ProductProduct, self)._compute_product_price_extra()
conversions = self._get_conversions_dict()
for product in configurable_products:
lst_price = product.product_tmpl_id.lst_price
value_ids = product.attribute_value_ids.ids
# TODO: Merge custom values from products with cfg session
# and use same method to retrieve parsed custom val dict
custom_vals = {}
for val in product.value_custom_ids:
custom_type = val.attribute_id.custom_type
if custom_type in conversions:
try:
custom_vals[val.attribute_id.id] = conversions[
custom_type](
val.value)
except:
raise ValidationError(
_("Could not convert custom value '%s' to '%s' on "
"product
|
variant: '%s'" % (val.value,
custom_type,
product.display_name))
)
else:
custom_vals[val.attribute_id.id] = val.value
#
# prices = product.product_tmpl_id.get_cfg_price(
# value_ids, custom_vals)
product_price = pricelist.get_product_price(product, 1, 1)
# product.price_extra = prices['total'] - prices['taxes'] - lst_price
product.price_extra = product_price - lst_price
|
kidaa/entropy
|
lib/entropy/db/__init__.py
|
Python
|
gpl-2.0
| 1,423
| 0
|
# -*- coding: utf-8 -*-
"""
@author: Fabio Erculiani <lxnay@sabayon.org>
@contact: lxnay@sabayon.org
@copyright: Fabio Erculiani
@license: GPL-2
B{Entropy Framework repository database module}.
Entropy repositories (server and client) are implemented as relational
databases. Currently, EntropyRepository class is the object that wraps
sqlite3 database queries and repository logic: there are no more
abstractions between the two because there is only
|
one implementation
available at this time. In future, entropy.db will feature more backends
such as MySQL embedded, SparQL, remote repositories support via TCP socket,
etc. This will require a new layer between the repository interface now
offered by EntropyRepository and the underlying data retrieval logic.
Every repository interface available inherits from EntropyRepository
class and has to reimplement its own Schema subclass and its get_init
method (see EntropyRepository documentation for more information).
I{EntropyRepository} is the sqlite3 implementation of the repository
interface, as written above.
"""
from entropy.db.sqlite import EntropySQLiteRepository as EntropyRepository
from entropy.db.mysql import EntropyMySQLRepository
from entropy.db.cache import EntropyRepositoryCacher
__all__ = ["EntropyRepository", "EntropyMySQLRepository",
"EntropyRepositoryCacher"]
|
mtth/kit
|
kit/ext/api.py
|
Python
|
mit
| 15,315
| 0.008554
|
#!/usr/bin/env python
"""API Extension (requires the ORM extension).
This extension provides a base class to create API views.
Setup is as follows:
.. code:: python
from kit import Flask
from kit.ext import API
app = Flask(__name__)
api = API(app)
View = api.View # the base API view
Views can then be created for models as follows:
.. code:: python
# Cat is a subclass of kit.ext.orm.Base
class CatView(View):
__model__ = Cat
This view will create the following hooks:
* ``/cats``
* ``/cats/<id>``
Another slighly more complex example:
.. code:: python
# House is a subclass of kit.ext.orm.Base
class HouseView(View):
__model__ = House
methods = ['GET', 'POST']
subviews = ['cats']
# we register the api views
api.register(app)
This view will create the following hooks:
* ``/houses``
* ``/houses/<id>``
* ``/houses/<id>/cats``
* ``/houses/<id>/cats/<position>``
These are only two simple ways to add a view. Please refer to the documentation
for :class:`kit.ext.api.BaseView` for the list of all available options.
"""
from flask import Blueprint, jsonify, request
from sqlalchemy.orm import class_mapper, Query
from time import time
from werkzeug.exceptions import HTTPException
from .orm import Model
from ..util import make_view, query_to_models, View as _View, _ViewMeta
class APIError(HTTPException):
"""Thrown when an API call is invalid.
The following error codes can occur:
* ``400 Bad Request`` if the request is badly formulated (wrong query
parameters, invalid form data, etc.)
* ``403 Forbidden`` if the request is not authorized by the server
* ``404 Not Found`` if the request refers to a non-existent resource
"""
def __init__(self, code, content):
self.code = code
self.content = content
super(APIError, self).__init__(content)
def __repr__(self):
return '<APIError %r: %r>' % (self.message, self.content)
class API(object):
"""The main API object.
:param project: the project against which the extension will be registered
:type project: kit.project.Project
:param url_prefix: the blueprint URL prefix
:type url_prefix: str
:param index_view: whether or not to create a splash page for the api
:type index_view: bool
:param parser_options: dictionary of options to create the default request
:class:`kit.ext.api.Parser`
:type parser_options: dict
"""
def __init__(self, flask_app, url_prefix='api', parser_options=None):
parser_options = parser_options or {}
self.url_prefix = url_prefix
self.blueprint = Blueprint(
url_prefix,
'%s.%s' % (flask_app.name, url_prefix),
url_prefix='/%s' % url_prefix,
)
        self.View = make_view(
            self.blueprint,
            view_class=View,
            parser=Parser(**parser_options)
        )
def register(self, flask_app, index_view=True):
if index_view:
@self.blueprint.route('/')
def index():
return jsonify({
'available_endpoints': sorted(
'%s (%s)' % (r.rule, ', '.join(str(meth) for meth in r.methods))
for r in flask_app.url_map.iter_rules()
if r.endpoint.startswith('%s.' % self.url_prefix)
)
})
flask_app.register_blueprint(self.blueprint)
class _ApiViewMeta(_ViewMeta):
"""To register classes with the API on definition.
Automatically creates the ``endpoint``, ``base_url`` and ``rules`` for the
view from the ``__model__`` attribute.
Each route is then registered on the bound application (the current API
blueprint here).
"""
def __new__(cls, name, bases, dct):
model = dct.get('__model__', None)
if model is not None:
if not issubclass(model, Model):
raise ValueError('Api views can only be used with Orm models.')
dct.setdefault('endpoint', model.__tablename__)
base_url = dct.setdefault('base_url', model.__tablename__)
collection_route = '/%s/' % (base_url, )
model_route = '/%s/%s' % (
base_url,
'/'.join('<%s>' % k.name for k in class_mapper(model).primary_key)
)
dct['rules'] = {
collection_route: ['GET', 'POST'],
model_route: ['GET', 'PUT', 'DELETE'],
}
return super(_ApiViewMeta, cls).__new__(cls, name, bases, dct)
class View(_View):
"""Base API view.
To customize, override the ``get``, ``post``, etc. methods.
"""
__metaclass__ = _ApiViewMeta
#: orm.Model class
__model__ = None
#: Base URL (will default to the model's tablename).
base_url = None
#: Allowed methods.
methods = frozenset(['GET'])
#: Request parser.
parser = None
#: Which relationship endpoints to create (these allow GET requests).
#: Can be ``True`` (all relationships) or a list of relationship names.
#: Only relationships with ``lazy`` set to ``'dynamic'``, ``'select'`` or
#: ``True`` can have subroutes. All eagerly loaded relationships are simply
#: available directly on the model.
subviews = []
@classmethod
def register_view(cls):
"""Create the URL routes for the view.
Standard :class:`kit.util.View` implementation plus subview support.
"""
super(View, cls).register_view()
if cls.subviews:
model = cls.__model__
all_keys = set(
model._get_relationships(
lazy=['dynamic', True, 'select'],
uselist=True
).keys() +
model._get_association_proxies().keys()
)
if cls.subviews == True:
keys = all_keys
else:
keys = set(cls.subviews)
if keys - all_keys:
raise ValueError('%s invalid for subviews' % (keys - all_keys, ))
keys = all_keys & keys
for key in keys:
collection_route = '/%s/%s/%s/' % (
cls.base_url,
'/'.join(
'<%s>' % k.name for k in class_mapper(model).primary_key
),
key,
)
model_route = '/%s/%s/%s/<position>' % (
cls.base_url,
'/'.join(
'<%s>' % k.name for k in class_mapper(model).primary_key
),
key
)
make_view(
cls.__app__,
view_class=_RelationshipView,
view_name='%s_%s' % (cls.endpoint, key),
__model__=model,
__assoc_key__=key,
parser=cls.parser,
endpoint='%s_%s' % (cls.endpoint, key),
methods=['GET', ],
rules={
collection_route: ['GET', ],
model_route: ['GET', ],
},
)
def get(self, **kwargs):
"""GET request handler."""
if kwargs:
model = self.__model__.retrieve(from_key=True, **kwargs)
if not model:
raise APIError(404, 'Not found')
return self.parser.jsonify(model)
else:
return self.parser.jsonify(self.__model__.q)
def post(self):
"""POST request handler."""
        if not self.validate(request.json):
raise APIError(400, 'Invalid POST parameters')
model = self.__model__(**request.json)
model.flush()
return self.parser.jsonify(model)
def put(self, **kwargs):
"""PUT request handler."""
model = self.__model__.retrieve(from_key=True, **kwargs)
if not model:
raise APIError(404, 'Not found')
        if not self.validate(request.json, model):
raise APIError(400, 'Invalid PUT parameters')
for k, v in request.json.items():
setattr(model, k, v)
return self.parser.jsonify(model)
def delete(self, **kwargs):
"""DELETE request handler."""
model = self.__model__.retrieve(from_key=True, **kwargs)
if not model:
raise APIError(404, 'Not found')
model.delete()
return self.parser.jsonify(model)
def validate(self, json, model=None):
"""Validation method.
:param json: a dictionary of attributes
:type json: dict
:param model: ``None`` if it is POST request, and the model instance to be
modified if it is a PUT request.
:type model: None or kit.ext.orm.BaseModel
:rtype: bool
This method is called on each POST and PUT request. Override it to
implement your own validation logic: return ``True`` when the input is
valid and ``False`` otherwise. Default imp
|
aniruddha-adhikary/bookit
|
bookit/bookings/migrations/0002_ticket.py
|
Python
|
mit
| 794
| 0.003778
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-08-16 17:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('bookings', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Ticket',
fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.UUIDField()),
('qrcode', models.ImageField(blank=True, null=True, upload_to='qrcode')),
('booking', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bookings.Booking')),
],
),
]
|
ppy/angle
|
scripts/export_targets.py
|
Python
|
bsd-3-clause
| 10,578
| 0.003781
|
#! /usr/bin/env python3
assert __name__ == '__main__'
'''
To update ANGLE in Gecko, use Windows with git-bash, and setup depot_tools, python2, and
python3. Because depot_tools expects `python` to be `python2` (shame!), python2 must come
before python3 in your path.
Upstream: https://chromium.googlesource.com/angle/angle
Our repo: https://github.com/mozilla/angle
It has branches like 'firefox-60' which is the branch we use for pulling into
Gecko with this script.
This script leaves a record of the merge-base and cherry-picks that we pull into
Gecko. (gfx/angle/cherries.log)
ANGLE<->Chrome version mappings are here: https://omahaproxy.appspot.com/
An easy choice is to grab Chrome's Beta's ANGLE branch.
## Usage
Prepare your env:
~~~
export PATH="$PATH:/path/to/depot_tools"
~~~
If this is a new repo, don't forget:
~~~
# In the angle repo:
./scripts/bootstrap.py
gclient sync
~~~
Update: (in the angle repo)
~~~
# In the angle repo:
/path/to/gecko/gfx/angle/update-angle.py origin/chromium/XXXX
git push moz # Push the firefox-XX branch to github.com/mozilla/angle
~~~
'''
import json
import os
import pathlib
import re
import shutil
import subprocess
import sys
from typing import * # mypy annotations
REPO_DIR = pathlib.Path.cwd()
GN_ENV = dict(os.environ)
# We need to set DEPOT_TOOLS_WIN_TOOLCHAIN to 0 for non-Googlers, but otherwise
# leave it unset since vs_toolchain.py assumes that the user is a Googler with
# the Visual Studio files in depot_tools if DEPOT_TOOLS_WIN_TOOLCHAIN is not
# explicitly set to 0.
vs_found = False
for directory in os.environ['PATH'].split(os.pathsep):
vs_dir = os.path.join(directory, 'win_toolchain', 'vs_files')
if os.path.exists(vs_dir):
vs_found = True
break
if not vs_found:
GN_ENV['DEPOT_TOOLS_WIN_TOOLCHAIN'] = '0'
if len(sys.argv) < 3:
sys.exit('Usage: export_targets.py OUT_DIR ROOTS...')
(OUT_DIR, *ROOTS) = sys.argv[1:]
for x in ROOTS:
assert x.startswith('//:')
# ------------------------------------------------------------------------------
def run_checked(*args, **kwargs):
print(' ', args, file=sys.stderr)
sys.stderr.flush()
return subprocess.run(args, check=True, **kwargs)
def sortedi(x):
return sorted(x, key=str.lower)
def dag_traverse(root_keys: Sequence[str], pre_recurse_func: Callable[[str], list]):
visited_keys: Set[str] = set()
def recurse(key):
if key in visited_keys:
return
visited_keys.add(key)
t = pre_recurse_func(key)
try:
(next_keys, post_recurse_func) = t
except ValueError:
(next_keys,) = t
post_recurse_func = None
for x in next_keys:
recurse(x)
if post_recurse_func:
post_recurse_func(key)
return
for x in root_keys:
recurse(x)
return
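# Example: post-order traversal of a small DAG (illustrative only).
#
#     graph = {'a': ['b', 'c'], 'b': ['c'], 'c': []}
#     order = []
#     dag_traverse(['a'], lambda k: (graph[k], order.append))
#     # order == ['c', 'b', 'a']; each key is visited exactly once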
# ------------------------------------------------------------------------------
print('Importing graph', file=sys.stderr)
try:
p = run_checked('gn', 'desc', '--format=json', str(OUT_DIR), '*', stdout=subprocess.PIPE,
env=GN_ENV, shell=(True if sys.platform == 'win32' else False))
except subprocess.CalledProcessError:
sys.stderr.buffer.write(b'"gn desc" failed. Is depot_tools in your PATH?\n')
exit(1)
# -
print('\nProcessing graph', file=sys.stderr)
descs = json.loads(p.stdout.decode())
# Ready to traverse
# ------------------------------------------------------------------------------
LIBRARY_TYPES = ('shared_library', 'static_library')
def flattened_target(target_name: str, descs: dict, stop_at_lib: bool =True) -> dict:
flattened = dict(descs[target_name])
EXPECTED_TYPES = LIBRARY_TYPES + ('source_set', 'group', 'action')
def pre(k):
dep = descs[k]
dep_type = dep['type']
deps = dep['deps']
if stop_at_lib and dep_type in LIBRARY_TYPES:
return ((),)
if dep_type == 'copy':
assert not deps, (target_name, dep['deps'])
else:
assert dep_type in EXPECTED_TYPES, (k, dep_type)
for (k,v) in dep.items():
if type(v) in (list, tuple, set):
# This is a workaround for
# https://bugs.chromium.org/p/gn/issues/detail?id=196, where
# the value of "public" can be a string instead of a list.
existing = flattened.get(k, [])
if isinstance(existing, str):
existing = [existing]
flattened[k] = sortedi(set(existing + v))
else:
#flattened.setdefault(k, v)
pass
return (deps,)
dag_traverse(descs[target_name]['deps'], pre)
return flattened
# ------------------------------------------------------------------------------
# Check that includes are valid. (gn's version of this check doesn't seem to work!)
INCLUDE_REGEX = re.compile(b'(?:^|\\n) *# *include +([<"])([^>"]+)[>"]')
assert INCLUDE_REGEX.match(b'#include "foo"')
assert INCLUDE_REGEX.match(b'\n#include "foo"')
# Most of these are ignored because this script does not currently handle
# #includes in #ifdefs properly, so they will erroneously be marked as being
# included, but not part of the source list.
IGNORED_INCLUDES = {
b'absl/container/flat_hash_map.h',
b'compiler/translator/TranslatorESSL.h',
b'compiler/translator/TranslatorGLSL.h',
b'compiler/translator/TranslatorHLSL.h',
b'compiler/translator/TranslatorMetal.h',
b'compiler/translator/TranslatorVulkan.h',
b'contrib/optimizations/slide_hash_neon.h',
b'dirent_on_windows.h',
b'dlopen_fuchsia.h',
b'kernel/image.h',
b'libANGLE/renderer/d3d/d3d11/winrt/NativeWindow11WinRT.h',
b'libANGLE/renderer/d3d/DeviceD3D.h',
b'libANGLE/renderer/d3d/DisplayD3D.h',
b'libANGLE/renderer/d3d/RenderTargetD3D.h',
b'libANGLE/renderer/gl/apple/DisplayApple_api.h',
b'libANGLE/renderer/gl/cgl/DisplayCGL.h',
b'libANGLE/renderer/gl/eagl/DisplayEAGL.h',
b'libANGLE/renderer/gl/egl/android/DisplayAndroid.h',
b'libANGLE/renderer/gl/egl/DisplayEGL.h',
b'libANGLE/renderer/gl/egl/gbm/DisplayGbm.h',
b'libANGLE/renderer/gl/glx/DisplayGLX.h',
b'libANGLE/renderer/gl/wgl/DisplayWGL.h',
b'libANGLE/renderer/metal/DisplayMtl_api.h',
b'libANGLE/renderer/null/DisplayNULL.h',
b'libANGLE/renderer/vulkan/android/AHBFunctions.h',
b'libANGLE/renderer/vulkan/android/DisplayVkAndroid.h',
b'libANGLE/renderer/vulkan/fuchsia/DisplayVkFuchsia.h',
b'libANGLE/renderer/vulkan/ggp/DisplayVkGGP.h',
b'libANGLE/renderer/vulkan/mac/DisplayVkMac.h',
b'libANGLE/renderer/vulkan/win32/DisplayVkWin32.h',
b'libANGLE/renderer/vulkan/xcb/DisplayVkXcb.h',
b'loader_cmake_config.h',
b'optick.h',
b'spirv-tools/libspirv.h',
b'third_party/volk/volk.h',
b'vk_loader_extensions.c',
b'vk_snippets.h',
b'vulkan_android.h',
b'vulkan_beta.h',
b'vulkan_directfb.h',
b'vulkan_fuchsia.h',
b'vulkan_ggp.h',
b'vulkan_ios.h',
b'vulkan_macos.h',
b'vulkan_metal.h',
b'vulkan_vi.h',
b'vulkan_wayland.h',
b'vulkan_win32.h',
b'vulkan_xcb.h',
b'vulkan_xlib.h',
b'vulkan_xlib_xrandr.h',
# rapidjson adds these include stubs into their documentation
# comments. Since the script doesn't skip comments they are
# erroneously marked as valid includes
b'rapidjson/...',
}
IGNORED_INCLUDE_PREFIXES = {
b'android',
b'Carbon',
b'CoreFoundation',
b'CoreServices',
b'IOSurface',
b'mach',
b'mach-o',
b'OpenGL',
b'pci',
b'sys',
b'wrl',
b'X11',
}
IGNORED_DIRECTORIES = {
'//buildtools/third_party/libc++',
'//third_party/abseil-cpp',
'//third_party/SwiftShader',
}
def has_all_includes(target_name: str, descs: dict) -> bool:
for ignored_directory in IGNORED_DIRECTORIES:
if target_name.startswith(ignored_directory):
return True
flat = flattened_target(target_name, descs, stop_at_lib=False)
acceptable_sources = flat.get('sources', []) + flat.get('outputs', [])
acceptable_sources = {x.rspl
|
ddanier/django_price
|
django_price/price.py
|
Python
|
bsd-3-clause
| 8,910
| 0.005387
|
# coding: utf-8
import decimal
from . import settings as price_settings
from .utils import price_amount
from .currency import Currency
from .tax import NO_TAX
class Price(object):
def __init__(self, net, currency=None, tax=None, gross=None):
if currency is None:
currency = price_settings.DEFAULT_CURRENCY
if not isinstance(currency, Currency):
currency = Currency(currency)
self.currency = currency
if not isinstance(net, decimal.Decimal):
net = decimal.Decimal(str(net) or 'NaN')
# support tax models
if tax is not None and hasattr(tax, 'get_tax'):
tax = tax.get_tax()
# calculate tax, gross
self._applied_taxes = {}
if not tax is None and not gross is None:
# we need to trust the external calculation here
if not isinstance(gross, decimal.Decimal):
gross = decimal.Decimal(str(gross or '') or 'NaN')
elif not tax is None:
# self.net is still not rounded here, so tax_amount is super-precise ;-)
gross = tax.apply(net)
elif not gross is None:
            raise RuntimeError('cannot specify gross amount without tax')
else:
# no tax applied
gross = net
tax = NO_TAX
self._applied_taxes[tax.unique_id] = (tax, net, gross)
        self._recalculate_overall()
def _recalculate_overall(self):
# we pass net/gross through price_amount as late as possible, to avoid
# removing decimal_places we might need to calculate the right
# gross or tax. self._applied_taxes always stores the raw values without
        # any rounding. This way we do not lose precision on calculation.
net = decimal.Decimal('0')
gross = decimal.Decimal('0')
for tax, tax_net, tax_gross in self._applied_taxes.values():
# we have to round every net/gross on its own, otherwise
# we would risk rounding issues (0.3 + 0.3 = 0.6, rounded
# 0 + 0 = 1)
net += price_amount(tax_net, self.currency)
gross += price_amount(tax_gross, self.currency)
self.net = net
self.gross = gross
def __str__(self):
from django.utils.encoding import smart_str
return smart_str(unicode(self))
def __unicode__(self):
from django.utils.translation import ugettext
return ugettext('%(currency)s %(amount)s') % {
'amount': self.formatted_gross,
'currency': self.formatted_currency,
}
def copy(self):
from copy import copy
result = copy(self)
result._applied_taxes = self._applied_taxes.copy()
return result
def rounded(self):
applied_taxes = {}
for tax, net, gross in self._applied_taxes.values():
applied_taxes[tax.unique_id] = (
tax,
price_amount(net, self.currency),
price_amount(gross, self.currency),
)
return CalculatedPrice(applied_taxes, self.currency)
@property
def precise_net(self):
return sum([t[1] for t in self._applied_taxes.values()])
@property
def precise_gross(self):
return sum([t[2] for t in self._applied_taxes.values()])
@property
def precise_tax(self):
return sum([t[2] - t[1] for t in self._applied_taxes.values()])
@property
def tax(self):
return self.gross - self.net
@property
def applied_tax(self):
if len(self._applied_taxes) != 1:
raise RuntimeError('This Price has multiple taxes, use obj.taxes instead')
return self._applied_taxes.values()[0][0]
@property
def applied_taxes(self):
return [
Price(
net = net,
tax = tax,
gross = gross,
currency = self.currency,
)
for tax, net, gross
in self._applied_taxes.values()
]
@property
def formatted_currency(self):
return self.currency.symbol if self.currency.symbol else self.currency.iso_code
def _format_amount(self, value):
from django.utils.formats import number_format
        # workaround for django not treating decimal_places==0 as implied;
        # prices are always rounded to their decimal_places, see
        # utils.price_amount
# see https://code.djangoproject.com/ticket/13810
return number_format(value, self.currency.decimal_places or 0)
@property
def formatted_net(self):
return self._format_amount(self.net)
@property
def formatted_gross(self):
return self._format_amount(self.gross)
@property
def formatted_tax(self):
return self._format_amount(self.tax)
def __add__(self, other):
# EmptyPrice should work regardless of currency, does not change anything
        if isinstance(other, EmptyPrice):
            return self.copy()
if not isinstance(other, Price):
raise TypeError('cannot add %s' % type(other))
if self.currency != other.currency:
raise TypeError('cannot add different currencies')
applied_taxes = self._applied_taxes.copy()
for tax, net, gross in other._applied_taxes.values():
if tax.unique_id in applied_taxes:
applied_taxes[tax.unique_id] = (
applied_taxes[tax.unique_id][0],
applied_taxes[tax.unique_id][1] + net,
applied_taxes[tax.unique_id][2] + gross,
)
else:
applied_taxes[tax.unique_id] = (tax, net, gross)
# filter out NO_TAX, if it is not relevant
if NO_TAX.unique_id in applied_taxes \
and applied_taxes[NO_TAX.unique_id][1] == 0 \
and applied_taxes[NO_TAX.unique_id][2] == 0:
del applied_taxes[NO_TAX.unique_id]
return CalculatedPrice(applied_taxes, self.currency)
def __neg__(self):
applied_taxes = {}
for tax, net, gross in self._applied_taxes.values():
applied_taxes[tax.unique_id] = (tax, -net, -gross)
return CalculatedPrice(applied_taxes, self.currency)
def __mul__(self, factor):
if not isinstance(factor, (int, long, float, decimal.Decimal)):
raise TypeError("Cannot multiply with %s" % type(factor))
if not isinstance(factor, decimal.Decimal):
factor = decimal.Decimal(str(factor))
if factor.is_nan():
raise TypeError("Factor must be a number (!= 'NaN')")
applied_taxes = {}
for tax, net, gross in self._applied_taxes.values():
calc_net = net * factor
calc_gross = gross * factor
applied_taxes[tax.unique_id] = (tax, calc_net, calc_gross)
return CalculatedPrice(applied_taxes, self.currency)
def __div__(self, factor):
if not isinstance(factor, (int, long, float, decimal.Decimal)):
raise TypeError("Cannot multiply with %s" % type(factor))
if not isinstance(factor, decimal.Decimal):
factor = decimal.Decimal(str(factor))
if factor.is_nan():
raise TypeError("Factor must be a number (!= 'NaN')")
applied_taxes = {}
for tax, net, gross in self._applied_taxes.values():
calc_net = net / factor
calc_gross = gross / factor
applied_taxes[tax.unique_id] = (tax, calc_net, calc_gross)
return CalculatedPrice(applied_taxes, self.currency)
__truediv__ = __div__
# django_ajax hook
def ajax_data(self):
return {
'tax': self.formatted_tax,
'net': self.formatted_net,
'gross': self.formatted_gross,
'currency': self.currency.ajax_data(),
}
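# Usage sketch ('EUR' and vat_19 are illustrative; any tax model exposing
# get_tax()/apply() works):
#
#     price = Price('10.00', currency='EUR', tax=vat_19)
#     order_total = price * 3 + Price('4.90', currency='EUR')
#     order_total.net, order_total.tax, order_total.gross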
class CalculatedPrice(Price):
def __init__(self, applied_taxes, currency=None):
if currency is None:
currency = price_settings.DEFAULT_CURRENCY
if not isinstance(curr
|
persandstrom/home-assistant
|
homeassistant/components/weather/__init__.py
|
Python
|
apache-2.0
| 4,851
| 0
|
"""
Weather component that handles meteorological data for your location.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/weather/
"""
import asyncio
import logging
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.temperature import display_temp as show_temp
from homeassistant.const import PRECISION_WHOLE, PRECISION_TENTHS, TEMP_CELSIUS
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = []
DOMAIN = 'weather'
ENTITY_ID_FORMAT = DOMAIN + '.{}'
ATTR_CONDITION_CLASS = 'condition_class'
ATTR_FORECAST = 'forecast'
ATTR_FORECAST_CONDITION = 'condition'
ATTR_FORECAST_PRECIPITATION = 'precipitation'
ATTR_FORECAST_TEMP = 'temperature'
ATTR_FORECAST_TEMP_LOW = 'templow'
ATTR_FORECAST_TIME = 'datetime'
ATTR_WEATHER_ATTRIBUTION = 'attribution'
ATTR_WEATHER_HUMIDITY = 'humidity'
ATTR_WEATHER_OZONE = 'ozone'
ATTR_WEATHER_PRESSURE = 'pressure'
ATTR_WEATHER_TEMPERATURE = 'temperature'
ATTR_WEATHER_VISIBILITY = 'visibility'
ATTR_WEATHER_WIND_BEARING = 'wind_bearing'
ATTR_WEATHER_WIND_SPEED = 'wind_speed'
@asyncio.coroutine
def async_setup(hass, config):
"""Set up the weather component."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
yield from component.async_setup(config)
return True
class WeatherEntity(Entity):
"""ABC for weather data."""
@property
def temperature(self):
"""Return the platform temperature."""
raise NotImplementedError()
@property
def temperature_unit(self):
"""Return the unit of measurement."""
raise NotImplementedError()
@property
def pressure(self):
"""Return the pressure."""
return None
@property
def humidity(self):
"""Return the humidity."""
raise NotImplementedError()
@property
def wind_speed(self):
"""Return the wind speed."""
return None
@property
def wind_bearing(self):
"""Return the wind bearing."""
return None
@property
def ozone(self):
"""Return the ozone level."""
return None
@property
def attribution(self):
"""Return the attribution."""
return None
@property
def visibility(self):
"""Return the visibility."""
return None
@property
def forecast(self):
"""Return the forecast."""
return None
@property
def precision(self):
"""Return the forecast."""
return PRECISION_TENTHS if self.temperature_unit == TEMP_CELSIUS \
else PRECISION_WHOLE
@property
def state_attributes(self):
"""Return the state attributes."""
data = {
ATTR_WEATHER_TEMPERATURE: show_temp(
self.hass, self.temperature, self.temperature_unit,
self.precision),
}
humidity = self.humidity
if humidity is not None:
data[ATTR_WEATHER_HUMIDITY] = round(humidity)
ozone = self.ozone
if ozone is not None:
data[ATTR_WEATHER_OZONE] = ozone
pressure = self.pressure
if pressure is not None:
data[ATTR_WEATHER_PRESSURE] = pressure
wind_bearing = self.wind_bearing
if wind_bearing is not None:
data[ATTR_WEATHER_WIND_BEARING] = wind_bearing
wind_speed = self.wind_speed
if wind_speed is not None:
data[ATTR_WEATHER_WIND_SPEED] = wind_speed
visibility = self.visibility
if visibility is not None:
data[ATTR_WEATHER_VISIBILITY] = visibility
attribution = self.attribution
if attribution is not None:
data[ATTR_WEATHER_ATTRIBUTION] = attribution
if self.forecast is not None:
forecast = []
for forecast_entry in self.forecast:
forecast_entry = dict(forecast_entry)
forecast_entry[ATTR_FORECAST_TEMP] = show_temp(
self.hass, forecast_entry[ATTR_FORECAST_TEMP],
self.temperature_unit, self.precision)
if ATTR_FORECAST_TEMP_LOW in forecast_entry:
forecast_entry[ATTR_FORECAST_TEMP_LOW] = show_temp(
self.hass, forecast_entry[ATTR_FORECAST_TEMP_LOW],
self.temperature_unit, self.precision)
forecast.append(forecast_entry)
            data[ATTR_FORECAST] = forecast
return data
@property
def state(self):
"""Return the current state."""
return self.condition
@property
def condition(self):
"""Return the current condition."""
raise NotImplementedError()
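# Minimal sketch of a concrete platform entity (hypothetical; a platform
# implements at least the abstract properties):
#
#     class DemoWeather(WeatherEntity):
#         @property
#         def temperature(self):
#             return 21.5
#
#         @property
#         def temperature_unit(self):
#             return TEMP_CELSIUS
#
#         @property
#         def humidity(self):
#             return 40
#
#         @property
#         def condition(self):
#             return 'sunny'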
|
SuperTux/flexlay
|
flexlay/tools/zoom2_tool.py
|
Python
|
gpl-3.0
| 1,822
| 0
|
# Flexlay - A Generic 2D Game Editor
# Copyright (C) 2014 Ingo Ruhnke <grumbel@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from flexlay.gui.editor_map_component import EditorMapComponent
from flexlay.math import Point
from flexlay.tools.tool import Tool
class Zoom2Tool(Tool):
def __init__(self):
super().__init__()
self.active = False
self.click_pos = Point(0, 0)
self.old_zoom = 0.0
def on_mouse_up(self, event):
self.active = False
def on_mouse_down(self, event):
self.active = True
self.click_pos = event.mouse_pos
gc = EditorMapComponent.current.get_gc_state()
self.old_zoom = gc.get_zoom()
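    # on_mouse_move maps vertical drag to zoom: every 20 px multiplies (or
    # divides) the zoom by 1.25, so the mapping is exponential and dragging
    # back restores the original zoom.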
def on_mouse_move(self, event):
if self.active:
gc = EditorMapComponent.current.get_gc_state()
zoom_pos = Point(gc.width / 2, gc.height / 2)
factor = (event.mouse_pos.y - self.click_pos.y) / 20.0
if factor > 0:
gc.set_zoom(self.old_zoom * pow(1.25, factor), zoom_pos)
elif factor < 0:
gc.set_zoom(self.old_zoom / pow(1.25, -factor), zoom_pos)
else:
gc.set_zoom(self.old_zoom, zoom_pos)
# EOF #
|
imZack/sanji
|
sanji/connection/mqtt.py
|
Python
|
mit
| 4,240
| 0
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import print_function
import os
import sys
import uuid
import logging
import simplejson as json
import paho.mqtt.client as mqtt
from time import sleep
try:
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + '/../../')
from sanji.connection.connection import Connection
except ImportError as e:
print(e)
print("Please check the python PATH for import test module.")
exit(1)
_logger = logging.getLogger("sanji.sdk.connection.mqtt")
class Mqtt(Connection):
"""
Mqtt
"""
def __init__(
self,
broker_host=os.getenv('BROKER_PORT_1883_TCP_ADDR', "localhost"),
broker_port=os.getenv('BROKER_PORT_1883_TCP_PORT', 1883),
broker_keepalive=60
):
        # properties
self.tunnels = {
"internel": (uuid.uuid4().hex, None),
"model": (None, None),
"view": (None, None)
}
self.broker_host = broker_host
self.broker_port = broker_port
self.broker_keepalive = broker_keepalive
self.client = mqtt.Client()
self.connect_delay = 3
# methods
self.subscribe = self.client.subscribe
self.unsubscribe = self.client.unsubscribe
self.message_callback_add = self.client.message_callback_add
self.message_callback_remove = self.client.message_callback_remove
self.client.on_log = self.on_log
def on_log(self, mosq, obj, level, string):
pass
def connect(self):
"""
connect
"""
_logger.debug("Start connecting to broker")
while True:
try:
self.client.connect(self.broker_host, self.broker_port,
self.broker_keepalive)
break
except Exception:
_logger.debug(
"Connect failed. wait %s sec" % self.connect_delay)
sleep(self.connect_delay)
self.client.loop_forever()
def disconnect(self):
"""
disconnect
"""
_logger.debug("Disconnect to broker")
self.client.loop_stop()
def set_tunnel(self, tunnel_type, tunnel, callback=None):
"""
set_tunnel(self, tunnel_type, tunnel, callback=None):
"""
orig_tunnel = self.tunnels.get(tunnel_type, (None, None))[0]
if orig_tunnel is not None:
_logger.debug("Unsubscribe: %s", (orig_tunnel,))
self.client.unsubscribe(str(orig_tunnel))
self.tunnels[tunnel_type] = (tunnel, callback)
if callback is not None:
self.message_callback_add(tunnel, callback)
self.client.subscribe(str(tunnel))
_logger.debug("Subscribe: %s", (tunnel,))
def set_tunnels(self, tunnels):
"""
set_tunnels(self, tunnels):
"""
for tunnel_type, (tunnel, callback) in tunnels.iteritems():
if tunnel is None:
continue
self.set_tunnel(tunnel_type, tunnel, callback)
def set_on_connect(self, func):
"""
set_on_connect
"""
self.client.on_connect = func
def set_on_message(self, func):
"""
set_on_message
"""
self.client.on_message = func
def set_on_publish(self, func):
"""
set_on_publish
"""
self.client.on_publish = func
def publish(self, topic="/controller", qos=0, payload=None):
"""
publish(self, topic, payload=None, qos=0, retain=False)
Returns a tuple (result, mid), where result is MQTT_ERR_SUCCESS to
indicate success or MQTT_ERR_NO_CONN if the client is not currently
connected. mid is the message ID for the publish request. The mid
value can be used to track the publish request by checking against the
mid argument in the on_publish() callback if it is defined.
"""
result = self.client.publish(topic,
payload=json.dumps(payload),
qos=qos)
if result[0] == mqtt.MQTT_ERR_NO_CONN:
raise RuntimeError("No connection")
return result[1]
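# Minimal usage sketch (broker address comes from the BROKER_PORT_* env
# vars, defaulting to localhost:1883; connect() blocks in loop_forever,
# so subscriptions are registered from on_connect):
#
#     conn = Mqtt()
#     conn.set_on_connect(
#         lambda client, userdata, flags, rc:
#             conn.set_tunnel("view", "/example/view"))
#     conn.connect()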
|
RomanZWang/osf.io
|
website/addons/github/views.py
|
Python
|
apache-2.0
| 10,973
| 0.001185
|
"""Views for the node settings page."""
# -*- coding: utf-8 -*-
from dateutil.parser import parse as dateparse
import httplib as http
import logging
from flask import request, make_response
from framework.exceptions import HTTPError
from website.addons.base import generic_views
from website.addons.github.api import GitHubClient, ref_to_params
from website.addons.github.exceptions import NotFoundError, GitHubError
from website.addons.github.serializer import GitHubSerializer
from website.addons.github.utils import (
get_refs, check_permissions,
verify_hook_signature, MESSAGES
)
from website.models import NodeLog
from website.project.decorators import (
must_have_addon, must_be_addon_authorizer,
must_have_permission, must_not_be_registration,
must_be_contributor_or_public, must_be_valid_project,
)
from website.util import rubeus
logger = logging.getLogger(__name__)
logging.getLogger('github3').setLevel(logging.WARNING)
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARNING)
SHORT_NAME = 'github'
FULL_NAME = 'GitHub'
############
# Generics #
############
github_account_list = generic_views.account_list(
SHORT_NAME,
GitHubSerializer
)
github_import_auth = generic_views.import_auth(
SHORT_NAME,
GitHubSerializer
)
def _get_folders(node_addon, folder_id):
pass
github_folder_list = generic_views.folder_list(
SHORT_NAME,
FULL_NAME,
_get_folders
)
github_get_config = generic_views.get_config(
SHORT_NAME,
GitHubSerializer
)
github_deauthorize_node = generic_views.deauthorize_node(
SHORT_NAME
)
github_root_folder = generic_views.root_folder(
SHORT_NAME
)
#################
# Special Cased #
#################
@must_not_be_registration
@must_have_addon(SHORT_NAME, 'user')
@must_have_addon(SHORT_NAME, 'node')
@must_be_addon_authorizer(SHORT_NAME)
@must_have_permission('write')
def github_set_config(auth, **kwargs):
node_settings = kwargs.get('node_addon', None)
node = kwargs.get('node', None)
user_settings = kwargs.get('user_addon', None)
try:
if not node:
node = node_settings.owner
if not user_settings:
user_settings = node_settings.user_settings
except AttributeError:
raise HTTPError(http.BAD_REQUEST)
# Parse request
github_user_name = request.json.get('github_user', '')
github_repo_name = request.json.get('github_repo', '')
if not github_user_name or not github_repo_name:
raise HTTPError(http.BAD_REQUEST)
# Verify that repo exists and that user can access
connection = GitHubClient(external_account=node_settings.external_account)
repo = connection.repo(github_user_name, github_repo_name)
if repo is None:
if user_settings:
message = (
'Cannot access repo. Either the repo does not exist '
'or your account does not have permission to view it.'
)
else:
message = (
'Cannot access repo.'
)
return {'message': message}, http.BAD_REQUEST
changed = (
github_user_name != node_settings.user or
github_repo_name != node_settings.repo
)
# Update hooks
if changed:
# Delete existing hook, if any
node_settings.delete_hook()
# Update node settings
node_settings.user = github_user_name
node_settings.repo = github_repo_name
# Log repo select
node.add_log(
action='github_repo_linked',
params={
'project': node.parent_id,
'node': node._id,
'github': {
'user': github_user_name,
'repo': github_repo_name,
}
},
auth=auth,
)
# Add new hook
if node_settings.user and node_settings.repo:
node_settings.add_hook(save=False)
node_settings.save()
return {}
@must_be_contributor_or_public
@must_have_addon('github', 'node')
def github_download_starball(node_addon, **kwargs):
archive = kwargs.get('archive', 'tar')
ref = request.args.get('sha', 'master')
connection = GitHubClient(external_account=node_addon.external_account)
headers, data = connection.starball(
node_addon.user, node_addon.repo, archive, ref
)
resp = make_response(data)
    for key, value in headers.iteritems():
resp.headers[key] = value
return resp
#########
# HGrid #
#########
@must_be_contributor_or_public
@must_have_addon('github', 'node')
def github_root_folder(*args, **kwargs):
    """View function returning the root container for a GitHub repo. In
contrast to other add-ons, this is exposed via the API for GitHub to
accommodate switching between branches and commits.
"""
node_settings = kwargs['node_addon']
auth = kwargs['auth']
data = request.args.to_dict()
return github_hgrid_data(node_settings, auth=auth, **data)
def github_hgrid_data(node_settings, auth, **kwargs):
# Quit if no repo linked
if not node_settings.complete:
return
connection = GitHubClient(external_account=node_settings.external_account)
# Initialize repo here in the event that it is set in the privacy check
# below. This potentially saves an API call in _check_permissions, below.
repo = None
# Quit if privacy mismatch and not contributor
node = node_settings.owner
if node.is_public and not node.is_contributor(auth.user):
try:
repo = connection.repo(node_settings.user, node_settings.repo)
except NotFoundError:
# TODO: Test me @jmcarp
# TODO: Add warning message
logger.error('Could not access GitHub repo')
return None
if repo.private:
return None
try:
branch, sha, branches = get_refs(
node_settings,
branch=kwargs.get('branch'),
sha=kwargs.get('sha'),
connection=connection,
)
except (NotFoundError, GitHubError):
# TODO: Show an alert or change GitHub configuration?
logger.error('GitHub repo not found')
return
if branch is not None:
ref = ref_to_params(branch, sha)
can_edit = check_permissions(
node_settings, auth, connection, branch, sha, repo=repo,
)
else:
ref = None
can_edit = False
name_tpl = '{user}/{repo}'.format(
user=node_settings.user, repo=node_settings.repo
)
permissions = {
'edit': can_edit,
'view': True,
'private': node_settings.is_private
}
urls = {
'upload': node_settings.owner.api_url + 'github/file/' + (ref or ''),
'fetch': node_settings.owner.api_url + 'github/hgrid/' + (ref or ''),
'branch': node_settings.owner.api_url + 'github/hgrid/root/',
'zip': node_settings.owner.api_url + 'github/zipball/' + (ref or ''),
'repo': "https://github.com/{0}/{1}/tree/{2}".format(node_settings.user, node_settings.repo, branch)
}
branch_names = [each.name for each in branches]
if not branch_names:
branch_names = [branch] # if repo un-init-ed then still add default branch to list of branches
return [rubeus.build_addon_root(
node_settings,
name_tpl,
urls=urls,
permissions=permissions,
branches=branch_names,
defaultBranch=branch,
)]
#########
# Repos #
#########
@must_have_addon(SHORT_NAME, 'user')
@must_have_addon(SHORT_NAME, 'node')
@must_be_addon_authorizer(SHORT_NAME)
@must_have_permission('write')
def github_create_repo(**kwargs):
repo_name = request.json.get('name')
if not repo_name:
raise HTTPError(http.BAD_REQUEST)
node_settings = kwargs['node_addon']
connection = GitHubClient(external_account=node_settings.external_account)
try:
repo = connection.create_repo(repo_name, auto_init=True)
except GitHubError:
# TODO: Check status code
raise HTTPError(http.BAD_REQUEST)
return {
'user': repo.
|
lablup/sorna
|
docs/conf.py
|
Python
|
lgpl-3.0
| 6,409
| 0.004681
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Backend.AI Library documentation build configuration file, created by
# sphinx-quickstart on Tue Mar 1 21:26:20 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
on_rtd = os.environ.get('READTHEDOCS') == 'True'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Backend.AI API Documentation'
copyright = '2015-2020, Lablup Inc.'
author = 'Lablup Inc.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'v5.20191215'
# The full version, including alpha/beta/rc tags.
release = '20.03'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'tango'
highlight_language = 'python3'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
numfig = True
intersphinx_mapping = {
'client-py':
('https://client-py.docs.backend.ai/en/latest/', None),
}
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'BackendAIAPIDoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'BackendAIDoc.tex', 'Backend.AI API Documentation',
author, 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'backend.ai', 'Backend.AI API Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Backend.AI', 'Backend.AI API Documentation',
author, 'Backend.AI', 'Backend.AI is a hassle-free backend for AI programming and service.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
AlexeiBuzuma/LocalComputeNetworks
|
sft/client/commands/close.py
|
Python
|
mit
| 1,397
| 0.002147
|
import logging
from sft.common.commands.base import CommandFinished, ProgramFinished, CommandIds, ErrorIds
from sft.common.socket_manager import SocketManager
from .base import ClientCommandBase
from sft.common.utils.packets import (generate_packet, get_error_code)
from sft.common.config import Config
from sft.common.sessions.session_manager import SessionManager, SessionStatus
_socket_manager = SocketManager()
LOG = logging.getLogger(__name__)
_config = Config()
_packet_size = _config.package_size
__all__ = ['CloseCommand']
class CloseCommand(ClientCommandBase):
"""Usage: close"""
@staticmethod
def get_command_id():
return CommandIds.CLOSE_COMMAND_ID
@staticmethod
def get_command_alias():
return 'close'
def _initialize(self, args_line):
super()._initialize(args_line)
self._send_request = True
self._finish = False
session = SessionManager().get_all_not_inactive_sessions()[0]
client_uuid = session.client_uuid
self._request = generate_packet(self.get_command_id(), ErrorIds.SUCCESSFUL, client_uuid)
        session.status = SessionStatus.wait_for_close
def receive_data(self, data):
pass
def generate_data(self):
if self._send_request:
            self._finish = True
self._send_request = False
return self._request
return None
|
boegel/easybuild-easyblocks
|
easybuild/easyblocks/x/xalt.py
|
Python
|
gpl-2.0
| 9,134
| 0.003065
|
##
# Copyright 2020 NVIDIA
#
# This file is triple-licensed under GPLv2 (see below), MIT, and
# BSD three-clause licenses.
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for XALT, implemented as an easyblock
@author: Scott McMillan (NVIDIA)
"""
import os
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.framework.easyconfig import CUSTOM, MANDATORY
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.modules import get_software_root
from easybuild.tools.systemtools import get_shared_lib_ext
class EB_XALT(ConfigureMake):
"""Support for building and installing XALT."""
@staticmethod
def extra_options():
extra_vars = {
'config_py': [None, "XALT site filter file", MANDATORY],
'executable_tracking': [True, "Enable executable tracking", CUSTOM],
'gpu_tracking': [None, "Enable GPU tracking", CUSTOM],
'logging_url': [None, "Logging URL for transmission", CUSTOM],
'mysql': [False, "Build with MySQL support", CUSTOM],
'scalar_sampling': [True, "Enable scalar sampling", CUSTOM],
'static_cxx': [False, "Statically link libstdc++ and libgcc_s", CUSTOM],
'syshost': [None, "System name", MANDATORY],
        'transmission': [None, "Data transmission method", MANDATORY],
'file_prefix': [None, "XALT record files prefix", CUSTOM],
}
return ConfigureMake.extra_options(extra_vars)
def configure_step(self):
"""Custom configuration step for XALT."""
# By default, XALT automatically appends 'xalt/<version>' to the
# prefix, i.e., --prefix=/opt will actually install in
# /opt/xalt/<version>. To precisely control the install prefix and
# not append anything to the prefix, use the configure option
# '--with-siteControlledPrefix=yes'.
# See https://xalt.readthedocs.io/en/latest/050_install_and_test.html
self.cfg.update('configopts', '--with-siteControlledPrefix=yes')
# XALT site filter config file is mandatory
config_py = self.cfg['config_py']
if config_py:
if os.path.exists(config_py):
self.cfg.update('configopts', '--with-config=%s' % config_py)
else:
raise EasyBuildError("Specified XALT configuration file %s does not exist!", config_py)
else:
error_msg = "Location of XALT configuration file must be specified via 'config_py' easyconfig parameter. "
error_msg += "You can edit the easyconfig file, or use 'eb --try-amend=config_py=<path>'. "
error_msg += "See https://xalt.readthedocs.io/en/latest/030_site_filtering.html for more information."
raise EasyBuildError(error_msg)
# XALT system name is mandatory
if self.cfg['syshost']:
self.cfg.update('configopts', '--with-syshostConfig=%s' % self.cfg['syshost'])
else:
error_msg = "The name of the system must be specified via the 'syshost' easyconfig parameter. "
error_msg += "You can edit the easyconfig file, or use 'eb --try-amend=syshost=<string>'. "
error_msg += "See https://xalt.readthedocs.io/en/latest/020_site_configuration.html for more information."
raise EasyBuildError(error_msg)
# Transmission method is mandatory
if self.cfg['transmission']:
self.cfg.update('configopts', '--with-transmission=%s' % self.cfg['transmission'])
else:
error_msg = "The XALT transmission method must be specified via the 'transmission' easyconfig parameter. "
            error_msg += "You can edit the easyconfig file, or use 'eb --try-amend=transmission=<string>'. "
error_msg += "See https://xalt.readthedocs.io/en/latest/020_site_configuration.html for more information."
raise EasyBuildError(error_msg)
# GPU tracking
if self.cfg['gpu_tracking'] is True:
# User enabled
self.cfg.update('configopts', '--with-trackGPU=yes')
elif self.cfg['gpu_tracking'] is None:
# Default value, enable GPU tracking if nvml.h is present
# and the CUDA module is loaded
cuda_root = get_software_root('CUDA')
if cuda_root:
nvml_h = os.path.join(cuda_root, "include", "nvml.h")
if os.path.isfile(nvml_h):
self.cfg.update('configopts', '--with-trackGPU=yes')
self.cfg['gpu_tracking'] = True
else:
# User disabled
self.cfg.update('configopts', '--with-trackGPU=no')
# MySQL
if self.cfg['mysql'] is True:
self.cfg.update('configopts', '--with-MySQL=yes')
else:
self.cfg.update('configopts', '--with-MySQL=no')
# If XALT is built with a more recent compiler than the system
# compiler, then XALT likely will depend on symbol versions not
# available in the system libraries. Link statically as a workaround.
if self.cfg['static_cxx'] is True:
self.cfg.update('configopts', 'LDFLAGS="${LDFLAGS} -static-libstdc++ -static-libgcc"')
# XALT file prefix (optional). The default is $HOME/.xalt.d/ which
# entails that record files are stored separately for each user.
# If this option is specified, XALT will write to the specified
# location for every user. The file prefix can also be modified
# after the install using the XALT_FILE_PREFIX environment variable.
if self.cfg['file_prefix']:
self.cfg.update('configopts', '--with-xaltFilePrefix=%s' % self.cfg['file_prefix'])
# Configure
super(EB_XALT, self).configure_step()
def make_module_extra(self, *args, **kwargs):
txt = super(EB_XALT, self).make_module_extra(*args, **kwargs)
txt += self.module_generator.prepend_paths('LD_PRELOAD', 'lib64/libxalt_init.%s' % get_shared_lib_ext())
txt += self.module_generator.set_environment('XALT_DIR', self.installdir)
txt += self.module_generator.set_environment('XALT_ETC_DIR', '%s' % os.path.join(self.installdir, 'etc'))
txt += self.module_generator.set_environment('XALT_EXECUTABLE_TRACKING',
                                                     ('no', 'yes')[bool(self.cfg['executable_tracking'])])
txt += self.module_generator.set_environment('XALT_GPU_TRACKING',
('no', 'yes')[bool(self.cfg['gpu_tracking'])])
if self.cfg['transmission'].lower() == 'curl' and self.cfg['logging_url']:
            txt += self.module_generator.set_environment('XALT_LOGGING_URL', self.cfg['logging_url'])
txt += self.module_generator.set_environment('XALT_SCALAR_SAMPLING',
('no', 'yes')[bool(self.cfg['scalar_sampling'])])
# In order to track containerized executables, bind mount the XALT
# directory in the Singularity container and preload the XALT library
# https://xalt.readthedocs.io/en/latest/050_install_and_test.html#xalt-module
|
robotblake/pdsm
|
src/pdsm/glue.py
|
Python
|
mit
| 6,425
| 0.001712
|
import copy
from typing import Any # noqa: F401
from typing import Dict # noqa: F401
from typing import Iterable # noqa: F401
from typing import List # noqa: F401
from typing import Optional # noqa: F401
from typing import Text # noqa: F401
import botocore.session
from botocore.exceptions import ClientError
from .models import Column
from .models import Partition
from .models import STORAGE_DESCRIPTOR_TEMPLATE
from .utils import chunks
from .utils import ensure_trailing_slash
from .utils import remove_trailing_slash
TABLE_INPUT_TEMPLATE = {
'Name': '',
'Owner': 'hadoop',
'StorageDescriptor': STORAGE_DESCRIPTOR_TEMPLATE,
'PartitionKeys': [],
'TableType': 'EXTERNAL_TABLE',
'Parameters': {'EXTERNAL': 'TRUE'},
} # type: Dict[Text, Any]
class Table(object):
__slots__ = ['database_name', 'name', 'columns', 'location', 'partition_keys']
def __init__(self, database_name, name, columns, location, partition_keys):
# type: (Text, Text, List[Column], Text, List[Column]) -> None
self.database_name = database_name
self.name = name
self.columns = columns
self.location = location
self.partition_keys = partition_keys
def list_partitions(self):
# type: () -> Iterable[Partition]
client = botocore.session.get_session().create_client('glue')
opts = {'DatabaseName': self.database_name, 'TableName': self.name}
while True:
result = client.get_partitions(**opts)
if 'Partitions' in result:
for pd in result['Partitions']:
yield Partition.from_input(pd)
if 'NextToken' in result:
opts['NextToken'] = result['NextToken']
else:
break
def get_partitions(self):
# type: () -> List[Partition]
client = botocore.session.get_session().create_client('glue')
        opts = {'DatabaseName': self.database_name, 'TableName': self.name}
partitions = [] # type: List[Partition]
while True:
result = client.get_partitions(**opts)
if 'Partitions' in result:
partitions += [Partition.from_input(pd) for pd in result['Partitions']]
if 'NextToken' in result:
opts['NextToken'] = result['NextToken']
else:
break
return partitions
def add_partitions(self, partitions):
# type: (List[Partition]) -> None
client = botocore.session.get_session().create_client('glue')
for partition_chunk in chunks(partitions, 100):
data = {'DatabaseName': self.database_name,
'TableName': self.name,
'PartitionInputList': [partition.to_input() for partition in partition_chunk]}
client.batch_create_partition(**data)
def recreate_partitions(self, partitions):
# type: (List[Partition]) -> None
client = botocore.session.get_session().create_client('glue')
for partition_chunk in chunks(partitions, 25):
data = {'DatabaseName': self.database_name,
'TableName': self.name,
'PartitionsToDelete': [{'Values': partition.values} for partition in partition_chunk]}
client.batch_delete_partition(**data)
data = {'DatabaseName': self.database_name,
'TableName': self.name,
'PartitionInputList': [partition.to_input() for partition in partition_chunk]}
client.batch_create_partition(**data)
@classmethod
def from_input(cls, database_name, data):
# type: (Text, Dict[Text, Any]) -> Table
table = cls(
database_name=database_name,
name=data['Name'],
columns=[Column.from_input(cd) for cd in data['StorageDescriptor']['Columns']],
location=ensure_trailing_slash(data['StorageDescriptor']['Location']),
partition_keys=[Column.from_input(cd) for cd in data['PartitionKeys']],
)
return table
def to_input(self):
# type: () -> Dict[Text, Any]
data = copy.deepcopy(TABLE_INPUT_TEMPLATE)
data['Name'] = self.name
data['StorageDescriptor']['Columns'] = [column.to_input() for column in self.columns]
data['StorageDescriptor']['Location'] = remove_trailing_slash(self.location)
data['PartitionKeys'] = [column.to_input() for column in self.partition_keys]
return data
@classmethod
def get(cls, database_name, name):
# type: (Text, Text) -> Optional[Table]
client = botocore.session.get_session().create_client('glue')
try:
result = client.get_table(DatabaseName=database_name, Name=name)
except ClientError as ex:
if ex.response['Error']['Code'] == 'EntityNotFoundException':
return None
raise ex
return cls.from_input(database_name, result['Table'])
@classmethod
def create(cls, database_name, name, columns, location, partition_keys):
# type: (Text, Text, List[Column], Text, List[Column]) -> Table
client = botocore.session.get_session().create_client('glue')
table = cls(
database_name=database_name,
name=name,
columns=columns,
location=location,
partition_keys=partition_keys,
)
client.create_table(
DatabaseName=database_name,
TableInput=table.to_input(),
)
return table
@classmethod
def update(cls, database_name, name, columns, location, partition_keys):
# type: (Text, Text, List[Column], Text, List[Column]) -> Table
client = botocore.session.get_session().create_client('glue')
table = cls(
database_name=database_name,
name=name,
columns=columns,
location=location,
partition_keys=partition_keys,
)
client.update_table(
DatabaseName=database_name,
TableInput=table.to_input(),
)
return table
@classmethod
def drop(cls, database_name, name):
# type: (Text, Text) -> None
client = botocore.session.get_session().create_client('glue')
client.delete_table(
DatabaseName=database_name,
Name=name,
)
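# --- Illustrative usage sketch (not part of the original module) ---
# Looks up a table and walks its partitions using only the methods defined
# above. The database/table names are hypothetical and the calls require AWS
# credentials with Glue access, hence the main guard.
if __name__ == '__main__':
    table = Table.get('analytics', 'events')  # returns None when the table is missing
    if table is not None:
        for partition in table.list_partitions():
            print(partition.values)  # partition values, e.g. ['2021-01-01']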
|
watchdogpolska/watchdog-kj-kultura
|
watchdog_kj_kultura/staticpages/tests.py
|
Python
|
mit
| 1,351
| 0.003701
|
from django.test import TestCase
from .templatetags.staticpages_tags import render_page_with_shortcode
ESCAPED_TEXT = '<div class="magnifier"><a href="x"><img src="x" class="img-responsive" />' + \
'</a></div><b>XSS</b>'
MULTILINE_TEXT = '<div class="magnifier"><a href="xxx"><img src="xxx" class="img-responsive" />' + \
'</a></div>'
BASIC_TEXT = MULTILINE_TEXT
class TempleTagsTestCase(TestCase):
    def test_render_page_with_shortcode_for_valid(self):
        TEST_CASE = {'[map]xxx[/map]': BASIC_TEXT, # Basic case
                     "[map]\nxxx\n[/map]": MULTILINE_TEXT, # Multiline case
"[map]x[/map]<b>XSS</b>": ESCAPED_TEXT # Tests of escaped text
}
for value, expected in TEST_CASE.items():
self.assertHTMLEqual(render_page_with_shortcode({}, value, safe=False), expected)
def test_render_page_with_shortcode_for_unchanged(self):
TEST_CASE = ['[/map]xxx[map]', # wrong order
'[map]xxx[/map', # no end of end tag
'[map][/map]', # empty tag
'[map]"[/map]' # with quote - XSS protection
]
for item in TEST_CASE:
self.assertHTMLEqual(render_page_with_shortcode({}, item, safe=True), item)
|
srkama/haysolr
|
dataview/testapi/views.py
|
Python
|
apache-2.0
| 251
| 0.015936
|
# Create your views here.
from django.shortcuts import render
from .froms import SampleSearchForm
def index(request):
form = SampleSearchForm(request.GET)
results = form.search()
    return render(request, 'index.html', {'samples': results})
|
azaghal/ansible
|
test/units/utils/test_unsafe_proxy.py
|
Python
|
gpl-3.0
| 3,234
| 0.000618
|
# -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.six import PY3
from ansible.utils.unsafe_proxy import AnsibleUnsafe, AnsibleUnsafeBytes, AnsibleUnsafeText, wrap_var
def test_wrap_var_text():
assert isinstance(wrap_var(u'foo'), AnsibleUnsafeText)
def test_wrap_var_bytes():
assert isinstance(wrap_var(b'foo'), AnsibleUnsafeBytes)
def test_wrap_var_string():
if PY3:
assert isinstance(wrap_var('foo'), AnsibleUnsafeText)
else:
assert isinstance(wrap_var('foo'), AnsibleUnsafeBytes)
def test_wrap_var_dict():
assert isinstance(wrap_var(dict(foo='bar')), dict)
assert not isinstance(wrap_var(dict(foo='bar')), AnsibleUnsafe)
assert isinstance(wrap_var(dict(foo=u'bar'))['foo'], AnsibleUnsafeText)
def test_wrap_var_dict_None():
assert wrap_var(dict(foo=None))['foo'] is None
assert not isinstance(wrap_var(dict(foo=None))['foo'], AnsibleUnsafe)
def test_wrap_var_list():
assert isinstance(wrap_var(['foo']), list)
assert not isinstance(wrap_var(['foo']), AnsibleUnsafe)
assert isinstance(wrap_var([u'foo'])[0], AnsibleUnsafeText)
def test_wrap_var_list_None():
assert wrap_var([None])[0] is None
assert not isinstance(wrap_var([None])[0], AnsibleUnsafe)
def test_wrap_var_set():
assert isinstance(wrap_var(set(['foo'])), set)
assert not isinstance(wrap_var(set(['foo'])), AnsibleUnsafe)
for item in wrap_var(set([u'foo'])):
assert isinstance(item, AnsibleUnsafeText)
def test_wrap_var_set_None():
for item in wrap_var(set([None])):
assert item is None
assert not isinstance(item, AnsibleUnsafe)
def test_wrap_var_tuple():
assert isinstance(wrap_var(('foo',)), tuple)
assert not isinstance(wrap_var(('foo',)), AnsibleUnsafe)
assert isinstance(wrap_var(('foo',))[0], AnsibleUnsafe)
def test_wrap_var_tuple_None():
assert wrap_var((None,))[0] is None
assert not isinstance(wrap_var((None,))[0], AnsibleUnsafe)
def test_wrap_var_None():
assert wrap_var(None) is None
assert not isinstance(wrap_var(None), AnsibleUnsafe)
def test_wrap_var_unsafe_text():
assert isinstance(wrap_var(AnsibleUnsafeText(u'foo')), AnsibleUnsafeText)
def test_wrap_var_unsafe_bytes():
    assert isinstance(wrap_var(AnsibleUnsafeBytes(b'foo')), AnsibleUnsafeBytes)
def test_wrap_var_no_ref():
thing = {
'foo': {
'bar': 'baz'
},
'bar': ['baz', 'qux'],
'baz': ('qux',),
'none': None,
'text': 'text',
}
wrapped_thing = wrap_var(thing)
    assert thing is not wrapped_thing
    assert thing['foo'] is not wrapped_thing['foo']
    assert thing['bar'][0] is not wrapped_thing['bar'][0]
    assert thing['baz'][0] is not wrapped_thing['baz'][0]
    assert thing['none'] is wrapped_thing['none']  # None passes through unwrapped
    assert thing['text'] is not wrapped_thing['text']
def test_AnsibleUnsafeText():
assert isinstance(AnsibleUnsafeText(u'foo'), AnsibleUnsafe)
def test_AnsibleUnsafeBytes():
assert isinstance(AnsibleUnsafeBytes(b'foo'), AnsibleUnsafe)
|
CptLemming/django-socket-server
|
socket_server/client.py
|
Python
|
bsd-3-clause
| 1,594
| 0.000627
|
import json
from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory
class SocketClientProtocol(WebSocketClientProtocol):
def emit(self, event_name, **kwargs):
payload = self._format_outbound_data(event_name, **kwargs)
self.sendMessage(payload)
def _format_outbound_data(self, event, **kwargs):
""" Format outbound message as JSON """
message = {'event': event}
for key in kwargs.keys():
message[key] = kwargs.get(key)
return json.dumps(message).encode('utf8')
def onMessage(self, payload, isBinary):
self.factory.handle_message(self, payload)
class BaseSocketClientFactory(WebSocketClientFactory):
protocol = SocketClientProtocol
def __init__(self, *args, **kwargs):
WebSocketClientFactory.__init__(self, *args, **kwargs)
self.callbacks = {}
self.register_callbacks()
def register_callbacks(self):
pass
def on(self, event_name, callback):
self.callbacks[event_name] = callback
def fire_callback(self, client, event_name, **kwargs):
if event_name in self.callbacks:
self.callbacks[event_name](client, **kwargs)
def handle_message(self, client, message):
payload = self.parse_message(message)
if payload:
event = payload.pop('event')
            self.fire_callback(client, event, **payload)
    def parse_message(self, message):
        payload = json.loads(message)
        if 'event' in payload:
            return payload
        # no recognised event key: signal the caller to ignore this message
        return None
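# --- Illustrative usage sketch (not part of the original module) ---
# Wires a factory subclass to a server with Twisted; the endpoint URL and the
# 'message' event name are hypothetical.
if __name__ == '__main__':
    from autobahn.twisted.websocket import connectWS
    from twisted.internet import reactor

    class EchoClientFactory(BaseSocketClientFactory):
        def register_callbacks(self):
            # called from __init__; route inbound 'message' events to a handler
            self.on('message', self.on_chat_message)

        def on_chat_message(self, client, **payload):
            print(payload)
            client.emit('ack', received=True)

    connectWS(EchoClientFactory('ws://localhost:9000'))
    reactor.run()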
|
ComicIronic/ByondToolsv3
|
tests/ObjectTree.py
|
Python
|
mit
| 2,925
| 0.004444
|
'''
Created on Jan 5, 2014
@author: Rob
'''
import unittest
class ObjectTreeTests(unittest.TestCase):
def setUp(self):
from byond.objtree import ObjectTree
self.tree = ObjectTree()
def test_consumeVariable_basics(self):
test_string = 'var/obj/item/weapon/chainsaw = new'
name, data = self.tree.consumeVariable(test_string, '', 0)
self.assertEqual(name, 'chainsaw')
self.assertEqual(data.type, '/obj/item/weapon')
self.assertEqual(data.value, 'new')
self.assertEqual(data.declaration, True)
self.assertEqual(data.inherited, False)
self.assertEqual(data.special, None)
def test_consumeVariable_alternate_array_declaration_01(self):
test_string = 'var/appearance_keylist[0]'
name, data = self.tree.consumeVariable(test_string, '', 0)
self.assertEqual(name, 'appearance_keylist')
self.assertEqual(data.type, '/list')
self.assertEqual(data.value, None)
self.assertEqual(data.size, 0)
self.assertEqual(data.declaration, True)
self.assertEqual(data.inherited, False)
self.assertEqual(data.special, None)
def test_consumeVariable_alternate_array_declaration_02(self):
test_string = 'var/medical[] = list()'
name, data = self.tree.consumeVariable(test_string, '', 0)
self.assertEqual(name, 'medical')
self.assertEqual(data.type, '/list')
self.assertEqual(data.value, 'list()')
self.assertEqual(data.size, -1)
self.assertEqual(data.declaration, True)
self.assertEqual(data.inherited, False)
self.assertEqual(data.special, None)
def test_consumeVariable_complex_types(self):
test_string = 'var/datum/gas_mixture/air_temporary'
name, data = self.tree.consumeVariable(test_string, '', 0)
self.assertEqual(name, 'air_temporary')
self.assertEqual(data.type, '/datum/gas_mixture')
self.assertEqual(data.value, None)
self.assertEqual(data.size, None)
self.assertEqual(data.declaration, True)
self.assertEqual(data.inherited, False)
        self.assertEqual(data.special, None)
def test_consumeVariable_file_ref(self):
test_string = 'icon = \'butts.dmi\''
name, data = self.tree.consumeVariable(test_string, '', 0)
self.assertEqual(name, 'icon')
        self.assertEqual(data.type, '/icon')
self.assertEqual(str(data.value), 'butts.dmi')
self.assertEqual(data.size, None)
self.assertEqual(data.declaration, False)
self.assertEqual(data.inherited, False)
self.assertEqual(data.special, None)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
daniboy/seantis-questionnaire
|
questionnaire/qprocessors/range_or_number.py
|
Python
|
bsd-3-clause
| 2,702
| 0.007772
|
from questionnaire import *
from django.conf import settings
from django.utils.translation import ugettext as _
from json import dumps
@question_proc('range', 'number')
def question_range_or_number(request, question):
cd = question.getcheckdict()
rmin, rmax = parse_range(cd)
rstep = parse_step(cd)
runit = cd.get('unit', '')
required = cd.get('required', False)
current = request.POST.get('question_%s' % question.number, None)
jsinclude = []
if question.type == 'range':
jsinclude = [settings.STATIC_URL+'range.js']
return {
'required' : required,
'type': question.type,
'rmin' : rmin,
'rmax' : rmax,
'rstep' : rstep,
        'runit' : runit,
'current' : current,
'jsinclude' : jsinclude
}
@answer_proc('range', 'number')
def process_range_or_number(question, answer):
cd = question.getcheckdict()
rmin, rmax = parse_range(cd)
rstep = parse_step(cd)
convert = range_type(rmin, rmax, rstep)
required = question.getcheckdict().get('required', 0)
ans = answer['ANSWER']
if not ans:
if required:
raise AnswerException(_(u"Field cannot be blank"))
else:
return []
try:
ans = convert(ans)
except:
raise AnswerException(_(u"Could not convert the number"))
if (rmax is not None and ans > convert(rmax)) or (rmin is not None and ans < convert(rmin)):
raise AnswerException(_(u"Out of range"))
return dumps([ans])
add_type('range', 'Range of numbers [select]')
add_type('number', 'Number [input]')
def parse_range(checkdict):
"Given a checkdict for a range widget return the min and max string values."
rmin, rmax = None, None
range = checkdict.get('range', None)
try:
if range:
rmin, rmax = range.split('-', 1)
rmin, rmax = rmin or None, rmax or None
except ValueError:
pass
return rmin, rmax
def parse_step(checkdict):
"Given a checkdict for a range widget return the step as string value."
return checkdict.get('step', '1')
def range_type(rmin, rmax, step):
"""Given the min, max and step value return float or int depending on
the number of digits after 0.
"""
if any((digits(rmin), digits(rmax), digits(step))):
return float
else:
return int
def digits(number):
"Given a number as string return the number of digits after 0."
if number is not None and ('.' in number or ',' in number):
if '.' in number:
return len(number.split('.')[1])
else:
return len(number.split(',')[1])
else:
return 0
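# --- Illustrative sketch (not part of the original module) ---
# How the helpers above compose for a hypothetical checkdict.
if __name__ == '__main__':
    cd = {'range': '0-10.5', 'step': '0.5'}
    rmin, rmax = parse_range(cd)            # ('0', '10.5')
    step = parse_step(cd)                   # '0.5'
    convert = range_type(rmin, rmax, step)  # float, since '10.5' and '0.5' have decimals
    assert convert('7.5') == 7.5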
|
acigna/pywez
|
zsi/doc/examples/client/send_request/simple/Binding/client.py
|
Python
|
mit
| 301
| 0.003322
|
#!/usr/bin/env python
from ZSI import Binding
MESSAGE = "Hello from Python!"
def main():
binding = Binding(url='http://localhost:8080/server.py')
print ' Sending: %s' % MESSAGE
    response = binding.echo(MESSAGE)
    print 'Response: %s' % response
if __name__ == '__main__':
main()
|
teitei-tk/ice-pick
|
icePick/recorder.py
|
Python
|
mit
| 4,310
| 0.000928
|
import re
import datetime
from pymongo import MongoClient
from bson import ObjectId
from .exception import RecorderException, StructureException
__all__ = ['get_database', 'Recorder', 'Structure']
def get_database(db_name, host, port=27017):
return MongoClient(host, port)[db_name]
class Structure(dict):
__store = {}
def __init__(self, *args, **kwargs):
super(Structure, self).__init__(*args, **kwargs)
self.__dict__ = self
self._validate()
def _validate(self):
pass
def to_dict(self):
return self.__dict__
class Recorder:
struct = None
__store = None
class Meta:
database = None
class DataStore:
def get(self, key):
return self.__dict__.get(key)
def set(self, key, value):
self.__dict__[key] = value
def to_dict(self):
return self.__dict__
def __init__(self, key, data=None):
self._key = key
self.__store = self.DataStore()
self._init_from_dict(data)
def _init_from_dict(self, data):
if not isinstance(self.struct, Structure):
            raise RecorderException("{0} struct is not defined".format(self.__class__.__name__))
if not isinstance(data, dict):
data = dict()
# initialize store data
for k, v in self.struct.to_dict().items():
result = data.get(k)
if not result:
result = v
self.__store.set(k, result)
def key(self):
return self._key
def pk(self):
return ObjectId(self.key())
    def __str__(self):
        return self.__class__.__name__
    def __getattr__(self, key):
        if key in list(self.struct.keys()):
            return self.__store.get(key)
        # fall back to a normal attribute error; object() has no __getattr__
        raise AttributeError(key)
def __setattr__(self, key, value):
if key in list(self.struct.keys()):
self.__store.set(key, value)
else:
super(Recorder, self).__setattr__(key, value)
@classmethod
def colname(cls):
return re.sub('(?!^)([A-Z]+)', r'_\1', cls.__name__).lower().__str__()
@classmethod
def collection(cls):
return cls.Meta.database[cls.colname()]
@classmethod
def new(cls, data=None):
return cls(None, data)
@classmethod
def create(cls, data):
key = None
if '_id' in data.keys():
key = data['_id']
if isinstance(data['_id'], ObjectId):
key = data['_id'].__str__()
return cls(key, data)
@classmethod
def get(cls, key, *args, **kwargs):
data = cls.collection().find_one({'_id': ObjectId(key)}, *args, **kwargs)
if not data:
return None
return cls(key, data)
@classmethod
def get_by(cls, key, value, *args, **kwargs):
data = cls.collection().find_one({key: value}, *args, **kwargs)
if not data:
return None
return cls.create(data)
@classmethod
def find(cls, *args, **kwargs):
return [cls.create(x) for x in cls.collection().find(*args, **kwargs)]
def save(self):
if not self.key():
return self.insert()
return self.update()
def insert(self):
result = self.collection().insert_one(self.to_mongo())
self._key = result.inserted_id.__str__()
self.__store.set('_id', self.key())
return True
def update(self, upsert=False):
if not self.key():
return self.insert()
self.collection().update_one({'_id': self.pk()}, {'$set': self.to_mongo()}, upsert=upsert)
return True
def delete(self):
if not self.key():
return False
self.collection().delete_one({'_id': self.pk()})
return True
@classmethod
def exists(cls, key, value):
return cls.find(filter={key: value}, limit=1).__len__() > 0
def to_dict(self):
return self.__store.to_dict()
def to_mongo(self):
store = self.to_dict()
now = datetime.datetime.now()
if not 'created_at' in store.keys():
store['created_at'] = now
store['modified_at'] = now
if '_id' in store.keys():
del store['_id']
return store
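# --- Illustrative usage sketch (not part of the original module) ---
# Defining a concrete Recorder; the structure fields, database name, and host
# are hypothetical. MongoClient connects lazily, so defining the classes does
# not require a running server; the commented calls below do.
class UserStructure(Structure):
    pass

class User(Recorder):
    struct = UserStructure(name='', age=0)

    class Meta:
        database = get_database('example_db', 'localhost')

# user = User.new({'name': 'alice', 'age': 30})
# user.save()                   # insert; user.key() now holds the ObjectId string
# same = User.get(user.key())   # round-trips through the 'user' collection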
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/conf/locale/hr/formats.py
|
Python
|
bsd-3-clause
| 1,758
| 0
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. E Y.'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'j. E Y. H:i'
YEAR_MONTH_FORMAT = 'F Y.'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.m.Y.'
SHORT_DATETIME_FORMAT = 'j.m.Y. H:i'
FIRST_DAY_OF_WEEK = 1
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%Y-%m-%d', # '2006-10-25'
'%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.'
'%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
    '%Y-%m-%d', # '2006-10-25'
    '%d.%m.%Y. %H:%M:%S', # '25.10.2006. 14:30:59'
'%d.%m.%Y. %H:%M', # '25.10.2006. 14:30'
'%d.%m.%Y.', # '25.10.2006.'
'%d.%m.%y. %H:%M:%S', # '25.10.06. 14:30:59'
'%d.%m.%y. %H:%M', # '25.10.06. 14:30'
'%d.%m.%y.', # '25.10.06.'
'%d. %m. %Y. %H:%M:%S', # '25. 10. 2006. 14:30:59'
'%d. %m. %Y. %H:%M', # '25. 10. 2006. 14:30'
'%d. %m. %Y.', # '25. 10. 2006.'
'%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59'
'%d. %m. %y. %H:%M', # '25. 10. 06. 14:30'
'%d. %m. %y.', # '25. 10. 06.'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
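# --- Illustrative sketch (not part of the original locale file) ---
# The *_INPUT_FORMATS above are plain strptime patterns; a quick check:
if __name__ == '__main__':
    from datetime import datetime
    parsed = datetime.strptime('25.10.2006.', '%d.%m.%Y.')
    assert parsed.date().isoformat() == '2006-10-25'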
|
ChameleonCloud/openstack-nagios-plugins
|
setup.py
|
Python
|
gpl-3.0
| 4,527
| 0.001546
|
from setuptools import setup, find_packages # Always prefer setuptools over distutils
from codecs import open # To use a consistent encoding
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='openstacknagios',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# http://packaging.python.org/en/latest/tutorial.html#version
version='1.1.0',
description='nagios/icinga plugins to monitor an openstack installation',
long_description=long_description,
# The project's main homepage.
url='https://github.com/cirrax/openstack-nagios-plugins',
# Author details
author='Benedikt Trefzer',
author_email='benedikt.trefzer@cirrax.com',
# Choose your license
license='GPLv3',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
# Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
# What does your project relate to?
keywords='openstack icinga nagios check',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
#packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
# List run-time dependencies here. These will be installed by pip when your
# project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/technical.html#install-requires-vs-requirements-files
install_requires=[
'gnocchiclient',
'nagiosplugin',
'python-novaclient',
'python-keystoneclient',
'python-neutronclient',
'python-cinderclient',
'python-ceilometerclient',
'python-ironicclient',
],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'check_nova-images=openstacknagios.nova.Images:main',
'check_nova-services=openstacknagios.nova.Services:main',
'check_nova-hypervisors=openstacknagios.nova.Hypervisors:main',
'check_cinder-services=openstacknagios.cinder.Services:main',
'check_neutron-agents=openstacknagios.neutron.Agents:main',
'check_neutron-floatingips=openstacknagios.neutron.Floatingips:main',
'check_neutron-networkipavailabilities=openstacknagios.neutron.Networkipavailabilities:main',
'check_neutron-routers=openstacknagios.neutron.Routers:main',
'check_keystone-token=openstacknagios.keystone.Token:main',
'check_keystone-endpoints=openstacknagios.keystone.Endpoints:main',
'check_ceilometer-statistics=openstacknagios.ceilometer.Statistics:main',
'check_gnocchi-metrics=openstacknagios.gnocchi.Metrics:main',
'check_gnocchi-status=openstacknagios.gnocchi.Status:main',
'check_rally-results=openstacknagios.rally.Results:main',
'check_ironic-nodes=openstacknagios.ironic.Nodes:main',
'check_ironic-node-consoles=openstacknagios.ironic.Consoles:main',
],
},
)
|
Professor-RED/Erik
|
TestPrograms/Chat_Client.py
|
Python
|
mit
| 1,527
| 0.018337
|
# chat_client.py
import sys
import socket
import select
def chat_client():
if(len(sys.argv) < 3) :
print('Usage : python chat_client.py hostname port')
sys.exit()
host = sys.argv[1]
port = int(sys.argv[2])
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(2)
# connect to remote host
try :
s.connect((host, port))
except :
print('Unable to connect')
sys.exit()
print('Connected to remote host. You can start sending messages')
sys.stdout.write('[Me] '); sys.stdout.flush()
while 1:
socket_list = [sys.stdin, s]
# Get the list sockets which are readable
ready_to_read,ready_to_write,in_error = select.select(socket_list , [], [])
for sock in ready_to_read:
if sock == s:
# incoming message from remote server, s
data = sock.recv(4096)
if not data :
print('\nDisconnected from chat server')
sys.exit()
else :
#print data
sys.stdout.write(data)
                    sys.stdout.write('[Me] '); sys.stdout.flush()
else :
# user entered a message
                msg = sys.stdin.readline()
s.send(msg)
sys.stdout.write('[Me] '); sys.stdout.flush()
if __name__ == "__main__":
sys.exit(chat_client())
|
sadad111/leetcodebox
|
Binary Tree Level Order Traversal.py
|
Python
|
gpl-3.0
| 603
| 0.016584
|
class Solution(object):
def __init__(self):
self.l=[]
def helper(self,root,level):
        if not root:
return None
else:
if level<len(self.l):
                self.l[level].append(root.val)
else:
self.l.append([root.val])
self.helper(root.left,level+1)
self.helper(root.right,level+1)
return self.l
def levelOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
if not root:
return []
return self.helper(root,0)
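# --- Illustrative sketch (not part of the original solution) ---
# A minimal TreeNode (LeetCode normally supplies this) plus a quick check of
# the traversal.
class TreeNode(object):
    def __init__(self, val):
        self.val = val
        self.left = None
        self.right = None

if __name__ == '__main__':
    root = TreeNode(3)
    root.left, root.right = TreeNode(9), TreeNode(20)
    root.right.left, root.right.right = TreeNode(15), TreeNode(7)
    assert Solution().levelOrder(root) == [[3], [9, 20], [15, 7]]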
|
apache/incubator-airflow
|
airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py
|
Python
|
apache-2.0
| 5,015
| 0.001994
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Dict, Optional
from kubernetes import client
from airflow.exceptions import AirflowException
from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
from airflow.sensors.base import BaseSensorOperator
class SparkKubernetesSensor(BaseSensorOperator):
"""
Checks sparkApplication object in kubernetes cluster:
.. seealso::
For more detail about Spark Application Object have a look at the reference:
https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/v1beta2-1.1.0-2.4.5/docs/api-docs.md#sparkapplication
:param application_name: spark Application resource name
:type application_name: str
    :param namespace: the kubernetes namespace where the sparkApplication resides
:type namespace: str
    :param kubernetes_conn_id: The :ref:`kubernetes connection<howto/connection:kubernetes>`
to Kubernetes cluster.
:type kubernetes_conn_id: str
    :param attach_log: determines whether logs for driver pod should be appended to the sensor log
:type attach_log: bool
:param api_group: kubernetes api group of sparkApplication
:type api_group: str
:param api_version: kubernetes api version of sparkApplication
:type api_version: str
"""
template_fields = ("application_name", "namespace")
FAILURE_STATES = ("FAILED", "UNKNOWN")
SUCCESS_STATES = ("COMPLETED",)
def __init__(
self,
*,
application_name: str,
attach_log: bool = False,
namespace: Optional[str] = None,
kubernetes_conn_id: str = "kubernetes_default",
api_group: str = 'sparkoperator.k8s.io',
api_version: str = 'v1beta2',
**kwargs,
) -> None:
super().__init__(**kwargs)
self.application_name = application_name
self.attach_log = attach_log
self.namespace = namespace
self.kubernetes_conn_id = kubernetes_conn_id
self.hook = KubernetesHook(conn_id=self.kubernetes_conn_id)
self.api_group = api_group
self.api_version = api_version
def _log_driver(self, application_state: str, response: dict) -> None:
if not self.attach_log:
return
status_info = response["status"]
if "driverInfo" not in status_info:
return
driver_info = status_info["driverInfo"]
if "podName" not in driver_info:
return
driver_pod_name = driver_info["podName"]
namespace = response["metadata"]["namespace"]
log_method = self.log.error if application_state in self.FAILURE_STATES else self.log.info
try:
log = ""
for line in self.hook.get_pod_logs(driver_pod_name, namespace=namespace):
log += line.decode()
log_method(log)
except client.rest.ApiException as e:
self.log.warning(
"Could not read logs for pod %s. It may have been disposed.\n"
"Make sure timeToLiveSeconds is set on your SparkApplication spec.\n"
"underlying exception: %s",
driver_pod_name,
e,
)
def poke(self, context: Dict) -> bool:
self.log.info("Poking: %s", self.application_name)
response = self.hook.get_custom_object(
group=self.api_group,
version=self.api_version,
plural="sparkapplications",
name=self.application_name,
namespace=self.namespace,
)
try:
application_state = response["status"]["applicationState"]["state"]
except KeyError:
return False
if self.attach_log and application_state in self.FAILURE_STATES + self.SUCCESS_STATES:
self._log_driver(application_state, response)
if application_state in self.FAILURE_STATES:
raise AirflowException(f"Spark application failed with state: {application_state}")
elif application_state in self.SUCCESS_STATES:
self.log.info("Spark application ended successfully")
return True
else:
self.log.info("Spark application is still in state: %s", application_state)
return False
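# --- Illustrative usage sketch (not part of the original module) ---
# Attaching the sensor to a DAG; the dag id, task id, application name, and
# namespace are hypothetical, and running it needs a configured Airflow
# environment, so the calls are left commented out.
#
# from datetime import datetime
# from airflow import DAG
#
# with DAG(dag_id='spark_pi_monitor', start_date=datetime(2021, 1, 1),
#          schedule_interval=None) as dag:
#     monitor = SparkKubernetesSensor(
#         task_id='monitor_spark_pi',
#         application_name='spark-pi',
#         namespace='spark-jobs',
#         attach_log=True,
#     )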
|
asiviero/shopify_python_api
|
shopify/resources/location.py
|
Python
|
mit
| 79
| 0
|
from ..base import ShopifyResource
class Location(ShopifyResource):
pass
|
maclogan/VirtualPenPal
|
tests/test_adapter_validation.py
|
Python
|
bsd-3-clause
| 4,912
| 0.000814
|
from chatterbot import ChatBot
from chatterbot.adapters import Adapter
from .base_case import ChatBotTestCase
class AdapterValidationTests(ChatBotTestCase):
def test_invalid_storage_adapter(self):
kwargs = self.get_kwargs()
kwargs['storage_adapter'] = 'chatterbot.input.TerminalAdapter'
with self.assertRaises(Adapter.InvalidAdapterTypeException):
self.chatbot = ChatBot('Test Bot', **kwargs)
def test_valid_storage_adapter(self):
kwargs = self.get_kwargs()
kwargs['storage_adapter'] = 'chatterbot.storage.JsonFileStorageAdapter'
try:
self.chatbot = ChatBot('Test Bot', **kwargs)
except Adapter.InvalidAdapterTypeException:
self.fail('Test raised InvalidAdapterException unexpectedly!')
def test_invalid_input_adapter(self):
kwargs = self.get_kwargs()
kwargs['input_adapter'] = 'chatterbot.storage.JsonFileStorageAdapter'
with self.assertRaises(Adapter.InvalidAdapterTypeException):
self.chatbot = ChatBot('Test Bot', **kwargs)
def test_valid_input_adapter(self):
kwargs = self.get_kwargs()
kwargs['input_adapter'] = 'chatterbot.input.TerminalAdapter'
try:
self.chatbot = ChatBot('Test Bot', **kwargs)
except Adapter.InvalidAdapterTypeException:
self.fail('Test raised InvalidAdapterException unexpectedly!')
def test_invalid_output_adapter(self):
kwargs = self.get_kwargs()
kwargs['output_adapter'] = 'chatterbot.input.TerminalAdapter'
with self.assertRaises(Adapter.InvalidAdapterTypeException):
self.chatbot = ChatBot('Test Bot', **kwargs)
def test_valid_output_adapter(self):
kwargs = self.get_kwargs()
kwargs['output_adapter'] = 'chatterbot.output.TerminalAdapter'
try:
self.chatbot = ChatBot('Test Bot', **kwargs)
except Adapter.InvalidAdapterTypeException:
self.fail('Test raised InvalidAdapterException unexpectedly!')
def test_invalid_logic_adapter(self):
kwargs = self.get_kwargs()
kwargs['logic_adapters'] = ['chatterbot.input.TerminalAdapter']
with self.assertRaises(Adapter.InvalidAdapterTypeException):
self.chatbot = ChatBot('Test Bot', **kwargs)
def test_valid_logic_adapter(self):
kwargs = self.get_kwargs()
kwargs['logic_adapters'] = ['chatterbot.logic.BestMatch']
try:
self.chatbot = ChatBot('Test Bot', **kwargs)
except Adapter.InvalidAdapterTypeException:
self.fail('Test raised InvalidAdapterException unexpectedly!')
def test_valid_adapter_dictionary(self):
kwargs = self.get_kwargs()
kwargs['storage_adapter'] = {
'import_path': 'chatterbot.storage.JsonFileStorageAdapter'
}
try:
self.chatbot = ChatBot('Test Bot', **kwargs)
except Adapter.InvalidAdapterTypeException:
self.fail('Test raised InvalidAdapterException unexpectedly!')
def test_invalid_adapter_dictionary(self):
kwargs = self.get_kwargs()
kwargs['storage_adapter'] = {
'import_path': 'chatterbot.logic.BestMatch'
}
with self.assertRaises(Adapter.InvalidAdapterTypeException):
self.chatbot = ChatBot('Test Bot', **kwargs)
class MultiAdapterTests(ChatBotTestCase):
def test_add_logic_adapter(self):
count_before = len(self.chatbot.logic.adapters)
self.chatbot.logic.add_adapter(
'chatterbot.logic.BestMatch'
)
self.assertIsLength(self.chatbot.logic.adapters, count_before + 1)
def test_insert_logic_adapter(self):
self.chatbot.logic.add_adapter('chatterbot.logic.TimeLogicAdapter')
self.chatbot.logic.add_adapter('chatterbot.logic.BestMatch')
self.chatbot.logic.insert_logic_adapter('chatterbot.logic.MathematicalEvaluation', 1)
        self.assertEqual(
type(self.chatbot.logic.adapters[1]).__name__,
'MathematicalEvaluation'
)
def test_remove_logic_adapter(self):
self.chatbot.logic.add_adapter('chatterbot.logic.TimeLogicAdapter')
self.chatbot.logic.add_adapter('chatterbot.logic.MathematicalEvaluation')
adapter_count = len(self.chatbot.logic.adapters)
removed = self.chatbot.logic.remove_logic_adapter('MathematicalEvaluation')
        self.assertTrue(removed)
self.assertIsLength(self.chatbot.logic.adapters, adapter_count - 1)
def test_remove_logic_adapter_not_found(self):
self.chatbot.logic.add_adapter('chatterbot.logic.TimeLogicAdapter')
adapter_count = len(self.chatbot.logic.adapters)
removed = self.chatbot.logic.remove_logic_adapter('MathematicalEvaluation')
self.assertFalse(removed)
self.assertIsLength(self.chatbot.logic.adapters, adapter_count)
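# --- Illustrative sketch (not part of the original tests) ---
# The adapter layout these tests exercise, using the same import paths the
# assertions above rely on; the bot name is hypothetical.
#
# from chatterbot import ChatBot
# bot = ChatBot(
#     'Example Bot',
#     storage_adapter='chatterbot.storage.JsonFileStorageAdapter',
#     input_adapter='chatterbot.input.TerminalAdapter',
#     output_adapter='chatterbot.output.TerminalAdapter',
#     logic_adapters=['chatterbot.logic.BestMatch'],
# )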
|
rprabhat/elastalert
|
elastalert/alerts.py
|
Python
|
apache-2.0
| 60,947
| 0.00274
|
# -*- coding: utf-8 -*-
import copy
import datetime
import json
import logging
import subprocess
import sys
import warnings
from email.mime.text import MIMEText
from email.utils import formatdate
from smtplib import SMTP
from smtplib import SMTP_SSL
from smtplib import SMTPAuthenticationError
from smtplib import SMTPException
from socket import error
import boto3
import requests
import stomp
from exotel import Exotel
from jira.client import JIRA
from jira.exceptions import JIRAError
from requests.exceptions import RequestException
from staticconf.loader import yaml_loader
from texttable import Texttable
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client as TwilioClient
from util import EAException
from util import elastalert_logger
from util import lookup_es_key
from util import pretty_ts
from util import ts_now
from util import ts_to_dt
class DateTimeEncoder(json.JSONEncoder):
def default(self, obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
else:
return json.JSONEncoder.default(self, obj)
class BasicMatchString(object):
""" Creates a string containing fields in match for the given rule. """
def __init__(self, rule, match):
self.rule = rule
self.match = match
def _ensure_new_line(self):
while self.text[-2:] != '\n\n':
self.text += '\n'
def _add_custom_alert_text(self):
missing = '<MISSING VALUE>'
alert_text = unicode(self.rule.get('alert_text', ''))
if 'alert_text_args' in self.rule:
alert_text_args = self.rule.get('alert_text_args')
alert_text_values = [lookup_es_key(self.match, arg) for arg in alert_text_args]
# Support referencing other top-level rule properties
# This technically may not work if there is a top-level rule property with the same name
# as an es result key, since it would have been matched in the lookup_es_key call above
for i in xrange(len(alert_text_values)):
if alert_text_values[i] is None:
alert_value = self.rule.get(alert_text_args[i])
if alert_value:
alert_text_values[i] = alert_value
alert_text_values = [missing if val is None else val for val in alert_text_values]
alert_text = alert_text.format(*alert_text_values)
elif 'alert_text_kw' in self.rule:
kw = {}
for name, kw_name in self.rule.get('alert_text_kw').items():
val = lookup_es_key(self.match, name)
# Support referencing other top-level rule properties
# This technically may not work if there is a top-level rule property with the same name
# as an es result key, since it would have been matched in the lookup_es_key call above
if val is None:
val = self.rule.get(name)
kw[kw_name] = missing if val is None else val
alert_text = alert_text.format(**kw)
self.text += alert_text
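    # Illustrative example (rule values hypothetical): with
    #   alert_text: 'Host {0} matched {1} times'
    #   alert_text_args: ['hostname', 'num_hits']
    # both values are pulled from the match (falling back to top-level rule
    # keys as handled above) and substituted via str.format; values that
    # remain unresolved render as '<MISSING VALUE>'.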
def _add_rule_text(self):
self.text += self.rule['type'].get_match_str(self.match)
def _add_top_counts(self):
for key, counts in self.match.items():
if key.startswith('top_events_'):
self.text += '%s:\n' % (key[11:])
top_events = counts.items()
if not top_events:
self.text += 'No events found.\n'
else:
top_events.sort(key=lambda x: x[1], reverse=True)
for term, count in top_events:
self.text += '%s: %s\n' % (term, count)
self.text += '\n'
def _add_match_items(self):
match_items = self.match.items()
match_items.sort(key=lambda x: x[0])
for key, value in match_items:
if key.startswith('top_events_'):
continue
value_str = unicode(value)
            value_str = value_str.replace('\\n', '\n')
if type(value) in [list, dict]:
try:
value_str = self._pretty_print_as_json(value)
except TypeError:
# Non serializable object, fallback to str
pass
self.text += '%s: %s\n' % (key, value_str)
def _pretty_print_as_json(self, blob):
try:
return json.dumps(blob, cls=DateTimeEncoder, sort_keys=True, indent=4, ensure_ascii=False)
except UnicodeDecodeError:
# This blob contains non-unicode, so lets pretend it's Latin-1 to show something
return json.dumps(blob, cls=DateTimeEncoder, sort_keys=True, indent=4, encoding='Latin-1', ensure_ascii=False)
def __str__(self):
self.text = ''
if 'alert_text' not in self.rule:
self.text += self.rule['name'] + '\n\n'
self._add_custom_alert_text()
self._ensure_new_line()
if self.rule.get('alert_text_type') != 'alert_text_only':
self._add_rule_text()
self._ensure_new_line()
if self.rule.get('top_count_keys'):
self._add_top_counts()
if self.rule.get('alert_text_type') != 'exclude_fields':
self._add_match_items()
return self.text
class JiraFormattedMatchString(BasicMatchString):
def _add_match_items(self):
match_items = dict([(x, y) for x, y in self.match.items() if not x.startswith('top_events_')])
        json_blob = self._pretty_print_as_json(match_items)
preformatted_text = u'{{code:json}}{0}{{code}}'.format(json_blob)
self.text += preformatted_text
class Alerter(object):
""" Base class for types of alerts.
:param rule: The rule configuration.
"""
required_options = frozenset([])
def __init__(self, rule):
self.rule = rule
# pipeline object is created by ElastAlerter.send_alert()
        # and attached to each alerter used by a rule before calling alert()
        self.pipeline = None
self.resolve_rule_references(self.rule)
def resolve_rule_references(self, root):
# Support referencing other top-level rule properties to avoid redundant copy/paste
if type(root) == list:
# Make a copy since we may be modifying the contents of the structure we're walking
for i, item in enumerate(copy.copy(root)):
if type(item) == dict or type(item) == list:
self.resolve_rule_references(root[i])
else:
root[i] = self.resolve_rule_reference(item)
elif type(root) == dict:
# Make a copy since we may be modifying the contents of the structure we're walking
for key, value in root.copy().iteritems():
if type(value) == dict or type(value) == list:
self.resolve_rule_references(root[key])
else:
root[key] = self.resolve_rule_reference(value)
def resolve_rule_reference(self, value):
strValue = unicode(value)
if strValue.startswith('$') and strValue.endswith('$') and strValue[1:-1] in self.rule:
if type(value) == int:
return int(self.rule[strValue[1:-1]])
else:
return self.rule[strValue[1:-1]]
else:
return value
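    # Illustrative example (rule keys hypothetical): given
    #   {'index': 'logs-*', 'alias': '$index$'}
    # resolve_rule_references() rewrites 'alias' to 'logs-*'; values that were
    # integers keep their int type via the branch above.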
def alert(self, match):
""" Send an alert. Match is a dictionary of information about the alert.
:param match: A dictionary of relevant information to the alert.
"""
raise NotImplementedError()
def get_info(self):
""" Returns a dictionary of data related to this alert. At minimum, this should contain
a field type corresponding to the type of Alerter. """
return {'type': 'Unknown'}
def create_title(self, matches):
""" Creates custom alert title to be used, e.g. as an e-mail subject or JIRA issue summary.
:param matches: A list of dictionaries of relevant information to the alert.
"""
if 'alert_subject' in self.rule:
|
gi11es/thumbor
|
tests/test_server.py
|
Python
|
mit
| 9,543
| 0.000838
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com thumbor@googlegroups.com
from unittest import TestCase
import mock
from preggy import expect
import thumbor.server
from tests.fixtures.custom_error_handler import ErrorHandler as CustomErrorHandler
from thumbor.app import ThumborServiceApp
from thumbor.config import Config
from thumbor.server import (
configure_log,
get_application,
get_as_integer,
get_config,
get_context,
get_importer,
main,
run_server,
validate_config,
)
class ServerTestCase(TestCase):
def test_can_get_value_as_integer(self):
expect(get_as_integer("1")).to_equal(1)
expect(get_as_integer("a")).to_be_null()
expect(get_as_integer("")).to_be_null()
expect(get_as_integer(None)).to_be_null()
def test_can_get_config_from_path(self):
config = get_config("./tests/fixtures/thumbor_config_server_test.conf")
with mock.patch.dict("os.environ", {"ENGINE": "test"}):
expect(config).not_to_be_null()
expect(config.ALLOWED_SOURCES).to_be_like(["mydomain.com"])
expect(config.ENGINE).to_be_like("thumbor.engines.pil")
def test_can_get_config_with_env_enabled(self):
config = get_config("./tests/fixtures/thumbor_config_server_test.conf", True)
with mock.patch.dict("os.environ", {"ENGINE": "test"}):
expect(config).not_to_be_null()
expect(config.ALLOWED_SOURCES).to_be_like(["mydomain.com"])
expect(config.ENGINE).to_be_like("test")
@mock.patch("logging.basicConfig")
def test_can_configure_log_from_config(self, basic_config_mock):
conf = Config()
configure_log(conf, "DEBUG")
params = dict(
datefmt="%Y-%m-%d %H:%M:%S",
level=10,
format="%(asctime)s %(name)s:%(levelname)s %(message)s",
)
basic_config_mock.assert_called_with(**params)
@mock.patch("logging.config.dictConfig")
def test_can_configure_log_from_dict_config(self, dict_config_mock):
conf = Config(THUMBOR_LOG_CONFIG={"level": "INFO"})
configure_log(conf, "DEBUG")
params = dict(level="INFO",)
dict_config_mock.assert_called_with(params)
def test_can_import_default_modules(self):
conf = Config()
importer = get_importer(conf)
expect(importer).not_to_be_null()
expect(importer.filters).not_to_be_empty()
def test_can_import_with_custom_error_handler_class(self):
conf = Config(
USE_CUSTOM_ERROR_HANDLING=True,
ERROR_HANDLER_MODULE="tests.fixtures.custom_error_handler",
)
importer = get_importer(conf)
expect(importer).not_to_be_null()
expect(importer.error_handler_class).not_to_be_null()
expect(importer.error_handler_class).to_be_instance_of(CustomErrorHandler)
def test_validate_config_security_key(self):
server_parameters = mock.Mock(security_key=None)
conf = Config(SECURITY_KEY=None)
with expect.error_to_happen(
RuntimeError,
message="No security key was found for this instance of thumbor. "
"Please provide one using the conf file or a security key file.",
):
validate_config(conf, server_parameters)
def test_validate_config_security_key_from_config(self):
server_parameters = mock.Mock(security_key=None)
conf = Config(SECURITY_KEY="something")
validate_config(conf, server_parameters)
expect(server_parameters.security_key).to_equal("something")
@mock.patch.object(thumbor.server, "which")
def test_validate_gifsicle_path(self, which_mock):
server_parameters = mock.Mock(security_key=None)
conf = Config(SECURITY_KEY="test", USE_GIFSICLE_ENGINE=True)
which_mock.return_value = "/usr/bin/gifsicle"
validate_config(conf, server_parameters)
expect(server_parameters.gifsicle_path).to_equal("/usr/bin/gifsicle")
@mock.patch.object(thumbor.server, "which")
def test_validate_null_gifsicle_path(self, which_mock):
server_parameters = mock.Mock(security_key=None)
conf = Config(SECURITY_KEY="test", USE_GIFSICLE_ENGINE=True)
which_mock.return_value = None
with expect.error_to_happen(
RuntimeError,
message="If using USE_GIFSICLE_ENGINE configuration to True, "
"the `gifsicle` binary must be in the PATH and must be an executable.",
):
validate_config(conf, server_parameters)
def test_get_context(self):
server_parameters = mock.Mock(
security_key=None, app_class="thumbor.app.ThumborServiceApp"
)
conf = Config(SECURITY_KEY="test")
importer = get_importer(conf)
context = get_context(server_parameters, conf, importer)
expect(context).not_to_be_null()
def test_get_application(self):
server_parameters = mock.Mock(
security_key=None, app_class="thumbor.app.ThumborServiceApp"
)
conf = Config(SECURITY_KEY="test")
importer = get_importer(conf)
context = get_context(server_parameters, conf, importer)
app = get_application(context)
expect(app).not_to_be_null()
expect(app).to_be_instance_of(ThumborServiceApp)
@mock.patch.object(thumbor.server, "HTTPServer")
def test_can_run_server_with_default_params(self, server_mock):
application = mock.Mock()
context = mock.Mock()
context.server = mock.Mock(fd=None, port=1234, ip="0.0.0.0", processes=1)
server_instance_mock = mock.Mock()
server_mock.return_value = server_instance_mock
run_server(application, context)
server_instance_mock.bind.assert_called_with(1234, "0.0.0.0")
server_instance_mock.start.assert_called_with(1)
@mock.patch.object(thumbor.server, "HTTPServer")
def test_can_run_server_with_multiple_processes(self, server_mock):
application = mock.Mock()
context = mock.Mock()
        context.server = mock.Mock(fd=None, port=1234, ip="0.0.0.0", processes=5)
server_instance_mock = mock.Mock()
server_mock.return_value = server_instance_mock
run_server(application, context)
server_instance_mock.start.assert_called_with(5)
@mock.patch.object(thumbor.server, "HTTPServer")
@mock.patch.object(thumbor.server, "socket_from_fd")
def test_can_run_server_with_fd(self, socket_from_fd_mock, server_mock):
        application = mock.Mock()
context = mock.Mock()
context.server = mock.Mock(fd=11, port=1234, ip="0.0.0.0", processes=1)
server_instance_mock = mock.Mock()
server_mock.return_value = server_instance_mock
socket_from_fd_mock.return_value = "socket mock"
run_server(application, context)
server_instance_mock.add_socket.assert_called_with("socket mock")
server_instance_mock.start.assert_called_with(1)
@mock.patch.object(thumbor.server, "HTTPServer")
@mock.patch.object(thumbor.server, "bind_unix_socket")
def test_can_run_server_with_unix_socket(self, bind_unix_socket, server_mock):
application = mock.Mock()
context = mock.Mock()
context.server = mock.Mock(fd="/path/bin", port=1234, ip="0.0.0.0", processes=1)
server_instance_mock = mock.Mock()
server_mock.return_value = server_instance_mock
bind_unix_socket.return_value = "socket mock"
run_server(application, context)
bind_unix_socket.assert_called_with("/path/bin")
server_instance_mock.add_socket.assert_called_with("socket mock")
server_instance_mock.start.assert_called_with(1)
@mock.patch.object(thumbor.server, "HTTPServer")
def test_run_server_returns_server(self, server_mock):
application = mock.Mock()
context = mock.Mock()
        context.server = mock.Mock(fd=None, port=1234, ip="0.0.0.0", processes=1)
|
r0x0r/pywebview
|
examples/localization.py
|
Python
|
bsd-3-clause
| 1,251
| 0
|
# -*- coding: utf-8 -*-
import webview
"""
This example demonstrates how to localize GUI strings used by pywebview.
"""
if __name__ == '__main__':
localization = {
'global.saveFile': u'Сохранить файл',
'cocoa.menu.about': u'О программе',
'cocoa.menu.services': u'Cлужбы',
'cocoa.menu.view': u'Вид',
'cocoa.menu.hide': u'Скрыть',
'cocoa.menu.hideOthers': u'Скрыть остальные',
'cocoa.menu.showAll': u'Показать все',
'cocoa.menu.quit': u'Завершить',
        'cocoa.menu.fullscreen': u'Перейти в полноэкранный режим',
'windows.fileFilter.allFiles': u'Все файлы',
        'windows.fileFilter.otherFiles': u'Остальные файлы',
'linux.openFile': u'Открыть файл',
        'linux.openFiles': u'Открыть файлы',
        'linux.openFolder': u'Открыть папку',
}
window_localization_override = {
'global.saveFile': u'Save file',
}
webview.create_window(
'Localization Example',
'https://pywebview.flowrl.com/hello',
localization=window_localization_override,
)
webview.start(localization=localization)
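    # A minimal variation (sketch, same API as above): if no per-window
    # override is needed, create the window without the localization argument
    # and every string falls back to the dictionary passed to webview.start().
    # webview.create_window('Localization Example', 'https://pywebview.flowrl.com/hello')
    # webview.start(localization=localization)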
|
jopohl/urh
|
src/urh/controller/dialogs/FilterDialog.py
|
Python
|
gpl-3.0
| 4,227
| 0.001183
|
from PyQt5.QtCore import pyqtSlot, pyqtSignal, Qt
from PyQt5.QtWidgets import QDialog
from urh.signalprocessing.Filter import Filter, FilterType
from urh.ui.ui_filter_dialog import Ui_FilterDialog
class FilterDialog(QDialog):
filter_accepted = pyqtSignal(Filter)
def __init__(self, dsp_filter: Filter, parent=None):
super().__init__(parent)
self.ui = Ui_FilterDialog()
self.ui.setupUi(self)
self.setWindowFlags(Qt.Window)
self.error_message = ""
self.set_dsp_filter_status(dsp_filter.filter_type)
self.create_connects()
def set_dsp_filter_status(self, dsp_filter_type: FilterType):
if dsp_filter_type == FilterType.moving_average:
self.ui.radioButtonMovingAverage.setChecked(True)
self.ui.lineEditCustomTaps.setEnabled(False)
self.ui.spinBoxNumTaps.setEnabled(True)
elif dsp_filter_type == FilterType.dc_correction:
self.ui.radioButtonDCcorrection.setChecked(True)
self.ui.lineEditCustomTaps.setEnabled(False)
            self.ui.spinBoxNumTaps.setEnabled(False)
else:
self.ui.radioButtonCustomTaps.setChecked(True)
self.ui.spinBoxNumTaps.setEnabled(True)
self.ui.lineEditCustomTaps.setEnabled(True)
def create_connects(self):
self.ui.radioButtonMovingAverage.clicked.connect(self.on_radio_button_moving_average_clicked)
self.ui.radioButtonCustomTaps.clicked.connect(self.on_radio_button_custom_taps_clicked)
self.ui.radioButtonDCcorrection.clicked.connect(self.on_radio_button_dc_correction_clicked)
self.ui.spinBoxNumTaps.valueChanged.connect(self.set_error_status)
self.ui.lineEditCustomTaps.textEdited.connect(self.set_error_status)
self.ui.buttonBox.accepted.connect(self.on_accept_clicked)
self.ui.buttonBox.rejected.connect(self.reject)
def build_filter(self) -> Filter:
if self.ui.radioButtonMovingAverage.isChecked():
n = self.ui.spinBoxNumTaps.value()
return Filter([1/n for _ in range(n)], filter_type=FilterType.moving_average)
elif self.ui.radioButtonDCcorrection.isChecked():
return Filter([], filter_type=FilterType.dc_correction)
else:
# custom filter
try:
taps = eval(self.ui.lineEditCustomTaps.text())
try:
taps = list(map(float, taps))
self.error_message = ""
return Filter(taps)
except (ValueError, TypeError) as e:
self.error_message = "Error casting taps:\n" + str(e)
return None
except SyntaxError as e:
self.error_message = "Error parsing taps:\n" + str(e)
return None
def set_error_status(self):
dsp_filter = self.build_filter()
if dsp_filter is None:
self.ui.lineEditCustomTaps.setStyleSheet("background: red")
self.ui.lineEditCustomTaps.setToolTip(self.error_message)
elif len(dsp_filter.taps) != self.ui.spinBoxNumTaps.value():
self.ui.lineEditCustomTaps.setStyleSheet("background: yellow")
self.ui.lineEditCustomTaps.setToolTip("The number of the filter taps does not match the configured number of taps. I will use your configured filter taps.")
else:
self.ui.lineEditCustomTaps.setStyleSheet("")
self.ui.lineEditCustomTaps.setToolTip("")
@pyqtSlot(bool)
def on_radio_button_moving_average_clicked(self, checked: bool):
if checked:
self.set_dsp_filter_status(FilterType.moving_average)
@pyqtSlot(bool)
def on_radio_button_custom_taps_clicked(self, checked: bool):
if checked:
self.set_dsp_filter_status(FilterType.custom)
self.set_error_status()
@pyqtSlot(bool)
def on_radio_button_dc_correction_clicked(self, checked: bool):
if checked:
self.set_dsp_filter_status(FilterType.dc_correction)
@pyqtSlot()
def on_accept_clicked(self):
dsp_filter = self.build_filter()
self.filter_accepted.emit(dsp_filter)
self.accept()
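# Illustrative sketch of what build_filter() returns in the moving-average
# case (uses only the Filter/FilterType imports above): n equal taps that
# sum to 1, i.e. a length-n boxcar.
# n = 4
# dsp_filter = Filter([1 / n for _ in range(n)], filter_type=FilterType.moving_average)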
|
PhantomAppDevelopment/python-getting-started
|
step-1/myscript.py
|
Python
|
mit
| 308
| 0
|
"""
This file should only work on Python 3.6 and newer.
Its purpose is to test a correct installation of Python 3.
"""
from random import randint
print("Generating one thousand random numbers...")
for i in range(1000):
random_number = randint(0, 100000)
print(f"Number {i} was: {rando
|
m_number}")
|
psychopy/versions
|
psychopy/visual/polygon.py
|
Python
|
gpl-3.0
| 3,361
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Creates a regular polygon (triangles, pentagrams, ...)
as a special case of a :class:`~psychopy.visual.ShapeStim`'''
# Part of the PsychoPy library
# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2020 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).
from __future__ import absolute_import, print_function
from builtins import range
import psychopy # so we can get the __path__
from psychopy.visual.shape import BaseShapeStim
from psychopy.tools.attributetools import attributeSetter, setAttribute
import numpy
class Polygon(BaseShapeStim):
"""Creates a regular polygon (triangles, pentagrams, ...).
A special case of a :class:`~psychopy.visual.ShapeStim`.
(New in version 1.72.00)
"""
def __init__(self, win, edges=3, radius=.5, **kwargs):
"""Polygon accepts all input parameters that
:class:`~psychopy.visual.ShapeStim` accepts, except for
vertices and closeShape.
"""
# what local vars are defined (these are the init params) for use by
# __repr__
self._initParams = dir()
self._initParams.remove('self')
# kwargs isn't a parameter, but a list of params
self._initParams.remove('kwargs')
self._initParams.extend(kwargs)
self.autoLog = False # but will be changed if needed at end of init
self.__dict__['edges'] = edges
self.radius = numpy.asarray(radius)
self._calcVertices()
kwargs['closeShape'] = True # Make sure nobody messes around here
kwargs['vertices'] = self.vertices
super(Polygon, self).__init__(win, **kwargs)
def _calcVertices(self):
d = numpy.pi * 2 / self.edges
self.vertices = numpy.asarray(
[numpy.asarray((numpy.sin(e * d), numpy.cos(e * d))) * self.radius
for e in range(int(round(self.edges)))])
@attributeSetter
def edges(self, edges):
"""Number of edges of the polygon. Floats are rounded to int.
:ref:`Operations <attrib-operations>` supported.
"""
self.__dict__['edges'] = edges
self._calcVertices()
self.setVertices(self.vertices, log=False)
def setEdges(self, edges, operation='', log=None):
"""Usually you can use 'stim.attribute = value' syntax instead,
but use this method if you need to suppress the log message"""
setAttribute(self, 'edges', edges, log, operation)
@attributeSetter
def radius(self, radius):
"""float, int, tuple, list or 2x1 array
        Radius of the Polygon (distance from the center to the corners).
        May be a 2-tuple or list to stretch the polygon asymmetrically.
:ref:`Operations <attrib-operations>` supported.
Usually there's a setAttribute(value, log=False) method for each
attribute. Use this if you want to disable logging.
"""
self.__dict__['radius'] = numpy.array(radius)
self._calcVertices()
self.setVertices(self.vertices, log=False)
def setRadius(self, radius, operation='', log=None):
"""Usually you can use 'stim.attribute = value' syntax instead,
but use this method if you need to suppress the log message
"""
setAttribute(self, 'radius', radius, log, operation)
|
jiaphuan/models
|
research/adversarial_text/graphs.py
|
Python
|
apache-2.0
| 24,710
| 0.004816
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Virtual adversarial text models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import os
# Dependency imports
import tensorflow as tf
import adversarial_losses as adv_lib
import inputs as inputs_lib
import layers as layers_lib
flags = tf.app.flags
FLAGS = flags.FLAGS
# Flags governing adversarial training are defined in adversarial_losses.py.
# Classifier
flags.DEFINE_integer('num_classes', 2, 'Number of classes for classification')
# Data path
flags.DEFINE_string('data_dir', '/tmp/IMDB',
'Directory path to preprocessed text dataset.')
flags.DEFINE_string('vocab_freq_path', None,
'Path to pre-calculated vocab frequency data. If '
'None, use FLAGS.data_dir/vocab_freq.txt.')
flags.DEFINE_integer('batch_size', 64, 'Size of the batch.')
flags.DEFINE_integer('num_timesteps', 100, 'Number of timesteps for BPTT')
# Model architecture
flags.DEFINE_bool('bidir_lstm', False, 'Whether to build a bidirectional LSTM.')
flags.DEFINE_bool('single_label', True, 'Whether the sequence has a single '
'label, for optimization.')
flags.DEFINE_integer('rnn_num_layers', 1, 'Number of LSTM layers.')
flags.DEFINE_integer('rnn_cell_size', 512,
'Number of hidden units in the LSTM.')
flags.DEFINE_integer('cl_num_layers', 1,
'Number of hidden layers of classification model.')
flags.DEFINE_integer('cl_hidden_size', 30,
'Number of hidden units in classification layer.')
flags.DEFINE_integer('num_candidate_samples', -1,
'Num samples used in the sampled output layer.')
flags.DEFINE_bool('use_seq2seq_autoencoder', False,
'If True, seq2seq auto-encoder is used to pretrain. '
'If False, standard language model is used.')
# Vocabulary and embeddings
flags.DEFINE_integer('embedding_dims', 256, 'Dimensions of embedded vector.')
flags.DEFINE_integer('vocab_size', 86934,
                     'The size of the vocabulary. This value '
                     'should exactly match the number of vocabulary '
                     'entries used in the dataset, because the last '
                     'indexed vocabulary entry of the preprocessed '
                     'dataset is always <eos>, and <eos> is identified '
                     'by that index.')
flags.DEFINE_bool('normalize_embeddings', True,
'Normalize word embeddings by vocab frequency')
# Optimization
flags.DEFINE_float('learning_rate', 0.001, 'Learning rate while fine-tuning.')
flags.DEFINE_float('learning_rate_decay_factor', 1.0,
'Learning rate decay factor')
flags.DEFINE_boolean('sync_replicas', False, 'sync_replica or not')
flags.DEFINE_integer('replicas_to_aggregate', 1,
'The number of replicas to aggregate')
# Regularization
flags.DEFINE_float('max_grad_norm', 1.0,
'Clip the global gradient norm to this value.')
flags.DEFINE_float('keep_prob_emb', 1.0, 'keep probability on embedding layer. '
'0.5 is optimal on IMDB with virtual adversarial training.')
flags.DEFINE_float('keep_prob_lstm_out', 1.0,
'keep probability on lstm output.')
flags.DEFINE_float('keep_prob_cl_hidden', 1.0,
'keep probability on classification hidden layer')
def get_model():
if FLAGS.bidir_lstm:
return VatxtBidirModel()
else:
return VatxtModel()
class VatxtModel(object):
"""Constructs training and evaluation graphs.
Main methods: `classifier_training()`, `language_model_training()`,
and `eval_graph()`.
Variable reuse is a critical part of the model, both for sharing variables
between the language model and the classifier, and for reusing variables for
the adversarial loss calculation. To ensure correct variable reuse, all
variables are created in Keras-style layers, wherein stateful layers (i.e.
layers with variables) are represented as callable instances of the Layer
class. Each time the Layer instance is called, it is using the same variables.
All Layers are constructed in the __init__ method and reused in the various
graph-building functions.
"""
def __init__(self, cl_logits_input_dim=None):
self.global_step = tf.train.get_or_create_global_step()
self.vocab_freqs = _get_vocab_freqs()
# Cache VatxtInput objects
self.cl_inputs = None
self.lm_inputs = None
# Cache intermediate Tensors that are reused
self.tensors = {}
# Construct layers which are reused in constructing the LM and
# Classification graphs. Instantiating them all once here ensures that
# variable reuse works correctly.
self.layers = {}
self.layers['embedding'] = layers_lib.Embedding(
FLAGS.vocab_size, FLAGS.embedding_dims, FLAGS.normalize_embeddings,
self.vocab_freqs, FLAGS.keep_prob_emb)
self.layers['lstm'] = layers_lib.LSTM(
FLAGS.rnn_cell_size, FLAGS.rnn_num_layers, FLAGS.keep_prob_lstm_out)
self.layers['lm_loss'] = layers_lib.SoftmaxLoss(
FLAGS.vocab_size,
FLAGS.num_candidate_samples,
self.vocab_freqs,
name='LM_loss')
cl_logits_input_dim = cl_logits_input_dim or FLAGS.rnn_cell_size
self.layers['cl_logits'] = layers_lib.cl_logits_subgraph(
[FLAGS.cl_hidden_size] * FLAGS.cl_num_layers, cl_logits_input_dim,
FLAGS.num_classes, FLAGS.keep_prob_cl_hidden)
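  # Illustration of the reuse scheme described in the class docstring: each
  # entry in self.layers is a single callable instance, so, e.g., calling
  # self.layers['embedding'](tokens) from both the LM and classifier graphs
  # creates the variables once and reuses them on every later call.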
@property
def pretrained_variables(self):
return (self.layers['embedding'].trainable_weights +
self.layers['lstm'].trainable_weights)
def classifier_training(self):
loss = self.classifier_graph()
train_op = optimize(loss, self.global_step)
return train_op, loss, self.global_step
def language_model_training(self):
    loss = self.language_model_graph()
train_op = optimize(loss, self.global_step)
return train_op, loss, self.global_step
def classifier_graph(self):
"""Constructs classifier graph from inputs to classifier loss.
* Caches the VatxtInput object in `self.cl_inputs`
* Caches tensors: `cl_embedded`, `cl_logits`, `cl_loss`
Returns:
loss: scalar float.
"""
inputs = _inputs('train', pretrain=False)
    self.cl_inputs = inputs
embedded = self.layers['embedding'](inputs.tokens)
self.tensors['cl_embedded'] = embedded
_, next_state, logits, loss = self.cl_loss_from_embedding(
embedded, return_intermediates=True)
tf.summary.scalar('classification_loss', loss)
self.tensors['cl_logits'] = logits
self.tensors['cl_loss'] = loss
if FLAGS.single_label:
indices = tf.stack([tf.range(FLAGS.batch_size), inputs.length - 1], 1)
labels = tf.expand_dims(tf.gather_nd(inputs.labels, indices), 1)
weights = tf.expand_dims(tf.gather_nd(inputs.weights, indices), 1)
else:
labels = inputs.labels
weights = inputs.weights
acc = layers_lib.accuracy(logits, labels, weights)
tf.summary.scalar('accuracy', acc)
adv_loss = (self.adversarial_loss() * tf.constant(
FLAGS.adv_reg_coeff, name='adv_reg_coeff'))
tf.summary.scalar('adversarial_loss', adv_loss)
total_loss = loss + adv_loss
with tf.control_dependencies([inputs.save_state(next_state)]):
total_loss = tf.identity(total_loss)
tf.summary.scalar('total_classification_loss', total_loss)
return total_loss
def la
|
gpndata/cattle
|
tests/integration/cattletest/core/test_projects.py
|
Python
|
apache-2.0
| 17,330
| 0
|
from common_fixtures import * # NOQA
from gdapi import ApiError
_USER_LIST = [
"Owner",
"Member",
"Stranger",
"OutThereUser"
]
PROJECTS = set([])
@pytest.fixture(autouse=True, scope="module")
def clean_up_projects(super_client, request):
# This randomly times out, don't know why, disabling it
# on = super_client.create_setting(name='api.projects.use.rancher_id',
# value='true')
# wait_setting_active(super_client, on)
def fin():
for project in PROJECTS:
try:
super_client.delete(super_client.by_id('project', project))
except ApiError as e:
assert e.error.status == 404
assert len(get_ids(super_client.list_project()) & PROJECTS) == 0
# super_client.delete(on)
request.addfinalizer(fin)
pass
@pytest.fixture()
def project(user_clients, admin_user_client, request):
project = _create_project(admin_user_client, user_clients, 'Owner')
def fin():
try:
admin_user_client.delete(admin_user_client.by_id('project',
project))
except ApiError as e:
assert e.error.status == 404
request.addfinalizer(fin)
return project
@pytest.fixture(scope='session')
def user_clients(admin_user_client):
clients = {}
for user in _USER_LIST:
clients[user] = create_context(admin_user_client,
kind='user').user_client
clients['admin'] = admin_user_client
return clients
@pytest.fixture()
def members(user_clients):
members = ['Owner', 'Member']
return _create_members(user_clients, members)
def get_plain_members(members):
plain_members = []
for member in members.data:
plain_members.append({
'role': member.role,
'externalId': member.externalId,
'externalIdType': member.externalIdType
})
return plain_members
def get_ids(items):
ids = []
for item in items:
ids.append(item.id)
return set(ids)
def diff_members(members, got_members):
assert len(members) == len(got_members)
members_a = set([])
members_b = set([])
for member in members:
members_a.add(member['externalId'] + ' ' + member['externalIdType']
+ ' ' + member['role'])
for member in got_members:
members_b.add(member['externalId'] + ' ' + member['externalIdType']
+ ' ' + member['role'])
assert members_a == members_b
def all_owners(members):
for member in members:
member['role'] = 'owner'
return members
def test_update_project(user_clients, project):
user_clients['Owner'].update(
project, name='Project Name', description='Some description')
assert user_clients['Owner'].by_id(
'project', project.id).name == 'Project Name'
assert user_clients['Owner'].by_id(
'project', project.id).description == 'Some description'
with pytest.raises(ApiError) as e:
user_clients['Member'].update(
project, name='Project Name from Member', description='Loop hole?')
assert e.value.error.status == 404
with pytest.raises(ApiError) as e:
user_clients['Stranger'].update(
project, name='Project Name from Stranger', description='Changed')
assert e.value.error.status == 404
def test_set_members(admin_user_client, user_clients, project):
members = get_plain_members(project.projectMembers())
members.append({
'role': 'member',
'externalId': acc_id(user_clients['Member']),
'externalIdType': 'rancher_id'
})
_set_members(admin_user_client, user_clients['Owner'], project.id, None,
422)
_set_members(admin_user_client, user_clients['Owner'], project.id, [],
422)
_set_members(admin_user_client, user_clients['Owner'], project.id,
members, None)
_set_members(admin_user_client, user_clients['Member'], project.id,
                 None, 'Attribute')
    _set_members(admin_user_client, user_clients['Member'], project.id, [],
'Attribute')
_set_members(admin_user_client, user_clients['Member'], project.id,
members, 'Attribute')
with pytest.raises(ApiError) as e:
_set_members(admin_user_client, user_clients['Stranger'],
project.id, None, 422)
assert e.value.error.status == 404
with pytest.raises(ApiError) as e:
_set_members(admin_user_client, user_clients['Stranger'],
project.id, [], 422)
assert e.value.error.status == 404
with pytest.raises(ApiError) as e:
_set_members(admin_user_client, user_clients['Stranger'],
project.id, members, 403)
assert e.value.error.status == 404
def test_get_members(admin_user_client, user_clients, members):
project = _create_project_with_members(admin_user_client,
user_clients['Owner'], members)
members = project.projectMembers()
_get_members(user_clients['Owner'], project.id, members)
_get_members(user_clients['Member'], project.id, members)
_get_members(user_clients['admin'], project.id, members)
with pytest.raises(ApiError) as e:
_get_members(user_clients['Stranger'], project.id, members)
assert e.value.error.status == 404
def test_list_all_projects(admin_user_client):
projects = admin_user_client.list_project()
projectAccounts = admin_user_client.list_account(kind='project',
limit=4000)
ids = []
ids_2 = []
for project in projects:
ids.append(project.id)
for project in projectAccounts:
ids_2.append(project.id)
assert len(list(set(ids) - set(ids_2))) == 0
def check_state(client, project_id, states, excludes):
for type in client.schema.types:
if type not in excludes:
try:
for resource in client.list(type, accountId=project_id):
assert resource.state in states
assert resource.removed is not None
except AttributeError:
pass
def client_for_project(project, admin_user_client):
project_key = admin_user_client.create_api_key(accountId=project.id)
admin_user_client.wait_success(project_key)
return api_client(project_key.publicValue, project_key.secretValue)
def test_delete_project(admin_user_client, new_context,
super_client):
project = new_context.user_client.reload(new_context.project)
proj_id = new_context.project.id
_create_resources(new_context.client)
assert len(new_context.client.list_projectMember()) == 1
project = super_client.wait_success(project.deactivate())
project = super_client.wait_success(project.remove())
check_state(new_context.client, proj_id,
['removed'], ['account', 'project', 'host', 'subscribe'])
super_client.wait_success(project.purge())
project = new_context.client.by_id('project', id=proj_id)
assert project.state == 'purged'
check_state(new_context.client, proj_id,
['purged', 'removed'], ['account', 'project', 'subscribe'])
project_members = admin_user_client\
.list('projectMember')
for member in project_members:
assert member.projectId != proj_id
def test_delete_members(admin_user_client, user_clients, members):
project = _create_project_with_members(admin_user_client,
user_clients['Owner'], members)
members = [members[0]]
assert len(user_clients['Member']
.by_id('project', project.id).projectMembers()) == 2
project.setmembers(members=members)
project = user_clients['Owner'].by_id('project', project.id)
assert len(project.projectMembers()) == 1
with pytest.raises(ApiError) as e:
user_clients['Member'].by_id('project', project.id)
assert e.value.error.status == 404
def test_change_roles(admin_user_client, use
|
alfredodeza/ceph-deploy
|
ceph_deploy/hosts/fedora/install.py
|
Python
|
mit
| 3,044
| 0.0023
|
from ceph_deploy.lib import remoto
from ceph_deploy.hosts.centos.install import repo_install, mirror_install # noqa
from ceph_deploy.hosts.util import install_yum_priorities
def install(distro, version_kind, version, adjust_repos, **kw):
# note: when split packages for ceph land for Fedora,
    # `kw['components']` will have those. Unused for now.
logger = distro.conn.logger
release = distro.release
machine = distro.machine_type
if version_kind in ['stable', 'testing']:
key = 'release'
else:
key = 'autobuild'
if adjust_repos:
install_yum_priorities(distro)
distro.conn.remote_module.enable_yum_priority_obsoletes()
logger.warning('check_obsoletes has been enabled for Yum priorities plugin')
if version_kind != 'dev':
remoto.process.run(
distro.conn,
[
'rpm',
'--import',
"https://ceph.com/git/?p=ceph.git;a=blob_plain;f=keys/{key}.asc".format(key=key)
]
)
if version_kind == 'stable':
url = 'http://ceph.com/rpm-{version}/fc{release}/'.format(
version=version,
release=release,
)
elif version_kind == 'testing':
url = 'http://ceph.com/rpm-testing/fc{release}'.format(
release=release,
)
remoto.process.run(
distro.conn,
[
'rpm',
'-Uvh',
'--replacepkgs',
'--force',
'--quiet',
'{url}noarch/ceph-release-1-0.fc{release}.noarch.rpm'.format(
url=url,
release=release,
),
]
)
if version_kind == 'dev':
logger.info('skipping install of ceph-release package')
logger.info('repo file will be created manually')
mirror_install(
distro,
'http://gitbuilder.ceph.com/ceph-rpm-fc{release}-{machine}-basic/ref/{version}/'.format(
release=release.split(".", 1)[0],
machine=machine,
version=version),
"https://ceph.com/git/?p=ceph.git;a=blob_plain;f=keys/{key}.asc".format(key=key),
adjust_repos=True,
extra_installs=False
)
# set the right priority
logger.warning('ensuring that /etc/yum.repos.d/ceph.repo contains a high priority')
distro.conn.remote_module.set_repo_priority(['Ceph', 'Ceph-noarch', 'ceph-source'])
logger.warning('altered ceph.repo priorities to contain: priority=1')
remoto.process.run(
distro.conn,
[
'yum',
'-y',
'-q',
'install',
'ceph',
'ceph-radosgw',
],
)
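# Illustrative example (values hypothetical): for version_kind='stable',
# version='0.94' and release='21', the code above fetches
# 'http://ceph.com/rpm-0.94/fc21/noarch/ceph-release-1-0.fc21.noarch.rpm'.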
|
bin3/bobo
|
bobo/fileutil.py
|
Python
|
apache-2.0
| 2,554
| 0.006265
|
# -*- coding: utf-8 -*-
#
# Copyright(C) 2013 Binson Zhang.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
__author__ = 'Binson Zhang <bin183cs@gmail.com>'
__date__ = '2013-8-25'
import os
import console
BUILD_ROOT_FILE = 'BUILD_ROOT'
BUILD_FILE = 'BUILD'
PATH_SEPARATOR = '/'
ROOT_PREFIX = '/'
WORKSPACE_PREFIX = '//'
EXTERNAL_PREFIX = '#'
def find_root_dir(working_dir):
""" Find the first directory which has the TROWEL_ROOT file.
Search from the bottom to up.
"""
root_dir = os.path.normpath(working_dir)
while root_dir != PATH_SEPARATOR:
root_file = os.path.join(root_dir, BUILD_ROOT_FILE)
if os.path.isfile(root_file):
return root_dir
root_dir = os.path.dirname(root_dir)
console.abort('Failed to find the root directory for working directory %s, '
'which has the file %s. ' % (working_dir, BUILD_ROOT_FILE))
def get_root_file(root_dir):
return os.path.join(root_dir, BUILD_ROOT_FILE)
def get_build_file(abs_work_dir):
return os.path.join(abs_work_dir, BUILD_FILE)
def get_work_dir(root_dir, abs_work_dir):
"""get work_dir from root_dir and abs_work_dir
e.g. '/home/ws/', '/home/ws/example/util' -> 'example/util'
"""
root_dir = os.path.normpath(root_dir)
abs_work_dir = os.path.normpath(abs_work_dir)
assert abs_work_dir.startswith(root_dir)
return abs_work_dir[len(root_dir)+1:]
def get_work_dir_from_path(path):
return os.path.dirname(path)
def get_sconstruct_file(root_dir):
return os.path.join(root_dir, 'SConstruct')
def norm_path(path):
"""normalize path from user input
e.g. '/util/adder' -> 'util/adder'
"""
if path.startswith(WORKSPACE_PREFIX):
# TODO(bin3): impl
console.abort('Not implemented')
elif path.startswith(ROOT_PREFIX):
return path[len(ROOT_PREFIX):]
return path
def is_external_path(path):
return path.startswith(EXTERNAL_PREFIX)
def get_exteranl_name(path):
assert is_external_path(path)
return path[len(EXTERNAL_PREFIX):]
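# Usage sketch (paths hypothetical):
# find_root_dir('/home/ws/example/util')             # '/home/ws' if it holds BUILD_ROOT
# get_work_dir('/home/ws', '/home/ws/example/util')  # -> 'example/util'
# norm_path('/util/adder')                           # -> 'util/adder'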
|
Rastii/pydev_docker
|
pydev_docker/options.py
|
Python
|
mit
| 4,867
| 0.006575
|
from typing import Optional, Iterable, Iterator, List
import itertools
import os.path
from pydev_docker import models
from pydev_docker import utils
class ContainerOptions:
"""
Options for running a docker container
"""
DEFAULT_PYPATH_DIR = "/pypath"
DEFAULT_SRC_DIR = "/src"
def __init__(self,
image: str,
source_directory: str,
*, # force kwargs only for optional
command: Optional[str]=None,
container_source_directory: str=DEFAULT_SRC_DIR,
environment_variables: Optional[Iterable[models.Environment]]=None,
ext_volumes: Optional[Iterable[models.Volume]]=None,
network: Optional[str]=None,
py_volumes: Optional[Iterable[str]]=None,
ports: Optional[Iterable[models.Port]]=None,
pypath_directory: str=DEFAULT_PYPATH_DIR,
remove_container: bool=True
) -> None:
"""
Args:
image: A valid docker image
source_directory: The absolute path of the development directory that will be
mounted as the main python "source"
command: The command that will be ran once the container is created
pypath_directory: The directory that will contain all of the mounted
extra python packages, defaults to `ContainerOptions.DEFAULT_PYPATH_DIR`
container_source_directory: Specifies the directory that will be mounted
on the docker container that contains the main source code
py_volumes: The additional python packages
ext_volumes: Additional volumes to mount that are not related to python packages
environment_variables: Additional environment variables
network: The network to connect the container to
remove_container: Remove the container after the container is finished running
"""
self._image = image
self._source_directory = source_directory
self._command = command
self._pypath_directory = pypath_directory
self._container_source_directory = container_source_directory
self._py_volumes = utils.set_default(py_volumes, []) # type: Iterable[str]
self._ext_volumes = utils.set_default(ext_volumes, []) # type: Iterable[models.Volume]
self._environment_variables = utils.set_default(
environment_variables, []
) # type: Iterable[models.Environment]
self._ports = utils.set_default(ports, []) # type: Iterable[models.Port]
self._network = network
self._remove_container = remove_container
@property
def image(self) -> str:
return self._image
@property
def command(self) -> Optional[str]:
return self._command
@property
def network(self) -> Optional[str]:
return self._network
@property
def remove_container(self) -> bool:
return self._remove_container
def get_source_volume(self) -> models.Volume:
return models.Volume(
host_location=self._source_directory,
container_location=self._container_source_directory,
)
def get_pythonpath_environment(self) -> models.Environment:
return models.Environment("PYTHONPATH", self._pypath_directory)
def iter_pypath_volumes(self) -> Iterator[models.Volume]:
for v in self._py_volumes:
pypath_dir = "{}/{}".format(self._pypath_directory, os.path.basename(v))
yield models.Volume(v, pypath_dir, mode=models.VolumeMode.RO)
    def iter_ext_volumes(self) -> Iterator[models.Volume]:
return iter(self._ext_volumes)
    def iter_environment_variables(self) -> Iterator[models.Environment]:
return iter(self._environment_variables)
def get_ports(self) -> List[models.Port]:
return list(self._ports)
def get_volume_collection(self) -> List[models.Volume]:
"""
Returns a list of `models.Volume` objects that contains all of the volumes to
mount, which includes the source volume and all external volumes
"""
volume_collection = [self.get_source_volume()]
volume_collection.extend(
itertools.chain(self.iter_pypath_volumes(), self.iter_ext_volumes())
)
return volume_collection
def get_environment_collection(self) -> List[models.Environment]:
"""
Returns a list of `models.Environment` objects that contains all of the
environment variables for the docker container including the PYTHONPATH variable
"""
environment_collection = [self.get_pythonpath_environment()]
environment_collection.extend(self.iter_environment_variables())
return environment_collection
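# Minimal usage sketch (values hypothetical; relies only on the constructor
# and accessors defined above):
# opts = ContainerOptions(
#     image='python:3.6',
#     source_directory='/home/me/project',
#     py_volumes=['/home/me/libs/mylib'],
# )
# opts.get_volume_collection()       # source volume plus /pypath/mylib (read-only)
# opts.get_environment_collection()  # includes PYTHONPATH=/pypath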
|
evemorgen/GdzieJestTenCholernyTramwajProject
|
backend/schedule_worker/handlers/real_data_handler.py
|
Python
|
mit
| 1,218
| 0.001642
|
import json
import logging
from tornado.web import RequestHandler
from tornado.gen import coroutine
from db import RealDb
class RealDataHandler(RequestHandler):
def initialize(self):
self.db = RealDb()
self.db.get_all()
def set_default_headers(self):
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header("Access-Control-Allow-Headers", "Content-Type")
self.set_header('Access-Control-Allow-Methods', 'POST')
@coroutine
def post(self):
params = json.loads(self.request.body.decode('utf-8'))
logging.info('putting new point (%s, %s) to line %s', params['lat'], params['lon'], params['line'])
        self.db.insert_point(params['id'], params['lat'], params['lon'], params['line'], params['ts'])
self.write("OK")
@coroutine
def get(self):
mes_id = self.get_argument('id')
lat = self.get_argument('lat')
lon = self.get_argument('lon')
line = self.get_argument('line')
timestamp = self.get_argument('ts')
logging.info('putting new point (%s, %s) to line %s', lat, lon, line)
        self.db.insert_point(mes_id, lat, lon, line, timestamp)
self.write("OK")
|
dpazel/music_rep
|
transformation/functions/tonalfunctions/tonal_permutation.py
|
Python
|
mit
| 3,907
| 0.003583
|
"""
File: tonal_permutation.py
Purpose: Class defining a function whose cycles are composed of tone strings (no None).
"""
from function.permutation import Permutation
from tonalmodel.diatonic_tone_cache import DiatonicToneCache
from tonalmodel.diatonic_tone import DiatonicTone
class TonalPermutation(Permutation):
"""
    Class implementation of a permutation on a set of tones, given in string format.
"""
def __init__(self, cycles, domain_tones=None):
"""
        Constructor.
:param cycles: The cycles of a permutation. List of lists. Strings or DiatonicTones.
:param domain_tones: Tones to use in cycles, if empty or None, use tones in cycles. String or
DiatonicTones.
"""
self._tone_domain = TonalPermutation.check_domain(domain_tones)
self._tone_cycles = TonalPermutation.convert_cycles_to_tones(self.tone_domain, cycles)
# if the tone_domain is not specified, we use the tones in the cycles as the domain.
if len(self._tone_domain) == 0:
for cycle in self._tone_cycles:
for tone in cycle:
self._tone_domain.add(tone)
Permutation.__init__(self, self.tone_domain, self.tone_cycles)
@property
def tone_domain(self):
return self._tone_domain
@property
def tone_cycles(self):
return self._tone_cycles
@staticmethod
def check_domain(tone_domain):
tones = set()
if tone_domain is not None:
if not isinstance(tone_domain, list) and not isinstance(tone_domain, set):
raise Exception('Tone domain must be a list or set.')
for tone_rep in tone_domain:
if isinstance(tone_rep, str):
tone = DiatonicToneCache.get_tone(tone_rep)
if tone is None:
raise Exception('Tone domain item \'{0}\' illegal syntax.'.format(tone_rep))
elif isinstance(tone_rep, DiatonicTone):
tone = tone_rep
else:
raise Exception('Tone domain item \'{0}\' must be string.'.format(tone_rep))
tones.add(tone)
return tones
@staticmethod
def convert_cycles_to_tones(tone_domain, cycles):
if cycles is None:
return []
if not isinstance(cycles, list):
            raise Exception('Cycles parameter is not a list.')
new_cycles = list()
for cycle in cycles:
            if not isinstance(cycle, list):
                raise Exception('Cycle \'{0}\' must be a list.'.format(cycle))
new_cycle = list()
            for tone_rep in cycle:
if isinstance(tone_rep, str):
tone = DiatonicToneCache.get_tone(tone_rep)
if tone is None:
raise Exception('Tone domain item \'{0}\' illegal syntax.'.format(tone_rep))
elif isinstance(tone_rep, DiatonicTone):
tone = tone_rep
else:
raise Exception('Tone domain item \'{0}\' must be string.'.format(tone_rep))
if len(tone_domain) != 0:
if tone not in tone_domain:
raise Exception('Tone \'{0}\' not in explicit tone domain.'.format(tone))
new_cycle.append(tone)
new_cycles.append(new_cycle)
return new_cycles
def __getitem__(self, key):
if isinstance(key, str):
key = DiatonicToneCache.get_tone(key)
if key is None:
raise Exception('Illegal tone syntax \'{0}\'.'.format(key))
if not isinstance(key, DiatonicTone):
            raise Exception('Key \'{0}\' must be an instance of DiatonicTone'.format(key))
return super(TonalPermutation, self).__getitem__(key)
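# Usage sketch (assumes the usual cycle semantics of the Permutation base
# class, i.e. ['C', 'E', 'G'] maps C->E, E->G and G->C):
# p = TonalPermutation([['C', 'E', 'G']])
# p['C']  # the DiatonicTone for E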
|
vIiRuS/Lagerregal
|
devices/models.py
|
Python
|
bsd-3-clause
| 10,243
| 0.002831
|
import datetime
from django.db import models
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from django.urls import reverse
import reversion
from users.models import Lageruser
from devicetypes.models import Type
from devicegroups.models import Devicegroup
from locations.models import Section
from Lagerregal import utils
from users.models import Department
@reversion.register()
class Building(models.Model):
name = models.CharField(_('Name'), max_length=200, unique=True)
street = models.CharField(_('Street'), max_length=100, blank=True)
number = models.CharField(_('Number'), max_length=30, blank=True)
zipcode = models.CharField(_('ZIP code'), max_length=5, blank=True)
city = models.CharField(_('City'), max_length=100, blank=True)
state = models.CharField(_('State'), max_length=100, blank=True)
country = models.CharField(_('Country'), max_length=100, blank=True)
def __str__(self):
return self.name
class Meta:
verbose_name = _('Building')
verbose_name_plural = _('Buildings')
permissions = (
("read_building", _("Can read Building")),
)
def get_absolute_url(self):
return reverse('building-detail', kwargs={'pk': self.pk})
def get_edit_url(self):
return reverse('building-edit', kwargs={'pk': self.pk})
@reversion.register()
class Room(models.Model):
name = models.CharField(_('Name'), max_length=200)
building = models.ForeignKey(Building, null=True, on_delete=models.SET_NULL)
section = models.ForeignKey(Section, null=True, on_delete=models.SET_NULL, related_name="rooms", blank=True)
def __str__(self):
if self.building:
return self.name + " (" + str(self.building) + ")"
else:
return self.name
class Meta:
verbose_name = _('Room')
verbose_name_plural = _('Rooms')
permissions = (
("read_room", _("Can read Room")),
)
def get_absolute_url(self):
return reverse('room-detail', kwargs={'pk': self.pk})
def get_edit_url(self):
return reverse('room-edit', kwargs={'pk': self.pk})
@reversion.register()
class Manufacturer(models.Model):
name = models.CharField(_('Manufacturer'), max_length=200, unique=True)
def __str__(self):
return self.name
class Meta:
verbose_name = _('Manufacturer')
verbose_name_plural = _('Manufacturers')
permissions = (
("read_manufacturer", _("Can read Manufacturer")),
)
def get_absolute_url(self):
return reverse('manufacturer-detail', kwargs={'pk': self.pk})
def get_edit_url(self):
return reverse('manufacturer-edit', kwargs={'pk': self.pk})
class Bookmark(models.Model):
device = models.ForeignKey("Device", on_delete=models.CASCADE)
user = models.ForeignKey(Lageruser, on_delete=models.CASCADE)
@reversion.register(follow=["typeattributevalue_set", ], exclude=[
"archived", "currentlending", "inventoried", "bookmarks", "trashed",
], ignore_duplicates=True)
class Device(models.Model):
created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True)
creator = models.ForeignKey(Lageruser, on_delete=models.SET_NULL, null=True)
name = models.CharField(_('Name'), max_length=200)
inventorynumber = models.CharField(_('Inventorynumber'), max_length=50, blank=True)
serialnumber = models.CharField(_('Serialnumber'), max_length=50, blank=True)
manufacturer = models.ForeignKey(Manufacturer, blank=True, null=True, on_delete=models.SET_NULL)
hostname = models.CharField(_('Hostname'), max_length=40, blank=True)
description = models.CharField(_('Description'), max_length=10000, blank=True)
devicetype = models.ForeignKey(Type, blank=True, null=True, on_delete=models.SET_NULL)
room = models.ForeignKey(Room, blank=True, null=True, on_delete=models.SET_NULL)
group = models.ForeignKey(Devicegroup, blank=True, null=True, related_name="devices", on_delete=models.SET_NULL)
webinterface = models.CharField(_('Webinterface'), max_length=60, blank=True)
templending = models.BooleanField(default=False, verbose_name=_("For short term lending"))
currentlending = models.ForeignKey("Lending", related_name="currentdevice", null=True, blank=True,
on_delete=models.SET_NULL)
manual = models.FileField(upload_to=utils.get_file_location, null=True, blank=True)
contact = models.ForeignKey(Lageruser, related_name="as_contact",
help_text=_("Person to contact about using this device"), blank=True,
null=True, on_delete=models.SET_NULL)
archived = models.DateTimeField(null=True, blank=True)
trashed = models.DateTimeField(null=True, blank=True)
inventoried = models.DateTimeField(null=True, blank=True)
bookmarkers = models.ManyToManyField(Lageruser, through=Bookmark, related_name="bookmarks", blank=True)
department = models.ForeignKey(Department, null=True, blank=True, related_name="devices", on_delete=models.SET_NULL)
is_private = models.BooleanField(default=False)
used_in = models.ForeignKey('self', null=True, blank=True, on_delete=models.SET_NULL,)
def __str__(self):
return self.name
class Meta:
verbose_name = _('Device')
verbose_name_plural = _('Devices')
permissions = (
("boss_mails", _("Emails for bosses")),
("managment_mails", _("Emails for managment")),
("support_mails", _("Emails for support")),
("read_device", _("Can read Device")),
("lend_device", _("Can lend Device")),
("read_puppetdetails", _("Read Puppet Details"))
)
def get_absolute_url(self):
return reverse('device-detail', kwargs={'pk': self.pk})
def get_edit_url(self):
return reverse('device-edit', kwargs={'pk': self.pk})
def get_as_dict(self):
dict = {}
dict["name"] = self.name
dict["description"] = self.description
dict["manufacturer"] = self.manufacturer
dict["devicetype"] = self.devicetype
dict["room"] = self.room
return dict
def is_overdue(self):
if self.currentlending is None:
return False
if self.currentlending.duedate < datetime.date.today():
return True
return False
@staticmethod
def active():
return Device.objects.filter(archived=None, trashed=None)
@staticmethod
def devices_for_departments(departments=[]):
return Device.objects.filter(department__in=departments).exclude(
~Q(department__in=departments), is_private=True)
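    # Illustrative query (assumes existing Department rows): devices visible
    # to a set of departments, excluding other departments' private devices.
    # Device.devices_for_departments(Department.objects.filter(name='IT'))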
class DeviceInformationType(models.Model):
keyname = models.CharField(_('Name'), max_length=200)
humanname = models.CharField(_('Human readable name'), max_length=200)
def __str__(self):
return self.humanname
class Meta:
verbose_name = _('Information Type')
verbose_name_plural = _('Information Type')
class DeviceInformation(models.Model):
information = models.CharField(_('Information'), max_length=200)
device = models.ForeignKey(Device, related_name="information", on_delete=models.CASCADE)
infotype = models.ForeignKey(DeviceInformationType, on_delete=models.CASCADE)
    def __str__(self):
        return str(self.infotype) + ": " + self.information
class Meta:
verbose_name = _('Information')
verbose_name_plural = _('Information')
@reversion.register(ignore_duplicates=True)
class Lending(models.Model):
owner = models.ForeignKey(Lageruser, verbose_name=_("Lent to"), on_delete=models.SET_NULL, null=True)
lenddate = models.DateField(auto_now_add=True)
duedate = models.DateField(blank=True, null=True)
duedate_email = models.DateField(blank=True, null=True)
returndate = models.DateField(blank=True, null=True)
device = models.ForeignKey(Device, null=True, blank=True, on_delete=models.CASCADE)
smalldevice = models.CharField(_("Small Device"), max_length=200, null=True, blank=True
|
NikolaYolov/invenio_backup
|
modules/webstat/lib/webstatadmin.py
|
Python
|
gpl-2.0
| 10,293
| 0.004372
|
## $id: webstatadmin.py,v 1.28 2007/04/01 23:46:46 tibor exp $
##
## This file is part of Invenio.
## Copyright (C) 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
__lastupdated__ = "$Date$"
import sys, webstat
from invenio.dbquery import run_sql
from invenio.bibtask import task_init, task_get_option, task_set_option, \
task_has_option, task_update_progress, write_message
from invenio.webstat_config import CFG_WEBSTAT_CONFIG_PATH
from invenio.config import CFG_SITE_RECORD
def main():
"""Main dealing with all the BibTask magic."""
task_init(authorization_action="runwebstatadmin",
authorization_msg="Webstat Administrator",
description="Description: %s Creates/deletes custom events. Can be set\n"
" to cache key events and previously defined custom events.\n" % sys.argv[0],
help_specific_usage=" -n, --new-event=ID create a new custom event with the human-readable ID\n"
" -r, --remove-event=ID remote the custom event with id ID and all its data\n"
" -S, --show-events show all currently available custom events\n"
" -c, --cache-events=CLASS|[ID] caches the events defined by the class or IDs, e.g.:\n"
" -c ALL\n"
" -c KEYEVENTS\n"
" -c CUSTOMEVENTS\n"
" -c 'event id1',id2,'testevent'\n"
" -d,--dump-config dump default config file\n"
" -e,--load-config create the custom events described in config_file\n"
"\nWhen creating events (-n) the following parameters are also applicable:\n"
" -l, --event-label=NAME set a descriptive label to the custom event
|
\n"
" -a, --args=[NAME] set column headers for additional custom event arguments\n"
" (e.g. -a country,person,car)
|
\n",
version=__revision__,
specific_params=("n:r:Sl:a:c:de", ["new-event=", "remove-event=", "show-events",
"event-label=", "args=", "cache-events=", "dump-config",
"load-config"]),
task_submit_elaborate_specific_parameter_fnc=task_submit_elaborate_specific_parameter,
task_submit_check_options_fnc=task_submit_check_options,
task_run_fnc=task_run_core)
def task_submit_elaborate_specific_parameter(key, value, opts, args):
"""
    Given the string key it checks its meaning, eventually using the value.
    Usually it fills some key in the options dict. It must return True if
    it has elaborated the key, False if it doesn't know that key.
"""
if key in ("-n", "--new-event"):
task_set_option("create_event_with_id", value)
elif key in ("-r", "--remove-event"):
task_set_option("destroy_event_with_id", value)
elif key in ("-S", "--show-events"):
task_set_option("list_events", True)
elif key in ("-l", "--event-label"):
task_set_option("event_name", value)
elif key in ("-a", "--args"):
task_set_option("column_headers", value.split(','))
elif key in ("-c", "--cache-events"):
task_set_option("cache_events", value.split(','))
elif key in ("-d", "--dump-config"):
task_set_option("dump_config", True)
elif key in ("-e", "--load-config"):
task_set_option("load_config", True)
else:
return False
return True
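# A minimal sketch of how one flag flows through the handler above; the
# option value is invented, and actually running this needs the BibTask
# machinery imported at the top of the module.
def _example_flag_handling():
    # stores ["country", "person", "car"] under "column_headers" and returns True
    return task_submit_elaborate_specific_parameter("-a", "country,person,car", {}, [])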
def task_submit_check_options():
"""
NOTE: Depending on the parameters, either "BibSched mode" or plain
    straight-forward execution mode is entered.
"""
if task_has_option("create_event_with_id"):
print webstat.create_customevent(task_get_option("create_event_with_id"),
task_get_option("event_name", None),
task_get_option("column_headers", []))
sys.exit(0)
elif task_has_option("destroy_event_with_id"):
print webstat.destroy_customevent(task_get_option("destroy_event_with_id"))
sys.exit(0)
elif task_has_option("list_events"):
events = webstat._get_customevents()
if len(events) == 0:
print "There are no custom events available."
else:
print "Available custom events are:\n"
print '\n'.join([x[0] + ": " + ((x[1] == None) and "No descriptive name" or str(x[1])) for x in events])
sys.exit(0)
elif task_has_option("cache_events"):
events = task_get_option("cache_events")
write_message(str(events), verbose=9)
if events[0] == 'ALL':
keyevents_to_cache = webstat.KEYEVENT_REPOSITORY.keys()
customevents_to_cache = [x[0] for x in webstat._get_customevents()]
elif events[0] == 'KEYEVENTS':
keyevents_to_cache = webstat.KEYEVENT_REPOSITORY.keys()
customevents_to_cache = []
elif events[0] == 'CUSTOMEVENTS':
keyevents_to_cache = []
customevents_to_cache = [x[0] for x in webstat._get_customevents()]
elif events[0] != '':
keyevents_to_cache = [x for x in webstat.KEYEVENT_REPOSITORY.keys() if x in events]
customevents_to_cache = [x[0] for x in webstat._get_customevents() if x in events]
# Control so that we have valid event names
if len(keyevents_to_cache + customevents_to_cache) == 0:
# Oops, no events. Abort and display help.
return False
else:
task_set_option("keyevents", keyevents_to_cache)
task_set_option("customevents", customevents_to_cache)
return True
elif task_has_option("dump_config"):
print """\
[general]
visitors_box = True
search_box = True
record_box = True
bibsched_box = True
basket_box = True
apache_box = True
uptime_box = True
[webstat_custom_event_1]
name = baskets
param1 = action
param2 = basket
param3 = user
[apache_log_analyzer]
profile = nil
nb-histogram-items-to-print = 20
exclude-ip-list = ("137.138.249.162")
home-collection = "Atlantis Institute of Fictive Science"
search-interface-url = "/?"
detailed-record-url = "/%s/"
search-engine-url = "/search?"
search-engine-url-old-style = "/search.py?"
basket-url = "/yourbaskets/"
add-to-basket-url = "/yourbaskets/add"
display-basket-url = "/yourbaskets/display"
display-public-basket-url = "/yourbaskets/display_public"
alert-url = "/youralerts/"
display-your-alerts-url = "/youralerts/list"
display-your-searches-url = "/youralerts/display"
""" % CFG_SITE_RECORD
sys.exit(0)
elif task_has_option("load_config"):
from ConfigParser import ConfigParser
conf = ConfigParser()
conf.read(CFG_WEBSTAT_CONFIG_PATH)
for section in conf.sections():
if section[:21] == "webstat_custom_event_":
cols = []
name = ""
for option, value in
catapult-project/catapult | third_party/gsutil/third_party/pyasn1-modules/tests/test_rfc2511.py | Python | bsd-3-clause | 1,591 | 0.000629
#
# This file is part of pyasn1-modules software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
import sys
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.codec.der import encoder as der_encoder
from pyasn1_modules import pem
from pyasn1_modules import rfc2511
try:
import unittest2 as unittest
except ImportError:
import unittest
class CertificateReqTestCase(unittest.TestCase):
pem_text = """\
MIIBozCCAZ8wggEFAgUAwTnj2jCByoABAqURMA8xDTALBgNVBAMTBHVzZXKmgZ8w
DQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAJ6ZQ2cYbn/lFsmBOlRltbRbFQUvvE0Q
nbopOu1kC7Bmaaz7QTx8nxeiHi4m7uxCbGGxHNoGCt7EmdG8eZUBNAcHyGlXrJdm
0z3/uNEGiBHq+xB8FnFJCA5EIJ3RWFnlbu9otSITLxWK7c5+/NHmWM+yaeHD/f/h
rp01c/8qXZfZAgMBAAGpEDAOBgNVHQ8BAf8EBAMCBeAwLzASBgkrBgEFBQcFAQEM
BTExMTExMBkGCSsGAQUFBwUBAgwMc2VydmVyX21hZ2ljoYGTMA0GCSqGSIb3DQEB
BQUAA4GBAEI3KNEvTq/n1kNVhNhPkovk1AZxyJrN1u1+7Gkc4PLjWwjLOjcEVWt4
AajUk/gkIJ6bbeO+fZlMjHfPSDKcD6AV2hN+n72QZwfzcw3icNvBG1el9EU4XfIm
xfu5YVWi81/fw8QQ6X6YGHFQkomLd7jxakVyjxSng9BhO6GpjJNF
"""
def setUp(self):
self.asn1Spec = rfc2511.CertReqMessages()
def testDerCodec(self):
substrate = pem.readBase64fromText(self.pem_text)
asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
assert not rest
        assert asn1Object.prettyPrint()
assert der_encoder.encode(asn1Object) == substrate
suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite)
cprov/snapcraft | snapcraft/internal/project_loader/_env.py | Python | gpl-3.0 | 4,619 | 0.000649
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from snapcraft import formatting_utils, project
from snapcraft.internal import common, elf, pluginhandler
from typing import Dict, List
def env_for_classic(base: str, arch_triplet: str) -> List[str]:
"""Set the required environment variables for a classic confined build."""
env = []
core_path = common.get_core_path(base)
paths = common.get_library_paths(core_path, arch_triplet, existing_only=False)
env.append(
formatting_utils.format_path_variable(
"LD_LIBRARY_PATH", paths, prepend="", separator=":"
)
)
return env
def runtime_env(root: str, arch_triplet: str) -> List[str]:
"""Set the environment variables required for running binaries."""
env = []
env.append(
'PATH="'
+ ":".join(
["{0}/usr/sbin", "{0}/usr/bin", "{0}/sbin", "{0}/bin", "$PATH"]
).format(root)
+ '"'
)
# Add the default LD_LIBRARY_PATH
paths = common.get_library_paths(root, arch_triplet)
# Add more specific LD_LIBRARY_PATH from staged packages if necessary
paths += elf.determine_ld_library_path(root)
if paths:
env.append(
formatting_utils.format_path_variable(
"LD_LIBRARY_PATH", paths, prepend="", separator=":"
)
)
return env
def build_env(root: str, snap_name: str, arch_triplet: str) -> List[str]:
"""Set the environment variables required for building.
This is required for the current parts installdir due to stage-packages
and also to setup the stagedir.
"""
env = []
paths = common.get_include_paths(root, arch_triplet)
if paths:
for envvar in ["CPPFLAGS", "CFLAGS", "CXXFLAGS"]:
env.append(
formatting_utils.format_path_variable(
envvar, paths, prepend="-I", separator=" "
)
)
paths = common.get_library_paths(root, arch_triplet)
if paths:
env.append(
formatting_utils.format_path_variable(
"LDFLAGS", paths, prepend="-L", separator=" "
)
)
paths = common.get_pkg_config_paths(root, arch_triplet)
if paths:
env.append(
formatting_utils.format_path_variable(
"PKG_CONFIG_PATH", paths, prepend="", separator=":"
)
)
return env
def build_env_for_stage(stagedir: str, snap_name: str, arch_triplet: str) -> List[str]:
env = build_env(stagedir, snap_name, arch_triplet)
env.append('PERL5LIB="{0}/usr/share/perl5/"'.format(stagedir))
return env
def snapcraft_global_environment(project: project.Project) -> Dict[str, str]:
if project.info.name:
name = project.info.name
else:
name = ""
if project.info.version:
version = project.info.version
else:
version = ""
if project.info.grade:
grade = project.info.grade
else:
grade = ""
return {
"SNAPCRAFT_ARCH_TRIPLET": project.arch_triplet,
"SNAPCRAFT_PARALLEL_BUILD_COUNT": project.parallel_build_count,
"SNAPCRAFT_PROJECT_NAME": name,
"SNAPCRAFT_PROJECT_VERSION": version,
"SNAPCRAFT_PROJECT_GRADE": grade,
"SNAPCRAFT_STAGE": project.stage_dir,
"SNAPCRAFT_PRIME": project.prime_dir,
}
def snapcraft_part_environment(part: pluginhandler.PluginHandler) -> Dict[str, str]:
return {
"SNAPCRAFT_PART_SRC": part.plugin.sourcedir,
"SNAPCRAFT_PART_BUILD": part.plugin.builddir,
"SNAPCRAFT_PART_INSTALL": part.plugin.installdir,
}
def environment_to_replacements(environment: Dict[str, str]) -> Dict[str, str]:
replacements = {} # type: Dict[str, str]
for variable, value in environment.items():
# Support both $VAR and ${VAR} syntax
replacements["${}".format(variable)] = value
replacements["${{{}}}".format(variable)] = value
return replacements
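# A quick sanity check of environment_to_replacements; the variable name
# and value below are invented for illustration.
def _example_replacements() -> None:
    replacements = environment_to_replacements({"SNAPCRAFT_STAGE": "/stage"})
    assert replacements["$SNAPCRAFT_STAGE"] == "/stage"
    assert replacements["${SNAPCRAFT_STAGE}"] == "/stage"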
mazaclub/electrum-nmc | lib/commands.py | Python | gpl-3.0 | 19,964 | 0.010068
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
import time
import copy
from util import print_msg, format_satoshis, print_stderr
from bitcoin import is_valid, hash_160_to_bc_address, hash_160
from decimal import Decimal
import bitcoin
from transaction import Transaction
class Command:
def __init__(self, name, min_args, max_args, requires_network, requires_wallet, requires_password, description, syntax = '', options_syntax = ''):
self.name = name
self.min_args=min_args
self.max_args = max_args
self.requires_network = requires_network
self.requires_wallet = requires_wallet
self.requires_password = requires_password
self.description = description
self.syntax = syntax
self.options = options_syntax
known_commands = {}
def register_command(*args):
global known_commands
name = args[0]
known_commands[name] = Command(*args)
payto_options = ' --fee, -f: set transaction fee\n --fromaddr, -F: send from address -\n --changeaddr, -c: send change to address'
listaddr_options = " -a: show all addresses, including change addresses\n -l: include labels in results"
restore_options = " accepts a seed or master public key."
mksendmany_syntax = 'mksendmanytx <recipient> <amount> [<recipient> <amount> ...]'
payto_syntax = "payto <recipient> <amount> [label]\n<recipient> can be a bitcoin address or a label"
paytomany_syntax = "paytomany <recipient> <amount> [<recipient> <amount> ...]\n<recipient> can be a bitcoin address or a label"
signmessage_syntax = 'signmessage <address> <message>\nIf you want to lead or end a message with spaces, or want double spaces inside the message make sure you quote the string. I.e. " Hello This is a weird String "'
verifymessage_syntax = 'verifymessage <address> <signature> <message>\nIf you want to lead or end a message with spaces, or want double spaces inside the message make sure you quote the string. I.e. " Hello This is a weird String "'
# command
# requires_network
# requires_wallet
# requires_password
register_command('contacts', 0, 0, False, True, False, 'Show your list of contacts')
register_command('create', 0, 0, False, True, False, 'Create a new wallet')
register_command('createmultisig', 2, 2, False, True, False, 'similar to bitcoind\'s command')
register_command('createrawtransaction', 2, 2, False, True, False, 'Create an unsigned transaction. The syntax is similar to bitcoind.')
register_command('deseed', 0, 0, False, True, False, 'Remove seed from wallet, creating a seedless, watching-only wallet.')
register_command('decoderawtransaction', 1, 1, False, False, False, 'similar to bitcoind\'s command')
register_command('getprivatekeys', 1, 1, False, True, True, 'Get the private keys of a given address', 'getprivatekeys <bitcoin address>')
register_command('dumpprivkeys', 0, 0, False, True, True, 'Dump all private keys in your wallet')
register_command('freeze', 1, 1, False, True, True, 'Freeze the funds at one of your wallet\'s addresses', 'freeze <address>')
register_command('getbalance', 0, 1, True, True, False, 'Return the balance of your wallet, or of one account in your wallet', 'getbalance [<account>]')
register_command('getservers', 0, 0, True, False, False, 'Return the list of available servers')
register_command('getversion', 0, 0, False, False, False, 'Return the version of your client', 'getversion')
register_command('getaddressbalance', 1, 1, True, False, False, 'Return the balance of an address', 'getaddressbalance <address>')
register_command('getaddresshistory', 1, 1, True, False, False, 'Return the transaction history of a wallet address', 'getaddresshistory <address>')
register_command('getconfig', 1, 1, False, False, False, 'Return a configuration variable', 'getconfig <name>')
register_command('getpubkeys', 1, 1, False, True, False, 'Return the public keys for a wallet address', 'getpubkeys <bitcoin address>')
register_command('getrawtransaction', 1, 1, True, False, False, 'Retrieve a transaction', 'getrawtransaction <txhash>')
register_command('getseed', 0, 0, False, True, True, 'Print the generation seed of your wallet.')
register_command('getmpk', 0, 0, False, True, False, 'Return your wallet\'s master public key', 'getmpk')
register_command('help', 0, 1, False, False, False, 'Prints this help')
register_command('history', 0, 0, True, True, False, 'Returns the transaction history of your wallet')
register_command('importprivkey', 1, 1, False, True, True, 'Import a private key', 'importprivkey <privatekey>')
register_command('ismine', 1, 1, False, True, False, 'Return true if and only if address is in wallet', 'ismine <address>')
register_command('listaddresses', 2, 2, False, True, False, 'Returns your list of addresses.', '', listaddr_options)
register_command('listunspent', 0, 0, True, True, False, 'Returns the list of unspent inputs in your wallet.')
register_command('getaddressunspent', 1, 1, True, False, False, 'Returns the list of unspent inputs for an address.')
register_command('mktx', 5, 5, False, True, True, 'Create a signed transaction', 'mktx <recipient> <amount> [label]', payto_options)
register_command('mksendmanytx', 4, 4, False, True, True, 'Create a signed transaction', mksendmany_syntax, payto_options)
register_command('payto', 5, 5, True, True, True, 'Create and broadcast a transaction.', payto_syntax, payto_options)
register_command('paytomany', 4, 4, True, True, True, 'Create and broadcast a transaction.', paytomany_syntax, payto_options)
register_command('password', 0, 0, False, True, True, 'Change your password')
register_command('restore', 0, 0, True, True, False, 'Restore a wallet', '', restore_options)
register_command('searchcontacts', 1, 1, False, True, False, 'Search through contacts, return matching entries', 'searchcontacts <query>')
register_command('setconfig', 2, 2, False, False, False, 'Set a configuration variable', 'setconfig <name> <value>')
register_command('setlabel', 2,-1, False, True, False, 'Assign a label to an item', 'setlabel <tx_hash> <label>')
register_command('sendrawtransaction', 1, 1, True, False, False, 'Broadcasts a transaction to the network.', 'sendrawtransaction <tx in hexadecimal>')
register_command('signtxwithkey', 1, 3, False, False, False, 'Sign a serialized transaction with a key','signtxwithkey <tx> <key>')
register_command('signtxwithwallet', 1, 3, False, True, True, 'Sign a serialized transaction with a wallet','signtxwithwallet <tx>')
register_command('signmessage', 2,-1, False, True, True, 'Sign a message with a key', signmessage_syntax)
register_command('unfreeze', 1, 1, False, True, False, 'Unfreeze the funds at one of your wallet\'s address', 'unfreeze <address>')
register_command('validateaddress', 1, 1, False, False, False, 'Check that the address is valid', 'validateaddress <address>')
register_command('verifymessage', 3,-1, Fals
samueldmq/infosystem | infosystem/subsystem/user/manager.py | Python | apache-2.0 | 6,277 | 0
import os
import hashlib
import flask
from sparkpost import SparkPost
from infosystem.common import exception
from infosystem.common.subsystem import manager
from infosystem.common.subsystem import operation
_HTML_EMAIL_TEMPLATE = """
<div style="width: 100%; text-align: center">
<h1>{app_name}</h1>
<h2>CONFIRMAR E CRIAR SENHA</h2>
</div>
<p>Você acaba de ser cadastrado no portal da
{app_name}.</p>
<p>Para ter acesso ao sistema você deve clicar no link abaixo
para confirmar esse email e criar uma
senha.</p>
<div style="width: 100%; text-align: center">
<a href="{reset_url}">Clique aqui para CONFIRMAR o
email e CRIAR uma senha.</a>
</div>
"""
def send_email(token_id, user, domain):
try:
sparkpost = SparkPost()
default_app_name = "INFOSYSTEM"
default_email_use_sandbox = False
default_reset_url = 'http://objetorelacional.com.br/#/reset'
default_noreply_email = 'noreply@objetorelacional.com.br'
        default_email_subject = 'INFOSYSTEM - CONFIRMAR email e CRIAR senha'
infosystem_app_name = os.environ.get(
'INFOSYSTEM_APP_NAME', default_app_name)
infosystem_reset_url = os.environ.get(
'INFOSYSTEM_RESET_URL', default_reset_url)
infosystem_noreply_email = os.environ.get(
'INFOSYSTEM_NOREPLY_EMAIL', default_noreply_email)
infosystem_email_subject = os.environ.get(
'INFOSYSTEM_EMAIL_SUBJECT', default_email_subject)
infosystem_email_use_sandbox = os.environ.get(
'INFOSYSTEM_EMAIL_USE_SANDBOX',
default_email_use_sandbox) == 'True'
url = infosystem_reset_url + '/' + token_id + '/' + domain.name
sparkpost.transmissions.send(
use_sandbox=infosystem_email_use_sandbox,
recipients=[user.email],
html=_HTML_EMAIL_TEMPLATE.format(
app_name=infosystem_app_name, reset_url=url),
from_email=infosystem_noreply_email,
subject=infosystem_email_subject
)
except Exception:
# TODO(fdoliveira): do something here!
pass
class Create(operation.Create):
def do(self, session, **kwargs):
self.entity = super().do(session, **kwargs)
self.token = self.manager.api.tokens.create(
session=session, user=self.entity)
self.domain = self.manager.api.domains.get(id=self.entity.domain_id)
if not self.domain:
raise exception.OperationBadRequest()
return self.entity
# def post(self):
# send_reset_password_email(self.token.id, self.entity, _RESET_URL)
# send_email(self.token.id, self.entity, self.domain)
class Update(operation.Update):
def do(self, session, **kwargs):
password = kwargs.get('password', None)
if password:
kwargs['password'] = hashlib.sha256(
password.encode('utf-8')).hexdigest()
self.entity = super().do(session, **kwargs)
return self.entity
class Restore(operation.Operation):
def pre(self, **kwargs):
email = kwargs.get('email', None)
domain_name = kwargs.get('domain_name', None)
infosystem_reset_url = os.environ.get(
'INFOSYSTEM_RESET_URL', 'http://objetorelacional.com.br/#/reset/')
self.reset_url = kwargs.get('reset_url', infosystem_reset_url)
if not (domain_name and email and self.reset_url):
raise exception.OperationBadRequest()
domains = self.manager.api.domains.list(name=domain_name)
if not domains:
raise exception.OperationBadRequest()
self.domain = domains[0]
users = self.manager.api.users.list(
email=email, domain_id=self.domain.id)
if not users:
raise exception.OperationBadRequest()
self.user = users[0]
return True
def do(self, session, **kwargs):
token = self.manager.api.tokens.create(user=self.user)
send_email(token.id, self.user, self.domain)
class Reset(operation.Operation):
def pre(self, **kwargs):
self.token = flask.request.headers.get('token')
self.password = kwargs.get('password')
if not (self.token and self.password):
raise exception.OperationBadRequest()
return True
def do(self, session, **kwargs):
token = self.manager.api.tokens.get(id=self.token)
self.manager.update(id=token.user_id, password=self.password)
def post(self):
self.manager.api.tokens.delete(id=self.token)
class Routes(operation.Operation):
def do(self, session, user_id, **kwargs):
grants = self.manager.api.grants.list(user_id=user_id)
grants_ids = [g.role_id for g in grants]
roles = self.manager.api.roles.list()
user_roles_id = [r.id for r in roles if r.id in grants_ids]
# FIXME(fdoliveira) Try to send user_roles_id as paramater on query
policies = self.manager.api.policies.list()
policies_capabilitys_id = [
p.capability_id for p in policies if p.role_id in user_roles_id]
user = self.manager.api.users.list(id=user_id)[0]
capabilities = self.manager.api.capabilities.list(
domain_id=user.domain_id)
policy_capabilities = [
c for c in capabilities if c.id in policies_capabilitys_id]
# NOTE(samueldmq): if there is no policy for a capabiltiy,
# then it's open! add it too!
restricted_capabilities = [p.capability_id for p in policies]
open_capabilities = [
c for c in capabilities if c.id not in restricted_capabilities]
user_routes = [self.manager.api.routes.get(id=c.route_id) for c in (
policy_capabilities + open_capabilities)]
bypass_routes = self.manager.api.routes.list(bypass=True)
return list(set(user_routes).union(set(bypass_routes)))
class Manager(manager.Manager):
def __init__(self, driver):
super(Manager, self).__init__(driver)
self.create = Create(self)
self.update = Update(self)
self.restore = Restore(self)
self.reset = Reset(self)
self.routes = Routes(self)
aioc/aminiaio | aminiaio/db/contest.py | Python | mit | 1,280 | 0.035156
from . import conn
class Contest(object):
def __init__(self, contestId, name, length, description, problems):
self._contestId = contestId
self._name = name
self._length = length
self._description = description
self._problems = problems
@classmethod
def load(cls, contestId):
with conn.cursor() as cur:
cur.execute('SELECT name, length, description, problems FROM contests WHERE contest_id=%s;', (contestId,))
result = cur.fetchone()
if result is None:
return None
name, length, description, problems = result
return cls(contestId, name, length, description, problems)
@classmethod
    def create(cls, name, length, description, problems):
with conn.cursor() as cur:
cur.execute('''
INSERT INTO contests (name, length, description, problems)
VALUES (%(name)s, %(length)s, %(description)s, %(problems)s)
RETURNING contest_id;
''', {
'name': name,
'length': length,
'description': description,
'problems': problems,
})
contestId, = cur.fetchone()
return cls.load(contestId)
def contestId(self):
return self._contestId
def name(self):
return self._name
def length(self):
return self._length
def description(self):
return self._description
def problems(self):
return self._problems
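# A hypothetical round-trip using the class above; it assumes a live
# database behind `conn`, and every field value here is invented.
def _example_contest_roundtrip():
    contest = Contest.create("Practice Round", 7200, "Warm-up contest", [101, 102])
    same = Contest.load(contest.contestId())
    return same.name(), same.length()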
peterheim1/robbie_ros | robbie_ai/nodes/aiml/know.py | Python | bsd-3-clause | 147 | 0.027211
#!/usr/bin/env python
def Test():
    text = 'hi from'
k = text + "call "
print k
return k
def euro():
print "high"
cbigler/jackrabbit | jackrabbit/request.py | Python | mit | 977 | 0.002047
from .exceptions import MalformedRequestException
class Request(object):
def __init__(self, version, metadata, arguments):
self._version = version
self._metadata = metadata
self._arguments = arguments
@property
def version(self):
return self._version
@property
def arguments(self):
        return self._arguments
    @property
    def metadata(self):
        return self._metadata
@staticmethod
def loads(s, serializer):
try:
l = serializer.loads(s)
except(ValueError, TypeError):
raise MalformedRequestException(serializer.__name__, s)
try:
version, metadata, args = l[0:3]
except ValueError:
raise MalformedRequestException(serializer.__name__, s)
else:
return Request(version, metadata, args)
def dumps(self, serializer):
return serializer.dumps([self.version, self.metadata, self.arguments])
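# Round-trip sketch for the Request class above, using the stdlib json
# module as the serializer (anything exposing loads/dumps fits); the
# metadata and arguments are invented.
import json
def _example_request_roundtrip():
    wire = Request(1, {"request_id": "abc"}, ["arg1", "arg2"]).dumps(json)
    same = Request.loads(wire, json)
    assert same.version == 1 and same.arguments == ["arg1", "arg2"]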
SrNetoChan/QGIS | python/plugins/processing/algs/gdal/GridNearestNeighbor.py | Python | gpl-2.0 | 8,683 | 0.003685
# -*- coding: utf-8 -*-
"""
***************************************************************************
GridNearestNeighbor.py
---------------------
Date : October 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'October 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsRasterFileWriter,
QgsProcessing,
QgsProcessingParameterDefinition,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterEnum,
QgsProcessingParameterField,
QgsProcessingParameterNumber,
QgsProcessingParameterString,
                       QgsProcessingParameterRasterDestination)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class GridNearestNeighbor(GdalAlgorithm):
INPUT = 'INPUT'
Z_FIELD = 'Z_FIELD'
RADIUS_1 = 'RADIUS_1'
RADIUS_2 = 'RADIUS_2'
ANGLE = 'ANGLE'
NODATA = 'NODATA'
OPTIONS = 'OPTIONS'
EXTRA = 'EXTRA'
DATA_TYPE = 'DATA_TYPE'
OUTPUT = 'OUTPUT'
TYPES = ['Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 'Float32', 'Float64', 'CInt16', 'CInt32', 'CFloat32', 'CFloat64']
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
self.tr('Point layer'),
[QgsProcessing.TypeVectorPoint]))
z_field_param = QgsProcessingParameterField(self.Z_FIELD,
self.tr('Z value from field'),
None,
self.INPUT,
QgsProcessingParameterField.Numeric,
optional=True)
z_field_param.setFlags(z_field_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(z_field_param)
self.addParameter(QgsProcessingParameterNumber(self.RADIUS_1,
self.tr('The first radius of search ellipse'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0,
defaultValue=0.0))
self.addParameter(QgsProcessingParameterNumber(self.RADIUS_2,
self.tr('The second radius of search ellipse'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0,
defaultValue=0.0))
self.addParameter(QgsProcessingParameterNumber(self.ANGLE,
self.tr('Angle of search ellipse rotation in degrees (counter clockwise)'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0,
maxValue=360.0,
defaultValue=0.0))
self.addParameter(QgsProcessingParameterNumber(self.NODATA,
self.tr('NODATA marker to fill empty points'),
type=QgsProcessingParameterNumber.Double,
defaultValue=0.0))
options_param = QgsProcessingParameterString(self.OPTIONS,
self.tr('Additional creation options'),
defaultValue='',
optional=True)
options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
options_param.setMetadata({
'widget_wrapper': {
'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}})
self.addParameter(options_param)
extra_param = QgsProcessingParameterString(self.EXTRA,
self.tr('Additional command-line parameters'),
defaultValue=None,
optional=True)
extra_param.setFlags(extra_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(extra_param)
dataType_param = QgsProcessingParameterEnum(self.DATA_TYPE,
self.tr('Output data type'),
self.TYPES,
allowMultiple=False,
defaultValue=5)
dataType_param.setFlags(dataType_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(dataType_param)
self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT,
self.tr('Interpolated (Nearest neighbor)')))
def name(self):
return 'gridnearestneighbor'
def displayName(self):
return self.tr('Grid (Nearest neighbor)')
def group(self):
return self.tr('Raster analysis')
def groupId(self):
return 'rasteranalysis'
def icon(self):
return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'grid.png'))
def commandName(self):
return 'gdal_grid'
def getConsoleCommands(self, parameters, context, feedback, executing=True):
ogrLayer, layerName = self.getOgrCompatibleSource(self.INPUT, parameters, context, feedback, executing)
arguments = ['-l']
arguments.append(layerName)
fieldName = self.parameterAsString(parameters, self.Z_FIELD, context)
if fieldName:
arguments.append('-zfield')
arguments.append(fieldName)
params = 'nearest'
params += ':radius1={}'.format(self.parameterAsDouble(parameters, self.RADIUS_1, context))
params += ':radius2={}'.format(self.parameterAsDouble(parameters, self.RADIUS_2, context))
params += ':angle={}'.format(self.parameterAsDouble(parameters, self.ANGLE, context))
params += ':nodata={}'.format(self.parameterAsDouble(parameters, self.NODATA, context))
arguments.append('-a')
arguments.append(params)
arguments.append('-ot')
arguments.append(self.TYPES[self.parameterAsEnum(parameters, self.DATA_TYPE, context)])
out = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
self.setOutputValue(self.OUTPUT, out)
arguments.append('-of')
        arguments.append(QgsRasterFileWriter.driverForExtension(os.path.splitext(out)[1]))
rgayon/plaso | plaso/parsers/mac_wifi.py | Python | apache-2.0 | 10,515 | 0.006182
# -*- coding: utf-8 -*-
"""Parses for MacOS Wifi log (wifi.log) files."""
from __future__ import unicode_literals
import re
import pyparsing
from dfdatetime import time_elements as dfdatetime_time_elements
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import errors
from plaso.lib import definitions
from plaso.lib import timelib
from plaso.parsers import logger
from plaso.parsers import manager
from plaso.parsers import text_parser
class MacWifiLogEventData(events.EventData):
"""Mac Wifi log event data.
Attributes:
action (str): known WiFI action, for example connected to an AP,
configured, etc. If the action is not known, the value is
the message of the log (text variable).
agent (str): name and identifier of process that generated the log message.
function (str): name of function that generated the log message.
text (str): log message
"""
DATA_TYPE = 'mac:wifilog:line'
def __init__(self):
"""Initializes event data."""
super(MacWifiLogEventData, self).__init__(data_type=self.DATA_TYPE)
self.action = None
self.agent = None
self.function = None
self.text = None
class MacWifiLogParser(text_parser.PyparsingSingleLineTextParser):
"""Parses MacOS Wifi log (wifi.log) files."""
NAME = 'macwifi'
DATA_FORMAT = 'MacOS Wifi log (wifi.log) file'
_ENCODING = 'utf-8'
THREE_DIGITS = text_parser.PyparsingConstants.THREE_DIGITS
THREE_LETTERS = text_parser.PyparsingConstants.THREE_LETTERS
# Regular expressions for known actions.
_CONNECTED_RE = re.compile(r'Already\sassociated\sto\s(.*)\.\sBailing')
_WIFI_PARAMETERS_RE = re.compile(
r'\[ssid=(.*?), bssid=(.*?), security=(.*?), rssi=')
_KNOWN_FUNCTIONS = [
'airportdProcessDLILEvent',
'_doAutoJoin',
'_processSystemPSKAssoc']
_AGENT = (
pyparsing.Literal('<') +
pyparsing.Combine(
pyparsing.Literal('airportd') + pyparsing.CharsNotIn('>'),
joinString='', adjacent=True).setResultsName('agent') +
pyparsing.Literal('>'))
_DATE_TIME = pyparsing.Group(
THREE_LETTERS.setResultsName('day_of_week') +
THREE_LETTERS.setResultsName('month') +
text_parser.PyparsingConstants.ONE_OR_TWO_DIGITS.setResultsName('day') +
text_parser.PyparsingConstants.TIME_ELEMENTS + pyparsing.Suppress('.') +
THREE_DIGITS.setResultsName('milliseconds'))
# Log line with a known function name.
_MAC_WIFI_KNOWN_FUNCTION_LINE = (
_DATE_TIME.setResultsName('date_time') + _AGENT +
pyparsing.oneOf(_KNOWN_FUNCTIONS).setResultsName('function') +
pyparsing.Literal(':') +
pyparsing.SkipTo(pyparsing.lineEnd).setResultsName('text'))
# Log line with an unknown function name.
_MAC_WIFI_LINE = (
_DATE_TIME.setResultsName('date_time') + pyparsing.NotAny(
_AGENT +
pyparsing.oneOf(_KNOWN_FUNCTIONS) +
pyparsing.Literal(':')) +
pyparsing.SkipTo(pyparsing.lineEnd).setResultsName('text'))
_MAC_WIFI_HEADER = (
_DATE_TIME.setResultsName('date_time') +
pyparsing.Literal('***Starting Up***').setResultsName('text'))
_DATE_TIME_TURNED_OVER_HEADER = pyparsing.Group(
text_parser.PyparsingConstants.MONTH.setResultsName('month') +
text_parser.PyparsingConstants.ONE_OR_TWO_DIGITS.setResultsName('day') +
text_parser.PyparsingConstants.TIME_ELEMENTS)
_MAC_WIFI_TURNED_OVER_HEADER = (
_DATE_TIME_TURNED_OVER_HEADER.setResultsName('date_time') +
pyparsing.Combine(
          pyparsing.Word(pyparsing.printables) +
pyparsing.Word(pyparsing.printables) +
pyparsing.Literal('logfile turned over') +
pyparsing.LineEnd(),
joinString=' ', adjacent=False).setResultsName('text'))
# Define the available log line structures.
LINE_STRUCTURES = [
('header', _MAC_WIFI_HEADER),
('turned_over_header', _MAC_WIFI_TURNED_OVER_HEADER),
('known_function_logline', _MAC_WIFI_KNOWN_FUNCTION_LINE),
('logline', _MAC_WIFI_LINE)]
  _SUPPORTED_KEYS = frozenset([key for key, _ in LINE_STRUCTURES])
def __init__(self):
"""Initializes a parser."""
super(MacWifiLogParser, self).__init__()
self._last_month = 0
self._year_use = 0
def _GetAction(self, action, text):
"""Parse the well known actions for easy reading.
Args:
action (str): the function or action called by the agent.
text (str): mac Wifi log text.
Returns:
str: a formatted string representing the known (or common) action.
If the action is not known the original log text is returned.
"""
# TODO: replace "x in y" checks by startswith if possible.
if 'airportdProcessDLILEvent' in action:
interface = text.split()[0]
return 'Interface {0:s} turn up.'.format(interface)
if 'doAutoJoin' in action:
match = self._CONNECTED_RE.match(text)
if match:
ssid = match.group(1)[1:-1]
else:
ssid = 'Unknown'
return 'Wifi connected to SSID {0:s}'.format(ssid)
if 'processSystemPSKAssoc' in action:
wifi_parameters = self._WIFI_PARAMETERS_RE.search(text)
if wifi_parameters:
ssid = wifi_parameters.group(1)
bssid = wifi_parameters.group(2)
security = wifi_parameters.group(3)
if not ssid:
ssid = 'Unknown'
if not bssid:
bssid = 'Unknown'
if not security:
security = 'Unknown'
return (
'New wifi configured. BSSID: {0:s}, SSID: {1:s}, '
'Security: {2:s}.').format(bssid, ssid, security)
return text
def _GetTimeElementsTuple(self, key, structure):
"""Retrieves a time elements tuple from the structure.
Args:
key (str): name of the parsed structure.
structure (pyparsing.ParseResults): structure of tokens derived from
a line of a text file.
Returns:
tuple: containing:
year (int): year.
month (int): month, where 1 represents January.
day_of_month (int): day of month, where 1 is the first day of the month.
hours (int): hours.
minutes (int): minutes.
seconds (int): seconds.
milliseconds (int): milliseconds.
"""
time_elements_tuple = self._GetValueFromStructure(structure, 'date_time')
# TODO: what if time_elements_tuple is None.
if key == 'turned_over_header':
month, day, hours, minutes, seconds = time_elements_tuple
milliseconds = 0
else:
_, month, day, hours, minutes, seconds, milliseconds = time_elements_tuple
# Note that dfdatetime_time_elements.TimeElements will raise ValueError
# for an invalid month.
month = timelib.MONTH_DICT.get(month.lower(), 0)
if month != 0 and month < self._last_month:
# Gap detected between years.
self._year_use += 1
return self._year_use, month, day, hours, minutes, seconds, milliseconds
def _ParseLogLine(self, parser_mediator, key, structure):
"""Parse a single log line and produce an event object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
key (str): name of the parsed structure.
structure (pyparsing.ParseResults): structure of tokens derived from
a line of a text file.
"""
time_elements_tuple = self._GetTimeElementsTuple(key, structure)
try:
date_time = dfdatetime_time_elements.TimeElementsInMilliseconds(
time_elements_tuple=time_elements_tuple)
except ValueError:
parser_mediator.ProduceExtractionWarning(
'invalid date time value: {0!s}'.format(time_elements_tuple))
return
self._last_month = time_elements_tuple[1]
function = self._GetValueFromStructure(structure, 'function')
text = self._GetValueFromStructure(structure, 'text')
if text:
text = text.strip()
event_data = MacWifiLogEventData()
event_data.agent = self._GetValueFromStructure(structure, 'agent')
event_data.function = function
event_data.text = text
if key == 'known_function_logline':
event
makinacorpus/formhub | odk_logger/migrations/0012_add_permission_view_xform.py | Python | bsd-2-clause | 7,685 | 0.007547
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import Permission
class Migration(DataMigration):
depends_on = (
("guardian", "0005_auto__chg_field_groupobjectpermission_object_pk__chg_field_userobjectp"),
)
def forwards(self, orm):
pass
# remove old permission label if migrated with old model metadata
try:
ct = ContentType.objects.get(model='xform', app_label='odk_logger')
Permission.objects.get(content_type=ct, codename='can_view').delete()
# add new permission label
perm, created = Permission.objects.get_or_create(content_type=ct, codename='view_xform', name='Can view associated data')
except (ContentType.DoesNotExist, Permission.DoesNotExist):
pass
def backwards(self, orm):
pass
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'odk_logger.attachment': {
'Meta': {'object_name': 'Attachment'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': "orm['odk_logger.Instance']"}),
'media_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'})
},
'odk_logger.instance': {
'Meta': {'object_name': 'Instance'},
'date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "u'submitted_via_web'", 'max_length': '20'}),
'survey_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['odk_logger.SurveyType']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['auth.User']"}),
'xform': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['odk_logger.XForm']"}),
'xml': ('django.db.models.fields.TextField', [], {})
},
'odk_logger.surveytype': {
'Meta': {'object_name': 'SurveyType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'odk_logger.xform': {
'Meta': {'ordering': "('id_string',)", 'unique_together': "(('user', 'id_string'),)", 'object_name': 'XForm'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'null': 'True'}),
'downloadable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'has_start_time': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'id_string': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'json': ('django.db.models.fields.TextField', [], {'default': "u''"}),
'shared': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shared_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'xforms'", 'null': 'True', 'to': "orm['auth.User']"}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32'}),
'xls': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True'}),
'xml': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['odk_logger']
manassolanki/frappe | frappe/core/doctype/user_permission/user_permission.py | Python | mit | 1,295 | 0.026255
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe.permissions import (get_valid_perms, update_permission_property)
from frappe import _
class UserPermission(Document):
def on_update(self):
frappe.cache().delete_value('user_permissions')
def on_trash(self): # pylint: disable=no-self-use
frappe.cache().delete_value('user_permissions')
def get_user_permissions(user=None):
'''Get all users permissions for the user as a dict of doctype'''
if not user:
user = frappe.session.user
out = frappe.cache().hget("user_permissions", user)
if out is None:
out = {}
try:
for perm in frappe.get_all('User Permission',
fields=['allow', 'for_value'], filters=dict(user=user)):
meta = frappe.get_meta(perm.allow)
				if perm.allow not in out:
out[perm.allow] = []
out[perm.allow].append(perm.for_value)
if meta.is_nested_set():
out[perm.allow].extend(frappe.db.get_descendants(perm.allow, perm.for_value))
frappe.cache().hset("user_permissions", user, out)
except frappe.SQLError as e:
if e.args[0]==1146:
# called from patch
pass
return out
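# An illustrative call against the helper above; it needs a running
# frappe site. The returned mapping is doctype -> allowed values, e.g.
# {"Company": ["Acme"], "Territory": ["Kenya", "Nairobi"]} (names invented).
def _example_user_permissions(user):
    return get_user_permissions(user).get("Company", [])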
Cherry-project/primitiveWS | startup.py | Python | gpl-3.0 | 76 | 0.026316
from cherry import *
robot=Cherry.setup()
Cherry.serve()
Cherry.connect()
lhupfeldt/jenkinsflow | test/job_load_test.py | Python | bsd-3-clause | 3,177 | 0.002833
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
import os, random
from jenkinsflow import jobload
from .framework import api_select
here = os.path.abspath(os.path.dirname(__file__))
_context = dict(
exec_time=1,
params=(),
script=None,
securitytoken='abc',
print_env=False,
create_job=None,
num_builds_to_keep=4,
final_result_use_cli=False,
set_build_descriptions=()
)
def _random_job_name(api, short_name=None):
# If short_name is not specified, use a random name to make sure the job doesn't exist
short_name = short_name or str(random.random()).replace('.', '')
return api.job_name_prefix + short_name, short_name
def _assert_job(api, job_name, cleanup=False):
job = api.get_job(job_name)
assert job is not None
assert job.name == job_name
assert job.public_uri is not None and job_name in job.public_uri
if cleanup:
api.delete_job(job_name)
return None
return job
def test_job_load_new_no_pre_delete(api_type):
api = api_select.api(__file__, api_type, login=True)
full_name, short_name = _random_job_name(api)
api.job(short_name, 1, 1, 1, exec_time=1, non_existing=True)
jobload.update_job_from_template(api, full_name, api.job_xml_template, pre_delete=False, context=_context)
_assert_job(api, full_name, cleanup=True)
def test_job_load_new_pre_delete(api_type):
api = api_select.api(__file__, api_type, login=True)
full_name, short_name = _random_job_name(api)
api.job(short_name, 1, 1, 1, exec_time=1, non_existing=True)
jobload.update_job_from_template(api, full_name, api.job_xml_template, pre_delete=True, context=_context)
_assert_job(api, full_name, cleanup=True)
def test_job_load_existing_pre_delete(api_type):
api = api_select.api(__file__, api_type, login=True)
full_name, short_name = _random_job_name(api)
api.job(short_name, 1, 1, 1, exec_time=1)
jobload.update_job_from_template(api, full_name, api.job_xml_template, pre_delete=True, context=_context)
_assert_job(api, full_name, cleanup=False)
jobload.update_job_from_template(api, full_name, api.job_xml_template, pre_delete=True, context=_context)
_assert_job(api, full_name, cleanup=True)
def test_job_load__existing_update(api_type):
api = api_select.api(__file__, api_type, login=True)
full_name, short_name = _random_job_name(api)
api.job(short_name, 1, 1, 1, exec_time=1)
jobload.update_job_from_template(api, full_name, api.job_xml_template, pre_delete=True, context=_context)
_assert_job(api, full_name, cleanup=False)
jobload.update_job_from_template(api, full_name, api.job_xml_template, pre_delete=False, context=_context)
_assert_job(api, full_name, cleanup=True)
def test_job_load_non_existing_pre_delete(api_type):
api = api_select.api(__file__, api_type, login=True)
full_name, short_name = _random_job_name(api)
api.job(short_name, 1, 1, 1, exec_time=1, non_existing=True)
jobload.update_job_from_template(api, full_name, api.job_xml_template, pre_delete=True, context=_context)
nkoech/trialscompendium | trialscompendium/trials/api/treatment/filters.py | Python | mit | 934 | 0
from rest_framework.filters import (
FilterSet
)
from trialscompendium.trials.models import Treatment
class TreatmentListFilter(FilterSet):
"""
Filter query list from treatment database table
"""
class Meta:
model = Treatment
fields = {'id': ['exact', 'in'],
'no_replicate': ['exact', 'in', 'gte', 'lte'],
'nitrogen_treatment': ['iexact', 'in', 'icontains'],
'phosphate_treatment': ['iexact', 'in', 'icontains'],
'tillage_practice': ['iexact', 'in', 'icontains'],
'cropping_system': ['iexact', 'in', 'icontains'],
                  'crops_grown': ['iexact', 'in', 'icontains'],
'farm_yard_manure': ['iexact', 'in', 'icontains'],
'farm_residue': ['iexact', 'in', 'icontains'],
}
order_by = ['tillage_practice', 'cropping_system', 'crops_grown']
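# Hypothetical client-side queries exercising the lookups declared above;
# the endpoint URL is invented, the double-underscore suffixes are not.
import requests
def _example_treatment_queries():
    return requests.get(
        "https://example.org/api/treatments/",
        params={"tillage_practice__iexact": "minimum tillage",
                "no_replicate__gte": 3},
    )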
james-tate/gnuradio_projects | ettus_lab/lab1/top_block.py | Python | gpl-3.0 | 3,795 | 0.016074
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: Top Block
# Generated: Tue Dec 27 19:28:14 2016
##################################################
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print "Warning: failed to XInitThreads()"
from gnuradio import analog
from gnuradio import blocks
from gnuradio import eng_notation
from gnuradio import gr
from gnuradio import wxgui
from gnuradio.eng_option import eng_option
from gnuradio.filter import firdes
from gnuradio.wxgui import forms
from gnuradio.wxgui import scopesink2
from grc_gnuradio import wxgui as grc_wxgui
from optparse import OptionParser
import wx
class top_block(grc_wxgui.top_block_gui):
def __init__(self):
grc_wxgui.top_block_gui.__init__(self, title="Top Block")
##################################################
# Variables
##################################################
self.var = var = 11
self.samp_rate = samp_rate = 1e6
self.freq = freq = 1e3
##################################################
# Blocks
##################################################
        _freq_sizer = wx.BoxSizer(wx.VERTICAL)
self._freq_text_box = forms.text_box(
parent=self.GetWin(),
sizer=_freq_sizer,
value=self.freq,
callback=self.set_freq,
label='freq',
converter=forms.float_converter(),
proportion=0,
)
self._freq_slider = forms.slider(
parent=self.GetWin(),
sizer=_freq_sizer,
value=self.freq,
callback=self.set_freq,
minimum=0,
maximum=16e3,
num_steps=100,
style=wx.SL_HORIZONTAL,
cast=float,
proportion=1,
)
self.Add(_freq_sizer)
self.wxgui_scopesink2_0 = scopesink2.scope_sink_c(
self.GetWin(),
title='Scope Plot',
sample_rate=samp_rate,
v_scale=0,
v_offset=0,
t_scale=0,
ac_couple=False,
xy_mode=False,
num_inputs=1,
trig_mode=wxgui.TRIG_MODE_AUTO,
y_axis_label='Counts',
)
self.Add(self.wxgui_scopesink2_0.win)
self.blocks_throttle_0 = blocks.throttle(gr.sizeof_gr_complex*1, samp_rate,True)
self.analog_sig_source_x_0 = analog.sig_source_c(samp_rate, analog.GR_COS_WAVE, freq, 1, 0)
##################################################
# Connections
##################################################
self.connect((self.analog_sig_source_x_0, 0), (self.blocks_throttle_0, 0))
self.connect((self.blocks_throttle_0, 0), (self.wxgui_scopesink2_0, 0))
def get_var(self):
return self.var
def set_var(self, var):
self.var = var
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self.wxgui_scopesink2_0.set_sample_rate(self.samp_rate)
self.blocks_throttle_0.set_sample_rate(self.samp_rate)
self.analog_sig_source_x_0.set_sampling_freq(self.samp_rate)
def get_freq(self):
return self.freq
def set_freq(self, freq):
self.freq = freq
self._freq_slider.set_value(self.freq)
self._freq_text_box.set_value(self.freq)
self.analog_sig_source_x_0.set_frequency(self.freq)
def main(top_block_cls=top_block, options=None):
tb = top_block_cls()
tb.Start(True)
tb.Wait()
if __name__ == '__main__':
main()