| column | type | stats |
|---|---|---|
| repo_name | string | lengths 5-100 |
| path | string | lengths 4-231 |
| language | string | 1 class |
| license | string | 15 classes |
| size | int64 | 6-947k |
| score | float64 | 0-0.34 |
| prefix | string | lengths 0-8.16k |
| middle | string | lengths 3-512 |
| suffix | string | lengths 0-8.17k |
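The `prefix`, `middle`, and `suffix` columns form a fill-in-the-middle (FIM) split: concatenated in that order they reproduce a contiguous excerpt of the sampled file. A minimal sketch of iterating such a dump, assuming it was exported as JSON Lines with these exact column names (the file name `samples.jsonl` is hypothetical):

```python
import json

# Hypothetical export path; any file with the columns above would work.
with open("samples.jsonl") as fh:
    for line in fh:
        row = json.loads(line)
        # The FIM split is positional, so simple concatenation
        # reconstructs the contiguous source excerpt.
        source = row["prefix"] + row["middle"] + row["suffix"]
        print(row["repo_name"], row["path"], row["license"], len(source))
```

Each record below shows the metadata fields on one line (`repo_name | path | language | license | size | score`), followed by the sampled file contents.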
valexandersaulys/prudential_insurance_kaggle | venv/lib/python2.7/site-packages/numpy/core/records.py | Python | gpl-2.0 | 29,422 | 0.001461
"""
Record Arrays
=============
Record arrays expose the fields of structured arrays as properties.
Most commonly, ndarrays contain elements of a single type, e.g. floats,
integers, bools etc. However, it is possible for elements to be combinations
of these using structured types, such as::
>>> a = np.array([(1, 2.0), (1, 2.0)], dtype=[('x', int), ('y', float)])
>>> a
array([(1, 2.0), (1, 2.0)],
dtype=[('x', '<i4'), ('y', '<f8')])
Here, each element consists of two fields: x (an int), and y (a float).
This is known as a structured array. The different fields are analogous
to columns in a spreadsheet, and can be accessed as one would a
dictionary::
>>> a['x']
array([1, 1])
>>> a['y']
array([ 2., 2.])
Record arrays allow us to access fields as properties::
>>> ar = np.rec.array(a)
>>> ar.x
array([1, 1])
>>> ar.y
array([ 2., 2.])
"""
from __future__ import division, absolute_import, print_function
import sys
import os
from . import numeric as sb
from . import numerictypes as nt
from numpy.compat import isfileobj, bytes, long
# All of the functions allow formats to be a dtype
__all__ = ['record', 'recarray', 'format_parser']
ndarray = sb.ndarray
_byteorderconv = {'b':'>',
'l':'<',
'n':'=',
'B':'>',
'L':'<',
'N':'=',
'S':'s',
's':'s',
'>':'>',
'<':'<',
'=':'=',
'|':'|',
'I':'|',
'i':'|'}
# formats regular expression
# allows multidimension spec with a tuple syntax in front
# of the letter code '(2,3)f4' and ' ( 2 , 3 ) f4 '
# are equally allowed
numfmt = nt.typeDict
def find_duplicate(list):
"""Find duplication in a list, return a list of duplicated elements"""
dup = []
for i in range(len(list)):
if (list[i] in list[i + 1:]):
if (list[i] not in dup):
dup.append(list[i])
return dup
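# Illustrative example (not part of the numpy source): duplicates are
# returned in first-seen order, e.g. find_duplicate(['x', 'y', 'x', 'y'])
# gives ['x', 'y'].  The scan is O(n**2), which is fine for the short
# field-name lists it is applied to.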
class format_parser:
"""
Class to convert formats, names, titles description to a dtype.
After constructing the format_parser object, the dtype attribute is
the converted data-type:
``dtype = format_parser(formats, names, titles).dtype``
Attributes
----------
dtype : dtype
The converted data-type.
Parameters
----------
formats : str or list of str
The format description, either specified as a string with
comma-separated format descriptions in the form ``'f8, i4, a5'``, or
a list of format description strings in the form
``['f8', 'i4', 'a5']``.
names : str or list/tuple of str
The field names, either specified as a comma-separated string in the
form ``'col1, col2, col3'``, or as a list or tuple of strings in the
form ``['col1', 'col2', 'col3']``.
An empty list can be used, in that case default field names
('f0', 'f1', ...) are used.
titles : sequence
Sequence of title strings. An empty list can be used to leave titles
out.
aligned : bool, optional
If True, align the fields by padding as the C-compiler would.
Default is False.
byteorder : str, optional
If specified, all the fields will be changed to the
provided byte-order. Otherwise, the default byte-order is
used. For all available string specifiers, see `dtype.newbyteorder`.
See Also
--------
dtype, typename, sctype2char
Examples
--------
>>> np.format_parser(['f8', 'i4', 'a5'], ['col1', 'col2', 'col3'],
... ['T1', 'T2', 'T3']).dtype
dtype([(('T1', 'col1'), '<f8'), (('T2', 'col2'), '<i4'),
       (('T3', 'col3'), '|S5')])
`names` and/or `titles` can be empty lists. If `titles` is an empty list,
titles will simply not appear. If `names` is empty, default field names
will be used.
>>> np.format_parser(['f8', 'i4', 'a5'], ['col1', 'col2', 'col3'],
... []).dtype
dtype([('col1', '<f8'), ('col2', '<i4'), ('col3', '|S5')])
>>> np.format_parser(['f8', 'i4', 'a5'], [], []).dtype
dtype([('f0', '<f8'), ('f1', '<i4'), ('f2', '|S5')])
"""
def __init__(self, formats, names, titles, aligned=False, byteorder=None):
self._parseFormats(formats, aligned)
self._setfieldnames(names, titles)
self._createdescr(byteorder)
self.dtype = self._descr
def _parseFormats(self, formats, aligned=0):
""" Parse the field formats """
if formats is None:
raise ValueError("Need formats argument")
if isinstance(formats, list):
if len(formats) < 2:
formats.append('')
formats = ','.join(formats)
dtype = sb.dtype(formats, aligned)
fields = dtype.fields
if fields is None:
dtype = sb.dtype([('f1', dtype)], aligned)
fields = dtype.fields
keys = dtype.names
self._f_formats = [fields[key][0] for key in keys]
self._offsets = [fields[key][1] for key in keys]
self._nfields = len(keys)
def _setfieldnames(self, names, titles):
"""convert input field names into a list and assign to the _names
attribute """
if (names):
if (type(names) in [list, tuple]):
pass
elif isinstance(names, str):
names = names.split(',')
else:
raise NameError("illegal input names %s" % repr(names))
self._names = [n.strip() for n in names[:self._nfields]]
else:
self._names = []
# if the names are not specified, they will be assigned as
# "f0, f1, f2,..."
# if not enough names are specified, they will be assigned as "f[n],
# f[n+1],..." etc. where n is the number of specified names..."
self._names += ['f%d' % i for i in range(len(self._names),
self._nfields)]
# check for redundant names
_dup = find_duplicate(self._names)
if _dup:
raise ValueError("Duplicate field names: %s" % _dup)
if (titles):
self._titles = [n.strip() for n in titles[:self._nfields]]
else:
self._titles = []
titles = []
if (self._nfields > len(titles)):
self._titles += [None] * (self._nfields - len(titles))
def _createdescr(self, byteorder):
descr = sb.dtype({'names':self._names,
'formats':self._f_formats,
'offsets':self._offsets,
'titles':self._titles})
if (byteorder is not None):
byteorder = _byteorderconv[byteorder[0]]
descr = descr.newbyteorder(byteorder)
self._descr = descr
class record(nt.void):
"""A data-type scalar that allows field access as attribute lookup.
"""
# manually set name and module so that this class's type shows up
# as numpy.record when printed
__name__ = 'record'
__module__ = 'numpy'
def __repr__(self):
return self.__str__()
def __str__(self):
return str(self.item())
def __getattribute__(self, attr):
if attr in ['setfield', 'getfield', 'dtype']:
return nt.void.__getattribute__(self, attr)
try:
return nt.void.__getattribute__(self, attr)
except AttributeError:
pass
fielddict = nt.void.__getattribute__(self, 'dtype').fields
res = fielddict.get(attr, None)
if res:
obj = self.getfield(*res[:2])
# if it has fields return a record,
# otherwise return the object
try:
dt = obj.dtype
except AttributeError:
#happens if field is Object type
return obj
if dt.fields:
return obj.view((self.__class__, obj.dtype.fields))
return obj
else:
raise AttributeError("'record' objec
ehartsuyker/securedrop | securedrop/tests/functional/functional_test.py | Python | agpl-3.0 | 11,846 | 0.001266
# -*- coding: utf-8 -*-
from __future__ import print_function
import logging
import os
import signal
import socket
import time
import traceback
from datetime import datetime
from multiprocessing import Process
from os.path import abspath
from os.path import dirname
from os.path import expanduser
from os.path import join
from os.path import realpath
import mock
import pyotp
import requests
import tbselenium.common as cm
from selenium import webdriver
from selenium.common.exceptions import NoAlertPresentException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.remote_connection import LOGGER
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
from sqlalchemy.exc import IntegrityError
from tbselenium.tbdriver import TorBrowserDriver
import journalist_app
import source_app
import tests.utils.env as env
from db import db
from models import Journalist
from sdconfig import config
os.environ["SECUREDROP_ENV"] = "test"
FUNCTIONAL_TEST_DIR = abspath(dirname(__file__))
LOGFILE_PATH = abspath(join(FUNCTIONAL_TEST_DIR, "firefox.log"))
FILES_DIR = abspath(join(dirname(realpath(__file__)), "../..", "tests/files"))
FIREFOX_PATH = "/usr/bin/firefox/firefox"
TBB_PATH = abspath(join(expanduser("~"), ".local/tbb/tor-browser_en-US/"))
os.environ["TBB_PATH"] = TBB_PATH
TBBRC = join(TBB_PATH, "Browser/TorBrowser/Data/Tor/torrc")
LOGGER.setLevel(logging.WARNING)
# https://stackoverflow.com/a/34795883/837471
class alert_is_not_present(object):
""" Expect an alert to not be present."""
def __call__(self, driver):
try:
alert = driver.switch_to.alert
alert.text
return False
except NoAlertPresentException:
return True
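# Illustrative usage (not part of this file): an instance is a Selenium
# "expected condition" callable, so it can be polled like the built-ins:
#
#   WebDriverWait(driver, 5).until(alert_is_not_present())
#
# until() keeps invoking the instance with the driver until it returns
# True or the timeout expires.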
class FunctionalTest(object):
use_firefox = False
driver = None
accept_languages = None
_firefox_driver = None
_torbrowser_driver = None
gpg = None
new_totp = None
timeout = 10
secret_message = "These documents outline a major government invasion of privacy."
def _unused_port(self):
s = socket.socket()
s.bind(("127.0.0.1", 0))
port = s.getsockname()[1]
s.close()
return port
def _create_torbrowser_driver(self):
logging.info("Creating TorBrowserDriver")
log_file = open(LOGFILE_PATH, "a")
log_file.write("\n\n[%s] Running Functional Tests\n" % str(datetime.now()))
log_file.flush()
# Don't use Tor when reading from localhost, and turn off private
# browsing. We need to turn off private browsing because we won't be
# able to access the browser's cookies in private browsing mode. Since
# we use session cookies in SD anyway (in private browsing mode all
# cookies are set as session cookies), this should not affect session
# lifetime.
pref_dict = {
"network.proxy.no_proxies_on": "127.0.0.1",
"browser.privatebrowsing.autostart": False,
}
if self.accept_languages is not None:
pref_dict["intl.accept_languages"] = self.accept_languages
self._torbrowser_driver = TorBrowserDriver(
TBB_PATH, tor_cfg=cm.USE_RUNNING_TOR, pref_dict=pref_dict, tbb_logfile_path=LOGFILE_PATH
)
logging.info("Created Tor Browser driver")
def _create_firefox_driver(self, profile=None):
logging.info("Creating Firefox driver"
|
)
if profile is None:
profile = webdriver.FirefoxProfile()
if self.accept_languages is not None:
profile.set_preference("intl.accept_languages", self.accept_languages)
profile.update_preferences()
self._firefox_driver = webdriver.Firefox(
firefox_binary=FIREFOX_PATH, firefox_profile=profile
)
self._firefox_driver.set_window_position(0, 0)
self._firefox_driver.set_window_size(1024, 768)
self._firefox_driver.implicitly_wait(self.timeout)
logging.info("Created Firefox driver")
def disable_javascript(self):
self.driver.profile.set_preference("javascript.enabled", False)
def enable_javascript(self):
self.driver.profile.set_preference("javascript.enabled", True)
def switch_to_firefox_driver(self):
self.driver = self._firefox_driver
def switch_to_torbrowser_driver(self):
self.driver = self._torbrowser_driver
def setup(self, session_expiration=30):
env.create_directories()
self.gpg = env.init_gpg()
self.__context = journalist_app.create_app(config).app_context()
self.__context.push()
# Patch the two-factor verification to avoid intermittent errors
self.patcher = mock.patch("models.Journalist.verify_token")
self.mock_journalist_verify_token = self.patcher.start()
self.mock_journalist_verify_token.return_value = True
self.patcher2 = mock.patch("source_app.main.get_entropy_estimate")
self.mock_get_entropy_estimate = self.patcher2.start()
self.mock_get_entropy_estimate.return_value = 8192
signal.signal(signal.SIGUSR1, lambda _, s: traceback.print_stack(s))
env.create_directories()
db.create_all()
# Add our test user
try:
valid_password = "correct horse battery staple profanity oil chewy"
user = Journalist(username="journalist", password=valid_password, is_admin=True)
user.otp_secret = "JHCOGO7VCER3EJ4L"
db.session.add(user)
db.session.commit()
except IntegrityError:
logging.error("Test user already added")
db.session.rollback()
        # This user is required for our test cases to log in
self.admin_user = {
"name": "journalist",
"password": ("correct horse battery staple" " profanity oil chewy"),
"secret": "JHCOGO7VCER3EJ4L",
}
self.admin_user["totp"] = pyotp.TOTP(self.admin_user["secret"])
source_port = self._unused_port()
journalist_port = self._unused_port()
self.source_location = "http://127.0.0.1:%d" % source_port
self.journalist_location = "http://127.0.0.1:%d" % journalist_port
# Allow custom session expiration lengths
self.session_expiration = session_expiration
self.source_app = source_app.create_app(config)
self.journalist_app = journalist_app.create_app(config)
def start_source_server(app):
config.SESSION_EXPIRATION_MINUTES = self.session_expiration
app.run(port=source_port, debug=True, use_reloader=False, threaded=True)
def start_journalist_server(app):
app.run(port=journalist_port, debug=True, use_reloader=False, threaded=True)
self.source_process = Process(target=lambda: start_source_server(self.source_app))
self.journalist_process = Process(
target=lambda: start_journalist_server(self.journalist_app)
)
self.source_process.start()
self.journalist_process.start()
for tick in range(30):
try:
requests.get(self.source_location, timeout=1)
requests.get(self.journalist_location, timeout=1)
except Exception:
time.sleep(0.5)
else:
break
self._create_torbrowser_driver()
self._create_firefox_driver()
if self.use_firefox:
self.switch_to_firefox_driver()
else:
self.switch_to_torbrowser_driver()
# Polls the DOM to wait for elements. To read more about why
# this is necessary:
#
# http://www.obeythetestinggoat.com/how-to-get-selenium-to-wait-for-page-load-after-a-click.html
#
        # A value of 5 is known not to be enough in some cases, when
        # the machine hosting the tests is slow, which is why it was
        # raised to 10. Setting the value to 60 or more woul
CG3002/Hardware-Bootloader-Timer | reg.py | Python | mit | 1,129 | 0.049601
import time
import serial
ser = serial.Serial(port=29, baudrate=9600, bytesize=serial.EIGHTBITS, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_TWO, timeout=1)
ser.isOpen()
connected=False
cash_reg = []
my_dict = []
reg = ['@r3', '@r1', '@r2', '@r4']
flag = 1
start_rec = 0
wrong_id = 0
start_count = 0
barcode_flag = 0
def handle_data(data):
print(data)
print 'start transmission'
while 1 :
for item in reg:
try:
send_pkg = item+'/'
ser.write(send_pkg)
print 'sending '+ send_pkg
while flag :
start_count += 1
buffer = ser.read() #blocking call
print 'received '+buffer
if start_rec == 1:
if buffer == item[1] :
barcode_flag = 1
if buffer == '/' :
#print 'end round'
flag = 0
break
if buffer == '@' :
start_rec = 1
if buffer == '0' :
if start_rec == 1:
start_rec = 0
wrong_id = 1
print 'wrong id'
if start_count == 5 :
start_count = 0
flag = 0
break
start_rec = 0
wrong_id = 0
flag = 1
start_count = 0
        except serial.SerialTimeoutException:
print 'Serial time out'
continue
CompSoc-NUIG/python_tutorials_2013 | guess.py | Python | unlicense | 774 | 0.003876
#!/usr/bin/env python
import random
'''\
The computer will pick a number between 1 and 100. (You can choose any high
number you want.) The purpose of the game is to guess the number the computer
picked in as few guesses as possible.
source:http://openbookproject.net/pybiblio/practice/\
'''
high_or_low = {True: "Too high. Try again: ",
               False: "Too low. Try again: "}
def main():
    choice = random.randint(1, 100)  # randint includes both endpoints, matching the 1..100 range above
user_choice = -1
while user_choice != choice:
        user_choice = int(input("Please enter your choice: "))
is_high = user_choice > choice
if user_choice == choice:
break
print(high_or_low[is_high])
print("You guessed {0} correctly".format(choice))
if __name__ == "__main__":
main()
kanishkarj/Rave | Qt_Designer_files/main_design.py | Python | gpl-3.0 | 20,258 | 0.002221
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(764, 593)
MainWindow.setMinimumSize(QtCore.QSize(650, 500))
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.mediaView = QtGui.QFrame(self.centralwidget)
self.mediaView.setGeometry(QtCore.QRect(0, 0, 461, 231))
self.mediaView.setStyleSheet(_fromUtf8(""))
self.mediaView.setFrameShape(QtGui.QFrame.StyledPanel)
self.mediaView.setFrameShadow(QtGui.QFrame.Raised)
self.mediaView.setObjectName(_fromUtf8("mediaView"))
self.subtitle = QtGui.QLabel(self.centralwidget)
self.subtitle.setGeometry(QtCore.QRect(250, 240, 261, 17))
font = QtGui.QFont()
font.setPointSize(12)
self.subtitle.setFont(font)
self.subtitle.setStyleSheet(_fromUtf8("color:white;"))
self.subtitle.setText(_fromUtf8(""))
self.subtitle.setObjectName(_fromUtf8("subtitle"))
self.controlView = QtGui.QWidget(self.centralwidget)
self.controlView.setGeometry(QtCore.QRect(30, 270, 661, 130))
self.controlView.setMinimumSize(QtCore.QSize(510, 130))
self.controlView.setMaximumSize(QtCore.QSize(16777215, 130))
self.controlView.setObjectName(_fromUtf8("controlView"))
self.verticalLayout = QtGui.QVBoxLayout(self.controlView)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.gridLayout_8 = QtGui.QGridLayout()
self.gridLayout_8.setMargin(1)
self.gridLayout_8.setObjectName(_fromUtf8("gridLayout_8"))
self.timeDone = QtGui.QLabel(self.controlView)
self.timeDone.setMinimumSize(QtCore.QSize(60, 0))
self.timeDone.setMaximumSize(QtCore.QSize(60, 16777215))
self.timeDone.setAlignment(QtCore.Qt.AlignCenter)
self.timeDone.setObjectName(_fromUtf8("timeDone"))
self.gridLayout_8.addWidget(self.timeDone, 0, 0, 1, 1)
self.seekBar = QtGui.QSlider(self.controlView)
self.seekBar.setMinimumSize(QtCore.QSize(365, 18))
self.seekBar.setMaximumSize(QtCore.QSize(16777215, 18))
self.seekBar.setOrientation(QtCore.Qt.Horizontal)
self.seekBar.setObjectName(_fromUtf8("seekBar"))
self.gridLayout_8.addWidget(self.seekBar, 0, 1, 1, 1)
self.timeLeft = QtGui.QLabel(self.controlView)
self.timeLeft.setMinimumSize(QtCore.QSize(60, 18))
self.timeLeft.setMaximumSize(QtCore.QSize(60, 18))
self.timeLeft.setAlignment(QtCore.Qt.AlignCenter)
self.timeLeft.setObjectName(_fromUtf8("timeLeft"))
self.gridLayout_8.addWidget(self.timeLeft, 0, 2, 1, 1)
self.verticalLayout.addLayout(self.gridLayout_8)
self.gridLayout_4 = QtGui.QGridLayout()
self.gridLayout_4.setMargin(1)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.muteButton = QtGui.QPushButton(self.controlView)
self.muteButton.setMinimumSize(QtCore.QSize(30, 30))
self.muteButton.setMaximumSize(QtCore.QSize(30, 30))
self.muteButton.setText(_fromUtf8(""))
self.muteButton.setObjectName(_fromUtf8("muteButton"))
self.gridLayout_4.addWidget(self.muteButton, 0, 4, 1, 1)
self.expansionWidget_3 = QtGui.QWidget(self.controlView)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.expansionWidget_3.sizePolicy().hasHeightForWidth())
self.expansionWidget_3.setSizePolicy(sizePolicy)
self.expansionWidget_3.setObjectName(_fromUtf8("expansionWidget_3"))
self.gridLayout_7 = QtGui.QGridLayout(self.expansionWidget_3)
self.gridLayout_7.setMargin(0)
self.gridLayout_7.setObjectName(_fromUtf8("gridLayout_7"))
self.gridLayout_4.addWidget(self.expansionWidget_3, 0, 1, 1, 1)
self.volumeBar = QtGui.QSlider(self.controlView)
self.volumeBar.setMinimumSize(QtCore.QSize(175, 0))
self.volumeBar.setMaximumSize(QtCore.QSize(100, 16777215))
self.volumeBar.setOrientation(QtCore.Qt.Horizontal)
self.volumeBar.setObjectName(_fromUtf8("volumeBar"))
self.gridLayout_4.addWidget(self.volumeBar, 0, 5, 1, 1)
self.mediaSettingsWidget = QtGui.QWidget(self.controlView)
self.mediaSettingsWidget.setMinimumSize(QtCore.QSize(140, 60))
self.mediaSettingsWidget.setMaximumSize(QtCore.QSize(140, 60))
self.mediaSettingsWidget.setObjectName(_fromUtf8("mediaSettingsWidget"))
self.horizontalLayout_6 = QtGui.QHBoxLayout(self.mediaSettingsWidget)
self.horizontalLayout_6.setMargin(0)
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
        self.fullscreenButton = QtGui.QPushButton(self.mediaSettingsWidget)
self.fullscreenButton.setMinimumSize(QtCore.QSize(30, 30))
self.fullscreenButton.setMaximumSize(QtCore.QSize(30, 30))
        self.fullscreenButton.setText(_fromUtf8(""))
self.fullscreenButton.setObjectName(_fromUtf8("fullscreenButton"))
self.horizontalLayout_6.addWidget(self.fullscreenButton)
self.playlistButton = QtGui.QPushButton(self.mediaSettingsWidget)
self.playlistButton.setMinimumSize(QtCore.QSize(30, 30))
self.playlistButton.setMaximumSize(QtCore.QSize(30, 30))
self.playlistButton.setText(_fromUtf8(""))
self.playlistButton.setObjectName(_fromUtf8("playlistButton"))
self.horizontalLayout_6.addWidget(self.playlistButton)
self.stopButton = QtGui.QPushButton(self.mediaSettingsWidget)
self.stopButton.setMinimumSize(QtCore.QSize(30, 30))
self.stopButton.setMaximumSize(QtCore.QSize(30, 30))
self.stopButton.setText(_fromUtf8(""))
self.stopButton.setObjectName(_fromUtf8("stopButton"))
self.horizontalLayout_6.addWidget(self.stopButton)
self.gridLayout_4.addWidget(self.mediaSettingsWidget, 0, 0, 1, 1)
self.mediaControlWidget = QtGui.QWidget(self.controlView)
self.mediaControlWidget.setMinimumSize(QtCore.QSize(225, 70))
self.mediaControlWidget.setMaximumSize(QtCore.QSize(225, 70))
self.mediaControlWidget.setObjectName(_fromUtf8("mediaControlWidget"))
self.horizontalLayout_7 = QtGui.QHBoxLayout(self.mediaControlWidget)
self.horizontalLayout_7.setMargin(0)
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.previous = QtGui.QPushButton(self.mediaControlWidget)
self.previous.setMinimumSize(QtCore.QSize(40, 40))
self.previous.setMaximumSize(QtCore.QSize(40, 40))
self.previous.setText(_fromUtf8(""))
self.previous.setObjectName(_fromUtf8("previous"))
self.horizontalLayout_7.addWidget(self.previous)
self.playState = QtGui.QPushButton(self.mediaControlWidget)
self.playState.setMinimumSize(QtCore.QSize(50, 50))
self.playState.setMaximumSize(QtCore.QSize(50, 50))
self.playState.setText(_fromUtf8(""))
icon = QtGui.QIcon.fromTheme(_fromUtf8("play-2.svg"))
self.playState.setIcon(icon)
self.playState.setObjectName(_fromUtf8("playState"))
self.horizontalLayout_7.addWidget(self.playState)
self.next = QtGui.QPushButton(self.mediaControlW
Nitrate/Nitrate | src/tcms/auth/migrations/0001_initial.py | Python | gpl-2.0 | 1,148 | 0.000871
# -*- coding: utf-8 -*-
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="UserActiv
|
ateKey",
fields=[
(
"id",
models.AutoField(
                        verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
(
"activation_key",
models.CharField(max_length=40, null=True, blank=True),
),
("key_expires", models.DateTimeField(null=True, blank=True)),
(
"user",
models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
),
],
options={
"db_table": "tcms_user_activate_keys",
},
),
]
ArcherSys/ArcherSys | Lib/encodings/cp852.py | Python | mit | 105,146 | 0.01923
<<<<<<< HEAD
<<<<<<< HEAD
""" Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP852.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp852',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
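# Illustrative usage (not part of the generated file): since cp852 ships
# with the standard library, the codec is resolved by name and routes
# through the Codec classes above, e.g.:
#
#   "Žluťoučký".encode("cp852")   # charmap_encode via encoding_map
#   b"\x80".decode("cp852")       # -> "Ç", per the decoding map below
#
# getregentry() is what the encodings package calls to register the codec.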
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
0x0086: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x0088: 0x0142, # LATIN SMALL LETTER L WITH STROKE
0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x008a: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
0x008b: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x008d: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
0x0092: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
0x0096: 0x013e, # LATIN SMALL LETTER L WITH CARON
0x0097: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
0x0098: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
0x009c: 0x0165, # LATIN SMALL LETTER T WITH CARON
0x009d: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
0x009e: 0x00d7, # MULTIPLICATION SIGN
0x009f: 0x010d, # LATIN SMALL LETTER C WITH CARON
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
0x00a5: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
0x00a6: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
0x00a7: 0x017e, # LATIN SMALL LETTER Z WITH CARON
0x00a8: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
0x00a9: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
0x00ac: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
0x00ad: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00b7: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
0x00b8: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
0x00be: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x0102, # LATIN CAPITAL LETTER A WITH BREVE
0x00c7: 0x0103, # LATIN SMALL LETTER A WITH BREVE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x00a4, # CURRENCY SIGN
0x00d0: 0x0111, # LATIN SMALL LETTER D WITH STROKE
0x00d1: 0x0110, # LATIN CAPITAL LETTER D WITH STROKE
0x00d2: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
0x00d4: 0x010f, # LATIN SMALL LETTER D WITH CARON
0x00d5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
0x00d8: 0x011b, # LATIN SMALL LETTER E WITH CARON
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x0162, # LATIN CAPITAL LETTER T WITH CEDILLA
0x00de: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00e3: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
0x00e4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
0x00e5: 0x0148, # LATIN SMALL LETTER N WITH CARON
0x00e6: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
0x00e7: 0x0161, # LATIN SMALL LETTER S WITH CARON
0x00e8: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
0x00ea: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
0x00eb: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
0x00ec: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
0x00ed: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00ee: 0x0163, # LATIN SMALL LETTER T WITH CEDILLA
0x00ef: 0x00b4, # ACUTE ACCENT
0x00f0: 0x00ad, # SOFT HYPHEN
0x00f1: 0x02dd, # DOUBLE ACUTE ACCENT
    0x00f2: 0x02db, # OGONEK
0x00f3: 0x02c7, # CARON
    0x00f4: 0x02d8, # BREVE
yenliangl/bitcoin | test/functional/test_framework/blocktools.py | Python | mit | 9,688 | 0.003509
#!/usr/bin/env python3
# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for manipulating blocks and transactions."""
import struct
import time
import unittest
from .address import (
key_to_p2sh_p2wpkh,
key_to_p2wpkh,
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
from .messages import (
CBlock,
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
hash256,
ser_uint256,
tx_from_hex,
uint256_from_str,
)
from .script import (
CScript,
CScriptNum,
CScriptOp,
OP_1,
OP_CHECKMULTISIG,
OP_CHECKSIG,
OP_RETURN,
OP_TRUE,
)
from .script_util import (
key_to_p2wpkh_script,
script_to_p2wsh_script,
)
from .util import assert_equal
WITNESS_SCALE_FACTOR = 4
MAX_BLOCK_SIGOPS = 20000
MAX_BLOCK_SIGOPS_WEIGHT = MAX_BLOCK_SIGOPS * WITNESS_SCALE_FACTOR
# Genesis block time (regtest)
TIME_GENESIS_BLOCK = 1296688602
# Coinbase transaction outputs can only be spent after this number of new blocks (network rule)
COINBASE_MATURITY = 100
# Soft-fork activation heights
DERSIG_HEIGHT = 102 # BIP 66
CLTV_HEIGHT = 111 # BIP 65
CSV_ACTIVATION_HEIGHT = 432
# From BIP141
WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
NORMAL_GBT_REQUEST_PARAMS = {"rules": ["segwit"]}
VERSIONBITS_LAST_OLD_BLOCK_VERSION = 4
def create_block(hashprev=None, coinbase=None, ntime=None, *, version=None, tmpl=None, txlist=None):
"""Create a block (with regtest difficulty)."""
block = CBlock()
if tmpl is None:
tmpl = {}
block.nVersion = version or tmpl.get('version') or VERSIONBITS_LAST_OLD_BLOCK_VERSION
block.nTime = ntime or tmpl.get('curtime') or int(time.time() + 600)
block.hashPrevBlock = hashprev or int(tmpl['previousblockhash'], 0x10)
if tmpl and not tmpl.get('bits') is None:
block.nBits = struct.unpack('>I', bytes.fromhex(tmpl['bits']))[0]
else:
block.nBits = 0x207fffff # difficulty retargeting is disabled in REGTEST chainparams
if coinbase is None:
coinbase = create_coinbase(height=tmpl['height'])
block.vtx.append(coinbase)
if txlist:
for tx in txlist:
if not hasattr(tx, 'calc_sha256'):
tx = tx_from_hex(tx)
block.vtx.append(tx)
block.hashMerkleRoot = block.calc_merkle_root()
block.calc_sha256()
return block
def get_witness_script(witness_root, witness_nonce):
witness_commitment = uint256_from_str(hash256(ser_uint256(witness_root) + ser_uint256(witness_nonce)))
output_data = WITNESS_COMMITMENT_HEADER + ser_uint256(witness_commitment)
return CScript([OP_RETURN, output_data])
def add_witness_commitment(block, nonce=0):
"""Add a witness commitment to the block's coinbase transaction.
According to BIP141, blocks with witness rules active must commit to the
hash of all in-block transactions including witness."""
# First calculate the merkle root of the block's
# transactions, with witnesses.
witness_nonce = nonce
witness_root = block.calc_witness_merkle_root()
# witness_nonce should go to coinbase witness.
block.vtx[0].wit.vtxinwit = [CTxInWitness()]
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(witness_nonce)]
# witness commitment is the last OP_RETURN output in coinbase
block.vtx[0].vout.append(CTxOut(0, get_witness_script(witness_root, witness_nonce)))
block.vtx[0].rehash()
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
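# Illustrative usage (not from this file): a typical caller builds a block
# from a getblocktemplate result, commits to the witness data, then mines
# it. `tmpl` and `signed_tx_hex` are placeholders:
#
#   block = create_block(tmpl=tmpl, txlist=[signed_tx_hex])
#   add_witness_commitment(block)
#   block.solve()  # grind nNonce until the regtest proof-of-work passes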
def script_BIP34_coinbase_height(height):
if height <= 16:
res = CScriptOp.encode_op_n(height)
# Append dummy to increase scriptSig size above 2 (see bad-cb-length consensus rule)
return CScript([res, OP_1])
return CScript([CScriptNum(height)])
def create_coinbase(height, pubkey=None, extra_output_script=None, fees=0, nValue=50):
"""Create a coinbase transaction.
If pubkey is passed in, the coinbase output will be a P2PK output;
otherwise an anyone-can-spend output.
If extra_output_script is given, make a 0-value output to that
script. This is useful to pad block weight/sigops as needed. """
coinbase = CTransaction()
coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff), script_BIP34_coinbase_height(height), 0xffffffff))
coinbaseoutput = CTxOut()
coinbaseoutput.nValue = nValue * COIN
if nValue == 50:
halvings = int(height / 150) # regtest
coinbaseoutput.nValue >>= halvings
coinbaseoutput.nValue += fees
if pubkey is not None:
coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
else:
coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
coinbase.vout = [coinbaseoutput]
if extra_output_script is not None:
coinbaseoutput2 = CTxOut()
coinbaseoutput2.nValue = 0
coinbaseoutput2.scriptPubKey = extra_output_script
coinbase.vout.append(coinbaseoutput2)
coinbase.calc_sha256()
return coinbase
def create_tx_with_script(prevtx, n, script_sig=b"", *, amount, script_pub_key=CScript()):
"""Return one-input, one-output transaction object
spending the prevtx's n-th output with the given amount.
Can optionally pass scriptPubKey and scriptSig, default is anyone-can-spend output.
"""
tx = CTransaction()
assert n < len(prevtx.vout)
tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), script_sig, 0xffffffff))
tx.vout.append(CTxOut(amount, script_pub_key))
tx.calc_sha256()
return tx
def create_transaction(node, txid, to_address, *, amount):
""" Return signed transaction spending the first output of the
input txid. Note that the node must have a wallet that can
sign for the output that is being spent.
"""
raw_tx = create_raw_transaction(node, txid, to_address, amount=amount)
tx = tx_from_hex(raw_tx)
return tx
def create_raw_transaction(node, txid, to_address, *, amount):
""" Return raw signed transaction spending the first output of the
input txid. Note that the node must have a wallet that can sign
for the output that is being spent.
"""
psbt = node.createpsbt(inputs=[{"txid": txid, "vout": 0}], outputs={to_address: amount})
for _ in range(2):
for w in node.listwallets():
wrpc = node.get_wallet_rpc(w)
signed_psbt = wrpc.walletprocesspsbt(psbt)
psbt = signed_psbt['psbt']
final_psbt = node.finalizepsbt(psbt)
assert_equal(final_psbt["complete"], True)
return final_psbt['hex']
def get_legacy_sigopcount_block(block, accurate=True):
count = 0
for tx in block.vtx:
count += get_legacy_sigopcount_tx(tx, accurate)
return count
def get_legacy_sigopcount_tx(tx, accurate=True):
count = 0
for i in tx.vout:
count += i.scriptPubKey.GetSigOpCount(accurate)
for j in tx.vin:
# scriptSig might be of type bytes, so convert to CScript for the moment
count += CScript(j.scriptSig).GetSigOpCount(accurate)
return count
def witness_script(use_p2wsh, pubkey):
"""Create a scriptPubKey for a pay-to-witness TxOut.
This is either a P2WPKH output for the given pubkey, or a P2WSH output of a
1-of-1 multisig for the given pubkey. Returns the hex encoding of the
scriptPubKey."""
if not use_p2wsh:
# P2WPKH instead
pkscript = key_to_p2wpkh_script(pubkey)
else:
# 1-of-1 multisig
witness_script = CScript([OP_1, bytes.fromhex(pubkey), OP_1, OP_CHECKMULTISIG])
pkscript = script_to_p2wsh_script(witness_script)
return pkscript.hex()
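# Illustrative example (not from this file): for a hex-encoded pubkey this
# returns the scriptPubKey hex a wallet would watch:
#
#   witness_script(False, pubkey_hex)  # P2WPKH: OP_0 <20-byte key hash>
#   witness_script(True, pubkey_hex)   # P2WSH:  OP_0 <32-byte script hash>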
def create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount):
"""Return a transaction (in hex) that spends the given utxo to a segwit output.
Optionally wrap the segwit output using P2SH."""
if use_p2wsh:
program = CScript([OP_1, bytes.fromhex(pubkey), OP_1, OP_CHECKMULTISIG])
addr = script_to_p2sh_p2wsh(program) if encode_p2sh else script_to_p2wsh(program)
else:
addr = key_to
SiderZhang/p2pns3 | src/emu/bindings/modulegen__gcc_LP64.py | Python | gpl-2.0 | 285,868 | 0.014741
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.emu', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper [class]
module.add_class('AsciiTraceHelper', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice [class]
module.add_class('AsciiTraceHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## system-mutex.h (module 'core'): ns3::CriticalSection [class]
module.add_class('CriticalSection', import_from_module='ns.core')
## data-rate.h (module 'network'): ns3::DataRate [class]
module.add_class('DataRate', import_from_module='ns.network')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
module.add_class('Mac48Address', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class]
module.add_class('NetDeviceContainer', import_from_module='ns.network')
## node-container.h (module 'network'): ns3::NodeContainer [class]
module.add_class('NodeContainer', import_from_module='ns.network')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## pcap-file.h (module 'network'): ns3::PcapFile [class]
module.add_class('PcapFile', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelper [class]
module.add_class('PcapHelper', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelper [enumeration]
module.add_enum('', ['DLT_NULL', 'DLT_EN10MB', 'DLT_PPP', 'DLT_RAW', 'DLT_IEEE802_11', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO'], outer_class=root_module['ns3::PcapHelper'], import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice [class]
module.add_class('PcapHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simulator.h (module 'core'): ns3::Simulator [class]
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
## system-mutex.h (module 'core'): ns3::SystemMutex [class]
module.add_class('SystemMutex', import_from_module='ns.core')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::Objec
johankaito/fufuka | microblog/app/views.py | Python | apache-2.0 | 10,442 | 0.032561
#from flask.templating import render_template
# Also installed redis
from app import app
from flask import Flask, request, url_for, Response, redirect
from extended_client import extended_client
import json
from jinja2 import Environment, PackageLoader
import logging
from time import sleep
#To access JMX Rest api
import requests
#To allow calling of sh commands from python
import commands
#Threading purposes
import threading
#For async tasks
from celery import Celery
#For doing msg_out rate calculations
import math
#For the timing of things
import datetime
#messages_in_topic_per_second = 'java -cp $JAVA_HOME/lib/tools.jar:../target/scala-2.10/cjmx.jar cjmx.Main 3628 \"mbeans \'kafka.server:type=BrokerTopicMetrics,name=MessagesInPerSec,*\' select *\"'
#For getting the process id of kafka
#import os
#PIDs = os.system("ps aux | grep \"kafka.Kafka\" | grep -v grep | awk '{print $2}'")
#For getting ipaddress
import socket
ip = socket.gethostbyname(socket.gethostname())
host = {}
host["ip"] = ip
#Jinja templating
env = Environment(loader=PackageLoader('app','templates'))
ext_client=None
json_data=None
json_nodes=None
zk=None
json_topics=None
remote_server = {}
remote_server["host"]= "John"
local = "local"
remote = "remote"
#CSS File
#reading_from={}
reading_from=""
#Store the offsets for each topic all consumers consume from
#Objects for keeping track of rates for CONSUMERS
prev_consumer_info = {}
prev_consumer_counts = {}
#Store the accumulated offset
accumulated_topic_rates = {}
consumers = ""
#Stores information for msgs_out
second_counter = 0
seconds_in_a_day = 86400 #(60*60*24)
#Objects for keeping track of rates for TOPICS
topic_sums = {}
prev_topic_info = {}
prev_topic_counts = {}
#Proxy server
proxy = None
#reading_from["data"] = None
#
#
# FUNCTIONS
#
#
# The thing that the user sees
@app.route('/')
@app.route('/index')
def index():
print "Index called"
template = env.get_template('index.html')
title = "Fufuka"
client_url = ""#ext_client.url_port
return template.render(page_title=title, zk_client=client_url)
# Gets all the form data from the "Start visualization page"
@app.route('/', methods=['POST'])
def index_return_values():
print "/ with data. Form received"
start = datetime.datetime.now()
#hostname = request.local
dictionary = request.form
print "Dict: " + str(dictionary) + " :" + str(len(dictionary))
#print list(v for k,v in dictionary.iteritems() if 'jmx' in k)
if len(dictionary) > 1:
#Dealing with a remote connection
print "Remotely"
global reading_from
#reading_from["data"] = str(remote)
reading_from = str(remote)
hostname = request.form.get("hostname", None)
zkhostnamePort = request.form.get("zkhostnameport", None)
        global proxy
        proxy = request.form.get("proxy", None)
print "Connecting to: " + hostname
print "With zk at: " + zkhostnamePort
print "Proxy: " + proxy
global hostandport
#Set the remote host
remote_server["host"] = str(hostname)
#Set all the JMX ports that need to be listened to
jmx_ports = list(v for k,v in dictionary.iteritems() if 'jmx' in k)
remote_server["ports"] = []
for port in jmx_ports:
print "JMX ports: " + str(port)
remote_server["ports"].append(str(port))
else:
#Dealing with a local connection
#reading_from["data"] = str(local)
reading_from = str(local)
print "Local"
zkhostnamePort = request.form.get("zkhostnameport", None)
print "Connecting to: " + zkhostnamePort
# Process data for getting to zk instance
#
#
split = zkhostnamePort.index(':')
hostname = zkhostnamePort[:split]
port = int(zkhostnamePort[split+1:])
#Start an instance of the extended_client
global ext_client
ext_client = extended_client(hostname, port)
#Start zookeeper client
global zk
zk = ext_client.zk
zk.start()
#Once the returned values are found, set them all
#Get consumers and producers
topics = ext_client.show_all_topics(zk)
#Populate topic holder
for t in topics:
topic_sums[t] = 0
prev_topic_info[t] = {}
prev_topic_counts[t] = []
global json_topics
json_topics = json.dumps(topics)
#Get the json data and store it
global json_data
json_data = json.dumps(ext_client.get_json(zk))
global json_nodes
json_nodes = json.dumps(ext_client.get_nodes_json(zk))
json_edges = json.dumps(ext_client.get_edges_json(zk))
end = datetime.datetime.now()
print "Total time to load zk information: " + str(end-start)
return redirect("/zk")
# Main viewing area for zks
@app.route('/zk')
def zk_client():
print "/zk called"
#Set the consumers then continously calculate their offsets
print "Creating consumer holders:"
start_time = datetime.datetime.now()
global consumers
consumers = ext_client.show_all_consumers(zk)
#Populate consumer holders
for c in consumers:
prev_consumer_info[c] = {}
prev_consumer_counts[c] = []
for c in consumers:
topics = ext_client.show_topics_consumed(zk, c)
for t in topics:
prev_consumer_info[c][t] = {}
#print prev_consumer_info
end_time = datetime.datetime.now()
calculate_offsets()
#Set the template of the page
template = env.get_template('zk_client.html')
#brokers = ext_client.show_brokers_ids(zk)
#Get the information of the current zookeeper instance
data = {}
data["zkinfo"] = str(ext_client.url_port)
print "Total con: " + str(len(consumers))
print "Total time to load /zk page: " + str(end_time-start_time)
return template.render(data=data)#consumers=consumers, brokers=brokers, producers=producers, topics=topics)#, r=r.content)
# Loads the d3 graph onto the iframe
@app.route('/test')
def test_2():
print "/test called"
start = datetime.datetime.now()
template = env.get_template('test2_graph.html')
js_url = url_for('static', filename='js/loadGraph.js')
# graph={}
# graph["nodes"] = json_nodes
# graph["edges"] = json_edges
data = {}
data["json_data"] = json_data
data["json_nodes"] = json_nodes
data["json_topics"] = json_topics
data["js_url"] = js_url
data["host"] = host
data["remote_server"] = remote_server
data["reading_from"] = reading_from
data["largest_weight"] = ext_client.get_largest_weight(zk)
data["smallest_weight"] = ext_client.get_smallest_weight(zk)
data["proxy"] = proxy
sendData = json.dumps(data)
# print "---------------------------"
# print "---------------------------"
# print "---------------------------"
end = datetime.datetime.now()
print "Total time to load /test page: " + str(end-start)
#print data
return template.render(data=sendData)#json_data=json_data, json_nodes=json_nodes, json_topics=json_topics, js_url=js_url, host=host, remote_server=remote_server, readingFrom=reading_from)
# Method to return offset rates
def get_rate(rate_type, prevData):
one_minute = 60
if rate_type == "minute":
#Get the minute rate
if len(prevData) > one_minute:
#print " Min rate "
#print "L: " + str(prevData[second_counter+1]) + " S: " + str(prevData[second_counter-one_minute])
#min_rate = abs(prevData[second_counter+1] - prevData[second_counter-one_minute])
min_rate = abs(prevData[second_counter] - prevData[second_counter-one_minute])/(one_minute + 0.0)
return min_rate
else:
min_rate = 0
return min_rate
if rate_type == "mean":
#Get the mean rate
if second_counter > 0:
#print " Mean rate"
#Method 1
#global predata_sum
            #mean_rate = predata_sum/(second_counter+0.0)
#Method 2
# print "L: " + str(prevData[second_counter+1]) + " S: " + str(prevData[0])
# mean_rate = abs(prevData[second_counter+1] - prevData[0])/(second_counter+0.0)
#Method 3
# print " ArrLen: " + str(len(prevData))
# print " SC: " + str(second_counter)
# print " L: " + str(prevData[second_counter])+ " S: " + str(prevData[0])
            mean_rate = abs(prevData[second_counter] - prevData[0])/(second_counter+0.0)
#print " MeanR " + str(mean_rate)
return mean_rate
else:
mean_rate = -1
return mean_rate
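# Worked example (illustrative): sampling a cumulative offset once per
# second, with prevData = [0, 5, 10, ..., 300] and second_counter = 60, the
# minute rate is |prevData[60] - prevData[0]| / 60 = 5 msgs/sec; the mean
# rate is the same span divided by second_counter, so also 5 here.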
# Threaded method which calculates the offsets
def calculate_offsets():
#Get individual offsets of a consumer
for c in consumers:
global prev_consumer_info
#prev_consumer_info[c] = {}
topics = ext_client.show_topics_consum
dcsan/ltel | resources/__init__.py | Python | mit | 197 | 0.005076
def load_resources(app):
# import all our Resources to get them registered
import home
import facebook
import fblogin
home.load_resources(app)
    fblogin.load_resources(app)
praekelt/django-form-designer | form_designer/models.py | Python | bsd-3-clause | 20,394 | 0.010052
from django.db import models
from django.utils.translation import ugettext, ugettext_lazy as _
from django.forms import widgets
from django.core.mail import send_mail
from django.conf import settings
from form_designer import app_settings
import re
from form_designer.pickled_object_field import PickledObjectField
from form_designer.model_name_field import ModelNameField
from form_designer.template_field import TemplateTextField, TemplateCharField
#==============================================================================
class FormDefinition(models.Model):
"""
A model that defines a form and its components and properties.
"""
name = models.SlugField(_('Name'), max_length=255, unique=True)
title = models.CharField(_('Title'), max_length=255, blank=True, null=True)
action = models.URLField(_('Target URL'), help_text=_('If you leave this empty, the page where the form resides will be requested, and you can use the mail form and logging features. You can also send data to external sites: For instance, enter "http://www.google.ch/search" to create a search form.'), max_length=255, blank=True, null=True)
    mail_to = TemplateCharField(_('Send form data to e-mail address'), help_text=('Separate several addresses with a comma. Your form fields are available as template context. Example: "admin@domain.com, {{ from_email }}" if you have a field named `from_email`.'), max_length=255, blank=True, null=True)
mail_from = TemplateCharField(_('Sender address'), max_length=255, help_text=('Your form fields are available as template context. Example: "{{ firstname }} {{ lastname }} <{{ from_email }}>" if you have fields named `first_name`, `last_name`, `from_email`.'), blank=True, null=True)
mail_subject = TemplateCharField(_('e-Mail subject'), max_length=255, help_text=('Your form fields are available as template context. Example: "Contact form {{ subject }}" if you have a field named `subject`.'), blank=True, null=True)
method = models.CharField(_('Method'), max_length=10, default="POST", choices = (('POST', 'POST'), ('GET', 'GET')))
success_message = models.CharField(_('Success message'), max_length=255, blank=True, null=True)
error_message = models.CharField(_('Error message'), max_length=255, blank=True, null=True)
submit_label = models.CharField(_('Submit button label'), max_length=255, blank=True, null=True)
log_data = models.BooleanField(_('Log form data'), help_text=_('Logs all form submissions to the database.'), default=True)
success_redirect = models.BooleanField(_('Redirect after success'), help_text=_('You should install django_notify if you want to enable this.') if not 'django_notify' in settings.INSTALLED_APPS else None, default=False)
success_clear = models.BooleanField(_('Clear form after success'), default=True)
allow_get_initial = models.BooleanField(_('Allow initial values via URL'), help_text=_('If enabled, you can fill in form fields by adding them to the query string.'), default=True)
message_template = TemplateTextField(_('Message template'), help_text=_('Your form fields are available as template context. Example: "{{ message }}" if you have a field named `message`. To iterate over all fields, use the variable `data` (a list containing a dictionary for each form field, each containing the elements `name`, `label`, `value`).'), blank=True, null=True)
form_template_name = models.CharField(_('Form template'), max_length=255, choices=app_settings.get('FORM_DESIGNER_FORM_TEMPLATES'), blank=True, null=True)
#--------------------------------------------------------------------------
class Meta:
verbose_name = _('form')
verbose_name_plural = _('forms')
#--------------------------------------------------------------------------
def get_field_dict(self):
dict = {}
for field in self.fields.all():
dict[field.name] = field
return dict
#--------------------------------------------------------------------------
def get_form_data(self, form):
data = []
field_dict = self.get_field_dict()
form_keys = form.fields.keys()
def_keys = field_dict.keys()
for key in form_keys:
if key in def_keys and field_dict[key].include_result:
value = form.cleaned_data[key]
if getattr(value, '__form_data__', False):
value = value.__form_data__()
data.append({'name': key, 'label': form.fields[key].label, 'value': value})
return data
#--------------------------------------------------------------------------
def get_form_data_dict(self, form_data):
dict = {}
for field in form_data:
dict[field['name']] = field['value']
return dict
#--------------------------------------------------------------------------
def compile_message(self, form_data, template=None):
from django.template.loader import get_template
from django.template import Context, Template
if template:
t = get_template(template)
elif not self.message_template:
t = get_template('txt/formdefinition/data_message.txt')
else:
t = Template(self.message_template)
context = Context(self.get_form_data_dict(form_data))
context['data'] = form_data
return t.render(context)
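    # Illustration only (hypothetical field name; the form_data shape follows
    # get_form_data() above):
    #   form_data = [{'name': 'message', 'label': 'Message', 'value': 'hi'}]
    #   definition.compile_message(form_data)  # renders '{{ message }}' -> u'hi'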
#--------------------------------------------------------------------------
def count_fields(self):
return self.fields.count()
count_fields.short_description = _('Fields')
#--------------------------------------------------------------------------
def __unicode__(self):
return self.title or self.name
#--------------------------------------------------------------------------
def log(self, form):
"""
Saves the form submission.
"""
form_data = self.get_form_data(form)
field_dict = self.get_field_dict()
# create a submission
submission = FormSubmission()
submission.save()
# log each field's value individually
for field_data in form_data:
field_submission = FormFieldSubmission(submission=submission, definition_field=field_dict[field_data['name']],
value=field_data['value'])
field_submission.save()
return submission
#--------------------------------------------------------------------------
def string_template_replace(self, text, context_dict):
from django.template import Context, Template, TemplateSyntaxError
try:
t = Template(text)
return t.render(Context(context_dict))
except TemplateSyntaxError:
return text
#--------------------------------------------------------------------------
def send_mail(self, form):
form_data = self.get_form_data(form)
message = self.compile_message(form_data)
context_dict = self.get_form_data_dict(form_data)
import re
mail_to = re.compile('\s*[,;]+\s*').split(self.mail_to)
for key, email in enumerate(mail_to):
mail_to[key] = self.string_template_replace(email, context_dict)
mail_from = self.mail_from or None
if mail_from:
mail_from = self.string_template_replace(mail_from, context_dict)
if self.mail_subject:
mail_subject = self.string_template_replace(self.mail_subject, context_dict)
else:
mail_subject = self.title
import logging
logging.debug('Mail: '+repr(mail_from)+' --> '+repr(mail_to));
from django.core.mail import send_mail
send_mail(mail_subject, message, mail_from or None, mail_to, fail_silently=False)
#--------------------------------------------------------------------------
@property
def submit_flag_name(self):
name = app_settings.get('FORM_DESIGNER_SUBMIT_FLAG_NAME') % self.name
while self.fields.filter(nam

asgeir/pydigilent | pydigilent/lowlevel/dwf.py | Python | mit | 82,320 | 0.01318
import ctypes
import sys
if sys.platform.startswith("win"):
_dwf = ctypes.cdll.dwf
elif sys.platform.startswith("darwin"):
_dwf = ctypes.cdll.LoadLibrary("libdwf.dylib")
else:
_dwf = ctypes.cdll.LoadLibrary("libdwf.so")
class _types(object):
c_byte_p = ctypes.POINTER(ctypes.c_byte)
c_double_p = ctypes.POINTER(ctypes.c_double)
c_int_p = ctypes.POINTER(ctypes.c_int)
c_uint_p = ctypes.POINTER(ctypes.c_uint)
class HDWF(ctypes.c_int):
pass
hdwfNone = HDWF(0)
class ENUMFILTER(ctypes.c_int):
pass
enumfilterAll = ENUMFILTER(0)
enumfilterEExplorer = ENUMFILTER(1)
enumfilterDiscovery = ENUMFILTER(2)
class DEVID(ctypes.c_int):
pass
devidEExplorer = DEVID(1)
devidDiscovery = DEVID(2)
class DEVVER(ctypes.c_int):
pass
devverEExplorerC = DEVVER(2)
devverEExplorerE = DEVVER(4)
devverEExplorerF = DEVVER(5)
devverDiscoveryA = DEVVER(1)
devverDiscoveryB = DEVVER(2)
devverDiscoveryC = DEVVER(3)
class TRIGSRC(ctypes.c_byte):
pass
trigsrcNone = TRIGSRC(0)
trigsrcPC = TRIGSRC(1)
trigsrcDetectorAnalogIn = TRIGSRC(2)
trigsrcDetectorDigitalIn = TRIGSRC(3)
trigsrcAnalogIn = TRIGSRC(4)
trigsrcDigitalIn = TRIGSRC(5)
trigsrcDigitalOut = TRIGSRC(6)
trigsrcAnalogOut1 = TRIGSRC(7)
trigsrcAnalogOut2 = TRIGSRC(8)
trigsrcAnalogOut3 = TRIGSRC(9)
trigsrcAnalogOut4 = TRIGSRC(10)
trigsrcExternal1 = TRIGSRC(11)
trigsrcExternal2 = TRIGSRC(12)
trigsrcExternal3 = TRIGSRC(13)
trigsrcExternal4 = TRIGSRC(14)
class DwfState(ctypes.c_byte):
pass
DwfStateReady = DwfState(0)
DwfStateConfig = DwfState(4)
DwfStatePrefill = DwfState(5)
DwfStateArmed = DwfState(1)
DwfStateWait = DwfState(7)
DwfStateTriggered = DwfState(3)
DwfStateRunning = DwfState(3)
DwfStateDone = DwfState(2)
class ACQMODE(ctypes.c_int):
pass
acqmodeSingle = ACQMODE(0)
acqmodeScanShift = ACQMODE(1)
acqmodeScanScreen = ACQMODE(2)
acqmodeRecord = ACQMODE(3)
class FILTER(ctypes.c_int):
pass
filterDecimate = FILTER(0)
filterAverage = FILTER(1)
filterMinMax = FILTER(2)
class TRIGTYPE(ctypes.c_int):
pass
trigtypeEdge = TRIGTYPE(0)
trigtypePulse = TRIGTYPE(1)
trigtypeTransition = TRIGTYPE(2)
class TRIGCOND(ctypes.c_int):
pass
trigcondRisingPositive = TRIGCOND(0)
trigcondFallingNegative = TRIGCOND(1)
class TRIGLEN(ctypes.c_int):
pass
triglenLess = TRIGLEN(0)
triglenTimeout = TRIGLEN(1)
triglenMore = TRIGLEN(2)
class DWFERC(ctypes.c_int):
pass
dwfercNoErc = DWFERC(0) # No error occurred
dwfercUnknownError = DWFERC(1) # API waiting on pending API timed out
dwfercApiLockTimeout = DWFERC(2) # API waiting on pending API timed out
dwfercAlreadyOpened = DWFERC(3) # Device already opened
dwfercNotSupported = DWFERC(4) # Device not supported
dwfercInvalidParameter0 = DWFERC(0x10) # Invalid parameter sent in API call
dwfercInvalidParameter1 = DWFERC(0x11) # Invalid parameter sent in API call
dwfercInvalidParameter2 = DWFERC(0x12) # Invalid parameter sent in API call
dwfercInvalidParameter3 = DWFERC(0x13) # Invalid parameter sent in API call
dwfercInvalidParameter4 = DWFERC(0x14) # Invalid parameter sent in API call
class FUNC(ctypes.c_byte):
pass
funcDC = FUNC(0)
funcSine = FUNC(1)
funcSquare = FUNC(2)
funcTriangle = FUNC(3)
funcRampUp = FUNC(4)
funcRampDown = FUNC(5)
funcNoise = FUNC(6)
funcCustom = FUNC(30)
funcPlay = FUNC(31)
class ANALOGIO(ctypes.c_byte):
pass
analogioEnable = ANALOGIO(1)
analogioVoltage = ANALOGIO(2)
analogioCurrent = ANALOGIO(3)
analogioPower = ANALOGIO(4)
analogioTemperature = ANALOGIO(5)
class AnalogOutNode(ctypes.c_int):
pass
AnalogOutNodeCarrier = AnalogOutNode(0)
AnalogOutNodeFM = AnalogOutNode(1)
AnalogOutNodeAM = AnalogOutNode(2)
class DwfDigitalInClockSource(ctypes.c_int):
pass
DwfDigitalInClockSourceInternal = DwfDigitalInClockSource(0)
DwfDigitalInClockSourceExternal = DwfDigitalInClockSource(1)
class DwfDigitalInSampleMode(ctypes.c_int):
pass
DwfDigitalInSampleModeSimple = DwfDigitalInSampleMode(0)
DwfDigitalInSampleModeNoise = DwfDigitalInSampleMode(1)
class DwfDigitalOutOutput(ctypes.c_int):
pass
DwfDigitalOutOutputPushPull = DwfDigitalOutOutput(0)
DwfDigitalOutOutputOpenDrain = DwfDigitalOutOutput(1)
DwfDigitalOutOutputOpenSource = DwfDigitalOutOutput(2)
DwfDigitalOutOutputThreeState = DwfDigitalOutOutput(3)
class DwfDigitalOutType(ctypes.c_int):
pass
DwfDigitalOutTypePulse = DwfDigitalOutType(0)
DwfDigitalOutTypeCustom = DwfDigitalOutType(1)
DwfDigitalOutTypeRandom = DwfDigitalOutType(2)
class DwfDigitalOutIdle(ctypes.c_int):
pass
DwfDigitalOutIdleInit = DwfDigitalOutIdle(0)
DwfDigitalOutIdleLow = DwfDigitalOutIdle(1)
DwfDigitalOutIdleHigh = DwfDigitalOutIdle(2)
DwfDigitalOutIdleZet = DwfDigitalOutIdle(3)
def IsBitSet(fs, bit):
return ((fs & (1 << bit)) != 0)
# Error and version APIs:
_FDwfGetLastError = _dwf.FDwfGetLastError
_FDwfGetLastError.argtypes = [ctypes.POINTER(DWFERC)]
_FDwfGetLastError.restype = bool
def FDwfGetLastError():
erc = DWFERC()
return (_FDwfGetLastError(ctypes.byref(erc)), erc)
_FDwfGetLastErrorMsg = _dwf.FDwfGetLastErrorMsg
_FDwfGetLastErrorMsg.argtypes = [ctypes.POINTER(ctypes.c_char * 512)]
_FDwfGetLastErrorMsg.restype = bool
def FDwfGetLastErrorMsg():
buf = ctypes.create_string_buffer(512)
return (_FDwfGetLastErrorMsg(ctypes.byref(buf)), buf.value)
_FDwfGetVersion = _dwf.FDwfGetVersion # Returns DLL version, for instance: "2.4.3"
_FDwfGetVersion.argtypes = [ctypes.POINTER(ctypes.c_char * 32)]
_FDwfGetVersion.restype = bool
def FDwfGetVersion():
buf = ctypes.create_string_buffer(32)
return (_FDwfGetVersion(ctypes.byref(buf)), buf.value)
# DEVICE MANAGMENT FUNCTIONS
# Enumeration:
_FDwfEnum = _dwf.FDwfEnum
_FDwfEnum.argtypes = [ENUMFILTER, _types.c_int_p]
_FDwfEnum.restype = bool
def FDwfEnum(enumFilter):
tmp = ctypes.c_int()
return (_FDwfEnum(enumFilter, ctypes.byref(tmp)), tmp.value)
_FDwfEnumDeviceType = _dwf.FDwfEnumDeviceType
_FDwfEnumDeviceType.argtypes = [ctypes.c_int, ctypes.POINTER(DEVID), ctypes.POINTER(DEVVER)]
_FDwfEnumDeviceType.restype = bool
def FDwfEnumDeviceType(idxDevice):
devid = DEVID()
devver = DEVVER()
return (_FDwfEnumDeviceType(idxDevice, ctypes.byref(devid), ctypes.byref(devver)), devid, devver)
_FDwfEnumDeviceIsOpened = _dwf.FDwfEnumDeviceIsOpened
_FDwfEnumDeviceIsOpened.argtypes = [ctypes.c_int, _types.c_byte_p]
_FDwfEnumDeviceIsOpened.restype = bool
def FDwfEnumDeviceIsOpened(idxDevice):
isopen = ctypes.c_byte()
return (_FDwfEnumDeviceIsOpened(idxDevice, ctypes.byref(isopen)), bool(isopen.value))
_FDwfEnumUserName = _dwf.FDwfEnumUserName
_FDwfEnumUserName.argtypes = [ctypes.c_int, ctypes.POINTER(ctypes.c_char * 32)]
_FDwfEnumUserName.restype = bool
def FDwfEnumUserName(idxDevice):
name = ctypes.create_string_buffer(32)
return (_FDwfEnumUserName(idxDevice, ctypes.byref(name)), name.value)
_FDwfEnumDeviceName = _dwf.FDwfEnumDeviceName
_FDwfEnumDeviceName.argtypes = [ctypes.c_int, ctypes.POINTER(ctypes.c_char * 32)]
_FDwfEnumDeviceName.restype = bool
def FDwfEnumDeviceName(idxDevice):
name = ctypes.create_string_buffer(32)
return (_FDwfEnumDeviceName(idxDevice, ctypes.byref(name)), name.value)
_FDwfEnumSN = _dwf.FDwfEnumSN
_FDwfEnumSN.argtypes = [ctypes.c_int, ctypes.POINTER(ctypes.c_char * 32)]
_FDwfEnumSN.restype = bool
def FDwfEnumSN(idxDevice):
sn = ctypes.create_string_buffer(32)
return (_FDwfEnumSN(idxDevice, ctypes.byref(sn)), sn.value)
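# Hedged illustration (not part of the original module): every wrapper above
# returns an (ok, value) tuple, so device discovery composes like this:
def _enumerate_devices_example():
    ok, device_count = FDwfEnum(enumfilterAll)
    for idx in range(device_count):
        _, name = FDwfEnumDeviceName(idx)
        _, serial = FDwfEnumSN(idx)
        print(name, serial)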
# Open/Close:
_FDwfDeviceOpen = _dwf.FDwfDeviceOpen
_FDwfDeviceOpen.argtypes = [ctypes.c_int, ctypes.POINTER(HDWF)]
_FDwfDeviceOpen.restype = bool
def FDwfDeviceOpen(idxDevice):
hdwf = HDWF()
return (_FDwfDeviceOpen(idxDevice, ctypes.byref(hdwf)), hdwf)
FDwfDeviceClose = _dwf.FDwfDeviceClose
FDwfDeviceClose.argtypes = [HDWF]
FDwfDeviceClose.restype = bool
FDwfDeviceCloseAll = _dwf.FDwfDeviceCloseAll
FDwfDeviceCloseAll.argtypes = []
FDwfDeviceCloseAll.restype = bool
FDwfDeviceAutoConfigureSet = _dwf.FDwfDeviceAutoConfigureSet
FDwfDeviceAutoConfigureSet.argtypes = [HDWF, ctypes.c_byte]
FDwfDeviceAut

dougmiller/theMetaCity | migrations/versions/551f78f9d8a5_fixed_the_id_and_reference_for_article_.py | Python | mit | 1,176 | 0.002551
"""Fixed the id and reference for article self referencing with foreign key
Revision ID: 551f78f9d8a5
Revises: 8e01032c9c5e
Create Date: 2018-11-17 19:13:52.491349
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '551f78f9d8a5'
down_revision = '8e01032c9c5e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('articles', sa.Column('parent_id', sa.Integer(), nullable=True))
    op.drop_constraint('articles_parent_fkey', 'articles', type_='foreignkey')
op.create_foreign_key(None, 'articles', 'articles', ['parent_id'], ['id'])
op.drop_column('articles', 'parent')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('articles', sa.Column('parent', sa.INTEGER(), autoincrement=False, nullable=True))
    op.drop_constraint(None, 'articles', type_='foreignkey')
op.create_foreign_key('articles_parent_fkey', 'articles', 'articles', ['parent'], ['id'])
op.drop_column('articles', 'parent_id')
# ### end Alembic commands ###

paralab/Dendro4 | python_scripts_sc16/csv_mat.py | Python | gpl-2.0 | 1,046 | 0.048757
# @author: Milinda Fernando
# School of Computing, University of Utah.
# generate all the slurm jobs for the sc16 poster, energy measurements,
import argparse
from subprocess import call
import os
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog='slurm_pbs')
parser.add_argument('-p','--prefix', help='file prefix that you need to merge')
parser.add_argument('-s','--suffix',help='suffix of the file')
parser.add_argument('-n','--n',help='number of flies that you need to merge')
args=parser.parse_args()
    tol_list=['0.000010','0.000100','0.001000','0.010000','0.100000','0.200000','0.300000','0.400000','0.500000']
#sendCommMap_M_tol_0.010000_npes_4096_pts_100000_ps_4096mat.csv
for tol in tol_list:
inFName=args.prefix+tol+args.suffix+'_'+args.n+'mat'+'.csv'
outFName=args.prefix+tol+args.suffix+'_'+args.n+'mat_comma'+'.csv'
fin=open(inFName,'r')
fout=open(outFName,'w')
for line in fin:
line=line.strip()
line=line.replace('\t',',')
fout.write(line+'\n')
fin.close()
fout.close()
print 'OK'

tebriel/dd-agent | tests/core/test_service_discovery.py | Python | bsd-3-clause | 15,146 | 0.004093
# stdlib
import copy
import mock
import unittest
# project
from utils.service_discovery.config_stores import get_config_store
from utils.service_discovery.consul_config_store import ConsulStore
from utils.service_discovery.etcd_config_store import EtcdStore
from utils.service_discovery.abstract_config_store import AbstractConfigStore
from utils.service_discovery.sd_backend import get_sd_backend
from utils.service_discovery.sd_docker_backend import SDDockerBackend
def clear_singletons(agentConfig):
get_config_store(agentConfig)._drop()
get_sd_backend(agentConfig)._drop()
class Response(object):
"""Dummy response class for mocking purpose"""
def __init__(self, content):
self.content = content
def json(self):
return self.content
def raise_for_status(self):
pass
def _get_container_inspect(c_id):
"""Return a mocked container inspect dict from self.container_inspects."""
for co, _, _ in TestServiceDiscovery.container_inspects:
if co.get('Id') == c_id:
return co
return None
def _get_conf_tpls(image_name, trace_config=False):
"""Return a mocked configuration template from self.mock_templates."""
return copy.deepcopy(TestServiceDiscovery.mock_templates.get(image_name)[0])
def _get_check_tpls(image_name, **kwargs):
if image_name in TestServiceDiscovery.mock_templates:
return [copy.deepcopy(TestServiceDiscovery.mock_templates.get(image_name)[0][0][0:3])]
elif image_name in TestServiceDiscovery.bad_mock_templates:
try:
return [copy.deepcopy(TestServiceDiscovery.bad_mock_templates.get(image_name))]
except Exception:
return None
def client_read(path):
"""Return a mocked string that would normally be read from a config store (etcd, consul...)."""
parts = path.split('/')
config_parts = ['check_names', 'init_configs', 'instances']
image, config_part = parts[-2], parts[-1]
return TestServiceDiscovery.mock_tpls.get(image)[0][config_parts.index(config_part)]
class TestServiceDiscovery(unittest.TestCase):
docker_container_inspect = {
u'Id': u'69ff25598b2314d1cdb7752cc3a659fb1c1352b32546af4f1454321550e842c0',
u'Image': u'6ffc02088cb870652eca9ccd4c4fb582f75b29af2879792ed09bb46fd1c898ef',
u'Name': u'/nginx',
u'NetworkSettings': {u'IPAddress': u'172.17.0.21', u'Ports': {u'443/tcp': None, u'80/tcp': None}}
}
kubernetes_container_inspect = {
u'Id': u'389dc8a4361f3d6c866e9e9a7b6972b26a31c589c4e2f097375d55656a070bc9',
u'Image': u'de309495e6c7b2071bc60c0b7e4405b0d65e33e3a4b732ad77615d90452dd827',
u'Name': u'/k8s_sentinel.38057ab9_redis-master_default_27b84e1e-a81c-11e5-8347-42010af00002_f70875a1',
u'Config': {u'ExposedPorts': {u'6379/tcp': {}}},
u'NetworkSettings': {u'IPAddress': u'', u'Ports': None}
}
malformed_container_inspect = {
u'Id': u'69ff25598b2314d1cdb7752cc3a659fb1c1352b32546af4f1454321550e842c0',
u'Image': u'6ffc02088cb870652eca9ccd4c4fb582f75b29af2879792ed09bb46fd1c898ef',
u'Name': u'/nginx'
}
container_inspects = [
# (inspect_dict, expected_ip, expected_port)
(docker_container_inspect, '172.17.0.21', ['80', '443']),
(kubernetes_container_inspect, None, ['6379']), # arbitrarily defined in the mocked pod_list
(malformed_container_inspect, None, KeyError)
]
# templates with variables already extracted
mock_templates = {
# image_name: ([(check_name, init_tpl, instance_tpl, variables)], (expected_config_template))
'image_0': (
[('check_0', {}, {'host': '%%host%%'}, ['host'])],
('check_0', {}, {'host': '127.0.0.1'})),
'image_1': (
[('check_1', {}, {'port': '%%port%%'}, ['port'])],
('check_1', {}, {'port': '1337'})),
'image_2': (
[('check_2', {}, {'host': '%%host%%', 'port': '%%port%%'}, ['host', 'port'])],
            ('check_2', {}, {'host': '127.0.0.1', 'port': '1337'})),
}
# raw templates coming straight from the config store
mock_tpls = {
# image_name: ('[check_name]', '[init_tpl]', '[instance_tpl]', expected_python_tpl_list)
'image_0': (
('["check_0"]', '[{}]', '[{"host": "%%host%%"}]'),
[('check_0', {}, {"host": "%%host%%"})]),
        'image_1': (
('["check_1"]', '[{}]', '[{"port": "%%port%%"}]'),
[('check_1', {}, {"port": "%%port%%"})]),
'image_2': (
('["check_2"]', '[{}]', '[{"host": "%%host%%", "port": "%%port%%"}]'),
[('check_2', {}, {"host": "%%host%%", "port": "%%port%%"})]),
'bad_image_0': ((['invalid template']), []),
'bad_image_1': (('invalid template'), []),
'bad_image_2': (None, [])
}
bad_mock_templates = {
'bad_image_0': ('invalid template'),
'bad_image_1': [('invalid template')],
'bad_image_2': None
}
def setUp(self):
self.etcd_agentConfig = {
'service_discovery': True,
'service_discovery_backend': 'docker',
'sd_template_dir': '/datadog/check_configs',
'sd_config_backend': 'etcd',
'sd_backend_host': '127.0.0.1',
'sd_backend_port': '2380'
}
self.consul_agentConfig = {
'service_discovery': True,
'service_discovery_backend': 'docker',
'sd_template_dir': '/datadog/check_configs',
'sd_config_backend': 'consul',
'sd_backend_host': '127.0.0.1',
'sd_backend_port': '8500'
}
self.auto_conf_agentConfig = {
'service_discovery': True,
'service_discovery_backend': 'docker',
'sd_template_dir': '/datadog/check_configs',
'additional_checksd': '/etc/dd-agent/checks.d/',
}
self.agentConfigs = [self.etcd_agentConfig, self.consul_agentConfig, self.auto_conf_agentConfig]
# sd_backend tests
@mock.patch('utils.http.requests.get')
@mock.patch('utils.kubeutil.check_yaml')
def test_get_host(self, mock_check_yaml, mock_get):
kubernetes_config = {'instances': [{'kubelet_port': 1337}]}
pod_list = {
'items': [{
'status': {
'podIP': '127.0.0.1',
'containerStatuses': [
{'containerID': 'docker://389dc8a4361f3d6c866e9e9a7b6972b26a31c589c4e2f097375d55656a070bc9'}
]
}
}]
}
mock_check_yaml.return_value = kubernetes_config
mock_get.return_value = Response(pod_list)
for c_ins, expected_ip, _ in self.container_inspects:
with mock.patch.object(AbstractConfigStore, '__init__', return_value=None):
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch('utils.kubeutil.get_conf_path', return_value=None):
sd_backend = get_sd_backend(agentConfig=self.auto_conf_agentConfig)
self.assertEqual(sd_backend._get_host(c_ins), expected_ip)
clear_singletons(self.auto_conf_agentConfig)
def test_get_ports(self):
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
for c_ins, _, expected_ports in self.container_inspects:
sd_backend = get_sd_backend(agentConfig=self.auto_conf_agentConfig)
if isinstance(expected_ports, list):
self.assertEqual(sd_backend._get_ports(c_ins), expected_ports)
else:
self.assertRaises(expected_ports, sd_backend._get_ports, c_ins)
clear_singletons(self.auto_conf_agentConfig)
@mock.patch('docker.Client.inspect_container', side_effect=_get_container_inspect)
@mock.patch.object(SDDockerBackend, '_get_config_templates', side_effect=_get_conf_tpls)
def test_get_check_configs(self, mock_inspect_container, mock_get_conf_tpls):
"""Test get_check_config with mocked container inspect and config t

PersianWikipedia/pywikibot-core | tests/tk_tests.py | Python | mit | 1,761 | 0
# -*- coding: utf-8 -*-
"""Tests for the Tk UI."""
#
# (C) Pywikibot team, 2008-2019
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, division, unicode_literals
import os
import pywikibot
from pywikibot.tools import PY2
from tests.aspects import unittest, TestCase, DefaultSiteTestCase
if os.environ.get('PYWIKIBOT_TEST_GUI', '0') == '1':
if not PY2:
import tkinter
else:
import Tkinter as tkinter # noqa: N813
from pywikibot.userinterfaces.gui import EditBoxWindow, Tkdialog
class TestTkdialog(TestCase):
"""Test Tkdialog."""
net = True
def testTkdialog(self):
"""Test Tk dialog."""
try:
box = Tkdialog('foo', 'tests/data/MP_sounds.png', 'MP_sounds.png')
box.show_dialog()
except ImportError as e:
pywikibot.warning(e)
class TestTkinter(DefaultSiteTestCase):
"""Test Tkinter."""
net = True
def testTkinter(self):
"""Test Tkinter window."""
root = tkinter.Tk()
root.resizable(width=tkinter.FALSE, height=tkinter.FALSE)
root.title('pywikibot GUI')
page = pywikibot.Page(pywikibot.Site(), 'Main Page')
content = page.get()
myapp = EditBoxWindow(root)
myapp.bind('<Control-d>', myapp.debug)
v = myapp.edit(content, highlight=page.title())
assert v is None
def setUpModule(): # noqa: N802
"""Skip Travis tests if PYWIKIBOT_TEST_GUI variable is not set."""
if os.environ.get('PYWIKIBOT_TEST_GUI', '0') != '1':
raise unittest.SkipTest('Tkinter tests are disabled on Travis-CI')
if __name__ == '__main__': # pragma: no cover
try:
unittest.main()
except SystemExit:
pass

andykhov/got-my-pi-on-you | src/mail.py | Python | mit | 1,141 | 0.008764
import os
import smtplib
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
class Email:
    emailCount = 0
def __init__(self, address, password):
self.address = address
self.password = password
        Email.emailCount += 1
def initSMTP(self, emailserver, port):
self.smtpconnection = smtplib.SMTP(emailserver, port) #returns an SMTP object
self.smtpconnection.ehlo() #says "hello" to smtp server
self.smtpconnection.starttls() #enable TLS encryption
self.smtpconnection.login(self.address, self.password)
def sendEmail(self, recipient, subject, message, imgPath):
msg = MIMEMultipart()
msg["Subject"]
|
= subject
msg["From"] = self.address
msg["To"] = recipient
msg.attach(MIMEText(message))
imgfp = open(imgPath, "rb")
img = MIMEImage(imgfp.read())
imgfp.close()
msg.attach(img)
        self.smtpconnection.sendmail(self.address, recipient, msg.as_string())
def closeSMTP(self):
self.smtpconnection.close()
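# Minimal usage sketch; the server, credentials and image path below are
# placeholders, not values from this repository:
if __name__ == "__main__":
    mailer = Email("me@example.com", "app-password")
    mailer.initSMTP("smtp.gmail.com", 587)
    mailer.sendEmail("you@example.com", "Motion detected",
                     "See the attached frame.", "/tmp/frame.jpg")
    mailer.closeSMTP()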

CARocha/estudiocafe | encuesta/migrations/0003_auto__chg_field_encuesta_altitud.py | Python | mit | 14,972 | 0.006946
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Encuesta.altitud'
db.alter_column(u'encuesta_encuesta', 'altitud', self.gf('django.db.models.fields.IntegerField')(null=True))
def backwards(self, orm):
# Changing field 'Encuesta.altitud'
db.alter_column(u'encuesta_encuesta', 'altitud', self.gf('django.db.models.fields.CharField')(max_length=50, null=True))
models = {
u'encuesta.aguafinca': {
'Meta': {'object_name': 'AguaFinca'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.beneficios': {
'Meta': {'object_name': 'Beneficios'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.combustible': {
'Meta': {'object_name': 'Combustible'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.composicion': {
'Meta': {'object_name': 'Composicion'},
'adultas': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'adultos': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'educacion_dueno': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'educacion_maxima_hombre': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'educacion_maxima_mujeres': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'encuesta': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['encuesta.Encuesta']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jovenes_mujeres': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'jovenes_varones': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'ninas': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'ninos': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'permanente_hombres': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'permanente_mujeres': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'relacion_finca_vivienda': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['encuesta.ViveFamilia']", 'null': 'True', 'blank': 'True'}),
'tecnico_hombres': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'tecnico_mujeres': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'temporales_hombres': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'temporales_mujeres': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'encuesta.creditoe': {
'Meta': {'object_name': 'CreditoE'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.dequien': {
'Meta': {'object_name': 'DeQuien'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.duenofinca': {
'Meta': {'object_name': 'DuenoFinca'},
'fecha_nacimiento': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.encuesta': {
'Meta': {'object_name': 'Encuesta'},
'altitud': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'beneficiarios': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['encuesta.Organizacion']", 'null': 'True', 'blank': 'True'}),
'cedula': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'comunidad': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Comunidad']"}),
'departamento': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Departamento']"}),
'dueno': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['encuesta.DuenoFinca']"}),
'fecha': ('django.db.models.fields.DateField', [], {}),
'finca': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipio': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Municipio']"}),
'nombre': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['encuesta.Entrevistado']"}),
'pais': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Pais']"}),
'position': ('geoposition.fields.GeopositionField', [], {'max_length': '42', 'null': 'True', 'blank': 'True'}),
'recolector': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['encuesta.Recolector']"}),
'sexo': ('django.db.models.fields.IntegerField', [], {})
},
u'encuesta.energiafinca': {
'Meta': {'object_name': 'EnergiaFinca'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.entrevistado': {
'Meta': {'object_name': 'Entrevistado'},
'fecha_nacimiento': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.meses': {
'Meta': {'object_name': 'Meses'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.necesidadalimento': {
'Meta': {'object_name': 'NecesidadAlimento'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.organizacion': {
'Meta': {'object_name': 'Organizacion'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'encuesta.quienfinancia': {
'Meta': {'object_name': 'QuienFinancia'},
'beneficio_ser_socio': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'beneficiario_socio'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['encuesta.Beneficios']"}),
'de_quien': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'quien'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['encuesta.DeQuien']"}),
'desde': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank'

eunchong/build | scripts/slave/generate_profile_shim.py | Python | bsd-3-clause | 1,231 | 0.007311
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A simple trampoline to generate_profile.py in the src/ directory.
generate_profile.py generates a synthetic user profile.
"""
import optparse
import os
import sys
from slave import build_directory
from common import chromium_utils
def main():
parser = optparse.OptionParser()
    parser.add_option('--build-dir', help='ignored')
parser.add_option('--target', help='Release or Debug')
parser.add_option('--profile-type-to-generate')
options, args = parser.parse_args()
output_dir = os.path.join(build_directory.GetBuildOutputDirectory(),
options.target,
'generated_profile')
cmd = [
sys.executable,
      os.path.join('src', 'tools', 'perf', 'generate_profile'),
'-v',
'--browser=' + options.target.lower(),
'--profile-type-to-generate=' + options.profile_type_to_generate,
'--output-dir=' + output_dir,
'--output-format=buildbot',
] + args
return chromium_utils.RunCommand(cmd)
if '__main__' == __name__:
sys.exit(main())

pengzhangdev/slackbot | slackbot/plugins/component/database/BookshelfDatabase.py | Python | mit | 2,538 | 0.005516
#! /usr/bin/env python
#
# whoosh_test.py ---
#
# Filename: whoosh_test.py
# Description:
# Author: Werther Zhang
# Maintainer:
# Created: Mon Oct 23 19:29:49 2017 (+0800)
#
# Change Log:
#
#
import os
from whoosh.index import create_in
from whoosh.index import exists_in
from whoosh.index import open_dir
from whoosh.fields import *
schema = Schema(title=TEXT(stored=True), path=ID(stored=True), author=TEXT, content=TEXT) # stored=True will show the result in results[0]
ix = create_in("indexdir", schema)
writer = ix.writer()
writer.add_document(title=u"First document", path=u"/a", author=u"Werther", content=u"This is the first document we've added!")
writer.add_document(title=u"Second document", path=u"/b", content=u"The second one is even more interesting!")
writer.commit()
from whoosh.qparser import QueryParser
with ix.searcher() as searcher:
query = QueryParser("author", ix.schema).parse("werther")
results = searcher.search(query)
    print results[0]
from whoosh.index import create_in
from whoosh.fields import *
from whoosh.qparser import QueryParser
class BookshelfDatabase(object):
"""BookshelfDatabase API"""
_DATABASE_DIR = '/mnt/mmc/database/bookshelf'
    def __init__(self):
        self.ix = None
# title (filename or title in db)
# path (relative path in /mnt/mmc/mi)
# author (author of the file)
# content (basename of file ; title; author)
# fileid (hash of path)
# date (file update time in string)
#
# when index, check whether file updated,
# using date AND fileid to get the item , if it not exists, update fileid with new info
# when search, using content defaultly and merge result in path , show title and path to user
schema = Schema(title=TEXT(stored=True), path=ID(stored=True), author=TEXT, content=TEXT, fileid=TEXT(unique=True), date=TEXT)
if not os.path.exists(BookshelfDatabase._DATABASE_DIR):
os.mkdir(BookshelfDatabase._DATABASE_DIR)
if not exists_in(BookshelfDatabase._DATABASE_DIR):
            self.ix = create_in(BookshelfDatabase._DATABASE_DIR, schema)
        else:
            self.ix = open_dir(BookshelfDatabase._DATABASE_DIR)
def add(self, title, path, content, fileid, date, author=None):
pass
def update(self, title, path, content, fileid, date, author=None):
pass
# check fileid AND date exists
def exists(self, fileid, date):
pass
def search(self, content=None, author=None):
pass
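# A hedged sketch (an assumption, not the author's implementation) of how
# exists() could combine the unique `fileid` and `date` fields described in
# the comments above, given the index handle stored in self.ix:
def _exists_example(ix, fileid, date):
    from whoosh.query import And, Term
    with ix.searcher() as searcher:
        query = And([Term('fileid', fileid), Term('date', date)])
        return not searcher.search(query, limit=1).is_empty()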

iemejia/incubator-beam | sdks/python/apache_beam/io/gcp/gcsio.py | Python | apache-2.0 | 23,315 | 0.005833
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Google Cloud Storage client.
This library evolved from the Google App Engine GCS client available at
https://github.com/GoogleCloudPlatform/appengine-gcs-client.
"""
# pytype: skip-file
from __future__ import absolute_import
import errno
import io
import logging
import multiprocessing
import re
import sys
import threading
import time
import traceback
from builtins import object
from apache_beam.internal.http_client import get_new_http
from apache_beam.io.filesystemio import Downloader
from apache_beam.io.filesystemio import DownloaderStream
from apache_beam.io.filesystemio import PipeStream
from apache_beam.io.filesystemio import Uploader
from apache_beam.io.filesystemio import UploaderStream
from apache_beam.utils import retry
__all__ = ['GcsIO']
_LOGGER = logging.getLogger(__name__)
# Issue a friendlier error message if the storage library is not available.
# TODO(silviuc): Remove this guard when storage is available everywhere.
try:
# pylint: disable=wrong-import-order, wrong-import-position
# pylint: disable=ungrouped-imports
import apitools.base.py.transfer as transfer
from apitools.base.py.batch import BatchApiRequest
from apitools.base.py.exceptions import HttpError
from apache_beam.internal.gcp import auth
from apache_beam.io.gcp.internal.clients import storage
except ImportError:
raise ImportError(
'Google Cloud Storage I/O not supported for this execution environment '
'(could not import storage API client).')
# This is the size of each partial-file read operation from GCS. This
# parameter was chosen to give good throughput while keeping memory usage at
# a reasonable level; the following table shows throughput reached when
# reading files of a given size with a chosen buffer size and informed the
# choice of the value, as of 11/2016:
#
# +---------------+------------+-------------+-------------+-------------+
# | | 50 MB file | 100 MB file | 200 MB file | 400 MB file |
# +---------------+------------+-------------+-------------+-------------+
# | 8 MB buffer | 17.12 MB/s | 22.67 MB/s | 23.81 MB/s | 26.05 MB/s |
# | 16 MB buffer | 24.21 MB/s | 42.70 MB/s | 42.89 MB/s | 46.92 MB/s |
# | 32 MB buffer | 28.53 MB/s | 48.08 MB/s | 54.30 MB/s | 54.65 MB/s |
# | 400 MB buffer | 34.72 MB/s | 71.13 MB/s | 79.13 MB/s | 85.39 MB/s |
# +---------------+------------+-------------+-------------+-------------+
DEFAULT_READ_BUFFER_SIZE = 16 * 1024 * 1024
# This is the number of seconds the library will wait for a partial-file read
# operation from GCS to complete before retrying.
DEFAULT_READ_SEGMENT_TIMEOUT_SECONDS = 60
# This is the size of chunks used when writing to GCS.
WRITE_CHUNK_SIZE = 8 * 1024 * 1024
# Maximum number of operations permitted in GcsIO.copy_batch() and
# GcsIO.delete_batch().
MAX_BATCH_OPERATION_SIZE = 100
# Batch endpoint URL for GCS.
# We have to specify an API specific endpoint here since Google APIs global
# batch endpoints will be deprecated on 03/25/2019.
# See https://developers.googleblog.com/2018/03/discontinuing-support-for-json-rpc-and.html. # pylint: disable=line-too-long
# Currently apitools library uses a global batch endpoint by default:
# https://github.com/google/apitools/blob/master/apitools/base/py/batch.py#L152
# TODO: remove this constant and it's usage after apitools move to using an API
# specific batch endpoint or after Beam gcsio module start using a GCS client
# library that does not use global batch endpoints.
GCS_BATCH_ENDPOINT = 'https://www.googleapis.com/batch/storage/v1'
def parse_gcs_path(gcs_path, object_optional=False):
"""Return the bucket and object names of the given gs:// path."""
match = re.match('^gs://([^/]+)/(.*)$', gcs_path)
if match is None or (match.group(2) == '' and not object_optional):
raise ValueError('GCS path must be in the form gs://<bucket>/<object>.')
return match.group(1), match.group(2)
class GcsIOError(IOError, retry.PermanentException):
"""GCS IO error that should not be retried."""
pass
class GcsIO(object):
"""Google Cloud Storage I/O client."""
def __init__(self, storage_client=None):
if storage_client is None:
storage_client = storage.StorageV1(
credentials=auth.get_service_credentials(),
get_credentials=False,
http=get_new_http(),
response_encoding=None if sys.version_info[0] < 3 else 'utf8')
self.client = storage_client
self._rewrite_cb = None
def _set_rewrite_response_callback(self, callback):
"""For testing purposes only. No backward compatibility guarantees.
Args:
callback: A function that receives ``storage.RewriteResponse``.
"""
self._rewrite_cb = callback
def open(
self,
filename,
mode='r',
read_buffer_size=DEFAULT_READ_BUFFER_SIZE,
mime_type='application/octet-stream'):
"""Open a GCS file path for reading or writing.
Args:
filename (str): GCS file path in the form ``gs://<bucket>/<object>``.
mode (str): ``'r'`` for reading or ``'w'`` for writing.
read_buffer_size (int): Buffer size to use during read operations.
      mime_type (str): Mime type to set for write operations.
Returns:
GCS file object.
Raises:
ValueError: Invalid open file mode.
"""
    if mode == 'r' or mode == 'rb':
downloader = GcsDownloader(
self.client, filename, buffer_size=read_buffer_size)
return io.BufferedReader(
DownloaderStream(
downloader, read_buffer_size=read_buffer_size, mode=mode),
buffer_size=read_buffer_size)
elif mode == 'w' or mode == 'wb':
uploader = GcsUploader(self.client, filename, mime_type)
return io.BufferedWriter(
UploaderStream(uploader, mode=mode), buffer_size=128 * 1024)
else:
raise ValueError('Invalid file open mode: %s.' % mode)
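  # Illustration only (bucket/object names assumed): per the buffer-size table
  # above, callers can trade memory for throughput by widening the buffer:
  #   GcsIO().open('gs://my-bucket/big.bin', 'rb',
  #                read_buffer_size=32 * 1024 * 1024)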
@retry.with_exponential_backoff(
retry_filter=retry.retry_on_server_errors_and_timeout_filter)
def delete(self, path):
"""Deletes the object at the given GCS path.
Args:
path: GCS file path pattern in the form gs://<bucket>/<name>.
"""
bucket, object_path = parse_gcs_path(path)
request = storage.StorageObjectsDeleteRequest(
bucket=bucket, object=object_path)
try:
self.client.objects.Delete(request)
except HttpError as http_error:
if http_error.status_code == 404:
# Return success when the file doesn't exist anymore for idempotency.
return
raise
# We intentionally do not decorate this method with a retry, as retrying is
# handled in BatchApiRequest.Execute().
def delete_batch(self, paths):
"""Deletes the objects at the given GCS paths.
Args:
paths: List of GCS file path patterns in the form gs://<bucket>/<name>,
not to exceed MAX_BATCH_OPERATION_SIZE in length.
Returns: List of tuples of (path, exception) in the same order as the paths
argument, where exception is None if the operation succeeded or
the relevant exception if the operation failed.
"""
if not paths:
return []
batch_request = BatchApiRequest(
batch_url=GCS_BATCH_ENDPOINT,
retryable_codes=retry.SERVER_ERROR_OR_TIMEOUT_CODES,
response_encoding='utf-8')
for path in paths:
bucket, object_path = parse_gcs_path(path)
request

phobson/bokeh | tests/test_bokehjs.py | Python | bsd-3-clause | 669 | 0.00299
from __future__ import print_function
import os
import pytest
from os.path import join
import sys
import unittest
import subprocess
if sys.platform == "win32":
GULP = "gulp.cmd"
else:
GULP = "gulp"
@pytest.mark.js
class TestBokehJS(unittest.TestCase):
def test_bokehjs(self):
os.chdir('bokehjs')
proc = subprocess.Popen([join('node_modules', '.bin', GULP), "test"],
stdout=subprocess.PIPE)
        out, errs = proc.communicate()
msg = out.decode('utf-8', errors='ignore')
print(msg)
if proc.returncode != 0:
assert False
if __name__ == "__main__":
unittest.main()

MichaelReiter/ProgrammingPractice | yield.py | Python | mit | 142 | 0.035211
def yield_function(n):
for i in range(n):
print "pre", i
yield i
print "post", i
for x in yield_function(10):
print x
print

cuescience/cuescience-shop | shop/tests/models/_NatSpecTemplate.py | Python | mit | 293 | 0.003413
""" @Import
|
s """
from cuescience_shop.tests.support.support import ClientTestSupport
from django.test.testcases import TestCase
class _NatSpecTemplate(TestCase):
def setUp(self):
self.client_test_support = ClientTestSupport(self)
    def test(self):
        """ @MethodBody """

YiqunPeng/Leetcode-pyq | solutions/117PopulatingNextRightPointersInEachNodeII.py | Python | gpl-3.0 | 1,205 | 0.005809
# Definition for binary tree with next pointer.
# class TreeLinkNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# self.next = None
class Solution:
# @param root, a tree link node
# @return nothing
def connect(self, root):
        def find_next(parent, child):
parent = parent.next
while parent:
if parent.left:
child.next = parent.left
return
elif parent.right:
child.next = parent.right
return
else:
parent = parent.next
if not root: return
q = [root]
while q:
            nxt = []
            for node in q:
if node.left:
if node.right:
node.left.next = node.right
else:
find_next(node, node.left)
nxt.append(node.left)
if node.right:
find_next(node, node.right)
nxt.append(node.right)
q = nxt
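# Self-contained check of connect(); TreeLinkNode is defined inline here
# because the original file only sketches it in a comment:
if __name__ == '__main__':
    class TreeLinkNode:
        def __init__(self, x):
            self.val = x
            self.left = self.right = self.next = None
    root = TreeLinkNode(1)
    root.left, root.right = TreeLinkNode(2), TreeLinkNode(3)
    root.left.left, root.right.right = TreeLinkNode(4), TreeLinkNode(7)
    Solution().connect(root)
    assert root.left.next is root.right              # 2 -> 3
    assert root.left.left.next is root.right.right   # 4 -> 7 across the gap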

johnwlockwood/karl_data | karld/io.py | Python | apache-2.0 | 641 | 0
from karld.loadump import dump_dicts_to_json_file
from karld.loadump import ensure_dir
from karld.loadump import ensure_file_path_dir
from karld.loadump import i_get_csv_data
from karld.loadump import is_file_csv
from karld.loadump import i_get_json_data
from karld.loadump import is_file_json
from karld.loadump import raw_line_reader
from karld.loadump import split_csv_file
from karld.loadump import split_file
from karld.loadump import split_file_output
from karld.loadump import split_file_output_csv
from karld.loadump import split_file_output_json
from karld.loadump import write_as_csv
from karld.loadump import write_as_json

huangkuan/hack | lib/gcloud/logging/test_sink.py | Python | apache-2.0 | 13,154 | 0.000076
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class Test__sink_name_from_path(unittest2.TestCase):
def _callFUT(self, path, project):
from gcloud.logging.sink import _sink_name_from_path
return _sink_name_from_path(path, project)
def test_invalid_path_length(self):
PATH = 'projects/foo'
PROJECT = None
self.assertRaises(ValueError, self._callFUT, PATH, PROJECT)
def test_invalid_path_format(self):
SINK_NAME = 'SINK_NAME'
PROJECT = 'PROJECT'
PATH = 'foo/%s/bar/%s' % (PROJECT, SINK_NAME)
self.assertRaises(ValueError, self._callFUT, PATH, PROJECT)
def test_invalid_project(self):
SINK_NAME = 'SINK_NAME'
PROJECT1 = 'PROJECT1'
PROJECT2 = 'PROJECT2'
PATH = 'projects/%s/sinks/%s' % (PROJECT1, SINK_NAME)
self.assertRaises(ValueError, self._callFUT, PATH, PROJECT2)
def test_valid_data(self):
SINK_NAME = 'SINK_NAME'
PROJECT = 'PROJECT'
PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME)
sink_name = self._callFUT(PATH, PROJECT)
self.assertEqual(sink_name, SINK_NAME)
class TestSink(unittest2.TestCase):
PROJECT = 'test-project'
SINK_NAME = 'sink-name'
FILTER = 'logName:syslog AND severity>=INFO'
DESTINATION_URI = 'faux.googleapis.com/destination'
def _getTargetClass(self):
from gcloud.logging.sink import Sink
return Sink
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor(self):
FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
conn = _Connection()
client = _Client(self.PROJECT, conn)
sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
client=client)
self.assertEqual(sink.name, self.SINK_NAME)
self.assertEqual(sink.filter_, self.FILTER)
self.assertEqual(sink.destination, self.DESTINATION_URI)
self.assertTrue(sink.client is client)
self.assertEqual(sink.project, self.PROJECT)
self.assertEqual(sink.full_name, FULL)
self.assertEqual(sink.path, '/%s' % (FULL,))
def test_from_api_repr_minimal(self):
CLIENT = _Client(project=self.PROJECT)
FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
RESOURCE = {
'name': FULL,
'filter': self.FILTER,
'destination': self.DESTINATION_URI,
}
klass = self._getTargetClass()
sink = klass.from_api_repr(RESOURCE, client=CLIENT)
self.assertEqual(sink.name, self.SINK_NAME)
self.assertEqual(sink.filter_, self.FILTER)
self.assertEqual(sink.destination, self.DESTINATION_URI)
self.assertTrue(sink._client is CLIENT)
self.assertEqual(sink.project, self.PROJECT)
self.assertEqual(sink.full_name, FULL)
def test_from_api_repr_w_description(self):
CLIENT = _Client(project=self.PROJECT)
FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
RESOURCE = {
'name': FULL,
'filter': self.FILTER,
'destination': self.DESTINATION_URI,
}
klass = self._getTargetClass()
sink = klass.from_api_repr(RESOURCE, client=CLIENT)
self.assertEqual(sink.name, self.SINK_NAME)
self.assertEqual(sink.filter_, self.FILTER)
self.assertEqual(sink.destination, self.DESTINATION_URI)
self.assertTrue(sink._client is CLIENT)
self.assertEqual(sink.project, self.PROJECT)
self.assertEqual(sink.full_name, FULL)
def test_from_api_repr_with_mismatched_project(self):
PROJECT1 = 'PROJECT1'
PROJECT2 = 'PROJECT2'
CLIENT = _Client(project=PROJECT1)
FULL = 'projects/%s/sinks/%s' % (PROJECT2, self.SINK_NAME)
RESOURCE = {
'name': FULL,
'filter': self.FILTER,
'destination': self.DESTINATION_URI,
}
klass = self._getTargetClass()
self.assertRaises(ValueError, klass.from_api_repr,
RESOURCE, client=CLIENT)
def test_create_w_bound_client(self):
TARGET = 'projects/%s/sinks' % (self.PROJECT,)
RESOURCE = {
'name': self.SINK_NAME,
'filter': self.FILTER,
'destination': self.DESTINATION_URI,
}
conn = _Connection(RESOURCE)
client = _Client(project=self.PROJECT, connection=conn)
sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
client=client)
sink.create()
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'POST')
self.assertEqual(req['path'], '/%s' % TARGET)
self.assertEqual(req['data'], RESOURCE)
def test_create_w_alternate_client(self):
TARGET = 'projects/%s/sinks' % (self.PROJECT,)
RESOURCE = {
'name': self.SINK_NAME,
'filter': self.FILTER,
'destination': self.DESTINATION_URI,
}
conn1 = _Connection()
client1 = _Client(project=self.PROJECT, connection=conn1)
conn2 = _Connection(RESOURCE)
client2 = _Client(project=self.PROJECT, connection=conn2)
sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
client=client1)
sink.create(client=client2)
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'POST')
self.assertEqual(req['path'], '/%s' % TARGET)
self.assertEqual(req['data'], RESOURCE)
def test_exists_miss_w_bound_client(self):
FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
conn = _Connection()
CLIENT = _Client(project=self.PROJECT, connection=conn)
sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
client=CLIENT)
self.assertFalse(sink.exists())
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % FULL)
def test_exists_hit_w_alternate_client(self):
FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
conn1 = _Connection()
CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
conn2 = _Connection({'name': FULL})
CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
client=CLIENT1)
self.assertTrue(sink.exists(client=CLIENT2))
self.assertEqual(len(conn1._requested), 0)
self.assertEqual(len(conn2._requested), 1)
req = conn2._requested[0]
self.assertEqual(req['method'], 'GET')
self.assertEqual(req['path'], '/%s' % FULL)
def test_reload_w_bound_client(self):
FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
NEW_FILTER = 'logName:syslog AND severity>=INFO'
NEW_DESTINATION_URI = 'faux.googleapis.com/other'
RESOURCE = {
'name': self.SINK_NAME,
'filter': NEW_FILTER,
'destination': NEW_DESTINATION_URI,
}
conn = _Connection(RESOURCE)
CLIENT = _Client(project=self.PROJECT, connection=conn)
sink = self._makeOne(self.SINK_NAME, s

atmark-techno/atmark-dist | user/python/Lib/test/test_unicodedata.py | Python | gpl-2.0 | 3,846 | 0.00468
""" Test script for the unicodedata module.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
import sha
encoding = 'utf-8'
def test_methods():
h = sha.sha()
for i in range(65536):
char = unichr(i)
data = [
# Predicates (single char)
char.isalnum() and u'1' or u'0',
char.isalpha() and u'1' or u'0',
char.isdecimal() and u'1' or u'0',
char.isdigit() and u'1' or u'0',
char.islower() and u'1' or u'0',
char.isnumeric() and u'1' or u'0',
char.isspace() and u'1' or u'0',
char.istitle() and u'1' or u'0',
char.isupper() and u'1' or u'0',
# Predicates (multiple chars)
(char + u'abc').isalnum() and u'1' or u'0',
(char + u'abc').isalpha() and u'1' or u'0',
(char + u'123').isdecimal() and u'1' or u'0',
(char + u'123').isdigit() and u'1' or u'0',
(char + u'abc').islower() and u'1' or u'0',
(char + u'123').isnumeric() and u'1' or u'0',
(char + u' \t').isspace() and u'1' or u'0',
(char + u'abc').istitle() and u'1' or u'0',
(char + u'ABC').isupper() and u'1' or u'0',
# Mappings (single char)
char.lower(),
char.upper(),
char.title(),
# Mappings (multiple chars)
(char + u'abc').lower(),
(char + u'ABC').upper(),
(char + u'abc').title(),
(char + u'ABC').title(),
]
h.update(u''.join(data).encode(encoding))
return h.hexdigest()
def test_unicodedata():
h = sha.sha()
for i in range(65536):
char = unichr(i)
data = [
# Properties
str(unicodedata.digit(char, -1)),
str(unicodedata.numeric(char, -1)),
str(unicodedata.decimal(char, -1)),
unicodedata.category(char),
unicodedata.bidirectional(char),
            unicodedata.decomposition(char),
str(unicodedata.mirrored(char)),
str(unicodedata.combining(char)),
]
h.update(''.join(data))
return h.hexdigest()
### Run tests
print 'Testing Unicode Database...'
print 'Methods:',
print test_methods()
# In case unicodedata is not available, this will raise an ImportError,
# but still test the above cases...
import unicodedata
print 'Functions:',
print test_unicodedata()
# Some additional checks of the API:
print 'API:',
assert unicodedata.digit(u'A',None) is None
assert unicodedata.digit(u'9') == 9
assert unicodedata.digit(u'\u215b',None) is None
assert unicodedata.digit(u'\u2468') == 9
assert unicodedata.numeric(u'A',None) is None
assert unicodedata.numeric(u'9') == 9
assert unicodedata.numeric(u'\u215b') == 0.125
assert unicodedata.numeric(u'\u2468') == 9.0
assert unicodedata.decimal(u'A',None) is None
assert unicodedata.decimal(u'9') == 9
assert unicodedata.decimal(u'\u215b',None) is None
assert unicodedata.decimal(u'\u2468',None) is None
assert unicodedata.category(u'\uFFFE') == 'Cn'
assert unicodedata.category(u'a') == 'Ll'
assert unicodedata.category(u'A') == 'Lu'
assert unicodedata.bidirectional(u'\uFFFE') == ''
assert unicodedata.bidirectional(u' ') == 'WS'
assert unicodedata.bidirectional(u'A') == 'L'
assert unicodedata.decomposition(u'\uFFFE') == ''
assert unicodedata.decomposition(u'\u00bc') == '<fraction> 0031 2044 0034'
assert unicodedata.mirrored(u'\uFFFE') == 0
assert unicodedata.mirrored(u'a') == 0
assert unicodedata.mirrored(u'\u2201') == 1
assert unicodedata.combining(u'\uFFFE') == 0
assert unicodedata.combining(u'a') == 0
assert unicodedata.combining(u'\u20e1') == 230
print 'ok'

simplegeo/trialcoverage | setup.py | Python | gpl-2.0 | 2,609 | 0.005366
#!/usr/bin/env python
# trialcoverage -- plugin to integrate Twisted trial with Ned Batchelder's coverage.py
#
# Author: Brian Warner
# Packaged by: Zooko Wilcox-O'Hearn
# Thanks to: Jonathan Lange
#
# See README.txt for licensing information.
import os, re, sys
try:
from ez_setup import use_setuptools
except ImportError:
pass
else:
use_setuptools(download_delay=0)
from setuptools import find_packages, setup
trove_classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License (GPL)",
"License :: DFSG approved",
"License :: OSI Approved :: BSD License",
"License :: Other/Proprietary License",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Natural Language :: English",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.4",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Topic :: Software Development :: Libraries",
]
PKG='trialcoverage'
VERSIONFILE = os.path.join(PKG, "_version.py")
verstr = "unknown"
try:
verstrline = open(VERSIONFILE, "rt").read()
except EnvironmentError:
pass # Okay, there is no version file.
else:
VSRE = r"^verstr = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
print "unable to find version in %s" % (VERSIONFILE,)
raise RuntimeError("if %s.py exists, it must be well-formed" % (VERSIONFILE,))
setup_requires = []
data_fnames=[ 'COPYING.SPL.txt', 'COPYING.GPL', 'COPYING.TGPPL.html', 'README.txt' ]
# In case we are building for a .deb with stdeb's sdist_dsc command, we put the
# docs in "share/doc/python-$PKG".
doc_loc = "share/doc/" + PKG
data_files = [(doc_loc, data_fnames)]
setup(name=PKG,
version=verstr,
description="a plugin to integrate Twisted trial with Ned Batchelder's coverage.py",
author='Brian Warner',
author_email='zooko@zooko.com',
url='http://tahoe-lafs.org/trac/' + PKG,
license='BSD', # see README.txt for details -- there are also alternative licences
packages=find_packages() + ['twisted'],
include_package_data=True,
setup_requires=setup_requires,
classifiers=trove_classifiers,
zip_safe=False, # I prefer unzipped for easier access.
install_requires=['coverage>=3.4a1', 'pyutil>=1.6.0', 'setuptools'],
tests_require=['mock', 'setuptools_trial >= 0.5'],
data_files=data_files,
test_suite='trialcoverage.test',
)
|
ORMAPtools/MapProduction
|
Config File Templates/ORMAP_LayersConfig.py
|
Python
|
gpl-3.0
| 5,573
| 0.024224
|
# ---------------------------------------------------------------------------
# OrmapLayersConfig.py
# Created by: Shad Campbell
# Date: 3/11/2011
# Updated by:
# Description: This is a configuration file to be customized by each county.
# Do not delete any of the items in this file. If they are not in use then
# specify their value and/or definition query to "".
# ---------------------------------------------------------------------------
LOTSANNO_LAYER="LotsAnno"
LOTSANNO_QD="\"MapNumber\" = '*MapNumber*'OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
PLATSANNO_LAYER="PlatsAnno"
PLATSANNO_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
TAXCODEANNO_LAYER="TaxCodeAnno"
TAXCODEANNO_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
TAXNUMANNO_LAYER="TaxlotNumberAnno"
TAXNUMANNO_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ACRESANNO_LAYER="TaxlotAcresAnno"
ACRESANNO_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO10_LAYER="Anno0010scale"
ANNO10_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO20_LAYER="Anno0020scale"
ANNO20_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO30_LAYER="Anno0030scale"
ANNO30_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO40_LAYER="Anno0040scale"
ANNO40_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO50_LAYER="Anno0050scale"
ANNO50_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO60_LAYER="Anno0060scale"
ANNO60_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO100_LAYER="Anno0100scale"
ANNO100_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO200_LAYER="Anno0200scale"
ANNO200_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO400_LAYER="Anno0400scale"
ANNO400_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO800_LAYER="Anno0800scale"
ANNO800_QD="\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO2000_LAYER="Anno2000scale"
ANNO2000_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
CORNER_ABOVE_LAYER="Corner"
CORNER_ABOVE_QD="\"MapNumber\"='*MapNumber*'"
TAXCODELINES_ABOVE_LAYER="TaxCodeLines - Above"
TAXCODELINES_ABOVE_QD=""
TAXLOTLINES_ABOVE_LAYER="TaxlotLines - Above"
TAXLOTLINES_ABOVE_QD="\"LineType\" <> 32"
REFLINES_ABOVE_LAYER="ReferenceLines - Above"
REFLINES_ABOVE_QD="\"MAPNUMBER\" = '*MapNumber*'"
CARTOLINES_ABOVE_LAYER="CartographicLines - Above"
CARTOLINES_ABOVE_QD=""
WATERLINES_ABOVE_LAYER="WaterLines - Above"
WATERLINES_ABOVE_QD=""
WATER_ABOVE_LAYER="Water - Above"
WATER_ABOVE_QD=""
MAPINDEXSEEMAP_LAYER=""
MAPINDEXSEEMAP_QD=""
MAPINDEX_LAYER="SeeMaps"
MAPINDEX_QD="\"IndexMap\" = '*MapNumber*'"
CORNER_BELOW_LAYER="Corner - Below"
CORNER_BELOW_QD=""
TAXCODELINES_BELOW_LAYER="TaxCodeLines - Below"
TAXCODELINES_BELOW_QD=""
TAXLOTLINES_BELOW_LAYER="TaxlotLines - Below"
TAXLOTLINES_BELOW_QD=""
REFLINES_BELOW_LAYER="ReferenceLines - Below"
REFLINES_BELOW_QD=""
CARTOLINES_BELOW_LAYER="CartographicLines - Below"
CARTOLINES_BELOW_QD=""
WATERLINES_BELOW_LAYER="WaterLines - Below"
WATERLINES_BELOW_QD=""
WATER_BELOW_LAYER="Water - Below"
WATER_BELOW_QD=""
PAGELAYOUT_TABLE="giscarto.CREATOR_ASR.PAGELAYOUTELEMENTS"
CANCELLEDNUMBERS_TABLE="giscarto.CREATOR_ASR.CANCELLEDNUMBERS"
CUSTOMDEFINITIONQUERIES_TABLE="CustomDefinitionQueries"
EXTRA1_LAYER="Arrow0010scale"
EXTRA1_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA2_LAYER="Arrow0020scale"
EXTRA2_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA3_LAYER="Arrow0030scale"
EXTRA3_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA4_LAYER="Arrow0040scale"
EXTRA4_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA5_LAYER="Arrow0050scale"
EXTRA5_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA6_LAYER="Arrow0100scale"
EXTRA6_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA7_LAYER="Arrow0200scale"
EXTRA7_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA8_LAYER="Arrow0400scale"
EXTRA8_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA9_LAYER="Arrow2000scale"
EXTRA9_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA10_LAYER="MapSecLines - Below"
EXTRA10_QD="\"MapNumber\"='*MapNumber*'"
EXTRA11_LAYER="Railroad"
EXTRA11_QD="CL <> 'Y'"
EXTRA12_LAYER="MapArea"
EXTRA12_QD="\"MapNumber\"='*MapNumber*'"
EXTRA13_LAYER=""
EXTRA13_QD=""
EXTRA14_LAYER="Taxlots - Above"
EXTRA14_QD="\"MapNumber\"='*MapNumber*'"
EXTRA15_LAYER="Arrow0060scale"
EXTRA15_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA16_LAYER="Landmarks"
EXTRA16_QD="\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA17_LAYER=""
EXTRA17_QD=""
EXTRA18_LAYER=""
EXTRA18_QD=""
EXTRA19_LAYER=""
EXTRA19_QD=""
EXTRA20_LAYER=""
EXTRA20_QD=""
|
r4mp/evolver_server
|
evolver_server/app.py
|
Python
|
agpl-3.0
| 486
| 0.012346
|
import asyncio
from server import Server
#def main(arguments):
def main():
loop = asyncio.get_event_loop()
server = Server()
asyncio.async(server.run_server())
try:
loop.run_forever()
except KeyboardInterrupt:
print('Received interrupt, closing')
server.close()
finally:
loop.stop()
loop.close()
if __name__ == '__main__':
#arguments = docopt(__doc__, version='evolver_server 0.1')
#main(arguments)
main()
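# A hedged alternative sketch: on Python >= 3.4.4 the now-deprecated
# asyncio.async() alias used above is normally spelled
#
#     asyncio.ensure_future(server.run_server())
#
# with identical behaviour.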
|
saurabh6790/test-med-app
|
patches/october_2013/p05_delete_gl_entries_for_cancelled_vouchers.py
|
Python
|
agpl-3.0
| 683
| 0.02489
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
def execute():
import webnotes
entries = webnotes.conn.sql("""select voucher_type, voucher_no
from `tabGL Entry` group by voucher_type, voucher_no""", as_dict=1)
for entry in entries:
try:
cancelled_voucher = webnotes.conn.sql("""select name from `tab%s` where name = %s
and docstatus=2""" % (entry['voucher_type'], "%s"), entry['voucher_no'])
if cancelled_voucher:
				webnotes.conn.sql("""delete from `tabGL Entry` where voucher_type = %s and
voucher_no = %s""", (entry['voucher_type'], entry['voucher_no']))
except:
pass
|
Kleptobismol/scikit-bio
|
skbio/tree/_exception.py
|
Python
|
bsd-3-clause
| 814
| 0
|
from __future__ import absolute_import, division, print_function
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
class TreeError(Exception):
"""General tree error"""
pass
class NoLengthError(TreeError):
"""Missing length when expected"""
pass
class DuplicateNodeError(TreeError):
"""Duplicate nodes with identical names"""
pass
class MissingNodeError(TreeError):
"""Expecting a node"""
pass
class NoParentError(MissingNodeError):
"""Missing a parent"""
pass
|
vladan-m/ggrc-core
|
src/ggrc/utils.py
|
Python
|
apache-2.0
| 3,658
| 0.013395
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: david@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
import datetime
import json
import sys
import time
from flask import current_app
class DateTimeEncoder(json.JSONEncoder):
"""Custom JSON Encoder to handle datetime objects
from:
`http://stackoverflow.com/questions/12122007/python-json-encoder-to-support-datetime`_
also consider:
`http://hg.tryton.org/2.4/trytond/file/ade5432ac476/trytond/protocols/jsonrpc.py#l53`_
"""
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
elif isinstance(obj, datetime.date):
return obj.isoformat()
elif isinstance(obj, datetime.timedelta):
return (datetime.datetime.min + obj).time().isoformat()
else:
return super(DateTimeEncoder, self).default(obj)
class UnicodeSafeJsonWrapper(dict):
"""JSON received via POST has keys as unicode. This makes get work with plain
`str` keys.
"""
def __getitem__(self, key):
ret = self.get(key)
if ret is None:
raise KeyError(key)
return ret
def get(self, key, default=None):
return super(UnicodeSafeJsonWrapper, self).get(unicode(key), default) # noqa
def as_json(obj, **kwargs):
return json.dumps(obj, cls=DateTimeEncoder, **kwargs)
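# A brief usage sketch (not from the source): DateTimeEncoder lets as_json()
# serialize datetime values that the stock json encoder rejects, e.g.
#
#     >>> as_json({'at': datetime.datetime(2015, 1, 2, 3, 4, 5)})
#     '{"at": "2015-01-02T03:04:05"}'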
def service_for(obj):
module = sys.modules['ggrc.services']
if type(obj) is str or type(obj) is unicode: # noqa
model_type = obj
else:
model_type = obj.__class__.__name__
return getattr(module, model_type, None)
def url_for(obj, id=None):
service = service_for(obj)
if service is None:
return None
if id is not None:
return service.url_for(id=id)
return service.url_for(obj)
def view_service_for(obj):
module = sys.modules['ggrc.views']
if type(obj) is str or type(obj) is unicode: # noqa
model_type = obj
else:
model_type = obj.__class__.__name__
return getattr(module, model_type, None)
def view_url_for(obj, id=None):
service = view_service_for(obj)
if service is None:
return None
if id is not None:
return service.url_for(id=id)
return service.url_for(obj)
def encoded_dict(in_dict):
# http://stackoverflow.com/questions/6480723/urllib-urlencode-doesnt-like-unicode-values-how-about-this-workaround
out_dict = {}
for k, v in in_dict.iteritems():
if isinstance(v, unicode): # noqa
v = v.encode('utf8')
elif isinstance(v, str):
# Must be encoded in UTF-8
v.decode('utf8')
out_dict[k] = v
return out_dict
def merge_dict(destination, source, path=None):
"""merges source into destination"""
if path is None:
path = []
for key in source:
    if key in destination:
if isinstance(destination[key], dict) and isinstance(source[key], dict):
merge_dict(destination[key], source[key], path + [str(key)])
elif destination[key] == source[key]:
pass # same leaf value
else:
raise Exception('Conflict at %s' % '.'.join(path + [str(key)]))
else:
destination[key] = source[key]
return destination
def merge_dicts(*args):
result = {}
for arg in args:
result = merge_dict(result, arg)
return result
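# A small illustration (hedged, not part of the original module) of how
# merge_dict/merge_dicts combine nested dicts and reject conflicting leaves:
#
#     >>> merge_dicts({'a': {'x': 1}}, {'a': {'y': 2}})
#     {'a': {'x': 1, 'y': 2}}
#     >>> merge_dicts({'a': 1}, {'a': 2})
#     Traceback (most recent call last):
#         ...
#     Exception: Conflict at a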
class BenchmarkContextManager(object):
def __init__(self, message):
self.message = message
def __enter__(self):
self.start = time.time()
def __exit__(self, exc_type, exc_value, exc_trace):
end = time.time()
current_app.logger.info("{:.4f} {}".format(end - self.start, self.message))
benchmark = BenchmarkContextManager
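# Usage sketch (hedged): within a Flask application context, the benchmark
# alias logs the wall-clock time of the wrapped block via current_app.logger:
#
#     with benchmark("reindex objects"):
#         reindex_all()  # hypothetical workload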
|
ccordoba12/codenode
|
codenode/frontend/backend/rpc.py
|
Python
|
bsd-3-clause
| 499
| 0.008016
|
import xmlrpclib
def listEngineTypes(address):
client = xmlrpclib.ServerProxy(address + '/admin/')
engine_types = client.listEngineTypes()
return engine_types
def allocateEngine(address, engine_type):
client = xmlrpclib.ServerProxy(str(address) + '/admin/')
access_id = client.allocateEngine(str(engine_type))
return access_id
def interruptInstance(address, instance_id):
    client = xmlrpclib.ServerProxy(address + '/admin/')
client.interruptInstance(instance_id)
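# Hypothetical usage sketch (the address below is an assumption, not from the
# source); each helper opens an XML-RPC proxy on the backend's /admin/ URL:
#
#     engine_types = listEngineTypes('http://localhost:8000')
#     access_id = allocateEngine('http://localhost:8000', engine_types[0])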
|
soscpd/bee
|
root/tests/zguide/examples/Python/pathosub.py
|
Python
|
mit
| 636
| 0.004717
|
#
# Pathological subscriber
# Subscribes to one random topic and prints received messages
#
import sys
import time
from random import randint
import zmq
def main(url=None):
ctx = zmq.Context.instance()
subscriber = ctx.socket(zmq.SUB)
if url is None:
url = "tcp://localhost:5556"
subscriber.connect(url)
subscription = b"%03d" % randint(0,999)
subscriber.setsockopt(zmq.SUBSCRIBE, subscription)
while True:
topic, data = subscriber.recv_multipart()
        assert topic == subscription
print data
if __name__ == '__main__':
main(sys.argv[1] if len(sys.argv) > 1 else None)
|
darkspring2015/PyDatcomLab
|
PyDatcomLab/Core/datcomEventManager.py
|
Python
|
mit
| 2,614
| 0.013364
|
# -*- coding: utf-8 -*-
# Currently in the design and development stage
from PyQt5 import QtCore
class datcomEvent(QtCore.QEvent):
"""
    Custom Datcom event system
"""
dtTypeID = QtCore.QEvent.registerEventType()
def __init__(self, type = dtTypeID):
"""
        Constructor
"""
super(datcomEvent, self).__init__(type)
        self.eventLabel = ''  # rule name, e.g. 'NMACHLinkTable', 'RuleNumToCount', 'RuleIndexToCombo', ...
        self.controlVariables = {}  # names/values of the triggering variables; some rules fire on several, hence a dict, e.g. {'FLTCON/NMACH': '1'}
class datcomEventWarpper(QtCore.QObject):
"""
    Used by datcomModel; acts as the registration center
"""
def __init__(self):
"""
        Model of the registration center:
        receiver        | instance of the registrant  | receiver of the event
        eventLabel      | event label                 | category of events to monitor
        controlVariable | event parameters and values | control variables and values
"""
        super(datcomEventWarpper, self).__init__()
        # registration center
        # single-entry template: {'receiver': None, 'eventLabel': '', 'controlVariables': []}
self.registerCenter = []
def registerObject(self, receiver, eventLabel, controlVariables):
"""
        Register an event receiver with the registration center
        @param receiver reference to the widget to receive the event
        @type QObject
        @param eventLabel the event label
        @type str
        @param controlVariables the variables that trigger the event
        @type str
        Note: the matching unregister function should be called explicitly wherever the object is destroyed
"""
if controlVariables is None or type(controlVariables) != list or\
eventLabel is None or eventLabel =='' or \
receiver is None:
            self.logger.warning("Invalid arguments passed to the register function!")
            return
        # begin the registration process
def isRegistered(self, iDict):
"""
        Check whether iDict is already registered in the store; return True if it is (including the current registration), False otherwise
"""
def simluateSendEvent(self, eventLabel, eventStr):
"""
"""
if eventLabel in self.doc:
for iR in self.doc[eventLabel]['Receiver']:
tEvent = datcomEvent()
tEvent.Url = eventLabel
tEvent.Value = {eventLabel:eventStr}
tApp = QtCore.QCoreApplication.instance()
tApp.notify(iR, tEvent)
|
dc3-plaso/plaso
|
tests/parsers/winreg_plugins/lfu.py
|
Python
|
apache-2.0
| 7,326
| 0.000956
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the Less Frequently Used (LFU) Windows Registry plugin."""
import unittest
from dfdatetime import filetime as dfdatetime_filetime
from dfwinreg import definitions as dfwinreg_definitions
from dfwinreg import fake as dfwinreg_fake
from plaso.formatters import winreg # pylint: disable=unused-import
from plaso.lib import timelib
from plaso.parsers.winreg_plugins import lfu
from tests.parsers.winreg_plugins import test_lib
class TestBootExecutePlugin(test_lib.RegistryPluginTestCase):
"""Tests for the LFU BootExecute Windows Registry plugin."""
def _CreateTestKey(self, key_path, time_string):
"""Creates Registry keys and values for testing.
Args:
key_path: the Windows Registry key path.
time_string: string containing the key last written date and time.
Returns:
A Windows Registry key (instance of dfwinreg.WinRegistryKey).
"""
filetime = dfdatetime_filetime.Filetime()
filetime.CopyFromString(time_string)
registry_key = dfwinreg_fake.FakeWinRegistryKey(
u'Session Manager', key_path=key_path,
last_written_time=filetime.timestamp, offset=153)
value_data = u'autocheck autochk *\x00'.encode(u'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
u'BootExecute', data=value_data,
        data_type=dfwinreg_definitions.REG_MULTI_SZ, offset=123)
registry_key.AddValue(registry_value)
value_data = u'2592000'.encode(u'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
u'CriticalSectionTimeout', data=value_data,
        data_type=dfwinreg_definitions.REG_SZ, offset=153)
registry_key.AddValue(registry_value)
value_data = u'\x00'.encode(u'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
u'ExcludeFromKnownDlls', data=value_data,
data_type=dfwinreg_definitions.REG_MULTI_SZ, offset=163)
registry_key.AddValue(registry_value)
value_data = u'0'.encode(u'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
u'GlobalFlag', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
offset=173)
registry_key.AddValue(registry_value)
value_data = u'0'.encode(u'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
u'HeapDeCommitFreeBlockThreshold', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=183)
registry_key.AddValue(registry_value)
value_data = u'0'.encode(u'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
u'HeapDeCommitTotalFreeThreshold', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=203)
registry_key.AddValue(registry_value)
value_data = u'0'.encode(u'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
u'HeapSegmentCommit', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=213)
registry_key.AddValue(registry_value)
value_data = u'0'.encode(u'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
u'HeapSegmentReserve', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=223)
registry_key.AddValue(registry_value)
value_data = u'2'.encode(u'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
u'NumberOfInitialSessions', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=243)
registry_key.AddValue(registry_value)
return registry_key
def testProcess(self):
"""Tests the Process function."""
key_path = (
u'HKEY_LOCAL_MACHINE\\System\\ControlSet001\\Control\\Session Manager')
time_string = u'2012-08-31 20:45:29'
registry_key = self._CreateTestKey(key_path, time_string)
plugin_object = lfu.BootExecutePlugin()
storage_writer = self._ParseKeyWithPlugin(registry_key, plugin_object)
self.assertEqual(len(storage_writer.events), 2)
event_object = storage_writer.events[0]
# This should just be the plugin name, as we're invoking it directly,
# and not through the parser.
self.assertEqual(event_object.parser, plugin_object.plugin_name)
expected_timestamp = timelib.Timestamp.CopyFromString(time_string)
self.assertEqual(event_object.timestamp, expected_timestamp)
expected_message = (
u'[{0:s}] BootExecute: autocheck autochk *').format(key_path)
expected_short_message = u'{0:s}...'.format(expected_message[0:77])
self._TestGetMessageStrings(
event_object, expected_message, expected_short_message)
event_object = storage_writer.events[1]
expected_message = (
u'[{0:s}] '
u'CriticalSectionTimeout: 2592000 '
u'ExcludeFromKnownDlls: [] '
u'GlobalFlag: 0 '
u'HeapDeCommitFreeBlockThreshold: 0 '
u'HeapDeCommitTotalFreeThreshold: 0 '
u'HeapSegmentCommit: 0 '
u'HeapSegmentReserve: 0 '
u'NumberOfInitialSessions: 2').format(key_path)
expected_short_message = u'{0:s}...'.format(expected_message[0:77])
self._TestGetMessageStrings(
event_object, expected_message, expected_short_message)
class TestBootVerificationRegistry(test_lib.RegistryPluginTestCase):
"""Tests for the LFU BootVerification Windows Registry plugin."""
def _CreateTestKey(self, key_path, time_string):
"""Creates Registry keys and values for testing.
Args:
key_path: the Windows Registry key path.
time_string: string containing the key last written date and time.
Returns:
A Windows Registry key (instance of dfwinreg.WinRegistryKey).
"""
filetime = dfdatetime_filetime.Filetime()
filetime.CopyFromString(time_string)
registry_key = dfwinreg_fake.FakeWinRegistryKey(
u'BootVerificationProgram', key_path=key_path,
last_written_time=filetime.timestamp, offset=153)
value_data = u'C:\\WINDOWS\\system32\\googleupdater.exe'.encode(
u'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
u'ImagePath', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
offset=123)
registry_key.AddValue(registry_value)
return registry_key
def testProcess(self):
"""Tests the Process function."""
key_path = u'\\ControlSet001\\Control\\BootVerificationProgram'
time_string = u'2012-08-31 20:45:29'
registry_key = self._CreateTestKey(key_path, time_string)
plugin_object = lfu.BootVerificationPlugin()
storage_writer = self._ParseKeyWithPlugin(registry_key, plugin_object)
self.assertEqual(len(storage_writer.events), 1)
event_object = storage_writer.events[0]
# This should just be the plugin name, as we're invoking it directly,
# and not through the parser.
self.assertEqual(event_object.parser, plugin_object.plugin_name)
expected_timestamp = timelib.Timestamp.CopyFromString(time_string)
self.assertEqual(event_object.timestamp, expected_timestamp)
expected_message = (
u'[{0:s}] '
u'ImagePath: C:\\WINDOWS\\system32\\googleupdater.exe').format(
key_path)
expected_short_message = u'{0:s}...'.format(expected_message[0:77])
self._TestGetMessageStrings(
event_object, expected_message, expected_short_message)
if __name__ == '__main__':
unittest.main()
|
WQuanfeng/django-summernote
|
django_summernote/__init__.py
|
Python
|
mit
| 258
| 0
|
version_info = (0, 6, 16)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "Park Hyunwoo <ez.amiryo@gmail.com>"
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
|
neozhangthe1/scraper
|
douban/photo/photo/misc/middlewares.py
|
Python
|
gpl-2.0
| 587
| 0.005111
|
#encoding: utf-8
from random import choice
from .helper import gen_bids
class CustomCookieMiddleware(object):
def __init__(self):
self.bids = gen_bids()
    def process_request(self, request, spider):
request.headers["Cookie"] = 'bid="%s"' % choice(self.bids)
class CustomUserAgentMiddleware(object):
def process_request(self, request, spider):
ug = "Baiduspider"
request.headers["User-Agent"] = ug
class CustomHeadersMiddleware(object):
def process_request(self, request, spider):
request.headers["Accept-Language"] = "zh-CN,zh"
|
sebrandon1/neutron
|
neutron/agent/l3/dvr_snat_ns.py
|
Python
|
apache-2.0
| 1,810
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib import constants
from oslo_log import log as logging
from neutron.agent.l3 import namespaces
from neutron.agent.linux import ip_lib
LOG = logging.getLogger(__name__)
SNAT_NS_PREFIX = 'snat-'
SNAT_INT_DEV_PREFIX = constants.SNAT_INT_DEV_PREFIX
class SnatNamespace(namespaces.Namespace):
def __init__(self, router_id, agent_conf, driver, use_ipv6):
self.router_id = router_id
name = self.get_snat_ns_name(router_id)
super(SnatNamespace, self).__init__(
name, agent_conf, driver, use_ipv6)
@classmethod
def get_snat_ns_name(cls, router_id):
return namespaces.build_ns_name(SNAT_NS_PREFIX, router_id)
@namespaces.check_ns_existence
def delete(self):
ns_ip = ip_lib.IPWrapper(namespace=self.name)
        for d in ns_ip.get_devices(exclude_loopback=True):
if d.name.startswith(SNAT_INT_DEV_PREFIX):
LOG.debug('Unplugging DVR device %s', d.name)
self.driver.unplug(d.name, namespace=self.name,
prefix=SNAT_INT_DEV_PREFIX)
# TODO(mrsmith): delete ext-gw-port
LOG.debug('DVR: destroy snat ns: %s', self.name)
super(SnatNamespace, self).delete()
|
Balannen/LSMASOMM
|
atom3/Kernel/GraphGrammar/GraphGrammar.py
|
Python
|
gpl-3.0
| 1,680
| 0.021429
|
# _ GraphGrammar.py __________________________________________________
# This class implements a graph grammar, that is basically an ordered
# collection of GGrule's
# ____________________________________________________________________
from GGrule import *
class GraphGrammar:
def __init__(self, GGrules = None):
"Constructor, it receives GGrules, that is a list of GGrule elements"
        if GGrules is None: # guard: the default argument would otherwise break len() below
            GGrules = []
        self.GGrules = [] # We'll insert rules by order of execution
        self.rewritingSystem = None # No rewriting system assigned yet
while len(self.GGrules) < len(GGrules): # iterate until each rule is inserted
min = 30000 # set mininum number to a very high number
minRule = None # pointer to rule to be inserted
for rule in GGrules: # search for the minimum execution order that is not inserted
if rule.executionOrder < min and not rule in self.GGrules:
min = rule.executionOrder
minRule = rule
self.GGrules.append(minRule)
def setGraphRewritingSystem(self, rs):
"Sets the attribute rewritingSystem to rs and also calls the same method for each rule"
self.rewritingSystem = rs
for rule in self.GGrules:
rule.setGraphGrammar(self)
rule.setGraphRewritingSystem(rs)
def initialAction(self, graph): # , atom3i = None):
"action to be performed before the graph grammar starts its execution (must be overriden)"
pass
def finalAction(self, graph): #, atom3i = None):
"action to be performed after the graph grammar starts its execution (must be overriden)"
pass
|
PW-Sat2/PWSat2OBC
|
integration_tests/emulator/beacon_parser/resistance_sensors.py
|
Python
|
agpl-3.0
| 4,701
| 0.003829
|
RES = 1
TEMP = 0
OUT_OF_RANGE = -999
# Ni1000 relationship between resistances and temperatures [(temp0,resistance0), (temp1,resistance1), (tempN,resistanceN)]
ni1000_5000ppm_values = [(-80, 672.0), (-75, 692.0), (-70, 712.0), (-60, 751.8), (-50, 790.9), (-40, 830.8),
(-30, 871.7), (-20, 913.5), (-10, 956.2), (0, 1000.0),
(10, 1044.8), (20, 1090.7), (30, 1137.6), (40, 1185.7), (50, 1235.0), (60, 1285.4),
(70, 1337.1), (80, 1390.1), (90, 1444.4),
(100, 1500.0), (110, 1557.0), (120, 1615.4), (130, 1675.2), (140, 1736.5), (150, 1799.3),
(160, 1863.6), (170, 1929.5),
(180, 1997.0), (190, 2066.1), (200, 2137.0), (210, 2209.5), (220, 2283.7), (230, 2359.8),
(240, 2437.6), (250, 2517.3)]
ni1000_6180ppm_values = [(-70, 647.8), (-60, 695.2), (-50, 742.6), (-40, 791.3), (-30, 841.5), (-20, 893), (-10, 945.8),
(0, 1000.0),
(10, 1055.5), (20, 1112.4), (30, 1170.6), (40, 1230.1), (50, 1291.1), (60, 1353.4),
(70, 1417.2), (80, 1482.5), (90, 1549.3),
(100, 1617.8), (110, 1687.9), (120, 1759.7), (130, 1833.3), (140, 1908.9), (150, 1986.3),
(160, 2065.9), (170, 2147.6),
(180, 2231.5), (190, 2317.8), (200, 2406.6), (210, 2498), (220, 2592), (230, 2688.9),
(240, 2788.7), (250, 2891.6)]
pt1000_values = [(-70, 723.35), (-60, 763.28), (-50, 803.06), (-40, 842.71), (-30, 882.22), (-20, 921.6), (-10, 960.86),
(0, 1000),
(10, 1039), (20, 1077.9), (30, 1116.7), (40, 1155.4), (50, 1194), (60, 1232.4), (70, 1270.8),
(80, 1309), (90, 1347.1),
(100, 1385.1), (110, 1422.9), (120, 1460.7), (130, 1498.3), (140, 1535.8), (150, 1573.9),
                 (160, 1610.5), (170, 1647.7),
(180, 1684.8), (190, 1721.7), (200, 1758.6), (210, 1795.3), (220, 1831.9), (230, 1868.4),
(240, 1904.7), (250, 1941)]
# Public functions
def ni1000_5000ppm_res_to_temp(ni1000_resistance):
"""
This function converts an Ni1000 5000ppm sensor resistance to temperature
Parameters:
===========
ni1000_resistance: Ni1000 5000ppm resistance in Ohms
Return:
===========
Ni1000 5000ppm resistance converted to temperature
"""
return res_to_temp(ni1000_5000ppm_values, ni1000_resistance)
def pt1000_res_to_temp(pt1000_resistance):
"""
    This function converts a PT1000 sensor resistance to temperature
Parameters:
===========
pt1000_resistance: PT1000 resistance in Ohms
Return:
===========
PT1000 resistance converted to temperature
"""
    return res_to_temp(pt1000_values, pt1000_resistance)
# Public functions
def ni1000_6180ppm_res_to_temp(ni1000_resistance):
"""
This function converts an Ni1000 6180ppm sensor resistance to temperature
Parameters:
===========
ni1000_resistance: Ni1000 6180ppm resistance in Ohms
Return:
===========
    Ni1000 6180ppm resistance converted to temperature
"""
return res_to_temp(ni1000_6180ppm_values, ni1000_resistance)
# Private functions
def res_to_temp(values_list, resistance):
"""
This function converts a sensor resistance to temperature
Parameters:
===========
values_list: relationship between resistances and temperatures [(temp0,resistance0), (temp1,resistance1), (tempN,resistanceN)]
resistance: a sensor resistance in Ohms
Return:
===========
Sensor resistance converted to temperature
"""
first_resistance = values_list[0][RES]
last_resistance = values_list[-1][RES]
start_index = 0
end_index = -1
calculated_temp = OUT_OF_RANGE
if (resistance >= first_resistance) and (resistance <= last_resistance):
while values_list[start_index][RES] < resistance:
start_index += 1
while values_list[end_index][RES] > resistance:
end_index -= 1
delta_res = abs(values_list[start_index][RES] - values_list[end_index][RES])
delta_temp = abs(values_list[start_index][TEMP] - values_list[end_index][TEMP])
if delta_temp == 0:
return values_list[start_index][TEMP]
temp_coefficient = delta_res / delta_temp
calculated_temp = ((resistance - values_list[end_index][RES]) / temp_coefficient) + values_list[end_index][TEMP]
return calculated_temp
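# A short usage sketch (hedged, not part of the original module):
if __name__ == '__main__':
    # 1000.0 Ohm is the 0 degC reference point in all three tables above
    print(pt1000_res_to_temp(1000.0))         # -> 0
    print(ni1000_5000ppm_res_to_temp(100.0))  # -> -999 (below table range)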
|
rickhurst/Django-non-rel-blog
|
blogengine/admin.py
|
Python
|
bsd-3-clause
| 95
| 0
|
from blogengine.models import Post
from django.contrib import admin
admin.site.register(Post)
|
takeicoin/takeicoin
|
share/qt/clean_mac_info_plist.py
|
Python
|
mit
| 898
| 0.017817
|
#!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the TakeiCoin-Qt.app contains the right plist (including the right version)
# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)
from string import Template
from datetime import date
bitcoinDir = "./";
inFile = bitcoinDir+"/share/qt/Info.plist"
outFile = "TakeiCoin-Qt.app/Contents/Info.plist"
version = "unknown";
fileForGrabbingVersion = bitcoinDir+"bitcoin-qt.pro"
for line in open(fileForGrabbingVersion):
lineArr = line.replace(" ", "").split("=");
if lineArr[0].startswith("VERSION"):
version = lineArr[1].replace("\n", "");
fIn = open(inFile, "r")
fileContent = fIn.read()
s = Template(fileContent)
newFileContent = s.substitute(VERSION=version,YEAR=date.today().year)
fOut = open(outFile, "w");
fOut.write(newFileContent);
print "Info.plist fresh created"
|
gsnedders/Template-Python
|
t/string_test.py
|
Python
|
artistic-2.0
| 6,966
| 0.001005
|
from template.test import TestCase, main
class StringTest(TestCase):
def testString(self):
self.Expect(DATA)
DATA = r"""
-- test --
[% USE String -%]
string: [[% String.text %]]
-- expect --
string: []
-- test --
[% USE String 'hello world' -%]
string: [[% String.text %]]
-- expect --
string: [hello world]
-- test --
[% USE String text='hello world' -%]
string: [[% String.text %]]
-- expect --
string: [hello world]
-- test --
[% USE String -%]
string: [[% String %]]
-- expect --
string: []
-- test --
[% USE String 'hello world' -%]
string: [[% String %]]
-- expect --
string: [hello world]
-- test --
[% USE String text='hello world' -%]
string: [[% String %]]
-- expect --
string: [hello world]
-- test --
[% USE String text='hello' -%]
string: [[% String.append(' world') %]]
string: [[% String %]]
-- expect --
string: [hello world]
string: [hello world]
-- test --
[% USE String text='hello' -%]
[% copy = String.copy -%]
string: [[% String %]]
string: [[% copy %]]
-- expect --
string: [hello]
string: [hello]
-- test --
[% USE String -%]
[% hi = String.new('hello') -%]
[% lo = String.new('world') -%]
[% hw = String.new(text="$hi $lo") -%]
hi: [[% hi %]]
lo: [[% lo %]]
hw: [[% hw %]]
-- expect --
hi: [hello]
lo: [world]
hw: [hello world]
-- test --
[% USE hi = String 'hello' -%]
[% lo = hi.new('world') -%]
hi: [[% hi %]]
lo: [[% lo %]]
-- expect --
hi: [hello]
lo: [world]
-- test --
[% USE hi = String 'hello' -%]
[% lo = hi.copy -%]
hi: [[% hi %]]
lo: [[% lo %]]
-- expect --
hi: [hello]
lo: [hello]
-- test --
[% USE hi = String 'hello' -%]
[% lo = hi.copy.append(' world') -%]
hi: [[% hi %]]
lo: [[% lo %]]
-- expect --
hi: [hello]
lo: [hello world]
-- test --
[% USE hi = String 'hello' -%]
[% lo = hi.new('hey').append(' world') -%]
hi: [[% hi %]]
lo: [[% lo %]]
-- expect --
hi: [hello]
lo: [hey world]
-- test --
[% USE hi=String "hello world\n" -%]
hi: [[% hi %]]
[% lo = hi.chomp -%]
hi: [[% hi %]]
lo: [[% lo %]]
-- expect --
hi: [hello world
]
hi: [hello world]
lo: [hello world]
-- test --
[% USE foo=String "foop" -%]
[[% foo.chop %]]
[[% foo.chop %]]
-- expect --
[foo]
[fo]
-- test --
[% USE hi=String "hello" -%]
left: [[% hi.copy.left(11) %]]
right: [[% hi.copy.right(11) %]]
center: [[% hi.copy.center(11) %]]
centre: [[% hi.copy.centre(12) %]]
-- expect --
left: [hello ]
right: [ hello]
center: [ hello ]
centre: [ hello ]
-- test --
[% USE str=String('hello world') -%]
hi: [[% str.upper %]]
hi: [[% str %]]
lo: [[% str.lower %]]
cap: [[% str.capital %]]
-- expect --
hi: [HELLO WORLD]
hi: [HELLO WORLD]
lo: [hello world]
cap: [Hello world]
-- test --
[% USE str=String('hello world') -%]
len: [[% str.length %]]
-- expect --
len: [11]
-- test --
[% USE str=String(" \n\n\t\r hello\nworld\n\r \n \r") -%]
[[% str.trim %]]
-- expect --
[hello
world]
-- test --
[% USE str=String(" \n\n\t\r hello \n \n\r world\n\r \n \r") -%]
[[% str.collapse %]]
-- expect --
[hello world]
-- test --
[% USE str=String("hello") -%]
[[% str.append(' world') %]]
[[% str.prepend('well, ') %]]
-- expect --
[hello world]
[well, hello world]
-- test --
[% USE str=String("hello") -%]
[[% str.push(' world') %]]
[[% str.unshift('well, ') %]]
-- expect --
[hello world]
[well, hello world]
-- test --
[% USE str=String('foo bar') -%]
[[% str.copy.pop(' bar') %]]
[[% str.copy.shift('foo ') %]]
-- expect --
[foo]
[bar]
-- test --
[% USE str=String('Hello World') -%]
[[% str.copy.truncate(5) %]]
[[% str.copy.truncate(8, '...') %]]
[[% str.copy.truncate(20, '...') %]]
-- expect --
[Hello]
[Hello...]
[Hello World]
-- test --
[% USE String('foo') -%]
[[% String.append(' ').repeat(4) %]]
-- expect --
[foo foo foo foo ]
-- test --
[% USE String('foo') -%]
[% String.format("[%s]") %]
-- expect --
[foo]
-- test --
[% USE String('foo bar foo baz') -%]
[[% String.replace('foo', 'oof') %]]
-- expect --
[oof bar oof baz]
-- test --
[% USE String('foo bar foo baz') -%]
[[% String.copy.remove('foo\s*') %]]
[[% String.copy.remove('ba[rz]\s*') %]]
-- expect --
[bar baz]
[foo foo ]
-- test --
[% USE String('foo bar foo baz') -%]
[[% String.split.join(', ') %]]
-- expect --
[foo, bar, foo, baz]
-- test --
[% USE String('foo bar foo baz') -%]
[[% String.split(' bar ').join(', ') %]]
-- expect --
[foo, foo baz]
-- test --
[% USE String('foo bar foo baz') -%]
[[% String.split(' bar ').join(', ') %]]
-- expect --
[foo, foo baz]
-- test --
[% USE String('foo bar foo baz') -%]
[[% String.split('\s+').join(', ') %]]
-- expect --
[foo, bar, foo, baz]
-- test --
[% USE String('foo bar foo baz') -%]
[[% String.split('\s+', 2).join(', ') %]]
-- expect --
[foo, bar foo baz]
-- test --
[% USE String('foo bar foo baz') -%]
[% String.search('foo') ? 'ok' : 'not ok' %]
[% String.search('fooz') ? 'not ok' : 'ok' %]
[% String.search('^foo') ? 'ok' : 'not ok' %]
[% String.search('^bar') ? 'not ok' : 'ok' %]
-- expect --
ok
ok
ok
ok
-- test --
[% USE String 'foo < bar' filter='html' -%]
[% String %]
-- expect --
foo < bar
-- test --
[% USE String 'foo bar' filter='uri' -%]
[% String %]
-- expect --
foo%20bar
-- test --
[% USE String 'foo bar' filters='uri' -%]
[% String %]
-- expect --
foo%20bar
-- test --
[% USE String ' foo bar ' filters=['trim' 'uri'] -%]
[[% String %]]
-- expect --
[foo%20bar]
-- test --
[% USE String ' foo bar ' filter='trim, uri' -%]
[[% String %]]
-- expect --
[foo%20bar]
-- test --
[% USE String ' foo bar ' filters='trim, uri' -%]
[[% String %]]
-- expect --
[foo%20bar]
-- test --
[% USE String 'foo bar' filters={ replace=['bar', 'baz'],
trim='', uri='' } -%]
[[% String %]]
-- expect --
[foo%20baz]
-- test --
[% USE String 'foo bar' filters=[ 'replace', ['bar', 'baz'],
'trim', 'uri' ] -%]
[[% String %]]
-- expect --
[foo%20baz]
-- test --
[% USE String 'foo bar' -%]
[% String %]
[% String.filter('uri') %]
[% String.filter('replace', 'bar', 'baz') %]
[% String.output_filter('uri') -%]
[% String %]
[% String.output_filter({ repeat => [3] }) -%]
[% String %]
-- expect --
foo bar
foo%20bar
foo baz
foo%20bar
foo%20barfoo%20barfoo%20bar
-- test --
[% USE String;
a = 'HeLLo';
b = 'hEllO';
a == b ? "not ok 0\n" : "ok 0\n";
String.new(a) == String.new(b) ? "not ok 1\n" : "ok 1\n";
String.new(a).lower == String.new(b).lower ? "ok 2\n" : "not ok 2\n";
String.new(a).lower.equals(String.new(b).lower) ? "ok 3\n" : "not ok 3\n";
a.search("(?i)^$b\$") ? "ok 4\n" : "not ok 4\n";
-%]
-- expect --
ok 0
ok 1
ok 2
ok 3
ok 4
-- test --
[% USE String('Hello World') -%]
a: [% String.substr(6) %]!
b: [% String.substr(0, 5) %]!
c: [% String.substr(0, 5, 'Goodbye') %]!
d: [% String %]!
-- expect --
a: World!
b: Hello!
c: Hello!
d: Goodbye World!
-- test --
[% USE str = String('foo bar baz wiz waz woz') -%]
a: [% str.substr(4, 3) %]
b: [% str.substr(12) %]
c: [% str.substr(0, 11, 'FOO') %]
d: [% str %]
-- expect --
a: bar
b: wiz waz woz
c: foo bar baz
d: FOO wiz waz woz
"""
|
IPIDataLab/PPP_Loader
|
python/data_norm.py
|
Python
|
gpl-2.0
| 12,915
| 0.022145
|
#!/usr/bin/python
import csv
import json
import datetime
import time
from utils import dateToISOString
#############################
#############################
# This file normalizes incoming
# data from the morph.io API
# to conform with the Mongo
# data model.
#############################
#############################
# Normalize data function
def normalize(data, update_date):
# Load in mission object to add mission location data elements
missions_in = open('../python/json/missions.json','rb')
missions = json.load(missions_in)
# Load in country object to add country location data elements
countries_in = open('../python/json/countries.json','rb')
countries = json.load(countries_in)
# Output data array of objects to load into mongo
data_out = []
# Iterators to keep track of what has been entered
dates = {}
country_date = {}
country_date_mission = {}
# Dictionary to convert string type input to data base type conventions
type_dict = {'Individual Police':'ip', 'Experts on Mission':'eom', 'Contingent Troop':'troops', 'Formed Police Units':'fpu'}
	# loop through incoming data
for entry in data:
# Check to see if all mission all country object has been created for that date
if str(entry['date']) not in dates:
# create all mission all country object dont include numeric fields
data_out.append({
'cont_date':dateToISOString(datetime.datetime.strptime(str(entry['date']), '%Y%m%d').date()),
'tcc_country_id': 'all',
'mission': 'all',
'total': 0,
'total_m': 0,
'total_f': 0
})
# Add key (date) value (data_out index number) pair to dates object
dates[str(entry['date'])] = len(data_out)-1
# Check to see if all mission object has been created for that date country
if (entry['tcc'] + '-' + str(entry['date'])) not in country_date:
# Create all mission object for country date combo dont include numeric fields
data_out.append({
'cont_date':dateToISOString(datetime.datetime.strptime(str(entry['date']), '%Y%m%d').date()),
'tcc_country_id': entry['tccIso3Alpha'],
'tcc_country_string': entry['tcc'],
'tcc_au': countries[entry['tccIso3Alpha']]['au'],
'tcc_eu': countries[entry['tccIso3Alpha']]['eu'],
'tcc_ecowas': countries[entry['tccIso3Alpha']]['ecowas'],
'tcc_cis': countries[entry['tccIso3Alpha']]['cis'],
'tcc_gcc': countries[entry['tccIso3Alpha']]['gcc'],
'tcc_g20': countries[entry['tccIso3Alpha']]['g20'],
'tcc_eccas': countries[entry['tccIso3Alpha']]['eccas'],
'tcc_shanghai': countries[entry['tccIso3Alpha']]['shanghai'],
'tcc_nam': countries[entry['tccIso3Alpha']]['nam'],
'tcc_oecd': countries[entry['tccIso3Alpha']]['oecd'],
'tcc_uma': countries[entry['tccIso3Alpha']]['uma'],
'tcc_nato': countries[entry['tccIso3Alpha']]['nato'],
'tcc_igad': countries[entry['tccIso3Alpha']]['igad'],
'tcc_sadc': countries[entry['tccIso3Alpha']]['sadc'],
'tcc_eac': countries[entry['tccIso3Alpha']]['eac'],
'tcc_oic': countries[entry['tccIso3Alpha']]['oic'],
'tcc_g8': countries[entry['tccIso3Alpha']]['g8'],
'tcc_comesa': countries[entry['tccIso3Alpha']]['comesa'],
'tcc_p5g4a3': countries[entry['tccIso3Alpha']]['p5g4a3'],
'tcc_oas': countries[entry['tccIso3Alpha']]['oas'],
'tcc_censad': countries[entry['tccIso3Alpha']]['cen_sad'],
'tcc_asean': countries[entry['tccIso3Alpha']]['asean'],
'tcc_g77': countries[entry['tccIso3Alpha']]['g77'],
'tcc_arabLeague': countries[entry['tccIso3Alpha']]['arab_league'],
			'tcc_capital': countries[entry['tccIso3Alpha']]['capital'],
'tcc_capital_loc': countries[entry['tccIso3Alpha']]['capital_loc'],
'tcc_continent': countries[entry['tccIso3Alpha']]['continent'],
'tcc_un_region': countries[entry['tccIso3Alpha']]['un_region'],
'tcc_un_bloc': countries[entry['tccIso3Alpha']]['un_bloc'],
'mission': 'all',
'total': 0,
'total_m': 0,
'total_f': 0
})
# Add key (country-date) value (data_out index number) pair to dates object
country_date[(entry['tcc'] + '-' + str(entry['date']))] = len(data_out)-1
if (entry['tcc'] + '-' + str(entry['date']) + '-' + entry['mission']) not in country_date_mission:
# create new country-mission-date object
data_out.append({
'cont_date':dateToISOString(datetime.datetime.strptime(str(entry['date']), '%Y%m%d').date()),
'tcc_country_id': entry['tccIso3Alpha'],
'tcc_country_string': entry['tcc'],
'tcc_au': countries[entry['tccIso3Alpha']]['au'],
'tcc_eu': countries[entry['tccIso3Alpha']]['eu'],
'tcc_ecowas': countries[entry['tccIso3Alpha']]['ecowas'],
'tcc_cis': countries[entry['tccIso3Alpha']]['cis'],
'tcc_gcc': countries[entry['tccIso3Alpha']]['gcc'],
'tcc_g20': countries[entry['tccIso3Alpha']]['g20'],
'tcc_eccas': countries[entry['tccIso3Alpha']]['eccas'],
'tcc_shanghai': countries[entry['tccIso3Alpha']]['shanghai'],
'tcc_nam': countries[entry['tccIso3Alpha']]['nam'],
'tcc_oecd': countries[entry['tccIso3Alpha']]['oecd'],
'tcc_uma': countries[entry['tccIso3Alpha']]['uma'],
'tcc_nato': countries[entry['tccIso3Alpha']]['nato'],
'tcc_igad': countries[entry['tccIso3Alpha']]['igad'],
'tcc_sadc': countries[entry['tccIso3Alpha']]['sadc'],
'tcc_eac': countries[entry['tccIso3Alpha']]['eac'],
'tcc_oic': countries[entry['tccIso3Alpha']]['oic'],
'tcc_g8': countries[entry['tccIso3Alpha']]['g8'],
'tcc_comesa': countries[entry['tccIso3Alpha']]['comesa'],
'tcc_p5g4a3': countries[entry['tccIso3Alpha']]['p5g4a3'],
'tcc_oas': countries[entry['tccIso3Alpha']]['oas'],
'tcc_censad': countries[entry['tccIso3Alpha']]['cen_sad'],
'tcc_asean': countries[entry['tccIso3Alpha']]['asean'],
'tcc_g77': countries[entry['tccIso3Alpha']]['g77'],
'tcc_arabLeague': countries[entry['tccIso3Alpha']]['arab_league'],
'tcc_capital': countries[entry['tccIso3Alpha']]['capital'],
'tcc_capital_loc': countries[entry['tccIso3Alpha']]['capital_loc'],
'tcc_continent': countries[entry['tccIso3Alpha']]['continent'],
'tcc_un_region': countries[entry['tccIso3Alpha']]['un_region'],
'tcc_un_bloc': countries[entry['tccIso3Alpha']]['un_bloc'],
'mission': entry['mission'],
'mission_country_id': missions[entry['mission']]['country_id'],
'mission_country': missions[entry['mission']]['country'],
'mission_hq': missions[entry['mission']]['hq'],
'mission_hq_loc': missions[entry['mission']]['mission_loc'],
'mission_continent': countries[missions[entry['mission']]['country_id']]['continent'],
'mission_un_region': countries[missions[entry['mission']]['country_id']]['un_region'],
'mission_un_bloc': countries[missions[entry['mission']]['country_id']]['un_bloc'],
'mission_au': countries[missions[entry['mission']]['country_id']]['au'],
'mission_eu': countries[missions[entry['mission']]['country_id']]['eu'],
'mission_ecowas': countries[missions[entry['mission']]['country_id']]['ecowas'],
'mission_cis': countries[missions[entry['mission']]['country_id']]['cis'],
'mission_gcc': countries[missions[entry['mission']]['country_id']]['gcc'],
'mission_g20': countries[missions[entry['mission']]['country_id']]['g20'],
'mission_eccas': countries[missions[entry['mission']]['country_id']]['eccas'],
'mission_shanghai': countries[missions[entry['mission']]['country_id']]['shanghai'],
'mission_nam': countries[missions[entry['mission']]['country_id']]['nam'],
'mission_oecd': countries[missions[entry['mission']]['country_id']]['oecd'],
'mission_uma': countries[missions[entry['mission']]['country_id']]['uma'],
'mission_nato': countries[missions[entry['mission']]['country_id']]['nato'],
'mission_igad': countries[missions[entry['mission']]['country_id']]['igad'],
'mission_sadc': countries[missions[entry['mission']]['country_id']]['sadc'],
'mission_eac': countries[missions[entry['mission']]['country_id']]['eac'],
'mission_oic': countries[missions[entry['mission']]['country_id']]['oic'],
'mission_g8': countries[missions[entry['mission']]['country_id']]['g8'],
'mission_comesa': countries[missions[entry['mission']]['country_id']]['comesa'],
'm
|
Strangemother/python-state-machine
|
scratch/machine_2/core/conditions.py
|
Python
|
mit
| 3,066
| 0.005545
|
'''
A condition
'''
from base import Base
from compares import const
class ComparisonMixin(object):
'''
Compare two values with a comparison utility
to denote if a change has validated.
'''
def compare(self, a, b, ctype=None):
'''
        compare 'a' against 'b' for a comparison of `ctype`
        by default ctype will compare for an exact match
'''
if ctype is None:
ctype = const.EXACT
# internal importer for core.compares.simple.
Comp = self.get_comparison_class(ctype)
        # new instance of the comparison class
        comp = Comp(self)
        # perform comparison
        return comp.match(a, b)
def get_comparison_class(self, compare):
'''
Return the compare class by string
'''
m = __import__('core.compares.simple', fromlist=[compare])
# print 'module', m
# print 'compare', compare
k = getattr(m, compare)
return k
class Condition(Base, ComparisonMixin):
'''
    A condition perpetuates changes of an object based upon
    rules applied at configuration.
'''
def __init__(self, node, attr, value=None, valid=None):
'''
A condition requires
a node (Node|String|iterable),
the attribute to monitor (String),
a value to validate condition.
Optionally `valid` callback when the condition is met
'''
self.watch = node
self.field = attr
self.target = value
self._valid_cb = valid
def valid(self):
'''
Is this condition valid
'''
vs = self._validate()
for node in vs:
val = vs[node]
if val == False: return False
return True
def get_nodes(self):
'''
return a list of Nodes retrieved from the machine using the
`watch` attr. Each item in the `watch` iterable will be
parsed into a Node type.
'''
        if not isinstance(self.watch, (tuple, list)):
# create iterable
return [self.watch]
# is iterable
return self.watch
def _validate(self, nodes=None, field=None, ctype=None):
'''
validate the condition against the assigned node.
Returns boolean
Provide nodes as a node, a list of nodes or a string for
network aquisition.
ctype defines the comapre utility to use for validation
'''
nodes = nodes or self.get_nodes()
# attr of the node to inspect
field = field or self.field
# the value to target.
value = self.target
if len(nodes) == 0:
return (False, 'no machine node %s' % self.watch)
r = {};
# print 'nodes', nodes
for node in nodes:
# current value
v = node.get(field)
# print 'node:', v, 'cache', cv, 'ctype', ctype
c = self.compare(v, value, ctype)
r.update({ node: c })
# import pdb;pdb.set_trace()
return r
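# A hedged usage sketch; `node` stands for a hypothetical Node-like object
# exposing get(attr), as _validate() assumes above:
#
#     cond = Condition(node, 'state', value='ready')
#     if cond.valid():
#         pass  # every watched node reports state == 'ready'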
|
cpantel/gravityFalls
|
Coursera Crypto 1/StatisticalTests.py
|
Python
|
gpl-2.0
| 217
| 0.013825
|
#!/usr/bin/python
import sys
class StatisticalTest(object):
def __init__(self):
        pass
    # Informal notes on the statistical tests (kept as comments so the module
    # remains valid Python):
    #   maxRunOfLength(x) <= 10 * log2(n)
    #   | count0(x) - count1(x) | <= 10 * sqrt(n)
    #   | count00(x) - n/4 | <= 10 * sqrt(n)
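# A minimal runnable sketch of the balance condition above (hedged: it assumes
# x is a bit string such as '0110...', which the notes do not specify):
from math import sqrt

def count_balance_ok(x):
    n = len(x)
    # zeros and ones should differ by at most 10*sqrt(n)
    return abs(x.count('0') - x.count('1')) <= 10 * sqrt(n)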
|
keremgocen/demo-gui-python
|
py3env/bin/viewer.py
|
Python
|
apache-2.0
| 1,064
| 0.00094
|
#!/Users/kerem/github-stuff/demo-gui-python/py3env/bin/python3
#
# The Python Imaging Library
# $Id$
#
from __future__ import print_function
import sys
if sys.version_info[0] > 2:
import tkinter
else:
import Tkinter as tkinter
from PIL import Image, ImageTk
#
# an image viewer
class UI(tkinter.Label):
def __init__(self, master, im):
if im.mode == "1":
# bitmap image
self.image = ImageTk.BitmapImage(im, foreground="white")
tkinter.Label.__init__(self, master, image=self.image, bd=0,
bg="black")
else:
# photo image
            self.image = ImageTk.PhotoImage(im)
tkinter.Label.__init__(self, master, image=self.image, bd=0)
#
# script interface
if __name__ == "__main__":
if not sys.argv[1:]:
print("Syntax: python viewer.py imagefile")
sys.exit(1)
filename = sys.argv[1]
root = tkinter.Tk()
root.title(filename)
im = Image.open(filename)
UI(root, im).pack()
root.mainloop()
|
vladimir-v-diaz/securesystemslib
|
tests/test_formats.py
|
Python
|
mit
| 15,792
| 0.005256
|
#!/usr/bin/env python
"""
<Program Name>
test_formats.py
<Author>
Vladimir Diaz <vladimir.v.diaz@gmail.com>
<Started>
January 2017 (modified from TUF's original formats.py)
<Copyright>
See LICENSE for licensing information.
<Purpose>
Unit test for 'formats.py'
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import unittest
import datetime
import securesystemslib.formats
import securesystemslib.schema
import six
class TestFormats(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_schemas(self):
# Test conditions for valid schemas.
valid_schemas = {
'ISO8601_DATETIME_SCHEMA': (securesystemslib.formats.ISO8601_DATETIME_SCHEMA,
'1985-10-21T13:20:00Z'),
'UNIX_TIMESTAMP_SCHEMA': (securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA, 499137720),
'HASH_SCHEMA': (securesystemslib.formats.HASH_SCHEMA, 'A4582BCF323BCEF'),
'HASHDICT_SCHEMA': (securesystemslib.formats.HASHDICT_SCHEMA,
{'sha256': 'A4582BCF323BCEF'}),
'HEX_SCHEMA': (securesystemslib.formats.HEX_SCHEMA, 'A4582BCF323BCEF'),
'KEYID_SCHEMA': (securesystemslib.formats.KEYID_SCHEMA, '123456789abcdef'),
'KEYIDS_SCHEMA': (securesystemslib.formats.KEYIDS_SCHEMA,
['123456789abcdef', '123456789abcdef']),
'SIG_SCHEME_SCHEMA': (securesystemslib.formats.SIG_SCHEME_SCHEMA, 'ecdsa-sha2-nistp256'),
'RELPATH_SCHEMA': (securesystemslib.formats.RELPATH_SCHEMA, 'metadata/root/'),
'RELPATHS_SCHEMA': (securesystemslib.formats.RELPATHS_SCHEMA,
['targets/role1/', 'targets/role2/']),
'PATH_SCHEMA': (securesystemslib.formats.PATH_SCHEMA, '/home/someuser/'),
'PATHS_SCHEMA': (securesystemslib.formats.PATHS_SCHEMA,
['/home/McFly/', '/home/Tannen/']),
'URL_SCHEMA': (securesystemslib.formats.URL_SCHEMA,
'https://www.updateframework.com/'),
'VERSION_SCHEMA': (securesystemslib.formats.VERSION_SCHEMA,
{'major': 1, 'minor': 0, 'fix': 8}),
'LENGTH_SCHEMA': (securesystemslib.formats.LENGTH_SCHEMA, 8),
'NAME_SCHEMA': (securesystemslib.formats.NAME_SCHEMA, 'Marty McFly'),
'BOOLEAN_SCHEMA': (securesystemslib.formats.BOOLEAN_SCHEMA, True),
'THRESHOLD_SCHEMA': (securesystemslib.formats.THRESHOLD_SCHEMA, 1),
'ROLENAME_SCHEMA': (securesystemslib.formats.ROLENAME_SCHEMA, 'Root'),
'RSAKEYBITS_SCHEMA': (securesystemslib.formats.RSAKEYBITS_SCHEMA, 4096),
'PASSWORD_SCHEMA': (securesystemslib.formats.PASSWORD_SCHEMA, 'secret'),
'PASSWORDS_SCHEMA': (securesystemslib.formats.PASSWORDS_SCHEMA, ['pass1', 'pass2']),
'KEYVAL_SCHEMA': (securesystemslib.formats.KEYVAL_SCHEMA,
{'public': 'pubkey', 'private': 'privkey'}),
'PUBLIC_KEYVAL_SCHEMA': (securesystemslib.formats.PUBLIC_KEYVAL_SCHEMA,
{'public': 'pubkey'}),
'PUBLIC_KEYVAL_SCHEMA2': (securesystemslib.formats.PUBLIC_KEYVAL_SCHEMA,
{'public': 'pubkey', 'private': ''}),
'KEY_SCHEMA': (securesystemslib.formats.KEY_SCHEMA,
{'keytype': 'rsa',
'scheme': 'rsassa-pss-sha256',
'keyval': {'public': 'pubkey',
'private': 'privkey'}}),
'PUBLIC_KEY_SCHEMA': (securesystemslib.formats.KEY_SCHEMA,
{'keytype': 'rsa',
'scheme': 'rsassa-pss-sha256',
'keyval': {'public': 'pubkey'}}),
'PUBLIC_KEY_SCHEMA2': (securesystemslib.formats.KEY_SCHEMA,
{'keytype': 'rsa',
                              'scheme': 'rsassa-pss-sha256',
'keyval': {'public': 'pubkey',
'private': ''}}),
'RSAKEY_SCHEMA': (securesystemslib.formats.RSAKEY_SCHEMA,
{'keytype': 'rsa',
'scheme': 'rsassa-pss-sha256',
                         'keyid': '123456789abcdef',
'keyval': {'public': 'pubkey',
'private': 'privkey'}}),
'FILEINFO_SCHEMA': (securesystemslib.formats.FILEINFO_SCHEMA,
{'length': 1024,
'hashes': {'sha256': 'A4582BCF323BCEF'},
'custom': {'type': 'paintjob'}}),
'FILEDICT_SCHEMA': (securesystemslib.formats.FILEDICT_SCHEMA,
{'metadata/root.json': {'length': 1024,
'hashes': {'sha256': 'ABCD123'},
'custom': {'type': 'metadata'}}}),
'SIGNATURE_SCHEMA': (securesystemslib.formats.SIGNATURE_SCHEMA,
{'keyid': '123abc',
'method': 'evp',
'sig': 'A4582BCF323BCEF'}),
'SIGNATURESTATUS_SCHEMA': (securesystemslib.formats.SIGNATURESTATUS_SCHEMA,
{'threshold': 1,
'good_sigs': ['123abc'],
'bad_sigs': ['123abc'],
'unknown_sigs': ['123abc'],
'untrusted_sigs': ['123abc'],
'unknown_method_sigs': ['123abc']}),
'SIGNABLE_SCHEMA': (securesystemslib.formats.SIGNABLE_SCHEMA,
{'signed': 'signer',
'signatures': [{'keyid': '123abc',
'method': 'evp',
'sig': 'A4582BCF323BCEF'}]}),
'KEYDICT_SCHEMA': (securesystemslib.formats.KEYDICT_SCHEMA,
{'123abc': {'keytype': 'rsa',
'scheme': 'rsassa-pss-sha256',
'keyval': {'public': 'pubkey',
'private': 'privkey'}}}),
'KEYDB_SCHEMA': (securesystemslib.formats.KEYDB_SCHEMA,
{'123abc': {'keytype': 'rsa',
'keyid': '123456789abcdef',
'keyval': {'public': 'pubkey',
'private': 'privkey'}}}),
'ROLE_SCHEMA': (securesystemslib.formats.ROLE_SCHEMA,
{'keyids': ['123abc'],
'threshold': 1,
'paths': ['path1/', 'path2']}),
'ROLEDICT_SCHEMA': (securesystemslib.formats.ROLEDICT_SCHEMA,
{'root': {'keyids': ['123abc'],
'threshold': 1,
'paths': ['path1/', 'path2']}}),
'ROOT_SCHEMA': (securesystemslib.formats.ROOT_SCHEMA,
{'_type': 'root',
'version': 8,
'consistent_snapshot': False,
'compression_algorithms': ['gz'],
'expires': '1985-10-21T13:20:00Z',
'keys': {'123abc': {'keytype': 'rsa',
'scheme': 'rsassa-pss-sha256',
'keyval': {'public': 'pubkey',
'private': 'privkey'}}},
'roles': {'root': {'keyids': ['123abc'],
'threshold': 1,
'paths': ['path1/', 'path2']}}}),
'TARGETS_SCHEMA': (securesystemslib.formats.TARGETS_SCHEMA,
{'_type': 'targets',
'version': 8,
'expires': '1985-10-21T13:20:00Z',
'targets': {'metadata/targets.json': {'length': 1
|
Reagankm/KnockKnock
|
venv/lib/python3.4/site-packages/nltk/tbl/demo.py
|
Python
|
gpl-2.0
| 14,715
| 0.006116
|
# -*- coding: utf-8 -*-
# Natural Language Toolkit: Transformation-based learning
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Marcus Uneson <marcus.uneson@gmail.com>
# based on previous (nltk2) version by
# Christopher Maloof, Edward Loper, Steven Bird
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import print_function, absolute_import, division
import os
import pickle
import random
import time
from nltk.corpus import treebank
from nltk.tbl import error_list, Template
from nltk.tag.brill import Word, Pos
from nltk.tag import BrillTaggerTrainer, RegexpTagger, UnigramTagger
def demo():
"""
Run a demo with defaults. See source comments for details,
or docstrings of any of the more specific demo_* functions.
"""
postag()
def demo_repr_rule_format():
"""
Exemplify repr(Rule) (see also str(Rule) and Rule.format("verbose"))
"""
postag(ruleformat="repr")
def demo_str_rule_format():
"""
Exemplify repr(Rule) (see also str(Rule) and Rule.format("verbose"))
"""
postag(ruleformat="str")
def demo_verbose_rule_format():
"""
Exemplify Rule.format("verbose")
"""
postag(ruleformat="verbose")
def demo_multiposition_feature():
"""
The feature/s of a template takes a list of positions
relative to the current word where the feature should be
looked for, conceptually joined by logical OR. For instance,
Pos([-1, 1]), given a value V, will hold whenever V is found
one step to the left and/or one step to the right.
For contiguous ranges, a 2-arg form giving inclusive end
points can also be used: Pos(-3, -1) is the same as the arg
below.
"""
postag(templates=[Template(Pos([-3,-2,-1]))])
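# A hedged illustration (not part of the original demo): the 2-arg
# contiguous-range form described in the docstring above builds the same
# feature as the explicit position list.
def demo_multiposition_feature_range_form():
    """
    Same as demo_multiposition_feature, but using the inclusive-range
    shorthand Pos(-3, -1) instead of Pos([-3, -2, -1]).
    """
    postag(templates=[Template(Pos(-3, -1))])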
def demo_multifeature_template():
"""
Templates can have more than a single feature.
"""
postag(templates=[Template(Word([0]), Pos([-2,-1]))])
def demo_template_statistics():
"""
Show aggregate statistics per template. Little used templates are
candidates for deletion, much used templates may possibly be refined.
Deleting unused templates is mostly about saving time and/or space:
training is basically O(T) in the number of templates T
(also in terms of memory usage, which often will be the limiting factor).
"""
postag(incremental_stats=True, template_stats=True)
def demo_generated_templates():
"""
Template.expand and Feature.expand are class methods facilitating
generating large amounts of templates. See their documentation for
details.
    Note: training with 500 templates can easily fill all available
    memory, even on relatively small corpora
"""
wordtpls = Word.expand([-1,0,1], [1,2], excludezero=False)
tagtpls = Pos.expand([-2,-1,0,1], [1,2], excludezero=True)
    templates = list(Template.expand([wordtpls, tagtpls], combinations=(1,3)))
    print("Generated {0} templates for transformation-based learning".format(len(templates)))
postag(templates=templates, incremental_stats=True, template_stats=True)
def demo_learning_curve():
"""
Plot a learning curve -- the contribution on tagging accuracy of
the individual rules.
Note: requires matplotlib
"""
postag(incremental_stats=True, separate_baseline_data=True, learning_curve_output="learningcurve.png")
def demo_error_analysis():
"""
Writes a file with context for each erroneous word after tagging testing data
"""
postag(error_output="errors.txt")
def demo_serialize_tagger():
"""
Serializes the learned tagger to a file in pickle format; reloads it
and validates the process.
"""
postag(serialize_output="tagger.pcl")
def demo_high_accuracy_rules():
"""
Discard rules with low accuracy. This may hurt performance a bit,
but will often produce rules which are more interesting read to a human.
"""
postag(num_sents=3000, min_acc=0.96, min_score=10)
def postag(
templates=None,
tagged_data=None,
num_sents=1000,
max_rules=300,
min_score=3,
min_acc=None,
train=0.8,
trace=3,
randomize=False,
ruleformat="str",
incremental_stats=False,
template_stats=False,
error_output=None,
serialize_output=None,
learning_curve_output=None,
learning_curve_take=300,
baseline_backoff_tagger=None,
separate_baseline_data=False,
cache_baseline_tagger=None):
"""
Brill Tagger Demonstration
    :param templates: the templates to use in rule learning
    :type templates: list of Template
    :param tagged_data: the corpus of tagged sentences to use for training and testing
    :type tagged_data: C{list} of C{list} of (word, tag) tuples
:param num_sents: how many sentences of training and testing data to use
:type num_sents: C{int}
:param max_rules: maximum number of rule instances to create
:type max_rules: C{int}
:param min_score: the minimum score for a rule in order for it to be considered
:type min_score: C{int}
    :param min_acc: the minimum accuracy for a rule in order for it to be considered
:type min_acc: C{float}
    :param train: the fraction of the corpus to be used for training (1=all)
:type train: C{float}
:param trace: the level of diagnostic tracing output to produce (0-4)
:type trace: C{int}
:param randomize: whether the training data should be a random subset of the corpus
:type randomize: C{bool}
:param ruleformat: rule output format, one of "str", "repr", "verbose"
:type ruleformat: C{str}
:param incremental_stats: if true, will tag incrementally and collect stats for each rule (rather slow)
:type incremental_stats: C{bool}
:param template_stats: if true, will print per-template statistics collected in training and (optionally) testing
:type template_stats: C{bool}
:param error_output: the file where errors will be saved
:type error_output: C{string}
:param serialize_output: the file where the learned tbl tagger will be saved
:type serialize_output: C{string}
:param learning_curve_output: filename of plot of learning curve(s) (train and also test, if available)
:type learning_curve_output: C{string}
:param learning_curve_take: how many rules plotted
:type learning_curve_take: C{int}
    :param baseline_backoff_tagger: the backoff tagger used by the baseline tagger
:type baseline_backoff_tagger: tagger
:param separate_baseline_data: use a fraction of the training data exclusively for training baseline
:type separate_baseline_data: C{bool}
:param cache_baseline_tagger: cache baseline tagger to this file (only interesting as a temporary workaround to get
deterministic output from the baseline unigram tagger between python versions)
:type cache_baseline_tagger: C{string}
    Note on separate_baseline_data: if False, reuse training data both for baseline and rule learner. This
is fast and fine for a demo, but is likely to generalize worse on unseen data.
Also cannot be sensibly used for learning curves on training data (the baseline will be artificially high).
"""
# defaults
baseline_backoff_tagger = baseline_backoff_tagger or REGEXP_TAGGER
if templates is None:
from nltk.tag.brill import describe_template_sets, brill24
# some pre-built template sets taken from typical systems or publications are
# available. Print a list with describe_template_sets()
# for instance:
templates = brill24()
(training_data, baseline_data, gold_data, testing_data) = \
_demo_prepare_data(tagged_data, train, num_sents, randomize, separate_baseline_data)
# creating (or reloading from cache) a baseline tagger (unigram tagger)
# this is just a mechanism for getting deterministic output from the baseline between
# python versions
if cache_baseline_tagger:
if not os.path.exists(cache_baseline_tagger):
baseline_tagger = UnigramTagger(baseline_data, backoff=baseline_backoff_tagger)
with open(cache_baseline_tagger, 'w') as print_rules:
pickle.dump(baseline_tagger, print_rules)
print("
|
HSAR/Ficlatte
|
comment/urls.py
|
Python
|
agpl-3.0
| 1,083
| 0.004625
|
#coding: utf-8
#This file is part of Ficlatté.
#Copyright © 2015-2017 Paul Robertson, Jim Stitzel and Shu Sam Chen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of version 3 of the GNU Affero General Public
# License as published by the Free Software Foundation
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^submit/$', 'comment.views.submit_comment', name='submit_comment'),
url(r'^(?P<comment_id>\d+)/like/$', 'comment.views.like_comment', name='like_comment'),
url(r'^(?P<comment_id>\d+)/unlike/$', 'comment.views.unlike_comment', name='unlike_comment'),
]
|
tomprince/gemrb
|
gemrb/GUIScripts/iwd2/Class.py
|
Python
|
gpl-2.0
| 5,585
| 0.03026
|
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
#character generation, class (GUICG2)
import GemRB
from GUIDefines import *
import CommonTables
ClassWindow = 0
TextAreaControl = 0
DoneButton = 0
BackButton = 0
ClassCount = 0
HasSubClass = 0
ClassID = 0
def AdjustTextArea():
global HasSubClass, ClassID
Class = GemRB.GetVar("Class")-1
TextAreaControl.SetText(CommonTables.Classes.GetValue(Class,1) )
ClassName = CommonTables.Classes.GetRowName(Class)
ClassID = CommonTables.Classes.GetValue(ClassName, "ID")
#determining if this class has any subclasses
HasSubClass = 0
for i in range(1, ClassCount):
ClassName = CommonTables.Classes.GetRowName(i-1)
#determining if this is a kit or class
Allowed = CommonTables.Classes.GetValue(ClassName, "CLASS")
if Allowed != ClassID:
continue
HasSubClass = 1
break
if HasSubClass == 0:
DoneButton.SetState(IE_GUI_BUTTON_ENABLED)
else:
DoneButton.SetState(IE_GUI_BUTTON_DISABLED)
return
def OnLoad():
global ClassWindow, TextAreaControl, DoneButton, BackButton
global ClassCount
GemRB.LoadWindowPack("GUICG", 800, 600)
#this replaces help02.2da for class restrictions
ClassCount = CommonTables.Classes.GetRowCount()+1
ClassWindow = GemRB.LoadWindow(2)
rid = CommonTables.Races.FindValue(3, GemRB.GetVar('BaseRace'))
RaceName = CommonTables.Races.GetRowName(rid)
#radiobutton groups must be set up before doing anything else to them
j = 0
for i in range(1,ClassCount):
ClassName = CommonTables.Classes.GetRowName(i-1)
Allowed = CommonTables.Classes.GetValue(ClassName, "CLASS")
if Allowed > 0:
continue
Button = ClassWindow.GetControl(j+2)
j = j+1
Button.SetFlags(IE_GUI_BUTTON_RADIOBUTTON, OP_SET)
Button.SetState(IE_GUI_BUTTON_DISABLED)
j = 0
for i in range(1,ClassCount):
ClassName = CommonTables.Classes.GetRowName(i-1)
#determining if this is a kit or class
Allowed = CommonTables.Classes.GetValue(ClassName, "CLASS")
if Allowed > 0:
continue
Allowed = CommonTables.Classes.GetValue(ClassName, RaceName)
Button = ClassWindow.GetControl(j+2)
j = j+1
t = CommonTables.Classes.GetValue(ClassName, "NAME_REF")
Button.SetText(t )
if Allowed==0:
continue
Button.SetState(IE_GUI_BUTTON_ENABLED)
Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, ClassPress)
Button.SetVarAssoc("Class", i)
BackButton = ClassWindow.GetControl(17)
BackButton.SetText(15416)
BackButton.SetFlags(IE_GUI_BUTTON_CANCEL,OP_OR)
DoneButton = ClassWindow.GetControl(0)
DoneButton.SetText(36789)
DoneButton.SetFlags(IE_GUI_BUTTON_DEFAULT,OP_OR)
ScrollBarControl = ClassWindow.GetControl(15)
TextAreaControl = ClassWindow.GetControl(16)
Class = GemRB.GetVar("Class")-1
if Class<0:
TextAreaControl.SetText(17242)
DoneButton.SetState(IE_GUI_BUTTON_DISABLED)
else:
AdjustTextArea()
DoneButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, NextPress)
	BackButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, BackPress)
ClassWindow.SetVisible(WINDOW_VISIBLE)
return
def ClassPress():
	global HasSubClass
AdjustTextArea()
if HasSubClass == 0:
return
DoneButton.SetState(IE_GUI_BUTTON_DISABLED)
j = 0
for i in range(1,ClassCount):
ClassName = CommonTables.Classes.GetRowName(i-1)
Allowed = CommonTables.Classes.GetValue(ClassName, "CLASS")
if Allowed > 0:
continue
Button = ClassWindow.GetControl(j+2)
j = j+1
Button.SetFlags(IE_GUI_BUTTON_RADIOBUTTON, OP_SET)
Button.SetState(IE_GUI_BUTTON_DISABLED)
Button.SetText("")
j=0
for i in range(1, ClassCount):
ClassName = CommonTables.Classes.GetRowName(i-1)
#determining if this is a kit or class
Allowed = CommonTables.Classes.GetValue(ClassName, "CLASS")
if Allowed != ClassID:
continue
Button = ClassWindow.GetControl(j+2)
j = j+1
t = CommonTables.Classes.GetValue(ClassName, "NAME_REF")
Button.SetText(t )
Button.SetState(IE_GUI_BUTTON_ENABLED)
Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, ClassPress2)
Button.SetVarAssoc("Class", i)
BackButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, BackPress2)
return
def ClassPress2():
Class = GemRB.GetVar("Class")-1
TextAreaControl.SetText(CommonTables.Classes.GetValue(Class,1) )
DoneButton.SetState(IE_GUI_BUTTON_ENABLED)
return
def BackPress2():
DoneButton.SetState(IE_GUI_BUTTON_DISABLED)
if ClassWindow:
ClassWindow.Unload()
OnLoad()
return
def BackPress():
if ClassWindow:
ClassWindow.Unload()
GemRB.SetNextScript("CharGen3")
GemRB.SetVar("Class",0) #scrapping the class value
MyChar = GemRB.GetVar("Slot")
	GemRB.SetPlayerStat (MyChar, IE_CLASS, 0)
return
def NextPress():
#classcolumn is base class
Class = GemRB.GetVar("Class")
ClassColumn = CommonTables.Classes.GetValue(Class - 1, 3)
if ClassColumn <= 0: #it was already a base class
ClassColumn = Class
GemRB.SetVar("BaseClass", ClassColumn)
if ClassWindow:
ClassWindow.Unload()
GemRB.SetNextScript("CharGen4") #alignment
return
|
harshilasu/LinkurApp
|
y/google-cloud-sdk/platform/gcutil/lib/google_api_python_client/oauth2client/util.py
|
Python
|
gpl-3.0
| 5,523
| 0.004527
|
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Common utility library."""
__author__ = ['rafek@google.com (Rafe Kaplan)',
'guido@google.com (Guido van Rossum)',
]
__all__ = [
'positional',
]
import gflags
import inspect
import logging
import types
import urllib
import urlparse
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
logger = logging.getLogger(__name__)
FLAGS = gflags.FLAGS
gflags.DEFINE_enum('positional_parameters_enforcement', 'WARNING',
['EXCEPTION', 'WARNING', 'IGNORE'],
'The action when an oauth2client.util.positional declaration is violated.')
def positional(max_positional_args):
"""A decorator to declare that only the first N arguments my be positional.
This decorator makes it easy to support Python 3 style key-word only
parameters. For example, in Python 3 it is possible to write:
    def fn(pos1, *, kwonly1=None, kwonly2=None):
...
All named parameters after * must be a keyword:
fn(10, 'kw1', 'kw2') # Raises exception.
fn(10, kwonly1='kw1') # Ok.
Example:
To define a function like above, do:
@positional(1)
def fn(pos1, kwonly1=None, kwonly2=None):
...
If no default value is provided to a keyword argument, it becomes a required
keyword argument:
@positional(0)
def fn(required_kw):
...
This must be called with the keyword parameter:
fn() # Raises exception.
    fn(10)  # Raises exception.
fn(required_kw=10) # Ok.
When defining instance or class methods always remember to account for
  'self' and 'cls':
class MyClass(object):
@positional(2)
def my_method(self, pos1, kwonly1=None):
...
@classmethod
@positional(2)
def my_method(cls, pos1, kwonly1=None):
...
The positional decorator behavior is controlled by the
--positional_parameters_enforcement flag. The flag may be set to 'EXCEPTION',
'WARNING' or 'IGNORE' to raise an exception, log a warning, or do nothing,
respectively, if a declaration is violated.
Args:
    max_positional_args: Maximum number of positional arguments. All
      parameters after this index must be keyword only.
Returns:
A decorator that prevents using arguments after max_positional_args from
being used as positional parameters.
Raises:
TypeError if a key-word only argument is provided as a positional parameter,
but only if the --positional_parameters_enforcement flag is set to
'EXCEPTION'.
"""
def positional_decorator(wrapped):
def positional_wrapper(*args, **kwargs):
if len(args) > max_positional_args:
plural_s = ''
if max_positional_args != 1:
plural_s = 's'
message = '%s() takes at most %d positional argument%s (%d given)' % (
wrapped.__name__, max_positional_args, plural_s, len(args))
if FLAGS.positional_parameters_enforcement == 'EXCEPTION':
raise TypeError(message)
elif FLAGS.positional_parameters_enforcement == 'WARNING':
logger.warning(message)
else: # IGNORE
pass
return wrapped(*args, **kwargs)
return positional_wrapper
if isinstance(max_positional_args, (int, long)):
return positional_decorator
else:
args, _, _, defaults = inspect.getargspec(max_positional_args)
return positional(len(args) - len(defaults))(max_positional_args)
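# Hedged usage note (not part of the original module): @positional may also
# be applied directly to a function with no argument, in which case the cap
# is inferred from the number of parameters without default values, e.g.
#
#   @positional
#   def fn(pos1, kwonly1=None):   # behaves like @positional(1)
#       ...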
def scopes_to_string(scopes):
"""Converts scope value to a string.
If scopes is a string then it is simply passed through. If scopes is an
iterable then a string is returned that is all the individual scopes
concatenated with spaces.
Args:
scopes: string or iterable of strings, the scopes.
Returns:
The scopes formatted as a single string.
"""
if isinstance(scopes, types.StringTypes):
return scopes
else:
return ' '.join(scopes)
def dict_to_tuple_key(dictionary):
"""Converts a dictionary to a tuple that can be used as an immutable key.
The resulting key is always sorted so that logically equivalent dictionaries
always produce an identical tuple for a key.
Args:
dictionary: the dictionary to use as the key.
Returns:
    A tuple representing the dictionary in its naturally sorted ordering.
"""
return tuple(sorted(dictionary.items()))
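# Illustrative example (not in the original module): logically equivalent
# dictionaries yield the same tuple key.
#
#   >>> dict_to_tuple_key({'b': 2, 'a': 1}) == dict_to_tuple_key({'a': 1, 'b': 2})
#   True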
def _add_query_parameter(url, name, value):
"""Adds a query parameter to a url.
Replaces the current value if it already exists in the URL.
Args:
url: string, url to add the query parameter to.
name: string, query parameter name.
value: string, query parameter value.
Returns:
    The updated url. Does not update the url if value is None.
"""
if value is None:
return url
else:
parsed = list(urlparse.urlparse(url))
q = dict(parse_qsl(parsed[4]))
q[name] = value
parsed[4] = urllib.urlencode(q)
return urlparse.urlunparse(parsed)
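# Illustrative example (not in the original module; the ordering of the
# encoded query parameters may vary):
#
#   >>> _add_query_parameter('http://example.com/path?foo=1', 'bar', '2')
#   'http://example.com/path?foo=1&bar=2'
#
# A value of None leaves the url unchanged.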
|
steventimberman/masterDebater
|
venv/lib/python2.7/site-packages/django_fluent_comments-1.4.3.dist-info/top_level.txt.py
|
Python
|
mit
| 16
| 0
|
XXXXXXX
|
XXXXXXXX
|
|
willprice/mentor-finder
|
mentor_finder/models/county.py
|
Python
|
gpl-3.0
| 3,324
| 0.000301
|
# -*- coding: utf-8 -*-
_counties = [
("Aberdeenshire", "Aberdeenshire"),
("Anglesey", "Anglesey"),
("Angus", "Angus"),
("Argyll", "Argyll"),
("Ayrshire", "Ayrshire"),
("Banffshire", "Banffshire"),
("Bedfordshire", "Bedfordshire"),
("Berwickshire", "Berwickshire"),
("Breconshire", "Breconshire"),
("Buckinghamshire", "Buckinghamshire"),
("Bute", "Bute"),
("Caernarvonshire", "Caernarvonshire"),
("Caithness", "Caithness"),
("Cambridgeshire", "Cambridgeshire"),
("Cardiganshire", "Cardiganshire"),
("Carmarthenshire", "Carmarthenshire"),
("Cheshire", "Cheshire"),
("Clackmannanshire", "Clackmannanshire"),
("Cornwall and Isles of Scilly", "Cornwall and Isles of Scilly"),
("Cumbria", "Cumbria"),
("Denbighshire", "Denbighshire"),
("Derbyshire", "Derbyshire"),
("Devon", "Devon"),
("Dorset", "Dorset"),
("Dumbartonshire", "Dumbartonshire"),
("Dumfriesshire", "Dumfriesshire"),
("Durham", "Durham"),
("East Lothian", "East Lothian"),
("East Sussex", "East Sussex"),
("Essex", "Essex"),
("Fife", "Fife"),
("Flintshire", "Flintshire"),
("Glamorgan", "Glamorgan"),
("Gloucestershire", "Gloucestershire"),
("Greater London", "Greater London"),
("Greater Manchester", "Greater Manchester"),
("Hampshire", "Hampshire"),
("Hertfordshire", "Hertfordshire"),
("Herefordshire", "Herefordshire"),
("Inverness", "Inverness"),
("Kent", "Kent"),
("Kincardineshire", "Kincardineshire"),
("Kinross-shire", "Kinross-shire"),
("Kirkcudbrightshire", "Kirkcudbrightshire"),
("Lanarkshire", "Lanarkshire"),
("Lancashire", "Lancashire"),
("Leicestershire", "Leicestershire"),
("Lincolnshire", "Lincolnshire"),
("London", "London"),
("Merionethshire", "Merionethshire"),
("Merseyside", "Merseyside"),
("Midlothian", "Midlothian"),
("Monmouthshire", "Monmouthshire"),
("Montgomeryshire", "Montgomeryshire"),
("Moray", "Moray"),
("Nairnshire", "Nairnshire"),
("Norfolk", "Norfolk"),
("North Yorkshire", "North Yorkshire"),
("Northamptonshire", "Northamptonshire"),
("Northumberland", "Northumberland"),
("Nottinghamshire", "Notting
|
hamshire"),
("Orkney", "Orkney"),
("Oxfordshire", "Oxfordshire"),
("Peebl
|
eshire", "Peebleshire"),
("Pembrokeshire", "Pembrokeshire"),
("Perthshire", "Perthshire"),
("Radnorshire", "Radnorshire"),
("Renfrewshire", "Renfrewshire"),
("Ross & Cromarty", "Ross & Cromarty"),
("Roxburghshire", "Roxburghshire"),
("Selkirkshire", "Selkirkshire"),
("Shetland", "Shetland"),
("Shropshire", "Shropshire"),
("Somerset", "Somerset"),
("South Yorkshire", "South Yorkshire"),
("Staffordshire", "Staffordshire"),
("Stirlingshire", "Stirlingshire"),
("Suffolk", "Suffolk"),
("Surrey", "Surrey"),
("Sutherland", "Sutherland"),
("Tyne and Wear", "Tyne and Wear"),
("Warwickshire", "Warwickshire"),
("West Lothian", "West Lothian"),
("West Midlands", "West Midlands"),
("West Sussex", "West Sussex"),
("West Yorkshire", "West Yorkshire"),
("Wigtownshire", "Wigtownshire"),
("Wiltshire", "Wiltshire"),
("Worcestershire", "Worcestershire"),
]
def get_counties():
return _counties
|
openbig/odoo-contract
|
sale_contractmanagement/idoit_license_gen.py
|
Python
|
agpl-3.0
| 2,980
| 0.003357
|
# -*- coding: utf-8 -*-
from hashlib import sha1
from phpserialize import dumps
from calendar import timegm
from time import strptime
import zlib
#calendar.timegm(time.strptime('01/12/2011', '%d/%m/%Y'))
def create(data):
assert isinstance(data, dict)
assert 'request_data' in data
assert 'contract_data' in data['request_data']
assert 'product_data' in data['request_data']
    assert isinstance(data['request_data']['product_data'], list)
mod_identifiers = {
'viva': 'viva',
'rfc': 'rfc',
'relocate_ci': 'CI-Umzug',
'swapci': 'Geräteaustausch',
}
contract_data = data['request_data']['contract_data']
product_data = data['request_data']['product_data']
license_data = {
'C__LICENCE__OBJECT_COUNT': 0,
'C__LICENCE__DB_NAME': contract_data['db_name'] or '',
'C__LICENCE__CUSTOMER_NAME': contract_data['customer_name'],
'C__LICENCE__REG_DATE': timegm(strptime(contract_data['date_start'], '%d/%m/%Y')),
'C__LICENCE__RUNTIME': timegm(strptime(contract_data['end_date'], '%d/%m/%Y')) - timegm(strptime(contract_data['date_start'], '%d/%m/%Y')),
        'C__LICENCE__EMAIL': ''.join(['i-doit@', contract_data['customer_name']]),
'C__LICENCE__TYPE': 'Einzellizenz Subskription',
'C__LICENCE__DATA': {},
}
for product in product_data:
if 'Objektanzahl' in product:
license_data['C__LICENCE__OBJECT_COUNT'] += product[
'Objektanzahl'].isdigit() and int(product['Objektanzahl']) or 0
if 'Multitenancy' in product:
if product['Multitenancy'] == 'Single':
license_data['C__LICENCE__TYPE'] = 'Einzellizenz Subskription'
elif product['Multitenancy'] == 'Multi':
license_data['C__LICENCE__TYPE'] = 'Hosting'
if 'Lizenztyp' in product and product['Lizenztyp'] == 'Kaufversion':
license_data['C__LICENCE__TYPE'] = 'Kauflizenz'
if 'Produkttyp' in product and product['Produkttyp'] == 'Modul':
if 'identifier' in product:
license_data['C__LICENCE__DATA'][
product['identifier']] = True
for key in mod_identifiers:
if mod_identifiers[key] in product['name'].lower():
license_data['C__LICENCE__DATA'][key] = True
if license_data['C__LICENCE__TYPE'] == 'Hosting':
        license_data['C__LICENCE__DB_NAME'] = ''
elif license_data['C__LICENCE__TYPE'] == 'Kauflizenz':
        license_data['C__LICENCE__DB_NAME'] = ''
del license_data['C__LICENCE__RUNTIME']
license_key = sha1(dumps(license_data))
#sort
#serialize with phpserialize.dumps
#gzip with zlib.compress
#reverse:
# f = open('license.key','rb')
# f_unzipped = zlib.decompress(f.read())
# license_dict = phpserialize.loads(f_unzipped)
# return license encoded in base_64
return True
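# Hedged sketch (not part of the original module): the commented steps above
# outline the intended key generation. Assuming the license_data dict built
# in create(), a minimal version might look like:
#
#   serialized = dumps(license_data)   # phpserialize
#   compressed = zlib.compress(serialized)
#   with open('license.key', 'wb') as key_file:   # hypothetical output path
#       key_file.write(compressed)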
|
jjhelmus/scipy
|
scipy/stats/_multivariate.py
|
Python
|
bsd-3-clause
| 114,780
| 0.000741
|
#
# Author: Joris Vankerschaver 2013
#
from __future__ import division, print_function, absolute_import
import math
import numpy as np
import scipy.linalg
from scipy.misc import doccer
from scipy.special import gammaln, psi, multigammaln, xlogy, entr
from scipy._lib._util import check_random_state
from scipy.linalg.blas import drot
from ._discrete_distns import binom
__all__ = ['multivariate_normal',
'matrix_normal',
'dirichlet',
'wishart',
'invwishart',
'multinomial',
'special_ortho_group',
'ortho_group',
'random_correlation',
'unitary_group']
_LOG_2PI = np.log(2 * np.pi)
_LOG_2 = np.log(2)
_LOG_PI = np.log(np.pi)
_doc_random_state = """\
random_state : None or int or np.random.RandomState instance, optional
If int or RandomState, use it for drawing the random variates.
If None (or np.random), the global np.random state is used.
Default is None.
"""
def _squeeze_output(out):
"""
Remove single-dimensional entries from array and convert to scalar,
if necessary.
"""
out = out.squeeze()
if out.ndim == 0:
out = out[()]
return out
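# Illustrative example (not in the original source): a (1, 1) array becomes
# a scalar, while otherwise only singleton axes are removed.
#
#   >>> _squeeze_output(np.array([[3.0]]))
#   3.0
#   >>> _squeeze_output(np.ones((2, 1))).shape
#   (2,)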
def _eigvalsh_to_eps(spectrum, cond=None, rcond=None):
"""
Determine which eigenvalues are "small" given the spectrum.
This is for compatibility across various linear algebra functions
that should agree about whether or not a Hermitian matrix is numerically
singular and what is its numerical matrix rank.
This is designed to be compatible with scipy.linalg.pinvh.
Parameters
----------
spectrum : 1d ndarray
Array of eigenvalues of a Hermitian matrix.
cond, rcond : float, optional
Cutoff for small eigenvalues.
Singular values smaller than rcond * largest_eigenvalue are
considered zero.
If None or -1, suitable machine precision is used.
Returns
-------
eps : float
Magnitude cutoff for numerical negligibility.
"""
if rcond is not None:
cond = rcond
if cond in [None, -1]:
t = spectrum.dtype.char.lower()
factor = {'f': 1E3, 'd': 1E6}
cond = factor[t] * np.finfo(t).eps
eps = cond * np.max(abs(spectrum))
return eps
def _pinv_1d(v, eps=1e-5):
"""
A helper function for computing the pseudoinverse.
Parameters
----------
v : iterable of numbers
This may be thought of as a vector of eigenvalues or singular values.
eps : float
Values with magnitude no greater than eps are considered negligible.
Returns
-------
v_pinv : 1d float ndarray
A vector of pseudo-inverted numbers.
"""
return np.array([0 if abs(x) <= eps else 1/x for x in v], dtype=float)
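# Illustrative example (not in the original source): entries within eps of
# zero are pseudo-inverted to 0, the rest are reciprocated.
#
#   >>> _pinv_1d(np.array([2.0, 1e-9, -4.0]))
#   array([ 0.5 ,  0.  , -0.25])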
class _PSD(object):
"""
Compute coordinated functions of a symmetric positive semidefinite matrix.
This class addresses two issues. Firstly it allows the pseudoinverse,
the logarithm of the pseudo-determinant, and the rank of the matrix
to be computed using one call to eigh instead of three.
Secondly it allows these functions to be computed in a way
that gives mutually compatible results.
All of the functions are computed with a common understanding as to
which of the eigenvalues are to be considered negligibly small.
The functions are designed to coordinate with scipy.linalg.pinvh()
but not necessarily with np.linalg.det() or with np.linalg.matrix_rank().
Parameters
----------
M : array_like
Symmetric positive semidefinite matrix (2-D).
cond, rcond : float, optional
Cutoff for small eigenvalues.
Singular values smaller than rcond * largest_eigenvalue are
considered zero.
If None or -1, suitable machine precision is used.
lower : bool, optional
Whether the pertinent array data is taken from the lower
or upper triangle of M. (Default: lower)
check_finite : bool, optional
Whether to check that the input matrices contain only finite
numbers. Disabling may give a performance gain, but may result
in problems (crashes, non-termination) if the inputs do contain
infinities or NaNs.
allow_singular : bool, optional
Whether to allow a singular matrix. (Default: True)
Notes
-----
The arguments are similar to those of scipy.linalg.pinvh().
"""
def __init__(self, M, cond=None, rcond=None, lower=True,
check_finite=True, allow_singular=True):
# Compute the symmetric eigendecomposition.
# Note that eigh takes care of array conversion, chkfinite,
# and assertion that the matrix is square.
s, u = scipy.linalg.eigh(M, lower=lower, check_finite=check_finite)
eps = _eigvalsh_to_eps(s, cond, rcond)
if np.min(s) < -eps:
raise ValueError('the input matrix must be positive semidefinite')
d = s[s > eps]
if len(d) < len(s) and not allow_singular:
raise np.linalg.LinAlgError('singular matrix')
s_pinv = _pinv_1d(s, eps)
U = np.multiply(u, np.sqrt(s_pinv))
# Initialize the eagerly precomputed attributes.
self.rank = len(d)
self.U = U
self.log_pdet = np.sum(np.log(d))
# Initialize an attribute to be lazily computed.
self._pinv = None
@property
def pinv(self):
if self._pinv is None:
self._pinv = np.dot(self.U, self.U.T)
return self._pinv
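# Illustrative usage (not in the original source): for the identity matrix
# the decomposition is trivial -- full rank and zero log pseudo-determinant.
#
#   >>> psd = _PSD(np.eye(2))
#   >>> psd.rank, psd.log_pdet
#   (2, 0.0)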
class multi_rv_generic(object):
"""
Class which encapsulates common functionality between all multivariate
distributions.
"""
def __init__(self, seed=None):
super(multi_rv_generic, self).__init__()
self._random_state = check_random_state(seed)
@property
def random_state(self):
""" Get or set the RandomState object for generating random variates.
This can be either None or an existing RandomState object.
If None (or np.random), use the RandomState singleton used by np.random.
If already a RandomState instance, use it.
If an int, use a new RandomState instance seeded with seed.
"""
return self._random_state
@random_state.setter
def random_state(self, seed):
self._random_state = check_random_state(seed)
def _get_random_state(self, random_state):
if random_state is not None:
return check_random_state(random_state)
else:
return self._random_state
class multi_rv_frozen(object):
"""
Class which encapsulates common functionality between all frozen
multivariate distributions.
"""
@property
def random_state(self):
return self._dist._random_state
@random_state.setter
def random_state(self, seed):
self._dist._random_state = check_random_state(seed)
_mvn_doc_default_callparams = """\
mean : array_like, optional
Mean of the distribution (default zero)
cov : array_like, optional
Covariance matrix of the distribution (default one)
allow_singular : bool, optional
Whether to allow a singular covariance matrix. (Default: False)
"""
_mvn_doc_callparams_note = \
"""Setting the parameter `mean` to `None` is equivalent to having `mean`
be the zero-vector. The parameter `cov` can be a scalar, in which case
the covariance matrix is the identity times that value, a vector of
diagonal entries for the covariance matrix, or a two-dimensional
array_like.
"""
_mvn_doc_frozen_callparams = ""
_mvn_doc_frozen_callparams_note = \
    """See class definition for a detailed description of parameters."""
mvn_docdict_params = {
'_mvn_doc_default_callparams': _mvn_doc_default_callparams,
'_mvn_doc_callparams_note': _mvn_doc_callparams_note,
'_doc_random_state': _doc_random_state
}
mvn_docdict_noparams = {
'_mvn_doc_default_callparams': _mvn_doc_frozen_callparams,
'_mvn_doc_callparams_note': _mvn_doc_frozen_callparams_note,
    '_doc_random_state': _doc_random_state
}
class multivariate_normal_gen(multi_rv_generic):
r"""
A multivariate normal random var
|
EvanMurawski/BeamAnalyzer
|
beamanalyzer/test/test.py
|
Python
|
mit
| 6,877
| 0.009597
|
__author__ = 'Evan Murawski'
import unittest
import backend
from backend.interactions import *
from backend.beam import Beam
import backend.solver as solver
from backend.solver import SolverError
import backend.shearmomentgenerator as shearmomentgenerator
from backend.shearmomentgenerator import Shear_Moment_Error
import matplotlib.pyplot as plt
import numpy as np
class TestBeamAnalyzer(unittest.TestCase):
"""Unit tests for the backend."""
ALMOST = 0.01
beams = []
STEP_SIZE = 0.001
def setUp(self):
"""Setup the tests. Creates various beams with known solutions and force moment plots."""
self.beams = []
self.beams.append(Beam(10))
self.beams[0].add_interaction(Force(5, -10))
self.beams[0].add_interaction(Force(0, 0, False))
self.beams[0].add_interaction(Force(10, 0, False))
self.beams.append(Beam(5.5))
self.beams[1].add_interaction(Force(0, 0, False))
self.beams[1].add_interaction(Moment(0, 0, False))
self.beams[1].add_interaction(Force(5.5, 10))
self.beams[1].add_interaction(Moment(4, 40))
self.beams.append(Beam(30))
self.beams[2].add_interaction(Force(0, 0, False))
self.beams[2].add_interaction(Force(20, 0, False))
self.beams[2].add_interaction(Dist_Force(0, -1, 10))
self.beams[2].add_interaction(Force(15, -20))
self.beams[2].add_interaction(Force(30, -10))
self.beams.append(Beam(10))
self.beams[3].add_interaction(Force(1, 7))
self.beams[3].add_interaction(Dist_Force(2, -5, 7))
self.beams[3].add_interaction(Moment(8, 10))
self.beams[3].add_interaction(Force(8, 0, False))
self.beams[3].add_interaction(Moment(0, 0, False))
#A very simple beam with one known force and two unknown forces
def test_beam0(self):
solver.solve(self.beams[0])
#Test solution
self.assertEqual(5, self.beams[0].interactions[0].magnitude)
self.assertEqual(5, self.beams[0].interactions[2].magnitude)
shear_moment = shearmomentgenerator.generate_numerical(self.beams[0], self.STEP_SIZE)
#Test moment
assert abs(shear_moment[0][1] - 0 ) < self.ALMOST
assert abs(shear_moment[int(10/self.STEP_SIZE/2)][1] - 25) < self.ALMOST
assert abs(shear_moment[int(10/self.STEP_SIZE/4)][1] - 25/2) < self.ALMOST
#Test shear
assert abs(shear_moment[1][0] - 5) < self.ALMOST
assert abs(shear_moment[int(10/self.STEP_SIZE/2) -1][0] - 5 ) < self.ALMOST
        assert abs(shear_moment[int(10/self.STEP_SIZE/2) +2][0] - (-5)) < self.ALMOST
assert abs(shear_moment[int(10/self.STEP_SIZE) -1][0] - (-5)) < self.ALMOST
def test_beam1(self):
solver.solve(self.beams[1])
#Test solution
self.assertEqual(-10, self.beams[1].interactions[0].magnitude)
self.assertEqual(-95, self.beams[1].interactions[1].magnitude)
        shear_moment = shearmomentgenerator.generate_numerical(self.beams[1], self.STEP_SIZE)
#Test shear
for item in shear_moment:
assert abs(item[0] - (-10)) < self.ALMOST
#Test moment
assert abs(shear_moment[0][1] - 95) < self.ALMOST
assert abs(shear_moment[int(4/self.STEP_SIZE - 1)][1] - 55 ) < self.ALMOST
assert abs(shear_moment[int(5.5/self.STEP_SIZE) - 1][1] - 0) < self.ALMOST
def test_beam2(self):
solver.solve(self.beams[2])
#Test the solution
self.assertEqual(7.5, self.beams[2].interactions[0].magnitude)
self.assertEqual(32.5, self.beams[2].interactions[3].magnitude)
shear_moment = shearmomentgenerator.generate_numerical(self.beams[2], self.STEP_SIZE)
#Test shear
assert abs(shear_moment[0][0] - 7.5) < self.ALMOST
assert abs(shear_moment[int(10/self.STEP_SIZE)][0] - (-2.5)) < self.ALMOST
assert abs(shear_moment[int(15/self.STEP_SIZE) - 1][0] - (-2.5)) < self.ALMOST
assert abs(shear_moment[int(15/self.STEP_SIZE) + 1][0] - (-22.5)) < self.ALMOST
assert abs(shear_moment[int(20/self.STEP_SIZE) - 1][0] - (-22.5)) < self.ALMOST
assert abs(shear_moment[int(20/self.STEP_SIZE) + 1][0] - (10)) < self.ALMOST
#Test moment
assert abs(shear_moment[0][1] - 0) < self.ALMOST
assert abs(shear_moment[int(10/self.STEP_SIZE)][1] - 25) < self.ALMOST
assert abs(shear_moment[int(15/self.STEP_SIZE)][1] - 12.5) < self.ALMOST
assert abs(shear_moment[int(20/self.STEP_SIZE)][1] - (-100)) < self.ALMOST
assert abs(shear_moment[int(30/self.STEP_SIZE) -1][1] - 0) < self.ALMOST
def test_beam3(self):
solver.solve(self.beams[3])
#Test the solution
self.assertEqual(-48.5, self.beams[3].interactions[0].magnitude)
self.assertEqual(18, self.beams[3].interactions[4].magnitude)
shear_moment = shearmomentgenerator.generate_numerical(self.beams[3], self.STEP_SIZE)
#Test shear
assert abs(shear_moment[0][0] - 0) < self.ALMOST
assert abs(shear_moment[int(1/self.STEP_SIZE) -1][0] - 0) < self.ALMOST
assert abs(shear_moment[int(1/self.STEP_SIZE) + 1][0] - 7) < self.ALMOST
assert abs(shear_moment[int(2/self.STEP_SIZE) -1][0] - 7) < self.ALMOST
assert abs(shear_moment[int(7/self.STEP_SIZE) +1][0] - (-18)) < self.ALMOST
assert abs(shear_moment[int(8/self.STEP_SIZE) -1][0] - (-18)) < self.ALMOST
assert abs(shear_moment[int(8/self.STEP_SIZE) +1][0] - (0)) < self.ALMOST
assert abs(shear_moment[int(10/self.STEP_SIZE) -1][0] - (0)) < self.ALMOST
#Test moment
assert abs(shear_moment[0][1] - 48.5) < self.ALMOST
assert abs(shear_moment[int(1/self.STEP_SIZE) - 1][1] - 48.5) < self.ALMOST
#Had to decrease criteria due to steep slope
assert abs(shear_moment[int(8/self.STEP_SIZE) - 1][1] - 10) < 0.02
assert abs(shear_moment[int(8/self.STEP_SIZE) +1][1] - 0) < self.ALMOST
assert abs(shear_moment[int(10/self.STEP_SIZE) -1][1] - 0) < self.ALMOST
def test_interaction_location_error(self):
with self.assertRaises(InteractionLocationError):
Force(-1, 3)
with self.assertRaises(InteractionLocationError):
self.beams[0].add_interaction(Force(13, 3))
def test_solver_error(self):
self.beams[0].add_interaction(Force(3, 0, False))
with self.assertRaises(SolverError):
solver.solve(self.beams[0])
def test_shear_moment_error(self):
with self.assertRaises(Shear_Moment_Error):
shearmomentgenerator.generate_numerical(self.beams[0], self.STEP_SIZE)
|
rven/odoo
|
addons/hr_contract/models/hr_contract.py
|
Python
|
agpl-3.0
| 11,699
| 0.005214
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import date
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models, _
from odoo.exceptions import ValidationError
from odoo.osv import expression
class Contract(models.Model):
_name = 'hr.contract'
_description = 'Contract'
_inherit = ['mail.thread', 'mail.activity.mixin']
name = fields.Char('Contract Reference', required=True)
active = fields.Boolean(default=True)
structure_type_id = fields.Many2one('hr.payroll.structure.type', string="Salary Structure Type")
employee_id = fields.Many2one('hr.employee', string='Employee', tracking=True, domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
department_id = fields.Many2one('hr.department', compute='_compute_employee_contract', store=True, readonly=False,
domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]", string="Department")
job_id = fields.Many2one('hr.job', compute='_compute_employee_contract', store=True, readonly=False,
domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]", string='Job Position')
date_start = fields.Date('Start Date', required=True, default=fields.Date.today, tracking=True,
help="Start date of the contract.")
date_end = fields.Date('End Date', tracking=True,
help="End date of the contract (if it's a fixed-term contract).")
trial_date_end = fields.Date('End of Trial Period',
help="End date of the trial period (if there is one).")
resource_calendar_id = fields.Many2one(
'resource.calendar', 'Working Schedule', compute='_compute_employee_contract', store=True, readonly=False,
default=lambda self: self.env.company.resource_calendar_id.id, copy=False, index=True,
domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
wage = fields.Monetary('Wage', required=True, tracking=True, help="Employee's monthly gross wage.")
notes = fields.Text('Notes')
state = fields.Selection([
('draft', 'New'),
('open', 'Running'),
('close', 'Expired'),
('cancel', 'Cancelled')
], string='Status', group_expand='_expand_states', copy=False,
tracking=True, help='Status of the contract', default='draft')
company_id = fields.Many2one('res.company', compute='_compute_employee_contract', store=True, readonly=False,
default=lambda self: self.env.company, required=True)
company_country_id = fields.Many2one('res.country', string="Company country", related='company_id.country_id', readonly=True)
"""
kanban_state:
* draft + green = "Incoming" state (will be set as Open once the contract has started)
* open + red = "Pending" state (will be set as Closed once the contract has ended)
* red = Shows a warning on the employees kanban view
"""
kanban_state = fields.Selection([
('normal', 'Grey'),
('done', 'Green'),
('blocked', 'Red')
], string='Kanban State', default='normal', tracking=True, copy=False)
currency_id = fields.Many2one(string="Currency", related='company_id.currency_id', readonly=True)
permit_no = fields.Char('Work Permit No', related="employee_id.permit_no", readonly=False)
visa_no = fields.Char('Visa No', related="employee_id.visa_no", readonly=False)
visa_expire = fields.Date('Visa Expire Date', related="employee_id.visa_expire", readonly=False)
hr_responsible_id = fields.Many2one('res.users', 'HR Responsible', tracking=True,
help='Person responsible for validating the employee\'s contracts.')
calendar_mismatch = fields.Boolean(compute='_compute_calendar_mismatch')
first_contract_date = fields.Date(related='employee_id.first_contract_date')
@api.depends('employee_id.resource_calendar_id', 'resource_calendar_id')
def _compute_calendar_mismatch(self):
for contract in self:
contract.calendar_mismatch = contract.resource_calendar_id != contract.employee_id.resource_calendar_id
def _expand_states(self, states, domain, order):
return [key for key, val in type(self).state.selection]
@api.depends('employee_id')
def _compute_employee_contract(self):
for contract in self.filtered('employee_id'):
contract.job_id = contract.employee_id.job_id
contract.department_id = contract.employee_id.department_id
contract.resource_calendar_id = contract.employee_id.resource_calendar_id
contract.company_id = contract.employee_id.company_id
@api.onchange('company_id')
def _onchange_company_id(self):
if self.company_id:
structure_types = self.env['hr.payroll.structure.type'].search([
'|',
('country_id', '=', self.company_id.country_id.id),
('country_id', '=', False)])
if structure_types:
self.structure_type_id = structure_types[0]
elif self.structure_type_id not in structure_types:
self.structure_type_id = False
@api.onchange('structure_type_id')
def _onchange_structure_type_id(self):
if self.structure_type_id.default_resource_calendar_id:
self.resource_calendar_id = self.structure_type_id.default_resource_calendar_id
@api.constrains('employee_id', 'state', 'kanban_state', 'date_start', 'date_end')
def _check_current_contract(self):
""" Two contracts in state [incoming | open | close] cannot overlap """
for contract in self.filtered(lambda c: (c.state not in ['draft', 'cancel'] or c.state == 'draft' and c.kanban_state == 'done') and c.employee_id):
domain = [
('id', '!=', contract.id),
('employee_id', '=', contract.employee_id.id),
'|',
('state', 'in', ['open', 'close']),
'&',
('state', '=', 'draft'),
('kanban_state', '=', 'done') # replaces incoming
]
if not contract.date_end:
start_domain = []
end_domain = ['|', ('date_end', '>=', contract.date_start), ('date_end', '=', False)]
else:
start_domain = [('date_start', '<=', contract.date_end)]
end_domain = ['|', ('date_end', '>', contract.date_start), ('date_end', '=', False)]
domain = expression.AND([domain, start_domain, end_domain])
if self.search_count(domain):
raise ValidationError(_('An employee can only have one contract at the same time. (Excluding Draft and Cancelled contracts)'))
@api.constrains('date_start', 'date_end')
def _check_dates(self):
if self.filtered(lambda c: c.date_end and c.date_start > c.date_end):
raise ValidationError(_('Contract start date must be earlier than contract end date.'))
@api.model
def update_state(self):
contracts = self.search([
('state', '=', 'open'), ('kanban_state', '!=', 'blocked'),
'|',
'&',
('date_end', '<=', fields.Date.to_string(date.today() + relativedelta(days=7))),
('date_end', '>=', fields.Date.to_string(date.today() + relativedelta(days=1))),
'&',
('visa_expire', '<=', fields.Date.to_string(date.today() + relativedelta(days=60))),
('visa_expire', '>=', fields.Date.to_string(date.today() + relativedelta(days=1))),
])
for contract in contracts:
contract.activity_schedule(
'mail.mail_activity_data_todo', contract.date_end,
_("The contract of %s is about to expire.", contract.employee_id.name),
user_id=contract.hr_responsible_id.id or self.env.uid)
        contracts.write({'kanban_state': 'blocked'})
self.search([
('state', '=', 'open'),
'|',
('date_end', '<=', fields.Date.to_string(date.today() + relativedelta(days=1))),
('visa_ex
|
threatstream/mnemosyne
|
webapi/admin.py
|
Python
|
gpl-3.0
| 2,846
| 0.001054
|
# Copyright (C) 2013 Johnny Vestergaard <jkv@unixcluster.dk>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import bottle
from bottle import get, post, route, static_file, view, HTTPError
import shared_state
import logging
logger = logging.getLogger(__name__)
@route('/unauth')
def login():
return HTTPError(401, 'Unauthorized')
@post('/login')
def login():
"""Authenticate users"""
username = post_get('username')
password = post_get('password')
logger.info("Authentication attempt with username: [{0}]".format(username))
if shared_state.auth.login(username, password):
return "You provided valid credentials"
else:
return HTTPError(401, 'Invalid credentials')
@route('/logout')
def logout():
shared_state.auth.logout(success_redirect='/unauth')
@route('/admin')
@view('admin_page')
def admin():
"""Only admin users can see this"""
shared_state.auth.require(role='admin', fail_redirect='/unauth')
return dict(
current_user=shared_state.auth.current_user,
users=shared_state.auth.list_users(),
roles=shared_state.auth.list_roles()
)
@post('/create_user')
def create_user():
try:
shared_state.auth.create_user(postd().username, postd().role, postd().password)
return dict(ok=True, msg='')
except Exception, e:
return dict(ok=False, msg=e.message)
@post('/delete_user')
def delete_user():
try:
shared_state.auth.delete_user(post_get('username'))
        return dict(ok=True, msg='')
except Exception, e:
return dict(ok=False, msg=e.message)
@post('/create_role')
def create_role():
try:
shared_state.auth.create_role(post_get('role'), post_get('level'))
return dict(ok=True, msg='')
except Exception, e:
return dict(ok=False, msg=e.message)
@post('/delete_role')
def delete_role():
try:
shared_state.auth.delete_role(post_get('role'))
        return dict(ok=True, msg='')
except Exception, e:
return dict(ok=False, msg=e.message)
def postd():
return bottle.request.forms
def post_get(name, default=''):
return bottle.request.POST.get(name, default).strip()
|
unpingco/csvkit
|
csvkit/convert/ndjs.py
|
Python
|
mit
| 1,769
| 0.005088
|
#!/usr/bin/env python
try:
from collections import OrderedDict
import json
except ImportError:
from ordereddict import OrderedDict
import simplejson as json
import itertools
import six
from csvkit import CSVKitWriter
def parse_object(obj, path=''):
"""
    Recursively parse JSON objects into a dictionary of paths/keys and values.
Inspired by JSONPipe (https://github.com/dvxhouse/jsonpipe).
"""
if isinstance(obj, dict):
iterator = obj.items()
elif isinstance(obj, (list, tuple)):
iterator = enumerate(obj)
else:
return { path.strip('/'): obj }
d = {}
for key, value in iterator:
key = six.text_type(key)
d.update(parse_object(value, path + key + '/'))
return d
def ndjson2csv(f, key=None, **kwargs):
"""
Convert a JSON document into CSV format.
Supports both JSON and "Newline-delimited JSON".
The top-level element of the input must be a list or a dictionary. If it is a dictionary, a key must be provided which is an item of the dictionary which contains a list.
"""
first_line = f.readline()
first_row = json.loads(first_line, object_pairs_hook=OrderedDict)
js = itertools.chain((first_row, ), (json.loads(l, object_pairs_hook=OrderedDict) for l in f))
fields = []
flat = []
for obj in js:
flat.append(parse_object(obj))
for key in obj.keys():
if key not in fields:
fields.append(key)
o = six.StringIO()
writer = CSVKitWriter(o)
writer.writerow(fields)
for i in flat:
row = []
for field in fields:
row.append(i.get(field, None))
        writer.writerow(row)
output = o.getvalue()
o.close()
return output
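# Illustrative usage (not in the original module): given newline-delimited
# JSON on a file-like object (exact CSV quoting/line endings may differ):
#
#   >>> import io
#   >>> f = io.StringIO(u'{"a": 1, "b": 2}\n{"a": 3, "b": 4}\n')
#   >>> print(ndjson2csv(f))
#   a,b
#   1,2
#   3,4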
|
usersource/anno
|
anno_gec_server/api/anno_api.py
|
Python
|
mpl-2.0
| 18,371
| 0.003212
|
"""
API implemented using Google Cloud Endpoints on :class:`.Anno` model
.. http:get:: /anno/1.0/anno/(id)
``anno.anno.get`` - Get the details for a specific anno
:param int id: id of the anno
:returns: details of the anno :class:`.AnnoResponseMessage`
.. http:get:: /anno/1.0/anno
``anno.anno.list`` - Get list of annos
:param str cursor: resumption point in a query
:param int limit: number of annos to be returned
:param str select: fields that you want to retrieve
:param str app: name of app for which we need annos
:param str query_type: one of the :class:`.AnnoQueryType`
:param int community: id of the community for which annos to be returned,
required only when query by **COMMUNITY** of :class:`.AnnoQueryType`
:returns: a list of annos :class:`.AnnoListMessage`
.. http:post:: /anno/1.0/anno
``anno.anno.insert`` - Insert an anno
:param: :class:`.AnnoMessage`
:returns: details of the anno :class:`.AnnoResponseMessage`
.. http:post:: /anno/1.0/anno/(id)
``anno.anno.merge`` - Edit an specific anno
:param int id: id of the anno
:param: :class:`.AnnoMergeMessage`
:returns: details of the anno :class:`.AnnoResponseMessage`
"""
__author__ = 'topcircler'
import datetime
import logging
import re
import endpoints
from google.appengine.datastore.datastore_query import Cursor
from google.appengine.ext.db import BadValueError
from protorpc import message_types
from protorpc import messages
from protorpc import remote
from message.anno_api_messages import AnnoMessage
from message.anno_api_messages import AnnoMergeMessage
from message.anno_api_messages import AnnoListMessage
from message.anno_api_messages import AnnoDashboardListMessage
from message.anno_api_messages import AnnoResponseMessage
from message.anno_api_messages import UserUnreadMessage
from message.anno_api_messages import AnnoTeamNotesMetadataMessage
from message.anno_api_messages import AnnoMentionsResponseMessage
from message.user_message import UserMessage
from message.user_message import UserListMessage
from model.anno import Anno
from model.vote import Vote
from model.flag import Flag
from model.community import Community
from model.follow_up import FollowUp
from model.userannostate import UserAnnoState
from model.tags import Tag
from model.appinfo import AppInfo
from model.user import User
from helper.settings import anno_js_client_id
from helper.utils import auth_user
from helper.utils import put_search_document
from helper.utils import extract_tags_from_text
from helper.utils import parseTeamNotesForHashtags
from helper.activity_push_notifications import ActivityPushNotifications
from helper.utils_enum import AnnoQueryType, AnnoActionType
from helper.utils_enum import SearchIndexName
@endpoints.api(name='anno', version='1.0', description='Anno API',
allowed_client_ids=[endpoints.API_EXPLORER_CLIENT_ID, anno_js_client_id])
class AnnoApi(remote.Service):
"""
Class which defines Anno API v1.
"""
anno_with_id_resource_container = endpoints.ResourceContainer(
message_types.VoidMessage,
id=messages.IntegerField(2, required=True),
team_key=messages.StringField(3),
team_notes=messages.StringField(4),
tagged_users=messages.StringField(5, repeated=True)
)
anno_list_resource_container = endpoints.ResourceContainer(
message_types.VoidMessage,
cursor=messages.StringField(2),
limit=messages.IntegerField(3),
select=messages.StringField(4),
app=messages.StringField(5),
query_type=messages.StringField(6),
community=messages.IntegerField(7),
is_plugin=messages.BooleanField(8),
team_key=messages.StringField(9),
anno_id=messages.IntegerField(10)
)
anno_update_resource_container = endpoints.ResourceContainer(
AnnoMergeMessage,
id=messages.IntegerField(2, required=True),
app_name=messages.StringField(3),
community_name=messages.StringField(4),
platform_type=messages.StringField(5)
)
anno_search_resource_container = endpoints.ResourceContainer(
search_string=messages.StringField(1, required=False),
app_name=messages.StringField(2, required=False),
order_type=messages.StringField(3, required=True),
cursor=messages.StringField(4), # can't make it work, not sure why. may check it in the future.
limit=messages.IntegerField(5),
offset=messages.IntegerField(6),
only_my_apps=messages.BooleanField(7)
)
    anno_user_email_resource_container = endpoints.ResourceContainer(
user_email=messages.StringField(1),
team_key=messages.StringField(2)
)
@endpoints.method(anno_with_id_resource_container, AnnoResponseMessage, path='anno/{id}',
http_method='GET', name='anno.get')
def anno_get(self, request):
"""
Exposes an API endpoint to get an anno detail by the specified id.
"""
try:
user = auth_user(self.request_state.headers)
except Exception:
user = None
if request.id is None:
raise endpoints.BadRequestException('id field is required.')
anno = Anno.get_by_id(request.id)
if anno is None:
raise endpoints.NotFoundException('No anno entity with the id "%s" exists.' % request.id)
# set anno basic properties
anno_resp_message = anno.to_response_message(user, list_message=False)
# set anno association with followups
followups = FollowUp.find_by_anno(anno)
followup_messages = [ entity.to_message(team_key=request.team_key) for entity in followups ]
anno_resp_message.followup_list = followup_messages
# set anno association with votes/flags
# if current user exists, then fetch vote/flag.
if user is not None:
anno_resp_message.is_my_vote = Vote.is_belongs_user(anno, user)
anno_resp_message.is_my_flag = Flag.is_belongs_user(anno, user)
# update last_read of UserAnnoState
UserAnnoState.update_last_read(user=user, anno=anno)
return anno_resp_message
@endpoints.method(anno_list_resource_container, AnnoListMessage, path='anno',
http_method='GET', name='anno.list')
def anno_list(self, request):
"""
Exposes an API endpoint to retrieve a list of anno.
"""
user = auth_user(self.request_state.headers)
limit = 10
if request.limit is not None:
limit = request.limit
is_plugin = request.is_plugin or False
curs = None
if request.cursor is not None:
try:
curs = Cursor(urlsafe=request.cursor)
except BadValueError:
raise endpoints.BadRequestException('Invalid cursor %s.' % request.cursor)
select_projection = None
if request.select is not None:
select_projection = request.select.split(',')
if request.query_type == AnnoQueryType.CREATED:
return Anno.query_by_app_by_created(request.app, limit, select_projection, curs, user)
elif request.query_type == AnnoQueryType.VOTE_COUNT:
return Anno.query_by_vote_count(request.app, user)
elif request.query_type == AnnoQueryType.FLAG_COUNT:
return Anno.query_by_flag_count(request.app, user)
elif request.query_type == AnnoQueryType.ACTIVITY_COUNT:
return Anno.query_by_activity_count(request.app, user)
elif request.query_type == AnnoQueryType.LAST_ACTIVITY:
return Anno.query_by_last_activity(request.app, user)
        elif request.query_type == AnnoQueryType.COUNTRY:
return Anno.query_by_country(request.app, user)
elif request.query_type == AnnoQueryType.COMMUNITY:
community = Community.get_by_id(request.community)
            return Anno.query_by_community(community, limit, select_projection, curs, user)
elif request.query_type == AnnoQueryType.APP:
app = AppInfo.get
|
danilobellini/dose
|
dose/terminal.py
|
Python
|
gpl-3.0
| 7,486
| 0.002404
|
"""Dose GUI for TDD: colored terminal."""
from __future__ import print_function
import os, sys, subprocess, signal, colorama
from .misc import attr_item_call_auto_cache
DEFAULT_TERMINAL_WIDTH = 80
class TerminalSize(object):
r"""
Console/terminal width information getter.
There should be only one instance for this class, and it's the
``terminal_size`` object in this module, whose ``width``
attribute has the desired terminal width. The ``usable_width``
    read-only property has the width that can be safely used with a
    ``"\n"`` at the end without skipping a line.
The ``retrieve_width`` method can be called to update the ``width``
attribute, but there's also a SIGWINCH (SIGnal: WINdow CHanged)
signal handler updating the width if that's a valid signal in the
operating system.
Several strategies for getting the terminal width are combined
in this class, all of them are tried until a width is found. When
a strategy returns ``0`` or ``None``, it means it wasn't able to
collect the console width.
Note: The ``terminal_size`` object should have been created in the
main thread of execution.
"""
width = DEFAULT_TERMINAL_WIDTH # Fallback
# Strategies are (method name without the "from_" prefix, arguments list)
if sys.platform == "win32":
strategies = [
("windows_handle", [subprocess.STD_INPUT_HANDLE]),
("windows_handle", [subprocess.STD_OUTPUT_HANDLE]),
("windows_handle", [subprocess.STD_ERROR_HANDLE]),
]
@property
def usable_width(self):
return self.width - 1
else: # Linux, OS X and Cygwin
strategies = [
("io_control", [sys.stdin]),
("io_control", [sys.stdout]),
("io_control", [sys.stderr]),
("tty_io_control", []),
]
@property
def usable_width(self):
return self.width
strategies.extend([
("tput_subprocess", []), # Cygwin "tput" works on other Windows consoles
("environment_variable", []),
])
def __init__(self):
try:
signal.signal(signal.SIGWINCH, self.retrieve_width)
except (AttributeError, ValueError): # There's no SIGWINCH in Windows
pass
self.retrieve_width()
def retrieve_width(self, signum=None, frame=None):
"""
Stores the terminal width into ``self.width``, if possible.
This function is also the SIGWINCH event handler.
"""
for method_name, args in self.strategies:
method = getattr(self, "from_" + method_name)
width = method(*args)
if width and width > 0:
self.width = width
break # Found!
os.environ["COLUMNS"] = str(self.width) # A hint for the next test job
@staticmethod
def from_environment_variable():
"""Gets the width from the ``COLUMNS`` environment variable."""
return int(os.environ.get("COLUMNS", "0"))
@staticmethod
def from_io_control(fobj):
"""
Call TIOCGWINSZ (Terminal I/O Control to Get the WINdow SiZe)
where ``fobj`` is a file object (e.g. ``sys.stdout``),
returning the terminal width assigned to that file.
        See the ``ioctl``, ``ioctl_list`` and ``tty_ioctl`` man pages
for more information.
"""
import fcntl, termios, array
winsize = array.array("H", [0] * 4) # row, col, xpixel, ypixel
if not fcntl.ioctl(fobj.fileno(), termios.TIOCGWINSZ, winsize, True):
return winsize[1]
@classmethod
def from_tty_io_control(cls):
"""Calls cls.from_io_control for the tty file descriptor."""
with open(os.ctermid(), "rb") as fobj:
return cls.from_io_control(fobj)
@staticmethod
def from_tput_subprocess():
"""
Gets the terminal width from the ``tput`` shell command,
usually available in Linux, OS X and Cygwin (Windows).
"""
try:
            # Windows requires shell=True to avoid the tput extension
return int(subprocess.check_output("tput cols", shell=True))
except (OSError, # tput not found
subprocess.CalledProcessError): # tput didn't return 0
return 0
@staticmethod
def from_windows_handle(std_handle):
"""
Use the Windows Console Handles API to get the console width,
where ``std_handle`` is the WINAPI ``GetStdHandle`` input
(e.g. STD_INPUT_HANDLE).
https://msdn.microsoft.com/library/windows/desktop/ms682075
"""
from ctypes import windll, c_ushort
# https://msdn.microsoft.com/library/windows/desktop/ms683231
handle = windll.kernel32.GetStdHandle(std_handle)
# https://msdn.microsoft.com/library/windows/desktop/ms682093
info = (c_ushort * 11)() # It's a CONSOLE_SCREEN_BUFFER_INFO:
# xsize, ysize, | COORD dwSize
# xcursor, ycursor, | COORD dwCursorPosition
# attributes, | WORD wAttributes
# left, top, right, bottom, | SMALL_RECT srWindow
# xmax, ymax | COORD dwMaximumWindowSize
# https://msdn.microsoft.com/library/windows/desktop/ms683171
if windll.kernel32.GetConsoleScreenBufferInfo(handle, info):
return info[7] - info[5] + 1
terminal_size = TerminalSize()
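For reference, a minimal sketch of how the singleton above can be used; the call sites below are assumptions, not taken from dose itself:
# terminal_size.width is refreshed on SIGWINCH where available; a manual
# refresh and a full-width ruler would look like:
terminal_size.retrieve_width()
ruler = "-" * terminal_size.usable_width  # safe to print with a trailing "\n"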
@attr_item_call_auto_cache
def fg(color):
"""
Foreground color formatter function factory.
Each function casts from a unicode string to a colored bytestring
with the respective foreground color and foreground reset ANSI
escape codes. You can also use the ``fg.color`` or ``fg[color]``
directly as attributes/items.
The colors are the names of the ``colorama.Fore`` attributes
(case insensitive). For more information, see:
https://pypi.python.org/pypi/colorama
https://en.wikipedia.org/wiki/ANSI_escape_code#Colors
"""
ansi_code = [getattr(colorama.Fore, color.upper()), colorama.Fore.RESET]
return lambda msg: msg.join(ansi_code)
@attr_item_call_auto_cache
def log(color):
"""
Function factory for foreground-colored loggers (printers).
The ``log.color(msg)`` and ``print(fg.color(msg))`` are the
same. On Windows, the ANSI escape codes for colors are mapped to
``SetConsoleTextAttribute`` Windows Console Handles API function
calls by the ``colorama`` package.
https://msdn.microsoft.com/library/windows/desktop/ms686047
The colorama initialization is on the ``dose.__main__`` module.
See ``fg`` for more information.
"""
foreground = fg(color)
return lambda msg: print(foreground(msg))
@attr_item_call_auto_cache
def hr(color):
"""
Colored horizontal rule printer/logger factory.
The resulting function prints an entire terminal row with the given
symbol repeated. It's a terminal version of the HTML ``<hr/>``.
"""
logger = log(color)
return lambda symbol: logger(symbol * terminal_size.usable_width)
def centralize(msg):
"""Add spaces to centralize the string in the terminal."""
return msg.center(terminal_size.usable_width)
@attr_item_call_auto_cache
def clog(color):
"""Same to ``log``, but this one centralizes the message first."""
logger = log(color)
return lambda msg: logger(centralize(msg).rstrip())
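As a usage sketch of the four factories above, assuming the module is importable as dose.terminal and that colorama was initialized (which the ``log`` docstring says happens in dose.__main__):
# Hypothetical calls, not part of the module itself.
from dose.terminal import fg, log, hr, clog
print(fg.red(u"error"))     # the string wrapped in red + reset ANSI codes
log.green(u"tests passed")  # same as print(fg.green(u"tests passed"))
hr.yellow("=")              # a full-width row of "=" in yellow
clog.cyan(u"Dose")          # centralized, colored message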
brendan-w/python-OBD | obd/utils.py | Python | gpl-2.0 | 6,075 | 0.000988
# -*- coding: utf-8 -*-
########################################################################
# #
# python-OBD: A python OBD-II serial module derived from pyobd #
# #
# Copyright 2004 Donour Sizemore (donour@uchicago.edu) #
# Copyright 2009 Secons Ltd. (www.obdtester.com) #
# Copyright 2009 Peter J. Creath #
# Copyright 2015 Brendan Whitfield (bcw7044@rit.edu) #
# #
########################################################################
#
#
# utils.py #
# #
# This file is part of python-OBD (a derivative of pyOBD) #
# #
# python-OBD is free software: you can redistribute it and/or modify   #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 2 of the License, or #
# (at your option) any later version. #
# #
# python-OBD is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with python-OBD. If not, see <http://www.gnu.org/licenses/>. #
# #
########################################################################
import errno
import glob
import logging
import string
import sys
import serial
logger = logging.getLogger(__name__)
class OBDStatus:
""" Values for the connection status flags """
NOT_CONNECTED = "Not Connected"
ELM_CONNECTED = "ELM Connected"
OBD_CONNECTED = "OBD Connected"
CAR_CONNECTED = "Car Connected"
class BitArray:
"""
Class for representing bitarrays (inefficiently)
There's a nice C-optimized lib for this: https://github.com/ilanschnell/bitarray
but python-OBD doesn't use it enough to be worth adding the dependency.
But, if this class starts getting used too much, we should switch to that lib.
"""
def __init__(self, _bytearray):
self.bits = ""
for b in _bytearray:
v = bin(b)[2:]
self.bits += ("0" * (8 - len(v))) + v # pad it with zeros
def __getitem__(self, key):
if isinstance(key, int):
if key >= 0 and key < len(self.bits):
return self.bits[key] == "1"
else:
return False
elif isinstance(key, slice):
bits = self.bits[key]
if bits:
return [b == "1" for b in bits]
else:
return []
def num_set(self):
return self.bits.count("1")
def num_cleared(self):
return self.bits.count("0")
def value(self, start, stop):
bits = self.bits[start:stop]
if bits:
return int(bits, 2)
else:
return 0
def __len__(self):
return len(self.bits)
def __str__(self):
return self.bits
def __iter__(self):
return [b == "1" for b in self.bits].__iter__()
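A short hand-checked illustration of BitArray (0xA0 == 0b10100000); this example is editorial, not part of python-OBD:
ba = BitArray(bytearray([0xA0]))
assert str(ba) == "10100000"
assert ba[0] is True and ba[1] is False        # integer indexing
assert ba[0:4] == [True, False, True, False]   # slicing returns a list of bools
assert ba.value(0, 4) == 10                    # int("1010", 2)
assert ba.num_set() == 2 and len(ba) == 8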
def bytes_to_int(bs):
""" converts a big-endian byte array into a single integer """
v = 0
p = 0
for b in reversed(bs):
v += b * (2 ** p)
p += 8
return v
def bytes_to_hex(bs):
h = ""
for b in bs:
bh = hex(b)[2:]
h += ("0" * (2 - len(bh))) + bh
return h
def twos_comp(val, num_bits):
"""compute the 2's compliment of int value val"""
if ((val & (1 << (num_bits - 1))) != 0):
val = val - (1 << num_bits)
return val
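Hand-checked examples of the two numeric helpers above (editorial, not part of the module):
assert bytes_to_int([0x01, 0x02]) == 258   # big-endian: 0x0102
assert twos_comp(0xFF, 8) == -1            # sign bit set, so 255 - (1 << 8)
assert twos_comp(0x7F, 8) == 127           # sign bit clear, value unchanged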
def isHex(_hex):
return all([c in string.hexdigits for c in _hex])
def contiguous(l, start, end):
""" checks that a list of integers are consequtive """
if not l:
return False
if l[0] != start:
return False
if l[-1] != end:
return False
    # for consecutiveness, look at the integers in pairs
pairs = zip(l, l[1:])
if not all([p[0] + 1 == p[1] for p in pairs]):
return False
return True
def try_port(portStr):
"""returns boolean for port availability"""
try:
s = serial.Serial(portStr)
s.close() # explicit close 'cause of delayed GC in java
return True
except serial.SerialException:
pass
except OSError as e:
if e.errno != errno.ENOENT: # permit "no such file or directory" errors
raise e
return False
def scan_serial():
"""scan for available ports. return a list of serial names"""
available = []
possible_ports = []
if sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
possible_ports += glob.glob("/dev/rfcomm[0-9]*")
possible_ports += glob.glob("/dev/ttyUSB[0-9]*")
elif sys.platform.startswith('win'):
possible_ports += ["\\.\COM%d" % i for i in range(256)]
elif sys.platform.startswith('darwin'):
exclude = [
'/dev/tty.Bluetooth-Incoming-Port',
'/dev/tty.Bluetooth-Modem'
]
possible_ports += [port for port in glob.glob('/dev/tty.*') if port not in exclude]
# possible_ports += glob.glob('/dev/pts/[0-9]*') # for obdsim
for port in possible_ports:
if try_port(port):
available.append(port)
return available
evernote/pootle | pootle/apps/staticpages/urls.py | Python | gpl-2.0 | 1,786 | 0.00112
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012-2013 Zuza Software Foundation
# Copyright 2013 Evernote Corporation
#
# This file is part of Pootle.
#
# Pootle is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from django.conf.urls import patterns, url
from .views import (AdminTemplateView, PageCreateView, PageDeleteView,
PageUpdateView)
urlpatterns = patterns('',
url(r'^legal/agreement/$',
'staticpages.views.legal_agreement',
name='pootle-staticpages-legal-agreement'),
url(r'^(?P<virtual_path>.+)/$',
'staticpages.views.display_page',
name='pootle-staticpages-display'),
)
admin_patterns = patterns('',
url(r'^$',
AdminTemplateView.as_view(),
name='pootle-staticpages'),
url(r'^(?P<page_type>[^/]+)/add/?$',
PageCreateView.as_view(),
name='pootle-staticpages-create'),
url(r'^(?P<page_type>[^/]+)/(?P<pk>\d+)/?$',
PageUpdateView.as_view(),
name='pootle-staticpages-edit'),
url(r'^(?P<page_type>[^/]+)/(?P<pk>\d+)/delete/?$',
PageDeleteView.as_view(),
name='pootle-staticpages-delete'),
)
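As a sketch of how these named patterns resolve elsewhere in Pootle; the virtual path below is made up, and django.core.urlresolvers matches the Django era of the patterns() API used here:
from django.core.urlresolvers import reverse
url = reverse('pootle-staticpages-display',
              kwargs={'virtual_path': 'about/privacy'})
# -> '/about/privacy/' relative to wherever these urlpatterns are included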
nborkowska/kpir2 | kpir2/users/migrations/0004_auto_20160403_2348.py | Python | gpl-3.0 | 581 | 0.001721
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-03 23:48
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
        ('users', '0003_auto_20160403_2058'),
]
operations = [
migrations.AlterField(
model_name='usercompanyinfo',
name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
elyezer/robottelo | tests/foreman/ui/test_isodownload.py | Python | gpl-3.0 | 4,978 | 0
"""Test class for ISO downloads UI
:Requirement: Isodownload
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: UI
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from robottelo.decorators import run_only_on, stubbed, tier1
from robottelo.test import UITestCase
class ISODownloadTestCase(UITestCase):
"""Test class for iso download feature"""
@stubbed()
@run_only_on('sat')
@tier1
def test_positive_download(self):
"""Downloading ISO from export
:id: 47f20df7-f6f3-422b-b57b-3a5ef9cf62ad
:Steps:
1. find out the location where all iso's are kept
2. check whether a valid iso can be downloaded
:expectedresults: iso file is properly downloaded on your satellite
6 system
:caseautomation: notautomated
:CaseImportance: Critical
"""
@stubbed()
@run_only_on('sat')
@tier1
def test_positive_upload(self):
"""Uploadng the iso successfully to the sat6 system
:id: daf87a68-7c61-46f1-b4cc-021476080b6b
:Steps:
1. download the iso
2. upload it to sat6 system
:expectedresults: uploading iso to satellite6 is successful
:caseautomation: notautomated
:CaseImportance: Critical
"""
@stubbed()
@run_only_on('sat')
@tier1
def test_positive_mount(self):
"""Mounting iso to directory accessible to satellite6 works
:id: 44d3c8fa-c01f-438c-b83e-8f6894befbbf
:Steps:
1. download the iso
2. upload it to sat6 system
3. mount it a local sat6 directory
:expectedresults: iso is mounted to sat6 local directory
:caseautomation: notautomated
:CaseImportance: Critical
"""
@stubbed()
@run_only_on('sat')
@tier1
def test_positive_validate_cdn_url(self):
"""Validate that cdn url to file path works
:id: 00157f61-1557-48a7-b7c9-6dac726eff94
:Steps:
1. after mounting the iso locally try to update the cdn url
2. the path should be validated
:expectedresults: cdn url path is validated
:caseautomation: notautomated
:CaseImportance: Critical
"""
@stubbed()
@run_only_on('sat')
@tier1
def test_positive_check_message(self):
"""Check if proper message is displayed after successful upload
:id: 5ed31a26-b902-4352-900f-bb38eac95511
:Steps:
1. mount the iso to sat6
2. update the cdn url with file path
3. check if proper message is displayed
:expectedresults: Asserting the message after successful upload
:caseautomation: notautomated
:CaseImportance: Critical
"""
@stubbed()
@run_only_on('sat')
@tier1
def test_positive_enable_repo(self):
"""Enable the repositories
:id: e33e2796-0554-419f-b5a1-3e2c8e23e950
:Steps:
1. mount iso to directory
2. update cdn url
3. upload manifest
4. try to enable redhat repositories
:expectedresults: Redhat repositories are enabled
:caseautomation: notautomated
:CaseImportance: Critical
"""
@stubbed()
@run_only_on('sat')
@tier1
def test_positive_validate_checkboxes(self):
"""Check if enabling the checkbox works
:id: 10b19405-f82e-4f95-869d-28d91cac1e6f
:Steps:
1. mount iso to directory
2. update cdn url
3. upload manifest
4. Click the checkbox to enable redhat repositories
5. redhat repository enabled
:expectedresults: Checkbox functionality works
:caseautomation: notautomated
:CaseImportance: Critical
"""
@stubbed()
@run_only_on('sat')
@tier1
def test_positive_sync_repos(self):
"""Sync repos to local iso's
:id: 96266438-4a52-4222-b573-96bd7cde1700
:Steps:
1. mount iso to directory
2. update cdn url
3. upload manifest
4. try to enable redhat repositories
5. sync the repos
:expectedresults: Repos are synced after upload
:caseautomation: notautomated
:CaseImportance: Critical
"""
@stubbed()
@run_only_on('sat')
@tier1
def test_positive_disable_repo(self):
"""Disabling the repo works
:id: 075700a7-fda0-41db-b9b7-3d6b29f63784
:Steps:
1. mount iso to directory
2. update cdn url
3. upload manifest
4. try to enable redhat repositories
5. sync the contents
6. try disabling the repository
:expectedresults: Assert disabling the repo
:caseautomation: notautomated
:CaseImportance: Critical
"""
thegooglecodearchive/marave | marave/editor/spelltextedit.py | Python | gpl-2.0 | 13,856 | 0.009454
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__license__ = 'MIT'
__copyright__ = '2009, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'
import re
import sys
import os
import codecs
# Spell checker support
try:
import enchant
except ImportError:
enchant = None
# Syntax highlight support
try:
from highlight.SyntaxHighlighter import srchiliteqt
except ImportError:
srchiliteqt = None
import numberbar
from PyQt4.Qt import QAction
from PyQt4.Qt import QApplication
from PyQt4.Qt import QEvent
from PyQt4.Qt import QMenu
from PyQt4.Qt import QMouseEvent
from PyQt4.Qt import QTextEdit
from PyQt4.Qt import QSyntaxHighlighter
from PyQt4.Qt import QTextCharFormat
from PyQt4.Qt import QTextCursor
from PyQt4.Qt import Qt
from PyQt4.Qt import QColor
from PyQt4.Qt import QPalette
from PyQt4.QtCore import pyqtSignal
from PyQt4 import QtGui, QtCore
from widgets import SearchWidget
from widgets import SearchReplaceWidget
from widgets import GotoLineWidget
class Editor(QTextEdit):
'''A QTextEdit-based editor that supports syntax highlighting and
spellchecking out of the box'''
langChanged = QtCore.pyqtSignal(QtCore.QString)
def __init__(self, *args):
QTextEdit.__init__(self, *args)
self.lastFolder = QtGui.QDesktopServices.storageLocation(QtGui.QDesktopServices.DocumentsLocation)
self.docName = None
self.initDict()
def gotoLineWidget(self):
return GotoLineWidget(self)
def searchWidget(self):
'''Creates a search widget hooked to this editor (parent is None)'''
return SearchWidget(self)
def searchReplaceWidget(self):
'''Creates a search/replace widget hooked to this editor (parent is None)'''
return SearchReplaceWidget(self)
def initDict(self, lang=None):
if enchant:
if lang==None:
# Default dictionary based on the current locale.
try:
self.dict = enchant.Dict()
except enchant.DictNotFoundError:
self.dict=None
else:
self.dict = enchant.Dict(lang)
else:
self.dict=None
self.highlighter = SpellHighlighter(self.document())
if self.dict:
self.highlighter.setDict(self.dict)
self.highlighter.rehighlight()
def killDict(self):
print 'Disabling spellchecker'
self.highlighter.setDocument(None)
self.dict=None
def mousePressEvent(self, event):
if event.button() == Qt.RightButton:
# Rewrite the mouse event to a left button event so the cursor is
# moved to the location of the pointer.
event = QMouseEvent(QEvent.MouseButtonPress, event.pos(),
Qt.LeftButton, Qt.LeftButton, Qt.NoModifier)
QTextEdit.mousePressEvent(self, event)
def contextMenuEvent(self, event):
popup_menu = self.createStandardContextMenu()
pal=QApplication.instance().palette()
# This fixes Issue 20
menu_style=""" * { background-color: %s;
color: %s;}
"""%(unicode(pal.color(QPalette.Button).name()),
unicode(pal.color(QPalette.WindowText).name()))
popup_menu.setStyleSheet(menu_style)
# Select the word under the cursor.
cursor = self.textCursor()
cursor.select(QTextCursor.WordUnderCursor)
self.setTextCursor(cursor)
# Check if the selected word is misspelled and offer spelling
# suggestions if it is.
if enchant and self.dict:
if self.textCursor().hasSelection():
text = unicode(self.textCursor().selectedText())
if not self.dict.check(text):
spell_menu = QMenu(QtCore.QCoreApplication.translate('app','Spelling Suggestions'), self)
spell_menu.setStyleSheet(menu_style)
for word in self.dict.suggest(text):
action = SpellAction(word, spell_menu)
action.correct.connect(self.correctWord)
spell_menu.addAction(action)
                # Only add the spelling suggestions to the menu
                # if there are suggestions.
if len(spell_menu.actions()) != 0:
popup_menu.insertSeparator(popup_menu.actions()[0])
popup_menu.insertMenu(popup_menu.actions()[0], spell_menu)
# FIXME: add change dict and disable spellcheck options
popup_menu.exec_(event.globalPos())
def correctWord(self, word):
'''
Replaces the selected text with word.
'''
cursor = self.textCursor()
cursor.beginEditBlock()
cursor.removeSelectedText()
cursor.insertText(word)
cursor.endEditBlock()
def save(self):
if not self.docName:
self.saveas()
else:
try:
f = QtCore.QFile(self.docName)
if not f.open(QtCore.QIODevice.WriteOnly | QtCore.QIODevice.Truncate):
QtGui.QMessageBox.information(self.parent(), "Error - Marave",
"Error saving %s."%self.docName)
else:
stream = QtCore.QTextStream(f)
encoded = stream.codec().fromUnicode(self.toPlainText())
f.write(encoded)
f.flush()
f.close()
#f=codecs.open(self.docName,'w+','utf-8')
#f.truncate()
#f.write(unicode(self.toPlainText()))
#f.close()
self.document().setModified(False)
# FIXME: doesn't belong in this class
try:
self.parent().notify(self.tr('Document saved'))
except:
pass
except Exception, e:
QtGui.QMessageBox.information(self.parent(), "Error - Marave",
"Error saving %s."%self.docName)
def saveas(self):
QtCore.QCoreApplication.instance().setOverrideCursor(QtCore.Qt.ArrowCursor)
fdialog = QtGui.QFileDialog(self.parent(), self.tr("Save as"), self.lastFolder)
fdialog.setFileMode(fdialog.AnyFile)
fdialog.setAcceptMode(fdialog.AcceptSave)
fname = None
if fdialog.exec_():
fname = unicode(fdialog.selectedFiles()[0])
print 'FNAME:', fname
#fname=unicode(QtGui.QFileDialog.getSaveFileName(self.parent(), self.tr("Save as"), self.lastFolder))
QtCore.QCoreApplication.instance().restoreOverrideCursor()
if fname:
self.docName=fname
self.save()
def new(self):
QtCore.QCoreApplication.instance().setOverrideCursor(QtCore.Qt.ArrowCursor)
try:
if self.document().isModified():
r=QtGui.QMessageBox.question(self.parent(), self.tr("New Document"), self.tr("The document \"%s\" has been modified."\
"\nDo you want to save your changes or discard them?")%self.docName or "UNNAMED",
QtGui.QMessageBox.Save|QtGui.QMessageBox.Discard|QtGui.QMessageBox.Cancel,QtGui.QMessageBox.Cancel)
if r==QtGui.QMessageBox.Save:
self.save()
elif r==QtGui.QMessageBox.Discard:
self.docName=''
self.setPlainText('')
self.parent().setWindowFilePath('Untitled')
else:
self.docName=''
self.setPlainText('')
self.parent().setWindowFilePath('Untitled')
except:
pass
QtCore.QCoreApplication.instance().restoreOverrideCursor()
def open(self, fname=None):
self.new()
if self.docName:
return
if not fname:
QtCore.QCoreApplication.instance().setOverrideCursor(QtCore.Qt.ArrowCursor)
fdialog = QtGu
Ademan/NumPy-GSoC | numpy/core/fromnumeric.py | Python | bsd-3-clause | 71,769 | 0.000195
# Module containing non-deprecated functions borrowed from Numeric.
__docformat__ = "restructuredtext en"
# functions that are now methods
__all__ = ['take', 'reshape', 'choose', 'repeat', 'put',
'swapaxes', 'transpose', 'sort', 'argsort', 'argmax', 'argmin',
'searchsorted', 'alen',
'resize', 'diagonal', 'trace', 'ravel', 'nonzero', 'shape',
'compress', 'clip', 'sum', 'product', 'prod', 'sometrue', 'alltrue',
'any', 'all', 'cumsum', 'cumproduct', 'cumprod', 'ptp', 'ndim',
'rank', 'size', 'around', 'round_', 'mean', 'std', 'var', 'squeeze',
'amax', 'amin',
]
import multiarray as mu
import umath as um
import numerictypes as nt
from numeric import asarray, array, asanyarray, concatenate
_dt_ = nt.sctype2char
import types
try:
_gentype = types.GeneratorType
except AttributeError:
_gentype = types.NoneType
# save away Python sum
_sum_ = sum
# functions that are now methods
def _wrapit(obj, method, *args, **kwds):
try:
wrap = obj.__array_wrap__
except AttributeError:
wrap = None
result = getattr(asarray(obj),method)(*args, **kwds)
if wrap:
if not isinstance(result, mu.ndarray):
result = asarray(result)
result = wrap(result)
return result
def take(a, indices, axis=None, out=None, mode='raise'):
"""
Take elements from an array along an axis.
This function does the same thing as "fancy" indexing (indexing arrays
using arrays); however, it can be easier to use if you need elements
along a given axis.
Parameters
----------
a : array_like
The source array.
indices : array_like
The indices of the values to extract.
axis : int, optional
The axis over which to select values. By default, the flattened
input array is used.
out : ndarray, optional
If provided, the result will be placed in this array. It should
be of the appropriate shape and dtype.
mode : {'raise', 'wrap', 'clip'}, optional
Specifies how out-of-bounds indices will behave.
* 'raise' -- raise an error (default)
* 'wrap' -- wrap around
* 'clip' -- clip to the range
'clip' mode means that all indices that are too large are replaced
by the index that addresses the last element along that axis. Note
that this disables indexing with negative numbers.
Returns
-------
subarray : ndarray
The returned array has the same type as `a`.
See Also
--------
ndarray.take : equivalent method
Examples
--------
>>> a = [4, 3, 5, 7, 6, 8]
>>> indices = [0, 1, 4]
>>> np.take(a, indices)
array([4, 3, 6])
In this example if `a` is an ndarray, "fancy" indexing can be used.
>>> a = np.array(a)
>>> a[indices]
array([4, 3, 6])
"""
try:
take = a.take
except AttributeError:
return _wrapit(a, 'take', indices, axis, out, mode)
return take(indices, axis, out, mode)
# not deprecated --- copy if necessary, view otherwise
def reshape(a, newshape, order='C'):
"""
Gives a new shape to an array without changing its data.
Parameters
----------
a : array_like
Array to be reshaped.
newshape : int or tuple of ints
The new shape should be compatible with the original shape. If
an integer, then the result will be a 1-D array of that length.
One shape dimension can be -1. In this case, the value is inferred
from the length of the array and remaining dimensions.
order : {'C', 'F'}, optional
Determines whether the array data should be viewed as in C
(row-major) order or FORTRAN (column-major) order.
Returns
-------
reshaped_array : ndarray
This will be a new view object if possible; otherwise, it will
be a copy.
See Also
--------
ndarray.reshape : Equivalent method.
Notes
-----
It is not always possible to change the shape of an array without
    copying the data. If you want an error to be raised if the data is copied,
    you should assign the new shape to the shape attribute of the array::
     >>> a = np.zeros((10, 2))
     # A transpose makes the array non-contiguous
     >>> b = a.T
     # Taking a view makes it possible to modify the shape without modifying the
# initial object.
>>> c = b.view()
>>> c.shape = (20)
AttributeError: incompatible shape for a non-contiguous array
Examples
--------
>>> a = np.array([[1,2,3], [4,5,6]])
>>> np.reshape(a, 6)
array([1, 2, 3, 4, 5, 6])
>>> np.reshape(a, 6, order='F')
array([1, 4, 2, 5, 3, 6])
>>> np.reshape(a, (3,-1)) # the unspecified value is inferred to be 2
array([[1, 2],
[3, 4],
[5, 6]])
"""
try:
reshape = a.reshape
except AttributeError:
return _wrapit(a, 'reshape', newshape, order=order)
return reshape(newshape, order=order)
def choose(a, choices, out=None, mode='raise'):
"""
Construct an array from an index array and a set of arrays to choose from.
First of all, if confused or uncertain, definitely look at the Examples -
in its full generality, this function is less simple than it might
seem from the following code description (below ndi =
`numpy.lib.index_tricks`):
``np.choose(a,c) == np.array([c[a[I]][I] for I in ndi.ndindex(a.shape)])``.
But this omits some subtleties. Here is a fully general summary:
Given an "index" array (`a`) of integers and a sequence of `n` arrays
(`choices`), `a` and each choice array are first broadcast, as necessary,
to arrays of a common shape; calling these *Ba* and *Bchoices[i], i =
0,...,n-1* we have that, necessarily, ``Ba.shape == Bchoices[i].shape``
for each `i`. Then, a new array with shape ``Ba.shape`` is created as
follows:
* if ``mode=raise`` (the default), then, first of all, each element of
`a` (and thus `Ba`) must be in the range `[0, n-1]`; now, suppose that
`i` (in that range) is the value at the `(j0, j1, ..., jm)` position
in `Ba` - then the value at the same position in the new array is the
value in `Bchoices[i]` at that same position;
* if ``mode=wrap``, values in `a` (and thus `Ba`) may be any (signed)
integer; modular arithmetic is used to map integers outside the range
`[0, n-1]` back into that range; and then the new array is constructed
as above;
* if ``mode=clip``, values in `a` (and thus `Ba`) may be any (signed)
integer; negative integers are mapped to 0; values greater than `n-1`
are mapped to `n-1`; and then the new array is constructed as above.
Parameters
----------
a : int array
This array must contain integers in `[0, n-1]`, where `n` is the number
of choices, unless ``mode=wrap`` or ``mode=clip``, in which cases any
integers are permissible.
choices : sequence of arrays
Choice arrays. `a` and all of the choices must be broadcastable to the
same shape. If `choices` is itself an array (not recommended), then
its outermost dimension (i.e., the one corresponding to
``choices.shape[0]``) is taken as defining the "sequence".
out : array, optional
If provided, the result will be inserted into this array. It should
be of the appropriate shape and dtype.
mode : {'raise' (default), 'wrap', 'clip'}, optional
        Specifies how indices outside `[0, n-1]` will be treated:
* 'raise' : an exception is raised
* 'wrap' : value becomes value mod `n`
* 'clip' : values < 0 are mapped to 0, values > n-1 are mapped to n-1
Returns
-------
merged_array : array
        The merged result.
Raises
------
ValueError: shape mismatch
If `a` and each choice array are not all broadcastable to the same
shape.
See Also
--------
ndarray.choose : equivalent method
Notes
-----
To reduce the chance of misinterpretation, even though the fo
vpikulik/lymph | lymph/core/monitoring/pusher.py | Python | apache-2.0 | 1,268 | 0.000789
import logging
import time
import gevent
import msgpack
import zmq.green as zmq
from lymph.core.components import Component
from lymph.utils.sockets import bind_zmq_socket
logger = logging.getLogger(__name__)
class MonitorPusher(Component):
def __init__(self, container, aggregator, endpoint='127.0.0.1', interval=2):
super(MonitorPusher, self).__init__()
self.container = container
self.interval = interval
ctx = zmq.Context.instance()
self.socket = ctx.socket(zmq.PUB)
self.endpoint, port = bind_zmq_socket(self.socket, endpoint)
        logger.info('binding monitoring endpoint %s', self.endpoint)
self.aggregator = aggregator
def on_start(self):
self.loop_greenlet = self.container.spawn(self.loop)
def on_stop(self, **kwargs):
self.loop_greenlet.kill()
def loop(self):
        last_stats = time.monotonic()
while True:
gevent.sleep(self.interval)
dt = time.monotonic() - last_stats
series = list(self.aggregator)
stats = {
'time': time.time(),
'series': series,
}
last_stats += dt
self.socket.send_multipart([b'stats', msgpack.dumps(stats)])
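For context, a minimal subscriber sketch for the stats stream published above; the endpoint address is an assumption (bind_zmq_socket returns the real one), and this consumer is not part of lymph:
import msgpack
import zmq
ctx = zmq.Context.instance()
sub = ctx.socket(zmq.SUB)
sub.connect('tcp://127.0.0.1:5555')      # assumed endpoint
sub.setsockopt(zmq.SUBSCRIBE, b'stats')  # matches the topic frame sent above
topic, payload = sub.recv_multipart()
stats = msgpack.loads(payload)           # {'time': ..., 'series': [...]}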
ikoula/cloudstack | scripts/vm/network/vnet/cloudstack_pluginlib.py | Python | gpl-2.0 | 17,502 | 0.004571
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# cloudstack_pluginlib for openvswitch on KVM hypervisor
import ConfigParser
import logging
import os
import subprocess
from time import localtime, asctime
DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
DEFAULT_LOG_FILE = "/var/log/cloudstack_plugins.log"
PLUGIN_CONFIG_PATH = "/usr/share/cloudstack-common/scripts/vm/hypervisor/xenserver/cloudstack_plugins.conf"
OVSDB_PID_PATH = "/var/run/openvswitch/ovsdb-server.pid"
OVSDB_DAEMON_PATH = "ovsdb-server"
OVS_PID_PATH = "/var/run/openvswitch/ovs-vswitchd.pid"
OVS_DAEMON_PATH = "ovs-vswitchd"
VSCTL_PATH = "/usr/bin/ovs-vsctl"
OFCTL_PATH = "/usr/bin/ovs-ofctl"
class PluginError(Exception):
"""Base Exception class for all plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def setup_logging(log_file=None):
debug = False
    verbose = False
    log_file_2 = None  # avoids a NameError below when no config file is present
log_format = DEFAULT_LOG_FORMAT
log_date_format = DEFAULT_LOG_DATE_FORMAT
# try to read plugin configuration file
if os.path.exists(PLUGIN_CONFIG_PATH):
config = ConfigParser.ConfigParser()
config.read(PLUGIN_CONFIG_PATH)
try:
options = config.options('LOGGING')
if 'debug' in options:
debug = config.getboolean('LOGGING', 'debug')
if 'verbose' in options:
verbose = config.getboolean('LOGGING', 'verbose')
if 'format' in options:
log_format = config.get('LOGGING', 'format')
if 'date_format' in options:
log_date_format = config.get('LOGGING', 'date_format')
if 'file' in options:
log_file_2 = config.get('LOGGING', 'file')
except ValueError:
# configuration file contained invalid attributes
# ignore them
pass
except ConfigParser.NoSectionError:
# Missing 'Logging' section in configuration file
pass
root_logger = logging.root
if debug:
root_logger.setLevel(logging.DEBUG)
elif verbose:
root_logger.setLevel(logging.INFO)
else:
root_logger.setLevel(logging.WARNING)
formatter = logging.Formatter(log_format, log_date_format)
log_filename = log_file or log_file_2 or DEFAULT_LOG_FILE
logfile_handler = logging.FileHandler(log_filename)
logfile_handler.setFormatter(formatter)
root_logger.addHandler(logfile_handler)
def do_cmd(cmd):
"""Abstracts out the basics of issuing system commands. If the command
returns anything in stderr, a PluginError is raised with that information.
Otherwise, the output from stdout is returned.
"""
pipe = subprocess.PIPE
logging.debug("Executing:%s", cmd)
proc = subprocess.Popen(cmd, shell=False, stdin=pipe, stdout=pipe,
stderr=pipe, close_fds=True)
ret_code = proc.wait()
err = proc.stderr.read()
if ret_code:
logging.debug("The command exited with the error code: " +
"%s (stderr output:%s)" % (ret_code, err))
raise PluginError(err)
output = proc.stdout.read()
if output.endswith('\n'):
output = output[:-1]
return output
def _is_process_run(pidFile, name):
try:
fpid = open(pidFile, "r")
pid = fpid.readline()
fpid.close()
except IOError, e:
return -1
pid = pid[:-1]
ps = os.popen("ps -ae")
for l in ps:
if pid in l and name in l:
ps.close()
return 0
ps.close()
return -2
def _is_tool_exist(name):
if os.path.exists(name):
return 0
return -1
def check_switch():
global result
ret = _is_process_run(OVSDB_PID_PATH, OVSDB_DAEMON_PATH)
if ret < 0:
if ret == -1:
return "NO_DB_PID_FILE"
if ret == -2:
return "DB_NOT_RUN"
ret = _is_process_run(OVS_PID_PATH, OVS_DAEMON_PATH)
if ret < 0:
if ret == -1:
return "NO_SWITCH_PID_FILE"
if ret == -2:
return "SWITCH_NOT_RUN"
if _is_tool_exist(VSCTL_PATH) < 0:
return "NO_VSCTL"
if _is_tool_exist(OFCTL_PATH) < 0:
return "NO_OFCTL"
return "SUCCESS"
def _build_flow_expr(**kwargs):
is_delete_expr = kwargs.get('delete', False)
flow = ""
if not is_delete_expr:
flow = "hard_timeout=%s,idle_timeout=%s,priority=%s"\
% (kwargs.get('hard_timeout', '0'),
kwargs.get('idle_timeout', '0'),
kwargs.get('priority', '1'))
in_port = 'in_port' in kwargs and ",in_port=%s" % kwargs['in_port'] or ''
dl_type = 'dl_type' in kwargs and ",dl_type=%s" % kwargs['dl_type'] or ''
dl_src = 'dl_src' in kwargs and ",dl_src=%s" % kwargs['dl_src'] or ''
dl_dst = 'dl_dst' in kwargs and ",dl_dst=%s" % kwargs['dl_dst'] or ''
nw_src = 'nw_src' in kwargs and ",nw_src=%s" % kwargs['nw_src'] or ''
nw_dst = 'nw_dst' in kwargs and ",nw_dst=%s" % kwargs['nw_dst'] or ''
table = 'table' in kwargs and ",table=%s" % kwargs['table'] or ''
proto = 'proto' in kwargs and ",%s" % kwargs['proto'] or ''
ip = ('nw_src' in kwargs or 'nw_dst' in kwargs) and ',ip' or ''
flow = (flow + in_port + dl_type + dl_src + dl_dst +
(ip or proto) + nw_src + nw_dst)
return flow
def add_flow(bridge, **kwargs):
"""
Builds a flow expression for **kwargs and adds the flow entry
to an Open vSwitch instance
"""
flow = _build_flow_expr(**kwargs)
actions = 'actions' in kwargs and ",actions=%s" % kwargs['actions'] or ''
flow = flow + actions
addflow = [OFCTL_PATH, "add-flow", bridge, flow]
do_cmd(addflow)
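A hand-traced example of how the keyword arguments above become an ovs-ofctl flow string; the bridge name is made up:
# _build_flow_expr(in_port=1, nw_src='10.0.0.0/24', priority=2000) yields
#   "hard_timeout=0,idle_timeout=0,priority=2000,in_port=1,ip,nw_src=10.0.0.0/24"
# so this call runs, roughly:
#   /usr/bin/ovs-ofctl add-flow br0 <flow>,actions=drop
add_flow("br0", in_port=1, nw_src='10.0.0.0/24', priority=2000, actions='drop')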
def del_flows(bridge, **kwargs):
"""
Removes flows according to criteria passed as keyword.
"""
flow = _build_flow_expr(delete=True, **kwargs)
# out_port condition does not exist for all flow commands
out_port = ("out_port" in kwargs and
",out_port=%s" % kwargs['out_port'] or '')
flow = flow + out_port
delFlow = [OFCTL_PATH, 'del-flows', bridge, flow]
do_cmd(delFlow)
def del_all_flows(bridge):
delFlow = [OFCTL_PATH, "del-flows", bridge]
do_cmd(delFlow)
normalFlow = "priority=0 idle_timeout=0 hard_timeout=0 actions=normal"
add_flow(bridge, normalFlow)
def del_port(bridge, port):
delPort = [VSCTL_PATH, "del-port", bridge, port]
do_cmd(delPort)
def get_network_id_for_vif(vif_name):
domain_id, device_id = vif_name[3:len(vif_name)].split(".")
dom_uuid = do_cmd([XE_PATH, "vm-list", "dom-id=%s" % domain_id, "--minimal"])
vif_uuid = do_cmd([XE_PATH, "vif-list", "vm-uuid=%s" % dom_uuid, "device=%s" % device_id, "--minimal"])
    vnet = do_cmd([XE_PATH, "vif-param-get", "uuid=%s" % vif_uuid, "param-name=other-config",
"param-key=cloudstack-network-id"])
return vnet
def get_network_id_for_tunnel_port(tunnelif_name):
vnet = do_cmd([VSCTL_PATH, "get", "interface", tunnelif_name, "options:cloudstack-network-id"])
return vnet
def clear_flooding_rules_for_port(bridge, ofport):
del_flows(bridge, in_port=ofport, table=2)
def add_flooding_rules_for_port(bridge, in_
|
ofport, out_ofports):
action = "".join("output:%s," %ofport for ofport in out_ofports)[:-1]
add_flow(bridge, priority=1100, in_port=
JackDanger/sentry | src/sentry/web/frontend/auth_close.py | Python | bsd-3-clause | 561 | 0.003565
from __future__ import absolute_import
from django.shortcuts import render_to_response
from sentry.web.frontend.base import BaseView
class AuthCloseView(BaseView):
"""This is a view to handle when sentry log in has been opened from
another window. This view loads an html page with a script that sends a message
back to the window opener and closes the window"""
def handle(self, request):
logged_in = request.user.is_authenticated()
return render_to_response('sentry/auth_close.html',
{'logged_in': logged_in})
jim-cooley/abletonremotescripts | remote-scripts/samples/APC40_20/ShiftableSelectorComponent.py | Python | apache-2.0 | 9,563 | 0.013071
import Live
from _Framework.ModeSelectorComponent import ModeSelectorComponent
from _Framework.ButtonElement import ButtonElement
#from consts import * #see below (not used)
#MANUFACTURER_ID = 71
#ABLETON_MODE = 65
#NOTE_MODE = 65 #67 = APC20 Note Mode; 65 = APC40 Ableton Mode 1
class ShiftableSelectorComponent(ModeSelectorComponent):
__doc__ = ' SelectorComponent that assigns buttons to functions based on the shift button '
#def __init__(self, select_buttons, master_button, arm_buttons, matrix, session, zooming, mixer, transport, slider_modes, mode_callback):
def __init__(self, parent, select_buttons, master_button, arm_buttons, matrix, session, zooming, mixer, slider_modes, mode_callback):
if not len(select_buttons) == 8:
raise AssertionError
if not len(arm_buttons) == 8:
raise AssertionError
ModeSelectorComponent.__init__(self)
self._toggle_pressed = False
self._note_mode_active = False
self._invert_assignment = False
self._select_buttons = select_buttons
self._master_button = master_button
self._slider_modes = slider_modes
self._arm_buttons = arm_buttons
#self._transport = transport
self._session = session
self._zooming = zooming
self._matrix = matrix
self._mixer = mixer
self._mode_callback = mode_callback
self._master_button.add_value_listener(self._master_value)
self._parent = parent #use this to call methods of parent class (APC40plus20)
def disconnect(self):
ModeSelectorComponent.disconnect(self)
self._master_button.remove_value_listener(self._master_value)
self._select_buttons = None
self._master_button = None
self._slider_modes = None
self._arm_buttons = None
#self._transport = None
self._session = None
self._zooming = None
self._matrix = None
self._mixer = None
self._mode_callback = None
self._parent = None #added
return None
def set_mode_toggle(self, button):
ModeSelectorComponent.set_mode_toggle(self, button) #called from APC20: self._shift_modes.set_mode_toggle(self._shift_button)
self.set_mode(0)
def invert_assignment(self):
self._invert_assignment = True
self._recalculate_mode()
def number_of_modes(self):
return 2
def update(self):
if self.is_enabled():
if (self._mode_index == 0): # Non-Shifted Mode
#for index in range(len(self._select_buttons)):
#strip = self._mixer.channel_strip(index)
#strip.set_select_button(None)
self._mixer.master_strip().set_select_button(None)
#self._transport.set_play_button(self._select_buttons[0])
#self._transport.set_stop_button(self._select_buttons[1])
#self._transport.set_record_button(self._select_buttons[2])
#self._transport.set_overdub_button(self._select_buttons[3])
#self._session.set_track_bank_buttons(self._select_buttons[4], self._select_buttons[5])
#self._session.set_scene_bank_buttons(self._select_buttons[6], self._select_buttons[7])
#self._zooming.set_nav_buttons(self._select_buttons[6], self._select_buttons[7], self._select_buttons[4], self._select_buttons[5])
self._on_note_mode_changed()
elif (self._mode_index == 1): # Shifted Mode
#self._transport.set_play_button(None)
#self._transport.set_stop_button(None)
#self._transport.set_record_button(None)
#self._transport.set_overdub_button(None)
#self._session.set_track_bank_buttons(None, None)
#self._session.set_scene_bank_buttons(None, None)
#self._zooming.set_nav_buttons(None, None, None, None)
#for index in range(len(self._select_buttons)):
#strip = self._mixer.channel_strip(index)
#strip.set_select_button(self._select_buttons[index])
self._mixer.master_strip().set_select_button(self._master_button)
else :
assert False
if self._mode_index == int(self._invert_assignment):
self._slider_modes.set_mode_buttons(None)
for index in range(len(self._arm_buttons)): #was: for index in range(len(self._select_buttons)):
self._mixer.channel_strip(index).set_arm_button(self._arm_buttons[index])
else:
for index in range(len(self._arm_buttons)): #was: for index in range(len(self._select_buttons)):
self._mixer.channel_strip(index).set_arm_button(None)
self._slider_modes.set_mode_buttons(self._arm_buttons)
return None
def _toggle_value(self, value): #"toggle" is shift button
if not self._mode_toggle != None:
raise AssertionError
if not value in range(128):
raise AssertionError
self._toggle_pressed = value > 0
self._recalculate_mode()
self._parent._encoder_modes.update() #added to update track control encoders on shift
return None
def _recalculate_mode(self): #called if toggle (i.e. shift) is pressed
self.set_mode((int(self._toggle_pressed) + int(self._invert_assignment)) % self.number_of_modes())
def _master_value(self, value): #this is the master_button value_listener, i.e. called when the master_button is pressed
if not self._master_button != None:
raise AssertionError
if not value in range(128):
raise AssertionError
if self.is_enabled() and self._invert_assignment == self._toggle_pressed:
            if not self._master_button.is_momentary() or value > 0: #if the master button is pressed:
                #for button in self._select_buttons: #turn off track select buttons (only needed for APC20)
                    #button.turn_off()
self._matrix.reset() #turn off the clip launch grid LEDs
#mode_byte = NOTE_MODE #= 67 for APC20 Note Mode, send as part of sysex string to enable Note Mode
if self._note_mode_active: #if note mode is already on, turn it off:
#mode_byte = ABLETON_MODE #= 65 for APC40 Ableton Mode 1
for scene_index in range(5):
scene = self._session.scene(scene_index)
for track_index in range(8):
clip_slot = scene.clip_slot(track_index)
button = self._matrix.get_button(track_index, scene_index)
clip_slot.set_launch_button(button)
button.set_enabled(True)
button.turn_off()
self._rebuild_callback()
#self._mode_callback(mode_byte) #send sysex to set Mode (NOTE_MODE or ABLETON_MODE)
self._note_mode_active = not self._note_mode_active
self._zooming.set_ignore_buttons(self._note_mode_active) #turn off matrix, scene launch, and clip stop buttons when in Note Mode
#self._transport.update() #only needed for APC20
self._on_note_mode_changed()
return None
def _on_note_mode_changed(self):
if not self._master_button != None:
raise AssertionError
if self.is_enabled() and self._invert_assignment == self._toggle_pressed:
if self._note_mode_active:
self._master_button.turn_on()
for scene_index in range(5):
#TODO: re-map scene_launch buttons to note velocity...
scene = self._session.scene(scene_index)
for track_index in range(8):
clip_slot = scene.clip_slot(track_index)
button = self._matrix.get_button(track_index, scene_index)
clip_slot.set_launch_button(None)
anderscui/nails | nails/texts.py | Python | mit | 534 | 0
# coding=utf-8
import re
RE_WHITESPACE = re.compile(r"(\s)+", re.UNICODE)
def remove_postfix(s, postfix):
    if s.endswith(postfix):
        return s[:len(s)-len(postfix)]
    return s
def remove_prefix(s, prefix):
    if s.startswith(prefix):
        return s[len(prefix):]
    return s
def flatten2str(obj):
if obj is None:
return ''
if isinstance(obj, str):
return obj
if isinstance(obj, (list, tuple)):
return ' '.join(obj)
return str(obj)
def compress_whitespaces(s):
return RE_WHITESPACE.sub(' ', s)
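A few hand-checked examples of the helpers above (editorial, not part of the module):
assert compress_whitespaces('a \t b\n\nc') == 'a b c'
assert remove_prefix('prefix-value', 'prefix-') == 'value'
assert remove_postfix('value.txt', '.txt') == 'value'
assert flatten2str(['a', 'b']) == 'a b'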
platformio/platformio | platformio/managers/package.py | Python | apache-2.0 | 29,447 | 0.000815
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import json
import os
import re
import shutil
from os.path import basename, getsize, isdir, isfile, islink, join, realpath
from tempfile import mkdtemp
import click
import requests
import semantic_version
from platformio import __version__, app, exception, fs, util
from platformio.compat import hashlib_encode_data
from platformio.downloader import FileDownloader
from platformio.lockfile import LockFile
from platformio.package.exception import ManifestException
from platformio.package.manifest.parser import ManifestParserFactory
from platformio.unpacker import FileUnpacker
from platformio.vcsclient import VCSClientFactory
# pylint: disable=too-many-arguments, too-many-return-statements
class PackageRepoIterator(object):
def __init__(self, package, repositories):
assert isinstance(repositories, list)
self.package = package
self.repositories = iter(repositories)
def __iter__(self):
return self
def __next__(self):
return self.next() # pylint: disable=not-callable
@staticmethod
@util.memoized(expire="60s")
def load_manifest(url):
r = None
try:
r = requests.get(url, headers={"User-Agent": app.get_user_agent()})
r.raise_for_status()
return r.json()
except: # pylint: disable=bare-except
pass
finally:
if r:
r.close()
return None
def next(self):
repo = next(self.repositories)
manifest = repo if isinstance(repo, dict) else self.load_manifest(repo)
if manifest and self.package in manifest:
return manifest[self.package]
return next(self)
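A consumption sketch for the iterator above; the package name and repository URL are placeholders:
repos = ["https://example.com/packages/manifest.json"]
for versions in PackageRepoIterator("foo", repos):
    # each item is the version list for "foo" from one repository manifest
    for v in versions:
        print(v["version"], v.get("system", "*"))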
class PkgRepoMixin(object):
PIO_VERSION = semantic_version.Version(util.pepver_to_semver(__version__))
@staticmethod
def is_system_compatible(valid_systems):
if not valid_systems or "*" in valid_systems:
return True
if not isinstance(valid_systems, list):
valid_systems = list([valid_systems])
return util.get_systype() in valid_systems
def max_satisfying_repo_version(self, versions, requirements=None):
item = None
reqspec = None
try:
reqspec = (
semantic_version.SimpleSpec(requirements) if requirements else None
)
except ValueError:
pass
for v in versions:
if not self.is_system_compatible(v.get("system")):
continue
# if "platformio" in v.get("e
|
ngines", {}):
# if PkgRepoMixin.PIO_VERSION not in requirements.SimpleSpec(
# v['engines']['platformio']):
# continue
specver = semantic_version.Version(v["version"])
            if reqspec and specver not in reqspec:
continue
if not item or semantic_version.Version(item["version"]) < specver:
item = v
return item
def get_latest_repo_version( # pylint: disable=unused-argument
self, name, requirements, silent=False
):
version = None
for versions in PackageRepoIterator(name, self.repositories):
pkgdata = self.max_satisfying_repo_version(versions, requirements)
if not pkgdata:
continue
if (
not version
or semantic_version.compare(pkgdata["version"], version) == 1
):
version = pkgdata["version"]
return version
def get_all_repo_versions(self, name):
result = []
for versions in PackageRepoIterator(name, self.repositories):
result.extend([semantic_version.Version(v["version"]) for v in versions])
return [str(v) for v in sorted(set(result))]
class PkgInstallerMixin(object):
SRC_MANIFEST_NAME = ".piopkgmanager.json"
TMP_FOLDER_PREFIX = "_tmp_installing-"
FILE_CACHE_VALID = None # for example, 1 week = "7d"
FILE_CACHE_MAX_SIZE = 1024 * 1024 * 50 # 50 Mb
MEMORY_CACHE = {} # cache for package manifests and read dirs
def cache_get(self, key, default=None):
return self.MEMORY_CACHE.get(key, default)
def cache_set(self, key, value):
self.MEMORY_CACHE[key] = value
def cache_reset(self):
self.MEMORY_CACHE.clear()
def read_dirs(self, src_dir):
cache_key = "read_dirs-%s" % src_dir
result = self.cache_get(cache_key)
if result:
return result
result = [
join(src_dir, name)
for name in sorted(os.listdir(src_dir))
if isdir(join(src_dir, name))
]
self.cache_set(cache_key, result)
return result
def download(self, url, dest_dir, sha1=None):
cache_key_fname = app.ContentCache.key_from_args(url, "fname")
cache_key_data = app.ContentCache.key_from_args(url, "data")
if self.FILE_CACHE_VALID:
with app.ContentCache() as cc:
fname = str(cc.get(cache_key_fname))
cache_path = cc.get_cache_path(cache_key_data)
if fname and isfile(cache_path):
dst_path = join(dest_dir, fname)
shutil.copy(cache_path, dst_path)
click.echo("Using cache: %s" % cache_path)
return dst_path
with_progress = not app.is_disabled_progressbar()
try:
fd = FileDownloader(url, dest_dir)
fd.start(with_progress=with_progress)
except IOError as e:
raise_error = not with_progress
if with_progress:
try:
fd = FileDownloader(url, dest_dir)
fd.start(with_progress=False)
except IOError:
raise_error = True
if raise_error:
click.secho(
"Error: Please read http://bit.ly/package-manager-ioerror",
fg="red",
err=True,
)
raise e
if sha1:
fd.verify(sha1)
dst_path = fd.get_filepath()
if (
not self.FILE_CACHE_VALID
or getsize(dst_path) > PkgInstallerMixin.FILE_CACHE_MAX_SIZE
):
return dst_path
with app.ContentCache() as cc:
cc.set(cache_key_fname, basename(dst_path), self.FILE_CACHE_VALID)
cc.set(cache_key_data, "DUMMY", self.FILE_CACHE_VALID)
shutil.copy(dst_path, cc.get_cache_path(cache_key_data))
return dst_path
@staticmethod
def unpack(source_path, dest_dir):
with_progress = not app.is_disabled_progressbar()
try:
with FileUnpacker(source_path) as fu:
return fu.unpack(dest_dir, with_progress=with_progress)
except IOError as e:
if not with_progress:
raise e
with FileUnpacker(source_path) as fu:
return fu.unpack(dest_dir, with_progress=False)
@staticmethod
def parse_semver_version(value, raise_exception=False):
try:
try:
return semantic_version.Version(value)
except ValueError:
if "." not in str(value) and not str(value).isdigit():
raise ValueError("Invalid SemVer version %s" % value)
return semantic_version.Version.coerce(value)
except ValueError as e:
if raise_exception:
raise e
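Hand-checked behaviour of parse_semver_version, assuming the except clause above falls through to an implicit None when raise_exception is false:
assert str(PkgInstallerMixin.parse_semver_version("1.2.3")) == "1.2.3"
assert str(PkgInstallerMixin.parse_semver_version("1.2")) == "1.2.0"   # coerced
assert str(PkgInstallerMixin.parse_semver_version("13")) == "13.0.0"   # digits coerce too
assert PkgInstallerMixin.parse_semver_version("not-a-version") is None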
GrognardsFromHell/TemplePlus | tpdatasrc/tpgamefiles/rules/d20_actions/action02600_feat_divine_armor.py | Python | mit | 348 | 0.04023
from toee import *
import tpactions
def GetActionName():
return "Divine Armor"
def GetActionDefinitionFlags():
return D20ADF_None
def GetTargetingClassification():
return D20TC_Target0
def GetActionCostType():
return D20ACT_Swift_Action
def AddToSequence(d20action, action_seq, tb_status):
    action_seq.add_action(d20action)
return AEC_OK
bearops/ebzl | ebzl/lib/format.py | Python | bsd-3-clause | 1,944 | 0.000514
import json
TEXT = "text"
BASH = "bash"
JSON = "json"
DOCKERENV = "dockerenv"
NAME_VALUE_DICT = "nvdict"
DEFAULT = TEXT
CHOICES = (TEXT, BASH, JSON, DOCKERENV, NAME_VALUE_DICT)
def print_dict(dictionary, format_=None):
"""Print a dictionary in a given format. Defaults to text."""
format_ = format_ or DEFAULT
if format_ == TEXT:
for key, value in iter(sorted(dictionary.items())):
print("%s = %s" % (key, value))
elif format_ == DOCKERENV:
        for key, value in iter(sorted(dictionary.items())):
print("%s=%s" % (key, value))
elif format_ == BASH:
for key, value in iter(sorted(dictionary.items())):
print("export %s=%s" % (key, value))
elif format_ == JSON:
print(json.dumps(dictionary))
elif format_ == NAME_VALUE_DICT:
print("[")
for key, value in iter(sorted(dictionary.items())):
print('{"name": "%s", "value": "%s"},' % (key, value))
print("]")
def print_list(list_, format_=None):
"""Print a list in a given format. Defaults to text."""
format_ = format_ or DEFAULT
if format_ == TEXT:
for item in list_:
print(item)
elif format_ == JSON:
print(json.dumps(list_))
def print_table(rows, separator=" "):
columns = max(map(len, rows))
widths = [0] * columns
for column in range(columns):
for row in rows:
length = len(row[column])
if length > widths[column]:
widths[column] = length
for row in rows:
        print(separator.join(["%s%s" % (value, " " * (widths[index] - len(str(value))))
                              for index, value in enumerate(row)]))
def print_profile(profile, format_=None):
"""Print profile header."""
format_ = format_ or DEFAULT
if format_ == TEXT:
print("[profile:%s]" % profile)
elif format_ == BASH:
print("# profile: %s" % profile)
matrix-org/synapse | tests/rest/client/test_consent.py | Python | apache-2.0 | 4,541 | 0.00044
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from http import HTTPStatus
from twisted.test.proto_helpers import MemoryReactor
import synapse.rest.admin
from synapse.api.urls import ConsentURIBuilder
from synapse.rest.client import login, room
from synapse.rest.consent import consent_resource
from synapse.server import HomeServer
from synapse.util import Clock
from tests import unittest
from tests.server import FakeSite, make_request
class ConsentResourceTestCase(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
room.register_servlets,
login.register_servlets,
]
user_id = True
hijack_auth = False
def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
config = self.default_config()
config["form_secret"] = "123abc"
# Make some temporary templates...
temp_consent_path = self.mktemp()
os.mkdir(temp_consent_path)
os.mkdir(os.path.join(temp_consent_path, "en"))
config["user_consent"] = {
"version": "1",
"template_dir": os.path.abspath(temp_consent_path),
}
with open(os.path.join(temp_consent_path, "en/1.html"), "w") as f:
f.write("{{version}},{{has_consented}}")
with open(os.path.join(temp_consent_path, "en/success.html"), "w") as f:
f.write("yay!")
hs = self.setup_test_homeserver(config=config)
return hs
def test_render_public_consent(self) -> None:
"""You can observe the terms form without specifying a user"""
resource = consent_resource.ConsentResource(self.hs)
channel = make_request(
self.reactor,
FakeSite(resource, self.reactor),
"GET",
"/consent?v=1",
shorthand=False,
)
self.assertEqual(channel.code, HTTPStatus.OK)
def test_accept_consent(self) -> None:
"""
A user can use the consent form to accept the terms.
"""
uri_builder = ConsentURIBuilder(self.hs.config)
resource = consent_resource.ConsentResource(self.hs)
# Register a user
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Fetch the consent page, to get the consent version
consent_uri = (
uri_builder.build_user_consent_uri(user_id).replace("_matrix/", "")
+ "&u=user"
)
channel = make_request(
self.reactor,
FakeSite(resource, self.reactor),
"GET",
consent_uri,
access_token=access_token,
shorthand=False,
)
self.assertEqual(channel.code, HTTPStatus.OK)
# Get the version from the body, and whether we've consented
version, consented = channel.result["body"].decode("ascii").split(",")
self.assertEqual(consented, "False")
# POST to the consent page, saying we've agreed
channel = make_request(
self.reactor,
FakeSite(resource, self.reactor),
"POST",
consent_uri + "&v=" + version,
access_token=access_token,
shorthand=False,
)
self.assertEqual(channel.code, HTTPStatus.OK)
# Fetch the consent page, to get the consent version -- it should have
# changed
channel = make_request(
self.reactor,
FakeSite(resource, self.reactor),
"GET",
consent_uri,
access_token=access_token,
shorthand=False,
)
self.assertEqual(channel.code, HTTPStatus.OK)
# Get the version from the body, and check that it's the version we
# agreed to, and that we've consented to it.
version, consented = channel.result["body"].decode("ascii").split(",")
self.assertEqual(consented, "True")
self.assertEqual(version, "1")
|
MaxWayne/Beginning-Game-Development-with-Python-and-Pygame
|
Chapter 12/model3d.py
|
Python
|
mit
| 5,468
| 0.007315
|
from OpenGL.GL import *
from OpenGL.GLU import *
import pygame
import os.path
class Material(object):
def __init__(self):
self.name = ""
self.texture_fname = None
self.texture_id = None
class FaceGroup(object):
def __init__(self):
self.tri_indices = []
self.material_name = ""
class Model3D(object):
def __init__(self):
self.vertices = []
self.tex_coords = []
self.normals = []
self.materials = {}
self.face_groups = []
self.display_list_id = None
def __del__(self):
#Called when the model is cleaned up by Python
self.free_resources()
    def free_resources(self):
# Delete the display list and textures
if self.display_list_id is not None:
            glDeleteLists(self.display_list_id, 1)
self.display_list_id = None
# Delete any textures we used
for material in self.materials.values():
if material.texture_id is not None:
glDeleteTextures(material.texture_id)
# Clear all the materials
self.materials.clear()
# Clear the geometry lists
del self.vertices[:]
del self.tex_coords[:]
del self.normals[:]
del self.face_groups[:]
def read_obj(self, fname):
current_face_group = None
file_in = open(fname)
for line in file_in:
            # Parse command and data from each line; skip blanks and comments
            words = line.split()
            if not words or words[0].startswith('#'):
                continue
            command = words[0]
data = words[1:]
if command == 'mtllib': # Material library
model_path = os.path.split(fname)[0]
mtllib_path = os.path.join( model_path, data[0] )
self.read_mtllib(mtllib_path)
elif command == 'v': # Vertex
x, y, z = data
vertex = (float(x), float(y), float(z))
self.vertices.append(vertex)
elif command == 'vt': # Texture coordinate
s, t = data
tex_coord = (float(s), float(t))
self.tex_coords.append(tex_coord)
elif command == 'vn': # Normal
x, y, z = data
normal = (float(x), float(y), float(z))
self.normals.append(normal)
elif command == 'usemtl' : # Use material
current_face_group = FaceGroup()
current_face_group.material_name = data[0]
self.face_groups.append( current_face_group )
elif command == 'f':
assert len(data) == 3, "Sorry, only triangles are supported"
# Parse indices from triples
for word in data:
vi, ti, ni = word.split('/')
indices = (int(vi) - 1, int(ti) - 1, int(ni) - 1)
current_face_group.tri_indices.append(indices)
for material in self.materials.values():
model_path = os.path.split(fname)[0]
texture_path = os.path.join(model_path, material.texture_fname)
texture_surface = pygame.image.load(texture_path)
texture_data = pygame.image.tostring(texture_surface, 'RGB', True)
material.texture_id = glGenTextures(1)
glBindTexture(GL_TEXTURE_2D, material.texture_id)
glTexParameteri( GL_TEXTURE_2D,
GL_TEXTURE_MAG_FILTER,
GL_LINEAR)
glTexParameteri( GL_TEXTURE_2D,
GL_TEXTURE_MIN_FILTER,
GL_LINEAR_MIPMAP_LINEAR)
glPixelStorei(GL_UNPACK_ALIGNMENT,1)
width, height = texture_surface.get_rect().size
gluBuild2DMipmaps( GL_TEXTURE_2D,
3,
width,
height,
GL_RGB,
GL_UNSIGNED_BYTE,
texture_data)
def read_mtllib(self, mtl_fname):
file_mtllib = open(mtl_fname)
for line in file_mtllib:
            words = line.split()
            if not words or words[0].startswith('#'):
                continue  # skip blank lines and comments
            command = words[0]
data = words[1:]
if command == 'newmtl':
material = Material()
material.name = data[0]
self.materials[data[0]] = material
elif command == 'map_Kd':
material.texture_fname = data[0]
def draw(self):
vertices = self.vertices
tex_coords = self.tex_coords
normals = self.normals
for face_group in self.face_groups:
material = self.materials[face_group.material_name]
glBindTexture(GL_TEXTURE_2D, material.texture_id)
glBegin(GL_TRIANGLES)
for vi, ti, ni in face_group.tri_indices:
glTexCoord2fv( tex_coords[ti] )
glNormal3fv( normals[ni] )
glVertex3fv( vertices[vi] )
glEnd()
def draw_quick(self):
if self.display_list_id is None:
self.display_list_id = glGenLists(1)
glNewList(self.display_list_id, GL_COMPILE)
self.draw()
glEndList()
glCallList(self.display_list_id)
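# Usage sketch, assuming an OpenGL-enabled pygame display is already active
# and "model.obj" names a triangulated OBJ file with a material library:
#
#   model = Model3D()
#   model.read_obj("model.obj")
#   # then, once per frame inside the render loop:
#   model.draw_quick()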
|
denzp/cef3
|
tools/gyp_cef.py
|
Python
|
bsd-3-clause
| 797
| 0
|
# Copyright (c) 2013 The Chromium Embedded Framework Authors.
# Portions copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is (possibly, depending on python version) imported by gyp_cef
# when it creates sub-processes through the multiprocessing library.
# Importing in Python 2.6 (fixed in 2.7) on Windows doesn't search for
# imports that don't end in .py (and aren't directories with an
# __init__.py). This wrapper makes "impor
|
t gyp_cef" work with those old
# versions and makes it possible to execute gyp_cef.py directly on Windows
# where the extension is useful.
import os
path = os.path.abspath(os.path.split(__file__)[0])
execfile(os.path.join(path, 'gyp_cef'))
|
slyphon/pants
|
tests/python/pants_test/backend/jvm/tasks/test_bundle_create.py
|
Python
|
apache-2.0
| 5,152
| 0.003882
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pants.backend.jvm.targets.jar_dependency import JarDependency
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.jvm_app import JvmApp
from pants.backend.jvm.targets.jvm_binary import JvmBinary
from pants.backend.jvm.tasks.bundle_create import BundleCreate
from pants.util.contextutil import open_zip
from pants_test.backend.jvm.tasks.jvm_binary_task_test_base import JvmBinaryTaskTestBase
class TestBundleCreate(JvmBinaryTaskTestBase):
@classmethod
def task_type(cls):
return BundleCreate
def test_jvm_bundle_products(self):
jar_lib = self.make_target(spec='3rdparty/jvm/org/example:foo',
target_type=JarLibrary,
jars=[JarDependency(org='org.example', name='foo', rev='1.0.0'),
JarDependency(org='org.pantsbuild', name='bar', rev='2.0.0',
ext='zip'),
JarDependency(org='org.apache', name='baz', rev='3.0.0',
classifier='tests'),
JarDependency(org='org.gnu', name='gary', rev='4.0.0',
ext='tar.gz')])
binary_target = self.make_target(spec='//foo:foo-binary',
target_type=JvmBinary,
source='Foo.java',
dependencies=[jar_lib])
app_target = self.make_target(spec='//foo:foo-app',
target_type=JvmApp,
basename='FooApp',
dependencies=[binary_target])
context = self.context(target_roots=[app_target])
    jar_artifact = self.create_artifact(org='org.example', name='foo', rev='1.0.0')
zip_artifact = self.create_artifact(org='org.pantsbuild', name='bar', rev='2.0.0', ext='zip')
bundle_artifact = self.create_artifact(org='org.apache', name='baz', rev='3.0.0',
classifier='tests')
tar_gz_artifact = self.create_artifact(org='org.gnu', name='gary', rev='4.0.0', ext='tar.gz')
classpath_products = self.ensure_classpath_products(context)
classpath_products.add_jars_for_targets(targets=[jar_lib],
conf='default',
resolved_jars=[jar_artifact,
zip_artifact,
bundle_artifact,
tar_gz_artifact])
self.add_to_runtime_classpath(context, binary_target, {'Foo.class': '', 'foo.txt': ''})
self.execute(context)
products = context.products.get('jvm_bundles')
self.assertIsNotNone(products)
product_data = products.get(app_target)
dist_root = os.path.join(self.build_root, 'dist')
self.assertEquals({dist_root: ['FooApp-bundle']}, product_data)
bundle_root = os.path.join(dist_root, 'FooApp-bundle')
self.assertEqual(sorted(['foo-binary.jar',
'libs/org.example-foo-1.0.0.jar',
'libs/org.pantsbuild-bar-2.0.0.zip',
'libs/org.apache-baz-3.0.0-tests.jar',
'libs/org.gnu-gary-4.0.0.tar.gz']),
sorted(self.iter_files(bundle_root)))
with open_zip(os.path.join(bundle_root, 'foo-binary.jar')) as jar:
self.assertEqual(sorted(['META-INF/', 'META-INF/MANIFEST.MF', 'Foo.class', 'foo.txt']),
sorted(jar.namelist()))
def test_jvm_bundle_missing_product(self):
binary_target = self.make_target(spec='//foo:foo-binary',
target_type=JvmBinary,
source='Foo.java')
app_target = self.make_target(spec='//foo:foo-app',
target_type=JvmApp,
basename='FooApp',
dependencies=[binary_target])
context = self.context(target_roots=[app_target])
jar_artifact = self.create_artifact(org='org.example', name='foo', rev='1.0.0',
materialize=False)
classpath_products = self.ensure_classpath_products(context)
classpath_products.add_jars_for_targets(targets=[binary_target],
conf='default',
resolved_jars=[jar_artifact])
self.add_to_runtime_classpath(context, binary_target, {'Foo.class': '', 'foo.txt': ''})
with self.assertRaises(BundleCreate.MissingJarError):
self.execute(context)
|
toymachine/concurrence
|
test/testextra.py
|
Python
|
bsd-3-clause
| 3,776
| 0.011388
|
from __future__ import with_statement
import logging
import time
import sys
from concurrence import unittest, Tasklet, Channel, Lock, Semaphore, TaskletPool, DeferredQueue, Deque, TimeoutError, TaskletError, JoinError, Message
class TestTaskletPool(unittest.TestCase):
def testBasic(self):
d = Deque()
def handler(i):
Tasklet.sleep(1.0)
d.append(i)
tp = TaskletPool()
N = 10
for i in range(N):
tp.defer(handler, i)
start = time.time()
xs = []
while True:
xs.append(d.popleft(True, 30))
if len(xs) == N:
break
end = time.time()
#X workers taking 1 second to process N items = Z total proc time
self.assertAlmostEqual(N / TaskletPool.INIT_WORKERS, end - start, places = 1)
self.assertEquals(45, sum(xs))
class TestDeferredQueue(unittest.TestCase):
def testDeferredQueue(self):
d = DeferredQueue()
def f(i):
pass
for i in range(10):
d.defer(f, i)
        Tasklet.sleep(1)
for i in range(10):
d.defer(f, i)
Tasklet.sleep(1)
class TestPrimitives(unittest.TestCase):
def testSemaphore(self):
        sema = Semaphore(4)
self.assertEquals(True, sema.acquire())
self.assertEquals(3, sema.count)
self.assertEquals(True, sema.acquire())
self.assertEquals(2, sema.count)
self.assertEquals(True, sema.acquire())
self.assertEquals(1, sema.count)
self.assertEquals(True, sema.acquire())
self.assertEquals(0, sema.count)
self.assertEquals(False, sema.acquire(False))
self.assertEquals(0, sema.count)
self.assertEquals(None, sema.release())
self.assertEquals(1, sema.count)
self.assertEquals(None, sema.release())
self.assertEquals(2, sema.count)
self.assertEquals(None, sema.release())
self.assertEquals(3, sema.count)
self.assertEquals(None, sema.release())
self.assertEquals(4, sema.count)
self.assertEquals(None, sema.release())
self.assertEquals(5, sema.count) #possible to go beyond initial count... is this ok?
sema = Semaphore(4)
xs = []
def t(x):
try:
with sema:
Tasklet.sleep(1.0)
xs.append(x)
return x
except TimeoutError:
pass
start = time.time()
for i in range(8):
Tasklet.new(t)(i)
join_result = Tasklet.join_children()
self.assertEquals(8, len(join_result))
self.assertEquals(28, sum(xs))
end = time.time()
self.assertAlmostEqual(2.0, end - start, places = 1)
def testLock(self):
lock = Lock()
self.assertEquals(True, lock.acquire())
self.assertEquals(True, lock.is_locked())
self.assertEquals(None, lock.release())
xs = []
def t(x):
try:
with lock:
Tasklet.sleep(1.0)
xs.append(x)
return x
except TimeoutError:
pass
start = time.time()
for i in range(5):
Tasklet.new(t)(i)
join_result = Tasklet.join_children()
self.assertEquals(5, len(join_result))
self.assertEquals(10, sum(xs))
end = time.time()
self.assertAlmostEqual(5.0, end - start, places = 1)
if __name__ == '__main__':
unittest.main(timeout = 100.0)
|
jimsrc/seatos
|
shared_lib/shared_funcs.py
|
Python
|
mit
| 54,269
| 0.01382
|
#!/usr/bin/env ipython
# -*- coding: utf-8 -*-
from datetime import datetime, time, timedelta
import numpy as np
import console_colors as ccl
from scipy.io.netcdf import netcdf_file
from ShiftTimes import ShiftCorrection, ShiftDts
import os, argparse
import h5py
from h5py import File as h5
from numpy import (
mean, median, nanmean, nanmedian, std, nan,
isnan, min, max, zeros, ones, size, loadtxt
)
from os.path import isfile, isdir
if 'DISPLAY' in os.environ: # to avoid crash when running remotely
from pylab import figure, savefig, close, find, pause
import matplotlib.patches as patches
import matplotlib.transforms as transforms
#from read_NewTable import tshck, tini_icme, tend_icme, tini_mc, tend_mc, n_icmes, MCsig
#from z_expansion_gulisano import z as z_exp
_ERROR_ = ccl.Rn+' ### ERROR ###: '+ccl.W
def flags2nan(VAR, FLAG):
cond = VAR < FLAG
VAR = np.array(VAR)
VAR[~cond] = np.nan
return VAR
def date_to_utc(fecha):
utc = datetime(1970, 1, 1, 0, 0, 0, 0)
sec_utc = (fecha - utc).total_seconds()
return sec_utc
def selecc_data(data, tshk):
time = data[0] #[s] utc sec
rate = data[1]
day = 86400. # [seg]
utc = datetime(1970, 1, 1, 0, 0, 0, 0)
tshk_utc = (tshk - utc).total_seconds()
ti = tshk_utc - 10.*day # [seg] utc
tf = tshk_utc + 30.*day
cond = (time > ti) & (time < tf)
time = (time[cond] - tshk_utc) / day # [days] since shock
rate = rate[cond]
return (time, rate)
def selecc_window(data, tini, tend):
time = data[0] #[s] utc sec
y = data[1]
day = 86400. # [seg]
utc = datetime(1970, 1, 1, 0, 0, 0, 0)
tini_utc = (tini - utc).total_seconds() # [s] utc sec
tend_utc = (tend - utc).total_seconds() # [s] utc sec
ti = tini_utc # [seg] utc
tf = tend_utc
cond = (time > ti) & (time < tf)
time = (time[cond] - tini_utc) / day # [days] since 'ti'
y = y[cond]
return (time, y)
def enoughdata(var, fgap):
n = len(var)
ngood = len(find(~isnan(var)))
    fdata = 1.*ngood/n # fraction of data without gaps
if fdata>=(1.-fgap):
return True
else:
return False
def averages_and_std(n_icmes, t_shck, ti_icme, dTday, nbin, t_utc, VAR, fgap):
day = 86400.
nok=0; nbad=0
adap = []
for i in range(n_icmes):
dT = (ti_icme[i] - t_shck[i]).total_seconds()/day # [day]
if dT>dTday:
dt = dT/nbin
t, var = selecc_window(
[t_utc, VAR],
t_shck[i], ti_icme[i]
)
            if enoughdata(var, fgap): # require that more than 80% of the data are not gaps
adap += [adaptar(nbin, dt, t, var)]
nok +=1
else:
continue
else:
print " i:%d ---> Este evento es muy chico!, dT/day:%g" % (i, dT)
nbad +=1
VAR_adap = zeros(nbin*nok).reshape(nok, nbin)
for i in range(nok):
VAR_adap[i,:] = adap[i][1]
VAR_avrg = zeros(nbin)
VAR_std = zeros(nbin)
ndata = zeros(nbin)
for i in range(nbin):
cond = ~isnan(VAR_adap.T[i,:])
        ndata[i] = len(find(cond)) # number of data points != flag
        VAR_avrg[i] = mean(VAR_adap.T[i,cond]) # mean over the unflagged values
        VAR_std[i] = std(VAR_adap.T[i,cond]) # std of the same data set
tnorm = adap[0][0]
return [nok, nbad, tnorm, VAR_avrg, VAR_std, ndata]
def adaptar(n, dt, t, r):
    #n = int(5./dt) # number of points over the whole plotting interval
tt = zeros(n)
rr = zeros(n)
for i in range(n):
tmin = i*dt
tmax = (i+1.)*dt
cond = (t>tmin) & (t<tmax)
tt[i] = mean(t[cond])
rr[i] = mean(r[cond])
return [tt/(n*dt), rr]
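# NOTE: the next definition rebinds the name `adaptar` with a different
# signature; at import time it shadows the version above.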
def adaptar(nwndw, dT, n, dt, t, r):
    #n = int(5./dt) # number of points over the whole plotting interval
tt = zeros(n)
rr = zeros(n)
    _nbin_ = n/(1+nwndw[0]+nwndw[1]) # number of bins in the sheath
for i in range(n):
tmin = (i-nwndw[0]*_nbin_)*dt
tmax = tmin + dt
cond = (t>tmin) & (t<tmax)
tt[i] = mean(t[cond])#; print "tt:", t[i]; pause(1)
rr[i] = mean(r[cond])
    return [tt/dT, rr] # time normalized by the sheath duration
#@profile
def adaptar_ii(nwndw, dT, n, dt, t, r, fgap):
tt = zeros(n)
rr = zeros(n)
    _nbin_ = n/(1+nwndw[0]+nwndw[1]) # number of bins in the sheath/mc
    cc = (t>0.) & (t<dT) # sheath/mc interval
    #print " r[cc]: ", r[cc]
    if len(r[cc])==0: # no data in this window
        rr = nan*ones(n)
        enough = False
    else:
        enough = enoughdata(r[cc], fgap) # [bool] True if more than 80% of the data is good
        if not(enough):
            rr = nan*ones(n) # without enough good data, this event contributes nothing
for i in range(n):
tmin = (i-nwndw[0]*_nbin_)*dt
tmax = tmin + dt
cond = (t>=tmin) & (t<=tmax)
#tt[i] = mean(t[cond])#; print "tt:", t[i]; pause(1) # bug
        tt[i] = tmin + .5*dt # bug fixed
        if enough:
            #cc = ~isnan(r[cond]) # don't forget to filter out the gaps
            #rr[i] = mean(r[cond][cc])
            rr[i] = nanmean(r[cond])
    return enough, [tt/dT, rr] # time normalized by the sheath/mc/etc duration
#@profile
def selecc_window_ii(nwndw, data, tini, tend):
time = data[0] #[s] utc sec
y = data[1]
day = 86400. # [seg]
utc = datetime(1970, 1, 1, 0, 0, 0, 0)
tini_utc = (tini - utc).total_seconds() # [s] utc sec
tend_utc = (tend - utc).total_seconds() # [s] utc sec
dt = tend_utc - tini_utc
ti = tini_utc - nwndw[0]*dt # [seg] utc
tf = tend_utc + nwndw[1]*dt
cond = (time > ti) & (time < tf)
time = (time[cond] - tini_utc) / day # [days] since 'ti'
y = y[cond]
return (time, y)
def averages_and_std_ii(nwndw,
SELECC, #MCsig, MCwant,
n_icmes, tini, tend, dTday, nbin, t_utc, VAR):
day = 86400.
nok=0; nbad=0
adap = []
for i in range(n_icmes):
dT = (tend[i] - tini[i]).total_seconds()/day # [day]
if ((dT>dTday) & SELECC[i]):# (MCsig[i]>=MCwant)):
dt = dT*(1+nwndw[0]+nwndw[1])/nbin
t, var = selecc_window_ii(
                nwndw, # how many window-widths backward and forward
[t_utc, VAR],
tini[i], tend[i]
)
            adap += [adaptar(nwndw, dT, nbin, dt, t, var)] # rebin using 'dt' as the new bin width
nok +=1
else:
print " i:%d ---> Filtramos este evento!, dT/day:%g" % (i, dT)
nbad +=1
VAR_adap = zeros(nbin*nok).reshape(nok, nbin)
for i in range(nok):
VAR_adap[i,:] = adap[i][1]
VAR_avrg = zeros(nbin)
VAR_medi = zeros(nbin)
VAR_std = zeros(nbin)
ndata = zeros(nbin)
for i in range(nbin):
cond = ~isnan(VAR_adap.T[i,:])
        ndata[i] = len(find(cond)) # number of data points != flag
        VAR_avrg[i] = mean(VAR_adap.T[i,cond]) # mean over the unflagged values
        VAR_medi[i] = median(VAR_adap.T[i,cond]) # median over the unflagged values
VAR_std[i] = std(VAR_adap.T[i,cond]) # std del mismo conjunto de datos
tnorm = adap[0][0]
return [nok, nbad, tnorm, VAR_avrg, VAR_medi, VAR_std, ndata]
def mvs_for_each_event(VAR_adap, nbin, nwndw, Enough, verbose=False):
|
raphaelm/django-i18nfield
|
tests/settings.py
|
Python
|
apache-2.0
| 1,801
| 0
|
import os
from django.utils.translation import gettext_lazy as _
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'kk0ai8i0dm-8^%&0&+e-rsmk8#t&)6r*y!wh=xx7l12+6k5mg4'
DEBUG = True
ALLOWED_HOSTS = ['*']
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tests.testapp'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'demoproject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'demoproject.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
STATIC_URL = '/static/'
LANGUAGE_CODE = 'en'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LANGUAGES = [
('de', _('German')),
('en', _('English')),
('fr', _('French')),
]
|
AlexYu-beta/CppTemplateProgrammingDemo
|
Demo1_8/demo1_8.py
|
Python
|
gpl-3.0
| 278
| 0.061151
|
from callback_event import *
def getOddNumber(k, getEvenNumber): return 1 + getEvenNumber(k)
def main():
    k = 1
    i = getOddNumber(k, double)
    print(i)
    i = getOddNumber(k, quadruple)
    print(i)
    i = getOddNumber(k, lambda x: x * 8)
    print(i)
if __name__ == "__main__": main()
|
TeamSPoon/logicmoo_workspace
|
packs_web/butterfly/lib/python3.7/site-packages/isort/profiles.py
|
Python
|
mit
| 1,601
| 0.000625
|
"""Common profiles are defined here to be easily used within a project using --profile {name}"""
from typing import Any, Dict
black = {
"multi_line_output": 3,
"include_trailing_comma": True,
"force_grid_wrap": 0,
"use_parentheses": True,
"ensure_newline_before_comments": True,
"line_length": 88,
}
django = {
"combine_as_imports": True,
"include_trailing_comma": True,
"multi_line_output": 5,
"line_length": 79,
}
pycharm = {
"multi_line_output": 3,
"force_grid_wrap": 2,
"lines_after_imports": 2,
}
google = {
"fo
|
rce_single_line": True,
"force_sort_within_sections": True,
"lexicographical": True,
"single_line_exclusions": ("typing",),
"order_by_type": False,
"group_by_package": True,
}
open_stack = {
"force_single_line": True,
"force_sort_within_sections": True,
"lexicographical": True,
}
plone = {
"force_alphabetical_sort": True,
"force_single_line": True,
"lines_after_imports": 2,
"line_length": 200,
}
attrs = {
"atomic": True,
"force_grid_wrap": 0,
"include_trailing_comma": True,
"lines_after_imports": 2,
"lines_between_types": 1,
"multi_line_output": 3,
"use_parentheses": True,
}
hug = {
"multi_line_output": 3,
"include_trailing_comma": True,
"force_grid_wrap": 0,
"use_parentheses": True,
"line_length": 100,
}
profiles: Dict[str, Dict[str, Any]] = {
"black": black,
"django": django,
"pycharm": pycharm,
"google": google,
"open_stack": open_stack,
"plone": plone,
"attrs": attrs,
"hug": hug,
}
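# Usage sketch (isort >= 5): pick a profile on the command line,
#   isort --profile black myproject/
# or through the Python API:
#   import isort
#   isort.code("import b\nimport a\n", profile="black")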
|
amgowano/oppia
|
core/domain/collection_domain_test.py
|
Python
|
apache-2.0
| 25,252
| 0.000158
|
# coding: utf-8
#
# Copyright 2015 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for collection domain objects and methods defined on them."""
from core.domain import collection_domain
from core.domain import collection_services
from core.tests import test_utils
import feconf
import utils
# Dictionary-like data structures within sample YAML must be formatted
# alphabetically to match string equivalence with the YAML generation
# methods tested below.
#
# If evaluating differences in YAML, conversion to dict form via
# utils.dict_from_yaml can isolate differences quickly.
SAMPLE_YAML_CONTENT = ("""category: A category
language_code: en
nodes:
- acquired_skills:
- Skill0a
- Skill0b
exploration_id: an_exploration_id
prerequisite_skills: []
objective: An objective
schema_version: %d
tags: []
title: A title
""") % (feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
class CollectionDomainUnitTests(test_utils.GenericTestBase):
"""Test the collection domain object."""
COLLECTION_ID = 'collection_id'
EXPLORATION_ID = 'exp_id_0'
def setUp(self):
super(CollectionDomainUnitTests, self).setUp()
self.save_new_valid_collection(
self.COLLECTION_ID, 'user@example.com', title='Title',
category='Category', objective='Objective',
exploration_id=self.EXPLORATION_ID)
self.collection = collection_services.get_collection_by_id(
self.COLLECTION_ID)
def _assert_validation_error(self, expected_error_substring):
"""Checks that the collection passes strict validation."""
with self.assertRaisesRegexp(
utils.ValidationError, expected_error_substring):
self.collection.validate()
def test_initial_validation(self):
"""Test validating a new, valid collection."""
self.collection.validate()
def test_title_validation(self):
self.collection.title = 0
self._assert_validation_error('Expected title to be a string')
def test_category_validation(self):
self.collection.category = 0
self._assert_validation_error('Expected category to be a string')
def test_objective_validation(self):
self.collection.objective = ''
self._assert_validation_error('objective must be specified')
self.collection.objective = 0
self._assert_validation_error('Expected objective to be a string')
def test_language_code_validation(self):
self.collection.language_code = ''
self._assert_validation_error('language must be specified')
self.collection.language_code = 0
self._assert_validation_error('Expected language code to be a string')
self.collection.language_code = 'xz'
self._assert_validation_error('Invalid language code')
def test_tags_validation(self):
self.collection.tags = 'abc'
self._assert_validation_error('Expected tags to be a list')
self.collection.tags = [2, 3]
self._assert_validation_error('Expected each tag to be a string')
self.collection.tags = ['', 'tag']
self._assert_validation_error('Tags should be non-empty')
self.collection.tags = ['234']
self._assert_validation_error(
'Tags should only contain lowercase letters and spaces')
self.collection.tags = [' abc']
self._assert_validation_error(
'Tags should not start or end with whitespace')
self.collection.tags = ['abc def']
self._assert_validation_error(
'Adjacent whitespace in tags should be collapsed')
self.collection.tags = ['abc', 'abc']
self._assert_validation_error(
'Expected tags to be unique, but found duplicates')
def test_schema_version_validation(self):
self.collection.schema_version = 'some_schema_version'
self._assert_validation_error('Expected schema version to be an int')
self.collection.schema_version = 100
self._assert_validation_error(
'Expected schema version to be %s' %
feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
def test_nodes_validation(self):
self.collection.nodes = {}
self._assert_validation_error('Expected nodes to be a list')
self.collection.nodes = [
collection_domain.CollectionNode.from_dict({
'exploration_id': '0',
'prerequisite_skills': [],
'acquired_skills': ['skill0a']
}),
collection_domain.CollectionNode.from_dict({
'exploration_id': '0',
'prerequisite_skills': ['skill0a'],
'acquired_skills': ['skill0b']
})
]
self._assert_validation_error(
'There are explorations referenced in the collection more than '
'once.')
def test_initial_explorations_validation(self):
# Having no collection nodes is fine for non-strict validation.
self.collection.nodes = []
self.collection.validate(strict=False)
# But it's not okay for strict validation.
self._assert_validation_error(
'Expected to have at least 1 exploration in the collection.')
# If the collection has exactly one exploration and that exploration
# has prerequisite skills, then the collection should fail validation.
self.collection.add_node('exp_id_1')
self.save_new_valid_exploration(
'exp_id_1', 'user@example.com', end_state_name='End')
collection_node1 = self.collection.get_node('exp_id_1')
collection_node1.update_prerequisite_skills(['skill1a'])
self._assert_validation_error(
'Expected to have at least 1 exploration with no prerequisite '
'skills.')
def test_metadata_validation(self):
self.collection.title = ''
self.collection.objective = ''
self.collection.category = ''
self.collection.nodes = []
self.collection.add_node('exp_id_1')
# Having no title is fine for non-strict validation.
self.collection.validate(strict=False)
# But it's not okay for strict validation.
self._assert_validation_error(
'A title must be specified for the collection.')
self.collection.title = 'A title'
# Having no objective is fine for non-strict validation.
self.collection.validate(strict=False)
# But it's not okay for strict validation.
        self._assert_validation_error(
'An objective must be specified for the collection.')
self.collection.objective = 'An objective'
# Having no category is fine for non-strict validation.
self.collection.validate(strict=False)
# But it's not okay for strict validation.
self._assert_validation_error(
'A category must be specified for the collection.')
self.collection.category = 'A category'
# Now the collection passes both strict and non-strict validation.
self.collection.validate(strict=False)
self.collection.validate(strict=True)
def test_collection_completability_validation(self):
# Add another exploration, but make it impossible to reach exp_id_1.
self.collection.add_node('exp_id_1')
collection_node1 = self.collection.get_node('exp_id_1')
collection_node1.update_prerequisite_skills(['skill0a'])
self._assert_validation_error(
'Some explorations are unreachable from the initial explorations')
# Connecting the two explorations should lead to cle
|
mozman/ezdxf
|
tests/test_02_dxf_graphics/test_243_replace_entity.py
|
Python
|
mit
| 3,603
| 0
|
# Copyright (c) 2020, Manfred Moitzi
# License: MIT License
import pytest
import ezdxf
from ezdxf.entities.dxfgfx import add_entity, replace_entity
from ezdxf.entities import Point
@pytest.fixture(scope="module")
def msp():
return ezdxf.new().modelspace()
@pytest.fixture(scope="module")
def db(msp):
return msp.entitydb
def test_add_entity(msp, db):
point = msp.add_point((0, 0))
new_point = Point.new(dxfattribs={"location": (3, 3)})
add_entity(new_point, msp)
assert point in msp
assert point.dxf.handle in db
assert new_point in msp
assert new_point.dxf.handle in db
assert point.dxf.handle != new_point.dxf.handle
def test_replace_entity(msp, db):
point = msp.add_point((0, 0))
handle = point.dxf.handle
new_point = Point.new(dxfattribs={"location": (3, 3)})
replace_entity(point, new_point, msp)
assert point.is_alive is False
assert new_point in msp
assert new_point.dxf.handle in db
assert new_point.dxf.handle == handle
def test_replace_entity_without_layout(msp, db):
point = Point.new(dxfattribs={"location": (3, 3)})
db.add(point)
handle = point.dxf.handle
assert point not in msp
assert point.dxf.handle in db
new_point = Point.new(dxfattribs={"location": (3, 3)})
replace_entity(point, new_point, msp)
assert point.is_alive is False
assert new_point not in msp
assert new_point.dxf.handle in db
assert new_point.dxf.handle == handle
def test_convert_circle_to_ellipse(msp, db):
circle = msp.add_circle(center=(3, 3), radius=2)
ellipse = circle.to_ellipse(replace=False)
assert circle.dxf.handle in db
assert ellipse.dxftype() == "ELLIPSE"
assert ellipse.dxf.handle in db
assert circle in msp
assert ellipse in msp
def test_replace_circle_by_ellipse(msp, db):
circle = msp.add_circle(center=(3, 3), radius=2)
circle_handle = circle.dxf.handle
ellipse = circle.to_ellipse(replace=True)
assert circle.is_alive is False
assert ellipse.dxftype() == "ELLIPSE"
assert ellipse.dxf.handle in db
assert ellipse.dxf.handle == circle_handle
assert ellipse in msp
def test_convert_circle_to_spline(msp, db):
circle = msp.add_circle(center=(3, 3), radius=2)
spline = circle.to_spline(replace=False)
assert circle.dxf.handle in db
assert spline.dxftype() == "SPLINE"
assert spline.dxf.handle in db
assert circle in msp
assert spline in msp
def test_replace_circle_by_spline(msp, db):
circle = msp.add_circle(center=(3, 3), radius=2)
circle_handle = circle.dxf.handle
spline = circle.to_spline(replace=True)
assert circle.is_alive is False
assert spline.dxftype() == "SPLINE"
assert spline.dxf.handle in db
assert spline.dxf.handle == circle_handle
assert spline in msp
def test_convert_ellipse_to_spline(msp, db):
ellipse = msp.add_ellipse(center=(3, 3), major_axis=(2, 0), ratio=0.5)
spline = ellipse.to_spline(replace=False)
assert ellipse.dxf.handle in db
assert spline.dxftype() == "SPLINE"
assert spline.dxf.handle in db
assert ellipse in msp
assert spline in msp
def test_replace_ellipse_by_spline(msp, db):
ellipse = msp.add_ellipse(center=(3, 3), major_axis=(2, 0), ratio=0.5)
ellipse_handle = ellipse.dxf.handle
spline = ellipse.to_spline(replace=True)
assert ellipse.is_alive is False
assert spline.dxftype() == "SPLINE"
assert spline.dxf.handle in db
assert spline.dxf.handle == ellipse_handle
assert spline in msp
if __name__ == "__main__":
pytest.main([__file__])
|
ewheeler/tracpro
|
tracpro/profiles/models.py
|
Python
|
bsd-3-clause
| 509
| 0.003929
|
from __future__ import absolute_import, unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Profile(models.Model):
"""
Extension for the user class
"""
user = models.OneToOneField(User)
full_name = models.CharField(verbose_name=_("Full name"), max_length=128, null=True)
    change_password = models.BooleanField(default=False, help_text=_("User must change password on next login"))
|
sai9/weewx-gitsvn
|
extensions/pmon/install.py
|
Python
|
gpl-3.0
| 1,514
| 0.001321
|
# $Id$
# installer for pmon
# Copyright 2014 Matthew Wall
from setup import ExtensionInstaller
def loader():
return ProcessMonitorInstaller()
class ProcessMonitorInstaller(ExtensionInstaller):
def __init__(self):
super(ProcessMonitorInstaller, self).__init__(
version="0.2",
name='pmon',
description='Collect and display process memory usage.',
author="Matthew Wall",
author_email="mwall@users.sourceforge.net",
process_services='user.pmon.ProcessMonitor',
config={
'ProcessMonitor': {
'data_binding': 'pmon_binding',
'process': 'weewxd'},
'DataBindings': {
'pmon_binding': {
'database': 'pmon_sqlite',
'table_name': 'archive',
'manager': 'weewx.manager.DaySummaryManager',
'schema': 'user.pmon.schema'}},
'Databases': {
'pmon_sqlite': {
'database_name': 'pmon.sdb',
'driver': 'weedb.sqlite'}},
'StdReport': {
'pmon': {
'skin': 'pmon',
'HTML_ROOT': 'pmon'}}},
files=[('bin/user', ['bin/user/pmon.py']),
('skins/pmon', ['skins/pmon/skin.conf',
'skins/pmon/index.html.tmpl'])]
)
|
openstack/murano
|
api-ref/source/conf.py
|
Python
|
apache-2.0
| 6,670
| 0
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# murano documentation build configuration file, created by
# sphinx-quickstart on Sat May 1 15:17:47 2010.
#
# This file is execfile()d with the current directory set to
# its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
extensions = [
'os_api_ref',
'openstackdocstheme'
]
html_theme = 'openstackdocs'
html_theme_options = {
"sidebar_mode": "toc",
}
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/murano'
openstackdocs_bug_project = 'murano'
openstackdocs_bug_tag = 'api-ref'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = u'2016-present, OpenStack Foundation'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# The reST default role (used for this markup: `text`) to use
# for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# -- Options for man page output ----------------------------------------------
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_use_modindex = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'muranodoc'
# -- Options for LaTeX output -------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'Murano.tex', u'OpenStack Application Catalog API Documentation',
u'OpenStack Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_use_modindex = True
|
BadDNA/anolis
|
web/env/lib/python2.6/site-packages/pip-0.7.2-py2.6.egg/pip/venv.py
|
Python
|
bsd-3-clause
| 1,972
| 0.001521
|
"""Tools for working with virtualenv environments"""
import os
import sys
import subprocess
from pip.exceptions import BadCommand
from pip.log import logger
def restart_in_venv(venv, base, site_packages, args):
"""
Restart this script using the interpreter in the given virtual environment
"""
if base and not os.path.isabs(venv) and not venv.startswith('~'):
base = os.path.expanduser(base)
# ensure we have an abs basepath at this point:
        # a relative one makes no sense (or does it?)
if os.path.isabs(base):
venv = os.path.join(base, venv)
    if venv.startswith('~'):
        venv = os.path.expanduser(venv)
if not os.path.exists(venv):
try:
import virtualenv
except ImportError:
print 'The virtual environment does not exist: %s' % venv
print 'and virtualenv is not installed, so a new environment cannot be created'
sys.exit(3)
print 'Creating new virtualenv environment in %s' % venv
virtualenv.logger = logger
logger.indent += 2
virtualenv.create_environment(venv, site_packages=site_packages)
if sys.platform == 'win32':
python = os.path.join(venv, 'Scripts', 'python.exe')
# check for bin directory which is used in buildouts
if not os.path.exists(python):
python = os.path.join(venv, 'bin', 'python.exe')
else:
python = os.path.join(venv, 'bin', 'python')
if not os.path.exists(python):
python = venv
if not os.path.exists(python):
raise BadCommand('Cannot find virtual environment interpreter at %s' % python)
base = os.path.dirname(os.path.dirname(python))
file = os.path.join(os.path.dirname(__file__), 'runner.py')
if file.endswith('.pyc'):
file = file[:-1]
proc = subprocess.Popen(
[python, file] + args + [base, '___VENV_RESTART___'])
proc.wait()
sys.exit(proc.returncode)
|
twiindan/selenium_lessons
|
04_Selenium/exercices/expedia.py
|
Python
|
apache-2.0
| 654
| 0.007645
|
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
# Configure the baseURL
baseUrl = "https://www.expedia.es"
# Create a webDriver instance and maximize window
driver = webdriver.Firefox()
driver.maximize_window()
# Navigage to URL and put a 10 seconds implicit wait
driver.get(baseUrl)
driver.implicitly_wait(10)
# Find and click on element "Flights"
# Find departure textbox and type "Barcelona"
# Find destination textbox and type "Madrid"
# Find departure time and type "23/11/2017"
# Close Calendar
# Find the "Find" button and click on
# Quit driver
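# One possible solution sketch; every locator below is an illustrative guess
# and would need to be verified against the live expedia.es markup:
#
#   from selenium.webdriver.common.keys import Keys
#   driver.find_element(By.ID, "tab-flight-tab-hp").click()
#   driver.find_element(By.ID, "flight-origin-hp-flight").send_keys("Barcelona")
#   driver.find_element(By.ID, "flight-destination-hp-flight").send_keys("Madrid")
#   driver.find_element(By.ID, "flight-departing-hp-flight").send_keys("23/11/2017")
#   driver.find_element(By.ID, "flight-departing-hp-flight").send_keys(Keys.ESCAPE)  # close calendar
#   driver.find_element(By.XPATH, "//button[contains(., 'Buscar')]").click()
#   driver.quit()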
|
akehrer/fiddle
|
fiddle/controllers/FiddleTabWidget.py
|
Python
|
gpl-3.0
| 9,860
| 0.001724
|
# Copyright (c) 2015 Aaron Kehrer
# Licensed under the terms of the MIT License
# (see fiddle/__init__.py for details)
# Import standard library modules
import os
# Import additional modules
import chardet
from PyQt4 import QtCore, QtGui
from fiddle.controllers.Editors import *
from fiddle.config import FILE_TYPES, PLATFORM
# An iterator to update as the user creates new files
new_file_iter = 1
class FiddleTabWidget(QtGui.QTabWidget):
def __init__(self, parent=None):
super(FiddleTabWidget, self).__init__(parent)
self.parent = parent
self.setAcceptDrops(True)
self.setTabsClosable(True)
self.setMovable(True)
self.setElideMode(QtCore.Qt.ElideRight)
self.setMinimumSize(QtCore.QSize(800, 300))
self.setDocumentMode(False)
self.setAutoFillBackground(False)
self.setTabShape(QtGui.QTabWidget.Rounded)
self.setCurrentIndex(-1)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(5)
sizePolicy.setVerticalStretch(3)
sizePolicy.setHeightForWidth(self.sizePolicy().hasHeightForWidth())
self.setSizePolicy(sizePolicy)
def dragEnterEvent(self, e):
"""
For drag-and-drop we need to accept drag enter events
"""
e.accept()
def dragMoveEvent(self, e):
"""
For drag-and-drop we need to accept drag move events
http://qt-project.org/forums/viewthread/3093
"""
e.accept()
def dropEvent(self, e):
"""
Handle the drop
http://qt-project.org/wiki/Drag_and_Drop_of_files
"""
# dropped files are file:// urls
if e.mimeData().hasUrls():
self._insert_list_of_files(e.mimeData().urls())
def _insert_list_of_files(self, file_list):
for filepath in file_list:
if filepath.isLocalFile():
if 'win32' in PLATFORM:
# mimedata path includes a leading slash that confuses copyfile on windows
# http://stackoverflow.com/questions/2144748/is-it-safe-to-use-sys-platform-win32-check-on-64-bit-python
fpath = filepath.path()[1:]
else:
# not windows
fpath = filepath.path()
self.parent.open_filepath(fpath)
class FiddleTabFile(QtGui.QWidget):
editor_changed = QtCore.pyqtSignal()
cursor_changed = QtCore.pyqtSignal(int, int)
find_wrapped = QtCore.pyqtSignal()
def __init__(self, parent=None, filepath=None):
super(FiddleTabFile, self).__init__(parent)
self._filepath = None
        self._saved = True
self.basepath = None
self.filename = None
self.extension = None
self.encoding = 'utf-8' # Default to UTF-8 encoding
# Set the layout and insert the editor
self.editor = None
self.setLayout(QtGui.QVBoxLayout())
self.layout().setMargin(0)
self.layout().setSpacing(0)
# Find/Replace
self.find_expr = ''
self.find_forward = False
self.found_first = False
        self.first_found = (0, 0)  # line, col
self.filepath = filepath
self.watcher = None
@property
def filepath(self):
return self._filepath
@filepath.setter
def filepath(self, path):
global new_file_iter
if path is not None:
self._filepath = path
self.basepath, self.filename = os.path.split(path)
_, ext = os.path.splitext(path)
self.extension = ext.lower()
with open(path, 'rb') as fp:
data = fp.read()
enc = chardet.detect(data)['encoding']
self.encoding = enc if enc is not None else 'utf-8'
if '.htm' in self.extension:
self.insert_editor(HTMLEditor(parent=self))
elif self.extension == '.js':
self.insert_editor(JavascriptEditor(parent=self))
elif self.extension == '.css':
self.insert_editor(CSSEditor(parent=self))
elif self.extension == '.py':
self.insert_editor(PythonEditor(parent=self))
else:
self.insert_editor(BaseEditor(parent=self))
try:
self.editor.setText(data.decode(self.encoding))
except TypeError:
self.editor.setText('')
self._saved = True
else:
self.basepath = None
self.filename = 'new_{}.py'.format(new_file_iter)
self.extension = '.py'
self._filepath = os.path.join(os.path.expanduser('~'), self.filename)
self.insert_editor(PythonEditor(parent=self))
new_file_iter += 1
self._saved = False
@property
def saved(self):
return self._saved
@saved.setter
def saved(self, state):
self._saved = state
self.editor_changed.emit()
def insert_editor(self, editor):
if self.editor is not None and self.layout().indexOf(self.editor) >= 0:
self.layout().removeWidget(self.editor)
self.editor.deleteLater()
self.editor = None
self.editor = editor
self.editor.textChanged.connect(self._set_text_changed)
self.editor.cursorPositionChanged.connect(self._cursor_position_changed)
self.layout().addWidget(self.editor)
def save(self):
if self.basepath is None:
self.save_as()
else:
self._write_file(self.filepath)
self.saved = True
def save_as(self):
path = self.basepath or os.path.join(os.path.expanduser('~'), self.filename)
filepath = QtGui.QFileDialog.getSaveFileName(None, None, path, ';;'.join(FILE_TYPES[1:]))
        if filepath != '':
self._write_file(filepath)
self.filepath = filepath
self.saved = True
def find_text(self, expr, re, cs, wo, wrap,
in_select=False, forward=True, line=-1, index=-1, show=True, posix=False):
"""
Find the string expr and return true if expr was found, otherwise returns false.
If expr is found it becomes the current selection. This is a convenience function around the find features
built in to QsciScintilla.
http://pyqt.sourceforge.net/Docs/QScintilla2/classQsciScintilla.html
:param expr:
:param re:
:param cs:
:param wo:
:param wrap:
:param in_select:
:param forward:
:param line:
:param index:
:param show:
:param posix:
:return:
"""
# Check for new expression
if expr != self.find_expr:
self.find_expr = expr
self.found_first = False
# Check for change in direction
if forward != self.find_forward:
if self.editor.hasSelectedText():
line, idx, _, _ = self.editor.getSelection()
self.editor.setCursorPosition(line, idx)
self.find_forward = forward
self.found_first = False
if self.found_first:
f = self.editor.findNext()
c = self.editor.getCursorPosition()
if c[0] <= self.first_found[0] and forward:
self.find_wrapped.emit()
elif c[0] >= self.first_found[0] and not forward:
self.find_wrapped.emit()
return f
elif in_select:
res = self.editor.findFirstInSelection(expr, re, cs, wo, forward, show, posix)
if res:
self.found_first = True
self.first_found = self.editor.getCursorPosition()
return True
else:
self.found_first = False
return False
else:
res = self.editor.findFirst(expr, re, cs, wo, wrap, forward, line, index, show, posix)
if res:
self.found_first = True
self.first_found = self.editor.getCursorPosition()
                return True
|
peterhinch/micropython-mqtt
|
mqtt_as/range_ex.py
|
Python
|
mit
| 2,773
| 0.003967
|
# range_ex.py Test of asynchronous mqtt client with clean session False.
# Extended version publishes SSID
# (C) Copyright Peter Hinch 2017-2019.
# Released under the MIT licence.
# Public brokers https://github.com/mqtt/mqtt.github.io/wiki/public_brokers
# This demo is for wireless range tests. If OOR the red LED will light.
# In range the blue LED will pulse for each received message.
# Uses clean sessions to avoid backlog when OOR.
# red LED: ON == WiFi fail
# blue LED pulse == message received
# Publishes connection statistics.
from mqtt_as import MQTTClient, config
from config import wifi_led, blue_led
import uasyncio as asyncio
import network
import gc
TOPIC = 'shed' # For demo publication and last will use same topic
outages = 0
rssi = -199 # Effectively zero signal in dB.
async def pulse(): # This demo pulses blue LED each time a subscribed msg arrives.
blue_led(True)
await asyncio.sleep(1)
blue_led(False)
def sub_cb(topic, msg, retained):
print((topic, msg))
asyncio.create_task(pulse())
# The only way to measure RSSI is via scan(). Alas scan() blocks so the code
# causes the obvious uasyncio issues.
async def get_rssi():
global rssi
s = network.WLAN()
ssid = config['ssid'].encode('UTF8')
while True:
try:
rssi = [x[3] for x in s.scan() if x[0] == ssid][0]
except IndexError: # ssid not found.
rssi = -199
await asyncio.sleep(30)
async def wifi_han(state):
global outages
wifi_led(not state) # Light LED when WiFi down
if state:
print('We are connected to broker.')
else:
outages += 1
print('WiFi or broker is down.')
await asyncio.sleep(1)
async def conn_han(client):
await client.subscribe('foo_topic', 1)
async def main(client):
try:
await client.connect()
    except OSError:
        print('Connection failed.')
return
n = 0
s = '{} repubs: {} outages: {} rssi: {}dB free: {}bytes'
while True:
await asyncio.sleep(5)
gc.collect()
m = gc.mem_free()
print('publish', n)
# If WiFi is down the following will pause for the duration.
await client.publish(TOPIC, s.format(n, client.REPUB_COUNT, outages, rssi, m), qos = 1)
n += 1
# Define configuration
config['subs_cb'] = sub_cb
config['wifi_coro'] = wifi_han
config['will'] = (TOPIC, 'Goodbye cruel world!', False, 0)
config['connect_coro'] = conn_han
config['keepalive'] = 120
# Set up client. Enable optional debug statements.
MQTTClient.DEBUG = True
client = MQTTClient(config)
asyncio.create_task(get_rssi())
try:
asyncio.run(main(client))
finally: # Prevent LmacRxBlk:1 errors.
client.close()
blue_led(True)
asyncio.new_event_loop()
|
poppogbr/genropy
|
packages/showcase/webpages/dev/remote.py
|
Python
|
lgpl-2.1
| 2,026
| 0.008391
|
# -*- coding: UTF-8 -*-
#--------------------------------------------------------------------------
# Copyright (c) : 2004 - 2007 Softwell sas - Milano
# Written by : Giovanni Porcari, Michele Bertoldi
# Saverio Porcari, Francesco Porcari , Francesco Cavazzana
#--------------------------------------------------------------------------
#This library is free software; you can redistribute it and/or
#modify it under the terms of the GNU Lesser General Public
#License as published by the Free Software Foundation; either
#version 2.1 of the License, or (at your option) any later version.
#This library is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
#Lesser General Public License for more details.
#You should have received a copy of the GNU Lesser General Public
#License along with this library; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
class GnrCustomWebPage(object):
def windowTitle(self):
return 'test remote'
    def main(self, root, **kwargs):
bc = root.borderContainer()
top = bc.contentPane(region='top', height='100px')
top.button('Build', fire='build')
top.button('Add element', fire='add')
top.dataController("""var pane = genro.nodeById('remoteContent')
pane._('div',{height:'200px',width:'200px',background:'lightBlue',
                            border:'1px solid blue','float':'left',
remote:{'method':'test'}});
""", _fired="^add")
center = bc.contentPane(region='center').div(nodeId='remoteContent')
center.div().remote('test', _fired='^build')
def remote_test(self, pane, **kwargs):
print 'pippo'
pane.div('hello', height='40px', width='80px', background='lime')
|
ducted/duct
|
duct/tests/test_sflow.py
|
Python
|
mit
| 362
| 0
|
from twisted.trial import unittest
from twisted.internet import defer
from duct.protocol.sflow import protocol
from duct.tests import globs
class Test(unittest.TestCase):
def test_decode(self):
proto = protocol.Sflow(globs.SFLOW_PACKET, '172.30.0.5')
self.assertTrue(proto.version == 5)
self.assertTrue(len(proto.samples) == 5)
|