repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
asttra/pysces | pysces/tests/__init__.py | Python | bsd-3-clause | 622 | 0.003215 | """
PySCeS - Python Simulator for Cellular Systems (ht | tp://pysces.sourceforge.net)
Copyright (C) 2004-2015 B.G. Olivier, J.M. Rohwer, J.-H.S Hofmeyr all rights reserved,
Brett G. Olivier (bgoli@users.sourceforge.net)
Triple-J Group for Molecular Cell Physiology
Stellenbosch University, South Africa.
Permission to use, modi | fy, and distribute this software is given under the
terms of the PySceS (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
Brett G. Olivier
"""
# Init file used for distutils install |
ltworf/relational | relational_gui/guihandler.py | Python | gpl-3.0 | 16,612 | 0.001987 | # Relational
# Copyright (C) 2008-2020 Salvo "LtWorf" Tomaselli
#
# Relational is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# author Salvo "LtWorf" Tomaselli <tiposchi@tiscali.it>
import sys
from gettext import gettext as _
from PyQt5 import QtCore, QtWidgets, QtGui
from relational import parser, optimizer, rtypes
from relational.maintenance import UserInterface
from relational_gui import about
from relational_gui import survey
from relational_gui import surveyForm
from relational_gui import maingui
version = ''
class relForm(QtWidgets.QMainWindow):
def __init__(self):
QtWidgets.QMainWindow.__init__(self)
self.About = None
self.Survey = None
self.undo = None # UndoQueue for queries
self.undo_program = None
self.selectedRelation = None
self.ui = maingui.Ui_MainWindow()
self.user_interface = UserInterface()
self.history_current_line = None
# Creates the UI
self.ui.setupUi(self)
# Setting fonts for symbols
f = QtGui.QFont()
size = f.pointSize()
if sys.platform.startswith('win'):
winFont = 'Cambria'
symbolFont = 'Segoe UI Symbol'
increment = 4
else:
winFont = f.family()
symbolFont = f.family()
increment = 2
font = QtGui.QFont(winFont, size + increment)
sfont = QtGui.QFont(symbolFont)
self.ui.lstHistory.setFont(font)
self.ui.txtMultiQuery.setFont(font)
self.ui.txtQuery.setFont(font)
self.ui.groupOperators.setFont(font)
self.ui.cmdClearMultilineQuery.setFont(sfont)
self.ui.cmdClearQuery.setFont(sfont)
self.settings = QtCore.QSettings()
self._restore_settings()
# Shortcuts
shortcuts = (
(self.ui.lstRelations, QtGui.QKeySequence.Delete, self.unloadRelation),
(self.ui.lstRelations, 'Space', lambda: self.printRelation(self.ui.lstRelations.currentItem())),
(self.ui.txtQuery, QtGui.QKeySequence.MoveToNextLine, self.next_history),
(self.ui.txtQuery, QtGui.QKeySequence.MoveToPreviousLine, self.prev_history),
)
self.add_shortcuts(shortcuts)
def next_history(self):
if self.ui.lstHistory.currentRow() + 1 == self.ui.lstHistory.count() and self.history_current_line:
self.ui.txtQuery.setText(self.history_current_line)
self.history_current_line = None
elif self.history_current_line:
self.ui.lstHistory.setCurrentRow(self.ui.lstHistory.currentRow()+1)
self.resumeHistory(self.ui.lstHistory.currentItem())
def prev_history(self):
if self.history_current_line is None:
self.history_current_line = self.ui.txtQuery.text()
if self.ui.lstHistory.currentItem() is None:
return
if not self.ui.lstHistory.currentItem().text() != self.ui.txtQuery.text():
self.ui.lstHistory.setCurrentRow(self.ui.lstHistory.currentRow()-1)
elif self.ui.lstHistory.currentRow() > 0:
self.ui.lstHistory.setCurrentRow(self.ui.lstHistory.currentRow()-1)
self.resumeHistory(self.ui.lstHistory.currentItem())
def add_shortcuts(self, shortcuts):
for widget,shortcut,slot in shortcuts:
action = QtWidgets.QAction(self)
action.triggered.connect(slot)
action.setShortcut(QtGui.QKeySequence(shortcut))
# I couldn't find the constant
action.setShortcutContext(0)
widget.addAction(action)
def checkVersion(self):
from relational import maintenance
online = maintenance.check_latest_version()
if online is None:
r = _('Network error')
elif online > version:
r = _('New version available online: %s.') % online
elif online == version:
r = _('Latest version installed.')
else:
r = _('You are using an unstable version.')
QtWidgets.QMessageBox.information(self, _('Version'), r)
def setHistoryShown(self, history_shown):
self.history_shown = history_shown
self.settings.setValue('history_shown', history_shown)
self.ui.lstHistory.setVisible(history_shown)
self.ui.actionShow_history.setChecked(history_shown)
def setMultiline(self, multiline):
self.multiline = multiline
self.settings.setValue('multiline', multiline)
if multiline:
index = 0
else:
index = 1
self.ui.stackedWidget.setCurrentIndex(index)
self.ui.actionMulti_line_mode.setChecked(multiline)
def load_query(self, *index):
self.ui.txtQuery.setText(self.savedQ.itemData(index[0]).toString())
def undoOptimize(self):
'''Undoes the optimization on the query, popping one item from the undo list'''
if self.undo != None:
self.ui.txtQuery.setText(self.undo)
def undoOptimizeProgram(self):
if self.undo_program:
self.ui.txtMultiQuery.setPlainText(self.undo_pro | gram)
def optimizeProgram(self):
self.undo_program = self.ui.txtMultiQuery.toPlainText()
result = optimizer.optimize_program(
self.ui.txtMultiQuery.toPlainText(),
self.user_interface.relations
| )
self.ui.txtMultiQuery.setPlainText(result)
def optimize(self):
'''Performs all the possible optimizations on the query'''
self.undo = self.ui.txtQuery.text() # Storing the query in undo list
res_rel,query = self.user_interface.split_query(self.ui.txtQuery.text(),None)
try:
trace = []
result = optimizer.optimize_all(
query,
self.user_interface.relations,
debug=trace
)
print('==== Optimization steps ====')
print(query)
print('\n'.join(trace))
print('========')
if res_rel:
result = '%s = %s' % (res_rel, result)
self.ui.txtQuery.setText(result)
except Exception as e:
self.error(e)
def resumeHistory(self, item):
if item is None:
return
itm = item.text()
self.ui.txtQuery.setText(itm)
def execute(self):
# Show the 'Processing' frame
self.ui.stackedWidget.setCurrentIndex(2)
QtCore.QCoreApplication.processEvents()
try:
'''Executes the query'''
if self.multiline:
query = self.ui.txtMultiQuery.toPlainText()
self.settings.setValue('multiline/query', query)
else:
query = self.ui.txtQuery.text()
if not query.strip():
return
try:
self.selectedRelation = self.user_interface.multi_execute(query)
except Exception as e:
return self.error(e)
finally:
self.updateRelations() # update the list
self.showRelation(self.selectedRelation)
if not self.multiline:
# Last in history
item = self.ui.lstHistory.item(self.ui.lstHistory.count() - 1)
if item is None or item.text() != query:
# Adds to history if it is not already the last
hitem = QtWidgets.QListWidgetItem(None, 0)
hitem.setText(query)
self.ui.lstHistory.addItem(hitem)
self.ui. |
acontry/altcoin-arbitrage | arbitrage/public_markets/market.py | Python | mit | 4,037 | 0.001734 | import time
import requests
import config
import logging
class Market(object):
def __init__(self):
self.name = self.__class__.__name__
self.p_coin = config.p_coi | n
self.s_coin = config.s_coin
self.depth = {'asks': [], 'b | ids': [], 'last_updated': 0}
self.prices = {'last_updated': 0}
# Configurable parameters
self.update_rate = 60
self.fees = {"buy": {"fee": 0.002, "coin": "p_coin"}, "sell": {"fee": 0.002, "coin": "s_coin"}}
def get_depth(self):
# If the update rate dictates that it is time to update the market, do it
timediff = time.time() - self.depth['last_updated']
if timediff > self.update_rate:
try:
self.update_depth()
self.depth['last_updated'] = time.time()
self.depth['current'] = True
except requests.HTTPError:
logging.error("HTTPError, can't update market: %s" % self.name)
except Exception as e:
logging.error("Can't update market: %s - %s" % (self.name, str(e)))
# If the market is expired, mark it as such
timediff = time.time() - self.depth['last_updated']
if timediff > config.market_expiration_time:
logging.warning('Market: %s order book is expired', self.name)
self.depth['current'] = False
return self.depth
def get_all_prices(self):
"""Get bid/ask prices for all currencies from market"""
try:
self.update_prices()
self.prices['last_updated'] = time.time()
self.prices['current'] = True
except requests.HTTPError:
logging.error("HTTPError, can't update market: %s" % self.name)
except Exception as e:
logging.error("Can't update market: %s - %s" % (self.name, str(e)))
# If market is expired, mark it as such
timediff = time.time() - self.prices['last_updated']
if timediff > config.market_expiration_time:
logging.warning('Market: %s order book is expired', self.name)
self.prices['current'] = False
return self.prices
def get_ticker(self):
"""Returns bid/ask prices from depth"""
res = {'ask': 0, 'bid': 0}
if len(self.depth['asks']) > 0 and len(self.depth['bids']) > 0:
res = {'ask': self.depth['asks'][0],
'bid': self.depth['bids'][0]}
return res
def format_depth(self, bids, asks, price_idx, amount_idx):
bids = self.format_price_list(bids, price_idx, amount_idx, True)
asks = self.format_price_list(asks, price_idx, amount_idx, False)
# Bid prices should be less than ask prices, so go through depths to "execute" trades and clean
# up the bids and asks
while bids[0]['price'] >= asks[0]['price']:
# If bid amount is greater than ask amount, update bid volume and remove "completed" ask
if bids[0]['amount'] > asks[0]['amount']:
bids[0]['amount'] -= asks[0]['amount']
asks.remove(asks[0])
# If ask amount is greater than bid amount, do the opposite
elif bids[0]['amount'] < asks[0]['amount']:
asks[0]['amount'] -= bids[0]['amount']
bids.remove(bids[0])
# If the volumes are miraculously equal
else:
asks.remove(asks[0])
bids.remove(bids[0])
return {'asks': asks, 'bids': bids}
@staticmethod
def format_price_list(orders, price_idx, amount_idx, reverse=False):
orders.sort(key=lambda x: float(x[price_idx]), reverse=reverse)
r = []
for i in orders:
r.append({'price': float(i[price_idx]), 'amount': float(i[amount_idx])})
return r
## Abstract methods
def update_depth(self):
pass
def update_prices(self):
pass
def buy(self, price, amount):
pass
def sell(self, price, amount):
pass
|
nikpap/inspire-next | inspirehep/modules/workflows/views/__init__.py | Python | gpl-2.0 | 998 | 0 | # -*- coding: utf-8 - | *-
#
# This | file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 331, Boston, MA 02111-1307, USA.
"""View blueprints for Holding Pen."""
from .callback import blueprint as holdingpen_blueprint
from .holdingpen_edit import blueprint as settings_blueprint
blueprints = [
holdingpen_blueprint,
settings_blueprint,
]
|
erdem/django-selenium-example | todoapp/todoapp/wsgi.py | Python | mit | 391 | 0 | """
WS | GI config for todoapp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "todoapp.settings")
application = ge | t_wsgi_application()
|
kaskr/CppAD | bin/proj_desc.py | Python | epl-1.0 | 6,719 | 0.016669 | #! /bin/python3
# $Id
# -----------------------------------------------------------------------------
# CppAD: C++ Algorithmic Differentiation: Copyright (C) 2003-15 Bradley M. Bell
#
# CppAD is distributed under multiple licenses. This distribution is under
# the terms of the
# Eclipse Public License Version 1.0.
#
# A copy of this license is included in the COPYING file of this distribution.
# Please visit http://www.coin-or.org/CppAD/ for information on other licenses.
# -----------------------------------------------------------------------------
# dictionary of replacements
#
import collections
replace_dict = collections.OrderedDict()
replace_dict['projectName'] = 'CppAD'
replace_dict['projectRepo'] = 'CppAD'
#
replace_dict['projectDescription'] = '''
Given a C++ algorithm that computes function values,
CppAD generates an algorithm that computes corresponding derivative values.
'''
#
replace_dict['projectShortDescription'] ='''
A tool for differentiation of C++ functions.
'''
#
replace_dict['projectManager'] ='Brad Bell, http://www.seanet.com/~bradbell'
replace_dict['projectHomePage'] = 'http://www.coin-or.org/CppAD'
#
replace_dict['projectLicense'] = '''
Eclipse Public License 1.0, or GNU General Public License 3.0
'''
replace_dict['projectLicenseURL'] = '''
http://www.opensource.org/licenses/alphabetica
'''
replace_dict['coinLinkedProjects'] = ''
replace_dict['projectLanguage'] = 'C++'
replace_dict['activityStatus'] = 'Active'
replace_dict['maturityLevel'] = '4'
replace_dict['stableVersionNumber'] = '20150000'
replace_dict['releaseNumber'] = '20150000.8'
replace_dict['projectCategories'] = 'Optimization utility'
replace_dict['documentation'] = 'http://www.coin-or.org/CppAD/Doc'
replace_dict['sourceCodeDownload'] = '''
http://www.coin-or.org/download/source/CppAD
'''
replace_dict['mailingList'] = '''
http://list.coin-or.org/mailman/listinfo/cppad
'''
# ----------------------------------------------------------------------------
# <otherLinkedPackages>
#
other_package = collections.OrderedDict( [
('ADOL-C', 'http://www.coin-or.org/projects/ADOL-C.xml'),
('Boost uBlas vector', 'http://www.boost.org/doc/libs'),
('Boost Thread', 'http://www.boost.org/doc/libs'),
('ColPack', 'http://cscapes.cs.purdue.edu/dox/ColPack/html'),
('Eigen', 'http://eigen.tuxfamily.org'),
('FADBAD', 'http://www.fadbad.com/fadbad.htm'),
('Ipopt', 'http://www.coin-or.org/projects/Ipopt.xml'),
('OpenMP', 'http://openmp.org/wp'),
('Pthread',
'http,//pubs.opengroup.org/onlinepubs/009695399/basedefs/pthread.h.html'),
('Sacado', 'http://trilinos.org/packages/sacado')
] )
text='\n'
for name in other_package :
url = other_package[name]
text += ' <otherPackage>\n'
text += ' <packageName>' + name + '</packageName>\n'
text += ' <packageURL>' + url + '</packageURL>\n'
text += ' <requiredOrOptional>Optional</requiredOrOptional>\n'
text += ' </otherPackage>\n'
replace_dict['otherLinkedPackages'] = text
# -----------------------------------------------------------------------------
# <testedPlatforms>
tested_platform = collections.OrderedDict( [
('Linux', 'gcc'),
('Cygwin', 'gcc'),
('Microsoft Windows', 'MSVC++')
] )
text='\n'
for system in tested_platform :
compiler = tested_platform[system]
text += ' <platform>\n'
text += ' <operatingSystem>' + system + '</operatingSystem>\n'
text += ' <compiler>' + compiler + '</compiler>\n'
text += ' </platform>\n'
replace_dict['testedPlatforms'] = text
# -----------------------------------------------------------------------------
import sys
import re
import os
import subprocess
# -----------------------------------------------------------------------------
# command line arguments
usage = '\tbin/proj_desc.py\n'
narg = len(sys.argv)
if sys.argv[0] != 'bin/proj_desc.py' :
msg = 'bin/proj_desc.py must be executed from its parent directory'
sys.exit(usage + msg)
if narg != 1 :
msg = 'expected 0 but found ' + str(narg-1) + ' command line arguments'
sys.exit(usage + msg)
# -----------------------------------------------------------------------------
def system_cmd(cmd) :
try :
output = subprocess.check_output(
cmd,
stderr=subprocess.STDOUT,
shell=True
)
except subprocess.CalledProcessError as info :
msg = str( info.output , 'ascii' )
msg += '\nbin/proj_desc.py exiting because command above failed'
sys.exit(msg)
return str( output, 'ascii' )
# ----------------------------------------------------------------------- | ------
# update cppad_conf_dir
#
cppad_conf_dir = os.environ['HOME'] + '/cppad.svn/conf'
if os.path.exists(cppad_conf_dir ) :
cmd = 'svn update ' + cppad_conf_dir
print( cmd )
else :
cmd = 'svn checkout https://projects.coin-or.org/svn/CppAD/conf '
cmd += cppad_conf_dir
| print( cmd )
# -----------------------------------------------------------------------------
# get the current verison of the file
#
file_name = cppad_conf_dir + '/projDesc.xml'
file_ptr = open(file_name, 'r')
file_data = file_ptr.read()
file_ptr.close()
# -----------------------------------------------------------------------------
# Do the replacemnets
#
# fix stableVersionNumber and releaseNumber (remove enclosing comment block)
pattern = '<!--\s*<stableVersionNumber>'
replace = '<stableVersionNumber>'
file_data = re.sub(pattern, replace, file_data)
pattern = '</releaseNumber>\s*-->'
replace = '</releaseNumber>'
file_data = re.sub(pattern, replace, file_data)
#
for key in replace_dict :
value = replace_dict[key]
start = '<' + key + '>'
end = '</' + key + '>'
pattern = '[ \t]*' + start + '[a-zA-Z\s]*' + end
replace = start + value + end
file_data = re.sub(pattern, replace, file_data)
#
# projectCategories is a special case
key = 'projectCategories'
value = replace_dict[key]
start = '<' + key + '>'
end = '</' + key + '>'
pattern = '[ \t]*' + start + '[ \t]*'
file_data = re.sub(pattern, replace, file_data)
#
pattern = '[ \t]*' + end + '[ \t]*'
replace = start + '\n <category>\n '
replace += value + '\n </category>\n' + end
file_data = re.sub(pattern, replace, file_data)
# -----------------------------------------------------------------------------
# replace input file
file_name = cppad_conf_dir + '/projDesc.xml'
file_ptr = open(file_name, 'w')
file_ptr.write(file_data)
file_ptr.close()
# -----------------------------------------------------------------------------
msg = 'check results in\n\t' + file_name + '\nand then execute'
msg += '\n\tsvn commit -m "see bin/proj_desc.py" \\\n\t' + file_name
print(msg)
|
richo/groundstation | test/test_all_stores.py | Python | mit | 438 | 0 | import os
from support import store_fixture
impo | rt groundstation.store
class TestGitStore(store_fixture.StoreTestCase):
storeClass = groundstation.store.git_store.GitStore
def test_creates_required_dirs(self):
for d in groundstation.store.git_store.GitStore.required_dirs:
path = os.path.join(self.path, d)
| self.assertTrue(os.path.exists(path))
self.assertTrue(os.path.isdir(path))
|
whlteXbread/photoManip | photomanip/tests/test_metadata.py | Python | mit | 3,503 | 0 | import os
from shutil import copyfile
from photomanip.metadata import ImageExif, SetExifTool
from nose import tools
ORIGINAL_IMAGE_FILENAME = 'photomanip/tests/turd_ferguson.jpeg'
TEST_IMAGE_FILENAME = 'photomanip/tests/image_exif_test.jpg'
ORIGINAL_PHOTO_FILENAME = 'photomanip/tests/test_photo_0.jpg'
TEST_PHOTO_01_FILENAME = 'photomanip/tests/image_exposure_test_01.jpg'
TEST_PHOTO_02_FILENAME = 'photomanip/tests/image_exposure_test_02.jpg'
class TestImageExif:
@classmethod
def setup_class(cls):
cls.image_exif = ImageExif()
copyfile(ORIGINAL_IMAGE_FILENAME, TEST_IMAGE_FILENAME)
copyfile(ORIGINAL_PHOTO_FILENAME, TEST_PHOTO_01_FILENAME)
copyfile(ORIGINAL_PHOTO_FILENAME, TEST_PHOTO_02_FILENAME)
@classmethod
def teardown_class(cls):
os.remove(TEST_IMAGE_FILENAME)
os.remove(TEST_PHOTO_01_FILENAME)
os.remove(TEST_PHOTO_02_FILENAME)
def get_stored_tags(self, tag_list, filename):
with SetExifTool() as et:
stored_tags = et.get_tags(tag_list, filename)
return stored_tags
def test_imageexif_generate_tag_list(self):
get_list = self.image_exif.get_list
# test get list
tag_list = self.image_exif._generate_tag_list(get_list)
tools.eq_(set(tag_list), set([
'EXIF:DateTimeOriginal',
'File:ImageHeight',
'IPTC:Keywords',
'EXIF:ExposureTime',
'File:ImageWidth']))
# test set list
tag_list = self.image_exif._generate_tag_list(get_list, True)
tools.eq_(tag_list, {
'date_created': 'EXIF:DateTimeOriginal={}',
'exposure_time': 'EXIF:ExposureTime={}',
'image_height': 'File:ImageHeight={}',
'image_width': 'File:ImageWidth={}',
'keywords': 'IPTC:Keywords={}'})
def test_set_image_metadata(self):
output_meta = {
"name": "Terd Ferguson",
"keywords": "one, two, three",
"caption": "suck it, trebeck",
}
result = self.image_exif.set_image_metadata(TEST_IMAGE_FILENAME,
output_meta)
tools.eq_(result, '1 image files updated\n')
check_tags = self.image_exif._generate_tag_list(output_meta.keys())
stored_tags = self.get_stored_tags(check_tags, TEST_IMAGE_FILENAME)
# now check if the metadata matches
for key, val in output_meta.items():
mapped_key = self.image_exif.metadata_map[key]
tools.eq_(val, sto | red_tags[mapped_key])
def test_calculate_exposure_time(self):
tag_list = self.image_exif._generate_tag_list(['exposure_time'])
stored_tags = self.get_stored_tags(tag_list, TEST_PHOTO_01_FILENAME)
tools.eq_(stored_tags['EXIF:ExposureTime'], 0.0013333333 | 33)
def test_get_tags_containing(self):
tag_list = self.image_exif._generate_tag_list(['keywords'])
stored_tags = self.get_stored_tags(tag_list, TEST_PHOTO_01_FILENAME)
result = self.image_exif.get_tags_containing(
stored_tags['IPTC:Keywords'], 'faceit365')
tools.eq_(result, 'faceit365:date=20190308')
def test_get_metadata_batch(self):
fname_list = [TEST_PHOTO_01_FILENAME, TEST_PHOTO_02_FILENAME]
meta_list = self.image_exif.get_metadata_batch(fname_list)
meta_list[0].pop('SourceFile')
meta_list[1].pop('SourceFile')
tools.eq_(meta_list[0], meta_list[1])
|
MSFTOSSMgmt/WPSDSCLinux | LCM/scripts/python3/GetDscConfiguration.py | Python | mit | 3,596 | 0.003893 | #!/usr/bin/env python3
import fileinput
import sys
import subprocess
import json
import time
import datetime
import os
import os.path
from OmsConfigHostHelpers import write_omsconfig_host_telemetry, write_omsconfig_host_switch_event, write_omsconfig_host_log, stop_old_host_instances
import warnings
with warnings.catch_warnings():
warnings.filterwarnings("ignore",category=DeprecationWarning)
from imp import load_source
from os.path import dirname, isfile, join, realpath
from fcntl import flock, LOCK_EX, LOCK_UN, LOCK_NB
pathToCurrentScript = realpath(__file__)
pathToCommonScriptsFolder = dirname(pathToCurrentScript)
helperLibPath = join(pathToCommonScriptsFolder, 'helperlib.py')
helperlib = load_source('helperlib', helperLibPath)
omi_bindir = "<CONFIG_BINDIR>"
omicli_path = omi_bindir + "/omicli"
dsc_host_base_path = helperlib.DSC_HOST_BASE_PATH
dsc_host_path = join(dsc_host_base_path, 'bin/dsc_host')
dsc_host_output_path = join(dsc_host_base_path, 'output')
dsc_host_lock_path = join(dsc_host_base_path, 'dsc_host_lock')
dsc_host_switch_path = join(dsc_host_base_path, 'dsc_host_ready')
if ("omsconfig" in helperlib.DSC_SCRIPT_PATH):
write_omsconfig_host_switch_event(pathToCurrentScript, isfile(dsc_host_switch_path))
if ("omsconfig" in helperlib.DSC_SCRIPT_PATH) and (isfile(dsc_host_switch_path)):
use_omsconfig_host = True
else:
use_omsconfig_host = False
parameters = []
if use_omsconfig_host:
parameters.append(dsc_host_path)
parameters.append(dsc_host_output_path)
parameters.append("GetConfiguration")
else:
parameters.append(omicli_path)
parameters.append("iv")
parameters.append("<DSC_NAMESPACE>")
parameters.append("{")
parameters.append("MSFT_DSCLocalConfigurationManager")
parameters.append("}")
parameters.append("GetConfiguration")
stdout = ''
stderr = ''
if use_omsconfig_host:
try:
dschostlock_filehandle = None
stop_old_host_instances(dsc_host_lock_path | )
# Open the dsc host lock file. This also creates a file if it does not exist
dschostlock_filehandle = open(dsc_host_lock_path, 'w')
print("Opened the dsc host lock fi | le at the path '" + dsc_host_lock_path + "'")
dschostlock_acquired = False
# Acquire dsc host file lock
for retry in range(10):
try:
flock(dschostlock_filehandle, LOCK_EX | LOCK_NB)
dschostlock_acquired = True
break
except IOError:
write_omsconfig_host_log('dsc_host lock file not acquired. retry (#' + str(retry) + ') after 60 seconds...', pathToCurrentScript)
sleep(60)
if dschostlock_acquired:
p = subprocess.Popen(parameters, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
stdout = stdout.decode() if isinstance(stdout, bytes) else stdout
print(stdout)
else:
print("dsc host lock already acuired by a different process")
finally:
if (dschostlock_filehandle):
# Release dsc host file lock
flock(dschostlock_filehandle, LOCK_UN)
# Close dsc host lock file handle
dschostlock_filehandle.close()
else:
p = subprocess.Popen(parameters, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
stdout = stdout.decode() if isinstance(stdout, bytes) else stdout
stderr = stderr.decode() if isinstance(stderr, bytes) else stderr
print(stdout)
print(stderr)
|
RedHatInsights/insights-core | insights/parsers/tests/test_max_uid.py | Python | apache-2.0 | 635 | 0 | import doctest
import pytest
from insights.parsers import max_uid, ParseException, SkipException
from insights.parsers.max_uid import MaxUID
from insights.tests import context_wrap
def test_max_uid():
with pytest.raises(SkipException):
MaxUID(context_wrap(""))
with pytest.raises(ParseException):
MaxUID(context_wrap("1a"))
| ma | x_uid = MaxUID(context_wrap("65536"))
assert max_uid is not None
assert max_uid.value == 65536
def test_doc_examples():
env = {
'max_uid': MaxUID(context_wrap("65534")),
}
failed, total = doctest.testmod(max_uid, globs=env)
assert failed == 0
|
dilynfullerton/tr-A_dependence_plots | src/deprecated/int/ExpInt.py | Python | cc0-1.0 | 719 | 0 | """ExpInt.py
Definition of the namedtuple that identifies a set of interaction files from
which to derive data
"""
from __future__ import division
from __future__ import unicode_literals
from collections import namedtuple
# noinspection PyClassHasNoInit
class ExpInt(namedtuple('ExpInt', ['e', 'hw', 'base', 'rp'])):
"""Descriptive key for *.int da | ta.
e: max e-level
hw: hw frequency
base: (default None) the mass number that normal ordering was done WRT
rp: (default None) the proton radius? I do not really know what this is
"""
__slots__ = ()
def __str__(self):
return str(tupl | e(self._asdict().values())).replace(', None', '')
ExpInt.__new__.__defaults__ = (None, None)
|
ColdSauce/define | testclass.py | Python | bsd-2-clause | 1,700 | 0.018235 | import requests
from os import system
from time import time
class dict:
def __init__(self,key,urbankey,tkey):
self.key = key
self.urbankey = urbankey
self.tkey = tkey
def getDefinition(self,word):
definition = requests.get("http://api.wordnik.com/v4/word.json/%s/definitions?api_key=%s" % (word,self.key) | ).json()[0]["text"]
return definition
def getHyphenation(self,word):
hyphenation = requests.get("http://api.wordnik.com/v4/word.json/%s/hyphenation?api_key=%s" % (word,self.key)).json()
return hyphenation
def getAudio(self,word):
url = requests.get("http://api.wordnik.com/v4/word.json/%s/audio?api_key=%s" % (word,self.key)).json()[0]["fileUrl"]
return requests.get(url)
def getUrban(self,word):
urb = re | quests.get("https://mashape-community-urban-dictionary.p.mashape.com/define?term=%s"
% word, headers={"X-Mashape-Key": self.urbankey}).json()
if urb["list"]>0:
return urb["list"][0]["definition"]
def getThesaurus(self,word):
response = requests.get("http://words.bighugelabs.com/api/2/%s/%s/json"
% (self.tkey, word)).json()
return response
t = dict("1e940957819058fe3ec7c59d43c09504b400110db7faa0509","ub2JDDg9Iumsh1HfdO3a3HQbZi0up1qe8LkjsnWQvyVvQLFn1q","e415520c671c26518df498d8f4736cac")
tim = time()
print(t.getDefinition("test"))
print(t.getHyphenation("test"))
#print(t.getUrban("DAE"))
#print(t.getThesaurus("test")["noun"])
aud = t.getAudio("love")
buff = open("/tmp/filename.mp3","w")
buff.write(aud.content)
buff.close()
system("gst-launch-1.0 playbin uri=file:///tmp/filename.mp3 -q")
print(time()-tim)
|
Dining-Engineers/left-luggage-detection | misc/demo/demo_cv_async.py | Python | gpl-2.0 | 732 | 0 | #!/usr/bin/env python
import freenect
import cv
from misc.demo import frame_convert
cv.NamedWindow('Depth')
cv.NamedWindow('RGB')
keep_running = True
def display_depth(dev, data, timestamp):
global keep_running
| cv.ShowImage('Depth', frame_convert.pretty_depth_cv(data))
if cv.WaitKey(10) == 27:
keep_running = False
def display_rgb(dev, data, timestamp):
global keep_running
cv.ShowImage('RGB', frame_convert.video_cv(data))
if cv.WaitKey(10) == 27:
keep_running = False
def body(*args):
if not keep_running:
raise freenect.Kill
pr | int('Press ESC in window to stop')
freenect.runloop(depth=display_depth,
video=display_rgb,
body=body)
|
dianshen/github | day12bbs/manage.py | Python | gpl-3.0 | 806 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "day12bbs.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
| )
raise
execute_from_ | command_line(sys.argv)
|
Init-7/demoest | demoest/demoest/doctype/cotizacion_item_recursos_humanos/cotizacion_item_recursos_humanos.py | Python | mit | 272 | 0.007353 | # | -*- coding: utf-8 -*-
# Copyright (c) 2015, Daniel and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model. | document import Document
class CotizacionItemRecursosHumanos(Document):
pass
|
lazaronixon/enigma2 | RecordTimer.py | Python | gpl-2.0 | 43,499 | 0.030208 | import os
from enigma import eEPGCache, getBestPlayableServiceReference, \
eServiceReference, iRecordableService, quitMainloop, eActionMap, setPreferredTuner
from Components.config import config
from Components.UsageConfig import defaultMoviePath
from Components.TimerSanityCheck import TimerSanityCheck
from Screens.MessageBox import MessageBox
import Screens.Standby
import Screens.InfoBar
from Tools import Directories, Notifications, ASCIItranslit, Trashcan
from Tools.XMLTools import stringToXML
import timer
import xml.etree.cElementTree
import NavigationInstance
from ServiceReference import ServiceReference
from time import localtime, strftime, ctime, time
from bisect import insort
from sys import maxint
# ok, for descriptions etc we have:
# service reference (to get the service name)
# name (title)
# description (description)
# event data (ONLY for time adjustments etc.)
# parses an event, and gives out a (begin, end, name, duration, eit)-tuple.
# begin and end will be corrected
def parseEvent(ev, description = True):
if description:
name = ev.getEventName()
description = ev.getShortDescription()
if description == "":
description = ev.getExtendedDescription()
else:
name = ""
description = ""
begin = ev.getBeginTime()
end = begin + ev.getDuration()
eit = ev.getEventId()
begin -= config.recording.margin_before.value * 60
end += config.recording.margin_after.value * 60
return (begin, end, name, description, eit)
class AFTEREVENT:
    # Action to perform when a timer/recording finishes.
    NONE = 0         # do nothing
    STANDBY = 1      # go to (soft) standby
    DEEPSTANDBY = 2  # go to deep standby / power off
    AUTO = 3         # decide automatically from the box state
def findSafeRecordPath(dirname):
    """Return a usable, mounted recording directory, or None.

    Resolves symlinks, refuses paths whose mount point is '/' or '/media'
    (i.e. the target medium is not actually mounted) and creates the
    directory when it does not exist yet.
    """
    if not dirname:
        return None
    from Components import Harddisk
    dirname = os.path.realpath(dirname)
    mountpoint = Harddisk.findMountPoint(dirname)
    if mountpoint in ('/', '/media'):
        # Mount point resolves to the root filesystem -> medium is missing.
        print '[RecordTimer] media is not mounted:', dirname
        return None
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except Exception, ex:
            print '[RecordTimer] Failed to create dir "%s":' % dirname, ex
            return None
    return dirname
def checkForRecordings():
    """Return True when a recording is running or starts within 6 minutes."""
    nav = NavigationInstance.instance
    if nav.getRecordings():
        return True
    next_rec_time = nav.RecordTimer.getNextTimerTime(isWakeup=True)
    if next_rec_time <= 0:
        return False
    return (next_rec_time - time()) < 360
# please do not translate log messages
class RecordTimerEntry(timer.TimerEntry, object):
######### the following static methods and members are only in use when the box is in (soft) standby
wasInStandby = False
wasInDeepStandby = False
receiveRecordEvents = False
    @staticmethod
    def keypress(key=None, flag=1):
        # First real key press after a standby wakeup: clear the standby
        # bookkeeping flags and unbind this hook again.
        if flag and (RecordTimerEntry.wasInStandby or RecordTimerEntry.wasInDeepStandby):
            RecordTimerEntry.wasInStandby = False
            RecordTimerEntry.wasInDeepStandby = False
            eActionMap.getInstance().unbindAction('', RecordTimerEntry.keypress)
    @staticmethod
    def setWasInDeepStandby():
        # Remember the box was woken from deep standby and watch for the
        # next key press with the highest possible binding priority.
        RecordTimerEntry.wasInDeepStandby = True
        eActionMap.getInstance().bindAction('', -maxint - 1, RecordTimerEntry.keypress)
    @staticmethod
    def setWasInStandby():
        # Mark the box as having been in (soft) standby; bind the key-press
        # hook only once (it may already be bound by setWasInDeepStandby).
        if not RecordTimerEntry.wasInStandby:
            if not RecordTimerEntry.wasInDeepStandby:
                eActionMap.getInstance().bindAction('', -maxint - 1, RecordTimerEntry.keypress)
            RecordTimerEntry.wasInDeepStandby = False
            RecordTimerEntry.wasInStandby = True
    @staticmethod
    def shutdown():
        # Immediately quit the main loop, powering the box down.
        quitMainloop(1)
    @staticmethod
    def staticGotRecordEvent(recservice, event):
        # Record-event hook active while waiting to shut down: once the
        # last recording ends and nothing is due within 6 minutes, power off.
        if event == iRecordableService.evEnd:
            print "RecordTimer.staticGotRecordEvent(iRecordableService.evEnd)"
            if not checkForRecordings():
                print "No recordings busy of sceduled within 6 minutes so shutdown"
                RecordTimerEntry.shutdown() # immediate shutdown
        elif event == iRecordableService.evStart:
            print "RecordTimer.staticGotRecordEvent(iRecordableService.evStart)"
    @staticmethod
    def stopTryQuitMainloop():
        # Cancel the pending auto-shutdown: stop listening to record events.
        print "RecordTimer.stopTryQuitMainloop"
        NavigationInstance.instance.record_event.remove(RecordTimerEntry.staticGotRecordEvent)
        RecordTimerEntry.receiveRecordEvents = False
    @staticmethod
    def TryQuitMainloop():
        # Arrange a shutdown once all recordings have finished, but only
        # while the box is in standby and the hook is not installed yet.
        if not RecordTimerEntry.receiveRecordEvents and Screens.Standby.inStandby:
            print "RecordTimer.TryQuitMainloop"
            NavigationInstance.instance.record_event.append(RecordTimerEntry.staticGotRecordEvent)
            RecordTimerEntry.receiveRecordEvents = True
            # send fake event.. to check if another recordings are running or
            # other timers start in a few seconds
            RecordTimerEntry.staticGotRecordEvent(None, iRecordableService.evEnd)
#################################################################
    def __init__(self, serviceref, begin, end, name, description, eit, disabled = False, justplay = False, afterEvent = AFTEREVENT.AUTO, checkOldTimers = False, dirname = None, tags = None, descramble = True, record_ecm = False, always_zap = False, zap_wakeup = "always", rename_repeat = True):
        """Create a record (or zap) timer entry.

        serviceref must be a ServiceReference; begin/end are epoch seconds.
        justplay makes this a pure zap timer; afterEvent selects the
        AFTEREVENT action once recording finishes.
        """
        timer.TimerEntry.__init__(self, int(begin), int(end))
        if checkOldTimers == True:
            # Clamp timers starting more than two weeks (1209600 s) in the past.
            if self.begin < time() - 1209600:
                self.begin = int(time())
        if self.end < self.begin:
            self.end = self.begin
        assert isinstance(serviceref, ServiceReference)
        if serviceref and serviceref.isRecordable():
            self.service_ref = serviceref
        else:
            # Non-recordable service -> keep a null reference placeholder.
            self.service_ref = ServiceReference(None)
        self.eit = eit
        self.dontSave = False
        self.name = name
        self.description = description
        self.disabled = disabled
        self.timer = None
        self.__record_service = None
        self.start_prepare = 0
        self.justplay = justplay
        self.always_zap = always_zap
        self.zap_wakeup = zap_wakeup
        self.afterEvent = afterEvent
        self.dirname = dirname
        self.dirnameHadToFallback = False
        self.autoincrease = False
        self.autoincreasetime = 3600 * 24 # 1 day
        self.tags = tags or []
        self.descramble = descramble
        self.record_ecm = record_ecm
        self.rename_repeat = rename_repeat
        # Switch tuner priority for recording only when a distinct record
        # priority ("-2" means: same as live) is configured.
        self.needChangePriorityFrontend = config.usage.recording_frontend_priority.value != "-2" and config.usage.recording_frontend_priority.value != config.usage.frontend_priority.value
        self.change_frontend = False
        self.InfoBarInstance = Screens.InfoBar.InfoBar.instance
        self.ts_dialog = None
        self.log_entries = []
        self.resetState()
def __repr__(self):
return "RecordTimerEntry(name=%s, begin=%s, serviceref=%s, justplay=%s)" % (self.name, ctime(self.begin), self.service_ref, self.justplay)
    def log(self, code, msg):
        # Append a timestamped (time, code, message) entry to this timer's
        # log and echo the message to the console.
        self.log_entries.append((int(time()), code, msg))
        print "[TIMER]", msg
    def calculateFilename(self, name=None):
        """Build and store the recording file path in self.Filename.

        Combines the begin date, service name and event name according to
        the configured filename composition, optionally transliterated to
        ASCII, inside a safe (mounted) movie directory.  Returns None when
        no usable directory exists.
        """
        service_name = self.service_ref.getServiceName()
        begin_date = strftime("%Y%m%d %H%M", localtime(self.begin))
        name = name or self.name
        filename = begin_date + " - " + service_name
        if name:
            if config.recording.filename_composition.value == "short":
                filename = strftime("%Y%m%d", localtime(self.begin)) + " - " + name
            elif config.recording.filename_composition.value == "long":
                filename += " - " + name + " - " + self.description
            else:
                filename += " - " + name # standard
        if config.recording.ascii_filenames.value:
            filename = ASCIItranslit.legacyEncode(filename)
        if not self.dirname:
            dirname = findSafeRecordPath(defaultMoviePath())
        else:
            dirname = findSafeRecordPath(self.dirname)
            if dirname is None:
                # Configured directory unusable -> fall back to the default
                # movie path and remember that we had to do so.
                dirname = findSafeRecordPath(defaultMoviePath())
                self.dirnameHadToFallback = True
        if not dirname:
            return None
        self.Filename = Directories.getRecordingFilename(filename, dirname)
        self.log(0, "Filename calculated as: '%s'" % self.Filename)
        return self.Filename
def tryPrepare(self):
if self.justplay:
return True
else:
if not self.calculateFilename():
self.do_backoff()
self.start_prepare = time() + self.backoff
return False
rec_ref = self.service_ref | and self.service_ref.ref
if rec_ref and rec_ref.flags & eServiceReference.isGroup:
rec_ref = getBestPlayableServiceReference(rec_ref, eServiceReference())
if not rec_ref:
self.log(1, "'get best playable service for group... record' failed")
return False
self.setRecordingPreferredTuner()
self.record_service = rec_ref and NavigationInstance.instance.recordService(rec_ref)
if not self.record_ | service:
self.log(1, "'record service' failed")
self.setRec |
dacb/assembly_and_binning | assess_unbinned_reads_across_samples/assess_unbinned_reads_across_samples.py | Python | mit | 858 | 0.011655 | import os
import pandas as pd
#import seaborn as sns
total_reads = pd.read_csv('../data/sample_info/sample_read_counts.tsv', sep='\t', names = ['fastq filename', 'number of reads'])
total_reads['cryptic metagenome name'] = total_reads['fastq filename'].str.strip('.fastq.gz')
sample_info = pd.read_csv('../data/sample_info/sample_info.tsv', sep='\t')
sample_translation = pd.read_csv('../data/sample_info/meta4_sample_names--cryptic_to_sample_number.tsv', sep='\t')
read_mappings = pd.read_csv('./data/num_reads_mapped--can_double_count_multiple_mappings.tsv', sep='\t')
reads = pd.merge(sample_info, sample_translation)
reads = pd.merge(reads, total_reads)
reads = pd.merge(reads, read_mappings)
out_path = 'total_num_reads_across_samples_with_sample_info.tsv'
out_dir = './data'
reads.to_csv(os.path.join(out_dir, out_path | ), sep=' | \t', index=False)
|
zstackorg/zstack-woodpecker | integrationtest/vm/multihosts/volumes/paths/path119.py | Python | apache-2.0 | 870 | 0.055172 | import zstackwoodpecker.test_state as ts_header
TestAction = ts_header.TestAction
def path():
    """Return the woodpecker test path: initial formation plus ordered actions.

    Fixed: two action rows (create_volume and stop_vm) were garbled by stray
    separators in the source.
    """
    return dict(initial_formation="template4",
                path_list=[[TestAction.delete_volume, "vm1-volume1"],
                           [TestAction.reboot_vm, "vm1"],
                           [TestAction.create_volume, "volume1", "=scsi"],
                           [TestAction.attach_volume, "vm1", "volume1"],
                           [TestAction.create_volume_backup, "volume1", "backup1"],
                           [TestAction.stop_vm, "vm1"],
                           [TestAction.cleanup_ps_cache],
                           [TestAction.start_vm, "vm1"],
                           [TestAction.create_volume_snapshot, "volume1", 'snapshot1'],
                           [TestAction.detach_volume, "volume1"],
                           [TestAction.clone_vm, "vm1", "vm2", "=full"],
                           [TestAction.attach_volume, "vm1", "volume1"],
                           [TestAction.stop_vm, "vm1"],
                           [TestAction.use_volume_backup, "backup1"],
                           [TestAction.start_vm, "vm1"],
                           [TestAction.reboot_vm, "vm1"]])
|
prefetchnta/questlab | bin/x64bin/python/37/Lib/_weakrefset.py | Python | lgpl-2.1 | 5,875 | 0.001021 | # Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from _weakref import ref
__all__ = ['WeakSet']
class _IterationGuard:
# This context manager registers itself in the current iterators of the
# weak container, such as to delay all removals until the context manager
# exits.
# This technique should be relatively thread-safe (since sets are).
def __init__(self, weakcontainer):
# Don't create cycles
self.weakcontainer = ref(weakcontainer)
def __enter__(self):
w = self.weakcontainer()
if w is not None:
w._iterating.add(self)
return self
def __exit__(self, e, t, b):
w = self.weakcontainer()
if w is not None:
s = w._iterating
s.remove(self)
if not s:
w._commit_removals()
class WeakSet:
    """A set holding weak references to its elements.

    Elements disappear automatically once no strong reference to them
    remains.  Removals triggered while the set is being iterated are
    queued in ``_pending_removals`` and committed once the last
    _IterationGuard exits.

    Fixed: ``difference_update`` and ``intersection`` were garbled by stray
    separators in the source (syntax errors); restored per the surrounding
    in-place-operator pattern.
    """

    def __init__(self, data=None):
        self.data = set()
        def _remove(item, selfref=ref(self)):
            # Weakref callback: drop the dead reference now, or queue the
            # removal while an iteration is in progress.
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(item)
                else:
                    self.data.discard(item)
        self._remove = _remove
        # A list of keys to be removed
        self._pending_removals = []
        self._iterating = set()
        if data is not None:
            self.update(data)

    def _commit_removals(self):
        # Flush removals queued while the set was being iterated.
        l = self._pending_removals
        discard = self.data.discard
        while l:
            discard(l.pop())

    def __iter__(self):
        with _IterationGuard(self):
            for itemref in self.data:
                item = itemref()
                if item is not None:
                    # Caveat: the iterator will keep a strong reference to
                    # `item` until it is resumed or closed.
                    yield item

    def __len__(self):
        # Pending removals are still in self.data; subtract them.
        return len(self.data) - len(self._pending_removals)

    def __contains__(self, item):
        try:
            wr = ref(item)
        except TypeError:
            # Non-weakrefable objects can never be members.
            return False
        return wr in self.data

    def __reduce__(self):
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))

    def add(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.add(ref(item, self._remove))

    def clear(self):
        if self._pending_removals:
            self._commit_removals()
        self.data.clear()

    def copy(self):
        return self.__class__(self)

    def pop(self):
        if self._pending_removals:
            self._commit_removals()
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet') from None
            item = itemref()
            if item is not None:
                return item

    def remove(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.remove(ref(item))

    def discard(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.discard(ref(item))

    def update(self, other):
        if self._pending_removals:
            self._commit_removals()
        for element in other:
            self.add(element)

    def __ior__(self, other):
        self.update(other)
        return self

    def difference(self, other):
        newset = self.copy()
        newset.difference_update(other)
        return newset
    __sub__ = difference

    def difference_update(self, other):
        self.__isub__(other)

    def __isub__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self

    def intersection(self, other):
        return self.__class__(item for item in other if item in self)
    __and__ = intersection

    def intersection_update(self, other):
        self.__iand__(other)

    def __iand__(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
        return self

    def issubset(self, other):
        return self.data.issubset(ref(item) for item in other)
    __le__ = issubset

    def __lt__(self, other):
        return self.data < set(map(ref, other))

    def issuperset(self, other):
        return self.data.issuperset(ref(item) for item in other)
    __ge__ = issuperset

    def __gt__(self, other):
        return self.data > set(map(ref, other))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(map(ref, other))

    def symmetric_difference(self, other):
        newset = self.copy()
        newset.symmetric_difference_update(other)
        return newset
    __xor__ = symmetric_difference

    def symmetric_difference_update(self, other):
        self.__ixor__(other)

    def __ixor__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
        return self

    def union(self, other):
        return self.__class__(e for s in (self, other) for e in s)
    __or__ = union

    def isdisjoint(self, other):
        return len(self.intersection(other)) == 0
|
0111001101111010/open-health-inspection-api | venv/lib/python2.7/site-packages/bson/son.py | Python | gpl-2.0 | 8,313 | 0.000481 | # Copyright 2009-2012 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for creating and manipulating SON, the Serialized Ocument Notation.
Regular dictionaries can be used instead of SON objects, but not when the order
of keys is important. A SON object can be used just like a normal Python
dictionary."""
import copy
import re
# This sort of sucks, but seems to be as good as it gets...
# This is essentially the same as re._pattern_type
RE_TYPE = type(re.compile(""))
class SON(dict):
"""SON data.
A subclass of dict that maintains ordering of keys and provides a
few extra niceties for dealing with SON. SON objects can be
converted to and from BSON.
The mapping from Python types to BSON types is as follows:
=================================== ============= ===================
Python Type BSON Type Supported Direction
=================================== ============= ===================
None null both
bool boolean both
int [#int]_ int32 / int64 py -> bson
long int64 both
float number (real) both
string string py -> bson
unicode string both
list array both
dict / `SON` object both
datetime.datetime [#dt]_ [#dt2]_ date both
compiled re regex both
`bson.binary.Binary` binary both
`bson.objectid.ObjectId` oid both
`bson.dbref.DBRef` dbref both
None undefined bson -> py
unicode code bson -> py
`bson.code.Code` code py -> bson
unicode symbol bson -> py
bytes (Python 3) [#bytes]_ binary both
=================================== ============= ===================
    Note that to save binary data it must be wrapped as an instance of
`bson.binary.Binary`. Otherwise it will be saved as a BSON string
and retrieved as unicode.
.. [#int] A Python int will be saved as a BSON int32 or BSON int64 depending
    on its size. A BSON int32 will always decode to a Python int. In Python 2.x
a BSON int64 will always decode to a Python long. In Python 3.x a BSON
int64 will decode to a Python int since there is no longer a long type.
.. [#dt] datetime.datetime instances will be rounded to the nearest
millisecond when saved
.. [#dt2] all datetime.datetime instances are treated as *naive*. clients
should always use UTC.
.. [#bytes] The bytes type from Python 3.x is encoded as BSON binary with
subtype 0. In Python 3.x it will be decoded back to bytes. In Python 2.x
it will be decoded to an instance of :class:`~bson.binary.Binary` with
subtype 0.
"""
    def __init__(self, data=None, **kwargs):
        # __keys records insertion order; it is updated by __setitem__ /
        # __delitem__ and drives all iteration.
        self.__keys = []
        dict.__init__(self)
        self.update(data)
        self.update(kwargs)
    def __new__(cls, *args, **kwargs):
        # Ensure __keys exists even when __init__ is bypassed (e.g. during
        # unpickling or copy).
        instance = super(SON, cls).__new__(cls, *args, **kwargs)
        instance.__keys = []
        return instance
def __repr__(self):
result = []
for key in self.__keys:
result.append("(%r, %r)" % (key, self[key]))
return "SON([%s])" % ", ".join(result)
    def __setitem__(self, key, value):
        # Record insertion order for new keys only; re-assignment keeps the
        # key at its original position.
        if key not in self:
            self.__keys.append(key)
        dict.__setitem__(self, key, value)
    def __delitem__(self, key):
        # Keep the order list and the dict in sync; list.remove raises
        # ValueError (not KeyError) when the key is absent from the list.
        self.__keys.remove(key)
        dict.__delitem__(self, key)
def keys(self):
return list(self.__keys)
    def copy(self):
        # Shallow copy preserving insertion order.
        other = SON()
        other.update(self)
        return other
# TODO this is all from UserDict.DictMixin. it could probably be made more
# efficient.
# second level definitions support higher levels
    def __iter__(self):
        # Iterate over a snapshot of the keys (keys() returns a copy), in
        # insertion order.
        for k in self.keys():
            yield k
    def has_key(self, key):
        # Legacy Python 2 dict API; same as ``key in self``.
        return key in self.keys()
    def __contains__(self, key):
        # O(n): membership is checked against the ordered key list, not the
        # underlying hash table.
        return key in self.keys()
# third level takes advantage of second level definitions
    def iteritems(self):
        # Yield (key, value) pairs lazily, in insertion order.
        for k in self:
            yield (k, self[k])
    def iterkeys(self):
        # Same iterator as iter(self): keys in insertion order.
        return self.__iter__()
# fourth level uses definitions from lower levels
    def itervalues(self):
        # Yield values lazily, in key insertion order.
        for _, v in self.iteritems():
            yield v
def values(self):
return [v for _, v in self.iteritems()]
def items(self):
return [(key, self[key]) for key in self]
def clear(self):
for key in self.keys():
del self[key]
    def setdefault(self, key, default=None):
        """Return self[key]; when absent, insert *default* and return it."""
        try:
            return self[key]
        except KeyError:
            self[key] = default
            return default
    def pop(self, key, *args):
        """Remove *key* and return its value.

        An optional second positional argument is returned instead of
        raising KeyError when the key is missing.
        """
        if len(args) > 1:
            raise TypeError("pop expected at most 2 arguments, got "\
                + repr(1 + len(args)))
        try:
            value = self[key]
        except KeyError:
            if args:
                return args[0]
            raise
        del self[key]
        return value
    def popitem(self):
        # Remove and return the first-inserted (key, value) pair.
        # NOTE: iterator.next() is Python 2 only.
        try:
            k, v = self.iteritems().next()
        except StopIteration:
            raise KeyError('container is empty')
        del self[k]
        return (k, v)
    def update(self, other=None, **kwargs):
        # Make progressively weaker assumptions about "other"
        if other is None:
            pass
        elif hasattr(other, 'iteritems'):  # iteritems saves memory and lookups
            for k, v in other.iteritems():
                self[k] = v
        elif hasattr(other, 'keys'):
            for k in other.keys():
                self[k] = other[k]
        else:
            # Fall back to an iterable of (key, value) pairs.
            for k, v in other:
                self[k] = v
        if kwargs:
            self.update(kwargs)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
    def __eq__(self, other):
        """Comparison to another SON is order-sensitive while comparison to a
        regular dictionary is order-insensitive.
        """
        if isinstance(other, SON):
            # items() are ordered lists, so list equality enforces order.
            return len(self) == len(other) and self.items() == other.items()
        return self.to_dict() == other
    def __ne__(self, other):
        # Delegate to __eq__ (Python 2 does not derive != from ==).
        return not self == other
    def __len__(self):
        # Length follows the ordered key list, not the underlying dict.
        return len(self.keys())
    def to_dict(self):
        """Convert a SON document to a normal Python dictionary instance.

        This is trickier than just *dict(...)* because it needs to be
        recursive.
        """
        def transform_value(value):
            # Recursively replace SON instances with plain dicts, walking
            # into lists and nested dicts (dict.iteritems: Python 2 only).
            if isinstance(value, list):
                return [transform_value(v) for v in value]
            if isinstance(value, SON):
                value = dict(value)
            if isinstance(value, dict):
                for k, v in value.iteritems():
                    value[k] = transform_value(v)
            return value
        return transform_value(dict(self))
def __deepcopy__(self, memo):
out = SON()
val_id = id(self)
if val_id in memo:
return memo.get(val_id)
memo[val_id] = out
for k, v in self.iteritem |
Chasvortex/caffe-gui-tool | CGTArrangeHelper.py | Python | unlicense | 10,580 | 0.004253 | import bpy
def forceupdate(nodes):
    """Poke the first writable input socket to force a node-tree update.

    Re-assigning a socket value to itself triggers Blender's update
    machinery.  Returns True as soon as one assignment succeeds; False
    when no node exposed a writable input.
    """
    for node in nodes:
        if not node.inputs:
            continue
        for socket in node.inputs:
            try:
                socket.default_value = socket.default_value
            except:
                continue
            return True
    return False
def get_nodes_links(treename):
    """Return (nodes, links) of the named node tree.

    When *treename* is falsy, the tree currently edited in the active
    NODE_EDITOR space is used instead.
    """
    if not treename:
        for space in bpy.context.area.spaces:
            if space.type == 'NODE_EDITOR':
                treename = space.edit_tree.name
    tree = bpy.data.node_groups[treename]
    return tree.nodes, tree.links
def get_nodes_links_withsel(treename):
    """Return (nodes, links) restricted to the current selection.

    Falls back to the active NODE_EDITOR tree when *treename* is falsy,
    and to *all* nodes when nothing is selected.  Both lists are sorted
    by name so results do not depend on selection order.
    """
    if not treename:
        for space in bpy.context.area.spaces:
            if space.type == 'NODE_EDITOR':
                treename = space.edit_tree.name
    tree = bpy.data.node_groups[treename]
    nodes = tree.nodes
    links = tree.links
    all_nodes = nodes
    newnodes = []
    for node in nodes:
        if node.select == True:
            newnodes.append(node)
    if len(newnodes) == 0:
        newnodes = all_nodes
    nodes_sorted = sorted(newnodes, key=lambda x: x.name)  # Sort the nodes list to achieve consistent
    links_sorted = sorted(links, key=lambda x: x.from_node.name)  # results (order was changed based on selection).
    return nodes_sorted, links_sorted
def isStartNode(node):
    """True when no input socket of *node* has an incoming link."""
    for socket in node.inputs:
        if socket.links != ():
            return False
    return True
def isEndNode(node):
    """True when no output socket of *node* has an outgoing link."""
    for socket in node.outputs:
        if socket.links != ():
            return False
    return True
def between(b1, a, b2):
    """True when *a* lies in the closed interval [b1, b2].

    b1 MUST be smaller than (or equal to) b2, as in the original contract.
    """
    return b1 <= a <= b2
def overlaps(node1, node2):
    """True when the bounding boxes of node1 and node2 intersect.

    x and y extents are tested separately.  Each axis overlaps either when
    an edge of node1 falls inside node2, or when node1 fully encloses
    node2 on that axis.  Note that node y coordinates grow downwards:
    ``location.y`` is the top edge and ``location.y - dimensions.y`` the
    bottom edge.
    """
    dim1x = node1.dimensions.x
    dim1y = node1.dimensions.y
    dim2x = node2.dimensions.x
    dim2y = node2.dimensions.y
    boolx = False
    booly = False
    boolboth = False
    # check for x overlap
    if between(node2.location.x, node1.location.x, (node2.location.x + dim2x)) or between(node2.location.x, (node1.location.x + dim1x), (node2.location.x + dim2x)):  # if either edges are inside the second node
        boolx = True
    if between(node1.location.x, node2.location.x, node1.location.x + dim1x) and between(node1.location.x, (node2.location.x + dim2x), node1.location.x + dim1x):  # if each edge is on either side of the second node
        boolx = True
    # check for y overlap
    if between((node2.location.y - dim2y), node1.location.y, node2.location.y) or between((node2.location.y - dim2y), (node1.location.y - dim1y), node2.location.y):
        booly = True
    if between((node1.location.y - dim1y), node2.location.y, node1.location.y) and between((node1.location.y - dim1y), (node2.location.y - dim2y), node1.location.y):
        booly = True
    if boolx == True and booly == True:
        boolboth = True
    return boolboth
def treeMidPt(nodes):
    """Return the (x, y) centre of the bounding box of the nodes' locations."""
    xs = [node.location.x for node in nodes]
    ys = [node.location.y for node in nodes]
    midx = min(xs) + (max(xs) - min(xs)) / 2
    midy = min(ys) + (max(ys) - min(ys)) / 2
    return midx, midy
def ArrangeFunction(context, treename=False):
nodes, links = get_nodes_links_withsel(treename)
margin = context.scene.NWSpacing
oldmidx, oldmidy = treeMidPt(nodes)
if context.scene.NWDelReroutes:
# Store selection
selection = []
for node in nodes:
if node.select == True and node.type != "REROUTE":
selection.append(node.name)
# Delete Re | routes
for node in nodes:
node.select = False # deselect all nodes
for node in nodes:
if node.type == 'REROUTE':
node.select = True
bpy.ops.node.delete_reconnect()
# Restore selection
nodes, links = get_nodes_links(treename)
nodes = list(nodes)
for node in nodes:
if node.name in selection:
node.select = True
else:
# Store selection anyway
| selection = []
for node in nodes:
if node.select == True:
selection.append(node.name)
if context.scene.NWFrameHandling == "delete":
# Store selection
selection = []
for node in nodes:
if node.select == True and node.type != "FRAME":
selection.append(node.name)
# Delete Frames
for node in nodes:
node.select = False # deselect all nodes
for node in nodes:
if node.type == 'FRAME':
node.select = True
bpy.ops.node.delete()
# Restore selection
nodes, links = get_nodes_links(treename)
nodes = list(nodes)
for node in nodes:
if node.name in selection:
node.select = True
layout_iterations = len(nodes) * 2
backward_check_iterations = len(nodes)
overlap_iterations = len(nodes)
for it in range(0, layout_iterations):
print (
'Layout Iteration %i / %i' % (it, layout_iterations + overlap_iterations + backward_check_iterations - 1))
for node in nodes:
isframe = False
if node.type == "FRAME" and context.scene.NWFrameHandling == 'ignore':
isframe = True
if not isframe:
if isStartNode(node) and context.scene.NWStartAlign: # line up start nodes
node.location.x = node.dimensions.x / -2
node.location.y = node.dimensions.y / 2
for link in links:
if link.from_node == node and link.to_node in nodes:
link.to_node.location.x = node.location.x + node.dimensions.x + margin
link.to_node.location.y = node.location.y - (node.dimensions.y / 2) + (
link.to_node.dimensions.y / 2)
else:
node.location.x = 0
node.location.y = 0
for it in range(0, backward_check_iterations):
print ('Layout Iteration %i / %i' % (
layout_iterations + it, layout_iterations + overlap_iterations + backward_check_iterations - 1))
for link in links:
if link.from_node.location.x + link.from_node.dimensions.x >= link.to_node.location.x and link.to_node in nodes:
link.to_node.location.x = link.from_node.location.x + link.from_node.dimensions.x + margin
# line up end nodes
if context.scene.NWEndAlign:
for node in nodes:
max_loc_x = (sorted(nodes, key=lambda x: x.location.x, reverse=True))[0].location.x
if isEndNode(node) and not isStartNode(node):
node.location.x = max_loc_x
for it in range(0, overlap_iterations):
print ('Layout Iteration %i / %i' % (layout_iterations + overlap_iterations + it,
layout_iterations + overlap_iterations + backward_check_iterations - 1))
for node in nodes:
isframe = False
if node.type = |
GluuFederation/community-edition-setup | static/extension/person_authentication/DuoExternalAuthenticator.py | Python | mit | 9,408 | 0.004464 | # oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
# Copyright (c) 2016, Gluu
#
# Author: Yuriy Movchan
#
from org.gluu.service.cdi.util import CdiUtil
from org.gluu.oxauth.security import Identity
from org.gluu.model.custom.script.type.auth import PersonAuthenticationType
from org.gluu.oxauth.service import AuthenticationService
from org.gluu.oxauth.service.common import UserService
from org.gluu.service import MailService
from org.gluu.util import ArrayHelper
from org.gluu.util import StringHelper
from java.util import Arrays
import duo_web
import json
class PersonAuthentication(PersonAuthenticationType):
    def __init__(self, currentTimeMillis):
        # Script load timestamp, supplied by the oxAuth custom-script framework.
        self.currentTimeMillis = currentTimeMillis
    def init(self, customScript, configurationAttributes):
        """Load Duo credentials and optional group configuration.

        Reads ikey/skey/akey from the JSON file named by ``duo_creds_file``
        and caches the optional ``duo_group`` (2FA only for members) and
        audit group / e-mail settings.  Returns False on any configuration
        error.
        """
        print "Duo. Initialization"
        duo_creds_file = configurationAttributes.get("duo_creds_file").getValue2()
        # Load credentials from file
        f = open(duo_creds_file, 'r')
        try:
            creds = json.loads(f.read())
        except:
            print "Duo. Initialization. Failed to load creds from file:", duo_creds_file
            return False
        finally:
            f.close()
        self.ikey = str(creds["ikey"])
        self.skey = str(creds["skey"])
        self.akey = str(creds["akey"])
        self.use_duo_group = False
        if (configurationAttributes.containsKey("duo_group")):
            self.duo_group = configurationAttributes.get("duo_group").getValue2()
            self.use_duo_group = True
            print "Duo. Initialization. Using Duo only if user belong to group:", self.duo_group
        self.use_audit_group = False
        if (configurationAttributes.containsKey("audit_group")):
            self.audit_group = configurationAttributes.get("audit_group").getValue2()
            if (not configurationAttributes.containsKey("audit_group_email")):
                print "Duo. Initialization. Property audit_group_email is not specified"
                return False
            self.audit_email = configurationAttributes.get("audit_group_email").getValue2()
            self.use_audit_group = True
            print "Duo. Initialization. Using audito group:", self.audit_group
        # Both group features rely on the attribute holding group membership.
        if (self.use_duo_group or self.use_audit_group):
            if (not configurationAttributes.containsKey("audit_attribute")):
                print "Duo. Initialization. Property audit_attribute is not specified"
                return False
            else:
                self.audit_attribute = configurationAttributes.get("audit_attribute").getValue2()
        print "Duo. Initialized successfully"
        return True
    def destroy(self, configurationAttributes):
        # Nothing to release; just log and report success.
        print "Duo. Destroy"
        print "Duo. Destroyed successfully"
        return True
    def getApiVersion(self):
        # Person-authentication script API version implemented by this script.
        return 11
    def getAuthenticationMethodClaims(self, requestParameters):
        # This script contributes no extra authentication-method claims.
        return None
    def isValidAuthenticationMethod(self, usageType, configurationAttributes):
        # Duo is always considered available; no alternative method needed.
        return True
    def getAlternativeAuthenticationMethod(self, usageType, configurationAttributes):
        # Unused because isValidAuthenticationMethod always returns True.
        return None
    def authenticate(self, configurationAttributes, requestParameters, step):
        """Handle step 1 (primary login) or step 2 (Duo response check).

        Step 1 authenticates username/password (unless another script
        already did) and decides whether a Duo step is needed; step 2
        verifies the signed response posted back by the Duo iframe.
        """
        duo_host = configurationAttributes.get("duo_host").getValue2()
        authenticationService = CdiUtil.bean(AuthenticationService)
        identity = CdiUtil.bean(Identity)
        if (step == 1):
            print "Duo. Authenticate for step 1"
            # Check if user authenticated already in another custom script
            user = authenticationService.getAuthenticatedUser()
            if user == None:
                credentials = identity.getCredentials()
                user_name = credentials.getUsername()
                user_password = credentials.getPassword()
                logged_in = False
                if (StringHelper.isNotEmptyString(user_name) and StringHelper.isNotEmptyString(user_password)):
                    userService = CdiUtil.bean(UserService)
                    logged_in = authenticationService.authenticate(user_name, user_password)
                if (not logged_in):
                    return False
                user = authenticationService.getAuthenticatedUser()
            if (self.use_duo_group):
                print "Duo. Authenticate for step 1. Checking if user belong to Duo group"
                is_member_duo_group = self.isUserMemberOfGroup(user, self.audit_attribute, self.duo_group)
                if (is_member_duo_group):
                    print "Duo. Authenticate for step 1. User '" + user.getUserId() + "' member of Duo group"
                    duo_count_login_steps = 2
                else:
                    # Outside the Duo group: no second factor, only auditing.
                    self.processAuditGroup(user)
                    duo_count_login_steps = 1
                identity.setWorkingParameter("duo_count_login_steps", duo_count_login_steps)
            return True
        elif (step == 2):
            print "Duo. Authenticate for step 2"
            user = authenticationService.getAuthenticatedUser()
            if user == None:
                print "Duo. Authenticate for step 2. Failed to determine user name"
                return False
            user_name = user.getUserId()
            sig_response_array = requestParameters.get("sig_response")
            if ArrayHelper.isEmpty(sig_response_array):
                print "Duo. Authenticate for step 2. sig_response is empty"
                return False
            duo_sig_response = sig_response_array[0]
            print "Duo. Authenticate for step 2. duo_sig_response: " + duo_sig_response
            # Duo returns the username it authenticated; it must match ours.
            authenticated_username = duo_web.verify_response(self.ikey, self.skey, self.akey, duo_sig_response)
            print "Duo. Authenticate for step 2. authenticated_username: " + authenticated_username + ", expected user_name: " + user_name
            if (not StringHelper.equals(user_name, authenticated_username)):
                return False
            self.processAuditGroup(user)
            return True
        else:
            return False
def prepareForStep(self, configurationAttributes, requestParameters, step):
identity = CdiUtil.bean(Identity)
authenticationService = CdiUtil.bean(AuthenticationService)
duo_host = configurationAttributes.get("duo_host").getValue2()
if (step == 1):
print "Duo. Prepare for step 1"
return True
elif (step == 2):
print "Duo. Prepare for step 2"
user = authenticationService.getAuthenticatedUser()
if (user == None):
print "Duo. Prepare for step 2. Failed to determine user name"
return False
user_name = user.getUserId()
duo_sig_request = duo_web.sign_request(self.ikey, self.skey, self.akey, user_name)
print "Duo. Prepare for step 2. duo_sig_request: " + duo_sig_request
identity.setWorkingParameter("duo_host", duo_host)
identity.setWorkingParameter("duo_sig_request", duo_sig_request)
return True
| else:
| return False
    def getExtraParametersForStep(self, configurationAttributes, step):
        # Session parameters that must survive into step 2.
        if step == 2:
            return Arrays.asList("duo_count_login_steps", "cas2_user_uid")
        return None
    def getCountAuthenticationSteps(self, configurationAttributes):
        # One step when authenticate() decided Duo is not required for this
        # user, otherwise the default of two.
        identity = CdiUtil.bean(Identity)
        if (identity.isSetWorkingParameter("duo_count_login_steps")):
            return int(identity.getWorkingParameter("duo_count_login_steps"))
        return 2
def getPageForStep(self, configurationAttributes, step):
if (step == 2):
return "/auth/duo/duologin.xhtml"
return ""
    def getNextStep(self, configurationAttributes, requestParameters, step):
        # -1: never force a jump to a different step.
        return -1
    def getLogoutExternalUrl(self, configurationAttributes, requestParameters):
        # No external logout page is used by this script.
        print "Get external logout URL call"
        return None
def logout(self, configurationAttributes, requestParameters):
|
labordoc/labordoc-next | modules/docextract/lib/docextract_webinterface.py | Python | gpl-2.0 | 6,745 | 0.003262 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""DocExtract REST and Web API
Exposes document extration facilities to the world
"""
from tempfile import NamedTemporaryFile
from invenio.webinterface_handler import WebInterfaceDirectory
from invenio.webuser import collect_user_info
from invenio.webpage import page
from invenio.config import CFG_TMPSHAREDDIR, CFG_ETCDIR
from invenio.refextract_api import extract_references_from_file_xml, \
extract_references_from_url_xml, \
extract_references_from_string_xml
from invenio.bibformat_engine import format_record
def check_login(req):
    """Check that the user is logged in"""
    user_info = collect_user_info(req)
    if user_info['email'] == 'guest':
        # 1. User is guest: must login prior to upload
        # return 'Please login before uploading file.'
        # NOTE(review): the guest rejection is intentionally disabled, so
        # this function currently always allows access — confirm this is
        # still the desired policy.
        pass
def check_url(url):
    """Check that the url we received is not gibberish.

    Only plain http(s) and ftp URLs are accepted.
    """
    allowed_schemes = ('http://', 'https://', 'ftp://')
    return url.startswith(allowed_schemes)
def extract_from_pdf_string(pdf):
    """Extract references from a pdf stored in a string.

    Given a string representing a pdf, this function writes the string to
    disk and passes it to refextract.
    We need to create a temporary file because we need to run pdf2text on it.
    """
    # Save new record to file; the file lives in the shared tmp area so the
    # extraction backend can reach it.
    tf = NamedTemporaryFile(prefix='docextract-pdf',
                            dir=CFG_TMPSHAREDDIR)
    try:
        tf.write(pdf)
        # Make sure all bytes hit the disk before refextract opens the file.
        tf.flush()
        refs = extract_references_from_file_xml(tf.name)
    finally:
        # Also deletes the file
        tf.close()
    return refs
def make_arxiv_url(arxiv_id):
    """Make a url we can use to download a pdf from arxiv

    Arguments:
    arxiv_id -- the arxiv id of the record to link to
    """
    return "http://arxiv.org/pdf/{0}.pdf".format(arxiv_id)
class WebInterfaceAPIDocExtract(WebInterfaceDirectory):
    """DocExtract REST API

    Each exported endpoint takes the request object and the submitted form
    and returns either an error string or the extracted references as XML.
    """
    # URL path -> handler method mapping used by the web dispatcher.
    _exports = [
        ('extract-references-pdf', 'extract_references_pdf'),
        ('extract-references-pdf-url', 'extract_references_pdf_url'),
        ('extract-references-txt', 'extract_references_txt'),
    ]

    def extract_references_pdf(self, req, form):
        """Extract references from uploaded pdf"""
        check_login(req)
        if 'pdf' not in form:
            return 'No PDF file uploaded'
        return extract_from_pdf_string(form['pdf'].stream.read())

    def extract_references_pdf_url(self, req, form):
        """Extract references from the pdf pointed by the passed url"""
        check_login(req)
        if 'url' not in form:
            return 'No URL specified'
        url = form['url']
        if not check_url(url):
            return 'Invalid URL specified'
        return extract_references_from_url_xml(url)

    def extract_references_txt(self, req, form):
        """Extract references from plain text"""
        check_login(req)
        if 'txt' not in form:
            return 'No text specified'
        txt = form['txt'].stream.read()
        return extract_references_from_string_xml(txt)
class WebInterfaceDocExtract(WebInterfaceDirectory):
    """DocExtract API

    Human-facing pages plus the nested REST API (under /api).
    """
    _exports = ['api',
                ('extract-references', 'extract_references'),
                ('example.pdf', 'example_pdf'),
                ]

    api = WebInterfaceAPIDocExtract()

    def example_pdf(self, req, _form):
        """Serve a test pdf for tests"""
        f = open("%s/docextract/example.pdf" % CFG_ETCDIR, 'rb')
        try:
            req.write(f.read())
        finally:
            f.close()

    def extract_references_template(self):
        """Template for reference extraction page"""
        return """Please specify a pdf or a url or some references to parse

        <form action="extract-references" method="post"
                                            enctype="multipart/form-data">
        <p>PDF: <input type="file" name="pdf" /></p>
        <p>arXiv: <input type="text" name="arxiv" /></p>
        <p>URL: <input type="text" name="url" style="width: 600px;"/></p>
        <textarea name="txt" style="width: 500px; height: 500px;"></textarea>
        <p><input type="submit" /></p>
        </form>
        """

    def extract_references(self, req, form):
        """Refrences extraction page

        This page can be used for authors to test their pdfs against our
        refrences extraction process"""
        user_info = collect_user_info(req)

        # Handle the 3 POST parameters (pdf upload wins, then arxiv id,
        # then raw url, then plain text).
        if 'pdf' in form and form['pdf']:
            pdf = form['pdf']
            references_xml = extract_from_pdf_string(pdf)
        elif 'arxiv' in form and form['arxiv']:
            url = make_arxiv_url(arxiv_id=form['arxiv'])
            references_xml = extract_references_from_url_xml(url)
        elif 'url' in form and form['url']:
            url = form['url']
            references_xml = extract_references_from_url_xml(url)
        elif 'txt' in form and form['txt']:
            txt = form['txt']
            references_xml = extract_references_from_string_xml(txt)
        else:
            references_xml = None

        # If we have not uploaded anything yet
        # Display the form that allows us to do so
        if not references_xml:
            out = self.extract_references_template()
        else:
            out = """
            <style type="text/css">
                #referenceinp_link { display: none; }
            </style>
            """
            out += format_record(0,
                                 'hdref',
                                 xml_record=references_xml.encode('utf-8'),
                                 user_info=user_info)

        # Render the page (including header, footer)
        return page(title='References Extractor',
                    body=out,
                    uid=user_info['uid'],
                    req=req)
|
jileiwang/CJ-Glo | tools/character_count.py | Python | apache-2.0 | 1,312 | 0.005335 | from collections import defaultdict
import codecs
def count(corpus, output_file):
debug = False
dic = defaultdict(int)
other = set()
fout = codecs.open(output_file, 'w', 'utf8')
for line in open(corpus, 'r'):
words = line.split()
for word in words:
if len(word) % 3 == 0:
for i in xrange(len(word) / 3):
dic[word[i:i+3]] += 1
else:
other.add(word)
fout.write('%i %i\n' % (len(dic), len(other)))
record_list = [(y, x) for x, y in dic.items()]
record_list.sort()
record_list.reverse()
i = 0
for x, y in record_list:
#print y.decode('utf8'), x
try:
yy = y.decode('GBK')
except:
print y
yy = 'N/A'
fout.write('%s %i\n' % (yy, x))
i += 1
if i > 10 and debug:
break
other_list = list(other)
other_list.sort()
for item in other_list:
#print item.decode('utf8')
item2 = i | tem.decode('utf8')
fout.write(item2)
fout.write('\n')
i += 1
if i > 20 and debug:
| break
fout.close()
if __name__ =='__main__':
    # Build frequency reports for the Chinese and Japanese halves of the
    # parallel training corpus.
    count('data/train.zh_parsed', 'output/count.zh')
    count('data/train.ja_parsed', 'output/count.ja')
|
alexissmirnov/donomo | donomo_archive/deps/paypal.jonboxall/standard/ipn/models.py | Python | bsd-3-clause | 1,485 | 0.003367 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
from paypal.standard.models import PayPalStandardBase
from paypal.standard.ipn.signals import *
class PayPalIPN(PayPalStandardBase):
    """Logs PayPal IPN interactions."""
    FORMAT = u"<IPN: %s %s>"

    class Meta:
        db_table = "paypal_ipn"
        verbose_name = "PayPal IPN"

    def _postback(self):
        """Perform PayPal Postback validation."""
        # Echo the notification back to PayPal with cmd=_notify-validate;
        # PayPal replies with the literal string "VERIFIED" or "INVALID".
        self.response = urllib2.urlopen(self.get_endpoint(), "cmd=_notify-validate&%s" % self.query).read()

    def _verify_postback(self):
        # Anything other than VERIFIED marks the IPN as suspect.
        if self.response != "VERIFIED":
            self.set_flag("Invalid postback. (%s)" % self.response)

    def send_signals(self):
        """Shout for the world to hear whether a txn was successful."""
        # Transaction signals:
        if self.is_transaction():
            if self.flag:
                payment_was_flagged.send(sender=self)
            else:
                payment_was_successful.send(sender=self)
        # Subscription signals:
        else:
            if self.is_subscription_cancellation():
                subscription_cancel.send(sender=self)
            elif self.is_subscription_signup():
                subscription_signup.send(sender=self)
            elif self.is_subscription_end_of_term():
                subscription_eot.send(sender=self)
            elif self.is_subscription_modified():
                subscription_modify.send(sender=self)
monetizeio/django-pgmp | django_pgmp/__init__.py | Python | lgpl-3.0 | 1,587 | 0.006305 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# | === django_pgmp ---------------------------------------------------------===
# This file is part of django-pgpm. django-pgpm is copyright © 2012, RokuSigma
# Inc. and contributors. See AUTHORS and LICENSE for more details.
#
# dj | ango-pgpm is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# django-pgpm is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with django-pgpm. If not, see <http://www.gnu.org/licenses/>.
# ===----------------------------------------------------------------------===
# (major, minor, micro, release tag, release build)
VERSION = (0, 0, 4, 'alpha', 0)

def get_version():
    """Return a human-readable version string derived from VERSION."""
    major, minor, micro, tag, build = VERSION
    version = '%s.%s' % (major, minor)
    if micro:
        version += '.%s' % micro
    # An ('alpha', 0) tail means the version has not been released yet.
    if (tag, build) == ('alpha', 0):
        version += 'pre-alpha'
    else:
        if tag != 'final':
            version += '%s' % tag
        if build != 0:
            version += '%s' % build
    return version
# ===----------------------------------------------------------------------===
# End of File
# ===----------------------------------------------------------------------===
|
Forage/Gramps | gramps/guiQML/views/__init__.py | Python | gpl-2.0 | 940 | 0.002128 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2010 Benny Malengier
#
# This program is free software; you can redistribute it a | nd/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for | more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""
Package init for different views in guiQML.
"""
# DO NOT IMPORT METHODS/CLASSES FROM src/guiQML HERE ! Only __all__
__all__ = [ ]
|
vlegoff/tsunami | src/secondaires/botanique/commandes/__init__.py | Python | bsd-3-clause | 1,675 | 0.001791 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY O | UT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISE | D OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant les commandes du module botanique."""
from . import recolter
from . import vegetal
|
b1naryth1ef/rowboat | rowboat/types/guild.py | Python | mit | 1,573 | 0.001907 | import os
from holster.enum import Enum
from rowboat.types import Model, SlottedModel, Field, DictField, text, raw, rule_matcher
# Scope at which command cooldowns are tracked.
CooldownMode = Enum(
    'GUILD',
    'CHANNEL',
    'USER',
)


class PluginConfigObj(object):
    # Bare container onto which PluginsConfig loads its dynamic attributes.
    client = None
class PluginsConfig(Model):
    """Per-guild plugin configuration container.

    Plugin modules register their own config fields on this class at import
    time, so its attributes are not statically known here.
    """

    def __init__(self, inst, obj):
        self.client = None
        self.load_into(inst, obj)

    @classmethod
    def parse(cls, obj, *args, **kwargs):
        """Load ``obj`` into a fresh PluginConfigObj and return it."""
        inst = PluginConfigObj()
        cls(inst, obj)
        return inst

    @classmethod
    def force_load_plugin_configs(cls):
        """
        This function can be called to ensure that this class will have all its
        attributes properly loaded, as they are dynamically set when plugin configs
        are defined.
        """
        plugins = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'plugins')
        for name in os.listdir(plugins):
            # Importing each plugin module triggers its config registration.
            __import__('rowboat.plugins.{}'.format(
                name.rsplit('.', 1)[0]
            ))
class CommandOverrideConfig(SlottedModel):
    # Per-command override: disable the command or change its required level.
    disabled = Field(bool, default=False)
    level = Field(int)
class CommandsConfig(SlottedModel):
    """Guild-level command settings (prefix, mention trigger, overrides)."""
    prefix = Field(str, default='')
    mention = Field(bool, default=False)
    overrides = Field(raw)

    def get_command_override(self, command):
        """Return the override rule matching ``command``, if any."""
        return rule_matcher(command, self.overrides or [])
class GuildConfig(SlottedModel):
    """Top-level per-guild configuration document."""
    nickname = Field(text)
    commands = Field(CommandsConfig, default=None, create=False)
    # Maps an id to a permission level — presumably user/role ids; verify
    # against the callers before relying on this.
    levels = DictField(int, int)
    plugins = Field(PluginsConfig.parse)
|
rfleschenberg/django-safedelete | safedelete/tests.py | Python | bsd-3-clause | 11,086 | 0.001714 | import django
from django.conf.urls import patterns, include
from django.core.exceptions import ValidationError
from django.contrib import admin
from django.contrib.admin.views.main import ChangeList
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from django.db import models
from django.test import TestCase, RequestFactory
from .admin import SafeDeleteAdmin, highlight_deleted
from .models import (safedelete_mixin_factory, SoftDeleteMixin,
HARD_DELETE, HARD_DELETE_NOCASCADE, SOFT_DELETE,
NO_DELETE, DELETED_VISIBLE_BY_PK)
# MODELS (FOR TESTING)
class Author(safedelete_mixin_factory(HARD_DELETE_NOCASCADE)):
    # Hard-deleted unless related objects exist (then soft-deleted instead).
    name = models.CharField(max_length=200)
class Category(safedelete_mixin_factory(SOFT_DELETE, visibility=DELETED_VISIBLE_BY_PK)):
    # Soft-deleted; deleted rows remain reachable when queried by pk.
    name = models.CharField(max_length=200, unique=True)
class Article(safedelete_mixin_factory(HARD_DELETE)):
    # Always hard-deleted, cascading from its author/category as usual.
    name = models.CharField(max_length=200)
    author = models.ForeignKey(Author)
    category = models.ForeignKey(Category, null=True, default=None)

    def __unicode__(self):
        return 'Article ({0}): {1}'.format(self.pk, self.name)
class Order(SoftDeleteMixin):
    # Plain soft-delete model with an m2m used by the related-manager tests.
    name = models.CharField(max_length=100)
    articles = models.ManyToManyField(Article)
class VeryImportant(safedelete_mixin_factory(NO_DELETE)):
    # delete() is a no-op for this model (NO_DELETE policy).
    name = models.CharField(max_length=200)
# ADMINMODEL (FOR TESTING)
class CategoryAdmin(SafeDeleteAdmin):
    # Prepend the highlighted-deleted column to the default admin columns.
    list_display = (highlight_deleted,) + SafeDeleteAdmin.list_display

admin.site.register(Category, CategoryAdmin)
# URLS (FOR TESTING)
urlpatterns = patterns(
'',
(r'^admin/', include(admin.site.urls)),
)
# TESTS
class SimpleTest(TestCase):
    """End-to-end checks of the safedelete policies on the models above."""

    def setUp(self):
        # Fixture: 3 authors, 3 categories, 3 articles, 1 order containing
        # the first two articles.
        self.authors = (
            Author.objects.create(name='author 0'),
            Author.objects.create(name='author 1'),
            Author.objects.create(name='author 2'),
        )

        self.categories = (
            Category.objects.create(name='category 0'),
            Category.objects.create(name='category 1'),
            Category.objects.create(name='category 2'),
        )

        self.articles = (
            Article.objects.create(name='article 0', author=self.authors[1]),
            Article.objects.create(name='article 1', author=self.authors[1], category=self.categories[1]),
            Article.objects.create(name='article 2', author=self.authors[2], category=self.categories[2]),
        )

        self.order = Order.objects.create(name='order')
        self.order.articles.add(self.articles[0], self.articles[1])

    def test_softdelete(self):
        # A soft-deleted object disappears from the default manager but
        # stays reachable through all_with_deleted(); save() resurrects it.
        self.assertEqual(Order.objects.count(), 1)

        self.order.delete()

        self.assertEqual(Order.objects.count(), 0)
        self.assertEqual(Order.objects.all_with_deleted().count(), 1)

        self.order.save()

        self.assertEqual(Order.objects.count(), 1)

    def test_hard_delete(self):
        self.assertEqual(Article.objects.count(), 3)

        self.articles[0].delete()

        self.assertEqual(Article.objects.count(), 2)
        self.assertEqual(Article.objects.all_with_deleted().count(), 2)

        # force_policy lets a HARD_DELETE model be soft-deleted instead.
        self.articles[1].delete(force_policy=SOFT_DELETE)

        self.assertEqual(Article.objects.count(), 1)
        self.assertEqual(Article.objects.all_with_deleted().count(), 2)
        self.assertEqual(Article.objects.filter(author=self.authors[2]).count(), 1)

    def test_hard_delete_nocascade(self):
        self.assertEqual(Author.objects.count(), 3)

        # author 0 has no articles -> hard delete.
        self.authors[0].delete()

        self.assertEqual(Author.objects.count(), 2)
        self.assertEqual(Author.objects.all_with_deleted().count(), 2)

        # author 1 has articles -> falls back to soft delete, no cascade.
        self.authors[1].delete()

        self.assertEqual(Author.objects.count(), 1)
        self.assertEqual(Author.objects.all_with_deleted().count(), 2)
        self.assertEqual(Article.objects.count(), 3)

    def test_no_delete(self):
        obj = VeryImportant.objects.create(name="I don't wanna die :'(.")
        obj.delete()
        self.assertEqual(obj.deleted, False)
        obj = VeryImportant.objects.get(pk=obj.pk)
        self.assertEqual(obj.deleted, False)

    def test_no_delete_manager(self):
        obj = VeryImportant.objects.create(name="I don't wanna die :'(.")

        VeryImportant.objects.all().delete()

        obj = VeryImportant.objects.get(pk=obj.pk)
        self.assertEqual(obj.deleted, False)

    def test_save(self):
        """
        When we save an object, it will be re-inserted if it was deleted,
        the same way as save() will re-insert a deleted object.
        """
        self.assertEqual(Order.objects.count(), 1)

        self.order.delete()
        self.assertEqual(Order.objects.count(), 0)

        self.order.save()
        self.assertEqual(Order.objects.count(), 1)

    def test_undelete(self):
        self.assertEqual(Order.objects.count(), 1)

        self.order.delete()
        self.assertEqual(Order.objects.count(), 0)

        self.order.undelete()
        self.assertEqual(Order.objects.count(), 1)

    def test_access_by_pk(self):
        """
        Ensure that we can access to a deleted category when we access it by pk.
        We can do that because we have set visibility=DELETED_VISIBLE_BY_PK
        """
        pk = self.categories[1].id

        self.categories[1].delete()

        self.assertRaises(Category.DoesNotExist, Category.objects.get, name=self.categories[1].name)

        self.assertEqual(self.categories[1], Category.objects.get(pk=pk))

        cat = Category.objects.filter(pk=pk)
        self.assertEqual(len(cat), 1)
        self.assertEqual(self.categories[1], cat[0])

    def test_no_access_by_pk(self):
        """
        Ensure that if we try to access a deleted object by pk (with the default visibility),
        we can't access it.
        """
        self.order.delete()

        self.assertRaises(Order.DoesNotExist, Order.objects.get, pk=self.order.id)

    def test_queryset(self):
        self.assertEqual(Category.objects.count(), 3)

        Category.objects.all().delete()

        self.assertEqual(Category.objects.count(), 0)

        Category.objects.all().undelete()  # Nonsense

        self.assertEqual(Category.objects.count(), 0)

        Category.objects.deleted_only().undelete()

        self.assertEqual(Category.objects.count(), 3)

    def test_related_manager(self):
        order = Order.objects.create(name='order 2')
        Order.objects.create(name='order 3')
        order.articles.add(self.articles[0])
        self.assertEqual(self.articles[0].order_set.all().count(), 2)

        order.delete()
        self.assertEqual(self.articles[0].order_set.all().count(), 1)

        # Ensure all_with_deleted() filter correctly on the article.
        self.assertEqual(
            self.articles[0].order_set.all_with_deleted().count(), 2
        )

    def test_prefetch_related(self):
        """ prefetch_related() queryset should not be filtered by core_filter """
        authors = Author.objects.all().prefetch_related('article_set')
        for author in authors:
            self.assertQuerysetEqual(
                author.article_set.all().order_by('pk'),
                [repr(a) for a in Author.objects.get(pk=author.pk).article_set.all().order_by('pk')]
            )

    def test_validate_unique(self):
        """ Check that uniqueness is also checked against deleted objects """
        Category.objects.create(name='test').delete()
        with self.assertRaises(ValidationError):
            Category(name='test').validate_unique()
class AdminTestCase(TestCase):
urls = 'safedelete.tests'
def setUp(self):
self.author = Author.objects.create(name='author 0')
self.categories = (
Category.objects.create(name='category 0'),
Category.objects.create(name='category 1'),
Category.objects.create(name='category 2'),
)
self.articles = (
Article(name='article 0', author=self.author),
Article(name='article 1', author=self.author, category=self.categories[1]),
Article(name='article 2', author=self.author, categ |
Ichimonji10/elts | apps/main/views.py | Python | gpl-3.0 | 472 | 0.004237 | """Business logic for all URLs in the ``main`` application.
For details on what each function is responsible for, see ``main/urls.py``.
That module documents both URL-to-func | tion mappings and the exact
responsiblities of each function.
"""
from django.core import urlresolvers
from django import http
def index(request): # pylint: disable=W0613
    """Redirect user to ELTS application."""
    # This app is only an entry point; all real views live under `elts`.
    return http.HttpResponseRedirect(urlresolvers.reverse('elts.views.index'))
|
ldoktor/autotest | client/job.py | Python | gpl-2.0 | 50,100 | 0.002335 | """The main job wrapper
This is the core infrastructure.
Copyright Andy Whitcroft, Martin J. Bligh 2006
"""
import copy, os, re, shutil, sys, time, traceback, types, glob
import logging, getpass, weakref
from autotest.client import client_logging_config
from autotest.client import utils, parallel, kernel, xen
from autotest.client import profilers, boottool, harness
from autotest.client import config, sysinfo, test, local_host
from autotest.client import partition as partition_lib
from autotest.client.shared import base_job
from autotest.client.shared import error, barrier, logging_manager
from autotest.client.shared import base_packages, packages
from autotest.client.shared import global_config
from autotest.client.tools import html_report
GLOBAL_CONFIG = global_config.global_config
LAST_BOOT_TAG = object()
JOB_PREAMBLE = """
from autotest.client.shared.error import *
from autotest.client.utils import *
"""
class StepError(error.AutotestError):
    """Raised when a control-file step cannot be executed."""
    pass

class NotAvailableError(error.AutotestError):
    """Raised when a requested facility is unavailable on this client."""
    pass
def _run_test_complete_on_exit(f):
    """Decorator for job methods that automatically calls
    self.harness.run_test_complete when the method exits, if appropriate."""
    def wrapped(self, *args, **dargs):
        try:
            return f(self, *args, **dargs)
        finally:
            # Only notify the harness when writing to the main status log.
            if self._logger.global_filename == 'status':
                self.harness.run_test_complete()
            if self.drop_caches:
                utils.drop_caches()
    # Propagate function metadata by hand (pre-functools.wraps style).
    wrapped.__name__ = f.__name__
    wrapped.__doc__ = f.__doc__
    wrapped.__dict__.update(f.__dict__)
    return wrapped
class status_indenter(base_job.status_indenter):
    """Provide a status indenter that is backed by job._record_prefix."""
    def __init__(self, job):
        self.job = weakref.proxy(job)   # avoid a circular reference

    @property
    def indent(self):
        # The indent level is persisted on the job's state, not here.
        return self.job._record_indent

    def increment(self):
        self.job._record_indent += 1

    def decrement(self):
        self.job._record_indent -= 1
class base_client_job(base_job.base_job):
"""The client-side concrete implementation of base_job.
Optional properties provided by this implementation:
control
bootloader
harness
"""
_WARNING_DISABLE_DELAY = 5
# _record_indent is a persistent property, but only on the client
_job_state = base_job.base_job._job_state
_record_indent = _job_state.property_factory(
'_state', '_record_indent', 0, namespace='client')
_max_disk_usage_rate = _job_state.property_factory(
'_state', '_max_disk_usage_rate', 0.0, namespace='client')
def __init__(self, control, options, drop_caches=True,
extra_copy_cmdline=None):
"""
Prepare a client side job object.
@param control: The control file (pathname of).
@param options: an object which includes:
jobtag: The job tag string (eg "default").
cont: If this is the continuation of this job.
harness_type: An alternative server harness. [None]
use_external_logging: If true, the enable_external_logging
method will be called during construction. [False]
@param drop_caches: If true, utils.drop_caches() is called before and
between all tests. [True]
@param extra_copy_cmdline: list of additional /proc/cmdline arguments to
copy from the running kernel to all the installed kernels with
this job
"""
super(base_client_job, self).__init__(options=options)
self._pre_record_init(control, options)
try:
self._post_record_init(control, options, drop_caches,
extra_copy_cmdline)
except Exception, err:
self.record(
'ABORT', None, None,'client.job.__init__ failed: %s' %
str(err))
raise
@classmethod
def _get_environ_autodir(cls):
return os.environ['AUTODIR']
@classmethod
def _find_base_directories(cls):
"""
Determine locations of autodir and clientdir (which are the same)
using os.environ. Serverdir does not exist in this context.
"""
autodir = clientdir = cls._get_environ_autodir()
return autodir, clientdir, None
@classmethod
def _parse_args(cls, args):
return re.findall("[^\s]*?['|\"].*?['|\"]|[^\s]+", args)
def _find_resultdir(self, options):
"""
Determine the directory for storing results. On a client this is
always <autodir>/results/<tag>, where tag is passed in on the command
line as an option.
"""
output_dir_config = GLOBAL_CONFIG.get_config_value('CLIENT',
'output_dir',
default="")
if options.output_dir:
basedir = options.output_dir
elif output_dir_config:
basedir = output_dir_config
else:
basedir = self.autodir
return os.path.join(basedir, 'results', options.tag)
def _get_status_logger(self):
"""Return a reference to the status logger."""
return self._logger
def _pre_record_init(self, control, options):
"""
Initialization function that should peform ONLY the required
setup so that the self.record() method works.
As of now self.record() needs self.resultdir, self._group_level,
self.harness and of course self._logger.
"""
if not options.cont:
self._cleanup_debugdir_files()
self._cleanup_results_dir()
logging_manager.configure_logging(
client_logging_config.ClientLoggingConfig(),
results_dir=self.resultdir,
verbose=options.verbose)
logging.info('Writing results to %s', self.resultdir)
# init_group_level needs the state
self.control = os.path.realpath(control)
self._is_continuation = options.cont
self._current_step_ancestry = []
self._next_step_index = 0
self._load_state()
_harness = self.handle_persistent_option(options, 'harness')
_harness_args = self.handle_persistent_option(options, 'harness_args')
self.harness = harness.select(_harness, self, _harness_args)
# set up the status logger
def client_job_record_hook(entry):
msg_tag = ''
if '.' in self._logger.global_filename:
msg_tag = self._logger.global_filename.split('.', 1)[1]
# send the entry to the job harness
message = '\n'.join([entry.message] + entry.extra_message_lines)
rendered_entry = self._logger.render_entry(entry)
self.harness.test_status_detail(entry.status_code, entry.subdir,
entry.operation, message, msg_tag,
entry.fields)
self.harness.test_status(rendered_entry, msg_tag)
# send the entry to stdout, if it's enabled
logging.info(rendered_entry)
self._logger = base_job.status_logger(
self, status_indenter(self), record_hook=client_job_record_hook,
tap_writer=self._tap)
def _post_record_init(self, control, options, drop_caches,
extra_copy_cmdline):
"""
Perform job initialization not required by self.record().
"""
self._init_drop_caches(drop_caches)
self._init_packages()
self.sysinfo = sysinfo.sysinfo(self.resultdir)
self._load_sysinfo_state()
if not options.cont:
download = os.path.join(self.testdir, | 'download')
if not os.path.exists(download):
os.mkdir(download)
shutil.copyfi | le(self.control,
os.path.join(self.resultdir, 'control'))
self.control = control
self.logging = logging_manager.get_logging_mana |
VladKha/CodeWars | 7 kyu/Exes and Ohs/solve.py | Python | gpl-3.0 | 69 | 0 | def xo(s):
s = s.lower()
| return s. | count('x') == s.count('o')
|
justasabc/python_tutorials | tutorial/gui/pyqt4/widget/slider.py | Python | gpl-3.0 | 1,427 | 0.010512 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
ZetCode PyQt4 tutorial
This example shows a QtGui.QSlider widget.
author: Jan Bodnar
website: zetcode.com
last edited: September 2011
"""
import sys
from PyQt4 import QtGui, QtCore
class Example(QtGui.QWidget):
    """Demo widget: a horizontal slider that swaps a volume icon."""

    def __init__(self):
        super(Example, self).__init__()

        self.initUI()

    def initUI(self):
        """Build the slider, the icon label and the window geometry."""
        sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)
        sld.setFocusPolicy(QtCore.Qt.NoFocus)
        sld.setGeometry(30, 40, 100, 30)
        sld.valueChanged[int].connect(self.changeValue)

        self.label = QtGui.QLabel(self)
        self.label.setPixmap(QtGui.QPixmap('mute.png'))
        self.label.setGeometry(160, 40, 80, 30)

        self.setGeometry(300, 300, 280, 170)
        self.setWindowTitle('QtGui.QSlider')
        self.show()

    def changeValue(self, value):
        """Pick the icon for the slider position.

        Thresholds: 0 -> mute, 1-30 -> min, 31-79 -> med, >=80 -> max.
        """
        if value == 0:
            self.label.setPixmap(QtGui.QPixmap('mute.png'))
        elif value > 0 and value <= 30:
            self.label.setPixmap(QtGui.QPixmap('min.png'))
        elif value > 30 and value < 80:
            self.label.setPixmap(QtGui.QPixmap('med.png'))
        else:
            self.label.setPixmap(QtGui.QPixmap('max.png'))
def main():
    """Create the Qt application, show the demo widget, run the loop."""
    app = QtGui.QApplication(sys.argv)
    ex = Example()
    sys.exit(app.exec_())

if __name__ == '__main__':
    main()
|
lucabaldini/ximpol | ximpol/test/test_sensitivity_calculator.py | Python | gpl-3.0 | 5,090 | 0.001179 | #!/urs/bin/env python
#
# Copyright (C) 2015--2016, the ximpol team.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU GengReral Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import os
import unittest
import numpy
from ximpol.srcmodel.spectrum import xCountSpectrum, power_law
from ximpol.srcmodel.spectrum import int_eflux2pl_norm
from ximpol.utils.units_ import k | eV2erg
from ximpol.core.sp | line import xInterpolatedUnivariateSplineLinear
from ximpol.irf import load_arf, load_mrf
from ximpol.srcmodel.gabs import xpeInterstellarAbsorptionModel
"""The calculator ouputs have been obtained running by hand the code on the web
http://www.isdc.unige.ch/xipe/index.php/sensitivity-calculator
and are stored in the form of a list of tuples containing in order:
1 - column_density [1e22 cm^{-2}]
2 - power-law spectral index
3 - exposure time [ks]
4 - integral energy flux between 2 and 8 keV [1e-8 erg/cm^{2}/s]
5 - MDP in the 2--4, 4--6 and 6--8 keV energy bands.
Note that the numbers in the tuple are exactly what you would write in the
web form.
"""
# Reference values copied from the web calculator; see the module docstring
# for the meaning of each tuple field.
SENSITIVITY_CALCULATOR_OUTPUT = [
    (0.1, 1., 10., 0.1, [0.04022, 0.06668, 0.14058]),
    (0.1, 2., 10., 0.1, [0.03293, 0.06927, 0.17443]),
    (1. , 1., 10., 0.1, [0.04191, 0.06579, 0.13706]),
    (1. , 2., 10., 0.1, [0.03400, 0.06729, 0.16716]),
    (10., 1., 10., 0.1, [0.06228, 0.06348, 0.11810]),
    (10., 2., 10., 0.1, [0.04880, 0.06013, 0.13230])
]
class TestSensitivityCalculator(unittest.TestCase):
    """Unit test for Fabio's sensitivity calculator at
    http://www.isdc.unige.ch/xipe/index.php/sensitivity-calculator
    """

    @classmethod
    def setUpClass(cls):
        """Setup: load the baseline IRFs and fix the 2--8 keV band.
        """
        cls.irf_name = 'xipe_baseline'
        cls.aeff = load_arf(cls.irf_name)
        cls.modf = load_mrf(cls.irf_name)
        cls.emin = 2.
        cls.emax = 8.
        # Three equal-width energy bins: 2-4, 4-6, 6-8 keV.
        cls.ebinning = numpy.linspace(cls.emin, cls.emax, 4)
        cls.ism_model = xpeInterstellarAbsorptionModel()

    def mdp_table(self, column_density, index, exposure_time, eflux):
        """Return the MDP table for a point source with a power-law
        spectral shape with a given set of parameters and for a given
        observation time.

        There's a slight complication, here, due to the fact that the
        sensitivity calculator is rescaling the absorbed fluxes so that the
        input energy flux (in the web form) is that at the observer instead of
        that at the source. Therefore we need to do the same here.
        """
        tsamples = numpy.linspace(0., exposure_time, 2)
        norm = int_eflux2pl_norm(eflux, self.emin, self.emax, index, erg=True)
        energy_spectrum = power_law(norm, index)
        ism_trans = self.ism_model.transmission_factor(column_density)
        _x = numpy.linspace(self.emin, self.emax, 1000)
        _y = _x*energy_spectrum(_x, 0.)*ism_trans(_x)
        absorbed_energy_spectrum = xInterpolatedUnivariateSplineLinear(_x, _y)
        absorbed_eflux = keV2erg(absorbed_energy_spectrum.norm())
        # Rescale so the *absorbed* flux matches the requested input flux.
        scale = eflux/absorbed_eflux
        count_spectrum = xCountSpectrum(energy_spectrum, self.aeff, tsamples,
                                        column_density, scale_factor=scale)
        mdp_table = count_spectrum.build_mdp_table(self.ebinning, self.modf)
        return mdp_table

    def test_mdp(self):
        """Compare the MDP calculated by ximpol with that returned by the
        sensitivity calculator.
        """
        for column_density, index, exposure_time, eflux, target_mdps in\
            SENSITIVITY_CALCULATOR_OUTPUT:
            # Convert the source parameters to physical units.
            column_density *= 1.e22
            exposure_time *= 1000.
            eflux *= 1.e-8
            # Calculate the MDP table using the ximpol facilities.
            table = self.mdp_table(column_density, index, exposure_time, eflux)
            # Drop the broad-band row; compare the three energy bins only.
            ximpol_mdps = table.mdp_values()[:-1]
            target_mdps = numpy.array(target_mdps)
            ximpol_mdps = numpy.array(ximpol_mdps)
            delta = abs(target_mdps - ximpol_mdps)/target_mdps
            max_delta = delta.max()
            err_msg = 'max. diff. %.4f (nH = %.3e, index = %.2f)' %\
                      (max_delta, column_density, index)
            err_msg += '\nximpol: %s\nsensitivity calculator: %s' %\
                       (ximpol_mdps, target_mdps)
            # 3% agreement is the acceptance threshold.
            self.assertTrue(max_delta < 0.03, err_msg)
# Allow running this test module stand-alone.
if __name__ == '__main__':
    unittest.main()
|
Toollabs/video2commons | video2commons/frontend/app.py | Python | gpl-3.0 | 7,166 | 0.000558 | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
#
# Copyright (C) 2015-2016 Zhuyifei1999
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>
#
"""video2commons web frontend."""
import json
import traceback
from urllib.parse import quote as urlquote
from urllib.parse import urlparse, urljoin
from flask import (
Flask, request, Response, session, render_template, redirect, url_for
)
from mwoauth import AccessToken, ConsumerToken, RequestToken, Handshaker
from requests_oauthlib import OAuth1
import requests
from video2commons.config import (
consumer_key, consumer_secret, api_url, webfrontend_uri, socketio_uri
)
from video2commons.frontend.redisession import RedisSessionInterface
from video2commons.frontend.shared import redisconnection, check_banned
from video2commons.frontend.api import api
from video2commons.frontend.i18n import (
i18nblueprint, translate as _, getlanguage, is_rtl
)
# OAuth consumer credentials and the handshaker driving the login flow.
consumer_token = ConsumerToken(consumer_key, consumer_secret)
handshaker = Handshaker(api_url, consumer_token)
app = Flask(__name__)
app.session_cookie_name = 'v2c-session'
# Sessions are stored server-side in redis; the cookie only carries an id.
app.session_interface = RedisSessionInterface(redisconnection)
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 3600
# Values exposed to templates and to the client via the /config endpoint.
config_p = {
    'webfrontend_uri': webfrontend_uri,
    'socketio_uri': socketio_uri,
}
app.jinja_env.globals['config'] = config_p
app.jinja_env.globals['_'] = _
app.jinja_env.globals['lang'] = getlanguage
app.jinja_env.tests['rtl'] = is_rtl
app.register_blueprint(api, url_prefix='/api')
app.register_blueprint(i18nblueprint, url_prefix='/i18n')
@app.errorhandler(Exception)
def all_exception_handler(e):
    """Render a 500 error page for any unhandled exception.

    Degrades gracefully: detailed-traceback page -> generic-message
    page -> bare text response.
    """
    try:
        message = 'Please file an issue in GitHub: ' + \
            traceback.format_exc()
        loggedin = 'username' in session
    except Exception:
        # Building the detailed report itself failed (e.g. no request
        # context); fall back to a generic message.  Narrowed from a bare
        # ``except`` so SystemExit/KeyboardInterrupt are not swallowed.
        message = (
            'Something went terribly wrong, '
            'and we failed to find the cause automatically. '
            'Please file an issue in GitHub.'
        )
        loggedin = False
    try:
        return render_template(
            'error.min.html',
            message=message,
            loggedin=loggedin
        ), 500
    except Exception:
        # Even rendering the error template failed; return plain text.
        return message, 500
@app.before_request
def force_https():
    """Redirect plain-HTTP requests to HTTPS, using X-Forwarded-Proto."""
    proto = request.headers.get('X-Forwarded-Proto')
    if proto != 'http':
        return None
    target = 'https://{0}{1}'.format(request.headers['Host'],
                                     request.headers['X-Original-URI'])
    return redirect(target, code=301)
@app.route('/config')
def get_config():
    """Serve the frontend configuration as a Javascript snippet."""
    body = ''.join(['window.config=', json.dumps(config_p), ';'])
    return Response(body, mimetype='application/javascript; charset=utf-8')
@app.route('/')
def main():
    """Main page: ban notice for banned users, otherwise login state."""
    banned = check_banned()
    if banned:
        return render_template(
            'error.min.html',
            message='You are banned from using this tool! Reason: ' + banned,
            loggedin=False
        )
    try:
        auth = dologin()
        session['language'] = querylanguage(auth)
        loggedin = True
    except Exception:
        # SECURITY: If we cannot login, the session is invalid.
        # (Narrowed from a bare ``except`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.)
        app.session_interface.abandon_session(app, session)
        loggedin = False
    # The two render_template calls of the original are merged; only the
    # loggedin flag differed.
    return render_template(
        'main.min.html',
        loggedin=loggedin
    )
def dologin():
    """Validate the stored OAuth access token and build request auth.

    Raises NameError when the session holds no token, and sets
    session['username'] as a side effect of a successful identify call.
    """
    if 'access_token_key' not in session or \
            'access_token_secret' not in session:
        raise NameError("No access keys")
    access_token = AccessToken(
        session['access_token_key'], session['access_token_secret'])
    session['username'] = handshaker.identify(access_token)['username']
    return OAuth1(
        client_key=consumer_token.key,
        client_secret=consumer_token.secret,
        resource_owner_key=access_token.key,
        resource_owner_secret=access_token.secret)
def querylanguage(auth):
    """Query user's language preference via the MediaWiki API.

    Returns the preference, or 'en' when the request yields malformed
    data or the preference is unset.
    """
    default = 'en'
    r = requests.post(
        url=api_url.replace('index.php', 'api.php'),
        data={
            'action': 'query',
            'format': 'json',
            'meta': 'userinfo',
            'uiprop': 'options'
        },
        auth=auth
    )
    try:
        language = r.json()['query']['userinfo']['options']['language']
    except (NameError, KeyError, ValueError):
        # ValueError is raised by r.json() on a non-JSON response body;
        # previously only NameError/KeyError were caught, so an invalid
        # API reply crashed the request instead of falling back to 'en'.
        return default
    if not language:
        return default
    return language
@app.route('/oauthinit')
def loginredirect():
    """Start the OAuth handshake and send the user to the wiki."""
    app.session_interface.abandon_session(app, session)
    redirecturl, request_token = handshaker.initiate()
    session['request_token_key'] = request_token.key
    session['request_token_secret'] = request_token.secret
    session['return_to_url'] = url_for('main')
    returnto = request.args.get('returnto')
    if returnto:
        # Only honor same-origin return targets under our own path.
        ref_url = urlparse(request.url_root)
        test_url = urlparse(urljoin(request.host_url, returnto))
        same_origin = (test_url.scheme == ref_url.scheme and
                       test_url.netloc == ref_url.netloc and
                       test_url.path.startswith(ref_url.path))
        if same_origin:
            session['return_to_url'] = returnto
    return redirect(redirecturl)
@app.route('/oauthcallback')
def logincallback():
    """Finish the OAuth handshake and store the access credentials."""
    request_token = RequestToken(
        session['request_token_key'], session['request_token_secret'])
    access_token = handshaker.complete(request_token, request.query_string)
    # Drop any stale credentials before identifying the new user.
    for stale_key in ('access_token_key', 'access_token_secret', 'username'):
        session.pop(stale_key, None)
    identify = handshaker.identify(access_token)
    autoconfirmed = 'autoconfirmed' in identify['rights']
    if identify['editcount'] < 50 or not autoconfirmed:
        return render_template(
            'error.min.html',
            message='Due to ongoing abuse, you must be autoconfirmed '
                    'with at least 50 edits on Commons to use this tool.',
            loggedin=True
        )
    session['access_token_key'] = access_token.key
    session['access_token_secret'] = access_token.secret
    session['username'] = identify['username']
    return redirect(session.get('return_to_url', url_for('main')))
@app.route('/logout')
def logout():
    """Logout: clear all session data."""
    # Clearing the whole session also discards OAuth tokens and username.
    session.clear()
    return redirect(url_for('main'))
|
aarontropy/django-datebook | datebook/forms.py | Python | bsd-3-clause | 5,875 | 0.027234 | from django import forms
from django.utils import timezone
from django.utils.formats import get_format
from django.utils.safestring import mark_safe
from django.forms.widgets import SplitDateTimeWidget, HiddenInput
from timezones.forms import TZDateTimeField
from datetime import datetime
import dateutil.parser
from datebook import models as datebook
import logging
log = logging.getLogger('app')
# Display formats used by the jQuery UI date/time picker widgets below.
DATE_FORMAT = '%A, %b %d, %Y'
TIME_FORMAT = '%I:%M %p'
class DatePickerWidget(forms.widgets.Widget):
    """Read-only text input backed by a jQuery UI datepicker."""

    def render(self, name, value, attrs=None):
        """Render the input plus the inline script wiring up the picker."""
        if value is None:
            vstr = ''
        elif hasattr(value, 'strftime'):
            vstr = value.strftime(DATE_FORMAT)
        else:
            # This is a string, so try to parse it in ISO format and then
            # format it properly; fall back to showing it verbatim.
            try:
                vdate = dateutil.parser.parse(value)
                vstr = vdate.strftime(DATE_FORMAT)
            except (ValueError, OverflowError, TypeError):
                # Narrowed from a bare ``except``: only parser failures
                # should fall back to the raw string.
                vstr = value
        # Renamed from ``id`` to avoid shadowing the builtin.
        input_id = "id_%s" % name
        args = [
            "<input class=\"datepicker\" type=\"text\" value=\"%s\" name=\"%s\" id=\"%s\" readonly=\"true\" />" % \
            (vstr, name, input_id),
            "<script type=\"text/javascript\">$(\"#%s\").datepicker({dateFormat:'DD, M d, yy'}).datepicker('setDate', '%s');</script>" % (input_id, vstr,)
        ]
        return mark_safe("\n".join(args))
class TimePickerWidget(forms.widgets.Widget):
    """Read-only text input backed by a jQuery UI timepicker."""

    def render(self, name, value, attrs=None):
        """Render the input plus the inline script wiring up the picker."""
        if value is None:
            vstr = ''
        elif hasattr(value, 'strftime'):
            vstr = value.strftime(TIME_FORMAT)
        else:
            # This is a string, so try to parse it in ISO format and then
            # format it properly; fall back to showing it verbatim.
            try:
                vdate = dateutil.parser.parse(value)
                vstr = vdate.strftime(TIME_FORMAT)
            except (ValueError, OverflowError, TypeError):
                # Narrowed from a bare ``except``: only parser failures
                # should fall back to the raw string.
                vstr = value
        # Renamed from ``id`` to avoid shadowing the builtin.
        input_id = "id_%s" % name
        args = [
            "<input class=\"timepicker\" type=\"text\" value=\"%s\" name=\"%s\" id=\"%s\" readonly=\"true\" />" % (vstr, name, input_id),
            "<script type=\"text/javascript\">$(\"#%s\").timepicker({showLeadingZero: false, showPeriod: true, minutes: { interval: 15} }).timepicker('setTime', '%s');</script>" % (input_id, vstr,)
        ]
        return mark_safe("\n".join(args))
class JqDateTimeWidget(forms.widgets.MultiWidget):
    """Combined date + time picker producing a single datetime value."""

    def __init__(self, attrs=None):
        widgets = [
            DatePickerWidget(),
            TimePickerWidget(),
        ]
        super(JqDateTimeWidget, self).__init__(widgets, attrs)

    def decompress(self, value):
        """ You have to implement this method in order to subclass a MultiWidget.
        Look at django.forms.widgets.SplitDateTimeWidget for another example."""
        if value:
            # Drop microseconds so the re-rendered time round-trips cleanly.
            return [value.date(), value.time().replace(microsecond=0)]
        return [None, None]

    def value_from_datadict(self, data, files, name):
        """Recombine the two sub-widget strings into a datetime, or None
        when either part is missing or unparsable."""
        date_value, time_value = super(JqDateTimeWidget, self).value_from_datadict(data, files, name)
        if date_value and time_value:
            datetime_format = "%s %s" % (DATE_FORMAT, TIME_FORMAT)
            # (Removed an unused ``datetime_input_format`` computation that
            # called get_format() but was never read.)
            datetime_string = "%s %s" % (date_value, time_value)
            try:
                return datetime.strptime(datetime_string, datetime_format)
            except ValueError:
                return None
        return None
class WeekdayPicker(forms.widgets.MultiWidget):
    """
    Widget showing seven checkboxes, one per day of the week.

    Checkbox positions correspond to the integer values of rrule.MO,
    rrule.TU, and so on; the widget's value is a comma-separated list of
    the selected integers.
    """
    def __init__(self, attrs=None):
        super(WeekdayPicker, self).__init__([forms.CheckboxInput()] * 7, attrs)

    def decompress(self, value):
        """
        Turn a comma-separated integer string into a list of 7 booleans.
        """
        flags = [False] * 7
        text = value or ''
        if text:
            for token in text.split(','):
                flags[int(token)] = True
        return flags

    def value_from_datadict(self, data, files, name):
        """
        Collapse the checkbox booleans back into a comma-separated string
        of the selected weekday integers (rrule.MO, etc.).
        """
        checked = super(WeekdayPicker, self).value_from_datadict(data, files, name)
        return ','.join(str(i) for i, day in enumerate(checked) if day)
# Maps model field names to renamed form field names; consumed by
# EventModelForm.add_prefix so the event title does not clash with the
# datebook title on the admin page.
FIELD_NAME_MAPPING = {
    'title': 'event_title',
}
class EventModelForm(forms.ModelForm):
    """Form for a single Event; start/end use the jQuery UI widgets."""
    start = TZDateTimeField(widget=JqDateTimeWidget)
    end = TZDateTimeField(widget=JqDateTimeWidget)
    title = forms.CharField(required=False)
    location = forms.CharField(required=False)
    # Hidden bookkeeping fields: owning datebook and (optional) event id.
    datebook = forms.ModelChoiceField(widget=forms.HiddenInput, queryset=datebook.Datebook.objects.all())
    id = forms.IntegerField(widget = forms.HiddenInput, required=False)
    class Meta:
        model = datebook.Event
        exclude = ['series',]
    # The overridden method is here to allow me to rename the title field.
    # I need to do this so that the event title does not conflict with the datebook
    # title on the admin page.
    def add_prefix(self, field_name):
        # look up field name; return original if not found
        field_name = FIELD_NAME_MAPPING.get(field_name, field_name)
        return super(EventModelForm, self).add_prefix(field_name)
class SeriesModelForm(forms.ModelForm):
    """Form for a recurring-event Series."""
    # Weekday selection rendered as checkboxes; choices come from the model.
    day_list = forms.MultipleChoiceField(widget=forms.CheckboxSelectMultiple, choices=datebook.Series.day_list_choices)
    series = forms.CharField(widget=forms.HiddenInput, required=False)
    class Meta:
        model = datebook.Series
class TestModelForm(forms.ModelForm):
    """Fixture form exercising JqDateTimeWidget against TestModel."""
    start = forms.DateTimeField(widget=JqDateTimeWidget)
    end = forms.DateTimeField(widget=JqDateTimeWidget)
    class Meta:
        model = datebook.TestModel
class TestForm(forms.Form):
    """Side-by-side comparison of the custom widget and Django's stock
    SplitDateTimeWidget."""
    start = forms.DateTimeField(widget=JqDateTimeWidget)
    end = forms.DateTimeField(widget=SplitDateTimeWidget)
|
concentricsky/badgr-server | apps/mainsite/drf_fields.py | Python | agpl-3.0 | 3,112 | 0.003213 | import base64
import binascii
import mimetypes
import urllib.parse
import uuid
from django.core.exceptions import ValidationError
from django.core.files.base import ContentFile
from django.core.files.uploadedfile import UploadedFile
from django.utils.translation import ugettext as _
from rest_framework.fields import FileField, SkipField
from mainsite.validators import ValidImageValidator
class Base64FileField(FileField):
    """File field that also accepts a ``data:<mime>;base64,...`` payload."""

    # mimetypes.guess_extension() may return different values for the same
    # mimetype, but we need exactly one extension per mime.
    _MIME_MAPPING = {
        'image/jpeg': '.jpg',
        'audio/wav': '.wav',
        'image/svg+xml': '.svg'
    }
    _ERROR_MESSAGE = _('Base64 string is incorrect')

    def to_internal_value(self, data):
        """Decode a base64 data-URI into a ContentFile; defer to the parent
        implementation for regular uploads or undecodable input."""
        if isinstance(data, UploadedFile):
            return super(Base64FileField, self).to_internal_value(data)
        try:
            mime, encoded_data = data.replace('data:', '', 1).split(';base64,')
            # Prefer the canonical mapping; fall back to mimetypes.
            # (Simplified from ``mime in list(self._MIME_MAPPING.keys())``.)
            if mime in self._MIME_MAPPING:
                extension = self._MIME_MAPPING[mime]
            else:
                extension = mimetypes.guess_extension(mime)
            if extension is None:
                raise ValidationError('Invalid MIME type')
            name = '{name}{extension}'.format(name=str(uuid.uuid4()),
                                              extension=extension)
            return ContentFile(base64.b64decode(encoded_data), name=name)
        except (ValueError, binascii.Error):
            # Not a data URI (or bad base64): let the parent try.
            return super(Base64FileField, self).to_internal_value(data)
class ValidImageField(Base64FileField):
    """Image-validating field accepting uploads, base64 data, or URLs."""
    default_validators = [ValidImageValidator()]
    def __init__(self, skip_http=True, allow_empty_file=False, use_url=True, allow_null=True, **kwargs):
        # skip_http: ignore http(s) URL inputs instead of storing them.
        self.skip_http = skip_http
        # use_public: serialize via the owner's public image URL.
        self.use_public = kwargs.pop('use_public', False)
        super(ValidImageField, self).__init__(
            allow_empty_file=allow_empty_file, use_url=use_url, allow_null=allow_null, **kwargs
        )
    def to_internal_value(self, data):
        # Skip http/https urls to avoid overwriting valid data when, for example, a client GETs and subsequently PUTs an
        # entity containing an image URL.
        if self.skip_http and not isinstance(data, UploadedFile) and urllib.parse.urlparse(data).scheme in ('http', 'https'):
            raise SkipField()
        self.source_attrs = ['image'] # Kind of a dirty hack, because this is failing to stick if set on init.
        return super(ValidImageField, self).to_internal_value(data)
    def to_representation(self, value):
        # NOTE(review): ``value`` may be either the owning model instance
        # (source="*") or a FileField — both shapes are handled below.
        if self.use_public:
            try:
                if getattr(value, 'instance', None):
                    return value.instance.image_url(public=True) # sometimes value is a FileField despite source="*"
                return value.image_url(public=True)
            except AttributeError:
                pass
        try:
            return super(ValidImageField, self).to_representation(value.image)
        except AttributeError:
            return super(ValidImageField, self).to_representation(value)
|
skosukhin/spack | lib/spack/spack/cmd/flake8.py | Python | lgpl-2.1 | 10,620 | 0.000094 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from __future__ import print_function
import re
import os
import sys
import shutil
import tempfile
import argparse
from llnl.util.filesystem import working_dir, mkdirp
import spack
from spack.util.executable import which
# NOTE(review): these module-level strings are presumably read by Spack's
# command framework to describe the `spack flake8` command — confirm.
description = "runs source code style checks on Spack. requires flake8"
section = "developer"
level = "long"
def is_package(f):
    """Whether flake8 should consider a file as a core file or a package.
    We run flake8 with different exceptions for the core and for
    packages, since we allow `from spack import *` and poking globals
    into packages.
    """
    return ('docs/tutorial/examples' in f or
            f.startswith('var/spack/repos/'))
#: List of directories to exclude from checks.
exclude_directories = [spack.external_path]

#: This is a dict that maps:
#:  filename pattern ->
#:     flake8 exemption code ->
#:        list of patterns, for which matching lines should have codes applied.
#:
#: For each file, if the filename pattern matches, we'll add per-line
#: exemptions if any patterns in the sub-dict match.
pattern_exemptions = {
    # exemptions applied only to package.py files.
    r'package.py$': {
        # Allow 'from spack import *' in packages, but no other wildcards
        'F403': [
            r'^from spack import \*$'
        ],
        # Exempt lines with urls and descriptions from overlong line errors.
        'E501': [
            r'^\s*homepage\s*=',
            r'^\s*url\s*=',
            r'^\s*git\s*=',
            r'^\s*svn\s*=',
            r'^\s*hg\s*=',
            r'^\s*list_url\s*=',
            r'^\s*version\(',
            r'^\s*variant\(',
            r'^\s*provides\(',
            r'^\s*extends\(',
            r'^\s*depends_on\(',
            r'^\s*conflicts\(',
            r'^\s*resource\(',
            r'^\s*patch\(',
        ],
        # Exempt '@when' decorated functions from redefinition errors.
        'F811': [
            r'^\s*@when\(.*\)',
        ],
    },

    # exemptions applied to all files.
    r'.py$': {
        'E501': [
            r'(https?|ftp|file)\:',       # URLs
            r'([\'"])[0-9a-fA-F]{32,}\1',  # long hex checksums
        ]
    },
}

# Compile all regular expressions once, replacing the raw-string table
# above with {compiled file pattern -> {code -> [compiled patterns]}}.
pattern_exemptions = dict(
    (re.compile(file_pattern),
     dict((code, [re.compile(p) for p in patterns])
          for code, patterns in error_dict.items()))
    for file_pattern, error_dict in pattern_exemptions.items())
def changed_files(args):
    """Return the sorted list of changed Python files in the Spack repo."""
    git = which('git', required=True)
    commit_range = "{0}...".format(args.base)
    git_commands = [
        # Files committed since branching off of develop
        ['diff', '--name-only', '--diff-filter=ACMR', commit_range],
        # Files staged but not yet committed
        ['diff', '--name-only', '--diff-filter=ACMR', '--cached'],
        # Files changed but unstaged
        ['diff', '--name-only', '--diff-filter=ACMR'],
    ]
    # Untracked (new) files
    if args.untracked:
        git_commands.append(['ls-files', '--exclude-standard', '--other'])
    # Everything, if the user asked for it
    if args.all:
        git_commands.append(['ls-files', '--exclude-standard'])
    excludes = [os.path.realpath(d) for d in exclude_directories]
    changed = set()
    for command in git_commands:
        for name in git(*command, output=str).split('\n'):
            # Only Python files are checked
            if not name.endswith('.py'):
                continue
            # Skip files under the excluded locations
            real = os.path.realpath(name)
            if any(real.startswith(prefix) for prefix in excludes):
                continue
            changed.add(name)
    return sorted(changed)
def add_pattern_exemptions(line, codes):
    """Add a flake8 exemption to a line."""
    # Comment lines are never annotated (returned untouched, newline
    # included).
    if line.startswith('#'):
        return line

    stripped = line.rstrip('\n')

    # A blanket '# noqa' already exempts everything.
    if stripped.endswith('# noqa'):
        return stripped + '\n'

    original_length = len(stripped)
    exemptions = ','.join(sorted(set(codes)))

    if '# noqa: ' in stripped:
        # Extend an existing exemption list.
        stripped = '{0},{1}'.format(stripped, exemptions)
    elif stripped:  # never annotate empty lines
        stripped = '{0} # noqa: {1}'.format(stripped, exemptions)

    # If the annotation itself pushed the line over the limit, exempt that.
    if len(stripped) > 79 and original_length <= 79:
        stripped += ',E501'

    return stripped + '\n'
def filter_file(source, dest, output=False):
    """Filter a single file through all the patterns in pattern_exemptions."""
    with open(source) as infile:
        mkdirp(os.path.dirname(dest))
        with open(dest, 'w') as outfile:
            for line in infile:
                codes = []
                for file_pattern, errors in pattern_exemptions.items():
                    # Only apply sub-dicts whose filename pattern matches.
                    if not file_pattern.search(source):
                        continue
                    for code, patterns in errors.items():
                        if any(p.search(line) for p in patterns):
                            codes.append(code)
                if codes:
                    line = add_pattern_exemptions(line, codes)
                outfile.write(line)
                if output:
                    sys.stdout.write(line)
def setup_parser(subparser):
    """Register the ``spack flake8`` command-line options on ``subparser``."""
    subparser.add_argument(
        '-b', '--base', action='store', default='develop',
        help="select base branch for collecting list of modified files")
    subparser.add_argument(
        '-k', '--keep-temp', action='store_true',
        help="do not delete temporary directory where flake8 runs. "
             "use for debugging, to see filtered files")
    subparser.add_argument(
        '-a', '--all', action='store_true',
        help="check all files, not just changed files")
    subparser.add_argument(
        '-o', '--output', action='store_true',
        help="send filtered files to stdout as well as temp files")
    subparser.add_argument(
        '-r', '--root-relative', action='store_true', default=False,
        help="print root-relative paths (default is cwd-relative)")
    subparser.add_argument(
        '-U', '--no-untracked', dest='untracked', action='store_false',
        default=True, help="exclude untracked files from checks")
    subparser.add_argument(
        'files', nargs=argparse.REMAINDER, help="specific files to check")
def flake8(parser, args):
flake8 = which('flake8', required=True)
temp = tempfile.mkdtemp()
try:
file_list = args.files
if file_list:
def prefix_relative(path):
return os.path.relpath(
os.path.abspath(os.path.realpath(path)), spack.prefix)
file_list = [prefix_relative(p) for p in file_list]
with working_dir(spack.pr |
akshayp/college-projects | c++/car-finder/lightmarker.py | Python | mit | 4,319 | 0.012271 | # Light Marker Annotation
#
# (c) Scott & Linda Wills 20 November 2009
import os, Image, ImageDraw
SeqDir = "./seqs/"      # input image sequences (one sub-directory per sequence)
TrlDir = "./trials/"    # per-sequence report.txt files; annotated output
# NOTE(review): Colors is unused in the code visible here — Draw_Marker
# picks from a hard-coded green/yellow/red palette instead.
Colors = ['pink', 'red', 'green', 'blue', 'orange', 'yellow', 'skyblue', 'purple']
def LightMarker() :
""" This routine walks through each sequence in the seqs directory, parses
the report.txt file in that sequence and annotates the images in the sequence
with position and velocity information contained in the report.txt file. """
DirList = os.listdir(".")
if SeqDir[2:-1] not in DirList :
print "sequence directory %s is missing" % (SeqDir)
elif TrlDir[2:-1] not in DirList :
print "trial directory %s is missing" % (TrlDir)
else :
Trials = os.listdir(TrlDir)
for Seq in Trials :
print "annotating sequence %s" % (Seq)
Files = os.listdir(TrlDir + Seq)
for File in Files :
if File == 'report.txt' :
ReportFile = open(TrlDir + Seq + '/' + File, 'r')
FrameData = {}
for Line in ReportFile.readlines() :
if ':' in Line :
Keyword, Tokens = Line.split(':')
Keyword = Keyword.strip()
Tokens = Tokens[:-1]
if Keyword == 'Seq' :
SeqName = Tokens.strip()
SeqName = SeqName.strip('"')
if SeqName <> Seq :
print "Error: Sequence name %s does not match sequence directory %s" % (SeqName, Seq)
if Keyword == 'Start' :
Start = int(Tokens.strip())
elif Keyword == 'End' :
End = int(Tokens.strip())
elif Keyword == 'FN' :
FN = int(Tokens.strip())
FrameData[FN] = []
elif Keyword == 'CE' : # line is a data line
Tokens = Tokens.replace(')', ',')
Tokens = Tokens.replace('(', '')
Data = Tokens.split(',')
Xmin = int(Data[0].strip())
Ymin = int(Da | ta[1].strip())
Xmax = int(Data[2].strip())
Ymax = int(Data[3].strip())
A = int(Data [4].strip())
FrameData[FN].append((Xmin, Ymin, Xmax, Ymax, A))
else : # do nothing
False
for FN in range(Start, End+1) :
| Frame = Load_Frame(SeqDir + Seq, FN)
Draw = ImageDraw.Draw(Frame)
if FN in FrameData :
FN_Data = FrameData[FN]
for (Xmin, Ymin, Xmax, Ymax, A) in FN_Data :
Draw_Marker(FN, Draw, Xmin, Ymin, Xmax, Ymax)
Store_Frame(TrlDir + Seq, FN, Frame)
else :
print "Error: Missing data for Frame %d" % (FN)
def Load_Frame(Seq, FN) :
    """ Open and return frame number FN of sequence directory Seq. """
    return Image.open("%s/%05d.jpg" % (Seq, FN))
def Store_Frame(Seq, FN, Frame) :
    """ Save Frame as JPEG frame number FN under sequence directory Seq. """
    Frame.save("%s/%05d.jpg" % (Seq, FN), 'JPEG')
def Draw_Marker(FN, Draw, Xmin, Ymin, Xmax, Ymax, Width=1280, Height=720) :
    """ Draw an I-beam marker spanning the box on the frame, colored by
    the box width relative to the frame width (green/yellow/red).
    Height is accepted for symmetry but unused. """
    MidY = (Ymax + Ymin) / 2
    Span = Xmax - Xmin
    if Span >= Width / 3 :
        Color = "red"
    elif Span >= Width / 5 :
        Color = "yellow"
    else :
        Color = "green"
    Draw.line([(Xmin,Ymin),(Xmin,Ymax)], width=2, fill=Color)
    Draw.line([(Xmax,Ymin),(Xmax,Ymax)], width=2, fill=Color)
    Draw.line([(Xmin,MidY),(Xmax,MidY)], width=2, fill=Color)
# Script entry point.
if __name__ == '__main__' :
    LightMarker()
|
misterhay/GoogleAppsProvisioning | oauth2_getAllUsers.py | Python | unlicense | 3,688 | 0.011931 | #import some things we need
import httplib2
from oauth2client.client import SignedJwtAssertionCredentials #included with the Google Apps Directory API
from apiclient.discovery import build
import csv
def downloadUsers(domain, account, customerId):
    """Dump all users of a Google Apps domain to <domain>_userList.csv.

    Authenticates as a service account (key in <domain>.p12) impersonating
    the domain super-admin, then pages through the Directory API user list.
    """
    superAdmin = 'is@' + domain
    serviceAccount = account + '@developer.gserviceaccount.com'
    p12File = domain + '.p12'
    scope = 'https://www.googleapis.com/auth/admin.directory.user https://www.googleapis.com/auth/admin.directory.orgunit https://www.googleapis.com/auth/admin.directory.group https://www.googleapis.com/auth/admin.directory.device.chromeos'
    #read then close the key file
    keyFile = file(p12File, 'rb')
    key = keyFile.read()
    keyFile.close()
    #build credentials impersonating the super-admin
    credentials = SignedJwtAssertionCredentials(serviceAccount, key, scope, prn=superAdmin)
    #authenticate
    http = httplib2.Http()
    httplib2.debuglevel = False #change this to True if you want to see the output
    http = credentials.authorize(http=http)
    directoryService = build(serviceName='admin', version='directory_v1', http=http)
    #create and/or open a file that we'll append to
    # NOTE(review): 'a' mode means re-runs append a duplicate header row
    # (and duplicate data) rather than overwriting the previous dump.
    outputFileName = domain + '_userList.csv'
    outputFile = open(outputFileName, 'a')
    outputFile.write('primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath\n') #write the headers
    pageToken = None #this is the variable where we'll store the next page token
    while True:
        try:
            page = directoryService.users().list(domain=domain, customer=customerId, maxResults='500', pageToken=pageToken).execute()
            users = page['users']
            for user in users: #parse the users from the page variable
                primaryEmail = user['primaryEmail']
                lastLoginTime = user['lastLoginTime']
                name = user['name']['fullName']
                isAdmin = user['isAdmin']
                orgUnitPath = user['orgUnitPath']
                #log to a file
                outputFile.write(primaryEmail + ',' + str(lastLoginTime) + ',' + name + ',' + str(isAdmin) + ',' + str(orgUnitPath))
                outputFile.write( '\n')
            # NOTE(review): the KeyError raised here on the last page is what
            # ends the loop — the bare except below is the pagination
            # terminator, so it also hides genuine API/auth failures.
            pageToken = page['nextPageToken'] #this will error if there's no nextPageToken
        except:
            print 'We probably reached the end of ' + domain
            break
    outputFile.close()
# Read the csv file listing one (domain, account number, customer ID)
# triple per row, and dump the user list for each domain.
domainListFile = open('domainList.csv', 'rb')
domainList = csv.reader(domainListFile)
for row in domainList:
    domain = row[0]  # the first entry in this row is the domain
    account = row[1]
    customerId = row[2]
    downloadUsers(domain, account, customerId)
# The triple-quoted blocks below are disabled example snippets kept for
# reference (listing fields, creating a user, moving a user to an org).
'''
for user in page:
    primaryEmail = page.get(user['primaryEmail'])
    lastLoginTime = page.get('lastLoginTime')
    name = page.get('name')
    isAdmin = page.get('isAdmin')
    orgUnitPath = page.get('orgUnitPath')
    newPage = page.get('nextPageToken')
    print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
'''
'''
#create a user
userinfo = {'primaryEmail': 'newTest@example.com',
    'name': { 'givenName': 'New', 'familyName': 'Test' },
    'password': 'passwordfornewuser1',
    'orgUnitPath':'/Archive'}
directoryService.users().insert(body=userinfo).execute()
'''
'''
#move a user to an org
userOrg = {'orgUnitPath':'/Archive'}
directoryService.users().patch(userKey='newTest@example.com', body=userOrg).execute()
'''
'''
user = directoryService.users().get(userKey = 'newTest@example.com')
pprint.pprint(user.execute())
'''
|
rizar/attention-lvcsr | libs/blocks/blocks/utils/__init__.py | Python | mit | 18,117 | 0 | from __future__ import print_function
import sys
import contextlib
from collections import OrderedDict, deque
import numpy
import six
import theano
from theano import tensor
from theano import printing
from theano.gof.graph import Constant
from theano.tensor.shared_randomstreams import RandomStateSharedVariable
from theano.tensor.sharedvar import SharedVariable
def pack(arg):
    """Pack variables into a list.

    Parameters
    ----------
    arg : object
        Lists and tuples are converted to (new) lists; any other
        object is wrapped in a singleton list.

    Returns
    -------
    list
        List containing the arguments

    """
    if not isinstance(arg, (list, tuple)):
        return [arg]
    return list(arg)
def unpack(arg, singleton=False):
    """Unpack variables from a list or tuple.

    Parameters
    ----------
    arg : object
        A one-element list or tuple yields its single element; a longer
        tuple is converted to a list; any other object is returned as is.
    singleton : bool
        If ``True``, `arg` is expected to be a singleton (a list or tuple
        with exactly one element) and an exception is raised if this is
        not the case. ``False`` by default.

    Returns
    -------
    object
        A list of length greater than one, or any other Python object
        except tuple.

    """
    if not isinstance(arg, (list, tuple)):
        return arg
    if len(arg) == 1:
        return arg[0]
    if singleton:
        raise ValueError("Expected a singleton, got {}".format(arg))
    return list(arg)
def shared_floatx_zeros_matching(shared_variable, name=None, **kwargs):
    r"""Create a zeroed shared variable mirroring an existing one.

    Parameters
    ----------
    shared_variable : :class:'tensor.TensorSharedVariable'
        Template shared variable supplying the desired shape and
        broadcastable flags.
    name : :obj:`str`, optional
        Name for the new shared variable. Defaults to `None`.
    \*\*kwargs
        Forwarded to :func:`shared_floatx_zeros`.

    Returns
    -------
    :class:'tensor.TensorSharedVariable'
        A new shared variable, initialized to all zeros, with the same
        shape and broadcastable flags as `shared_variable`.

    """
    if not is_shared_variable(shared_variable):
        raise ValueError('argument must be a shared variable')
    template_value = shared_variable.get_value()
    return shared_floatx_zeros(template_value.shape,
                               name=name,
                               broadcastable=shared_variable.broadcastable,
                               **kwargs)
def shared_floatx_zeros(shape, **kwargs):
    r"""Create a shared variable array filled with zeros.

    Parameters
    ----------
    shape : tuple
        Shape of the array, as a tuple of integers.
    \*\*kwargs
        Forwarded to the :func:`shared_floatx` function.

    Returns
    -------
    :class:'tensor.TensorSharedVariable'
        A Theano shared variable filled with zeros.

    """
    zeros = numpy.zeros(shape)
    return shared_floatx(zeros, **kwargs)
def shared_floatx_nans(shape, **kwargs):
    r"""Create a shared variable array filled with nans.

    Parameters
    ----------
    shape : tuple
        Shape of the array, as a tuple of integers.
    \*\*kwargs
        Forwarded to the :func:`shared_floatx` function.

    Returns
    -------
    :class:'tensor.TensorSharedVariable'
        A Theano shared variable filled with nans.

    """
    # numpy.full(shape, nan) produces the same float64 nan array as the
    # original `nan * zeros(shape)` trick, in one allocation.
    return shared_floatx(numpy.full(shape, numpy.nan), **kwargs)
def shared_floatx(value, name=None, borrow=False, dtype=None, **kwargs):
    r"""Transform a value into a shared variable of type floatX.

    Parameters
    ----------
    value : :class:`~numpy.ndarray`
        The value to store in the Theano shared variable.
    name : :obj:`str`, optional
        Name for the shared variable. Defaults to `None`.
    borrow : :obj:`bool`, optional
        When True, `value` is not copied if that can be avoided, saving
        memory and time. Defaults to False.
    dtype : :obj:`str`, optional
        The `dtype` of the shared variable; :attr:`config.floatX` when
        not given.
    \*\*kwargs
        Forwarded to the :func:`~theano.shared` function.

    Returns
    -------
    :class:`tensor.TensorSharedVariable`
        A Theano shared variable with the requested value and `dtype`.

    """
    if dtype is None:
        dtype = theano.config.floatX
    array = theano._asarray(value, dtype=dtype)
    return theano.shared(array, name=name, borrow=borrow, **kwargs)
def shared_like(variable, name=None, **kwargs):
    r"""Construct a shared variable to hold the value of a tensor variable.

    Parameters
    ----------
    variable : :class:`~tensor.TensorVariable`
        The variable whose dtype and ndim determine those of the new
        shared variable (which starts out empty).
    name : :obj:`str` or :obj:`None`
        Name of the shared variable; derived from `variable`'s own name
        when not given.
    \*\*kwargs
        Forwarded to the :func:`~theano.shared` function.

    """
    variable = tensor.as_tensor_variable(variable)
    shared_name = "shared_{}".format(variable.name) if name is None else name
    empty = numpy.zeros((0,) * variable.ndim, dtype=variable.dtype)
    return theano.shared(empty, name=shared_name, **kwargs)
def reraise_as(new_exc):
"""Reraise an exception as a different type or with a message.
This function ensures that the original traceback is kept, making for
easier debugging.
Parameters
----------
new_exc : :class:`Exception` or :obj:`str`
The new error to be raised e.g. (ValueError("New message"))
or a string that will be prepended to the original exception
message
Notes
-----
Note that when reraising exceptions, the arguments of the original
exception are cast to strings and appended to the error message. If
you want to retain the original exception arguments, please use:
>>> try:
... 1 / 0
... except Exception as e:
... reraise_as(Exception("Extra information", *e.args))
Traceback (most recent call last):
...
Exception: 'Extra information, ...
Examples
--------
>>> class NewException(Exception):
... def __init__(self, message):
... super(NewException, self).__init__(message)
>>> try:
... do_something_crazy()
... except Exception:
... reraise_as(NewException("Informative message"))
Traceback (most recent call last):
...
NewException: Informative message ...
"""
orig_exc_type, orig_exc_value, orig_exc_traceback = sys.exc_info()
if isinstance(new_exc, six.string_types):
new_exc = orig_exc_type(new_exc)
if hasattr(new_exc, 'args'):
if len(new_exc.args) > 0:
# We add all the arguments to the message, to make sure that this
# information isn't lost if this exception is reraised again
new_message = ', '.join(str(arg) for arg in new_exc.args)
else:
new_message = ""
new_message += '\n\nOriginal exception:\n\t' + orig_exc_type.__name__
if hasattr(orig_exc_value, 'args') and len(orig_exc_value.args) > 0:
if getattr(orig_exc_value, 'reraised', False):
new_message += ': ' + str(orig_exc_value.args[0])
else:
new_message += ': ' + ', '.join(str(arg)
for arg in orig_exc_value.args)
new_exc.args = (new_message,) + new_exc.args[1:]
new_exc.__cause__ = orig_exc_value
new_e |
chrys87/fenrir | src/fenrirscreenreader/commands/vmenu-navigation/next_vmenu_entry.py | Python | lgpl-3.0 | 661 | 0.01059 | #!/bin/python
# -*- coding: utf-8 -*-
# Fenrir TTY screen reader
# By Chrys, Storm Dragon, and contributers.
from fenrirscreenreader.core import debug
class command():
    """Fenrir vmenu command: advance to and announce the next menu entry."""
    def __init__(self):
        pass

    def initialize(self, environment):
        # Keep a reference to the shared Fenrir runtime environment.
        self.env = environment

    def shutdown(self):
        pass

    def getDescription(self):
        return _('get next v menu entry')

    def run(self):
        # Move the vmenu cursor forward, then speak the selected entry.
        vmenu = self.env['runtime']['vmenuManager']
        vmenu.nextIndex()
        entry = vmenu.getCurrentEntry()
        self.env['runtime']['outputManager'].presentText(entry, interrupt=True)

    def setCallback(self, callback):
        pass
|
hackatbrown/2015.hackatbrown.org | hack-at-brown-2015/csv_import.py | Python | mit | 2,999 | 0.002668 | import csv
import logging
import re

import webapp2
from google.appengine.api import datastore_errors
from google.appengine.ext import blobstore
from google.appengine.ext.webapp import blobstore_handlers

import mentor
import models
class ImportPageHandler(blobstore_handlers.BlobstoreUploadHandler):
    # Admin page for bulk-importing people records from an uploaded CSV.
    def get(self):
        # Render a minimal upload form that posts directly to the blobstore.
        upload_url = blobstore.create_upload_url('/dashboard/upload_csv')
        html_string = """
        <form action="%s" method="POST" enctype="multipart/form-data">
        Upload File:
        <select name="kind">
        <option value="Mentor">Mentor</option>
        </select>
        <input type="file" name="file"> <br>
        <input type="submit" name="submit" value="Submit">
        </form>""" % upload_url
        self.response.write(html_string)
    def post(self):
        # Blobstore redirects here after storing the upload; import it.
        upload_files = self.get_uploads('file')  # 'file' is file upload field in the form
        kind = self.request.get('kind')
        blob_info = upload_files[0]
        process_csv(blob_info, kind)
        blobstore.delete(blob_info.key())  # optional: delete file after import
        self.redirect("/")
def process_csv(blob_info, kind):
    """Parse an uploaded CSV blob and create one entity per data row.

    The first row supplies the column headers; each following row is
    turned into a header->value dict and dispatched by *kind*.
    """
    blob_reader = blobstore.BlobReader(blob_info.key())
    reader = csv.reader(blob_reader, delimiter=',')
    headers = reader.next()
    for row in reader:
        record = dict(zip(headers, row))
        if kind == "Mentor":
            create_mentor(record)
def create_volunteer(person):
    """Create and persist a Volunteer entity from one CSV row dict."""
    volunteer = models.Volunteer()
    volunteer.name = person['Name']
    volunteer.email = person['Email']
    # Shirt sizes arrive as "<gender>-<size>", e.g. "M-XL".
    volunteer.shirt_gen, volunteer.shirt_size = person['T-shirt Size'].split('-')
    digits = re.sub('[^\d]', '', person['Phone Number'])
    if digits:
        volunteer.phone_number = digits
    # TODO - role?
    volunteer.put()
def create_mentor(person):
    """Create and persist a Mentor entity from one CSV row dict.

    When a Rep with the same email already exists, its stored phone
    number is reused; otherwise the CSV value is normalized to digits.
    Rows with invalid datastore values are logged and skipped.
    """
    try:
        m = mentor.Mentor()
        m.email = person['Email Address']
        existing = models.Rep.query(models.Rep.email == m.email).fetch()
        if not existing:
            pn = re.sub('[^\d]', '', person['Phone Number'])
        else:
            # BUG FIX: fetch() returns a list, so read the first match
            # instead of dereferencing the list itself.
            pn = existing[0].phone_number
        # Only keep plausibly valid 10- or 11-digit numbers.
        if len(pn) == 10 or len(pn) == 11:
            m.phone = pn
        m.name = person['Name']
        m.tags = person['Skills or Experience'].split(', ')
        m.role = person['Role at Company']
        m.availability = '?'
        m.details = '?'
        m.put()
    except datastore_errors.BadValueError as e:
        # BUG FIX: datastore_errors was never imported, so this handler
        # itself raised NameError (import added at top of file).
        logging.info(person)
def create_rep(person):
    """Create and persist a company Rep entity from one CSV row dict."""
    rep = models.Rep()
    rep.name = person['Name']
    rep.email = person['Email Address']
    pn = re.sub('[^\d]', '', person['Phone Number'])
    if pn:
        rep.phone_number = pn
    rep.company = person['Company']
    mensShirt = person["T-Shirt Size [Men's]"]
    womensShirt = person["T-Shirt Size [Women's]"]
    valid_sizes = ['XS', 'S', 'M', 'L', 'XL', 'XXL']
    if mensShirt in valid_sizes:
        rep.shirt_gen = 'M'
        rep.shirt_size = mensShirt
    elif womensShirt in valid_sizes:
        # BUG FIX: this branch previously recorded shirt_gen = 'M' for a
        # women's-cut shirt (copy-paste from the men's branch).
        rep.shirt_gen = 'W'
        rep.shirt_size = womensShirt
    rep.put()
|
yiliaofan/faker | faker/providers/address/sl_SI/__init__.py | Python | mit | 34,150 | 0.013566 | # coding=utf-8
from __future__ import unicode_literals
from .. import Provider as AddressProvider
class Provider(AddressProvider):
city_formats = ('{{city_name}}', )
street_name_formats = ('{{street_name}}', )
street_address_formats = ('{{street_name}} {{building_number}}', )
address_formats = ('{{street_address}}\n{{postcode}} {{city}}', )
building_number_formats = ('###', '##', '#', '#a', '#b', '#c')
postcode_formats = ('####', )
cities = (
"Ajdovščina", "Bled", "Bovec", "Brežice", "Celje", "Cerknica",
"Črnomelj", "Domžale", "Dravograd", "Gornja Radgona", "Gornji Grad",
"Grosuplje", "Hrastnik", "Idrija", "Ilirska Bistrica", "Izola",
"Jesenice", "Kamnik", "Kobarid", "Kočevje", "Koper",
"Kostanjevica na Krki", "Kranj", "Krško", "Laško",
"Lenart v Slovenskih goricah", "Lendava", "Litija", "Ljubljana",
"Ljutomer", "Logatec", "Maribor", "Medvode", "Mengeš", "Metlika",
"Mežica", "Murska Sobota", "Nova Gorica", "Novo mesto", "Ormož",
"Piran", "Postojna", "Prevalje", "Ptuj", "Radeče", "Radovljica",
"Ravne na Koroškem", "Ribnica", "Rogaška Slatina",
"Ruše", "Sevnica", "Sežana", "Slovenj Gradec", "Slovenska Bistrica",
"Slovenske Konjice", "Šempeter pri Gorici", "Šentjur", "Škofja Loka",
"Šoštanj", "Tolmin", "Trbovlje", "Trebnje", "Tržič", "Turnišče",
"Velenje", "Vipava", "Vipavski Križ", "Višnja Gora", "Vrhnika",
"Zagorje ob Savi", "Žalec", "Železniki", "Žiri",
)
streets = (
"Abramova ulica", "Adamičeva ulica", "Adamič-Lundrovo nabrežje",
"Ajdovščina", "Aleševa ulica", "Alešovčeva ulica",
"Aljaževa ulica", "Ambrožev trg", "Ameriška ulica",
"Andrićeva ulica", "Anžurjeva ulica", "Apihova ulica",
"Argentinska ulica", "Arharjeva cesta", "Arkova ulica",
"Artačeva ulica", "Aškerčeva cesta", "Avčinova ulica",
"Avsečeva ulica", "Avstrijska ulica", "Avšičeva cesta",
"Ažmanova ulica", "Babičeva ulica", "Badjurova ulica",
"Balinarska pot", "Baragova ulica", "Barjanska cesta",
"Bavdkova ulica", "Baznikova ulica", "Bazoviška ulica",
"Beethovnova ulica", "Belačeva ulica", "Beljaška ulica",
"Berčičeva ulica", "Berčonova pot", "Berdajsova ulica",
"Bernekerjeva ulica", "Bernikova ulica", "Betettova cesta",
"Bezenškova ulica", "Bežigrad", "Bičevje", "Bilečanska ulica",
"Bitenčeva ulica", "Bizjakova ulica", "Bizjanova ulica",
"Bizovški štradon", "Blasnikova ulica", "Blasov breg",
"Bleiweisova cesta", "Bobenčkova ulica", "Bobrova ulica",
"Bognarjeva pot", "Bohinjčeva ulica", "Bohoričeva ulica",
"Boletova ulica", "Bolgarska ulica", "Borovniška ulica",
"Borštnikov trg", "Borutova ulica", "Božičeva ulica",
"Brankova ulica", "Bratinova ulica", "Bratislavska cesta",
"Bratov Jakopičev ulica", "Bratov Kunovarjev ulica",
"Bravničarjeva ulica", "Brdnikova ulica", "Breg", "Bregarjeva ulica",
"Breznikova ulica", "Brglezov štradon", "Brilejeva ulica",
"Brodarjev trg", "Brodska cesta", "Burnikova ulica", "Cankarjev vrh",
"Cankarjevo nabrežje", "Carja Dušana ulica", "Celarčeva ulica",
"Celjska ulica", "Celovška cesta", "Cerkniška ulica",
"Cerutova ulica", "Cesta Andreja Bitenca", "Cesta Ceneta Štuparja",
"Cesta Dolomitskega odreda", "Cesta II. grupe odredov",
"Cesta Ljubljanske brigade", "Cesta na Bellevue", "Cesta na Bokalce",
"Cesta na Brinovec", "Cesta na Brod", "Cesta na Ježah",
"Cesta na Kope", "Cesta na Laze", "Cesta na Loko", "Cesta na Mesarico",
"Cesta na Ozare", "Cesta na Poljane", "Cesta na Prevoje",
"Cesta na Urh", "Cesta na Vrhovce", "Cesta slov. kmečkih uporov",
"Cesta Urške Zatlerjeve", "Cesta v Dvor", "Cesta v Gameljne",
"Cesta v Hrastje", "Cesta v hrib", "Cesta v Kleče", "Cesta v Kostanj",
"Cesta v Legarico", "Cesta v Mestni log", "Cesta v Pečale",
"Cesta v Prod", "Cesta v Rožno dolino", "Cesta v Šmartno",
"Cesta v Zeleni log", "Cesta v Zgornji log", "Cesta vstaje",
"Cesta 24. junija", "Cesta 25 talcev", "Cesta 27. aprila",
"Chengdujska cesta", "Chopinov prehod", "Cigaletova ulica",
"Cilenškova ulica", "Cimermanova ulica", "Cimpermanova ulica",
"Cizejeva ulica", "Clevelandska ulica", "Colnarjeva ulica",
"Cvetlična pot", "Čampova ulica", "Čanžekova ulica",
"Čargova ulica", "Čebelarska ulica", "Čehova ulica",
"Čepelnikova ulica", "Čepovanska ulica", "Čerinova ulica",
"Černigojeva ulica", "Černivčeva ulica", "Červanova ulica",
"Čevljarska ulica", "Čižmanova ulica", "Čopova ulica", "Črna pot",
"Črnuška cesta", "Črtomirova ulica", "Čučkova ulica",
"Dajnkova ulica", "Dalmatinova ulica", "Danile Kumarjeve ulica",
"Dečkova ulica", "Dečmanova ulica", "Dela | kova ulica",
"Demšarjeva cesta", "Derčeva ulica", "Dergančeva ulica",
"Dermotova ulica", "Detelova ulica", "Devinska ulica", "Devova ulica",
"Divjakova ulica", "Do proge", "Dobrajčeva ulica | ", "Dobrdobska ulica",
"Dolenjska cesta", "Dolgi breg", "Dolgi most", "Dolharjeva ulica",
"Dolinarjeva ulica", "Dolinškova ulica", "Dolničarjeva ulica",
"Dolomitska ulica", "Drabosnjakova ulica", "Draga", "Draveljska ulica",
"Dražgoška ulica", "Drenikov vrh", "Drenikova ulica",
"Dunajska cesta", "Dvojna ulica", "Dvorakova ulica", "Dvorni trg",
"Eipprova ulica", "Ellerjeva ulica", "Emonska cesta",
"Erbežnikova ulica", "Erjavčeva cesta", "Fabianijeva ulica",
"Fani Grumove ulica", "Ferberjeva ulica", "Filipičeva ulica",
"Flajšmanova ulica", "Flandrova ulica", "Forsterjeva ulica",
"Franketova ulica", "Frankopanska ulica", "Frenkova pot",
"Friškovec", "Funtkova ulica", "Fužinska cesta", "Gabrov trg",
"Gača", "Galičeva ulica", "Galjevica", "Gallusovo nabrežje",
"Gasilska cesta", "Gasparijeva ulica", "Gašperšičeva ulica",
"Gerbičeva ulica", "Gestrinova ulica", "Glavarjeva ulica",
"Gledališka stolba", "Glinška ulica", "Glinškova ploščad",
"Glonarjeva ulica", "Gmajnice", "Gobarska pot", "Godeževa ulica",
"Gola Loka", "Golarjeva ulica", "Goljarjeva pot", "Golouhova ulica",
"Goriška ulica", "Gorjančeva ulica", "Gorjupova ulica",
"Gornji Rudnik I", "Gornji Rudnik II", "Gornji Rudnik III",
"Gornji trg", "Goropečnikova ulica", "Gortanova ulica",
"Gospodinjska ulica", "Gosposka ulica", "Gosposvetska cesta",
"Govekarjeva ulica", "Gozdna pot", "Grablovičeva ulica",
"Gradišče", "Gradnikova ulica", "Grafenauerjeva ulica",
"Grajski drevored", "Grajzerjeva ulica", "Gramozna pot",
"Grassellijeva ulica", "Gregorčičeva ulica", "Gregorinova ulica",
"Grintovška ulica", "Grobeljca", "Grobeljska pot", "Groharjeva cesta",
"Groznikova ulica", "Grška ulica", "Grško", "Gruberjevo nabrežje",
"Grudnovo nabrežje", "Gubčeva ulica", "Gunceljska cesta",
"Gustinčarjeva ulica", "Gustinčičeva ulica", "Hacetova ulica",
"Hafnerjeva ulica", "Hajdrihova ulica", "Hauptmanca",
"Hladilniška pot", "Hladnikova cesta", "Hlebčeva ulica",
"Hotimirova ulica", "Hradeckega cesta", "Hranilniška ulica",
"Hribarjevo nabrežje", "Hribernikova ulica", "Hribovska pot",
"Hrvaška ulica", "Hrvatski trg", "Hubadova ulica", "Hudourniška pot",
"Idrijska ulica", "Igriška ulica", "Ilešičeva ulica",
"Ilovški štradon", "Industrijska cesta", "Ingličeva ulica",
"Italijanska ulica", "Izletniška ulica", "Ižanska cesta",
"Jakčeva ulica", "Jakhljeva ulica", "Jakopičev drevored",
"Jakopičevo sprehajališče", "Jakšičeva ulica", "Jalnova ulica",
"Jamova cesta", "Janežičeva cesta", "Janova ulica", "Janševa ulica",
"Jarčeva ulica", "Jarnikova ulica", "Jarše", "Jarška cesta",
"Javorškova ulica", "Jaz |
saurabh6790/frappe | frappe/desk/doctype/notification_log/notification_log.py | Python | mit | 4,285 | 0.027071 | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.desk.doctype.notification_settings.notification_settings import (is_notifications_enabled, is_email_notifications_enabled_for_type, set_seen_value)
class NotificationLog(Document):
	def after_insert(self):
		"""Push a realtime event, flag the inbox unseen, optionally email."""
		recipient = self.for_user
		frappe.publish_realtime('notification', after_commit=True, user=recipient)
		set_notifications_as_unseen(recipient)
		if is_email_notifications_enabled_for_type(recipient, self.type):
			send_notification_email(self)
def get_permission_query_conditions(for_user):
	"""Return a SQL condition limiting Notification Log reads to one user.

	Falls back to the session user when *for_user* is falsy; the
	Administrator gets no restriction (returns None).
	"""
	user = for_user or frappe.session.user
	if user == 'Administrator':
		return
	return '''(`tabNotification Log`.for_user = '{user}')'''.format(user=user)
def get_title(doctype, docname, title_field=None):
	"""Return the display title of a document.

	Looks up the doctype's configured title field when none is given;
	when the title field is "name", the docname itself is the title.
	"""
	if not title_field:
		title_field = frappe.get_meta(doctype).get_title_field()
	if title_field == "name":
		return docname
	return frappe.db.get_value(doctype, docname, title_field)
def get_title_html(title):
	"""Wrap *title* in the bold markup used for notification subjects."""
	return '<b class="subject-title">{0}</b>'.format(title)
def enqueue_create_notification(users, doc):
	"""Queue background creation of notification logs for *users*.

	During installation of a new site this would try to connect to
	Redis and break site creation, and a fresh install needs no
	notifications, so it is a no-op while `frappe.flags.in_install`
	is set. *users* may be a comma-separated string or an iterable.
	"""
	if frappe.flags.in_install:
		return
	doc = frappe._dict(doc)
	if isinstance(users, str):
		users = [part.strip() for part in users.split(',') if part.strip()]
	unique_users = list(set(users))
	frappe.enqueue(
		'frappe.desk.doctype.notification_log.notification_log.make_notification_logs',
		doc=doc,
		users=unique_users,
		now=frappe.flags.in_test
	)
def make_notification_logs(doc, users):
	"""Insert one Notification Log per eligible, enabled user."""
	from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
	for user in users:
		# Skip unknown or disabled accounts, and users who opted out.
		if not frappe.db.exists('User', {"email": user, "enabled": 1}):
			continue
		if not is_notifications_enabled(user):
			continue
		# Energy point notifications are pointless when the feature is off.
		if doc.type == 'Energy Point' and not is_energy_point_enabled():
			return
		log = frappe.new_doc('Notification Log')
		log.update(doc)
		log.for_user = user
		# Self-notifications are suppressed except for these types.
		if log.for_user != log.from_user or doc.type in ('Energy Point', 'Alert'):
			log.insert(ignore_permissions=True)
def send_notification_email(doc):
	"""Email the notification described by *doc* to its recipient."""
	# Energy point notifications without body text are not emailed.
	if doc.type == 'Energy Point' and doc.email_content is None:
		return
	from frappe.utils import get_url_to_form, strip_html
	link = get_url_to_form(doc.document_type, doc.document_name)
	frappe.sendmail(
		recipients=doc.for_user,
		subject=strip_html(doc.subject),
		template="new_notification",
		args={
			'body_content': doc.subject,
			'description': doc.email_content,
			'document_type': doc.document_type,
			'document_name': doc.document_name,
			'doc_link': link
		},
		header=[get_email_header(doc), 'orange'],
		now=frappe.flags.in_test
	)
def get_email_header(doc):
	"""Return the translated email header line for *doc*'s type."""
	name = doc.document_name
	headers = {
		'Default': _('New Notification'),
		'Mention': _('New Mention on {0}').format(name),
		'Assignment': _('Assignment Update on {0}').format(name),
		'Share': _('New Document Shared {0}').format(name),
		'Energy Point': _('Energy Point Update on {0}').format(name),
	}
	# Unknown types intentionally raise KeyError rather than mislabel.
	return headers[doc.type or 'Default']
@frappe.whitelist()
def mark_all_as_read():
	"""Mark every unread Notification Log of the session user as read."""
	unread = frappe.db.get_all(
		'Notification Log',
		filters={'read': 0, 'for_user': frappe.session.user}
	)
	names = [entry.name for entry in unread]
	if names:
		# Bulk update without touching the modified timestamp.
		frappe.db.set_value(
			'Notification Log', {'name': ['in', names]}, 'read', 1,
			update_modified=False
		)
@frappe.whitelist()
def mark_as_read(docname):
	"""Mark one Notification Log as read (modified timestamp untouched)."""
	if not docname:
		return
	frappe.db.set_value('Notification Log', docname, 'read', 1, update_modified=False)
@frappe.whitelist()
def trigger_indicator_hide():
	# Tell the session user's open browser tabs to hide the notification dot.
	frappe.publish_realtime('indicator_hide', user=frappe.session.user)
def set_notifications_as_unseen(user):
	"""Flag the user's Notification Settings as having unseen items.

	Users without a Notification Settings document are silently ignored.
	"""
	try:
		frappe.db.set_value('Notification Settings', user, 'seen', 0)
	except frappe.DoesNotExistError:
		return
|
tu-darmstadt-ros-pkg/hector_flexbe_behavior | hector_flexbe_states/src/hector_flexbe_states/StartCheck.py | Python | bsd-3-clause | 2,388 | 0.031407 | #!/usr/bin/env python
import rospy
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxyPublisher
from smach import CBState
class StartCheck(EventState):
    '''
    Trivial start-up check state.

    Returns the ``succeeded`` outcome on the first execution cycle; all
    life-cycle hooks are intentionally no-ops.

    <= succeeded    The check passed (always).
    '''

    def __init__(self):
        # Declare the single possible outcome with the superclass.
        super(StartCheck, self).__init__(outcomes=['succeeded'])

    def execute(self, userdata):
        # Called periodically while active; finish immediately.
        return 'succeeded'

    def on_enter(self, userdata):
        # Nothing to start when this state becomes active.
        pass

    def on_exit(self, userdata):
        # No running processes to stop.
        pass

    def on_start(self):
        # No resources or timers to initialise for this behavior.
        pass

    def on_stop(self):
        # No claimed resources to clean up.
        pass
|
javiercantero/streamlink | tests/test_plugin_bbciplayer.py | Python | bsd-2-clause | 1,639 | 0.004881 | import json
import logging
import unittest
from requests import Response, Request
from streamlink.compat import urlencode
from streamlink.plugins.bbciplayer import BBCiPlayer
class TestPluginBBCiPlayer(unittest.TestCase):
    """Unit tests for the BBC iPlayer plugin helpers."""
    def test_can_handle_url(self):
        matching = (
            "http://www.bbc.co.uk/iplayer/episode/b00ymh67/madagascar-1-island-of-marvels",
            "http://www.bbc.co.uk/iplayer/live/bbcone",
        )
        for url in matching:
            self.assertTrue(BBCiPlayer.can_handle_url(url))
        non_matching = (
            "http://www.tvcatchup.com/",
            "http://www.sportal.bg/sportal_live_tv.php?str=15",
            "http://www.bbc.co.uk/iplayer/",
        )
        for url in non_matching:
            self.assertFalse(BBCiPlayer.can_handle_url(url))
    def test_vpid_hash(self):
        self.assertEqual(
            "71c345435589c6ddeea70d6f252e2a52281ecbf3",
            BBCiPlayer._hash_vpid("1234567890")
        )
    def test_extract_nonce(self):
        mock_nonce = "mock-nonce-nse"
        state = json.dumps(dict(nonce=mock_nonce))
        inner_url = "http://example.com/?" + urlencode(dict(state=state))
        final = Response()
        final.request = Request('GET', "http://example.com/?" + urlencode(dict(goto=inner_url)))
        response = Response()
        # Pad the history so the nonce must come from the *last* redirect.
        response.history = [Response(), Response(), final]
        self.assertEqual(BBCiPlayer._extract_nonce(response), mock_nonce)
|
michalkurka/h2o-3 | h2o-py/tests/testdir_algos/grid/pyunit_grid_parallel_cv_error.py | Python | apache-2.0 | 1,164 | 0.007732 | import sys
import os
import random
sys.path.insert(1, os.path.join("..", "..", ".."))
import h2o
from tests import pyunit_utils
from h2o.grid.grid_search import H2OGridSearch
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def grid_parallel():
    """Parallel GBM grid search with an explicit fold column.

    Nine hyper-parameter combinations are requested, but cross-validation
    cannot run with min_rows=100 on this data, so only six models train.
    """
    train = h2o.import_file(path=pyunit_utils.locate("smalldata/iris/iris_wheader.csv"))
    folds = h2o.H2OFrame([[random.randint(0, 4)] for _ in range(train.nrow)])
    folds.set_names(["fold_assignment"])
    train = train.cbind(folds)
    hyper_parameters = {
        "ntrees": [1, 3, 5],
        "min_rows": [1, 10, 100],
    }
    print("GBM grid with the following hyper_parameters:", hyper_parameters)
    grid = H2OGridSearch(
        H2OGradientBoostingEstimator,
        hyper_params=hyper_parameters,
        parallelism=4
    )
    grid.train(x=list(range(4)), y=4, training_frame=train, fold_column="fold_assignment")
    assert grid is not None
    # only six models are trained, since CV is not possible with min_rows=100
    assert len(grid.model_ids) == 6
if __name__ == "__main__":
    # Executed directly: run under the pyunit harness.
    pyunit_utils.standalone_test(grid_parallel)
else:
    # Imported by the test runner: execute the test immediately.
    grid_parallel()
|
ZobairAlijan/osf.io | api_tests/nodes/serializers/test_serializers.py | Python | apache-2.0 | 4,035 | 0.001239 | # -*- coding: utf-8 -*-
from urlparse import urlparse
from nose.tools import * # flake8: noqa
from dateutil.parser import parse as parse_date
from tests.base import DbTestCase, ApiTestCase, assert_datetime_equal
from tests.utils import make_drf_request
from tests.factories import UserFactory, NodeFactory, RegistrationFactory, ProjectFactory
from framework.auth import Auth
from api.nodes.serializers import NodeSerializer
from api.registrations.serializers import RegistrationSerializer
from api.base.settings.defaults import API_BASE
class TestNodeSerializer(DbTestCase):
    """Unit tests for the JSON-API output of NodeSerializer."""
    def setUp(self):
        super(TestNodeSerializer, self).setUp()
        self.user = UserFactory()
    def test_node_serialization(self):
        """A child node serializes its attributes and relationship links."""
        parent = ProjectFactory(creator=self.user)
        node = NodeFactory(creator=self.user, parent=parent)
        req = make_drf_request()
        result = NodeSerializer(node, context={'request': req}).data
        data = result['data']
        assert_equal(data['id'], node._id)
        assert_equal(data['type'], 'nodes')
        # Attributes
        attributes = data['attributes']
        assert_equal(attributes['title'], node.title)
        assert_equal(attributes['description'], node.description)
        assert_equal(attributes['public'], node.is_public)
        assert_equal(attributes['tags'], [str(each) for each in node.tags])
        assert_equal(attributes['category'], node.category)
        assert_equal(attributes['registration'], node.is_registration)
        assert_equal(attributes['fork'], node.is_fork)
        assert_equal(attributes['collection'], node.is_folder)
        # Relationships
        relationships = data['relationships']
        assert_in('children', relationships)
        assert_in('contributors', relationships)
        assert_in('files', relationships)
        assert_in('parent', relationships)
        parent_link = relationships['parent']['links']['related']['href']
        # Compare only the path; scheme/host vary with the test server.
        assert_equal(
            urlparse(parent_link).path,
            '/{}nodes/{}/'.format(API_BASE, parent._id)
        )
        assert_in('registrations', relationships)
        # Not a fork, so forked_from is removed entirely
        assert_not_in('forked_from', relationships)
    def test_fork_serialization(self):
        """A fork exposes a forked_from relationship back to its source."""
        node = NodeFactory(creator=self.user)
        fork = node.fork_node(auth=Auth(user=node.creator))
        result = NodeSerializer(fork, context={'request': make_drf_request()}).data
        data = result['data']
        # Relationships
        relationships = data['relationships']
        forked_from = relationships['forked_from']['links']['related']['href']
        assert_equal(
            urlparse(forked_from).path,
            '/{}nodes/{}/'.format(API_BASE, node._id)
        )
class TestNodeRegistrationSerializer(DbTestCase):
    """Unit tests for the JSON-API output of RegistrationSerializer."""
    def test_serialization(self):
        """A registration serializes its dates and back-references."""
        user = UserFactory()
        req = make_drf_request()
        reg = RegistrationFactory(creator=user)
        result = RegistrationSerializer(reg, context={'request': req}).data
        data = result['data']
        assert_equal(data['id'], reg._id)
        assert_equal(data['type'], 'registrations')
        # Attributes
        attributes = data['attributes']
        # Compare as datetimes; string formats differ between layers.
        assert_datetime_equal(
            parse_date(attributes['date_registered']),
            reg.registered_date
        )
        assert_equal(attributes['retracted'], reg.is_retracted)
        # Relationships
        relationships = data['relationships']
        assert_in('registered_by', relationships)
        registered_by = relationships['registered_by']['links']['related']['href']
        assert_equal(
            urlparse(registered_by).path,
            '/{}users/{}/'.format(API_BASE, user._id)
        )
        assert_in('registered_from', relationships)
        registered_from = relationships['registered_from']['links']['related']['href']
        assert_equal(
            urlparse(registered_from).path,
            '/{}nodes/{}/'.format(API_BASE, reg.registered_from._id)
        )
|
Xcelled/cap-n-snap | host.py | Python | mit | 1,016 | 0.042323 | import loggingstyleadapter
log = loggingstyleadapter.getLogger(__name__)
from PyQt5.QtGui import QKeySequence
import hotkeys, plat
class Host:
	"""Application-side façade tying commands, hotkeys and storage together."""

	def __init__(self):
		pass

	def registerDestination(self, destination):
		# Placeholder until destination handling is implemented.
		print("Don't forget to implement me (registerDestination)")

	def registerCommand(self, name, callback, defaultHotkey=None):
		"""Register *callback* under *name*, binding a default hotkey if free."""
		registry = hotkeys.default
		registry.registerCommand(name, callback)
		if not (defaultHotkey and plat.Supports.hotkeys):
			return
		if registry.commandHasHotkey(name) or registry.hasHotkey(defaultHotkey):
			log.info('Not registering default hotkey for "{name}" (in use or already assigned)', name=name)
		else:
			registry.add(defaultHotkey, name)

	def getHotkeyForCommand(self, cmd):
		"""Return the key sequence bound to *cmd* (empty when unsupported)."""
		if not plat.Supports.hotkeys:
			return QKeySequence()
		return hotkeys.default.hotkeyForCommand(cmd)

	def store(self, data, type, **kwargs):
		''' Stores the given data '''
		print('Implement me (store) for cool stuff!')
hessler/udacity-courses | full_stack_foundations/vagrant/restaurant/database_setup.py | Python | mit | 3,008 | 0.001662 | """
This module provides database setup and configuration.
"""
from database_utils import establish_session
from models import Restaurant, MenuItem
SESSION = None
def create_data():
    """Open the DB session and seed it with prefab data when empty."""
    global SESSION
    SESSION = establish_session()
    # Only populate from prefab data when no restaurants exist yet.
    restaurants = SESSION.query(Restaurant).all()
    if len(restaurants) < 1:
        import prefab_db_data
        prefab_db_data.create_all_the_things(SESSION)
        restaurants = SESSION.query(Restaurant).all()
def update_ub_veggie_burger():
"""Function to update the price of the Veggie Burger at Urban Burger."""
if not SESSION:
create_dat | a()
# Reset price of Urban Burger's Veggie Burger
urban_burger = SESSION.query(Restaurant).filter_by(
name="Urban Burger"
).first() | or None
if urban_burger:
urban_burger_veggie_burger = SESSION.query(MenuItem).filter_by(
name="Veggie Burger", restaurant=urban_burger
).first() or None
urban_burger_veggie_burger.price = "$2.99"
SESSION.add(urban_burger_veggie_burger)
SESSION.commit()
print "Urban Burger's Veggie Burger price updated."
def update_all_veggie_burger_prices():
"""Function to update the price of all Veggie Burgers."""
if not SESSION:
create_data()
# Reset price of Urban Burger's Veggie Burger
veggie_burgers = SESSION.query(MenuItem).filter_by(
name="Veggie Burger"
)
# Reset other restaurant's veggie burger prices
for burger in veggie_burgers:
if burger.price != "$2.99":
burger.price = "$2.99"
SESSION.add(burger)
SESSION.commit()
print "All Veggie Burger prices updated."
def delete_spinach_ice_cream():
"""Function to delete Spinach Ice Cream from Auntie Ann's menu."""
if not SESSION:
create_data()
# Delete Spinach Ice Cream from Auntie Ann's Diner menu
auntie_anns = SESSION.query(Restaurant).filter_by(
name="Auntie Ann\'s Diner"
).first() or None
if auntie_anns:
spinach_ice_cream = SESSION.query(MenuItem).filter_by(
name="Spinach Ice Cream", restaurant=auntie_anns
).first() or None
if spinach_ice_cream:
SESSION.delete(spinach_ice_cream)
SESSION.commit()
spinach_ice_cream = SESSION.query(MenuItem).filter_by(
name="Spinach Ice Cream", restaurant=auntie_anns
).first() or None
print "Spinach Ice Cream removed from menu."
def print_all_restaurants():
"""Function to query and print out all restaurant names."""
if not SESSION:
create_data()
all_restaurants = SESSION.query(Restaurant).all()
print "Restaurants:"
for restaurant in all_restaurants:
print " - {}".format(restaurant.name)
# Run function to create data
if __name__ == '__main__':
create_data()
|
sevas/csxj-crawler | csxj/datasources/parser_tools/constants.py | Python | mit | 590 | 0 | from datetime import date, time
NO_TITLE = u"__NO_TITLE__"
NO_AUTHOR_NAME = 'None'
NO_CATEGORY_NAME = 'None'
NON_EXISTENT_ARTICLE_TITLE = 'NON_EXISTENT'
NO_DATE = date(1970, 01 | , 01)
NO_TIME = time(0, 0)
NO_URL = u"__NO_URL__"
UNFINISHED_TAG = u"unfinished"
GHOST_LINK_TAG = u"ghost link"
GHOST_LINK_TITLE = u"__GHOST | _LINK__"
GHOST_LINK_URL = u"__GHOST_LINK__"
PAYWALLED_CONTENT = u"__PAYWALLED__"
RENDERED_STORIFY_TITLE = u"__RENDERED_STORIFY__"
RENDERED_TWEET_TITLE = u"__RENDERED_TWEET__"
EMBEDDED_VIDEO_TITLE = u"__EMBEDDED_VIDEO_TITLE__"
EMBEDDED_VIDEO_URL = u"__EMBEDDED_VIDEO_URL__"
|
samsu/neutron | tests/unit/ml2/drivers/freescale/test_mechanism_fslsdn.py | Python | apache-2.0 | 11,225 | 0 | # Copyright (c) 2014 Freescale, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo.config import cfg
from neutron.extensions import portbindings
from neutron.plugins.ml2.drivers.freescale import mechanism_fslsdn
from neutron.tests import base
from neutron.tests.unit import test_db_plugin
"""Unit testing for Freescale SDN mechanism driver."""
class TestFslSdnMechDriverV2(test_db_plugin.NeutronDbPluginV2TestCase):
"""Testing mechanism driver with ML2 plugin."""
def setUp(self):
cfg.CONF.set_override('mechanism_drivers', ['fslsdn'], 'ml2')
def mocked_fslsdn_init(self):
# Mock CRD client, since it requires CRD service running.
self._crdclient = mock.Mock()
with mock.patch.object(mechanism_fslsdn.FslsdnMechanismDriver,
'initialize', new=mocked_fslsdn_init):
super(TestFslSdnMechDriverV2, self).setUp()
class TestFslSdnMechDriverNetworksV2(test_db_plugin.TestNetworksV2,
TestFslSdnMechDriverV2):
pass
class TestFslSdnMechDriverPortsV2(test_db_plugin.TestPortsV2,
TestFslSdnMechDriverV2):
VIF_TYPE = portbindings.VIF_TYPE_OVS
CAP_PORT_FILTER = True
class TestFslSdnMechDriverSubnetsV2(test_db_plugin.TestSubnetsV2,
TestFslSdnMechDriverV2):
pass
class TestFslSdnMechanismDriver(base.BaseTestCase):
"""Testing FSL SDN Mechanism driver."""
def setUp(self):
super(TestFslSdnMechanismDriver, self).setUp()
cfg.CONF.set_override('mechanism_drivers', ['fslsdn'], 'ml2')
self.driver = mechanism_fslsdn.FslsdnMechanismDriver()
self.driver.initialize()
self.client = self.driver._crdclient = moc | k.Mock()
def test_create_update_delete_network_postcommit(self):
"""Testing create/ | update/delete network postcommit operations."""
tenant_id = 'test'
network_id = '123'
segmentation_id = 456
expected_seg = [{'segmentation_id': segmentation_id}]
expected_crd_network = {'network':
{'network_id': network_id,
'tenant_id': tenant_id,
'name': 'FakeNetwork',
'status': 'ACTIVE',
'admin_state_up': True,
'segments': expected_seg}}
network_context = self._get_network_context(tenant_id, network_id,
segmentation_id)
network = network_context.current
segments = network_context.network_segments
net_id = network['id']
req = self.driver._prepare_crd_network(network, segments)
# test crd network dict
self.assertEqual(expected_crd_network, req)
# test create_network.
self.driver.create_network_postcommit(network_context)
self.client.create_network.assert_called_once_with(body=req)
# test update_network.
self.driver.update_network_postcommit(network_context)
self.client.update_network.assert_called_once_with(net_id, body=req)
# test delete_network.
self.driver.delete_network_postcommit(network_context)
self.client.delete_network.assert_called_once_with(net_id)
def test_create_update_delete_subnet_postcommit(self):
"""Testing create/update/delete subnet postcommit operations."""
tenant_id = 'test'
network_id = '123'
subnet_id = '122'
cidr = '192.0.0.0/8'
gateway_ip = '192.0.0.1'
expected_crd_subnet = {'subnet':
{'subnet_id': subnet_id, 'tenant_id': tenant_id,
'name': 'FakeSubnet', 'network_id': network_id,
'ip_version': 4, 'cidr': cidr,
'gateway_ip': gateway_ip,
'dns_nameservers': '',
'allocation_pools': '',
'host_routes': ''}}
subnet_context = self._get_subnet_context(tenant_id, network_id,
subnet_id, cidr, gateway_ip)
subnet = subnet_context.current
subnet_id = subnet['id']
req = self.driver._prepare_crd_subnet(subnet)
# test crd subnet dict
self.assertEqual(expected_crd_subnet, req)
# test create_subnet.
self.driver.create_subnet_postcommit(subnet_context)
self.client.create_subnet.assert_called_once_with(body=req)
# test update_subnet.
self.driver.update_subnet_postcommit(subnet_context)
self.client.update_subnet.assert_called_once_with(subnet_id, body=req)
# test delete_subnet.
self.driver.delete_subnet_postcommit(subnet_context)
self.client.delete_subnet.assert_called_once_with(subnet_id)
def test_create_delete_port_postcommit(self):
"""Testing create/delete port postcommit operations."""
tenant_id = 'test'
network_id = '123'
port_id = '453'
expected_crd_port = {'port':
{'port_id': port_id, 'tenant_id': tenant_id,
'name': 'FakePort', 'network_id': network_id,
'subnet_id': '', 'mac_address': 'aabb',
'device_id': '1234', 'ip_address': '',
'admin_state_up': True, 'status': 'ACTIVE',
'device_owner': 'compute',
'security_groups': ''}}
# Test with empty fixed IP
port_context = self._get_port_context(tenant_id, network_id, port_id)
port = port_context.current
req = self.driver._prepare_crd_port(port)
# Test crd port dict
self.assertEqual(expected_crd_port, req)
# test create_port.
self.driver.create_port_postcommit(port_context)
self.client.create_port.assert_called_once_with(body=req)
# Test delete_port
self.driver.delete_port_postcommit(port_context)
self.client.delete_port.assert_called_once_with(port['id'])
def test_prepare_port_with_single_fixed_ip(self):
"""Test _prepare_crd_port with single fixed_ip."""
tenant_id = 'test'
network_id = '123'
port_id = '453'
fips = [{"subnet_id": "sub-1", "ip_address": "10.0.0.1"}]
expected_crd_port = {'port':
{'port_id': port_id, 'tenant_id': tenant_id,
'name': 'FakePort', 'network_id': network_id,
'subnet_id': '', 'mac_address': 'aabb',
'device_id': '1234', 'ip_address': '',
'admin_state_up': True, 'status': 'ACTIVE',
'device_owner': 'compute',
'security_groups': ''}}
port_context = self._get_port_context(tenant_id, network_id, port_id,
fips)
port = port_context.current
req = self.driver._prepare_crd_port(port)
expected_crd_port['port']['subnet_id'] = 'sub-1'
expected_crd_port['port']['ip_address'] = '10.0.0.1'
self.assertEqual(expected_crd_port, req)
def test_prepare_port_with_multiple_fixed_ips(self):
"""Test _prepare_crd_port with multiple fixed_ips."""
tenant_id = 'test'
network_id = '123'
port_id = '453'
multiple_fips = [{"subnet_id": "s |
msincenselee/vnpy | vnpy/api/easytrader/server.py | Python | mit | 2,858 | 0 | import functools
from flask import Flask, jsonify, request
from . import api
from .log import logger
app = Flask(__name__)
global_store = {}
def error_handle(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
# pylint: disable=broad-except
except Exception as e:
logger.exception("server error")
message = "{}: {}".format(e.__class__, e)
return jsonify({"error": message}), 400
return wrapper
@app.route("/prepare", methods=["POST"])
@error_handle
def post_prepare():
json_data = request.get_json(force=True)
user = api.use(json_data.pop("broker"))
user.prepare(**json_data)
global_store["user"] = user
return jsonify({"msg": "login success"}), 201
@app.route("/balance", methods=["GET"])
@error_handle
def get_balance():
print('get balance')
user = global_store["user"]
balance = user.balance
return jsonify(balance), 200
@app.route("/position", methods=["GET"])
@error_handle
def get_position():
print('get position')
user = global_store["user"]
position = user.position
return jsonify(position), 200
@app.route("/auto_ipo", methods=["GET"])
@error_handle
def get_auto_ipo():
user = global_store["user"]
res = user.auto_ipo()
re | turn jsonify(res), 200
@app.route("/today_entrusts", methods=["GET"])
@error_handle
def get_today_entrusts():
user = global_store["user"]
today_entrusts = user.today_entrusts
return jsonify(today_entrusts), 200
@app.route("/today_trades", methods=["GET"])
@error_handle
def get_today_trades():
user = global_store["user"]
today_trades = user.today_trades
return jsonify(today_trades), 200
@app.route("/cancel_entrusts", methods=["GET"])
@error_handle
def get_cancel_entru | sts():
user = global_store["user"]
cancel_entrusts = user.cancel_entrusts
return jsonify(cancel_entrusts), 200
@app.route("/buy", methods=["POST"])
@error_handle
def post_buy():
json_data = request.get_json(force=True)
user = global_store["user"]
res = user.buy(**json_data)
return jsonify(res), 201
@app.route("/sell", methods=["POST"])
@error_handle
def post_sell():
json_data = request.get_json(force=True)
user = global_store["user"]
res = user.sell(**json_data)
return jsonify(res), 201
@app.route("/cancel_entrust", methods=["POST"])
@error_handle
def post_cancel_entrust():
json_data = request.get_json(force=True)
user = global_store["user"]
res = user.cancel_entrust(**json_data)
return jsonify(res), 201
@app.route("/exit", methods=["GET"])
@error_handle
def get_exit():
user = global_store["user"]
user.exit()
return jsonify({"msg": "exit success"}), 200
def run(port=1430):
app.run(host="0.0.0.0", port=port)
|
achang97/YouTunes | lib/python2.7/site-packages/botocore/docs/shape.py | Python | mit | 4,763 | 0 | # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
# NOTE: This class should not be instantiated and its
# ``traverse_and_document_shape`` method called directly. It should be
# inherited from a Documenter class with the appropriate methods
| # and attributes.
from botocore.utils import is_json_value_header
class ShapeDocumenter(object):
EVENT_NAME = ''
def __init__(self, service_name, operation_name, event_emitter,
context=None):
self._service_name = service_name
self | ._operation_name = operation_name
self._event_emitter = event_emitter
self._context = context
if context is None:
self._context = {
'special_shape_types': {}
}
def traverse_and_document_shape(self, section, shape, history,
include=None, exclude=None, name=None,
is_required=False):
"""Traverses and documents a shape
Will take a self class and call its appropriate methods as a shape
is traversed.
:param section: The section to document.
:param history: A list of the names of the shapes that have been
traversed.
:type include: Dictionary where keys are parameter names and
values are the shapes of the parameter names.
:param include: The parameter shapes to include in the documentation.
:type exclude: List of the names of the parameters to exclude.
:param exclude: The names of the parameters to exclude from
documentation.
:param name: The name of the shape.
:param is_required: If the shape is a required member.
"""
param_type = shape.type_name
if shape.name in history:
self.document_recursive_shape(section, shape, name=name)
else:
history.append(shape.name)
is_top_level_param = (len(history) == 2)
getattr(self, 'document_shape_type_%s' % param_type,
self.document_shape_default)(
section, shape, history=history, name=name,
include=include, exclude=exclude,
is_top_level_param=is_top_level_param,
is_required=is_required)
if is_top_level_param:
self._event_emitter.emit(
'docs.%s.%s.%s.%s' % (self.EVENT_NAME,
self._service_name,
self._operation_name,
name),
section=section)
at_overlying_method_section = (len(history) == 1)
if at_overlying_method_section:
self._event_emitter.emit(
'docs.%s.%s.%s.complete-section' % (self.EVENT_NAME,
self._service_name,
self._operation_name),
section=section)
history.pop()
def _get_special_py_default(self, shape):
special_defaults = {
'jsonvalue_header': '{...}|[...]|123|123.4|\'string\'|True|None',
'streaming_input_shape': 'b\'bytes\'|file',
'streaming_output_shape': 'StreamingBody()'
}
return self._get_value_for_special_type(shape, special_defaults)
def _get_special_py_type_name(self, shape):
special_type_names = {
'jsonvalue_header': 'JSON serializable',
'streaming_input_shape': 'bytes or seekable file-like object',
'streaming_output_shape': ':class:`.StreamingBody`'
}
return self._get_value_for_special_type(shape, special_type_names)
def _get_value_for_special_type(self, shape, special_type_map):
if is_json_value_header(shape):
return special_type_map['jsonvalue_header']
for special_type, marked_shape in self._context[
'special_shape_types'].items():
if special_type in special_type_map:
if shape == marked_shape:
return special_type_map[special_type]
return None
|
kkdang/synapsePythonClient | tests/unit/unit_test_DictObject.py | Python | apache-2.0 | 784 | 0.022959 | import os
from synapseclient.dict_object import DictObject
def setup():
print('\n')
print('~' * 60)
print(os.path.basename(__file__))
print('~' * 60)
def test_DictObject():
"""Test creation and property access on DictObjects"""
d = DictObject({'args_working?':'yes'}, a=123, b='foobar', nerds=['chris','jen','janey'])
assert d.a==123
assert d['a']==123
assert d.b=='foobar'
assert d['b']=='fooba | r'
assert d.nerds==['chris','jen','janey']
assert hasattr(d,'nerds')
assert d['nerds']==['chris','jen','janey']
assert not hasattr(d,'qwerqwer')
print(d.keys())
assert all([key in d.keys() for key in ['args_working?', 'a', 'b', 'nerds | ']])
print(d)
d.new_key = 'new value!'
assert d['new_key'] == 'new value!'
|
alberto-antonietti/nest-simulator | pynest/examples/one_neuron.py | Python | gpl-2.0 | 3,758 | 0 | # -*- coding: utf-8 -*-
#
# one_neuron.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
One neuron example
------------------
This script simulates a neuron driven by a constant external current
and records its membrane potential.
See Also
~~~~~~~~
:doc:`twoneurons`
"""
#######################################################################
# First, we import all necessary modules for simulation, analysis and
# plotting. Additionally, we set the verbosity to suppress info
# messages and reset the kernel.
# Resetting the kernel allows you to execute the script several
# times in a Python shell without interferences from previous NEST
# simulations. Thus, without resetting the kernel the network status
# including connections between nodes, status of neurons, devices and
# intrinsic time clocks, is kept and influences the next simulations.
import nest
import nest.voltage_trace
nest.set_verbosity("M_WARNING")
nest.ResetKernel()
#######################################################################
# Second, the nodes (neurons and devices) are created using ``Create``.
# We store the returned handles in variables for later reference.
# The ``Create`` function also allow you to create multiple nodes
# e.g. ``nest.Create('iaf_psc_alpha',5)``
# Also default parameters of the model can be configured using ``Create``
# by including a list of parameter dictionaries
# e.g. `nest.Create("iaf_psc_alp | ha", params=[{'I_e':376.0}])`.
# In this example we will configure these parameters in an additional
# step, which is explained in the third section.
neuron = nest.Create("iaf_psc_alpha")
voltmeter = nest.Create("voltmeter")
#######################################################################
# Third, the | neuron is configured using `SetStatus()`, which expects
# a list of node handles and a list of parameter dictionaries.
# In this example we use `SetStatus()` to configure the constant
# current input to the neuron.
neuron.I_e = 376.0
#######################################################################
# Fourth, the neuron is connected to the voltmeter. The command
# ``Connect`` has different variants. Plain ``Connect`` just takes the
# handles of pre- and post-synaptic nodes and uses the default values
# for weight and delay. Note that the connection direction for the voltmeter is
# reversed compared to the spike detector, because it observes the
# neuron instead of receiving events from it. Thus, ``Connect``
# reflects the direction of signal flow in the simulation kernel
# rather than the physical process of inserting an electrode into the
# neuron. The latter semantics is presently not available in NEST.
nest.Connect(voltmeter, neuron)
#######################################################################
# Now we simulate the network using ``Simulate``, which takes the
# desired simulation time in milliseconds.
nest.Simulate(1000.0)
#######################################################################
# Finally, we plot the neuron's membrane potential as a function of
# time.
nest.voltage_trace.from_device(voltmeter)
nest.voltage_trace.show()
|
ivannotes/luigi | test/import_test.py | Python | apache-2.0 | 2,285 | 0.000875 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from helpers import unittest
class ImportTest(unittest.TestCase):
def import_test(self):
"""Test that all module can be imported
"""
luigidir = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'..'
)
packagedir = os.path.join(luigidir, 'luigi')
for root, subdirs, files in os.walk(packagedir):
package = os.path.relpath(root, luigidir).replace('/', '.')
if '__init__.py' in files:
__import__(package)
for f in files:
if f.endswith('.py') and not f.startswith('_'):
__import__(package + '.' + f[:-3])
def import_luigi_test(self):
"""
Test that the top luigi package can be imported and contains the usual suspects.
"""
import luigi
# These should exist (if not, this will cause AttributeErrors)
expected = [
luigi.Event,
luigi.Config,
luigi.Task, luigi.ExternalTask, luigi.WrapperTask,
| luigi.Target, luigi.LocalTarget, luigi.File,
luigi.namespace,
luigi.RemoteScheduler,
luigi.RPCError,
luigi.run, luigi.build,
l | uigi.Parameter,
luigi.DateHourParameter, luigi.DateMinuteParameter, luigi.DateSecondParameter, luigi.DateParameter,
luigi.MonthParameter, luigi.YearParameter,
luigi.DateIntervalParameter, luigi.TimeDeltaParameter,
luigi.IntParameter, luigi.FloatParameter,
luigi.BooleanParameter, luigi.BoolParameter,
]
self.assertGreater(len(expected), 0)
|
PredictiveScienceLab/inverse-bgo | demos/catalysis/__init__.py | Python | mit | 124 | 0 | """
Initialize the module.
Author:
Panagiotis Tsilifis
| Date:
5/22/2014
"""
from _forward_model_dmnless imp | ort *
|
EthanBlackburn/sync-engine | inbox/util/addr.py | Python | agpl-3.0 | 857 | 0 | import rfc822
from flanker.addresslib import address
|
def canonicalize_address(addr):
"""Gmail addresses with and without peri | ods are the same."""
parsed_address = address.parse(addr, addr_spec_only=True)
if not isinstance(parsed_address, address.EmailAddress):
return addr
local_part = parsed_address.mailbox.lower()
hostname = parsed_address.hostname.lower()
if hostname in ('gmail.com', 'googlemail.com'):
local_part = local_part.replace('.', '')
return '@'.join((local_part, hostname))
def parse_mimepart_address_header(mimepart, header_name):
header_list_string = ', '.join(mimepart.headers.getall(header_name))
addresslist = rfc822.AddressList(header_list_string).addresslist
if len(addresslist) > 1:
# Deduplicate entries
return list(set(addresslist))
return addresslist
|
atjacobs/PeakUtils | peakutils/prepare.py | Python | mit | 970 | 0 | '''Data preparation / preprocessing algorithms.'''
import numpy as np
def scale(x, new_range=(0., 1.), eps=1e-9):
'''Changes the scale of an array
Parameters
----------
x : ndarray
1D array to change the scale (remains unchanged)
new_range : tuple (float, floa | t)
Desired range of the array
eps: float
Numerical precision, to detect degenerate cases (for example, when
every value of *x* is equal)
Returns
-------
ndarray
Scaled array
tuple (float, float)
Previous data range, allowing a rescale to the old range
'''
assert new_range[1] >= new_range[0]
range_ = (x.min(), x.max())
if (range_[1] - range | _[0]) < eps:
mean = (new_range[0] + new_range[1]) / 2.0
xp = np.full(x.shape, mean)
else:
xp = (x - range_[0])
xp *= (new_range[1] - new_range[0]) / (range_[1] - range_[0])
xp += new_range[0]
return xp, range_
|
rbaumg/trac | trac/tests/functional/tester.py | Python | bsd-3-clause | 18,151 | 0.00011 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2019 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
"""The :class:`FunctionalTester` object provides a higher-level interface to
working with a Trac environment to make test cases more succinct.
"""
import io
import re
from trac.tests.functional import internal_error
from trac.tests.functional.better_twill import tc, b
from trac.tests.contentgen import random_page, random_sentence, random_word, \
random_unique_camel
from trac.util.html import tag
from trac.util.text import to_utf8, unicode_quote
class FunctionalTester(object):
"""Provides a library of higher-level operations for interacting with a
test environment.
It makes assumptions such as knowing what ticket number is next, so
avoid doing things manually in a :class:`FunctionalTestCase` when you can.
"""
def __init__(self, url):
"""Create a :class:`FunctionalTester` for the given Trac URL and
Subversion URL"""
self.url = url
self.ticketcount = 0
# Connect, and login so we can run tests.
self.go_to_front()
self.login('admin')
def login(self, username):
"""Login as the given user"""
username = to_utf8(username)
tc.add_auth("", self.url, username, username)
self.go_to_front()
tc.find("Login")
tc.follow(r"\bLogin\b")
# We've provided authentication info earlier, so this should
# redirect back to the base url.
tc.find('logged in as[ \t\n]+<span class="trac-author-user">%s</span>'
% username)
tc.find("Logout")
tc.url(self.url)
tc.notfind(internal_error)
def logout(self):
"""Logout"""
tc.submit('logout', 'logout')
tc.notfind(internal_error)
tc.notfind('logged in as')
def create_ticket(self, summary=None, info=None):
"""Create a new (random) ticket in the test environment. Returns
the new ticket number.
:param summary:
may optionally be set to the desired summary
:param info:
may optionally be set to a dictionary of field value pairs for
populating the ticket. ``info['summary']`` overrides summary.
`summary` and `description` default to randomly-generated values.
"""
info = info or {}
self.go_to_front()
tc.follow(r"\bNew Ticket\b")
tc.notfind(internal_error)
if summary is None:
summary = random_sentence(5)
tc.formvalue('propertyform', 'field_summary', summary)
tc.formvalue('propertyform', 'field_description', random_page())
if 'owner' in info:
tc.formvalue('propertyform', 'action', 'assign')
tc.formvalue('propertyform',
'action_create_and_assign_reassign_owner',
info.pop('owner'))
for field, value in info.items():
tc.formvalue('propertyform', 'field_%s' % field, value)
tc.submit('submit')
tc.notfind(internal_error)
# we should be looking at the newly created ticket
tc.url(self.url + '/ticket/%s' % (self.ticketcount + 1))
# Increment self.ticketcount /after/ we've verified that the ticket
# was created so a failure does not trigger spurious later
# failures.
self.ticketcount += 1
return self.ticketcount
def quickjump(self, search):
"""Do a quick search to jump to a page."""
tc.formvalue('search', 'q', search)
tc.submit()
tc.notfind(internal_error)
def go_to_url(self, url):
tc.go(url)
tc.url(re.escape(url))
tc.notfind(internal_error)
def go_to_front(self):
"""Go to the Trac front page"""
self.go_to_url(self.url)
def go_to_ticket(self, ticketid=None):
"""Surf to the page for the given ticket ID, or to the NewTicket page
if `ticketid` is not specified or is `None`. If `ticketid` is
specified, it assumes the ticket exists."""
if ticketid is not None:
ticket_url = self.url + '/ticket/%s' % ticketid
else:
ticket_url = self.url + '/newticket'
self.go_ | to_url(ticket_url)
tc.url(ticket_url + '$')
def go_to_wiki(self, name, version=None):
"""Surf to the wiki page. By default this will be the latest version
of the page.
:param name: name of the wiki pa | ge.
:param version: version of the wiki page.
"""
# Used to go based on a quickjump, but if the wiki pagename isn't
# camel case, that won't work.
wiki_url = self.url + '/wiki/%s' % name
if version:
wiki_url += '?version=%s' % version
self.go_to_url(wiki_url)
def go_to_timeline(self):
"""Surf to the timeline page."""
self.go_to_front()
tc.follow(r"\bTimeline\b")
tc.url(self.url + '/timeline')
def go_to_view_tickets(self, href='report'):
"""Surf to the View Tickets page. By default this will be the Reports
page, but 'query' can be specified for the `href` argument to support
non-default configurations."""
self.go_to_front()
tc.follow(r"\bView Tickets\b")
tc.url(self.url + '/' + href.lstrip('/'))
def go_to_query(self):
"""Surf to the custom query page."""
self.go_to_front()
tc.follow(r"\bView Tickets\b")
tc.follow(r"\bNew Custom Query\b")
tc.url(self.url + '/query')
def go_to_admin(self, panel_label=None):
"""Surf to the webadmin page. Continue surfing to a specific
admin page if `panel_label` is specified."""
self.go_to_front()
tc.follow(r"\bAdmin\b")
tc.url(self.url + '/admin')
if panel_label is not None:
tc.follow(r"\b%s\b" % panel_label)
def go_to_roadmap(self):
"""Surf to the roadmap page."""
self.go_to_front()
tc.follow(r"\bRoadmap\b")
tc.url(self.url + '/roadmap')
def go_to_milestone(self, name):
"""Surf to the specified milestone page. Assumes milestone exists."""
self.go_to_roadmap()
tc.follow(r"\bMilestone: %s\b" % name)
tc.url(self.url + '/milestone/%s' % name)
def go_to_report(self, id, args=None):
"""Surf to the specified report.
Assumes the report exists. Report variables will be appended if
specified.
:param id: id of the report
:param args: may optionally specify a dictionary of arguments to
be encoded as a query string
"""
report_url = self.url + "/report/%s" % id
if args:
arglist = []
for param, value in args.items():
arglist.append('%s=%s' % (param.upper(), unicode_quote(value)))
report_url += '?' + '&'.join(arglist)
tc.go(report_url)
tc.url(report_url.encode('string-escape').replace('?', '\?'))
def go_to_preferences(self, panel_label=None):
"""Surf to the preferences page. Continue surfing to a specific
preferences panel if `panel_label` is specified."""
self.go_to_front()
tc.follow(r"\bPreferences\b")
tc.url(self.url + '/prefs')
if panel_label is not None:
tc.follow(r"\b%s\b" % panel_label)
def add_comment(self, ticketid, comment=None):
"""Adds a comment to the given ticket ID, assumes ticket exists."""
self.go_to_ticket(ticketid)
if comment is None:
comment = random_sentence()
tc.formvalue('propertyform', 'comment', comment)
tc.submit("submit")
# Verify we're where we're |
akrherz/iem | scripts/outgoing/wxc_azos_prec.py | Python | mit | 2,603 | 0 | """
Generate a Weather Central Formatted file of ASOS/AWOS Precip
"""
import os
import subprocess
import datetime
import psycopg2.extras
from pyiem.util import get_dbconn
from pyiem.network import Table as NetworkTable
IEM = get_dbconn("iem", user="nobody")
icursor = IEM.cursor(cursor_factory=psycopg2.extras.DictCursor)
COOP = get_dbconn("coop", user="nobody")
ccursor = COOP.cursor(cursor_factory=psycopg2.extras.DictCursor)
nt = NetworkTable(("IA_ASOS", "AWOS"))
def compute_climate(sts, ets):
sql = """
SELECT station, sum(gdd50) as cgdd,
sum(precip) as crain from climate WHERE valid >= '2000-%s' and
valid < '2000-%s' and gdd50 is not null GROUP by station
""" % (
sts.strftime("%m-%d"),
ets.strftime("%m-%d"),
)
ccursor.execute(sql)
data = {}
for row in ccursor:
data[row[0]] = row
return data
def compute_obs():
"""Compute the GS values given a start/end time and networks to look at"""
sql = """
SELECT
s.id, ST_x(s.geom) as lon, ST_y(s.geom) as lat,
sum(CASE WHEN
day = 'TODAY'::date and pday > 0
THEN pday ELSE 0 END) as p01,
sum(CASE WHEN
day IN ('TODAY'::date,'YESTERDAY'::date) and pday > 0
THEN pday ELSE 0 END) as p02,
sum(CASE WHEN
pday > 0
THEN pday ELSE 0 END) as p03
FROM
summary_%s c, stations s
WHERE
s.network in ('IA_ASOS', 'AWOS') and
s.iemid = c.iemid and
day IN ('TODAY'::date,'YESTERDAY'::date, 'TODAY'::date - '2 days'::interval)
GROUP by s.id, lon, lat
""" % (
datetime.date.today().year,
)
icursor.execute(sql)
data = {}
for row in icursor:
data[row["id"]] = row
return data
def main | ():
output = open("/tmp/wxc_airport_precip.txt", "w")
output.write(
"""Weather Central 001d0300 Surface Data TimeStamp=%s
6
4 Station
6 TODAY RAIN
6 DAY2 RAIN
6 DAY3 RAIN
6 Lat
8 Lon
"""
% (datetime.datetime.utcnow().strftime("%Y.%m.%d.%H%M"),)
)
data = compute_obs()
for sid, entry in data.items():
output.write(
("K%s %6.2f %6.2f %6.2f %6.3f % | 8.3f\n")
% (
sid,
entry["p01"],
entry["p02"],
entry["p03"],
entry["lat"],
entry["lon"],
)
)
output.close()
pqstr = "data c 000000000000 wxc/wxc_airport_precip.txt bogus text"
cmd = f"pqinsert -p '{pqstr}' /tmp/wxc_airport_precip.txt"
subprocess.call(cmd, shell=True)
os.remove("/tmp/wxc_airport_precip.txt")
if __name__ == "__main__":
main()
|
Opentrons/labware | api/src/opentrons/protocols/api_support/util.py | Python | apache-2.0 | 14,987 | 0.000067 | """ Utility functions and classes for the protocol api """
from collections import UserDict
import functools
import logging
from dataclasses import dataclass, field, astuple
from typing import (Any, Callable, Dict, Optional,
TYPE_CHECKING, Union, List, Set)
from opentrons import types as top_types
from opentrons.protocols.types import APIVersion
from opentrons.hardware_control import (types, SynchronousAdapter, API,
HardwareAPILike, ThreadManager)
if TYPE_CHECKING:
from opentrons.protocol_api.contexts import InstrumentContext
from opentrons.protocol_api.labware import Well, Labware
from opentrons.protocols.geometry.deck import Deck
from opentrons.hardware_control.dev_types import HasLoop # noqa (F501)
MODULE_LOG = logging.getLogger(__name__)
class APIVersionError(Exception):
    """Raised when a protocol attempts to access behavior not implemented."""
def _assert_gzero(val: Any, message: str) -> float:
try:
new_val = float(val)
assert new_val > 0.0
return new_val
except (TypeError, ValueError, AssertionError):
raise AssertionError(message)
@dataclass
class EdgeList:
    # Candidate touch-tip points around a well rim.  ``None`` for
    # right/left/center means that edge is skipped when the list is
    # flattened by build_edges().
    right: Optional[top_types.Point] = field(default_factory=top_types.Point)
    left: Optional[top_types.Point] = field(default_factory=top_types.Point)
    center: Optional[top_types.Point] = field(default_factory=top_types.Point)
    up: top_types.Point = field(default_factory=top_types.Point)
    down: top_types.Point = field(default_factory=top_types.Point)
def determine_edge_path(
        where: 'Well', mount: top_types.Mount,
        default_edges: EdgeList, deck: 'Deck') -> EdgeList:
    """Decide which of the default edges are safe to visit.

    When the pipette would have to reach across a module or deck edge
    (right pipette over the leftmost column, or left pipette over the
    rightmost column), the edge on the risky side is dropped.
    """
    labware = where.parent
    first_col = labware.columns()[0]
    last_col = labware.columns()[-1]
    right_pip = mount is top_types.Mount.RIGHT and where in first_col
    left_pip = mount is top_types.Mount.LEFT and where in last_col
    # Always ask the deck; mirrors the unconditional check in the
    # original control flow.
    unsafe = deck.is_edge_move_unsafe(mount, labware)
    drop_right = EdgeList(
        left=default_edges.left,
        right=None,
        center=default_edges.center,
        up=default_edges.up,
        down=default_edges.down)
    drop_left = EdgeList(
        left=None,
        right=default_edges.right,
        center=default_edges.center,
        up=default_edges.up,
        down=default_edges.down)
    # Slots 3/6/9 sit on the right rim of the deck, so a left pipette on
    # the last column always avoids the right edge there.
    if left_pip and (labware.parent in ['3', '6', '9'] or unsafe):
        return drop_right
    if right_pip and unsafe:
        return drop_left
    return default_edges
def build_edges(
        where: 'Well', offset: float, mount: top_types.Mount,
        deck: 'Deck', radius: float = 1.0,
        version: APIVersion = APIVersion(2, 7)) -> List[top_types.Point]:
    """Assemble the ordered list of touch-tip points for *where*.

    Points sit on the well's top plane (z=1), offset vertically by
    *offset*; ``None`` entries are filtered out of the result.
    """
    shift = top_types.Point(0, 0, offset)

    def _rim(x: float, y: float) -> top_types.Point:
        # A point on the well rim, scaled to the unit circle and shifted.
        return where._from_center_cartesian(x=x, y=y, z=1) + shift

    candidates = EdgeList(
        right=_rim(radius, 0),
        left=_rim(-radius, 0),
        center=_rim(0, 0),
        up=_rim(0, radius),
        down=_rim(0, -radius),
    )
    if version < APIVersion(2, 4):
        # Pre-2.4 behavior never visited the center point and skipped
        # the deck-aware edge filtering entirely.
        candidates.center = None
    else:
        candidates = determine_edge_path(where, mount, candidates, deck)
    return [pt for pt in astuple(candidates) if pt]
def labware_column_shift(
        initial_well: 'Well', tiprack: 'Labware',
        well_spacing: int = 4,
        modulo_value: int = 8) -> 'Well':
    """Return the well *well_spacing* columns to the right of *initial_well*.

    The flat well index is decomposed into (column, row) assuming
    *modulo_value* wells per column (8 for a standard 96-well rack).
    """
    flat_index = tiprack.wells().index(initial_well)
    col, row = divmod(flat_index, modulo_value)
    return tiprack.columns()[col + well_spacing][row]
def first_parent(loc: 'top_types.LocationLabware') -> Optional[str]:
    """ Return the topmost parent of this location. It should be
    either a string naming a slot or a None if the location isn't
    associated with a slot """
    # Track every node visited on THIS walk so a parent cycle raises
    # RuntimeError instead of recursing forever.
    #
    # BUGFIX: the old code recursed through first_parent() itself, which
    # rebuilt a fresh ``seen`` set on every hop, so cycles were never
    # detected and a cyclic chain died with RecursionError.
    seen: Set['top_types.LocationLabware'] = set()

    def _fp_recurse(location: 'top_types.LocationLabware'):
        if location in seen:
            raise RuntimeError('Cycle in labware parent')
        seen.add(location)
        if location is None or isinstance(location, str):
            return location
        return _fp_recurse(location.parent)

    return _fp_recurse(loc)
class FlowRates:
    """ Utility class for rich setters/getters for flow rates

    All reads and writes delegate to the owning instrument's hardware
    pipette config, so values always reflect the live hardware state.
    """
    def __init__(self,
                 instr: 'InstrumentContext') -> None:
        # Back-reference to the instrument whose flow rates we expose.
        self._instr = instr
    def set_defaults(self, api_level: APIVersion):
        # Each default table maps API-version strings to ul/s values;
        # pick the entry appropriate for the requesting protocol version.
        self.aspirate = _find_value_for_api_version(
            api_level, self._instr.hw_pipette['default_aspirate_flow_rates'])
        self.dispense = _find_value_for_api_version(
            api_level, self._instr.hw_pipette['default_dispense_flow_rates'])
        self.blow_out = _find_value_for_api_version(
            api_level, self._instr.hw_pipette['default_blow_out_flow_rates'])
    @property
    def aspirate(self) -> float:
        """Aspirate flow rate in ul/s, read from the hardware config."""
        return self._instr.hw_pipette['aspirate_flow_rate']
    @aspirate.setter
    def aspirate(self, new_val: float):
        # Validation happens in _assert_gzero; the hardware layer stores it.
        self._instr._hw_manager.hardware.set_flow_rate(
            mount=self._instr._mount,
            aspirate=_assert_gzero(
                new_val, 'flow rate should be a numerical value in ul/s'))
    @property
    def dispense(self) -> float:
        """Dispense flow rate in ul/s, read from the hardware config."""
        return self._instr.hw_pipette['dispense_flow_rate']
    @dispense.setter
    def dispense(self, new_val: float):
        self._instr._hw_manager.hardware.set_flow_rate(
            mount=self._instr._mount,
            dispense=_assert_gzero(
                new_val, 'flow rate should be a numerical value in ul/s'))
    @property
    def blow_out(self) -> float:
        """Blow-out flow rate in ul/s, read from the hardware config."""
        return self._instr.hw_pipette['blow_out_flow_rate']
    @blow_out.setter
    def blow_out(self, new_val: float):
        self._instr._hw_manager.hardware.set_flow_rate(
            mount=self._instr._mount,
            blow_out=_assert_gzero(
                new_val, 'flow rate should be a numerical value in ul/s'))
def _find_value_for_api_version(for_version: APIVersion,
                                values: Dict[str, float]) -> float:
    """
    Parse a dict that looks like
    {"2.0": 5,
     "2.5": 4}
    (aka the flow rate values from pipette config) and return the value for
    the highest api level that is at or underneath ``for_version``
    """
    # Sort the parsed version keys directly instead of building a
    # throwaway dict just to iterate its keys in order.
    sorted_versions = sorted(APIVersion.from_string(k) for k in values)
    # Fall back to the lowest known version when ``for_version``
    # predates every entry.
    # NOTE(review): like the original, this relies on str(APIVersion)
    # round-tripping to the original dict key -- confirm in APIVersion.
    last = values[str(sorted_versions[0])]
    for version in sorted_versions:
        if version > for_version:
            break
        last = values[str(version)]
    return last
class PlungerSpeeds:
""" Utility class for rich setters/getters for speeds """
def __init__(self,
instr: 'InstrumentContext') -> None:
self._instr = instr
@property
def aspirate(self) -> float:
return self._instr.hw_pipette['aspirate_speed']
@aspirate.setter
def aspirate(self, new_val: float):
self._instr._hw_manager.hardware.set_pipette_speed(
mount=self._instr._mount,
aspirate=_assert_gzero(
new_val, 'speed should be a numerical value in mm/s'))
@property
def dispense(self) -> float:
return self._instr.hw_pipette['dispense_speed']
@dispense.setter
def dispense(self, new_val: float):
self._instr._hw_manager.hardware.set_pipette_speed(
mount=self._instr._mount,
dispense=_assert_gzero(
new_val, 'speed should be a nume |
maggotgdv/fofix | src/Theme.py | Python | gpl-2.0 | 126,381 | 0.012352 | #####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire X (FoFiX) #
# Copyright (C) 2006 Sami Kyöstilä #
# 2008 myfingershurt #
# 2008 Blazingamer #
# 2008 evilynux <evilynux@gmail.com> #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
import Log
import Version
import os
import sys
import imp
import Config
import Song
from OpenGL.GL import *
from OpenGL.GLU import *
import string
import math
from Language import _
from Shader import shaders
from Task import Task
from constants import *
#Theme Constants.
# Instrument "type" codes grouped by family (guitar-like, drums, mic).
GUITARTYPES = [0, 1, 4]
DRUMTYPES = [2, 3]
MICTYPES = [5]
# Fallback values for theme variables a theme does not define; consulted
# by Theme.__getattr__ on first access.
defaultDict = {}
# Factories for theme sub-objects, looked up lazily by Theme.__getattr__
# when a theme does not supply its own.
classNames = {'setlist': lambda x: Setlist(x), 'themeLobby': lambda x: ThemeLobby(x), 'partDiff': lambda x: ThemeParts(x)}
def halign(value, default='center'):
    """Map an alignment name ('left'/'center'/'right') to its constant.

    Unknown names log a warning and fall back to *default*.
    """
    mapping = {'left': LEFT, 'center': CENTER, 'right': RIGHT}
    key = value.lower()
    if key in mapping:
        return mapping[key]
    Log.warn('Invalid horizontal alignment value - defaulting to %s' % default)
    return halign(default)
def valign(value, default='middle'):
    """Map a vertical alignment name to its constant.

    'center' is accepted as a deprecated alias for 'middle'; unknown
    names log a warning and fall back to *default*.
    """
    key = value.lower()
    if key == 'center':
        Log.notice('Use of "center" for vertical alignment is deprecated. Use "middle" instead.')
    mapping = {'top': TOP,
               'middle': MIDDLE,  # HTML/CSS terminology
               'center': MIDDLE,  # deprecated alias
               'bottom': BOTTOM}
    if key in mapping:
        return mapping[key]
    Log.warn('Invalid vertical alignment value - defaulting to %s' % default)
    return valign(default)
class Theme(Task):
    def __getattr__(self, attr):
        # Only called when normal attribute lookup fails, i.e. the theme
        # did not define this variable itself.
        try: #getting to this function is kinda slow. Set it on the first get to keep renders from lagging.
            object.__getattribute__(self, '__dict__')[attr] = defaultDict[attr]
            Log.debug("No theme variable for %s - Loading default..." % attr)
            return object.__getattribute__(self, attr)
        except KeyError:
            # No plain default value; maybe it is one of the lazily
            # constructed theme sub-objects (setlist, themeLobby, partDiff).
            if attr in classNames.keys():
                Log.warn("No theme class for %s - Loading default..." % attr)
                object.__getattribute__(self, '__dict__')[attr] = classNames[attr](self)
                return object.__getattribute__(self, attr)
            elif attr.startswith('__') and attr.endswith('__'): #for object's attributes (eg: __hash__, __eq__)
                return object.__getattribute__(self, attr)
            # No default at all: log the failure and fall through,
            # implicitly returning None.
            Log.error("Attempted to load theme variable %s - no default found." % attr)
def __init__(self, path, name):
self.name = name
self.path = path
self.themePath = os.path.join(Version.dataPath(),"themes", name)
if not os.path.exists(self.themePath):
Log.warn("Theme: %s does not exist!\n" % self.themePath)
name = Config.get("coffee", "themename")
Log.notice("Theme: Attempting fallback to default theme \"%s\"." % name)
self.themePath = os.path.join(Version.dataPath(),"themes", name)
if not os.path.exists(self.themePath):
Log.error("Theme: %s does not exist!\nExiting.\n" % self.themePath)
sys.exit(1)
if os.path.exists(os.path.join(self.themePath, "theme.ini")):
self.config = Config.MyConfigParser()
self.config.read(os.path.join(self.themePath, "theme.ini"))
Log.debug("theme.ini loaded")
else:
self.config = None
Log.debug("no theme.ini")
def get(value, type = str, default = None):
if self.config:
| if self.config.has_option("theme", value):
if type == bool:
return isTrue(self.config.get("theme", value).lower())
elif type == "color":
| return self.hexToColor(self.config.get("theme", value))
else:
return type(self.config.get("theme", value))
if type == "color":
return self.hexToColor(default)
return default
#These colors are very important
#background_color defines what color openGL will clear too
# (the color that shows then no image is present)
#base_color is the default color of text in menus
#selected_color is the color of text when it is selected
# (like in the settings menu or when selecting a song)
self.backgroundColor = get("background_color", "color", "#000000")
self.baseColor = get("base_color", "color", "#FFFFFF")
self.selectedColor = get("selected_color", "color", "#FFBF00")
#notes that are not textured are drawn in 3 parts (Mesh, Mesh_001, Mesh_002, and occasionally Mesh_003)
#The color of mesh is set by mesh_color (on a GH note this is the black ring)
#The color of the Mesh_001 is the color of the note (green, red, yellow, etc)
#Mesh_002 is set by the hopo_color but if Mesh_003 is present it will be colored spot_color
#When Mesh_003 is present it will be colored hopo_color
self.meshColor = get("mesh_color", "color", "#000000")
self.hopoColor = get("hopo_color", "color", "#00AAAA")
self.spotColor = get("spot_color", "color", "#FFFFFF")
#keys when they are not textured are made of three parts (Mesh, Key_001, Key_002),
#two of which can be colored by the CustomTheme.py or the Theme.ini (Mesh, Mesh_002).
#These will only work if the object has a Glow_001 mesh in it, else it will render
#the whole object the color of the fret
#Key_001 is colored by key_color, Key_002 is colored by key2_color, pretty obvious, eh?
self.keyColor = get("key_color", "color", "#333333")
self.key2Color = get("key2_color", "color", "#000000")
#when a note is hit a glow will show aside from the hitflames, this has been around
#since the original Frets on Fire. What glow_color allows you to do is set it so
#the glow is either the color of the fret it's over or it can be the color the image
#actually is (if the image is white then no matter what key is hit the glow will be white)
self.hitGlowColor = get("hit_glow_color", str, "frets")
if not self.hitGlowColor == "frets":
self.hitGlowColor = self.hexToColor(self.hitGlowColor)
#Sets the color of the glow.png
self.glowColor = get("glow_color", str, "frets")
if not self.glowColor == "frets":
self.glowColor = self.hexToColor(self.glowColor)
#Acts similar to the glowColor but its does so for flames instead
self.flamesColor = get("flames_color", |
JanHelbling/mitmprotector | bin/mitmprotector.py | Python | gpl-3.0 | 27,675 | 0.00271 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# mitmprotector.py - protect's you from any kind of MITM-attacks.
#
# Copyright (C) 2020 by Jan Helbling <jh@jan-helbling.ch>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Gene | ral Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the G | NU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
try:
from os import popen, getuid, path, fork, execvp, wait, unlink, chmod, getpid, kill
from sys import exit
from time import sleep
from logging import info, warning, critical, basicConfig, DEBUG
from re import findall, compile
from struct import pack
from socket import inet_ntoa
from uuid import getnode
from signal import signal, SIGTERM
from optparse import OptionParser
import configparser
except ImportError as e:
print("ImportError: {0}".format(e.msg))
exit(1)
try:
import daemon, daemon.pidfile
except ImportError:
print("You must install python3-daemon to run this programm!")
print("Ubuntu: sudo apt-get install python3-daemon")
print("ArchLinux: sudo pacman -S python-daemon")
print("Fedora: sudo dnf install python3-daemon")
exit(1)
try:
import lockfile
except ImportError:
print("You must install python3-lockfile to run this programm!")
print("Ubuntu: sudo apt-get install python3-lockfile")
print("ArchLinux: sudo pacman -S python-lockfile")
print("Fedora: sudo dnf install python3-lockfile")
exit(1)
# Matches dotted-quad IPv4 addresses in arp output.
ip_regex = compile(r"\d+\.\d+\.\d+\.\d+")
# Matches colon-separated MAC addresses.
mac_regex = compile(
    r"[A-Za-z0-9]+:[A-Za-z0-9]+:[A-Za-z0-9]+:[A-Za-z0-9]+:[A-Za-z0-9]+:[A-Za-z0-9]+"
)
# Filesystem locations and program identity.
config_path = "/etc/mitmprotector.conf"
log_path = "/var/log/mitmprotector.log"
pid_file = "/var/run/mitmprotector.pid"
prog_name = "mitmprotector.py"
version = "29"
# Shared pidfile lock, released by the class on shutdown paths.
pf = daemon.pidfile.PIDLockFile(pid_file)
# Module-level flag; presumably flipped once arptables rules are
# installed (see __arptable_firewall__) -- confirm in the full source.
arptables_used = False
class mitmprotector(object):
"""mitmprotector.py - protect's you from any kind of MITM-attacks."""
global pf, arptables_used
    def __init__(self):
        """Set up logging and signals, then run the protector main loop."""
        # Append to the shared logfile with timestamped entries.
        basicConfig(
            filename=log_path,
            filemode="a",
            level=DEBUG,
            format="%(asctime)s - %(levelname)s - %(message)s",
            datefmt="%d.%m.%Y - %H:%M:%S",
        )
        info("=> mitmprotector started!")
        print("=> mitmprotector started!")
        self.devices = []
        self.counter = 0
        # SIGTERM gets the same cleanup as Ctrl-C below.
        signal(SIGTERM, self.__sigterm_handler__)
        try:
            self.__run__()
        except KeyboardInterrupt:
            # Manual interrupt: release the pidfile lock, undo firewall
            # rules, and log the shutdown.
            if pf.is_locked():
                pf.release()
            self.__remove_firewall__()
            info("=> mitmprotector ended!")
            print("=> mitmprotector ended!")
    def __sigterm_handler__(self, a, b):
        """SIGTERM handler: undo firewall rules, drop the pidfile lock, exit.

        ``a`` and ``b`` are the (signum, frame) pair the signal module
        passes to every handler; both are unused.
        """
        self.__remove_firewall__()
        if pf.is_locked():
            pf.release()
        exit(0)
def __get_hw_addr__(self):
return ":".join(findall("..", "%012x" % getnode()))
def __read_config__(self):
print("=> Loading configuration oddments =)")
config = configparser.RawConfigParser()
if not path.exists(config_path):
info("Creating new configfile: {}.".format(config_path))
print("Creating new configfile: {}.".format(config_path))
config.add_section("attack")
config.set(
"attack",
"exec",
'/usr/bin/notify-send "MITM-Attack" "from IP: {0} MAC: {1}" -u critical -t 3000 -c "Security"',
)
config.set("attack", "interface", "wlan0")
config.set("attack", "put-interface-down", "1")
config.set("attack", "shutdown-interface-command", "ifconfig {0} down")
config.add_section("arp-scanner")
config.set("arp-scanner", "timeout", "5")
config.set("arp-scanner", "command", "arp -an")
with open(config_path, "w") as configfile:
config.write(configfile)
configfile.close()
print("==> First execution <==")
print("Created configurationfile {}!".format(config_path))
print("You need to edit it before run {}!".format(prog_name))
info("==> First execution <==")
info("Created configurationfile {}!".format(config_path))
info("You need to edit it before run {}!".format(prog_name))
if pf.is_locked():
pf.release()
exit(0)
info("Reading configfile {}.".format(config_path))
if config.read(config_path) != [config_path]:
critical("Could not read config {}!".format(config_path))
critical("Shutting down mitmprotector.")
print("Could not read config {}!".format(config_path))
print("Shutting down mitmprotector.")
if pf.is_locked():
pf.release()
exit(1)
try:
self.exec_cmd = config.get("attack", "exec")
if not "{0}" in self.exec_cmd or not "{1}" in self.exec_cmd:
critical("IP: {0} and MAC: {1} not in [attack]->exec")
critical("Shutting down mitmprotector.")
print("IP: {0} and MAC: {1} not in [attack]->exec")
print("Shutting down mitmprotector.")
if pf.is_locked():
pf.release()
exit(1)
self.interface = config.get("attack", "interface")
self.putinterfacedown = bool(config.get("attack", "put-interface-down"))
self.shutdown_iface_cmd = config.get("attack", "shutdown-interface-command")
if not "{0}" in self.shutdown_iface_cmd:
critical("Interface {0} not in [attack]->shutdown-interface-command5")
critical("Shutting down mitmprotector.")
print("Interface {0} not in [attack]->shutdown-interface-command")
print("Shutting down mitmprotector.")
if pf.is_locked():
pf.release()
exit(1)
self.scan_timeout = float(config.get("arp-scanner", "timeout"))
self.arp_command = config.get("arp-scanner", "command")
except configparser.NoSectionError as e:
critical("Could not read config {}: {}.".format(config_path, e))
critical("Shutting down mitmprotector.")
print("Could not read config {}: {}.".format(config_path, e))
print("Shutting down mitmprotector.")
if pf.is_locked():
pf.release()
exit(1)
except configparser.NoOptionError as e:
critical("Could not read config {}: {}.".format(config_path, e.message))
critical("Shutting down mitmprotector.")
print("Could not read config {}: {}.".format(config_path, e.message))
print("Shutting down mitmprotector.")
if pf.is_locked():
pf.release()
exit(1)
except ValueError as e:
critical(
"Could not read floatvalue [arp-scanner]->timeout: {}".format(e.message)
)
critical("Shutting down mitmprotector.")
print(
"Could not read floatvalue [arp-scanner]->timeout: {}".format(e.message)
)
print("Shutting down mitmprotector.")
if pf.is_locked():
pf.release()
exit(1)
def __arptable_firewall__(self):
self.routerip = self.__getrouterip__()
if popen("arptables --help 2>/dev/null").read() == "":
print('Command "arptables" not found!!! Could not create a firewall!!!')
critical('Command "arptables" not found!!! Co |
antoinedube/numeric-cookiecutter | docs/conf.py | Python | gpl-3.0 | 9,486 | 0.005799 | #!/usr/bin/python
# coding: utf8
#
# cookiecutter-py documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
from setup import get_distribution_info
project_metadata = get_distribution_info()
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.5'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
| 'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
"sphinx_autodoc_typehints",
]
# Add any paths t | hat contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = project_metadata["name"]
author = project_metadata["author"]
copyright = '2016, {!s}'.format(author)
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = project_metadata["version"]
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'cookiecutter-pydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'cookiecutter-py.tex', 'cookiecutter-py Documentation',
'Charles Bouchard-Légaré', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
|
ACS-Community/ACS | LGPL/CommonSoftware/acspycommon/test/acspyTestEpochHelper.py | Python | lgpl-2.1 | 9,573 | 0.042098 | #!/usr/bin/env python
# @(#) $Id: acspyTestEpochHelper.py,v 1.1.1.1 2012/03/07 17:40:45 acaproni Exp $
#
# Copyright (C) 2001
# Associated Universities, Inc. Washington DC, USA.
#
# Produced for the ALMA project
#
# This library is free software; you can redistribute it and/or modify it
# under
# the terms of the GNU Library General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Library General Public License for
# more
# details.
#
# You should have received a copy of the GNU Library General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc.,
# 675 Massachusetts Ave, Cambridge, MA 02139, USA. Correspondence concerning
# ALMA should be addressed as follows:
#
# Internet email: alma-sw-admin@nrao.edu
'''
'''
#--REGULAR IMPORTS-------------------------------------------------------------
import acstime
from Acspy.Common.TimeHelper import getTimeStamp
from Acspy.Common.EpochHelper import EpochHelper
from Acspy.Common.DurationHelper import DurationHelper
#------------------------------------------------------------------------------
print "DWF...need a real test here!!!"
# format string using all possible
allOut = "%G %g %x\n%Y %y %m %h %j %d %e %w %a %H:%M:%S.%q %1q %2q %3q %4q %5q %6q"
e1 = EpochHelper()
e2 = EpochHelper()
d1 = DurationHelper()
# create an Epoch structure
eout = getTimeStamp()
e1.value(eout)
pStr = e1.toString(acstime.TSArray,
| "%x",
0L,
0L)
#print "Current time is " , pStr
# test Epoch range & toUTUdate(), toJulianYear()
eout.value = 0xFFFFFFFFFFFFFFFA
e1.value(eout)
pStr = e1.toString(acstime.TSArray,"", 0L, 0L)
print pStr
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
utc = e1.toUTCdate(0L, 0L)
julian = e1.toJulianYear(0L, 0L)
#mjdSec | onds = e1.toMJDseconds()
print utc, julian #, mjdSeconds
eout.value = 0L
e1.value(eout)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
utc = e1.toUTCdate(0L, 0L)
julian = e1.toJulianYear(0L, 0L)
#mjdSeconds = e1.toMJDseconds()
print utc, julian #, mjdSeconds
e1.fromString(acstime.TSArray,
"60038-3-11T5:36:10.955161")
eout = e1.value()
print eout.value
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.fromString(acstime.TSArray,
"1582-10-15T00:00:00.000000")
eout = e1.value()
print eout.value
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.fromString(acstime.TSArray,
"1995-4-28T17:23:15.654321")
eout = e1.value()
print eout.value
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
utc = e1.toUTCdate(0L, 0L)
julian = e1.toJulianYear(0L, 0L)
#mjdSeconds = e1.toMJDseconds()
print utc, julian #, mjdSeconds
# test Epoch usec implicit trailing zeroes
e1.fromString(acstime.TSArray,
"1776-7-4T17:23:15.5")
pStr = e1.toString(acstime.TSArray,"%x",0L, 0L)
print pStr
e1.fromString(acstime.TSArray,
"2345-6-7T08:09:00.103")
pStr = e1.toString(acstime.TSArray,"%x",0L, 0L)
print pStr
e1.fromString(acstime.TSArray,
"2001-9-11T06:54:32.0506")
pStr = e1.toString(acstime.TSArray,"%x",0L, 0L)
print pStr
# test Epoch.add()
e1.fromString(acstime.TSArray,
"1943-04-05 05:36:10.955161")
eout = e1.value()
e2.value(eout)
d1.fromString("+1 02:03:04.567890")
e1.add(d1.value())
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# test Epoch.difference()
d2_val = e1.difference(e2.value())
d2 = DurationHelper(d2_val)
pStr = d2.toString("")
print pStr
# test Epoch.subtract()
e1.subtract(d1.value())
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# test Epoch.compare() using Epoch.now()
eout = getTimeStamp()
e1.value(eout)
import time
time.sleep(8e-6)
eout = getTimeStamp()
e2.value(eout)
tcom = e1.compare(e2.value())
print tcom
tcom = e2.compare(e1.value())
print tcom
tcom = e1.compare(e1.value())
print tcom
# test Epoch setting by parts
e1.reset()
e1.year(1995)
e1.month(4)
e1.day(28)
e1.hour(17)
e1.minute(23)
e1.second(15)
e1.microSecond(654321)
eout = e1.value()
print eout.value
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.reset()
e1.year(1995)
e1.dayOfYear(118)
eout = e1.value()
print eout.value
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# test Epoch getting by parts
print e1.year(), e1.month(), e1.day(), e1.dayOfYear(), e1.dayOfWeek(), e1.hour(), e1.minute(), e1.second(), e1.microSecond()
# test Epoch.normalize() switch
e1.normalize(1)
e1.fromString(acstime.TSArray,
"1900-13-32T25:67:71.955161")
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# test get/set microSecond value w/ normalize true
lngtmp = e1.microSecond()
e1.microSecond(lngtmp - 1111111)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.microSecond(lngtmp + 1111111)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# test get/set second value w/ normalize true
lngtmp = e1.second()
e1.second(lngtmp - 61)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.second(lngtmp + 61)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# test get/set minute value w/ normalize true
lngtmp = e1.minute()
e1.minute(lngtmp - 61)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.minute(lngtmp + 61)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# test get/set hour value w/ normalize true
lngtmp = e1.hour()
e1.hour(lngtmp - 25)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.hour(lngtmp + 25)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# test get/set day value w/ normalize true (non-leap year)
e1.fromString(acstime.TSArray,
"1901-02-26T21:18:37.955161")
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
lngtmp = e1.day()
e1.day(lngtmp - 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.day(lngtmp + 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.fromString(acstime.TSArray, "1901-03-02T12:16:43.955161")
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
lngtmp = e1.day()
e1.day(lngtmp - 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.day(lngtmp + 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# test get/set day value w/ normalize true (leap year)
e1.fromString(acstime.TSArray,"1904-02-26T08:53:12.955161")
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
lngtmp = e1.day()
e1.day(lngtmp - 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.day(lngtmp + 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.fromString(acstime.TSArray, "1904-03-02T18:37:21.955161")
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
lngtmp = e1.day()
e1.day(lngtmp - 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.day(lngtmp + 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# test get/set day-of-year value w/ normalize true (non-leap year)
e1.fromString(acstime.TSArray, "1901-02-26T21:18:37.955161")
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
lngtmp = e1.dayOfYear()
e1.dayOfYear(lngtmp - 58)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.dayOfYear(lngtmp + 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.fromString(acstime.TSArray,"1901-03-02T12:16:43.955161")
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
lngtmp = e1.dayOfYear()
e1.dayOfYear(lngtmp - 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
e1.dayOfYear(lngtmp + 12)
pStr = e1.toString(acstime.TSArray,allOut,0L, 0L)
print pStr
# tes |
JensTimmerman/radical.pilot | examples/running_mpi_executables.py | Python | mit | 5,342 | 0.009173 | #!/usr/bin/env python
__copyright__ = "Copyright 2013-2014, http://radical.rutgers.edu"
__license__ = "MIT"
import sys
import radical.pilot as rp
# READ: The RADICAL-Pilot documentation:
# http://radicalpilot.readthedocs.org/en/latest
#
# Try running this example with RADICAL_PILOT_VERBOSE=debug set if
# you want to see what happens behind the scenes!
#------------------------------------------------------------------------------
#
def pilot_state_cb (pilot, state):
""" this callback is invoked on all pilot state changes """
print "[Callback]: ComputePilot '%s' state: %s." % (pilot.uid, state)
if state == rp.FAILED:
sys.exit (1)
#------------------------------------------------------------------------------
#
def unit_state_cb (unit, state):
""" this callback is invoked on all unit state changes """
print "[Callback]: ComputeUnit '%s' state: %s." % (unit.uid, state)
if state == rp.FAILED:
sys.exit (1)
# ------------------------------------------------------------------------------
#
if __name__ == "__main__":
    # we can optionally pass a session name to RP as the first CLI argument
    if len(sys.argv) > 1:
        session_name = sys.argv[1]
    else:
        session_name = None
    # Create a new session. No need to try/except this: if session creation
    # fails, there is not much we can do anyways...
    session = rp.Session(name=session_name)
    print "session id: %s" % session.uid
    # all other pilot code is now tried/excepted. If an exception is caught, we
    # can rely on the session object to exist and be valid, and we can thus tear
    # the whole RP stack down via a 'session.close()' call in the 'finally'
    # clause...
    try:
        # Add a Pilot Manager. Pilot managers manage one or more ComputePilots.
        pmgr = rp.PilotManager(session=session)
        # Register our callback with the PilotManager. This callback will get
        # called every time any of the pilots managed by the PilotManager
        # change their state.
        pmgr.register_callback(pilot_state_cb)
        # Define a 16-core pilot on Stampede that runs for 15 minutes and
        # uses $HOME/radical.pilot.sandbox as sandbox directory.
        pdesc = rp.ComputePilotDescription()
        pdesc.resource = "xsede.stampede"
        pdesc.runtime = 15 # N minutes
        pdesc.cores = 16 # X cores
        pdesc.project = "TG-MCB090174"
        # Launch the pilot.
        pilot = pmgr.submit_pilots(pdesc)
        cud_list = []
        for unit_count in range(0, 4):
            cu = rp.ComputeUnitDescription()
            cu.pre_exec = ["module load python intel mvapich2 mpi4py"]
            cu.executable = "python"
            cu.arguments = ["helloworld_mpi.py"]
            cu.input_staging = ["helloworld_mpi.py"]
            # These two parameters are relevant to MPI execution:
            # 'cores' sets the number of cores required by the task
            # 'mpi' identifies the task as an MPI task
            cu.cores = 8
            cu.mpi = True
            cud_list.append(cu)
        # Combine the ComputePilot, the ComputeUnits and a scheduler via
        # a UnitManager object.
        umgr = rp.UnitManager(
            session=session,
            scheduler=rp.SCHED_DIRECT_SUBMISSION)
        # Register our callback with the UnitManager. This callback will get
        # called every time any of the units managed by the UnitManager
        # change their state.
        umgr.register_callback(unit_state_cb)
        # Add the previously created ComputePilot to the UnitManager.
        umgr.add_pilots(pilot)
        # Submit the previously created ComputeUnit descriptions to the
        # PilotManager. This will trigger the selected scheduler to start
        # assigning ComputeUnits to the ComputePilots.
        units = umgr.submit_units(cud_list)
        # Wait for all compute units to reach a terminal state (DONE or FAILED).
        umgr.wait_units()
        if not isinstance(units, list):
            units = [units]
        for unit in units:
            print "* Task %s - state: %s, exit code: %s, started: %s, finished: %s, stdout: %s" \
                % (unit.uid, unit.state, unit.exit_code, unit.start_time, unit.stop_time, unit.stdout)
    except Exception as e:
        # Something unexpected happened in the pilot code above
        print "caught Exception: %s" % e
        raise
    except (KeyboardInterrupt, SystemExit) as e:
        # the callback called sys.exit(), and we can here catch the
        # corresponding KeyboardInterrupt exception for shutdown. We also catch
        # SystemExit (which gets raised if the main threads exits for some other
        # reason).
        print "need to exit now: %s" % e
    finally:
        # always clean up the session, no matter if we caught an exception or
        # not.
        print "closing session"
        session.close ()
        # the above is equivalent to
        #
        # session.close (cleanup=True, terminate=True)
        #
        # it will thus both clean out the session's database record, and kill
        # all remaining pilots (none in our example).
|
vmthunder/nova | nova/tests/api/openstack/compute/test_servers.py | Python | apache-2.0 | 188,039 | 0.000356 | # Copyright 2010-2011 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import contextlib
import datetime
import urllib
import uuid
import iso8601
from lxml import etree
import mock
from oslo.config import cfg
from oslo.utils import timeutils
import six.moves.urllib.parse as urlparse
import testtools
import webob
from nova.api.openstack import compute
from nova.api.openstack.compute import ips
from nova.api.openstack.compute import servers
from nova.api.openstack.compute import views
from nova.api.openstack import extensions
from nova.api.openstack import xmlutil
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import mod | els
from nova import exception
from nova.i18n import _
from nova.image import glance
from nova.ne | twork import manager
from nova.network.neutronv2 import api as neutron_api
from nova import objects
from nova.objects import instance as instance_obj
from nova.openstack.common import jsonutils
from nova.openstack.common import policy as common_policy
from nova import policy
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests import fake_network
from nova.tests.image import fake
from nova.tests import matchers
from nova.tests.objects import test_keypair
from nova.tests import utils
from nova import utils as nova_utils
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')
FAKE_UUID = fakes.FAKE_UUID
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
XPATH_NS = {
'atom': 'http://www.w3.org/2005/Atom',
'ns': 'http://docs.openstack.org/compute/api/v1.1'
}
INSTANCE_IDS = {FAKE_UUID: 1}
FIELDS = instance_obj.INSTANCE_DEFAULT_FIELDS
def fake_gen_uuid():
    # Deterministic stand-in for uuid generation so tests see a stable UUID.
    return FAKE_UUID
def return_servers_empty(context, *args, **kwargs):
    """Stub for DB server listings: always report that no instances exist."""
    servers = []
    return servers
def return_security_group(context, instance_id, security_group_id):
    """No-op stub replacing db.instance_add_security_group in tests."""
    return None
def instance_update_and_get_original(context, instance_uuid, values,
                                     update_cells=True,
                                     columns_to_join=None,
                                     ):
    """Stub DB update: merge *values* onto a faked instance record.

    Returns the merged record twice, standing in for the real
    (old_ref, new_ref) pair the DB API would produce.
    """
    stub = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    merged = dict(stub)
    merged.update(values)
    return (merged, merged)
def instance_update(context, instance_uuid, values, update_cells=True):
    """Stub DB update returning the faked instance merged with *values*."""
    base = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    updated = dict(base)
    updated.update(values)
    return updated
def fake_compute_api(cls, req, id):
    # Stub used in place of compute API methods; always reports success.
    return True
class MockSetAdminPassword(object):
    """Callable test double that records the last password-set request."""

    def __init__(self):
        # Nothing captured until the mock is invoked.
        self.instance_id = None
        self.password = None

    def __call__(self, context, instance_id, password):
        # Remember the arguments so a test can assert on them afterwards.
        self.instance_id = instance_id
        self.password = password
class Base64ValidationTest(test.TestCase):
    """Unit tests for servers.Controller._decode_base64() input validation."""
    def setUp(self):
        super(Base64ValidationTest, self).setUp()
        self.ext_mgr = extensions.ExtensionManager()
        self.ext_mgr.extensions = {}
        self.controller = servers.Controller(self.ext_mgr)
    def test_decode_base64(self):
        # Plain round-trip: encoding then decoding recovers the original value.
        value = "A random string"
        result = self.controller._decode_base64(base64.b64encode(value))
        self.assertEqual(result, value)
    def test_decode_base64_binary(self):
        # Binary payloads must survive the round-trip too.
        value = "\x00\x12\x75\x99"
        result = self.controller._decode_base64(base64.b64encode(value))
        self.assertEqual(result, value)
    def test_decode_base64_whitespace(self):
        # Whitespace embedded in the encoded text must be tolerated.
        value = "A random string"
        encoded = base64.b64encode(value)
        white = "\n \n%s\t%s\n" % (encoded[:2], encoded[2:])
        result = self.controller._decode_base64(white)
        self.assertEqual(result, value)
    def test_decode_base64_invalid(self):
        # Text that is not valid base64 yields None rather than raising.
        invalid = "A random string"
        result = self.controller._decode_base64(invalid)
        self.assertIsNone(result)
    def test_decode_base64_illegal_bytes(self):
        # Characters outside the base64 alphabet also yield None.
        value = "A random string"
        encoded = base64.b64encode(value)
        white = ">\x01%s*%s()" % (encoded[:2], encoded[2:])
        result = self.controller._decode_base64(white)
        self.assertIsNone(result)
class NeutronV2Subclass(neutron_api.API):
    """Empty subclass of the Neutron v2 API.

    Used to ensure that code which type-checks the configured network API
    also accepts subclasses, not just the exact class.
    """
    pass
class ControllerTest(test.TestCase):
    """Common fixture for servers-controller tests.

    Stubs out the DB layer, image service, rate limiting and key pairs so
    subclasses can exercise the API controllers in isolation.
    """
    def setUp(self):
        super(ControllerTest, self).setUp()
        self.flags(verbose=True, use_ipv6=False)
        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self.stubs)
        return_server = fakes.fake_instance_get()
        return_servers = fakes.fake_instance_get_all_by_filters()
        # Route all instance DB access through the fakes defined above.
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       return_servers)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       return_server)
        self.stubs.Set(db, 'instance_add_security_group',
                       return_security_group)
        self.stubs.Set(db, 'instance_update_and_get_original',
                       instance_update_and_get_original)
        self.ext_mgr = extensions.ExtensionManager()
        self.ext_mgr.extensions = {}
        self.controller = servers.Controller(self.ext_mgr)
        self.ips_controller = ips.Controller()
        # Start each test from a clean policy state.
        policy.reset()
        policy.init()
        fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
class ServersControllerTest(ControllerTest):
def test_can_check_loaded_extensions(self):
self.ext_mgr.extensions = {'os-fake': None}
self.assertTrue(self.controller.ext_mgr.is_loaded('os-fake'))
self.assertFalse(self.controller.ext_mgr.is_loaded('os-not-loaded'))
def test_requested_networks_prefix(self):
uuid = 'br-00000000-0000-0000-0000-000000000000'
requested_networks = [{'uuid': uuid}]
res = self.controller._get_requested_networks(requested_networks)
self.assertIn((uuid, None), res.as_tuples())
def test_requested_networks_neutronv2_enabled_with_port(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_requested_networks_neutronv2_enabled_with_network(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(network, None, None, None)], res.as_tuples())
def test_requested_networks_neutronv2_enabled_with_network_and_port(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_requested_networks_neutronv2_disabled_with_port(sel |
glesica/django-site-news | site_news/constants.py | Python | bsd-3-clause | 122 | 0 | """
Django-site-news constants. These are for convenience.
"""
# Category defaults: relative priority values for news categories.
DOWNTIME = 30
CHANGES = 20
INFO = 10
|
zhuango/python | pythonLearning/oo/FrenchDeck.py | Python | gpl-2.0 | 487 | 0.004107 | #!/usr/bin/python3
import collections

# A playing card: rank is a string like '2'..'10', 'J', 'Q', 'K', 'A';
# suit is one of 'spades', 'diamonds', 'clubs', 'hearts'.
Card = collections.namedtuple('Card', ['rank', 'suit'])


class FrenchDeck:
    """An immutable 52-card French deck supporting len() and indexing.

    Implementing __len__ and __getitem__ makes the deck iterable,
    sliceable and usable with random.choice, ``in``, etc.
    """

    ranks = [str(n) for n in range(2, 11)] + list('JQKA')
    suits = 'spades diamonds clubs hearts'.split()

    def __init__(self):
        # Build all 52 rank/suit combinations, grouped by suit.
        self._cards = [Card(rank, suit) for suit in self.suits
                                        for rank in self.ranks]

    def __len__(self):
        return len(self._cards)

    def __getitem__(self, position):
        # Delegating to the list gives slicing and iteration for free.
        return self._cards[position]
|
IndonesiaX/edx-platform | lms/djangoapps/courseware/tests/test_microsites.py | Python | agpl-3.0 | 10,562 | 0.003124 | """
Tests related to the Microsites feature
"""
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from nose.plugins.attrib import attr
from courseware.tests.helpers import LoginEnrollmentTestCase
from course_modes.models import CourseMode
from xmodule.course_module import (
CATALOG_VISIBILITY_CATALOG_AND_ABOUT, CATALOG_VISIBILITY_NONE)
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
@attr('shard_1')
class TestMicrosites(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
This is testing of the Microsite feature
"""
STUDENT_INFO = [('view@test.com', 'foo'), ('view2@test.com', 'foo')]
    def setUp(self):
        """Create a microsite-scoped course plus control courses with varied catalog visibility."""
        super(TestMicrosites, self).setUp()
        # use a different hostname to test Microsites since they are
        # triggered on subdomain mappings
        #
        # NOTE: The Microsite Configuration is in lms/envs/test.py. The content for the Test Microsite is in
        # test_microsites/test_microsite.
        #
        # IMPORTANT: For these tests to work, this domain must be defined via
        # DNS configuration (either local or published)
        self.course = CourseFactory.create(
            display_name='Robot_Super_Course',
            org='TestMicrositeX',
            emit_signals=True,
        )
        self.chapter0 = ItemFactory.create(parent_location=self.course.location,
                                           display_name='Overview')
        self.chapter9 = ItemFactory.create(parent_location=self.course.location,
                                           display_name='factory_chapter')
        self.section0 = ItemFactory.create(parent_location=self.chapter0.location,
                                           display_name='Welcome')
        self.section9 = ItemFactory.create(parent_location=self.chapter9.location,
                                           display_name='factory_section')
        # control course in a different org: must never show up on the microsite
        self.course_outside_microsite = CourseFactory.create(
            display_name='Robot_Course_Outside_Microsite',
            org='FooX',
            emit_signals=True,
        )
        # have a course which explicitly sets visibility in catalog to False
        self.course_hidden_visibility = CourseFactory.create(
            display_name='Hidden_course',
            org='TestMicrositeX',
            catalog_visibility=CATALOG_VISIBILITY_NONE,
            emit_signals=True,
        )
        # have a course which explicitly sets visibility in catalog and about to true
        self.course_with_visibility = CourseFactory.create(
            display_name='visible_course',
            org='TestMicrositeX',
            course="foo",
            catalog_visibility=CATALOG_VISIBILITY_CATALOG_AND_ABOUT,
            emit_signals=True,
        )
    def setup_users(self):
        """Create and activate one account per entry in STUDENT_INFO."""
        # Create student accounts and activate them.
        for i in range(len(self.STUDENT_INFO)):
            email, password = self.STUDENT_INFO[i]
            username = 'u{0}'.format(i)
            self.create_account(username, email, password)
            self.activate_user(email)
    @override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
    def test_microsite_anonymous_homepage_content(self):
        """
        Verify that the homepage, when accessed via a Microsite domain, returns
        HTML that reflects the Microsite branding elements (and hides courses
        that are outside the microsite or marked not visible in the catalog).
        """
        resp = self.client.get('/', HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
        self.assertEqual(resp.status_code, 200)
        # assert various branding definitions on this Microsite
        # as per the configuration and Microsite overrides
        self.assertContains(resp, 'This is a Test Microsite Overlay') # Overlay test message
        self.assertContains(resp, 'test_microsite/images/header-logo.png') # logo swap
        self.assertContains(resp, 'test_microsite/css/test_microsite') # css override
        self.assertContains(resp, 'Test Microsite') # page title
        # assert that test course display name is visible
        self.assertContains(resp, 'Robot_Super_Course')
        # assert that test course with 'visible_in_catalog' to True is showing up
        self.assertContains(resp, 'visible_course')
        # assert that test course that is outside microsite is not visible
        self.assertNotContains(resp, 'Robot_Course_Outside_Microsite')
        # assert that a course that has visible_in_catalog=False is not visible
        self.assertNotContains(resp, 'Hidden_course')
        # assert that footer template has been properly overriden on homepage
        self.assertContains(resp, 'This is a Test Microsite footer')
        # assert that the edX partners section is not in the HTML
        self.assertNotContains(resp, '<section class="university-partners university-partners2x6">')
        # assert that the edX partners tag line is not in the HTML
        self.assertNotContains(resp, 'Explore free courses from')
    def test_not_microsite_anonymous_homepage_content(self):
        """
        Make sure we see the right content on the homepage if we are not in a
        microsite: no microsite branding, and only non-microsite courses.
        """
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 200)
        # assert various branding definitions on this Microsite ARE NOT VISIBLE
        self.assertNotContains(resp, 'This is a Test Microsite Overlay') # Overlay test message
        self.assertNotContains(resp, 'test_microsite/images/header-logo.png') # logo swap
        self.assertNotContains(resp, 'test_microsite/css/test_microsite') # css override
        self.assertNotContains(resp, '<title>Test Microsite</title>') # page title
        # assert that test course display name IS NOT VISIBLE, since that is a Microsite only course
        self.assertNotContains(resp, 'Robot_Super_Course')
        # assert that test course that is outside microsite IS VISIBLE
        self.assertContains(resp, 'Robot_Course_Outside_Microsite')
        # assert that footer template has been properly overriden on homepage
        self.assertNotContains(resp, 'This is a Test Microsite footer')
    def test_no_redirect_on_homepage_when_no_enrollments(self):
        """
        Verify that a user going to homepage will not redirect if he/she has no course enrollments
        """
        self.setup_users()
        email, password = self.STUDENT_INFO[0]
        self.login(email, password)
        # Hitting the microsite root while logged in should render (200), not 302.
        resp = self.client.get(reverse('root'), HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
        self.assertEquals(resp.status_code, 200)
    def test_no_redirect_on_homepage_when_has_enrollments(self):
        """
        Verify that a user going to homepage will not redirect to dashboard if he/she has
        a course enrollment
        """
        self.setup_users()
        email, password = self.STUDENT_INFO[0]
        self.login(email, password)
        # Enroll so the user has at least one active enrollment.
        self.enroll(self.course, True)
        resp = self.client.get(reverse('root'), HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
        self.assertEquals(resp.status_code, 200)
def test_micro | site_course_enrollment(self):
"""
Enroll user in a course scoped in a Microsite and one course outside of a Microsite
and make sure that they are only visible in the right Dashboards
"""
self.setup_users()
email, password = self.STUDENT_INFO[1]
self.login(email, password)
self.enroll(self.course, True)
self.enroll(self.course_outside_microsite, True)
# Access the microsite dashboard and make sure the right course | s appear
resp = self.client.get(reverse('dashboard'), HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
self.assertContains(resp, 'Robot_Super_Course')
self.assertNotContains(resp, 'Robot_Course_Outside_Microsite')
# Now access the non-microsite dashboard and make sure the right courses appear
resp = self.client.get(reverse('dashboard'))
self.assertNotContains(resp, 'Robot_Super_Course')
self.assertContains(resp, 'Robot_Cou |
onecloud/ovs-igmp-v3 | tests/appctl.py | Python | apache-2.0 | 2,351 | 0 | # Copyright (c) 2012 Nicira, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and |
# limitations u | nder the License.
import argparse
import signal
import sys
import ovs.daemon
import ovs.unixctl
import ovs.unixctl.client
import ovs.util
import ovs.vlog
def connect_to_target(target):
    """Resolve *target* to a control socket and open a unixctl client.

    Exits the process via ovs_fatal() if the socket name cannot be
    resolved or the connection cannot be established.
    """
    error, name_or_msg = ovs.unixctl.socket_name_from_target(target)
    if error:
        # ovs_fatal() terminates the process; nothing below runs.
        ovs.util.ovs_fatal(error, name_or_msg)
    socket_name = name_or_msg
    error, client = ovs.unixctl.client.UnixctlClient.create(socket_name)
    if error:
        ovs.util.ovs_fatal(error, "cannot connect to \"%s\"" % socket_name)
    return client
def main():
    """Entry point: parse arguments, run one unixctl transaction, report it."""
    parser = argparse.ArgumentParser(description="Python Implementation of"
                                     " ovs-appctl.")
    parser.add_argument("-t", "--target", default="ovs-vswitchd",
                        help="pidfile or socket to contact")
    parser.add_argument("command", metavar="COMMAND",
                        help="Command to run.")
    parser.add_argument("argv", metavar="ARG", nargs="*",
                        help="Arguments to the command.")
    parser.add_argument("-T", "--timeout", metavar="SECS",
                        help="wait at most SECS seconds for a response")
    args = parser.parse_args()
    if args.timeout:
        # SIGALRM kills the process if the server does not answer in time.
        signal.alarm(int(args.timeout))
    ovs.vlog.Vlog.init()
    target = args.target
    client = connect_to_target(target)
    err_no, error, result = client.transact(args.command, args.argv)
    client.close()
    if err_no:
        # Transport-level failure (e.g. connection dropped mid-transaction).
        ovs.util.ovs_fatal(err_no, "%s: transaction error" % target)
    elif error is not None:
        # The daemon answered but rejected the command.
        sys.stderr.write(error)
        ovs.util.ovs_error(0, "%s: server returned an error" % target)
        sys.exit(2)
    else:
        assert result is not None
        sys.stdout.write(result)
if __name__ == '__main__':
    main()
|
zulu7/pylib | crypto/stream_crypto.py | Python | bsd-3-clause | 1,198 | 0.01586 | #!/usr/bin/env python
from Crypto import Random
from M2Crypto import EVP
from io_helper import stream
from padding import pad_pkcs5, unpad_pkcs5
from chunk_buffer import ChunkBuffer
ALGORITHM = 'aes_256_cbc'
# AES has a fixed block size of 16 bytes regardless of key size
BLOCK_SIZE = 16
ENC=1
DEC=0
def encrypt(in_file, out_file, key, iv, pad=True,
            chunk_size=stream.DEFAULT_CHUNK_SIZE, alg=ALGORITHM):
    """Encrypt *in_file* to *out_file* with the given key/IV.

    Streams the input in *chunk_size* pieces so arbitrarily large files
    can be processed, optionally appending PKCS#5 padding so the total
    plaintext length is a multiple of the AES block size.
    """
    cipher = EVP.Cipher(alg=alg, key=key, iv=iv, op=ENC)
    size = 0
    # Forward chunk_size so the caller-supplied value actually takes effect
    # (it was previously accepted but ignored) -- TODO confirm
    # stream.chunk_iter's second positional argument is the chunk size.
    for chunk in stream.chunk_iter(in_file, chunk_size):
        out_file.write(cipher.update(chunk))
        size += len(chunk)
    if pad:
        # Pad based on the total plaintext length seen so far.
        padding = pad_pkcs5(size, BLOCK_SIZE)
        out_file.write(cipher.update(padding))
    out_file.write(cipher.final())
def decrypt(in_file, out_file, key, iv, unpad=True,
            chunk_size=stream.DEFAULT_CHUNK_SIZE, alg=ALGORITHM):
    """Decrypt *in_file* to *out_file*, optionally stripping PKCS#5 padding.

    *alg* defaults to the module ALGORITHM (new keyword, backward
    compatible) so callers can mirror encrypt()'s algorithm choice.
    The ChunkBuffer holds back the final BLOCK_SIZE bytes so the padding
    can be removed before the tail is written out.
    """
    cipher = EVP.Cipher(alg=alg, key=key, iv=iv, op=DEC)
    buf = ChunkBuffer(
        min_size=BLOCK_SIZE,
        evict_fn=lambda chunk: out_file.write(chunk)
    )
    # Forward chunk_size (previously accepted but ignored) -- TODO confirm
    # stream.chunk_iter's second positional argument is the chunk size.
    for chunk in stream.chunk_iter(in_file, chunk_size):
        buf.append(cipher.update(chunk))
    buf.append(cipher.final())
    remainder = buf.getvalue()
    if unpad:
        out_file.write(unpad_pkcs5(remainder))
    else:
        out_file.write(remainder)
Joergen/zamboni | mkt/purchase/tests/test_utils_.py | Python | bsd-3-clause | 983 | 0 | import amo
import amo.tests
import waffle
from users.models import UserProfile
from mkt.purchase.utils import payments_enabled
from mkt.site.fixtures import fixture
from test_utils import RequestFactory
class TestUtils(amo.tests.TestCase):
    """Exercise payments_enabled() under the PAYMENT_LIMITED setting.

    (Reconstructs tokens that were corrupted by stray ' | ' separators:
    PAYMENT_LIMITED and UserProfile.)
    """
    fixtures = fixture('user_2519')

    def setUp(self):
        self.req = RequestFactory().get('/')

    def test_settings(self):
        # Payments are on whenever the limited flag is off.
        with self.settings(PAYMENT_LIMITED=False):
            assert payments_enabled(self.req)

    def test_not_flag(self):
        # Limited mode disables payments for users without the waffle flag.
        with self.settings(PAYMENT_LIMITED=True):
            assert not payments_enabled(self.req)

    def test_flag(self):
        # The 'override-app-payments' waffle flag re-enables payments for
        # specific users even in limited mode.
        profile = UserProfile.objects.get(pk=2519)
        flag = waffle.models.Flag.objects.create(name='override-app-payments')
        flag.everyone = None
        flag.users.add(profile.user)
        flag.save()
        self.req.user = profile.user
        with self.settings(PAYMENT_LIMITED=True):
            assert payments_enabled(self.req)
|
SuperMarioBean/microblog | app/views.py | Python | bsd-3-clause | 6,281 | 0.018468 | from flask import render_template, flash, redirect, session, url_for, request, g
from flask.ext.login import login_user, logout_user, current_user, login_required
from app import app, db, lm, oid
from forms import LoginForm, EditForm, PostForm, SearchForm
from models import User, ROLE_USER, ROLE_ADMIN, Post
from datetime import datetime
from emails import follower_notification
from config import POSTS_PER_PAGE, MAX_SEARCH_RESULTS
@lm.user_loader
def load_user(id):
    # Flask-Login callback: rehydrate the session user from its stored id.
    return User.query.get(int(id))
@app.before_request
def before_request():
    # Runs before every view: expose the current user via flask.g.
    g.user = current_user
    if g.user.is_authenticated():
        # Refresh last-seen timestamp on every authenticated request.
        g.user.last_seen = datetime.utcnow()
        db.session.add(g.user)
        db.session.commit()
    # Instantiate the search form here so every template can render it.
    g.search_form = SearchForm()
@app.errorhandler(404)
def not_found_error(error):
    """Render the custom 404 page.

    Renamed from ``internal_error``: the 500 handler below reused that
    name, silently rebinding it at import time. Both routes still worked
    (the decorator registers the function before the rebinding), but the
    duplicate definition was a latent redefinition bug (pyflakes F811).
    """
    return render_template('404.html'), 404
@app.errorhandler(500)
def internal_error(error):
    # Roll back the session: an unhandled DB error would otherwise leave it
    # in a broken state for the error page's own queries.
    db.session.rollback()
    return render_template('500.html'), 500
@app.route('/', methods = ['GET', 'POST'])
@app.route('/index', methods = ['GET', 'POST'])
@app.route('/index/<int:page>', methods = ['GET', 'POST'])
@login_required
def index(page = 1):
    """Home page: new-post form plus a paginated feed of followed users' posts."""
    form = PostForm()
    if form.validate_on_submit():
        post = Post(body = form.post.data, timestamp = datetime.utcnow(), author = g.user)
        db.session.add(post)
        db.session.commit()
        flash('Your post is now live!')
        # Redirect (POST/redirect/GET) so a browser refresh cannot double-post.
        return redirect(url_for('index'))
    posts = g.user.followed_posts().paginate(page, POSTS_PER_PAGE, False)
    return render_template('index.html',
        title = 'Home',
        form = form,
        posts = posts)
@app.route('/login', methods = ['GET', 'POST'])
#@oid.loginhandler
def login():
    """Sign-in view; the OpenID round-trip is currently stubbed out."""
    if g.user is not None and g.user.is_authenticated():
        return redirect(url_for('index'))
    form = LoginForm()
    if form.validate_on_submit():
        session['remember_me'] = form.remember_me.data
        #return oid.try_login(form.openid.data, ask_for = ['nickname', 'email'])
        # OpenID disabled: fake a successful provider response instead.
        resp = LoginResult()
        return after_login(resp=resp);
    return render_template('login.html',
        title = 'Sign In',
        form = form,
        providers = app.config['OPENID_PROVIDERS'])
class LoginResult:
    # Hard-coded stand-in for the OpenID response object while OpenID login
    # is disabled (see the commented-out oid handlers in this module).
    email = 'john@email.com'
    nickname = 'john'
#@oid.after_login
def after_login(resp):
    """Complete sign-in from an (OpenID-style) response object.

    Creates the account on first login (with a unique nickname derived
    from the email), makes the user follow him/herself, then establishes
    the Flask-Login session and honours the remember-me choice.
    """
    # Email validation disabled while OpenID is stubbed out:
    # if resp.email is None or resp.email == "":
    #    flash('Invalid login. Please try again.')
    #    return redirect(url_for('login'))
    user = User.query.filter_by(email = resp.email).first()
    if user is None:
        nickname = resp.nickname
        if nickname is None or nickname == "":
            nickname = resp.email.split('@')[0]
        # Ensure the nickname is unique before creating the account.
        nickname = User.make_unique_nickname(nickname)
        user = User(nickname = nickname, email = resp.email, role = ROLE_USER)
        db.session.add(user)
        db.session.commit()
        # make the user follow him/herself
        db.session.add(user.follow(user))
        db.session.commit()
    remember_me = False
    if 'remember_me' in session:
        remember_me = session['remember_me']
        session.pop('remember_me', None)
    login_user(user, remember = remember_me)
    return redirect(request.args.get('next') or url_for('index'))
@app.route('/logout')
def logout():
    """Terminate the session and send the visitor back to the index."""
    logout_user()
    destination = url_for('index')
    return redirect(destination)
@app.route('/user/<nickname>')
@app.route('/user/<nickname>/<int:page>')
@login_required
def user(nickname, page = 1):
    """Profile page: show one user's own posts, paginated."""
    user = User.query.filter_by(nickname = nickname).first()
    # Identity check instead of '== None' (PEP 8; robust to __eq__ overrides).
    if user is None:
        flash('User ' + nickname + ' not found.')
        return redirect(url_for('index'))
    posts = user.posts.paginate(page, POSTS_PER_PAGE, False)
    return render_template('user.html',
        user = user,
        posts = posts)
@app.route('/edit', methods = ['GET', 'POST'])
@login_required
def edit():
    """Let the logged-in user edit their nickname and about_me text.

    (Reconstructs the decorator and form-population lines that were
    corrupted by stray ' | ' separators.)
    """
    form = EditForm(g.user.nickname)
    if form.validate_on_submit():
        g.user.nickname = form.nickname.data
        g.user.about_me = form.about_me.data
        db.session.add(g.user)
        db.session.commit()
        flash('Your changes have been saved.')
        return redirect(url_for('edit'))
    elif request.method != "POST":
        # Initial GET: pre-populate the form with current values. A failed
        # POST skips this branch so the user's invalid input is preserved.
        form.nickname.data = g.user.nickname
        form.about_me.data = g.user.about_me
    return render_template('edit.html',
        form = form)
@app.route('/follow/<nickname>')
@login_required
def follow(nickname):
    """Make the logged-in user follow *nickname*, then notify the followee."""
    user = User.query.filter_by(nickname = nickname).first()
    # Identity check instead of '== None' (PEP 8).
    if user is None:
        flash('User ' + nickname + ' not found.')
        return redirect(url_for('index'))
    if user == g.user:
        flash('You can\'t follow yourself!')
        return redirect(url_for('user', nickname = nickname))
    u = g.user.follow(user)
    if u is None:
        flash('Cannot follow ' + nickname + '.')
        return redirect(url_for('user', nickname = nickname))
    db.session.add(u)
    db.session.commit()
    flash('You are now following ' + nickname + '!')
    # Email the followed user about their new follower.
    follower_notification(user, g.user)
    return redirect(url_for('user', nickname = nickname))
@app.route('/unfollow/<nickname>')
@login_required
def unfollow(nickname):
    """Make the logged-in user stop following `nickname`."""
    user = User.query.filter_by(nickname = nickname).first()
    if user is None:  # idiom fix: `is None` instead of `== None` (PEP 8)
        flash('User ' + nickname + ' not found.')
        return redirect(url_for('index'))
    if user == g.user:
        flash('You can\'t unfollow yourself!')
        return redirect(url_for('user', nickname = nickname))
    u = g.user.unfollow(user)
    if u is None:
        # unfollow() returns None when there is no relationship to remove.
        flash('Cannot unfollow ' + nickname + '.')
        return redirect(url_for('user', nickname = nickname))
    db.session.add(u)
    db.session.commit()
    flash('You have stopped following ' + nickname + '.')
    return redirect(url_for('user', nickname = nickname))
@app.route('/search', methods = ['POST'])
@login_required
def search():
    """Validate the search form and redirect to the results page."""
    if g.search_form.validate_on_submit():
        return redirect(url_for('search_results', query = g.search_form.search.data))
    return redirect(url_for('index'))
@app.route('/search_results/<query>')
@login_required
def search_results(query):
    """Run a full-text search over posts and render the matches."""
    matches = Post.query.whoosh_search(query, MAX_SEARCH_RESULTS).all()
    return render_template('search_results.html',
        query = query,
        results = matches)
|
shernshiou/CarND | Term1/04-CarND-Behavioral-Cloning/model.py | Python | mit | 6,595 | 0.006065 | from keras.models import Sequential, model_from_json
from keras.layers import Dense, Dropout, Activation, Flatten, Convolution2D, MaxPooling2D, Lambda, ELU
from keras.layers.normalization import BatchNormalization
from keras.optimizers import Adam
import cv2
import csv
import numpy as np
import os
from random import random
from sklearn.model_selection import train_test_split
DATA_PATH = './data/t1/'
def trans_image(image, steer, trans_range):
    """Randomly translate `image` and adjust the steering angle to match.

    Ref: https://chatbotslife.com/using-augmentation-to-mimic-human-driving-496b569760a9#.s1pwczi3q
    Returns (translated_image, adjusted_steer).
    """
    height, width, _ = image.shape
    # Horizontal shift drives a proportional steering correction.
    shift_x = trans_range * np.random.uniform() - trans_range / 2
    adjusted_steer = steer + shift_x / trans_range * 2 * .2
    # Vertical shift is cosmetic only (fixed +/-20 px range).
    shift_y = 40 * np.random.uniform() - 40 / 2
    translation = np.float32([[1, 0, shift_x], [0, 1, shift_y]])
    shifted = cv2.warpAffine(image, translation, (width, height))
    return shifted, adjusted_steer
def gen_data(X, y, batch_size=128, validation=False):
    #
    # Generate data for fit_generator
    #
    # Yields (features, labels) batches forever, cycling through (X, y).
    # X rows hold [center, left, right] image paths relative to DATA_PATH;
    # y rows hold the steering angle as a string.  In training mode each
    # row contributes 3 samples (center + left + right).
    gen_start = 0
    while True:
        features = []
        labels = []
        if gen_start >= len(y):
            gen_start = 0
        ending = min(gen_start+batch_size, len(y))
        for idx, row in enumerate(y[gen_start:ending]):
            center_img = cv2.imread(DATA_PATH + X[gen_start+idx][0].strip())
            center_img = cv2.cvtColor(center_img, cv2.COLOR_BGR2HSV)
            center_label = float(row[0])
            # Augmentation 1: Jitter image
            center_img, center_label = trans_image(center_img, center_label, 100)
            # Augmentation 2: Occasionally flip straight
            # NOTE(review): despite the comment, this flips only frames with
            # |angle| > 0.1 (i.e. non-straight) — confirm intent.
            if random() > 0.5 and abs(center_label) > 0.1:
                center_img = cv2.flip(center_img, 1)
                labels.append(-center_label)
            else:
                labels.append(center_label)
            # Augmentation 3: Random brightness
            # Scales the V channel (image is in HSV at this point).
            random_bright = .25 + np.random.uniform()
            center_img[:,:,2] = center_img[:,:,2]*random_bright
            features.append(center_img)
            if not validation:
                # Augmentation 4: +0.25 to Left Image
                # NOTE(review): the offset actually applied is 0.15, not 0.25.
                left_img = cv2.imread(DATA_PATH + X[gen_start+idx][1].strip())
                features.append(left_img)
                labels.append(float(row[0]) + 0.15)
                # Augmentation 5: -0.25 to Right Image
                right_img = cv2.imread(DATA_PATH + X[gen_start+idx][2].strip())
                features.append(right_img)
                labels.append(float(row[0]) - 0.15)
        gen_start += batch_size
        features = np.array(features)
        labels = np.array(labels)
        yield features, labels
def nvidia_model(row=66, col=200, ch=3, dropout=0.3, lr=0.0001):
    #
    # NVIDIA CNN model
    # Ref: https://arxiv.org/abs/1604.07316
    #
    # Five convolutions followed by four dense layers, ELU activations and
    # dropout after every learned layer; compiled with Adam + MSE for
    # steering-angle regression.  Returns the compiled Keras model.
    # (Fix: restored the `return model` line corrupted by extraction.)
    input_shape = (row, col, ch)
    model = Sequential()
    model.add(BatchNormalization(axis=1, input_shape=input_shape))
    model.add(Convolution2D(24, 5, 5, border_mode='valid',
                            subsample=(2, 2), activation='elu'))
    model.add(Dropout(dropout))
    model.add(Convolution2D(36, 5, 5, border_mode='valid',
                            subsample=(2, 2), activation='elu'))
    model.add(Dropout(dropout))
    model.add(Convolution2D(48, 5, 5, border_mode='valid',
                            subsample=(2, 2), activation='elu'))
    model.add(Dropout(dropout))
    model.add(Convolution2D(64, 3, 3, border_mode='valid',
                            subsample=(1, 1), activation='elu'))
    model.add(Dropout(dropout))
    model.add(Convolution2D(64, 3, 3, border_mode='valid',
                            subsample=(1, 1), activation='elu'))
    model.add(Dropout(dropout))
    model.add(Flatten())
    model.add(Dense(100))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(50))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(10))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(1))
    model.add(Activation('elu'))
    model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
    print(model.summary())
    return model
def nvidialite_model(row=33, col=100, ch=3, dropout=0.3, lr=0.0001):
    #
    # Modified of NVIDIA CNN Model (Dysfunctional)
    #
    # Smaller variant of nvidia_model for quarter-size input; kept for
    # reference (the original author marked it dysfunctional).
    # (Fix: restored the `Convolution2D` token corrupted by extraction.)
    input_shape = (row, col, ch)
    model = Sequential()
    model.add(BatchNormalization(axis=1, input_shape=input_shape))
    model.add(Convolution2D(24, 5, 5, border_mode='valid',
                            subsample=(2, 2), activation='elu'))
    model.add(Convolution2D(36, 5, 5, border_mode='valid',
                            subsample=(2, 2), activation='elu'))
    model.add(Convolution2D(48, 3, 3, border_mode='valid',
                            subsample=(1, 1), activation='elu'))
    model.add(Flatten())
    model.add(Dense(100))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(50))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(10))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(1))
    model.add(Activation('elu'))
    model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
    print(model.summary())
    return model
def load_data(filter=True):
    #
    # Load and split data
    # CSV: center,left,right,steering,throttle,brake,speed
    #
    # Returns (X_train, X_val, y_train, y_val) where X holds the three
    # image paths and y the steering angle.
    # NOTE: the `filter` flag is currently unused (kept for interface
    # compatibility); no row filtering is applied.
    with open(DATA_PATH + 'driving_log.csv', 'r') as f:
        reader = csv.reader(f)
        data = [row for row in reader]
    data = np.array(data)
    X = data[:,[0,1,2]]
    y = data[:,[3]]
    # Bug fix: `total` was hard-coded to 0 and never updated, so the
    # sample count was always reported as 0.
    total = len(data)
    print('Total samples:', total)
    print('Total samples (after filter):', len(X))
    return train_test_split(X, y, test_size=0.2, random_state=42)
def load_model(lr=0.001):
    #
    # Load the existing model and weight
    #
    # Reads the architecture from model.json, re-compiles it with Adam/MSE
    # and restores the trained weights from model.h5.
    with open('model.json', 'r') as jfile:
        model = model_from_json(jfile.read())
    model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
    model.load_weights('model.h5')
    return model
def main():
    # Load data
    X_train, X_val, y_train, y_val = load_data()
    print('X_train shape:', X_train.shape)
    print('X_val shape:', X_val.shape)
    # Build model: resume from a saved model.json/model.h5 pair when one
    # exists in the working directory, otherwise start a fresh network.
    if 'model.json' in os.listdir():
        model = load_model()
    else:
        model = nvidia_model()
    # samples_per_epoch is 3x the training rows because gen_data emits
    # center, left and right images for every row during training.
    model.fit_generator(gen_data(X_train, y_train),
                        samples_per_epoch=len(X_train)*3, nb_epoch=8,
                        validation_data=gen_data(X_val, y_val, validation=True),
                        nb_val_samples=len(X_val))
    # Save model
    json = model.to_json()
    model.save_weights('model.h5')
    with open('model.json', 'w') as f:
        f.write(json)
if __name__ == "__main__": main()
|
SiggyF/dotfiles | .config/ipython/profile_default/ipython_config.py | Python | mit | 18,950 | 0.00438 | # Configuration file for ipython.
c = get_config()
#------------------------------------------------------------------------------
# InteractiveShellApp configuration
#------------------------------------------------------------------------------
# A Mixin for applications that start InteractiveShell instances.
#
# Provides configurables for loading extensions and executing files as part of
# configuring a Shell environment.
#
# The following methods should be called by the :meth:`initialize` method of the
# subclass:
#
# - :meth:`init_path`
# - :meth:`init_shell` (to be implemented by the subclass)
# - :meth:`init_gui_pylab`
# - :meth:`init_extensions`
# - :meth:`init_code`
# Execute the given command string.
# c.InteractiveShellApp.code_to_run = ''
# lines of code to run at IPython startup.
# c.InteractiveShellApp.exec_lines = []
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'none',
# 'osx', 'pyglet', 'qt', 'qt4', 'tk', 'wx').
# c.InteractiveShellApp.gui = None
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.InteractiveShellApp.pylab = None
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.InteractiveShellApp.matplotlib = None
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an 'import *' is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.InteractiveShellApp.pylab_import_all = True
# A list of dotted module names of IPython extensions to load.
# c.InteractiveShellApp.extensions = []
# Run the module as a script.
# c.InteractiveShellApp.module_to_run = ''
# dotted module name of an IPython extension to load.
# c.InteractiveShellApp.extra_extension = ''
# List of files to run at IPython startup.
# c.InteractiveShellApp.exec_files = []
# A file to be run
# c.InteractiveShellApp.file_to_run = ''
#------------------------------------------------------------------------------
# TerminalIPythonApp configuration
#------------------------------------------------------------------------------
# TerminalIPythonApp will inherit config from: BaseIPythonApplication,
# Application, InteractiveShellApp
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.TerminalIPythonApp.pylab = None
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.TerminalIPythonApp.verbose_crash = False
# Run the module as a script.
# c.TerminalIPythonApp.module_to_run = ''
# The date format used by logging formatters for %(asctime)s
# c.TerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# Whether to overwrite existing config files when copying
# c.TerminalIPythonApp.overwrite = False
# Execute the given command string.
# c.TerminalIPythonApp.code_to_run = ''
# Set the log level by value or name.
# c.TerminalIPythonApp.log_level = 30
# lines of code to run at IPython startup.
# c.TerminalIPythonApp.exec_lines = []
# Suppress warning messages about legacy config files
# c.TerminalIPythonApp.ignore_old_config = False
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.TerminalIPythonApp.extra_config_file = u''
# dotted module name of an IPython extension to load.
# c.TerminalIPythonApp.extra_extension = ''
# A file to be run
# c.TerminalIPythonApp.file_to_run = ''
# The IPython profile to use.
# c.TerminalIPythonApp.profile = u'default'
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.TerminalIPythonApp.matplotlib = None
# If a command or file is given via the command-line, e.g. 'ipython foo.py
# c.TerminalIPythonApp.force_interact = False
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an 'import *' is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.TerminalIPythonApp.pylab_import_all = True
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This options can also be specified through the environment
# variable IPYTHONDIR.
# c.TerminalIPythonApp.ipython_dir = u'/home/fedor/.config/ipython'
# Whether to display a banner upon starting IPython.
# c.TerminalIPythonApp.display_banner = True
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.TerminalIPythonApp.copy_config_files = False
# List of files to run at IPython startup.
# c.TerminalIPythonApp.exec_files = []
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'none',
# 'osx', 'pyglet', 'qt', 'qt4', 'tk', 'wx').
# c.TerminalIPythonApp.gui = None
# A list of dotted module names of IPython extensions to load.
# c.TerminalIPythonApp.extensions = []
# Start IPython quickly by skipping the loading of config files.
# c.TerminalIPythonApp.quick = False
# The Logging format template
# c.TerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
#------------------------------------------------------------------------------
# TerminalInteractiveShell configuration
#------------------------------------------------------------------------------
# TerminalInteractiveShell will inherit config from: InteractiveShell
# auto editing of files with syntax errors.
# c.TerminalInteractiveShell.autoedit_syntax = False
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.TerminalInteractiveShell.color_info = True
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.TerminalInteractiveShell.ast_transformers = []
#
# c.TerminalInteractiveShell.history_length = 10000
# Don't call post-execute functions that have failed in the past.
# c.TerminalInteractiveShell.disable_failing_post_execute = False
# Show rewritten input, e.g. for autocall.
# c.TerminalInteractiveShell.show_rewritten_input = True
# Set the color scheme (NoColor, Linux, or LightBG).
# c.TerminalInteractiveShell.colors = 'Linux'
# Autoindent IPython code entered interactively.
# c.TerminalInteractiveShell.autoindent = True
#
# c.TerminalInteractiveShell.separate_in = '\n'
# Deprecated, use PromptManager.in2_template
# c.TerminalInteractiveShell.prompt_in2 = ' .\\D.: '
#
# c.TerminalInteractiveShell.separate_out = ''
# Deprecated, use PromptManager.in_template
# c.TerminalInteractiveShell.prompt_in1 = 'In [\\#]: '
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
# c.TerminalInteractiveShell.autocall = 0
# Number of lines of your screen, used to control printing of very long strings.
# Strings longer than this number of lines will be sent through a pager instead
# of directly printed. The default value for this is 0, which means IPython
# will auto-detect your screen size every time it needs to print certain
# potentially long strings (this doesn't change the behavior of the 'print'
# keyword, it's only triggered internally). If for some reason this isn't
# working well (it needs curses support), specify it yourself. Otherwise don't
# change the default.
# c.TerminalInteractiveShell.screen_length = 0
# |
0dataloss/pyrax | samples/cloud_loadbalancers/session_persistence.py | Python | apache-2.0 | 1,492 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c)2012 Rackspace US, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import os
import sys
import pyrax
# Authenticate against Rackspace with the credentials file in $HOME.
pyrax.set_setting("identity_type", "rackspace")
creds_file = os.path.expanduser("~/.rackspace_cloud_credentials")
pyrax.set_credential_file(creds_file)
clb = pyrax.cloud_loadbalancers
try:
    lb = clb.list()[0]
except IndexError:
    print("You do not have any load balancers yet.")
    print("Please create one and then re-run this script.")
    sys.exit()
print("Load Balancer:", lb)
orig = lb.session_persistence
print("Current setting of session persistence:", orig or '""')
print()
# Toggle the setting: clear it when set, otherwise enable cookie-based
# session persistence.
if orig:
    print("Clearing...")
    lb.session_persistence = ""
else:
    print("Setting persistence to HTTP_COOKIE...")
    lb.session_persistence = "HTTP_COOKIE"
print("New setting of session persistence:", lb.session_persistence or '""')
|
jzbontar/orange-tree | Orange/statistics/contingency.py | Python | gpl-3.0 | 10,334 | 0.001355 | import math
import numpy as np
from Orange import data
def _get_variable(variable, dat, attr_name,
                  expected_type=None, expected_name=""):
    """Resolve `variable` against `dat` and return a data.Variable.

    `variable` may already be a Variable (it is then checked against
    dat.variable, if present), a name/index resolvable through dat.domain,
    or omitted in favour of dat's own variable attribute.  Raises
    ValueError when resolution fails or when the result is not an
    instance of `expected_type`.
    """
    failed = False
    if isinstance(variable, data.Variable):
        datvar = getattr(dat, "variable", None)
        if datvar is not None and datvar is not variable:
            raise ValueError("variable does not match the variable"
                             "in the data")
    elif hasattr(dat, "domain"):
        variable = dat.domain[variable]
    elif hasattr(dat, attr_name):
        # NOTE(review): tests for `attr_name` but always reads
        # `dat.variable` — confirm this asymmetry is intended.
        variable = dat.variable
    else:
        failed = True
    if failed or (expected_type is not None and
                  not isinstance(variable, expected_type)):
        if not expected_type or isinstance(variable, data.Variable):
            raise ValueError(
                "expected %s variable not %s" % (expected_name, variable))
        else:
            raise ValueError("expected %s, not '%s'" %
                             (expected_type.__name__, type(variable).__name__))
    return variable
class Discrete(np.ndarray):
    """Contingency table for two discrete variables, stored as a 2-D array.

    Rows index the values of `row_variable`, columns the values of
    `col_variable`; `unknowns` accumulates the weight of instances whose
    column value is missing.
    (Fix: restored the `continue` / `w = row.weight` lines corrupted by
    extraction in the from_data fallback loop.)
    """

    def __new__(cls, dat=None, col_variable=None, row_variable=None, unknowns=None):
        # A data storage object is delegated to from_data, which computes
        # the counts; otherwise `dat` is treated as a ready-made matrix.
        if isinstance(dat, data.Storage):
            if unknowns is not None:
                raise TypeError(
                    "incompatible arguments (data storage and 'unknowns'")
            return cls.from_data(dat, col_variable, row_variable)
        if row_variable is not None:
            row_variable = _get_variable(row_variable, dat, "row_variable")
            rows = len(row_variable.values)
        else:
            rows = dat.shape[0]
        if col_variable is not None:
            col_variable = _get_variable(col_variable, dat, "col_variable")
            cols = len(col_variable.values)
        else:
            cols = dat.shape[1]
        self = super().__new__(cls, (rows, cols))
        self.row_variable = row_variable
        self.col_variable = col_variable
        if dat is None:
            self[:] = 0
            self.unknowns = unknowns or 0
        else:
            self[...] = dat
            self.unknowns = (unknowns if unknowns is not None
                             else getattr(dat, "unknowns", 0))
        return self

    @classmethod
    def from_data(cls, data, col_variable, row_variable=None):
        """Build the table from a data storage object.

        Uses the storage's fast `_compute_contingency` when implemented and
        falls back to counting instance by instance otherwise.
        """
        if row_variable is None:
            row_variable = data.domain.class_var
            if row_variable is None:
                raise ValueError("row_variable needs to be specified (data "
                                 "has no class)")
        row_variable = _get_variable(row_variable, data, "row_variable")
        col_variable = _get_variable(col_variable, data, "col_variable")
        try:
            dist, unknowns = data._compute_contingency(
                [col_variable], row_variable)[0]
            self = super().__new__(cls, dist.shape)
            self[...] = dist
            self.unknowns = unknowns
        except NotImplementedError:
            self = np.zeros(
                (len(row_variable.values), len(col_variable.values)))
            self.unknowns = 0
            rind = data.domain.index(row_variable)
            cind = data.domain.index(col_variable)
            for row in data:
                rval, cval = row[rind], row[cind]
                # Skip instances with a missing row value; weight counts by
                # the instance weight.
                if math.isnan(rval):
                    continue
                w = row.weight
                if math.isnan(cval):
                    # NOTE(review): self.unknowns is a scalar here, so this
                    # indexed update would raise; likely intended as
                    # `self.unknowns += w` — confirm against upstream before
                    # relying on this fallback path.
                    self.unknowns[cval] += w
                else:
                    self[rval, cval] += w
        self.row_variable = row_variable
        self.col_variable = col_variable
        return self

    def __eq__(self, other):
        """Element-wise table equality plus (when present) equal unknowns."""
        return np.array_equal(self, other) and (
            not hasattr(other, "unknowns") or
            np.array_equal(self.unknowns, other.unknowns))

    def __getitem__(self, index):
        """Index by value names as well as by position.

        A string index on the full table selects a row through
        row_variable; on a 1-D view the string is translated through
        row_variable or col_variable depending on the view's orientation.
        """
        if isinstance(index, str):
            if len(self.shape) == 2:  # contingency
                index = self.row_variable.to_val(index)
                contingency_row = super().__getitem__(index)
                contingency_row.col_variable = self.col_variable
                return contingency_row
            else:  # Contingency row
                column = self.strides == self.base.strides[:1]
                if column:
                    index = self.row_variable.to_val(index)
                else:
                    index = self.col_variable.to_val(index)
        elif isinstance(index, tuple):
            if isinstance(index[0], str):
                index = (self.row_variable.to_val(index[0]), index[1])
            if isinstance(index[1], str):
                index = (index[0], self.col_variable.to_val(index[1]))
        result = super().__getitem__(index)
        if result.strides:
            result.col_variable = self.col_variable
            result.row_variable = self.row_variable
        return result

    def __setitem__(self, index, value):
        """Assign with the same string-to-value translation as __getitem__."""
        if isinstance(index, str):
            index = self.row_variable.to_val(index)
        elif isinstance(index, tuple):
            if isinstance(index[0], str):
                index = (self.row_variable.to_val(index[0]), index[1])
            if isinstance(index[1], str):
                index = (index[0], self.col_variable.to_val(index[1]))
        super().__setitem__(index, value)

    def normalize(self, axis=None):
        """Normalize counts in place so they sum to 1 over `axis`."""
        t = np.sum(self, axis=axis)
        if t > 1e-6:
            self[:] /= t
            if axis is None or axis == 1:
                self.unknowns /= t
class Continuous:
def __init__(self, dat=None, col_variable=None, row_variable=None,
unknowns=None):
if isinstance(dat, data.Storage):
if unknowns is not None:
raise TypeError(
"incompatible arguments (data storage and 'unknowns'")
return self.from_data(dat, col_variable, row_variable)
if row_variable is not None:
row_variable = _get_variable(row_variable, dat, "row_variable")
if col_variable is not None:
col_variable = _get_variable(col_variable, dat, "col_variable")
self.values, self.counts = dat
self.row_variable = row_variable
self.col_variable = col_variable
if unknowns is not None:
self.unknowns = unknowns
elif row_variable:
self.unknowns = np.zeros(len(row_variable.values))
else:
self.unknowns = None
def from_data(self, data, col_variable, row_variable=None):
if row_variable is None:
row_variable = data.domain.class_var
if row_variable is None:
raise ValueError("row_variable needs to be specified (data"
"has no class)")
self.row_variable = _get_variable(row_variable, data, "row_variable")
self.col_variable = _get_variable(col_variable, data, "col_variable")
try:
(self.values, self.counts), self.unknowns = data._compute_contingency(
[col_variable], row_variable)[0]
except NotImplementedError:
raise NotImplementedError("Fallback method for computation of "
"contingencies is not implemented yet")
def __eq__(self, other):
return (np.array_equal(self.values, other.values) and
np.array_equal(self.counts, other.counts) and
(not hasattr(other, "unknowns") or
np.array_equal(self.unknowns, other.unknowns)))
def __getitem__(self, index):
""" Return contingencies for a given class value. """
if isinstance(index, (str, float)):
index = self.row_variable.to_val(index)
C = self.counts[index]
ind = C > 0
return np.vstack((self.values[ind], C[ind]))
def __len__(self):
return self.counts.shape[0]
def __setitem__(self, index, value):
raise NotImplementedError("Setting individual class contingencies is "
"not implemented yet. Set .values and .counts.")
def normalize(self, axis=None):
if axis is None:
|
antivirtel/Flexget | tests/test_html5lib.py | Python | mit | 689 | 0 | from __future__ import unicode_literals, division, absolute_import
from flexget.utils.soup import get_soup
class TestHtml5Lib(object):
    """Verify that get_soup repairs malformed HTML (via html5lib).

    (Fix: restored the `assert em.parent.name` line corrupted by
    extraction.)
    """
    config = 'tasks: {}'

    def test_parse_broken(self, execute_task):
        """Unclosed <p>/<b>/<em> tags are re-parented like a browser would."""
        s = """<html>
        <head><title>Foo</title>
        <body>
        <p class=foo><b>Some Text</b>
        <p><em>Some Other Text</em>"""
        soup = get_soup(s)
        body = soup.find('body')
        ps = body.find_all('p')
        # Both paragraphs are implicitly closed and attached to <body>.
        assert ps[0].parent.name == 'body'
        assert ps[1].parent.name == 'body'
        b = soup.find('b')
        assert b.parent.name == 'p'
        em = soup.find('em')
        assert em.parent.name == 'p'
        # Unquoted attribute value is still parsed.
        assert soup.find('p', attrs={'class': 'foo'})
|
renhaocui/adPlatform | usernameExtractor.py | Python | mit | 2,122 | 0.001414 | import time
import twitter
import json
__author__ = 'rencui'
def oauth_login():
    """Return a twitter.Twitter client authenticated via OAuth.

    Reads the consumer key/secret and access token/secret from the
    module-level globals c_k, c_s, a_t and a_t_s, which are assigned
    below this definition (late binding makes that safe as long as the
    function is called after module initialisation).
    """
    # credentials for OAuth
    CONSUMER_KEY = c_k
    CONSUMER_SECRET = c_s
    OAUTH_TOKEN = a_t
    OAUTH_TOKEN_SECRET = a_t_s
    # Creating the authentification
    auth = twitter.oauth.OAuth(OAUTH_TOKEN,
                               OAUTH_TOKEN_SECRET,
                               CONSUMER_KEY,
                               CONSUMER_SECRET)
    # Twitter instance
    twitter_api = twitter.Twitter(auth=auth)
    return twitter_api
userFile = open('dataset/experiment/mention.list', 'r')
outputFile = open('dataset/experiment/mention.json', 'w')
c_k = 'R2FZHZcAcHFatakYhKL2cQcVo'
c_s = 'jwkcIPCkrOBdxKVTVVE7d7cIwH8ZyHHtqxYeCVUZs35Lu4BOkY'
a_t = '141612471-3UJPl93cGf2XBm2JkBn26VFewzwK3WGN1EiKJi4T'
a_t_s = 'do1I1vtIvjgQF3vr0ln4pYVbsAj5OZIxuuATXjgBaqUYM'
requestLimit = 180
twitter_api = oauth_login()
nameList = []
for line in userFile:
name = line.strip()
if name not in nameList:
nameList.append(name)
userFile.close()
print len(nameList)
outputList = []
tempList = []
requestCount = 0
for index, userId in enumerate(nameList):
if requestCount > requestLimit:
print 'waiting for 15m...'
time.sleep(900)
requestCount = 0
if index % 99 == 0 and index != 0:
tempList.append(userId)
requestCount += 1
response = tw | itter_api.users.lookup(screen_name=','.join(tempList))
tempList = []
| for user in response:
screenName = user['screen_name']
outputList.append(screenName)
outputFile.write(json.dumps(user)+'\n')
elif index == len(nameList)-1:
tempList.append(userId)
requestCount += 1
response = twitter_api.users.lookup(screen_name=','.join(tempList))
tempList = []
for user in response:
screenName = user['screen_name']
outputList.append(screenName)
outputFile.write(json.dumps(user)+'\n')
else:
tempList.append(userId)
outputFile.close()
for name in nameList:
if name not in outputList:
print name |
Ceasar/grammar | test_grammar.py | Python | mit | 1,016 | 0.000984 | impor | t pytest
from grammar import Grammar, Production
@pytest.fixture
def grammar():
    """A toy English grammar: S -> NP VP, NP -> T N, VP -> V NP.

    (Fix: restored the `Production` token corrupted by extraction.)
    """
    S = 'S'
    NP = 'NP'
    VP = 'VP'
    T, N, V = 'T', 'N', 'V'
    productions = {
        Production(S, [NP, VP]),
        Production(NP, [T, N]),
        Production(VP, [V, NP]),
        Production(T, {'the'}),
        Production(N, {'man', 'ball'}),
        Production(V, {'hit', 'took'}),
    }
    grammar = Grammar(
        terminals={T, N, V},
        nonterminals={S, NP, VP},
        productions=productions,
        start=S,
    )
    return grammar
def test_tag(grammar):
    """Tagging maps each word of a sentence to its lexical category."""
    words = 'the man hit the man'.split()
    assert list(grammar.tag(words)) == ['T', 'N', 'V', 'T', 'N']
def test_replace(grammar):
    """Expanding NP when the next word is 'the' yields [T, N]."""
    assert grammar.replace('NP', 'the') == ['T', 'N']
def test_recognize(grammar):
    """A sentence generated by the grammar is recognized."""
    got = grammar.recognize('the man hit the ball')
    # Fix: avoid equality comparison to True (PEP 8 / flake8 E712);
    # assert truthiness instead.
    assert got
|
darius/toot | toot7_chained.py | Python | gpl-3.0 | 2,474 | 0.005255 | "Represent the analyzed program as a list of instructions."
# t: tree node
# dn, dv: definition names, definition values (i.e. analyzed definitions)
# vn, vv: variable names, variable values
import abstract_syntax as A
def eval_program(program):
    """Analyze every definition and the main expression, then run it.

    Returns the value of the program's top-level expression.
    (Fix: restored the `do_expr` token corrupted by extraction.)
    """
    dn = tuple(defn.name for defn in program.defns)
    dv = tuple(defn.expr.analyze(dn, defn.params) for defn in program.defns)
    do_expr = program.expr.analyze(dn, ())
    return run(do_expr, dv, ())
def run(instructions, dv, vv):
    """Execute a sequence of instruction closures; return the final stack top.

    Each instruction is called as f(dv, vv, stack) and returns the relative
    jump to the next instruction (usually 1).
    """
    operand_stack = []
    counter = 0
    total = len(instructions)
    while counter < total:
        counter += instructions[counter](dv, vv, operand_stack)
    return operand_stack.pop()
# Attach an `analyze` method to each AST node type.  Each returns a tuple
# of instruction closures (built by the do_* compilers below); `dn` holds
# the definition names in scope, `vn` the variable names.
# (Fix: restored the `vn` token corrupted by extraction in A.If.)
A.Constant.analyze = lambda t, dn, vn: do_constant(t.value)
A.Variable.analyze = lambda t, dn, vn: do_variable(vn.index(t.name))
A.If      .analyze = lambda t, dn, vn: do_if(t.then.analyze(dn, vn),
                                             t.test.analyze(dn, vn),
                                             t.else_.analyze(dn, vn))
A.Call    .analyze = lambda t, dn, vn: do_call(dn.index(t.name),
                                               tuple(argument.analyze(dn, vn)
                                                     for argument in t.arguments))
A.Prim2   .analyze = lambda t, dn, vn: do_prim2(t.op,
                                                t.arg1.analyze(dn, vn),
                                                t.arg2.analyze(dn, vn))
def do_constant(value):
    """Compile a literal: a single instruction that pushes `value`."""
    def emit(dv, vv, stack):
        stack.append(value)
        return 1
    return (emit,)
def do_variable(index):
    """Compile a variable reference: push the value bound at `index` in vv."""
    def fetch(dv, vv, stack):
        stack.append(vv[index])
        return 1
    return (fetch,)
def do_if(do_then, do_test, do_else):
    """Compile a conditional into: test, branch, then, goto, else.

    `branch` pops the test value and either falls through to the then-arm
    or jumps past it; `goto` at the end of the then-arm skips the else-arm.
    """
    skip_then = len(do_then) + 2   # past branch + then-body + goto
    skip_else = len(do_else) + 1   # past goto + else-body
    def branch(dv, vv, stack):
        return 1 if stack.pop() else skip_then
    def goto(dv, vv, stack):
        return skip_else
    return do_test + (branch,) + do_then + (goto,) + do_else
def do_prim2(op, do_arg1, do_arg2):
    """Compile a binary primitive: evaluate both arguments, then apply op.

    The second operand is on top of the stack, so it is popped first.
    """
    def apply_op(dv, vv, stack):
        right = stack.pop()
        left = stack.pop()
        stack.append(op(left, right))
        return 1
    return do_arg1 + do_arg2 + (apply_op,)
def do_call(defn_index, do_arguments):
    """Compile a call: evaluate the arguments, then invoke the callee.

    The call instruction pops the evaluated arguments, runs the analyzed
    definition `dv[defn_index]` with them as its variable values, and
    pushes the result.
    """
    nargs = len(do_arguments)
    def call(dv, vv, stack):
        callee = dv[defn_index]
        # Bug fix: with zero arguments the original sliced stack[-0:],
        # which aliases the WHOLE stack, so a nullary call consumed and
        # cleared every value on the stack instead of none.
        if nargs:
            operands = stack[-nargs:]
            del stack[-nargs:]
        else:
            operands = []
        stack.append(run(callee, dv, operands))
        return 1
    return sum(do_arguments, ()) + (call,)
s6joui/MirrorOS | system/core/gesture-recognizer/sensors_raw_left.py | Python | mit | 1,155 | 0.041558 | import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
TRIG = 35
ECHO = 38
GPIO.setup(TRIG,GPIO.OUT)
GPIO.output(TRIG,0)
GPIO.setup(ECHO,GPIO.IN)
time.sleep(0.1)
print ("Starting gesture recognition")
try:
# here you put your main loop or block of code
while True:
value_list = []
for x in xrange(0,5):
GPIO.output(TRIG,1)
time.sleep(0.0001)
GPIO.output(TRIG,0)
start = time.time()
while GPIO.input(ECHO) == 0 and time.time()-start < 0.4:
pass
start = time.time()
while GPIO.input(ECHO) == 1:
pass
stop = time.time()
distance = (stop - start) * 17000
value_list.append(distance)
time.sleep(0.025)
value_list.sor | t();
print value_list[2]
except KeyboardInterrupt:
# here you put any code you want to run before the program
# exits when you press CTRL+C
print ("exiting")
except:
# this catches ALL other exceptions including errors.
# You won't get any error messages for debugging
# so only use it once your code is working
print ("Other error or exception occurred!")
finally:
| GPIO.cleanup() # this ensures a clean exit
|
rahul67/hue | apps/beeswax/src/beeswax/test_base.py | Python | apache-2.0 | 16,313 | 0.010804 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import atexit
import json
import logging
import os
import subprocess
import time
from nose.tools import assert_true, assert_false
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from desktop.lib.django_test_util import make_logged_in_client
from desktop.lib.paths import get_run_root
from desktop.lib.python_util import find_unused_port
from desktop.lib.security_util import get_localhost_name
from desktop.lib.test_utils import add_to_group, grant_access
from hadoop import pseudo_hdfs4
from hadoop.pseudo_hdfs4 import is_live_cluster, get_db_prefix
import beeswax.conf
from beeswax.server.dbms import get_query_server_config
from beeswax.server import dbms
HIVE_SERVER_TEST_PORT = find_unused_port()
_INITIALIZED = False
_SHARED_HIVE_SERVER_PROCESS = None
_SHARED_HIVE_SERVER = None
_SHARED_HIVE_SERVER_CLOSER = None
LOG = logging.getLogger(__name__)
def _start_server(cluster):
  """Spawn a HiveServer2 subprocess configured against the test cluster.

  Builds the Hadoop classpath, points Hive at the test configuration and
  the randomly-chosen test port, and returns the Popen handle.
  (Fix: restored the JAVA_HOME assignment and a comment line corrupted by
  extraction.)
  """
  args = [beeswax.conf.HIVE_SERVER_BIN.get()]
  env = cluster._mr2_env.copy()
  hadoop_cp_proc = subprocess.Popen(args=[get_run_root('ext/hadoop/hadoop') + '/bin/hadoop', 'classpath'], env=env, cwd=cluster._tmpdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  hadoop_cp_proc.wait()
  hadoop_cp = hadoop_cp_proc.stdout.read().strip()
  env.update({
    'HADOOP_HOME': get_run_root('ext/hadoop/hadoop'), # Used only by Hive for some reason
    'HIVE_CONF_DIR': beeswax.conf.HIVE_CONF_DIR.get(),
    'HIVE_SERVER2_THRIFT_PORT': str(HIVE_SERVER_TEST_PORT),
    'HADOOP_MAPRED_HOME': get_run_root('ext/hadoop/hadoop') + '/share/hadoop/mapreduce',
    # Links created in jenkins script.
    # If missing classes when booting HS2, check here.
    'AUX_CLASSPATH':
      get_run_root('ext/hadoop/hadoop') + '/share/hadoop/hdfs/hadoop-hdfs.jar'
      + ':' +
      get_run_root('ext/hadoop/hadoop') + '/share/hadoop/common/lib/hadoop-auth.jar'
      + ':' +
      get_run_root('ext/hadoop/hadoop') + '/share/hadoop/common/hadoop-common.jar'
      + ':' +
      get_run_root('ext/hadoop/hadoop') + '/share/hadoop/mapreduce/hadoop-mapreduce-client-core.jar'
    ,
    'HADOOP_CLASSPATH': hadoop_cp,
  })
  if os.getenv("JAVA_HOME"):
    env["JAVA_HOME"] = os.getenv("JAVA_HOME")
  LOG.info("Executing %s, env %s, cwd %s" % (repr(args), repr(env), cluster._tmpdir))
  return subprocess.Popen(args=args, env=env, cwd=cluster._tmpdir, stdin=subprocess.PIPE)
def get_shared_beeswax_server(db_name='default'):
global _SHARED_HIVE_SERVER
global _SHARED_HIVE_SERVER_CLOSER
if _SHARED_HIVE_SERVER is None:
cluster = pseudo_hdfs4.shared_cluster()
if is_live_cluster():
def s():
pass
else:
s = _start_mini_hs2(cluster)
start = time.time()
started = False
sleep = 1
make_logged_in_client()
user = User.objects.get(username='test')
query_server = get_query_server_config()
db = dbms.get(user, query_server)
while not started and time.time() - start <= 30:
try:
db.open_session(user)
started = True
break
except Exception, e:
LOG.info('HiveServer2 server could not be found after: %s' % e)
time.sleep(sleep)
if not started:
raise Exception("Server took too long to come up.")
_SHARED_HIVE_SERVER, _SHARED_HIVE_SERVER_CLOSER = cluster, s
return _SHARED_HIVE_SERVER, _SHARED_HIVE_SERVER_CLOSER
def _start_mini_hs2(cluster):
HIVE_CONF = cluster.hadoop_conf_dir
finish = (
beeswax.conf.HIVE_SERVER_HOST.set_for_testing(get_localhost_name()),
beeswax.conf.HIVE_SERVER_PORT.set_for_testing(HIVE_SERVER_TEST_PORT),
beeswax.conf.HIVE_SERVER_BIN.set_for_testing(get_run_root('ext/hive/hive') + '/bin/hiveserver2'),
beeswax.conf.HIVE_CONF_DIR.set_for_testing(HIVE_CONF)
)
default_xml = """<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:derby:;databaseName=%(root)s/metastore_db;create=true</value>
<description>JDBC connect string for a JDBC metastore</description>
</property>
<property>
<name>hive.server2.enable.impersonation</name>
<value>false</value>
</property>
<property>
<name>hive.querylog.location</name>
<value>%(querylog)s</value>
</property>
</configuration>
""" % {'root': cluster._tmpdir, 'querylog': cluster.log_dir + '/hive'}
file(HIVE_CONF + '/hive-site.xml', 'w').write(default_xml)
global _SHARED_HIVE_SERVER_PROCESS
if _SHARED_HIVE_SERVER_PROCESS is None:
p = _start_server(cluster)
LOG.info("started")
cluster.fs.do_as_superuser(cluster.fs.chmod, '/tmp', 01777)
_SHARED_HIVE_SERVER_PROCESS = p
def kill():
LOG.info("Killing server (pid %d)." % p.pid)
os.kill(p.pid, 9)
p.wait()
atexit.register(kill)
def s():
for f in finish:
f()
cluster.stop()
return s
def wait_for_query_to_finish(client, response, max=60.0):
# Take a async API execute_query() response in input
start = time.time()
sleep_time = 0.05
if is_finished(response): # aka Has error at submission
return response
content = json.loads(response.content)
watch_url = content['watch_url']
response = client.get(watch_url, follow=True)
# Loop and check status
while not is_finished(response):
time.sleep(sleep_time)
sleep_time = min(1.0, sleep_time * 2) # Capped exponential
if (time.time() - start) > max:
message = "Query took too long! %d seconds" % (time.time() - start)
LOG.warning(message)
raise Exception(message)
response = client.get(watch_url, follow=True)
return response
def is_finished(response):
status = json.loads(response.content)
return 'error' in status or status.get('isSuccess') or status.get('isFailure')
def fetch_query_result_data(client, status_response, n=0, server_name='beeswax'):
# Take a wait_for_query_to_finish() response in input
status = json.loads(status_response.content)
response = client.get("/%(server_name)s/results/%(id)s/%(n)s?format=json" % {'server_name': server_name, 'id': status.get('id'), 'n': n})
content = json.loads(response.content)
return content
def make_query(client, query, submission_type="Execute",
udfs=None, settings=None, resources=None,
wait=False, name=None, desc=None, local=True,
is_parameterized=True, max=60.0, database='default', email_notify=False, params=None, server_name='beeswax', **kwargs):
"""
Prepares arguments for the execute view.
If wait is True, waits for query to finish as well.
"""
if settings is None:
settings = []
if params is None:
params = []
if local:
# Tests run faster if not run against the real cluster.
settings.append(('mapreduce.framework.name', 'local'))
# Prepares arguments for the execute view.
parameters = {
'query-query': query,
'query-name': name if name else '',
'query-desc': desc if desc else '',
'query-is_parameterized': is_parameterized and "on",
'query-database': database,
'query-email_notify': email_notify and "on",
}
if submission_type == 'Execute':
parameters['button-submit'] = 'Whatever'
elif submission_type == 'Explain':
parameters['button-explain'] = 'Whatever'
elif submission_type == 'Save':
parameters['saveform-save'] = 'True'
if nam |
pawelmhm/splash | splash/qwebpage.py | Python | bsd-3-clause | 6,186 | 0.000647 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from collections import namedtuple
import sip
from PyQt5.QtWebKitWidgets import QWebPage, QWebView
from PyQt5.QtCore import QByteArray
from twisted.python import log
import six
from splash.har_builder import HarBuilder
RenderErrorInfo = namedtuple('RenderErrorInfo', 'type code text url')
class SplashQWebView(QWebView):
"""
QWebView subclass that handles 'close' requests.
"""
onBeforeClose = None
def closeEvent(self, event):
dont_close = False
if self.onBeforeClose:
dont_close = self.onBeforeClose()
if dont_close:
event.ignore()
else:
event.accept()
class SplashQWebPage(QWebPage):
"""
QWebPage subclass that:
* changes user agent;
* logs JS console messages;
* handles alert and confirm windows;
* returns additional info about render errors;
* logs HAR events;
* stores options for various Splash components.
"""
error_info = None
custom_user_agent = None
custom_headers = None
skip_custom_headers = False
navigation_locked = False
resource_timeout = 0
response_body_enabled = False
def __init__(self, verbosity=0):
super(QWebPage, self).__init__()
self.verbosity = verbosity
self.callbacks = {
"on_request": [],
"on_response_headers": [],
"on_response": [],
}
self.mainFrame().urlChanged.connect(self.on_url_changed)
self.mainFrame().titleChanged.connect(self.on_title_changed)
self.mainFrame().loadFinished.connect(self.on_load_finished)
self.mainFrame().initialLayoutCompleted.connect(self.on_layout_completed)
self.har = HarBuilder()
def reset_har(self):
self.har.reset()
def clear_callbacks(self, event=None):
"""
Unregister all callbacks for an event. If event is None
then all callbacks are removed.
"""
if event is None:
for ev in self.callbacks:
assert ev is not None
self.clear_callbacks(ev)
return
del self.callbacks[event][:]
def on_title_changed(self, title):
self.har.store_title(title)
def on_url_changed(self, url):
self.har.store_url(url)
def on_load_finished(self, ok):
self.har.store_timing("onLoad")
def on_layout_completed(self):
self.har.store_timing("onContentLoad")
def acceptNavigationRequest(self, webFrame, networkRequest, navigationType):
if self.navigation_locked:
return False
self.error_info = None
return super(SplashQWebPage, self).acceptNavigationRequest(webFrame, networkRequest, navigationType)
def javaScriptAlert(self, frame, msg):
return
def javaScriptConfirm(self, frame, msg):
return False
def javaScriptConsoleMessage(self, msg, line_number, source_id):
if self.verbosity >= 2:
log.msg("JsConsole(%s | :%d): %s" % (source_id, line_number, msg), system='render')
def userAgentForUrl(self, url):
if self.custom_user_agent is None:
return super(SplashQWebPage, self).userAgentForUrl(url)
else:
return self.custom_user_agent
# loadFinished signal handler receives ok=False at least these cases:
# 1. when there is an error with the page (e.g. the page is not available);
| # 2. when a redirect happened before all related resource are loaded;
# 3. when page sends headers that are not parsed correctly
# (e.g. a bad Content-Type).
# By implementing ErrorPageExtension we can catch (1) and
# distinguish it from (2) and (3).
def extension(self, extension, info=None, errorPage=None):
if extension == QWebPage.ErrorPageExtension:
# catch the error, populate self.errorInfo and return an error page
info = sip.cast(info, QWebPage.ErrorPageExtensionOption)
domain = 'Unknown'
if info.domain == QWebPage.QtNetwork:
domain = 'Network'
elif info.domain == QWebPage.Http:
domain = 'HTTP'
elif info.domain == QWebPage.WebKit:
domain = 'WebKit'
self.error_info = RenderErrorInfo(
domain,
int(info.error),
six.text_type(info.errorString),
six.text_type(info.url.toString())
)
# XXX: this page currently goes nowhere
content = u"""
<html><head><title>Failed loading page</title></head>
<body>
<h1>Failed loading page ({0.text})</h1>
<h2>{0.url}</h2>
<p>{0.type} error #{0.code}</p>
</body></html>""".format(self.error_info)
errorPage = sip.cast(errorPage, QWebPage.ErrorPageExtensionReturn)
errorPage.content = QByteArray(content.encode('utf-8'))
return True
# XXX: this method always returns True, even if we haven't
# handled the extension. Is it correct? When can this method be
# called with extension which is not ErrorPageExtension if we
# are returning False in ``supportsExtension`` for such extensions?
return True
def supportsExtension(self, extension):
if extension == QWebPage.ErrorPageExtension:
return True
return False
def maybe_redirect(self, load_finished_ok):
"""
Return True if the current webpage state looks like a redirect.
Use this function from loadFinished handler to ignore spurious
signals.
FIXME: This can return True if server returned incorrect
Content-Type header, but there is no an additional loadFinished
signal in this case.
"""
return not load_finished_ok and self.error_info is None
def is_ok(self, load_finished_ok):
return load_finished_ok and self.error_info is None
def error_loading(self, load_finished_ok):
return load_finished_ok and self.error_info is not None
|
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/boto/mws/connection.py | Python | agpl-3.0 | 49,807 | 0.000161 | # Copyright (c) 2012-2014 Andy Davidoff http://www.disruptek.com/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import xml.sax
import hashlib
import string
import collections
from boto.connection import AWSQueryConnection
from boto.exception import BotoServerError
import boto.mws.exception
import boto.mws.response
from boto.handler import XmlHandler
from boto.compat import filter, map, six, encodebytes
__all__ = ['MWSConnection']
api_version_path = {
'Feeds': ('2009-01-01', 'Merchant', '/'),
'Reports': ('2009-01-01', 'Merchant', '/'),
'Orders': ('2013-09-01', 'SellerId', '/Orders/2013-09-01'),
'Products': ('2011-10-01', 'SellerId', '/Products/2011-10-01'),
'Sellers': ('2011-07-01', 'SellerId', '/Sellers/2011-07-01'),
'Inbound': ('2010-10-01', 'SellerId',
'/FulfillmentInboundShipment/2010-10-01'),
'Outbound': ('2010-10-01', 'SellerId',
'/FulfillmentOutboundShipment/2010-10-01'),
'Inventory': ('2010-10-01', 'SellerId',
'/FulfillmentInventory/2010-10-01'),
'Recommendations': ('2013-04-01', 'SellerI | d',
'/Recommendations/2013-04-01'),
'CustomerInfo': ('2014-03-01', 'SellerId',
'/CustomerInformation/2014-03-01'),
'CartInfo': ('2014-03-01', 'SellerId',
'/CartInformation/2014-03-01'),
'Subscriptions': ('2013-07-01', 'SellerId',
'/Subscriptions/2013-07-01'),
'OffAma | zonPayments': ('2013-01-01', 'SellerId',
'/OffAmazonPayments/2013-01-01'),
}
content_md5 = lambda c: encodebytes(hashlib.md5(c).digest()).strip()
decorated_attrs = ('action', 'response', 'section',
'quota', 'restore', 'version')
api_call_map = {}
def add_attrs_from(func, to):
for attr in decorated_attrs:
setattr(to, attr, getattr(func, attr, None))
to.__wrapped__ = func
return to
def structured_lists(*fields):
def decorator(func):
def wrapper(self, *args, **kw):
for key, acc in [f.split('.') for f in fields]:
if key in kw:
newkey = key + '.' + acc + (acc and '.' or '')
for i in range(len(kw[key])):
kw[newkey + str(i + 1)] = kw[key][i]
kw.pop(key)
return func(self, *args, **kw)
wrapper.__doc__ = "{0}\nLists: {1}".format(func.__doc__,
', '.join(fields))
return add_attrs_from(func, to=wrapper)
return decorator
def http_body(field):
def decorator(func):
def wrapper(*args, **kw):
if any([f not in kw for f in (field, 'content_type')]):
message = "{0} requires {1} and content_type arguments for " \
"building HTTP body".format(func.action, field)
raise KeyError(message)
kw['body'] = kw.pop(field)
kw['headers'] = {
'Content-Type': kw.pop('content_type'),
'Content-MD5': content_md5(kw['body']),
}
return func(*args, **kw)
wrapper.__doc__ = "{0}\nRequired HTTP Body: " \
"{1}".format(func.__doc__, field)
return add_attrs_from(func, to=wrapper)
return decorator
def destructure_object(value, into, prefix, members=False):
if isinstance(value, boto.mws.response.ResponseElement):
destructure_object(value.__dict__, into, prefix, members=members)
elif isinstance(value, collections.Mapping):
for name in value:
if name.startswith('_'):
continue
destructure_object(value[name], into, prefix + '.' + name,
members=members)
elif isinstance(value, six.string_types):
into[prefix] = value
elif isinstance(value, collections.Iterable):
for index, element in enumerate(value):
suffix = (members and '.member.' or '.') + str(index + 1)
destructure_object(element, into, prefix + suffix,
members=members)
elif isinstance(value, bool):
into[prefix] = str(value).lower()
else:
into[prefix] = value
def structured_objects(*fields, **kwargs):
def decorator(func):
def wrapper(*args, **kw):
members = kwargs.get('members', False)
for field in filter(lambda i: i in kw, fields):
destructure_object(kw.pop(field), kw, field, members=members)
return func(*args, **kw)
wrapper.__doc__ = "{0}\nElement|Iter|Map: {1}\n" \
"(ResponseElement or anything iterable/dict-like)" \
.format(func.__doc__, ', '.join(fields))
return add_attrs_from(func, to=wrapper)
return decorator
def requires(*groups):
def decorator(func):
def requires(*args, **kw):
hasgroup = lambda group: all(key in kw for key in group)
if 1 != len(list(filter(hasgroup, groups))):
message = ' OR '.join(['+'.join(g) for g in groups])
message = "{0} requires {1} argument(s)" \
"".format(func.action, message)
raise KeyError(message)
return func(*args, **kw)
message = ' OR '.join(['+'.join(g) for g in groups])
requires.__doc__ = "{0}\nRequired: {1}".format(func.__doc__,
message)
return add_attrs_from(func, to=requires)
return decorator
def exclusive(*groups):
def decorator(func):
def wrapper(*args, **kw):
hasgroup = lambda group: all(key in kw for key in group)
if len(list(filter(hasgroup, groups))) not in (0, 1):
message = ' OR '.join(['+'.join(g) for g in groups])
message = "{0} requires either {1}" \
"".format(func.action, message)
raise KeyError(message)
return func(*args, **kw)
message = ' OR '.join(['+'.join(g) for g in groups])
wrapper.__doc__ = "{0}\nEither: {1}".format(func.__doc__,
message)
return add_attrs_from(func, to=wrapper)
return decorator
def dependent(field, *groups):
def decorator(func):
def wrapper(*args, **kw):
hasgroup = lambda group: all(key in kw for key in group)
if field in kw and not any(hasgroup(g) for g in groups):
message = ' OR '.join(['+'.join(g) for g in groups])
message = "{0} argument {1} requires {2}" \
"".format(func.action, field, message)
raise KeyError(message)
return func(*args, **kw)
message = ' OR '.join(['+'.join(g) for g in groups])
wrapper.__doc__ = "{0}\n{1} requires: {2}".format(func.__doc__,
field,
|
gticket/scikit-neuralnetwork | sknn/tests/test_sklearn.py | Python | bsd-3-clause | 2,706 | 0.005174 | import unittest
from nose.tools import (assert_equal, assert_raises, assert_in, assert_not_in)
import numpy
from scipy.stats import randint, uniform
from sklearn.grid_search import GridSearchCV, RandomizedSearchCV
from sklearn.cross_validation import cross_val_score
from sknn.mlp import Regressor as MLPR, Classifier as MLPC
from sknn.mlp import Layer as L
class TestGridSearchRegressor(unittest.TestCase):
__estimator__ = MLPR
def setUp(self):
self.a_in = numpy.random.uniform(0.0, 1.0, (64,16))
self.a_out = numpy.zeros((64,1))
def test_GridGlobalParams(self):
clf = GridSearchCV(
self.__estimator__(layers=[L("Linear")], n_iter=1),
param_grid={'learning_rate': [0.01, 0.001]})
clf.fit(self.a_in, self.a_out)
def test_GridLayerParams(self):
clf = Grid | SearchCV(
self.__estimator__(layers=[L("Rectifier", units=12), L("Linear")], n_iter=1),
param_grid={'hidden0__units': [4, 8, 12]})
clf.fit(self.a_in, self.a_out)
def test_RandomGlobalParams(self):
clf = RandomizedSearchCV(
self.__estimator__(layers=[L("Softmax")], n_iter=1),
param_distributions={'learning_rate': uniform(0.001 | , 0.01)},
n_iter=2)
clf.fit(self.a_in, self.a_out)
def test_RandomLayerParams(self):
clf = RandomizedSearchCV(
self.__estimator__(layers=[L("Softmax", units=12), L("Linear")], n_iter=1),
param_distributions={'hidden0__units': randint(4, 12)},
n_iter=2)
clf.fit(self.a_in, self.a_out)
def test_RandomMultipleJobs(self):
clf = RandomizedSearchCV(
self.__estimator__(layers=[L("Softmax", units=12), L("Linear")], n_iter=1),
param_distributions={'hidden0__units': randint(4, 12)},
n_iter=4, n_jobs=4)
clf.fit(self.a_in, self.a_out)
class TestGridSearchClassifier(TestGridSearchRegressor):
__estimator__ = MLPC
def setUp(self):
self.a_in = numpy.random.uniform(0.0, 1.0, (64,16))
self.a_out = numpy.random.randint(0, 4, (64,))
class TestCrossValidation(unittest.TestCase):
def test_Regressor(self):
a_in = numpy.random.uniform(0.0, 1.0, (64,16))
a_out = numpy.zeros((64,1))
cross_val_score(MLPR(layers=[L("Linear")], n_iter=1), a_in, a_out, cv=5)
def test_Classifier(self):
a_in = numpy.random.uniform(0.0, 1.0, (64,16))
a_out = numpy.random.randint(0, 4, (64,))
cross_val_score(MLPC(layers=[L("Linear")], n_iter=1), a_in, a_out, cv=5)
|
stscieisenhamer/glue | glue/viewers/scatter/layer_artist.py | Python | bsd-3-clause | 18,014 | 0.001832 | from __future__ import absolute_import, division, print_function
import numpy as np
from matplotlib.colors import Normalize
from matplotlib.collections import LineCollection
from mpl_scatter_density import ScatterDensityArtist
from astropy.visualization import (ImageNormalize, LinearStretch, SqrtStretch,
AsinhStretch, LogStretch)
from glue.utils import defer_draw, broadcast_to
from glue.viewers.scatter.state import ScatterLayerState
from glue.viewers.matplotlib.layer_artist import MatplotlibLayerArtist
from glue.core.exceptions import IncompatibleAttribute
STRETCHES = {'linear': LinearStretch,
'sqrt': SqrtStretch,
'arcsinh': AsinhStretch,
'log': LogStretch}
CMAP_PROPERTIES = set(['cmap_mode', 'cmap_att', 'cmap_vmin', 'cmap_vmax', 'cmap'])
MARKER_PROPERTIES = set(['size_mode', 'size_att', 'size_vmin', 'size_vmax', 'size_scaling', 'size'])
LINE_PROPERTIES = set(['linewidth', 'linestyle'])
DENSITY_PROPERTIES = set(['dpi', 'stretch', 'density_contrast'])
VISUAL_PROPERTIES = (CMAP_PROPERTIES | MARKER_PROPERTIES | DENSITY_PROPERTIES |
LINE_PROPERTIES | set(['color', 'alpha', 'zorder', 'visible']))
DATA_PROPERTIES = set(['layer', 'x_att', 'y_att', 'cmap_mode', 'size_mode', 'density_map',
'xerr_att', 'yerr_att', 'xerr_visible', 'yerr_visible',
'vector_visible', 'vx_att', 'vy_att', 'vector_arrowhead', 'vector_mode',
'vector_origin', 'line_visible', 'markers_visible', 'vector_scaling'])
class InvertedNormalize(Normalize):
def __call__(self, *args, **kwargs):
return 1 - super(InvertedNormalize, self).__call__(*args, **kwargs)
class DensityMapLimits(object):
contrast = 1
def min(self, array):
return 0
def max(self, array):
return 10. ** (np.log10(np.nanmax(array)) * self.contrast)
def set_mpl_artist_cmap(artist, values, state):
vmin = state.cmap_vmin
vmax = state.cmap_vmax
cmap = state.cmap
if isinstance(artist, ScatterDensityArtist):
artist.set_c(values)
else:
artist.set_array(values)
artist.set_cmap(cmap)
if vmin > vmax:
artist.set_clim(vmax, vmin)
artist.set_norm(InvertedNormalize(vmax, vmin))
else:
artist.set_clim(vmin, vmax)
artist.set_norm(Normalize(vmin, vmax))
class ScatterLayerArtist(MatplotlibLayerArtist):
_layer_state_cls = ScatterLayerState
def __init__(self, axes, viewer_state, layer_state=None, layer=None):
super(ScatterLayerArtist, self).__init__(axes, viewer_state,
layer_state=layer_state, layer=layer)
# Watch for changes in the viewer state which would require the
# layers to be redrawn
self._viewer_state.add_global_callback(self._update_scatter)
self.state.add_global_callback(self._update_scatter)
# Scatter
self.scatter_artist = self.axes.scatter([], [])
self.plot_artist = self.axes.plot([], [], 'o', mec='none')[0]
self.errorbar_artist = self.axes.errorbar([], [], fmt='none')
self.vector_artist = None
self.line_collection = LineCollection(np.zeros((0, 2, 2)))
self.axes.add_collection(self.line_collection)
# Scatter density
self.density_auto_limits = DensityMapLimits()
self.density_artist = ScatterDensityArtist(self.axes, [], [], color='white',
vmin=self.density_auto_limits.min,
vmax=self.density_auto_limits.max)
self.axes.add_artist(self.density_artist)
self.mpl_artists = [self.scatter_artist, self.plot_artist,
self.errorbar_artist, self.vector_artist,
self.line_collection, self.density_artist]
self.errorbar_index = 2
self.vector_index = 3
self.reset_cache()
def reset_cache(self):
self._last_viewer_state = {}
self._last_layer_state = {}
@defer_draw
def _update_data(self, changed):
# Layer artist has been cleared already
if len(self.mpl_artists) == 0:
return
try:
x = self.layer[self._viewer_state.x_att].ravel()
except (IncompatibleAttribute, IndexError):
# The following includes a call to self.clear()
self.disable_invalid_attributes(self._viewer_state.x_att)
return
else:
self.enable()
try:
y = self.layer[self._viewer_state.y_att].ravel()
except (IncompatibleAttribute, IndexError):
# The following includes a call to self.clear()
self.disable_invalid_attributes(self._viewer_state.y_att)
return
else:
self.enable()
if self.state.markers_visible:
if self.state.density_map:
self.density_artist.set_xy(x, y)
self.plot_artist.set_data([], [])
self.scatter_artist.set_offsets(np.zeros((0, 2)))
else:
if self.state.cmap_mode == 'Fixed' and self.state.size_mode == 'Fixed':
# In this case we use Matplotlib's plot function because it has much
# better performance than scatter.
self.plot_artist.set_data(x, y)
self.scatter_artist.set_offsets(np.zeros((0, 2)))
self.density_artist.set_xy([], [])
else:
self.plot_artist.set_data([], [])
offsets = np.vstack((x, y)).transpose()
self.scatter_artist.set_offsets(offsets)
self.density_artist.set_xy([], [])
else:
self.plot_artist.set_data([], [])
self.scatter_artist.set_offsets(np.zeros((0, 2)))
self.density_artist.set_xy([], [])
if self.state.line_visible:
if self.state.cmap_mode == 'Fixed':
points = np.array([x, y]).transpose()
self.line_collection.set_segments([points])
else:
# In the case where we want to color the line, we need to over
# sample the line by a factor of two so that we can assign the
# correct colors to segments - if we didn't do this, then
# segments on one side of a point would be a different color
# from the other side. With oversampling, we can have half a
# segment on either side of a point be the same color as a
# point
x_fine = np.zeros(len(x) * 2 - 1, dtype=float)
y_fine = np.zeros(len(y) * 2 - 1, dtype=float)
x_fine[::2] = x
x_fine[1::2] = 0.5 * (x[1:] + x[:-1])
y_fine[::2] = y
y_fine[1::2] = 0.5 * (y[1:] + y[:-1])
points = np.array([x_fine, y_fine]).transpose().reshape(-1, 1, 2)
segments = np.concatenate([points[:-1], points[1:]], axis=1)
self.line_collection.set_segments(segments)
else:
self.line_collection.set_segments(np.zeros((0, 2, 2)))
for eartist in list(self.errorbar_artist[2]):
if eartist is not None:
try:
eartist.remove()
except ValueError:
pass
except AttributeError: # Matplotlib < 1.5
pass
if self.vector_artist is not None:
self.vector_artist.remove()
self.vector_artist = None
if self.state.vector_visible:
| if self.state.vx_att is not None and self.state.vy_att is not None:
vx = self.layer[self.state.vx_att].ravel()
vy = self.layer[self.state.vy_att].ravel()
if self.state.vector_mode == 'Polar':
| ang = vx
length = vy
# assume ang is anti clockwise from the x axis
vx = length * np.cos(np.ra |
pyhmsa/pyhmsa | pyhmsa/fileformat/importer/emsa.py | Python | mit | 12,424 | 0.002173 | """
Importer from EMSA file format
"""
# Standard library modules.
import datetime
# Third party modules.
import numpy as np
# Local modules.
from pyhmsa.fileformat.importer.importer import _Importer, _ImporterThread
from pyhmsa.fileformat.common.emsa import calculate_checksum
from pyhmsa.datafile import DataFile
from pyhmsa.spec.header import Header
from pyhmsa.spec.condition.probe import ProbeEM, ProbeTEM
from pyhmsa.spec.condition.acquisition import AcquisitionPoint
from pyhmsa.spec.condition.specimenposition import SpecimenPosition
from pyhmsa.spec.condition.detector import \
(DetectorSpectrometer, DetectorSpectrometerXEDS, DetectorSpectrometerCL,
Window)
from pyhmsa.spec.condition.calibration import CalibrationLinear
from pyhmsa.spec.datum.analysis import Analysis1D
from pyhmsa.type.unit import validate_unit
from pyhmsa.util.parsedict import parsed | ict
# Globals and constants variables.
from pyhmsa.spec.condition.detector import \
(COLLECTION_MODE_PARALLEL, COLLECTION_MODE_SERIAL,
XEDS_TEC | HNOLOGY_GE, XEDS_TECHNOLOGY_SILI, XEDS_TECHNOLOGY_SDD,
XEDS_TECHNOLOGY_UCAL,
SIGNAL_TYPE_EDS, SIGNAL_TYPE_WDS, SIGNAL_TYPE_CLS)
from pyhmsa.fileformat.common.emsa import \
(EMSA_ELS_DETECTOR_SERIAL, EMSA_ELS_DETECTOR_PARALL,
EMSA_EDS_DETECTOR_SIBEW, EMSA_EDS_DETECTOR_SIUTW, EMSA_EDS_DETECTOR_SIWLS,
EMSA_EDS_DETECTOR_GEBEW, EMSA_EDS_DETECTOR_GEUTW, EMSA_EDS_DETECTOR_GEWLS,
EMSA_EDS_DETECTOR_SDBEW, EMSA_EDS_DETECTOR_SDUTW, EMSA_EDS_DETECTOR_SDWLS,
EMSA_EDS_DETECTOR_UCALUTW)
_ELSDET_TO_COLLECTION_MODE = \
{EMSA_ELS_DETECTOR_PARALL: COLLECTION_MODE_PARALLEL,
EMSA_ELS_DETECTOR_SERIAL: COLLECTION_MODE_SERIAL}
_EDSDET_TO_XEDS_TECHNOLOGY = \
{EMSA_EDS_DETECTOR_SIBEW: XEDS_TECHNOLOGY_SILI,
EMSA_EDS_DETECTOR_SIUTW: XEDS_TECHNOLOGY_SILI,
EMSA_EDS_DETECTOR_SIWLS: XEDS_TECHNOLOGY_SILI,
EMSA_EDS_DETECTOR_GEBEW: XEDS_TECHNOLOGY_GE,
EMSA_EDS_DETECTOR_GEUTW: XEDS_TECHNOLOGY_GE,
EMSA_EDS_DETECTOR_GEWLS: XEDS_TECHNOLOGY_GE,
EMSA_EDS_DETECTOR_SDBEW: XEDS_TECHNOLOGY_SDD,
EMSA_EDS_DETECTOR_SDUTW: XEDS_TECHNOLOGY_SDD,
EMSA_EDS_DETECTOR_SDWLS: XEDS_TECHNOLOGY_SDD,
EMSA_EDS_DETECTOR_UCALUTW: XEDS_TECHNOLOGY_UCAL}
class _ImporterEMSAThread(_ImporterThread):
def _run(self, filepath, *args, **kwargs):
emsa_file = None
try:
# Parse EMSA file
emsa_file = open(filepath, 'rt')
lines = emsa_file.readlines()
self._update_status(0.1, 'Verify checksum')
self._verify_checksum(lines)
self._update_status(0.2, 'Parse keywords')
keywords = self._parse_keywords(lines)
self._update_status(0.3, 'Parse data')
buffer = self._parse_data(lines, keywords)
# Create data file
datafile = DataFile()
self._update_status(0.4, 'Extracting header')
datafile.header.update(self._extract_header(keywords))
self._update_status(0.5, 'Extracting probe')
datafile.conditions.update(self._extract_probe(keywords))
self._update_status(0.6, 'Extracting acquisition')
datafile.conditions.update(self._extract_acquisition(keywords))
self._update_status(0.7, 'Extracting detector')
datafile.conditions.update(self._extract_detector(keywords))
datum = Analysis1D(len(buffer), dtype=buffer.dtype,
buffer=np.ravel(buffer),
conditions=datafile.conditions)
datafile.data['Spectrum'] = datum
finally:
if emsa_file is not None:
emsa_file.close()
return datafile
def _is_line_keyword(self, line):
try:
return line.strip()[0] == '#'
except:
return False
def _verify_checksum(self, lines):
for line in lines:
if not self._is_line_keyword(line):
continue
tag, _comment, expected_checksum = self._parse_keyword_line(line)
if tag == 'ENDOFDATA':
return # No checksum
if tag == 'CHECKSUM':
break
actual_checksum = calculate_checksum(lines)
if actual_checksum != expected_checksum:
raise IOError("The checksums don't match: %i != %i " % \
(actual_checksum, expected_checksum))
def _parse_keywords(self, lines):
keywords = parsedict()
# First pass
for line in lines:
if not self._is_line_keyword(line):
break
tag, _comment, value = self._parse_keyword_line(line)
if tag == 'SPECTRUM':
break
keywords.setdefault(tag, []).append(value)
# Second pass (remove list if only one value)
for tag, values in keywords.items():
if len(values) == 1:
keywords[tag] = values[0]
else:
keywords[tag] = tuple(values)
return keywords
def _parse_keyword_line(self, line):
line = line.strip("#") # Strip keyword character
tag, value = line.split(":", 1)
tag = tag.strip()
value = value.strip()
try:
tag, comment = tag.split()
except:
comment = ""
tag = tag.upper()
comment = comment.strip("-")
return tag, comment, value
def _parse_data(self, lines, keywords):
# Filter to get only data lines
lines = filter(lambda line: not self._is_line_keyword(line), lines)
# Read based on data type
datatype = keywords.get('DATATYPE')
if datatype is None:
raise ValueError('No DATATYPE specified')
datatype = datatype.upper()
if datatype == 'XY':
data = self._parse_data_xy(lines, keywords)
elif datatype == 'Y':
data = self._parse_data_y(lines, keywords)
else:
raise ValueError('Unknown data type')
# Check number of points
npoints = int(float(keywords.get('NPOINTS', len(data))))
if npoints != len(data):
raise ValueError('Inconsistent number of points. NPOINTS=%i != len(data)=%i' % \
(npoints, len(data)))
return data
def _parse_data_xy(self, lines, keywords):
data = []
for line in lines:
data.append(self._parse_data_line(line))
return np.array(data)[:, 1]
def _parse_data_y(self, lines, keywords):
ydata = []
for line in lines:
ydata.extend(self._parse_data_line(line))
return np.array(ydata)
def _parse_data_line(self, line):
# Split values separated by a comma
tmprow = [value.strip() for value in line.split(',')]
# Split values separated by a space
row = []
for value in tmprow:
row.extend(value.split())
# Convert to float
row = list(map(float, row))
return row
def _extract_header(self, keywords):
header = Header()
header.title = keywords['TITLE']
header.date = \
datetime.datetime.strptime(keywords['DATE'], '%d-%b-%Y').date()
header.time = \
datetime.datetime.strptime(keywords['TIME'], '%H:%M').time()
header.author = keywords['OWNER']
return header
def _extract_probe(self, keywords):
if 'BEAMKV' not in keywords:
return {}
kwargs = {}
kwargs['beam_voltage'] = (keywords.getfloat('BEAMKV'), 'kV')
kwargs['beam_current'] = (keywords.getfloat('PROBECUR'), 'nA')
kwargs['emission_current'] = (keywords.getfloat('EMISSION'), 'uA')
kwargs['beam_diameter'] = (keywords.getfloat('BEAMDIAM'), 'nm')
kwargs['scan_magnification'] = keywords.getint('MAGCAM')
if 'OPERMODE' in keywords:
kwargs['lens_mode'] = keywords.get('OPERMODE') # Enums are identical
kwargs['convergence_angle'] = (keywords.getfloat('CONVANGLE'), 'mrad')
c = ProbeTEM(**kwargs)
else:
|
fangjing828/LEHome | home.py | Python | apache-2.0 | 9,281 | 0.004432 | #!/usr/bin/env python
# encoding: utf-8
# Copyright 2014 Xinyu, He <legendmohe@foxmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import importlib
import traceback
import signal
import time
import tornado.ioloop
import tornado.web
import redis
from lib.command.runtime import Rumtime
from lib.speech.Speech import Text2Speech
from lib.helper.SwitchHelper import SwitchHelper
from lib.helper.RilHelper import RilHelper
from lib.helper.SensorHelper import SensorHelper
from lib.helper.MessageHelper import MessageHelper
from lib.helper.TagHelper import TagHelper
from util.Res import Res
from lib.sound import Sound
from util.log import *
# class TracePrints(object):
# def __init__(self):
# self.stdout = sys.stdout
# def write(self, s):
# self.stdout.write("Writing %r\n" % s)
# traceback.print_stack(file=self.stdout)
# sys.stdout = TracePrints()
class Home:
def __init__(self):
INFO(u"==========服务器启动==========")
self._global_context = {}
self._init_res = Res.init("init.json")
self._init_storage()
self._init_cmd_socket()
self._init_audio_server()
self._init_helper()
self._init_speaker()
self._init_command()
self._resume = False
self.runtime.init_tasklist() # load unfinished task
self.publish_msg("init", u"==========服务器启动==========")
def _init_command(self):
INFO('initlizing command...')
settings = self._init_res
if settings:
com_json = settings['command']
self.runtime = Rumtime({
"whiles":com_json["while"],
"ifs":com_json["if"],
"thens":com_json["then"],
"elses":com_json["else"],
"delay":com_json["delay"],
"trigger":com_json["trigger"],
"action":com_json["action"],
"target":com_json["target"],
"stop":com_json["stop"],
"finish":com_json["finish"],
"nexts":com_json["next"],
"logical":com_json["logical"],
"compare":com_json["compare"],
})
self.runtime.setDEBUG(False)
self.runtime.cmd_begin_callback = self._cmd_begin_callback
self.runtime.cmd_end_callback = self._cmd_end_callback
module_cache = {}
cb_json = settings["callback"]
for com_name in cb_json.keys():
cbs = cb_json[com_name]
for cb_token in cbs.keys():
try:
token = cbs[cb_token].encode("utf-8")
if token == "" or token is None:
WARN("token ", token, " no callbacks.")
continue
dpos = token.rindex('.')
module_name = token[:dpos]
class_name = token[dpos + 1:]
cb_module_name = "usr.callbacks.%s.%s" % (com_name, module_name)
cb_object = module_cache.get("%s.%s" % \
(cb_module_name, class_name)
)
if cb_object is None:
cb_module = importlib.import_module(cb_module_name)
cb_object = getattr(cb_module, class_name)()
cb_object.initialize(
_global_context = self._global_context,
_class_context = {},
_speaker = self._spk,
_home = self,
)
DEBUG("load callback: " + cb_module_name + " for command token:" + cb_token)
self.runtime.register_callback(
com_name,
cb_token,
cb_object)
except Exception, e:
ERROR("init commands faild.")
ERROR(traceback.format_exc())
def _init_storage(self):
host = self._init_res["storage"]["host"]
port = self._init_res["storage"]["port"]
INFO("initlizing storage:%s:%s" % (host, port))
self._storage = redis.Redis(host=host, port=port)
if self._storage is None:
ERROR("storage init faild!")
def _init_speaker(self):
INFO("initlizing speaker...")
self._spk = Text2Speech()
def _init_audio_server(self):
Sound.AUDIO_SERVER_ADDRESS = self._init_res["connection"]["audio_server"]
INFO("connect to audio server: %s " % (Sound.AUDIO_SERVER_ADDRESS))
def _init_cmd_socket(self):
cmd_bind_port = self._init_res["connection"]["cmd_bind_port"]
INFO("initlizing cmd socket, bing to:" + cmd_bind_port)
self._cmd_bind_port = cmd_bind_port
def _init_helper(self):
publisher_ip = self._init_res["connection"]["publisher"]
msg_cmd_bind = self._init_res["connection"]["msg_cmd_bind"]
INFO("init message publisher: %s, cmd bind: %s" %
(publisher_ip, msg_cmd_bind))
self._msg_sender = MessageHelper(publisher_ip, msg_cmd_bind)
switch_scan = SwitchHelper.BOARDCAST_ADDRESS
INFO("init switch scan: " + switch_scan)
self._switch = SwitchHelper()
INFO("init ril helper")
self._ril = RilHelper()
INFO("init sensor helper")
self._sensor = SensorHelper()
tag_server_ips = self._init_res["connection"]["tag_server"]
INFO("init tag server.")
for tag in tag_server_ips:
INFO(" place:%s, ip:%s" % (tag, tag_server_ips[tag]))
self._tag = TagHelper(tag_server_ips, self._init_res["tag"])
def _cmd_begin_callback(self, command):
INFO("command begin: %s" % (command))
# self.publish_msg(command, u"执行: " + command)
def _cmd_end_callback(self, command):
INFO("command end: %s" % (command))
# self.publish_msg(command, "end: " + command)
def publish_msg(self, sub_id, msg, cmd_type="normal"):
self._msg_sender.publish_msg(sub_id, msg, cmd_type)
def parse_cmd(self, cmd):
if not self._resume:
timestamp = int(time.time())
INFO("command: " + cmd)
if cmd.startswith("@"):
cmd = cmd[1:]
self._storage.rpush(
"lehome:cmd_location_list",
"%d:%s" % (timestamp, cmd)
)
self.publish_msg(cmd, cmd, cmd_type="bc_loc")
else:
self._storage.rpush(
"lehome:cmd_history_list",
"%d:%s" % (timestamp, cmd)
)
self.runtime | .parse(cmd)
def activate(self):
| Sound.play(Res.get_res_path("sound/com_begin"))
self._spk.start()
self.runtime.start()
application = tornado.web.Application([
(r"/home/cmd", CmdHandler, dict(home=self)),
])
application.listen(self._cmd_bind_port.encode("utf-8"))
tornado.ioloop.PeriodicCallback(try_exit, 1000).start()
tornado.ioloop.IOLoop.instance().start()
INFO("home activate!")
def deactivate(self):
self._spk.stop() |
peterayeni/dash | dash/dashblocks/migrations/0003_auto_20140804_0236.py | Python | bsd-3-clause | 1,506 | 0.001328 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def generate_initial_block_types(apps, schema_editor):
User = apps.get_model("auth", "User")
root = User.objects.filter(username="root").first()
if not root:
root = User.objects.filter(username="root2").first()
if not root:
root = User.objects.create(username="root2")
DashBlockType = apps.get_model("dashblocks", "DashBlockType")
DashBlockType.objects.get_or_create(name="U-Reporters",
slug="ureporters",
description="U-Reporters Page",
has_title=True,
has_image=True,
has_rich_text=False,
has_summary=False,
has_link=False,
has_gallery=False,
| has_color=False,
| has_video=False,
has_tags=False,
created_by=root,
modified_by=root)
class Migration(migrations.Migration):
dependencies = [
('dashblocks', '0002_auto_20140802_2112'),
]
operations = [
migrations.RunPython(generate_initial_block_types),
]
|
googleapis/python-spanner | samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py | Python | apache-2.0 | 1,667 | 0.0006 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateDatabase
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-spanner-admin-database
# [START spanner_v1_generated_DatabaseAdmin_CreateDatabase_async]
from google.cloud import spanner_admin_database_v1
async def sample_create_database():
# Create a client
client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
# Initialize request | argument(s)
request = spanner_admin_database_v1.CreateDatabaseRequest(
parent="parent_value",
create_statement="create_statement_value",
)
# Make the request
operation = client.create_database(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [EN | D spanner_v1_generated_DatabaseAdmin_CreateDatabase_async]
|
googleapis/python-datalabeling | samples/generated_samples/datalabeling_v1beta1_generated_data_labeling_service_update_evaluation_job_sync.py | Python | apache-2.0 | 1,524 | 0.001969 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www. | apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or impl | ied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for UpdateEvaluationJob
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-datalabeling
# [START datalabeling_v1beta1_generated_DataLabelingService_UpdateEvaluationJob_sync]
from google.cloud import datalabeling_v1beta1
def sample_update_evaluation_job():
# Create a client
client = datalabeling_v1beta1.DataLabelingServiceClient()
# Initialize request argument(s)
request = datalabeling_v1beta1.UpdateEvaluationJobRequest(
)
# Make the request
response = client.update_evaluation_job(request=request)
# Handle the response
print(response)
# [END datalabeling_v1beta1_generated_DataLabelingService_UpdateEvaluationJob_sync]
|
Microsoft/PTVS | Python/Tests/TestData/DebuggerProject/LocalsTest3.py | Python | apache-2.0 | 28 | 0.071429 | de | f f(x):
y = 'abc'
f(42)
| |
abhipec/academicCodes | CS350M/main.py | Python | gpl-2.0 | 2,153 | 0.006038 | from tree import *
from worker import *
import multiprocessing
import graph as graph
import random
# create a sample undirected graph
g = graph.sampleUnDirectedGraph()
# get list of all vertex in graph
vertexList = g.getVertices()
# initialize an empty tree object
tree = Tree()
# choose random node from graph, this will be root node in new tree
root = random.choice(vertexList)
# impliment BFS algorithm to convert graph to tree
queue = [root]
marked = [root]
while len(queue):
# explore neighbors of first element of queue
for edge in g.getEdges(queue[0]):
# if vertex is no | t visited before, add it marked li | st and queue
if edge not in marked:
# add this edge to tree
tree.addChild(queue[0],edge)
marked.append(edge)
queue.append(edge)
# remove first element from queue
queue.pop(0)
# open data source file
f = open('data.txt', 'r')
# read data source file into a list
dataSource = []
for d in filter(None,f.read().split('\n')):
dataSource.append(int(d))
# find total number of leaf nodes in tree
leafList = sorted(tree.getLeafs())
leafListLength = len(leafList)
# calculate size of data that will be given to each worker process
# last leaf node based on index will receive the extra data if result is
# not integer
dataSize = len(dataSource)/leafListLength
# only run this for main file
if __name__ == '__main__':
# run one worker process for each node in tree
for vertex in tree.getVertices():
# if the node is leaf distribute data to them based on their index
if vertex in leafList:
index = leafList.index(vertex)
if index == leafListLength - 1:
data = dataSource[index * dataSize:]
elif index == 0:
data = dataSource[ : (index + 1) * dataSize]
else :
data = dataSource[index * dataSize : (index + 1) * dataSize]
else :
data = []
# create new process with arguments : tree object, its name, data
p = multiprocessing.Process(target = worker, args=(tree, vertex,data))
# start process
p.start()
|
decvalts/iris | lib/iris/tests/test_hybrid.py | Python | gpl-3.0 | 9,487 | 0.000211 | # (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Test the hybrid vertical coordinate representations.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
import six
# import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests
import warnings
import numpy as np
from iris.aux_factory import HybridHeightFactory, HybridPressureFactory
import iris
import iris.tests.stock
@tests.skip_plot
class TestRealistic4d(tests.GraphicsTest):
def setUp(self):
self.cube = iris.tests.stock.realistic_4d()
self.altitude = self.cube.coord('altitude')
def test_metadata(self):
self.assertEqual(self.altitude.units, 'm')
self.assertIsNone(self.altitude.coord_system)
self.assertEqual(self.altitude.attributes, {'positive': 'up'})
def test_points(self):
self.assertAlmostEqual(self.altitude.points.min(), np.float32(191.84892))
self.assertAlmostEqual(self.altitude.points.max(), np.float32(40000))
def test_transpose(self):
self.assertCML(self.cube, ('stock', 'realistic_4d.cml'))
self.cube.transpose()
self.assertCML(self.cube, ('derived', 'transposed.cml'))
def test_indexing(self):
cube = self.cube[:, :, 0, 0]
# Make sure the derived 'altitude' coordinate survived the indexing.
altitude = cube.coord('altitude')
self.assertCML(cube, ('derived', 'column.cml'))
def test_removing_sigma(self):
# Check the cube remains OK when sigma is removed.
cube = self.cube
cube.remove_coord('sigma')
self.assertCML(cube, ('derived', 'removed_sigma.cml'))
self.assertString(str(cube), ('derived', 'removed_sigma.__str__.txt'))
# Check the factory now only has surface_altitude and delta dependencies.
factory = cube.aux_factory(name='altitude')
t = [key
for key, coord in six.iteritems(factory.dependencies)
if coord is not None]
six.assertCountEqual(self, t, ['orography', 'delta'])
def test_removing_orography(self):
# Check the cube remains OK when the orography is removed.
cube = self.cube
cube.remove_coord('surface_altitude')
self.assertCML(cube, ('derived', 'removed_orog.cml'))
self.assertString(str(cube), ('derived', 'removed_orog.__str__.txt'))
# Check the factory now only has sigma and delta dependencies.
factory = cube.aux_factory(name='altitude')
t = [key
for key, coord in six.iteritems(factory.dependencies)
if coord is not None]
six.assertCountEqual(self, t, ['sigma', 'delta'])
def test_derived_coords(self):
derived_coords = self.cube.derived_coords
self.assertEqual(len(derived_coords), 1)
altitude = derived_coords[0]
self.assertEqual(altitude.standard_name, 'altitude')
self.assertEqual(altitude.attributes, {'positive': 'up'})
def test_aux_factory(self):
factory = self.cube.aux_factory(name='altitude')
self.assertEqual(factory.standard_name, 'altitude')
self.assertEqual(factory.attributes, {'positive': 'up'})
def test_aux_factory_var_name(self):
factory = self.cube.aux_factory(name='altitude')
factory.var_name = 'alt'
factory = self.cube.aux_factory(var_name='alt')
self.assertEqual(factory.standard_name, 'altitude')
self.assertEqual(factory.attributes, {'positive': 'up'})
def test_no_orography(self):
# Get rid of the normal hybrid-height factory.
cube = self.cube
factory = cube.aux_factory(name='altitude')
cube.remove_aux_factory(factory)
# Add a new one which only references level_height & sigma.
delta = cube.coord('level_height')
sigma = cube.coord('sigma')
factory = HybridHeightFactory(delta, sigma)
cube.add_aux_factory(factory)
self.assertEqual(len(cube.aux_factories), 1)
self.assertEqual(len(cube.derived_coords), 1)
self.assertString(str(cube), ('derived', 'no_orog.__str__.txt'))
self.assertCML(cube, ('derived', 'no_orog.cml'))
def test_invalid_dependencies(self):
# Must have either delta or orography
with self.assertRaises(ValueError):
factory = HybridHeightFactory()
sigma = self.cube.coord('sigma')
with self.assertRaises(ValueError):
factory = HybridHeightFactory(sigma=sigma)
# Orography must not have bounds
with warnings.catch_warnings():
# Cause all warnings to raise Exceptions
warnings.simplefilter("error")
with self.assertRaises(UserWarning):
factory = HybridHeightFactory(orography=sigma)
def test_bounded_orography(self):
# Start with everything normal
orog = self.cube.coord('surface_altitude')
altitude = self.cube.coord('altitude')
self.assertIsInstance(altitude.bounds, np.ndarray)
# Make sure altitude still works OK if orography was messed
# with *after* altitude was created.
altitude = self.cube.coord('altitude')
orog.bounds = np.zeros(orog.shape + (4,))
self.assertIsInstance(altitude.bounds, np.ndarray)
# Make sure altitude.bounds now raises an error.
altitude = self.cube.coord('altitude')
with self.assertRaises(ValueError):
bounds = altitude.bounds
class TestHybridPressure(tests.IrisTest):
def setUp(self):
# Convert the hybrid-height into hybrid-pressure...
cube = iris.tests.stock.realistic_4d()
# Get rid of the normal hybrid-height factory.
factory = cube.aux_factory(name='altitude')
cube.remove_aux_factory(factory)
# Mangle the height coords into pressure coords.
delta = cube.coord('level_height')
delta.rename('level_pressure')
delta.units = 'Pa'
sigma = cube.coord('sigma')
ref = cube.coord('surface_altitude')
ref.rename('surface_air_pressure')
ref.units = 'Pa'
factory = HybridPressureFactory(delta, sigma, ref)
cube.add_aux_factory(factory)
self.cube = cube
self.air_pressure = self.cube.coord('air_pressure')
def test_metadata(self):
self.assertEqual(self.air_pressure.units, 'Pa')
self.assertIsNone(self.air_pressure.coord_system)
self.assertEqual(self.air_pressure.attributes, {})
def test_points(self):
points = self.air_pressure.points
| self.assertEqual(points.dtype, np.float32)
self.assertAlmostEqual(points.min(), np.float32(191.84892))
self.assertAlmostEqual(points.max(), np.float32(40000))
| # Convert the reference surface to float64 and check the
# derived coordinate becomes float64.
temp = self.cube.coord('surface_air_pressure').points
temp = temp.astype('f8')
self.cube.coord('surface_air_pressure').points = temp
points = self.cube.coord('air_pressure').points
self.assertEqual(points.dtype, np.float64)
self.assertAlmostEqual(points.min(), 191.8489257)
self.assertAlmostEqual(points.max(), 40000)
def test_invalid_dependencies(self):
# Must have either delta or surface_air_pressure
with self.assertRaises(ValueError):
factory = Hybrid |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.