| code (string, 3-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (1 class) | license (15 classes) | size (int64, 3-1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env python
from pymongo import MongoClient
import pymongo
HOST = "mongo-nosh-norep-10f:27017"
c = MongoClient(HOST)
dbname = "google"
task = "task_events"
db = c[dbname]
c[dbname].drop_collection(task)
c[dbname].create_collection(task)
#c.admin.command('enableSharding', dbname)
db = c[dbname]
task_col = db[task]
task_col.create_index([("_id.filepath", pymongo.ASCENDING),("_id.numline", pymongo.ASCENDING)])
task_col.create_index([("_id", pymongo.ASCENDING)])
#c.admin.command('shardCollection', dbname+'.'+task, key={'_id': "hashed"})
#c.admin.command('shardCollection', dbname+'.'+task, key={'_id.filepath': "hashed"})
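# A minimal usage sketch (assumptions, not part of the original script; pymongo 3+
# and illustrative field values): the indexes above expect documents whose _id embeds
# the source file path and line number, e.g.
#   task_col.insert_one({"_id": {"filepath": "part-00000-of-00500.csv", "numline": 1},
#                        "time": 0, "event_type": 0})
#   task_col.find_one({"_id.filepath": "part-00000-of-00500.csv", "_id.numline": 1})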
| elainenaomi/sciwonc-dataflow-examples | sbbd2016/experiments/0-import-data/2-import-10files-mongo-nosh-norep/init/DataStoreInit.py | Python | gpl-3.0 | 645 |
from rest_framework.routers import SimpleRouter
from . import views
router = SimpleRouter()
router.register('countries', views.CountryViewSet)
urlpatterns = router.urls
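# Route sketch (an assumption, not part of the original file): with a standard
# ModelViewSet whose queryset model is Country, SimpleRouter would register
#   countries/        -> 'country-list'    (GET list, POST create)
#   countries/{pk}/   -> 'country-detail'  (GET/PUT/PATCH/DELETE on a single object)
# so reverse('country-list') resolves to '/countries/'.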
| marcgibbons/drf_signed_auth | example/countries/urls.py | Python | bsd-2-clause | 173 |
#
# Copyright (c) 2008--2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
# Package import process
#
import sys
import os.path
from importLib import GenericPackageImport, IncompletePackage, \
Import, InvalidArchError, InvalidChannelError, \
IncompatibleArchError
from mpmSource import mpmBinaryPackage
from spacewalk.common import rhn_pkg
from spacewalk.common.rhnConfig import CFG
from spacewalk.server import taskomatic
from spacewalk.server.rhnServer import server_packages
class ChannelPackageSubscription(GenericPackageImport):
def __init__(self, batch, backend, caller=None, strict=0, repogen=True):
# If strict, the set of packages that was passed in will be the only
# one in the channels - everything else will be unlinked
GenericPackageImport.__init__(self, batch, backend)
self.affected_channels = []
# A hash keyed on the channel id, and with tuples
# (added_packages, removed_packages) as values (packages are package
# ids)
self.affected_channel_packages = {}
if not caller:
self.caller = "backend.(unknown)"
else:
self.caller = caller
self._strict_subscription = strict
self.repogen = repogen
def preprocess(self):
# Processes the package batch to a form more suitable for database
# operations
for package in self.batch:
# if package object doesn't have multiple checksums (like satellite-sync objects)
# then let's fake it
if 'checksums' not in package:
package['checksums'] = {package['checksum_type']: package['checksum']}
if not isinstance(package, IncompletePackage):
raise TypeError("Expected an IncompletePackage instance, "
"got %s" % package.__class__.__name__)
self._processPackage(package)
def fix(self):
# Look up arches and channels
self.backend.lookupPackageArches(self.package_arches)
self.backend.lookupChannels(self.channels)
# Initialize self.channel_package_arch_compat
self.channel_package_arch_compat = {}
for channel, channel_row in self.channels.items():
if not channel_row:
# Unsupported channel
continue
self.channel_package_arch_compat[channel_row['channel_arch_id']] = None
self.backend.lookupChannelPackageArchCompat(self.channel_package_arch_compat)
self.backend.lookupPackageNames(self.names)
self.backend.lookupEVRs(self.evrs)
self.backend.lookupChecksums(self.checksums)
# Fix the package information up, and uniquify the packages too
uniqdict = {}
for package in self.batch:
if package.ignored:
continue
self._postprocessPackageNEVRA(package)
if not CFG.ENABLE_NVREA:
# nvrea disabled, skip checksum
nevrao = (
package['name_id'],
package['evr_id'],
package['package_arch_id'],
package['org_id'])
else:
# As nvrea is enabled uniquify based on checksum
nevrao = (
package['name_id'],
package['evr_id'],
package['package_arch_id'],
package['org_id'],
package['checksum_id'])
if nevrao not in uniqdict:
# Uniquify the channel names
package['channels'] = {}
# Initialize the channels
# This is a handy way of checking arch compatibility for this
# package with its channels
self.__copyChannels(package, package)
uniqdict[nevrao] = package
else:
# Package is found twice in the same batch
# Are the packages the same?
self._comparePackages(package, uniqdict[nevrao])
# Invalidate it
package.ignored = 1
firstpackage = uniqdict[nevrao]
# Copy any new channels
self.__copyChannels(package, firstpackage)
# Knowing the id of the referenced package
package.first_package = firstpackage
def _comparePackages(self, package1, package2):
# XXX This should probably do a deep compare of the two packages
pass
def submit(self):
self.backend.lookupPackages(self.batch, self.checksums)
try:
affected_channels = self.backend.subscribeToChannels(self.batch,
strict=self._strict_subscription)
except:
self.backend.rollback()
raise
self.compute_affected_channels(affected_channels)
if len(self.batch) < 10:
# update small batch per package
name_ids = [pkg['name_id'] for pkg in self.batch]
else:
# update bigger batch at once
name_ids = []
self.backend.update_newest_package_cache(caller=self.caller,
affected_channels=self.affected_channel_packages, name_ids=name_ids)
# Now that channel is updated, schedule the repo generation
if self.repogen:
taskomatic.add_to_repodata_queue_for_channel_package_subscription(
self.affected_channels, self.batch, self.caller)
self.backend.commit()
def compute_affected_channels(self, affected_channels):
# Fill the list of affected channels
self.affected_channel_packages.clear()
self.affected_channel_packages.update(affected_channels)
for channel_label, channel_row in list(self.channels.items()):
channel_id = channel_row['id']
if channel_id in affected_channels:
affected_channels[channel_id] = channel_label
self.affected_channels = list(affected_channels.values())
def _processPackage(self, package):
GenericPackageImport._processPackage(self, package)
# Process channels
channels = []
channelHash = {}
for channel in package['channels']:
channelName = channel['label']
if channelName not in channelHash:
channels.append(channelName)
channelHash[channelName] = None
self.channels[channelName] = None
# Replace the channel list with the uniquified list
package.channels = channels
# Copies the channels from one package to the other
def __copyChannels(self, sourcePackage, destPackage):
dpHash = destPackage['channels']
for schannelName in sourcePackage.channels:
# Check if the package is compatible with the channel
channel = self.channels[schannelName]
if not channel:
# Unknown channel
sourcePackage.ignored = 1
raise InvalidChannelError(channel,
"Unsupported channel %s" % schannelName)
# Check channel-package compatibility
charch = channel['channel_arch_id']
archCompat = self.channel_package_arch_compat[charch]
if not archCompat:
# Invalid architecture
sourcePackage.ignored = 1
raise InvalidArchError(charch,
"Invalid channel architecture %s" % charch)
# Now check if the source package's arch is compatible with the
# current channel
if sourcePackage['package_arch_id'] not in archCompat:
sourcePackage.ignored = 1
raise IncompatibleArchError(sourcePackage.arch, charch,
"Package arch %s incompatible with channel %s" %
(sourcePackage.arch, schannelName))
dpHash[channel['id']] = schannelName
destPackage.channels = list(dpHash.values())
class PackageImport(ChannelPackageSubscription):
def __init__(self, batch, backend, caller=None, update_last_modified=0):
ChannelPackageSubscription.__init__(self, batch, backend,
caller=caller)
self.ignoreUploaded = 1
self._update_last_modified = update_last_modified
self.capabilities = {}
self.groups = {}
self.sourceRPMs = {}
self.changelog_data = {}
def _processPackage(self, package):
ChannelPackageSubscription._processPackage(self, package)
# Process package groups
group = package['package_group']
if group not in self.groups:
self.groups[group] = None
sourceRPM = package['source_rpm']
if (sourceRPM is not None) and (sourceRPM not in self.sourceRPMs):
self.sourceRPMs[sourceRPM] = None
# Change copyright to license
# XXX
package['copyright'] = self._fix_encoding(package['license'])
for tag in ('recommends', 'suggests', 'supplements', 'enhances', 'breaks', 'predepends'):
if tag not in package or type(package[tag]) != type([]):
# older Spacewalk servers do not export weak deps,
# so create an empty list
package[tag] = []
# Creates all the data structures needed to insert capabilities
for tag in ('provides', 'requires', 'conflicts', 'obsoletes', 'recommends', 'suggests', 'supplements', 'enhances', 'breaks', 'predepends'):
depList = package[tag]
if type(depList) != type([]):
sys.stderr.write("!!! packageImport.PackageImport._processPackage: "
"erronous depList for '%s', converting to []\n" % tag)
depList = []
for dep in depList:
nv = []
for f in ('name', 'version'):
nv.append(dep[f])
del dep[f]
nv = tuple(nv)
dep['capability'] = nv
if nv not in self.capabilities:
self.capabilities[nv] = None
# Process files too
fileList = package['files']
for f in fileList:
filename = self._fix_encoding(f['name'])
nv = (filename, '')
del f['name']
f['capability'] = nv
if nv not in self.capabilities:
self.capabilities[nv] = None
fchecksumTuple = (f['checksum_type'], f['checksum'])
if fchecksumTuple not in self.checksums:
self.checksums[fchecksumTuple] = None
# Uniquify changelog entries
unique_package_changelog_hash = {}
unique_package_changelog = []
for changelog in package['changelog']:
key = (changelog['name'], changelog['time'], changelog['text'])
if key not in unique_package_changelog_hash:
self.changelog_data[key] = None
unique_package_changelog.append(changelog)
unique_package_changelog_hash[key] = 1
package['changelog'] = unique_package_changelog
# fix encoding issues in package summary and description
package['description'] = self._fix_encoding(package['description'])
package['summary'] = self._fix_encoding(package['summary'])
def fix(self):
# If capabilities are available, process them
if self.capabilities:
try:
self.backend.processCapabilities(self.capabilities)
except:
# Oops
self.backend.rollback()
raise
# Since this is the bulk of the work, commit
self.backend.commit()
self.backend.processChangeLog(self.changelog_data)
ChannelPackageSubscription.fix(self)
self.backend.lookupSourceRPMs(self.sourceRPMs)
self.backend.lookupPackageGroups(self.groups)
# Postprocess the gathered information
self.__postprocess()
def submit(self):
upload_force = self.uploadForce
if not upload_force and self._update_last_modified:
# # Force it just a little bit - kind of hacky
upload_force = 0.5
try:
self.backend.processPackages(self.batch,
uploadForce=upload_force,
forceVerify=self.forceVerify,
ignoreUploaded=self.ignoreUploaded,
transactional=self.transactional)
self._import_signatures()
except:
# Oops
self.backend.rollback()
raise
self.backend.commit()
if not self._update_last_modified:
# Go though the list of objects and clear out the ones that have a
# force of 0.5
for p in self.batch:
if p.diff and p.diff.level == 0.5:
# Ignore this difference completely
p.diff = None
# Leave p.diff_result in place
def subscribeToChannels(self):
affected_channels = self.backend.subscribeToChannels(self.batch)
# Fill the list of affected channels
self.compute_affected_channels(affected_channels)
name_ids = [pkg['name_id'] for pkg in self.batch]
self.backend.update_newest_package_cache(caller=self.caller,
affected_channels=self.affected_channel_packages, name_ids=name_ids)
taskomatic.add_to_repodata_queue_for_channel_package_subscription(
self.affected_channels, self.batch, self.caller)
self.backend.commit()
def __postprocess(self):
# Gather the IDs we've found
for package in self.batch:
if package.ignored:
# Skip it
continue
# Only deal with packages
self.__postprocessPackage(package)
def __postprocessPackage(self, package):
""" populate the columns foo_id with id numbers from appropriate hashes """
package['package_group'] = self.groups[package['package_group']]
source_rpm = package['source_rpm']
if source_rpm is not None:
source_rpm = self.sourceRPMs[source_rpm]
else:
source_rpm = ''
package['source_rpm_id'] = source_rpm
package['checksum_id'] = self.checksums[(package['checksum_type'], package['checksum'])]
# Postprocess the dependency information
for tag in ('provides', 'requires', 'conflicts', 'obsoletes', 'files', 'recommends', 'suggests', 'supplements', 'enhances', 'breaks', 'predepends'):
for entry in package[tag]:
nv = entry['capability']
entry['capability_id'] = self.capabilities[nv]
for c in package['changelog']:
c['changelog_data_id'] = self.changelog_data[(c['name'], c['time'], c['text'])]
fileList = package['files']
for f in fileList:
f['checksum_id'] = self.checksums[(f['checksum_type'], f['checksum'])]
def _comparePackages(self, package1, package2):
if (package1['checksum_type'] == package2['checksum_type']
and package1['checksum'] == package2['checksum']):
return
# XXX Handle this better
raise Exception("Different packages in the same batch")
def _cleanup_object(self, object):
ChannelPackageSubscription._cleanup_object(self, object)
if object.ignored:
object.id = object.first_package.id
def _import_signatures(self):
for package in self.batch:
# skip missing files and mpm packages
if package['path'] and not isinstance(package, mpmBinaryPackage):
full_path = os.path.join(CFG.MOUNT_POINT, package['path'])
if os.path.exists(full_path):
header = rhn_pkg.get_package_header(filename=full_path)
server_packages.processPackageKeyAssociations(header,
package['checksum_type'], package['checksum'])
def _fix_encoding(self, text):
if text is None:
return None
try:
return text.decode('utf8')
except UnicodeDecodeError:
return text.decode('iso8859-1')
class SourcePackageImport(Import):
def __init__(self, batch, backend, caller=None, update_last_modified=0):
Import.__init__(self, batch, backend)
self._update_last_modified = update_last_modified
self.ignoreUploaded = 1
self.sourceRPMs = {}
self.groups = {}
self.checksums = {}
def preprocess(self):
for package in self.batch:
self._processPackage(package)
def fix(self):
self.backend.lookupSourceRPMs(self.sourceRPMs)
self.backend.lookupPackageGroups(self.groups)
self.backend.lookupChecksums(self.checksums)
self.__postprocess()
# Uniquify the packages
uniqdict = {}
for package in self.batch:
# Unique key
key = (package['org_id'], package['source_rpm_id'])
if key not in uniqdict:
uniqdict[key] = package
continue
else:
self._comparePackages(package, uniqdict[key])
# And invalidate it
package.ignored = 1
package.first_package = uniqdict[key]
def submit(self):
upload_force = self.uploadForce
if not upload_force and self._update_last_modified:
# # Force it just a little bit - kind of hacky
upload_force = 0.5
try:
self.backend.processSourcePackages(self.batch,
uploadForce=upload_force,
forceVerify=self.forceVerify,
ignoreUploaded=self.ignoreUploaded,
transactional=self.transactional)
except:
# Oops
self.backend.rollback()
raise
self.backend.commit()
if not self._update_last_modified:
# Go though the list of objects and clear out the ones that have a
# force of 0.5
for p in self.batch:
if p.diff and p.diff.level == 0.5:
# Ignore this difference completely
p.diff = None
# Leave p.diff_result in place
def _comparePackages(self, package1, package2):
if (package1['checksum_type'] == package2['checksum_type']
and package1['checksum'] == package2['checksum']):
return
# XXX Handle this better
raise Exception("Different packages in the same batch")
def _processPackage(self, package):
Import._processPackage(self, package)
# Fix the arch
package.arch = 'src'
package.source_rpm = package['source_rpm']
sourceRPM = package['source_rpm']
if not sourceRPM:
# Should not happen
raise Exception("Source RPM %s does not exist")
self.sourceRPMs[sourceRPM] = None
self.groups[package['package_group']] = None
checksumTuple = (package['checksum_type'], package['checksum'])
if checksumTuple not in self.checksums:
self.checksums[checksumTuple] = None
sigchecksumTuple = (package['sigchecksum_type'], package['sigchecksum'])
if sigchecksumTuple not in self.checksums:
self.checksums[sigchecksumTuple] = None
def __postprocess(self):
# Gather the IDs we've found
for package in self.batch:
if package.ignored:
# Skip it
continue
# Only deal with packages
self.__postprocessPackage(package)
def __postprocessPackage(self, package):
# Set the ids
package['package_group'] = self.groups[package['package_group']]
package['source_rpm_id'] = self.sourceRPMs[package['source_rpm']]
package['checksum_id'] = self.checksums[(package['checksum_type'],
package['checksum'])]
package['sigchecksum_id'] = self.checksums[(package['sigchecksum_type'],
package['sigchecksum'])]
def _cleanup_object(self, object):
Import._cleanup_object(self, object)
if object.ignored:
object.id = object.first_package.id
def packageImporter(batch, backend, source=0, caller=None):
if source:
return SourcePackageImport(batch, backend, caller=caller)
return PackageImport(batch, backend, caller=caller)
| jdobes/spacewalk | backend/server/importlib/packageImport.py | Python | gpl-2.0 | 21,688 |
import logging
from autotest.client.shared import error
from virttest.libvirt_xml import VMXML, LibvirtXMLError
from virttest import virt_vm
def run_virsh_define(test, params, env):
"""
Test defining/undefining domain by dumping xml, changing it, and re-adding.
(1) Get name and uuid of existing vm
(2) Rename domain and verify domain start
(3) Get uuid of new vm
(4) Change name&uuid back to original
(5) Verify domain start
"""
def do_rename(vm, new_name, uuid=None, fail_info=[]):
# Change name in XML
logging.info("Rename %s to %s.", vm.name, new_name)
try:
vm = VMXML.vm_rename(vm, new_name, uuid) # give it a new uuid
except LibvirtXMLError, detail:
raise error.TestFail("Rename %s to %s failed:\n%s"
% (vm.name, new_name, detail))
# Exercise the defined XML
try:
vm.start()
except virt_vm.VMStartError, detail:
# Do not raise TestFail because vm should be restored
fail_info.append("Start guest %s failed:%s" % (vm.name, detail))
vm.destroy()
return fail_info
# Run test
vm_name = params.get("main_vm")
vm = env.get_vm(params["main_vm"])
new_name = params.get("new_name", "test")
uuid = vm.get_uuid()
logging.info("Original uuid: %s", vm.get_uuid())
assert uuid is not None
# Rename to a new name
fail_info = do_rename(vm, new_name)
logging.info("Generated uuid: %s", vm.get_uuid())
assert vm.uuid != uuid
# Rename back to original to maintain known-state
fail_info = do_rename(vm, vm_name, uuid, fail_info)
logging.info("Final uuid: %s", vm.get_uuid())
if len(fail_info):
raise error.TestFail(fail_info)
| ehabkost/virt-test | libvirt/tests/virsh_define.py | Python | gpl-2.0 | 1,794 |
#!/usr/bin/env python2
import i3ipc
import subprocess
# Process all windows on watched workspaces: hide them when leaving and show them when
# entering, because Chrome/Chromium doesn't consider itself hidden while on an invisible
# workspace. This script drops my CPU usage when listening to Google Music from ~10% to ~3%.
# I'm just putting any workspaces that have Chromium apps running on them in WATCHED_WORKSPACES.
WATCHED_WORKSPACES = [5, 6]
HIDDEN = '_NET_WM_STATE_HIDDEN'
SHOWN = '_NET_WM_STATE_SHOWN'
def showWindow(windowId):
print "SHOWING"
subprocess.call(["xprop", "-id", str(windowId), "-f",
"_NET_WM_STATE", "32a", "-remove", HIDDEN])
subprocess.call(["xprop", "-id", str(windowId), "-f",
"_NET_WM_STATE", "32a", "-set", "_NET_WM_STATE", SHOWN])
def hideWindow(windowId):
print "HIDING"
subprocess.call(["xprop", "-id", str(windowId), "-f",
"_NET_WM_STATE", "32a", "-remove", SHOWN])
subprocess.call(["xprop", "-id", str(windowId), "-f",
"_NET_WM_STATE", "32a", "-set", "_NET_WM_STATE", HIDDEN])
def process_window(window, ws_event):
print "Processing window: %s (%d)" % (window.name, window.window)
if ws_event.current.num in WATCHED_WORKSPACES:
# music workspace has been focused
showWindow(window.window)
elif ws_event.old.num in WATCHED_WORKSPACES:
# music workspace has been unfocused
hideWindow(window.window)
def onWorkspace(i3, event):
if event.change in ['focus']:
windows = i3.get_tree().leaves()
for window in windows:
if window.workspace().num in WATCHED_WORKSPACES:
process_window(window, event)
i3 = i3ipc.Connection()
i3.on('workspace', onWorkspace)
i3.main()
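# The helpers above boil down to xprop calls like the following shell equivalents
# (window id is illustrative):
#   xprop -id 0x1400003 -f _NET_WM_STATE 32a -set _NET_WM_STATE _NET_WM_STATE_HIDDEN
#   xprop -id 0x1400003 -f _NET_WM_STATE 32a -remove _NET_WM_STATE_HIDDEN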
| lbeckman314/dotfiles | i3/musicwatcher.py | Python | mit | 1,595 |
# -*- coding: utf-8 -*-
##############################################################################
#
# jasper_server module for OpenERP
# Copyright (c) 2008-2009 EVERLIBRE (http://everlibre.fr) Eric VERNICHON
# Copyright (C) 2009-2011 SYLEAM ([http://www.syleam.fr]) Christophe CHAUVET
#
# This file is a part of jasper_server
#
# jasper_server is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# jasper_server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
#
##############################################################################
{
'name': 'JasperReport Server Interface',
'version': '6.3',
'category': 'Reporting',
'sequence': 20,
'complexity': "expert",
'description': """This module interface JasperReport Server with OpenERP
Features:
- Document source must be in CSV, XML
- Save document as attachment on object
- Retrieve attachment if present
- Launch multiple reports and merge in one printing action
- Add additionnals parameters (ex from fields function)
- Affect group on report
- Use context to display or not the print button
(eg: in stock.picking separate per type)
- Execute SQL query before and after treatement
- Launch report based on SQL View
- Add additional pages at the begining or at the end of the document
This module required library to work properly
# pip install httplib2 (>= 0.6.0)
# pip install pyPdf (>= 1.13)
In collaboration with Eric Vernichon (from Everlibre)
""",
'author': 'SYLEAM',
'website': 'http://www.syleam.fr',
'images': ['images/accueil.png', 'images/palette.png',
'images/document_form.png'],
'depends': [
'base',
],
'data': [
'security/groups.xml',
'security/ir.model.access.csv',
'data/jasper_document_extension.xml',
'wizard/wizard.xml',
'wizard/load_file_view.xml',
'obj_server_view.xml',
'obj_document_view.xml',
],
'demo': [
'demo/jasper_document.xml',
],
'installable': True,
'auto_install': False,
'external_dependencies': {'python': ['httplib2', 'pyPdf', 'dime']},
'application': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Jgarcia-IAS/Fidelizacion_odoo | openerp/extras/jasper_server/__openerp__.py | Python | agpl-3.0 | 2,744 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011 Florian Mounier
# Copyright (c) 2011 Kenji_Takahashi
# Copyright (c) 2012 roger
# Copyright (c) 2012, 2014 Tycho Andersen
# Copyright (c) 2012 Maximilian Köhl
# Copyright (c) 2013 Craig Barnes
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 Adi Sieker
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from . import base
from .. import bar, hook
class CurrentLayout(base._TextBox):
"""
Display the name of the current layout of the current
group of the screen on which the bar containing the widget is.
"""
orientations = base.ORIENTATION_HORIZONTAL
def __init__(self, width=bar.CALCULATED, **config):
base._TextBox.__init__(self, "", width, **config)
def _configure(self, qtile, bar):
base._TextBox._configure(self, qtile, bar)
self.text = self.bar.screen.group.layouts[0].name
self.setup_hooks()
def setup_hooks(self):
def hook_response(layout, group):
if group.screen is not None and group.screen == self.bar.screen:
self.text = layout.name
self.bar.draw()
hook.subscribe.layout_change(hook_response)
def button_press(self, x, y, button):
if button == 1:
self.qtile.cmd_next_layout()
elif button == 2:
self.qtile.cmd_prev_layout()
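# A minimal config sketch (assumed standard qtile config, not part of this module)
# showing how the widget is typically placed in a bar:
#   from libqtile import bar, widget
#   from libqtile.config import Screen
#   screens = [Screen(top=bar.Bar([widget.CurrentLayout()], 24))]
# Left-click cycles to the next layout and middle-click to the previous one
# (see button_press above).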
| xplv/qtile | libqtile/widget/currentlayout.py | Python | mit | 2,371 |
# encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python3/dist-packages/PyQt4/QtGui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
class QTextLength(): # skipped bases: <class 'sip.simplewrapper'>
"""
QTextLength()
QTextLength(QTextLength.Type, float)
QTextLength(object)
QTextLength(QTextLength)
"""
def rawValue(self): # real signature unknown; restored from __doc__
""" QTextLength.rawValue() -> float """
return 0.0
def type(self): # real signature unknown; restored from __doc__
""" QTextLength.type() -> QTextLength.Type """
pass
def value(self, p_float): # real signature unknown; restored from __doc__
""" QTextLength.value(float) -> float """
return 0.0
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __init__(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
FixedLength = 1
PercentageLength = 2
Type = None # (!) real value is ''
VariableLength = 0
__hash__ = None
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyQt4/QtGui/QTextLength.py | Python | gpl-2.0 | 1,916 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
GeoAlgorithm.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os.path
import traceback
import subprocess
import copy
from qgis.PyQt.QtGui import QIcon
from qgis.PyQt.QtCore import QCoreApplication, QSettings
from processing.core.ProcessingLog import ProcessingLog
from processing.core.ProcessingConfig import ProcessingConfig
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.core.SilentProgress import SilentProgress
from processing.core.parameters import ParameterRaster, ParameterVector, ParameterMultipleInput, ParameterTable, Parameter
from processing.core.outputs import OutputVector, OutputRaster, OutputTable, OutputHTML, Output
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools import dataobjects, vector
from processing.tools.system import setTempOutput
from processing.algs.help import shortHelp
class GeoAlgorithm:
def __init__(self):
self._icon = QIcon(os.path.dirname(__file__) + '/../images/alg.png')
# Parameters needed by the algorithm
self.parameters = list()
# Outputs generated by the algorithm
self.outputs = list()
# Name and group for normal toolbox display
self.name, self.i18n_name = '', ''
self.group, self.i18n_group = '', ''
# The crs taken from input layers (if possible), and used when
# loading output layers
self.crs = None
# Change any of the following if your algorithm should not
# appear in the toolbox or modeler
self.showInToolbox = True
self.showInModeler = True
# if true, will show only loaded layers in parameters dialog.
# Also, if True, the algorithm does not run on the modeler
# or batch processing interface
self.allowOnlyOpenedLayers = False
# False if it should not be run as a batch process
self.canRunInBatchMode = True
# To be set by the provider when it loads the algorithm
self.provider = None
# If the algorithm is run as part of a model, the parent model
# can be set in this variable, to allow for customized
# behaviour, in case some operations should be run differently
# when running as part of a model
self.model = None
self.defineCharacteristics()
def getCopy(self):
"""Returns a new instance of this algorithm, ready to be used
for being executed.
"""
newone = copy.copy(self)
newone.parameters = copy.deepcopy(self.parameters)
newone.outputs = copy.deepcopy(self.outputs)
return newone
# methods to overwrite when creating a custom geoalgorithm
def getIcon(self):
return self._icon
@staticmethod
def getDefaultIcon():
return GeoAlgorithm._icon
def _formatHelp(self, text):
return "<h2>%s</h2>%s" % (self.name, "".join(["<p>%s</p>" % s for s in text.split("\n")]))
def help(self):
return False, None
def shortHelp(self):
text = shortHelp.get(self.commandLineName(), None)
if text is not None:
text = self._formatHelp(text)
return text
def processAlgorithm(self, progress):
"""Here goes the algorithm itself.
There is no return value from this method.
A GeoAlgorithmExecutionException should be raised in case
something goes wrong.
"""
pass
def defineCharacteristics(self):
"""Here is where the parameters and outputs should be defined.
"""
pass
def getCustomParametersDialog(self):
"""If the algorithm has a custom parameters dialog, it should
be returned here, ready to be executed.
"""
return None
def getCustomModelerParametersDialog(self, modelAlg, algName=None):
"""If the algorithm has a custom parameters dialog when called
from the modeler, it should be returned here, ready to be
executed.
"""
return None
def getParameterDescriptions(self):
"""Returns a dict with param names as keys and detailed
descriptions of each param as value. These descriptions are
used as tool tips in the parameters dialog.
If a description does not exist, the parameter's
human-readable name is used.
"""
descs = {}
return descs
def checkBeforeOpeningParametersDialog(self):
"""If there is any check to perform before the parameters
dialog is opened, it should be done here.
This method returns an error message string if there is any
problem (for instance, an external app not configured yet),
or None if the parameters dialog can be opened.
Note that this check should also be done in the
processAlgorithm method, since algorithms can be called without
opening the parameters dialog.
"""
return None
def checkParameterValuesBeforeExecuting(self):
"""If there is any check to do before launching the execution
of the algorithm, it should be done here.
If values are not correct, a message should be returned
explaining the problem.
This check is called from the parameters dialog, and also when
calling from the console.
"""
return None
# =========================================================
def execute(self, progress=SilentProgress(), model=None):
"""The method to use to call a processing algorithm.
Although the body of the algorithm is in processAlgorithm(),
it should be called using this method, since it performs
some additional operations.
Raises a GeoAlgorithmExecutionException in case anything goes
wrong.
"""
self.model = model
try:
self.setOutputCRS()
self.resolveTemporaryOutputs()
self.resolveDataObjects()
self.checkOutputFileExtensions()
self.runPreExecutionScript(progress)
self.processAlgorithm(progress)
progress.setPercentage(100)
self.convertUnsupportedFormats(progress)
self.runPostExecutionScript(progress)
except GeoAlgorithmExecutionException as gaee:
lines = [self.tr('Uncaught error while executing algorithm')]
lines.append(traceback.format_exc())
ProcessingLog.addToLog(ProcessingLog.LOG_ERROR, gaee.msg)
raise GeoAlgorithmExecutionException(gaee.msg, lines, gaee)
except Exception as e:
# If something goes wrong and is not caught in the
# algorithm, we catch it here and wrap it
lines = [self.tr('Uncaught error while executing algorithm')]
lines.append(traceback.format_exc())
ProcessingLog.addToLog(ProcessingLog.LOG_ERROR, lines)
raise GeoAlgorithmExecutionException(unicode(e) + self.tr('\nSee log for more details'), lines, e)
def _checkParameterValuesBeforeExecuting(self):
for param in self.parameters:
if isinstance(param, (ParameterRaster, ParameterVector,
ParameterMultipleInput)):
if param.value:
if isinstance(param, ParameterMultipleInput):
inputlayers = param.value.split(';')
else:
inputlayers = [param.value]
for inputlayer in inputlayers:
obj = dataobjects.getObject(inputlayer)
if obj is None:
return "Wrong parameter value: " + param.value
return self.checkParameterValuesBeforeExecuting()
def runPostExecutionScript(self, progress):
scriptFile = ProcessingConfig.getSetting(
ProcessingConfig.POST_EXECUTION_SCRIPT)
self.runHookScript(scriptFile, progress)
def runPreExecutionScript(self, progress):
scriptFile = ProcessingConfig.getSetting(
ProcessingConfig.PRE_EXECUTION_SCRIPT)
self.runHookScript(scriptFile, progress)
def runHookScript(self, filename, progress):
if filename is None or not os.path.exists(filename):
return
try:
script = 'import processing\n'
ns = {}
ns['progress'] = progress
ns['alg'] = self
f = open(filename)
lines = f.readlines()
for line in lines:
script += line
exec(script, ns)
except Exception as e:
ProcessingLog.addToLog(ProcessingLog.LOG_WARNING,
"Error in hook script: " + str(e))
# A wrong script should not cause problems, so we swallow
# all exceptions
pass
def convertUnsupportedFormats(self, progress):
i = 0
progress.setText(self.tr('Converting outputs'))
for out in self.outputs:
if isinstance(out, OutputVector):
if out.compatible is not None:
layer = dataobjects.getObjectFromUri(out.compatible)
if layer is None:
# For the case of memory layer, if the
# getCompatible method has been called
continue
writer = out.getVectorWriter(
layer.fields(),
layer.wkbType(), layer.crs()
)
features = vector.features(layer)
for feature in features:
writer.addFeature(feature)
elif isinstance(out, OutputRaster):
if out.compatible is not None:
layer = dataobjects.getObjectFromUri(out.compatible)
format = self.getFormatShortNameFromFilename(out.value)
orgFile = out.compatible
destFile = out.value
crsid = layer.crs().authid()
settings = QSettings()
path = unicode(settings.value('/GdalTools/gdalPath', ''))
envval = unicode(os.getenv('PATH'))
if not path.lower() in envval.lower().split(os.pathsep):
envval += '%s%s' % (os.pathsep, path)
os.putenv('PATH', envval)
command = 'gdal_translate -of %s -a_srs %s %s %s' % (format, crsid, orgFile, destFile)
if os.name == 'nt':
command = command.split(" ")
else:
command = [command]
proc = subprocess.Popen(
command,
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=False,
)
proc.communicate()
elif isinstance(out, OutputTable):
if out.compatible is not None:
layer = dataobjects.getObjectFromUri(out.compatible)
writer = out.getTableWriter(layer.fields())
features = vector.features(layer)
for feature in features:
writer.addRecord(feature)
i += 1
progress.setPercentage(100 * i / float(len(self.outputs)))
def getFormatShortNameFromFilename(self, filename):
ext = filename[filename.rfind('.') + 1:]
supported = GdalUtils.getSupportedRasters()
for name in supported.keys():
exts = supported[name]
if ext in exts:
return name
return 'GTiff'
def checkOutputFileExtensions(self):
"""Checks if the values of outputs are correct and have one of
the supported output extensions.
If not, it adds the first one of the supported extensions, which
is assumed to be the default one.
"""
for out in self.outputs:
if not out.hidden and out.value is not None:
if not os.path.isabs(out.value):
continue
if isinstance(out, OutputRaster):
exts = \
dataobjects.getSupportedOutputRasterLayerExtensions()
elif isinstance(out, OutputVector):
exts = \
dataobjects.getSupportedOutputVectorLayerExtensions()
elif isinstance(out, OutputTable):
exts = dataobjects.getSupportedOutputTableExtensions()
elif isinstance(out, OutputHTML):
exts = ['html', 'htm']
else:
continue
idx = out.value.rfind('.')
if idx == -1:
out.value = out.value + '.' + exts[0]
else:
ext = out.value[idx + 1:]
if ext not in exts + ['dbf']:
out.value = out.value + '.' + exts[0]
def resolveTemporaryOutputs(self):
"""Sets temporary outputs (output.value = None) with a
temporary file instead.
"""
for out in self.outputs:
if not out.hidden and out.value is None:
setTempOutput(out, self)
def setOutputCRS(self):
layers = dataobjects.getAllLayers()
for param in self.parameters:
if isinstance(param, (ParameterRaster, ParameterVector, ParameterMultipleInput)):
if param.value:
if isinstance(param, ParameterMultipleInput):
inputlayers = param.value.split(';')
else:
inputlayers = [param.value]
for inputlayer in inputlayers:
for layer in layers:
if layer.source() == inputlayer:
self.crs = layer.crs()
return
p = dataobjects.getObjectFromUri(inputlayer)
if p is not None:
self.crs = p.crs()
p = None
return
try:
from qgis.utils import iface
if iface is not None:
self.crs = iface.mapCanvas().mapSettings().destinationCrs()
except:
pass
def resolveDataObjects(self):
layers = dataobjects.getAllLayers()
for param in self.parameters:
if isinstance(param, (ParameterRaster, ParameterVector, ParameterTable,
ParameterMultipleInput)):
if param.value:
if isinstance(param, ParameterMultipleInput):
inputlayers = param.value.split(';')
else:
inputlayers = [param.value]
for i, inputlayer in enumerate(inputlayers):
for layer in layers:
if layer.name() == inputlayer:
inputlayers[i] = layer.source()
break
param.setValue(";".join(inputlayers))
def checkInputCRS(self):
"""It checks that all input layers use the same CRS. If so,
returns True. False otherwise.
"""
crsList = []
for param in self.parameters:
if isinstance(param, (ParameterRaster, ParameterVector, ParameterMultipleInput)):
if param.value:
if isinstance(param, ParameterMultipleInput):
layers = param.value.split(';')
else:
layers = [param.value]
for item in layers:
crs = dataobjects.getObject(item).crs()
if crs not in crsList:
crsList.append(crs)
return len(crsList) < 2
def addOutput(self, output):
# TODO: check that name does not exist
if isinstance(output, Output):
self.outputs.append(output)
def addParameter(self, param):
# TODO: check that name does not exist
if isinstance(param, Parameter):
self.parameters.append(param)
def setParameterValue(self, paramName, value):
for param in self.parameters:
if param.name == paramName:
return param.setValue(value)
def setOutputValue(self, outputName, value):
for out in self.outputs:
if out.name == outputName:
out.setValue(value)
def getVisibleOutputsCount(self):
"""Returns the number of non-hidden outputs.
"""
i = 0
for out in self.outputs:
if not out.hidden:
i += 1
return i
def getVisibleParametersCount(self):
"""Returns the number of non-hidden parameters.
"""
i = 0
for param in self.parameters:
if not param.hidden:
i += 1
return i
def getHTMLOutputsCount(self):
"""Returns the number of HTML outputs.
"""
i = 0
for out in self.outputs:
if isinstance(out, OutputHTML):
i += 1
return i
def getOutputValuesAsDictionary(self):
d = {}
for out in self.outputs:
d[out.name] = out.value
return d
def __str__(self):
s = 'ALGORITHM: ' + self.name + '\n'
for param in self.parameters:
s += '\t' + unicode(param) + '\n'
for out in self.outputs:
s += '\t' + unicode(out) + '\n'
s += '\n'
return s
def commandLineName(self):
name = self.provider.getName().lower() + ':' + self.name.lower()
validChars = \
'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789:'
name = ''.join(c for c in name if c in validChars)
return name
def removeOutputFromName(self, name):
for out in self.outputs:
if out.name == name:
self.outputs.remove(out)
def getOutputFromName(self, name):
for out in self.outputs:
if out.name == name:
return out
def getParameterFromName(self, name):
for param in self.parameters:
if param.name == name:
return param
def getParameterValue(self, name):
for param in self.parameters:
if param.name == name:
return param.value
return None
def getOutputValue(self, name):
for out in self.outputs:
if out.name == name:
return out.value
return None
def getAsCommand(self):
"""Returns the command that would run this same algorithm from
the console.
Should return None if the algorithm cannot be run from the
console.
"""
s = 'processing.runalg("' + self.commandLineName() + '",'
for param in self.parameters:
s += param.getValueAsCommandLineParameter() + ','
for out in self.outputs:
if not out.hidden:
s += out.getValueAsCommandLineParameter() + ','
s = s[:-1] + ')'
return s
def displayName(self):
return self.i18n_name or self.name
def displayNames(self):
return self.name, self.i18n_name
def tr(self, string, context=''):
if context == '':
context = self.__class__.__name__
return QCoreApplication.translate(context, string)
def trAlgorithm(self, string, context=''):
if context == '':
context = self.__class__.__name__
return string, QCoreApplication.translate(context, string)
| alexbruy/QGIS | python/plugins/processing/core/GeoAlgorithm.py | Python | gpl-2.0 | 21,081 |
#!/usr/bin/env python3
import matplotlib.pyplot as plt
from math import sqrt
from math import log
dx = [1/sqrt(16), 1/sqrt(64), 1/sqrt(256), 1/sqrt(1024)]
dx_tri = [1/sqrt(32), 1/sqrt(128), 1/sqrt(512), 1/sqrt(2048)]
dx_pert = [0.0270466, 0.0134827, 0.00680914, 0.00367054]
dx_fp = [0.122799, 0.081584, 0.0445639, 0.0225922, 0.0113763]
fp_actual = 0.0441995
rl2_euler = [0.00059068, 0.000113051, 2.26156e-05, 5.11884e-06]
rl2_euler_tri = [0.00101603, 0.000277795, 6.37774e-05, 1.4947e-05]
rl2_euler_tri_pert = [0.00053851, 0.000121805, 2.67446e-05, 4.97857e-05]
rl2_euler_tri_limited = [0.00234712, 0.000548344, 0.000139978, 3.56414e-05]
rl2_euler_lp_tri_limited = [0.00242227, 0.000586065, 0.000140727]
rl2_euler_limited = [0.00187271, 0.000435096, 0.000120633, 2.90233e-05]
rl2_euler_lp_limited = [0.00180033, 0.000422567, 0.000120477, 2.90644e-05]
rl2_ns = [0.000576472, 0.000132735, 7.0506e-05, 6.67272e-05]
rl2_ns_fp = [abs(fp_actual - 0.008118), abs(fp_actual - 0.015667), abs(fp_actual - 0.026915), abs(fp_actual - 0.037524), abs(fp_actual - 0.042895)]
print("rho euler l2: "+str(log(rl2_euler[2]/rl2_euler[3])/log(dx[2]/dx[3])))
print("rho euler tri l2: "+str(log(rl2_euler_tri[2]/rl2_euler_tri[3])/log(dx_tri[2]/dx_tri[3])))
print("rho euler tri perturbed l2: "+str(log(rl2_euler_tri_pert[1]/rl2_euler_tri_pert[2])/log(dx_pert[1]/dx_pert[2])))
print("rho euler tri limited l2: "+str(log(rl2_euler_tri_limited[2]/rl2_euler_tri_limited[3])/log(dx_tri[2]/dx_tri[3])))
print("rho euler lp tri limited l2: "+str(log(rl2_euler_lp_tri_limited[1]/rl2_euler_lp_tri_limited[2])/log(dx_tri[1]/dx_tri[2])))
print("rho euler limited l2: "+str(log(rl2_euler_limited[2]/rl2_euler_limited[3])/log(dx[2]/dx[3])))
print("rho euler lp limited l2: "+str(log(rl2_euler_lp_limited[2]/rl2_euler_lp_limited[3])/log(dx[2]/dx[3])))
print("rho ns l2: "+str(log(rl2_ns[0]/rl2_ns[1])/log(dx[0]/dx[1])))
print("rho ns end l2: "+str(log(rl2_ns[2]/rl2_ns[3])/log(dx[2]/dx[3])))
print("rho ns fp l2: "+str(log(rl2_ns_fp[0]/rl2_ns_fp[1])/log(dx_fp[0]/dx_fp[1])))
print("rho ns fp end l2: "+str(log(rl2_ns_fp[3]/rl2_ns_fp[4])/log(dx_fp[3]/dx_fp[4])))
plt.figure()
hlines = plt.loglog(dx, rl2_euler, dx, rl2_ns, dx, rl2_euler_limited, dx, rl2_euler_lp_limited, dx_tri, rl2_euler_tri, dx_tri, rl2_euler_tri_limited, dx_pert[0:3], rl2_euler_tri_pert[0:3], dx_fp, rl2_ns_fp)
plt.rc('text', usetex=True)
plt.xlabel("Grid size")
plt.ylabel("$L_2$ error")
plt.legend(hlines, ["euler", "NS manufactured", "euler scalar limited", "euler lp limited", "euler tri", "euler tri limited", "euler tri pert", "NS flat plate"])
plt.grid(True,which="both")
plt.show()
| Rob-Rau/EbbCFD | ms_refinement/plot_conv.py | Python | mit | 2,727 |
from werkzeug.wrappers import Request, Response
@Request.application
def application(request):
return Response('Hello World!')
if __name__ == '__main__':
from werkzeug.serving import run_simple
run_simple('localhost', 4000, application)
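# Quick smoke test once the server is running (illustrative):
#   $ curl http://localhost:4000/
#   Hello World!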
| mightysabean/c- | simple.py | Python | mpl-2.0 | 250 |
'''
Add the following to your project/settings.py
AUTHENTICATION_BACKENDS = ('django_linotp.linotp_auth.LinOTP', )
LINOTP = { 'url' : 'https://puckel/validate/check',
'timeout' : 5,
'ssl_verify' : False,
'host_verify' : False,
'create_user' : False,
}
'create_user': if set to True, the user in the django DB will be created, if LinOTP returns a successful authentication
'''
from django.conf import settings
from django.contrib.auth.models import User, check_password
import sys
import pycurl
import logging
import traceback
from urllib import urlencode
import json
logger = logging.getLogger(__name__)
class Test:
def __init__(self):
self.contents = ''
def body_callback(self, buf):
self.contents = self.contents + buf
class LinOTP(object):
def __init__(self):
self.url = 'https://localhost/validate/check'
self.timeout = 5
self.ssl_verify = False
self.host_verify = False
self.create_user = False
if settings.LINOTP:
self.url = settings.LINOTP.get('url', self.url)
self.timeout = settings.LINOTP.get('timeout', self.timeout)
self.ssl_verify = settings.LINOTP.get('ssl_verify', self.ssl_verify)
self.host_verify = settings.LINOTP.get('host_verify', self.host_verify)
self.create_user = settings.LINOTP.get('create_user', self.create_user)
def authenticate(self, username=None, password=None):
user = None
try:
t = Test()
c = pycurl.Curl()
params = { 'user' : username, 'pass' : password }
url = str("%s?%s" % (self.url, urlencode(params)))
print "Connecting to %s" % url
c.setopt(c.URL, url)
c.setopt(c.WRITEFUNCTION, t.body_callback)
c.setopt(c.HEADER, False)
c.setopt(c.SSL_VERIFYPEER, self.ssl_verify)
c.setopt(c.SSL_VERIFYHOST, self.host_verify)
c.setopt(c.CONNECTTIMEOUT, self.timeout)
c.perform()
c.close()
print t.contents
res = json.loads(t.contents)
if (res.get('result',{}).get('status') == True and
res.get('result',{}).get('value') == True):
user = User.objects.get(username=username)
except User.DoesNotExist:
# The user was authenticated by LinOTP but does not exist!
print "User authenticated but does not exist"
if self.create_user:
print "creating user"
# FIXME: for some reason this does not work at the moment
user = User(username=username, password="supersecret")
user.is_staff = True
user.is_superuser = False
user.save()
except Exception as e:
print traceback.format_exc()
print e
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| cornelinux/django-linotp-auth | django_linotp/linotp_auth.py | Python | gpl-3.0 | 2,749 |
import os
import re
import codecs
from setuptools import setup, find_packages
def read(*parts):
filename = os.path.join(os.path.dirname(__file__), *parts)
with codecs.open(filename, encoding='utf-8') as fp:
return fp.read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
setup(
name='django-constance',
version=find_version("constance", "__init__.py"),
url="http://github.com/jezdez/django-constance",
description='Django live settings with pluggable backends, including Redis.',
long_description=read('README.rst'),
author='Jannis Leidel',
author_email='jannis@leidel.info',
license='BSD',
keywords='django libraries settings redis'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Utilities',
],
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
extras_require={
'database': ['django-picklefield'],
'redis': ['redis'],
}
)
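# The extras_require above enables optional backends at install time, e.g.
# (illustrative commands):
#   pip install django-constance[redis]
#   pip install django-constance[database]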
| metalpriest/django-constance | setup.py | Python | bsd-3-clause | 1,931 |
import json
import os
rootUrl = os.environ.get(
'TASKCLUSTER_ROOT_URL',
'https://community-tc.services.mozilla.com')
if 'TC_PROXY' in os.environ:
PROXY_INDEX_URL = 'http://taskcluster/api/index/v1/task/{}'
else:
PROXY_INDEX_URL = rootUrl + '/api/index/v1/task/{}'
ARTIFACT_URL = rootUrl + '/api/queue/v1/task/{}/artifacts/{}'
DEFAULT_DATA = {
'repo_name': 'git-cinnabar',
'login': 'glandium',
'commit': 'HEAD',
'branch': '',
'decision_id': '',
}
DEFAULT_DATA['repo_url'] = 'https://github.com/{}/{}'.format(
DEFAULT_DATA['login'], DEFAULT_DATA['repo_name'])
for k in ('repo_name', 'login'):
DEFAULT_DATA['base_{}'.format(k)] = DEFAULT_DATA[k]
TC_DATA = json.loads(os.environ.get('TC_DATA', json.dumps(DEFAULT_DATA)))
def get(k):
return TC_DATA.get(k, DEFAULT_DATA[k])
TC_LOGIN = get('login')
TC_REPO_NAME = get('repo_name')
TC_REPO_URL = get('repo_url')
TC_COMMIT = get('commit')
TC_BRANCH = get('branch')
TC_BASE_LOGIN = get('base_login')
TC_BASE_REPO_NAME = get('base_repo_name')
TC_ACTION = os.environ.get('TC_ACTION')
TC_IS_PUSH = os.environ.get('TC_IS_PUSH') == '1'
DEFAULT_REPO = 'https://hg.mozilla.org/users/mh_glandium.org/jqplot'
REPO = os.environ.get('REPO', DEFAULT_REPO)
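# Illustrative use of the URL templates above (index path, task id and artifact
# name are assumptions, not taken from this repository):
#   PROXY_INDEX_URL.format('project.git-cinnabar.build.latest')
#   ARTIFACT_URL.format('abc123TaskId', 'public/build/git-cinnabar')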
| glandium/git-cinnabar | CI/variables.py | Python | gpl-2.0 | 1,243 |
'''
Created on Aug 25, 2011
@author: r4stl1n
'''
import sys
from threading import Thread
import paramiko
#Check For Paramiko Dependency
class Connection (Thread):
'''
This is the class that checks whether a specific
username and password combination is successful.
'''
def __init__(self,username, password, targetIp, portNumber, timeoutTime):
super(Connection, self).__init__()
self.username = username
self.password = password
self.targetIp = targetIp
self.portNumber = portNumber
self.timeoutTime = timeoutTime
self.status = ""
def run(self):
sshConnection = paramiko.SSHClient()
sshConnection.set_missing_host_key_policy(paramiko.AutoAddPolicy())
paramiko.util.log_to_file("filename.log")
try:
sshConnection.connect(self.targetIp, port = int(self.portNumber),
username = self.username,password = self.password,
timeout = int(self.timeoutTime), allow_agent = False,look_for_keys = False)
self.status = 'Succeeded'
sshConnection.close()
except:
self.status = 'Failed'
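# Hedged usage sketch (names and addresses are illustrative, not part of the original
# module): one Connection thread per candidate password, joined before reading .status.
#   threads = [Connection("root", pw, "192.0.2.10", 22, 5) for pw in candidates]
#   for t in threads: t.start()
#   for t in threads: t.join()
#   hits = [t.password for t in threads if t.status == 'Succeeded']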
| CarlosLannister/TFG-ShodanScripts | ssh/SSH-Brute-Forcer/Connection.py | Python | mit | 1,232 |
from contentbase import upgrade_step
@upgrade_step('analysis_step', '1', '2')
def analysis_step_1_2(value, system):
# http://redmine.encodedcc.org/issues/2770
input_mapping = {
'align-star-pe-v-1-0-2': ['reads'],
'align-star-pe-v-2-0-0': ['reads'],
'align-star-se-v-1-0-2': ['reads'],
'align-star-se-v-2-0-0': ['reads'],
'index-star-v-1-0-1': ['genome reference', 'spike-in sequence', 'reference genes'],
'index-star-v-2-0-0': ['genome reference', 'spike-in sequence', 'reference genes'],
'index-rsem-v-1-0-1': ['genome reference', 'spike-in sequence', 'reference genes'],
'index-tophat-v-1-0-0': ['genome reference', 'spike-in sequence', 'reference genes'],
'quant-rsem-v-1-0-2': ['transcriptome alignments'],
'stranded-signal-star-v-1-0-1': ['alignments'],
'stranded-signal-star-v-2-0-0': ['alignments'],
'unstranded-signal-star-v-1-0-1': ['alignments'],
'unstranded-signal-star-v-2-0-0': ['alignments'],
'align-tophat-pe-v-1-0-1': ['reads'],
'align-tophat-se-v-1-0-1': ['reads']
}
output_mapping = {
'align-star-pe-v-1-0-2': ['alignments'],
'align-star-pe-v-2-0-0': ['alignments'],
'align-star-se-v-1-0-2': ['alignments'],
'align-star-se-v-2-0-0': ['alignments'],
'index-star-v-1-0-1': ['genome index'],
'index-star-v-2-0-0': ['genome index'],
'index-rsem-v-1-0-1': ['genome index'],
'index-tophat-v-1-0-0': ['genome index'],
'quant-rsem-v-1-0-2': ['gene quantifications'],
'stranded-signal-star-v-1-0-1': [
'minus strand signal of multi-mapped reads',
'plus strand signal of multi-mapped reads',
'minus strand signal of unique reads',
'plus strand signal of unique reads'
],
'stranded-signal-star-v-2-0-0': [
'minus strand signal of multi-mapped reads',
'plus strand signal of multi-mapped reads',
'minus strand signal of unique reads',
'plus strand signal of unique reads'
],
'unstranded-signal-star-v-1-0-1': [
'signal of multi-mapped reads',
'signal of unique reads'
],
'unstranded-signal-star-v-2-0-0': [
'signal of multi-mapped reads',
'signal of unique reads'
],
'align-tophat-pe-v-1-0-1': ['alignments'],
'align-tophat-se-v-1-0-1': ['alignments']
}
value['input_file_types'] = input_mapping[value['name']]
value['output_file_types'] = output_mapping[value['name']]
@upgrade_step('analysis_step', '2', '3')
def analysis_step_2_3(value, system):
# http://redmine.encodedcc.org/issues/3019
import re
if 'output_file_types' in value:
for i in range(0, len(value['output_file_types'])):
string = value['output_file_types'][i]
value['output_file_types'][i] = re.sub('multi-mapped', 'all', string)
if 'input_file_types' in value:
for i in range(0, len(value['input_file_types'])):
string = value['input_file_types'][i]
value['input_file_types'][i] = re.sub('multi-mapped', 'all', string)
# http://redmine.encodedcc.org/issues/3074
del value['software_versions']
# http://redmine.encodedcc.org/issues/3074 note 16 and 3073
if value.get('name') in ['lrna-se-star-alignment-step-v-2-0',
'lrna-pe-star-alignment-step-v-2-0',
'lrna-pe-star-stranded-signal-step-v-2-0',
'lrna-pe-star-stranded-signals-for-tophat-step-v-2-0',
'lrna-se-star-unstranded-signal-step-v-2-0',
'lrna-se-star-unstranded-signals-for-tophat-step-v-2-0',
'index-star-v-2-0',
'rampage-grit-peak-calling-step-v-1-1'
]:
value['status'] = 'deleted'
if value.get('name') == 'lrna-pe-rsem-quantification-v-1':
value['parents'] = ['ace7163c-563a-43d6-a86f-686405af167d', #/analysis-steps/lrna-pe-star-alignment-step-v-1/'
'9ca04da2-5ef7-4ba1-b78c-41dfc4be0c11' #/analysis-steps/index-rsem-v-1-0/'
]
elif value.get('name') == 'lrna-se-rsem-quantification-step-v-1':
value['parents'] = ['3cad3827-7f21-4f70-9cbc-e718b5529775', #/analysis-steps/lrna-se-star-alignment-step-v-1/',
'9ca04da2-5ef7-4ba1-b78c-41dfc4be0c11' #/analysis-steps/index-rsem-v-1-0/'
]
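# Illustrative input/output for the '1' -> '2' step above (not taken from the
# repository's tests):
#   value = {'name': 'quant-rsem-v-1-0-2'}
#   analysis_step_1_2(value, system=None)
#   value['input_file_types']  == ['transcriptome alignments']
#   value['output_file_types'] == ['gene quantifications']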
| kidaa/encoded | src/encoded/upgrade/analysis_step.py | Python | mit | 4,642 |
# -*- coding: utf-8 -*-
"""
===============================================================================
module __StokesFlow__: Viscous fluid flow
===============================================================================
"""
import scipy as sp
from OpenPNM.Algorithms import GenericLinearTransport
from OpenPNM.Base import logging
logger = logging.getLogger(__name__)
class StokesFlow(GenericLinearTransport):
r"""
A subclass of GenericLinearTransport to simulate viscous flow. The 2
main roles of this subclass are to set the default property names and to
implement a method for calculating the hydraulic permeability of the network.
Examples
--------
>>> import OpenPNM
>>> pn = OpenPNM.Network.TestNet()
>>> geo = OpenPNM.Geometry.TestGeometry(network=pn,
... pores=pn.pores(),
... throats=pn.throats())
>>> phase1 = OpenPNM.Phases.TestPhase(network=pn)
>>> phys1 = OpenPNM.Physics.TestPhysics(network=pn, phase=phase1,
... pores=pn.pores(),throats=pn.throats())
>>> alg = OpenPNM.Algorithms.StokesFlow(network=pn, phase=phase1)
>>> BC1_pores = pn.pores('top')
>>> alg.set_boundary_conditions(bctype='Dirichlet', bcvalue=0.6, pores=BC1_pores)
>>> BC2_pores = pn.pores('bottom')
>>> alg.set_boundary_conditions(bctype='Dirichlet', bcvalue=0.4, pores=BC2_pores)
>>> alg.run()
>>> alg.return_results()
>>> Peff = round(alg.calc_eff_permeability(), 10)
>>> print(Peff)
1.8663e-05
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
logger.info('Create ' + self.__class__.__name__ + ' Object')
def setup(self, conductance='hydraulic_conductance', quantity='pressure',
super_pore_conductance=None, **params):
r"""
        This method provides the initial requirements for the solver setup.
"""
logger.info('Setup ' + self.__class__.__name__)
super().setup(conductance=conductance, quantity=quantity,
super_pore_conductance=super_pore_conductance)
def calc_eff_permeability(self):
r"""
This calculates the effective permeability in this linear
transport algorithm.
"""
d_normal = self._calc_eff_prop()
self._eff_property = d_normal * sp.mean(self._phase['pore.viscosity'])
return self._eff_property
|
amdouglas/OpenPNM
|
OpenPNM/Algorithms/__StokesFlow__.py
|
Python
|
mit
| 2,480
|
#!/usr/bin/python3
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import psycopg2
import xlsxwriter
import os
import sys
import smtplib
#usage: python3 colum.py <report name> <to address> <sql file>
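#example invocation (all values below are illustrative, not real):
#  python3 colum.py "weekly report" someone@example.com weekly_report.sql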
SQL_Code = open(str(sys.argv[3]), 'r').read()
#Connecting to PostgreSQL
def main():
conn_string = "host='db' dbname='directski' user='pgsql' password=''"
print ("Connecting to database\n ->%s" % (conn_string))
conn = psycopg2.connect(conn_string)
cursor = conn.cursor()
print ("Connected!\n")
cursor.execute(SQL_Code)
filename = str(sys.argv[1]).replace(" ", "_").lower()
workbook = xlsxwriter.Workbook(filename + ".xlsx", {'remove_timezone': True})
worksheet = workbook.add_worksheet()
data = cursor.fetchall()
# Headers
for colidx,heading in enumerate(cursor.description):
worksheet.write(0, colidx, heading[0])
# Writing the Rows
for rowid, row in enumerate(data):
for colid, col in enumerate(row):
worksheet.write(rowid+1, colid, col)
# Saving
workbook.close()
fromaddr = "temp@temp.com"
toaddr = str(sys.argv[2])
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Subject'] = str(sys.argv[1])
body = ""
msg.attach(MIMEText(body, 'plain'))
attachment = open(filename + ".xlsx", "rb")
part = MIMEBase('application', 'octet-stream')
part.set_payload((attachment).read())
encoders.encode_base64(part)
part.add_header('Content-Disposition', "attachment; filename= %s" % filename + ".xlsx")
msg.attach(part)
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(fromaddr, "temp123")
text = msg.as_string()
server.sendmail(fromaddr, toaddr, text)
server.quit()
if __name__ == "__main__":
main()
|
ColumBrennan/data-dumper
|
app.py
|
Python
|
gpl-3.0
| 1,987
|
import pytest
import json
from . import TestBase
from tests.factories import item_factories
def check_valid_header_type(headers):
assert headers['Content-Type'] == 'application/json'
class TestItemAPI(TestBase):
def test_get_item_present(self, test_client, item):
r = test_client.get('/api/items/%d' % item.id)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert data['item']['title'] == item.title
assert data['item']['vote'] == 0
assert data['item']['unread'] == True
def test_get_item_missing(self, test_client):
r = test_client.get('/api/items/%d' % 10)
check_valid_header_type(r.headers)
assert r.status_code == 404
def test_put_item_upvote(self, test_client, user, item):
self.login(test_client, user.email, user.password)
upvote = dict(vote=1)
r = test_client.put('/api/items/%d' % item.id, data=upvote)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert data['item']['voteSum'] == 1
def test_put_item_downvote(self, test_client, user, item):
self.login(test_client, user.email, user.password)
downvote = dict(vote=-1)
r = test_client.put('/api/items/%d' % item.id, data=downvote)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert data['item']['voteSum'] == -1
def test_put_multi_upvotes(self, test_client, user_item_upvote):
self.login(test_client, user_item_upvote.user.email, user_item_upvote.user.password)
upvote = dict(vote=1)
r = test_client.put('/api/items/%d' % user_item_upvote.item.id, data=upvote)
check_valid_header_type(r.headers)
assert r.status_code == 422
data = json.loads(r.data)
assert "already voted" in data['errors']['vote'][0]
def test_put_upvote_then_downvote(self, test_client, user_item_upvote):
self.login(test_client, user_item_upvote.user.email, user_item_upvote.user.password)
downvote = dict(vote=-1)
r = test_client.put('/api/items/%d' % user_item_upvote.item.id, data=downvote)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert data['item']['voteSum'] == -1
def test_put_downvote_then_upvote(self, test_client, user_item_downvote):
self.login(test_client, user_item_downvote.user.email, user_item_downvote.user.password)
upvote = dict(vote=1)
r = test_client.put('/api/items/%d' % user_item_downvote.item.id, data=upvote)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert data['item']['voteSum'] == 1
def test_put_multi_downvotes(self, test_client, user_item_downvote):
self.login(test_client, user_item_downvote.user.email, user_item_downvote.user.password)
downvote = dict(vote=-1)
r = test_client.put('/api/items/%d' % user_item_downvote.item.id, data=downvote)
check_valid_header_type(r.headers)
assert r.status_code == 422
data = json.loads(r.data)
assert "already voted" in data['errors']['vote'][0]
def test_put_item_too_big_vote(self, test_client, user, item):
self.login(test_client, user.email, user.password)
original_vote_count = item.voteSum
too_big_vote = dict(vote=5)
r = test_client.put('/api/items/%d' % item.id, data=too_big_vote)
check_valid_header_type(r.headers)
assert r.status_code == 422
data = json.loads(r.data)
assert "Vote may only be" in data['errors']['vote'][0]
def test_put_item_non_integer_vote(self, test_client, user, item):
self.login(test_client, user.email, user.password)
non_integer_vote = dict(vote=0.33)
r = test_client.put('/api/items/%d' % item.id, data=non_integer_vote)
check_valid_header_type(r.headers)
assert r.status_code == 400
def test_put_vote_missing(self, test_client, user, item):
self.login(test_client, user.email, user.password)
original_vote_count = item.voteSum
no_vote = dict()
r = test_client.put('/api/items/%d' % item.id, data=no_vote)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert data['item']['voteSum'] == original_vote_count
def test_put_item_missing(self, test_client, user, item):
self.login(test_client, user.email, user.password)
upvote = dict(vote=1)
r = test_client.put('/api/items/%d' % (int(item.id+1)), data=upvote)
check_valid_header_type(r.headers)
assert r.status_code == 404
def test_put_item_mark_read(self, test_client, user, item):
self.login(test_client, user.email, user.password)
read = dict(unread=False)
r = test_client.put('/api/items/%d' % item.id, data=read)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert data['item']['unread'] == False
def test_put_item_mark_unread(self, test_client, user, item):
self.login(test_client, user.email, user.password)
read = dict(unread=True)
r = test_client.put('/api/items/%d' % item.id, data=read)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert data['item']['unread'] == True
def test_put_item_mark_read_as_unread(self, test_client, user, user_item_read):
self.login(test_client, user.email, user.password)
read = dict(unread=True)
r = test_client.put('/api/items/%d' % user_item_read.item.id, data=read)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert data['item']['unread'] == True
def test_put_item_save(self, test_client, user_item):
self.login(test_client, user_item.user.email, user_item.user.password)
saved = dict(saved=True)
r = test_client.put('/api/items/%d' % user_item.item.id, data=saved)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
        print(data)
assert data['item']['saved'] == True
def test_put_item_remove_saved(self, test_client, user_item_saved):
self.login(test_client, user_item_saved.user.email, user_item_saved.user.password)
saved = dict(saved=False)
r = test_client.put('/api/items/%d' % user_item_saved.item.id, data=saved)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert data['item']['saved'] == False
class TestSavedItemListAPI(TestBase):
def test_get_items(self, test_client, user_item_saved):
self.login(test_client, user_item_saved.user.email, user_item_saved.user.password)
r = test_client.get('/api/items/saved')
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert len(data['items']) == 1
class TestTrendingItemListAPI(TestBase):
def test_get_items_only_nonzero_votes(self, test_client, user_item_upvote, user_item):
self.login(test_client, user_item_upvote.user.email, user_item_upvote.user.password)
r = test_client.get('/api/items/trending')
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert len(data['items']) == 1
class TestFeedItemListAPI:
def test_get_items(self, test_client, item):
r = test_client.get('/api/feeds/%d/items' % item.feed.id)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert len(data['items']) == 1
class TestCategoryItemListAllAPI:
def test_get_category_items_missing_category(self, test_client):
r = test_client.get('/api/categories/100/items/all')
check_valid_header_type(r.headers)
assert r.status_code == 404
def test_get_category_items_present(self, test_client, itemsWithCategory):
category = itemsWithCategory[0].cats.first()
r = test_client.get('/api/categories/%d/items/all' % category.id)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert len(data['items']) == 5
class TestCategoryItemListAPI(TestBase):
def test_get_items_present(self, test_client, userWithPopulatedFeed):
self.login(test_client, userWithPopulatedFeed.email, userWithPopulatedFeed.password)
feed = userWithPopulatedFeed.subscribed.first()
category = feed.items[0].cats.first()
r = test_client.get('/api/categories/%d/items' % category.id)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert len(data['items']) == 5
def test_get_items_category_missing(self, test_client, userWithPopulatedFeed):
self.login(test_client, userWithPopulatedFeed.email, userWithPopulatedFeed.password)
r = test_client.get('/api/categories/100/items')
check_valid_header_type(r.headers)
assert r.status_code == 404
def test_get_items_category_with_unsubscribed_item(self, test_client, userWithPopulatedFeed, itemsWithCategory):
self.login(test_client, userWithPopulatedFeed.email, userWithPopulatedFeed.password)
feed = userWithPopulatedFeed.subscribed.first()
user_items_length = feed.items.count()
category = feed.items[0].cats.first()
r = test_client.get('/api/categories/%d/items' % category.id)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert len(data['items']) == user_items_length + 1
assert len(filter(lambda item: item['feed']['subscribed'] == False, data['items'])) == 1
def test_get_items_category_ignore_unsubscribed_item(self, test_client, userWithoutSuggestedContent, itemsWithCategory):
self.login(test_client, userWithoutSuggestedContent.email, userWithoutSuggestedContent.password)
feed = userWithoutSuggestedContent.subscribed.first()
user_items_length = feed.items.count()
category = feed.items[0].cats.first()
r = test_client.get('/api/categories/%d/items' % category.id)
check_valid_header_type(r.headers)
assert r.status_code == 200
data = json.loads(r.data)
assert len(data['items']) == user_items_length
assert len(filter(lambda item: item['feed']['subscribed'] == False, data['items'])) == 0
|
sourcemash/Sourcemash
|
tests/test_api/test_items.py
|
Python
|
gpl-2.0
| 10,962
|
# Generated by Django 1.10.7 on 2017-05-18 22:37
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('case_search', '0005_migrate_json_config'),
]
operations = [
migrations.RemoveField(
model_name='casesearchconfig',
name='_config',
),
]
|
dimagi/commcare-hq
|
corehq/apps/case_search/migrations/0006_remove_casesearchconfig__config.py
|
Python
|
bsd-3-clause
| 345
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import json
from random import shuffle
from datetime import datetime
import psycopg2
from django.db import connection
from django.db.models import Q
from django.core.exceptions import ValidationError
from django.contrib.gis.geos import Point, Polygon
from treemap.tests import (make_instance, make_commander_user,
make_officer_user,
set_write_permissions)
from treemap.lib.object_caches import role_permissions
from treemap.lib.udf import udf_create
from treemap.udf import UserDefinedFieldDefinition
from treemap.models import Instance, Plot, User
from treemap.audit import (AuthorizeException, FieldPermission, Role,
approve_or_reject_audit_and_apply,
approve_or_reject_audits_and_apply)
from treemap.tests.base import OTMTestCase
def make_collection_udf(instance, name='Stewardship', model='Plot',
datatype=None):
    # Need to set up the hstore extension to make UDFs
psycopg2.extras.register_hstore(connection.cursor(), globally=True)
if datatype is None:
datatype = [
{'type': 'choice',
'choices': ['water', 'prune'],
'name': 'action'},
{'type': 'int',
'name': 'height'}]
return UserDefinedFieldDefinition.objects.create(
instance=instance,
model_type=model,
datatype=json.dumps(datatype),
iscollection=True,
name=name)
class ScalarUDFFilterTest(OTMTestCase):
def setUp(self):
self.p = Point(0, 0)
self.instance = make_instance(point=self.p)
self.commander_user = make_commander_user(self.instance)
set_write_permissions(self.instance, self.commander_user,
'Plot',
['udf:Test choice', 'udf:Test string',
'udf:Test int', 'udf:Test date',
'udf:Test float'])
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'choice',
'choices': ['a', 'b', 'c']}),
iscollection=False,
name='Test choice')
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'string'}),
iscollection=False,
name='Test string')
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'date'}),
iscollection=False,
name='Test date')
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'int'}),
iscollection=False,
name='Test int')
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'float'}),
iscollection=False,
name='Test float')
self.plot = Plot(geom=self.p, instance=self.instance)
self.plot.save_with_user(self.commander_user)
psycopg2.extras.register_hstore(connection.cursor(), globally=True)
def create_and_save_with_choice(c, n=1):
plots = []
for i in xrange(n):
plot = Plot(geom=self.p, instance=self.instance)
plot.udfs['Test choice'] = c
plot.save_with_user(self.commander_user)
plots.append(plot)
return {plot.pk for plot in plots}
self.choice_a = create_and_save_with_choice('a', n=2)
self.choice_b = create_and_save_with_choice('b', n=3)
self.choice_c = create_and_save_with_choice('c', n=7)
def test_filtering_on_string_and_choice_using_count(self):
plots = Plot.objects.filter(**{'udf:Test choice': 'a'})
self.assertEqual(
len(self.choice_a),
plots.count())
def test_filtering_on_value_works(self):
plots = Plot.objects.filter(**{'udf:Test choice': 'b'})
self.assertEqual(
self.choice_b,
{plot.pk for plot in plots})
def test_combine_with_geom(self):
plot_a = Plot.objects.get(pk=self.choice_a.pop())
plot_b = Plot.objects.get(pk=self.choice_b.pop())
p = Point(10, 0)
poly = Polygon(((5, -5), (15, -5), (15, 5), (5, 5), (5, -5)))
plot_a.geom = p
plot_a.save_with_user(self.commander_user)
plot_b.geom = p
plot_b.save_with_user(self.commander_user)
a_in_poly = Plot.objects.filter(**{'udf:Test choice': 'a'})\
.filter(geom__contained=poly)
self.assertEqual({plot.pk for plot in a_in_poly},
{plot_a.pk, })
b_in_poly = Plot.objects.filter(**{'udf:Test choice': 'b'})\
.filter(geom__contained=poly)
self.assertEqual({plot.pk for plot in b_in_poly},
{plot_b.pk, })
def test_search_suffixes(self):
plot1 = Plot(geom=self.p, instance=self.instance)
plot1.udfs['Test string'] = 'this is a test'
plot1.save_with_user(self.commander_user)
plot2 = Plot(geom=self.p, instance=self.instance)
plot2.udfs['Test string'] = 'this is aLsO'
plot2.save_with_user(self.commander_user)
def run(sfx, val):
return {plot.pk
for plot
in Plot.objects.filter(
**{'udf:Test string' + sfx: val})}
self.assertEqual(set(), run('', 'also'))
self.assertEqual({plot1.pk, plot2.pk},
run('__contains', 'this is a'))
self.assertEqual({plot2.pk}, run('__icontains', 'this is al'))
def _setup_dates(self):
def create_plot_with_date(adate):
plot = Plot(geom=self.p, instance=self.instance)
plot.udfs['Test date'] = adate
plot.save_with_user(self.commander_user)
return plot
dates = [
(2010, 3, 4),
(2010, 3, 5),
(2010, 4, 4),
(2010, 5, 5),
(2012, 3, 4),
(2012, 3, 5),
(2012, 4, 4),
(2012, 5, 5),
(2013, 3, 4)]
dates = [datetime(*adate) for adate in dates]
# Get dates out of standard order
shuffle(dates, lambda: 0.5)
for adate in dates:
create_plot_with_date(adate)
return dates
def test_date_ordering_normal(self):
dates = self._setup_dates()
plots = Plot.objects.filter(**{'udf:Test date__isnull': False})\
.order_by('MapFeature.udf:Test date')
dates.sort()
selected_dates = [plot.udfs['Test date']
for plot in plots]
self.assertEqual(dates, selected_dates)
def test_date_ordering_reverse(self):
dates = self._setup_dates()
plots = Plot.objects.filter(**{'udf:Test date__isnull': False})\
.order_by('-MapFeature.udf:Test date')
dates.sort()
dates.reverse()
selected_dates = [plot.udfs['Test date']
for plot in plots]
self.assertEqual(dates, selected_dates)
def test_date_ordering_gt(self):
self._setup_dates()
adate = datetime(2011, 1, 1)
plots = Plot.objects.filter(**{'udf:Test date__gt': adate})
self.assertEqual(len(plots), 5)
plots = Plot.objects.filter(**{'udf:Test date__lt': adate})
self.assertEqual(len(plots), 4)
def test_integer_gt_and_lte_constraints(self):
def create_plot_with_num(anint):
plot = Plot(geom=self.p, instance=self.instance)
plot.udfs['Test int'] = anint
plot.save_with_user(self.commander_user)
return plot
for i in xrange(0, 7):
create_plot_with_num(i)
plots = Plot.objects.filter(**{'udf:Test int__gt': 2,
'udf:Test int__lte': 4})
self.assertEqual(len(plots), 2)
def test_float_gt_and_lte_constraints(self):
def create_plot_with_num(afloat):
plot = Plot(geom=self.p, instance=self.instance)
plot.udfs['Test float'] = afloat
plot.save_with_user(self.commander_user)
return plot
        # creates 1.0 through 2.9, moving by tenths
for i in xrange(10, 30):
create_plot_with_num(float(i)/10.0)
plots = Plot.objects.filter(**{'udf:Test float__gt': 1.5,
'udf:Test float__lte': 2.0})
self.assertEqual(len(plots), 5) # 1.6, 1.7, 1.8, 1.9, 2.0
def test_using_q_objects(self):
qb = Q(**{'udf:Test choice': 'b'})
qc = Q(**{'udf:Test choice': 'c'})
q = qb | qc
plots = Plot.objects.filter(q)
self.assertEqual(
self.choice_b | self.choice_c,
{plot.pk for plot in plots})
class UDFAuditTest(OTMTestCase):
def setUp(self):
self.p = Point(-8515941.0, 4953519.0)
self.instance = make_instance(point=self.p)
self.commander_user = make_commander_user(self.instance)
set_write_permissions(self.instance, self.commander_user,
'Plot', ['udf:Test choice'])
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'choice',
'choices': ['a', 'b', 'c']}),
iscollection=False,
name='Test choice')
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'string'}),
iscollection=False,
name='Test unauth')
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps([{'type': 'choice',
'name': 'a choice',
'choices': ['a', 'b', 'c']},
{'type': 'string',
'name': 'a string'}]),
iscollection=True,
name='Test collection')
self.plot = Plot(geom=self.p, instance=self.instance)
self.plot.save_with_user(self.commander_user)
psycopg2.extras.register_hstore(connection.cursor(), globally=True)
def test_mask_unauthorized_with_udfs(self):
officer_user = make_officer_user(self.instance)
self.plot.udfs['Test choice'] = 'b'
self.plot.save_with_user(self.commander_user)
self.plot.udfs['Test unauth'] = 'foo'
self.plot.save_base()
newplot = Plot.objects.get(pk=self.plot.pk)
self.assertEqual(newplot.udfs['Test choice'], 'b')
self.assertEqual(newplot.udfs['Test unauth'], 'foo')
newplot = Plot.objects.get(pk=self.plot.pk)
newplot.mask_unauthorized_fields(self.commander_user)
self.assertEqual(newplot.udfs['Test choice'], 'b')
self.assertEqual(newplot.udfs['Test unauth'], None)
newplot = Plot.objects.get(pk=self.plot.pk)
newplot.mask_unauthorized_fields(officer_user)
self.assertEqual(newplot.udfs['Test choice'], None)
self.assertEqual(newplot.udfs['Test unauth'], None)
def test_update_field_creates_audit(self):
self.plot.udfs['Test choice'] = 'b'
self.plot.save_with_user(self.commander_user)
last_audit = list(self.plot.audits())[-1]
self.assertEqual(last_audit.model, 'Plot')
self.assertEqual(last_audit.model_id, self.plot.pk)
self.assertEqual(last_audit.field, 'udf:Test choice')
self.assertEqual(last_audit.previous_value, None)
self.assertEqual(last_audit.current_value, 'b')
self.plot.udfs['Test choice'] = 'c'
self.plot.save_with_user(self.commander_user)
last_audit = list(self.plot.audits())[-1]
self.assertEqual(last_audit.model, 'Plot')
self.assertEqual(last_audit.model_id, self.plot.pk)
self.assertEqual(last_audit.field, 'udf:Test choice')
self.assertEqual(last_audit.previous_value, 'b')
self.assertEqual(last_audit.current_value, 'c')
def test_cant_edit_unauthorized_collection(self):
self.plot.udfs['Test collection'] = [
{'a choice': 'a', 'a string': 's'}]
self.assertRaises(AuthorizeException,
self.plot.save_with_user, self.commander_user)
def test_cant_edit_unauthorized_field(self):
self.plot.udfs['Test unauth'] = 'c'
self.assertRaises(AuthorizeException,
self.plot.save_with_user, self.commander_user)
def test_create_and_apply_pending(self):
pending = self.plot.audits().filter(requires_auth=True)
self.assertEqual(len(pending), 0)
role = self.commander_user.get_role(self.instance)
fp, __ = FieldPermission.objects.get_or_create(
model_name='Plot', field_name='udf:Test unauth',
permission_level=FieldPermission.WRITE_WITH_AUDIT,
role=role, instance=self.instance)
self.plot.udfs['Test unauth'] = 'c'
self.plot.save_with_user(self.commander_user)
reloaded_plot = Plot.objects.get(pk=self.plot.pk)
self.assertEqual(
reloaded_plot.udfs['Test unauth'],
None)
pending = self.plot.audits().filter(requires_auth=True)
self.assertEqual(len(pending), 1)
fp.permission_level = FieldPermission.WRITE_DIRECTLY
fp.save()
approve_or_reject_audit_and_apply(pending[0],
self.commander_user,
True)
reloaded_plot = Plot.objects.get(pk=self.plot.pk)
self.assertEqual(
reloaded_plot.udfs['Test unauth'],
'c')
def test_create_invalid_pending_collection(self):
pending = self.plot.audits().filter(requires_auth=True)
self.assertEqual(len(pending), 0)
role = self.commander_user.get_role(self.instance)
fp, __ = FieldPermission.objects.get_or_create(
model_name='Plot', field_name='udf:Test collection',
permission_level=FieldPermission.WRITE_WITH_AUDIT,
role=role, instance=self.instance)
self.plot.udfs['Test collection'] = [
{'a choice': 'invalid choice', 'a string': 's'}]
self.assertRaises(ValidationError,
self.plot.save_with_user, self.commander_user)
def test_create_and_apply_pending_collection(self):
pending = self.plot.audits().filter(requires_auth=True)
self.assertEqual(len(pending), 0)
role = self.commander_user.get_role(self.instance)
fp, __ = FieldPermission.objects.get_or_create(
model_name='Plot', field_name='udf:Test collection',
permission_level=FieldPermission.WRITE_WITH_AUDIT,
role=role, instance=self.instance)
self.plot.udfs['Test collection'] = [
{'a choice': 'a', 'a string': 's'}]
self.plot.save_with_user(self.commander_user)
reloaded_plot = Plot.objects.get(pk=self.plot.pk)
self.assertEqual(
reloaded_plot.udfs['Test collection'],
[])
pending = self.plot.audits().filter(requires_auth=True)
# Expecting 'model_id', 'id', 'field def id'
# and two udf fields ('a string' and 'a choice')
self.assertEqual(len(pending), 5)
fp.permission_level = FieldPermission.WRITE_DIRECTLY
fp.save()
approve_or_reject_audits_and_apply(pending,
self.commander_user,
True)
reloaded_plot = Plot.objects.get(pk=self.plot.pk)
col = reloaded_plot.udfs['Test collection']
self.assertEqual(len(col), 1)
self.assertEqual(col[0]['a choice'], 'a')
self.assertEqual(col[0]['a string'], 's')
class UDFDefTest(OTMTestCase):
def setUp(self):
self.instance = make_instance()
def _create_and_save_with_datatype(
self, d, model_type='Plot', name='Blah', iscollection=False):
return UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type=model_type,
datatype=json.dumps(d),
iscollection=iscollection,
name=name)
def test_cannot_create_datatype_with_invalid_model(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
{'type': 'string'},
model_type='InvalidModel')
def test_cannot_create_datatype_with_nonudf(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
{'type': 'string'},
model_type='InstanceUser')
def test_cannot_create_duplicate_udfs(self):
self._create_and_save_with_datatype(
{'type': 'string'},
name='random')
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
{'type': 'string'},
name='random')
self._create_and_save_with_datatype(
{'type': 'string'},
name='random2')
def test_cannot_create_datatype_with_existing_field(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
{'type': 'string'},
name='width')
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
{'type': 'string'},
name='id')
self._create_and_save_with_datatype(
{'type': 'string'},
name='random')
def test_must_have_type_key(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype, {})
def test_invalid_type(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype, {'type': 'woohoo'})
self._create_and_save_with_datatype({'type': 'float'})
def test_description_op(self):
self._create_and_save_with_datatype(
{'type': 'float',
'description': 'this is a float field'})
def test_choices_not_missing(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
{'type': 'choice'})
self._create_and_save_with_datatype(
{'type': 'choice',
'choices': ['a choice', 'another']})
def test_choices_not_empty(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
{'type': 'choice',
'choices': []})
self._create_and_save_with_datatype(
{'type': 'choice',
'choices': ['a choice', 'another']})
def test_cannot_create_choices_with_numeric_values(self):
with self.assertRaises(ValidationError):
self._create_and_save_with_datatype(
{'type': 'choice',
'choices': [0, 1, 3, 4, 5]})
def test_can_create_subfields(self):
self._create_and_save_with_datatype(
[{'type': 'choice',
'name': 'achoice',
'choices': ['a', 'b']},
{'type': 'string',
'name': 'something'}], iscollection=True)
def test_must_have_name_on_subfields(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
[{'type': 'choice',
'choices': ['a', 'b']},
{'type': 'string',
'name': 'something'}],
iscollection=True)
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
[{'type': 'choice',
'choices': ['a', 'b'],
'name': ''},
{'type': 'string',
'name': 'something'}],
iscollection=True)
self._create_and_save_with_datatype(
[{'type': 'choice',
'name': 'valid name',
'choices': ['a', 'b']},
{'type': 'string',
'name': 'something'}],
iscollection=True)
def test_subfields_may_not_have_duplicate_names(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
[{'type': 'choice',
'name': 'valid name',
'choices': ['a', 'b']},
{'type': 'string',
'name': 'valid name'}],
name='another',
iscollection=True)
self._create_and_save_with_datatype(
[{'type': 'choice',
'name': 'valid name',
'choices': ['a', 'b']},
{'type': 'string',
'name': 'valid name2'}],
iscollection=True)
def test_iscollection_requires_json_array(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
[{'type': 'choice',
'name': 'a name',
'choices': ['a', 'b']},
{'type': 'string',
'name': 'something'}],
iscollection=False)
self._create_and_save_with_datatype(
[{'type': 'choice',
'choices': ['a', 'b'],
'name': 'a name'},
{'type': 'string',
'name': 'something'}],
iscollection=True)
def test_not_iscollection_requires_only_a_dict(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
{'type': 'choice',
'choices': ['a', 'b']},
iscollection=True)
self._create_and_save_with_datatype(
{'type': 'choice',
'choices': ['a', 'b']},
iscollection=False)
def test_subfield_cannot_be_called_id(self):
self.assertRaises(
ValidationError,
self._create_and_save_with_datatype,
[{'type': 'choice',
'name': 'id',
'choices': ['a', 'b']},
{'type': 'string',
'name': 'something'}],
iscollection=True)
self._create_and_save_with_datatype(
[{'type': 'choice',
'name': 'anything else',
'choices': ['a', 'b']},
{'type': 'string',
'name': 'something'}],
iscollection=True)
def test_default_values(self):
with self.assertRaises(ValidationError):
self._create_and_save_with_datatype(
[{'type': 'choice',
'name': 'a name',
'choices': ['a', 'b'],
'default': 'c'},
{'type': 'string',
'name': 'something'}],
iscollection=True)
self._create_and_save_with_datatype(
[{'type': 'choice',
'name': 'a name',
'choices': ['a', 'b'],
'default': 'a'},
{'type': 'string',
'name': 'something',
'default': 'anything'}],
iscollection=True)
def test_invalid_names(self):
with self.assertRaises(ValidationError):
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'string'}),
iscollection=False,
name='%')
with self.assertRaises(ValidationError):
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Tree',
datatype=json.dumps({'type': 'string'}),
iscollection=False,
name='.')
with self.assertRaises(ValidationError):
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'string'}),
iscollection=False,
name='__contains')
class ScalarUDFTest(OTMTestCase):
def setUp(self):
self.p = Point(-8515941.0, 4953519.0)
self.instance = make_instance(point=self.p)
def make_and_save_type(dtype):
UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': dtype}),
iscollection=False,
name='Test %s' % dtype)
allowed_types = 'float', 'int', 'string', 'user', 'date'
addl_fields = ['udf:Test %s' % ttype for ttype in allowed_types]
addl_fields.append('udf:Test choice')
addl_fields.append('udf:Test multichoice')
self.commander_user = make_commander_user(self.instance)
set_write_permissions(self.instance, self.commander_user,
'Plot', addl_fields)
for dtype in allowed_types:
make_and_save_type(dtype)
self.choice_udfd = UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'choice',
'choices': ['a', 'b', 'c']}),
iscollection=False,
name='Test choice')
self.multichoice_udfd = UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'multichoice',
'choices': ['a', 'b', 'c']}),
iscollection=False,
name='Test multichoice')
self.plot = Plot(geom=self.p, instance=self.instance)
self.plot.save_with_user(self.commander_user)
psycopg2.extras.register_hstore(connection.cursor(), globally=True)
def _test_datatype(self, field, value):
self.plot.udfs[field] = value
self.plot.save_with_user(self.commander_user)
self.plot = Plot.objects.get(pk=self.plot.pk)
self.assertEqual(
self.plot.udfs[field], value)
def test_int_datatype(self):
self._test_datatype('Test int', 4)
def test_int_validation_non_integer(self):
self.assertRaises(ValidationError,
self._test_datatype, 'Test int', 42.3)
self.assertRaises(ValidationError,
self._test_datatype, 'Test int', 'blah')
def test_float_datatype(self):
self._test_datatype('Test float', 4.4)
def test_float_validation(self):
self.assertRaises(ValidationError,
self._test_datatype, 'Test float', 'blah')
def test_cant_update_choices_on_non_choice_model(self):
floatfield = UserDefinedFieldDefinition\
.objects\
.filter(name='Test float')
self.assertRaises(ValidationError,
floatfield[0].update_choice,
'a', 'b')
def test_update_invalid_choice(self):
self.assertRaises(ValidationError,
self.choice_udfd.update_choice,
'WHAT?????', 'm')
def test_delete_choice_value(self):
self.plot.udfs['Test choice'] = 'a'
self.plot.save_with_user(self.commander_user)
self.plot = Plot.objects.get(pk=self.plot.pk)
audit = self.plot.audits().get(field='udf:Test choice')
self.assertEqual(
self.plot.udfs['Test choice'], 'a')
self.assertEqual(
audit.current_value, 'a')
self.choice_udfd.delete_choice('a')
self.plot = Plot.objects.get(pk=self.plot.pk)
audit = self.plot.audits().filter(field='udf:Test choice')
self.assertEqual(
self.plot.udfs['Test choice'], None)
self.assertEqual(
audit.exists(), False)
choice = UserDefinedFieldDefinition.objects.get(
pk=self.choice_udfd.pk)
self.assertEqual(
set(choice.datatype_dict['choices']),
{'b', 'c'})
def test_delete_multichoice_value(self):
self.plot.udfs['Test multichoice'] = ['a']
self.plot.save_with_user(self.commander_user)
self.plot = Plot.objects.get(pk=self.plot.pk)
audit = self.plot.audits().get(field='udf:Test multichoice')
self.assertEqual(
self.plot.udfs['Test multichoice'], ['a'])
self.assertEqual(
json.loads(audit.current_value), ['a'])
self.multichoice_udfd.delete_choice('a')
self.plot = Plot.objects.get(pk=self.plot.pk)
audit = self.plot.audits().filter(field='udf:Test multichoice')
self.assertEqual(self.plot.udfs['Test multichoice'], None)
self.assertEqual(json.loads(audit[0].current_value), None)
choice = UserDefinedFieldDefinition.objects.get(
pk=self.multichoice_udfd.pk)
self.assertEqual(
set(choice.datatype_dict['choices']),
{'b', 'c'})
def test_update_multichoice_value(self):
# setup plot and requery
self.plot.udfs['Test multichoice'] = ['a']
self.plot.save_with_user(self.commander_user)
self.plot = Plot.objects.get(pk=self.plot.pk)
self.multichoice_udfd.update_choice('a', 'weird \\\\\\1a2chars')
self.plot = Plot.objects.get(pk=self.plot.pk)
audit = self.plot.audits().get(field='udf:Test multichoice')
self.assertEqual(
self.plot.udfs['Test multichoice'], ['weird \\\\\\1a2chars'])
self.assertEqual(json.loads(audit.current_value),
['weird \\\\\\1a2chars'])
choice = UserDefinedFieldDefinition.objects.get(
pk=self.multichoice_udfd.pk)
self.assertEqual(
set(choice.datatype_dict['choices']),
{'weird \\\\\\1a2chars', 'b', 'c'})
self.plot = Plot.objects.get(pk=self.plot.pk)
self.multichoice_udfd.update_choice('b', 'd')
self.assertEqual(
self.plot.udfs['Test multichoice'], ['weird \\\\\\1a2chars'])
choice = UserDefinedFieldDefinition.objects.get(
pk=self.multichoice_udfd.pk)
self.assertEqual(
set(choice.datatype_dict['choices']),
{'weird \\\\\\1a2chars', 'd', 'c'})
def test_update_choice_value(self):
self.plot.udfs['Test choice'] = 'a'
self.plot.save_with_user(self.commander_user)
self.plot = Plot.objects.get(pk=self.plot.pk)
audit = self.plot.audits().get(field='udf:Test choice')
self.assertEqual(
self.plot.udfs['Test choice'], 'a')
self.assertEqual(
audit.current_value, 'a')
self.choice_udfd.update_choice('a', 'm')
self.plot = Plot.objects.get(pk=self.plot.pk)
audit = self.plot.audits().get(field='udf:Test choice')
self.assertEqual(
self.plot.udfs['Test choice'], 'm')
self.assertEqual(
audit.current_value, 'm')
choice = UserDefinedFieldDefinition.objects.get(
pk=self.choice_udfd.pk)
self.assertEqual(
set(choice.datatype_dict['choices']),
{'m', 'b', 'c'})
def test_choice_datatype(self):
self._test_datatype('Test choice', 'a')
def test_choice_validation(self):
self.assertRaises(ValidationError,
self._test_datatype, 'Test choice', 'bad choice')
def test_user_datatype(self):
self._test_datatype('Test user', self.commander_user)
def test_date_datatype(self):
d = datetime.now().replace(microsecond=0)
self._test_datatype('Test date', d)
def test_string_datatype(self):
self._test_datatype('Test string', 'Sweet Plot')
def test_user_validation_invalid_id(self):
self.assertRaises(ValidationError,
self._test_datatype, 'Test user', 349949)
def test_user_validation_non_integer(self):
self.assertRaises(ValidationError,
self._test_datatype, 'Test user', 'zztop')
def test_in_operator(self):
self.assertEqual('Test string' in self.plot.udfs,
True)
self.assertEqual('RanDoM NAme' in self.plot.udfs,
False)
def test_returns_none_for_empty_but_valid_udfs(self):
self.assertEqual(self.plot.udfs['Test string'],
None)
def test_raises_keyerror_for_invalid_udf(self):
self.assertRaises(KeyError,
lambda: self.plot.udfs['RaNdoName'])
class CollectionUDFTest(OTMTestCase):
def setUp(self):
self.p = Point(-8515941.0, 4953519.0)
self.instance = make_instance(point=self.p)
self.udf = make_collection_udf(self.instance, 'Stewardship')
self.commander_user = make_commander_user(self.instance)
set_write_permissions(self.instance, self.commander_user,
'Plot', ['udf:Stewardship'])
self.plot = Plot(geom=self.p, instance=self.instance)
self.plot.save_with_user(self.commander_user)
def test_can_update_choice_option(self):
stews = [{'action': 'water',
'height': 42},
{'action': 'prune',
'height': 12}]
self.plot.udfs['Stewardship'] = stews
self.plot.save_with_user(self.commander_user)
plot = Plot.objects.get(pk=self.plot.pk)
audits = [a.current_value for a in
plot.audits().filter(field='udf:action')]
self.assertEqual(plot.udfs['Stewardship'][0]['action'], 'water')
self.assertEqual(audits, ['water', 'prune'])
self.udf.update_choice('water', 'h2o', name='action')
plot = Plot.objects.get(pk=self.plot.pk)
audits = [a.current_value for a in
plot.audits().filter(field='udf:action')]
self.assertEqual(plot.udfs['Stewardship'][0]['action'], 'h2o')
self.assertEqual(audits, ['h2o', 'prune'])
def test_can_delete_choice_option(self):
stews = [{'action': 'water',
'height': 42},
{'action': 'prune',
'height': 12}]
self.plot.udfs['Stewardship'] = stews
self.plot.save_with_user(self.commander_user)
plot = Plot.objects.get(pk=self.plot.pk)
audits = [a.current_value for a in
plot.audits().filter(field='udf:action')]
self.assertEqual(plot.udfs['Stewardship'][0]['action'], 'water')
self.assertEqual(audits, ['water', 'prune'])
self.udf.delete_choice('water', name='action')
plot = Plot.objects.get(pk=self.plot.pk)
audits = [a.current_value for a in
plot.audits().filter(field='udf:action')]
self.assertEqual(plot.udfs['Stewardship'][0]['action'], '')
self.assertEqual(audits, ['prune'])
def test_can_get_and_set(self):
stews = [{'action': 'water',
'height': 42},
{'action': 'prune',
'height': 12}]
self.plot.udfs['Stewardship'] = stews
self.plot.save_with_user(self.commander_user)
reloaded_plot = Plot.objects.get(pk=self.plot.pk)
new_stews = reloaded_plot.udfs['Stewardship']
for expected_stew, actual_stew in zip(stews, new_stews):
self.assertIn('id', actual_stew)
self.assertDictContainsSubset(expected_stew, actual_stew)
def test_can_delete(self):
stews = [{'action': 'water',
'height': 42},
{'action': 'prune',
'height': 12}]
self.plot.udfs['Stewardship'] = stews
self.plot.save_with_user(self.commander_user)
reloaded_plot = Plot.objects.get(pk=self.plot.pk)
all_new_stews = reloaded_plot.udfs['Stewardship']
# Remove first one
new_stews = all_new_stews[1:]
reloaded_plot.udfs['Stewardship'] = new_stews
reloaded_plot.save_with_user(self.commander_user)
reloaded_plot = Plot.objects.get(pk=self.plot.pk)
newest_stews = reloaded_plot.udfs['Stewardship']
self.assertEqual(len(newest_stews), 1)
self.assertEqual(newest_stews[0]['action'], 'prune')
self.assertEqual(newest_stews[0]['height'], 12)
    # Collection fields use the same validation logic as scalar udfs.
    # The point of this section is to prove that the code is hooked up,
    # not to exhaustively test datatype validation.
def test_cannot_save_with_invalid_field_name(self):
self.plot.udfs['Stewardship'] = [
{'action': 'water',
'height': 32,
'random': 'test'}]
self.assertRaises(
ValidationError,
self.plot.save_with_user,
self.commander_user)
def test_cannot_save_with_invalid_value(self):
self.plot.udfs['Stewardship'] = [
{'action': 'water',
'height': 'too high'}]
self.assertRaises(
ValidationError,
self.plot.save_with_user,
self.commander_user)
class UdfDeleteTest(OTMTestCase):
def setUp(self):
self.instance = make_instance()
self.commander_user = make_commander_user(self.instance)
def test_delete_udf_deletes_perms_collection(self):
set_write_permissions(self.instance, self.commander_user,
'Plot', ['udf:Test choice'])
udf_def = UserDefinedFieldDefinition(
instance=self.instance,
model_type='Plot',
datatype=json.dumps([{'name': 'pick',
'type': 'choice',
'choices': ['a', 'b', 'c']},
{'type': 'int',
'name': 'height'}]),
iscollection=True,
name='Test choice')
udf_def.save()
qs = FieldPermission.objects.filter(
field_name='udf:Test choice',
model_name='Plot')
self.assertTrue(qs.exists())
udf_def.delete()
self.assertFalse(qs.exists())
def test_delete_udf_deletes_perms_value(self):
set_write_permissions(self.instance, self.commander_user,
'Plot', ['udf:Test string'])
udf_def = UserDefinedFieldDefinition(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'string'}),
iscollection=False,
name='Test string')
udf_def.save()
qs = FieldPermission.objects.filter(
field_name='udf:Test string',
model_name='Plot')
self.assertTrue(qs.exists())
udf_def.delete()
self.assertFalse(qs.exists())
def test_delete_udf_deletes_mobile_api_field(self):
udf_def = UserDefinedFieldDefinition(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'string'}),
iscollection=False,
name='Test string')
udf_def.save()
self.instance.mobile_api_fields = [
{'header': 'fields', 'model': 'plot',
'field_keys': ['plot.udf:Test string']}]
self.instance.save()
udf_def.delete()
updated_instance = Instance.objects.get(pk=self.instance.pk)
self.assertEquals(
0, len(updated_instance.mobile_api_fields[0]['field_keys']))
def test_delete_cudf_deletes_mobile_api_field_group(self):
tree_udf_def = UserDefinedFieldDefinition(
instance=self.instance,
model_type='Plot',
datatype=json.dumps([{'name': 'pick',
'type': 'choice',
'choices': ['a', 'b', 'c']},
{'type': 'int',
'name': 'height'}]),
iscollection=True,
name='Choices')
tree_udf_def.save()
plot_udf_def = UserDefinedFieldDefinition(
instance=self.instance,
model_type='Tree',
datatype=json.dumps([{'name': 'pick',
'type': 'choice',
'choices': ['1', '2', '3']},
{'type': 'int',
'name': 'times'}]),
iscollection=True,
name='Choices')
plot_udf_def.save()
self.instance.mobile_api_fields = [
{'header': 'plot', 'model': 'plot', 'field_keys': ['plot.width']},
{'header': 'Choices', 'sort_key': 'pick',
'collection_udf_keys': ['plot.udf:Choices', 'tree.udf:Choices']}
]
self.instance.save()
tree_udf_def.delete()
updated_instance = Instance.objects.get(pk=self.instance.pk)
self.assertEquals(1, len(
updated_instance.mobile_api_fields[1]['collection_udf_keys']))
plot_udf_def.delete()
updated_instance = Instance.objects.get(pk=self.instance.pk)
self.assertEquals(1, len(updated_instance.mobile_api_fields))
class UdfCRUTestCase(OTMTestCase):
def setUp(self):
User._system_user.save_base()
self.instance = make_instance()
self.user = make_commander_user(self.instance)
set_write_permissions(self.instance, self.user,
'Plot', ['udf:Test choice'])
self.udf = UserDefinedFieldDefinition.objects.create(
instance=self.instance,
model_type='Plot',
datatype=json.dumps({'type': 'choice',
'choices': ['a', 'b', 'c']}),
iscollection=False,
name='Test choice')
class UdfCreateTest(UdfCRUTestCase):
def test_create_non_choice_udf(self):
body = {'udf.name': ' cool udf ',
'udf.model': 'Plot',
'udf.type': 'string'}
udf = udf_create(body, self.instance)
self.assertEqual(udf.instance_id, self.instance.pk)
self.assertEqual(udf.model_type, 'Plot')
self.assertEqual(udf.name, 'cool udf')
self.assertEqual(udf.datatype_dict['type'], 'string')
def test_adds_udf_to_role_when_created(self):
body = {'udf.name': 'cool udf',
'udf.model': 'Plot',
'udf.type': 'string'}
udf_create(body, self.instance)
roles_in_instance = Role.objects.filter(instance=self.instance)
self.assertGreater(len(roles_in_instance), 0)
for role in roles_in_instance:
perms = [perm.field_name
for perm in role_permissions(role, self.instance)]
self.assertIn('udf:cool udf', perms)
def test_create_choice_udf(self):
body = {'udf.name': 'cool udf',
'udf.model': 'Plot',
'udf.type': 'choice',
'udf.choices': ['a', 'b', 'c']}
udf = udf_create(body, self.instance)
self.assertEqual(udf.instance_id, self.instance.pk)
self.assertEqual(udf.model_type, 'Plot')
self.assertEqual(udf.name, 'cool udf')
self.assertEqual(udf.datatype_dict['type'], 'choice')
self.assertEqual(udf.datatype_dict['choices'], ['a', 'b', 'c'])
def test_invalid_choice_list(self):
body = {'udf.name': 'cool udf',
'udf.model': 'Plot',
'udf.type': 'choice'}
self.assertRaises(ValidationError, udf_create, body, self.instance)
body = {'udf.name': 'cool udf',
'udf.model': 'Plot',
'udf.type': 'choice',
'udf.choices': ['', 'a']}
self.assertRaises(ValidationError, udf_create, body, self.instance)
body = {'udf.name': 'cool udf',
'udf.model': 'Plot',
'udf.type': 'choice',
'udf.choices': ['a', 'a']}
self.assertRaises(ValidationError, udf_create, body, self.instance)
def test_missing_params(self):
body = {'udf.model': 'Plot',
'udf.type': 'string',
'udf.choices': []}
self.assertRaises(ValidationError, udf_create, body, self.instance)
body = {'udf.name': 'cool udf',
'udf.type': 'string',
'udf.choices': []}
self.assertRaises(ValidationError, udf_create, body, self.instance)
body = {'udf.name': 'cool udf',
'udf.model': 'Plot'}
self.assertRaises(ValidationError, udf_create, body, self.instance)
def test_empty_name(self):
body = {'udf.name': '',
'udf.model': 'Plot',
'udf.type': 'string'}
self.assertRaises(ValidationError, udf_create, body, self.instance)
def test_duplicate_name(self):
body = {'udf.name': 'Test choice',
'udf.model': 'Plot',
'udf.type': 'string'}
self.assertRaises(ValidationError, udf_create, body, self.instance)
def test_invalid_model_name(self):
body = {'udf.name': 'Testing choice',
'udf.model': 'Shoe',
'udf.type': 'string'}
self.assertRaises(ValidationError, udf_create, body, self.instance)
|
kdeloach/otm-core
|
opentreemap/treemap/tests/test_udfs.py
|
Python
|
gpl-3.0
| 46,210
|
"""
MIMEJSON Serialization.
MIMEJSON extends JSON to allow automatic serialization of large binary objects as "attached" objects.
These large objects can then be LAZILY loaded. This is ALPHA software - the exact specification
of MIMEJSON is likely to evolve through iteration.
"""
import os
from .mimejson import MIMEJSON
from .codec import CodecRegister
__version__ = open(os.path.join(os.path.dirname(__file__), "VERSION"), "r").read()
__all__ = ('MIMEJSON', 'CodecRegister', '__version__')
|
wideioltd/mimejson
|
mimejson/__init__.py
|
Python
|
bsd-3-clause
| 502
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 07 21:39:18 2015
@author: Paco
"""
from api import API
class RubyGems(API):
_class_name = 'Ruby Gems'
_category = 'Code'
_help_url = 'http://guides.rubygems.org/rubygems-org-api/'
_version = '1'
_api_url = 'https://rubygems.org/api/v' + _version + '/'
def _parsing_data(self,data):
res = {'name':list(),'downloads':list(),'info':list(),'url':list()}
for d in data:
res['name'].append(self._tools.key_test('name',d))
res['downloads'].append(self._tools.key_test('downloads',d,'int'))
res['info'].append(self._tools.key_test('info',d))
res['url'].append(self._tools.key_test('homepage_uri',d))
return res
def search(self,text=''):
text = text.replace(' ','+')
url = self._api_url+'search.json?query='+text
data = self._tools.data_from_url(url)
self._increment_nb_call()
return self._parsing_data(data)
def latest_added(self):
url = self._api_url+'activity/latest.json'
data = self._tools.data_from_url(url)
self._increment_nb_call()
return self._parsing_data(data)
def latest_updated(self):
url = self._api_url+'activity/just_updated.json'
data = self._tools.data_from_url(url)
self._increment_nb_call()
return self._parsing_data(data)
def total_download_gems(self):
url = self._api_url+'downloads.json'
data = self._tools.data_from_url(url)
self._increment_nb_call()
return data
|
franblas/pyAPI
|
src/pyapi/rubygems.py
|
Python
|
mit
| 1,582
|
#!/usr/bin/env python
# Copyright (C) 2014 Dan Scott <dscott@laurentian.ca>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Proof-of-concept union catalogue via sitemaps + schema.org
This script currently takes a sitemap URL as a starting point, determines if
there are any linked sitemaps, and crawls all of the URLs it is given with a
singular goal of extracting schema.org structured data from each URL.
By default the script expects to find the metadata marked up in RDFa, but you
can control that via a command line switch. For example, "-p microdata" will
tell the script to parse the metadata in a given URL as microdata instead.
By default the script generates n3 output, but you can control that via a
command line switch. For example, "-t turtle" generates turtle output.
There are many improvements to be made to this script before it would be
suitable for a real life deployment:
* It currently has no idea when the last time it was run, so it will blindly
crawl every URL in the sitemaps--even if the sitemaps contain <lastmod>
elements that would enable it to only crawl those URLs that have changed
since the last time it has run.
* Also, thus far the script has no opinion about where the retrieved metadata
should be stored. One could target a triple store or a relational database,
for example--but a more functional script should probably work out of the
  box with _something_ that provides some simple search capabilities.
* sitemaps.org encourages site owners to gzip-compress large sitemaps, but
  this script currently expects plain, uncompressed XML and would have a
  horrendous time trying to parse a gzipped XML file (one possible approach
  is sketched in the open_possibly_gzipped helper below).
"""
import logging
import sys
try:
from urllib.request import urlopen
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from urllib import urlopen
from xml.dom.minidom import parse
from rdflib.graph import ConjunctiveGraph
from rdflib.namespace import RDF, RDFS, OWL, XSD
from rdflib.parser import Parser
from rdflib.serializer import Serializer
# Set your default sitemap URL here
SITEMAP_URL = 'http://laurentian.concat.ca/osul_sitemap1.xml'
SITEMAP_URL = 'http://laurentian.concat.ca/osul_sitemapindex.xml'
SITEMAP_URL = 'http://find.senatehouselibrary.ac.uk/sitemapIndex.xml'
# It would be rude to repeatedly retrieve megabytes of sitemaps from a
# third-party site just for testing purposes.
# If true, skip parsing the sitemaps entirely and just use the sample URLs
SHORT_CIRCUIT = True
SAMPLE_URLS = [
u'http://find.senatehouselibrary.ac.uk/Record/.b24804241',
u'http://acorn.biblio.org/eg/opac/record/1826746'
]
logging.basicConfig()
def url_value(url):
"Get the URL value from a given <loc> element"
locs = url.getElementsByTagName('loc')
if len(locs) > 1:
raise Exception('More than 1 loc in url %s' % url.nodeValue)
if len(locs) < 1:
raise Exception('No loc in url %s' % url.nodeValue)
for node in locs[0].childNodes:
if node.nodeType == node.TEXT_NODE:
return node.nodeValue
def parse_sitemap_urls(sitemap):
"Parse the URLs from a sitemap file"
rv = []
sitemap = urlopen(sitemap)
if sitemap.getcode() < 400:
doc = parse(sitemap)
for url in doc.getElementsByTagName('url'):
rv.append(url_value(url))
return rv
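# Illustrative sketch only; nothing in this script calls it. The module
# docstring notes that gzip-compressed sitemaps are not handled, and this
# shows one possible approach: sniff the gzip magic bytes and decompress
# before handing the stream to the XML parser. The helper name and its
# behaviour are assumptions, not part of the original script.
def open_possibly_gzipped(url):
    "Open a sitemap URL, transparently decompressing it if it is gzipped"
    import gzip
    import io
    payload = urlopen(url).read()
    # Gzip streams start with the magic bytes 0x1f 0x8b
    if payload[:2] == b'\x1f\x8b':
        return gzip.GzipFile(fileobj=io.BytesIO(payload))
    return io.BytesIO(payload)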
def parse_sitemap_sitemaps(url):
"Parse the list of linked sitemaps from a sitemap file"
sitemaps = []
url = urlopen(url)
doc = parse(url)
for sitemap in doc.getElementsByTagName('sitemap'):
sitemaps.append(url_value(sitemap))
return sitemaps
def parse_sitemap(url):
"Parse a sitemap file, including linked sitemaps"
urls = []
sitemaps = parse_sitemap_sitemaps(url)
if sitemaps:
for sitemap in sitemaps:
urls += parse_sitemap_urls(sitemap)
else:
urls += parse_sitemap_urls(url)
return(urls)
def extract_rdfa(url, outfile=sys.stdout, parser="rdfa", serializer="n3"):
"""
Extract RDFa from a given URL
Parsers are listed at https://rdflib.readthedocs.org/en/4.1.0/plugin_parsers.html
Serializers are listed at https://rdflib.readthedocs.org/en/4.1.0/plugin_serializers.html
"""
store = None
graph = ConjunctiveGraph()
graph.parse(url, format=parser)
graph.serialize(destination=outfile, format=serializer)
def main():
import argparse
import pprint
import traceback
parser = argparse.ArgumentParser(
description="Crawl a sitemap.xml and extract RDFa from the documents")
parser.add_argument('-s', '--sitemap', default=SITEMAP_URL,
help='Location of the sitemap to parse')
parser.add_argument('-o', '--output', required=True,
help='Path / filename for the output')
parser.add_argument('-p', '--parser', default='rdfa1.1',
help='Parser to use for the input format ("rdfa", "microdata", etc)')
parser.add_argument('-t', '--serializer', default='n3',
help='Serializer to use for the output format ("n3", "nt", "turtle", "xml", etc)')
args = parser.parse_args()
errors = []
urls = []
outfile = open(args.output, 'wb')
if SHORT_CIRCUIT:
urls = SAMPLE_URLS
else:
urls = parse_sitemap(args.sitemap)
for url in urls:
try:
extract_rdfa(url, outfile, args.parser, args.serializer)
except Exception as e:
traceback.print_exc()
if __name__ == '__main__':
main()
|
dbs/schema-unioncat
|
schema_union.py
|
Python
|
gpl-3.0
| 6,085
|
# Spawn Area file created with PSWG Planetary Spawn Tool
import sys
from java.util import Vector
def addSpawnArea(core):
dynamicGroups = Vector()
dynamicGroups.add('lok_flit')
dynamicGroups.add('lok_kusak')
dynamicGroups.add('lok_perlek')
core.spawnService.addDynamicSpawnArea(dynamicGroups, 5500, 0, 3500, 'lok')
return
|
agry/NGECore2
|
scripts/mobiles/spawnareas/lok_e_1.py
|
Python
|
lgpl-3.0
| 328
|
# Copyright 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Initial operations for dvr
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'dvr_host_macs',
sa.Column('host', sa.String(length=255), nullable=False),
sa.Column('mac_address', sa.String(length=32),
nullable=False, unique=True),
sa.PrimaryKeyConstraint('host')
)
op.create_table(
'ml2_dvr_port_bindings',
sa.Column('port_id', sa.String(length=36), nullable=False),
sa.Column('host', sa.String(length=255), nullable=False),
sa.Column('router_id', sa.String(length=36), nullable=True),
sa.Column('vif_type', sa.String(length=64), nullable=False),
sa.Column('vif_details', sa.String(length=4095),
nullable=False, server_default=''),
sa.Column('vnic_type', sa.String(length=64),
nullable=False, server_default='normal'),
sa.Column('profile', sa.String(length=4095),
nullable=False, server_default=''),
sa.Column('cap_port_filter', sa.Boolean(), nullable=False),
sa.Column('driver', sa.String(length=64), nullable=True),
sa.Column('segment', sa.String(length=36), nullable=True),
sa.Column(u'status', sa.String(16), nullable=False),
sa.ForeignKeyConstraint(['port_id'], ['ports.id'],
ondelete='CASCADE'),
sa.ForeignKeyConstraint(['segment'], ['ml2_network_segments.id'],
ondelete='SET NULL'),
sa.PrimaryKeyConstraint('port_id', 'host')
)
op.create_table(
'csnat_l3_agent_bindings',
sa.Column('router_id', sa.String(length=36), nullable=False),
sa.Column('l3_agent_id', sa.String(length=36), nullable=False),
sa.Column('host_id', sa.String(length=255), nullable=True),
sa.Column('csnat_gw_port_id', sa.String(length=36), nullable=True),
sa.ForeignKeyConstraint(['l3_agent_id'], ['agents.id'],
ondelete='CASCADE'),
sa.ForeignKeyConstraint(['router_id'], ['routers.id'],
ondelete='CASCADE'),
sa.ForeignKeyConstraint(['csnat_gw_port_id'], ['ports.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('router_id')
)
|
takeshineshiro/neutron
|
neutron/db/migration/alembic_migrations/dvr_init_opts.py
|
Python
|
apache-2.0
| 2,933
|
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.11.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.dirname(PROJECT_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# Application definition
INSTALLED_APPS = [
'home',
'search',
'wagtail.contrib.forms',
'wagtail.contrib.redirects',
'wagtail.embeds',
'wagtail.sites',
'wagtail.users',
'wagtail.snippets',
'wagtail.documents',
'wagtail.images',
'wagtail.search',
'wagtail.admin',
'wagtail.core',
'modelcluster',
'taggit',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'wagtail.core.middleware.SiteMiddleware',
'wagtail.contrib.redirects.middleware.RedirectMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(PROJECT_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATICFILES_DIRS = [
os.path.join(PROJECT_DIR, 'static'),
]
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Wagtail settings
WAGTAIL_SITE_NAME = "mysite"
# Base URL to use when referring to full URLs within the Wagtail admin backend -
# e.g. in notification emails. Don't include '/admin' or a trailing slash
BASE_URL = 'http://example.com'
|
texperience/wagtail-pythonanywhere-quickstart
|
mysite/settings/base.py
|
Python
|
isc
| 3,566
|
from pyjamas.ui.Button import Button
from pyjamas.ui.PopupPanel import PopupPanel
from pyjamas.ui.HTML import HTML
from pyjamas.ui.DockPanel import DockPanel
from pyjamas.ui.DialogBox import DialogBox
from pyjamas.ui.Frame import Frame
from pyjamas.ui import HasAlignment
class FileDialog(DialogBox):
def __init__(self, url):
DialogBox.__init__(self)
self.setText("Upload Files")
iframe = Frame(url)
closeButton = Button("Close", self)
        msg = HTML("<center>Upload files here. Please avoid spaces in file names.<br />(rename the file before uploading)</center>", True)
dock = DockPanel()
dock.setSpacing(4)
dock.add(closeButton, DockPanel.SOUTH)
dock.add(msg, DockPanel.NORTH)
dock.add(iframe, DockPanel.CENTER)
dock.setCellHorizontalAlignment(closeButton, HasAlignment.ALIGN_RIGHT)
dock.setCellWidth(iframe, "100%")
dock.setWidth("100%")
iframe.setWidth("800px")
iframe.setHeight("600px")
self.setWidget(dock)
def onClick(self, sender):
self.hide()
|
minghuascode/pyj
|
examples/misc/djangoweb/media/Popups.py
|
Python
|
apache-2.0
| 1,125
|
from setuptools import setup, find_packages
from os import path
VERSION = '0.1.2'
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='program-synthesis',
version=VERSION,
description='NEAR Program Synthesis: models, tools, and datasets for program synthesis tasks',
long_description=long_description,
long_description_content_type='text/markdown',
packages=find_packages(exclude=["*_test.py", "test_*.py"]),
author='NEAR Inc and Contributors',
author_email='contact@near.ai',
install_requires=[
'boto3',
'cached-property',
'ipython',
'gym',
'numpy',
'torchfold',
'ply',
'pylru',
'pyparsing',
'pytest',
'pytest-timeout',
'pytest-xdist',
'python-Levenshtein',
'prompt_toolkit',
'sortedcontainers',
'tensorflow',
'tqdm',
],
project_urls={
'Source': "https://github.com/nearai/program_synthesis",
},
python_requires='>=3.5',
)
|
nearai/program_synthesis
|
setup.py
|
Python
|
apache-2.0
| 1,125
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mangaki', '0020_pairing_is_checked'),
]
operations = [
migrations.CreateModel(
name='Deck',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),
('category', models.CharField(max_length=32)),
('sort_mode', models.CharField(max_length=32)),
('content', models.CommaSeparatedIntegerField(max_length=42000)),
],
options={
},
bases=(models.Model,),
),
]
|
Mako-kun/mangaki
|
mangaki/mangaki/migrations/0021_deck.py
|
Python
|
agpl-3.0
| 749
|
"""
homeassistant.components.lock.demo
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Demo platform that has two fake locks.
"""
from homeassistant.components.lock import LockDevice
from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
""" Find and return demo locks. """
add_devices_callback([
DemoLock('Front Door', STATE_LOCKED),
DemoLock('Kitchen Door', STATE_UNLOCKED)
])
class DemoLock(LockDevice):
""" Provides a demo lock. """
def __init__(self, name, state):
self._name = name
self._state = state
@property
def should_poll(self):
""" No polling needed for a demo lock. """
return False
@property
def name(self):
""" Returns the name of the device if any. """
return self._name
@property
def is_locked(self):
""" True if device is locked. """
return self._state == STATE_LOCKED
def lock(self, **kwargs):
""" Lock the device. """
self._state = STATE_LOCKED
self.update_ha_state()
def unlock(self, **kwargs):
""" Unlock the device. """
self._state = STATE_UNLOCKED
self.update_ha_state()
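# Usage sketch: an assumed configuration.yaml snippet that would load this demo platform
# (exact syntax depends on the Home Assistant version):
#
#   lock:
#     platform: demo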
|
nevercast/home-assistant
|
homeassistant/components/lock/demo.py
|
Python
|
mit
| 1,292
|
# BlenderBIM Add-on - OpenBIM Blender Add-on
# Copyright (C) 2020, 2021 Dion Moult <dion@thinkmoult.com>
#
# This file is part of BlenderBIM Add-on.
#
# BlenderBIM Add-on is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BlenderBIM Add-on is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BlenderBIM Add-on. If not, see <http://www.gnu.org/licenses/>.
import os
import bpy
import time
import logging
import tempfile
import ifcopenshell
import ifcopenshell.api
import ifcopenshell.util.selector
import ifcopenshell.util.representation
import blenderbim.bim.handler
import blenderbim.tool as tool
import blenderbim.core.project as core
import blenderbim.core.context
import blenderbim.core.owner
from blenderbim.bim.ifc import IfcStore
from blenderbim.bim.ui import IFCFileSelector
from blenderbim.bim import import_ifc
from blenderbim.bim import export_ifc
from ifcopenshell.api.context.data import Data as ContextData
class CreateProject(bpy.types.Operator):
bl_idname = "bim.create_project"
bl_label = "Create Project"
bl_options = {"REGISTER", "UNDO"}
bl_description = "Create a new IFC project"
def execute(self, context):
IfcStore.begin_transaction(self)
IfcStore.add_transaction_operation(self, rollback=self.rollback, commit=lambda data: True)
self._execute(context)
self.transaction_data = {"file": tool.Ifc.get()}
IfcStore.add_transaction_operation(self, rollback=lambda data: True, commit=self.commit)
IfcStore.end_transaction(self)
return {"FINISHED"}
def _execute(self, context):
props = context.scene.BIMProjectProperties
template = None if props.template_file == "0" else props.template_file
core.create_project(tool.Ifc, tool.Project, schema=props.export_schema, template=template)
def rollback(self, data):
IfcStore.file = None
def commit(self, data):
IfcStore.file = data["file"]
class SelectLibraryFile(bpy.types.Operator, IFCFileSelector):
bl_idname = "bim.select_library_file"
bl_label = "Select Library File"
bl_options = {"REGISTER", "UNDO"}
bl_description = "Select an IFC file that can be used as a library"
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
filter_glob: bpy.props.StringProperty(default="*.ifc;*.ifczip;*.ifcxml", options={"HIDDEN"})
def execute(self, context):
IfcStore.begin_transaction(self)
old_filepath = IfcStore.library_path
result = self._execute(context)
self.transaction_data = {"old_filepath": old_filepath, "filepath": self.filepath}
IfcStore.add_transaction_operation(self)
IfcStore.end_transaction(self)
return result
def _execute(self, context):
IfcStore.library_path = self.filepath
IfcStore.library_file = ifcopenshell.open(self.filepath)
bpy.ops.bim.refresh_library()
if context.area:
context.area.tag_redraw()
return {"FINISHED"}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {"RUNNING_MODAL"}
def rollback(self, data):
if data["old_filepath"]:
IfcStore.library_path = data["old_filepath"]
IfcStore.library_file = ifcopenshell.open(data["old_filepath"])
else:
IfcStore.library_path = ""
IfcStore.library_file = None
def commit(self, data):
IfcStore.library_path = data["filepath"]
IfcStore.library_file = ifcopenshell.open(data["filepath"])
class RefreshLibrary(bpy.types.Operator):
bl_idname = "bim.refresh_library"
bl_label = "Refresh Library"
def execute(self, context):
self.props = context.scene.BIMProjectProperties
self.props.library_elements.clear()
self.props.library_breadcrumb.clear()
self.props.active_library_element = ""
types = IfcStore.library_file.wrapped_data.types_with_super()
for importable_type in sorted(["IfcTypeProduct", "IfcMaterial", "IfcCostSchedule", "IfcProfileDef"]):
if importable_type in types:
new = self.props.library_elements.add()
new.name = importable_type
return {"FINISHED"}
class ChangeLibraryElement(bpy.types.Operator):
bl_idname = "bim.change_library_element"
bl_label = "Change Library Element"
bl_options = {"REGISTER", "UNDO"}
element_name: bpy.props.StringProperty()
def execute(self, context):
self.props = context.scene.BIMProjectProperties
self.file = IfcStore.get_file()
self.library_file = IfcStore.library_file
ifc_classes = set()
self.props.active_library_element = self.element_name
crumb = self.props.library_breadcrumb.add()
crumb.name = self.element_name
elements = self.library_file.by_type(self.element_name)
[ifc_classes.add(e.is_a()) for e in elements]
self.props.library_elements.clear()
if len(ifc_classes) == 1 and list(ifc_classes)[0] == self.element_name:
for name, ifc_definition_id in sorted([(self.get_name(e), e.id()) for e in elements]):
self.add_library_asset(name, ifc_definition_id)
else:
for ifc_class in sorted(ifc_classes):
if ifc_class == self.element_name:
continue
new = self.props.library_elements.add()
new.name = ifc_class
for name, ifc_definition_id, ifc_class in sorted([(self.get_name(e), e.id(), e.is_a()) for e in elements]):
if ifc_class == self.element_name:
self.add_library_asset(name, ifc_definition_id)
return {"FINISHED"}
def get_name(self, element):
if element.is_a("IfcProfileDef"):
return element.ProfileName or "Unnamed"
return element.Name or "Unnamed"
def add_library_asset(self, name, ifc_definition_id):
new = self.props.library_elements.add()
new.name = name
new.ifc_definition_id = ifc_definition_id
element = self.library_file.by_id(ifc_definition_id)
if self.library_file.schema == "IFC2X3" or not self.library_file.by_type("IfcProjectLibrary"):
new.is_declared = False
elif getattr(element, "HasContext", None) and element.HasContext[0].RelatingContext.is_a("IfcProjectLibrary"):
new.is_declared = True
try:
if element.is_a("IfcMaterial"):
next(e for e in self.file.by_type("IfcMaterial") if e.Name == name)
elif element.is_a("IfcProfileDef"):
next(e for e in self.file.by_type("IfcProfileDef") if e.ProfileName == name)
else:
self.file.by_guid(element.GlobalId)
new.is_appended = True
except (AttributeError, RuntimeError, StopIteration):
new.is_appended = False
class RewindLibrary(bpy.types.Operator):
bl_idname = "bim.rewind_library"
bl_label = "Rewind Library"
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
self.props = context.scene.BIMProjectProperties
total_breadcrumbs = len(self.props.library_breadcrumb)
if total_breadcrumbs < 2:
bpy.ops.bim.refresh_library()
return {"FINISHED"}
element_name = self.props.library_breadcrumb[total_breadcrumbs - 2].name
self.props.library_breadcrumb.remove(total_breadcrumbs - 1)
self.props.library_breadcrumb.remove(total_breadcrumbs - 2)
bpy.ops.bim.change_library_element(element_name=element_name)
return {"FINISHED"}
class AssignLibraryDeclaration(bpy.types.Operator):
bl_idname = "bim.assign_library_declaration"
bl_label = "Assign Library Declaration"
bl_options = {"REGISTER", "UNDO"}
definition: bpy.props.IntProperty()
def execute(self, context):
IfcStore.begin_transaction(self)
IfcStore.library_file.begin_transaction()
result = self._execute(context)
IfcStore.library_file.end_transaction()
IfcStore.add_transaction_operation(self)
IfcStore.end_transaction(self)
return result
def _execute(self, context):
self.props = context.scene.BIMProjectProperties
self.file = IfcStore.library_file
ifcopenshell.api.run(
"project.assign_declaration",
self.file,
definition=self.file.by_id(self.definition),
relating_context=self.file.by_type("IfcProjectLibrary")[0],
)
element_name = self.props.active_library_element
bpy.ops.bim.rewind_library()
bpy.ops.bim.change_library_element(element_name=element_name)
return {"FINISHED"}
def rollback(self, data):
IfcStore.library_file.undo()
def commit(self, data):
IfcStore.library_file.redo()
class UnassignLibraryDeclaration(bpy.types.Operator):
bl_idname = "bim.unassign_library_declaration"
bl_label = "Unassign Library Declaration"
bl_options = {"REGISTER", "UNDO"}
definition: bpy.props.IntProperty()
def execute(self, context):
IfcStore.begin_transaction(self)
IfcStore.library_file.begin_transaction()
result = self._execute(context)
IfcStore.library_file.end_transaction()
IfcStore.add_transaction_operation(self)
IfcStore.end_transaction(self)
return result
def _execute(self, context):
self.props = context.scene.BIMProjectProperties
self.file = IfcStore.library_file
ifcopenshell.api.run(
"project.unassign_declaration",
self.file,
definition=self.file.by_id(self.definition),
relating_context=self.file.by_type("IfcProjectLibrary")[0],
)
element_name = self.props.active_library_element
bpy.ops.bim.rewind_library()
bpy.ops.bim.change_library_element(element_name=element_name)
return {"FINISHED"}
def rollback(self, data):
IfcStore.library_file.undo()
def commit(self, data):
IfcStore.library_file.redo()
class SaveLibraryFile(bpy.types.Operator):
bl_idname = "bim.save_library_file"
bl_label = "Save Library File"
def execute(self, context):
IfcStore.library_file.write(IfcStore.library_path)
return {"FINISHED"}
class AppendLibraryElement(bpy.types.Operator):
bl_idname = "bim.append_library_element"
bl_label = "Append Library Element"
bl_options = {"REGISTER", "UNDO"}
definition: bpy.props.IntProperty()
prop_index: bpy.props.IntProperty()
@classmethod
def poll(cls, context):
return IfcStore.get_file()
def execute(self, context):
return IfcStore.execute_ifc_operator(self, context)
def _execute(self, context):
self.file = IfcStore.get_file()
element = ifcopenshell.api.run(
"project.append_asset",
self.file,
library=IfcStore.library_file,
element=IfcStore.library_file.by_id(self.definition),
)
if not element:
return {"FINISHED"}
if element.is_a("IfcTypeProduct"):
self.import_type_from_ifc(element, context)
elif element.is_a("IfcMaterial"):
self.import_material_from_ifc(element, context)
try:
context.scene.BIMProjectProperties.library_elements[self.prop_index].is_appended = True
except:
# TODO Remove this terrible code when I refactor this into the core
pass
blenderbim.bim.handler.purge_module_data()
return {"FINISHED"}
def import_material_from_ifc(self, element, context):
self.file = IfcStore.get_file()
logger = logging.getLogger("ImportIFC")
ifc_import_settings = import_ifc.IfcImportSettings.factory(context, IfcStore.path, logger)
ifc_importer = import_ifc.IfcImporter(ifc_import_settings)
ifc_importer.file = self.file
blender_material = ifc_importer.create_material(element)
self.import_material_styles(blender_material, element, ifc_importer)
def import_type_from_ifc(self, element, context):
self.file = IfcStore.get_file()
logger = logging.getLogger("ImportIFC")
ifc_import_settings = import_ifc.IfcImportSettings.factory(context, IfcStore.path, logger)
type_collection = bpy.data.collections.get("Types")
if not type_collection:
type_collection = bpy.data.collections.new("Types")
for collection in bpy.context.view_layer.layer_collection.children:
if "IfcProject/" in collection.name:
collection.collection.children.link(type_collection)
collection.children["Types"].hide_viewport = True
break
ifc_importer = import_ifc.IfcImporter(ifc_import_settings)
ifc_importer.file = self.file
ifc_importer.type_collection = type_collection
ifc_importer.material_creator.load_existing_materials()
self.import_type_materials(element, ifc_importer)
self.import_type_styles(element, ifc_importer)
ifc_importer.create_type_product(element)
ifc_importer.place_objects_in_collections()
def import_type_materials(self, element, ifc_importer):
for rel in element.HasAssociations:
if not rel.is_a("IfcRelAssociatesMaterial"):
continue
for material in [e for e in self.file.traverse(rel) if e.is_a("IfcMaterial")]:
if IfcStore.get_element(material.id()):
continue
blender_material = ifc_importer.create_material(material)
self.import_material_styles(blender_material, material, ifc_importer)
def import_type_styles(self, element, ifc_importer):
for representation_map in element.RepresentationMaps or []:
for element in self.file.traverse(representation_map):
if not element.is_a("IfcRepresentationItem") or not element.StyledByItem:
continue
for element2 in self.file.traverse(element.StyledByItem[0]):
if element2.is_a("IfcSurfaceStyle") and not IfcStore.get_element(element2.id()):
ifc_importer.create_style(element2)
def import_material_styles(self, blender_material, material, ifc_importer):
if not material.HasRepresentation:
return
for element in self.file.traverse(material.HasRepresentation[0]):
if element.is_a("IfcSurfaceStyle") and not IfcStore.get_element(element.id()):
ifc_importer.create_style(element, blender_material)
class EnableEditingHeader(bpy.types.Operator):
bl_idname = "bim.enable_editing_header"
bl_label = "Enable Editing Header"
bl_options = {"REGISTER", "UNDO"}
bl_description = "Edit the IFC header file such as Author, Organization, ..."
@classmethod
def poll(cls, context):
return IfcStore.get_file()
def execute(self, context):
self.file = IfcStore.get_file()
props = context.scene.BIMProjectProperties
props.is_editing = True
mvd = "".join(IfcStore.get_file().wrapped_data.header.file_description.description)
if "[" in mvd:
props.mvd = mvd.split("[")[1][0:-1]
else:
props.mvd = ""
author = self.file.wrapped_data.header.file_name.author
if author:
props.author_name = author[0]
if len(author) > 1:
props.author_email = author[1]
organisation = self.file.wrapped_data.header.file_name.organization
if organisation:
props.organisation_name = organisation[0]
if len(organisation) > 1:
props.organisation_email = organisation[1]
props.authorisation = self.file.wrapped_data.header.file_name.authorization
return {"FINISHED"}
class EditHeader(bpy.types.Operator):
bl_idname = "bim.edit_header"
bl_label = "Edit Header"
bl_options = {"REGISTER", "UNDO"}
bl_description = "Save header informations"
@classmethod
def poll(cls, context):
return IfcStore.get_file()
def execute(self, context):
IfcStore.begin_transaction(self)
self.transaction_data = {}
self.transaction_data["old"] = self.record_state()
result = self._execute(context)
self.transaction_data["new"] = self.record_state()
IfcStore.add_transaction_operation(self)
IfcStore.end_transaction(self)
return result
def _execute(self, context):
self.file = IfcStore.get_file()
props = context.scene.BIMProjectProperties
props.is_editing = True
self.file.wrapped_data.header.file_description.description = (f"ViewDefinition[{props.mvd}]",)
self.file.wrapped_data.header.file_name.author = (props.author_name, props.author_email)
self.file.wrapped_data.header.file_name.organization = (props.organisation_name, props.organisation_email)
self.file.wrapped_data.header.file_name.authorization = props.authorisation
bpy.ops.bim.disable_editing_header()
return {"FINISHED"}
def record_state(self):
self.file = IfcStore.get_file()
return {
"description": self.file.wrapped_data.header.file_description.description,
"author": self.file.wrapped_data.header.file_name.author,
"organisation": self.file.wrapped_data.header.file_name.organization,
"authorisation": self.file.wrapped_data.header.file_name.authorization,
}
def rollback(self, data):
file = IfcStore.get_file()
file.wrapped_data.header.file_description.description = data["old"]["description"]
file.wrapped_data.header.file_name.author = data["old"]["author"]
file.wrapped_data.header.file_name.organization = data["old"]["organisation"]
file.wrapped_data.header.file_name.authorization = data["old"]["authorisation"]
def commit(self, data):
file = IfcStore.get_file()
file.wrapped_data.header.file_description.description = data["new"]["description"]
file.wrapped_data.header.file_name.author = data["new"]["author"]
file.wrapped_data.header.file_name.organization = data["new"]["organisation"]
file.wrapped_data.header.file_name.authorization = data["new"]["authorisation"]
class DisableEditingHeader(bpy.types.Operator):
bl_idname = "bim.disable_editing_header"
bl_label = "Disable Editing Header"
bl_options = {"REGISTER", "UNDO"}
bl_description = "Cancel unsaved header informations"
def execute(self, context):
context.scene.BIMProjectProperties.is_editing = False
return {"FINISHED"}
class LoadProject(bpy.types.Operator, IFCFileSelector):
bl_idname = "bim.load_project"
bl_label = "Load Project"
bl_options = {"REGISTER", "UNDO"}
bl_description = "Load an existing IFC project"
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
filter_glob: bpy.props.StringProperty(default="*.ifc;*.ifczip;*.ifcxml", options={"HIDDEN"})
is_advanced: bpy.props.BoolProperty(name="Enable Advanced Mode", default=False)
def execute(self, context):
if not self.is_existing_ifc_file():
return {"FINISHED"}
context.scene.BIMProperties.ifc_file = self.filepath
context.scene.BIMProjectProperties.is_loading = True
if not self.is_advanced:
bpy.ops.bim.load_project_elements()
return {"FINISHED"}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {"RUNNING_MODAL"}
def draw(self, context):
self.layout.prop(self, "is_advanced")
IFCFileSelector.draw(self, context)
class UnloadProject(bpy.types.Operator):
bl_idname = "bim.unload_project"
bl_label = "Unload Project"
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
IfcStore.purge()
context.scene.BIMProperties.ifc_file = ""
context.scene.BIMProjectProperties.is_loading = False
return {"FINISHED"}
class LoadProjectElements(bpy.types.Operator):
bl_idname = "bim.load_project_elements"
bl_label = "Load Project Elements"
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
self.props = context.scene.BIMProjectProperties
self.file = IfcStore.get_file()
start = time.time()
logger = logging.getLogger("ImportIFC")
path_log = os.path.join(context.scene.BIMProperties.data_dir, "process.log")
if not os.access(context.scene.BIMProperties.data_dir, os.W_OK):
path_log = os.path.join(tempfile.mkdtemp(), "process.log")
logging.basicConfig(
filename=path_log,
filemode="a",
level=logging.DEBUG,
)
settings = import_ifc.IfcImportSettings.factory(context, context.scene.BIMProperties.ifc_file, logger)
settings.has_filter = self.props.filter_mode != "NONE"
settings.should_filter_spatial_elements = self.props.should_filter_spatial_elements
if self.props.filter_mode == "DECOMPOSITION":
settings.elements = self.get_decomposition_elements()
elif self.props.filter_mode == "IFC_CLASS":
settings.elements = self.get_ifc_class_elements()
elif self.props.filter_mode == "WHITELIST":
settings.elements = self.get_whitelist_elements()
elif self.props.filter_mode == "BLACKLIST":
settings.elements = self.get_blacklist_elements()
settings.logger.info("Starting import")
ifc_importer = import_ifc.IfcImporter(settings)
ifc_importer.execute()
settings.logger.info("Import finished in {:.2f} seconds".format(time.time() - start))
print("Import finished in {:.2f} seconds".format(time.time() - start))
context.scene.BIMProjectProperties.is_loading = False
return {"FINISHED"}
def get_decomposition_elements(self):
containers = set()
for filter_category in self.props.filter_categories:
if not filter_category.is_selected:
continue
container = self.file.by_id(filter_category.ifc_definition_id)
while container:
containers.add(container)
container = ifcopenshell.util.element.get_aggregate(container)
if self.file.schema == "IFC2X3" and container.is_a("IfcProject"):
container = None
elif self.file.schema != "IFC2X3" and container.is_a("IfcContext"):
container = None
elements = set()
for container in containers:
for rel in container.ContainsElements:
elements.update(rel.RelatedElements)
self.append_decomposed_elements(elements)
return elements
def append_decomposed_elements(self, elements):
decomposed_elements = set()
for element in elements:
if element.IsDecomposedBy:
for subelement in element.IsDecomposedBy[0].RelatedObjects:
decomposed_elements.add(subelement)
if decomposed_elements:
self.append_decomposed_elements(decomposed_elements)
elements.update(decomposed_elements)
def get_ifc_class_elements(self):
elements = set()
for filter_category in self.props.filter_categories:
if not filter_category.is_selected:
continue
elements.update(self.file.by_type(filter_category.name, include_subtypes=False))
return elements
def get_whitelist_elements(self):
selector = ifcopenshell.util.selector.Selector()
return set(selector.parse(self.file, self.props.filter_query))
def get_blacklist_elements(self):
selector = ifcopenshell.util.selector.Selector()
return set(self.file.by_type("IfcElement")) - set(selector.parse(self.file, self.props.filter_query))
class LinkIfc(bpy.types.Operator):
bl_idname = "bim.link_ifc"
bl_label = "Link IFC"
bl_options = {"REGISTER", "UNDO"}
bl_description = "Link a Blender file"
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
filter_glob: bpy.props.StringProperty(default="*.blend;*.blend1", options={"HIDDEN"})
def execute(self, context):
new = context.scene.BIMProjectProperties.links.add()
new.name = self.filepath
bpy.ops.bim.load_link(filepath=self.filepath)
return {"FINISHED"}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {"RUNNING_MODAL"}
class UnlinkIfc(bpy.types.Operator):
bl_idname = "bim.unlink_ifc"
bl_label = "UnLink IFC"
bl_options = {"REGISTER", "UNDO"}
bl_description = "Remove the selected file from the link list"
filepath: bpy.props.StringProperty()
def execute(self, context):
bpy.ops.bim.unload_link(filepath=self.filepath)
index = context.scene.BIMProjectProperties.links.find(self.filepath)
if index != -1:
context.scene.BIMProjectProperties.links.remove(index)
return {"FINISHED"}
class UnloadLink(bpy.types.Operator):
bl_idname = "bim.unload_link"
bl_label = "Unload Link"
bl_options = {"REGISTER", "UNDO"}
bl_description = "Unload the selected linked file"
filepath: bpy.props.StringProperty()
def execute(self, context):
for collection in context.scene.collection.children:
if collection.library and collection.library.filepath == self.filepath:
context.scene.collection.children.unlink(collection)
for scene in bpy.data.scenes:
if scene.library and scene.library.filepath == self.filepath:
bpy.data.scenes.remove(scene)
link = context.scene.BIMProjectProperties.links.get(self.filepath)
link.is_loaded = False
return {"FINISHED"}
class LoadLink(bpy.types.Operator):
bl_idname = "bim.load_link"
bl_label = "Load Link"
bl_options = {"REGISTER", "UNDO"}
bl_description = "Load the selected file"
filepath: bpy.props.StringProperty()
def execute(self, context):
with bpy.data.libraries.load(self.filepath, link=True) as (data_from, data_to):
data_to.scenes = data_from.scenes
for scene in bpy.data.scenes:
if not scene.library or scene.library.filepath != self.filepath:
continue
for child in scene.collection.children:
if "IfcProject" not in child.name:
continue
bpy.data.scenes[0].collection.children.link(child)
link = context.scene.BIMProjectProperties.links.get(self.filepath)
link.is_loaded = True
return {"FINISHED"}
class ExportIFC(bpy.types.Operator):
bl_idname = "export_ifc.bim"
bl_label = "Export IFC"
bl_options = {"REGISTER", "UNDO"}
filename_ext = ".ifc"
filter_glob: bpy.props.StringProperty(default="*.ifc;*.ifczip;*.ifcxml;*.ifcjson", options={"HIDDEN"})
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
json_version: bpy.props.EnumProperty(items=[("4", "4", ""), ("5a", "5a", "")], name="IFC JSON Version")
json_compact: bpy.props.BoolProperty(name="Export Compact IFCJSON", default=False)
should_save_as: bpy.props.BoolProperty(name="Should Save As", default=False)
use_relative_path: bpy.props.BoolProperty(name="Use Relative Path", default=False)
def invoke(self, context, event):
if not IfcStore.get_file():
self.report({"ERROR"}, "No IFC project is available for export - create or import a project first.")
return {"FINISHED"}
if context.scene.BIMProperties.ifc_file and not self.should_save_as:
self.filepath = context.scene.BIMProperties.ifc_file
if not os.path.isabs(self.filepath):
self.filepath = os.path.abspath(os.path.join(bpy.path.abspath("//"), self.filepath))
return self.execute(context)
if not self.filepath:
self.filepath = bpy.path.ensure_ext(bpy.data.filepath, ".ifc")
WindowManager = context.window_manager
WindowManager.fileselect_add(self)
return {"RUNNING_MODAL"}
def execute(self, context):
return IfcStore.execute_ifc_operator(self, context)
def _execute(self, context):
start = time.time()
logger = logging.getLogger("ExportIFC")
path_log = os.path.join(context.scene.BIMProperties.data_dir, "process.log")
if not os.access(context.scene.BIMProperties.data_dir, os.W_OK):
path_log = os.path.join(tempfile.mkdtemp(), "process.log")
logging.basicConfig(
filename=path_log,
filemode="a",
level=logging.DEBUG,
)
extension = self.filepath.split(".")[-1]
if extension == "ifczip":
output_file = bpy.path.ensure_ext(self.filepath, ".ifczip")
elif extension == "ifcjson":
output_file = bpy.path.ensure_ext(self.filepath, ".ifcjson")
else:
output_file = bpy.path.ensure_ext(self.filepath, ".ifc")
settings = export_ifc.IfcExportSettings.factory(context, output_file, logger)
settings.json_version = self.json_version
settings.json_compact = self.json_compact
ifc_exporter = export_ifc.IfcExporter(settings)
settings.logger.info("Starting export")
ifc_exporter.export()
settings.logger.info("Export finished in {:.2f} seconds".format(time.time() - start))
print("Export finished in {:.2f} seconds".format(time.time() - start))
scene = context.scene
if not scene.DocProperties.ifc_files:
new = scene.DocProperties.ifc_files.add()
new.name = output_file
if self.use_relative_path and bpy.data.is_saved:
output_file = os.path.relpath(output_file, bpy.path.abspath("//"))
if scene.BIMProperties.ifc_file != output_file and extension not in ["ifczip", "ifcjson"]:
scene.BIMProperties.ifc_file = output_file
if bpy.data.is_saved and bpy.data.is_dirty and bpy.data.filepath:
bpy.ops.wm.save_mainfile(filepath=bpy.data.filepath)
blenderbim.bim.handler.purge_module_data()
return {"FINISHED"}
class ImportIFC(bpy.types.Operator):
bl_idname = "import_ifc.bim"
bl_label = "Import IFC"
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
bpy.ops.bim.load_project("INVOKE_DEFAULT")
return {"FINISHED"}
|
IfcOpenShell/IfcOpenShell
|
src/blenderbim/blenderbim/bim/module/project/operator.py
|
Python
|
lgpl-3.0
| 31,195
|
from __future__ import print_function
from __future__ import absolute_import
from celery import current_task
from cloudmesh.pbs.celery import celery_pbs_queue
from cloudmesh.config.cm_config import cm_config
from cloudmesh.pbs.pbs_mongo import pbs_mongo
import datetime
import sys
import os
import time
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
@celery_pbs_queue.task(track_started=True)
# Checks MongoDB for the last qstat refresh and re-queries a host only if its data is stale.
def refresh_qstat(hosts):
    '''
    Refresh the cached qstat information in MongoDB for each host in hosts.
    A host is only re-queried when its stored data is older than
    max_time_diff_allowed seconds; otherwise the cached values are kept.
    Returns a string describing any errors encountered (empty on success).
    '''
max_time_diff_allowed = 30 # indicates 30 seconds of time difference allowed between old and new values
config = cm_config()
user = config["cloudmesh"]["hpc"]["username"]
pbs = pbs_mongo()
error = ""
print("task recieved")
for host in hosts:
        now = datetime.datetime.now()
data = pbs.get_qstat(host)
perform_refresh = False
jobcount = data.count()
if jobcount > 0:
last_refresh = data[0]["cm_refresh"]
            time_diff = now - last_refresh
if time_diff.seconds > max_time_diff_allowed:
perform_refresh = True
else:
perform_refresh = True
if perform_refresh:
print("Beginning refresh for {0}".format(host))
pbs.activate(host, user)
try:
d = pbs.refresh_qstat(host)
            except Exception as e:
error += "error {0} {1}".format(str(host), str(e))
else:
print("No refresh needed for {0}".format(host))
return error
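# Usage sketch (hostnames are placeholders): as a celery task, refresh_qstat is normally
# dispatched asynchronously to the pbs queue rather than called inline, e.g.
#
#   result = refresh_qstat.delay(["hpc1.example.org", "hpc2.example.org"])
#   print(result.get())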
|
rajpushkar83/cloudmesh
|
cloudmesh/pbs/tasks.py
|
Python
|
apache-2.0
| 1,907
|
#!/usr/bin/env python
# This file is part of VoltDB.
# Copyright (C) 2008-2016 VoltDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from subprocess import *
def run(procs, threadsPerProc, workload):
results = []
totalThroughput = 0.0
output = Popen(['./throughput', str(procs), str(threadsPerProc), workload], stdout=PIPE).communicate()[0]
lines = output.split('\n')
for line in lines:
if line.startswith('RESULT: '):
print line
line = line.split(' ')[1]
parts = line.split(',')
results += [float(parts[2])]
for r in results:
totalThroughput += r
print "--"
print "PER THREAD AVG: " + str(totalThroughput / (procs * threadsPerProc))
print "PER PROC AVG: " + str(totalThroughput / procs)
print "TOTAL THROUGHPUT: " + str(totalThroughput)
print "--"
run(1, 1, 'r')
run(1, 2, 'r')
run(1, 3, 'r')
run(1, 4, 'r')
|
paulmartel/voltdb
|
tests/bench/throughput/run.py
|
Python
|
agpl-3.0
| 1,927
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import time
import operator
import argparse
import sys
import os
import dbconf
import utils
import re
import codecs
import datetime
def analize(what, data, logfile):
    """Summarize log lines whose `what` field matches `data`; "*" works as a wildcard."""
    global configuration
    regex = False
if re.search(r'\*', data):
data = re.escape(data)
regex = re.sub(r'\\\*', r'.*', data)
summary = {}
total_lines = 0
total = 0
first_read = False
for line in logfile:
total_lines += 1
log = utils.parse_logline(line)
if not first_read and log:
print "Reading from %s (UTC)..." % (datetime.datetime.fromtimestamp(log['ts']).strftime('%Y-%m-%d %H:%M:%S'))
sys.stdout.flush()
first_read = True
if not log or (not regex and log[what] != data) or (regex and not re.match(regex, log[what])):
continue
total += 1
utils.add_log2dict(log, summary)
print "TOTAL LINES: %d" % (total_lines,)
print "FILTERED LINES: %d (%.2f%%)" % (total, 100 * total/float(total_lines))
if total == 0: return
for k in [what] + [x for x in summary if x != what]:
print "%ss (%d): " % (k.upper(),len(summary[k]))
sorted_vals = sorted(summary[k].items(), key=lambda x:x[1], reverse=True)
if configuration.maxitems > 0:
sorted_vals = sorted_vals[:configuration.maxitems]
for v in sorted_vals:
print "%8d %6.2f%% %s" % (v[1], 100 * v[1]/float(total), v[0])
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.set_defaults(what="ip")
parser.add_argument('data', help='Keyword to analyze (IP, username, etc.)')
group = parser.add_mutually_exclusive_group()
group.add_argument("-i", dest="what", action="store_const", const="ip", help="Show IP summary (default)")
group.add_argument("-u", dest="what", action="store_const", const="user", help="Show user summary")
group.add_argument("-s", dest="what", action="store_const", const="script", help="Show script summary [fullname required]")
group.add_argument("-n", dest="what", action="store_const", const="server", help="Show server/hostname summary")
group.add_argument("-w", dest="what", help="Show a given log category")
group = parser.add_mutually_exclusive_group()
group.add_argument("--hours", "-H", type=int, default=1, help="Hours to analyze since the current time")
group.add_argument("--minutes", "-M", type=int, help="Minutes to analyze since the current time")
group.add_argument("--megabytes", "-m", type=int, default=-1, help="The number of megabytes to analyze from the end, default 100, 0 for the whole file")
parser.add_argument("--maxitems", "-x", type=int, default=20, help="Max number per each displayed item, default 20, 0 for all")
parser.add_argument("--logfile", "-l", default="/var/log/groar_access.log", help="Logfile pathname, default /var/log/groar_access.log")
configuration = parser.parse_args()
try:
logfile = codecs.open(configuration.logfile,"rU", "utf-8")
if configuration.megabytes >= 0:
if configuration.megabytes > 0:
                fsize = os.path.getsize(configuration.logfile)
nbytes = configuration.megabytes * 1024 * 1024
if fsize > nbytes:
logfile.seek(-nbytes, 2)
logfile.readline() # Clean the first line
elif configuration.minutes > 0:
utils.time_position_log(logfile, configuration.minutes)
elif configuration.hours > 0:
utils.time_position_log(logfile, configuration.hours*60)
except (IOError), e:
print >> sys.stderr, e
exit(1)
analize(configuration.what, configuration.data, logfile)
exit(0)
|
groarnet/groar
|
scripts/summary_access.py
|
Python
|
agpl-3.0
| 3,492
|
########################################################################
#
# University of Southampton IT Innovation Centre, 2011
#
# Copyright in this library belongs to the University of Southampton
# University Road, Highfield, Southampton, UK, SO17 1BJ
#
# This software may not be used, sold, licensed, transferred, copied
# or reproduced in whole or in part in any manner or form or in or
# on any media by any person other than in accordance with the terms
# of the Licence Agreement supplied with the software, or otherwise
# without the prior written consent of the copyright owners.
#
# This software is distributed WITHOUT ANY WARRANTY, without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE, except where stated in the Licence Agreement supplied with
# the software.
#
# Created By : Mark McArdle
# Created Date : 2011-03-25
# Created for Project : PrestoPrime
#
########################################################################
from django.core.cache import cache
from models import *
import base64
import pickle
def get_task_description(task_name):
task_description = cache.get(task_name)
if task_description:
return pickle.loads(base64.b64decode(task_description))
else:
try:
td = TaskDescription.objects.get(task_name=task_name)
task_description = td.get_json()
cache.set(task_name, base64.b64encode(pickle.dumps(task_description)))
return task_description
except TaskDescription.DoesNotExist:
raise Exception("No such job description '%s'" % task_name)
def register_task_description(task_name, task_description):
cache.set(task_name, base64.b64encode(pickle.dumps(task_description)))
save_task_description(task_name, task_description)
def save_task_description(task_name, task_description):
td = TaskDescription.objects.get_or_create(task_name=task_name)[0]
for i in range(0,task_description['nbinputs']):
TaskInput.objects.get_or_create(taskdescription=td, num=i, mimetype=task_description['input-%s'%i]["mimetype"])
for i in range(0,task_description['nboutputs']):
TaskOutput.objects.get_or_create(taskdescription=td, num=i, mimetype=task_description['output-%s'%i]["mimetype"])
for o in task_description['options']:
TaskOption.objects.get_or_create(taskdescription=td, name=o)
for r in task_description['results']:
TaskResult.objects.get_or_create(taskdescription=td, name=r)
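# Usage sketch (task name and description dict are illustrative): register caches and
# persists the description, after which lookups are served from the cache.
#
#   desc = {"nbinputs": 1, "input-0": {"mimetype": "video/mp4"},
#           "nboutputs": 1, "output-0": {"mimetype": "image/png"},
#           "options": ["width"], "results": ["thumbnail"]}
#   register_task_description("thumbnailimage", desc)
#   assert get_task_description("thumbnailimage") == desc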
try:
for td in TaskDescription.objects.all():
register_task_description(td.task_name, td.get_json())
except:
import logging
logging.debug("Could not register task descriptions")
|
mmcardle/MServe
|
django-mserve/jobservice/__init__.py
|
Python
|
lgpl-2.1
| 2,703
|
from time import time
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rcParams
import math
from scipy.spatial.distance import pdist, squareform
from sklearn.decomposition import PCA
import os
from tsptw_with_ortools import Solver
from config import get_config, print_config
# Compute a sequence's reward
def reward(tsptw_sequence,speed):
# Convert sequence to tour (end=start)
tour = np.concatenate((tsptw_sequence,np.expand_dims(tsptw_sequence[0],0)))
# Compute tour length
inter_city_distances = np.sqrt(np.sum(np.square(tour[:-1,:2]-tour[1:,:2]),axis=1))
distance = np.sum(inter_city_distances)
    # Compute delivery times at each city and count late cities
elapsed_time = -10
late_cities = 0
for i in range(tsptw_sequence.shape[0]-1):
travel_time = inter_city_distances[i]/speed
tw_open = tour[i+1,2]
tw_close = tour[i+1,3]
elapsed_time += travel_time
if elapsed_time <= tw_open:
elapsed_time = tw_open
elif elapsed_time > tw_close:
late_cities += 1
# Reward
return distance + 100000000*late_cities
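# Illustrative call (coordinates and time windows are made up): each row of a sequence is
# [x, y, tw_open, tw_close]; a tour that respects every window scores exactly its length,
# and each late city adds a 1e8 penalty on top.
#
#   seq = np.array([[0., 0., -1., 1.], [3., 4., 2., 10.], [0., 8., 6., 20.]])
#   print(reward(seq, speed=1.))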
# 2-opt move: reverse the segment of the sequence between positions i and j (inclusive)
def swap2opt(tsptw_sequence,i,j):
new_tsptw_sequence = np.copy(tsptw_sequence)
new_tsptw_sequence[i:j+1] = np.flip(tsptw_sequence[i:j+1], axis=0) # flip or swap ?
return new_tsptw_sequence
# One step of 2opt = one double loop and return first improved sequence
def step2opt(tsptw_sequence,speed):
seq_length = tsptw_sequence.shape[0]
distance = reward(tsptw_sequence,speed)
for i in range(1,seq_length-1):
for j in range(i+1,seq_length):
new_tsptw_sequence = swap2opt(tsptw_sequence,i,j)
new_distance = reward(new_tsptw_sequence,speed)
if new_distance < distance:
return new_tsptw_sequence
return tsptw_sequence
class DataGenerator(object):
# Initialize a DataGenerator
def __init__(self,config):
self.batch_size = config.batch_size
self.dimension = config.input_dimension
self.max_length = config.max_length
self.speed = config.speed
self.kNN = config.kNN # int for random k_nearest_neighbor
self.width = config.width_mean, config.width_std # time window width gaussian distribution [mean,std]
self.pretrain = config.pretrain
# Create Solver and Data Generator
self.solver = Solver(self.max_length, self.speed) # reference solver for TSP-TW (Google_OR_tools)
# Solve an instance with reference solver
def solve_instance(self, sequence, tw_open, tw_close):
# Calculate distance matrix
precision = 1 #20 #int(self.speed)
dist_array = pdist(sequence)
dist_matrix = squareform(dist_array)
# Call OR Tools to solve instance
demands = np.zeros(tw_open.size)
tour, tour_length, delivery_time = self.solver.run(precision*dist_matrix, demands, precision*(1+tw_open), precision*(1+tw_close)) # a tour is a permutation + start_index # Rq: +1 for depot offset
tour_length= tour_length/precision
delivery_time = np.asarray(delivery_time)/precision - 1 # offset -1 because depot opens at -1
return tour[:-1], tour_length, delivery_time
# Iterate step2opt max_iter times
def loop2opt(self, tsptw_sequence, max_iter=2000, speed=1.):
best_reward = reward(tsptw_sequence,speed)
new_tsptw_sequence = np.copy(tsptw_sequence)
for _ in range(max_iter):
new_tsptw_sequence = step2opt(new_tsptw_sequence,speed)
new_reward = reward(new_tsptw_sequence,speed)
if new_reward < best_reward:
best_reward = new_reward
else:
break
return new_tsptw_sequence, best_reward
def get_tour_length(self, sequence):
# Convert sequence to tour (end=start)
tour = np.concatenate((sequence,np.expand_dims(sequence[0],0)))
# Compute tour length
inter_city_distances = np.sqrt(np.sum(np.square(tour[:-1]-tour[1:]),axis=1))
return np.sum(inter_city_distances)
# Reorder sequence with random k NN (TODO: Less computations)
def k_nearest_neighbor(self, sequence):
# Calculate dist_matrix
dist_array = pdist(sequence)
dist_matrix = squareform(dist_array)
# Construct tour
new_sequence = [sequence[0]]
current_city = 0
visited_cities = [0]
for i in range(1,len(sequence)):
j = np.random.randint(0,min(len(sequence)-i,self.kNN))
next_city = [index for index in dist_matrix[current_city].argsort() if index not in visited_cities][j]
visited_cities.append(next_city)
new_sequence.append(sequence[next_city])
current_city = next_city
return np.asarray(new_sequence)
# Generate random TSP-TW instance
def gen_instance(self, test_mode=True, seed=0):
if seed!=0: np.random.seed(seed)
# Randomly generate (max_length+1) city integer coordinates in [0,100[ # Rq: +1 for depot
sequence = np.random.randint(100, size=(self.max_length+1, self.dimension))
if self.pretrain == False:
sequence = self.k_nearest_neighbor(sequence) # k nearest neighbour tour (reverse order - depot end)
# Principal Component Analysis to center & rotate coordinates
pca = PCA(n_components=self.dimension)
sequence_ = pca.fit_transform(sequence)
# TW constraint 1 (open time)
if self.pretrain == True:
tw_open = np.random.randint(100, size=(self.max_length, 1)) # t_open random integer in [0,100[
tw_open = np.concatenate((tw_open,[[-1]]), axis=0) # depot opens at -1
tw_open[::-1].sort(axis=0) # sort cities by TW open constraint (reverse order)
else: # Open time defined by kNN tour
ordered_seq = sequence[::-1]
inter_city_distances = np.sqrt(np.sum(np.square(ordered_seq[1:]-ordered_seq[:-1]),axis=1))
time_at_cities = np.cumsum(inter_city_distances/self.speed,axis=0)
time_at_cities = np.expand_dims(np.floor(time_at_cities).astype(int),axis=1)
tw_open = np.concatenate(([[-1]],time_at_cities), axis=0) # depot opens at -1
tw_open = tw_open[::-1] # TW open constraint sorted (reverse order) Rq: depot = tw_open[-1], tw_width[-1] and sequence[-1]
# TW constraint 2 (time width): Gaussian or uniform distribution
tw_width = np.abs(np.random.normal(loc=self.width[0], scale=self.width[1], size=(self.max_length, 1))) # gaussian distribution
tw_width = np.concatenate((tw_width,[[1]]), axis=0) # depot opened for 1
tw_width = np.ceil(tw_width).astype(int)
tw_close = tw_open+tw_width
# TW feature 1 = Centered mean time (invariance)
tw_mean_ = (tw_open+tw_close)/2
tw_mean_ -= np.mean(tw_mean_)
# TW feature 2 = Width
tw_width_ = tw_width
print(tw_width_)
# Concatenate input (sorted by time) and scale to [0,1[
input_ = np.concatenate((sequence_,tw_mean_,tw_width_), axis=1)/100
if test_mode == True:
return input_, sequence, tw_open, tw_close
else:
return input_
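    # Shape sketch for gen_instance (follows from the code above): with input_dimension=2
    # and max_length=20, input_ is (21, 4) with columns [x', y', tw_mean, tw_width] scaled
    # by 1/100, while sequence is (21, 2) and tw_open / tw_close are both (21, 1).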
# Generate random batch for training procedure
def train_batch(self):
input_batch = []
for _ in range(self.batch_size):
# Generate random TSP-TW instance
input_ = self.gen_instance(test_mode=False)
# Store batch
input_batch.append(input_)
return input_batch
# Generate random batch for testing procedure
def test_batch(self, seed=0):
# Generate random TSP-TW instance
input_, or_sequence, tw_open, tw_close = self.gen_instance(test_mode=True, seed=seed)
# Store batch
input_batch = np.tile(input_,(self.batch_size,1,1))
return input_batch, or_sequence, tw_open, tw_close
# Plot a tour
def visualize_2D_trip(self,trip,tw_open,tw_close):
plt.figure(figsize=(30,30))
rcParams.update({'font.size': 22})
# Plot cities
colors = ['red'] # Depot is first city
for i in range(len(tw_open)-1):
colors.append('blue')
plt.scatter(trip[:,0], trip[:,1], color=colors, s=200)
# Plot tour
tour=np.array(list(range(len(trip))) + [0])
X = trip[tour, 0]
Y = trip[tour, 1]
plt.plot(X, Y,"--", markersize=100)
# Annotate cities with TW
tw_open = np.rint(tw_open)
tw_close = np.rint(tw_close)
time_window = np.concatenate((tw_open,tw_close),axis=1)
for tw, (x, y) in zip(time_window,(zip(X,Y))):
plt.annotate(tw,xy=(x, y))
plt.xlim(0,60)
plt.ylim(0,60)
plt.show()
# Heatmap of permutations (x=cities; y=steps)
def visualize_sampling(self,permutations):
max_length = len(permutations[0])
grid = np.zeros([max_length,max_length]) # initialize heatmap grid to 0
transposed_permutations = np.transpose(permutations)
for t, cities_t in enumerate(transposed_permutations): # step t, cities chosen at step t
city_indices, counts = np.unique(cities_t,return_counts=True,axis=0)
for u,v in zip(city_indices, counts):
grid[t][u]+=v # update grid with counts from the batch of permutations
# plot heatmap
fig = plt.figure()
rcParams.update({'font.size': 22})
ax = fig.add_subplot(1,1,1)
ax.set_aspect('equal')
plt.imshow(grid, interpolation='nearest', cmap='gray')
plt.colorbar()
plt.title('Sampled permutations')
plt.ylabel('Time t')
plt.xlabel('City i')
plt.show()
# Heatmap of attention (x=cities; y=steps)
def visualize_attention(self,attention):
# plot heatmap
fig = plt.figure()
rcParams.update({'font.size': 22})
ax = fig.add_subplot(1,1,1)
ax.set_aspect('equal')
plt.imshow(attention, interpolation='nearest', cmap='hot')
plt.colorbar()
plt.title('Attention distribution')
plt.ylabel('Step t')
plt.xlabel('Attention_t')
plt.show()
def load_Dumas(self,dir_='n20w100'):
dataset = {}
shrinkage = 80
for file_name in os.listdir('benchmark/'+dir_):
if 'solution' in file_name: continue
# Gather data
data = open('benchmark/'+dir_+'/'+file_name, 'r')
x,y,t_open,t_close = [],[],[],[]
for i,line in enumerate(data):
if i>5:
line = line.split()
if line[0]!='999':
x.append(int(float(line[1])))
y.append(int(float(line[2])))
t_open.append(int(float(line[4])))
t_close.append(int(float(line[5])))
# TW constraint 1 (open time)
t_open = np.asarray(t_open) # open time
sorted_index = np.argsort(t_open)[::-1] # sort cities by TW open constraint (reverse order)
tw_open = t_open[sorted_index]
tw_open = np.expand_dims(tw_open,axis=1)
tw_open_ = shrinkage*tw_open/tw_open[0]-1 # scale open time in [0,100[ (depot opens at -1)
# TW constraint 2 (close time) ############################### RESCALE ??
t_close = np.asarray(t_close)-1 ############################### depot ?
tw_close = t_close[sorted_index]
tw_close = np.expand_dims(tw_close,axis=1)
tw_close_ = shrinkage*tw_close/tw_open[0]-1 # scale close time
tw_close_[-1] = 0 # depot open till 0
# Coordinates
seq = np.stack((x,y),axis=1) # city integer coordinates in [0,100[ ############################### RESCALE ??
sequence = seq[sorted_index]
pca = PCA(n_components=self.dimension) # Principal Component Analysis to center & rotate coordinates
sequence_ = pca.fit_transform(sequence)
sequence_ = self.speed*shrinkage*sequence_/tw_open[0] # scale sequence
# TW feature 1 = Centered mean time (invariance)
tw_mean_ = (tw_open_+tw_close_)/2
tw_mean_ -= np.mean(tw_mean_)
# TW feature 2 = Width
tw_width_ = tw_close_-tw_open_
# Concatenate input (sorted by time) and scale to [0,1[
input_ = np.concatenate((sequence_,tw_mean_,tw_width_), axis=1)/100
# Gather solution
solution = open('benchmark/'+dir_+'/'+file_name+'.solution', 'r')
for i,line in enumerate(solution):
if i==0: opt_permutation = np.asarray(line.split()).astype(int)-1
if i==1: opt_length = int(line.split()[0])
opt_length = self.get_tour_length(seq[opt_permutation])/100
# Save data
dataset[file_name]={'input_': input_, 'sequence':sequence, 'tw_open':tw_open, 'tw_close':tw_close, 'optimal_sequence':seq[opt_permutation],
'optimal_tw_open':np.expand_dims(t_open[opt_permutation],axis=1), 'optimal_tw_close':np.expand_dims(t_close[opt_permutation],axis=1), 'optimal_length':opt_length}
return dataset
if __name__ == "__main__":
# Config
config, _ = get_config()
dataset = DataGenerator(config)
# Generate some data
#input_batch = dataset.train_batch()
input_batch, or_sequence, tw_open, tw_close = dataset.test_batch(seed=0)
print()
# Some print
#print('Input batch: \n',100*input_batch)
#print(np.rint(np.mean(100*input_batch,1)))
# 2D plot for coord batch
#dataset.visualize_2D_trip(or_sequence[::-1], tw_open[::-1], tw_close[::-1])
# Solve to optimality and plot solution
#or_permutation, or_tour_length, or_delivery_time = dataset.solve_instance(or_sequence, tw_open, tw_close)
#print('Solver tour length: \n', or_tour_length/100)
#print('Time var: \n', or_delivery_time)
#dataset.visualize_2D_trip(or_sequence[or_permutation], tw_open[or_permutation], tw_close[or_permutation])
#dataset.visualize_sampling([or_permutation])
dataset.load_Dumas()
|
MichelDeudon/neural-combinatorial-optimization-rl-tensorflow
|
Ptr_Net_TSPTW/dataset.py
|
Python
|
mit
| 14,639
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Integration tests around House Bill "content" scraping.
"""
from django.test import TestCase
from scraper.interpreters import HouseBillPageContentInterpreter
class HouseBillContentInterpreterTestCase(TestCase):
def setUp(self):
self.interpreter = HouseBillPageContentInterpreter(
url="https://raw.githubusercontent.com/access-missouri/am-scraper-test-mirror/master/house/bills/20190105-hb-26-bill-content.html") #noqa
def test_attributes_dictionary_captured(self):
self.assertEqual(len(self.interpreter.attributes), 7)
def test_access_actions(self):
self.assertGreater(len(self.interpreter.actions), 0)
|
access-missouri/am-django-project
|
am/scraper/tests/test_integration_house_bill_content.py
|
Python
|
bsd-2-clause
| 708
|
from rest_framework import permissions
class CreatorPermission(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
if request.method in permissions.SAFE_METHODS:
return True
return obj.creator == request.user
class CreatorOrRestaurantOwner(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
if request.method in permissions.SAFE_METHODS and \
(obj.creator == request.user or \
obj.restaurant.creator == request.user):
return True
else:
return obj.creator == request.user
|
MichaelCombs28/google-restaurant
|
server/restaurant/permissions.py
|
Python
|
mit
| 633
|
''' Version 1.000
Code provided by Daniel Jiwoong Im
Permission is granted for anyone to copy, use, modify, or distribute this
program and accompanying programs and documents for any purpose, provided
this copyright notice is retained and prominently displayed, along with
a note saying that the original programs are available from our
web page.
The programs and documents are distributed without any warranty, express or
implied. As the programs were written for research purposes only, they have
not been tested to the degree that would be advisable in any important
application. All use of these programs is entirely at the user's own risk.'''
'''Demo of Minimum Probability Flow learning method with one-bit flip
connectivities on Restricted Boltzmann Machines.
For more information, see :http://arxiv.org/abs/1412.6617
'''
import numpy as np
import timeit, pickle, sys
import theano
import theano.tensor as T
import os
import signal, sys
import matplotlib as mp
import matplotlib.pyplot as plt
from rbm_mpf import *
from mpf_optimizer import *
from utils import *
'''Train Restricted Boltzmann Machines'''
def train_rbm(train_data, valid_data, hyper_params, mpf_type='1bit'):
batch_sz, epsilon, lam, num_hid, N, Nv, D, num_epoches= hyper_params
hyper_params = [batch_sz, epsilon, lam]
numpy_rng = numpy.random.RandomState()
rbm = RBM_MPF(n_visible=D, n_hidden=num_hid, batch_sz=batch_sz, numpy_rng=numpy_rng, mpf_type=mpf_type)
trainer = MPF_optimizer(hyper_params)
num_batches = N / batch_sz
train_set = theano.shared(train_data)
valid_set = theano.shared(valid_data)
train_update, get_valid_cost = trainer.mpf_MBGD(rbm, train_set, valid_set, reg_type='l2')
start_mpf = timeit.default_timer()
for epoch in xrange(num_epoches+1):
tot_cost = []
thrd_epoch = get_thrd(epoch, num_epoches)
for ith_batch in xrange(num_batches):
ith_batch_cost = train_update(ith_batch)
tot_cost.append(ith_batch_cost)
if epoch % 10 == 0:
valid_cost = []
for j in xrange(Nv / 1000):
valid_cost_j = get_valid_cost(j)
valid_cost.append(valid_cost_j)
print 'Epoch %d, Train Cost %g, Valid Cost %g, Thrd %g'\
% (epoch, np.mean(np.asarray(tot_cost)), np.mean(np.asarray(valid_cost)), thrd_epoch)
stop_mpf = timeit.default_timer()
print '...Time it took to train rbm %f' % (stop_mpf-start_mpf)
print 'Batch size %d, lr %g, lam %g, num_hid %d, num_dim %d' % \
(batch_sz, epsilon, lam, num_hid, D)
return rbm
#Hyper-parameters
rbm_type='RBM'
batch_sz= 75
epsilon = 0.03
num_hid = 200
lam = 0.001
num_epoches = 150
if __name__ == '__main__':
data_path = '/mnt/data/datasets/mnist_binary.pkl'
print 'opening data'
f = open(data_path)
train_set, valid_set, test_set = pickle.load(f)
f.close()
N, D = train_set[0].shape
Nv = valid_set[0].shape[0]
hyper_params = [batch_sz, epsilon, lam, num_hid, N, Nv, D, num_epoches]
start_mpf = timeit.default_timer()
rbm = train_rbm(train_set[0], valid_set[0], hyper_params, mpf_type='1bit')
stop_mpf = timeit.default_timer()
X = train_set[0][0:16,:]
display_dataset(X, (28,28), (4,4), i=1)
XX = T.matrix('X'); K=T.iscalar('s')
v_samples, v_means, h_sample, updates = rbm.get_samples(XX,step=K)
gen_samples = theano.function([XX, K], v_samples, updates=updates)
samples = gen_samples(X,1)
display_dataset(samples, (28,28), (4,4), i=2)
samples = gen_samples(X,10)
display_dataset(samples, (28,28), (4,4), i=3)
samples = gen_samples(X,30)
display_dataset(samples, (28,28), (4,4), i=4)
samples = gen_samples(X,100)
display_dataset(samples, (28,28), (4,4), i=4)
plt.show()
|
jiwoongim/minimum_probability_flow_learning
|
mnist_1bit_mpf.py
|
Python
|
bsd-3-clause
| 3,868
|
from __future__ import division, print_function, absolute_import
import numbers
from numpy.random.mtrand import RandomState
import pandas
import numpy
from sklearn.utils import check_random_state
from ..utils import get_columns_dict, get_columns_in_df
# generating random seeds in the interval [0, RANDINT)
RANDINT = 10000000
class LabeledDataStorage(object):
"""
    This class implements the data interface for estimator training. It contains data, labels and
    weights - all the information needed to train a model.
Parameters:
-----------
    :param pandas.DataFrame data: data
:param target: labels for classification and values for regression (set None for predict methods)
:type target: None or numbers.Number or array-like
:param sample_weight: weight (set None for predict methods)
:type sample_weight: None or numbers.Number or array-like
:param random_state: for pseudo random generator
:type random_state: None or int or RandomState
:param bool shuffle: shuffle or not data
"""
def __init__(self, data, target=None, sample_weight=None, random_state=None, shuffle=False):
self.data = data
self.target = self._get_key(self.data, target)
self.sample_weight = self._get_key(self.data, sample_weight, allow_nones=True)
assert len(self.data) == len(self.target), 'ERROR: Lengths are different for data and target'
if self.sample_weight is not None:
assert len(self.data) == len(self.sample_weight), 'ERROR: Lengths are different for data and sample_weight'
self._random_state = check_random_state(random_state).randint(RANDINT)
self.shuffle = shuffle
self._indices = None
def _get_key(self, ds, key, allow_nones=False):
"""
Get data from ds by key
:param pandas.DataFrame ds: data
:param key: what data get from ds
:type key: None or numbers.Number or array-like
:return: key data
"""
if isinstance(key, str) and ds is not None:
# assert key in set(ds.columns), self._print_err('ERROR:', '%s is absent in data storage' % key)
name = list(get_columns_dict([key]).keys())[0]
return numpy.array(get_columns_in_df(self.data, key)[name])
elif isinstance(key, numbers.Number):
return numpy.array([key] * len(ds))
else:
if not allow_nones:
return numpy.array(key) if key is not None else numpy.ones(len(ds))
else:
return numpy.array(key) if key is not None else key
def __len__(self):
"""
:return: count of rows in storage
:rtype: int
"""
return len(self.data)
def get_data(self, features=None):
"""
Get data for estimator
:param features: set of feature names (if None then use all features in data storage)
:type features: None or list[str]
:rtype: pandas.DataFrame
"""
df = get_columns_in_df(self.data, features)
if self.shuffle:
return df.irow(self.get_indices())
return df
def get_targets(self):
"""
Get sample targets for estimator
:rtype: numpy.array
"""
if self.shuffle:
return self.target[self.get_indices()]
return self.target
def get_weights(self, allow_nones=False):
"""
Get sample weights for estimator
:rtype: numpy.array
"""
if self.sample_weight is None:
if allow_nones:
return self.sample_weight
else:
return numpy.ones(len(self.data))
else:
if self.shuffle:
return self.sample_weight[self.get_indices()]
return self.sample_weight
def get_indices(self):
"""
Get data indices
:rtype: numpy.array
"""
if self._indices is None:
rs = RandomState(seed=self._random_state)
self._indices = rs.permutation(len(self))
return self._indices
def col(self, index):
"""
Get necessary columns
:param index: names
:type index: None or str or list(str)
:rtype: pandas.Series or pandas.DataFrame
"""
if isinstance(index, str):
name = list(get_columns_dict([index]).keys())[0]
return self.get_data([index])[name]
return self.get_data(index)
def eval_column(self, expression):
"""
Evaluate some expression to get necessary data
:type expression: numbers.Number or array-like or str or function(pandas.DataFrame)
:rtype: numpy.array
"""
if isinstance(expression, numbers.Number):
return numpy.zeros(len(self), dtype=type(expression)) + expression
elif isinstance(expression, str):
return numpy.array(self.col(expression))
elif hasattr(expression, '__call__'):
return numpy.array(expression(self.get_data()))
else:
assert len(expression) == len(self), 'Different length'
return numpy.array(expression)
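# A minimal usage sketch (illustrative only). It assumes the column helpers in
# ..utils behave as their names suggest; the DataFrame and labels below are made up.
#   import pandas
#   df = pandas.DataFrame({'x1': [0.1, 0.4, 0.3], 'x2': [1.0, 0.5, 0.2]})
#   lds = LabeledDataStorage(df, target=[0, 1, 1], shuffle=False)
#   X = lds.get_data()      # pandas.DataFrame with all features
#   y = lds.get_targets()   # numpy.array([0, 1, 1])
#   w = lds.get_weights()   # numpy.ones(3), since no sample_weight was given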
|
Quadrocube/rep
|
rep/data/storage.py
|
Python
|
apache-2.0
| 5,170
|
from datetime import datetime
import numpy as np
import pandas as pd
import pytest
from datalore.display.supported_data_type import _standardize_dict
from datalore.display.supported_data_type import _standardize_value
@pytest.mark.parametrize('value, expected, result_type', [
(np.array([1, 2]), [1, 2], list),
(pd.Series([1, 2]), [1, 2], list),
(np.float64(0.25), 0.25, float),
(('a', 1), ('a', 1), tuple),
], ids=['np.array', 'pd.Series', 'np.float', 'tuple'])
def test_standardize_simple_values(value, expected, result_type):
check_standardization(value, expected, result_type)
def test_datetime_standardization():
value = datetime(2000, 1, 1)
expected = value.timestamp() * 1000
check_standardization(value, expected, float)
def check_standardization(value, expected, result_type):
standardized_np_array = _standardize_value(value)
assert standardized_np_array == expected
    assert type(standardized_np_array) == result_type  # we should check the exact type, not inheritance
class TestStandardizeDictionaries:
# noinspection PyAttributeOutsideInit
@pytest.fixture(autouse=True)
def setup(self):
self.dictionary = {'column': [1, 2, 3]}
def test_standardize_nested_df(self):
nested_df = {'a': pd.DataFrame(self.dictionary)}
standardized = _standardize_dict(nested_df)
assert isinstance(standardized, dict)
self.check_dictionary(standardized['a'])
def test_standardize_df(self):
df = pd.DataFrame(self.dictionary)
standardized = _standardize_dict(df)
self.check_dictionary(standardized)
def test_standardize_several_nested_objects(self):
nested_df = {
'a': self.dictionary.copy(),
'b': pd.Series([1, 2, 3])
}
standardized = _standardize_dict(nested_df)
assert type(standardized) == dict
self.check_dictionary(standardized['a'])
assert type(standardized['b']) == list
assert standardized['b'] == [1, 2, 3]
def check_dictionary(self, value):
assert value == self.dictionary
assert type(value) == dict
assert type(value['column']) == list
|
jwren/intellij-community
|
python/helpers/pycharm_display/tests/display/test_supported_data_type.py
|
Python
|
apache-2.0
| 2,191
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'openstackdocstheme'
]
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'tosca-parser'
copyright = '2013, OpenStack Foundation'
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/tosca-parser'
openstackdocs_use_storyboard = True
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'openstackdocs'
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index',
'%s.tex' % project,
'%s Documentation' % project,
'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}
|
openstack/tosca-parser
|
doc/source/conf.py
|
Python
|
apache-2.0
| 2,423
|
import numpy as np
from pyflux.arma import ARIMA
from pyflux.families import Exponential
data = np.random.exponential(3,200)
def a_test_no_terms():
"""
Tests an ARIMA model with no AR or MA terms, and that
the latent variable list length is correct, and that the estimated
latent variables are not nan
"""
model = ARIMA(data=data, ar=0, ma=0, family=Exponential())
x = model.fit()
assert(len(model.latent_variables.z_list) == 1)
lvs = np.array([i.value for i in model.latent_variables.z_list])
assert(len(lvs[np.isnan(lvs)]) == 0)
def a_test_couple_terms():
"""
Tests an ARIMA model with 1 AR and 1 MA term and that
the latent variable list length is correct, and that the estimated
latent variables are not nan
"""
model = ARIMA(data=data, ar=1, ma=1, family=Exponential())
x = model.fit()
assert(len(model.latent_variables.z_list) == 3)
lvs = np.array([i.value for i in model.latent_variables.z_list])
assert(len(lvs[np.isnan(lvs)]) == 0)
def a_test_bbvi():
"""
Tests an ARIMA model estimated with BBVI and that the length of the latent variable
list is correct, and that the estimated latent variables are not nan
"""
model = ARIMA(data=data, ar=1, ma=0, family=Exponential())
x = model.fit('BBVI', iterations=200)
assert(len(model.latent_variables.z_list) == 3)
lvs = np.array([i.value for i in model.latent_variables.z_list])
assert(len(lvs[np.isnan(lvs)]) == 0)
def a_test_bbvi_mini_batch():
"""
Tests an ARIMA model estimated with BBVI and that the length of the latent variable
list is correct, and that the estimated latent variables are not nan
"""
model = ARIMA(data=data, ar=1, ma=0, family=Exponential())
x = model.fit('BBVI', iterations=200, mini_batch=32)
assert(len(model.latent_variables.z_list) == 3)
lvs = np.array([i.value for i in model.latent_variables.z_list])
assert(len(lvs[np.isnan(lvs)]) == 0)
def a_test_bbvi_elbo():
"""
Tests that the ELBO increases
"""
model = ARIMA(data=data, ar=1, ma=0, family=Exponential())
x = model.fit('BBVI',iterations=200, record_elbo=True, map_start=False)
assert(x.elbo_records[-1]>x.elbo_records[0])
def a_test_bbvi_mini_batch_elbo():
"""
Tests that the ELBO increases
"""
model = ARIMA(data=data, ar=1, ma=1, family=Exponential())
x = model.fit('BBVI',iterations=100, mini_batch=32, record_elbo=True, map_start=False)
assert(x.elbo_records[-1]>x.elbo_records[0])
def a_test_mh():
"""
Tests an ARIMA model estimated with Metropolis-Hastings and that the length of the
latent variable list is correct, and that the estimated latent variables are not nan
"""
model = ARIMA(data=data, ar=1, ma=1, family=Exponential())
x = model.fit('M-H',nsims=300)
assert(len(model.latent_variables.z_list) == 3)
lvs = np.array([i.value for i in model.latent_variables.z_list])
assert(len(lvs[np.isnan(lvs)]) == 0)
def a_test_laplace():
"""
Tests an ARIMA model estimated with Laplace approximation and that the length of the
latent variable list is correct, and that the estimated latent variables are not nan
"""
model = ARIMA(data=data, ar=1, ma=1, family=Exponential())
x = model.fit('Laplace')
assert(len(model.latent_variables.z_list) == 3)
lvs = np.array([i.value for i in model.latent_variables.z_list])
assert(len(lvs[np.isnan(lvs)]) == 0)
def a_test_pml():
"""
Tests a PML model estimated with Laplace approximation and that the length of the
latent variable list is correct, and that the estimated latent variables are not nan
"""
model = ARIMA(data=data, ar=1, ma=1, family=Exponential())
x = model.fit('PML')
assert(len(model.latent_variables.z_list) == 3)
lvs = np.array([i.value for i in model.latent_variables.z_list])
assert(len(lvs[np.isnan(lvs)]) == 0)
def a_test_predict_length():
"""
Tests that the prediction dataframe length is equal to the number of steps h
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit()
assert(model.predict(h=5).shape[0] == 5)
def a_test_predict_is_length():
"""
Tests that the prediction IS dataframe length is equal to the number of steps h
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit()
assert(model.predict_is(h=5).shape[0] == 5)
def a_test_predict_nans():
"""
Tests that the predictions are not nans
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit()
assert(len(model.predict(h=5).values[np.isnan(model.predict(h=5).values)]) == 0)
def a_test_predict_is_nans():
"""
Tests that the in-sample predictions are not nans
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit()
assert(len(model.predict_is(h=5).values[np.isnan(model.predict_is(h=5).values)]) == 0)
def a_test_predict_nonconstant():
"""
We should not really have predictions that are constant (should be some difference)...
This captures bugs with the predict function not iterating forward
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit()
predictions = model.predict(h=10, intervals=False)
assert(not np.all(predictions.values==predictions.values[0]))
def a_test_predict_is_nonconstant():
"""
We should not really have predictions that are constant (should be some difference)...
This captures bugs with the predict function not iterating forward
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit()
predictions = model.predict_is(h=10, intervals=False)
assert(not np.all(predictions.values==predictions.values[0]))
def a_test_predict_intervals():
"""
Tests prediction intervals are ordered correctly
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit()
predictions = model.predict(h=10, intervals=True)
assert(np.all(predictions['99% Prediction Interval'].values > predictions['95% Prediction Interval'].values))
assert(np.all(predictions['95% Prediction Interval'].values > predictions['5% Prediction Interval'].values))
assert(np.all(predictions['5% Prediction Interval'].values > predictions['1% Prediction Interval'].values))
def a_test_predict_is_intervals():
"""
Tests prediction intervals are ordered correctly
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit()
predictions = model.predict_is(h=10, intervals=True)
assert(np.all(predictions['99% Prediction Interval'].values > predictions['95% Prediction Interval'].values))
assert(np.all(predictions['95% Prediction Interval'].values > predictions['5% Prediction Interval'].values))
assert(np.all(predictions['5% Prediction Interval'].values > predictions['1% Prediction Interval'].values))
def a_test_predict_intervals_bbvi():
"""
Tests prediction intervals are ordered correctly
"""
model = ARIMA(data=data, ar=1, ma=0, family=Exponential())
x = model.fit('BBVI', iterations=200)
predictions = model.predict(h=10, intervals=True)
assert(np.all(predictions['99% Prediction Interval'].values > predictions['95% Prediction Interval'].values))
assert(np.all(predictions['95% Prediction Interval'].values > predictions['5% Prediction Interval'].values))
assert(np.all(predictions['5% Prediction Interval'].values > predictions['1% Prediction Interval'].values))
def a_test_predict_is_intervals_bbvi():
"""
Tests prediction intervals are ordered correctly
"""
model = ARIMA(data=data, ar=1, ma=0, family=Exponential())
x = model.fit('BBVI', iterations=200)
predictions = model.predict_is(h=10, intervals=True)
assert(np.all(predictions['99% Prediction Interval'].values > predictions['95% Prediction Interval'].values))
assert(np.all(predictions['95% Prediction Interval'].values > predictions['5% Prediction Interval'].values))
assert(np.all(predictions['5% Prediction Interval'].values > predictions['1% Prediction Interval'].values))
def a_test_predict_intervals_mh():
"""
Tests prediction intervals are ordered correctly
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit('M-H', nsims=400)
predictions = model.predict(h=10, intervals=True)
assert(np.all(predictions['99% Prediction Interval'].values > predictions['95% Prediction Interval'].values))
assert(np.all(predictions['95% Prediction Interval'].values > predictions['5% Prediction Interval'].values))
assert(np.all(predictions['5% Prediction Interval'].values > predictions['1% Prediction Interval'].values))
def a_test_predict_is_intervals_mh():
"""
Tests prediction intervals are ordered correctly
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit('M-H', nsims=400)
predictions = model.predict_is(h=10, intervals=True)
assert(np.all(predictions['99% Prediction Interval'].values > predictions['95% Prediction Interval'].values))
assert(np.all(predictions['95% Prediction Interval'].values > predictions['5% Prediction Interval'].values))
assert(np.all(predictions['5% Prediction Interval'].values > predictions['1% Prediction Interval'].values))
def a_test_sample_model():
"""
Tests sampling function
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit('BBVI', iterations=100)
sample = model.sample(nsims=100)
assert(sample.shape[0]==100)
assert(sample.shape[1]==len(data)-2)
def a_test_ppc():
"""
Tests PPC value
"""
model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
x = model.fit('BBVI', iterations=100)
p_value = model.ppc(nsims=100)
assert(0.0 <= p_value <= 1.0)
|
RJT1990/pyflux
|
pyflux/arma/tests/test_arima_exponential.py
|
Python
|
bsd-3-clause
| 9,932
|
"""
This is a basic model to test saving and loading boolean and date-related
types, which in the past were problematic for some database backends.
"""
from django.db import models
from django.conf import settings
class Donut(models.Model):
name = models.CharField(max_length=100)
is_frosted = models.BooleanField(default=False)
has_sprinkles = models.NullBooleanField()
baked_date = models.DateField(null=True)
baked_time = models.TimeField(null=True)
consumed_at = models.DateTimeField(null=True)
review = models.TextField()
class Meta:
ordering = ('consumed_at',)
def __str__(self):
return self.name
__test__ = {'API_TESTS': """
# No donuts are in the system yet.
>>> Donut.objects.all()
[]
>>> d = Donut(name='Apple Fritter')
# Ensure we're getting True and False, not 0 and 1
>>> d.is_frosted
False
>>> d.has_sprinkles
>>> d.has_sprinkles = True
>>> d.has_sprinkles == True
True
>>> d.save()
>>> d2 = Donut.objects.all()[0]
>>> d2
<Donut: Apple Fritter>
>>> d2.is_frosted == False
True
>>> d2.has_sprinkles == True
True
>>> import datetime
>>> d2.baked_date = datetime.date(year=1938, month=6, day=4)
>>> d2.baked_time = datetime.time(hour=5, minute=30)
>>> d2.consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)
>>> d2.save()
>>> d3 = Donut.objects.all()[0]
>>> d3.baked_date
datetime.date(1938, 6, 4)
>>> d3.baked_time
datetime.time(5, 30)
>>> d3.consumed_at
datetime.datetime(2007, 4, 20, 16, 19, 59)
# Year boundary tests (ticket #3689)
>>> d = Donut(name='Date Test 2007', baked_date=datetime.datetime(year=2007, month=12, day=31), consumed_at=datetime.datetime(year=2007, month=12, day=31, hour=23, minute=59, second=59))
>>> d.save()
>>> d1 = Donut(name='Date Test 2006', baked_date=datetime.datetime(year=2006, month=1, day=1), consumed_at=datetime.datetime(year=2006, month=1, day=1))
>>> d1.save()
>>> Donut.objects.filter(baked_date__year=2007)
[<Donut: Date Test 2007>]
>>> Donut.objects.filter(baked_date__year=2006)
[<Donut: Date Test 2006>]
>>> Donut.objects.filter(consumed_at__year=2007).order_by('name')
[<Donut: Apple Fritter>, <Donut: Date Test 2007>]
>>> Donut.objects.filter(consumed_at__year=2006)
[<Donut: Date Test 2006>]
>>> Donut.objects.filter(consumed_at__year=2005)
[]
>>> Donut.objects.filter(consumed_at__year=2008)
[]
# Regression test for #10238: TextField values returned from the database
# should be unicode.
>>> d2 = Donut.objects.create(name=u'Jelly Donut', review=u'Outstanding')
>>> Donut.objects.get(id=d2.id).review
u'Outstanding'
"""}
# Regression test for #8354: the MySQL backend should raise an error if given
# a timezone-aware datetime object.
if settings.DATABASE_ENGINE == 'mysql':
__test__['API_TESTS'] += """
>>> from django.utils import tzinfo
>>> dt = datetime.datetime(2008, 8, 31, 16, 20, tzinfo=tzinfo.FixedOffset(0))
>>> d = Donut(name='Bear claw', consumed_at=dt)
>>> d.save()
Traceback (most recent call last):
....
ValueError: MySQL backend does not support timezone-aware datetimes.
"""
|
grangier/django-11599
|
tests/regressiontests/datatypes/models.py
|
Python
|
bsd-3-clause
| 3,077
|
# encoding: utf-8
from flask import redirect
from flask.views import MethodView
from project import Cache
class RedirectHandler(MethodView):
header = {'Content-Type': 'application/json; charset=UTF-8'}
def get(self, shortened=None):
original = Cache.redis.get("%s:original" % shortened)
if original:
Cache.redis.incr("%s:clicks" % shortened)
return redirect(original, 301)
else:
return('{"error": "User Does Not Exist"}', 404)
|
hugoantunes/shortURL
|
project/users/views/redirect.py
|
Python
|
mit
| 500
|
from dashmat.option_spec.module_imports import module_import_spec
from dashmat.formatter import MergedOptionStringFormatter
from dashmat.core_modules.base import Module
from dashmat.errors import UnknownModule
from input_algorithms.spec_base import boolean, string_spec, formatted, listof, overridden, or_spec, set_options
from input_algorithms.many_item_spec import many_item_formatted_spec
from input_algorithms.dictobj import dictobj
import six
class import_line_spec(many_item_formatted_spec):
value_name = "Import line"
specs = [listof(string_spec()), or_spec(string_spec(), set_options(import_path=string_spec()))]
optional_specs = [string_spec()]
def create_result(self, imports, module_name, import_from, meta, val, dividers):
"""Default permissions to rw"""
options = {"imports": imports, "module_name": module_name, "import_from": import_from}
return ImportLine.FieldSpec(formatter=MergedOptionStringFormatter).normalise(meta, options)
class ImportLine(dictobj.Spec):
module_name = dictobj.Field(
lambda: or_spec(string_spec(), set_options(import_path=module_import_spec(Module)))
, formatted = True
, help = "The name of the module this import comes from"
)
imports = dictobj.Field(
string_spec
, formatted = True
, wrapper = listof
, help = "The modules that are imported"
)
import_from = dictobj.Field(
string_spec
, formatted = True
, default = "main.jsx"
, help = "The module in our import_path to import the imports from"
)
def import_line(self, modules):
module_name = self.module_name
if type(module_name) is dict:
module_name = self.module_name['import_path']
if isinstance(module_name, six.string_types):
if module_name not in modules:
raise UnknownModule(module=module_name, available=list(modules.keys()))
module = modules[module_name]
else:
module = module_name
if type(module) is type:
import_path = "{0}:{1}".format(module.module_path, module.__name__)
module = module(import_path, import_path)
imports = "{{{0}}}".format(", ".join(self.imports))
relative_to = module.relative_to
return 'import {0} from "/modules/{1}/{2}"'.format(imports, relative_to, self.import_from)
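# For illustration: with imports=["App", "Dashboard"], a module whose relative_to
# is "my_module" and the default import_from, the method above returns
#   import {App, Dashboard} from "/modules/my_module/main.jsx"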
|
realestate-com-au/dashmat
|
dashmat/option_spec/import_line.py
|
Python
|
mit
| 2,434
|
"""The tests for the integration sensor platform."""
from datetime import timedelta
from unittest.mock import patch
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
async def test_state(hass):
"""Test integration sensor state."""
config = {
'sensor': {
'platform': 'integration',
'name': 'integration',
'source': 'sensor.power',
'unit': 'kWh',
'round': 2,
}
}
assert await async_setup_component(hass, 'sensor', config)
entity_id = config['sensor']['source']
hass.states.async_set(entity_id, 1, {})
await hass.async_block_till_done()
now = dt_util.utcnow() + timedelta(seconds=3600)
with patch('homeassistant.util.dt.utcnow',
return_value=now):
hass.states.async_set(entity_id, 1, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get('sensor.integration')
assert state is not None
    # Testing a power sensor at 1 kilowatt for 1 hour = 1 kWh
assert round(float(state.state), config['sensor']['round']) == 1.0
assert state.attributes.get('unit_of_measurement') == 'kWh'
async def test_trapezoidal(hass):
"""Test integration sensor state."""
config = {
'sensor': {
'platform': 'integration',
'name': 'integration',
'source': 'sensor.power',
'unit': 'kWh',
'round': 2,
}
}
assert await async_setup_component(hass, 'sensor', config)
entity_id = config['sensor']['source']
hass.states.async_set(entity_id, 0, {})
await hass.async_block_till_done()
# Testing a power sensor with non-monotonic intervals and values
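    # Expected: trapezoidal rule over the segments gives
    # (0+10)/2*20 + (10+30)/2*10 + (30+5)/2*10 + (5+0)/2*10 = 500 unit*min = 500/60 h ~ 8.33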
for time, value in [(20, 10), (30, 30), (40, 5), (50, 0)]:
now = dt_util.utcnow() + timedelta(minutes=time)
with patch('homeassistant.util.dt.utcnow',
return_value=now):
hass.states.async_set(entity_id, value, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get('sensor.integration')
assert state is not None
assert round(float(state.state), config['sensor']['round']) == 8.33
assert state.attributes.get('unit_of_measurement') == 'kWh'
async def test_left(hass):
"""Test integration sensor state with left reimann method."""
config = {
'sensor': {
'platform': 'integration',
'name': 'integration',
'method': 'left',
'source': 'sensor.power',
'unit': 'kWh',
'round': 2,
}
}
assert await async_setup_component(hass, 'sensor', config)
entity_id = config['sensor']['source']
hass.states.async_set(entity_id, 0, {})
await hass.async_block_till_done()
# Testing a power sensor with non-monotonic intervals and values
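    # Expected: left Riemann sum 0*20 + 10*10 + 30*10 + 5*10 = 450 unit*min = 7.5 unit*h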
for time, value in [(20, 10), (30, 30), (40, 5), (50, 0)]:
now = dt_util.utcnow() + timedelta(minutes=time)
with patch('homeassistant.util.dt.utcnow',
return_value=now):
hass.states.async_set(entity_id, value, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get('sensor.integration')
assert state is not None
assert round(float(state.state), config['sensor']['round']) == 7.5
assert state.attributes.get('unit_of_measurement') == 'kWh'
async def test_right(hass):
"""Test integration sensor state with left reimann method."""
config = {
'sensor': {
'platform': 'integration',
'name': 'integration',
'method': 'right',
'source': 'sensor.power',
'unit': 'kWh',
'round': 2,
}
}
assert await async_setup_component(hass, 'sensor', config)
entity_id = config['sensor']['source']
hass.states.async_set(entity_id, 0, {})
await hass.async_block_till_done()
# Testing a power sensor with non-monotonic intervals and values
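    # Expected: right Riemann sum 10*20 + 30*10 + 5*10 + 0*10 = 550 unit*min ~ 9.17 unit*h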
for time, value in [(20, 10), (30, 30), (40, 5), (50, 0)]:
now = dt_util.utcnow() + timedelta(minutes=time)
with patch('homeassistant.util.dt.utcnow',
return_value=now):
hass.states.async_set(entity_id, value, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get('sensor.integration')
assert state is not None
assert round(float(state.state), config['sensor']['round']) == 9.17
assert state.attributes.get('unit_of_measurement') == 'kWh'
async def test_prefix(hass):
"""Test integration sensor state using a power source."""
config = {
'sensor': {
'platform': 'integration',
'name': 'integration',
'source': 'sensor.power',
'round': 2,
'unit_prefix': 'k'
}
}
assert await async_setup_component(hass, 'sensor', config)
entity_id = config['sensor']['source']
hass.states.async_set(entity_id, 1000, {'unit_of_measurement': 'W'})
await hass.async_block_till_done()
now = dt_util.utcnow() + timedelta(seconds=3600)
with patch('homeassistant.util.dt.utcnow',
return_value=now):
hass.states.async_set(entity_id, 1000, {'unit_of_measurement': 'W'},
force_update=True)
await hass.async_block_till_done()
state = hass.states.get('sensor.integration')
assert state is not None
    # Testing a power sensor at 1000 watts for 1 hour = 1 kWh
assert round(float(state.state), config['sensor']['round']) == 1.0
assert state.attributes.get('unit_of_measurement') == 'kWh'
async def test_suffix(hass):
"""Test integration sensor state using a network counter source."""
config = {
'sensor': {
'platform': 'integration',
'name': 'integration',
'source': 'sensor.bytes_per_second',
'round': 2,
'unit_prefix': 'k',
'unit_time': 's'
}
}
assert await async_setup_component(hass, 'sensor', config)
entity_id = config['sensor']['source']
hass.states.async_set(entity_id, 1000, {})
await hass.async_block_till_done()
now = dt_util.utcnow() + timedelta(seconds=10)
with patch('homeassistant.util.dt.utcnow',
return_value=now):
hass.states.async_set(entity_id, 1000, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get('sensor.integration')
assert state is not None
# Testing a network speed sensor at 1000 bytes/s over 10s = 10kbytes
assert round(float(state.state), config['sensor']['round']) == 10.0
|
HydrelioxGitHub/home-assistant
|
tests/components/sensor/test_integration.py
|
Python
|
apache-2.0
| 6,750
|
import tinctest
class CardinalitySmokeTests(tinctest.TINCTestCase):
def test_smoke_cardinality1(self):
pass
def test_smoke_cardinality2(self):
pass
|
lintzc/gpdb
|
src/test/tinc/tinctest/test/discovery/mockquery/cardinality/test_smoke_cardinality.py
|
Python
|
apache-2.0
| 175
|
# -*- coding: utf-8 -*-
"""
Metocean scatter diagram for Åsgard given as joint Hs Tp probability.
TODO:
- implement joint CDF(Hs, Tp)
need to think how to use this in practice
- read weather window, return operability
Created on Fri 2 Feb 2018 14:53:20
@author: rarossi
"""
from scipy import stats as ss
import numpy as np
import scipy as sp
import scipy.integrate  # needed so sp.integrate.quad below resolves
from math import pi, sqrt, exp, log
import matplotlib.pyplot as plt
# #####################################################################
# Åsgard metocean
# annual omni direction joint distribution Hs, Tp
gamma, theta, eta, zeta, nu = 1.35, 2.513, 4.691, 0.563, 0.818
a1, a2, a3, b1, b2, b3 = 1.713, 0.396, 0.39, 0.005, 0.086, 0.28
# #####################################################################
def f_Hs(hs):
"""Probability density function of Hs."""
if hs < 1e-3:
return 0
elif hs <= eta:
return 1/(hs*zeta*sqrt(2*pi))*exp(-((log(hs)-nu)**2)/(2*zeta**2))
else:
return gamma/theta*(hs/theta)**(gamma-1)*exp(-(hs/theta)**gamma)
def F_Hs(hs):
"""Cumulative probability function of Hs"""
return sp.integrate.quad(f_Hs, 0, hs)[0]
def f_Tp_Hs(tp, hs):
"""Probability density function of Tp conditioned to Hs."""
mi = a1 + a2 * hs**a3
sigma = sqrt(b1 + b2 * exp(-b3*hs))
return 1/(tp*sigma*sqrt(2*pi))*exp(-((log(tp)-mi)**2)/(2*sigma**2))
def f_HsTp(hs, tp):
"""Joint probability density function for Hs and Tp."""
return f_Hs(hs) * f_Tp_Hs(tp, hs)
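# A sketch of the joint CDF flagged in the TODO above: F(hs, tp) is the double
# integral of the joint density f_HsTp over [0, hs] x [0, tp]. Brute-force
# quadrature is slow, but adequate for spot-checking single (hs, tp) pairs.
def F_HsTp(hs, tp):
    """Joint cumulative probability function for Hs and Tp (numerical)."""
    # dblquad integrates the first argument of the integrand (here tp) innermost
    return sp.integrate.dblquad(lambda t, h: f_HsTp(h, t),
                                0, hs,            # outer integral over hs
                                lambda h: 0,      # inner lower bound for tp
                                lambda h: tp)[0]  # inner upper bound for tp
# e.g. F_HsTp(2.5, 8.0) ~ P(Hs <= 2.5 m and Tp <= 8.0 s)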
# plot PDF of Hs
x = np.linspace(0.1, 10, 100)
y = np.array([f_Hs(hs) for hs in x])
plt.plot(x, y)
plt.title('PDF of Hs')
plt.show()
# plot PDF of Tp for a few Hs
x = np.linspace(3, 20, 100)
for hs in [1.5, 2.5, 3.5]:
plt.plot(x, np.array([f_Tp_Hs(tp, hs) for tp in x]), label='%.1f' % hs)
plt.legend()
plt.title('PDF of Tp for some Hs')
plt.show()
|
haphaeu/yoshimi
|
metocean.py
|
Python
|
lgpl-3.0
| 1,856
|
############################################################################
# Copyright (C) Internet Systems Consortium, Inc. ("ISC")
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at https://mozilla.org/MPL/2.0/.
#
# See the COPYRIGHT file distributed with this work for additional
# information regarding copyright ownership.
############################################################################
# flake8: noqa: E501
from typing import List, Tuple
from docutils import nodes
from docutils.nodes import Node, system_message
from docutils.parsers.rst import roles
from sphinx import addnodes
from sphinx.util.docutils import ReferenceRole
GITLAB_BASE_URL = 'https://gitlab.isc.org/isc-projects/bind9/-/'
# Custom Sphinx role enabling automatic hyperlinking to GitLab issues/MRs.
class GitLabRefRole(ReferenceRole):
def __init__(self, base_url: str) -> None:
self.base_url = base_url
super().__init__()
def run(self) -> Tuple[List[Node], List[system_message]]:
gl_identifier = '[GL %s]' % self.target
target_id = 'index-%s' % self.env.new_serialno('index')
entries = [('single', 'GitLab; ' + gl_identifier, target_id, '', None)]
index = addnodes.index(entries=entries)
target = nodes.target('', '', ids=[target_id])
self.inliner.document.note_explicit_target(target)
try:
refuri = self.build_uri()
reference = nodes.reference('', '', internal=False, refuri=refuri,
classes=['gl'])
if self.has_explicit_title:
reference += nodes.strong(self.title, self.title)
else:
reference += nodes.strong(gl_identifier, gl_identifier)
except ValueError:
error_text = 'invalid GitLab identifier %s' % self.target
msg = self.inliner.reporter.error(error_text, line=self.lineno)
prb = self.inliner.problematic(self.rawtext, self.rawtext, msg)
return [prb], [msg]
return [index, target, reference], []
def build_uri(self):
if self.target[0] == '#':
return self.base_url + 'issues/%d' % int(self.target[1:])
if self.target[0] == '!':
return self.base_url + 'merge_requests/%d' % int(self.target[1:])
raise ValueError
def setup(_):
roles.register_local_role('gl', GitLabRefRole(GITLAB_BASE_URL))
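# Example usage of the role registered above, in any .rst source built with this
# configuration (the identifiers are illustrative):
#   :gl:`#1234`                -> https://gitlab.isc.org/isc-projects/bind9/-/issues/1234
#   :gl:`!567`                 -> https://gitlab.isc.org/isc-projects/bind9/-/merge_requests/567
#   :gl:`custom title <#1234>` -> the same issue link, rendered with the explicit title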
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'BIND 9 管理员参考手册'
copyright = u'2021, Internet Systems Consortium'
author = u"Internet Systems Consortium \\and 翻译: sunguonian@yahoo.com"
# The full version, including alpha/beta/rc tags
release = 'BIND 9.16.16(稳定版)'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'zh_CN'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [
'_build',
'Thumbs.db',
'.DS_Store',
'*.grammar.rst',
'*.zoneopts.rst',
'catz.rst',
'dlz.rst',
'dnssec.rst',
'dyndb.rst',
'logging-cattegories.rst',
'managed-keys.rst',
'pkcs11.rst',
'plugins.rst'
]
# The master toctree document.
master_doc = 'index'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
latex_engine = 'xelatex'
latex_elements = {
'fontpkg': r'''
\setmainfont{Source Han Serif CN:style=Regular}
\setsansfont{Source Han Sans CN Medium:style=Medium,Regular}
\setmonofont{Source Han Sans CN:style=Regular}
\setCJKfamilyfont{song}{Source Han Serif CN:style=Regular}
\setCJKfamilyfont{heiti}{Source Han Sans CN:style=Regular}
''',
'pointsize': '11pt',
'preamble': r'\input{../mystyle.tex.txt}'
}
latex_documents = [
(master_doc, 'Bv9ARM.tex', u'BIND 9管理员参考手册', author, 'manual'),
]
latex_logo = "isc-logo.pdf"
|
perlang/bv9arm-chinese
|
branches/9.16.16/arm/conf.py
|
Python
|
mpl-2.0
| 5,717
|
import os
import sys
from distutils.core import setup
from distutils.sysconfig import get_python_lib
VERSION = '0.4'
# Warn if we are installing over top of an existing installation. This can
# cause issues where files that were deleted from a more recent Django are
# still present in site-packages. See #18115.
overlay_warning = False
if "install" in sys.argv:
lib_paths = [get_python_lib()]
if lib_paths[0].startswith("/usr/lib/"):
# We have to try also with an explicit prefix of /usr/local in order to
# catch Debian's custom user site-packages directory.
lib_paths.append(get_python_lib(prefix="/usr/local"))
for lib_path in lib_paths:
existing_path = os.path.abspath(os.path.join(lib_path, "djpikaday"))
if os.path.exists(existing_path):
# We note the need for the warning here, but present it after the
# command is run, so it's more likely to be seen.
overlay_warning = True
break
def fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join)
in a platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
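# e.g. fullsplit('a/b/c') -> ['a', 'b', 'c'] (illustrative; the inverse of os.path.join)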
EXCLUDE_FROM_PACKAGES = []
def is_package(package_name):
for pkg in EXCLUDE_FROM_PACKAGES:
if package_name.startswith(pkg):
return False
return True
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, package_data = [], {}
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
django_dir = 'djpikaday'
for dirpath, dirnames, filenames in os.walk(django_dir):
# Ignore PEP 3147 cache dirs and those whose names start with '.'
dirnames[:] = [d for d in dirnames if not d.startswith('.') and d != '__pycache__']
parts = fullsplit(dirpath)
package_name = '.'.join(parts)
if '__init__.py' in filenames and is_package(package_name):
packages.append(package_name)
elif filenames:
relative_path = []
while '.'.join(parts) not in packages:
relative_path.append(parts.pop())
relative_path.reverse()
path = os.path.join(*relative_path)
package_files = package_data.setdefault('.'.join(parts), [])
package_files.extend([os.path.join(path, f) for f in filenames])
setup(
name='django-pikaday',
version=VERSION,
url='http://github.com/caioariede/django-pikaday',
author='Caio Ariede',
author_email='caio.ariede@gmail.com',
description='A simple app that provides easy integration with the Pikaday Datepicker',
license='MIT',
packages=packages,
package_data=package_data,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: JavaScript",
"Framework :: Django",
"Topic :: Utilities",
],
)
if overlay_warning:
sys.stderr.write("""
========
WARNING!
========
You have just installed django-pikaday over top of an existing
installation, without removing it first. Because of this,
your install may now include extraneous files from a
previous version that have since been removed from
django-pikaday. This is known to cause a variety of problems. You
should manually remove the
%(existing_path)s
directory and re-install django-pikaday.
""" % {"existing_path": existing_path})
|
caioariede/django-pikaday
|
setup.py
|
Python
|
mit
| 3,816
|
import numpy as np
from copy import deepcopy
from tools.belief_momdp import MOMDPBelief
import math
import itertools
#################################################################
# Implements the Rock Sample POMDP problem
#################################################################
class RockSamplePOMDP():
# constructor
def __init__(self,
                 xs=7, # size of grid x dim
                 ys=7, # size of grid y dim
rocks={(2,4):False, (3,4):True, (5,5):False, # (2,0):False, (0,1):True, (3,1):False, (6,3):True,
(1,6):True},
seed=1, # random seed
rbad=-10.0, rgood=10.0, rexit=10.0, rbump=-100.0, # reward values
d0=20, # quality of rover observation,
h_conf=0.5, # confidence level before moving in heuristic policy
discount=0.99):
self.random_state = np.random.RandomState(seed) # used for sampling
self.discount = discount
        self.xs = xs - 1 # x-size of the grid
        self.ys = ys - 1 # y-size of the grid
self.rocks = rocks # dictionary mapping rock positions to their types (x,y) => good or bad
self.rock_pos = [k for k in sorted(rocks.keys())]
self.rock_types = [rocks[k] for k in sorted(rocks.keys())]
self.rock_map = {(k):i for (i, k) in enumerate(sorted(rocks.keys()))}
k = len(rocks)
self.k = k # number of rocks
self.rbad = rbad
self.rgood = rgood
self.rbump = rbump
self.rexit = rexit
# states: state is represented by the rover position and the rock types
self.rover_states = [(j,i) for i in range(xs) for j in range(ys)] # fully observable vars
rs = itertools.product(*(xrange(2) for i in xrange(k)))
self.rock_states = [[bool(j) for j in i] for i in rs]
self.n_rock_states = len(self.rock_states)
self.n_rover_states = len(self.rover_states)
# actions: total of 5+k
self.ractions = [0, # move left
1, # move right
2, # move up
3, # move down
4] # sample
for i in range(k):
self.ractions.append(5+i) # sample rock i
# observations
self.robs = [0, # none
1, # good
2] # bad
# pre-allocate state variables
self.rover_state = np.zeros(2) # rover (x,y) position
self.rock_state = np.zeros(k, dtype=np.bool) # (good, bad) type for each rock
self.d0 = d0
self.h_conf = h_conf
self.action_vectors = [[-1, 0], [1, 0], [0, 1], [0, -1]]
# belief and observation dimensions
self.xdims = 2
self.odims = 1
#################################################################
# Setters
#################################################################
def set_discount(self, d):
self.discount = d
def set_rewards(self, rs, rg, rb, re, rm):
self.rsample = rs
self.rgood = rg
self.rbad = rb
self.rexit = re
#################################################################
# S, A, O Spaces
#################################################################
def fully_obs_states(self):
return self.rover_states
def partially_obs_states(self):
return self.rock_states
def actions(self):
return self.ractions
def observations(self):
return self.robs
#################################################################
# Reward Function
#################################################################
def reward(self, x, y, a):
# Rewarded:
# sampling good or bad rocks
# exiting the map
# trying to move off the grid
rocks = self.rocks
xpos, ypos = x
# if in terminal state, no reward
if self.isterminal(x, y):
return 0.0
# if exit get exit reward
if a == 1 and xpos == self.xs:
return self.rexit
# if trying to move off the grid
if (a == 0 and xpos == 0) or (a == 2 and ypos == self.ys) or (a == 3 and ypos == 0):
return self.rbump
# if trying to sample
if a == 4:
# if in a space with a rock
if x in rocks:
# if rock is good
if rocks[x]:
return self.rgood
# if rock is bad
else:
return self.rbad
return 0.0
#################################################################
# Distribution Functions
#################################################################
    # rover moves deterministically: distribution is just the position of the rover
def fully_obs_transition(self, x, y, a, dist):
xpos = x[0]
ypos = x[1]
# going left
if a == 0 and xpos > 0:
xpos -= 1
# going right
elif a == 1 and xpos < (self.xs+1):
xpos += 1
# going up
elif a == 2 and ypos < self.ys:
ypos += 1
# going down
elif a == 3 and ypos > 0:
ypos -= 1
dist[0] = xpos
dist[1] = ypos
return dist
# the positions of rocks don't change, good rocks turn bad after sampling
def partially_obs_transition(self, x, y, a, dist):
# fill the distribution with our y var
for i in xrange(len(y)):
dist[i] = y[i]
# if a rock is sampled it becomes bad
if a == 4:
rocks = self.rocks
# if we are on a rock state change type to bad
if x in rocks:
ri = self.rock_map[x]
self.rock_types[ri] = False
rocks[x] = False
dist[ri] = False
return dist
    # sample the transition distribution
def sample_fully_obs_state(self, d):
# deterministic transition
return (d[0], d[1])
def sample_partially_obs_state(self, d):
# rock states do not change
return d
    # returns the observation distribution of o given (x, y, a)
def observation(self, x, y, a, dist):
prob = 0.0
# if the action checks a rock
if self.is_check_action(a):
xpos = x[0]
ypos = x[1]
ri = self.act2rock(a) # rock index
rock_pos = self.rock_pos[ri] # rock position
rock_type = y[ri] # rock type
r = math.sqrt((xpos - rock_pos[0])**2 + (ypos - rock_pos[1])**2)
eta = math.exp(-r/self.d0)
p_correct = 0.5 + 0.5 * eta # probability of correct measure
dist.fill(0.0)
# if rock is good
if rock_type == True:
dist[1] = p_correct
dist[2] = 1.0 - p_correct
# rock is bad
else:
dist[1] = 1 - p_correct
dist[2] = p_correct
else:
dist.fill(0.0)
dist[0] = 1.0
return dist
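    # Note on the observation model above: eta = exp(-r / d0) decays with the
    # rover-to-rock distance r, so checking a rock from its own cell (r = 0) is
    # read correctly with probability 1.0, while a very distant rock (r >> d0)
    # gives p_correct -> 0.5, i.e. an uninformative coin flip.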
    # sample the observation distribution
def sample_observation(self, d):
oidx = self.categorical(d)
return self.robs[oidx]
def fully_obs_transition_pdf(self, d, x):
if d[0] == x[0] and d[1] == x[1]:
return 1.0
else:
return 0.0
# only single rock configuration, always return 1
def partially_obs_transition_pdf(self, d, y):
if y == d:
return 1.0
else:
return 0.0
# pdf for observation prob
def observation_pdf(self, d, dval):
        assert dval < 3, "Attempting to retrieve pdf value larger than observation size"
return d[dval]
# numpy categorical sampling hack
def categorical(self, d):
return np.flatnonzero( self.random_state.multinomial(1,d,1) )[0]
#################################################################
# Create functions
#################################################################
def create_fully_obs_transition_distribution(self):
td = np.array([0,0]) # position of rover
return td
def create_partially_obs_transition_distribution(self):
return deepcopy(self.rock_types)
def create_observation_distribution(self):
od = np.zeros(3) + 1.0/3 # none, good, bad
return od
def create_belief(self):
return MOMDPBelief(self.n_rock_states)
def initial_belief(self):
return MOMDPBelief(self.n_rock_states)
def initial_fully_obs_state(self):
# returns a (0, y) tuple
return (0, self.random_state.randint(self.xs+1))
def initial_partially_obs_state(self):
for (i, k) in enumerate(sorted(self.rocks.keys())):
t = bool(self.random_state.randint(2))
self.rock_types[i] = t
self.rocks[k] = t
return deepcopy(self.rock_types)
#################################################################
# Misc Functions
#################################################################
def isterminal(self, x, y):
xpos, ypos = x
if xpos > self.xs:
return True
return False
def index2action(self, ai):
return ai
def is_check_action(self, a):
return True if a > 4 else False
def act2rock(self, a):
return a - 5
def n_xstates(self):
return len(self.rover_states)
def n_ystates(self):
return len(self.rock_states)
def n_actions(self):
return len(self.ractions)
def n_obsevations(self):
return 2
#################################################################
# Policies
#################################################################
def heuristic_policy(self, sc):
# takes in a screen shot, [x, b] array
x = (sc[0], sc[1]) # x and y pos
b = np.array(sc[2:]) # belief
return self.heuristic(x, b)
def heuristic(self, x, b):
# if we are not confident, keep checking randomly
if b.max() < self.h_conf:
return self.random_state.randint(5, 5+self.k)
else:
ri = b.argmax() # index of highest confidence rock state
y = self.rock_states[ri] # rock state
# find closest good rock
c = float('inf')
ci = -1
for (i, t) in enumerate(y):
# if rock is good
if t:
# if on the rock sample
if x == self.rock_pos[i]:
return 4
xrover = x[0]
yrover = x[1]
xrock, yrock = self.rock_pos[i]
dist = math.sqrt((xrock-xrover)**2 + (yrock-yrover)**2)
if dist < c:
c = dist
ci = i
if ci > -1:
return self.move_to(x, self.rock_pos[ci])
# if no good rocks left move right
return 1
# action to move rover from origin o to target t
def move_to(self, o, t):
# vector components
v = [t[0] - o[0], t[1] - o[1]]
sa = float('inf')
ai = 1
# move in the direction that minimizes angle between action and target
for (i, a) in enumerate(self.action_vectors):
ang = angle(v, a)
if ang < sa:
sa = ang
ai = i
return ai
def dotproduct(v1, v2):
return sum((a*b) for a, b in zip(v1, v2))
def length(v):
return math.sqrt(dotproduct(v, v))
def angle(v1, v2):
return math.acos(dotproduct(v1, v2) / (length(v1) * length(v2)))
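# A minimal interaction sketch (illustrative only; requires the MOMDPBelief
# import above to resolve, and the values shown are just examples):
#   pomdp = RockSamplePOMDP()
#   x = pomdp.initial_fully_obs_state()        # e.g. (0, 3): rover starts in column 0
#   y = pomdp.initial_partially_obs_state()    # random good/bad type per rock
#   b = np.ones(pomdp.n_rock_states) / pomdp.n_rock_states  # uniform belief
#   a = pomdp.heuristic(x, b)                  # low confidence -> a check action (>= 5)
#   r = pomdp.reward(x, y, a)                  # 0.0 for a check action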
|
sisl/Chimp
|
chimp/simulators/pomdp/models/rock_sample.py
|
Python
|
apache-2.0
| 11,876
|
"""Manages logic and models related to homepage and CCExtractor data."""
|
canihavesomecoffee/sample-platform
|
mod_home/__init__.py
|
Python
|
isc
| 73
|
import os
from setuptools import setup, find_packages
import stalker_pyramid
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGELOG')).read()
requires = [
'pyramid>=1.4',
'transaction',
'pyramid_tm',
'pyramid_beaker',
'pyramid_debugtoolbar',
'pyramid_mailer',
'zope.sqlalchemy',
'waitress',
'jinja2',
'pyramid_jinja2',
'pillow',
'stalker>=0.2.2', 'webtest', 'mocker'
]
setup(name='stalker_pyramid',
version=stalker_pyramid.__version__,
description='Stalker (ProdAM) Based Web App',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)",
"Operating System :: OS Independent",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"Topic :: Database",
"Topic :: Software Development",
"Topic :: Utilities",
"Topic :: Office/Business :: Scheduling",
],
author='Erkan Ozgur Yilmaz',
author_email='eoyilmaz@gmail.com',
url='http://code.google.com/p/stalker_pyramid/',
keywords=['web', 'wsgi', 'bfg', 'pylons', 'pyramid', 'production',
'asset', 'management', 'vfx', 'animation', 'houdini', 'nuke',
'fusion', 'xsi', 'blender', 'vue'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='stalker',
install_requires=requires,
entry_points="""\
[paste.app_factory]
main = stalker_pyramid:main
[console_scripts]
initialize_stalker_pyramid_db = stalker_pyramid.scripts.initializedb:main
""",
)
|
iyyed/stalker-pyramid
|
setup.py
|
Python
|
gpl-3.0
| 1,995
|
from gensim.interfaces import TransformedCorpus
from safire.data.loaders import IndexLoader
from safire.utils.transformers import SimilarityTransformer
__author__ = 'Jan Hajic jr'
import unittest
from test.safire_test_case import SafireTestCase
class TestSimilarityTransformer(SafireTestCase):
@classmethod
def setUpClass(cls, clean_only=False, no_datasets=False):
super(TestSimilarityTransformer, cls).setUpClass(clean_only,
no_datasets)
cls.iloader = IndexLoader(cls.data_root, 'test-data')
def setUp(self):
self.corpus = self.loader.get_default_image_corpus()
def test_apply(self):
self.transformer = SimilarityTransformer(self.corpus,
self.iloader.output_prefix())
self.index_corpus = self.transformer[self.corpus]
self.assertIsInstance(self.index_corpus, TransformedCorpus)
        print(next(iter(self.index_corpus)))
if __name__ == '__main__':
suite = unittest.TestSuite()
loader = unittest.TestLoader()
tests = loader.loadTestsFromTestCase(TestSimilarityTransformer)
suite.addTest(tests)
runner = unittest.TextTestRunner()
runner.run(suite)
|
hajicj/safire
|
test/test_similarity_transformer.py
|
Python
|
gpl-3.0
| 1,257
|
# coding=utf-8
from simple_ars import search_object
__authors__ = 'Manolis Tsoukalas'
__date__ = '2017-1-3'
__version__ = '0.9.2'
"""
extraction functionalities
"""
def ars_list(response_data, search_json):
"""
method for extracted data in a list format.
this method is ideal if you want to extract the retrieved data
in csv format or to import them in data tables.
:param response_data: the data you want to extract
    :param search_json: the search parameters in format {"from": ["select"]}
:return: the extracted data in list format.
"""
sub_keys = False
if isinstance(search_json, dict):
search = search_object.SearchObject(search_json)
_from = search.src_from
_select = search.src_select
for select in _select:
if isinstance(select, dict):
sub_keys = True
else:
_from = search_json
_select = []
if sub_keys:
if isinstance(response_data, dict):
list_data = []
for key, value in response_data.items():
if key == _from:
if isinstance(value, dict):
for select in _select:
retrieved_data = ars_list(value, select)
if isinstance(retrieved_data, list):
list_data = retrieved_data
else:
list_data.append(retrieved_data)
return list_data
elif isinstance(value, list):
list_data = []
for element in value:
sub_data = {}
for select in _select:
retrieved_data = ars_list(element, select)
if isinstance(retrieved_data, list):
for i in retrieved_data:
sub_data.update(i)
else:
sub_data.update(retrieved_data)
list_data.append(sub_data)
return list_data
elif _from == '~':
for select in _select:
if isinstance(select, dict):
retrieved_data = ars_list(response_data, select)
list_data.append(retrieved_data)
else:
list_data.append({select: response_data.get(select)})
return list_data
elif isinstance(response_data, list):
list_data = []
for items in response_data:
sub_data = {}
for select in _select:
if isinstance(select, dict):
retrieved_data = ars_list(items, select)
if retrieved_data:
if isinstance(retrieved_data, list):
sub_data.update(*retrieved_data)
else:
sub_data.update(retrieved_data)
else:
sub_data.update({select: items.get(select)})
list_data.append(sub_data)
return list_data
else:
if isinstance(response_data, dict):
if response_data.get(_from):
if isinstance(response_data[_from], list):
sub_data = response_data[_from]
return [{_from: {key: items.get(key) for key in _select}} for items in sub_data]
elif isinstance(response_data[_from], dict):
sub_data = response_data[_from]
return {_from: {key: sub_data.get(key) for key in _select}}
else:
return {_from: response_data.get(_from)}
elif _from == "~":
return [{key: response_data.get(key) for key in _select}]
elif isinstance(response_data, list):
return [{key: items.get(key) for key in _select} for items in response_data]
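# Hedged usage sketch (editorial addition, not part of the simpleARS project):
# this assumes, per the docstring above, that SearchObject reads the single key
# of search_json as the "from" key and its value as the "select" list. The
# payload and field names below are hypothetical.
if __name__ == "__main__":
    sample_response = {"users": [{"id": 1, "name": "a", "age": 30},
                                 {"id": 2, "name": "b", "age": 40}]}
    sample_search = {"users": ["id", "name"]}
    # expected shape (roughly): [{"users": {"id": 1, "name": "a"}},
    #                            {"users": {"id": 2, "name": "b"}}]
    print(ars_list(sample_response, sample_search))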
|
m19t12/simpleARS
|
simple_ars/extraction.py
|
Python
|
gpl-3.0
| 4,202
|
"""
Definition of the Session class.
"""
import re
import sys
import time
import json
import base64
import random
import hashlib
import asyncio
import weakref
import datetime
from http.cookies import SimpleCookie
from ..event._component import new_type
from ._component2 import PyComponent, JsComponent, AppComponentMeta
from ._asset import Asset, Bundle, solve_dependencies
from ._assetstore import AssetStore, INDEX
from ._assetstore import assets as assetstore
from ._clientcore import serializer
from . import logger
from .. import config
reprs = json.dumps
# Use the system PRNG for session id generation (if possible)
# NOTE: secure random string generation implementation is adapted
# from the Django project.
def get_random_string(length=24, allowed_chars=None):
""" Produce a securely generated random string.
    A length of 12 with the a-z, A-Z, 0-9 character set yields
    a 71-bit value: log_2((26+26+10)^12) =~ 71 bits.
"""
allowed_chars = allowed_chars or ('abcdefghijklmnopqrstuvwxyz' +
'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789')
try:
srandom = random.SystemRandom()
except NotImplementedError: # pragma: no cover
srandom = random
logger.warning('Falling back to less secure Mersenne Twister random string.')
bogus = "%s%s%s" % (random.getstate(), time.time(), 'sdkhfbsdkfbsdbhf')
random.seed(hashlib.sha256(bogus.encode()).digest())
return ''.join(srandom.choice(allowed_chars) for i in range(length))
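# Editorial note (worked arithmetic, not part of the original source): with the
# 62-character alphabet above, each character adds log2(62) =~ 5.95 bits of
# entropy, so the default length of 24 gives roughly 24 * 5.95 =~ 143 bits and
# a length of 12 gives the ~71 bits quoted in the docstring.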
class Session:
""" A connection between Python and the client runtime (JavaScript).
The session is what holds together the app widget, the web runtime,
and the websocket instance that connects to it.
Responsibilities:
* Send messages to the client and process messages received by the client.
* Keep track of PyComponent instances used by the session.
* Keep track of JsComponent instances associated with the session.
* Ensure that the client has all the module definitions it needs.
"""
STATUS = new_type('Enum', (), {'PENDING': 1, 'CONNECTED': 2, 'CLOSED': 0})
def __init__(self, app_name, store=None,
request=None): # Allow custom store for testing
self._store = store if (store is not None) else assetstore
assert isinstance(self._store, AssetStore)
self._creation_time = time.time() # used by app manager
# Id and name of the app
self._id = get_random_string()
self._app_name = app_name
# To keep track of what modules are defined at the client
self._present_classes = set() # Component classes known by the client
        self._present_modules = set() # module names that are defined at the client, plus deps
self._present_assets = set() # names of used associated assets
self._assets_to_ignore = set() # user settable
# Data for this session (in addition to the data provided by the store)
self._data = {}
# More vars
self._runtime = None # init web runtime, will be set when used
self._ws = None # init websocket, will be set when a connection is made
self._closing = False # Flag to help with shutdown
# PyComponent or JsComponent instance, can be None if app_name is __default__
self._component = None
# The session assigns component id's and keeps track of component objects
self._component_counter = 0
self._component_instances = weakref.WeakValueDictionary()
self._dead_component_ids = set()
# Keep track of roundtrips. The _ping_calls elements are:
# [ping_count, {objects}, *(callback, args)]
self._ping_calls = []
self._ping_counter = 0
self._eval_result = {}
self._eval_count = 0
# While the client is not connected, we keep a queue of
        # commands, which are sent to the client as soon as it connects
self._pending_commands = []
# request related information
self._request = request
if request and request.cookies:
cookies = request.cookies
else:
cookies = {}
self._set_cookies(cookies)
def __repr__(self):
t = '<%s for %r (%i) at 0x%x>'
return t % (self.__class__.__name__, self.app_name, self.status, id(self))
@property
def request(self):
"""The tornado request that was at the origin of this session.
"""
return self._request
@property
def id(self):
""" The unique identifier of this session.
"""
return self._id
@property
def app_name(self):
""" The name of the application that this session represents.
"""
return self._app_name
@property
def app(self):
""" The root PyComponent or JsComponent instance that represents the app.
"""
return self._component
@property
def runtime(self):
""" The runtime that is rendering this app instance. Can be
None if the client is a browser.
"""
return self._runtime
@property
def status(self):
""" The status of this session.
The lifecycle for each session is:
* status 1: pending
* status 2: connected
* status 0: closed
"""
if self._ws is None:
return self.STATUS.PENDING # not connected yet
elif self._ws.close_code is None:
return self.STATUS.CONNECTED # alive and kicking
else:
return self.STATUS.CLOSED # connection closed
@property
def present_modules(self):
""" The set of module names that is (currently) available at the client.
"""
return set(self._present_modules)
@property
def assets_to_ignore(self):
""" The set of names of assets that should *not* be pushed to
the client, e.g. because they are already present on the page.
Add names to this set to prevent them from being loaded.
"""
return self._assets_to_ignore
def close(self):
""" Close the session: close websocket, close runtime, dispose app.
"""
# Stop guarding objects to break down any circular refs
self._ping_calls = []
self._closing = True # suppress warnings for session being closed.
try:
# Close the websocket
if self._ws:
self._ws.close_this()
# Close the runtime
if self._runtime:
self._runtime.close()
# Dispose the component and break the circular reference
if self._component is not None:
self._component.dispose()
self._component = None
# Discard data
self._data = {}
finally:
self._closing = False
## Hooking up with app, websocket, runtime
def _set_ws(self, ws):
""" A session is always first created, so we know what page to
serve. The client will connect the websocket, and communicate
the session_id so it can be connected to the correct Session
via this method
"""
if self._ws is not None:
raise RuntimeError('Session is already connected.')
# Set websocket object - this is what changes the status to CONNECTED
self._ws = ws
self._ws.write_command(("PRINT", "Flexx session says hi"))
# Send pending commands
for command in self._pending_commands:
self._ws.write_command(command)
self._ws.write_command(('INIT_DONE', ))
def _set_cookies(self, cookies=None):
""" To set cookies, must be an http.cookie.SimpleCookie object.
When the app is loaded as a web app, the cookies are set *before* the
main component is instantiated. Otherwise they are set when the websocket
is connected.
"""
self._cookies = cookies if cookies else SimpleCookie()
def _set_runtime(self, runtime):
if self._runtime is not None:
raise RuntimeError('Session already has a runtime.')
self._runtime = runtime
## Cookies, mmm
def get_cookie(self, name, default=None, max_age_days=31, min_version=None):
""" Gets the value of the cookie with the given name, else default.
Note that cookies only really work for web apps.
"""
from tornado.web import decode_signed_value
if name in self._cookies:
value = self._cookies[name].value
value = decode_signed_value(config.cookie_secret,
name, value, max_age_days=max_age_days,
min_version=min_version)
return value.decode()
else:
return default
def set_cookie(self, name, value, expires_days=30, version=None,
domain=None, expires=None, path="/", **kwargs):
""" Sets the given cookie name/value with the given options. Set value
to None to clear. The cookie value is secured using
`flexx.config.cookie_secret`; don't forget to set that config
value in your server. Additional keyword arguments are set on
the Cookie.Morsel directly.
"""
# This code is taken (in modified form) from the Tornado project
# Copyright 2009 Facebook
# Licensed under the Apache License, Version 2.0
# Assume tornado is available ...
from tornado.escape import native_str
from tornado.httputil import format_timestamp
from tornado.web import create_signed_value
# Clear cookie?
if value is None:
value = ""
expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
else:
secret = config.cookie_secret
value = create_signed_value(secret, name, value, version=version,
key_version=None)
# The cookie library only accepts type str, in both python 2 and 3
name = native_str(name)
value = native_str(value)
if re.search(r"[\x00-\x20]", name + value):
# Don't let us accidentally inject bad stuff
raise ValueError("Invalid cookie %r: %r" % (name, value))
if name in self._cookies:
del self._cookies[name]
self._cookies[name] = value
morsel = self._cookies[name]
if domain:
morsel["domain"] = domain
if expires_days is not None and not expires:
expires = datetime.datetime.utcnow() + datetime.timedelta(
days=expires_days)
if expires:
morsel["expires"] = format_timestamp(expires)
if path:
morsel["path"] = path
for k, v in kwargs.items():
if k == 'max_age':
k = 'max-age'
# skip falsy values for httponly and secure flags because
# SimpleCookie sets them regardless
if k in ['httponly', 'secure'] and not v:
continue
morsel[k] = v
self.send_command('EXEC', 'document.cookie = "%s";' %
morsel.OutputString().replace('"', '\\"'))
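    # Hedged usage sketch (editorial addition): typical calls against the two
    # cookie methods above. The cookie name and value are hypothetical, and
    # both calls assume flexx.config.cookie_secret is configured on the server:
    #     session.set_cookie('username', 'alice', expires_days=7)
    #     session.get_cookie('username', default='anonymous')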
## Data
def add_data(self, name, data):
""" Add data to serve to the client (e.g. images), specific to this
session. Returns the link at which the data can be retrieved.
Note that actions can be used to send (binary) data directly
to the client (over the websocket).
Parameters:
name (str): the name of the data, e.g. 'icon.png'. If data has
already been set on this name, it is overwritten.
data (bytes): the data blob.
Returns:
str: the (relative) url at which the data can be retrieved.
"""
if not isinstance(name, str):
raise TypeError('Session.add_data() name must be a str.')
if name in self._data:
raise ValueError('Session.add_data() got existing name %r.' % name)
if not isinstance(data, bytes):
raise TypeError('Session.add_data() data must be bytes.')
self._data[name] = data
return 'flexx/data/%s/%s' % (self.id, name) # relative path for export
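    # Hedged usage sketch (editorial addition): add_data() returns a relative
    # URL of the form 'flexx/data/<session id>/<name>', so a hypothetical call
    # looks like:
    #     url = session.add_data('icon.png', png_bytes)
    #     # url == 'flexx/data/<session id>/icon.png'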
def remove_data(self, name):
""" Remove the data associated with the given name. If you need this,
consider using actions instead. Note that data is automatically
released when the session is closed.
"""
self._data.pop(name, None)
def get_data_names(self):
""" Get a list of names of the data provided by this session.
"""
return list(self._data.keys())
def get_data(self, name):
""" Get the data corresponding to the given name. This can be
data local to the session, or global data. Returns None if data
by that name is unknown.
"""
        data = self._data.get(name, None)
        if data is None:
            data = self._store.get_data(name)
        return data
def _dump_data(self):
""" Get a dictionary that contains all data specific to this session.
The keys represent relative paths, the values are all bytes.
Private method, used by App.dump().
"""
d = {}
for fname in self.get_data_names():
d['flexx/data/{}/{}'.format(self.id, fname)] = self.get_data(fname)
return d
## Keeping track of component objects
def _register_component(self, component, id=None):
""" Called by PyComponent and JsComponent to give them an id
and register with the session.
"""
assert isinstance(component, (PyComponent, JsComponent))
assert component.session is self
cls = component.__class__
if self._component is None:
self._component = component # register root component (i.e. the app)
# Set id
if id is None:
self._component_counter += 1
id = cls.__name__ + '_' + str(self._component_counter)
component._id = id
component._uid = self.id + '_' + id
# Register the instance using a weakref
self._component_instances[component._id] = component
# Register the class to that the client has the needed definitions
self._register_component_class(cls)
self.keep_alive(component)
def _unregister_component(self, component):
self._dead_component_ids.add(component.id)
# self.keep_alive(component) # does not work on pypy; deletion in final
# Because we use weak refs, and we want to be able to keep (the id of)
# the object so that INVOKE on it can be silently ignored (because it
# is disposed). The object id gets removed by the DISPOSE_ACK command.
def get_component_instance(self, id):
""" Get PyComponent or JsComponent instance that is associated with
this session and has the corresponding id. The returned value can be
None if it does not exist, and a returned component can be disposed.
"""
return self._component_instances.get(id, None)
## JIT asset definitions
def _register_component_class(self, cls):
""" Mark the given PyComponent or JsComponent class as used; ensure
that the client knows about the module that it is defined in,
dependencies of this module, and associated assets of any of these
modules.
"""
if not (isinstance(cls, type) and issubclass(cls, (PyComponent, JsComponent))):
raise TypeError('_register_component_class() needs a PyComponent '
'or JsComponent class')
# Early exit if we know the class already
if cls in self._present_classes:
return
# Make sure that no two Component classes have the same name, or we get problems
# that are difficult to debug. Unless classes are defined interactively.
# The modules of classes that are re-registered are re-defined. The base
# class of such a component is assumed to be either unchanged or defined
# in the same module. It can also happen that a class is registered for
# which the module was defined earlier (e.g. ui.html). Such modules
# are redefined as well.
same_name = [c for c in self._present_classes if c.__name__ == cls.__name__]
if same_name:
is_interactive = self._app_name == '__default__'
same_name.append(cls)
is_dynamic_cls = all([c.__module__ == '__main__' for c in same_name])
if not (is_interactive and is_dynamic_cls):
raise RuntimeError('Cannot have multiple Component classes with '
'the same name unless using interactive session '
'and the classes are dynamically defined: %r'
% same_name)
# Mark the class and the module as used
logger.debug('Registering Component class %r' % cls.__name__)
self._register_module(cls.__jsmodule__)
def _register_module(self, mod_name):
""" Register a module with the client, as well as its
        dependencies, and associated assets of the module and its
dependencies. If the module was already defined, it is
re-defined.
"""
if (mod_name.startswith(('flexx.app', 'flexx.event')) and
'.examples' not in mod_name):
return # these are part of flexx core assets
modules = set()
assets = []
def collect_module_and_deps(mod):
if mod.name.startswith(('flexx.app', 'flexx.event')):
return # these are part of flexx core assets
if mod.name not in self._present_modules:
self._present_modules.add(mod.name)
for dep in mod.deps:
if dep.startswith(('flexx.app', 'flexx.event')):
continue
submod = self._store.modules[dep]
collect_module_and_deps(submod)
modules.add(mod)
# Collect module and dependent modules that are not yet defined
self._store.update_modules() # Ensure up-to-date module definition
mod = self._store.modules[mod_name]
collect_module_and_deps(mod)
f = lambda m: (m.name.startswith('__main__'), m.name)
modules = solve_dependencies(sorted(modules, key=f))
# Collect associated assets
for mod in modules:
for asset_name in self._store.get_associated_assets(mod.name):
if asset_name not in self._present_assets:
self._present_assets.add(asset_name)
assets.append(self._store.get_asset(asset_name))
# If the module was already defined and thus needs to be re-defined,
        # we only redefine *this* module, no deps and no associated assets.
if not modules:
modules.append(mod)
# Collect CSS and JS assets
for mod in modules:
if mod.get_css().strip():
assets.append(self._store.get_asset(mod.name + '.css'))
for mod in modules:
assets.append(self._store.get_asset(mod.name + '.js'))
# Mark classes as used
for mod in modules:
for cls in mod.component_classes:
self._present_classes.add(cls)
# Push assets over the websocket. Note how this works fine with the
# notebook because we turn ws commands into display(HTML()).
# JS can be defined via eval() or by adding a <script> to the DOM.
# The latter allows assets that do not use strict mode, but sourceURL
# does not work on FF. So we only want to eval our own assets.
for asset in assets:
if asset.name in self._assets_to_ignore:
continue
logger.debug('Loading asset %s' % asset.name)
# Determine command suffix. All our sources come in bundles,
# for which we use eval because it makes sourceURL work on FF.
# (It does not work in Chrome in either way.)
suffix = asset.name.split('.')[-1].upper()
if suffix == 'JS' and isinstance(asset, Bundle):
suffix = 'JS-EVAL'
self.send_command('DEFINE', suffix, asset.name, asset.to_string())
## Communication with the client
def send_command(self, *command):
""" Send a command to the other side. Commands consists of at least one
argument (a string representing the type of command).
"""
assert len(command) >= 1
if self._closing:
pass
elif self.status == self.STATUS.CONNECTED:
self._ws.write_command(command)
elif self.status == self.STATUS.PENDING:
self._pending_commands.append(command)
else:
#raise RuntimeError('Cannot send commands; app is closed')
logger.warning('Cannot send commands; app is closed')
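    # Hedged usage sketch (editorial addition): the first element names the
    # command type, the rest are its arguments, mirroring the EXEC command that
    # set_cookie above sends:
    #     session.send_command('EXEC', 'console.log("hello")')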
def _receive_command(self, command):
""" Received a command from JS.
"""
cmd = command[0]
if cmd == 'EVALRESULT':
self._eval_result[command[2]] = command[1]
elif cmd == 'PRINT':
print('JS:', command[1])
elif cmd == 'INFO':
logger.info('JS: ' + command[1])
elif cmd == 'WARN':
logger.warning('JS: ' + command[1])
elif cmd == 'ERROR':
logger.error('JS: ' + command[1] +
' - stack trace in browser console (hit F12).')
elif cmd == 'INVOKE':
id, name, args = command[1:]
ob = self.get_component_instance(id)
if ob is None:
if id not in self._dead_component_ids:
t = 'Cannot invoke %s.%s; session does not know it (anymore).'
logger.warning(t % (id, name))
elif ob._disposed:
                pass # JS probably sent something before knowing the object was dead
else:
func = getattr(ob, name, None)
if func:
func(*args)
elif cmd == 'PONG':
self._receive_pong(command[1])
elif cmd == 'INSTANTIATE':
modulename, cname, id, args, kwargs = command[1:]
# Maybe we still have the instance?
c = self.get_component_instance(id)
if c and not c._disposed:
self.keep_alive(c)
return
# Try to find the class
m, cls, e = None, None, 0
if modulename in assetstore.modules:
m = sys.modules[modulename]
cls = getattr(m, cname, None)
if cls is None:
e = 1
elif not (isinstance(cls, type) and issubclass(cls, JsComponent)):
cls, e = None, 2
elif cls not in AppComponentMeta.CLASSES:
cls, e = None, 3
if cls is None:
raise RuntimeError('Cannot INSTANTIATE %s.%s (%i)' %
(modulename, cname, e))
# Instantiate
kwargs['flx_session'] = self
kwargs['flx_id'] = id
assert len(args) == 0
c = cls(**kwargs) # calls keep_alive via _register_component()
        elif cmd == 'DISPOSE': # Gets sent from local to proxy
id = command[1]
c = self.get_component_instance(id)
if c and not c._disposed: # no need to warn if component does not exist
c._dispose()
self.send_command('DISPOSE_ACK', command[1])
self._component_instances.pop(id, None) # Drop local ref now
        elif cmd == 'DISPOSE_ACK': # Gets sent from proxy to local
self._component_instances.pop(command[1], None)
self._dead_component_ids.discard(command[1])
else:
logger.error('Unknown command received from JS:\n%s' % command)
def keep_alive(self, ob, iters=1):
""" Keep an object alive for a certain amount of time, expressed
in Python-JS ping roundtrips. This is intended for making JsComponent
        (i.e. proxy components) survive the time between instantiation
        triggered from JS and their attachment to a property, though any type
of object can be given.
"""
ping_to_schedule_at = self._ping_counter + iters
el = self._get_ping_call_list(ping_to_schedule_at)
el[1][id(ob)] = ob # add to dict of objects to keep alive
def call_after_roundtrip(self, callback, *args):
""" A variant of ``call_soon()`` that calls a callback after
        a py-js roundtrip. This can be convenient to delay an action until
after other things have settled down.
"""
# The ping_counter represents the ping count that is underway.
# Since we want at least a full ping, we want one count further.
ping_to_schedule_at = self._ping_counter + 1
el = self._get_ping_call_list(ping_to_schedule_at)
el.append((callback, args))
async def co_roundtrip(self):
""" Coroutine to wait for one Py-JS-Py roundtrip.
"""
count = 0
def up():
nonlocal count
count += 1
self.call_after_roundtrip(up)
while count < 1:
await asyncio.sleep(0.02)
async def co_eval(self, js):
""" Coroutine to evaluate JS in the client, wait for the result,
        and then return it. It is recommended to use this method only
for testing purposes.
"""
id = self._eval_count
self._eval_count += 1
self.send_command('EVALANDRETURN', js, id)
while id not in self._eval_result:
await asyncio.sleep(0.2)
return self._eval_result.pop(id)
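    # Hedged usage sketch (editorial addition): awaiting a JS evaluation from a
    # coroutine, as the docstring above suggests for testing. The expression is
    # hypothetical:
    #     title = await session.co_eval('document.title')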
def _get_ping_call_list(self, ping_count):
""" Get an element from _ping_call for the specified ping_count.
The element is a list [ping_count, {objects}, *(callback, args)]
"""
# No pending ping_calls?
if len(self._ping_calls) == 0:
# Start pinging
send_ping_later(self)
# Append element
el = [ping_count, {}]
self._ping_calls.append(el)
return el
# Try to find existing element, or insert it
for i in reversed(range(len(self._ping_calls))):
el = self._ping_calls[i]
if el[0] == ping_count:
return el
elif el[0] < ping_count:
el = [ping_count, {}]
self._ping_calls.insert(i + 1, el)
return el
else:
el = [ping_count, {}]
self._ping_calls.insert(0, el)
return el
def _receive_pong(self, count):
# Process ping calls
while len(self._ping_calls) > 0 and self._ping_calls[0][0] <= count:
_, objects, *callbacks = self._ping_calls.pop(0)
objects.clear()
del objects
for callback, args in callbacks:
asyncio.get_event_loop().call_soon(callback, *args)
# Continue pinging?
if len(self._ping_calls) > 0:
send_ping_later(self)
def send_ping_later(session):
    # Use a weakref here so that a reference lingering in the asyncio loop
    # does not keep the session from being discarded.
def x(weaksession):
s = weaksession()
if s is not None and s.status > 0:
s._ping_counter += 1
s.send_command('PING', s._ping_counter)
# asyncio.get_event_loop().call_soon(x, weakref.ref(session))
asyncio.get_event_loop().call_later(0.01, x, weakref.ref(session))
## Functions to get page
# These could be methods, but are only for internal use
def get_page(session):
""" Get the string for the HTML page to render this session's app.
Not a lot; all other JS and CSS assets are pushed over the websocket.
"""
css_assets = [assetstore.get_asset('reset.css')]
js_assets = [assetstore.get_asset('flexx-core.js')]
return _get_page(session, js_assets, css_assets, 3, False)
def get_page_for_export(session, commands, link=0):
""" Get the string for an exported HTML page (to run without a server).
In this case, there is no websocket to push JS/CSS assets over; these
need to be included inside or alongside the main html page.
"""
# This function basically collects all assets that the session needs,
# creates a special -export.js asset that executes the given commands,
    # and puts it all together using _get_page().
# We start as a normal page ...
css_assets = [assetstore.get_asset('reset.css')]
js_assets = [assetstore.get_asset('flexx-core.js')]
# Get all the used modules
modules = [assetstore.modules[name] for name in session.present_modules]
f = lambda m: (m.name.startswith('__main__'), m.name)
modules = solve_dependencies(sorted(modules, key=f))
# First the associated assets
asset_names = set()
for mod in modules:
for asset_name in assetstore.get_associated_assets(mod.name):
if asset_name not in asset_names:
asset_names.add(asset_name)
asset = assetstore.get_asset(asset_name)
if asset.name.lower().endswith('.js'):
js_assets.append(asset)
else:
css_assets.append(asset)
# Then the modules themselves
for mod in modules:
if mod.get_css().strip():
css_assets.append(assetstore.get_asset(mod.name + '.css'))
for mod in modules:
js_assets.append(assetstore.get_asset(mod.name + '.js'))
# Create asset for launching the app (commands that normally get send
# over the websocket)
lines = []
lines.append('flexx.is_exported = true;\n')
lines.append('flexx.run_exported_app = function () {')
lines.append(' var commands_b64 = [')
for command in commands:
if command[0] != 'DEFINE':
command_str = base64.encodebytes(serializer.encode(command)).decode()
lines.append(' "' + command_str.replace('\n', '') + '",')
lines.append(' ];')
lines.append(' bb64 = flexx.require("bb64");')
lines.append(' for (var i=0; i<commands_b64.length; i++) {')
lines.append(' var command = flexx.serializer.decode('
'bb64.decode(commands_b64[i]));')
lines.append(' flexx.s1._receive_command(command);')
lines.append(' }\n};\n')
# Create a session asset for it, "-export.js" is always embedded
export_asset = Asset('flexx-export.js', '\n'.join(lines))
js_assets.append(export_asset)
# Combine it all
return _get_page(session, js_assets, css_assets, link, True)
def _get_page(session, js_assets, css_assets, link, export):
""" Compose index page. Depending on the value of link and the types
of assets, the assets are either embedded or linked.
"""
pre_path = 'flexx/assets' if export else '/flexx/assets' # relative / abs
codes = []
for assets in [css_assets, js_assets]:
for asset in assets:
if link in (0, 1):
html = asset.to_html('{}', link)
else:
if asset.name.endswith(('-info.js', '-export.js')):
# Special case, is always embedded, see get_page_for_export()
html = asset.to_html('', 0)
else:
html = asset.to_html(pre_path + '/shared/{}', link)
codes.append(html)
if export and assets is js_assets:
codes.append('<script>window.flexx.spin();</script>')
codes.append('') # whitespace between css and js assets
codes.append('<script>flexx.create_session("%s", "%s");</script>\n' %
(session.app_name, session.id))
headers = session.app.headers if hasattr(session.app, 'headers') else ''
src = INDEX.replace('HEADER-HOOK', headers)
if link in (0, 1):
asset_names = [a.name for a in css_assets + js_assets]
toc = '<!-- Contents:\n\n- ' + '\n- '.join(asset_names) + '\n\n-->'
codes.insert(0, toc)
src = src.replace('ASSET-HOOK', '\n\n\n'.join(codes))
else:
src = src.replace('ASSET-HOOK', '\n'.join(codes))
return src
|
jrversteegh/flexx
|
flexx/app/_session.py
|
Python
|
bsd-2-clause
| 32,665
|
# This file is part of PyBuilder
#
# Copyright 2011-2014 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mockito import verify, unstub, any, times, when
import unittest
from test_utils import mock
from pybuilder.errors import MissingTaskDependencyException, CircularTaskDependencyException, NoSuchTaskException,\
MissingActionDependencyException, InvalidNameException
from pybuilder.core import Logger
from pybuilder.execution import as_task_name_list, Action, Executable, ExecutionManager, Task,\
DependenciesNotResolvedException, Initializer
class AsTaskNameList(unittest.TestCase):
def test_should_return_list_of_strings_when_string_given(self):
self.assertEquals(["spam"], as_task_name_list("spam"))
def test_should_return_list_of_strings_when_list_of_strings_given(self):
self.assertEquals(
["spam", "eggs"], as_task_name_list(["spam", "eggs"]))
def test_should_return_list_of_strings_when_function_given(self):
def spam():
pass
self.assertEquals(["spam"], as_task_name_list(spam))
def test_should_return_list_of_strings_when_list_of_functions_given(self):
def spam():
pass
def eggs():
pass
self.assertEquals(["spam", "eggs"], as_task_name_list([spam, eggs]))
class ExecutableTest(unittest.TestCase):
def test_should_raise_exception_when_passing_non_function_to_constructor(self):
self.assertRaises(TypeError, Executable, "callable", "spam")
def test_should_raise_exception_when_executable_name_is_invalid(self):
def callable():
pass
self.assertRaises(InvalidNameException, Executable, "a-b", callable)
self.assertRaises(InvalidNameException, Executable, "88aa", callable)
self.assertRaises(
InvalidNameException, Executable, "l asd ll", callable)
self.assertRaises(InvalidNameException, Executable, "@", callable)
self.assertRaises(InvalidNameException, Executable, "$", callable)
self.assertRaises(InvalidNameException, Executable, "%", callable)
def test_should_execute_callable_without_arguments(self):
def callable():
callable.called = True
callable.called = False
Executable("callable", callable).execute({})
self.assertTrue(callable.called)
def test_should_execute_callable_with_single_arguments(self):
def callable(spam):
callable.called = True
callable.spam = spam
callable.called = False
Executable("callable", callable).execute({"spam": "spam"})
self.assertTrue(callable.called)
self.assertEquals("spam", callable.spam)
def test_should_raise_exception_when_callable_argument_cannot_be_satisfied(self):
def callable(spam):
pass
executable = Executable("callable", callable)
self.assertRaises(ValueError, executable.execute, {})
class ActionTest(unittest.TestCase):
def test_should_initialize_fields(self):
def callable():
pass
action = Action("callable", callable, "before", "after", "description")
self.assertEquals(["before"], action.execute_before)
self.assertEquals(["after"], action.execute_after)
self.assertEquals("description", action.description)
class TaskTest(unittest.TestCase):
def test_should_sort_tasks_by_name(self):
task_a = Task("a_name", lambda: None, "dependency", "description")
task_b = Task("b_name", lambda: None, "dependency", "description")
task_list = [task_b, task_a]
self.assertEquals(["a_name", "b_name"], [
task.name for task in sorted(task_list)])
def test_should_initialize_fields(self):
def callable():
pass
task = Task("callable", callable, "dependency", "description")
self.assertEquals(["dependency"], task.dependencies)
self.assertEquals(["description"], task.description)
def test_should_execute_callable_without_arguments(self):
def callable():
callable.called = True
callable.called = False
Task("callable", callable).execute(mock(), {})
self.assertTrue(callable.called)
def test_should_execute_callable_with_single_arguments(self):
def callable(spam):
callable.called = True
callable.spam = spam
callable.called = False
Task("callable", callable).execute(mock(), {"spam": "spam"})
self.assertTrue(callable.called)
self.assertEquals("spam", callable.spam)
def test_should_raise_exception_when_callable_argument_cannot_be_satisfied(self):
def callable(spam):
pass
executable = Task("callable", callable)
self.assertRaises(ValueError, executable.execute, mock(), {})
class TaskExtensionTest(unittest.TestCase):
def test_should_extend_task_with_values_from_other_task(self):
def callable_one():
pass
def callable_two(param):
pass
task = Task("task", callable_one, "dependency", "description")
replacement = Task("replacement", callable_two,
"another_dependency", "replacement description")
task.extend(replacement)
self.assertEquals("task", task.name)
self.assertEquals(
["dependency", "another_dependency"], task.dependencies)
self.assertEquals(
["description", "replacement description"], task.description)
def test_should_execute_both_callables_when_extending_task(self):
def callable_one():
callable_one.called = True
callable_one.called = False
def callable_two(param):
callable_two.called = True
callable_two.called = False
task_one = Task("task", callable_one)
task_two = Task("task", callable_two)
task_one.extend(task_two)
task_one.execute(mock(), {"param": "spam"})
self.assertTrue(callable_one.called)
self.assertTrue(callable_two.called)
class InitializerTest(unittest.TestCase):
def setUp(self):
def callable():
pass
self.callable = callable
def test_should_return_true_when_invoking_is_applicable_without_environment_and_initializer_does_not_define_environments(
self):
initializer = Initializer("initialzer", self.callable)
self.assertTrue(initializer.is_applicable())
def test_should_return_true_when_invoking_is_applicable_with_environment_and_initializer_does_not_define_environments(
self):
initializer = Initializer("initialzer", self.callable)
self.assertTrue(initializer.is_applicable("any_environment"))
def test_should_return_true_when_invoking_is_applicable_with_environment_and_initializer_defines_environment(
self):
initializer = Initializer(
"initialzer", self.callable, "any_environment")
self.assertTrue(initializer.is_applicable("any_environment"))
def test_should_return_true_when_invoking_is_applicable_with_environments_and_initializer_defines_environment(
self):
initializer = Initializer(
"initialzer", self.callable, "any_environment")
self.assertTrue(initializer.is_applicable(
["any_environment", "any_other_environment"]))
def test_should_return_false_when_invoking_is_applicable_with_environment_and_initializer_defines_environment(
self):
initializer = Initializer(
"initialzer", self.callable, "any_environment")
self.assertFalse(initializer.is_applicable("any_other_environment"))
def test_should_return_false_when_invoking_is_applicable_without_environment_and_initializer_defines_environment(
self):
initializer = Initializer(
"initialzer", self.callable, "any_environment")
self.assertFalse(initializer.is_applicable())
def test_should_return_true_when_invoking_is_applicable_with_environment_and_initializer_defines_multiple_environments(
self):
initializer = Initializer(
"initialzer", self.callable, ["any_environment", "any_other_environment"])
self.assertTrue(initializer.is_applicable(["any_environment"]))
class ExecutionManagerTestBase(unittest.TestCase):
def setUp(self):
self.execution_manager = ExecutionManager(Logger())
def tearDown(self):
unstub()
class ExecutionManagerInitializerTest(ExecutionManagerTestBase):
def test_ensure_that_initializer_is_added_when_calling_register_initializer(self):
initializer = mock()
self.execution_manager.register_initializer(initializer)
self.assertEquals([initializer], self.execution_manager.initializers)
def test_ensure_that_registered_initializers_are_executed_when_calling_execute_initializers(self):
initializer_1 = mock()
when(initializer_1).is_applicable(any()).thenReturn(True)
self.execution_manager.register_initializer(initializer_1)
initializer_2 = mock()
when(initializer_2).is_applicable(any()).thenReturn(True)
self.execution_manager.register_initializer(initializer_2)
self.execution_manager.execute_initializers(a=1)
verify(initializer_1).execute({"a": 1})
verify(initializer_2).execute({"a": 1})
def test_ensure_that_registered_initializers_are_not_executed_when_environments_do_not_match(self):
initializer = mock()
when(initializer).is_applicable(any()).thenReturn(False)
self.execution_manager.register_initializer(initializer)
environments = []
self.execution_manager.execute_initializers(environments, a=1)
verify(initializer).is_applicable(environments)
verify(initializer, 0).execute(any())
class ExecutionManagerTaskTest(ExecutionManagerTestBase):
def test_ensure_task_is_added_when_calling_register_task(self):
task = mock()
self.execution_manager.register_task(task)
self.assertEquals([task], self.execution_manager.tasks)
def test_ensure_task_is_replaced_when_registering_two_tasks_with_same_name(self):
original = mock(name="spam")
replacement = mock(name="spam")
self.execution_manager.register_task(original)
self.execution_manager.register_task(replacement)
verify(original).extend(replacement)
def test_should_raise_exception_when_calling_execute_task_before_resolve_dependencies(self):
self.assertRaises(DependenciesNotResolvedException,
self.execution_manager.execute_task,
mock())
def test_ensure_task_is_executed_when_calling_execute_task(self):
task = mock(name="spam", dependencies=[])
self.execution_manager.register_task(task)
self.execution_manager.resolve_dependencies()
self.execution_manager.execute_task(task, a=1)
verify(task).execute(any(), {"a": 1})
def test_ensure_before_action_is_executed_when_task_is_executed(self):
task = mock(name="task", dependencies=[])
action = mock(name="action", execute_before=["task"], execute_after=[])
self.execution_manager.register_action(action)
self.execution_manager.register_task(task)
self.execution_manager.resolve_dependencies()
self.execution_manager.execute_task(task)
verify(action).execute({})
verify(task).execute(any(), {})
def test_ensure_after_action_is_executed_when_task_is_executed(self):
task = mock(name="task", dependencies=[])
action = mock(name="action", execute_before=[], execute_after=["task"])
self.execution_manager.register_action(action)
self.execution_manager.register_task(task)
self.execution_manager.resolve_dependencies()
self.execution_manager.execute_task(task)
verify(action).execute({})
verify(task).execute(any(), {})
def test_should_return_single_task_name(self):
self.execution_manager.register_task(mock(name="spam"))
self.assertEquals(["spam"], self.execution_manager.task_names)
def test_should_return_all_task_names(self):
self.execution_manager.register_task(
mock(name="spam"), mock(name="eggs"))
self.assertEquals(["eggs", "spam"], self.execution_manager.task_names)
class ExecutionManagerActionTest(ExecutionManagerTestBase):
def test_ensure_action_is_registered(self):
action = mock(name="action")
self.execution_manager.register_action(action)
self.assertEquals({"action": action}, self.execution_manager._actions)
def test_ensure_action_registered_for_two_tasks_is_executed_two_times(self):
spam = mock(name="spam", dependencies=[])
eggs = mock(name="eggs", dependencies=[])
self.execution_manager.register_task(spam, eggs)
action = mock(name="action",
execute_before=[],
execute_after=["spam", "eggs"],
only_once=False)
self.execution_manager.register_action(action)
self.execution_manager.resolve_dependencies()
self.execution_manager.execute_execution_plan([spam, eggs])
verify(action, times(2)).execute(any())
def test_ensure_action_registered_for_two_tasks_is_executed_only_once_if_single_attribute_is_present(self):
spam = mock(name="spam", dependencies=[])
eggs = mock(name="eggs", dependencies=[])
self.execution_manager.register_task(spam, eggs)
action = mock(name="action",
execute_before=[],
execute_after=["spam", "eggs"],
only_once=True)
self.execution_manager.register_action(action)
self.execution_manager.resolve_dependencies()
self.execution_manager.execute_execution_plan([spam, eggs])
verify(action, times(1)).execute(any())
class ExecutionManagerResolveDependenciesTest(ExecutionManagerTestBase):
def test_ensure_that_dependencies_are_resolved_when_no_task_is_given(self):
self.execution_manager.resolve_dependencies()
self.assertTrue(self.execution_manager._dependencies_resolved)
def test_ensure_that_dependencies_are_resolved_when_single_task_is_given(self):
task = mock(dependencies=[])
self.execution_manager.register_task(task)
self.execution_manager.resolve_dependencies()
self.assertTrue(self.execution_manager._dependencies_resolved)
def test_should_raise_exception_when_task_depends_on_task_not_found(self):
task = mock(dependencies=["not_found"])
self.execution_manager.register_task(task)
self.assertRaises(MissingTaskDependencyException,
self.execution_manager.resolve_dependencies)
def test_should_raise_exception_when_before_action_depends_on_task_not_found(self):
action = mock(execute_before=["not_found"], execute_after=[])
self.execution_manager.register_action(action)
self.assertRaises(MissingActionDependencyException,
self.execution_manager.resolve_dependencies)
def test_should_raise_exception_when_after_action_depends_on_task_not_found(self):
action = mock(execute_before=[], execute_after=["not_found"])
self.execution_manager.register_action(action)
self.assertRaises(MissingActionDependencyException,
self.execution_manager.resolve_dependencies)
def test_ensure_that_dependencies_are_resolved_when_simple_dependency_is_found(self):
one = mock(name="one", dependencies=[])
two = mock(name="two", dependencies=["one"])
self.execution_manager.register_task(one, two)
self.execution_manager.resolve_dependencies()
self.assertEquals(
[], self.execution_manager._task_dependencies.get("one"))
self.assertEquals(
[one], self.execution_manager._task_dependencies.get("two"))
def test_ensure_that_dependencies_are_resolved_when_task_depends_on_multiple_tasks(self):
one = mock(name="one", dependencies=[])
two = mock(name="two", dependencies=["one"])
three = mock(name="three", dependencies=["one", "two"])
self.execution_manager.register_task(one, two, three)
self.execution_manager.resolve_dependencies()
self.assertEquals(
[], self.execution_manager._task_dependencies.get("one"))
self.assertEquals(
[one], self.execution_manager._task_dependencies.get("two"))
self.assertEquals(
[one, two], self.execution_manager._task_dependencies.get("three"))
class ExecutionManagerBuildExecutionPlanTest(ExecutionManagerTestBase):
def test_should_raise_exception_when_building_execution_plan_and_dependencies_are_not_resolved(self):
self.assertRaises(DependenciesNotResolvedException,
self.execution_manager.build_execution_plan, ("boom",))
def test_should_raise_exception_when_building_execution_plan_for_task_not_found(self):
self.execution_manager.resolve_dependencies()
self.assertRaises(
NoSuchTaskException, self.execution_manager.build_execution_plan, ("boom",))
def test_should_return_execution_plan_with_single_task_when_single_task_is_to_be_executed(self):
one = mock(name="one", dependencies=[])
self.execution_manager.register_task(one)
self.execution_manager.resolve_dependencies()
self.assertEqual(
[one], self.execution_manager.build_execution_plan(["one"]))
def test_should_return_execution_plan_with_two_tasks_when_two_tasks_are_to_be_executed(self):
one = mock(name="one", dependencies=[])
two = mock(name="two", dependencies=[])
self.execution_manager.register_task(one, two)
self.execution_manager.resolve_dependencies()
self.assertEqual(
[one, two], self.execution_manager.build_execution_plan(["one", "two"]))
def test_ensure_that_dependencies_are_executed_before_root_task(self):
one = mock(name="one", dependencies=[])
two = mock(name="two", dependencies=["one"])
self.execution_manager.register_task(one, two)
self.execution_manager.resolve_dependencies()
self.assertEqual(
[one, two], self.execution_manager.build_execution_plan(["two"]))
def test_ensure_that_tasks_are_not_executed_multiple_times(self):
one = mock(name="one", dependencies=[])
self.execution_manager.register_task(one)
self.execution_manager.resolve_dependencies()
self.assertEqual(
[one], self.execution_manager.build_execution_plan(["one", "one"]))
def test_ensure_that_tasks_are_not_executed_multiple_times_when_being_dependencies(self):
one = mock(name="one", dependencies=[])
two = mock(name="two", dependencies=["one"])
self.execution_manager.register_task(one, two)
self.execution_manager.resolve_dependencies()
self.assertEqual(
[one, two], self.execution_manager.build_execution_plan(["one", "two"]))
def test_should_raise_exception_when_circular_reference_is_detected_on_single_task(self):
one = mock(name="one", dependencies=["one"])
self.execution_manager.register_task(one)
self.execution_manager.resolve_dependencies()
self.assertRaises(CircularTaskDependencyException,
self.execution_manager.build_execution_plan, ["one"])
def test_should_raise_exception_when_circular_reference_is_detected_on_two_tasks(self):
one = mock(name="one", dependencies=["two"])
two = mock(name="two", dependencies=["one"])
self.execution_manager.register_task(one, two)
self.execution_manager.resolve_dependencies()
self.assertRaises(CircularTaskDependencyException,
self.execution_manager.build_execution_plan, ["one"])
def test_should_raise_exception_when_circular_reference_is_detected_on_three_tasks(self):
one = mock(name="one", dependencies=["three"])
two = mock(name="two", dependencies=["one"])
three = mock(name="three", dependencies=["one", "two"])
self.execution_manager.register_task(one, two, three)
self.execution_manager.resolve_dependencies()
self.assertRaises(CircularTaskDependencyException,
self.execution_manager.build_execution_plan, ["one"])
class ExecutionManagerExecuteExecutionPlanTest(ExecutionManagerTestBase):
def test_should_raise_exception_when_dependencies_are_not_resolved(self):
self.assertRaises(DependenciesNotResolvedException,
self.execution_manager.execute_execution_plan, ["boom"])
def test_ensure_tasks_are_executed(self):
one = mock(name="one", dependencies=[])
two = mock(name="two", dependencies=[])
self.execution_manager.register_task(one, two)
self.execution_manager.resolve_dependencies()
self.execution_manager.execute_execution_plan([one, two])
verify(one).execute(any(), {})
verify(two).execute(any(), {})
|
shakamunyi/pybuilder
|
src/unittest/python/execution_tests.py
|
Python
|
apache-2.0
| 21,950
|
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Third Party
from django.core import mail
from django.core.management import call_command
# wger
from wger.core.tests.base_testcase import WorkoutManagerTestCase
class EmailInactiveUserTestCase(WorkoutManagerTestCase):
'''
Test email reminders for inactive users
'''
def test_reminder(self, fail=False):
'''
Test email reminders for inactive users
'''
call_command('inactive-members')
self.assertEqual(len(mail.outbox), 6)
recipment_list = [message.to[0] for message in mail.outbox]
trainer_list = ['trainer4@example.com',
'trainer5@example.com',
'trainer1@example.com',
'trainer2@example.com',
'trainer3@example.com']
recipment_list.sort()
trainer_list.sort()
        self.assertEqual(recipment_list, trainer_list)
|
petervanderdoes/wger
|
wger/gym/tests/test_inactive_members.py
|
Python
|
agpl-3.0
| 1,562
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
import documentation
|
adhoc-dev/odoo-web
|
website_doc/models/__init__.py
|
Python
|
agpl-3.0
| 291
|
import tempfile
import os
import re
import shutil
import cStringIO
from contextlib import contextmanager
import netlib
from pathod import utils, test, pathoc, pathod, language
from netlib import tcp
import requests
def treader(bytes):
"""
    Construct a tcp.Reader object from bytes.
"""
fp = cStringIO.StringIO(bytes)
return tcp.Reader(fp)
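# Hedged usage sketch (editorial addition, not part of the original helpers):
# treader() wraps raw bytes so code expecting a tcp.Reader can be exercised in
# isolation, e.g.:
#     rdr = treader("GET / HTTP/1.1\r\n\r\n")
#     # rdr can then be read from like a file object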
class DaemonTests(object):
noweb = False
noapi = False
nohang = False
ssl = False
timeout = None
hexdump = False
ssloptions = None
nocraft = False
@classmethod
def setup_class(cls):
opts = cls.ssloptions or {}
cls.confdir = tempfile.mkdtemp()
opts["confdir"] = cls.confdir
so = pathod.SSLOptions(**opts)
cls.d = test.Daemon(
staticdir=test_data.path("data"),
anchors=[
(re.compile("/anchor/.*"), "202:da")
],
ssl=cls.ssl,
ssloptions=so,
sizelimit=1 * 1024 * 1024,
noweb=cls.noweb,
noapi=cls.noapi,
nohang=cls.nohang,
timeout=cls.timeout,
hexdump=cls.hexdump,
nocraft=cls.nocraft,
logreq=True,
logresp=True,
explain=True
)
@classmethod
def teardown_class(cls):
cls.d.shutdown()
shutil.rmtree(cls.confdir)
def teardown(self):
if not (self.noweb or self.noapi):
self.d.clear_log()
def getpath(self, path, params=None):
scheme = "https" if self.ssl else "http"
resp = requests.get(
"%s://localhost:%s/%s" % (
scheme,
self.d.port,
path
),
verify=False,
params=params
)
return resp
def get(self, spec):
resp = requests.get(self.d.p(spec), verify=False)
return resp
def pathoc(
self,
specs,
timeout=None,
connect_to=None,
ssl=None,
ws_read_limit=None,
use_http2=False,
):
"""
Returns a (messages, text log) tuple.
"""
if ssl is None:
ssl = self.ssl
logfp = cStringIO.StringIO()
c = pathoc.Pathoc(
("localhost", self.d.port),
ssl=ssl,
ws_read_limit=ws_read_limit,
timeout=timeout,
fp=logfp,
use_http2=use_http2,
)
c.connect(connect_to)
ret = []
for i in specs:
resp = c.request(i)
if resp:
ret.append(resp)
for frm in c.wait():
ret.append(frm)
c.stop()
return ret, logfp.getvalue()
tmpdir = netlib.tutils.tmpdir
raises = netlib.tutils.raises
test_data = utils.Data(__name__)
def render(r, settings=language.Settings()):
r = r.resolve(settings)
s = cStringIO.StringIO()
assert language.serve(r, s, settings)
return s.getvalue()
|
ikoz/mitmproxy
|
test/pathod/tutils.py
|
Python
|
mit
| 3,003
|
from network.utils import network_point_coverage
__author__ = 'gabriel'
from network import TEST_DATA_FILE
from network.itn import read_gml, ITNStreetNet
from network.streetnet import NetPath, NetPoint, Edge, GridEdgeIndex
from data import models
import os
import unittest
import settings
import numpy as np
from matplotlib import pyplot as plt
from network import utils
from validation import hotspot, roc
import networkx as nx
from shapely.geometry import LineString
def load_test_network():
# load some toy network data
test_data = read_gml(TEST_DATA_FILE)
return ITNStreetNet.from_data_structure(test_data)
def toy_network(loop=False):
g = nx.MultiGraph()
node_coords = {
'a': (0, 0),
'b': (5, 0),
'c': (5, 2),
'd': (5, 3),
'e': (6, 2),
'f': (7, 0),
'g': (7, -2),
'h': (5, -2),
'i': (5, -3),
'j': (4, -2),
'k': (0, -2),
'l': (-1, -2),
'm': (-2 ** .5 / 2., -2 ** .5 / 2. - 2),
'n': (0, -3),
'o': (1, -2),
'p': (0, 2),
}
edges = [
('a', 'b'),
('a', 'k'),
('k', 'l'),
('k', 'm'),
('k', 'n'),
('k', 'o'),
('b', 'c'),
('b', 'h'),
('c', 'd'),
('c', 'e'),
('b', 'f'),
('f', 'g'),
('g', 'h'),
('b', 'h'),
('h', 'j'),
('h', 'i'),
('a', 'p')
]
def attr_factory(start, end):
xy0 = node_coords[start]
xy1 = node_coords[end]
ls = LineString([xy0, xy1])
attr_dict = {
'linestring': ls,
'length': ls.length,
'fid': start + end + '1',
'orientation_neg': start,
'orientation_pos': end
}
return attr_dict
for i0, i1 in edges:
attr = attr_factory(i0, i1)
g.add_edge(i0, i1, key=attr['fid'], attr_dict=attr)
# add 2 more multilines between a and b
attr = attr_factory('a', 'b')
th = np.linspace(0, np.pi, 50)[::-1]
x = 2.5 * (np.cos(th) + 1)
y = np.sin(th)
ls = LineString(zip(x, y))
attr['fid'] = 'ab2'
attr['linestring'] = ls
attr['length'] = ls.length
g.add_edge('a', 'b', key=attr['fid'], attr_dict=attr)
ls = LineString([
(0, 0),
(2.5, -1),
(5, 0)
])
attr['fid'] = 'ab3'
attr['linestring'] = ls
g.add_edge('a', 'b', key=attr['fid'], attr_dict=attr)
if loop:
# add cycle at p
attr = attr_factory('p', 'p')
th = np.linspace(-np.pi / 2., 3 * np.pi / 2., 50)
x = np.cos(th)
y = np.sin(th) + node_coords['p'][1] + 1
ls = LineString(zip(x, y))
attr['linestring'] = ls
attr['length'] = ls.length
g.add_edge('p', 'p', key=attr['fid'], attr_dict=attr)
# add node coords
for k, v in node_coords.items():
g.node[k]['loc'] = v
net = ITNStreetNet.from_multigraph(g)
return net
class TestNetworkData(unittest.TestCase):
def setUp(self):
# this_dir = os.path.dirname(os.path.realpath(__file__))
# IN_FILE = os.path.join(this_dir, 'test_data', 'mastermap-itn_417209_0_brixton_sample.gml')
self.test_data = read_gml(TEST_DATA_FILE)
self.itn_net = ITNStreetNet.from_data_structure(self.test_data)
def test_grid_index(self):
xmin, ymin, xmax, ymax = self.itn_net.extent
grid_edge_index = self.itn_net.build_grid_edge_index(50)
x_grid_expct = np.arange(xmin, xmax, 50)
self.assertTrue(np.all(grid_edge_index.x_grid == x_grid_expct))
def test_extent(self):
expected_extent = (530960.0, 174740.0, 531856.023, 175436.0)
for eo, ee in zip(expected_extent, self.itn_net.extent):
self.assertAlmostEqual(eo, ee)
def test_net_point(self):
#Four test points - 1 and 3 on same segment, 2 on neighbouring segment, 4 long way away.
#5 and 6 are created so that there are 2 paths of almost-equal length between them - they
#lie on opposite sides of a 'square'
x_pts = (
531190,
531149,
531210,
531198,
531090
)
y_pts = (
175214,
175185,
175214,
174962,
175180
)
xmin, ymin, xmax, ymax = self.itn_net.extent
grid_edge_index = self.itn_net.build_grid_edge_index(50)
net_points = []
snap_dists = []
for x, y in zip(x_pts, y_pts):
tmp = self.itn_net.closest_edges_euclidean(x, y, grid_edge_index=grid_edge_index)
net_points.append(tmp[0])
            snap_dists.append(tmp[1])
# test net point subtraction
self.assertIsInstance(net_points[1] - net_points[0], NetPath)
self.assertAlmostEqual((net_points[1] - net_points[0]).length, (net_points[0] - net_points[1]).length)
for i in range(len(net_points)):
self.assertEqual((net_points[i] - net_points[i]).length, 0.)
net_point_array = models.NetworkData(net_points)
self.assertFalse(np.any(net_point_array.distance(net_point_array).data.sum()))
def test_snapping_brute_force(self):
# lay down some known points
coords = [
(531022.868, 175118.877),
(531108.054, 175341.141),
(531600.117, 175243.572),
(531550, 174740),
]
# the edges they should correspond to
edge_params = [
{'orientation_neg': 'osgb4000000029961720_0',
'orientation_pos': 'osgb4000000029961721_0',
'fid': 'osgb4000000030340202'},
{'orientation_neg': 'osgb4000000029962839_0',
'orientation_pos': 'osgb4000000029962853_0',
'fid': 'osgb4000000030235941'},
{'orientation_neg': 'osgb4000000030778079_0',
'orientation_pos': 'osgb4000000030684375_0',
'fid': 'osgb4000000030235965'},
None, # no edge within radius
]
for c, e in zip(coords, edge_params):
# snap point
this_netpoint = NetPoint.from_cartesian(self.itn_net, *c, radius=50)
# check edge equality
if e:
this_edge = Edge(self.itn_net, **e)
self.assertEqual(this_netpoint.edge, this_edge)
else:
self.assertTrue(this_netpoint is None)
def test_snapping_indexed(self):
# lay down some known points
coords = [
(531022.868, 175118.877),
(531108.054, 175341.141),
(531600.117, 175243.572),
(531550, 174740),
]
# the edges they should correspond to
edge_params = [
{'orientation_neg': 'osgb4000000029961720_0',
'orientation_pos': 'osgb4000000029961721_0',
'fid': 'osgb4000000030340202'},
{'orientation_neg': 'osgb4000000029962839_0',
'orientation_pos': 'osgb4000000029962853_0',
'fid': 'osgb4000000030235941'},
{'orientation_neg': 'osgb4000000030778079_0',
'orientation_pos': 'osgb4000000030684375_0',
'fid': 'osgb4000000030235965'},
None, # no edge within radius
]
gei = self.itn_net.build_grid_edge_index(50)
# supply incompatible radius
with self.assertRaises(AssertionError):
this_netpoint = NetPoint.from_cartesian(self.itn_net, *coords[0], grid_edge_index=gei, radius=51)
for c, e in zip(coords, edge_params):
# snap point
this_netpoint = NetPoint.from_cartesian(self.itn_net, *c, grid_edge_index=gei, radius=50)
# check edge equality
if e:
this_edge = Edge(self.itn_net, **e)
self.assertEqual(this_netpoint.edge, this_edge)
else:
self.assertTrue(this_netpoint is None)
# retest last point without a radius
e = {'orientation_neg': 'osgb4000000029961762_0',
'orientation_pos': 'osgb4000000029961741_0',
'fid': 'osgb4000000030145824'}
c = coords[-1]
this_netpoint = NetPoint.from_cartesian(self.itn_net, *c, grid_edge_index=gei)
this_edge = Edge(self.itn_net, **e)
self.assertEqual(this_netpoint.edge, this_edge)
class TestUtils(unittest.TestCase):
def setUp(self):
self.test_data = read_gml(TEST_DATA_FILE)
self.itn_net = ITNStreetNet.from_data_structure(self.test_data)
def test_network_edge_walker(self):
g = utils.network_walker(self.itn_net, repeat_edges=False, verbose=False)
res = list(g)
        # if repeat_edges == False, every edge should be covered exactly once
self.assertEqual(len(res), len(self.itn_net.edges()))
# since no start node was supplied, walker should have started at node 0
self.assertEqual(res[0][0].nodes, [self.itn_net.nodes()[0]])
# restart walk at a different node
g = utils.network_walker(self.itn_net, repeat_edges=False, verbose=False, source_node=self.itn_net.nodes()[-1])
res2 = list(g)
self.assertEqual(len(res2), len(self.itn_net.edges()))
# now run it again using the class
obj = utils.NetworkWalker(self.itn_net,
[],
repeat_edges=False)
g = obj.walker()
res3 = list(g)
self.assertListEqual(res, res3)
# test caching
start = self.itn_net.nodes()[0]
self.assertTrue(start in obj.cached_walks)
self.assertListEqual(res, obj.cached_walks[start])
start = self.itn_net.nodes()[-1]
g = obj.walker(start)
res4 = list(g)
self.assertListEqual(res2, res4)
self.assertTrue(start in obj.cached_walks)
self.assertListEqual(res2, obj.cached_walks[start])
def test_fixed_distance_walk(self):
net = toy_network()
pt = NetPoint.from_cartesian(net, 2.5, 0)
if __name__ == "__main__":
b_plot = False
# mini test dataset
# test dataset is in a directory in the same path as this module called 'test_data'
this_dir = os.path.dirname(os.path.realpath(__file__))
IN_FILE = os.path.join(this_dir, 'test_data', 'mastermap-itn_417209_0_brixton_sample.gml')
test_data = read_gml(IN_FILE)
itn_net = ITNStreetNet.from_data_structure(test_data)
# buffered Camden dataset from raw data
# test dataset is in a directory in the data directory called 'network_data'
# this_dir = os.path.join(settings.DATA_DIR, 'network_data')
# IN_FILE = os.path.join(this_dir, 'mastermap-itn_544003_0_camden_buff2000.gml')
# test_data = read_gml(IN_FILE)
# itn_net = ITNStreetNet.from_data_structure(test_data)
# buffered Camden dataset from pickle
# this_dir = os.path.dirname(os.path.realpath(__file__))
# IN_FILE = os.path.join(this_dir, 'test_data', 'mastermap-itn_544003_0_camden_buff2000.pickle')
# itn_net = ITNStreetNet.from_pickle(IN_FILE)
# get the spatial extent of the network
xmin, ymin, xmax, ymax = itn_net.extent
# lay down some random points within that box
num_pts = 100
x_pts = np.random.rand(num_pts) * (xmax - xmin) + xmin
y_pts = np.random.rand(num_pts) * (ymax - ymin) + ymin
# now we want to snap them all to the network...
# method A: do it in two steps...
# A1: push them into a single data array for easier operation
xy = models.DataArray.from_args(x_pts, y_pts)
# A2: use the class method from_cartesian,
net_point_array_a = models.NetworkData.from_cartesian(itn_net, xy, grid_size=50) # grid_size defaults to 50
# method B: do it manually, just to check
# also going to take this opportunity to test a minor problem with closest_edges_euclidean
grid_edge_index = itn_net.build_grid_edge_index(50)
net_points = []
snap_dists = []
fail_idx = []
for i, (x, y) in enumerate(zip(x_pts, y_pts)):
tmp = itn_net.closest_edges_euclidean(x, y, grid_edge_index=grid_edge_index)
if tmp[0] is None:
            # some of these calls fail when the grid_size is too small (e.g. 50 is actually too small);
            # the fallback is a method that does not depend on the grid, which is what
            # closest_edges_euclidean_brute_force is designed to do -
            # it is MUCH slower but always finds an edge
tmp = itn_net.closest_edges_euclidean_brute_force(x, y)
fail_idx.append(i)
net_points.append(tmp[0])
snap_dists.append(tmp[1])
net_point_array_b = models.NetworkData(net_points)
# check these are the same
print net_point_array_a == net_point_array_b # this is just doing a point-by-point equality check behind the scenes
# find the cartesian_coords after snapping
xy_post_snap = net_point_array_a.to_cartesian()
# plot showing the snapping operation
# this separates the data arrays back into their constituent dims
x_pre, y_pre = xy.separate
x_post, y_post = xy_post_snap.separate
if b_plot:
fig = plt.figure()
ax = fig.add_subplot(111)
itn_net.plot_network(ax=ax, edge_width=7, edge_inner_col='w')
ax.plot(x_pre, y_pre, 'ro')
ax.plot(x_post, y_post, 'bo')
[ax.plot([x_pre[i], x_post[i]], [y_pre[i], y_post[i]], 'k-') for i in range(xy.ndata)]
# highlight failed points (where closest_edges_euclidean didn't find any snapped point) in black circles
[ax.plot(x_pre[i], y_pre[i], marker='o', markersize=20, c='k', fillstyle='none') for i in fail_idx]
# glue the network point array together with a time dimension - just take time at uniform intervals on [0, 1]
st_net_point_array = models.NetworkSpaceTimeData(
zip(np.linspace(0, 1, num_pts), net_points)
)
# compute linkages at a max delta t and delta d
# i, j = network_linkages(st_net_point_array, max_t=1.0, max_d=5000.)
# excise data with time cutoff (validation machinery does this for you normally)
training_data = st_net_point_array.getrows(np.where(st_net_point_array.time <= 0.6)[0])
training_t = training_data.toarray(0)
training_xy = training_data.space.to_cartesian()
testing_data = st_net_point_array.getrows(np.where(st_net_point_array.time > 0.6)[0])
# create instance of Bowers ProMap network kernel
h = hotspot.STNetworkBowers(1000, 2)
# bind it to data
h.train(training_data)
# instantiate Roc
r = roc.NetworkRocSegments(data=testing_data.space, graph=itn_net)
r.set_sample_units(None)
prediction_points_net = r.sample_points
prediction_points_xy = prediction_points_net.to_cartesian()
z = h.predict(0.6, prediction_points_net)
r.set_prediction(z)
if b_plot:
# show the predicted values, training data and sampling points
r.plot()
plt.scatter(training_xy.toarray(0), training_xy.toarray(1), c=training_t, cmap='jet', s=40)
plt.plot(prediction_points_xy.toarray(0), prediction_points_xy.toarray(1), 'kx', markersize=20)
plt.colorbar()
# repeat for a more accurate Roc class that uses multiple readings per segment
if False: # disable for now
r2 = roc.NetworkRocSegmentsMean(data=testing_data.space, graph=itn_net)
r2.set_sample_units(None, 10)
prediction_points_net2 = r2.sample_points
prediction_points_xy2 = prediction_points_net2.to_cartesian()
z2 = h.predict(0.6, prediction_points_net2)
r2.set_prediction(z2)
if b_plot:
# show the predicted values, training data and sampling points
r2.plot()
plt.scatter(training_xy.toarray(0), training_xy.toarray(1), c=training_t, cmap='jet', s=40)
plt.plot(prediction_points_xy2.toarray(0), prediction_points_xy2.toarray(1), 'kx', markersize=20)
plt.colorbar()
# get a roughly even coverage of points across the network
net_points, edge_count = network_point_coverage(itn_net, dx=10)
xy_points = net_points.to_cartesian()
c_edge_count = np.cumsum(edge_count)
# make a 'prediction' for time 1.1
# st_net_prediction_array = models.DataArray(
# np.ones(net_points.ndata) * 1.1
# ).adddim(net_points, type=models.NetworkSpaceTimeData)
# z = h.predict(st_net_prediction_array)
if b_plot:
# get colour limits - otherwise single large values dominate the plot
fmax = 0.7
vmax = sorted(z)[int(len(z) * fmax)]
plt.figure()
itn_net.plot_network(edge_width=8, edge_inner_col='w')
plt.scatter(xy_points[:, 0], xy_points[:,1], c=z, cmap='Reds', vmax=vmax, s=50, edgecolor='none', zorder=3)
# j = 0
# for i in range(len(itn_net.edges())):
# n = c_edge_count[i]
# x = xy_points[j:n, 0]
# y = xy_points[j:n, 1]
# val = z[j:n]
# plotting.colorline(x, y, val, linewidth=8)
# j = n
from network import utils
# n_iter = 30
g = utils.network_walker(itn_net, verbose=False, repeat_edges=False)
# res = [g.next() for i in range(n_iter)]
res = list(g)
import matplotlib.lines as mlines
import matplotlib.patches as mpatches
def add_arrow_to_line2D(
line,
axes=None,
arrowstyle='-|>',
arrowsize=1):
"""
Add arrows to a matplotlib.lines.Line2D at the midpoint.
    Parameters:
    -----------
    line: list containing a single Line2D object, as returned by the plot command
    axes: the axes to draw on (defaults to the current axes)
    arrowstyle: style of the arrow
    arrowsize: size of the arrow
    Returns:
    --------
    arrow: the FancyArrowPatch that was added to the axes
    """
axes = axes or plt.gca()
if (not(isinstance(line, list)) or not(isinstance(line[0],
mlines.Line2D))):
raise ValueError("expected a matplotlib.lines.Line2D object")
x, y = line[0].get_xdata(), line[0].get_ydata()
arrow_kw = dict(arrowstyle=arrowstyle, mutation_scale=10 * arrowsize)
color = line[0].get_color()
use_multicolor_lines = isinstance(color, np.ndarray)
if use_multicolor_lines:
raise NotImplementedError("multicolor lines not supported")
else:
arrow_kw['color'] = color
linewidth = line[0].get_linewidth()
if isinstance(linewidth, np.ndarray):
raise NotImplementedError("multiwidth lines not supported")
else:
arrow_kw['linewidth'] = linewidth
sc = np.concatenate(([0], np.cumsum(np.sqrt(np.diff(x) ** 2 + np.diff(y) ** 2))))
x0 = np.interp(0.45 * sc[-1], sc, x)
y0 = np.interp(0.45 * sc[-1], sc, y)
x1 = np.interp(0.55 * sc[-1], sc, x)
y1 = np.interp(0.55 * sc[-1], sc, y)
# s = np.cumsum(np.sqrt(np.diff(x) ** 2 + np.diff(y) ** 2))
# n = np.searchsorted(s, s[-1] * loc)
# arrow_tail = (x[n], y[n])
# arrow_head = (np.mean(x[n:n + 2]), np.mean(y[n:n + 2]))
arrow_tail = (x0, y0)
arrow_head = (x1, y1)
p = mpatches.FancyArrowPatch(
arrow_tail, arrow_head, transform=axes.transData,
**arrow_kw)
axes.add_patch(p)
return p
if b_plot:
fig = plt.figure(figsize=(16, 12))
itn_net.plot_network()
for i in range(len(res)):
node_loc = itn_net.g.node[res[i][0][-1]]['loc']
h = plt.plot(node_loc[0], node_loc[1], 'ko', markersize=10)[0]
edge_x, edge_y = res[i][2].linestring.xy
# which way are we walking?
if res[i][2].orientation_pos == res[i][0][-1]:
# need to reverse the linestring
edge_x = edge_x[::-1]
edge_y = edge_y[::-1]
line = plt.plot(edge_x, edge_y, 'k-')
add_arrow_to_line2D(line, arrowsize=2)
fig.savefig('/home/gabriel/tmp/%02d.png' % i)
h.remove()
if i + 1 == len(res):
# final image - save it 25 more times to have a nice lead out
for j in range(25):
fig.savefig('/home/gabriel/tmp/%02d.png' % (j + i))
# run to stitch images together:
# avconv -r 10 -crf 20 -i "%02d.png" -vf "scale=trunc(iw/2)*2:trunc(ih/2)*2" -c:v libx264 -pix_fmt yuv420p output.mp4
# network KDE stuff
from kde import models as kde_models, kernels
prediction_points_tnet = hotspot.generate_st_prediction_dataarray(0.6,
prediction_points_net,
dtype=models.NetworkSpaceTimeData)
a = kde_models.NetworkFixedBandwidthKde(training_data, bandwidths=[5., 50.], parallel=False)
res = a.pdf(prediction_points_tnet)
if b_plot:
itn_net.plot_network()
plt.scatter(*training_data.space.to_cartesian().separate, c='r', s=80)
plt.scatter(*prediction_points_net.to_cartesian().separate, c=res/res.max(), s=40)
|
gaberosser/geo-network
|
tests.py
|
Python
|
mit
| 21,263
|
import sys
import os
from fabric.api import sudo, hosts, env, task, run, cd
from fabric.contrib.files import exists
from . import config, users, application
DEV = config.HOSTS['development']['ip']
USER = config.HOSTS['development']['user']
PASSWD = config.HOSTS['development']['password']
HOST_PKGS = config.HOSTS['development']['packages']
PY_PKGS = config.HOSTS['development']['python_packages']
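# Rough shape of the expected config (illustrative sketch only - the real values live
# in fabfile/config.py and are not shown here):
# config.HOSTS['development'] = {'ip': '10.0.0.5', 'user': 'vagrant', 'password': None,
#                                'packages': ['nginx', 'supervisor'],
#                                'python_packages': ['virtualenv', 'pip']}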
@task
@hosts(DEV)
def apt_install(pkg):
apt_update()
sudo("apt-get -q -y install %s" % pkg)
@task
@hosts(DEV)
def apt_upgrade_all():
apt_update()
sudo("apt-get -q -y upgrade")
@task
@hosts(DEV)
def apt_upgrade(pkg):
#: Really, just for convenience
apt_install(pkg)
@task
@hosts(DEV)
def apt_remove(pkg):
sudo("apt-get -q -y remove %s" % pkg)
@task
@hosts(DEV)
def apt_update():
sudo("apt-get -q update")
@task
@hosts(DEV)
def pip_install(pkg):
sudo("pip install %s" % pkg)
@task
@hosts(DEV)
def pip_remove(pkg):
sudo("pip uninstall %s" % pkg)
@task
@hosts(DEV)
def pip_upgrade(pkg):
sudo("pip install --upgrade %s" % pkg)
@task
@hosts(DEV)
def reboot():
sudo("shutdown -r now")
@task
@hosts(DEV)
def uptime():
run("uptime")
@task
@hosts(DEV)
def who():
run("who")
@task
@hosts(DEV)
def bootstrap():
env.user = USER
if PASSWD:
env.password = PASSWD
if not exists("/etc/machine_deployed"):
host_installs = " ".join(HOST_PKGS)
apt_install(host_installs)
py_installs = " ".join(PY_PKGS)
pip_install(py_installs)
users.user_add(DEV, "webapp", "system")
users.user_add(DEV, "andrew")
users.create_user_virtualenv("vagrant")
users.user_group_add("vagrant", "webapp")
users.user_group_add("andrew", "webapp")
users.user_group_add("www-data", "webapp")
sudo("touch /etc/machine_deployed")
@task
@hosts(DEV)
def clean():
env.user = USER
if PASSWD:
env.password = PASSWD
env.warn_only = True
users.user_remove("andrew")
users.user_remove("webapp")
users.group_remove("andrew")
users.group_remove("webapp")
sudo("rm -rf /etc/machine_deployed")
@task
@hosts(DEV)
def app_bootstrap(app=None, user=None):
if not app or not user:
print 'app and user are required'
sys.exit()
env.warn_only = True
deploy_dir = config.APPS[app]['deploy_dir']
configs_dir = config.APPS[app]['configs_dir']
logs_dir = config.APPS[app]['logs_dir']
tmp_dir = config.APPS[app]['tmp_dir']
scm_type = config.APPS[app]['scm_type']
scm_path = config.APPS[app]['scm_path']
python_pkgs = config.APPS[app]['python_packages']
requirements_file = config.APPS[app]['requirements_file']
conf_root = deploy_dir + os.sep + configs_dir
log_root = deploy_dir + os.sep + logs_dir
run_root = deploy_dir + os.sep + "run"
bin_root = deploy_dir + os.sep + "bin"
script_root = deploy_dir + os.sep + "scripts"
nginx_conf = conf_root + os.sep + "nginx-" + app
supervisor_conf = conf_root + os.sep + "supervisor-" + app
sudo("mkdir -p %s" % deploy_dir)
sudo("chmod -R 0775 %s" % deploy_dir)
sudo("chown -R webapp:webapp %s" % deploy_dir)
sudo("mkdir -p %s" % tmp_dir)
sudo("chmod -R 0664 %s" % tmp_dir)
sudo("umask 033 %s" % tmp_dir)
sudo("chown -R webapp:webapp %s" % tmp_dir)
py_installs = " ".join(python_pkgs)
application.create_virtualenv(deploy_dir, user, py_installs)
# virtualenv
remote_update, remote_checkout = application.scm_funcs(scm_type)
with cd(deploy_dir):
remote_checkout(deploy_dir, scm_path)
# requirements file
if requirements_file:
reqs_file = deploy_dir + os.sep + requirements_file
if exists(reqs_file):
application.pip_install_2_virtualenv(deploy_dir,
reqs_file_path=reqs_file)
# log directories
if not exists(log_root):
sudo("mkdir -p %s" % log_root)
# run directories
if not exists(run_root):
sudo("mkdir -p %s" % run_root)
# gunicorn
startup_script = bin_root + os.sep + "gunicorn_start.sh"
if not exists(startup_script):
sudo("mv %s/gunicorn_start.sh %s" %
(script_root, startup_script))
# nginx
if exists(nginx_conf):
sudo("cp %s /etc/nginx/sites-available/%s" % (nginx_conf, app))
sudo("ln -s /etc/nginx/sites-available/%s "
"/etc/nginx/sites-enabled/%s" % (app, app))
# supervisor
if exists(supervisor_conf):
sudo("cp %s /etc/supervisor/conf.d/%s.conf" % (supervisor_conf, app))
# secure and clean up
application.finalize(deploy_dir, system_user="webapp")
# start it up!
sudo("supervisorctl reread")
sudo("supervisorctl update")
sudo("supervisorctl start all")
@task
@hosts(DEV)
def app_update(app=None):
if not app:
print 'app is required'
sys.exit()
env.warn_only = True
deploy_dir = config.APPS[app]['deploy_dir']
scm_type = config.APPS[app]['scm_type']
scm_path = config.APPS[app]['scm_path']
remote_update, remote_checkout = application.scm_funcs(scm_type)
with cd(deploy_dir):
remote_checkout(deploy_dir, scm_path)
sudo("supervisorctl stop all")
sudo("supervisorctl start all")
@task
@hosts(DEV)
def app_clean(app):
env.warn_only = True
deploy_dir = config.APPS[app]['deploy_dir']
sudo("rm -rf %s" % deploy_dir)
if exists("/etc/nginx/sites-enabled/%s" % app):
sudo("rm /etc/nginx/sites-enabled/%s" % app)
if exists("/etc/nginx/sites-available/%s" % app):
sudo("rm /etc/nginx/sites-available/%s" % app)
if exists("/etc/supervisor/conf.d/%s.conf" % app):
sudo("rm /etc/supervisor/conf.d/%s.conf" % app)
sudo("supervisorctl reread")
sudo("supervisorctl update")
sudo("service supervisor stop")
sudo("service supervisor start")
|
andrewjsledge/python-project
|
fabfile/development.py
|
Python
|
bsd-3-clause
| 5,953
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('doctor', '0003_auto_20160111_2302'),
('agenda', '0002_auto_20160111_2258'),
]
operations = [
migrations.RemoveField(
model_name='slot',
name='refer_userprofile',
),
migrations.AddField(
model_name='slot',
name='refer_doctor',
field=models.ForeignKey(related_name='back_doctor', verbose_name='refer_doctor', to='doctor.Doctor', null=True),
),
]
|
Foxugly/medagenda
|
agenda/migrations/0003_auto_20160112_0123.py
|
Python
|
gpl-3.0
| 636
|
import sys
from setuptools import setup, find_packages
# Little hack to make 'python setup.py test' work on py2.7
try:
import multiprocessing
import logging
except:
pass
# Requirements to install buffet plugins and engines
_extra_genshi = ["Genshi >= 0.3.5"]
_extra_mako = ["Mako >= 0.1.1"]
_extra_jinja = ["Jinja2"]
tests_require = [
#'BeautifulSoup',
'nose',
'sieve',
] + _extra_mako
if sys.version_info[0] == 2 and sys.version_info[1] <= 5:
tests_require.append('WebTest<2.0')
else:
tests_require.append('WebTest')
if sys.version_info[0] < 3:
tests_require.append('FormEncode')
setup(
name='tw2.bootstrap.forms',
version='2.2.2.1',
description="A drop-in replacement for tw2.forms but with bootstrap!",
long_description=open('README.rst').read(),
author='Moritz Schlarb, Ralph Bean & contributors',
author_email='toscawidgets-discuss@googlegroups.com',
url="http://toscawidgets.org/",
download_url="https://pypi.python.org/pypi/tw2.bootstrap.forms/",
license='BSD 2-clause',
install_requires=[
"tw2.core",
"tw2.forms",
"tw2.jquery",
"six",
## Add other requirements here
# "Genshi",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
namespace_packages=[
'tw2',
'tw2.bootstrap',
],
zip_safe=False,
include_package_data=True,
test_suite='nose.collector',
tests_require=tests_require,
entry_points="""
[tw2.widgets]
# Register your widgets so they can be listed in the WidgetBrowser
widgets = tw2.bootstrap.forms
""",
keywords=[
'toscawidgets.widgets',
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Environment :: Web Environment :: ToscaWidgets',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Widget Sets',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'License :: OSI Approved :: BSD License',
],
)
|
toscawidgets/tw2.bootstrap
|
setup.py
|
Python
|
bsd-2-clause
| 2,176
|
import os
import maya.cmds as m
from fxpt.fx_texture_manager.com import cleanupPath
# noinspection PySetFunctionToLiteral
IGNORED_OBJECT_TYPES = set([
'defaultShaderList',
'defaultTextureList'
])
IGNORED_OBJECTS = set()
for t in IGNORED_OBJECT_TYPES:
IGNORED_OBJECTS.update(m.ls(typ=t))
SHADING_ENGINE_TYPE = 'kShadingEngine'
def getShadingGroups(node, visited):
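    """Recursively walk the downstream connections of `node` and collect the names
    of all shading engine (shading group) nodes reached, skipping nodes in `visited`.
    """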
sgs = set()
visited.add(node)
outConnections = m.listConnections(node, s=False, d=True)
if outConnections:
for destinationNode in outConnections:
if destinationNode not in visited:
if m.nodeType(destinationNode, apiType=True) == SHADING_ENGINE_TYPE:
sgs.add(destinationNode)
else:
sgs.update(getShadingGroups(destinationNode, visited))
return sgs
class TexNode(object):
def __init__(self, node, attr):
self.node = None
self.attr = None
self.sgs = None
self.setNode(node)
self.setAttr(attr)
self.setSgs()
def __str__(self):
return 'TexNode: {}'.format(self.getFullAttrName())
def setNode(self, node):
self.node = node
def getNode(self):
return self.node
def setAttr(self, attr):
self.attr = attr
def getAttr(self):
return self.attr
def setSgs(self):
self.sgs = getShadingGroups(self.node, set(IGNORED_OBJECTS))
def getSgs(self):
return self.sgs
def isAssigned(self):
for sg in self.sgs:
if m.sets(sg, q=True):
return True
return False
def getFullAttrName(self):
return '{}.{}'.format(self.node, self.attr)
def getAttrValue(self):
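        # Return the attribute value as a cleaned-up path: repeated slashes are collapsed
        # to single ones, except that a leading '//' (e.g. a UNC share) is preserved.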
slashedPath = cleanupPath(m.getAttr(self.getFullAttrName()))
if slashedPath.startswith('//'):
return '//{}'.format(slashedPath[2:].replace('//', '/'))
else:
return slashedPath.replace('//', '/')
def setAttrValue(self, value):
m.setAttr(self.getFullAttrName(), value, typ='string')
def nodeAttrExists(self):
return m.objExists(self.getFullAttrName())
def fileExists(self):
fullPath = os.path.expandvars(self.getAttrValue())
if os.path.basename(fullPath):
return os.path.exists(fullPath)
else:
return False
|
theetcher/fxpt
|
fxpt/fx_texture_manager/tex_node.py
|
Python
|
mit
| 2,368
|
# -*- coding: utf-8 -*-
###############################################################################
#
# AddAccessConfig
# Adds an access config to an instance's network interface.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class AddAccessConfig(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the AddAccessConfig Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(AddAccessConfig, self).__init__(temboo_session, '/Library/Google/ComputeEngine/Instances/AddAccessConfig')
def new_input_set(self):
return AddAccessConfigInputSet()
def _make_result_set(self, result, path):
return AddAccessConfigResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return AddAccessConfigChoreographyExecution(session, exec_id, path)
class AddAccessConfigInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the AddAccessConfig
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessConfiguration(self, value):
"""
Set the value of the AccessConfiguration input for this Choreo. ((optional, json) A JSON string containing the access configuration properties you wish to set. This can be used as an alternative to individual inputs that represent access configuration properties.)
"""
super(AddAccessConfigInputSet, self)._set_input('AccessConfiguration', value)
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth process. This is required unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.)
"""
super(AddAccessConfigInputSet, self)._set_input('AccessToken', value)
def set_ClientID(self, value):
"""
Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required unless providing a valid AccessToken.)
"""
super(AddAccessConfigInputSet, self)._set_input('ClientID', value)
def set_ClientSecret(self, value):
"""
Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required unless providing a valid AccessToken.)
"""
super(AddAccessConfigInputSet, self)._set_input('ClientSecret', value)
def set_Instance(self, value):
"""
Set the value of the Instance input for this Choreo. ((required, string) Name of the instance for which to add an access configuration.)
"""
super(AddAccessConfigInputSet, self)._set_input('Instance', value)
def set_Name(self, value):
"""
Set the value of the Name input for this Choreo. ((optional, string) The name of this access configuration. Defaults to "External NAT" if not specified.)
"""
super(AddAccessConfigInputSet, self)._set_input('Name', value)
def set_NatIP(self, value):
"""
Set the value of the NatIP input for this Choreo. ((optional, string) An external IP address associated with this instance. Specify an unused static IP address available to the project. An external IP will be drawn from a shared ephemeral pool when not specified.)
"""
super(AddAccessConfigInputSet, self)._set_input('NatIP', value)
def set_NetworkInterface(self, value):
"""
Set the value of the NetworkInterface input for this Choreo. ((required, string) The name of the network interface to add the access config (e.g. nic0, nic1, etc).)
"""
super(AddAccessConfigInputSet, self)._set_input('NetworkInterface', value)
def set_Project(self, value):
"""
Set the value of the Project input for this Choreo. ((required, string) The ID of a Google Compute project.)
"""
super(AddAccessConfigInputSet, self)._set_input('Project', value)
def set_RefreshToken(self, value):
"""
Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth refresh token used to generate a new access token when the original token is expired. Required unless providing a valid AccessToken.)
"""
super(AddAccessConfigInputSet, self)._set_input('RefreshToken', value)
def set_Type(self, value):
"""
Set the value of the Type input for this Choreo. ((required, string) Type of configuration. Must be set to ONE_TO_ONE_NAT.)
"""
super(AddAccessConfigInputSet, self)._set_input('Type', value)
def set_Zone(self, value):
"""
Set the value of the Zone input for this Choreo. ((required, string) The name of the zone associated with this request.)
"""
super(AddAccessConfigInputSet, self)._set_input('Zone', value)
class AddAccessConfigResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the AddAccessConfig Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Google.)
"""
return self._output.get('Response', None)
def get_NewAccessToken(self):
"""
Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
"""
return self._output.get('NewAccessToken', None)
class AddAccessConfigChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return AddAccessConfigResultSet(response, path)
|
jordanemedlock/psychtruths
|
temboo/core/Library/Google/ComputeEngine/Instances/AddAccessConfig.py
|
Python
|
apache-2.0
| 6,857
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from django.contrib import admin
from tree.models import *
admin.site.register(Tree)
admin.site.register(Question)
admin.site.register(Answer)
admin.site.register(TreeState)
admin.site.register(Transition)
admin.site.register(Entry)
admin.site.register(Session)
|
genova/rapidsms-senegal
|
apps/tree/admin.py
|
Python
|
bsd-3-clause
| 316
|
import tweepy
# Placeholder credentials (not real): these names were used but never defined below.
consumer_key = "YOUR_CONSUMER_KEY"
consumer_secret = "YOUR_CONSUMER_SECRET"
access_token = "YOUR_ACCESS_TOKEN"
access_token_secret = "YOUR_ACCESS_TOKEN_SECRET"
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
tweepyapi = tweepy.API(auth)
tweepyapi.update_status('Hello World!')
print("Hello {}".format(tweepyapi.me().name))
|
LairdStreak/MyPyPlayGround
|
tujData/tweepy__.py
|
Python
|
mit
| 248
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensor_array_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.eager import def_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.platform import test
class TensorArrayOpsTest(test.TestCase):
@test_util.run_v1_only('Testing placeholders specifically.')
def test_concat_graph(self):
values = tensor_array_ops.TensorArray(
size=4, dtype=dtypes.string, element_shape=[None], infer_shape=False)
a = array_ops.placeholder(dtypes.string, [
None,
])
b = array_ops.placeholder(dtypes.string, [
None,
])
values = (values.write(0, a).write(
1, constant_op.constant([], dtypes.string))).write(2, b).write(
3, constant_op.constant([], dtypes.string))
with self.session() as s:
result = s.run(values.concat(), {a: ['a', 'b', 'c'], b: ['c', 'd', 'e']})
self.assertAllEqual(result, [b'a', b'b', b'c', b'c', b'd', b'e'])
@test_util.run_v2_only
def test_concat(self):
values = tensor_array_ops.TensorArray(
size=4, dtype=dtypes.string, element_shape=[None], infer_shape=False)
a = constant_op.constant(['a', 'b', 'c'], dtypes.string)
b = constant_op.constant(['c', 'd', 'e'], dtypes.string)
values = (values.write(0, a).write(
1, constant_op.constant([], dtypes.string))).write(2, b).write(
3, constant_op.constant([], dtypes.string))
self.assertAllEqual(values.concat(), [b'a', b'b', b'c', b'c', b'd', b'e'])
@test_util.run_v2_only
def test_concat_in_function(self):
@def_function.function
def fn(a, b):
values = tensor_array_ops.TensorArray(
size=4, dtype=dtypes.string, element_shape=[None], infer_shape=False)
values = (values.write(0, a).write(
1, constant_op.constant([], dtypes.string))).write(2, b).write(
3, constant_op.constant([], dtypes.string))
return values.concat()
self.assertAllEqual(fn(['a', 'b', 'c'], ['c', 'd', 'e']),
[b'a', b'b', b'c', b'c', b'd', b'e'])
def test_init_numpy_shape(self):
@def_function.function
def fn():
values = tensor_array_ops.TensorArray(
np.float32,
size=1,
dynamic_size=False,
element_shape=np.array((2, 3)))
values = values.write(0, np.ones((2, 3)))
return values.concat()
self.assertAllEqual(fn(), [[1., 1., 1.], [1., 1., 1.]])
if __name__ == '__main__':
test.main()
|
annarev/tensorflow
|
tensorflow/python/ops/tensor_array_ops_test.py
|
Python
|
apache-2.0
| 3,451
|
import numpy as np
def multivariate_gaussian(x, mean, cov):
'''Calculate the probability density of a multivariate
gaussian distribution at x.
'''
n = x.shape[0]
return 1 / (np.power(2 * np.pi, n / 2.0) * np.absolute(np.power(np.linalg.det(cov), 0.5))) \
* np.exp(-0.5 * np.dot(x - mean, np.dot(np.linalg.inv(cov), x - mean)))
def gaussian(x, mean, std):
'''Calculate the probability density of a gaussian distribution
at x.
'''
return multivariate_gaussian(np.array([x]), np.array([mean]), np.array([[std]]))
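# Usage sketch (illustrative values only): at x == mean with std == 1 the density is
# 1 / sqrt(2 * pi) ~= 0.3989. Note that `std` is inserted directly into the 1x1
# covariance matrix, so it is effectively treated as a variance here.
# >>> gaussian(0.0, 0.0, 1.0)   # ~0.3989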
|
xingjiepan/ss_generator
|
ss_generator/numeric.py
|
Python
|
bsd-3-clause
| 561
|
"""
Content metadata exporter for Canvas
"""
from logging import getLogger
from integrated_channels.integrated_channel.exporters.content_metadata import ContentMetadataExporter
LOGGER = getLogger(__name__)
BLACKBOARD_COURSE_CONTENT_NAME = 'edX Course Details'
class BlackboardContentMetadataExporter(ContentMetadataExporter):
"""
Blackboard implementation of ContentMetadataExporter.
Note: courseId is not being exported here (instead done in client during content send)
"""
DATA_TRANSFORM_MAPPING = {
'externalId': 'key',
'course_metadata': 'course_metadata',
'course_content_metadata': 'course_content_metadata',
'course_child_content_metadata': 'course_child_content_metadata',
}
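    # Each key above is a field in the payload sent to Blackboard; each value names either
    # a field on the exported content metadata item or a transform_<value> method defined
    # below (a convention assumed from the base ContentMetadataExporter).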
DESCRIPTION_TEXT_TEMPLATE = "<a href={enrollment_url} target=_blank>Go to edX course page</a><br/>"
LARGE_DESCRIPTION_TEXT_TEMPLATE = "<a href={enrollment_url} style='font-size:150%' target=_blank>" \
"Go to edX course page</a><br/>"
COURSE_TITLE_TEMPLATE = '<h1 style="font-size:xxx-large; margin-bottom:0; margin-top:0">{title}</h1>'
COURSE_DESCRIPTION_TEMPLATE = '<p style="width:60%;">{description}</p>'
COURSE_CONTENT_IMAGE_TEMPLATE = '<img src={image_url} width="30%" height="25%" border="40px"/>'
COURSE_CONTENT_BODY_TEMPLATE = '<div><div style="display: inline-block">' \
'{course_title}{large_description_text}<hr/></div>' \
'<br/>{course_content_image}' \
'<br/><br/><br/>{course_description}' \
'<br/>{description_text}</div>'.format(
course_title=COURSE_TITLE_TEMPLATE,
large_description_text=LARGE_DESCRIPTION_TEXT_TEMPLATE,
course_content_image=COURSE_CONTENT_IMAGE_TEMPLATE,
course_description=COURSE_DESCRIPTION_TEMPLATE,
description_text=DESCRIPTION_TEXT_TEMPLATE,
)
def transform_course_metadata(self, content_metadata_item):
"""
Formats the metadata necessary to create a base course object in Blackboard
"""
return {
'name': content_metadata_item.get('title', None),
'externalId': content_metadata_item.get('key', None),
'description': self.DESCRIPTION_TEXT_TEMPLATE.format(
enrollment_url=content_metadata_item.get('enrollment_url', None)
)
}
def transform_course_content_metadata(self, content_metadata_item): # pylint: disable=unused-argument
"""
Formats the metadata necessary to create a course content object in Blackboard
"""
return {
'title': BLACKBOARD_COURSE_CONTENT_NAME,
'position': 0,
"contentHandler": {"id": "resource/x-bb-folder"}
}
def transform_course_child_content_metadata(self, content_metadata_item):
"""
Formats the metadata necessary to create a course content object in Blackboard
"""
title = content_metadata_item.get('title', None)
return {
'title': BLACKBOARD_COURSE_CONTENT_NAME,
'availability': 'Yes',
'contentHandler': {
'id': 'resource/x-bb-document',
},
'body': self.COURSE_CONTENT_BODY_TEMPLATE.format(
title=title,
description=content_metadata_item.get('full_description', None),
image_url=content_metadata_item.get('image_url', None),
enrollment_url=content_metadata_item.get('enrollment_url', None)
)
}
|
edx/edx-enterprise
|
integrated_channels/blackboard/exporters/content_metadata.py
|
Python
|
agpl-3.0
| 3,845
|
#!/bin/env python
# @info - Unit testing suite for the main server program. We will spin it up on localhost
# and send test API requests to it, making sure the correct responses are received.
from unit_test import *
class serverUnitTest(UnitTest) :
def __init__(self, testargs) :
pass
|
Praxyk/Praxyk-DevOps
|
server/unittest/server_unit_tests.py
|
Python
|
gpl-2.0
| 310
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
__init__.py
Created by otger on 29/03/17.
All rights reserved.
"""
|
otger/PubSubTest
|
tests/webui/api/__init__.py
|
Python
|
lgpl-3.0
| 113
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Garfield"
language = "en"
url = "http://www.garfield.com/"
start_date = "1978-06-19"
rights = "Jim Davis"
class Crawler(CrawlerBase):
history_capable_days = 100
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Eastern"
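    # For example (illustrative): a weekday strip such as 2015-01-05 maps to
    # http://images.ucomics.com/comics/ga/2015/ga150105.gif, while a Sunday strip
    # such as 2015-01-04 maps to http://picayune.uclick.com/comics/ga/2015/ga150104.jpg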
def crawl(self, pub_date):
if pub_date.weekday() == 6:
url = "http://picayune.uclick.com/comics/ga/{}.jpg".format(
pub_date.strftime("%Y/ga%y%m%d"),
)
else:
url = "http://images.ucomics.com/comics/ga/{}.gif".format(
pub_date.strftime("%Y/ga%y%m%d"),
)
return CrawlerImage(url)
|
jodal/comics
|
comics/comics/garfield.py
|
Python
|
agpl-3.0
| 795
|
from __future__ import division
import os
import tqdm
import copy
from random import (expovariate, uniform, triangular, gammavariate,
lognormvariate, weibullvariate)
from csv import writer, reader
from decimal import getcontext
from itertools import cycle
from .auxiliary import *
from .node import Node
from .exactnode import ExactNode, ExactArrivalNode
from .arrival_node import ArrivalNode
from .exit_node import ExitNode
from .individual import Individual
from .server import Server
from ciw import trackers
from ciw import deadlock
class Simulation(object):
"""
The Simulation class, that is the engine of the simulation.
"""
def __init__(self, network,
exact=False,
name='Simulation',
tracker=trackers.StateTracker(),
deadlock_detector=deadlock.NoDetection(),
node_class=None,
arrival_node_class=None,
individual_class=None,
server_class=None):
"""
        Initialise an instance of the simulation.
"""
self.current_time = 0.0
self.network = network
self.set_classes(node_class, arrival_node_class, individual_class, server_class)
if exact:
self.NodeTypes = [ExactNode for _ in range(network.number_of_nodes)]
self.ArrivalNodeType = ExactArrivalNode
getcontext().prec = exact
self.name = name
self.deadlock_detector = deadlock_detector
self.inter_arrival_times = self.find_arrival_dists()
self.service_times = self.find_service_dists()
self.batch_sizes = self.find_batching_dists()
self.show_simulation_to_distributions()
self.number_of_priority_classes = self.network.number_of_priority_classes
self.transitive_nodes = [node_type(i + 1, self) for i, node_type in enumerate(self.NodeTypes)]
self.nodes = ([self.ArrivalNodeType(self)] + self.transitive_nodes + [ExitNode()])
self.nodes[0].initialise()
self.statetracker = tracker
self.statetracker.initialise(self)
self.times_dictionary = {self.statetracker.hash_state(): 0.0}
self.times_to_deadlock = {}
self.rejection_dict = self.nodes[0].rejection_dict
self.baulked_dict = self.nodes[0].baulked_dict
self.unchecked_blockage = False
def __repr__(self):
"""
Representation of the simulation.
"""
return self.name
def find_arrival_dists(self):
"""
Create the dictionary of arrival time distribution
objects for each node for each customer class.
"""
return {node + 1: {
clss: copy.deepcopy(
self.network.customer_classes[clss].arrival_distributions[node]
)
for clss in range(self.network.number_of_classes)}
for node in range(self.network.number_of_nodes)}
def find_service_dists(self):
"""
Create the dictionary of service time distribution
objects for each node for each customer class.
"""
return {node + 1: {
clss: copy.deepcopy(
self.network.customer_classes[clss].service_distributions[node]
)
for clss in range(self.network.number_of_classes)}
for node in range(self.network.number_of_nodes)}
def find_batching_dists(self):
"""
Create the dictionary of batch size distribution
objects for each node for each class.
"""
return {node + 1: {
clss: copy.deepcopy(
self.network.customer_classes[clss].batching_distributions[node]
)
for clss in range(self.network.number_of_classes)}
for node in range(self.network.number_of_nodes)}
def show_simulation_to_distributions(self):
"""
Adds the simulation object as an attribute of the distribution objects
"""
for clss in range(self.network.number_of_classes):
for nd in range(self.network.number_of_nodes):
self.inter_arrival_times[nd + 1][clss].simulation = self
self.service_times[nd + 1][clss].simulation = self
self.batch_sizes[nd + 1][clss].simulation = self
def find_next_active_node(self):
"""
        Returns the next active node: the node whose next_event_date is soonest (ties broken at random).
"""
mindate = float("Inf")
next_active_nodes = []
for nd in self.nodes:
if nd.next_event_date < mindate:
mindate = nd.next_event_date
next_active_nodes = [nd]
elif nd.next_event_date == mindate:
next_active_nodes.append(nd)
if len(next_active_nodes) > 1:
return random_choice(next_active_nodes)
return next_active_nodes[0]
def get_all_individuals(self):
"""
Returns list of all individuals with at least one data record.
"""
return [individual for node in self.nodes[1:] for individual in
node.all_individuals if len(individual.data_records) > 0]
def get_all_records(self):
"""
Gets all data records from all individuals.
"""
records = []
for individual in self.get_all_individuals():
for record in individual.data_records:
records.append(record)
self.all_records = records
return records
def set_classes(self, node_class, arrival_node_class, individual_class, server_class):
"""
        Sets the ArrivalNode, Node, Individual, and Server classes used by the
        Simulation model, falling back to the default classes when None is given.
"""
if arrival_node_class is not None:
self.ArrivalNodeType = arrival_node_class
else:
self.ArrivalNodeType = ArrivalNode
if node_class is not None:
if not isinstance(node_class, list):
self.NodeTypes = [node_class for _ in range(self.network.number_of_nodes)]
else:
self.NodeTypes = node_class
else:
self.NodeTypes = [Node for _ in range(self.network.number_of_nodes)]
if individual_class is not None:
self.IndividualType = individual_class
else:
self.IndividualType = Individual
if server_class is not None:
self.ServerType = server_class
else:
self.ServerType = Server
def event_and_return_nextnode(self, next_active_node):
"""
Carries out the event of current next_active_node,
and returns the next next_active_node
"""
next_active_node.have_event()
for node in self.transitive_nodes:
node.update_next_event_date()
return self.find_next_active_node()
def simulate_until_deadlock(self):
"""
Runs the simulation until deadlock is reached.
"""
deadlocked = False
next_active_node = self.find_next_active_node()
self.current_time = next_active_node.next_event_date
while not deadlocked:
next_active_node = self.event_and_return_nextnode(next_active_node)
current_state = self.statetracker.hash_state()
if current_state not in self.times_dictionary:
self.times_dictionary[current_state] = self.current_time
if self.unchecked_blockage:
deadlocked = self.deadlock_detector.detect_deadlock()
self.unchecked_blockage = False
if deadlocked:
time_of_deadlock = self.current_time
self.current_time = next_active_node.next_event_date
self.wrap_up_servers(time_of_deadlock)
self.times_to_deadlock = {state:
time_of_deadlock - self.times_dictionary[state]
for state in self.times_dictionary.keys()}
def simulate_until_max_time(self, max_simulation_time, progress_bar=False):
"""
Runs the simulation until max_simulation_time is reached.
"""
next_active_node = self.find_next_active_node()
self.current_time = next_active_node.next_event_date
if progress_bar:
self.progress_bar = tqdm.tqdm(total=max_simulation_time)
while self.current_time < max_simulation_time:
next_active_node = self.event_and_return_nextnode(next_active_node)
self.statetracker.timestamp()
if progress_bar:
remaining_time = max_simulation_time - self.progress_bar.n
time_increment = next_active_node.next_event_date - self.current_time
self.progress_bar.update(min(time_increment, remaining_time))
self.current_time = next_active_node.next_event_date
self.wrap_up_servers(max_simulation_time)
if progress_bar:
remaining_time = max(max_simulation_time - self.progress_bar.n, 0)
self.progress_bar.update(remaining_time)
self.progress_bar.close()
def simulate_until_max_customers(self,
max_customers,
progress_bar=False,
method='Finish'):
"""
Runs the simulation until max_customers is reached:
- Method: Finish
Simulates until max_customers has reached the Exit Node
- Method: Arrive
Simulates until max_customers have spawned at the Arrival Node
- Method: Accept
Simulates until max_customers have been spawned and accepted
(not rejected) at the Arrival Node
"""
next_active_node = self.find_next_active_node()
self.current_time = next_active_node.next_event_date
if progress_bar:
self.progress_bar = tqdm.tqdm(total=max_customers)
if method == 'Finish':
check = lambda : self.nodes[-1].number_of_individuals
elif method == 'Arrive':
check = lambda : self.nodes[0].number_of_individuals
elif method == 'Accept':
check = lambda : self.nodes[0].number_accepted_individuals
else:
raise ValueError("Invalid 'method' for 'simulate_until_max_customers'.")
while check() < max_customers:
old_check = check()
next_active_node = self.event_and_return_nextnode(next_active_node)
self.statetracker.timestamp()
if progress_bar:
remaining_time = max_customers - self.progress_bar.n
time_increment = check() - old_check
self.progress_bar.update(min(time_increment, remaining_time))
previous_time = self.current_time
self.current_time = next_active_node.next_event_date
self.wrap_up_servers(previous_time)
if progress_bar:
remaining_time = max(max_customers - self.progress_bar.n, 0)
self.progress_bar.update(remaining_time)
self.progress_bar.close()
def wrap_up_servers(self, current_time):
"""
Updates the servers' total_time and busy_time as
the end of the simulation run. Finds the overall
server utilisation for each node.
"""
for nd in self.transitive_nodes:
nd.wrap_up_servers(current_time)
nd.find_server_utilisation()
def write_records_to_file(self, file_name, headers=True):
"""
Writes the records for all individuals to a csv file
"""
root = os.getcwd()
directory = os.path.join(root, file_name)
data_file = open('%s' % directory, 'w')
csv_wrtr = writer(data_file)
if headers:
csv_wrtr.writerow(['I.D. Number',
'Customer Class',
'Node',
'Arrival Date',
'Waiting Time',
'Service Start Date',
'Service Time',
'Service End Date',
'Time Blocked',
'Exit Date',
'Destination',
'Queue Size at Arrival',
'Queue Size at Departure',
'Server I.D.']),
records = self.get_all_records()
for row in records:
csv_wrtr.writerow(row)
data_file.close()
|
CiwPython/Ciw
|
ciw/simulation.py
|
Python
|
mit
| 12,635
|
from datetime import timedelta
from django.db import models
from django.db.models import functions
from yawn.utilities import logger
class Worker(models.Model):
"""Information about current and past workers"""
#
# NOTE: consider instead taking an advisory lock for each worker,
# and using it to check if a worker is still connected.
# See `pg_try_advisory_lock` and `select * from pg_locks where locktype = 'advisory'`
    # That would give more immediate feedback, but it's not clear we need to be faster.
#
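    # A sketch of that alternative (hypothetical, not what this model does): each worker
    # would hold `SELECT pg_try_advisory_lock(<worker id>)` on its own connection, and a
    # monitor could treat ids missing from
    # `SELECT objid FROM pg_locks WHERE locktype = 'advisory'` as lost workers.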
ACTIVE = 'active'
EXITED = 'exited'
LOST = 'lost'
STATUS_CHOICES = [(x, x) for x in (ACTIVE, EXITED, LOST)]
name = models.TextField(blank=False)
status = models.TextField(choices=STATUS_CHOICES, default=ACTIVE)
start_timestamp = models.DateTimeField(default=functions.Now)
last_heartbeat = models.DateTimeField(default=functions.Now)
@staticmethod
def find_lost(timeout):
from yawn.task.models import Execution
# Make a sparse index so looking up active workers is fast:
# CREATE INDEX yawn_worker_active ON yawn_worker (status) WHERE status = 'active'
lost = Worker.objects.filter(
status=Worker.ACTIVE, last_heartbeat__lt=functions.Now() - timedelta(seconds=timeout)
)
for worker in lost:
logger.warning('Marking %r as lost', worker)
worker.status = Worker.LOST
worker.save()
executions = worker.execution_set.filter(status=Execution.RUNNING)
for execution in executions:
logger.warning('Marking %r as lost', execution)
execution.mark_finished(lost=True)
def __str__(self):
return self.name
class Queue(models.Model):
"""Arbitrary tag defining where tasks run."""
name = models.TextField(unique=True)
_default = None
def __str__(self):
return self.name
@classmethod
def get_default_queue(cls):
if not cls._default:
cls._default = Queue.objects.get_or_create(name='default')[0]
return cls._default
class Message(models.Model):
"""The order of tasks waiting to be processed, like messages on a queue"""
# I hope we never get to 9 Quintillion (9,223,372,036,854,775,807) messages
id = models.BigAutoField(primary_key=True)
queue = models.ForeignKey(Queue, models.PROTECT)
task = models.ForeignKey('Task', models.PROTECT)
|
aclowes/yawn
|
yawn/worker/models.py
|
Python
|
mit
| 2,457
|
#!/usr/bin/env python
import gammu
import time
# Whether be a bit more verbose
verbose = False
def ReplyTest(message):
if message['Number'] == '999':
# No reply to this number
return None
return 'Reply to %s' % message['Text']
# Reply function, first element is matching string, second can be:
# - string = fixed string will be sent as reply
# - function = function will be called with SMS data and it's result will be sent
# - None = no reply
replies = [
('1/1 www:', 'This is test'),
('1/2 www:', ReplyTest),
('2/2 www:', None),
]
def Callback(sm, type, data):
if verbose:
print 'Received incoming event type %s, data:' % type
if type != 'SMS':
print 'Unsupported event!'
if not data.has_key('Number'):
data = sm.GetSMS(data['Folder'], data['Location'])[0]
if verbose:
print data
for reply in replies:
if reply[0] == data['Text'][:len(reply[0])]:
if callable(reply[1]):
response = reply[1](data)
else:
response = reply[1]
if response is not None:
message = {'Text': response, 'SMSC': {'Location': 1}, 'Number': data['Number']}
if verbose:
print message
sm.SendSMS(message)
else:
if verbose:
print 'No reply!'
break
sm = gammu.StateMachine()
sm.ReadConfig()
sm.Init()
sm.SetIncomingCallback(Callback)
try:
sm.SetIncomingSMS()
except gammu.ERR_NOTSUPPORTED:
print 'Your phone does not support incoming SMS notifications!'
# We need to keep communication with phone to get notifications
print 'Press Ctrl+C to interrupt'
while 1:
time.sleep(1)
status = sm.GetBatteryCharge()
print 'Battery is at %d%%' % status['BatteryPercent']
|
markjeee/gammu
|
python/examples/sms-replier.py
|
Python
|
gpl-2.0
| 1,872
|
# kpbochenek@gmail.com
import time
memo = ["A0"] + [chr(a) + str(b) for a in range(ord('A'), ord('Z')+1) for b in range(1, 10)]
dformat = "%Y-%m-%d"
def count_ingots(report):
return sum(map(lambda v: memo.index(v), report.split(",")))
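# For example, "A1" sits at index 1 of memo and "B2" at index 11, so
# count_ingots("A1,B2") == 12.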
def count_reports(full_report, from_date, to_date):
from_date, to_date = time.strptime(from_date, dformat), time.strptime(to_date, dformat)
result = 0
for line in full_report.split('\n'):
data, ingots = line.split(' ')
pdata = time.strptime(data, dformat)
if from_date <= pdata and pdata <= to_date:
result += sum([count_ingots(x) for x in ingots.split(",")])
return result
if __name__ == '__main__':
# These using only for self-checking and not necessary for auto-testing
assert count_reports("2015-01-01 A1,B2\n"
"2015-01-05 C3,C2,C1\n"
"2015-02-01 B4\n"
"2015-01-03 Z9,Z9",
"2015-01-01", "2015-01-31") == 540, "Normal"
assert count_reports("2000-02-02 Z2,Z1\n"
"2000-02-01 Z2,Z1\n"
"2000-02-03 Z2,Z1",
"2000-02-04", "2000-02-28") == 0, "Zero"
assert count_reports("2999-12-31 Z9,A1", "2000-01-01", "2999-12-31") == 235, "Millenium"
|
kpbochenek/empireofcode
|
daily_reports.py
|
Python
|
apache-2.0
| 1,324
|
# Serial Photo Merge
# Copyright (C) 2017 Simone Riva mail: simone.rva {at} gmail {dot} com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from imgmerge.mergeProcedureVirtual import *
import numpy as np
import sys
import scipy.ndimage as ndimage
try:
import pycuda.autoinit
from pycuda import gpuarray
except:
pass
from imgmerge.readImg import ReadImageBasic
from imgmerge.image import Image
from imgmerge.readImgFactory import ReadImageFarctory
class MergeAverageImage(MergeProcedureVirtual):
def __init__(self):
super().__init__()
#self._resimg = None
def execute(self): # new version
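        # Accumulate a pixel-wise sum into the first image encountered (skipping any image
        # whose shape differs), then divide by the number of images actually summed.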
self.resulting_image = None
f_first = True
img_cnt = 0.0
for itr_img in self.images_iterator:
img_cnt += 1.0
if f_first:
self.resulting_image = itr_img
f_first = False
continue
if itr_img.shape != self.resulting_image.shape:
img_cnt -= 1.0
continue
self.resulting_image.add(itr_img)
self.resulting_image.image[:] = self.resulting_image.image[:] / img_cnt
class MergeAverageImageCUDA(MergeProcedureVirtual):
def __init__(self):
super().__init__()
#self._resimg = None
def execute(self):
resulting_image = None
nda = None
f_first = True
img_cnt = 0
for itr_img in self.images_iterator:
img_cnt += 1
if f_first:
nda = np.ndarray(shape=itr_img.image.shape,
dtype=itr_img.image.dtype)
nda[:] = itr_img.image[:]
self.resulting_image = itr_img
resulting_image = gpuarray.to_gpu(nda)
current_image = gpuarray.zeros_like(resulting_image)
f_first = False
shape = itr_img.shape
continue
if shape != itr_img.shape:
img_cnt -= 1
continue
current_image.set(itr_img.image)
resulting_image += current_image
resulting_image /= img_cnt
self.resulting_image.image[:] = resulting_image.get()
|
simon-r/SerialPhotoMerge
|
imgmerge/mergeAverageImage.py
|
Python
|
gpl-3.0
| 2,816
|
from urlparse import urljoin
from scrapy import log
from scrapy.http import HtmlResponse
from scrapy.utils.response import get_meta_refresh
from scrapy.exceptions import IgnoreRequest, NotConfigured
class BaseRedirectMiddleware(object):
enabled_setting = 'REDIRECT_ENABLED'
def __init__(self, settings):
if not settings.getbool(self.enabled_setting):
raise NotConfigured
self.max_redirect_times = settings.getint('REDIRECT_MAX_TIMES')
self.priority_adjust = settings.getint('REDIRECT_PRIORITY_ADJUST')
@classmethod
def from_crawler(cls, crawler):
return cls(crawler.settings)
def _redirect(self, redirected, request, spider, reason):
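        # Track both a per-request TTL and the number of redirects followed so far in
        # request.meta, and give up with IgnoreRequest once either budget is exhausted.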
ttl = request.meta.setdefault('redirect_ttl', self.max_redirect_times)
redirects = request.meta.get('redirect_times', 0) + 1
if ttl and redirects <= self.max_redirect_times:
redirected.meta['redirect_times'] = redirects
redirected.meta['redirect_ttl'] = ttl - 1
redirected.meta['redirect_urls'] = request.meta.get('redirect_urls', []) + \
[request.url]
redirected.dont_filter = request.dont_filter
redirected.priority = request.priority + self.priority_adjust
log.msg(format="Redirecting (%(reason)s) to %(redirected)s from %(request)s",
level=log.DEBUG, spider=spider, request=request,
redirected=redirected, reason=reason)
return redirected
else:
log.msg(format="Discarding %(request)s: max redirections reached",
level=log.DEBUG, spider=spider, request=request)
raise IgnoreRequest("max redirections reached")
def _redirect_request_using_get(self, request, redirect_url):
redirected = request.replace(url=redirect_url, method='GET', body='')
redirected.headers.pop('Content-Type', None)
redirected.headers.pop('Content-Length', None)
return redirected
class RedirectMiddleware(BaseRedirectMiddleware):
"""Handle redirection of requests based on response status and meta-refresh html tag"""
def process_response(self, request, response, spider):
if 'dont_redirect' in request.meta:
return response
if request.method == 'HEAD':
if response.status in [301, 302, 303, 307] and 'Location' in response.headers:
redirected_url = urljoin(request.url, response.headers['location'])
redirected = request.replace(url=redirected_url)
return self._redirect(redirected, request, spider, response.status)
else:
return response
if response.status in [302, 303] and 'Location' in response.headers:
redirected_url = urljoin(request.url, response.headers['location'])
redirected = self._redirect_request_using_get(request, redirected_url)
return self._redirect(redirected, request, spider, response.status)
if response.status in [301, 307] and 'Location' in response.headers:
redirected_url = urljoin(request.url, response.headers['location'])
redirected = request.replace(url=redirected_url)
return self._redirect(redirected, request, spider, response.status)
return response
class MetaRefreshMiddleware(BaseRedirectMiddleware):
enabled_setting = 'METAREFRESH_ENABLED'
def __init__(self, settings):
super(MetaRefreshMiddleware, self).__init__(settings)
self._maxdelay = settings.getint('REDIRECT_MAX_METAREFRESH_DELAY',
settings.getint('METAREFRESH_MAXDELAY'))
def process_response(self, request, response, spider):
if 'dont_redirect' in request.meta or request.method == 'HEAD' or \
not isinstance(response, HtmlResponse):
return response
if isinstance(response, HtmlResponse):
interval, url = get_meta_refresh(response)
if url and interval < self._maxdelay:
redirected = self._redirect_request_using_get(request, url)
return self._redirect(redirected, request, spider, 'meta refresh')
return response
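# Hedged configuration sketch (not part of the original module): both
# middlewares are driven entirely by project settings. Using only the setting
# names referenced above, a project would typically enable and bound redirects
# with something like:
#
#   REDIRECT_ENABLED = True
#   REDIRECT_MAX_TIMES = 20          # per-request redirect budget
#   REDIRECT_PRIORITY_ADJUST = 2     # bump priority of redirected requests
#   METAREFRESH_ENABLED = True
#
# The concrete values are illustrative assumptions, not guaranteed defaults.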
|
ofanoyi/scrapy
|
scrapy/contrib/downloadermiddleware/redirect.py
|
Python
|
bsd-3-clause
| 4,259
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PerlSvg(PerlPackage):
"""Perl extension for generating Scalable Vector Graphics (SVG) documents.
"""
homepage = "http://search.cpan.org/~manwar/SVG-2.78/lib/SVG.pm"
url = "http://search.cpan.org/CPAN/authors/id/M/MA/MANWAR/SVG-2.78.tar.gz"
version('2.78', 'b247c216ab11fae24533d0ce556e2e75')
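    # Hedged usage note (not part of the original package file): with this file
    # in place under a Spack repo (see the path it ships at), the module would
    # normally be built and installed with `spack install perl-svg`.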
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/perl-svg/package.py
|
Python
|
lgpl-2.1
| 1,582
|
from thlib.side.Qt import QtWidgets as QtGui
from thlib.side.Qt import QtGui as Qt4Gui
from thlib.side.Qt import QtCore
import thlib.tactic_classes as tc
from thlib.environment import env_inst
import thlib.global_functions as gf
from thlib.ui_classes.ui_custom_qwidgets import Ui_horizontalCollapsableWidget
from thlib.ui_classes.ui_tactic_column_classes import Ui_tacticColumnEditorWidget
class Ui_columnsEditorWidget(QtGui.QWidget):
def __init__(self, project, stype, parent=None):
super(self.__class__, self).__init__(parent=parent)
self.project = project
self.stype = stype
self.item = None
self.items = []
self.columns_widgets = []
self.multiple_mode = False
self.current_active_tab = 0
self.create_ui()
def create_ui(self):
self.create_main_layout()
self.create_toolbar()
self.create_options_toolbar()
self.create_stretch()
self.create_tabbed_widget()
self.controls_actions()
def create_main_layout(self):
self.main_layout = QtGui.QGridLayout()
self.main_layout.setSpacing(0)
self.main_layout.setContentsMargins(0, 0, 0, 0)
self.setLayout(self.main_layout)
def controls_actions(self):
self.save_button.clicked.connect(self.save_all_changes)
self.refresh_button.clicked.connect(self.refresh)
self.definition_combo_box.currentIndexChanged.connect(self.refresh)
def create_toolbar(self):
self.collapsable_toolbar = Ui_horizontalCollapsableWidget()
buttons_layout = QtGui.QHBoxLayout()
buttons_layout.setSpacing(0)
buttons_layout.setContentsMargins(0, 0, 0, 0)
self.collapsable_toolbar.set_direction('right')
self.collapsable_toolbar.setLayout(buttons_layout)
self.collapsable_toolbar.setCollapsed(False)
self.save_button = QtGui.QToolButton()
self.save_button.setAutoRaise(True)
self.save_button.setIcon(gf.get_icon('content-save-all', icons_set='mdi', scale_factor=1))
self.save_button.setToolTip('Save Current Changes')
self.refresh_button = QtGui.QToolButton()
self.refresh_button.setAutoRaise(True)
self.refresh_button.setIcon(gf.get_icon('refresh', icons_set='mdi', scale_factor=1.3))
self.refresh_button.setToolTip('Refresh Current Tasks')
buttons_layout.addWidget(self.save_button)
buttons_layout.addWidget(self.refresh_button)
self.main_layout.addWidget(self.collapsable_toolbar, 0, 0, 1, 1)
def create_options_toolbar(self):
self.collapsable_options_toolbar = Ui_horizontalCollapsableWidget()
buttons_layout = QtGui.QHBoxLayout()
buttons_layout.setSpacing(9)
buttons_layout.setContentsMargins(0, 0, 0, 0)
self.collapsable_options_toolbar.set_direction('right')
self.collapsable_options_toolbar.setLayout(buttons_layout)
self.collapsable_options_toolbar.setCollapsed(True)
self.auto_save_check_box = QtGui.QCheckBox('Autosave')
self.auto_save_check_box.setChecked(False)
self.definition_label = QtGui.QLabel('Definition: ')
self.definition_combo_box = QtGui.QComboBox()
buttons_layout.addWidget(self.definition_label)
buttons_layout.addWidget(self.definition_combo_box)
buttons_layout.addWidget(self.auto_save_check_box)
self.main_layout.addWidget(self.collapsable_options_toolbar, 0, 1, 1, 1)
def create_stretch(self):
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.main_layout.addItem(spacerItem, 0, 2, 1, 1)
self.main_layout.setColumnStretch(2, 1)
def fill_definition_combo_box(self):
self.definition_combo_box.clear()
stype = self.item.stype
if stype.info.get('definition'):
current_idx = 0
default_current_definition = 'table'
for idx, definition in enumerate(stype.info['definition'].keys()):
if definition == default_current_definition:
current_idx = idx
self.definition_combo_box.addItem(gf.prettify_text(definition))
self.definition_combo_box.setItemData(idx, definition, QtCore.Qt.UserRole)
self.definition_combo_box.setCurrentIndex(current_idx)
def save_all_changes(self):
if self.multiple_mode:
data_to_update = {}
for item in self.items:
update_dict = {}
for column_widget in self.columns_widgets:
changed_data = column_widget.get_changed_data()
if changed_data is not None:
update_dict[column_widget.get_column()] = changed_data
if self.item.type == 'snapshot':
sobject = item.get_snapshot()
sobject.project = self.project # Snapshot class created without project in it
else:
sobject = item.get_sobject()
data_to_update[sobject.get_search_key()] = update_dict
return tc.server_start(project=self.project.get_code()).update_multiple(
data=data_to_update,
triggers=True
)
else:
if self.item.type == 'snapshot':
sobject = self.item.get_snapshot()
sobject.project = self.project # Snapshot class created without project in it
else:
sobject = self.item.get_sobject()
for column_widget in self.columns_widgets:
changed_data = column_widget.get_changed_data()
if changed_data is not None:
sobject.set_value(column_widget.get_column(), changed_data)
sobject.commit()
def set_dock_title(self, title_string):
checkin_out_widget = env_inst.get_check_tree(self.project.get_code(), 'checkin_out', self.stype.get_code())
columns_viewer_widget = checkin_out_widget.get_columns_viewer_widget()
dock_widget = columns_viewer_widget.parent()
if dock_widget:
if isinstance(dock_widget, QtGui.QDockWidget):
dock_widget.setWindowTitle(title_string)
def refresh(self):
if self.multiple_mode:
self.customize_with_multiple_items()
else:
self.customize_with_item()
def set_items(self, items_list):
if not self.visibleRegion().isEmpty():
self.items = items_list
self.fill_definition_combo_box()
if self.items:
self.customize_with_multiple_items()
else:
self.customize_without_item()
def set_item(self, item):
if not self.visibleRegion().isEmpty():
self.item = item
s = gf.time_it()
# self.fill_definition_combo_box()
gf.time_it(s)
if self.item:
self.customize_with_item()
else:
self.customize_without_item()
def customize_with_multiple_items(self):
self.multiple_mode = True
self.current_active_tab = self.columns_tab_widget.currentIndex()
self.columns_tab_widget.clear()
self.columns_widgets = []
self.set_dock_title(u'Multiple Editing Mode for: {0} items'.format(len(self.items)))
table_columns = []
stype = self.item.stype
idx = self.definition_combo_box.currentIndex()
current_definition = self.definition_combo_box.itemData(idx, QtCore.Qt.UserRole)
if not current_definition:
current_definition = 'table'
for i in stype.get_definition(current_definition):
table_columns.append(i.get('name'))
exclude_columns = ['__search_type__', '__search_key__', '__tasks_count__', '__notes_count__', '__snapshots__']
if self.item.type == 'snapshot':
sobject = self.item.get_snapshot()
else:
sobject = self.item.get_sobject()
if sobject:
sobject_dict = sobject.get_info()
for column, val in sobject_dict.items():
if column not in exclude_columns:
if column in table_columns:
column_editor = Ui_tacticColumnEditorWidget(sobject, column, stype, multiple_mode=True)
column_title = None
for j in stype.get_definition('definition'):
if j.get('name') == column:
column_title = j.get('title')
if not column_title:
column_title = gf.prettify_text(column)
self.columns_widgets.append(column_editor)
self.columns_tab_widget.addTab(column_editor, u'{0} | {1}'.format(column_title, len(self.items)))
self.columns_tab_widget.setCurrentIndex(self.current_active_tab)
def customize_with_item(self):
self.multiple_mode = False
        # TODO: save tab names together with their stypes

self.current_active_tab = self.columns_tab_widget.currentIndex()
self.columns_tab_widget.clear()
self.columns_widgets = []
table_columns = []
stype = self.item.stype
idx = self.definition_combo_box.currentIndex()
current_definition = self.definition_combo_box.itemData(idx, QtCore.Qt.UserRole)
if not current_definition:
current_definition = 'edit'
for i in stype.get_definition(current_definition):
table_columns.append(i.get('name'))
exclude_columns = ['__search_type__', '__search_key__', '__tasks_count__', '__notes_count__', '__snapshots__', 'preview']
if self.item.type == 'snapshot':
sobject = self.item.get_snapshot()
else:
sobject = self.item.get_sobject()
if sobject:
self.set_dock_title(u'Editing Columns of: {0}'.format(sobject.get_title()))
for column in table_columns:
if column not in exclude_columns:
if column in table_columns:
column_editor = Ui_tacticColumnEditorWidget(sobject, column, stype)
column_title = None
for j in stype.get_definition('definition'):
if j.get('name') == column:
column_title = j.get('title')
if not column_title:
column_title = gf.prettify_text(column)
self.columns_widgets.append(column_editor)
self.columns_tab_widget.addTab(column_editor, column_title)
self.columns_tab_widget.setCurrentIndex(self.current_active_tab)
def customize_without_item(self):
self.multiple_mode = False
self.item = None
self.items = []
self.columns_widgets = []
self.columns_tab_widget.clear()
self.set_dock_title(u'Columns Editor')
def create_tabbed_widget(self):
self.columns_tab_widget = QtGui.QTabWidget(self)
self.columns_tab_widget.setMovable(True)
self.columns_tab_widget.setTabsClosable(False)
self.columns_tab_widget.setObjectName("notes_tab_widget")
self.columns_tab_widget.setStyleSheet(
'#notes_tab_widget > QTabBar::tab {background: transparent;border: 2px solid transparent;'
'border-top-left-radius: 3px;border-top-right-radius: 3px;border-bottom-left-radius: 0px;border-bottom-right-radius: 0px;padding: 4px;}'
'#notes_tab_widget > QTabBar::tab:selected, #notes_tab_widget > QTabBar::tab:hover {'
'background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 rgba(255, 255, 255, 48), stop: 1 rgba(255, 255, 255, 32));}'
'#notes_tab_widget > QTabBar::tab:selected {border-color: transparent;}'
'#notes_tab_widget > QTabBar::tab:!selected {margin-top: 0px;}')
self.main_layout.addWidget(self.columns_tab_widget, 1, 0, 1, 3)
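# Hedged usage sketch (not part of the original module): the editor is meant to
# be embedded in a dock of the check-in/out tree and fed items from it, roughly
#
#   editor = Ui_columnsEditorWidget(project, stype)
#   editor.set_item(item)          # single-item editing mode
#   editor.set_items(items_list)   # multiple-item editing mode
#
# where `project`, `stype`, `item` and `items_list` are assumed to come from the
# surrounding application (the env_inst / checkin_out widgets referenced above).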
|
listyque/TACTIC-Handler
|
thlib/ui_classes/ui_columns_editor_classes.py
|
Python
|
epl-1.0
| 12,181
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models for nova data.
"""
from sqlalchemy import Column, Integer, BigInteger, String, schema
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import ForeignKey, DateTime, Boolean, Text, Float
from sqlalchemy.orm import relationship, backref, object_mapper
from nova.db.sqlalchemy.session import get_session
from nova import exception
from nova import flags
from nova.openstack.common import timeutils
FLAGS = flags.FLAGS
BASE = declarative_base()
class NovaBase(object):
"""Base class for Nova Models."""
__table_args__ = {'mysql_engine': 'InnoDB'}
__table_initialized__ = False
created_at = Column(DateTime, default=timeutils.utcnow)
updated_at = Column(DateTime, onupdate=timeutils.utcnow)
deleted_at = Column(DateTime)
deleted = Column(Boolean, default=False)
metadata = None
def save(self, session=None):
"""Save this object."""
if not session:
session = get_session()
session.add(self)
try:
session.flush()
except IntegrityError, e:
if str(e).endswith('is not unique'):
raise exception.Duplicate(str(e))
else:
raise
def delete(self, session=None):
"""Delete this object."""
self.deleted = True
self.deleted_at = timeutils.utcnow()
self.save(session=session)
def __setitem__(self, key, value):
setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key)
def get(self, key, default=None):
return getattr(self, key, default)
def __iter__(self):
columns = dict(object_mapper(self).columns).keys()
# NOTE(russellb): Allow models to specify other keys that can be looked
# up, beyond the actual db columns. An example would be the 'name'
# property for an Instance.
if hasattr(self, '_extra_keys'):
columns.extend(self._extra_keys())
self._i = iter(columns)
return self
def next(self):
n = self._i.next()
return n, getattr(self, n)
def update(self, values):
"""Make the model object behave like a dict"""
for k, v in values.iteritems():
setattr(self, k, v)
def iteritems(self):
"""Make the model object behave like a dict.
Includes attributes from joins."""
local = dict(self)
joined = dict([(k, v) for k, v in self.__dict__.iteritems()
if not k[0] == '_'])
local.update(joined)
return local.iteritems()
class Service(BASE, NovaBase):
"""Represents a running service on a host."""
__tablename__ = 'services'
id = Column(Integer, primary_key=True)
host = Column(String(255)) # , ForeignKey('hosts.id'))
binary = Column(String(255))
topic = Column(String(255))
report_count = Column(Integer, nullable=False, default=0)
disabled = Column(Boolean, default=False)
availability_zone = Column(String(255), default='nova')
class ComputeNode(BASE, NovaBase):
"""Represents a running compute service on a host."""
__tablename__ = 'compute_nodes'
id = Column(Integer, primary_key=True)
service_id = Column(Integer, ForeignKey('services.id'), nullable=True)
service = relationship(Service,
backref=backref('compute_node'),
foreign_keys=service_id,
primaryjoin='and_('
'ComputeNode.service_id == Service.id,'
'ComputeNode.deleted == False)')
vcpus = Column(Integer)
memory_mb = Column(Integer)
local_gb = Column(Integer)
vcpus_used = Column(Integer)
memory_mb_used = Column(Integer)
local_gb_used = Column(Integer)
hypervisor_type = Column(Text)
hypervisor_version = Column(Integer)
hypervisor_hostname = Column(String(255))
# Free Ram, amount of activity (resize, migration, boot, etc) and
# the number of running VM's are a good starting point for what's
# important when making scheduling decisions.
free_ram_mb = Column(Integer)
free_disk_gb = Column(Integer)
current_workload = Column(Integer)
running_vms = Column(Integer)
# Note(masumotok): Expected Strings example:
#
# '{"arch":"x86_64",
# "model":"Nehalem",
# "topology":{"sockets":1, "threads":2, "cores":3},
# "features":["tdtscp", "xtpr"]}'
#
# Points are "json translatable" and it must have all dictionary keys
# above, since it is copied from <cpu> tag of getCapabilities()
# (See libvirt.virtConnection).
cpu_info = Column(Text, nullable=True)
disk_available_least = Column(Integer)
class ComputeNodeStat(BASE, NovaBase):
"""Stats related to the current workload of a compute host that are
intended to aid in making scheduler decisions."""
__tablename__ = 'compute_node_stats'
id = Column(Integer, primary_key=True)
key = Column(String(511))
value = Column(String(255))
compute_node_id = Column(Integer, ForeignKey('compute_nodes.id'))
primary_join = ('and_(ComputeNodeStat.compute_node_id == '
'ComputeNode.id, ComputeNodeStat.deleted == False)')
stats = relationship("ComputeNode", backref="stats",
primaryjoin=primary_join)
def __str__(self):
return "{%d: %s = %s}" % (self.compute_node_id, self.key, self.value)
class Certificate(BASE, NovaBase):
"""Represents a x509 certificate"""
__tablename__ = 'certificates'
id = Column(Integer, primary_key=True)
user_id = Column(String(255))
project_id = Column(String(255))
file_name = Column(String(255))
class Instance(BASE, NovaBase):
"""Represents a guest VM."""
__tablename__ = 'instances'
injected_files = []
id = Column(Integer, primary_key=True, autoincrement=True)
@property
def name(self):
try:
base_name = FLAGS.instance_name_template % self.id
except TypeError:
# Support templates like "uuid-%(uuid)s", etc.
info = {}
# NOTE(russellb): Don't use self.iteritems() here, as it will
# result in infinite recursion on the name property.
for column in iter(object_mapper(self).columns):
key = column.name
# prevent recursion if someone specifies %(name)s
# %(name)s will not be valid.
if key == 'name':
continue
info[key] = self[key]
try:
base_name = FLAGS.instance_name_template % info
except KeyError:
base_name = self.uuid
return base_name
def _extra_keys(self):
return ['name']
user_id = Column(String(255))
project_id = Column(String(255))
image_ref = Column(String(255))
kernel_id = Column(String(255))
ramdisk_id = Column(String(255))
server_name = Column(String(255))
# image_ref = Column(Integer, ForeignKey('images.id'), nullable=True)
# kernel_id = Column(Integer, ForeignKey('images.id'), nullable=True)
# ramdisk_id = Column(Integer, ForeignKey('images.id'), nullable=True)
# ramdisk = relationship(Ramdisk, backref=backref('instances', order_by=id))
# kernel = relationship(Kernel, backref=backref('instances', order_by=id))
launch_index = Column(Integer)
key_name = Column(String(255))
key_data = Column(Text)
power_state = Column(Integer)
vm_state = Column(String(255))
task_state = Column(String(255))
memory_mb = Column(Integer)
vcpus = Column(Integer)
root_gb = Column(Integer)
ephemeral_gb = Column(Integer)
hostname = Column(String(255))
host = Column(String(255)) # , ForeignKey('hosts.id'))
# *not* flavor_id
instance_type_id = Column(Integer)
user_data = Column(Text)
reservation_id = Column(String(255))
scheduled_at = Column(DateTime)
launched_at = Column(DateTime)
terminated_at = Column(DateTime)
availability_zone = Column(String(255))
# User editable field for display in user-facing UIs
display_name = Column(String(255))
display_description = Column(String(255))
# To remember on which host an instance booted.
# An instance may have moved to another host by live migration.
launched_on = Column(Text)
locked = Column(Boolean)
os_type = Column(String(255))
architecture = Column(String(255))
vm_mode = Column(String(255))
uuid = Column(String(36))
root_device_name = Column(String(255))
default_ephemeral_device = Column(String(255), nullable=True)
default_swap_device = Column(String(255), nullable=True)
config_drive = Column(String(255))
# User editable field meant to represent what ip should be used
# to connect to the instance
access_ip_v4 = Column(String(255))
access_ip_v6 = Column(String(255))
auto_disk_config = Column(Boolean())
progress = Column(Integer)
# EC2 instance_initiated_shutdown_terminate
# True: -> 'terminate'
# False: -> 'stop'
# Note(maoy): currently Nova will always stop instead of terminate
# no matter what the flag says. So we set the default to False.
shutdown_terminate = Column(Boolean(), default=False, nullable=False)
# EC2 disable_api_termination
disable_terminate = Column(Boolean(), default=False, nullable=False)
class InstanceInfoCache(BASE, NovaBase):
"""
Represents a cache of information about an instance
"""
__tablename__ = 'instance_info_caches'
id = Column(Integer, primary_key=True, autoincrement=True)
# text column used for storing a json object of network data for api
network_info = Column(Text)
instance_uuid = Column(String(36), ForeignKey('instances.uuid'),
nullable=False, unique=True)
instance = relationship(Instance,
backref=backref('info_cache', uselist=False),
foreign_keys=instance_uuid,
primaryjoin=instance_uuid == Instance.uuid)
class InstanceTypes(BASE, NovaBase):
"""Represent possible instance_types or flavor of VM offered"""
__tablename__ = "instance_types"
id = Column(Integer, primary_key=True)
name = Column(String(255))
memory_mb = Column(Integer)
vcpus = Column(Integer)
root_gb = Column(Integer)
ephemeral_gb = Column(Integer)
flavorid = Column(String(255))
swap = Column(Integer, nullable=False, default=0)
rxtx_factor = Column(Float, nullable=False, default=1)
vcpu_weight = Column(Integer, nullable=True)
disabled = Column(Boolean, default=False)
is_public = Column(Boolean, default=True)
instances = relationship(Instance,
backref=backref('instance_type', uselist=False),
foreign_keys=id,
primaryjoin='and_('
'Instance.instance_type_id == '
'InstanceTypes.id)')
class Volume(BASE, NovaBase):
"""Represents a block storage device that can be attached to a VM."""
__tablename__ = 'volumes'
id = Column(String(36), primary_key=True)
@property
def name(self):
return FLAGS.volume_name_template % self.id
ec2_id = Column(Integer)
user_id = Column(String(255))
project_id = Column(String(255))
snapshot_id = Column(String(36))
host = Column(String(255)) # , ForeignKey('hosts.id'))
size = Column(Integer)
availability_zone = Column(String(255)) # TODO(vish): foreign key?
instance_uuid = Column(String(36))
mountpoint = Column(String(255))
attach_time = Column(DateTime)
status = Column(String(255)) # TODO(vish): enum?
attach_status = Column(String(255)) # TODO(vish): enum
scheduled_at = Column(DateTime)
launched_at = Column(DateTime)
terminated_at = Column(DateTime)
display_name = Column(String(255))
display_description = Column(String(255))
provider_location = Column(String(255))
provider_auth = Column(String(255))
volume_type_id = Column(Integer)
class VolumeMetadata(BASE, NovaBase):
"""Represents a metadata key/value pair for a volume"""
__tablename__ = 'volume_metadata'
id = Column(Integer, primary_key=True)
key = Column(String(255))
value = Column(String(255))
volume_id = Column(String(36), ForeignKey('volumes.id'), nullable=False)
volume = relationship(Volume, backref="volume_metadata",
foreign_keys=volume_id,
primaryjoin='and_('
'VolumeMetadata.volume_id == Volume.id,'
'VolumeMetadata.deleted == False)')
class VolumeTypes(BASE, NovaBase):
"""Represent possible volume_types of volumes offered"""
__tablename__ = "volume_types"
id = Column(Integer, primary_key=True)
name = Column(String(255))
volumes = relationship(Volume,
backref=backref('volume_type', uselist=False),
foreign_keys=id,
primaryjoin='and_('
'Volume.volume_type_id == VolumeTypes.id, '
'VolumeTypes.deleted == False)')
class VolumeTypeExtraSpecs(BASE, NovaBase):
"""Represents additional specs as key/value pairs for a volume_type"""
__tablename__ = 'volume_type_extra_specs'
id = Column(Integer, primary_key=True)
key = Column(String(255))
value = Column(String(255))
volume_type_id = Column(Integer, ForeignKey('volume_types.id'),
nullable=False)
volume_type = relationship(VolumeTypes, backref="extra_specs",
foreign_keys=volume_type_id,
primaryjoin='and_('
'VolumeTypeExtraSpecs.volume_type_id == VolumeTypes.id,'
'VolumeTypeExtraSpecs.deleted == False)')
class Quota(BASE, NovaBase):
"""Represents a single quota override for a project.
If there is no row for a given project id and resource, then the
default for the quota class is used. If there is no row for a
given quota class and resource, then the default for the
deployment is used. If the row is present but the hard limit is
Null, then the resource is unlimited.
"""
__tablename__ = 'quotas'
id = Column(Integer, primary_key=True)
project_id = Column(String(255), index=True)
resource = Column(String(255))
hard_limit = Column(Integer, nullable=True)
class QuotaClass(BASE, NovaBase):
"""Represents a single quota override for a quota class.
If there is no row for a given quota class and resource, then the
default for the deployment is used. If the row is present but the
hard limit is Null, then the resource is unlimited.
"""
__tablename__ = 'quota_classes'
id = Column(Integer, primary_key=True)
class_name = Column(String(255), index=True)
resource = Column(String(255))
hard_limit = Column(Integer, nullable=True)
class QuotaUsage(BASE, NovaBase):
"""Represents the current usage for a given resource."""
__tablename__ = 'quota_usages'
id = Column(Integer, primary_key=True)
project_id = Column(String(255), index=True)
resource = Column(String(255))
in_use = Column(Integer)
reserved = Column(Integer)
@property
def total(self):
return self.in_use + self.reserved
until_refresh = Column(Integer, nullable=True)
class Reservation(BASE, NovaBase):
"""Represents a resource reservation for quotas."""
__tablename__ = 'reservations'
id = Column(Integer, primary_key=True)
uuid = Column(String(36), nullable=False)
usage_id = Column(Integer, ForeignKey('quota_usages.id'), nullable=False)
project_id = Column(String(255), index=True)
resource = Column(String(255))
delta = Column(Integer)
expire = Column(DateTime, nullable=False)
usage = relationship(
"QuotaUsage",
foreign_keys=usage_id,
primaryjoin='and_(Reservation.usage_id == QuotaUsage.id,'
'QuotaUsage.deleted == False)')
class Snapshot(BASE, NovaBase):
"""Represents a block storage device that can be attached to a VM."""
__tablename__ = 'snapshots'
id = Column(String(36), primary_key=True)
@property
def name(self):
return FLAGS.snapshot_name_template % self.id
@property
def volume_name(self):
return FLAGS.volume_name_template % self.volume_id
user_id = Column(String(255))
project_id = Column(String(255))
volume_id = Column(String(36))
status = Column(String(255))
progress = Column(String(255))
volume_size = Column(Integer)
display_name = Column(String(255))
display_description = Column(String(255))
class BlockDeviceMapping(BASE, NovaBase):
"""Represents block device mapping that is defined by EC2"""
__tablename__ = "block_device_mapping"
id = Column(Integer, primary_key=True, autoincrement=True)
instance_uuid = Column(Integer, ForeignKey('instances.uuid'),
nullable=False)
instance = relationship(Instance,
backref=backref('block_device_mapping'),
foreign_keys=instance_uuid,
primaryjoin='and_(BlockDeviceMapping.'
'instance_uuid=='
'Instance.uuid,'
'BlockDeviceMapping.deleted=='
'False)')
device_name = Column(String(255), nullable=False)
# default=False for compatibility of the existing code.
# With EC2 API,
# default True for ami specified device.
# default False for created with other timing.
delete_on_termination = Column(Boolean, default=False)
# for ephemeral device
virtual_name = Column(String(255), nullable=True)
snapshot_id = Column(String(36))
volume_id = Column(String(36), nullable=True)
volume_size = Column(Integer, nullable=True)
# for no device to suppress devices.
no_device = Column(Boolean, nullable=True)
connection_info = Column(Text, nullable=True)
class IscsiTarget(BASE, NovaBase):
"""Represents an iscsi target for a given host"""
__tablename__ = 'iscsi_targets'
__table_args__ = (schema.UniqueConstraint("target_num", "host"),
{'mysql_engine': 'InnoDB'})
id = Column(Integer, primary_key=True)
target_num = Column(Integer)
host = Column(String(255))
volume_id = Column(String(36), ForeignKey('volumes.id'), nullable=True)
volume = relationship(Volume,
backref=backref('iscsi_target', uselist=False),
foreign_keys=volume_id,
primaryjoin='and_(IscsiTarget.volume_id==Volume.id,'
'IscsiTarget.deleted==False)')
class SecurityGroupInstanceAssociation(BASE, NovaBase):
__tablename__ = 'security_group_instance_association'
id = Column(Integer, primary_key=True)
security_group_id = Column(Integer, ForeignKey('security_groups.id'))
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
class SecurityGroup(BASE, NovaBase):
"""Represents a security group."""
__tablename__ = 'security_groups'
id = Column(Integer, primary_key=True)
name = Column(String(255))
description = Column(String(255))
user_id = Column(String(255))
project_id = Column(String(255))
instances = relationship(Instance,
secondary="security_group_instance_association",
primaryjoin='and_('
'SecurityGroup.id == '
'SecurityGroupInstanceAssociation.security_group_id,'
'SecurityGroupInstanceAssociation.deleted == False,'
'SecurityGroup.deleted == False)',
secondaryjoin='and_('
'SecurityGroupInstanceAssociation.instance_uuid == Instance.uuid,'
# (anthony) the condition below shouldn't be necessary now that the
# association is being marked as deleted. However, removing this
# may cause existing deployments to choke, so I'm leaving it
'Instance.deleted == False)',
backref='security_groups')
class SecurityGroupIngressRule(BASE, NovaBase):
"""Represents a rule in a security group."""
__tablename__ = 'security_group_rules'
id = Column(Integer, primary_key=True)
parent_group_id = Column(Integer, ForeignKey('security_groups.id'))
parent_group = relationship("SecurityGroup", backref="rules",
foreign_keys=parent_group_id,
primaryjoin='and_('
'SecurityGroupIngressRule.parent_group_id == SecurityGroup.id,'
'SecurityGroupIngressRule.deleted == False)')
protocol = Column(String(5)) # "tcp", "udp", or "icmp"
from_port = Column(Integer)
to_port = Column(Integer)
cidr = Column(String(255))
    # Note: This is not the parent SecurityGroup. It's the SecurityGroup we're
    # granting access to.
group_id = Column(Integer, ForeignKey('security_groups.id'))
grantee_group = relationship("SecurityGroup",
foreign_keys=group_id,
primaryjoin='and_('
'SecurityGroupIngressRule.group_id == SecurityGroup.id,'
'SecurityGroupIngressRule.deleted == False)')
class ProviderFirewallRule(BASE, NovaBase):
"""Represents a rule in a security group."""
__tablename__ = 'provider_fw_rules'
id = Column(Integer, primary_key=True)
protocol = Column(String(5)) # "tcp", "udp", or "icmp"
from_port = Column(Integer)
to_port = Column(Integer)
cidr = Column(String(255))
class KeyPair(BASE, NovaBase):
"""Represents a public key pair for ssh."""
__tablename__ = 'key_pairs'
id = Column(Integer, primary_key=True)
name = Column(String(255))
user_id = Column(String(255))
fingerprint = Column(String(255))
public_key = Column(Text)
class Migration(BASE, NovaBase):
"""Represents a running host-to-host migration."""
__tablename__ = 'migrations'
id = Column(Integer, primary_key=True, nullable=False)
# NOTE(tr3buchet): the ____compute variables are instance['host']
source_compute = Column(String(255))
dest_compute = Column(String(255))
# NOTE(tr3buchet): dest_host, btw, is an ip address
dest_host = Column(String(255))
old_instance_type_id = Column(Integer())
new_instance_type_id = Column(Integer())
instance_uuid = Column(String(255), ForeignKey('instances.uuid'),
nullable=True)
#TODO(_cerberus_): enum
status = Column(String(255))
class Network(BASE, NovaBase):
"""Represents a network."""
__tablename__ = 'networks'
__table_args__ = (schema.UniqueConstraint("vpn_public_address",
"vpn_public_port"),
{'mysql_engine': 'InnoDB'})
id = Column(Integer, primary_key=True)
label = Column(String(255))
injected = Column(Boolean, default=False)
cidr = Column(String(255), unique=True)
cidr_v6 = Column(String(255), unique=True)
multi_host = Column(Boolean, default=False)
gateway_v6 = Column(String(255))
netmask_v6 = Column(String(255))
netmask = Column(String(255))
bridge = Column(String(255))
bridge_interface = Column(String(255))
gateway = Column(String(255))
broadcast = Column(String(255))
dns1 = Column(String(255))
dns2 = Column(String(255))
vlan = Column(Integer)
vpn_public_address = Column(String(255))
vpn_public_port = Column(Integer)
vpn_private_address = Column(String(255))
dhcp_start = Column(String(255))
rxtx_base = Column(Integer)
project_id = Column(String(255))
priority = Column(Integer)
host = Column(String(255)) # , ForeignKey('hosts.id'))
uuid = Column(String(36))
class VirtualInterface(BASE, NovaBase):
"""Represents a virtual interface on an instance."""
__tablename__ = 'virtual_interfaces'
id = Column(Integer, primary_key=True)
address = Column(String(255), unique=True)
network_id = Column(Integer, nullable=False)
instance_uuid = Column(String(36), nullable=False)
uuid = Column(String(36))
# TODO(vish): can these both come from the same baseclass?
class FixedIp(BASE, NovaBase):
"""Represents a fixed ip for an instance."""
__tablename__ = 'fixed_ips'
id = Column(Integer, primary_key=True)
address = Column(String(255))
network_id = Column(Integer, nullable=True)
virtual_interface_id = Column(Integer, nullable=True)
instance_uuid = Column(String(36), nullable=True)
# associated means that a fixed_ip has its instance_id column set
# allocated means that a fixed_ip has its virtual_interface_id column set
allocated = Column(Boolean, default=False)
# leased means dhcp bridge has leased the ip
leased = Column(Boolean, default=False)
reserved = Column(Boolean, default=False)
host = Column(String(255))
class FloatingIp(BASE, NovaBase):
"""Represents a floating ip that dynamically forwards to a fixed ip."""
__tablename__ = 'floating_ips'
id = Column(Integer, primary_key=True)
address = Column(String(255))
fixed_ip_id = Column(Integer, nullable=True)
project_id = Column(String(255))
host = Column(String(255)) # , ForeignKey('hosts.id'))
auto_assigned = Column(Boolean, default=False, nullable=False)
pool = Column(String(255))
interface = Column(String(255))
class DNSDomain(BASE, NovaBase):
"""Represents a DNS domain with availability zone or project info."""
__tablename__ = 'dns_domains'
domain = Column(String(512), primary_key=True)
scope = Column(String(255))
availability_zone = Column(String(255))
project_id = Column(String(255))
class ConsolePool(BASE, NovaBase):
"""Represents pool of consoles on the same physical node."""
__tablename__ = 'console_pools'
id = Column(Integer, primary_key=True)
address = Column(String(255))
username = Column(String(255))
password = Column(String(255))
console_type = Column(String(255))
public_hostname = Column(String(255))
host = Column(String(255))
compute_host = Column(String(255))
class Console(BASE, NovaBase):
"""Represents a console session for an instance."""
__tablename__ = 'consoles'
id = Column(Integer, primary_key=True)
instance_name = Column(String(255))
instance_uuid = Column(String(36))
password = Column(String(255))
port = Column(Integer, nullable=True)
pool_id = Column(Integer, ForeignKey('console_pools.id'))
pool = relationship(ConsolePool, backref=backref('consoles'))
class InstanceMetadata(BASE, NovaBase):
"""Represents a user-provided metadata key/value pair for an instance"""
__tablename__ = 'instance_metadata'
id = Column(Integer, primary_key=True)
key = Column(String(255))
value = Column(String(255))
instance_uuid = Column(String(36), ForeignKey('instances.uuid'),
nullable=False)
instance = relationship(Instance, backref="metadata",
foreign_keys=instance_uuid,
primaryjoin='and_('
'InstanceMetadata.instance_uuid == '
'Instance.uuid,'
'InstanceMetadata.deleted == False)')
class InstanceSystemMetadata(BASE, NovaBase):
"""Represents a system-owned metadata key/value pair for an instance"""
__tablename__ = 'instance_system_metadata'
id = Column(Integer, primary_key=True)
key = Column(String(255))
value = Column(String(255))
instance_uuid = Column(String(36),
ForeignKey('instances.uuid'),
nullable=False)
primary_join = ('and_(InstanceSystemMetadata.instance_uuid == '
'Instance.uuid, InstanceSystemMetadata.deleted == False)')
instance = relationship(Instance, backref="system_metadata",
foreign_keys=instance_uuid,
primaryjoin=primary_join)
class InstanceTypeProjects(BASE, NovaBase):
"""Represent projects associated instance_types"""
__tablename__ = "instance_type_projects"
id = Column(Integer, primary_key=True)
instance_type_id = Column(Integer, ForeignKey('instance_types.id'),
nullable=False)
project_id = Column(String(255))
instance_type = relationship(InstanceTypes, backref="projects",
foreign_keys=instance_type_id,
primaryjoin='and_('
'InstanceTypeProjects.instance_type_id == InstanceTypes.id,'
'InstanceTypeProjects.deleted == False)')
class InstanceTypeExtraSpecs(BASE, NovaBase):
"""Represents additional specs as key/value pairs for an instance_type"""
__tablename__ = 'instance_type_extra_specs'
id = Column(Integer, primary_key=True)
key = Column(String(255))
value = Column(String(255))
instance_type_id = Column(Integer, ForeignKey('instance_types.id'),
nullable=False)
instance_type = relationship(InstanceTypes, backref="extra_specs",
foreign_keys=instance_type_id,
primaryjoin='and_('
'InstanceTypeExtraSpecs.instance_type_id == InstanceTypes.id,'
'InstanceTypeExtraSpecs.deleted == False)')
class AggregateHost(BASE, NovaBase):
"""Represents a host that is member of an aggregate."""
__tablename__ = 'aggregate_hosts'
id = Column(Integer, primary_key=True, autoincrement=True)
host = Column(String(255), unique=False)
aggregate_id = Column(Integer, ForeignKey('aggregates.id'), nullable=False)
class AggregateMetadata(BASE, NovaBase):
"""Represents a metadata key/value pair for an aggregate."""
__tablename__ = 'aggregate_metadata'
id = Column(Integer, primary_key=True)
key = Column(String(255), nullable=False)
value = Column(String(255), nullable=False)
aggregate_id = Column(Integer, ForeignKey('aggregates.id'), nullable=False)
class Aggregate(BASE, NovaBase):
"""Represents a cluster of hosts that exists in this zone."""
__tablename__ = 'aggregates'
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String(255))
availability_zone = Column(String(255), nullable=False)
_hosts = relationship(AggregateHost,
lazy="joined",
secondary="aggregate_hosts",
primaryjoin='and_('
'Aggregate.id == AggregateHost.aggregate_id,'
'AggregateHost.deleted == False,'
'Aggregate.deleted == False)',
secondaryjoin='and_('
'AggregateHost.aggregate_id == Aggregate.id, '
'AggregateHost.deleted == False,'
'Aggregate.deleted == False)',
backref='aggregates')
_metadata = relationship(AggregateMetadata,
secondary="aggregate_metadata",
primaryjoin='and_('
'Aggregate.id == AggregateMetadata.aggregate_id,'
'AggregateMetadata.deleted == False,'
'Aggregate.deleted == False)',
secondaryjoin='and_('
'AggregateMetadata.aggregate_id == Aggregate.id, '
'AggregateMetadata.deleted == False,'
'Aggregate.deleted == False)',
backref='aggregates')
@property
def hosts(self):
return [h.host for h in self._hosts]
@property
def metadetails(self):
return dict([(m.key, m.value) for m in self._metadata])
class AgentBuild(BASE, NovaBase):
"""Represents an agent build."""
__tablename__ = 'agent_builds'
id = Column(Integer, primary_key=True)
hypervisor = Column(String(255))
os = Column(String(255))
architecture = Column(String(255))
version = Column(String(255))
url = Column(String(255))
md5hash = Column(String(255))
class BandwidthUsage(BASE, NovaBase):
"""Cache for instance bandwidth usage data pulled from the hypervisor"""
__tablename__ = 'bw_usage_cache'
id = Column(Integer, primary_key=True, nullable=False)
uuid = Column(String(36), nullable=False)
mac = Column(String(255), nullable=False)
start_period = Column(DateTime, nullable=False)
last_refreshed = Column(DateTime)
bw_in = Column(BigInteger)
bw_out = Column(BigInteger)
class S3Image(BASE, NovaBase):
"""Compatibility layer for the S3 image service talking to Glance"""
__tablename__ = 's3_images'
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class VolumeIdMapping(BASE, NovaBase):
"""Compatibility layer for the EC2 volume service"""
__tablename__ = 'volume_id_mappings'
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class SnapshotIdMapping(BASE, NovaBase):
"""Compatibility layer for the EC2 snapshot service"""
__tablename__ = 'snapshot_id_mappings'
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class SMFlavors(BASE, NovaBase):
"""Represents a flavor for SM volumes."""
__tablename__ = 'sm_flavors'
id = Column(Integer(), primary_key=True)
label = Column(String(255))
description = Column(String(255))
class SMBackendConf(BASE, NovaBase):
"""Represents the connection to the backend for SM."""
__tablename__ = 'sm_backend_config'
id = Column(Integer(), primary_key=True)
flavor_id = Column(Integer, ForeignKey('sm_flavors.id'), nullable=False)
sr_uuid = Column(String(255))
sr_type = Column(String(255))
config_params = Column(String(2047))
class SMVolume(BASE, NovaBase):
__tablename__ = 'sm_volume'
id = Column(String(36), ForeignKey(Volume.id), primary_key=True)
backend_id = Column(Integer, ForeignKey('sm_backend_config.id'),
nullable=False)
vdi_uuid = Column(String(255))
class InstanceFault(BASE, NovaBase):
__tablename__ = 'instance_faults'
id = Column(Integer(), primary_key=True, autoincrement=True)
instance_uuid = Column(String(36),
ForeignKey('instances.uuid'),
nullable=False)
code = Column(Integer(), nullable=False)
message = Column(String(255))
details = Column(Text)
class InstanceIdMapping(BASE, NovaBase):
"""Compatability layer for the EC2 instance service"""
__tablename__ = 'instance_id_mappings'
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class TaskLog(BASE, NovaBase):
"""Audit log for background periodic tasks"""
__tablename__ = 'task_log'
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
task_name = Column(String(255), nullable=False)
state = Column(String(255), nullable=False)
host = Column(String(255))
period_beginning = Column(String(255), default=timeutils.utcnow)
period_ending = Column(String(255), default=timeutils.utcnow)
message = Column(String(255), nullable=False)
task_items = Column(Integer(), default=0)
errors = Column(Integer(), default=0)
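# Hedged usage sketch (not part of the original module): NovaBase gives every
# model dict-like behaviour, so a row can be read and updated roughly like
#
#   inst = Instance()
#   inst['display_name'] = 'vm-1'          # __setitem__
#   inst.update({'vm_state': 'building'})  # update()
#   name = inst.get('display_name')        # get(), with optional default
#
# Session handling and save() are omitted; this only illustrates the mixin API.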
|
tylertian/Openstack
|
openstack F/nova/nova/db/sqlalchemy/models.py
|
Python
|
apache-2.0
| 37,122
|
"""SCons.Tool.gs
Tool-specific initialization for Ghostscript.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import SCons.Action
import SCons.Platform
import SCons.Util
# Ghostscript goes by different names on different platforms...
platform = SCons.Platform.platform_default()
if platform == 'os2':
gs = 'gsos2'
elif platform == 'win32':
gs = 'gswin32c'
else:
gs = 'gs'
GhostscriptAction = None
def generate(env):
"""Add Builders and construction variables for Ghostscript to an
Environment."""
global GhostscriptAction
if GhostscriptAction is None:
GhostscriptAction = SCons.Action.Action('$GSCOM', '$GSCOMSTR')
import pdf
pdf.generate(env)
bld = env['BUILDERS']['PDF']
bld.add_action('.ps', GhostscriptAction)
env['GS'] = gs
env['GSFLAGS'] = SCons.Util.CLVar('-dNOPAUSE -dBATCH -sDEVICE=pdfwrite')
env['GSCOM'] = '$GS $GSFLAGS -sOutputFile=$TARGET $SOURCES'
def exists(env):
if env.has_key('PS2PDF'):
return env.Detect(env['PS2PDF'])
else:
return env.Detect(gs) or SCons.Util.WhereIs(gs)
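# Hedged usage sketch (not part of the original module): once this tool is
# loaded, the PDF builder it extends can turn PostScript into PDF from an
# SConstruct, roughly
#
#   env = Environment(tools=['gs'])
#   env.PDF(target='out.pdf', source='in.ps')
#
# The Environment/tool wiring shown is an illustrative assumption.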
|
datalogics/scons
|
src/engine/SCons/Tool/gs.py
|
Python
|
mit
| 2,354
|
"""
Support for ANSI colours in command-line client.
.. data:: ESC
ansi escape character
.. data:: RESET
ansi reset colour (ansi value)
.. data:: COLOURS_NAMED
dict of colour names mapped to their ansi value
.. data:: COLOURS_MIDS
A list of ansi values for Mid Spectrum Colours
"""
import itertools
import sys
ESC = chr(0x1B)
RESET = "0"
COLOURS_NAMED = dict(list(zip(
['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'],
[str(x) for x in range(30, 38)]
)))
COLOURS_MIDS = [
colour for name, colour in list(COLOURS_NAMED.items())
if name not in ('black', 'white')
]
class AnsiColourException(Exception):
''' Exception while processing ansi colours '''
pass
class ColourMap(object):
'''
Object that allows for mapping strings to ansi colour values.
'''
def __init__(self, colors=COLOURS_MIDS):
''' uses the list of ansi `colors` values to initialize the map '''
self._cmap = {}
self._colourIter = itertools.cycle(colors)
def colourFor(self, string):
'''
Returns an ansi colour value given a `string`.
The same ansi colour value is always returned for the same string
'''
if string not in self._cmap:
self._cmap[string] = next(self._colourIter)
return self._cmap[string]
def cmdReset():
''' Returns the ansi cmd colour for a RESET '''
if sys.stdout.isatty():
return ESC + "[0m"
else:
return ""
def cmdColour(colour):
'''
Return the ansi cmd colour (i.e. escape sequence)
for the ansi `colour` value
'''
if sys.stdout.isatty():
return ESC + "[" + colour + "m"
else:
return ""
def cmdColourNamed(colour):
''' Return the ansi cmdColour for a given named `colour` '''
try:
return cmdColour(COLOURS_NAMED[colour])
except KeyError:
raise AnsiColourException('Unknown Colour %s' %(colour))
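# Hedged usage sketch (not part of the original module): ColourMap hands out a
# stable colour per string, and the cmd* helpers wrap text in escape sequences
# (only when stdout is a tty, as implemented above).
if __name__ == "__main__":
    cmap = ColourMap()
    print(cmdColour(cmap.colourFor("alice")) + "alice" + cmdReset())
    print(cmdColourNamed("red") + "error" + cmdReset())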
|
magloire/twitter
|
twitter/ansi.py
|
Python
|
mit
| 1,954
|
"""
def revertДана строка (возможно, пустая), состоящая из букв A-Z и пробелов, разделяющих слова.
Нужно написать функцию, которая развернет слова.
И сгенерирует ошибку, если на вход пришла невалидная строка.
Примеры:
"QUICK FOX JUMPS"->"KCIUQ XOF SPMUJ"
" QUICK FOX JUMPS "->" KCIUQ XOF SPMUJ "
" "->" "
""->"
"""
import re
def revers(word):
r = []
index = len(word)-1
while index >= 0:
r.append(word[index])
index -= 1
return "".join(r)
def revers_words(s):
if not s:
return ""
    # The input may only contain the letters A-Z and spaces; anything else is invalid.
    if re.search(r"[^A-Z ]", s):
        raise ValueError("invalid input string")
    a = s.split(" ")  # e.g. "AB  CD" -> ["AB", "", "CD"]; each word is reversed, then rejoined with " "
result = []
for word in a:
result.append(revers(word))
return " ".join(result)
|
sdenisen/python
|
yandex/task10/task10_resolve.py
|
Python
|
unlicense
| 959
|
from __future__ import absolute_import
from django.conf.urls import include, url
from dynamic_rest.routers import DynamicRouter
from tests import viewsets
router = DynamicRouter()
router.register_resource(viewsets.UserViewSet)
router.register_resource(viewsets.GroupViewSet)
router.register_resource(viewsets.ProfileViewSet)
router.register_resource(viewsets.LocationViewSet)
router.register(r'cats', viewsets.CatViewSet)
router.register_resource(viewsets.DogViewSet)
router.register_resource(viewsets.HorseViewSet)
router.register_resource(viewsets.PermissionViewSet)
router.register(r'zebras', viewsets.ZebraViewSet) # not canonical
router.register(r'user_locations', viewsets.UserLocationViewSet)
# the above routes are duplicated to test versioned prefixes
router.register_resource(viewsets.CatViewSet, namespace='v2') # canonical
router.register(r'v1/user_locations', viewsets.UserLocationViewSet)
urlpatterns = [
url(r'^', include(router.urls))
]
|
AltSchool/dynamic-rest-client
|
tests/urls.py
|
Python
|
mit
| 964
|
import sys
import argparse
import logging
import importlib
from .server import Server, build_endpoint_description_strings
from .access import AccessLogGenerator
logger = logging.getLogger(__name__)
DEFAULT_HOST = '127.0.0.1'
DEFAULT_PORT = 8000
class CommandLineInterface(object):
"""
Acts as the main CLI entry point for running the server.
"""
description = "Django HTTP/WebSocket server"
def __init__(self):
self.parser = argparse.ArgumentParser(
description=self.description,
)
self.parser.add_argument(
'-p',
'--port',
type=int,
help='Port number to listen on',
default=None,
)
self.parser.add_argument(
'-b',
'--bind',
dest='host',
help='The host/address to bind to',
default=None,
)
self.parser.add_argument(
'--websocket_timeout',
type=int,
            help='Maximum time in seconds a WebSocket may stay connected. -1 for no limit.',
default=None,
)
self.parser.add_argument(
'--websocket_connect_timeout',
type=int,
            help='Maximum time in seconds to allow a WebSocket connection to be established before it is refused. -1 for no limit.',
default=None,
)
self.parser.add_argument(
'-u',
'--unix-socket',
dest='unix_socket',
help='Bind to a UNIX socket rather than a TCP host/port',
default=None,
)
self.parser.add_argument(
'--fd',
type=int,
dest='file_descriptor',
help='Bind to a file descriptor rather than a TCP host/port or named unix socket',
default=None,
)
self.parser.add_argument(
'-e',
'--endpoint',
dest='socket_strings',
action='append',
help='Use raw server strings passed directly to twisted',
default=[],
)
self.parser.add_argument(
'-v',
'--verbosity',
type=int,
help='How verbose to make the output',
default=1,
)
self.parser.add_argument(
'-t',
'--http-timeout',
type=int,
help='How long to wait for worker server before timing out HTTP connections',
default=120,
)
self.parser.add_argument(
'--access-log',
help='Where to write the access log (- for stdout, the default for verbosity=1)',
default=None,
)
self.parser.add_argument(
'--ping-interval',
type=int,
help='The number of seconds a WebSocket must be idle before a keepalive ping is sent',
default=20,
)
self.parser.add_argument(
'--ping-timeout',
type=int,
            help='The number of seconds before a WebSocket is closed if there is no response to a keepalive ping',
default=30,
)
self.parser.add_argument(
'--ws-protocol',
nargs='*',
dest='ws_protocols',
help='The WebSocket protocols you wish to support',
default=None,
)
self.parser.add_argument(
'--root-path',
dest='root_path',
help='The setting for the ASGI root_path variable',
default="",
)
self.parser.add_argument(
'--proxy-headers',
dest='proxy_headers',
            help='Enable parsing of the X-Forwarded-For and X-Forwarded-Port headers and '
                 'use them as the client address',
default=False,
action='store_true',
)
self.parser.add_argument(
'--force-sync',
dest='force_sync',
action='store_true',
help='Force the server to use synchronous mode on its ASGI channel layer',
default=False,
)
self.parser.add_argument(
'channel_layer',
help='The ASGI channel layer instance to use as path.to.module:instance.path',
)
self.server = None
@classmethod
def entrypoint(cls):
"""
Main entrypoint for external starts.
"""
cls().run(sys.argv[1:])
def run(self, args):
"""
Pass in raw argument list and it will decode them
and run the server.
"""
# Decode args
args = self.parser.parse_args(args)
# Set up logging
logging.basicConfig(
level={
0: logging.WARN,
1: logging.INFO,
2: logging.DEBUG,
}[args.verbosity],
format="%(asctime)-15s %(levelname)-8s %(message)s",
)
# If verbosity is 1 or greater, or they told us explicitly, set up access log
access_log_stream = None
if args.access_log:
if args.access_log == "-":
access_log_stream = sys.stdout
else:
access_log_stream = open(args.access_log, "a", 1)
elif args.verbosity >= 1:
access_log_stream = sys.stdout
# Import channel layer
sys.path.insert(0, ".")
module_path, object_path = args.channel_layer.split(":", 1)
channel_layer = importlib.import_module(module_path)
for bit in object_path.split("."):
channel_layer = getattr(channel_layer, bit)
if not any([args.host, args.port, args.unix_socket, args.file_descriptor, args.socket_strings]):
# no advanced binding options passed, patch in defaults
args.host = DEFAULT_HOST
args.port = DEFAULT_PORT
elif args.host and not args.port:
args.port = DEFAULT_PORT
elif args.port and not args.host:
args.host = DEFAULT_HOST
# build endpoint description strings from (optional) cli arguments
endpoints = build_endpoint_description_strings(
host=args.host,
port=args.port,
unix_socket=args.unix_socket,
file_descriptor=args.file_descriptor
)
endpoints = sorted(
args.socket_strings + endpoints
)
logger.info(
'Starting server at %s, channel layer %s.' %
(', '.join(endpoints), args.channel_layer)
)
self.server = Server(
channel_layer=channel_layer,
endpoints=endpoints,
http_timeout=args.http_timeout,
ping_interval=args.ping_interval,
ping_timeout=args.ping_timeout,
websocket_timeout=args.websocket_timeout,
websocket_connect_timeout=args.websocket_connect_timeout,
action_logger=AccessLogGenerator(access_log_stream) if access_log_stream else None,
ws_protocols=args.ws_protocols,
root_path=args.root_path,
verbosity=args.verbosity,
proxy_forwarded_address_header='X-Forwarded-For' if args.proxy_headers else None,
proxy_forwarded_port_header='X-Forwarded-Port' if args.proxy_headers else None,
force_sync=args.force_sync,
)
self.server.run()
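# Hedged invocation sketch (not part of the original module): with a channel
# layer importable as "myproject.asgi:channel_layer" (an assumed example path),
# the entrypoint above is normally driven from the shell, e.g.
#
#   daphne -b 127.0.0.1 -p 8000 myproject.asgi:channel_layer
#
# which is equivalent to CommandLineInterface().run(
#     ["-b", "127.0.0.1", "-p", "8000", "myproject.asgi:channel_layer"]).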
|
maikhoepfel/daphne
|
daphne/cli.py
|
Python
|
bsd-3-clause
| 7,303
|
from hypothesis import given, example
from hypothesis.strategies import binary, integers
from mitmproxy.tls import ClientHello
from mitmproxy.proxy.layers.tls import parse_client_hello
client_hello_with_extensions = bytes.fromhex(
"16030300bb" # record layer
"010000b7" # handshake layer
"03033b70638d2523e1cba15f8364868295305e9c52aceabda4b5147210abc783e6e1000022c02bc02fc02cc030"
"cca9cca8cc14cc13c009c013c00ac014009c009d002f0035000a0100006cff0100010000000010000e00000b65"
"78616d706c652e636f6d0017000000230000000d00120010060106030501050304010403020102030005000501"
"00000000001200000010000e000c02683208687474702f312e3175500000000b00020100000a00080006001d00"
"170018"
)
@given(i=integers(0, len(client_hello_with_extensions)), data=binary())
@example(i=183, data=b'\x00\x00\x00\x00\x00\x00\x00\x00\x00')
def test_fuzz_parse_client_hello(i, data):
try:
ch = parse_client_hello(client_hello_with_extensions[:i] + data)
except ValueError:
pass
else:
assert ch is None or isinstance(ch, ClientHello)
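# Hedged usage note (not part of the original test): hypothesis generates the
# (i, data) corruptions; the test is normally collected and run via pytest,
# e.g. `pytest test/mitmproxy/proxy/layers/test_tls_fuzz.py`.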
|
mitmproxy/mitmproxy
|
test/mitmproxy/proxy/layers/test_tls_fuzz.py
|
Python
|
mit
| 1,067
|
# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP
# Copyright 2017 Fujitsu LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pyparsing
from monasca_api.expression_parser import alarm_expr_parser
from monasca_api.tests import base
class TestAlarmExpression(base.BaseTestCase):
good_simple_expression = "max(cpu.idle_perc{hostname=fred}, 60) <= 3 times 4 OR \
avg(CPU.PERCENT)<5 OR min(cpu.percent, deterministic) gte 3"
def test_good_expression(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual(3, len(sub_exprs))
def test_fmtd_sub_expr(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.fmtd_sub_expr_str for x in sub_exprs],
['MAX(cpu.idle_perc{hostname=fred}) <= 3.0 times 4',
'AVG(CPU.PERCENT{}) < 5.0', 'MIN(cpu.percent{}) gte 3.0'])
def test_dimensions_str(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.dimensions_str for x in sub_exprs], ['hostname=fred', '', ''])
def test_function(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.func for x in sub_exprs], ['max', 'avg', 'min'])
def test_normalized_function(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.normalized_func for x in sub_exprs], ['MAX', 'AVG', 'MIN'])
def test_metric_name(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.metric_name for x in sub_exprs],
['cpu.idle_perc', 'CPU.PERCENT', 'cpu.percent'])
def test_normalized_metric_name(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.normalized_metric_name for x in sub_exprs],
['cpu.idle_perc', 'cpu.percent', 'cpu.percent'])
def test_dimensions(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.dimensions for x in sub_exprs], ['hostname=fred', '', ''])
def test_dimensions_as_list(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
print([x.dimensions_as_list for x in sub_exprs].__str__())
self.assertEqual([x.dimensions_as_list for x in sub_exprs].__str__(),
"[ParseResults(['hostname=fred'], {}), [], []]")
def test_operator(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.operator for x in sub_exprs], ['<=', '<', 'gte'])
def test_threshold(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.threshold for x in sub_exprs], [3.0, 5.0, 3.0])
def test_period(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.period for x in sub_exprs], [60, 60, 60])
def test_periods(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.periods for x in sub_exprs], [4, 1, 1])
def test_deterministic(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.deterministic for x in sub_exprs], [False, False, True])
def test_normalized_operator(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.normalized_operator for x in sub_exprs], ['LTE', 'LT', 'GTE'])
def test_id(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
self.assertEqual([x.id for x in sub_exprs], [None, None, None])
def test_set_id(self):
expression = self.good_simple_expression
sub_exprs = alarm_expr_parser.AlarmExprParser(expression).sub_expr_list
for x in sub_exprs:
x.id = 88
self.assertEqual([x.id for x in sub_exprs], [88, 88, 88])
def _ensure_parse_fails(self, expression):
parser = alarm_expr_parser.AlarmExprParser(expression)
self.assertRaises(
(pyparsing.ParseException,
pyparsing.ParseFatalException),
getattr, parser, "sub_expr_list")
def test_incomplete_operator(self):
expression = self.good_simple_expression.replace('<= 3', '')
self._ensure_parse_fails(expression)
def test_no_dimension_name(self):
expression = self.good_simple_expression.replace('hostname', '')
self._ensure_parse_fails(expression)
def test_no_metric_name(self):
expression = self.good_simple_expression.replace('cpu.idle_perc', '')
self._ensure_parse_fails(expression)
def test_invalid_period(self):
expression = self.good_simple_expression.replace('60', '42')
self._ensure_parse_fails(expression)
def test_zero_period(self):
expression = self.good_simple_expression.replace('60', '0')
self._ensure_parse_fails(expression)
def test_negative_period(self):
expression = self.good_simple_expression.replace('60', '-60')
self._ensure_parse_fails(expression)
def test_zero_periods(self):
expression = self.good_simple_expression.replace('times 4', 'times 0')
self._ensure_parse_fails(expression)
|
openstack/monasca-api
|
monasca_api/tests/test_alarm_expression.py
|
Python
|
apache-2.0
| 6,872
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
""" Usefull functions and classes """
|
Martoni/periphondemand
|
periphondemand/bin/utils/__init__.py
|
Python
|
lgpl-2.1
| 81
|
"""Function/variables common to all the commands
"""
__copyright__ = """
Copyright (C) 2005, Catalin Marinas <catalin.marinas@gmail.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import sys, os, os.path, re, email.Utils
from stgit.exception import *
from stgit.utils import *
from stgit.out import *
from stgit.run import *
from stgit import stack, git, basedir
from stgit.config import config, file_extensions
from stgit.lib import stack as libstack
from stgit.lib import git as libgit
from stgit.lib import log
# Command exception class
class CmdException(StgException):
pass
# Utility functions
def parse_rev(rev):
"""Parse a revision specification into its branch:patch parts.
"""
try:
branch, patch = rev.split(':', 1)
except ValueError:
branch = None
patch = rev
return (branch, patch)
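# A minimal usage sketch for parse_rev() (hypothetical names):
#   parse_rev('stable:my-patch')  ->  ('stable', 'my-patch')
#   parse_rev('my-patch')         ->  (None, 'my-patch')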
def git_id(crt_series, rev):
"""Return the GIT id
"""
# TODO: remove this function once all the occurrences were converted
# to git_commit()
repository = libstack.Repository.default()
return git_commit(rev, repository, crt_series.get_name()).sha1
def get_public_ref(branch_name):
"""Return the public ref of the branch."""
public_ref = config.get('branch.%s.public' % branch_name)
if not public_ref:
public_ref = 'refs/heads/%s.public' % branch_name
return public_ref
def git_commit(name, repository, branch_name = None):
"""Return the a Commit object if 'name' is a patch name or Git commit.
The patch names allowed are in the form '<branch>:<patch>' and can
be followed by standard symbols used by git rev-parse. If <patch>
is '{base}', it represents the bottom of the stack. If <patch> is
{public}, it represents the public branch corresponding to the stack as
described in the 'publish' command.
"""
# Try a [branch:]patch name first
branch, patch = parse_rev(name)
if not branch:
branch = branch_name or repository.current_branch_name
# The stack base
if patch.startswith('{base}'):
base_id = repository.get_stack(branch).base.sha1
return repository.rev_parse(base_id +
strip_prefix('{base}', patch))
elif patch.startswith('{public}'):
public_ref = get_public_ref(branch)
return repository.rev_parse(public_ref +
strip_prefix('{public}', patch),
discard_stderr = True)
# Other combination of branch and patch
try:
return repository.rev_parse('patches/%s/%s' % (branch, patch),
discard_stderr = True)
except libgit.RepositoryException:
pass
# Try a Git commit
try:
return repository.rev_parse(name, discard_stderr = True)
except libgit.RepositoryException:
raise CmdException('%s: Unknown patch or revision name' % name)
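# Examples of names git_commit() accepts (hypothetical branch/patch names):
#   'my-patch'           - a patch on the current branch
#   'stable:my-patch'    - a patch on an explicit branch
#   'stable:{base}~2'    - two commits below the stack base of 'stable'
#   '{public}'           - the public ref of the current branch
#   'HEAD^'              - any plain Git revision as a fallback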
def color_diff_flags():
"""Return the git flags for coloured diff output if the configuration and
stdout allows."""
stdout_is_tty = (sys.stdout.isatty() and 'true') or 'false'
if config.get_colorbool('color.diff', stdout_is_tty) == 'true':
return ['--color']
else:
return []
def check_local_changes():
if git.local_changes():
raise CmdException('local changes in the tree. Use "refresh" or'
' "status --reset"')
def check_head_top_equal(crt_series):
if not crt_series.head_top_equal():
raise CmdException('HEAD and top are not the same. This can happen'
' if you modify a branch with git. "stg repair'
' --help" explains more about what to do next.')
def check_conflicts():
if git.get_conflicts():
raise CmdException('Unsolved conflicts. Please fix the conflicts'
' then use "git add --update <files>" or revert the'
' changes with "status --reset".')
def print_crt_patch(crt_series, branch = None):
if not branch:
patch = crt_series.get_current()
else:
patch = stack.Series(branch).get_current()
if patch:
out.info('Now at patch "%s"' % patch)
else:
out.info('No patches applied')
def resolved_all(reset = None):
conflicts = git.get_conflicts()
git.resolved(conflicts, reset)
def push_patches(crt_series, patches, check_merged = False):
"""Push multiple patches onto the stack. This function is shared
between the push and pull commands
"""
forwarded = crt_series.forward_patches(patches)
if forwarded > 1:
out.info('Fast-forwarded patches "%s" - "%s"'
% (patches[0], patches[forwarded - 1]))
elif forwarded == 1:
out.info('Fast-forwarded patch "%s"' % patches[0])
names = patches[forwarded:]
# check for patches merged upstream
if names and check_merged:
out.start('Checking for patches merged upstream')
merged = crt_series.merged_patches(names)
out.done('%d found' % len(merged))
else:
merged = []
for p in names:
out.start('Pushing patch "%s"' % p)
if p in merged:
crt_series.push_empty_patch(p)
out.done('merged upstream')
else:
modified = crt_series.push_patch(p)
if crt_series.empty_patch(p):
out.done('empty patch')
elif modified:
out.done('modified')
else:
out.done()
def pop_patches(crt_series, patches, keep = False):
"""Pop the patches in the list from the stack. It is assumed that
the patches are listed in the stack reverse order.
"""
if len(patches) == 0:
out.info('Nothing to push/pop')
else:
p = patches[-1]
if len(patches) == 1:
out.start('Popping patch "%s"' % p)
else:
out.start('Popping patches "%s" - "%s"' % (patches[0], p))
crt_series.pop_patch(p, keep)
out.done()
def parse_patches(patch_args, patch_list, boundary = 0, ordered = False):
"""Parse patch_args list for patch names in patch_list and return
a list. The names can be individual patches and/or in the
patch1..patch2 format.
"""
# in case it receives a tuple
patch_list = list(patch_list)
patches = []
for name in patch_args:
pair = name.split('..')
for p in pair:
if p and not p in patch_list:
raise CmdException, 'Unknown patch name: %s' % p
if len(pair) == 1:
# single patch name
pl = pair
elif len(pair) == 2:
# patch range [p1]..[p2]
# inclusive boundary
if pair[0]:
first = patch_list.index(pair[0])
else:
first = -1
# exclusive boundary
if pair[1]:
last = patch_list.index(pair[1]) + 1
else:
last = -1
# only cross the boundary if explicitly asked
if not boundary:
boundary = len(patch_list)
if first < 0:
if last <= boundary:
first = 0
else:
first = boundary
if last < 0:
if first < boundary:
last = boundary
else:
last = len(patch_list)
if last > first:
pl = patch_list[first:last]
else:
pl = patch_list[(last - 1):(first + 1)]
pl.reverse()
else:
raise CmdException, 'Malformed patch name: %s' % name
for p in pl:
if p in patches:
raise CmdException, 'Duplicate patch name: %s' % p
patches += pl
if ordered:
patches = [p for p in patch_list if p in patches]
return patches
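# A rough sketch of the range expansion (hypothetical patch names):
#   parse_patches(['p2..p4'], ['p1', 'p2', 'p3', 'p4'])    ->  ['p2', 'p3', 'p4']
#   parse_patches(['..p2'], ['p1', 'p2', 'p3', 'p4'])      ->  ['p1', 'p2']
#   parse_patches(['p1', 'p3'], ['p1', 'p2', 'p3', 'p4'])  ->  ['p1', 'p3']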
def name_email(address):
p = email.Utils.parseaddr(address)
if p[1]:
return p
else:
raise CmdException('Incorrect "name <email>"/"email (name)" string: %s'
% address)
def name_email_date(address):
p = parse_name_email_date(address)
if p:
return p
else:
raise CmdException('Incorrect "name <email> date" string: %s' % address)
def address_or_alias(addr_pair):
"""Return a name-email tuple the e-mail address is valid or look up
the aliases in the config files.
"""
addr = addr_pair[1]
if '@' in addr:
# it's an e-mail address
return addr_pair
alias = config.get('mail.alias.' + addr)
if alias:
# it's an alias
return name_email(alias)
raise CmdException, 'unknown e-mail alias: %s' % addr
def prepare_rebase(crt_series):
# pop all patches
applied = crt_series.get_applied()
if len(applied) > 0:
out.start('Popping all applied patches')
crt_series.pop_patch(applied[0])
out.done()
return applied
def rebase(crt_series, target):
try:
tree_id = git_id(crt_series, target)
except:
# it might be that we use a custom rebase command with its own
# target type
tree_id = target
if target:
out.start('Rebasing to "%s"' % target)
else:
out.start('Rebasing to the default target')
git.rebase(tree_id = tree_id)
out.done()
def post_rebase(crt_series, applied, nopush, merged):
# memorize that we rebased to here
crt_series._set_field('orig-base', git.get_head())
# push the patches back
if not nopush:
push_patches(crt_series, applied, merged)
#
# Patch description/e-mail/diff parsing
#
def __end_descr(line):
return re.match('---\s*$', line) or re.match('diff -', line) or \
re.match('Index: ', line) or re.match('--- \w', line)
def __split_descr_diff(string):
"""Return the description and the diff from the given string
"""
descr = diff = ''
top = True
for line in string.split('\n'):
if top:
if not __end_descr(line):
descr += line + '\n'
continue
else:
top = False
diff += line + '\n'
return (descr.rstrip(), diff)
def __parse_description(descr):
"""Parse the patch description and return the new description and
author information (if any).
"""
subject = body = ''
authname = authemail = authdate = None
descr_lines = [line.rstrip() for line in descr.split('\n')]
if not descr_lines:
raise CmdException, "Empty patch description"
lasthdr = 0
end = len(descr_lines)
descr_strip = 0
# Parse the patch header
for pos in range(0, end):
if not descr_lines[pos]:
continue
# check for a "From|Author:" line
if re.match('\s*(?:from|author):\s+', descr_lines[pos], re.I):
auth = re.findall('^.*?:\s+(.*)$', descr_lines[pos])[0]
authname, authemail = name_email(auth)
lasthdr = pos + 1
continue
# check for a "Date:" line
if re.match('\s*date:\s+', descr_lines[pos], re.I):
authdate = re.findall('^.*?:\s+(.*)$', descr_lines[pos])[0]
lasthdr = pos + 1
continue
if subject:
break
# get the subject
subject = descr_lines[pos][descr_strip:]
if re.match('commit [\da-f]{40}$', subject):
# 'git show' output, look for the real subject
subject = ''
descr_strip = 4
lasthdr = pos + 1
# get the body
if lasthdr < end:
body = '\n' + '\n'.join(l[descr_strip:] for l in descr_lines[lasthdr:])
return (subject + body, authname, authemail, authdate)
def parse_mail(msg):
"""Parse the message object and return (description, authname,
authemail, authdate, diff)
"""
from email.Header import decode_header, make_header
def __decode_header(header):
"""Decode a qp-encoded e-mail header as per rfc2047"""
try:
words_enc = decode_header(header)
hobj = make_header(words_enc)
except Exception, ex:
raise CmdException, 'header decoding error: %s' % str(ex)
return unicode(hobj).encode('utf-8')
# parse the headers
if msg.has_key('from'):
authname, authemail = name_email(__decode_header(msg['from']))
else:
authname = authemail = None
# '\n\t' can be found on multi-line headers
descr = __decode_header(msg['subject'])
descr = re.sub('\n[ \t]*', ' ', descr)
authdate = msg['date']
# remove the '[*PATCH*]' expression in the subject
if descr:
descr = re.findall('^(\[.*?[Pp][Aa][Tt][Cc][Hh].*?\])?\s*(.*)$',
descr)[0][1]
else:
raise CmdException, 'Subject: line not found'
# the rest of the message
msg_text = ''
for part in msg.walk():
if part.get_content_type() in ['text/plain',
'application/octet-stream']:
msg_text += part.get_payload(decode = True)
rem_descr, diff = __split_descr_diff(msg_text)
if rem_descr:
descr += '\n\n' + rem_descr
# parse the description for author information
descr, descr_authname, descr_authemail, descr_authdate = \
__parse_description(descr)
if descr_authname:
authname = descr_authname
if descr_authemail:
authemail = descr_authemail
if descr_authdate:
authdate = descr_authdate
return (descr, authname, authemail, authdate, diff)
def parse_patch(text, contains_diff):
"""Parse the input text and return (description, authname,
authemail, authdate, diff)
"""
if contains_diff:
(text, diff) = __split_descr_diff(text)
else:
diff = None
(descr, authname, authemail, authdate) = __parse_description(text)
# we don't yet have an agreed place for the creation date.
# Just return None
return (descr, authname, authemail, authdate, diff)
def readonly_constant_property(f):
"""Decorator that converts a function that computes a value to an
attribute that returns the value. The value is computed only once,
the first time it is accessed."""
def new_f(self):
n = '__' + f.__name__
if not hasattr(self, n):
setattr(self, n, f(self))
return getattr(self, n)
return property(new_f)
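# A minimal usage sketch (hypothetical class, not part of this module):
#   class Example(object):
#       @readonly_constant_property
#       def answer(self):
#           return compute_answer()   # evaluated only on first access
#   The computed value is cached on the instance under the name '__answer'.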
def update_commit_data(cd, options):
"""Return a new CommitData object updated according to the command line
options."""
# Set the commit message from commandline.
if options.message != None:
cd = cd.set_message(options.message)
# Modify author data.
cd = cd.set_author(options.author(cd.author))
# Add Signed-off-by: or similar.
if options.sign_str != None:
sign_str = options.sign_str
else:
sign_str = config.get("stgit.autosign")
if sign_str != None:
cd = cd.set_message(
add_sign_line(cd.message, sign_str,
cd.committer.name, cd.committer.email))
# Let user edit the commit message manually, unless
# --save-template or --message was specified.
if not getattr(options, 'save_template', None) and not options.message:
cd = cd.set_message(edit_string(cd.message, '.stgit-new.txt'))
return cd
class DirectoryException(StgException):
pass
class _Directory(object):
def __init__(self, needs_current_series = True, log = True):
self.needs_current_series = needs_current_series
self.log = log
@readonly_constant_property
def git_dir(self):
try:
return Run('git', 'rev-parse', '--git-dir'
).discard_stderr().output_one_line()
except RunException:
raise DirectoryException('No git repository found')
@readonly_constant_property
def __topdir_path(self):
try:
lines = Run('git', 'rev-parse', '--show-cdup'
).discard_stderr().output_lines()
if len(lines) == 0:
return '.'
elif len(lines) == 1:
return lines[0]
else:
raise RunException('Too much output')
except RunException:
raise DirectoryException('No git repository found')
@readonly_constant_property
def is_inside_git_dir(self):
return { 'true': True, 'false': False
}[Run('git', 'rev-parse', '--is-inside-git-dir'
).output_one_line()]
@readonly_constant_property
def is_inside_worktree(self):
return { 'true': True, 'false': False
}[Run('git', 'rev-parse', '--is-inside-work-tree'
).output_one_line()]
def cd_to_topdir(self):
os.chdir(self.__topdir_path)
def write_log(self, msg):
if self.log:
log.compat_log_entry(msg)
class DirectoryAnywhere(_Directory):
def setup(self):
pass
class DirectoryHasRepository(_Directory):
def setup(self):
self.git_dir # might throw an exception
log.compat_log_external_mods()
class DirectoryInWorktree(DirectoryHasRepository):
def setup(self):
DirectoryHasRepository.setup(self)
if not self.is_inside_worktree:
raise DirectoryException('Not inside a git worktree')
class DirectoryGotoToplevel(DirectoryInWorktree):
def setup(self):
DirectoryInWorktree.setup(self)
self.cd_to_topdir()
class DirectoryHasRepositoryLib(_Directory):
"""For commands that use the new infrastructure in stgit.lib.*."""
def __init__(self):
self.needs_current_series = False
self.log = False # stgit.lib.transaction handles logging
def setup(self):
# This will throw an exception if we don't have a repository.
self.repository = libstack.Repository.default()
|
miracle2k/stgit
|
stgit/commands/common.py
|
Python
|
gpl-2.0
| 18,556
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#-----------------------------------------------------------------------#
# fits-for-roi.py #
# #
# Script to create FITS files from ROI observing output #
# Copyright (C) 2013 Germán A. Racca - <gracca[AT]gmail[DOT]com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
#-----------------------------------------------------------------------#
import ast
import pyfits
import numpy as np
# read input file
f = open('d_110801.txt')
lines = f.readlines()
# define some variables
nheadlin = 22
nchannel = 2048
nspectra = len(lines) / nchannel
coef = nheadlin + nchannel + 1
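# Assumed layout of the input file, inferred from the slicing below: each
# spectrum occupies 'coef' lines, i.e. nheadlin header lines, nchannel data
# lines and one trailing separator line.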
# create a list of "empty" spectra (header + data)
spec = [pyfits.PrimaryHDU() for i in range(nspectra)]
# read numerical data
nums = np.zeros(nchannel*nspectra, dtype='float32')
for i in range(nspectra):
limi = coef * i
lims = limi + nheadlin
nums[nchannel*i:nchannel*(i+1)] = lines[lims:lims+nchannel]
data = np.hsplit(nums, nspectra)
# read the headers
text = []
for i in range(nspectra):
limi = coef * i
lims = limi + nheadlin
text.append(lines[limi:lims-1])
# format the headers
for i, j in enumerate(text):
for m, k in enumerate(j):
l = k.strip().replace("'", "").split("=")
key = l[0].strip()
val = l[1].strip()
if m >= 4 and m <= 19:
val = ast.literal_eval(val)
spec[i].header.update(key, val)
# format the data
for i, j in enumerate(data):
spec[i].data = j
# create fits files
name = 'd_110801'
for i in range(nspectra):
n = name + '_' + str(i+1) + '.fits'
spec[i].writeto(n, clobber=True)
|
gracca/fits-for-roi
|
fits-for-roi.py
|
Python
|
gpl-3.0
| 2,686
|
#!/usr/bin/python
# Example file with custom commands, located at /magical/commands/example.py
import lldb
import fblldbbase as fb
def lldbcommands():
return [ PrintKeyWindowLevel() ]
class PrintKeyWindowLevel(fb.FBCommand):
def name(self):
return 'pkeywinlevel'
def description(self):
return 'An incredibly contrived command that prints the window level of the key window.'
def run(self, arguments, options):
# It's a good habit to explicitly cast the type of all return
# values and arguments. LLDB can't always find them on its own.
lldb.debugger.HandleCommand('p (CGFloat)[(id)[(id)[UIApplication sharedApplication] keyWindow] windowLevel]')
|
itsthejb/ChiselCommands
|
NSLogBreakPoint.py
|
Python
|
mit
| 683
|
from django.conf import settings
from django.db import models
from .ticket import Ticket
class Attachment(models.Model):
"""Ticket attachment model."""
ticket = models.ForeignKey(
Ticket, blank=False, related_name='attachments', db_index=True,
on_delete=models.DO_NOTHING)
user = models.ForeignKey(
settings.AUTH_USER_MODEL, blank=False, db_index=True,
on_delete=models.DO_NOTHING)
upload = models.FileField(upload_to='attachments/%Y/%m/%d', max_length=255)
created_at = models.DateTimeField(auto_now_add=True)
@classmethod
def filter_by_user(cls, user, queryset=None):
"""Returns any user accessible attachments.
Ones he has access to through the tickets.
"""
if queryset is None:
queryset = cls.objects
return queryset.filter(ticket__in=Ticket.filter_by_user(user))
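# A minimal usage sketch (hypothetical view code, not part of this module):
#   visible = Attachment.filter_by_user(request.user)
#   recent = Attachment.filter_by_user(
#       request.user, Attachment.objects.order_by('-created_at'))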
|
occrp/id-backend
|
api_v3/models/attachment.py
|
Python
|
mit
| 887
|
#!/usr/bin/env python
"""
Script to automate some parts of checking NEW packages
Most functions are written in a functional programming style. They
return a string avoiding the side effect of directly printing the string
to stdout. Those functions can be used in multithreaded parts of dak.
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
# <Omnic> elmo wrote docs?!!?!?!?!?!?!
# <aj> as if he wasn't scary enough before!!
# * aj imagines a little red furry toy sitting hunched over a computer
# tapping furiously and giggling to himself
# <aj> eventually he stops, and his heads slowly spins around and you
# see this really evil grin and then he sees you, and picks up a
# knife from beside the keyboard and throws it at you, and as you
# breathe your last breath, he starts giggling again
# <aj> but i should be telling this to my psychiatrist, not you guys,
# right? :)
################################################################################
# suppress some deprecation warnings in squeeze related to md5 module
import warnings
warnings.filterwarnings('ignore', \
"the md5 module is deprecated; use hashlib instead", \
DeprecationWarning)
import errno
import os
import re
import sys
import md5
import apt_pkg
import apt_inst
import shutil
import commands
import threading
from daklib import utils
from daklib.dbconn import DBConn, get_component_by_package_suite
from daklib.gpg import SignedFile
from daklib.regexes import html_escaping, re_html_escaping, re_version, re_spacestrip, \
re_contrib, re_nonfree, re_localhost, re_newlinespace, \
re_package, re_doc_directory
from daklib.dak_exceptions import ChangesUnicodeError
################################################################################
Cnf = None
Cnf = utils.get_conf()
printed = threading.local()
printed.copyrights = {}
package_relations = {} #: Store relations of packages for later output
# default is to not output html.
use_html = 0
################################################################################
def usage (exit_code=0):
print """Usage: dak examine-package [PACKAGE]...
Check NEW package(s).
-h, --help show this help and exit
-H, --html-output output html page with inspection result
-f, --file-name filename for the html page
PACKAGE can be a .changes, .dsc, .deb or .udeb filename."""
sys.exit(exit_code)
################################################################################
# probably xml.sax.saxutils would work as well
def escape_if_needed(s):
if use_html:
return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
else:
return s
def headline(s, level=2, bodyelement=None):
if use_html:
if bodyelement:
return """<thead>
<tr><th colspan="2" class="title" onclick="toggle('%(bodyelement)s', 'table-row-group', 'table-row-group')">%(title)s <span class="toggle-msg">(click to toggle)</span></th></tr>
</thead>\n"""%{"bodyelement":bodyelement,"title":utils.html_escape(s)}
else:
return "<h%d>%s</h%d>\n" % (level, utils.html_escape(s), level)
else:
return "---- %s ----\n" % (s)
# Colour definitions, 'end' isn't really for use
ansi_colours = {
'main': "\033[36m",
'contrib': "\033[33m",
'nonfree': "\033[31m",
'provides': "\033[35m",
'arch': "\033[32m",
'end': "\033[0m",
'bold': "\033[1m",
'maintainer': "\033[32m",
'distro': "\033[1m\033[41m"}
html_colours = {
'main': ('<span style="color: aqua">',"</span>"),
'contrib': ('<span style="color: yellow">',"</span>"),
'nonfree': ('<span style="color: red">',"</span>"),
'provides': ('<span style="color: magenta">',"</span>"),
'arch': ('<span style="color: green">',"</span>"),
'bold': ('<span style="font-weight: bold">',"</span>"),
'maintainer': ('<span style="color: green">',"</span>"),
'distro': ('<span style="font-weight: bold; background-color: red">',"</span>")}
def colour_output(s, colour):
if use_html:
return ("%s%s%s" % (html_colours[colour][0], utils.html_escape(s), html_colours[colour][1]))
else:
return ("%s%s%s" % (ansi_colours[colour], s, ansi_colours['end']))
def escaped_text(s, strip=False):
if use_html:
if strip:
s = s.strip()
return "<pre>%s</pre>" % (s)
else:
return s
def formatted_text(s, strip=False):
if use_html:
if strip:
s = s.strip()
return "<pre>%s</pre>" % (utils.html_escape(s))
else:
return s
def output_row(s):
if use_html:
return """<tr><td>"""+s+"""</td></tr>"""
else:
return s
def format_field(k,v):
if use_html:
return """<tr><td class="key">%s:</td><td class="val">%s</td></tr>"""%(k,v)
else:
return "%s: %s"%(k,v)
def foldable_output(title, elementnameprefix, content, norow=False):
d = {'elementnameprefix':elementnameprefix}
result = ''
if use_html:
result += """<div id="%(elementnameprefix)s-wrap"><a name="%(elementnameprefix)s" />
<table class="infobox rfc822">\n"""%d
result += headline(title, bodyelement="%(elementnameprefix)s-body"%d)
if use_html:
result += """ <tbody id="%(elementnameprefix)s-body" class="infobody">\n"""%d
if norow:
result += content + "\n"
else:
result += output_row(content) + "\n"
if use_html:
result += """</tbody></table></div>"""
return result
################################################################################
def get_depends_parts(depend) :
v_match = re_version.match(depend)
if v_match:
d_parts = { 'name' : v_match.group(1), 'version' : v_match.group(2) }
else :
d_parts = { 'name' : depend , 'version' : '' }
return d_parts
def get_or_list(depend) :
or_list = depend.split("|")
return or_list
def get_comma_list(depend) :
dep_list = depend.split(",")
return dep_list
def split_depends (d_str) :
# creates a list of lists of dictionaries of depends (package,version relation)
d_str = re_spacestrip.sub('',d_str)
depends_tree = []
    # first split the depends string at the comma delimiters
dep_list = get_comma_list(d_str)
d = 0
while d < len(dep_list):
# put depends into their own list
depends_tree.append([dep_list[d]])
d += 1
d = 0
while d < len(depends_tree):
k = 0
# split up Or'd depends into a multi-item list
depends_tree[d] = get_or_list(depends_tree[d][0])
while k < len(depends_tree[d]):
# split depends into {package, version relation}
depends_tree[d][k] = get_depends_parts(depends_tree[d][k])
k += 1
d += 1
return depends_tree
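# A rough sketch of the nested structure split_depends() returns
# (hypothetical package names; version strings depend on re_version):
#   split_depends("libfoo | libbar, python")
#   -> [[{'name': 'libfoo', 'version': ''}, {'name': 'libbar', 'version': ''}],
#       [{'name': 'python', 'version': ''}]]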
def read_control (filename):
recommends = []
depends = []
section = ''
maintainer = ''
arch = ''
deb_file = utils.open_file(filename)
try:
extracts = utils.deb_extract_control(deb_file)
control = apt_pkg.TagSection(extracts)
except:
print formatted_text("can't parse control info")
deb_file.close()
raise
deb_file.close()
control_keys = control.keys()
if "Depends" in control:
depends_str = control["Depends"]
        # create list of dependency lists
depends = split_depends(depends_str)
if "Recommends" in control:
recommends_str = control["Recommends"]
recommends = split_depends(recommends_str)
if "Section" in control:
section_str = control["Section"]
c_match = re_contrib.search(section_str)
nf_match = re_nonfree.search(section_str)
if c_match :
# contrib colour
section = colour_output(section_str, 'contrib')
elif nf_match :
# non-free colour
section = colour_output(section_str, 'nonfree')
else :
# main
section = colour_output(section_str, 'main')
if "Architecture" in control:
arch_str = control["Architecture"]
arch = colour_output(arch_str, 'arch')
if "Maintainer" in control:
maintainer = control["Maintainer"]
localhost = re_localhost.search(maintainer)
if localhost:
#highlight bad email
maintainer = colour_output(maintainer, 'maintainer')
else:
maintainer = escape_if_needed(maintainer)
return (control, control_keys, section, depends, recommends, arch, maintainer)
def read_changes_or_dsc (suite, filename, session = None):
dsc = {}
dsc_file = utils.open_file(filename)
try:
dsc = utils.parse_changes(filename, dsc_file=1)
except:
return formatted_text("can't parse .dsc control info")
dsc_file.close()
filecontents = strip_pgp_signature(filename)
keysinorder = []
for l in filecontents.split('\n'):
m = re.match(r'([-a-zA-Z0-9]*):', l)
if m:
keysinorder.append(m.group(1))
for k in dsc.keys():
if k in ("build-depends","build-depends-indep"):
dsc[k] = create_depends_string(suite, split_depends(dsc[k]), session)
elif k == "architecture":
if (dsc["architecture"] != "any"):
dsc['architecture'] = colour_output(dsc["architecture"], 'arch')
elif k == "distribution":
if dsc["distribution"] not in ('unstable', 'experimental'):
dsc['distribution'] = colour_output(dsc["distribution"], 'distro')
elif k in ("files","changes","description"):
if use_html:
dsc[k] = formatted_text(dsc[k], strip=True)
else:
dsc[k] = ('\n'+'\n'.join(map(lambda x: ' '+x, dsc[k].split('\n')))).rstrip()
else:
dsc[k] = escape_if_needed(dsc[k])
keysinorder = filter(lambda x: not x.lower().startswith('checksums-'), keysinorder)
filecontents = '\n'.join(map(lambda x: format_field(x,dsc[x.lower()]), keysinorder))+'\n'
return filecontents
def get_provides(suite):
provides = set()
session = DBConn().session()
query = '''SELECT DISTINCT value
FROM binaries_metadata m
JOIN bin_associations b
ON b.bin = m.bin_id
WHERE key_id = (
SELECT key_id
FROM metadata_keys
WHERE key = 'Provides' )
AND b.suite = (
SELECT id
FROM suite
WHERE suite_name = '%(suite)s'
OR codename = '%(suite)s')''' % \
{'suite': suite}
for p in session.execute(query):
for e in p:
for i in e.split(','):
provides.add(i.strip())
session.close()
return provides
def create_depends_string (suite, depends_tree, session = None):
result = ""
if suite == 'experimental':
suite_list = ['experimental','unstable']
else:
suite_list = [suite]
provides = set()
comma_count = 1
for l in depends_tree:
if (comma_count >= 2):
result += ", "
or_count = 1
for d in l:
if (or_count >= 2 ):
result += " | "
# doesn't do version lookup yet.
component = get_component_by_package_suite(d['name'], suite_list, \
session = session)
if component is not None:
adepends = d['name']
if d['version'] != '' :
adepends += " (%s)" % (d['version'])
if component == "contrib":
result += colour_output(adepends, "contrib")
elif component == "non-free":
result += colour_output(adepends, "nonfree")
else :
result += colour_output(adepends, "main")
else:
adepends = d['name']
if d['version'] != '' :
adepends += " (%s)" % (d['version'])
if not provides:
provides = get_provides(suite)
if d['name'] in provides:
result += colour_output(adepends, "provides")
else:
result += colour_output(adepends, "bold")
or_count += 1
comma_count += 1
return result
def output_package_relations ():
"""
Output the package relations, if there is more than one package checked in this run.
"""
if len(package_relations) < 2:
# Only list something if we have more than one binary to compare
package_relations.clear()
return
to_print = ""
for package in package_relations:
for relation in package_relations[package]:
to_print += "%-15s: (%s) %s\n" % (package, relation, package_relations[package][relation])
package_relations.clear()
return foldable_output("Package relations", "relations", to_print)
def output_deb_info(suite, filename, packagename, session = None):
(control, control_keys, section, depends, recommends, arch, maintainer) = read_control(filename)
if control == '':
return formatted_text("no control info")
to_print = ""
if not package_relations.has_key(packagename):
package_relations[packagename] = {}
for key in control_keys :
if key == 'Depends':
field_value = create_depends_string(suite, depends, session)
package_relations[packagename][key] = field_value
elif key == 'Recommends':
field_value = create_depends_string(suite, recommends, session)
package_relations[packagename][key] = field_value
elif key == 'Section':
field_value = section
elif key == 'Architecture':
field_value = arch
elif key == 'Maintainer':
field_value = maintainer
elif key == 'Description':
if use_html:
field_value = formatted_text(control.find(key), strip=True)
else:
desc = control.find(key)
desc = re_newlinespace.sub('\n ', desc)
field_value = escape_if_needed(desc)
else:
field_value = escape_if_needed(control.find(key))
to_print += " "+format_field(key,field_value)+'\n'
return to_print
def do_command (command, filename, escaped=0):
o = os.popen("%s %s" % (command, filename))
if escaped:
return escaped_text(o.read())
else:
return formatted_text(o.read())
def do_lintian (filename):
if use_html:
return do_command("lintian --show-overrides --color html", filename, 1)
else:
return do_command("lintian --show-overrides --color always", filename, 1)
def get_copyright (deb_filename):
global printed
package = re_package.sub(r'\1', deb_filename)
o = os.popen("dpkg-deb -c %s | egrep 'usr(/share)?/doc/[^/]*/copyright' | awk '{print $6}' | head -n 1" % (deb_filename))
cright = o.read()[:-1]
if cright == "":
return formatted_text("WARNING: No copyright found, please check package manually.")
doc_directory = re_doc_directory.sub(r'\1', cright)
if package != doc_directory:
return formatted_text("WARNING: wrong doc directory (expected %s, got %s)." % (package, doc_directory))
o = os.popen("dpkg-deb --fsys-tarfile %s | tar xvOf - %s 2>/dev/null" % (deb_filename, cright))
cright = o.read()
copyrightmd5 = md5.md5(cright).hexdigest()
res = ""
if printed.copyrights.has_key(copyrightmd5) and printed.copyrights[copyrightmd5] != "%s (%s)" % (package, deb_filename):
res += formatted_text( "NOTE: Copyright is the same as %s.\n\n" % \
(printed.copyrights[copyrightmd5]))
else:
printed.copyrights[copyrightmd5] = "%s (%s)" % (package, deb_filename)
return res+formatted_text(cright)
def get_readme_source (dsc_filename):
tempdir = utils.temp_dirname()
os.rmdir(tempdir)
cmd = "dpkg-source --no-check --no-copy -x %s %s" % (dsc_filename, tempdir)
(result, output) = commands.getstatusoutput(cmd)
if (result != 0):
res = "How is education supposed to make me feel smarter? Besides, every time I learn something new, it pushes some\n old stuff out of my brain. Remember when I took that home winemaking course, and I forgot how to drive?\n"
res += "Error, couldn't extract source, WTF?\n"
res += "'dpkg-source -x' failed. return code: %s.\n\n" % (result)
res += output
return res
path = os.path.join(tempdir, 'debian/README.source')
res = ""
if os.path.exists(path):
res += do_command("cat", path)
else:
res += "No README.source in this package\n\n"
try:
shutil.rmtree(tempdir)
except OSError as e:
if errno.errorcode[e.errno] != 'EACCES':
res += "%s: couldn't remove tmp dir %s for source tree." % (dsc_filename, tempdir)
return res
def check_dsc (suite, dsc_filename, session = None):
(dsc) = read_changes_or_dsc(suite, dsc_filename, session)
return foldable_output(dsc_filename, "dsc", dsc, norow=True) + \
"\n" + \
foldable_output("lintian check for %s" % dsc_filename,
"source-lintian", do_lintian(dsc_filename)) + \
"\n" + \
foldable_output("README.source for %s" % dsc_filename,
"source-readmesource", get_readme_source(dsc_filename))
def check_deb (suite, deb_filename, session = None):
filename = os.path.basename(deb_filename)
packagename = filename.split('_')[0]
if filename.endswith(".udeb"):
is_a_udeb = 1
else:
is_a_udeb = 0
result = foldable_output("control file for %s" % (filename), "binary-%s-control"%packagename,
output_deb_info(suite, deb_filename, packagename, session), norow=True) + "\n"
if is_a_udeb:
result += foldable_output("skipping lintian check for udeb",
"binary-%s-lintian"%packagename, "") + "\n"
else:
result += foldable_output("lintian check for %s" % (filename),
"binary-%s-lintian"%packagename, do_lintian(deb_filename)) + "\n"
result += foldable_output("contents of %s" % (filename), "binary-%s-contents"%packagename,
do_command("dpkg -c", deb_filename)) + "\n"
if is_a_udeb:
result += foldable_output("skipping copyright for udeb",
"binary-%s-copyright"%packagename, "") + "\n"
else:
result += foldable_output("copyright of %s" % (filename),
"binary-%s-copyright"%packagename, get_copyright(deb_filename)) + "\n"
result += foldable_output("file listing of %s" % (filename),
"binary-%s-file-listing"%packagename, do_command("ls -l", deb_filename))
return result
# Read a file, strip the signature and return the modified contents as
# a string.
def strip_pgp_signature (filename):
with utils.open_file(filename) as f:
data = f.read()
signedfile = SignedFile(data, keyrings=(), require_signature=False)
return signedfile.contents
def display_changes(suite, changes_filename):
global printed
changes = read_changes_or_dsc(suite, changes_filename)
printed.copyrights = {}
return foldable_output(changes_filename, "changes", changes, norow=True)
def check_changes (changes_filename):
try:
changes = utils.parse_changes (changes_filename)
except ChangesUnicodeError:
utils.warn("Encoding problem with changes file %s" % (changes_filename))
print display_changes(changes['distribution'], changes_filename)
files = utils.build_file_list(changes)
for f in files.keys():
if f.endswith(".deb") or f.endswith(".udeb"):
print check_deb(changes['distribution'], f)
if f.endswith(".dsc"):
print check_dsc(changes['distribution'], f)
# else: => byhand
def main ():
global Cnf, db_files, waste, excluded
# Cnf = utils.get_conf()
Arguments = [('h',"help","Examine-Package::Options::Help"),
('H',"html-output","Examine-Package::Options::Html-Output"),
]
for i in [ "Help", "Html-Output", "partial-html" ]:
if not Cnf.has_key("Examine-Package::Options::%s" % (i)):
Cnf["Examine-Package::Options::%s" % (i)] = ""
args = apt_pkg.parse_commandline(Cnf,Arguments,sys.argv)
Options = Cnf.subtree("Examine-Package::Options")
if Options["Help"]:
usage()
if Options["Html-Output"]:
global use_html
use_html = 1
stdout_fd = sys.stdout
for f in args:
try:
if not Options["Html-Output"]:
# Pipe output for each argument through less
less_fd = os.popen("less -R -", 'w', 0)
# -R added to display raw control chars for colour
sys.stdout = less_fd
try:
if f.endswith(".changes"):
check_changes(f)
elif f.endswith(".deb") or f.endswith(".udeb"):
# default to unstable when we don't have a .changes file
# perhaps this should be a command line option?
print check_deb('unstable', f)
elif f.endswith(".dsc"):
print check_dsc('unstable', f)
else:
utils.fubar("Unrecognised file type: '%s'." % (f))
finally:
print output_package_relations()
if not Options["Html-Output"]:
# Reset stdout here so future less invocations aren't FUBAR
less_fd.close()
sys.stdout = stdout_fd
except IOError as e:
if errno.errorcode[e.errno] == 'EPIPE':
utils.warn("[examine-package] Caught EPIPE; skipping.")
pass
else:
raise
except KeyboardInterrupt:
utils.warn("[examine-package] Caught C-c; skipping.")
pass
#######################################################################################
if __name__ == '__main__':
main()
|
luther07/dak
|
dak/examine_package.py
|
Python
|
gpl-2.0
| 23,247
|
#!/usr/bin/env python
# Copyright 2011 Google Inc. All Rights Reserved.
"""Parser for IE index.dat files.
Note that this is a very naive and incomplete implementation and should be
replaced with a more intelligent one. Do not implement anything based on this
code, it is a placeholder for something real.
For anyone who wants a useful reference, see this:
http://heanet.dl.sourceforge.net/project/libmsiecf/Documentation/MSIE%20Cache%20
File%20format/MSIE%20Cache%20File%20%28index.dat%29%20format.pdf
"""
import datetime
import glob
import operator
import os
import struct
import sys
import urlparse
import logging
from grr.lib import parsers
from grr.lib.rdfvalues import webhistory
# Difference between 1 Jan 1601 and 1 Jan 1970.
WIN_UNIX_DIFF_MSECS = 11644473600 * 1e6
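# Sketch of the timestamp conversion used in _GetRecord(): the on-disk values
# are assumed to be FILETIMEs (100ns ticks since 1601), so dividing by 10
# yields microseconds, and subtracting the constant (which, despite its name,
# is expressed in microseconds) gives microseconds since the Unix epoch:
#   unix_usecs = filetime_ticks / 10 - WIN_UNIX_DIFF_MSECS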
class IEHistoryParser(parsers.FileParser):
"""Parse IE index.dat files into BrowserHistoryItem objects."""
output_types = ["BrowserHistoryItem"]
supported_artifacts = ["InternetExplorerHistory"]
def Parse(self, stat, file_object, knowledge_base):
"""Parse the History file."""
_, _ = stat, knowledge_base
# TODO(user): Convert this to use the far more intelligent plaso parser.
ie = IEParser(file_object)
for dat in ie.Parse():
yield webhistory.BrowserHistoryItem(
url=dat["url"], domain=urlparse.urlparse(dat["url"]).netloc,
access_time=dat.get("mtime"),
program_name="Internet Explorer", source_urn=stat.aff4path)
class IEParser(object):
"""Parser object for index.dat files.
The file format for IE index.dat files is somewhat poorly documented.
The following implementation is based on information from:
http://www.forensicswiki.org/wiki/Internet_Explorer_History_File_Format
Returns results in chronological order based on mtime
"""
FILE_HEADER = "Client UrlCache MMF Ver 5.2"
BLOCK_SIZE = 0x80
def __init__(self, input_obj):
"""Initialize.
Args:
input_obj: A file like object to read the index.dat from.
"""
self._file = input_obj
self._entries = []
def Parse(self):
"""Parse the file."""
if not self._file:
logging.error("Couldn't open file")
return
# Limit read size to 5MB.
self.input_dat = self._file.read(1024 * 1024 * 5)
if not self.input_dat.startswith(self.FILE_HEADER):
logging.error("Invalid index.dat file %s", self._file)
return
# Events aren't time ordered in the history file, so we collect them all
# then sort.
events = []
for event in self._DoParse():
events.append(event)
for event in sorted(events, key=operator.itemgetter("mtime")):
yield event
def _GetRecord(self, offset, record_size):
"""Retrieve a single record from the file.
Args:
offset: offset from start of input_dat where header starts
record_size: length of the header according to file (untrusted)
Returns:
A dict containing a single browser history record.
"""
record_header = "<4sLQQL"
get4 = lambda x: struct.unpack("<L", self.input_dat[x:x + 4])[0]
url_offset = struct.unpack("B", self.input_dat[offset + 52:offset + 53])[0]
if url_offset in [0xFF, 0xFE]:
return None
data_offset = get4(offset + 68)
data_size = get4(offset + 72)
start_pos = offset + data_offset
data = struct.unpack("{0}s".format(data_size),
self.input_dat[start_pos:start_pos + data_size])[0]
fmt = record_header
unknown_size = url_offset - struct.calcsize(fmt)
fmt += "{0}s".format(unknown_size)
fmt += "{0}s".format(record_size - struct.calcsize(fmt))
dat = struct.unpack(fmt, self.input_dat[offset:offset + record_size])
header, blocks, mtime, ctime, ftime, _, url = dat
url = url.split(chr(0x00))[0]
if mtime: mtime = mtime/10 - WIN_UNIX_DIFF_MSECS
if ctime: ctime = ctime/10 - WIN_UNIX_DIFF_MSECS
return {"header": header, # the header
"blocks": blocks, # number of blocks
"urloffset": url_offset, # offset of URL in file
"data_offset": data_offset, # offset for start of data
"data_size": data_size, # size of data
"data": data, # actual data
"mtime": mtime, # modified time
"ctime": ctime, # created time
"ftime": ftime, # file time
"url": url # the url visited
}
def _DoParse(self):
"""Parse a file for history records yielding dicts.
Yields:
Dicts containing browser history
"""
get4 = lambda x: struct.unpack("<L", self.input_dat[x:x + 4])[0]
filesize = get4(0x1c)
offset = get4(0x20)
coffset = offset
while coffset < filesize:
etype = struct.unpack("4s", self.input_dat[coffset:coffset + 4])[0]
if etype == "REDR":
pass
elif etype in ["URL "]:
# Found a valid record
reclen = get4(coffset + 4) * self.BLOCK_SIZE
yield self._GetRecord(coffset, reclen)
coffset += self.BLOCK_SIZE
def main(argv):
if len(argv) < 2:
print "Usage: {0} index.dat".format(os.path.basename(argv[0]))
else:
files_to_process = []
for input_glob in argv[1:]:
files_to_process += glob.glob(input_glob)
for input_file in files_to_process:
ie = IEParser(open(input_file))
for dat in ie.Parse():
dat["ctime"] = datetime.datetime.utcfromtimestamp(dat["ctime"] / 1e6)
print "{ctime} {header} {url}".format(**dat)
if __name__ == "__main__":
main(sys.argv)
|
darrenbilby/grr
|
parsers/ie_history.py
|
Python
|
apache-2.0
| 5,599
|
# env_inspect.py: Check the testing environment.
# Copyright (C) 2010-2012 Red Hat, Inc.
#
# libvirt-test-API is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranties of
# TITLE, NON-INFRINGEMENT, MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from . import sharedmod
from libvirttestapi.utils import utils
from libvirttestapi.utils import process
def check_libvirt(logger):
command = 'rpm -q virt-install'
result = process.run(command, shell=True, ignore_status=True)
if result.exit_status:
command = 'dnf install virt-install -y'
result = process.run(command, shell=True, ignore_status=True)
virsh = 'virsh -v'
result = process.run(virsh, shell=True, ignore_status=True)
if result.exit_status:
logger.error(result.stderr)
return 1
else:
logger.info(" Virsh command line tool of libvirt: %s" % result.stdout)
command = 'rpm -q libvirt'
result = process.run(command, shell=True, ignore_status=True)
if result.exit_status:
command = 'dnf install libvirt -y'
result = process.run(command, shell=True, ignore_status=True)
command = 'systemctl start libvirtd'
result = process.run(command, shell=True, ignore_status=True)
libvirtd = 'libvirtd --version'
result = process.run(libvirtd, shell=True, ignore_status=True)
if result.exit_status:
logger.error(result.stderr)
return 1
else:
logger.info(" %s" % result.stdout)
default_uri = 'virsh uri'
result = process.run(default_uri, shell=True, ignore_status=True)
if result.exit_status:
logger.error(result.stderr)
return 1
else:
logger.info(" Default URI: %s" % result.stdout.strip())
if 'qemu' in result.stdout:
for qemu in ['/usr/bin/qemu-kvm', '/usr/libexec/qemu-kvm', 'kvm']:
cmd = '%s --version' % qemu
result = process.run(cmd, shell=True, ignore_status=True)
if not result.exit_status:
logger.info(" %s" % result.stdout)
break
if result.exit_status:
logger.error(" no qemu-kvm found")
return 1
elif 'xen' in result.stdout:
#TODO need to get xen hypervisor info here
pass
return 0
def hostinfo(logger):
cmd = 'uname -a'
result = process.run(cmd, shell=True, ignore_status=True)
if result.exit_status:
return 1
logger.info(" %s" % result.stdout)
return 0
def sharemod_init(env_parser, logger):
""" get connection object from libvirt module
initialize sharemod for use by testcases
"""
uri = env_parser.get_value('variables', 'defaulturi')
username = env_parser.get_value('variables', 'username')
password = env_parser.get_value('variables', 'password')
conn = utils.get_conn(uri, username, password)
if not conn:
return 1
# initialize conn object in sharedmod
sharedmod.libvirtobj.clear()
sharedmod.data.clear()
sharedmod.libvirtobj['conn'] = conn
return 0
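# A minimal usage sketch: once sharemod_init() has run, test cases are
# expected to fetch the connection from the shared module, e.g.
#   conn = sharedmod.libvirtobj['conn']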
class EnvInspect(object):
"""to check and collect the testing enviroment infomation
before performing testing
"""
def __init__(self, env_parser, logger):
self.logger = logger
self.env_parser = env_parser
def env_checking(self):
if hostinfo(self.logger):
return 1
if check_libvirt(self.logger):
return 1
if sharemod_init(self.env_parser, self.logger):
return 1
return 0
def close_hypervisor_connection(self):
conn = sharedmod.libvirtobj.get('conn', None)
if conn:
            # conn may already be an invalid pointer, which means the
            # connection has been closed; if so, ignore the error here
try:
conn.close()
conn = None
except Exception as err:
pass
sharedmod.libvirtobj.clear()
sharedmod.data.clear()
return 0
|
libvirt/libvirt-test-API
|
libvirttestapi/src/env_inspect.py
|
Python
|
gpl-2.0
| 4,465
|