code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
import rhwl_hr
import rhwl_holidays
import controllers
| vnsofthe/odoo-dev | addons/rhwl_hr/__init__.py | Python | agpl-3.0 | 55 |
#!/usr/bin/env python
#coding:utf-8
import ConfigParser
import os
import random
import shutil
import string
import sys
import time

from toughportal.common import utils
def gen_secret(clen=32):
    """Return a random alphanumeric secret of ``clen`` characters.

    Uses :class:`random.SystemRandom` (OS entropy source) so the result is
    suitable for security-sensitive tokens.

    :param clen: length of the generated secret (default 32)
    :return: string of digits and ASCII letters
    """
    rng = random.SystemRandom()
    # same character set as before, expressed with the stdlib constants
    alphabet = string.digits + string.ascii_letters
    return ''.join(rng.choice(alphabet) for _ in range(clen))
| talkincode/ToughPORTAL | toughportal/common/secret.py | Python | agpl-3.0 | 344 |
from django.contrib import admin
from django.contrib import messages
from django.core.mail import EmailMultiAlternatives
from django.conf import settings
from .models import Email
from users.models import User
@admin.register(Email)
class EmailAdmin(admin.ModelAdmin):
    """Admin for newsletter emails.

    An email must be approved by at least ``settings.MINIMAL_MAIL_APPROVERS``
    distinct users before it can be sent to all newsletter subscribers.
    """

    list_display = ('subject', 'sent', 'created', 'modified', 'get_approvers', 'is_sendable')
    list_filter = ('sent', 'created', 'modified')
    search_fields = ('subject', 'content', )
    readonly_fields = ('sent', 'approvers', 'markdown_content')
    actions = ['approve', 'send_email', 'send_test_email']

    def get_approvers(self, obj):
        """Comma-separated usernames of the users who approved this email."""
        return ", ".join(u.username for u in obj.approvers.all())
    get_approvers.short_description = "Approbateurs"

    def is_sendable(self, obj):
        """True when the email has enough approvals and was not sent yet."""
        return obj.approvers.count() >= settings.MINIMAL_MAIL_APPROVERS and not obj.sent
    is_sendable.short_description = "Est envoyable"

    def _get_single_email(self, request, queryset, verb):
        """Return the single selected email, or None (after messaging an
        error) when the selection does not contain exactly one item."""
        if queryset.count() != 1:
            # typo fixed: "séléctionner" -> "sélectionner"
            self.message_user(
                request,
                message="Vous ne devez sélectionner qu'un email à %s" % verb,
                level=messages.ERROR,
            )
            return None
        return queryset.first()

    def approve(self, request, queryset):
        """Record the current user as an approver of the selected email."""
        email = self._get_single_email(request, queryset, "approuver")
        if email is None:
            return
        email.approvers.add(request.user)
        self.message_user(request, "L'email a été approuvé.")
    approve.short_description = "Approuver cet email"

    def send_email(self, request, queryset):
        """Send the selected email to every newsletter subscriber (as BCC)."""
        email = self._get_single_email(request, queryset, "envoyer")
        if email is None:
            return
        if email.sent:
            self.message_user(request, message="Cet email a déjà été envoyé", level=messages.ERROR)
            return
        if email.approvers.count() < settings.MINIMAL_MAIL_APPROVERS:
            self.message_user(request, message="Ce message n'a pas assez d'approbateurs", level=messages.ERROR)
            return
        recipients = [u.email for u in User.objects.filter(newsletter=True)]
        message = EmailMultiAlternatives(
            subject=email.subject,
            body=email.content,
            from_email='Newsletter UrLab <contact@urlab.be>',
            to=["UrLab <contact@urlab.be>"],
            bcc=recipients,
        )
        message.attach_alternative(email.markdown_content(), "text/html")
        message.send()
        email.sent = True
        email.save()
        # typo fixed: "énvoyé" -> "envoyé"
        self.message_user(request, "L'email a été envoyé.")
    send_email.short_description = "Envoyer cet email A TOUT LE MONDE"

    def send_test_email(self, request, queryset):
        """Send the selected email only to the current user, as a preview."""
        email = self._get_single_email(request, queryset, "envoyer")
        if email is None:
            return
        if email.sent:
            self.message_user(request, message="Cet email a déjà été envoyé", level=messages.ERROR)
            return
        message = EmailMultiAlternatives(
            subject=email.subject,
            body=email.content,
            from_email='Newsletter UrLab <contact@urlab.be>',
            to=["contact-test@urlab.be"],
            bcc=[request.user.email],
        )
        message.attach_alternative(email.markdown_content(), "text/html")
        message.send()
        # typo fixed: "énvoyé" -> "envoyé"
        self.message_user(request, "L'email a été envoyé à votre adresse")
    send_test_email.short_description = "Envoyer cet email A MOI UNIQUEMENT"
| UrLab/incubator | manmail/admin.py | Python | agpl-3.0 | 3,491 |
# coding: utf-8
# Copyright (c) 2001-2018, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, division, unicode_literals
from tests.check_utils import api_get, api_post, api_delete, api_put, _dt
import json
import pytest
import jmespath
from navitiacommon import models
from tyr import app
@pytest.fixture
def create_autocomplete_parameter():
    """Create one autocomplete parameter set ('idf') plus three datasets.

    One 'bano' and two 'osm' datasets are attached through jobs in state
    'done', so the last_datasets endpoint has data to return.
    """
    with app.app_context():
        autocomplete_param = models.AutocompleteParameter('idf', 'OSM', 'BANO', 'FUSIO', 'OSM', [8, 9])
        models.db.session.add(autocomplete_param)
        # commit first so autocomplete_param.id is populated for the jobs below
        models.db.session.commit()

        # we also create 3 datasets, one for bano, 2 for osm
        for i, dset_type in enumerate(['bano', 'osm', 'osm']):
            job = models.Job()
            dataset = models.DataSet()
            dataset.type = dset_type
            dataset.family_type = 'autocomplete_{}'.format(dataset.type)
            dataset.name = '/path/to/dataset_{}'.format(i)
            models.db.session.add(dataset)
            job.autocomplete_params_id = autocomplete_param.id
            job.data_sets.append(dataset)
            job.state = 'done'
            models.db.session.add(job)
        models.db.session.commit()
@pytest.fixture
def create_two_autocomplete_parameters():
    """Create two autocomplete parameter sets: 'europe' and 'france'."""
    with app.app_context():
        for name, street, address, poi in (
            ('europe', 'OSM', 'BANO', 'OSM'),
            ('france', 'OSM', 'OSM', 'FUSIO'),
        ):
            models.db.session.add(
                models.AutocompleteParameter(name, street, address, poi, 'OSM', [8, 9])
            )
        models.db.session.commit()
@pytest.fixture
def autocomplete_parameter_json():
    """Payload used to create/update the 'peru' autocomplete configuration."""
    return dict(
        name="peru",
        street="OSM",
        address="BANO",
        poi="FUSIO",
        admin="OSM",
        admin_level=[8],
    )
def test_get_autocomplete_parameters_empty():
    """Without any configuration, the collection endpoint returns an empty list."""
    assert api_get('/v0/autocomplete_parameters/') == []
def test_get_all_autocomplete(create_autocomplete_parameter):
    """The single configured parameter set is returned with all its fields."""
    resp = api_get('/v0/autocomplete_parameters/')
    assert len(resp) == 1
    expected = {
        'name': 'idf',
        'street': 'OSM',
        'address': 'BANO',
        'poi': 'FUSIO',
        'admin': 'OSM',
        'admin_level': [8, 9],
    }
    for field, value in expected.items():
        assert resp[0][field] == value
def test_get_autocomplete_by_name(create_two_autocomplete_parameters):
    """Fetching by name returns only the matching parameter set."""
    assert len(api_get('/v0/autocomplete_parameters/')) == 2
    resp = api_get('/v0/autocomplete_parameters/france')
    expected = {
        'name': 'france',
        'street': 'OSM',
        'address': 'OSM',
        'poi': 'FUSIO',
        'admin': 'OSM',
        'admin_level': [8, 9],
    }
    for field, value in expected.items():
        assert resp[field] == value
def test_post_autocomplete(autocomplete_parameter_json):
    """POST creates a new parameter set and echoes back its configuration."""
    resp = api_post(
        '/v0/autocomplete_parameters',
        data=json.dumps(autocomplete_parameter_json),
        content_type='application/json',
    )
    expected = {
        'name': 'peru',
        'street': 'OSM',
        'address': 'BANO',
        'poi': 'FUSIO',
        'admin': 'OSM',
        'admin_level': [8],
    }
    for field, value in expected.items():
        assert resp[field] == value
def test_post_autocomplete_cosmo():
    """A minimal COSMOGONY payload is completed with the default values."""
    resp = api_post(
        '/v0/autocomplete_parameters',
        data=json.dumps({"name": "bobette", "admin": "COSMOGONY"}),
        content_type='application/json',
    )
    expected = {
        'name': 'bobette',
        'street': 'OSM',
        'address': 'BANO',
        'poi': 'OSM',
        'admin': 'COSMOGONY',
        'admin_level': [],
    }
    for field, value in expected.items():
        assert resp[field] == value
def test_put_autocomplete(create_two_autocomplete_parameters, autocomplete_parameter_json):
    """PUT on an existing parameter set replaces its configuration."""
    before = api_get('/v0/autocomplete_parameters/france')
    original = {
        'name': 'france',
        'street': 'OSM',
        'address': 'OSM',
        'poi': 'FUSIO',
        'admin': 'OSM',
        'admin_level': [8, 9],
    }
    for field, value in original.items():
        assert before[field] == value

    after = api_put(
        '/v0/autocomplete_parameters/france',
        data=json.dumps(autocomplete_parameter_json),
        content_type='application/json',
    )
    updated = {
        'street': 'OSM',
        'address': 'BANO',
        'poi': 'FUSIO',
        'admin': 'OSM',
        'admin_level': [8],
    }
    for field, value in updated.items():
        assert after[field] == value
def test_delete_autocomplete(create_two_autocomplete_parameters):
    """DELETE removes the named parameter set and leaves the other intact."""
    assert len(api_get('/v0/autocomplete_parameters/')) == 2
    assert api_get('/v0/autocomplete_parameters/france')['name'] == 'france'

    _, status = api_delete('/v0/autocomplete_parameters/france', check=False, no_json=True)
    assert status == 204

    _, status = api_get('/v0/autocomplete_parameters/france', check=False)
    assert status == 404
    assert len(api_get('/v0/autocomplete_parameters/')) == 1
def test_get_last_datasets_autocomplete(create_autocomplete_parameter):
    """
    we query the loaded datasets of idf
    we loaded 3 datasets, but by default we should get one by family_type, so one for bano, one for osm
    """
    resp = api_get('/v0/autocomplete_parameters/idf/last_datasets')
    assert len(resp) == 2
    datasets_by_type = {d['type']: d for d in resp}

    bano = datasets_by_type.get('bano')
    assert bano
    assert bano['family_type'] == 'autocomplete_bano'
    assert bano['name'] == '/path/to/dataset_0'

    osm = datasets_by_type.get('osm')
    assert osm
    assert osm['family_type'] == 'autocomplete_osm'
    assert osm['name'] == '/path/to/dataset_2'  # we should have the last one

    # if we ask for the 2 last datasets per type, we get all of them
    assert len(api_get('/v0/autocomplete_parameters/idf/last_datasets?count=2')) == 3
@pytest.fixture
def minimal_poi_types_json():
    """Minimal POI-types configuration: two types, one matching rule each."""
    bss = "amenity:bicycle_rental"
    parking = "amenity:parking"
    return {
        "poi_types": [
            {"id": bss, "name": "Station VLS"},
            {"id": parking, "name": "Parking"},
        ],
        "rules": [
            {
                "osm_tags_filters": [{"key": "amenity", "value": "bicycle_rental"}],
                "poi_type_id": bss,
            },
            {
                "osm_tags_filters": [{"key": "amenity", "value": "parking"}],
                "poi_type_id": parking,
            },
        ],
    }
def test_autocomplete_poi_types(create_two_autocomplete_parameters, minimal_poi_types_json):
    """Full POST/GET/DELETE lifecycle of a poi_types configuration."""
    resp = api_get('/v0/autocomplete_parameters/france')
    assert resp['name'] == 'france'

    # POST a minimal conf
    resp = api_post(
        '/v0/autocomplete_parameters/france/poi_types',
        data=json.dumps(minimal_poi_types_json),
        content_type='application/json',
    )

    def check_minimal_conf(resp):
        # renamed from `test_minimal_conf`: a `test_`-prefixed name reads as a
        # collected pytest test, but this is only an inline assertion helper
        assert len(resp['poi_types']) == 2
        assert len(resp['rules']) == 2
        bss_type = jmespath.search("poi_types[?id=='amenity:bicycle_rental']", resp)
        assert len(bss_type) == 1
        assert bss_type[0]['name'] == 'Station VLS'
        bss_rule = jmespath.search("rules[?poi_type_id=='amenity:bicycle_rental']", resp)
        assert len(bss_rule) == 1
        assert bss_rule[0]['osm_tags_filters'][0]['value'] == 'bicycle_rental'
        # check that it's not the "default" conf
        assert not jmespath.search("poi_types[?id=='amenity:townhall']", resp)

    # check that the conf is correctly set on france
    check_minimal_conf(resp)

    # check that the conf on europe is still empty
    resp = api_get('/v0/autocomplete_parameters/europe/poi_types')
    assert not resp

    # check GET of newly defined france conf
    resp = api_get('/v0/autocomplete_parameters/france/poi_types')
    check_minimal_conf(resp)

    # check DELETE of france conf
    resp, code = api_delete('/v0/autocomplete_parameters/france/poi_types', check=False, no_json=True)
    assert not resp
    assert code == 204

    # check get of conf on france is now empty
    resp = api_get('/v0/autocomplete_parameters/france/poi_types')
    assert not resp

    # check that tyr refuses an incorrect conf (poi_types without rules)
    resp, code = api_post(
        '/v0/autocomplete_parameters/france/poi_types',
        data=json.dumps({'poi_types': [{'id': 'bob', 'name': 'Bob'}]}),
        content_type='application/json',
        check=False,
    )
    assert code == 400
    assert resp['status'] == 'error'
    assert 'rules' in resp['message']
| xlqian/navitia | source/tyr/tests/integration/autocomplete_test.py | Python | agpl-3.0 | 9,528 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the required ``bucket`` FK to ``scout.Map``.

    The ``models`` attribute below is a frozen snapshot of the ORM at the time
    the migration was generated; it is intentionally not kept in sync with
    the live models.py.
    """

    def forwards(self, orm):
        # Adding field 'Map.bucket'
        # NOTE(review): default=1 assumes a Bucket with pk=1 exists when
        # migrating rows already present — confirm for existing deployments.
        db.add_column(u'scout_map', 'bucket',
                      self.gf('django.db.models.fields.related.ForeignKey')(default=1, related_name='map', to=orm['bucket.Bucket']),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Map.bucket' (column name carries the _id suffix)
        db.delete_column(u'scout_map', 'bucket_id')

    models = {
        u'accounts.profile': {
            'Meta': {'object_name': 'Profile'},
            'favourite_snack': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mugshot': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'privacy': ('django.db.models.fields.CharField', [], {'default': "'registered'", 'max_length': '15'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': u"orm['auth.User']"})
        },
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'bucket.bucket': {
            'Meta': {'object_name': 'Bucket'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'scout.datalayer': {
            'Meta': {'object_name': 'DataLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'map': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'datalayers'", 'to': u"orm['scout.Map']"})
        },
        u'scout.map': {
            'Meta': {'object_name': 'Map'},
            'bucket': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'map'", 'to': u"orm['bucket.Bucket']"}),
            'center': ('django.contrib.gis.db.models.fields.PointField', [], {'geography': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'locate': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()'}),
            'tilelayer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'maps'", 'to': u"orm['scout.TileLayer']"}),
            'zoom': ('django.db.models.fields.IntegerField', [], {'default': '7'})
        },
        u'scout.marker': {
            'Meta': {'object_name': 'Marker'},
            'address': ('django.db.models.fields.TextField', [], {'default': "''"}),
            'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'markers'", 'to': u"orm['scout.MarkerCategory']"}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Profile']"}),
            'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'datalayer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'markers'", 'to': u"orm['scout.DataLayer']"}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'picture': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'position': ('django.contrib.gis.db.models.fields.PointField', [], {'geography': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        },
        u'scout.markercategory': {
            'Meta': {'object_name': 'MarkerCategory'},
            'icon_color': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'icon_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'marker_color': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'scout.tilelayer': {
            'Meta': {'object_name': 'TileLayer'},
            'attribution': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'max_zoom': ('django.db.models.fields.IntegerField', [], {'default': '18'}),
            'min_zoom': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'url_template': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        }
    }

    complete_apps = ['scout']
# -*- coding: utf-8 -*-
# Copyright 2015-2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo.addons.connector_carepoint.unit import mapper
from .common import SetUpCarepointBase
class TestCarepointImporterMapper(SetUpCarepointBase):
    """Exercise the direct mappings of ``CarepointImportMapper``."""

    def setUp(self):
        super(TestCarepointImporterMapper, self).setUp()
        self.model = 'carepoint.carepoint.store'
        self.mock_env = self.get_carepoint_helper(self.model)
        self.Importer = mapper.CarepointImportMapper
        self.importer = self.Importer(self.mock_env)

    def test_backend_id(self):
        """ It should map backend_id correctly """
        self.assertDictEqual(
            {'backend_id': self.importer.backend_record.id},
            self.importer.backend_id(True),
        )

    def test_company_id(self):
        """ It should map company_id correctly """
        self.assertDictEqual(
            {'company_id': self.importer.backend_record.company_id.id},
            self.importer.company_id(True),
        )
| laslabs/odoo-connector-carepoint | connector_carepoint/tests/test_carepoint_import_mapper.py | Python | agpl-3.0 | 1,081 |
# -*- coding: utf-8 -*-
#
# Progdupeupl documentation build configuration file, created by
# sphinx-quickstart on Sat Dec 07 17:25:18 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# NOTE(review): both pngmath and mathjax are enabled; recent Sphinx releases
# refuse to activate two math renderers at once — confirm the Sphinx version
# pinned for this project still accepts this combination.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.pngmath',
    'sphinx.ext.mathjax',
    'sphinx.ext.viewcode',
    'sphinx.ext.napoleon',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Progdupeupl'
copyright = u'2013, Romain Porte'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.1'
# The full version, including alpha/beta/rc tags.
release = '1.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'Progdupeupldoc'

# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'Progdupeupl.tex', u'Progdupeupl Documentation',
     u'Romain Porte', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'progdupeupl', u'Progdupeupl Documentation',
     [u'Romain Porte'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'Progdupeupl', u'Progdupeupl Documentation',
     u'Romain Porte', 'Progdupeupl', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

autodoc_default_flags = ['members']
| progdupeupl/pdp_website | doc/conf.py | Python | agpl-3.0 | 7,983 |
from tastypie import fields
from tastypie.bundle import Bundle
from tastypie.resources import ModelResource, ALL, ALL_WITH_RELATIONS
from api.authorization import DateaBaseAuthorization
from api.authentication import ApiKeyPlusWebAuthentication
from api.base_resources import JSONDefaultMixin
from api.serializers import UTCSerializer
from django.template.defaultfilters import linebreaksbr
from tastypie.cache import SimpleCache
from tastypie.throttle import CacheThrottle
from django.contrib.contenttypes.models import ContentType
from account.utils import get_domain_from_url
from comment.models import Comment
class CommentResource(JSONDefaultMixin, ModelResource):
    """REST resource exposing user comments attached to arbitrary models."""

    user = fields.ToOneField('account.resources.UserResource',
                             attribute='user', full=True, readonly=True)

    def dehydrate(self, bundle):
        """Flatten the embedded user bundle and expose the content type's
        model name instead of the full related resources."""
        user_bundle = bundle.data['user']
        bundle.data['user'] = {
            'username': user_bundle.data['username'],
            'image_small': user_bundle.data['image_small'],
            'id': user_bundle.data['id'],
        }
        bundle.data['content_type'] = bundle.obj.content_type.model
        return bundle

    def hydrate(self, bundle):
        """Sanitize incoming data before saving.

        PATCH: server-controlled fields are restored from the stored comment
        so clients cannot overwrite them.
        POST: the author, content type and client domain are derived from the
        request rather than trusted from the payload.
        """
        if bundle.request.method == 'PATCH':
            # Bug fix: this block previously read/wrote `request.data`, but
            # `request` is undefined here (NameError on every PATCH); the
            # intent is to restore the original values in `bundle.data`.
            protected_fields = ['user', 'published', 'content_type',
                                'object_id', 'created', 'client_domain']
            orig_obj = Comment.objects.get(pk=int(bundle.data['id']))
            for f in protected_fields:
                if f in bundle.data:
                    bundle.data[f] = getattr(orig_obj, f)
        elif bundle.request.method == 'POST':
            # enforce post user
            bundle.obj.user = bundle.request.user
            bundle.data['user'] = bundle.request.user.id
            # convert model name into model
            bundle.obj.content_type = ContentType.objects.get(model=bundle.data['content_type'])
            bundle.obj.client_domain = get_domain_from_url(bundle.request.META.get('HTTP_ORIGIN', ''))
            del bundle.data['content_type']
        return bundle

    def apply_sorting(self, obj_list, options=None):
        """Default to sorting by creation date when no order_by is given."""
        options = {} if options is None else options.copy()
        if 'order_by' not in options:
            options['order_by'] = 'created'
        return super(CommentResource, self).apply_sorting(obj_list, options)

    class Meta:
        queryset = Comment.objects.all()
        resource_name = 'comment'
        allowed_methods = ['get', 'post', 'patch', 'delete']
        serializer = UTCSerializer(formats=['json'])
        filtering = {
            'id': ['exact'],
            'user': ALL_WITH_RELATIONS,
            'content_type': ALL_WITH_RELATIONS,
            'object_id': ['exact']
        }
        authentication = ApiKeyPlusWebAuthentication()
        authorization = DateaBaseAuthorization()
        limit = 50
        excludes = ['client_domain']
        ordering = ['created']
        #cache = SimpleCache(timeout=5)
        throttle = CacheThrottle(throttle_at=500)
        always_return_data = True
        include_resource_uri = False
def get_comment_resource_class():
    """Return the comment resource class (hook point for overriding apps)."""
    return CommentResource
| lafactura/datea-api | datea_api/apps/comment/resources.py | Python | agpl-3.0 | 3,337 |
# -*- coding: utf-8 -*-
import datetime
from openerp import http
from openerp.http import request
from openerp.addons.website_portal.controllers.main import website_account
class website_account(website_account):
    """Extend the portal "My Account" pages with sales documents."""

    @http.route(['/my/home'], type='http', auth="user", website=True)
    def account(self, **kw):
        """ Add sales documents to main account page """
        response = super(website_account, self).account()
        # removed: unused local `partner` (was assigned but never read)
        SaleOrder = request.env['sale.order']
        Invoice = request.env['account.invoice']
        # NOTE(review): these searches carry no partner filter; visibility is
        # presumably restricted by record rules — confirm.
        quotations = SaleOrder.search([
            ('state', 'in', ['sent', 'cancel'])
        ])
        orders = SaleOrder.search([
            ('state', 'in', ['sale', 'done'])
        ])
        # NOTE(review): 'cancelled' is not a standard account.invoice state
        # (the usual value is 'cancel') — verify the intended filter.
        invoices = Invoice.search([
            ('state', 'in', ['open', 'paid', 'cancelled'])
        ])
        response.qcontext.update({
            'date': datetime.date.today().strftime('%Y-%m-%d'),
            'quotations': quotations,
            'orders': orders,
            'invoices': invoices,
        })
        return response

    @http.route(['/my/orders/<int:order>'], type='http', auth="user", website=True)
    def orders_followup(self, order=None):
        """Render the follow-up page for one of the current partner's orders.

        :param order: database id of the sale.order to display
        """
        partner = request.env['res.users'].browse(request.uid).partner_id
        domain = [
            ('partner_id.id', '=', partner.id),
            ('state', 'not in', ['draft', 'cancel']),
            ('id', '=', order)
        ]
        order = request.env['sale.order'].search(domain)
        # map product -> invoice for every order line already invoiced
        invoiced_lines = request.env['account.invoice.line'].search([('invoice_id', 'in', order.invoice_ids.ids)])
        order_invoice_lines = {il.product_id.id: il.invoice_id for il in invoiced_lines}
        return request.website.render("website_portal_sale.orders_followup", {
            'order': order.sudo(),
            'order_invoice_lines': order_invoice_lines,
        })
| stephen144/odoo | addons/website_portal_sale/controllers/main.py | Python | agpl-3.0 | 1,940 |
"""
create_org_data_czar_policy.py
Creates an IAM group for an edX org and applies an S3 policy to that group
that allows for read-only access to the group.
"""
import argparse
import boto3
from botocore.exceptions import ClientError
from string import Template
import sys
template = Template("""{
"Version":"2012-10-17",
"Statement": [
{
"Sid": "AllowListingOfOrgFolder",
"Action": ["s3:ListBucket"],
"Effect": "Allow",
"Resource": ["arn:aws:s3:::edx-course-data"],
"Condition":{"StringLike":{"s3:prefix":["$org","$org/*"]}}
},
{
"Sid": "AllowGetBucketLocation",
"Action": ["s3:GetBucketLocation"],
"Effect": "Allow",
"Resource": ["arn:aws:s3:::edx-course-data"]
},
{
"Sid": "AllowGetS3ActionInOrgFolder",
"Effect": "Allow",
"Action": ["s3:GetObject"],
"Resource": ["arn:aws:s3:::edx-course-data/$org/*"]
}
]
}""")
def add_org_group(org, iam_connection):
    """Create the IAM group ``edx-course-data-<org>`` and attach the
    read-only S3 policy for that org's folder.

    Both calls tolerate HTTP 409 (group/policy already exists) so the
    script is idempotent; any other client error is printed.
    """
    group_name = "edx-course-data-{org}".format(org=org)
    try:
        iam_connection.create_group(GroupName=group_name)
    except ClientError as bse:
        if bse.response['ResponseMetadata']['HTTPStatusCode'] == 409:
            pass
        else:
            print(bse)
    try:
        iam_connection.put_group_policy(
            GroupName=group_name,
            PolicyName=group_name,
            PolicyDocument=template.substitute(org=org)
        )
    # Bug fix: this handler referenced boto.exception.BotoServerError, but
    # only boto3/botocore are imported -- a failure here raised NameError.
    except ClientError as bse:
        if bse.response['ResponseMetadata']['HTTPStatusCode'] == 409:
            pass
        else:
            print(bse)
    print(template.substitute(org=org))
# --- Command-line entry point ---------------------------------------------
# Accepts either a single org (-o) or a file with one org name per line (-f).
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument('-o', '--org', help='Name of the org for which to create an IAM '
                                       'role and policy, this should have the same '
                                       'name as the S3 bucket')
group.add_argument('-f', '--file', help='The path to a file containing one org name '
                                        'per line.')
args = parser.parse_args()
# One shared IAM client for all group/policy calls.
iam_connection = boto3.client('iam')
if args.org:
    # Single org given on the command line; normalise to lowercase.
    add_org_group(args.org.rstrip('\n').lower(), iam_connection)
elif args.file:
    # Batch mode: process one org name per line.
    with open(args.file) as file:
        for line in file:
            org = line.rstrip('\n').lower()
            add_org_group(org, iam_connection)
else:
    parser.print_usage()
    sys.exit(1)
sys.exit(0)
| edx/configuration | util/create_data_czar/create_org_data_czar_policy.py | Python | agpl-3.0 | 2,563 |
import zmq
import sys
import ConfigParser
import os.path
import proto_objs.venue_configuration_pb2
import daemon
import signal
import lockfile
from optparse import OptionParser
import datetime
# Shared, mutable protobuf holding the full venue configuration; (re)filled
# by parse() and served to clients by run().
full_config = proto_objs.venue_configuration_pb2.configuration()
# Address the configuration server's REP socket binds to.
bind_addr="tcp://127.0.0.1:11111"
def parse(filename):
    """(Re)load *filename* into the module-global ``full_config`` protobuf.

    The [global] section maps onto top-level fields; every other section
    describes one venue (appended via make_protobuf). Always returns True.
    """
    config = ConfigParser.ConfigParser()
    config.read(filename)
    sections = config.sections()
    # Rebuild from scratch so venues removed from the file disappear.
    full_config.Clear()
    i = 0
    for s in sections:
        if s == 'global':
            full_config.trade_serialization_addr = config.get(s, 'trade_serialization_addr')
            full_config.recovery_listener_addr = config.get(s, 'recovery_listener_addr')
            full_config.aggregated_bbo_book_addr = config.get(s, 'aggregated_bbo_book_addr')
            full_config.aggregated_bbo_book_id = config.getint(s, 'aggregated_bbo_book_id')
        else:
            i+=1
            print ("Adding venue: %d " % i)
            single_venue_config = full_config.configs.add()
            make_protobuf(s, config, single_venue_config)
    print full_config.__str__()
    return True
def make_protobuf(section, config, single_venue_config):
    """Copy one venue's settings from a ConfigParser *section* onto the
    given venue-configuration protobuf message."""
    # Required fields: the integer venue id, then the string addresses.
    single_venue_config.venue_id = config.getint(section, 'venue_id')
    for field_name in ('mic_name', 'order_interface_addr', 'order_ping_addr',
                       'market_data_broadcast_addr'):
        setattr(single_venue_config, field_name, config.get(section, field_name))
    # Optional flag - only set when the section declares it.
    if config.has_option(section, 'use_synthetic_cancel_replace'):
        single_venue_config.use_synthetic_cancel_replace = config.getboolean(
            section, 'use_synthetic_cancel_replace')
def run(config_filename):
    """Serve configuration requests over a zmq REP socket (blocking loop).

    Protocol: 'R' -> re-parse the config file, reply [REFRESH, status];
    'C' -> reply [CONFIG, serialized protobuf]; anything else ->
    [ERROR, "unknown message"].
    """
    # Create context and connect
    context = zmq.Context()
    socket = context.socket(zmq.REP)
    # LINGER 0: drop unsent messages immediately when the socket closes.
    socket.setsockopt(zmq.LINGER, 0)
    print "Binding to: ", bind_addr
    socket.bind(bind_addr)
    while True:
        contents = socket.recv()
        print datetime.datetime.now(), "Received msg:<", contents, ">"
        if contents == 'R':
            print "Refresh request"
            refresh_ret = parse(config_filename)
            if (refresh_ret == True):
                refresh_status = "OK"
            else:
                refresh_status = "ERROR"
            socket.send_multipart(["REFRESH", refresh_status])
        elif contents == 'C':
            print "Config request"
            socket.send_multipart(["CONFIG", full_config.SerializeToString()])
        else:
            print "Unknown request - ERROR"
            socket.send_multipart(["ERROR", "unknown message"])
def terminate():
    # NOTE(review): 'socket' and 'context' are locals of run(), not module
    # globals -- calling this function as written raises NameError. It is
    # never referenced in this module (commented-out signal_map in main()),
    # so it appears to be dead code.
    print "Terminate"
    socket.close()
    context.close()
def main():
    """Parse command-line options, load the venue configuration, and start
    the server loop -- optionally daemonized with -D/--daemon."""
    parser = OptionParser(usage="usage: %prog [options] <config_filename>")
    parser.add_option("-D", "--daemon",
            dest="runAsDaemon",
            help="Run configuration server as daemon",
            action="store_true",
            default=False)
    (options, args) = parser.parse_args();
    if len(args) < 1:
        parser.error("Missing arguments")
    config_filename = args[0]
    log_filename = "configuration_server.log"
    log = open(log_filename, 'w+')
    print "Using config file: ", config_filename
    if os.path.exists(config_filename) == False:
        print "Config file: ", config_filename, " does not exist"
        raise Exception("Config file: ", config_filename, " does not exist")
    if options.runAsDaemon == True:
        # Daemon mode: detach and redirect stdout/stderr into the log file.
        context = daemon.DaemonContext(
            working_directory='.',
            umask=0o002,
            #pidfile=lockfile.FileLock('./configuration_server.pid'),
            stdout=log,
            stderr=log)
        #context.signal_map = {
            #signal.SIGTERM: 'terminate',
            #signal.SIGHUP: 'terminate',
            #signal.SIGUSR1: 'terminate',
        #}
        #with daemon.DaemonContext():
        with context:
            parse(config_filename)
            run(config_filename)
    else:
        # Foreground mode.
        parse(config_filename)
        run(config_filename)
| capitalk/system_k | configuration_server/configuration_server.py | Python | agpl-3.0 | 4,240 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_change_currency(osv.osv_memory):
    _name = 'account.change.currency'
    _description = 'Change Currency'
    _columns = {
       'currency_id': fields.many2one('res.currency', 'Change to', required=True, help="Select a currency to apply on the invoice"),
    }

    def view_init(self, cr , uid , fields_list, context=None):
        """Refuse to open the wizard for non-draft invoices."""
        obj_inv = self.pool.get('account.invoice')
        if context is None:
            context = {}
        if context.get('active_id',False):
            if obj_inv.browse(cr, uid, context['active_id']).state != 'draft':
                raise osv.except_osv(_('Error!'), _('You can only change currency for Draft Invoice.'))
        pass

    def change_currency(self, cr, uid, ids, context=None):
        """Convert every invoice line's unit price to the selected currency.

        Three conversion cases:
        - invoice currency == company currency: multiply by the new rate;
        - new currency == company currency: divide by the old rate;
        - neither matches the company currency: divide by the old rate,
          then multiply by the new rate.
        Raises if a required rate is not configured (<= 0).
        """
        obj_inv = self.pool.get('account.invoice')
        obj_inv_line = self.pool.get('account.invoice.line')
        obj_currency = self.pool.get('res.currency')
        if context is None:
            context = {}
        data = self.browse(cr, uid, ids, context=context)[0]
        new_currency = data.currency_id.id
        invoice = obj_inv.browse(cr, uid, context['active_id'], context=context)
        if invoice.currency_id.id == new_currency:
            # Already in the requested currency: nothing to do.
            return {}
        rate = obj_currency.browse(cr, uid, new_currency, context=context).rate
        for line in invoice.invoice_line:
            new_price = 0
            if invoice.company_id.currency_id.id == invoice.currency_id.id:
                new_price = line.price_unit * rate
                if new_price <= 0:
                    raise osv.except_osv(_('Error!'), _('New currency is not configured properly.'))
            if invoice.company_id.currency_id.id != invoice.currency_id.id and invoice.company_id.currency_id.id == new_currency:
                old_rate = invoice.currency_id.rate
                if old_rate <= 0:
                    raise osv.except_osv(_('Error!'), _('Current currency is not configured properly.'))
                new_price = line.price_unit / old_rate
            if invoice.company_id.currency_id.id != invoice.currency_id.id and invoice.company_id.currency_id.id != new_currency:
                old_rate = invoice.currency_id.rate
                if old_rate <= 0:
                    raise osv.except_osv(_('Error!'), _('Current currency is not configured properly.'))
                new_price = (line.price_unit / old_rate ) * rate
            obj_inv_line.write(cr, uid, [line.id], {'price_unit': new_price})
        obj_inv.write(cr, uid, [invoice.id], {'currency_id': new_currency}, context=context)
        return {'type': 'ir.actions.act_window_close'}
| OpusVL/odoo | addons/account/wizard/account_change_currency.py | Python | agpl-3.0 | 3,683 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Conwet Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from uuid import uuid4
from django.contrib.auth.models import User, Group
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from markdown.extensions.toc import slugify
__all__ = ('Organization', 'Team')
class OrganizationManager(models.Manager):
    """Manager providing organization-name availability checks and creation."""

    def is_available(self, name):
        """Return True when *name* is taken by neither a User nor a Group."""
        return not User.objects.filter(username=name).exists() and not Group.objects.filter(name=name).exists()

    def search_available_name(self, username):
        """Derive a free account name from *username*.

        The name is slugified and truncated to the 30-character username
        limit; on collision, a shorter slug plus a random 8-hex-digit
        suffix is retried until an unused name is found.
        """
        max_length = 30
        uuid_length = 8
        short_username = slugify(username, '-')[:max_length - uuid_length]
        final_username = slugify(username, '-')[:max_length]
        while not self.is_available(final_username):
            final_username = short_username + uuid4().hex[:uuid_length]
        return final_username

    def create_organization(self, name, owners=None):
        """Create the User/Group pair backing an organization and its
        'owners' team populated with *owners*.

        Fix: ``owners`` now defaults to None instead of a shared mutable
        list (classic mutable-default-argument pitfall); passing a list of
        users keeps working exactly as before.
        """
        user = User.objects.create(username=name)
        group = Group.objects.create(name=name)
        org = self.create(user=user, group=group)
        team = Team.objects.create(organization=org, name='owners')
        for owner in (owners or ()):
            team.users.add(owner)
        return org
@python_2_unicode_compatible
class Organization(models.Model):
    # An organization is backed by a paired User (owns resources) and
    # Group (grants membership-based permissions); see OrganizationManager.
    user = models.OneToOneField(User)
    group = models.OneToOneField(Group)
    objects = OrganizationManager()
    class Meta:
        app_label = "platform"
    def __str__(self):
        return self.user.username
class TeamManager(models.Manager):
    """
    The manager for the auth's Team model.
    """
    def get_by_natural_key(self, organization, name):
        """Look up a team by its natural key (organization, name)."""
        return self.get(organization=organization, name=name)
@python_2_unicode_compatible
class Team(models.Model):
    """
    Teams are a generic way of categorizing users to apply permissions, or
    some other label, to those users. A user can belong to any number of
    teams.

    Team names are unique per organization (see Meta.unique_together).
    """
    organization = models.ForeignKey(Organization, on_delete=models.CASCADE)
    name = models.CharField(_('name'), max_length=80)
    users = models.ManyToManyField(User, verbose_name=_('users'), blank=True, related_name="teams")
    objects = TeamManager()
    class Meta:
        app_label = "platform"
        unique_together = ('organization', 'name')
        verbose_name = _('team')
        verbose_name_plural = _('teams')
    def __str__(self):
        return self.name
    def natural_key(self):
        # Mirrors TeamManager.get_by_natural_key for serialization.
        return (self.organization, self.name)
| jpajuelo/wirecloud | src/wirecloud/platform/core/models.py | Python | agpl-3.0 | 3,337 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # Auto-generated South schema migration: adds Grader.is_calibration.
    # Do not edit the frozen 'models' dict by hand.

    def forwards(self, orm):
        # Adding field 'Grader.is_calibration'
        db.add_column('controller_grader', 'is_calibration',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Grader.is_calibration'
        db.delete_column('controller_grader', 'is_calibration')

    # Frozen model state at the time this migration was generated.
    models = {
        'controller.grader': {
            'Meta': {'object_name': 'Grader'},
            'confidence': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '9'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'feedback': ('django.db.models.fields.TextField', [], {}),
            'grader_id': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'grader_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_calibration': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'score': ('django.db.models.fields.IntegerField', [], {}),
            'status_code': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['controller.Submission']"})
        },
        'controller.submission': {
            'Meta': {'object_name': 'Submission'},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'grader_settings': ('django.db.models.fields.TextField', [], {'default': "''"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128'}),
            'max_score': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'next_grader_type': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '2'}),
            'posted_results_back_to_queue': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'previous_grader_type': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '2'}),
            'problem_id': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'prompt': ('django.db.models.fields.TextField', [], {'default': "''"}),
            'rubric': ('django.db.models.fields.TextField', [], {'default': "''"}),
            'state': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'student_id': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'student_response': ('django.db.models.fields.TextField', [], {'default': "''"}),
            'student_submission_time': (
            'django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'xqueue_queue_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128'}),
            'xqueue_submission_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128'}),
            'xqueue_submission_key': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128'})
        }
    }
complete_apps = ['controller'] | edx/edx-ora | controller/migrations/0011_auto__add_field_grader_is_calibration.py | Python | agpl-3.0 | 3,932 |
# Copyright 2015, Oliver Nagy <olitheolix@gmail.com>
#
# This file is part of Azrael (https://github.com/olitheolix/azrael)
#
# Azrael is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Azrael is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Azrael. If not, see <http://www.gnu.org/licenses/>.
"""
This module does not contain any tests but utility functions often used in
other tests.
"""
import os
import base64
import subprocess
import numpy as np
import azrael.leonard
from azrael.types import FragMeta, FragDae, FragRaw, FragNone, Template
from azrael.types import CollShapeMeta, CollShapeEmpty, CollShapeSphere
from azrael.types import CollShapeBox, CollShapePlane, RigidBodyData
from azrael.types import Constraint6DofSpring2, ConstraintP2P, ConstraintMeta
def killAzrael():
    """Terminate all Azrael processes and reset shared grid/database state.

    NOTE(review): the pkill pattern is 'Azreal:' -- confirm this matches the
    actual process titles (it may be a transposition of 'Azrael:').
    NOTE(review): assumes azrael.vectorgrid and azrael.database are
    reachable as attributes of the imported ``azrael`` package.
    """
    subprocess.call(['pkill', 'Azreal:'])
    # Delete all grids used in this test.
    assert azrael.vectorgrid.deleteAllGrids().ok
    azrael.database.init()
def getLeonard(LeonardCls=azrael.leonard.LeonardBase):
    """
    Return a ready-to-use ``LeonardCls`` instance.

    Convenience helper to avoid repeating the create-and-setup dance in
    every test.

    :param cls LeonardCls: Leonard class to instantiate.
    """
    instance = LeonardCls()
    instance.setup()
    return instance
def getCSEmpty(pos=[0, 0, 0], rot=[0, 0, 0, 1]):
    """
    Return the metadata record for an empty collision shape.
    """
    shape = CollShapeEmpty()
    return CollShapeMeta('empty', pos, rot, shape)
def getCSBox(pos=[0, 0, 0], rot=[0, 0, 0, 1], dim=[1, 1, 1]):
    """
    Return the metadata record for a box collision shape with extents *dim*.
    """
    shape = CollShapeBox(*dim)
    return CollShapeMeta('box', pos, rot, shape)
def getCSSphere(pos=[0, 0, 0], rot=[0, 0, 0, 1], radius=1):
    """
    Return the metadata record for a spherical collision shape.
    """
    shape = CollShapeSphere(radius)
    return CollShapeMeta('sphere', pos, rot, shape)
def getCSPlane(pos=[0, 0, 0], rot=[0, 0, 0, 1], normal=[0, 0, 1], ofs=0):
    """
    Return the metadata record for a plane collision shape (default: the
    x/y plane).
    """
    shape = CollShapePlane(normal, ofs)
    return CollShapeMeta('plane', pos, rot, shape)
def getFragNone(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
    """
    Return a FragMeta whose '_del_' payload marks the fragment as empty.
    """
    placeholder = FragNone()
    return FragMeta(fragtype='_del_', scale=scale, position=pos,
                    rotation=rot, fragdata=placeholder)
def getFragRaw(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
    """
    Return a FragMeta with random Raw geometry (9 vertex values, 6 UV
    values, 3 RGB values).
    """
    vertices = np.random.randint(0, 100, 9).tolist()
    uv_coords = np.random.randint(0, 100, 6).tolist()
    colours = np.random.randint(0, 100, 3).tolist()
    raw = FragRaw(vertices, uv_coords, colours)
    return FragMeta(fragtype='RAW', scale=scale, position=pos,
                    rotation=rot, fragdata=raw)
def getFragDae(scale=1, pos=(0, 0, 0), rot=(0, 0, 0, 1)):
    """
    Return a FragMeta with Collada geometry loaded from the fixture files
    ('cube.dae' plus two textures) next to this module, Base64 encoded as
    the FragDae type expects.
    """
    base_dir = os.path.dirname(__file__)

    def _load_b64(fname):
        # Read a fixture and return its Base64 encoded text. The 'with'
        # block fixes the original's leaked file handles.
        with open(os.path.join(base_dir, fname), 'rb') as fh:
            return base64.b64encode(fh.read()).decode('utf8')

    dae_file = _load_b64('cube.dae')
    dae_rgb1 = _load_b64('rgb1.png')
    dae_rgb2 = _load_b64('rgb2.jpg')

    geo = FragDae(dae=dae_file,
                  rgb={'rgb1.png': dae_rgb1,
                       'rgb2.jpg': dae_rgb2})
    return FragMeta(fragtype='DAE', scale=scale, position=pos,
                    rotation=rot, fragdata=geo)
def getP2P(aid='constraint_p2p', rb_a=1, rb_b=2,
           pivot_a=(0, 0, -1), pivot_b=(0, 0, 1)):
    """
    Return a Point2Point constraint linking bodies ``rb_a`` and ``rb_b``.
    """
    payload = ConstraintP2P(pivot_a, pivot_b)
    return ConstraintMeta(aid, 'p2p', rb_a, rb_b, payload)
def get6DofSpring2(aid='constraint_6dofspring2', rb_a=1, rb_b=2):
    """
    Return a 6DofSpring2 constraint for bodies ``rb_a`` and ``rb_b``.

    All spring/limit parameters are fixed, arbitrary test values; only the
    spring on the first linear axis is enabled.
    """
    dof = Constraint6DofSpring2(
        frameInA=(0, 0, 0, 0, 0, 0, 1),
        frameInB=(0, 0, 0, 0, 0, 0, 1),
        stiffness=(1, 2, 3, 4, 5.5, 6),
        damping=(2, 3.5, 4, 5, 6.5, 7),
        equilibrium=(-1, -1, -1, 0, 0, 0),
        linLimitLo=(-10.5, -10.5, -10.5),
        linLimitHi=(10.5, 10.5, 10.5),
        rotLimitLo=(-0.1, -0.2, -0.3),
        rotLimitHi=(0.1, 0.2, 0.3),
        bounce=(1, 1.5, 2),
        enableSpring=(True, False, False, False, False, False))
    return ConstraintMeta(aid, '6DOFSPRING2', rb_a, rb_b, dof)
def getRigidBody(scale: (int, float)=1,
                 imass: (int, float)=1,
                 restitution: (int, float)=0.9,
                 rotation: (tuple, list)=(0, 0, 0, 1),
                 position: (tuple, list, np.ndarray)=(0, 0, 0),
                 velocityLin: (tuple, list, np.ndarray)=(0, 0, 0),
                 velocityRot: (tuple, list, np.ndarray)=(0, 0, 0),
                 cshapes: dict={'cssphere': getCSSphere()},
                 axesLockLin: (tuple, list, np.ndarray)=(1, 1, 1),
                 axesLockRot: (tuple, list, np.ndarray)=(1, 1, 1),
                 version: int=0):
    """
    Return a RigidBodyData populated with sensible test defaults
    (a unit-mass sphere at the origin, all motion axes unlocked).

    NOTE(review): the tuple annotations (eg. ``(int, float)``) look like an
    in-project convention for "allowed types", not standard typing --
    confirm before changing them.
    """
    return RigidBodyData(scale, imass, restitution, rotation, position,
                         velocityLin, velocityRot, cshapes, axesLockLin,
                         axesLockRot, version)
def getTemplate(name='template',
                rbs=None,
                fragments=None,
                boosters=None,
                factories=None):
    """
    Return a Template, substituting a default sphere rigid body when *rbs*
    is not given.

    Fix: ``fragments``/``boosters``/``factories`` now default to None
    instead of module-wide shared ``{}`` instances (mutable default
    argument pitfall); each call gets a fresh empty dict, and explicit
    arguments behave exactly as before.
    """
    if rbs is None:
        rbs = getRigidBody(cshapes={'cssphere': getCSSphere()})
    return Template(name, rbs,
                    fragments if fragments is not None else {},
                    boosters if boosters is not None else {},
                    factories if factories is not None else {})
| daviddeng/azrael | azrael/test/test.py | Python | agpl-3.0 | 6,059 |
# -*- coding: utf-8 -*-
# Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2016 - Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
from odoo import api, models
class PurchaseOrder(models.Model):
    _inherit = 'purchase.order'

    @api.model
    def search(self, args, offset=0, limit=None, order=None, count=False):
        """Return an empty recordset for procurement's make_po lookup when
        grouping by whole order is requested, forcing a new PO per
        procurement instead of appending to an existing one."""
        make_po_conditions = {
            'partner_id', 'state', 'picking_type_id', 'company_id',
            'dest_address_id',
        }
        ctx = self.env.context
        if ctx and ctx.get('grouping', 'standard') == 'order':
            # The make_po search is recognisable by the set of fields it
            # filters on.
            searched_fields = {leaf[0] for leaf in args}
            if make_po_conditions.issubset(searched_fields):
                return self.browse()
        return super(PurchaseOrder, self).search(
            args, offset=offset, limit=limit, order=order, count=count)
class PurchaseOrderLine(models.Model):
    _inherit = 'purchase.order.line'

    @api.model
    def search(self, args, offset=0, limit=None, order=None, count=False):
        """Return an empty recordset for procurement's line lookup when
        grouping by line is requested, so quantities are never merged into
        an existing PO line."""
        ctx = self.env.context
        if ctx and ctx.get('grouping', 'standard') == 'line':
            # The procurement lookup uses exactly one domain leaf:
            # ('order_id', 'in', [...]).
            if (len(args) == 1 and args[0][0] == 'order_id'
                    and args[0][1] == 'in'):
                return self.browse()
        return super(PurchaseOrderLine, self).search(
            args, offset=offset, limit=limit, order=order, count=count)
| Eficent/purchase-workflow | procurement_purchase_no_grouping/models/purchase_order.py | Python | agpl-3.0 | 1,521 |
import os
import logging
from superdesk import get_resource_service
from jinja2.loaders import FileSystemLoader, ModuleLoader, ChoiceLoader, DictLoader, PrefixLoader
from liveblog.mongo_util import decode as mongodecode
__all__ = ['ThemeTemplateLoader', 'CompiledThemeTemplateLoader']
logger = logging.getLogger('superdesk')
class ThemeTemplateLoader(FileSystemLoader):
    """
    Theme template loader for jinja2 SEO themes.

    Builds the template search path from the theme's own ``templates``
    directory, followed by the parent theme's (when the theme extends one),
    so child templates shadow parent templates.
    """
    def __init__(self, theme, encoding='utf-8', followlinks=False):
        # NOTE(review): FileSystemLoader.__init__ is deliberately bypassed;
        # the attributes it would set (searchpath/encoding/followlinks) are
        # assigned directly -- confirm this stays compatible with the
        # installed jinja2 version.
        theme_name = theme['name']
        themes = get_resource_service('themes')
        theme_dirname = themes.get_theme_path(theme_name)
        self.searchpath = [os.path.join(theme_dirname, 'templates')]
        parent_theme = theme.get('extends')
        if parent_theme:
            parent_dirname = themes.get_theme_path(parent_theme)
            self.searchpath.append(os.path.join(parent_dirname, 'templates'))
        self.encoding = encoding
        self.followlinks = followlinks
class CompiledThemeTemplateLoader(ChoiceLoader):
    def __init__(self, theme):
        """
        A mixed-logic template loader module. It uses the templates stored
        on the theme document (as a dict) when present, otherwise the
        compiled templates on disk, and in both cases also chains the
        parent theme so template inheritance works.
        """
        self.loaders = []
        theme_name = theme['name']
        themes = get_resource_service('themes')
        parent_theme = theme.get('extends')
        files = theme.get('files', {'templates': {}})
        if files.get('templates'):
            # Templates embedded in the theme document take precedence.
            self.addDictonary(theme)
            if parent_theme:
                parent = themes.find_one(req=None, name=parent_theme)
                self.addDictonary(parent)
        else:
            # Fall back to precompiled template modules on disk.
            compiled = themes.get_theme_compiled_templates_path(theme_name)
            self.loaders.append(ModuleLoader(compiled))
            if parent_theme:
                parent_compiled = themes.get_theme_compiled_templates_path(parent_theme)
                self.loaders.append(ModuleLoader(parent_compiled))
        # let's now add the parent theme prefix loader
        if parent_theme:
            prefix_loader = self._parent_prefix_loader(parent_theme)
            self.loaders.append(prefix_loader)

    def _parent_prefix_loader(self, name):
        """
        Creates a PrefixLoader in order to be able to extends parent theme
        templates using as prefix the parent theme name
        Example:
            {% extends 'parent_theme_name/template_name.html' %}
            {% include 'parent_theme_name/template_name.html' %}
        Args:
            name (`str`): Parent theme name
        Returns:
            PrefixLoader instance with parent_name as prefix
        """
        themes = get_resource_service('themes')
        parent_dirname = themes.get_theme_path(name)
        search_paths = [os.path.join(parent_dirname, 'templates')]
        return PrefixLoader({name: FileSystemLoader(search_paths)})

    def addDictonary(self, theme):
        """
        Add the theme document's template files as a DictLoader, decoding
        the mongo-escaped file names back to template names.
        """
        files = theme.get('files', {'templates': {}})
        if files.get('templates'):
            compiled = {}
            for file, content in files.get('templates').items():
                compiled[mongodecode(file)] = content
            self.loaders.append(DictLoader(compiled))
| hlmnrmr/liveblog | server/liveblog/themes/template/loaders.py | Python | agpl-3.0 | 3,378 |
# GUI object/properties browser.
# Copyright (C) 2011 Matiychuk D.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
import pywinauto
import sys, os
import time
import wx
import thread
import exceptions
import platform
import warnings
from const import *
'''
proxy module for pywinauto
'''
# Fail fast: lower pywinauto's window lookup timeout to 1 second.
pywinauto.timings.Timings.window_find_timeout = 1
def resource_path(filename):
    """Resolve *filename* relative to the application's resource directory.

    Handles the PyInstaller one-file layouts (``sys._MEIPASS`` for >= 1.6,
    the ``_MEIPASS2`` environment variable for older versions) and falls
    back to the directory of the started script for a plain interpreter
    run.
    """
    if hasattr(sys, '_MEIPASS'):
        # PyInstaller >= 1.6
        base_dir = sys._MEIPASS
    elif '_MEIPASS2' in os.environ:
        # PyInstaller < 1.6 (tested on 1.5 only)
        base_dir = os.environ['_MEIPASS2']
    else:
        base_dir = os.path.dirname(sys.argv[0])
    return os.path.join(base_dir, filename)
class SWAPYObject(object):
'''
Base proxy class for pywinauto objects.
'''
    def __init__(self, pwa_obj):
        '''
        Wrap a pywinauto object.

        pwa_obj -- the original pywinauto control/window to proxy.
        '''
        #original pywinauto object
        self.pwa_obj = pwa_obj
        # Children lists are sorted case-insensitively by display name.
        default_sort_key = lambda name: name[0].lower()
        self.subitems_sort_key = default_sort_key
    def GetProperties(self):
        '''
        Return a dict of the original pywinauto properties merged with the
        SWAPY-specific additional ones (handle, access names, ...).
        Can be overridden for non-pywinauto objects.
        '''
        properties = {}
        properties.update(self._get_properies())
        properties.update(self._get_additional_properties())
        return properties
    def Get_subitems(self):
        '''
        Return the list of children as [(control_text, swapy_obj), ...],
        sorted via self.subitems_sort_key and with names encoded to cp1251.
        Can be overridden for non-pywinauto objects.
        '''
        subitems = []
        subitems += self._get_children()
        # The triple-quoted block below is disabled legacy code kept for
        # reference; it is evaluated as a discarded string expression.
        '''
        for control in children:
            try:
                texts = control.Texts()
            except exceptions.RuntimeError:
                texts = ['Unknown control name2!'] #workaround
            while texts.count(''):
                texts.remove('')
            c_name = ', '.join(texts)
            if not c_name:
                #nontext_controlname = pywinauto.findbestmatch.GetNonTextControlName(control, children)[0]
                top_level_parent = control.TopLevelParent().Children()
                nontext_controlname = pywinauto.findbestmatch.GetNonTextControlName(control, top_level_parent)[0]
                if nontext_controlname:
                    c_name = nontext_controlname
                else:
                    c_name = 'Unknown control name1!'
            subitems.append((c_name, self._get_swapy_object(control)))
        '''
        subitems += self._get_additional_children()
        subitems.sort(key=self.subitems_sort_key)
        #encode names
        subitems_encoded = []
        for (name, obj) in subitems:
            name = name.encode('cp1251', 'replace')
            subitems_encoded.append((name, obj))
        return subitems_encoded
    def Exec_action(self, action_id):
        '''
        Execute the action identified by ``action_id`` (an index into the
        ACTIONS table) on the wrapped control. Returns 0.
        '''
        action = ACTIONS[action_id]
        #print('self.pwa_obj.'+action+'()')
        # NOTE(review): exec of a string built from the ACTIONS table; safe
        # only while ACTIONS values are trusted method names.
        exec('self.pwa_obj.'+action+'()')
        return 0
    def Get_actions(self):
        '''
        Return the allowed actions for this object as [(id, action_name), ...],
        sorted case-insensitively by action name. An action is allowed when
        the ACTIONS method name exists on the wrapped object (or its
        WrapperObject).
        '''
        allowed_actions = []
        try:
            obj_actions = dir(self.pwa_obj.WrapperObject())
        except:
            obj_actions = dir(self.pwa_obj)
        for id, action in ACTIONS.items():
            if action in obj_actions:
                allowed_actions.append((id,action))
        allowed_actions.sort(key=lambda name: name[1].lower())
        return allowed_actions
    def Get_code(self, action_id):
        '''
        Generate a pywinauto code snippet that looks the control up by its
        shortest access name and invokes the given action on it.
        '''
        action = ACTIONS[action_id]
        # The continuation lines are intentionally unindented so the
        # generated snippet itself carries no leading whitespace.
        code = "\
ctrl = window['"+self._get_additional_properties()['Access names'][0].encode('unicode-escape', 'replace')+"']\n\
ctrl."+action+"()\n"
        return code
    def Highlight_control(self):
        # Run _highlight_control in a background thread (presumably flashes
        # the control's outline -- implementation lives elsewhere); no-op
        # when the control is not visible. Returns 0.
        if self._check_visibility():
            thread.start_new_thread(self._highlight_control,(3,))
        return 0
    def _get_properies(self):
        '''
        Get the original pywinauto object's properties; returns an empty
        dict when pywinauto raises RuntimeError for the control.
        (Method name typo is kept: other code refers to it as-is.)
        '''
        #print type(self.pwa_obj)
        try:
            properties = self.pwa_obj.GetProperties()
        except exceptions.RuntimeError:
            properties = {} #workaround
        return properties
    def _get_additional_properties(self):
        '''
        Get additional useful properties: the unique 'Access names' under
        the top-level parent, the wrapper type ('pwa_type') and the window
        'handle'. Can be overridden by a derived class. Each lookup is
        best-effort: failures simply omit that key.
        '''
        additional_properties = {}
        pwa_app = pywinauto.application.Application()
        #-----Access names
        try:
            #parent_obj = self.pwa_obj.Parent()
            parent_obj = self.pwa_obj.TopLevelParent()
        except:
            pass
        else:
            try:
                #all_controls = parent_obj.Children()
                all_controls = [pwa_app.window_(handle=ch) for ch in pywinauto.findwindows.find_windows(parent=parent_obj.handle, top_level_only=False)]
            except:
                pass
            else:
                access_names = []
                # Collect every unique name that resolves to this control,
                # shortest first.
                uniq_names = pywinauto.findbestmatch.build_unique_dict(all_controls)
                for uniq_name, obj in uniq_names.items():
                    if uniq_name != '' and obj.WrapperObject() == self.pwa_obj:
                        access_names.append(uniq_name)
                access_names.sort(key=len)
                additional_properties.update({'Access names' : access_names})
        #-----
        #-----pwa_type
        additional_properties.update({'pwa_type' : str(type(self.pwa_obj))})
        #---
        #-----handle
        try:
            additional_properties.update({'handle' : str(self.pwa_obj.handle)})
        except:
            pass
        #---
        return additional_properties
    def _get_children(self):
        '''
        Return the pywinauto object's children and their names as
        [(control_text, swapy_obj), ...]. A child's name is its joined
        non-empty Texts(); when empty, the longest unique access name is
        used, falling back to a placeholder.
        '''
        def _get_name_control(control):
            # Resolve a display name for one child control.
            try:
                texts = control.Texts()
            except exceptions.WindowsError:
                texts = ['Unknown control name2!'] #workaround for WindowsError: [Error 0] ...
            except exceptions.RuntimeError:
                texts = ['Unknown control name3!'] #workaround for RuntimeError: GetButtonInfo failed for button with command id 256
            while texts.count(''):
                texts.remove('')
            text = ', '.join(texts)
            if not text:
                u_names = []
                for uniq_name, obj in uniq_names.items():
                    if uniq_name != '' and obj.WrapperObject() == control:
                    #if uniq_name != '' and obj == control:
                        u_names.append(uniq_name)
                if u_names:
                    u_names.sort(key=len)
                    name = u_names[-1]
                else:
                    name = 'Unknown control name1!'
            else:
                name = text
            return (name, self._get_swapy_object(control))
        pwa_app = pywinauto.application.Application()
        try:
            parent_obj = self.pwa_obj.TopLevelParent()
        except pywinauto.controls.HwndWrapper.InvalidWindowHandle:
            #For non visible windows
            #...
            #InvalidWindowHandle: Handle 0x262710 is not a vaild window handle
            parent_obj = self.pwa_obj
        children = self.pwa_obj.Children()
        visible_controls = [pwa_app.window_(handle=ch) for ch in pywinauto.findwindows.find_windows(parent=parent_obj.handle, top_level_only=False)]
        uniq_names = pywinauto.findbestmatch.build_unique_dict(visible_controls)
        #uniq_names = pywinauto.findbestmatch.build_unique_dict(children)
        names_children = map(_get_name_control, children)
        return names_children
def _get_additional_children(self):
'''
Get additonal children, like for a menu, submenu, subtab, etc.
Should be owerriden in derived classes of non standart pywinauto object
'''
return []
def _get_pywinobj_type(self, obj):
    '''
    Check self pywinauto object type.

    Returns one of: 'window', 'menu', 'menu_item', 'combobox', 'listview',
    'tab', 'toolbar', 'toolbar_button', 'tree_view', 'tree_item', or
    'unknown' for anything unrecognised. Uses exact type comparison, so
    subclasses of these wrappers are NOT matched.
    '''
    if type(obj) == pywinauto.application.WindowSpecification:
        return 'window'
    elif type(obj) == pywinauto.controls.menuwrapper.Menu:
        return 'menu'
    elif type(obj) == pywinauto.controls.menuwrapper.MenuItem:
        return 'menu_item'
    elif type(obj) == pywinauto.controls.win32_controls.ComboBoxWrapper:
        return 'combobox'
    elif type(obj) == pywinauto.controls.common_controls.ListViewWrapper:
        return 'listview'
    elif type(obj) == pywinauto.controls.common_controls.TabControlWrapper:
        return 'tab'
    elif type(obj) == pywinauto.controls.common_controls.ToolbarWrapper:
        return 'toolbar'
    elif type(obj) == pywinauto.controls.common_controls._toolbar_button:
        return 'toolbar_button'
    elif type(obj) == pywinauto.controls.common_controls.TreeViewWrapper:
        return 'tree_view'
    elif type(obj) == pywinauto.controls.common_controls._treeview_element:
        return 'tree_item'
    # BUGFIX: removed an unreachable `elif 1==0: return 'other'` branch;
    # 'other' could never be returned.
    else:
        return 'unknown'
def _get_swapy_object(self, pwa_obj):
    # Wrap a raw pywinauto object in the matching SWAPY wrapper class,
    # falling back to the generic SWAPYObject. Each wrapper name is only
    # resolved in the branch that needs it (kept deliberately lazy).
    pwa_type = self._get_pywinobj_type(pwa_obj)
    #print pwa_type
    if pwa_type == 'smt_NEW':
        # Placeholder branch; 'smt_NEW' is never produced by
        # _get_pywinobj_type in this file — TODO confirm before removing.
        return smt_NEW(pwa_obj)
    if pwa_type == 'window':
        return Pwa_window(pwa_obj)
    if pwa_type == 'menu':
        return Pwa_menu(pwa_obj)
    if pwa_type == 'menu_item':
        return Pwa_menu_item(pwa_obj)
    if pwa_type == 'combobox':
        return Pwa_combobox(pwa_obj)
    if pwa_type == 'listview':
        return Pwa_listview(pwa_obj)
    if pwa_type == 'tab':
        return Pwa_tab(pwa_obj)
    if pwa_type == 'toolbar':
        return Pwa_toolbar(pwa_obj)
    if pwa_type == 'toolbar_button':
        return Pwa_toolbar_button(pwa_obj)
    if pwa_type == 'tree_view':
        return Pwa_tree(pwa_obj)
    if pwa_type == 'tree_item':
        return Pwa_tree_item(pwa_obj)
    else:
        # 'unknown' and anything else: generic wrapper.
        return SWAPYObject(pwa_obj)
def _highlight_control(self, repeat = 1):
while repeat > 0:
repeat -= 1
self.pwa_obj.DrawOutline('red', thickness=1)
time.sleep(0.3)
self.pwa_obj.DrawOutline(colour=0xffffff, thickness=1)
time.sleep(0.2)
return 0
def _check_visibility(self):
'''
Check control/window visibility.
Return pwa.IsVisible() or False if fails
'''
is_visible = False
try:
is_visible = self.pwa_obj.IsVisible()
except:
pass
return is_visible
def _check_actionable(self):
'''
Check control/window Actionable.
Return True or False if fails
'''
try:
self.pwa_obj.VerifyActionable()
except:
is_actionable = False
else:
is_actionable = True
return is_actionable
def _check_existence(self):
    '''
    Check control/window Exists.

    Re-resolves the window by its native handle and returns obj.Exists();
    False when the handle lookup or wrapper construction fails.
    '''
    try:
        spec = pywinauto.application.WindowSpecification(
            {'handle': self.pwa_obj.handle})
    except:
        return False
    return spec.Exists()
class VirtualSWAPYObject(SWAPYObject):
    """A pseudo control: one item, identified by `index`, inside a real parent.

    Visibility/actionability/existence checks are delegated to the parent
    wrapper; the virtual item has no pywinauto object of its own.
    """

    def __init__(self, parent, index):
        self.parent = parent
        self.index = index
        # No underlying pywinauto object; point back at ourselves.
        self.pwa_obj = self
        self._check_visibility = self.parent._check_visibility
        self._check_actionable = self.parent._check_actionable
        self._check_existence = self.parent._check_existence

    def Select(self):
        """Select this item in the parent control."""
        self.parent.pwa_obj.Select(self.index)

    def Get_code(self, action_id):
        """Return a pywinauto snippet calling the action with this item's index."""
        action = ACTIONS[action_id]
        try:
            arg = "'" + self.index.encode('unicode-escape', 'replace') + "'"
        except:
            # Non-string index (e.g. an integer position).
            arg = str(self.index)
        return "ctrl." + action + "(" + arg + ")\n"

    def _get_properies(self):
        # A virtual item exposes no pywinauto properties.
        return {}

    def Get_subitems(self):
        return []

    def Highlight_control(self):
        # Virtual items cannot be outlined on screen.
        return 0
class PC_system(SWAPYObject):
    # Root pseudo-object of the hierarchy: the local PC. Its children are
    # the currently open top-level windows; it has no actions of its own.
    handle = 0
    def Get_subitems(self):
        '''
        returns [(window_text, swapy_obj),...]
        '''
        #windows--------------------
        windows = []
        try_count = 3
        app = pywinauto.application.Application()
        # find_windows() occasionally fails; retry a few times before
        # giving up with an empty list.
        for i in range(try_count):
            try:
                handles = pywinauto.findwindows.find_windows()
            except exceptions.OverflowError: # workaround for OverflowError: array too large
                time.sleep(1)
            except exceptions.MemoryError:# workaround for MemoryError
                time.sleep(1)
            else:
                break
        else:
            #TODO: add swapy exception: Could not get windows list
            handles = []
        #we have to find taskbar in windows list
        warnings.filterwarnings("ignore", category=FutureWarning) #ignore future warning in taskbar module
        from pywinauto import taskbar
        taskbar_handle = taskbar.TaskBarHandle()
        for w_handle in handles:
            wind = app.window_(handle=w_handle)
            if w_handle == taskbar_handle:
                texts = ['TaskBar']
            else:
                texts = wind.Texts()
            # Drop empty strings, join the rest into a display title.
            while texts.count(''):
                texts.remove('')
            title = ', '.join(texts)
            if not title:
                title = 'Window#%s' % w_handle
            # NOTE(review): hard-coded cp1251 encoding here — presumably for
            # a Cyrillic locale; confirm before changing.
            title = title.encode('cp1251', 'replace')
            windows.append((title, self._get_swapy_object(wind)))
        windows.sort(key=lambda name: name[0].lower())
        #-----------------------
        #smt new----------------
        #------------------------
        return windows
    def _get_properies(self):
        # Basic information about the host machine.
        info = { 'Platform' : platform.platform(), \
                 'Processor' : platform.processor(), \
                 'PC name' : platform.node() }
        return info
    def Get_actions(self):
        '''
        No actions for PC_system
        '''
        return []
    def Get_code(self, action_id):
        '''
        No code for PC_system
        '''
        return ''
    def Highlight_control(self):
        # Nothing to highlight for the PC root.
        pass
        return 0
    def _check_visibility(self):
        # The PC root always exists, is visible and actionable.
        return True
    def _check_actionable(self):
        return True
    def _check_existence(self):
        return True
class Pwa_window(SWAPYObject):
    # SWAPY wrapper for a top-level window.
    def _get_additional_children(self):
        '''
        Add menu object as children
        '''
        additional_children = []
        menu = self.pwa_obj.Menu()
        if menu:
            menu_child = [('!Menu', self._get_swapy_object(menu))]
            additional_children += menu_child
        return additional_children
    def Get_code(self, action_id):
        '''
        Generate pywinauto code for a window action: locate the window by
        its title and class name, wrap it, then invoke the action.
        '''
        action = ACTIONS[action_id]
        code = "\
w_handle = pywinauto.findwindows.find_windows(title=u'"+ self.pwa_obj.WindowText().encode('unicode-escape', 'replace') +"', class_name='"+ self.pwa_obj.Class() +"')[0]\n\
window = pwa_app.window_(handle=w_handle)\n\
window."+action+"()\n"
        return code
class Pwa_menu(SWAPYObject):
    # SWAPY wrapper for a menu (pywinauto menuwrapper.Menu). The underlying
    # control that owns the menu is reached via self.pwa_obj.ctrl.
    def _check_visibility(self):
        # Delegate to the control owning the menu.
        is_visible = False
        try:
            is_visible = self.pwa_obj.ctrl.IsVisible()
        except:
            pass
        return is_visible
    def _check_actionable(self):
        try:
            self.pwa_obj.ctrl.VerifyActionable()
        except:
            is_actionable = False
        else:
            is_actionable = True
        return is_actionable
    def _check_existence(self):
        # Existence == the owning control still has a live handle.
        try:
            self.pwa_obj.ctrl.handle
        except:
            is_exist = False
        else:
            is_exist = True
        return is_exist
    def _get_additional_children(self):
        '''
        Add submenu object as children
        '''
        #print(dir(self.pwa_obj))
        #print(self.pwa_obj.is_main_menu)
        #print(self.pwa_obj.owner_item)
        self.subitems_sort_key = lambda obj: obj[1].pwa_obj.Index() #sorts items by indexes
        additional_children = []
        menu_items = self.pwa_obj.Items()
        for menu_item in menu_items:
            item_text = menu_item.Text()
            if item_text == '':
                # 2048 corresponds to the Win32 MFT_SEPARATOR menu flag —
                # TODO confirm against pywinauto's menuwrapper.
                if menu_item.Type() == 2048:
                    item_text = '-----Separator-----'
                else:
                    # Unnamed item: show its position instead.
                    item_text = 'Index: %d' % menu_item.Index()
            menu_item_child = [(item_text, self._get_swapy_object(menu_item))]
            additional_children += menu_item_child
        return additional_children
    def _get_children(self):
        '''
        Return original pywinauto's object children
        '''
        # Items are exposed via _get_additional_children() instead.
        return []
    def Highlight_control(self):
        # Menus cannot be outlined on screen.
        pass
        return 0
class Pwa_menu_item(Pwa_menu):
    # SWAPY wrapper for a single menu item.
    def _check_actionable(self):
        # State 3 marks a grayed-out (disabled) item.
        if self.pwa_obj.State() == 3: #grayed
            is_actionable = False
        else:
            is_actionable = True
        return is_actionable
    def _get_additional_children(self):
        '''
        Add submenu object as children
        '''
        #print(dir(self.pwa_obj))
        #print(self.pwa_obj.menu)
        #print self.get_menuitems_path()
        additional_children = []
        submenu = self.pwa_obj.SubMenu()
        if submenu:
            submenu_child = [(self.pwa_obj.Text()+' submenu', self._get_swapy_object(submenu))]
            additional_children += submenu_child
        return additional_children
    def get_menuitems_path(self):
        '''
        Compose menuitems_path for GetMenuPath. Example "#0 -> Save As", "Tools -> #0 -> Configure"
        '''
        # Walk up the owner-item chain to the main menu, collecting labels,
        # then reverse so the path reads top-down.
        path = []
        owner_item = self.pwa_obj
        while owner_item:
            text = owner_item.Text()
            if not text:
                # Unnamed items are addressed positionally ("#<index>").
                text = '#%d' % owner_item.Index()
            path.append(text)
            menu = owner_item.menu
            owner_item = menu.owner_item
        return '->'.join(path[::-1])
    def Get_code(self, action_id):
        '''
        Generate code for pywinauto module
        '''
        action = ACTIONS[action_id]
        code = "\
window.MenuItem(u'"+self.get_menuitems_path().encode('unicode-escape', 'replace')+"')."+action+"()\n\
"
        return code
class Pwa_combobox(SWAPYObject):
    """SWAPY wrapper for a ComboBox control."""

    def _get_additional_children(self):
        """Expose each combobox entry as a virtual child keyed by its text."""
        return [(item_name, virtual_combobox_item(self, item_name))
                for item_name in self.pwa_obj.ItemTexts()]
class virtual_combobox_item(VirtualSWAPYObject):
    """Virtual child for a single combobox entry, identified by its text."""

    def _get_properies(self):
        """Report the entry's text and its position in the item list (None if absent)."""
        text = self.index
        position = next(
            (i for i, name in enumerate(self.parent.pwa_obj.ItemTexts())
             if name == text),
            None)
        return {'Index': position, 'Text': text.encode('unicode-escape', 'replace')}
class Pwa_listview(SWAPYObject):
    """SWAPY wrapper for a SysListView32 control."""

    def _get_additional_children(self):
        """Expose each list row as a virtual child keyed by its text."""
        return [(self.pwa_obj.GetItem(row)['text'], virtual_listview_item(self, row))
                for row in range(self.pwa_obj.ItemCount())]
class virtual_listview_item(VirtualSWAPYObject):
    """Virtual child for one list-view row, identified by its index."""

    def _get_properies(self):
        """Return the row's pywinauto property dict, plus its index."""
        props = {'Index': self.index}
        for row, row_props in enumerate(self.parent.pwa_obj.Items()):
            if row != self.index:
                continue
            props.update(row_props)
            break
        return props
class Pwa_tab(SWAPYObject):
    """SWAPY wrapper for a TabControl."""

    def _get_additional_children(self):
        """Expose each tab page as a virtual child keyed by its caption."""
        return [(self.pwa_obj.GetTabText(page), virtual_tab_item(self, page))
                for page in range(self.pwa_obj.TabCount())]
class virtual_tab_item(VirtualSWAPYObject):
    """Virtual child for one tab page, identified by its index."""

    def _get_properies(self):
        return {'Index': self.index}
class Pwa_toolbar(SWAPYObject):
    # SWAPY wrapper for a toolbar control; children are its buttons.
    def _get_additional_children(self):
        '''
        Add button objects as children
        '''
        additional_children = []
        buttons_count = self.pwa_obj.ButtonCount()
        for button_index in range(buttons_count):
            try:
                button_text = self.pwa_obj.Button(button_index).info.text
                button_object = self._get_swapy_object(self.pwa_obj.Button(button_index))
            except exceptions.RuntimeError:
                #button_text = ['Unknown button name1!'] #workaround for RuntimeError: GetButtonInfo failed for button with index 0
                pass #ignore the button
            else:
                button_item = [(button_text, button_object)]
                additional_children += button_item
        return additional_children
    def _get_children(self):
        '''
        Return original pywinauto's object children
        '''
        # Buttons are exposed via _get_additional_children() instead.
        return []
class Pwa_toolbar_button(SWAPYObject):
    # SWAPY wrapper for a single toolbar button (_toolbar_button).
    # Visibility/actionability/existence are delegated to the owning
    # toolbar control, self.pwa_obj.toolbar_ctrl.
    def _check_visibility(self):
        is_visible = False
        try:
            is_visible = self.pwa_obj.toolbar_ctrl.IsVisible()
        except:
            pass
        return is_visible
    def _check_actionable(self):
        try:
            self.pwa_obj.toolbar_ctrl.VerifyActionable()
        except:
            is_actionable = False
        else:
            is_actionable = True
        return is_actionable
    def _check_existence(self):
        # Re-resolve the owning toolbar by handle and ask if it still exists.
        try:
            handle_ = self.pwa_obj.toolbar_ctrl.handle
            obj = pywinauto.application.WindowSpecification({'handle': handle_})
        except:
            is_exist = False
        else:
            is_exist = obj.Exists()
        return is_exist
    def _get_children(self):
        return []
    def _get_properies(self):
        # Button-specific state flags and geometry.
        o = self.pwa_obj
        props = {'IsCheckable' : o.IsCheckable(),
                 'IsChecked' : o.IsChecked(),
                 'IsEnabled': o.IsEnabled(),
                 'IsPressable' : o.IsPressable(),
                 'IsPressed' : o.IsPressed(),
                 'Rectangle' : o.Rectangle(),
                 'State' : o.State(),
                 'Style' : o.Style(),
                 'index' : o.index,}
        return props
    def Highlight_control(self):
        # Individual buttons cannot be outlined on screen.
        pass
        return 0
    def Get_code(self, action_id):
        '''
        Generate code for pywinauto module
        '''
        action = ACTIONS[action_id]
        arg = str(self.pwa_obj.index)
        code = "\
ctrl.Button("+arg+")."+action+"()\n"
        return code
class Pwa_tree(SWAPYObject):
    """SWAPY wrapper for a tree-view control."""

    def _get_additional_children(self):
        """Expose the tree roots as children, seeding each wrapper's item path."""
        children = []
        for root in self.pwa_obj.Roots():
            text = root.Text()
            wrapper = self._get_swapy_object(root)
            # The path of item texts lets the item be re-located from the root.
            wrapper.path = [text]
            children.append((text, wrapper))
        return children

    def Highlight_control(self):
        # Tree controls are not outlined here.
        return 0
class Pwa_tree_item(SWAPYObject):
    # SWAPY wrapper for a single tree-view item.
    # NOTE(review): relies on `self.path` (list of item texts from the
    # root) being assigned externally by the parent wrapper before
    # _get_additional_children()/Get_code() are used — confirm callers.
    def _get_properies(self):
        o = self.pwa_obj
        props = {'Rectangle' : o.Rectangle(),
                 'State' : o.State(),
                 'Text' : o.Text(),}
        return props
    def _check_visibility(self):
        # Checks are not implemented for tree items; report success.
        return True
    def _check_existence(self):
        return True
    def _check_actionable(self):
        return True
    def _get_children(self):
        return []
    def Highlight_control(self):
        # Tree items cannot be outlined on screen.
        pass
        return 0
    def _get_additional_children(self):
        '''
        Add sub tree items object as children
        '''
        additional_children = []
        sub_items = self.pwa_obj.Children()
        for item in sub_items:
            item_text = item.Text()
            obj = self._get_swapy_object(item)
            # Extend the path so the child can be re-located from the root.
            obj.path = self.path + [item_text]
            sub_item = [(item_text, obj)]
            additional_children += sub_item
        return additional_children
    def Get_code(self, action_id):
        '''
        Generate code for pywinauto module
        '''
        action = ACTIONS[action_id]
        code = "\
ctrl.GetItem("+str(self.path)+")."+action+"()\n"
        return code
#!/usr/bin/python
# Author: Ben Wing <ben@666.com>
# Date: April 2006
#############################################################################
# #
# ccg_editor.ply #
# #
# Edit a CCG-format file, graphically. Will have a mode for displaying #
# CCG files in a friendly fashion and allowing for editing of parts or #
# all of the file. Will also have a mode for testing a CCG grammar, and #
# allow for compilation and error-finding under control of the editor. #
# #
#############################################################################
# This code is based on PyEdit version 1.1, from Oreilly's Programming
# Python, 2nd Edition, 2001, by Mark Lutz.
from Tkinter import * # base widgets, constants
from tkFileDialog import * # standard dialogs
from tkMessageBox import *
from tkSimpleDialog import *
from tkColorChooser import askcolor
from string import split, atoi
import sys, os, string, md5
import ccg2xml
import Tree
import re
# --- Editor-wide constants and globals -------------------------------------
START = '1.0'                    # index of first char: row=1,col=0
SEL_FIRST = SEL + '.first'       # map sel tag to index
SEL_LAST = SEL + '.last'         # same as 'sel.last'

FontScale = 0                    # use bigger font on linux
if sys.platform[:3] != 'win':    # and other non-windows boxes
    FontScale = 3

# Initial top-level window; it's not clear we need this.
# FIXME: It sucks that we have to call Tk() to get the first top-level window
# but Toplevel() for all others. We should be able to call Tk() initially,
# and then Toplevel() to create all top-level windows, including the first.
root = None

# List of all open CFile objects, keyed per openfiles; filenames holds the
# corresponding file names.
openfiles = {}
filenames = []
class CTab(Frame):
    """Base class for one tab (mode) of a CFile top-level window.

    Holds the shared menubar/toolbar specifications and the Save /
    Save As commands; concrete tabs (edit, lexicon, words, ...)
    subclass it.
    """
    # Initialize this tab. Usually called from a subclass. PARENT is
    # the parent widget, CFILE the CFile object associated with the
    # top-level window, and TABNAME is the name of this tab (that tab
    # will be removed from the toolbar).
    def __init__(self, parent, cfile, tabname):
        Frame.__init__(self, parent)
        self.parent = parent
        self.cfile = cfile
        self.toolbar = None
        self.checkbar = None
        # Menubar spec: list of (label, underline-index, item-list) tuples.
        self.menubar = [
            ('File', 0,
             [('Open...', 0, self.cfile.onOpen),
              ('New', 0, self.cfile.onNew),
              ('Save', 0, self.onSave),
              ('Save As...', 5, self.onSaveAs),
              ('Close', 0, self.cfile.onClose),
              'separator',
              ('Quit VisCCG', 0, self.cfile.onQuit)]
             ),
            ('Tools', 0,
             [('Font List', 0, self.cfile.onFontList),
              ('Pick Bg...', 4, self.cfile.onPickBg),
              ('Pick Fg...', 0, self.cfile.onPickFg),
              ('Color List', 0, self.cfile.onColorList),
              'separator',
              ('Info...', 0, self.cfile.onInfo)]
             )
            ]
        # Toolbar spec: list of (label, callback, pack-options) tuples.
        self.toolbar = [
            # ('Display', self.cfile.onDisplay, {'side': LEFT}),
            ('Edit', self.cfile.onEdit, {'side': LEFT}),
            ('Lexicon', self.cfile.onLexicon, {'side': LEFT}),
            ('Testbed', self.cfile.onTestbed, {'side': LEFT}),
            ('Features', self.cfile.onFeatures, {'side': LEFT}),
            ('Words', self.cfile.onWords, {'side': LEFT}),
            ('Rules', self.cfile.onRules, {'side': LEFT}),
            ('Quit', self.cfile.onClose, {'side': RIGHT}),
            ('Help', self.cfile.help, {'side': RIGHT}),
            ('Save', self.onSave, {'side': RIGHT}),
            ]
        # self.remove_toolbar_button(tabname)

    # Add MENU (a tuple corresponding to a single top-level menu item)
    # after the item with the name AFTER.
    def add_menu(self, after, menu):
        newmenu = []
        for x in self.menubar:
            newmenu += [x]
            if x[0] == after:
                newmenu += [menu]
        self.menubar = newmenu

    # Remove the toolbar button named NAME.
    def remove_toolbar_button(self, name):
        newtoolbar = []
        for x in self.toolbar:
            if x[0] != name:
                newtoolbar += [x]
        self.toolbar = newtoolbar

    def reinit(self):
        # Hook invoked when the user switches to this tab; subclasses that
        # rebuild their widgets override it.
        pass

    #####################
    # File menu commands
    #####################

    def onSave(self):
        # Save under the current name, or fall through to Save As.
        self.onSaveAs(self.cfile.currfile)  # may be None

    def onSaveAs(self, forcefile=None):
        # Write the buffer to FORCEFILE, or prompt the user for a name.
        file = forcefile or self.cfile.my_asksaveasfilename()
        if file:
            text = self.cfile.getAllText()
            try:
                open(file, 'w').write(text)
            except:
                showerror('CCG Editor', 'Could not write file ' + file)
            else:
                self.cfile.setFileName(file)  # may be newly created
                self.cfile.edit_modified(NO)
                # Remember the saved content's signature for change detection.
                self.cfile.last_save_signature = self.cfile.getSignature(text)
class CEdit(CTab):
    """The raw-text editing tab: a Text widget with a line-number gutter,
    clipboard/search commands and a Validate button that runs ccg2xml."""

    def __init__(self, parent, cfile):
        CTab.__init__(self, parent, cfile, 'Edit')
        self.debugFrame = None
        # Frame wrapping the editor so debug/error panes can be embedded
        # below the text widget later.
        editFrame = Frame(self, bd=1, bg='white')
        editFrame.pack(fill=BOTH, expand=YES, side=TOP)
        # Button bar holding the Validate command.
        btnFrame = Frame(editFrame, bd=1)
        btnFrame.grid(row=0, columnspan=3, sticky=NSEW)
        vldButton = Button(btnFrame, text='Validate',
                           command=lambda: self.onValidate(editFrame, cfile))
        vldButton.pack(side=RIGHT)
        # Main edit window in the row below, with both scrollbars.
        vbar = Scrollbar(editFrame)
        hbar = Scrollbar(editFrame, orient='horizontal')
        self.text = Text(editFrame, padx=5, wrap='none', undo=YES)
        vbar.grid(row=1, column=2, sticky=NS)
        hbar.grid(row=2, columnspan=2, sticky=EW)      # pack text last
        self.text.grid(row=1, column=1, sticky=NSEW)   # else sbars clipped
        editFrame.columnconfigure(1, weight=1)
        editFrame.rowconfigure(1, weight=1)
        # Line-number gutter, scrolled together with the text widget.
        self.lineList = Text(editFrame, relief=SUNKEN, bg='white', bd=2,
                             yscrollcommand=vbar.set, width=3)
        self.lineList.grid(row=1, column=0, sticky=NS)
        self.lineList.config(font=self.cfile.fonts[0],
                             bg=self.cfile.colors[0]['bg'],
                             fg=self.cfile.colors[0]['fg'])
        # TODO: the first showLineNums() call historically failed with
        # KeyError because cfile was not fully initialized yet; keep the
        # guard until that is understood.
        try:
            self.showLineNums()
        except KeyError:
            pass
        # BUGFIX: these bindings were previously only made in the KeyError
        # path, so the scrollbar never tracked text movement when the
        # gutter initialized successfully. Always link them.
        self.text.config(yscrollcommand=vbar.set)  # call vbar.set on text move
        self.text.config(xscrollcommand=hbar.set)
        hbar.config(command=self.text.xview)       # or hbar['command']=text.xview
        self.text.config(font=self.cfile.fonts[0],
                         bg=self.cfile.colors[0]['bg'],
                         fg=self.cfile.colors[0]['fg'])
        # One vertical scrollbar drives both the gutter and the text.
        vbar.config(command=self.scrollSet)
        self.add_menu('File',
            ('Edit', 0,
             [('Cut', 0, self.onCut),
              ('Copy', 1, self.onCopy),
              ('Paste', 0, self.onPaste),
              'separator',
              ('Delete', 0, self.onDelete),
              ('Select All', 0, self.onSelectAll)]
             ))
        self.add_menu('Edit',
            ('Search', 0,
             [('Goto...', 0, self.cfile.onGoto),
              ('Find...', 0, self.cfile.onFind),
              ('Refind', 0, self.cfile.onRefind),
              ('Change...', 0, self.onChange)]
             ))

    def scrollSet(self, *args):
        # Scroll the gutter and the text in lockstep.
        self.lineList.yview(*args)
        self.text.yview(*args)

    def reinit(self):
        self.showLineNums()
        self.text.focus()

    def showLineNums(self):
        """Refill the line-number gutter to match the current text."""
        # Temporarily make the gutter editable so we can rewrite it.
        self.lineList.config(state=NORMAL)
        # BUGFIX: clear stale numbers first; previously every reinit()
        # appended a fresh run of numbers after the old ones.
        self.lineList.delete('1.0', END)
        textData = self.cfile.getAllText()
        listOfLines = textData.split('\n')
        # BUGFIX: range end is exclusive, so the last line was never
        # numbered; iterate 1..len inclusive.
        for num in range(1, len(listOfLines) + 1):
            self.lineList.insert(END, "%s\n" % num)
        # BUGFIX: the original comment promised the gutter would be made
        # uneditable again, but set state=NORMAL; disable it for real.
        self.lineList.config(state=DISABLED)

    def onValidate(self, editFrame, cfile):
        """Compile the grammar and show errors (or a success dialog)."""
        # Destroy any previous display of debug or error messages.
        if self.debugFrame:
            self.debugFrame.grid_forget()
        # Compile if the file signature has changed.
        cfile.compile_if_needed()
        if (ccg2xml.error_count > 0):
            self.debugError(editFrame, cfile)
        else:
            showinfo(title='VisCCG: Success', message='No validation errors!')

    def debugError(self, editFrame, cfile):
        """Show the ccg2xml message log in a scrollable list below the editor."""
        self.debugFrame = Frame(editFrame, bg='white', bd=2)
        self.debugFrame.grid(row=3, columnspan=2, sticky=NSEW)
        sbar = Scrollbar(self.debugFrame)
        # Renamed from `list` to avoid shadowing the builtin.
        msg_list = Listbox(self.debugFrame, relief=SUNKEN, bg='white', bd=2,
                           yscrollcommand=sbar.set)
        sbar.config(command=msg_list.yview)
        msg_list.pack(fill=BOTH, side=LEFT, expand=YES)
        sbar.pack(fill=Y, side=RIGHT)
        # Each log entry is (type, lineno, message); lineno may be None.
        for mesg in ccg2xml.message_log:
            type = mesg[0]
            lineno = mesg[1]
            errwarn = mesg[2]
            if lineno:
                dispError = type + ' at Line ' + str(lineno) + ': ' + errwarn
            else:
                dispError = type + ': ' + errwarn
            msg_list.insert(END, dispError)

    #####################
    # Edit menu commands
    #####################

    def onCopy(self):
        """Copy the mouse selection to the cross-application clipboard."""
        if not self.text.tag_ranges(SEL):
            showerror('CCG Editor', 'No text selected')
        else:
            text = self.text.get(SEL_FIRST, SEL_LAST)
            self.clipboard_clear()
            self.clipboard_append(text)

    def onDelete(self):
        """Delete the selection without saving it to the clipboard."""
        if not self.text.tag_ranges(SEL):
            showerror('CCG Editor', 'No text selected')
        else:
            self.text.delete(SEL_FIRST, SEL_LAST)

    def onCut(self):
        """Copy the selection to the clipboard, then remove it."""
        if not self.text.tag_ranges(SEL):
            showerror('CCG Editor', 'No text selected')
        else:
            self.onCopy()
            self.onDelete()

    def onPaste(self):
        """Insert the clipboard contents at the cursor and select them."""
        try:
            text = self.selection_get(selection='CLIPBOARD')
        except TclError:
            showerror('CCG Editor', 'Nothing to paste')
            return
        self.text.insert(INSERT, text)   # add at current insert cursor
        # Select the pasted run so it can immediately be cut again.
        self.text.tag_remove(SEL, '1.0', END)
        self.text.tag_add(SEL, INSERT + '-%dc' % len(text), INSERT)
        self.text.see(INSERT)

    def onSelectAll(self):
        self.text.tag_add(SEL, '1.0', END + '-1c')  # select entire text
        self.text.mark_set(INSERT, '1.0')           # move insert point to top
        self.text.see(INSERT)                       # scroll to top

    #######################
    # Search menu commands
    #######################

    def onChange(self):
        """Open the find/replace dialog."""
        new = Toplevel(self)
        Label(new, text='Find text:').grid(row=0, column=0)
        Label(new, text='Change to:').grid(row=1, column=0)
        self.change1 = Entry(new)
        self.change2 = Entry(new)
        self.change1.grid(row=0, column=1, sticky=EW)
        self.change2.grid(row=1, column=1, sticky=EW)
        Button(new, text='Find',
               command=self.onDoFind).grid(row=0, column=2, sticky=EW)
        Button(new, text='Apply',
               command=self.onDoChange).grid(row=1, column=2, sticky=EW)
        new.columnconfigure(1, weight=1)  # expandable entrys

    def onDoFind(self):
        # NOTE(review): self.onFind is not defined in this class or in the
        # visible CTab base — presumably provided by another mixin/parent;
        # confirm before touching.
        self.onFind(self.change1.get())   # Find in change box

    def onDoChange(self):
        """Replace the current match and advance to the next one."""
        if self.text.tag_ranges(SEL):                    # must find first
            self.text.delete(SEL_FIRST, SEL_LAST)
            self.text.insert(INSERT, self.change2.get()) # deletes if empty
            self.text.see(INSERT)
            self.onFind(self.change1.get())              # goto next appear
            self.text.update()                           # force refresh

    ####################################
    # Others, useful outside this class
    ####################################

    def isEmpty(self):
        return not self.getAllText()

    def getAllText(self):
        # Extract the buffer as a string, without the trailing newline
        # Tk always appends.
        return self.text.get('1.0', END + '-1c')

    def setAllText(self, text):
        """Replace the buffer contents and reset cursor/modified state."""
        self.text.delete('1.0', END)
        self.text.insert(END, text)
        self.text.mark_set(INSERT, '1.0')
        self.text.see(INSERT)
        self.cfile.edit_modified(NO)

    def clearAllText(self):
        self.text.delete('1.0', END)
class CWords(CTab):
    # Tab listing every word defined in the grammar's morphology section.
    def __init__(self, parent, cfile):
        CTab.__init__(self, parent, cfile, 'Words')
        self.child = None
        self.wordList = None
        self.cfile = cfile

    # Called when we switch to this mode using the toolbar at top.
    def reinit(self):
        # Rebuild the listbox from scratch on every switch.
        if self.child:
            self.child.pack_forget()
        self.child = Frame(self, background='white')
        self.child.pack(expand=YES, fill=BOTH)
        scrollbar = Scrollbar(self.child, orient=VERTICAL)
        self.wordList = Listbox(self.child, yscrollcommand=scrollbar.set)
        self.wordList.grid(row=0, column=0, sticky=N+S+E+W)
        scrollbar.config(command= self.wordList.yview)
        scrollbar.grid(row=0, column=1, sticky=N+S)
        self.child.grid_rowconfigure(0, weight=1)
        self.child.grid_columnconfigure(0, weight=1)
        # If the data hasn't been compiled yet, then do so.
        # NOTE(review): the bare except also hides unrelated failures of the
        # attribute access — only an AttributeError is expected here.
        try:
            dummy = ccg2xml.morph_xml
        except:
            self.cfile.compile_if_needed()
        # Populate the listbox with every morphology entry's word.
        for x in ccg2xml.morph_xml:
            assert x[0] == 'entry'
            self.wordList.insert (END, ccg2xml.getprop('word', x[1]))
            #print ccg2xml.getprop('word', x[1])
class CLexicon(CTab):
    # Tab rendering the parsed lexicon graphically on a scrollable canvas.

    class lexicon_vars(object):
        # Tk checkbox variables controlling what ccg2xml.draw_parse shows.
        def __init__(self):
            self.show_feat_id = IntVar()
            self.show_feat_id.set(1)
            self.show_feat_struct = IntVar()
            self.show_feat_struct.set(1)
            self.show_full_features = IntVar()
            self.show_full_features.set(0)
            self.show_semantics = IntVar()
            self.show_semantics.set(1)

    def __init__(self, parent, cfile):
        CTab.__init__(self, parent, cfile, 'Lexicon')
        self.child = None
        self.cnv = None
        self.mainFrame = None
        self.vars = self.lexicon_vars()
        # FIXME? It's a bit awkward that ccg.ply has references to the
        # variables below scattered throughout it. But I'm not sure what
        # a better solution would be.
        self.checkbar = [
            ("Show feature ID's", self.vars.show_feat_id),
            ("Show features", self.vars.show_feat_struct),
            ('Full-form features', self.vars.show_full_features),
            ('Show semantics', self.vars.show_semantics),
            ]

    # Called when we switch to this mode using the toolbar at top.
    def reinit(self):
        self.redraw()

    def redraw(self):
        # Rebuild the canvas and redraw the current parse from scratch.
        self.cfile.compile_if_needed()
        if self.child:
            self.child.pack_forget()
        if self.mainFrame:
            self.mainFrame.pack_forget()
        self.mainFrame = Frame(self, bd=1, bg='white')
        self.mainFrame.pack_propagate(0)
        self.mainFrame.pack(expand=YES, fill=BOTH)
        self.mainFrame.grid_rowconfigure(0, weight=1)
        self.mainFrame.grid_columnconfigure(0, weight=1)
        xscrollbar = Scrollbar(self.mainFrame, orient=HORIZONTAL)
        xscrollbar.grid(row=1, column=0, sticky=E+W)
        yscrollbar = Scrollbar(self.mainFrame)
        yscrollbar.grid(row=0, column=1, sticky=N+S)
        # Fixed initial size; the scrollregion is set after drawing.
        self.cnv = Canvas(self.mainFrame, bd=2, xscrollcommand=xscrollbar.set,
                          yscrollcommand=yscrollbar.set, width = 847, height=369)
        xscrollbar.config(command= self.cnv.xview)
        yscrollbar.config(command= self.cnv.yview)
        # The parse is drawn into a frame embedded in the canvas.
        self.child = Frame(self.cnv, bd=2, relief=SUNKEN, background='white')
        self.cnv.create_window(0, 0, anchor='nw', window=self.child)
        ccg2xml.draw_parse(self.cfile.curparse.parse, self.cfile, self.child, self.vars, self.cnv, self.mainFrame)
        self.child.update_idletasks()
        # Size the scrollable region to everything just drawn.
        self.cnv.config(scrollregion=self.cnv.bbox("all"))
        self.cnv.grid(row=0, column=0, sticky='NSEW')
class CRules(CTab):
    """Placeholder tab for grammar rules; no extra widgets yet."""

    def __init__(self, parent, cfile):
        CTab.__init__(self, parent, cfile, 'Rules')
class CFeatures(CTab):
def __init__(self, parent, cfile):
    """Set up the Features tab; the tree widgets are created lazily in reinit()."""
    CTab.__init__(self, parent, cfile, 'Features')
    # Widgets are built on first reinit().
    self.text = None
    self.edit = None
    self.checkbar = None
    self.child = None
# Called when we switch to this mode using the toolbar at top.
def reinit(self):
    # Rebuild the whole tab: the Expand/Contract/Edit button bar plus the
    # feature-hierarchy tree widget.
    if self.child:
        self.child.pack_forget()
    self.child = Frame(self, background='white', width = 847, height = 369)
    self.child.pack(expand=YES, fill=BOTH)
    butframe = Frame(self.child, cursor='hand2',
                     relief=SUNKEN, bd=2)
    butframe.pack(fill=X)
    but1 = Button(butframe, text='Expand All', command=self.expand_all)
    but1.pack(side=LEFT)
    but2 = Button(butframe, text='Contract All', command=self.contract_all)
    but2.pack(side=LEFT)
    # Force editing in the same frame: but a lower view:
    # pass self.child as the parent frame
    self.edit = Button(butframe, text='Edit', command= lambda:self.edit_tree(self.child))
    self.edit.pack(side=RIGHT)
    featframe = Frame(self.child, bd=2, relief=SUNKEN,
                      background='white')
    featframe.pack(expand=YES, fill=BOTH)
    self.cfile.compile_if_needed()
    # Build the tree as an adjacency map (name -> list of child names);
    # the root label is the grammar file's base name (extension stripped).
    self.tree={}
    self.root_name = re.sub(r'^(.*)\.(.*)$', r'\1', self.cfile.file)
    self.tree[self.root_name]=[]
    for feat in self.cfile.curparse.feature_to_values:
        self.tree[self.root_name] += [str(feat)]
    for feat in self.cfile.curparse.feature_to_values:
        self.tree[feat] = []
        # Make sure every value has an entry before wiring parents.
        for x in self.cfile.curparse.feature_to_values[feat]:
            if x.name not in self.tree:
                self.tree[x.name] = []
        for x in self.cfile.curparse.feature_to_values[feat]:
            # A value with a parent hangs under it; otherwise directly
            # under its feature.
            if x.parents:
                par = x.parents[0]
                self.tree[par.name] += [x.name]
            else:
                self.tree[feat] += [x.name]
    # Define the images for opened and closed categories
    shut_icon=PhotoImage(data='R0lGODlhCQAQAJH/AMDAwAAAAGnD/wAAACH5BAEAAAAALAAA'
                              'AAAJABAAQAIdhI8hu2EqXIroyQrb\nyRf0VG0UxnSZ5jFjulrhaxQ'
                              'AO6olVwAAOw==')
    open_icon=PhotoImage(data='R0lGODlhEAAJAJH/AMDAwAAAAGnD/wAAACH5BAEAAAAALAAA'
                              'AAAQAAkAQAIahI+pyyEPg3KwPrko\nTqH7/yGUJWxcZTapUQAAO8b'
                              'yUgAAOw==')
    # Create the tree widget; children are supplied on demand through
    # get_treedata().
    self.t=Tree.Tree(master=featframe,
                     root_id='',
                     root_label=self.root_name,
                     collapsed_icon=shut_icon,
                     expanded_icon=open_icon,
                     get_contents_callback=self.get_treedata,
                     line_flag=False)
    self.t.grid(row=0, column=0, sticky = 'nsew')
    featframe.grid_rowconfigure(0, weight=1)
    featframe.grid_columnconfigure(0, weight=1)
    sb=Scrollbar(featframe)
    sb.grid(row=0, column=1, sticky='ns')
    self.t.configure(yscrollcommand=sb.set)
    sb.configure(command=self.t.yview)
    sb=Scrollbar(featframe, orient=HORIZONTAL)
    sb.grid(row=1, column=0, sticky='ew')
    self.t.configure(xscrollcommand=sb.set)
    sb.configure(command=self.t.xview)
    # Expand the whole tree out
    self.expand_tree(self.t.root)
# Tree callback: supply the children of NODE on demand.
def get_treedata(self, node):
    """Add NODE's children to the tree, flagging the expandable ones."""
    label = str(node.get_label())
    for child in self.tree[label]:
        # flag=1 marks nodes that can be expanded further.
        has_kids = int(bool(self.tree[child]))
        self.t.add_node(name=child, flag=has_kids)
# Recursively expand the subtree rooted at NODE.
def expand_tree(self, node):
    """Expand NODE and every expandable descendant."""
    node.expand()
    for kid in node.children():
        if kid.expandable():
            self.expand_tree(kid)
def expand_all(self):
    """Expand every node in the feature tree."""
    self.expand_tree(self.t.root)
def contract_all(self):
    """Collapse the feature tree down to its root."""
    self.t.root.collapse()
    def edit_tree(self, parent):
        """Replace the feature-tree view with a raw text editor over the whole
        grammar file, scrolled to the first occurrence of 'feature', and turn
        the Edit button into a Done button that triggers save_tree()."""
        editFrame = Frame(parent, bd=1, background='white')
        self.text = Text(editFrame, padx=5, wrap=None, undo = YES, background='white')
        vbar = Scrollbar(editFrame)
        hbar = Scrollbar(editFrame, orient='horizontal')
        self.text.config(yscrollcommand=vbar.set)                # call vbar.set on text move
        self.text.config(xscrollcommand=hbar.set)
        vbar.config(command=self.text.yview)                  # call text.yview on scroll move
        hbar.config(command=self.text.xview)                  # or hbar['command']=text.xview
        # Change the text on the button, and also pass the rest
        # of the arguments so that the grid for the statements can be reset
        self.edit.config(text='Done', command= lambda:self.save_tree(parent))
        # Changing the mode of the cfile object here,
        # so that once the user clicks done,
        # the whole object is recompiled and redisplayed
        self.cfile.mode= 'Edit'
        vbar.pack(side=RIGHT, fill=Y)
        hbar.pack(side=BOTTOM, fill=X)
        self.text.pack(fill= BOTH, expand= YES)
        # Set a mark at the beginning of the text
        self.text.mark_set("START", INSERT)
        self.text.mark_gravity("START", LEFT)
        # Push in the rest of the file's contents
        fileData = self.cfile.getAllText()
        self.text.insert(INSERT, fileData)
        # Move the insert position to the first occurence of the family name
        # FIXME: this is poor implementation
        # The positioning of the insert cursor should be happening by parsing the
        # CFG production rules, using CSFamily.prod.lineno and endlineno
        self.text.config(takefocus=True)
        # NOTE(review): a plain substring search; matches 'feature' anywhere,
        # including inside comments or longer words -- see FIXME above.
        idx= self.text.search('feature', "START")
        if idx:
            self.text.mark_set(CURRENT, idx)
            self.text.see(CURRENT)
        else:
            showwarning('Warning','Features not located in text')
        editFrame.pack(expand=YES, fill=BOTH)
def save_tree(self, parent):
# We force the text contents of the cfile object to copy over
# all that is presently in the current text-box
self.cfile.setAllText(self.text.get(1.0,END))
self.edit.config(text='Edit', command= lambda:self.edit_tree(parent))
# Recompile whatever was edited and redisplay
# Note: changes are not saved hereby!!
self.cfile.compile_if_needed()
self.cfile.onFeatures()
class CTestbed(CTab):
    """'Testbed' tab: shows each test sentence of the grammar with its
    expected number of parses, and lets the user edit the testbed section
    of the file or append a new test sentence."""
    def __init__(self, parent, cfile):
        CTab.__init__(self, parent, cfile, 'Testbed')
        # Widgets are created lazily in reinit(); None means "not built yet".
        self.child = None
        self.edit = None
        self.text = None
        self.editFrame = None
        self.cnv = None
        self.mainFrame = None
        self.newInsert = None
    def makelab(self, text, row, col, **props):
        # Helper: drop a white Label into the sentence grid at (row, col).
        lab = Label(self.child, text=text, background='white', **props)
        # Make the label grow to fill all space allocated for the column
        lab.grid(row=row, column=col, sticky='NSEW')
    # Called when we switch to this mode using the toolbar at top.
    def reinit(self):
        """Rebuild the whole tab: scrollable canvas, Edit / New Sentence
        buttons, and one grid row per testbed sentence."""
        # Tear down whatever a previous visit to this tab left behind.
        if self.child:
            self.child.pack_forget()
        if self.mainFrame:
            self.mainFrame.pack_forget()
        self.mainFrame = Frame(self, bd=1, bg='white')
        self.mainFrame.pack(expand=YES, fill=BOTH)
        self.mainFrame.grid_rowconfigure(0, weight=1)
        self.mainFrame.grid_columnconfigure(0, weight=1)
        xscrollbar = Scrollbar(self.mainFrame, orient=HORIZONTAL)
        xscrollbar.grid(row=1, column=0, sticky=E+W)
        yscrollbar = Scrollbar(self.mainFrame)
        yscrollbar.grid(row=0, column=1, sticky=N+S)
        self.cnv= Canvas(self.mainFrame, bd=2, xscrollcommand=xscrollbar.set,
                        yscrollcommand=yscrollbar.set, width = 847, height=369)
        xscrollbar.config(command=self.cnv.xview)
        yscrollbar.config(command=self.cnv.yview)
        self.child = Frame(self.cnv, bd=2, relief=SUNKEN, background='white')
        self.child.rowconfigure(1, weight=1)
        self.child.columnconfigure(1, weight=1)
        self.child.pack(expand=YES, fill=BOTH)
        butnFrame = Frame(self.child, relief=SUNKEN, bd=2)
        butnFrame.grid(row=0, sticky='NSEW', columnspan=2)
        self.edit = Button(butnFrame, text='Edit', command= self.edit_testbed)
        self.edit.pack(side=RIGHT)
        self.newInsert = Button(butnFrame, text='New Sentence', command= self.new_sentence)
        self.newInsert.pack(side=RIGHT)
        # Make sure curparse reflects the current buffer before reading it.
        self.cfile.compile_if_needed()
        self.makelab("Num Parses", 1, 0, bd=1, relief=SUNKEN, fg="#77AA77", font = ("Helvetica", FontScale +12))
        self.makelab("Sentence", 1, 1, bd=1, relief=SUNKEN, fg="#77AA77", font = ("Helvetica", FontScale +12))
        # Make the column containing the sentences grow to include all
        # extra space
        self.child.columnconfigure(1, weight=1)
        for i in xrange(len(self.cfile.curparse.testbed_statements)):
            x = self.cfile.curparse.testbed_statements[i]
            assert x[0] == 'item'
            x = x[1]
            # Left-justify the text
            numparse = ccg2xml.getprop('numOfParses', x)
            string = ccg2xml.getprop('string', x)
            # How many parses of the sentence are produced?
            self.makelab('%s' % numparse, i+2, 0)
            # Print the sentence itself
            # (sentences that should not parse, numparse == 0, get a '*').
            self.makelab('%s%s' % (numparse == 0 and '*' or '', string),
                         i+2, 1, anchor=W)
        self.cnv.create_window(0, 0, anchor='nw', window=self.child)
        self.child.update_idletasks()
        #self.child.grid(row=0, column=0, sticky=NSEW)
        self.cnv.config(scrollregion=self.cnv.bbox("all"))
        self.cnv.grid(row=0, column=0, sticky='NSEW')
    # Edit the testbed
    def edit_testbed(self):
        """Open a raw-text editor over the whole file below the sentence list,
        scrolled to the 'testbed' keyword; Edit button becomes Done."""
        self.editFrame = Frame(self.mainFrame, bd=1, background='white')
        #self.editFrame.grid(row=len(self.cfile.curparse.testbed_statements)+3, columnspan=2, sticky='NSEW')
        self.editFrame.grid(row=2, columnspan=2, sticky='NSEW')
        self.text = Text(self.editFrame, padx=5, wrap=None, undo = YES, background='white')
        vbar = Scrollbar(self.editFrame)
        hbar = Scrollbar(self.editFrame, orient='horizontal')
        self.text.config(yscrollcommand=vbar.set)                # call vbar.set on text move
        self.text.config(xscrollcommand=hbar.set)
        vbar.config(command=self.text.yview)                  # call text.yview on scroll move
        hbar.config(command=self.text.xview)                  # or hbar['command']=text.xview
        # Change the text on the button, and also pass the rest
        # of the arguments so that the grid for the statements can be reset
        self.edit.config(text='Done', command= self.save_testbed)
        # Changing the mode of the cfile object here,
        # so that once the user clicks done,
        # the whole object is recompiled and redisplayed
        self.cfile.mode= 'Edit'
        vbar.pack(side=RIGHT, fill=Y)
        hbar.pack(side=BOTTOM, fill=X)
        self.text.pack(fill= BOTH, expand= YES)
        # Set a mark at the beginning of the text
        self.text.mark_set("START", INSERT)
        self.text.mark_gravity("START", LEFT)
        # Push in the rest of the file's contents
        fileData = self.cfile.getAllText()
        self.text.insert(INSERT, fileData)
        # Move the insert position to the first occurence of the family name
        # FIXME: this is poor implementation
        # The positioning of the insert cursor should be happening by parsing the
        # CFG production rules, using CSFamily.prod.lineno and endlineno
        self.text.config(takefocus=True)
        idx= self.text.search('testbed', "START")
        if idx:
            self.text.mark_set(CURRENT, idx)
            self.text.see(CURRENT)
        else:
            showwarning(title= 'VisCCG: Warning', message='No initial testbed found')
        #self.editFrame.pack(expand=YES, fill=BOTH)
        self.child.update_idletasks()
        self.cnv.config(scrollregion=self.cnv.bbox("all"))
    # Save the edited text
    def save_testbed(self):
        """Copy the editor contents back into the model, recompile, and
        redraw this tab. Changes are in-memory only (not written to disk)."""
        # We force the text contents of the cfile object to copy over
        # all that is presently in the current text-box
        self.cfile.setAllText(self.text.get(1.0,END))
        self.edit.config(text='Edit', command= self.edit_testbed)
        self.editFrame.pack_forget()
        # Recompile whatever was edited and redisplay
        # Note: changes are not saved hereby!!
        self.cfile.compile_if_needed()
        self.cfile.onTestbed()
    # Enter a new sentence
    def new_sentence(self):
        """Pop up a small dialog asking for a sentence and its expected
        number of parses; "Add sentence" hands both to editNew()."""
        # NOTE(review): Tk() here creates a second Tk root rather than a
        # Toplevel; appears to work, but worth confirming against the rest
        # of the app's window management.
        master = Tk()
        master.title('VisCCG: New Sentence for the testbed')
        sent = Entry(master, bg='#FFFFFF', width = 100)
        nParses = Entry(master, bg='#FFFFFF', width = 2)
        sLabel = Label (master, text = 'Sentence:')
        nLabel = Label (master, text = 'Number of parses:')
        sent.focus_set()
        b = Button(master, text="Add sentence", width=10, command= lambda:self.editNew(master, sent, nParses))
        c = Button(master, text="Cancel", command= master.destroy)
        sent.grid (row=1, column=0, sticky = W)
        nParses.grid (row=1, column=1, sticky= W)
        sLabel.grid (row=0, column=0, sticky=W)
        nLabel.grid (row=0, column=1, sticky = W)
        b.grid (row=2, column = 0)
        c.grid (row=2, column = 1)
    # Print from the new sentence
    def editNew(self, master, sent, nParses):
        """Splice the new sentence into the file's testbed block (creating
        the block at end-of-file if absent), then recompile and redraw."""
        # Prepare the file's contents for editing
        fileData = self.cfile.getAllText()
        # Work in an off-screen Text widget purely for its search/insert API.
        self.text = Text(master)
        self.text.mark_set("START", INSERT)
        self.text.mark_gravity("START", LEFT)
        self.text.insert(INSERT, fileData)
        testSent = sent.get()
        npSent = nParses.get()
        self.text.config(takefocus=True)
        idx= self.text.search('testbed', "START")
        if idx:
            self.text.mark_set("START", idx)
            idx = self.text.search('{', "START", forwards = True)
            self.text.mark_set("START", idx)
            idx = self.text.search('\n', "START", forwards = True)
            # FIXME: really poor search for locating the right position
            # to insert text here. Needs correction!
            self.text.mark_set(INSERT, idx)
            self.text.mark_gravity(INSERT, RIGHT)
            self.text.insert (INSERT, '\n\t'+ testSent+ ':\t'+ npSent+ ';')
        else:
            showwarning(title= 'VisCCG: Warning', message='No initial testbed found, creating new')
            self.text.mark_set(INSERT, END)
            self.text.mark_gravity(INSERT, RIGHT)
            self.text.insert (INSERT, ' testbed {\n')
            self.text.insert (INSERT, '\n\t'+ testSent+ ':\t'+ npSent+ ';')
            self.text.insert (INSERT, '}\n')
        # Set the original file's data to be this
        fileData= self.text.get(1.0, END)
        self.cfile.setAllText(fileData)
        # Destroy the entry window
        master.destroy()
        # Update the display
        self.cfile.mode= 'Edit'
        self.cfile.compile_if_needed()
        self.cfile.onTestbed()
# Creates the top-level window and populates the widgets below it.
class CFile(object):
    """One editor window.

    Owns the Tk toplevel, the per-mode tab widgets (Edit, Lexicon, Features,
    Words, Testbed, Rules), the menu/tool/check bars, and the compile state
    (MD5 signatures) of the grammar file being edited.
    """
    #### NOTE NOTE NOTE! Variables declared like this, in the class itself,
    #### are class variables (not instance variables) until they are
    #### assigned to. If you want pure instance variables, you need to
    #### initialize them inside of __init__().
    # Hash table describing modes and the associated class
    modelist = {'Edit':CEdit, 'Lexicon':CLexicon, 'Features':CFeatures,
                'Words':CWords, 'Testbed':CTestbed, 'Rules':CRules}
    startfiledir = '.'
    ftypes = [('All files', '*'), # for file open dialog
              ('Text files', '.txt'), # customize in subclass
              ('Python files', '.py')] # or set in each instance
    colors = [{'fg':'black', 'bg':'white'}, # color pick list
              {'fg':'yellow', 'bg':'black'}, # first item is default
              {'fg':'white', 'bg':'blue'}, # tailor me as desired
              {'fg':'black', 'bg':'beige'}, # or do PickBg/Fg chooser
              {'fg':'yellow', 'bg':'purple'},
              {'fg':'black', 'bg':'brown'},
              {'fg':'lightgreen', 'bg':'darkgreen'},
              {'fg':'darkblue', 'bg':'orange'},
              {'fg':'orange', 'bg':'darkblue'}]
    fonts = [('courier', 9+FontScale, 'normal'), # platform-neutral fonts
             ('courier', 12+FontScale, 'normal'), # (family, size, style)
             ('courier', 10+FontScale, 'bold'), # or popup a listbox
             ('courier', 10+FontScale, 'italic'), # make bigger on linux
             ('times', 10+FontScale, 'normal'),
             ('helvetica', 10+FontScale, 'normal'),
             ('ariel', 10+FontScale, 'normal'),
             ('system', 10+FontScale, 'normal'),
             ('courier', 20+FontScale, 'normal')]
    def __init__(self, file=None):
        """Create a new editor window; load *file* if given, else a template."""
        self.file = file
        self.openDialog = None
        self.saveDialog = None
        self.lastfind = None
        self.current_parse = None
        self.mode = None
        # MD5 signatures used to detect unsaved / uncompiled changes.
        self.last_save_signature = None
        self.last_compile_signature = None
        # First top-level window is Tk(); rest are Toplevel()
        global root
        if not root:
            root = Tk()
            self.top = root
        else:
            self.top = Toplevel(root)
        ccg2xml.late_init_graphics()
        openfiles[self] = True
        self.top.protocol('WM_DELETE_WINDOW', self.onClose)
        # We create an outer frame to hold the toolbar and the main widget.
        # Create all the different kinds of main widget.
        # FIXME: Maybe outer isn't necessary?
        self.outer = Frame(self.top)
        self.outer.pack(expand=YES, fill=BOTH) # make frame stretchable
        self.modes = {}
        for mode in self.modelist:
            self.modes[mode] = self.modelist[mode](self.outer, self)
        self.main = None
        self.toolbar_widget = None
        self.checkbar_widget = None
        #self.switch_to('Edit')
        self.setFileName(None)
        if file:
            self.onFirstOpen(file)
        else:
            # When the user has just opened a new file
            # Need to load template from the src folder
            openccg_home = os.environ['OPENCCG_HOME']
            template = open(openccg_home + '/src/ccg2xml/grammar_template.ccg', 'r').read()
            self.setAllText(template)
        # Save the MD5 signature for future comparison
        self.last_save_signature = self.getSignature(self.getAllText())
        self.switch_to('Edit')
    def switch_to(self, mode):
        """Switch the window to *mode* ('Edit', 'Lexicon', ...): tear down
        the current main/toolbar/checkbar widgets and build the new ones."""
        # FIXME: We should probably create the menubar and toolbar widgets
        # only once, and remember them.
        if self.mode != mode:
            if self.main:
                self.main.pack_forget()
            if self.toolbar_widget:
                self.toolbar_widget.pack_forget()
            if self.checkbar_widget:
                self.checkbar_widget.pack_forget()
        self.mode = mode
        self.main = self.modes[mode]
        self.makeMenubar()
        self.makeToolbar(mode)
        self.makeCheckbar()
        self.main.reinit()
        # Pack the main widget after the toolbar, so it goes below it.
        self.main.pack(side=TOP, expand=YES, fill=BOTH)
    # Create the menubar; assumes that self.main.menubar has been set to the
    # appropriate menubar description. Note that the menubar has to be a
    # child of the top-level window itself rather than any child of it, so
    # that it can be correctly displayed at the top of the window -- or
    # possibly in its decoration (Windows) or at top of screen (Mac).
    #
    # From PP2E guimaker.py.
    def makeMenubar(self):
        menubar = Menu(self.top)
        self.top.config(menu=menubar)
        for (name, key, items) in self.main.menubar:
            pulldown = Menu(menubar)
            self.addMenuItems(pulldown, items)
            menubar.add_cascade(label=name, underline=key, menu=pulldown)
        if sys.platform[:3] == 'win':
            menubar.add_command(label='Help', command=self.help)
        else:
            pulldown = Menu(menubar) # linux needs real pulldown
            pulldown.add_command(label='About', command=self.help)
            menubar.add_cascade(label='Help', menu=pulldown)
    # Add items to a menu or submenu. From PP2E guimaker.py.
    def addMenuItems(self, menu, items):
        for item in items: # scan nested items list
            if item == 'separator': # string: add separator
                menu.add_separator({})
            elif type(item) is list: # list: disabled item list
                for num in item:
                    menu.entryconfig(num, state=DISABLED)
            elif type(item[2]) is not list:
                menu.add_command(label = item[0], # command:
                                 underline = item[1], # add command
                                 command = item[2]) # cmd=callable
            else:
                pullover = Menu(menu)
                self.addMenuItems(pullover, item[2]) # sublist:
                menu.add_cascade(label = item[0], # make submenu
                                 underline = item[1], # add cascade
                                 menu = pullover)
    def makeToolbar(self, selected):
        """
        make toolbar (of buttons) at top, if any
        expand=no, fill=x so same width on resize
        """
        if self.main.toolbar:
            self.toolbar_widget = Frame(self.outer, cursor='hand2',
                                        relief=SUNKEN, bd=2)
            self.toolbar_widget.pack(side=TOP, fill=X)
            for (name, action, where) in self.main.toolbar:
                but = Button(self.toolbar_widget, text=name,
                             command=action)
                if name == selected:
                    # Show the active mode's button as pressed-in.
                    but.config(relief=SUNKEN)
                but.pack(where)
    def makeCheckbar(self):
        """
        make check-button bar at top, if any
        expand=no, fill=x so same width on resize
        """
        if self.main.checkbar:
            self.checkbar_widget = Frame(self.outer, cursor='hand2',
                                         relief=SUNKEN, bd=2)
            self.checkbar_widget.pack(side=TOP, fill=X)
            for (name, var) in self.main.checkbar:
                Checkbutton(self.checkbar_widget, text=name,
                            variable=var,
                            command=self.main.redraw).pack(side=LEFT)
    def getAllText(self):
        # The Edit tab's text widget is the authoritative buffer.
        return self.modes['Edit'].getAllText()
    def setAllText(self, text):
        self.modes['Edit'].setAllText(text)
        #self.modes['Display'].setAllText(text)
    def _getints(self, string):
        """Internal function."""
        if string:
            if type(string) is str:
                textwid = self.modes['Edit'].text
                return tuple(map(getint, textwid.tk.splitlist(string)))
        else:
            return string
    def edit(self, *args):
        """Internal method
        This method controls the undo mechanism and
        the modified flag. The exact behavior of the
        command depends on the option argument that
        follows the edit argument. The following forms
        of the command are currently supported:
        edit_modified, edit_redo, edit_reset, edit_separator
        and edit_undo
        """
        textwid = self.modes['Edit'].text
        return self._getints(
            textwid.tk.call((textwid._w, 'edit') + args)) or ()
    def edit_modified(self, arg=None):
        """Get or Set the modified flag
        If arg is not specified, returns the modified
        flag of the widget. The insert, delete, edit undo and
        edit redo commands or the user can set or clear the
        modified flag. If boolean is specified, sets the
        modified flag of the widget to arg.
        """
        # Added to use md5 functionality to watch for changed data
        if arg is None:
            alltext = self.getAllText()
            if (self.last_save_signature != self.getSignature(alltext)):
                return YES
        return self.edit("modified", arg)
    def onInfo(self):
        """Show a dialog with cursor position and file text statistics."""
        text = self.getAllText() # added on 5/3/00 in 15 mins
        bytes = len(text) # words uses a simple guess:
        lines = len(string.split(text, '\n')) # any separated by whitespace
        words = len(string.split(text))
        index = self.main.text.index(INSERT)
        where = tuple(string.split(index, '.'))
        showinfo('CCG Editor Information',
                 'Current location:\n\n' +
                 'line:\t%s\ncolumn:\t%s\n\n' % where +
                 'File text statistics:\n\n' +
                 'Modified: %s\n\n' % self.edit_modified()+
                 'bytes:\t%d\nlines:\t%d\nwords:\t%d\n' %
                 (bytes, lines, words))
    #######################
    # Search menu commands
    #######################
    def onGoto(self, line=None):
        """Move the cursor to *line* (1-based), prompting if not given."""
        if not line:
            line = askinteger('CCG Editor', 'Enter line number')
        self.main.text.update()
        self.main.text.focus()
        if line is not None:
            maxindex = self.main.text.index(END+'-1c')
            maxline = atoi(split(maxindex, '.')[0])
            if line > 0 and line <= maxline:
                self.main.text.mark_set(INSERT, '%d.0' % line) # goto line
                self.main.text.tag_remove(SEL, '1.0', END) # delete selects
                self.main.text.tag_add(SEL, INSERT, 'insert + 1l') # select line
                self.main.text.see(INSERT) # scroll to line
            else:
                showerror('CCG Editor', 'Bad line number')
    def onFind(self, lastkey=None):
        """Search forward for *lastkey* (or a prompted string) and select it."""
        key = lastkey or askstring('CCG Editor', 'Enter search string')
        self.main.text.update()
        self.main.text.focus()
        self.lastfind = key
        if key:
            where = self.main.text.search(key, INSERT, END) # don't wrap
            if not where:
                showerror('CCG Editor', 'String not found')
            else:
                pastkey = where + '+%dc' % len(key) # index past key
                self.main.text.tag_remove(SEL, '1.0', END) # remove any sel
                self.main.text.tag_add(SEL, where, pastkey) # select key
                self.main.text.mark_set(INSERT, pastkey) # for next find
                self.main.text.see(where) # scroll display
    def onRefind(self):
        # Repeat the last search from the current position.
        self.onFind(self.lastfind)
    ######################
    # Tools menu commands
    ######################
    def onFontList(self):
        """Rotate to the next font in the pick list and apply it."""
        self.fonts.append(self.fonts[0]) # pick next font in list
        del self.fonts[0] # resizes the text area
        self.modes['Edit'].text.config(font=self.fonts[0])
        # Bug fix: modelist has no 'Display' entry, so an unconditional
        # lookup raised KeyError; guard it instead.
        if 'Display' in self.modes:
            self.modes['Display'].text.config(font=self.fonts[0])
    def onColorList(self):
        """Rotate to the next fg/bg color pair in the pick list and apply it."""
        self.colors.append(self.colors[0]) # pick next color in list
        del self.colors[0] # move current to end
        self.modes['Edit'].text.config(fg=self.colors[0]['fg'], bg=self.colors[0]['bg'])
        # Bug fix: guard the missing 'Display' mode (see onFontList).
        if 'Display' in self.modes:
            self.modes['Display'].text.config(fg=self.colors[0]['fg'], bg=self.colors[0]['bg'])
    def onPickFg(self):
        self.pickColor('fg')
    def onPickBg(self):
        self.pickColor('bg')
    def pickColor(self, part):
        """Let the user pick a color and apply it as *part* ('fg' or 'bg')."""
        (triple, hexstr) = askcolor()
        if hexstr:
            apply(self.modes['Edit'].text.config, (), {part: hexstr})
            # Bug fix: guard the missing 'Display' mode (see onFontList).
            if 'Display' in self.modes:
                apply(self.modes['Display'].text.config, (), {part: hexstr})
    #####################
    # File menu commands
    #####################
    def getSignature(self, contents):
        # MD5 digest of *contents*, used to detect changes cheaply.
        return md5.md5(contents).digest()
    def my_askopenfilename(self): # objects remember last result dir/file
        if not self.openDialog:
            self.openDialog = Open(initialdir=self.startfiledir,
                                   filetypes=self.ftypes)
        return self.openDialog.show()
    def my_asksaveasfilename(self): # objects remember last result dir/file
        if not self.saveDialog:
            self.saveDialog = SaveAs(initialdir=self.startfiledir,
                                     filetypes=self.ftypes)
        # NOTE(review): the save signature is recorded before the dialog is
        # shown, i.e. even if the user cancels -- looks suspect; confirm.
        self.last_save_signature = self.getSignature(self.getAllText())
        return self.saveDialog.show()
    def onOpen(self):
        file = self.my_askopenfilename()
        # FIXME! Only create new window if file exists and is readable
        if file:
            CFile(file)
    def onFirstOpen(self, file):
        """Load *file* into this (fresh) window, or show an error dialog."""
        try:
            text = open(file, 'r').read()
        except:
            showerror('CCG Editor', 'Could not open file ' + file)
        else:
            self.setAllText(text)
            self.setFileName(file)
    def compile_if_needed(self):
        """(Re)parse the buffer with ccg2xml, but only if its MD5 signature
        changed since the last compile. To force compilation, set
        self.last_compile_signature to None."""
        text = self.getAllText()
        textSign = self.getSignature(text)
        if textSign != self.last_compile_signature:
            # Now compile
            ccg2xml.init_global_state(errors_to_string=True)
            ccg2xml.options.quiet = True
            self.curparse = ccg2xml.parse_string(text)
            # Bug fix: this previously assigned 'last_compiled_signature'
            # (note the extra 'd'), so the cache never took effect and the
            # grammar was reparsed on every call.
            self.last_compile_signature = textSign
    def onDisplay(self):
        # NOTE(review): 'Display' is not registered in modelist; confirm
        # whether this menu entry is still reachable.
        self.switch_to('Display')
    def onEdit(self):
        self.switch_to('Edit')
    def onLexicon(self):
        self.switch_to('Lexicon')
    def onTestbed(self):
        self.switch_to('Testbed')
    def onRules(self):
        self.switch_to('Rules')
    def onWords(self):
        self.switch_to('Words')
    def onFeatures(self):
        self.switch_to('Features')
    def onNew(self):
        # Open a fresh editor window with the grammar template.
        CFile()
    def getFileName(self):
        return self.currfile
    def setFileName(self, name):
        """Remember the current file name and update the window title."""
        self.currfile = name # for save
        if name:
            title = 'VisCCG Editor: %s' % name
        else:
            title = 'VisCCG Editor'
        self.top.title(title)
        self.top.iconname(title)
    def help(self):
        showinfo('Help', 'Sorry, no help for ' + self.__class__.__name__)
    # Close this window; if this is the last window, quit
    def onClose(self):
        assert self in openfiles
        if len(openfiles) == 1 or self.top == root:
            self.onQuit()
            # If we got this far, the user refused to quit, so do nothing
        else:
            del openfiles[self]
            self.top.destroy()
    def onQuit(self):
        """Quit the application, asking for confirmation if any open window
        has unsaved modifications."""
        modfiles = False
        for f in openfiles:
            if f.edit_modified() == YES:
                modfiles = True
                break
        if not modfiles or askyesno('CCG Editor', 'Files are modified, Really quit?'):
            self.top.quit()
def main():
    """Parse command-line options, open the first named file (if any) in a
    new editor window, and enter the Tk event loop."""
    ccg2xml.parse_arguments(sys.argv[1:])
    ccg2xml.init_global_state_once()
    args = ccg2xml.global_args
    # A file name on the command line is opened in the first window.
    fname = args[0] if args and len(args) > 0 else None
    CFile(fname)
    mainloop()
if __name__ == '__main__': # entry point when run as a script
    main()
| OpenCCG/openccg | src/ccg2xml/ccg_editor.py | Python | lgpl-2.1 | 52,260 |
#!/usr/bin/env python
#pylint: disable=missing-docstring
####################################################################################################
# DO NOT MODIFY THIS HEADER #
# MOOSE - Multiphysics Object Oriented Simulation Environment #
# #
# (c) 2010 Battelle Energy Alliance, LLC #
# ALL RIGHTS RESERVED #
# #
# Prepared by Battelle Energy Alliance, LLC #
# Under Contract No. DE-AC07-05ID14517 #
# With the U. S. Department of Energy #
# #
# See COPYRIGHT for full restrictions #
####################################################################################################
import unittest
import bs4
import MooseDocs
from MooseDocs.common import moose_docs_file_tree
from MooseDocs.testing import MarkdownTestCase
class TestTemplate(MarkdownTestCase):
    """Tests for the MooseDocs 'template' extension: converts BoxMarker.md
    once in setUpClass and checks the resulting HTML for content, Doxygen
    links, source-code links, and hidden-syntax behavior."""
    EXTENSIONS = ['MooseDocs.extensions.template', 'MooseDocs.extensions.app_syntax', 'meta']
    @classmethod
    def updateExtensions(cls, configs):
        """
        Method to change the arguments that come from the configuration file for
        specific tests. This way one can test optional arguments without permanently
        changing the configuration file.
        """
        configs['MooseDocs.extensions.template']['template'] = 'testing.html'
        configs['MooseDocs.extensions.app_syntax']['hide']['framework'].append('/Functions')
        configs['MooseDocs.extensions.app_syntax']['hide']['phase_field'].append('/ICs')
    @classmethod
    def setUpClass(cls):
        super(TestTemplate, cls).setUpClass()
        # Use BoxMarker.md to test Doxygen and Code lookups
        config = dict(base='docs/content',
                      include=['docs/content/documentation/systems/Adaptivity/Markers/*'])
        root = moose_docs_file_tree({'framework': config})
        node = root.findall('/BoxMarker')[0]
        # Convert once; individual tests inspect cls.html / cls.soup.
        cls.html = cls.parser.convert(node)
        #with open(node.markdown(), 'r') as fid:
        #    cls.html = fid.read()
        cls.soup = bs4.BeautifulSoup(cls.html, "html.parser")
    def testContent(self):
        # The page should have a heading and mention the object name.
        self.assertIsNotNone(self.soup.find('h1'))
        self.assertIn('BoxMarker', self.html)
    def testDoxygen(self):
        # The template should link to the class's Doxygen page.
        a = str(self.soup)
        self.assertIsNotNone(a)
        self.assertIn('classBoxMarker.html', a)
        self.assertIn('Doxygen', a)
    def testCode(self):
        # The template should link to the header and source on GitHub.
        html = str(self.soup)
        self.assertIn('href="https://github.com/idaholab/moose/blob/master/framework/include/'\
                      'markers/BoxMarker.h"', html)
        self.assertIn('href="https://github.com/idaholab/moose/blob/master/framework/src/'\
                      'markers/BoxMarker.C"', html)
    def testHidden(self):
        # /Functions was hidden in updateExtensions, so its objects should
        # render as disabled "moose-bad-link" anchors.
        md = '!syntax objects /Functions'
        html = self.convert(md)
        gold = '<a class="moose-bad-link" data-moose-disable-link-error="1" ' \
               'href="/Functions/framework/ParsedVectorFunction.md">ParsedVectorFunction</a>'
        # NOTE(review): 'gold' contains no {} placeholders, so .format() is a
        # no-op here -- presumably left over from an earlier version.
        self.assertIn(gold.format(MooseDocs.MOOSE_DIR.rstrip('/')), html)
    def testPolycrystalICs(self):
        # /ICs was hidden for phase_field, so the link should be flagged bad.
        md = '[Foo](/ICs/PolycrystalICs/index.md)'
        html = self.convert(md)
        gold = '<a class="moose-bad-link" href="/ICs/PolycrystalICs/index.md">'
        self.assertIn(gold, html)
# Allow running this test module directly (outside the test harness).
if __name__ == '__main__':
    unittest.main(verbosity=2)
| liuwenf/moose | python/MooseDocs/tests/template/test_template.py | Python | lgpl-2.1 | 4,058 |
import os
import sys
import shutil
import errno
import time
import hashlib
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
# Pick a WebDriver: a remote Sauce Labs session when running under Travis CI,
# otherwise a local Firefox instance.
if "TRAVIS_BUILD_NUMBER" in os.environ:
    if "SAUCE_USERNAME" not in os.environ:
        print "No sauce labs login credentials found. Stopping tests..."
        sys.exit(0)
    capabilities = {'browserName': "firefox"}
    capabilities['platform'] = "Windows 7"
    capabilities['version'] = "48.0"
    capabilities['screenResolution'] = "1280x1024"
    capabilities["build"] = os.environ["TRAVIS_BUILD_NUMBER"]
    # Tie the session to the Sauce Connect tunnel of this Travis job.
    capabilities["tunnel-identifier"] = os.environ["TRAVIS_JOB_NUMBER"]
    # connect to sauce labs
    username = os.environ["SAUCE_USERNAME"]
    access_key = os.environ["SAUCE_ACCESS_KEY"]
    hub_url = "%s:%s@localhost:4445" % (username, access_key)
    driver = webdriver.Remote(command_executor="http://%s/wd/hub" % hub_url, desired_capabilities=capabilities)
else:
    # local
    print "Using LOCAL webdriver"
    profile = webdriver.FirefoxProfile()
    # Force English so the text assertions below match.
    profile.set_preference("intl.accept_languages", "en")
    driver = webdriver.Firefox(profile)
    driver.maximize_window()
def write_random_file(size, filename):
    """Write *size* random bytes to *filename*, creating parent directories
    as needed."""
    parent = os.path.dirname(filename)
    if not os.path.exists(parent):
        try:
            os.makedirs(parent)
        except OSError as exc: # Guard against race condition
            if exc.errno != errno.EEXIST:
                raise
    with open(filename, 'wb') as out:
        out.write(os.urandom(size))
def sha1_file(filename):
    """Return the hex SHA-1 digest of *filename*, read in 64 KiB chunks."""
    digest = hashlib.sha1()
    with open(filename, 'rb') as stream:
        for chunk in iter(lambda: stream.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()
def sha1_folder(folder):
    """Map each file path under *folder* (relative to it) to its SHA-1
    hex digest."""
    digests = {}
    for base, _dirs, names in os.walk(folder):
        for name in names:
            full_path = os.path.join(base, name)
            relative = os.path.relpath(full_path, folder)
            digests[relative] = sha1_file(full_path)
    return digests
# Block for up to *time* seconds until the element at *xpath* contains *text*.
# NOTE(review): the parameter name 'time' shadows the imported time module
# inside this function; harmless here, but easy to trip over.
def wait_for_text(time, xpath, text):
    WebDriverWait(driver, time).until(expected_conditions.text_to_be_present_in_element((By.XPATH, xpath), text))
# Fixed names/paths used throughout the GUI test run.
BACKUP_NAME = "BackupName"
PASSWORD = "the_backup_password_is_really_long_and_safe"
SOURCE_FOLDER = os.path.abspath("duplicati_gui_test_source")
DESTINATION_FOLDER = os.path.abspath("duplicati_gui_test_destination")
DESTINATION_FOLDER_DIRECT_RESTORE = os.path.abspath("duplicati_gui_test_destination_direct_restore")
RESTORE_FOLDER = os.path.abspath("duplicati_gui_test_restore")
DIRECT_RESTORE_FOLDER = os.path.abspath("duplicati_gui_test_direct_restore")
# wait 5 seconds for duplicati server to start
time.sleep(5)
driver.implicitly_wait(10)
driver.get("http://localhost:8200/ngax/index.html")
if "Duplicati" not in driver.title:
    raise Exception("Unable to load duplicati GUI!")
# Create and hash random files in the source folder
write_random_file(1024 * 1024, SOURCE_FOLDER + os.sep + "1MB.test")
write_random_file(100 * 1024, SOURCE_FOLDER + os.sep + "subfolder" + os.sep + "100KB.test")
# Baseline hashes; restored copies must match these at the end.
sha1_source = sha1_folder(SOURCE_FOLDER)
# Dismiss the password request
driver.find_element_by_link_text("No, my machine has only a single account").click()
# Add new backup
driver.find_element_by_link_text("Add backup").click()
# Choose the "add new" option
driver.find_element_by_id("blank").click()
driver.find_element_by_xpath("//input[@class='submit next']").click()
# Add new backup - General page
time.sleep(1)
driver.find_element_by_id("name").send_keys(BACKUP_NAME)
driver.find_element_by_id("passphrase").send_keys(PASSWORD)
driver.find_element_by_id("repeat-passphrase").send_keys(PASSWORD)
driver.find_element_by_id("nextStep1").click()
# Add new backup - Destination page
driver.find_element_by_link_text("Manually type path").click()
driver.find_element_by_id("file_path").send_keys(DESTINATION_FOLDER)
driver.find_element_by_id("nextStep2").click()
# Add new backup - Source Data page
driver.find_element_by_id("sourcePath").send_keys(os.path.abspath(SOURCE_FOLDER) + os.sep)
driver.find_element_by_id("sourceFolderPathAdd").click()
driver.find_element_by_id("nextStep3").click()
# Add new backup - Schedule page (disable scheduled runs for this test)
useScheduleRun = driver.find_element_by_id("useScheduleRun")
if useScheduleRun.is_selected():
    useScheduleRun.click()
driver.find_element_by_id("nextStep4").click()
# Add new backup - Options page
driver.find_element_by_id("save").click()
# Run the backup job and wait for finish
driver.find_element_by_link_text(BACKUP_NAME).click()
[n for n in driver.find_elements_by_xpath("//dl[@class='taskmenu']/dd/p/span[contains(text(),'Run now')]") if n.is_displayed()][0].click()
wait_for_text(60, "//div[@class='task ng-scope']/dl[2]/dd[1]", "(took ")
# Restore
if len([n for n in driver.find_elements_by_xpath("//span[contains(text(),'Restore files ...')]") if n.is_displayed()]) == 0:
    driver.find_element_by_link_text(BACKUP_NAME).click()
[n for n in driver.find_elements_by_xpath("//span[contains(text(),'Restore files ...')]") if n.is_displayed()][0].click()
driver.find_element_by_xpath("//span[contains(text(),'" + SOURCE_FOLDER + "')]") # wait for filelist
time.sleep(1)
driver.find_element_by_xpath("//restore-file-picker/ul/li/div/a[2]").click() # select root folder checkbox
driver.find_element_by_xpath("//form[@id='restore']/div[1]/div[@class='buttons']/a/span[contains(text(), 'Continue')]").click()
# Restore into a fresh folder rather than the original location.
driver.find_element_by_id("restoretonewpath").click()
driver.find_element_by_id("restore_path").send_keys(RESTORE_FOLDER)
driver.find_element_by_xpath("//form[@id='restore']/div/div[@class='buttons']/a/span[contains(text(),'Restore')]").click()
# wait for restore to finish
wait_for_text(60, "//form[@id='restore']/div[3]/h3/div[1]", "Your files and folders have been restored successfully.")
# hash restored files
sha1_restore = sha1_folder(RESTORE_FOLDER)
# cleanup: delete source and restore folder and rename destination folder for direct restore
shutil.rmtree(SOURCE_FOLDER)
shutil.rmtree(RESTORE_FOLDER)
os.rename(DESTINATION_FOLDER, DESTINATION_FOLDER_DIRECT_RESTORE)
# direct restore: restore from the backup files alone, without the local
# job database (exercises the "Restore from configuration" path).
driver.find_element_by_link_text("Restore").click()
# Choose the "restore direct" option
driver.find_element_by_id("direct").click()
driver.find_element_by_xpath("//input[@class='submit next']").click()
time.sleep(1)
driver.find_element_by_link_text("Manually type path").click()
driver.find_element_by_id("file_path").send_keys(DESTINATION_FOLDER_DIRECT_RESTORE)
driver.find_element_by_id("nextStep1").click()
driver.find_element_by_id("password").send_keys(PASSWORD)
driver.find_element_by_id("connect").click()
driver.find_element_by_xpath("//span[contains(text(),'" + SOURCE_FOLDER + "')]") # wait for filelist
time.sleep(1)
driver.find_element_by_xpath("//restore-file-picker/ul/li/div/a[2]").click() # select root folder checkbox
time.sleep(1)
driver.find_element_by_xpath("//form[@id='restore']/div[1]/div[@class='buttons']/a/span[contains(text(), 'Continue')]").click()
driver.find_element_by_id("restoretonewpath").click()
driver.find_element_by_id("restore_path").send_keys(DIRECT_RESTORE_FOLDER)
driver.find_element_by_xpath("//form[@id='restore']/div/div[@class='buttons']/a/span[contains(text(),'Restore')]").click()
# wait for restore to finish
wait_for_text(60, "//form[@id='restore']/div[3]/h3/div[1]", "Your files and folders have been restored successfully.")
# hash direct restore files
sha1_direct_restore = sha1_folder(DIRECT_RESTORE_FOLDER)
print "Source hashes: " + str(sha1_source)
print "Restore hashes: " + str(sha1_restore)
print "Direct Restore hashes: " + str(sha1_direct_restore)
# Tell Sauce Labs to stop the test
driver.quit()
# Fail the build unless both restores reproduced the source exactly.
if not (sha1_source == sha1_restore and sha1_source == sha1_direct_restore):
    sys.exit(1) # return with error
| agrajaghh/duplicati | guiTests/guiTest.py | Python | lgpl-2.1 | 8,072 |
#!/usr/bin/env python
# Copyright (C) 2010 Red Hat, Inc.
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this software; if not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA, or see the FSF site: http://www.fsf.org.
import http
import xmlfmt
import yamlfmt
import jsonfmt
from testutils import *
# Parse command line options; positional args (oargs) may name a cluster
# and a template to use for the pool-creation part of the test.
opts = parseOptions()
(cluster, template) = (None, None)
if len(opts['oargs']) >= 2:
    (cluster, template) = opts['oargs'][0:2]
# Discover the API entry-point links via a HEAD request.
links = http.HEAD_for_links(opts)
# Only the XML representation is exercised here; yamlfmt/jsonfmt are imported
# but deliberately not iterated.
for fmt in [xmlfmt]:
    t = TestUtils(opts, fmt)
    print "=== ", fmt.MEDIA_TYPE, " ==="
    # Smoke-test: GET every existing VM pool individually.
    for pool in t.get(links['vmpools']):
        t.get(pool.href)
    # Creation test needs a cluster/template pair from the command line.
    if cluster is None:
        continue
    # Build and create a new 2-VM pool on the given cluster/template.
    pool = fmt.VmPool()
    pool.name = randomName('foo')
    pool.size = "2"
    pool.cluster = fmt.Cluster()
    pool.cluster.id = t.find(links['clusters'], cluster).id
    pool.template = fmt.Template()
    pool.template.id = t.find(links['templates'], template).id
    pool = t.create(links['vmpools'], pool)
    # Collect the VMs that were spawned as members of the new pool.
    vms_in_pool = []
    for vm in t.get(links['vms']):
        if not hasattr(vm, "vmpool"):
            continue
        if vm.vmpool.id == pool.id:
            vms_in_pool.append(vm)
    assert len(vms_in_pool) == 2, "Expected 2 VMs with pool ID '" + pool.id + "', got " + str(len(vms_in_pool))
    # Tear-down: detach and delete each pool VM, then delete the pool itself.
    for vm in vms_in_pool:
        t.syncAction(vm.actions, "detach")
        t.delete(vm.href)
    t.delete(pool.href)
| colloquium/rhevm-api | python/pool-test.py | Python | lgpl-2.1 | 1,969 |
##############################################################################
# adaptiveMD: A Python Framework to Run Adaptive Molecular Dynamics (MD)
# Simulations on HPC Resources
# Copyright 2017 FU Berlin and the Authors
#
# Authors: Jan-Hendrik Prinz
# Contributors:
#
# `adaptiveMD` is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
# part of the code below was taken from `openpathsampling` see
# <http://www.openpathsampling.org> or
# <http://github.com/openpathsampling/openpathsampling
# for details and license
import inspect
import logging
import time
import uuid
logger = logging.getLogger(__name__)
class StorableMixin(object):
    """Mixin that allows objects of the class to be stored using netCDF+
    """
    # Cache for the most-derived base class (filled lazily by base()).
    _base = None
    _args = None
    # If True the object is ignored by storage -- TODO confirm against stores.
    _ignore = False
    _find_by = []
    # Per-process uuid1 fields minus the sequence field; combined with
    # CREATION_COUNT below to seed a process-unique id counter.
    INSTANCE_UUID = list(uuid.uuid1().fields[:-1])
    CREATION_COUNT = 0L
    ACTIVE_LONG = int(uuid.UUID(
        fields=tuple(
            INSTANCE_UUID +
            [CREATION_COUNT]
        )
    ))
    @staticmethod
    def get_uuid():
        """
        Create a new unique ID
        Returns
        -------
        long
            the unique number for an object in the project
        """
        # Incremented by 2 -- presumably the low bit is reserved for a
        # related/derived id scheme; TODO confirm.
        StorableMixin.ACTIVE_LONG += 2
        return StorableMixin.ACTIVE_LONG
    def __init__(self):
        # set the universal ID
        self.__uuid__ = StorableMixin.get_uuid()
        # set the creation time
        self.__time__ = int(time.time())
        # back-reference to the store that owns this object (set on save)
        self.__store__ = None
    def __eq__(self, other):
        # Identity is defined solely by the uuid, never by content.
        if isinstance(other, StorableMixin):
            return self.__uuid__ == other.__uuid__
        # Let the other operand try its own comparison.
        return NotImplemented
    def named(self, name):
        """
        Attach a .name property to an object
        Parameters
        ----------
        name : str
            the name of the object
        Returns
        -------
        self
            the object itself for chaining
        """
        self.name = name
        return self
    def idx(self, store):
        """
        Return the index which is used for the object in the given store.
        Once you store a storable object in a store it gets assigned a unique
        number that can be used to retrieve the object back from the store. This
        function will ask the given store if the object is stored if so what
        the used index is.
        Parameters
        ----------
        store : :class:`ObjectStore`
            the store in which to ask for the index
        Returns
        -------
        int or None
            the integer index for the object of it exists or None else
        """
        # Prefer the store's index mapping when present; otherwise delegate
        # to the store's own idx() lookup.
        if hasattr(store, 'index'):
            return store.index.get(self, None)
        else:
            return store.idx(self)
    @property
    def cls(self):
        """
        Return the class name as a string
        Returns
        -------
        str
            the class name
        """
        return self.__class__.__name__
    @classmethod
    def base(cls):
        """
        Return the most parent class actually derived from StorableMixin
        Important to determine which store should be used for storage
        Returns
        -------
        type
            the base class
        """
        # Computed once per class and cached in _base.
        if cls._base is None:
            if cls is not StorableMixin:
                if StorableMixin in cls.__bases__:
                    # cls derives directly from the mixin: it is its own base.
                    cls._base = cls
                else:
                    # Walk up the (single-inheritance) chain recursively.
                    if hasattr(cls.__base__, 'base'):
                        cls._base = cls.__base__.base()
                    else:
                        cls._base = cls
        return cls._base
    def __hash__(self):
        # Hash on the uuid so __eq__/__hash__ stay consistent.
        return hash(self.__uuid__)
    @property
    def base_cls_name(self):
        """
        Return the name of the base class
        Returns
        -------
        str
            the string representation of the base class
        """
        return self.base().__name__
    @property
    def base_cls(self):
        """
        Return the base class
        Returns
        -------
        type
            the base class
        See Also
        --------
        :func:`base()`
        """
        return self.base()
    @classmethod
    def descendants(cls):
        """
        Return a list of all subclassed objects
        Returns
        -------
        list of type
            list of subclasses of a storable object
        """
        # Direct subclasses plus, recursively, their subclasses.
        return cls.__subclasses__() + \
            [g for s in cls.__subclasses__() for g in s.descendants()]
    @staticmethod
    def objects():
        """
        Returns a dictionary of all storable objects
        Returns
        -------
        dict of str : type
            a dictionary of all subclassed objects from StorableMixin.
            The name points to the class
        """
        subclasses = StorableMixin.descendants()
        return {subclass.__name__: subclass for subclass in subclasses}
    @classmethod
    def args(cls):
        """
        Return a list of args of the `__init__` function of a class
        Returns
        -------
        list of str
            the list of argument names. No information about defaults is
            included.
        """
        # getargspec raises TypeError for C-implemented __init__ (e.g. object).
        try:
            args = inspect.getargspec(cls.__init__)
        except TypeError:
            return []
        return args[0]
    # attributes always excluded from / forced into serialization
    _excluded_attr = []
    _included_attr = []
    # skip attributes starting with '_' unless listed in _included_attr
    _exclude_private_attr = True
    # restore attributes that were not __init__ arguments on from_dict
    _restore_non_initial_attr = True
    _restore_name = True
    def to_dict(self):
        """
        Convert object into a dictionary representation
        Used to convert the dictionary into JSON string for serialization
        Returns
        -------
        dict
            the dictionary representing the (immutable) state of the object
        """
        excluded_keys = ['idx', 'json', 'identifier']
        # Keep a key if it is explicitly included, or if it is neither
        # blacklisted nor (optionally) private.
        keys_to_store = {
            key for key in self.__dict__
            if key in self._included_attr or (
                key not in excluded_keys and
                key not in self._excluded_attr and
                not (key.startswith('_') and self._exclude_private_attr)
            )
        }
        return {
            key: self.__dict__[key] for key in keys_to_store
        }
    @classmethod
    def from_dict(cls, dct):
        """
        Reconstruct an object from a dictionary representation
        Parameters
        ----------
        dct : dict
            the dictionary containing a state representation of the class.
        Returns
        -------
        :class:`StorableMixin`
            the reconstructed storable object
        """
        if dct is None:
            dct = {}
        if hasattr(cls, 'args'):
            # Split the stored state into __init__ arguments and the rest.
            args = cls.args()
            init_dct = {key: dct[key] for key in dct if key in args}
            try:
                obj = cls(**init_dct)
                if cls._restore_non_initial_attr:
                    # Re-apply attributes that are not constructor arguments.
                    non_init_dct = {
                        key: dct[key] for key in dct if key not in args}
                    if len(non_init_dct) > 0:
                        for key, value in non_init_dct.iteritems():
                            setattr(obj, key, value)
                return obj
            except TypeError as e:
                if hasattr(cls, 'args'):
                    # NOTE(review): str(cls.args) formats the bound method
                    # object, not the argument list -- probably meant
                    # str(cls.args()); harmless since this is diagnostics only.
                    err = (
                        'Could not reconstruct the object of class `%s`. '
                        '\nStored parameters: %s \n'
                        '\nCall parameters: %s \n'
                        '\nSignature parameters: %s \n'
                        '\nActual message: %s'
                    ) % (
                        cls.__name__,
                        str(dct),
                        str(init_dct),
                        str(cls.args),
                        str(e)
                    )
                    raise TypeError(err)
                else:
                    raise
        else:
            return cls(**dct)
def create_to_dict(keys_to_store):
    """
    Build a ``to_dict`` serializer for a fixed list of attribute names.

    Parameters
    ----------
    keys_to_store : list of str
        attribute names copied into the dictionary via ``getattr``

    Returns
    -------
    function
        a function usable as a ``to_dict`` method that maps each listed
        attribute name to its current value on the instance
    """
    def to_dict(self):
        # dict() over a generator -- equivalent to a literal comprehension.
        return dict((attr, getattr(self, attr)) for attr in keys_to_store)
    return to_dict
| thempel/adaptivemd | adaptivemd/mongodb/base.py | Python | lgpl-2.1 | 9,063 |
# -*- coding: UTF-8 -*-
# thermo.py
# Created by Francesco Porcari on 2010-09-03.
# Copyright (c) 2010 Softwell. All rights reserved.
#
#
import os
from gnr.core.gnrbag import Bag
import random
import time
# Upper bounds for the randomly sized demo data (clients, invoices per
# client, rows per invoice) and the per-row delay that makes the demo
# batches take a visible amount of time.
cli_max = 12
invoice_max = 20
row_max = 100
sleep_time = 0.05
class GnrCustomWebPage(object):
dojo_version = '11'
py_requires = "gnrcomponents/testhandler:TestHandlerFull,gnrcomponents/thermopane:ThermoPane"
def windowTitle(self):
return 'Thermo'
def test_1_batch(self, pane):
"Batch"
box = pane.div(datapath='test1')
box.button('Start', fire='.start_test')
box.dataRpc('dummy', 'test_1_batch', _fired='^.start_test')
def test_2_batch(self, pane):
"Batch 2"
box = pane.div(datapath='test2')
box.button('Start', fire='.start_test')
box.dataRpc('dummy', 'test_2_batch', _fired='^.start_test')
def test_3_batch(self, pane):
"Batch 3"
box = pane.div(datapath='test3')
box.button('Start', fire='.start_test')
box.dataRpc('dummy', 'test_3_batch', _fired='^.start_test')
def rpc_test_1_batch(self, ):
t = time.time()
# thermo_lines = [{'title':'Clients',_class=}]
thermo_lines = 'clients,invoices,rows'
thermo_lines = None
self.btc.batch_create(title='testbatch',
thermo_lines=thermo_lines, note='This is a test batch_1 %i' % int(random.random() * 100))
clients = int(random.random() * cli_max)
self.btc.thermo_line_add(line='clients', maximum=clients)
try:
for client in range(1, clients + 1):
stopped = self.btc.thermo_line_update(line='clients',
maximum=clients, message='client %i/%i' % (client, clients),
progress=client)
invoices = int(random.random() * invoice_max)
self.btc.thermo_line_add(line='invoices', maximum=invoices)
for invoice in range(1, invoices + 1):
stopped = self.btc.thermo_line_update(line='invoices',
maximum=invoices,
message='invoice %i/%i' % (invoice, invoices),
progress=invoice)
rows = int(random.random() * row_max)
self.btc.thermo_line_add(line='rows', maximum=rows)
for row in range(1, rows + 1):
stopped = self.btc.thermo_line_update(line='rows',
maximum=rows, message='row %i/%i' % (row, rows),
progress=row)
time.sleep(sleep_time)
self.btc.thermo_line_del(line='rows')
self.btc.thermo_line_del(line='invoices')
self.btc.thermo_line_del(line='clients')
except self.btc.exception_stopped:
self.btc.batch_aborted()
except Exception, e:
self.btc.batch_error(error=str(e))
self.btc.batch_complete(result='Execution completed', result_attr=dict(url='http://www.apple.com'))
def rpc_test_2_batch(self):
t = time.time()
thermo_lines = 'clients,invoices,rows'
thermo_lines = None
self.btc.batch_create(title='testbatch',
thermo_lines=thermo_lines, note='This is a test batch_2 %i' % int(random.random() * 100))
try:
clients = int(random.random() * cli_max)
for client in self.client_provider(clients):
invoices = int(random.random() * invoice_max)
for invoice in self.invoice_provider(invoices):
rows = int(random.random() * row_max)
for row in self.row_provider(rows):
time.sleep(sleep_time)
except self.btc.exception_stopped:
self.btc.batch_aborted()
except Exception, e:
self.btc.batch_error(error=str(e))
self.btc.batch_complete(result='Execution completed', result_attr=dict(url='http://www.apple.com'))
def client_provider(self, clients):
self.btc.thermo_line_add(line='clients', maximum=clients)
for client in range(1, clients + 1):
self.btc.thermo_line_update(line='clients',
maximum=clients, message='client %i/%i' % (client, clients), progress=client)
yield client
self.btc.thermo_line_del(line='invoices')
def invoice_provider(self, invoices):
self.btc.thermo_line_add(line='invoices', maximum=invoices)
for invoice in range(1, invoices + 1):
self.btc.thermo_line_update(line='invoices',
maximum=invoices, message='invoice %i/%i' % (invoice, invoices),
progress=invoice)
yield invoice
self.btc.thermo_line_del(line='invoices')
def row_provider(self, rows):
self.btc.thermo_line_add(line='rows', maximum=rows)
for row in range(1, rows + 1):
self.btc.thermo_line_update(line='rows',
maximum=rows, message='row %i/%i' % (row, rows), progress=row)
yield row
self.btc.thermo_line_del(line='rows')
def rpc_test_3_batch(self):
t = time.time()
btc = self.btc
self.btc.batch_create(title='testbatch', note='This is a test batch_3 %i' % int(random.random() * 100))
def clients_cb():
return range(int(random.random() * cli_max))
def invoices_cb(client=None):
return range(int(random.random() * invoice_max))
def rows_cb(invoice=None):
return range(int(random.random() * row_max))
try:
for client in btc.thermo_wrapper(clients_cb, 'clients'):
for invoice in btc.thermo_wrapper(invoices_cb, 'invoices', client=client):
for row in btc.thermo_wrapper(rows_cb, 'rows', invoice=invoice):
time.sleep(sleep_time)
except self.btc.exception_stopped:
self.btc.batch_aborted()
except Exception, e:
self.btc.batch_error(error=str(e))
self.btc.batch_complete(result='Execution completed', result_attr=dict(url='http://www.apple.com'))
| poppogbr/genropy | packages/test15/webpages/components/thermo.py | Python | lgpl-2.1 | 6,614 |
from mdt import CompartmentTemplate
from mdt.lib.post_processing import DTIMeasures
__author__ = 'Robbert Harms'
__date__ = '2017-08-03'
__maintainer__ = 'Robbert Harms'
__email__ = 'robbert@xkls.nl'
__licence__ = 'LGPL v3'
class SSFP_Ball(CompartmentTemplate):
    """Isotropic ('Ball') compartment under an SSFP acquisition.

    Hands the scalar diffusivity ``d`` directly to the SSFP signal model
    together with the sequence parameters (delta, G, TR, flip_angle) and
    the relaxation/B1 parameters (b1, T1, T2).
    """
    parameters = ('d', 'delta', 'G', 'TR', 'flip_angle', 'b1', 'T1', 'T2')
    dependencies = ('SSFP',)
    cl_code = '''
        return SSFP(d, delta, G, TR, flip_angle, b1, T1, T2);
    '''
class SSFP_Stick(CompartmentTemplate):
    """Stick (zero perpendicular diffusivity) compartment under SSFP.

    The apparent diffusivity is ``d * (g . n)^2`` where ``n`` is the stick
    orientation given by the spherical angles theta/phi; the result is fed
    into the SSFP signal model.
    """
    parameters = ('g', 'd', 'theta', 'phi', 'delta', 'G', 'TR', 'flip_angle', 'b1', 'T1', 'T2')
    dependencies = ('SSFP',)
    cl_code = '''
        double adc = d * pown(dot(g, (float4)(cos(phi) * sin(theta), sin(phi) * sin(theta), cos(theta), 0.0)), 2);
        return SSFP(adc, delta, G, TR, flip_angle, b1, T1, T2);
    '''
class SSFP_Tensor(CompartmentTemplate):
    """Full tensor compartment under SSFP.

    The apparent diffusivity along ``g`` is computed by the
    TensorApparentDiffusion dependency from the eigenvalues (d, dperp0,
    dperp1) and orientation angles (theta, phi, psi), then passed to the
    SSFP signal model. Constraints and prior enforce the eigenvalue
    ordering d >= dperp0 >= dperp1.
    """
    parameters = ('g', 'd', 'dperp0', 'dperp1', 'theta', 'phi', 'psi', 'delta',
                  'G', 'TR', 'flip_angle', 'b1', 'T1', 'T2')
    dependencies = ('SSFP', 'TensorApparentDiffusion')
    cl_code = '''
        double adc = TensorApparentDiffusion(theta, phi, psi, d, dperp0, dperp1, g);
        return SSFP(adc, delta, G, TR, flip_angle, b1, T1, T2);
    '''
    # Inequality constraints (must be <= 0 when satisfied).
    constraints = '''
        constraints[0] = dperp0 - d;
        constraints[1] = dperp1 - dperp0;
    '''
    prior = 'return dperp1 < dperp0 && dperp0 < d;'
    # Standard DTI derived maps (FA, MD, ...) added in post-processing.
    extra_optimization_maps = [DTIMeasures.extra_optimization_maps]
    extra_sampling_maps = [DTIMeasures.extra_sampling_maps]
class SSFP_Zeppelin(CompartmentTemplate):
    """Cylindrically symmetric ('Zeppelin') tensor compartment under SSFP.

    The apparent diffusivity is ``dperp0 + (d - dperp0) * (g . n)^2`` where
    ``n`` is the symmetry axis given by the spherical angles theta/phi; the
    result is handed to the SSFP signal model.
    """
    parameters = ('g', 'd', 'dperp0', 'theta', 'phi', 'delta', 'G', 'TR', 'flip_angle', 'b1', 'T1', 'T2')
    dependencies = ('SSFP',)
    # BUG FIX: the original CL snippet had unbalanced parentheses (an extra
    # '(' before '(d - dperp0)' with no matching ')'), which cannot compile
    # as OpenCL C. Parenthesization now mirrors SSFP_Stick's cl_code.
    cl_code = '''
        double adc = dperp0 + (d - dperp0) * pown(dot(g, (float4)(cos(phi) * sin(theta),
                                                                  sin(phi) * sin(theta), cos(theta), 0.0)), 2);
        return SSFP(adc, delta, G, TR, flip_angle, b1, T1, T2);
    '''
| cbclab/MDT | mdt/data/components/standard/compartment_models/SSFP.py | Python | lgpl-3.0 | 2,008 |
# coding=utf-8
#####################################
# Imports
#####################################
# Python native imports
from PyQt5 import QtCore, QtWidgets, QtGui
import logging
#####################################
# Global Variables
#####################################
UI_LOGO = "logo_small.jpg"
#####################################
# TrayNotifier Class Definition
#####################################
class TrayNotifier(QtCore.QObject):
    """System-tray icon for the Zebrafish Scan Processor.

    Shows balloon notifications, offers a Show/Exit context menu, and
    restores the main window on click/double-click. Wired into the
    application through the shared ``core_signals`` dictionary.
    """
    def __init__(self, shared_objects):
        super(TrayNotifier, self).__init__()
        # ########## Reference to objects and main screen objects ##########
        self.shared_objects = shared_objects
        self.core_signals = self.shared_objects["core_signals"]
        self.main_screen = self.shared_objects["screens"]["main_screen"]
        # ########## Get the settings and logging instances ##########
        self.settings = QtCore.QSettings()
        self.logger = logging.getLogger("zscanprocessor")
        # ########## Class Variables ##########
        self.system_tray_icon = QtWidgets.QSystemTrayIcon(QtGui.QIcon(UI_LOGO))
        self.system_tray_menu = QtWidgets.QMenu()
        self.system_tray_menu.addAction("Show")
        self.system_tray_menu.addAction("Exit")
        # ########## Setup tray icon ##########
        self.setup_tray_icon()
        # ########## Setup program start signal connections ##########
        self.setup_signals()
    def setup_tray_icon(self):
        # Attach the context menu, show the icon and announce startup.
        self.system_tray_icon.setContextMenu(self.system_tray_menu)
        self.system_tray_icon.show()
        self.system_tray_icon.showMessage("Zebrafish Scan Processor", "Application started.\nUpdates will be " +
                                          "shown here.", QtWidgets.QSystemTrayIcon.Information, 5000)
    def connect_signals_and_slots(self):
        # No deferred connections needed; kept as a slot for the core's
        # connect_signals_and_slots broadcast.
        pass
    def show_informational_message(self, message, time=2000):
        # Balloon with "information" icon; `time` is the display duration in ms.
        self.system_tray_icon.showMessage("Zebrafish Scan Processor", message, QtWidgets.QSystemTrayIcon.Information,
                                          time)
    def show_failure_message(self, message, time=10000):
        # Balloon with "critical" icon; longer default duration than info.
        self.system_tray_icon.showMessage("Zebrafish Scan Processor", message, QtWidgets.QSystemTrayIcon.Critical,
                                          time)
    def on_tray_menu_item_clicked(self, event):
        # NOTE(review): this one slot is connected to both the icon's
        # `activated` signal (event is an ActivationReason enum) and the
        # menu's `triggered` signal (event is a QAction); the enum branches
        # below must therefore be checked before calling event.text().
        if event == QtWidgets.QSystemTrayIcon.Context:  # Happens on right-click, ignore for tray menu instead
            pass
        elif event in [QtWidgets.QSystemTrayIcon.Trigger, QtWidgets.QSystemTrayIcon.DoubleClick]:
            # Left click / double click: raise and un-minimize the main window.
            self.main_screen.show()
            self.main_screen.setWindowState(
                self.main_screen.windowState() & ~QtCore.Qt.WindowMinimized | QtCore.Qt.WindowActive)
            self.main_screen.activateWindow()
        elif event.text() == "Show":
            self.main_screen.show()
            self.main_screen.setWindowState(
                self.main_screen.windowState() & ~QtCore.Qt.WindowMinimized | QtCore.Qt.WindowActive)
            self.main_screen.activateWindow()
        elif event.text() == "Exit":
            # Hide the icon first, then ask the main screen to shut down.
            self.system_tray_icon.hide()
            self.main_screen.exit_requested_signal.emit()
    def on_kill_threads__slot(self):
        # Nothing to clean up; this object owns no worker threads.
        pass
    def setup_signals(self):
        self.core_signals["kill"].connect(self.on_kill_threads__slot)
        self.core_signals["connect_signals_and_slots"].connect(self.connect_signals_and_slots)
        # Both tray-icon activation and menu-item triggering route to the
        # same handler (see note in on_tray_menu_item_clicked).
        self.system_tray_icon.activated.connect(self.on_tray_menu_item_clicked)
        self.system_tray_menu.triggered.connect(self.on_tray_menu_item_clicked)
| SARL-Engineering/ZScan_Processor | Framework/TrayNotifier/TrayNotifierCore.py | Python | lgpl-3.0 | 3,597 |
import sys
from typing import Dict, Union, List, TYPE_CHECKING
import dcs.countries as countries
from dcs.mapping import Point
import dcs.unitgroup as unitgroup
import dcs.planes as planes
import dcs.helicopters as helicopters
import dcs.ships as ships
from dcs.unit import Vehicle, Static, Ship, FARP, SingleHeliPad
from dcs.flyingunit import Plane, Helicopter
from dcs.point import MovingPoint, StaticPoint
from dcs.country import Country
from dcs.status_message import StatusMessage, MessageType, MessageSeverity
if TYPE_CHECKING:
from . import Mission
class Coalition:
    """One side of a DCS mission (e.g. ``red``/``blue``/``neutrals``): a
    named collection of :class:`Country` objects plus the coalition's
    bullseye reference point."""
    def __init__(self, name, bullseye=None):
        self.name = name
        self.countries = {}  # type: Dict[str, Country]
        self.bullseye = bullseye
        self.nav_points = []  # TODO
    @staticmethod
    def _sort_keys(points):
        """Return the waypoint indices of an imported lua-style table as a
        sorted list of ints (indices arrive as strings/unordered keys)."""
        keys = []
        for imp_point_idx in points:
            keys.append(int(imp_point_idx))
        keys.sort()
        return keys
    @staticmethod
    def _import_moving_point(mission, group: unitgroup.Group, imp_group) -> unitgroup.Group:
        """Append all route waypoints of *imp_group* to *group* as
        :class:`MovingPoint` objects, in index order. Returns *group*."""
        keys = Coalition._sort_keys(imp_group["route"]["points"])
        for imp_point_idx in keys:
            imp_point = imp_group["route"]["points"][imp_point_idx]
            point = MovingPoint(Point(0, 0, mission.terrain))
            point.load_from_dict(imp_point, mission.translation)
            group.add_point(point)
        return group
    @staticmethod
    def _import_static_point(mission, group: unitgroup.Group, imp_group) -> unitgroup.Group:
        """Append all route waypoints of *imp_group* to *group* as
        :class:`StaticPoint` objects, in index order. Returns *group*."""
        keys = Coalition._sort_keys(imp_group["route"]["points"])
        for imp_point_idx in keys:
            imp_point = imp_group["route"]["points"][imp_point_idx]
            point = StaticPoint(Point(0, 0, mission.terrain))
            point.load_from_dict(imp_point, mission.translation)
            group.add_point(point)
        return group
    @staticmethod
    def _park_unit_on_airport(
            mission: 'Mission',
            group: unitgroup.Group,
            unit: Union[Plane, Helicopter]) -> List[StatusMessage]:
        """Assign *unit* its stored parking slot on the group's start airport.

        Falls back to the next free slot (WARN message) when the stored slot
        id is invalid, and emits an ERROR message when the airport has no
        free slot at all. Returns the collected status messages."""
        ret: List[StatusMessage] = []
        # Only relevant when the group starts on an airdrome and the unit
        # carries a stored parking slot id.
        if group.points[0].airdrome_id is not None and unit.parking is not None:
            airport = mission.terrain.airport_by_id(group.points[0].airdrome_id)
            slot = airport.parking_slot(unit.parking)
            if slot is not None:
                unit.set_parking(slot)
            else:
                msg = "Parking slot id '{i}' for unit '{u}' in group '{p}' on airport '{a}' " \
                      "not valid, placing on next free".format(i=unit.parking, u=unit.name,
                                                               a=airport.name, p=group.name)
                print("WARN", msg, file=sys.stderr)
                ret.append(StatusMessage(msg, MessageType.PARKING_SLOT_NOT_VALID, MessageSeverity.WARN))
                slot = airport.free_parking_slot(unit.unit_type)
                if slot is not None:
                    unit.set_parking(slot)
                else:
                    msg = "No free parking slots for unit '{u}' in unit group '{p}' on airport '{a}', ignoring"\
                        .format(u=unit.name, a=airport.name, p=group.name)
                    print("ERRO", msg, file=sys.stderr)
                    ret.append(StatusMessage(msg, MessageType.PARKING_SLOTS_FULL, MessageSeverity.ERROR))
        return ret
    @staticmethod
    def get_name(mission: "Mission", name: str) -> str:
        """Resolve a group/unit name, translating it for pre-2.7 missions."""
        # Group, unit names are not localized for missions are created in 2.7.
        if mission.version < 19:
            return str(mission.translation.get_string(name))
        else:
            return name
    def load_from_dict(self, mission, d) -> List[StatusMessage]:
        """Populate this coalition from a decoded mission-file dict *d*.

        Imports every country with its vehicle, ship, plane, helicopter and
        static groups, keeps ``mission.current_group_id`` /
        ``mission.current_unit_id`` monotonically up to date, and returns
        all status messages collected along the way (duplicate onboard
        numbers, parking problems, ...)."""
        status: List[StatusMessage] = []
        for country_idx in d["country"]:
            imp_country = d["country"][country_idx]
            _country = countries.get_by_id(imp_country["id"])
            # --- ground vehicle groups ---
            if "vehicle" in imp_country:
                for vgroup_idx in imp_country["vehicle"]["group"]:
                    vgroup = imp_country["vehicle"]["group"][vgroup_idx]
                    vg = unitgroup.VehicleGroup(vgroup["groupId"], self.get_name(mission, vgroup["name"]),
                                                vgroup["start_time"])
                    vg.load_from_dict(vgroup, mission.terrain)
                    mission.current_group_id = max(mission.current_group_id, vg.id)
                    Coalition._import_moving_point(mission, vg, vgroup)
                    # units
                    for imp_unit_idx in vgroup["units"]:
                        imp_unit = vgroup["units"][imp_unit_idx]
                        unit = Vehicle(
                            mission.terrain,
                            id=imp_unit["unitId"],
                            name=self.get_name(mission, imp_unit["name"]),
                            _type=imp_unit["type"])
                        unit.load_from_dict(imp_unit)
                        mission.current_unit_id = max(mission.current_unit_id, unit.id)
                        vg.add_unit(unit)
                    _country.add_vehicle_group(vg)
            # --- ship groups ---
            if "ship" in imp_country:
                for group_idx in imp_country["ship"]["group"]:
                    imp_group = imp_country["ship"]["group"][group_idx]
                    ship_group = unitgroup.ShipGroup(imp_group["groupId"], self.get_name(mission, imp_group["name"]),
                                                     imp_group["start_time"])
                    ship_group.load_from_dict(imp_group, mission.terrain)
                    mission.current_group_id = max(mission.current_group_id, ship_group.id)
                    Coalition._import_moving_point(mission, ship_group, imp_group)
                    # units
                    for imp_unit_idx in imp_group["units"]:
                        imp_unit = imp_group["units"][imp_unit_idx]
                        ship = Ship(
                            mission.terrain,
                            id=imp_unit["unitId"],
                            name=self.get_name(mission, imp_unit["name"]),
                            _type=ships.ship_map[imp_unit["type"]])
                        ship.load_from_dict(imp_unit)
                        mission.current_unit_id = max(mission.current_unit_id, ship.id)
                        ship_group.add_unit(ship)
                    _country.add_ship_group(ship_group)
            # --- plane groups ---
            if "plane" in imp_country:
                for pgroup_idx in imp_country["plane"]["group"]:
                    pgroup = imp_country["plane"]["group"][pgroup_idx]
                    plane_group = unitgroup.PlaneGroup(pgroup["groupId"],
                                                       self.get_name(mission, pgroup["name"]),
                                                       pgroup["start_time"])
                    plane_group.load_from_dict(pgroup, mission.terrain)
                    mission.current_group_id = max(mission.current_group_id, plane_group.id)
                    Coalition._import_moving_point(mission, plane_group, pgroup)
                    # units
                    for imp_unit_idx in pgroup["units"]:
                        imp_unit = pgroup["units"][imp_unit_idx]
                        plane = Plane(
                            mission.terrain,
                            _id=imp_unit["unitId"],
                            name=self.get_name(mission, imp_unit["name"]),
                            _type=planes.plane_map[imp_unit["type"]],
                            _country=_country)
                        plane.load_from_dict(imp_unit)
                        # Track duplicate tail numbers within the coalition.
                        if _country.reserve_onboard_num(plane.onboard_num):
                            msg = "{c} Plane '{p}' already using tail number: {t}".format(
                                c=self.name.upper(), p=plane.name, t=plane.onboard_num)
                            status.append(StatusMessage(msg, MessageType.ONBOARD_NUM_DUPLICATE, MessageSeverity.WARN))
                            print("WARN:", msg, file=sys.stderr)
                        status += self._park_unit_on_airport(mission, plane_group, plane)
                        mission.current_unit_id = max(mission.current_unit_id, plane.id)
                        plane_group.add_unit(plane)
                    # check runway start
                    # if plane_group.points[0].airdrome_id is not None and plane_group.units[0].parking is None:
                    #     airport = mission.terrain.airport_by_id(plane_group.points[0].airdrome_id)
                    #     airport.occupy_runway(plane_group)
                    _country.add_plane_group(plane_group)
            # --- helicopter groups ---
            if "helicopter" in imp_country:
                for pgroup_idx in imp_country["helicopter"]["group"]:
                    pgroup = imp_country["helicopter"]["group"][pgroup_idx]
                    helicopter_group = unitgroup.HelicopterGroup(
                        pgroup["groupId"],
                        self.get_name(mission, pgroup["name"]),
                        pgroup["start_time"])
                    helicopter_group.load_from_dict(pgroup, mission.terrain)
                    mission.current_group_id = max(mission.current_group_id, helicopter_group.id)
                    Coalition._import_moving_point(mission, helicopter_group, pgroup)
                    # units
                    for imp_unit_idx in pgroup["units"]:
                        imp_unit = pgroup["units"][imp_unit_idx]
                        heli = Helicopter(
                            mission.terrain,
                            _id=imp_unit["unitId"],
                            name=self.get_name(mission, imp_unit["name"]),
                            _type=helicopters.helicopter_map[imp_unit["type"]],
                            _country=_country)
                        heli.load_from_dict(imp_unit)
                        # Track duplicate tail numbers within the coalition.
                        if _country.reserve_onboard_num(heli.onboard_num):
                            msg = "{c} Helicopter '{h}' already using tail number: {t}".format(
                                c=self.name.upper(), h=heli.name, t=heli.onboard_num)
                            status.append(StatusMessage(msg, MessageType.ONBOARD_NUM_DUPLICATE, MessageSeverity.WARN))
                            print("WARN:", msg, file=sys.stderr)
                        status += self._park_unit_on_airport(mission, helicopter_group, heli)
                        mission.current_unit_id = max(mission.current_unit_id, heli.id)
                        helicopter_group.add_unit(heli)
                    # check runway start
                    # if helicopter_group.points[0].airdrome_id is not None and helicopter_group.units[0].parking is None:
                    #     airport = mission.terrain.airport_by_id(helicopter_group.points[0].airdrome_id)
                    #     airport.occupy_runway(helicopter_group)
                    _country.add_helicopter_group(helicopter_group)
            # --- static object groups ---
            if "static" in imp_country:
                for sgroup_idx in imp_country["static"]["group"]:
                    sgroup = imp_country["static"]["group"][sgroup_idx]
                    static_group = unitgroup.StaticGroup(sgroup["groupId"],
                                                         self.get_name(mission, sgroup["name"]))
                    static_group.load_from_dict(sgroup, mission.terrain)
                    mission.current_group_id = max(mission.current_group_id, static_group.id)
                    Coalition._import_static_point(mission, static_group, sgroup)
                    # units
                    for imp_unit_idx in sgroup["units"]:
                        imp_unit = sgroup["units"][imp_unit_idx]
                        static: Static
                        # FARPs and single helipads have dedicated classes.
                        if imp_unit["type"] == "FARP":
                            static = FARP(
                                mission.terrain,
                                unit_id=imp_unit["unitId"],
                                name=self.get_name(mission, imp_unit["name"]))
                        elif imp_unit["type"] == "SINGLE_HELIPAD":
                            static = SingleHeliPad(
                                mission.terrain,
                                unit_id=imp_unit["unitId"],
                                name=self.get_name(mission, imp_unit["name"]))
                        else:
                            static = Static(
                                unit_id=imp_unit["unitId"],
                                name=self.get_name(mission, imp_unit["name"]),
                                _type=imp_unit["type"],
                                terrain=mission.terrain)
                        static.load_from_dict(imp_unit)
                        mission.current_unit_id = max(mission.current_unit_id, static.id)
                        static_group.add_unit(static)
                    _country.add_static_group(static_group)
            self.add_country(_country)
        return status
    def set_bullseye(self, bulls):
        """Set the coalition bullseye reference point."""
        self.bullseye = bulls
    def add_country(self, country):
        """Register *country* (keyed by its name) and return it."""
        self.countries[country.name] = country
        return country
    def remove_country(self, name):
        """Remove and return the country registered under *name*."""
        return self.countries.pop(name)
    def swap_country(self, coalition, name):
        """Move the country *name* from this coalition to *coalition*."""
        return coalition.add_country(self.remove_country(name))
    def country(self, country_name: str):
        """Return the country registered under *country_name*, or None."""
        return self.countries.get(country_name, None)
    def country_by_id(self, _id: int):
        """Return the country with numeric id *_id*, or None."""
        for cn in self.countries:
            c = self.countries[cn]
            if c.id == _id:
                return c
        return None
    def find_group(self, group_name, search="exact"):
        """Search all countries for a group named *group_name*; the *search*
        mode is forwarded to Country.find_group. Returns None if absent."""
        for c in self.countries:
            g = self.countries[c].find_group(group_name, search)
            if g:
                return g
        return None
    def dict(self):
        """Serialize the coalition back into the mission-file dict format."""
        d = {"name": self.name}
        if self.bullseye:
            d["bullseye"] = self.bullseye
        d["country"] = {}
        i = 1
        # Countries are emitted in alphabetical order with 1-based indices.
        for country in sorted(self.countries.keys()):
            d["country"][i] = self.country(country).dict()
            i += 1
        d["nav_points"] = {}
        return d
| pydcs/dcs | dcs/coalition.py | Python | lgpl-3.0 | 14,260 |
from base import Entidade
from pynfe.utils.flags import CODIGO_BRASIL
class Emitente(Entidade):
    """Issuer (emitente) of an NF-e electronic invoice.

    Plain string class attributes hold the issuer's identification and
    fiscal address; fields required by the NF-e layout are noted below.
    The country is fixed to Brazil.
    """
    # --- issuer identification ---
    razao_social = ''                          # legal/corporate name (required)
    nome_fantasia = ''                         # trade name
    cnpj = ''                                  # CNPJ number (required)
    inscricao_estadual = ''                    # state tax registration (required)
    cnae_fiscal = ''                           # fiscal CNAE activity code
    inscricao_municipal = ''                   # municipal tax registration
    inscricao_estadual_subst_tributaria = ''   # state registration (tax substitute)
    codigo_de_regime_tributario = ''           # tax regime code (required)
    # --- address ---
    endereco_logradouro = ''                   # street (required)
    endereco_numero = ''                       # number (required)
    endereco_complemento = ''                  # additional address info
    endereco_bairro = ''                       # district (required)
    endereco_cep = ''                          # postal code
    endereco_pais = CODIGO_BRASIL              # country (Brazil only)
    endereco_uf = ''                           # state (required)
    endereco_municipio = ''                    # city (required)
    endereco_cod_municipio = ''                # city code (optional)
    endereco_telefone = ''                     # phone
    def __str__(self):
        # An issuer is identified by its CNPJ.
        return self.cnpj
| YACOWS/PyNFe | pynfe/entidades/emitente.py | Python | lgpl-3.0 | 1,321 |
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2018
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains the ConversationHandler."""
import logging
from telegram import Update
from telegram.ext import (Handler, CallbackQueryHandler, InlineQueryHandler,
ChosenInlineResultHandler)
from telegram.utils.promise import Promise
from MongoDict import MongoDict
class ConversationHandler(Handler):
    """
    A handler to hold a conversation with a single user by managing four collections of other
    handlers. Note that neither posts in Telegram Channels, nor group interactions with multiple
    users are managed by instances of this class.

    The first collection, a ``list`` named :attr:`entry_points`, is used to initiate the
    conversation, for example with a :class:`telegram.ext.CommandHandler` or
    :class:`telegram.ext.RegexHandler`.

    The second collection, a ``dict`` named :attr:`states`, contains the different conversation
    steps and one or more associated handlers that should be used if the user sends a message when
    the conversation with them is currently in that state. You will probably use mostly
    :class:`telegram.ext.MessageHandler` and :class:`telegram.ext.RegexHandler` here.

    The third collection, a ``list`` named :attr:`fallbacks`, is used if the user is currently in a
    conversation but the state has either no associated handler or the handler that is associated
    to the state is inappropriate for the update, for example if the update contains a command, but
    a regular text message is expected. You could use this for a ``/cancel`` command or to let the
    user know their message was not recognized.

    The fourth, optional collection of handlers, a ``list`` named :attr:`timed_out_behavior` is
    used if the wait for ``run_async`` takes longer than defined in :attr:`run_async_timeout`.
    For example, you can let the user know that they should wait for a bit before they can
    continue.

    To change the state of conversation, the callback function of a handler must return the new
    state after responding to the user. If it does not return anything (returning ``None`` by
    default), the state will not change. To end the conversation, the callback function must
    return :attr:`END` or ``-1``.

    Attributes:
        entry_points (List[:class:`telegram.ext.Handler`]): A list of ``Handler`` objects that can
            trigger the start of the conversation.
        states (Dict[:obj:`object`, List[:class:`telegram.ext.Handler`]]): A :obj:`dict` that
            defines the different states of conversation a user can be in and one or more
            associated ``Handler`` objects that should be used in that state.
        fallbacks (List[:class:`telegram.ext.Handler`]): A list of handlers that might be used if
            the user is in a conversation, but every handler for their current state returned
            ``False`` on :attr:`check_update`.
        allow_reentry (:obj:`bool`): Optional. Determines if a user can restart a conversation with
            an entry point.
        run_async_timeout (:obj:`float`): Optional. The time-out for ``run_async`` decorated
            Handlers.
        timed_out_behavior (List[:class:`telegram.ext.Handler`]): Optional. A list of handlers that
            might be used if the wait for ``run_async`` timed out.
        per_chat (:obj:`bool`): Optional. If the conversationkey should contain the Chat's ID.
        per_user (:obj:`bool`): Optional. If the conversationkey should contain the User's ID.
        per_message (:obj:`bool`): Optional. If the conversationkey should contain the Message's
            ID.
        conversation_timeout (:obj:`float`|:obj:`datetime.timedelta`): Optional. When this handler
            is inactive more than this timeout (in seconds), it will be automatically ended. If
            this value is 0 (default), there will be no timeout.

    Args:
        entry_points (List[:class:`telegram.ext.Handler`]): A list of ``Handler`` objects that can
            trigger the start of the conversation. The first handler which :attr:`check_update`
            method returns ``True`` will be used. If all return ``False``, the update is not
            handled.
        states (Dict[:obj:`object`, List[:class:`telegram.ext.Handler`]]): A :obj:`dict` that
            defines the different states of conversation a user can be in and one or more
            associated ``Handler`` objects that should be used in that state. The first handler
            which :attr:`check_update` method returns ``True`` will be used.
        fallbacks (List[:class:`telegram.ext.Handler`]): A list of handlers that might be used if
            the user is in a conversation, but every handler for their current state returned
            ``False`` on :attr:`check_update`. The first handler which :attr:`check_update` method
            returns ``True`` will be used. If all return ``False``, the update is not handled.
        allow_reentry (:obj:`bool`, optional): If set to ``True``, a user that is currently in a
            conversation can restart the conversation by triggering one of the entry points.
        run_async_timeout (:obj:`float`, optional): If the previous handler for this user was
            running asynchronously using the ``run_async`` decorator, it might not be finished when
            the next message arrives. This timeout defines how long the conversation handler should
            wait for the next state to be computed. The default is ``None`` which means it will
            wait indefinitely.
        timed_out_behavior (List[:class:`telegram.ext.Handler`], optional): A list of handlers that
            might be used if the wait for ``run_async`` timed out. The first handler which
            :attr:`check_update` method returns ``True`` will be used. If all return ``False``,
            the update is not handled.
        per_chat (:obj:`bool`, optional): If the conversationkey should contain the Chat's ID.
            Default is ``True``.
        per_user (:obj:`bool`, optional): If the conversationkey should contain the User's ID.
            Default is ``True``.
        per_message (:obj:`bool`, optional): If the conversationkey should contain the Message's
            ID. Default is ``False``.
        conversation_timeout (:obj:`float`|:obj:`datetime.timedelta`, optional): When this handler
            is inactive more than this timeout (in seconds), it will be automatically ended. If
            this value is 0 or None (default), there will be no timeout.
        collection (optional): MongoDB collection used by :class:`MongoDict` to persist the
            conversation states across restarts.

    Raises:
        ValueError
    """

    END = -1
    """:obj:`int`: Used as a constant to return when a conversation is ended."""

    def __init__(self,
                 entry_points,
                 states,
                 fallbacks,
                 allow_reentry=False,
                 run_async_timeout=None,
                 timed_out_behavior=None,
                 per_chat=True,
                 per_user=True,
                 per_message=False,
                 conversation_timeout=None,
                 collection=None):
        self.logger = logging.getLogger(__name__)

        self.entry_points = entry_points
        self.states = states
        self.fallbacks = fallbacks

        self.allow_reentry = allow_reentry
        self.run_async_timeout = run_async_timeout
        self.timed_out_behavior = timed_out_behavior
        self.per_user = per_user
        self.per_chat = per_chat
        self.per_message = per_message
        self.conversation_timeout = conversation_timeout

        # Per-conversation timeout jobs, keyed by conversation key.
        self.timeout_jobs = dict()
        # Conversation state is persisted through MongoDict so it survives restarts.
        self.conversations = MongoDict(collection=collection, warm_cache=True)
        self.logger.info("Conversations: %s", self.conversations.idb)

        self.current_conversation = None
        self.current_handler = None

        if not any((self.per_user, self.per_chat, self.per_message)):
            raise ValueError("'per_user', 'per_chat' and 'per_message' can't all be 'False'")

        # Use the instance logger for all diagnostics (was mixed with the root logger).
        if self.per_message and not self.per_chat:
            self.logger.warning("If 'per_message=True' is used, 'per_chat=True' should also be used, "
                                "since message IDs are not globally unique.")

        all_handlers = list()
        all_handlers.extend(entry_points)
        all_handlers.extend(fallbacks)

        for state_handlers in states.values():
            all_handlers.extend(state_handlers)

        if self.per_message:
            for handler in all_handlers:
                if not isinstance(handler, CallbackQueryHandler):
                    self.logger.warning("If 'per_message=True', all entry points and state handlers"
                                        " must be 'CallbackQueryHandler', since no other handlers "
                                        "have a message context.")
        else:
            for handler in all_handlers:
                if isinstance(handler, CallbackQueryHandler):
                    self.logger.warning("If 'per_message=False', 'CallbackQueryHandler' will not be "
                                        "tracked for every message.")

        if self.per_chat:
            for handler in all_handlers:
                if isinstance(handler, (InlineQueryHandler, ChosenInlineResultHandler)):
                    self.logger.warning("If 'per_chat=True', 'InlineQueryHandler' can not be used, "
                                        "since inline queries have no chat context.")

    def _get_key(self, update):
        """Build the conversation key for *update*, honouring the
        ``per_chat``/``per_user``/``per_message`` settings."""
        chat = update.effective_chat
        user = update.effective_user

        key = list()

        if self.per_chat:
            key.append(chat.id)

        if self.per_user and user is not None:
            key.append(user.id)

        if self.per_message:
            key.append(update.callback_query.inline_message_id
                       or update.callback_query.message.message_id)

        return tuple(key)

    def check_update(self, update):
        """
        Determines whether an update should be handled by this conversationhandler, and if so in
        which state the conversation currently is.

        Args:
            update (:class:`telegram.Update`): Incoming telegram update.

        Returns:
            :obj:`bool`
        """
        # Ignore messages in channels and anything that lacks the context we key on.
        if (not isinstance(update, Update) or
                update.channel_post or
                self.per_chat and not update.effective_chat or
                self.per_message and not update.callback_query or
                update.callback_query and self.per_chat and not update.callback_query.message):
            return False

        key = self._get_key(update)
        state = self.conversations.get(key)

        # Resolve promises left behind by ``run_async`` handlers.
        # NOTE: fixed ``len(state) is 2`` -> ``== 2``; ``is`` on an int literal only
        # worked by accident of CPython's small-int caching.
        if isinstance(state, tuple) and len(state) == 2 and isinstance(state[1], Promise):
            self.logger.debug('waiting for promise...')

            old_state, new_state = state
            error = False
            try:
                res = new_state.result(timeout=self.run_async_timeout)
            except Exception as exc:
                self.logger.exception("Promise function raised exception")
                self.logger.exception("{}".format(exc))
                error = True

            if not error and new_state.done.is_set():
                self.update_state(res, key)
                state = self.conversations.get(key)
            else:
                # Promise timed out: fall back to the timed-out handlers, if any match.
                for candidate in (self.timed_out_behavior or []):
                    if candidate.check_update(update):
                        # Save the current user and the selected handler for handle_update
                        self.current_conversation = key
                        self.current_handler = candidate
                        return True
                else:
                    return False

        self.logger.debug('selecting conversation %s with state %s' % (str(key), str(state)))

        handler = None

        # Search entry points for a match
        if state is None or self.allow_reentry:
            for entry_point in self.entry_points:
                if entry_point.check_update(update):
                    handler = entry_point
                    break
            else:
                if state is None:
                    return False

        # Get the handler list for current state, if we didn't find one yet and we're still here
        if state is not None and not handler:
            handlers = self.states.get(state)

            for candidate in (handlers or []):
                if candidate.check_update(update):
                    handler = candidate
                    break
            # Find a fallback handler if all other handlers fail
            else:
                for fallback in self.fallbacks:
                    if fallback.check_update(update):
                        handler = fallback
                        break
                else:
                    return False

        # Save the current user and the selected handler for handle_update
        self.current_conversation = key
        self.current_handler = handler

        return True

    def handle_update(self, update, dispatcher):
        """Send the update to the callback for the current state and Handler

        Args:
            update (:class:`telegram.Update`): Incoming telegram update.
            dispatcher (:class:`telegram.ext.Dispatcher`): Dispatcher that originated the Update.
        """
        new_state = self.current_handler.handle_update(update, dispatcher)

        # Reschedule the inactivity timeout (if configured) on every handled update.
        timeout_job = self.timeout_jobs.pop(self.current_conversation, None)
        if timeout_job is not None:
            timeout_job.schedule_removal()
        if self.conversation_timeout and new_state != self.END:
            self.timeout_jobs[self.current_conversation] = dispatcher.job_queue.run_once(
                self._trigger_timeout, self.conversation_timeout,
                context=self.current_conversation
            )

        self.update_state(new_state, self.current_conversation)

    def update_state(self, new_state, key):
        """Record *new_state* for conversation *key* (or forget it on END)."""
        if new_state == self.END:
            # Conversation finished: drop any stored state.
            if key in self.conversations:
                del self.conversations[key]
        elif isinstance(new_state, Promise):
            # Keep the old state alongside the pending promise so check_update
            # can resolve it later.
            self.conversations[key] = (self.conversations.get(key), new_state)
        elif new_state is not None:
            self.conversations[key] = new_state

    def _trigger_timeout(self, bot, job):
        """Job callback: end the conversation stored in ``job.context``."""
        del self.timeout_jobs[job.context]
        self.update_state(self.END, job.context)
self.update_state(self.END, job.context) | d-qoi/TelegramBots | RoseAssassins/cust_handlers/conversationhandler.py | Python | lgpl-3.0 | 15,351 |
# Copyright (C) 2015 Will Metcalf william.metcalf@gmail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lib.cuckoo.common.abstracts import Signature
class BrowserScanbox(Signature):
    """Cuckoo signature flagging Scanbox reconnaissance-framework activity in browser scripts."""

    name = "browser_scanbox"
    description = "Scanbox Activity in Browser"
    weight = 3
    severity = 3
    categories = ["exploit"]
    authors = ["Will Metcalf"]
    minimum = "1.3"
    evented = True

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)

    filter_categories = set(["browser"])
    # backward compat
    filter_apinames = set(["JsEval", "COleScript_Compile", "COleScript_ParseScriptText"])

    def on_call(self, call, process):
        # JsEval carries the script body in "Javascript"; the COleScript hooks use "Script".
        argument = "Javascript" if call["api"] == "JsEval" else "Script"
        script = self.get_argument(call, argument).lower()
        # Fingerprint of Scanbox's installed-software enumeration plugin.
        if 'softwarelist.push(' in script and 'indexof("-2147023083")' in script:
            return True
        # Fingerprint of Scanbox's in-page keylogger plugin.
        if 'var logger' in script and 'document.onkeypress = keypress;' in script and 'setinterval(sendchar,' in script:
            return True
| lixiangning888/whole_project | modules/signatures_orginal_20151110/browser_scanbox.py | Python | lgpl-3.0 | 1,730 |
import sys
from unittest import TestCase, expectedFailure, skipIf
import pygame
from pgzero.loaders import sounds, set_root, UnsupportedFormat
pygame.init()


class SoundFormatsTest(TestCase):
    """Test that sound formats we cannot open show an appropriate message."""

    @classmethod
    def setUpClass(cls):
        # Resolve sound fixtures relative to this test file.
        set_root(__file__)

    def assert_loadable(self, name):
        # Every loadable fixture is roughly 0.9 s long; check we decoded the whole clip.
        sound = sounds.load(name)
        length = sound.get_length()
        assert 0.85 < length < 1.0, \
            "Failed to correctly load sound (got length %0.1fs)" % length

    def assert_errmsg(self, name, pattern):
        # Unsupported encodings must raise UnsupportedFormat with a helpful message.
        with self.assertRaisesRegex(UnsupportedFormat, pattern):
            sounds.load(name)

    def test_load_22k16bitpcm(self):
        self.assert_loadable('wav22k16bitpcm')

    def test_load_22k8bitpcm(self):
        self.assert_loadable('wav22k8bitpcm')

    def test_load_22kadpcm(self):
        self.assert_loadable('wav22kadpcm')

    @expectedFailure  # See issue #22 - 8Khz files don't open correctly
    def test_load_8k16bitpcm(self):
        self.assert_loadable('wav8k16bitpcm')

    @expectedFailure  # See issue #22 - 8Khz files don't open correctly
    def test_load_8k8bitpcm(self):
        self.assert_loadable('wav8k8bitpcm')

    @expectedFailure  # See issue #22 - 8Khz files don't open correctly
    def test_load_8kadpcm(self):
        self.assert_loadable('wav8kadpcm')

    @skipIf(sys.platform == "win32", "This will crash on Windows")
    def test_load_11kgsm(self):
        self.assert_errmsg('wav22kgsm', 'WAV audio encoded as GSM')

    @skipIf(sys.platform == "win32", "This will crash on Windows")
    def test_load_11kulaw(self):
        self.assert_errmsg('wav22kulaw', 'WAV audio encoded as .* µ-law')

    @skipIf(sys.platform == "win32", "This will crash on Windows")
    def test_load_8kmp316(self):
        self.assert_errmsg('wav8kmp316', 'WAV audio encoded as MP3')

    @skipIf(sys.platform == "win32", "This will crash on Windows")
    def test_load_8kmp38(self):
        self.assert_errmsg('wav8kmp38', 'WAV audio encoded as MP3')

    def test_load_vorbis1(self):
        """Load OGG Vorbis with .ogg extension."""
        self.assert_loadable('vorbis1')

    def test_load_vorbis2(self):
        """Load OGG Vorbis with .oga extension."""
        self.assert_loadable('vorbis2')
| RPi-Distro/pgzero | test/test_sound_formats.py | Python | lgpl-3.0 | 2,385 |
import sys

# Major version of the running interpreter (2 or 3); used to branch py2/py3 code.
PYVERSION = 3 if sys.version_info > (3,) else 2
| csirtgadgets/csirtg-mail-py | csirtg_mail/constants.py | Python | lgpl-3.0 | 72 |
#!/usr/bin/env python
##############################################################################
##
## This file is part of Sardana
##
## http://www.sardana-controls.org/
##
## Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
## Sardana is free software: you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## Sardana is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""
model.py:
"""
from lxml import etree
from taurus.external.qt import Qt
from sardana.taurus.core.tango.sardana import macro
class MacroSequenceTreeModel(Qt.QAbstractItemModel):
    """Qt item model exposing a tree of :mod:`macro` sequence nodes.

    Columns: 0 = macro name, 1 = parameter value, 2 = progress, 3 = pause flag.
    The underlying data is a :class:`macro.SequenceNode` tree.
    """

    def __init__(self, parent=None):
        Qt.QAbstractItemModel.__init__(self, parent)
        self.columns = 4
        self.setRoot(macro.SequenceNode())
        self.headers = ["Macro", "Parameters", "Progress", "Pause"]

    def root(self):
        """Return the root :class:`macro.SequenceNode` of the model."""
        return self._root

    def setRoot(self, root):
        """Replace the whole tree and reset the model."""
        self._root = root
        self.reset()

    def clearSequence(self):
        """Drop all macros by installing a fresh, empty sequence."""
        self.setRoot(macro.SequenceNode())

    def isEmpty(self):
        return len(self.root()) == 0

    def flags(self, index):
        column = index.column()
        node = self.nodeFromIndex(index)

        flags = Qt.Qt.ItemIsEnabled
        if column == 0:
            flags |= Qt.Qt.ItemIsSelectable
        elif column == 1:
            # Only plain (non-"User") single parameters are editable in place.
            if isinstance(node, macro.SingleParamNode) and \
                    not node.type() == "User":
                flags |= Qt.Qt.ItemIsEditable
            else:
                flags |= Qt.Qt.ItemIsSelectable
        elif column == 2:
            flags |= Qt.Qt.ItemIsSelectable
        elif index.column() == 3:
            flags |= (Qt.Qt.ItemIsSelectable | Qt.Qt.ItemIsEditable)

        if isinstance(node, macro.MacroNode):
            flags |= Qt.Qt.ItemIsDragEnabled
            if node.isAllowedHooks():
                flags |= Qt.Qt.ItemIsDropEnabled

        return flags

    def _insertRow(self, parentIndex, node=None, row=-1):
        """Insert *node* as a child of *parentIndex* at *row* (-1 appends)."""
        parentNode = self.nodeFromIndex(parentIndex)
        if row == -1:
            row = len(parentNode)
        if isinstance(parentNode, macro.RepeatParamNode):
            # was ``node == None``; identity check is the correct idiom
            if node is None:
                node = parentNode.newRepeat()
        self.beginInsertRows(parentIndex, row, row)
        row = parentNode.insertChild(node, row)
        self.endInsertRows()
        return self.index(row, 0, parentIndex)

    def _removeRow(self, index):
        """This method is used remove macro (pased via index)"""
        node = self.nodeFromIndex(index)
        parentIndex = index.parent()
        parentNode = self.nodeFromIndex(parentIndex)
        row = parentNode.rowOfChild(node)
        self.beginRemoveRows(parentIndex, row, row)
        parentNode.removeChild(node)
        self.endRemoveRows()

    def _upRow(self, index):
        """Move the row one position up among its siblings."""
        node = self.nodeFromIndex(index)
        parentIndex = index.parent()
        parentNode = self.nodeFromIndex(parentIndex)
        row = parentNode.rowOfChild(node)
        self._removeRow(index)
        newIndex = self._insertRow(parentIndex, node, row - 1)
        if isinstance(parentNode, macro.RepeatParamNode):
            parentNode.arrangeIndexes()
        return newIndex

    def _downRow(self, index):
        """Move the row one position down among its siblings."""
        node = self.nodeFromIndex(index)
        parentIndex = index.parent()
        parentNode = self.nodeFromIndex(parentIndex)
        row = parentNode.rowOfChild(node)
        self._removeRow(index)
        newIndex = self._insertRow(parentIndex, node, row + 1)
        if isinstance(parentNode, macro.RepeatParamNode):
            parentNode.arrangeIndexes()
        return newIndex

    def _leftRow(self, index):
        """This method is used to move selected macro (pased via index)
        to it's grandparent's hook list. In tree representation it basically move macro to the left"""
        node = self.nodeFromIndex(index)
        parentIndex = index.parent()
        grandParentIndex = parentIndex.parent()
        self._removeRow(index)
        return self._insertRow(grandParentIndex, node)

    def _rightRow(self, index):
        """This method is used to move selected macro (pased via index)
        into the hook list of its preceding sibling. In tree representation it basically
        moves the macro to the right."""
        node = self.nodeFromIndex(index)
        parentIndex = index.parent()
        row = index.row()
        self._removeRow(index)
        newParentIndex = self.index(row, 0, parentIndex)
        return self._insertRow(newParentIndex, node)

    def rowCount(self, parent):
        branchNode = self.nodeFromIndex(parent)
        return len(branchNode)

    def columnCount(self, parent):
        return self.columns

    def data(self, index, role):
        if role == Qt.Qt.DisplayRole:
            node = self.nodeFromIndex(index)
            if index.column() == 0:
                return Qt.QVariant(node.name())
            elif index.column() == 1:
                return Qt.QVariant(str(node.value()))
            elif index.column() == 2:
                if isinstance(node, macro.MacroNode):
                    return Qt.QVariant(node.progress())
        elif role == Qt.Qt.DecorationRole:
            node = self.nodeFromIndex(index)
            if index.column() == 3:
                if isinstance(node, macro.MacroNode):
                    if node.isPause():
                        return Qt.QVariant(Qt.QIcon(":/actions/media-playback-pause.svg"))
        return Qt.QVariant()

    def setData(self, index, value, role=Qt.Qt.EditRole):
        node = self.nodeFromIndex(index)
        if index.column() == 1:
            if isinstance(node, macro.SingleParamNode):
                node.setValue(Qt.from_qvariant(value, str))
                self.emit(Qt.SIGNAL("dataChanged(QModelIndex,QModelIndex)"), index, index)
                # Also notify the enclosing macro row so its parameter summary refreshes.
                while True:
                    index = index.parent()
                    if not index.isValid():
                        # Guard: no MacroNode ancestor found; previously this
                        # could loop forever on an invalid index.
                        break
                    node = self.nodeFromIndex(index)
                    if isinstance(node, macro.MacroNode):
                        self.emit(Qt.SIGNAL("dataChanged(QModelIndex,QModelIndex)"), index,
                                  index.sibling(index.row(), self.columnCount(index) - 1))
                        break
        elif index.column() == 2:
            progress = Qt.from_qvariant(value, float)
            node.setProgress(progress)
            self.emit(Qt.SIGNAL("dataChanged(QModelIndex,QModelIndex)"), index, index)
        elif index.column() == 3:
            node.setPause(Qt.from_qvariant(value, bool))
            self.emit(Qt.SIGNAL("dataChanged(QModelIndex,QModelIndex)"), index, index)
        return True

    def headerData(self, section, orientation, role):
        if orientation == Qt.Qt.Horizontal and role == Qt.Qt.DisplayRole:
            return Qt.QVariant(self.headers[section])
        return Qt.QVariant()

    def index(self, row, column, parent):
        assert self.root() is not None
        branchNode = self.nodeFromIndex(parent)
        assert branchNode is not None
        return self.createIndex(row, column, branchNode.child(row))

    def parent(self, child):
        node = self.nodeFromIndex(child)
        if node is None:
            return Qt.QModelIndex()
        parent = node.parent()
        if parent is None:
            return Qt.QModelIndex()
        grandparent = parent.parent()
        if grandparent is None:
            return Qt.QModelIndex()
        row = grandparent.rowOfChild(parent)
        assert row != -1
        return self.createIndex(row, 0, parent)

    def nodeFromIndex(self, index):
        """Map a model index to its macro node (invalid index -> root)."""
        if index.isValid():
            return index.internalPointer()
        else:
            return self.root()

    def toXmlString(self, pretty=False, withId=True):
        """Serialize the whole sequence to an XML string."""
        xmlSequence = self.root().toXml(withId=withId)
        xmlTree = etree.ElementTree(xmlSequence)
        xmlString = etree.tostring(xmlTree, pretty_print=pretty)
        return xmlString

    def fromXmlString(self, xmlString):
        """Replace the model contents with the sequence parsed from *xmlString*."""
        xmlElement = etree.fromstring(xmlString)
        newRoot = macro.SequenceNode(None)
        newRoot.fromXml(xmlElement)
        self.setRoot(newRoot)
        self.reset()
        return newRoot

    def fromPlainText(self, text):
        """Replace the model contents with the sequence parsed from plain text."""
        newRoot = macro.SequenceNode(None)
        newRoot.fromPlainText(text)
        self.setRoot(newRoot)
        self.reset()
        return newRoot

    def assignIds(self):
        """
        Assigns ids for all macros present in the sequence. If certain macro
        already had an id, it stays without change. A list of all ids is returned

        :return: (list)
        """
        parentNode = self.root()
        return self.__assignIds(parentNode)

    def __assignIds(self, parentNode):
        # Recursive helper for assignIds.
        ids = []
        for childNode in parentNode.children():
            if isinstance(childNode, macro.MacroNode):
                id = childNode.assignId()
                ids.append(id)
                ids.extend(self.__assignIds(childNode))
        return ids

    def firstMacroId(self):
        return self.root().child(0).id()

    def lastMacroId(self):
        root = self.root()
        return root.child(len(root.children()) - 1).id()

    def createIdIndexDictionary(self):
        """Return a dict mapping macro id -> model index for every macro."""
        parentIndex = Qt.QModelIndex()
        parentNode = self.root()
        return self.__createIdIndexDictionary(parentIndex, parentNode)

    def __createIdIndexDictionary(self, parentIndex, parentNode):
        # Recursive helper for createIdIndexDictionary.
        d = {}
        for row, child in enumerate(parentNode.children()):
            if isinstance(child, macro.MacroNode):
                index = self.index(row, 0, parentIndex)
                d[child.id()] = index
                d.update(self.__createIdIndexDictionary(index, child))
        return d
class MacroSequenceProxyModel(Qt.QSortFilterProxyModel):
    """Proxy over MacroSequenceTreeModel that exposes only MacroNode rows."""

    def __init__(self, parent=None):
        Qt.QSortFilterProxyModel.__init__(self, parent)
        self.setDynamicSortFilter(True)
        self.headers = ["Macro", "Parameters", "Progress", "Pause"]
        self.columns = 4

    def __getattr__(self, name):
        # Anything the proxy does not implement is served by the source model.
        return getattr(self.sourceModel(), name)

    def nodeFromIndex(self, index):
        # Translate the proxy index back into the source model to locate the node.
        return self.sourceModel().nodeFromIndex(self.mapToSource(index))

    def createIdIndexDictionary(self):
        id2index = self.sourceModel().createIdIndexDictionary()
        for macroId, srcIndex in id2index.iteritems():
            # Replace each source index with a persistent index in proxy coordinates.
            id2index[macroId] = Qt.QPersistentModelIndex(self.mapFromSource(srcIndex))
        return id2index

    def filterAcceptsRow(self, row, parentIndex):
        childIndex = self.sourceModel().index(row, 0, parentIndex)
        return isinstance(self.sourceModel().nodeFromIndex(childIndex), macro.MacroNode)
class MacroParametersProxyModel(Qt.QSortFilterProxyModel):
    """Proxy model exposing only the parameters of one selected macro.

    The macro whose parameters are shown is chosen with :meth:`setMacroIndex`
    (a source-model index); only its ParamNode children pass the filter.
    """

    def __init__(self, parent=None):
        Qt.QSortFilterProxyModel.__init__(self, parent)
        self.columns = 2
        self.headers = ["Parameter", "Value", "", "", "", ""]
        self._macroIndex = None

    def __getattr__(self, name):
        # Delegate everything not implemented here to the source model.
        return getattr(self.sourceModel(), name)

    def headerData(self, section, orientation, role):
        if orientation == Qt.Qt.Horizontal and role == Qt.Qt.DisplayRole:
            return Qt.QVariant(self.headers[section])
        return Qt.QVariant()

    def nodeFromIndex(self, index):
        sourceIndex = self.mapToSource(index)
        node = self.sourceModel().nodeFromIndex(sourceIndex)
        return node

    def setMacroIndex(self, macroIndex):
        """Select which macro's parameters the proxy should expose."""
        self._macroIndex = macroIndex

    def macroIndex(self):
        return self._macroIndex

    def columnCount(self, parent):
        return self.columns

    def filterAcceptsRow(self, row, parentIndex):
        # No macro selected yet: show nothing.
        # (was ``== None``; identity check is the correct idiom)
        if self.macroIndex() is None:
            return False
        if self.macroIndex() == parentIndex:
            child = self.sourceModel().index(row, 0, parentIndex)
            node = self.sourceModel().nodeFromIndex(child)
            if not isinstance(node, macro.ParamNode):
                return False
        return True
| sagiss/sardana | src/sardana/taurus/qt/qtgui/extra_macroexecutor/sequenceeditor/model.py | Python | lgpl-3.0 | 14,281 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds the required 'titulo' field to
    # TarefaDeProgramacao (with a one-off default) and makes 'descricao'
    # optional (blank=True). Do not edit applied migrations.

    dependencies = [
        ('programacao', '0004_auto_20141221_1952'),
    ]

    operations = [
        migrations.AddField(
            model_name='tarefadeprogramacao',
            name='titulo',
            field=models.CharField(default='Titulo', max_length=100, verbose_name='T\xedtulo', blank=True),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='tarefadeprogramacao',
            name='descricao',
            field=models.TextField(verbose_name='Descri\xe7\xe3o da Atividade', blank=True),
        ),
    ]
| dudanogueira/microerp | microerp/programacao/migrations/0005_auto_20141221_2023.py | Python | lgpl-3.0 | 721 |
"""
WSGI config for django_webapp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os

# Point Django at the project settings before the application object is built.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_webapp.settings")

from django.core.wsgi import get_wsgi_application

# WSGI entry point used by application servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| koehlma/pygrooveshark | examples/django_webapp/django_webapp/wsgi.py | Python | lgpl-3.0 | 401 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2013 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit import fileutils
import os.path
import logging
log = logging.getLogger(__name__)

# Transformation priority within the guessit pipeline.
priority = 255


def process(mtree):
    """first split our path into dirs + basename + ext

    :return: the filename split into [ dir*, basename, ext ]
    """
    parts = fileutils.split_path(mtree.value)
    filename = parts.pop(-1)
    stem, ext = os.path.splitext(filename)
    # drop the leading '.' from the extension
    parts += [stem, ext[1:]]
    mtree.split_on_components(parts)
| nvbn/guessit | guessit/transfo/split_path_components.py | Python | lgpl-3.0 | 1,367 |
#!/usr/bin/python3
import logging
import os
import sys
import click
import click_log
import colorama
from typing import Optional
from rxncon.input.excel_book.excel_book import ExcelBook
from rxncon.simulation.boolean.boolean_model import SmoothingStrategy, KnockoutStrategy, OverexpressionStrategy
from rxncon.simulation.boolean.boolnet_from_boolean_model import QuantitativeContingencyStrategy, \
boolnet_strs_from_rxncon
colorama.init()  # enable ANSI colour handling (needed for Windows terminals)
LOGGER = logging.getLogger(__name__)  # module-level logger
def write_boolnet(excel_filename: str, smoothing_strategy: SmoothingStrategy, knockout_strategy: KnockoutStrategy,
                  overexpression_strategy: OverexpressionStrategy, k_plus_strategy: QuantitativeContingencyStrategy,
                  k_minus_strategy: QuantitativeContingencyStrategy, base_name: Optional[str] = None):
    """Read a rxncon Excel file and write the BoolNet model, symbol and
    initial-value files next to it, named after *base_name* (defaults to the
    Excel file's own base name)."""
    if not base_name:
        base_name = os.path.splitext(os.path.basename(excel_filename))[0]

    out_dir = os.path.dirname(excel_filename)
    model_path = os.path.join(out_dir, '{0}.boolnet'.format(base_name))
    symbol_path = os.path.join(out_dir, '{0}_symbols.csv'.format(base_name))
    initial_val_path = os.path.join(out_dir, '{0}_initial_vals.csv'.format(base_name))

    print('Reading in Excel file [{}] ...'.format(excel_filename))
    book = ExcelBook(excel_filename)
    system = book.rxncon_system
    print('Constructed rxncon system: [{} reactions], [{} contingencies]'
          .format(len(system.reactions), len(system.contingencies)))

    print('Generating BoolNet output using smoothing strategy [{}] ...'.format(smoothing_strategy.name))
    model_str, symbol_str, initial_val_str = boolnet_strs_from_rxncon(system, smoothing_strategy,
                                                                      knockout_strategy,
                                                                      overexpression_strategy, k_plus_strategy,
                                                                      k_minus_strategy)

    print('Writing BoolNet model file [{}] ...'.format(model_path))
    with open(model_path, mode='w') as f:
        f.write(model_str)

    print('Writing BoolNet symbol file [{}] ...'.format(symbol_path))
    with open(symbol_path, mode='w') as f:
        f.write(symbol_str)

    print('Writing BoolNet initial value file [{}] ...'.format(initial_val_path))
    with open(initial_val_path, mode='w') as f:
        f.write(initial_val_str)
# Human-readable option values accepted on the command line, one list per
# strategy enum; used both for click help text and for validation below.
valid_smoothing_strategies = [strategy.value for strategy in SmoothingStrategy.__members__.values()]  # type: ignore
valid_knockout_strategies = [strategy.value for strategy in KnockoutStrategy.__members__.values()]  # type: ignore
valid_overexpression_strategies = [strategy.value for strategy in OverexpressionStrategy.__members__.values()]  # type: ignore
valid_quantitative_contingency_strategies = [strategy.value for strategy in
                                             QuantitativeContingencyStrategy.__members__.values()]  # type: ignore
def _strategy_validator(strategy_cls, valid_values):
    """Build a click option callback that accepts only values convertible to
    *strategy_cls*, raising click.BadParameter listing *valid_values* otherwise.

    The four public validators below were previously four copy-pasted
    functions differing only in the enum class and message; this factory
    keeps them consistent.
    """
    def _validate(ctx, param, value):
        try:
            strategy_cls(value)
            return value
        except ValueError:
            raise click.BadParameter('Valid strategies are: {}'.format(', '.join(valid_values)))
    return _validate

# Public click callbacks — interface unchanged: each takes (ctx, param, value)
# and returns the validated raw string.
validate_smoothing_strategy = _strategy_validator(SmoothingStrategy, valid_smoothing_strategies)
validate_knockout_strategy = _strategy_validator(KnockoutStrategy, valid_knockout_strategies)
validate_overexpression_strategy = _strategy_validator(OverexpressionStrategy, valid_overexpression_strategies)
validate_quantitative_contingency_strategy = _strategy_validator(QuantitativeContingencyStrategy,
                                                                 valid_quantitative_contingency_strategies)
# CLI entry point: converts an Excel rxncon definition into BoolNet files.
# Each strategy option is validated by its callback above, so by the time
# run() executes the raw strings are guaranteed to convert cleanly.
@click.command()
@click.option('--smoothing', default='smooth_production_sources',
              help='Smoothing strategy. Default: smooth_production_sources. Choices: {}'.format(
                  ', '.join(valid_smoothing_strategies)),
              callback=validate_smoothing_strategy)
@click.option('--knockout', default='no_knockout',
              help='Generate knockouts. Default: no_knockout. Choices: {}'.format(', '.join(valid_knockout_strategies)),
              callback=validate_knockout_strategy)
@click.option('--overexpression', default='no_overexpression',
              help='Generate overexpressions. Default: no_overexpression. Choices: {}'.format(
                  ', '.join(valid_overexpression_strategies)),
              callback=validate_overexpression_strategy)
@click.option('--k_plus', default='strict',
              help='Strategy for handling k+ contingencies. Default: strict. Choices: {}'.format(
                  ', '.join(valid_quantitative_contingency_strategies)),
              callback=validate_quantitative_contingency_strategy)
@click.option('--k_minus', default='strict',
              help='Strategy for handling k- contingencies. Default: strict. Choices: {}'.format(
                  ', '.join(valid_quantitative_contingency_strategies)),
              callback=validate_quantitative_contingency_strategy)
@click.option('--output', default=None,
              help='Base name for output files. Default: \'fn\' for input file \'fn.xls\'')
@click.argument('excel_file')
@click_log.simple_verbosity_option(default='WARNING')
@click_log.init()
def run(overexpression, knockout, smoothing, output, excel_file, k_plus, k_minus):
    # Convert the validated option strings into their enum counterparts;
    # these constructors cannot raise here because the callbacks already
    # checked convertibility.
    smoothing_strategy = SmoothingStrategy(smoothing)
    knockout_strategy = KnockoutStrategy(knockout)
    overexpression_strategy = OverexpressionStrategy(overexpression)
    k_plus_strategy = QuantitativeContingencyStrategy(k_plus)
    k_minus_strategy = QuantitativeContingencyStrategy(k_minus)
    # Delegate the actual Excel -> BoolNet conversion and file writing.
    write_boolnet(excel_file, smoothing_strategy, knockout_strategy, overexpression_strategy,
                  k_plus_strategy, k_minus_strategy, output)
def setup_logging_colors():
    """Install the per-level color table used by click_log's ColorFormatter.

    Error-like levels are rendered red, everything else yellow.
    """
    color_table = {}
    for level_name in ('error', 'exception', 'critical'):
        color_table[level_name] = {'fg': 'red'}
    for level_name in ('debug', 'warning', 'info'):
        color_table[level_name] = {'fg': 'yellow'}
    click_log.ColorFormatter.colors = color_table
def format(self, record):
    # Replacement for click_log.ColorFormatter.format (monkeypatched onto the
    # class below): prefixes every line of the log message with a colored,
    # padded level name plus the logger name in blue, then defers to the
    # stock logging.Formatter for final rendering.
    if not record.exc_info:
        level = record.levelname.lower()
        if level in self.colors:
            padding_size = 7  # Assume just INFO / DEBUG entries.
            prefix = click.style('{}: '.format(level).ljust(padding_size),
                                 **self.colors[level])
            prefix += click.style('{} '.format(record.name), fg='blue')
            msg = record.msg
            # Normalize the message to str before splitting into lines.
            if isinstance(msg, bytes):
                msg = msg.decode(sys.getfilesystemencoding(),
                                 'replace')
            elif not isinstance(msg, str):
                msg = str(msg)
            # Prefix each line of a multi-line message individually.
            record.msg = '\n'.join(prefix + x for x in msg.splitlines())
    return logging.Formatter.format(self, record)
# Install the colorizing formatter onto click_log's ColorFormatter class.
click_log.ColorFormatter.format = format
if __name__ == '__main__':
    try:
        setup_logging_colors()
        run()
    except Exception as e:
        # click handles expected usage errors itself; anything escaping to
        # here is unexpected, so point the user at the verbose-logging flag.
        print('ERROR: {}\n{}\nPlease re-run this command with the \'-v DEBUG\' option.'.format(type(e), e))
| rxncon/rxncon | rxncon2boolnet.py | Python | lgpl-3.0 | 7,704 |
#!/usr/bin/python3
# -*- encoding: utf-8 -*-
import unittest
from model import *
from example_data import expenses, payments, participations, persons, events
# Module-level fixture: one Gruppenkasse populated once with the example
# data; all test cases below read from this shared instance.
kasse = Gruppenkasse.create_new()
kasse.fill_with(expenses, payments, participations)
class TestGruppenkasse(unittest.TestCase):
    """Checks the example-data Gruppenkasse against the expected persons,
    events, balances and payments."""

    def setUp(self):
        ...

    def test_persons(self):
        # Every person in the kasse must appear in the example data.
        names = [person.name for person in kasse.persons]
        for name in names:
            self.assertTrue(name in persons, msg=name)

    def test_events(self):
        print(kasse.person_dict)
        # Event names and the example data must match in both directions.
        names = [event.name for event in kasse.events]
        for name in names:
            self.assertTrue(name in events, msg=name)
        for name in events:
            self.assertTrue(name in names, msg=name)

    def test_event(self):
        for event in kasse.events:
            ...  # print(event)

    def test_person(self):
        # Print each person with their balance in whole currency units.
        for person in kasse.persons:
            print(person, "\t{:5.2f}".format(person.balance / 100))

    def test_payments(self):
        print(kasse.payments)
if __name__ == '__main__':
    # Discover and run all TestGruppenkasse cases via the unittest CLI runner.
    unittest.main()
| RincewindWizzard/gruppenkasse-gtk | src/tests/test_model.py | Python | lgpl-3.0 | 1,143 |
#!/usr/bin/env python
# This file should be compatible with both Python 2 and 3.
# If it is not, please file a bug report.
"""
Contains code that prepairs a subuser's image to be run.
"""
#external imports
import os
#internal imports
from subuserlib.classes.userOwnedObject import UserOwnedObject
class RunReadyImage(UserOwnedObject):
  """Wraps a subuser's Docker image together with the run-time preparation
  (user account, home directory, optional serial-device groups and sudo)
  that must be applied before the image can actually be run."""
  def __init__(self,user,subuser):
    self.__subuser = subuser
    # Image id is resolved lazily: built by setup() or read back from the
    # subuser's runtime cache in getId().
    self.__id = None
    UserOwnedObject.__init__(self,user)
  def setup(self):
    # Build the run-ready image only if no id is recorded in the subuser's
    # runtime cache yet, then persist the new id there.
    if not "run-ready-image-id" in self.getSubuser().getRuntimeCache():
      self.__id = self.build()
      self.getSubuser().getRuntimeCache()["run-ready-image-id"] = self.__id
      self.getSubuser().getRuntimeCache().save()
  def getSubuser(self):
    return self.__subuser
  def getId(self):
    # Fall back to the cached id when setup() did not build in this session.
    if not self.__id:
      self.__id = self.getSubuser().getRuntimeCache()["run-ready-image-id"]
    return self.__id
  def generateImagePreparationDockerfile(self):
    """
    There is still some preparation that needs to be done before an image is ready to be run. But this preparation requires run time information, so we cannot preform that preparation at build time.

    Returns the Dockerfile contents as a string.  Each RUN line tolerates
    useradd/groupadd exit status 4 (uid/gid exists) and 9 (name exists) so
    re-preparing an image is idempotent.
    """
    dockerfileContents = "FROM "+self.getSubuser().getImageId()+"\n"
    # Create a user inside the image matching the host user's uid/name.
    dockerfileContents += "RUN useradd --uid="+str(self.getUser().getEndUser().uid)+" "+self.getUser().getEndUser().name+" ;export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo uid exists ; elif [ $exitstatus -eq 9 ]; then echo username exists. ; else exit $exitstatus ; fi\n"
    # Ensure the home directory exists and is owned by that user.
    dockerfileContents += "RUN test -d "+self.getUser().getEndUser().homeDir+" || mkdir "+self.getUser().getEndUser().homeDir+" && chown "+self.getUser().getEndUser().name+" "+self.getUser().getEndUser().homeDir+"\n"
    if self.getSubuser().getPermissions()["serial-devices"]:
      # Serial-device access: make sure the dialout/uucp groups exist and
      # add the user to both.
      dockerfileContents += "RUN groupadd dialout; export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo gid exists ; elif [ $exitstatus -eq 9 ]; then echo groupname exists. ; else exit $exitstatus ; fi\n"
      dockerfileContents += "RUN groupadd uucp; export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo gid exists ; elif [ $exitstatus -eq 9 ]; then echo groupname exists. ; else exit $exitstatus ; fi\n"
      dockerfileContents += "RUN usermod -a -G dialout "+self.getUser().getEndUser().name+"\n"
      dockerfileContents += "RUN usermod -a -G uucp "+self.getUser().getEndUser().name+"\n"
    if self.getSubuser().getPermissions()["sudo"]:
      # Grant passwordless sudo via a mode-0440 drop-in (umask 337).
      dockerfileContents += "RUN (umask 337; echo \""+self.getUser().getEndUser().name+" ALL=(ALL) NOPASSWD: ALL\" > /etc/sudoers.d/allowsudo )\n"
    return dockerfileContents
  def build(self):
    """
    Returns the Id of the Docker image to be run.
    """
    return self.getUser().getDockerDaemon().build(None,quietClient=True,useCache=True,forceRm=True,rm=True,dockerfile=self.generateImagePreparationDockerfile())
| ruipgpinheiro/subuser | logic/subuserlib/classes/subuserSubmodules/run/runReadyImage.py | Python | lgpl-3.0 | 2,881 |
import logging
import asyncio
logger = logging.getLogger(__name__)
class InfluxLineProtocol(asyncio.DatagramProtocol):
    """Asyncio UDP protocol that formats and sends InfluxDB line-protocol
    records.

    ``fmt`` does the pure formatting; ``write_one``/``write_many`` push the
    formatted line(s) out through the UDP transport created for this
    protocol instance.
    """

    def __init__(self, loop):
        self.loop = loop
        self.transport = None  # set by connection_made()

    def connection_made(self, transport):
        self.transport = transport

    @staticmethod
    def fmt(measurement, fields, *, tags=None, timestamp=None):
        """Format one InfluxDB line-protocol record.

        :param measurement: measurement name; spaces and commas are escaped.
        :param fields: mapping of field key -> bool/int/float/str value.
        :param tags: optional mapping of tag key -> tag value (strings).
        :param timestamp: optional integer timestamp, appended verbatim.
        :return: the formatted line (no trailing newline).
        :raises TypeError: for a field value of any unsupported type.
        """
        def escape(s):
            # Tag keys/values and field keys escape space, comma and equals.
            return s.replace(" ", "\\ ").replace(",", "\\,").replace("=", "\\=")

        # Measurement names escape spaces and commas (but not '=').
        msg = measurement.replace(" ", "\\ ").replace(",", "\\,")
        for key, value in (tags or {}).items():
            msg += ",{}={}".format(escape(key), escape(value))
        msg += " "
        parts = []
        for key, value in fields.items():
            # BUGFIX: bool must be tested *before* int — bool is an int
            # subclass, so the previous int-first ordering serialized
            # True/False as 1i/0i, making InfluxDB store integers instead of
            # booleans (and the old bool branch would have crashed anyway:
            # '{:s}'.format(True) raises).
            if isinstance(value, bool):
                parts.append("{}={}".format(escape(key), "true" if value else "false"))
            elif isinstance(value, int):
                parts.append("{}={:d}i".format(escape(key), value))
            elif isinstance(value, float):
                parts.append("{}={:g}".format(escape(key), value))
            elif isinstance(value, str):
                parts.append('{}="{}"'.format(escape(key), value.replace('"', '\\"')))
            else:
                raise TypeError(value)
        msg += ",".join(parts)
        if timestamp:
            msg += " {:d}".format(timestamp)
        return msg

    def write_one(self, *args, **kwargs):
        """Format a single record (see fmt) and send it as one datagram."""
        msg = self.fmt(*args, **kwargs)
        logger.debug(msg)
        self.transport.sendto(msg.encode())

    def write_many(self, lines):
        """Send several already-formatted lines as one datagram."""
        msg = "\n".join(lines)
        logger.debug(msg)
        self.transport.sendto(msg.encode())

    def datagram_received(self, data, addr):
        # We never expect inbound traffic on this socket; treat it as fatal.
        logger.error("recvd %s %s", data, addr)
        self.transport.close()

    def error_received(self, exc):
        logger.error("error %s", exc)

    def connection_lost(self, exc):
        logger.info("lost conn %s", exc)
| jordens/sensortag | influx_udp.py | Python | lgpl-3.0 | 2,082 |
#! /usr/bin/env python
# Regression test for TruncatedNormalFactory (OpenTURNS, Python 2 syntax):
# fits the factory against samples drawn from known TruncatedNormal
# distributions and prints both, for comparison with the expected output.
from openturns import *
TESTPREAMBLE()
# Fix the RNG seed so the sampled data -- and therefore the printed
# estimates -- are reproducible.
RandomGenerator().SetSeed(0)
try :
    # Four test cases sharing mu=0, sigma=1 but with different truncation
    # intervals [a[i], b[i]].
    mu = NumericalPoint(4, 0.0)
    sigma = NumericalPoint(4, 1.0)
    a = NumericalPoint(4)
    b = NumericalPoint(4)
    a[0] = -4.0
    b[0] = 4.0
    a[1] = -1.0
    b[1] = 4.0
    a[2] = 1.0
    b[2] = 2.0
    a[3] = 3.0
    b[3] = 6.0
    # Limit printed precision so the output is stable across platforms.
    PlatformInfo.SetNumericalPrecision(4)
    for i in range(4) :
        distribution = TruncatedNormal(mu[i], sigma[i], a[i], b[i])
        size = 10000
        sample = distribution.getSample(size)
        factory = TruncatedNormalFactory()
        # Generic build(): from a sample, from defaults, from parameters.
        estimatedDistribution = factory.build(sample)
        print "distribution=", repr(distribution)
        print "Estimated distribution=", repr(estimatedDistribution)
        estimatedDistribution = factory.build()
        print "Default distribution=", estimatedDistribution
        estimatedDistribution = factory.build(distribution.getParametersCollection())
        print "Distribution from parameters=", estimatedDistribution
        # Typed buildAsTruncatedNormal(): same three variants, returning a
        # TruncatedNormal directly.
        estimatedTruncatedNormal = factory.buildAsTruncatedNormal(sample)
        print "TruncatedNormal =", distribution
        print "Estimated TruncatedNormal=", estimatedTruncatedNormal
        estimatedTruncatedNormal = factory.buildAsTruncatedNormal()
        print "Default TruncatedNormal=", estimatedTruncatedNormal
        estimatedTruncatedNormal = factory.buildAsTruncatedNormal(distribution.getParametersCollection())
        print "TruncatedNormal from parameters=", estimatedTruncatedNormal
except :
    # Old OpenTURNS test harness convention: report the exception type and
    # value (sys.exc_type / sys.exc_value are Python 2 only).
    import sys
    print "t_TruncatedNormalFactory_std.py", sys.exc_type, sys.exc_value
| dbarbier/privot | python/test/t_TruncatedNormalFactory_std.py | Python | lgpl-3.0 | 1,645 |
# Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.
from typing import Union, List, cast
import numpy
from copy import deepcopy
def immutableNDArray(nda: Union[List, numpy.ndarray]) -> Union[numpy.ndarray, None]:
    """Creates an immutable copy of the given narray

    If the array is already immutable then it just returns it.

    A list is converted to a fresh read-only float32 array; a writeable
    ndarray is copied and only the copy is frozen, leaving the caller's
    array untouched.

    :param nda: :type{numpy.ndarray} the array to copy. May be a list
    :return: :type{numpy.ndarray} an immutable narray, or None if ``nda`` is None
    """
    if nda is None:
        return None  # annotation reflects this: the result is Optional
    if isinstance(nda, list):  # isinstance (not `type(...) is list`) also accepts list subclasses
        data = numpy.array(nda, numpy.float32)
        data.flags.writeable = False
        return data
    data = cast(numpy.ndarray, nda)
    if not data.flags.writeable:
        # Already immutable: return as-is, no copy needed.
        return data
    # ndarray.copy() suffices (and is cheaper than deepcopy) for an array
    # buffer; freeze only the copy.
    frozen = data.copy()
    frozen.flags.writeable = False
    return frozen
| Ultimaker/Uranium | UM/Math/NumPyUtil.py | Python | lgpl-3.0 | 830 |
# -*- coding: utf-8 -*-
"""
DO NOT EDIT THIS FILE!
It is automatically generated from opcfoundation.org schemas.
"""
from opcua import ua
from opcua.ua import NodeId, QualifiedName, NumericNodeId, StringNodeId, GuidNodeId
from opcua.ua import NodeClass, LocalizedText
def create_standard_address_space_Part11(server):
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(56, 0)
node.BrowseName = QualifiedName('HasHistoricalConfiguration', 0)
node.NodeClass = NodeClass.ReferenceType
node.ParentNodeId = NumericNodeId(44, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.ReferenceTypeAttributes()
attrs.Description = LocalizedText("The type for a reference to the historical configuration for a data variable.")
attrs.DisplayName = LocalizedText("HasHistoricalConfiguration")
attrs.InverseName = LocalizedText("HistoricalConfigurationOf")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(56, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(44, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11192, 0)
node.BrowseName = QualifiedName('HistoryServerCapabilities', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(2268, 0)
node.ReferenceTypeId = NumericNodeId(47, 0)
node.TypeDefinition = NumericNodeId(2330, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("HistoryServerCapabilities")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11193, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11242, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11273, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11274, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11196, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11197, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11198, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11199, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11200, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11281, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11282, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11283, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11502, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11275, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(47, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11201, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(47, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2268, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11192, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2330, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11193, 0)
node.BrowseName = QualifiedName('AccessHistoryDataCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("AccessHistoryDataCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11193, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11193, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11242, 0)
node.BrowseName = QualifiedName('AccessHistoryEventsCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("AccessHistoryEventsCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11242, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11242, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11273, 0)
node.BrowseName = QualifiedName('MaxReturnDataValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("MaxReturnDataValues")
attrs.DataType = ua.NodeId(ua.ObjectIds.UInt32)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11273, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11273, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11274, 0)
node.BrowseName = QualifiedName('MaxReturnEventValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("MaxReturnEventValues")
attrs.DataType = ua.NodeId(ua.ObjectIds.UInt32)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11274, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11274, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11196, 0)
node.BrowseName = QualifiedName('InsertDataCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("InsertDataCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11196, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11196, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11197, 0)
node.BrowseName = QualifiedName('ReplaceDataCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("ReplaceDataCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11197, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11197, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11198, 0)
node.BrowseName = QualifiedName('UpdateDataCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("UpdateDataCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11198, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11198, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11199, 0)
node.BrowseName = QualifiedName('DeleteRawCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("DeleteRawCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11199, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11199, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11200, 0)
node.BrowseName = QualifiedName('DeleteAtTimeCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("DeleteAtTimeCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11200, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11200, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11281, 0)
node.BrowseName = QualifiedName('InsertEventCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("InsertEventCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11281, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11281, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11282, 0)
node.BrowseName = QualifiedName('ReplaceEventCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("ReplaceEventCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11282, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11282, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11283, 0)
node.BrowseName = QualifiedName('UpdateEventCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("UpdateEventCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11283, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11283, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11502, 0)
node.BrowseName = QualifiedName('DeleteEventCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("DeleteEventCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11502, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11502, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11275, 0)
node.BrowseName = QualifiedName('InsertAnnotationCapability', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("InsertAnnotationCapability")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11275, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11275, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11201, 0)
node.BrowseName = QualifiedName('AggregateFunctions', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(11192, 0)
node.ReferenceTypeId = NumericNodeId(47, 0)
node.TypeDefinition = NumericNodeId(61, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("AggregateFunctions")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11201, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(61, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(47, 0)
ref.SourceNodeId = NumericNodeId(11201, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11192, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11214, 0)
node.BrowseName = QualifiedName('Annotations', 0)
node.NodeClass = NodeClass.Variable
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("Annotations")
attrs.DataType = NumericNodeId(891, 0)
attrs.ValueRank = -2
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11214, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(2318, 0)
node.BrowseName = QualifiedName('HistoricalDataConfigurationType', 0)
node.NodeClass = NodeClass.ObjectType
node.ParentNodeId = NumericNodeId(58, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.ObjectTypeAttributes()
attrs.DisplayName = LocalizedText("HistoricalDataConfigurationType")
attrs.IsAbstract = False
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(47, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3059, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(47, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11876, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2323, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2324, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2325, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2326, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2327, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2328, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11499, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11500, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(2318, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(58, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3059, 0)
node.BrowseName = QualifiedName('AggregateConfiguration', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(2318, 0)
node.ReferenceTypeId = NumericNodeId(47, 0)
node.TypeDefinition = NumericNodeId(11187, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("AggregateConfiguration")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3059, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11168, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3059, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11169, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3059, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11170, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3059, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11171, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3059, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11187, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3059, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(47, 0)
ref.SourceNodeId = NumericNodeId(3059, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11168, 0)
node.BrowseName = QualifiedName('TreatUncertainAsBad', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3059, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("TreatUncertainAsBad")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11168, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(11168, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11168, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3059, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11169, 0)
node.BrowseName = QualifiedName('PercentDataBad', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3059, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("PercentDataBad")
attrs.DataType = ua.NodeId(ua.ObjectIds.Byte)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11169, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(11169, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11169, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3059, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11170, 0)
node.BrowseName = QualifiedName('PercentDataGood', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3059, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("PercentDataGood")
attrs.DataType = ua.NodeId(ua.ObjectIds.Byte)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11170, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(11170, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11170, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3059, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11171, 0)
node.BrowseName = QualifiedName('UseSlopedExtrapolation', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3059, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("UseSlopedExtrapolation")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11171, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(11171, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11171, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3059, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11876, 0)
node.BrowseName = QualifiedName('AggregateFunctions', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(2318, 0)
node.ReferenceTypeId = NumericNodeId(47, 0)
node.TypeDefinition = NumericNodeId(61, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("AggregateFunctions")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11876, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(61, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(11876, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(80, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(47, 0)
ref.SourceNodeId = NumericNodeId(11876, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(2323, 0)
node.BrowseName = QualifiedName('Stepped', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(2318, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("Stepped")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(2323, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(2323, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2323, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(2324, 0)
node.BrowseName = QualifiedName('Definition', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(2318, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("Definition")
attrs.DataType = ua.NodeId(ua.ObjectIds.String)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(2324, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(2324, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(80, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2324, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(2325, 0)
node.BrowseName = QualifiedName('MaxTimeInterval', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(2318, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("MaxTimeInterval")
attrs.DataType = NumericNodeId(290, 0)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(2325, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(2325, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(80, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2325, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(2326, 0)
node.BrowseName = QualifiedName('MinTimeInterval', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(2318, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("MinTimeInterval")
attrs.DataType = NumericNodeId(290, 0)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(2326, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(2326, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(80, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2326, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(2327, 0)
node.BrowseName = QualifiedName('ExceptionDeviation', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(2318, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("ExceptionDeviation")
attrs.DataType = ua.NodeId(ua.ObjectIds.Double)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(2327, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(2327, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(80, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2327, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(2328, 0)
node.BrowseName = QualifiedName('ExceptionDeviationFormat', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(2318, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("ExceptionDeviationFormat")
attrs.DataType = NumericNodeId(890, 0)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(2328, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(2328, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(80, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(2328, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11499, 0)
node.BrowseName = QualifiedName('StartOfArchive', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(2318, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("StartOfArchive")
attrs.DataType = NumericNodeId(294, 0)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11499, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(11499, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(80, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11499, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11500, 0)
node.BrowseName = QualifiedName('StartOfOnlineArchive', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(2318, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("StartOfOnlineArchive")
attrs.DataType = NumericNodeId(294, 0)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11500, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(11500, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(80, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11500, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11202, 0)
node.BrowseName = QualifiedName('HA Configuration', 0)
node.NodeClass = NodeClass.Object
node.TypeDefinition = NumericNodeId(2318, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("HA Configuration")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(47, 0)
ref.SourceNodeId = NumericNodeId(11202, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11203, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11202, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11208, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11202, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2318, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11203, 0)
node.BrowseName = QualifiedName('AggregateConfiguration', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(11202, 0)
node.ReferenceTypeId = NumericNodeId(47, 0)
node.TypeDefinition = NumericNodeId(11187, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("AggregateConfiguration")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11203, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11204, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11203, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11205, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11203, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11206, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11203, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11207, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11203, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11187, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(47, 0)
ref.SourceNodeId = NumericNodeId(11203, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11202, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11204, 0)
node.BrowseName = QualifiedName('TreatUncertainAsBad', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11203, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("TreatUncertainAsBad")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11204, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11204, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11203, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11205, 0)
node.BrowseName = QualifiedName('PercentDataBad', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11203, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("PercentDataBad")
attrs.DataType = ua.NodeId(ua.ObjectIds.Byte)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11205, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11205, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11203, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11206, 0)
node.BrowseName = QualifiedName('PercentDataGood', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11203, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("PercentDataGood")
attrs.DataType = ua.NodeId(ua.ObjectIds.Byte)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11206, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11206, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11203, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11207, 0)
node.BrowseName = QualifiedName('UseSlopedExtrapolation', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11203, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("UseSlopedExtrapolation")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11207, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11207, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11203, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11208, 0)
node.BrowseName = QualifiedName('Stepped', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(11202, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("Stepped")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(11208, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(11208, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(11202, 0)
refs.append(ref)
server.add_references(refs)
# ---------------------------------------------------------------------------
# The node/reference groups below all follow one of three fixed shapes
# (property variable, object, object type), so they are emitted from small
# data tables via local helpers instead of repeating the field-by-field
# construction for every node.  The helpers reproduce the expanded form
# exactly: same field values, same per-node add_nodes([item]) call, and the
# same ordering of AddReferencesItem entries in each add_references call.
# ---------------------------------------------------------------------------

def _make_ref(source_id, is_forward, ref_type_id, target_id):
    # Build one AddReferencesItem.  TargetNodeClass is always set to
    # NodeClass.DataType, mirroring the generated expanded code.
    item = ua.AddReferencesItem()
    item.IsForward = is_forward
    item.ReferenceTypeId = NumericNodeId(ref_type_id, 0)
    item.SourceNodeId = NumericNodeId(source_id, 0)
    item.TargetNodeClass = NodeClass.DataType
    item.TargetNodeId = NumericNodeId(target_id, 0)
    return item

def _add_variable_node(nid, name, data_type, value_rank,
                       parent=None, with_modelling_rule=True):
    # Declare a Variable node typed 68 (PropertyType).  When a parent is
    # given, link it via HasProperty (46); unless disabled, also emit the
    # HasModellingRule (37) reference to 78 (Mandatory).
    item = ua.AddNodesItem()
    item.RequestedNewNodeId = NumericNodeId(nid, 0)
    item.BrowseName = QualifiedName(name, 0)
    item.NodeClass = NodeClass.Variable
    if parent is not None:
        item.ParentNodeId = NumericNodeId(parent, 0)
        item.ReferenceTypeId = NumericNodeId(46, 0)
    item.TypeDefinition = NumericNodeId(68, 0)
    var_attrs = ua.VariableAttributes()
    var_attrs.DisplayName = LocalizedText(name)
    var_attrs.DataType = data_type
    var_attrs.ValueRank = value_rank
    item.NodeAttributes = var_attrs
    server.add_nodes([item])
    # Reference order matches the expanded code: type definition first,
    # then modelling rule, then the inverse link back to the parent.
    ref_items = [_make_ref(nid, True, 40, 68)]
    if with_modelling_rule:
        ref_items.append(_make_ref(nid, True, 37, 78))
    if parent is not None:
        ref_items.append(_make_ref(nid, False, 46, parent))
    server.add_references(ref_items)

def _add_object_type_node(nid, name, parent, is_abstract, forward_refs):
    # Declare an ObjectType node under `parent` (HasSubtype, 45), emit its
    # forward references in table order, then the inverse subtype link.
    item = ua.AddNodesItem()
    item.RequestedNewNodeId = NumericNodeId(nid, 0)
    item.BrowseName = QualifiedName(name, 0)
    item.NodeClass = NodeClass.ObjectType
    item.ParentNodeId = NumericNodeId(parent, 0)
    item.ReferenceTypeId = NumericNodeId(45, 0)
    type_attrs = ua.ObjectTypeAttributes()
    type_attrs.DisplayName = LocalizedText(name)
    type_attrs.IsAbstract = is_abstract
    item.NodeAttributes = type_attrs
    server.add_nodes([item])
    ref_items = [_make_ref(nid, True, rt, tgt) for rt, tgt in forward_refs]
    ref_items.append(_make_ref(nid, False, 45, parent))
    server.add_references(ref_items)

# HistoricalEventFilter (11215): standalone property with no parent link and
# no modelling rule -- only the HasTypeDefinition reference is emitted.
_add_variable_node(11215, 'HistoricalEventFilter', NumericNodeId(725, 0), -2,
                   with_modelling_rule=False)

# HistoryServerCapabilitiesType (2330): subtype of 58, with HasProperty (46)
# references to its capability properties and a HasComponent (47) reference
# to AggregateFunctions (11172).
_add_object_type_node(2330, 'HistoryServerCapabilitiesType', 58, False, [
    (46, 2331), (46, 2332), (46, 11268), (46, 11269), (46, 2334),
    (46, 2335), (46, 2336), (46, 2337), (46, 2338), (46, 11278),
    (46, 11279), (46, 11280), (46, 11501), (46, 11270), (47, 11172),
])

# The capability properties of 2330: scalar Boolean flags plus two UInt32
# limits, all mandatory.
for _nid, _name, _dtype_id in (
        (2331, 'AccessHistoryDataCapability', ua.ObjectIds.Boolean),
        (2332, 'AccessHistoryEventsCapability', ua.ObjectIds.Boolean),
        (11268, 'MaxReturnDataValues', ua.ObjectIds.UInt32),
        (11269, 'MaxReturnEventValues', ua.ObjectIds.UInt32),
        (2334, 'InsertDataCapability', ua.ObjectIds.Boolean),
        (2335, 'ReplaceDataCapability', ua.ObjectIds.Boolean),
        (2336, 'UpdateDataCapability', ua.ObjectIds.Boolean),
        (2337, 'DeleteRawCapability', ua.ObjectIds.Boolean),
        (2338, 'DeleteAtTimeCapability', ua.ObjectIds.Boolean),
        (11278, 'InsertEventCapability', ua.ObjectIds.Boolean),
        (11279, 'ReplaceEventCapability', ua.ObjectIds.Boolean),
        (11280, 'UpdateEventCapability', ua.ObjectIds.Boolean),
        (11501, 'DeleteEventCapability', ua.ObjectIds.Boolean),
        (11270, 'InsertAnnotationCapability', ua.ObjectIds.Boolean),
):
    _add_variable_node(_nid, _name, ua.NodeId(_dtype_id), -1, parent=2330)

# AggregateFunctions (11172): mandatory Object component of 2330, typed 61.
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(11172, 0)
node.BrowseName = QualifiedName('AggregateFunctions', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(2330, 0)
node.ReferenceTypeId = NumericNodeId(47, 0)
node.TypeDefinition = NumericNodeId(61, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("AggregateFunctions")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
server.add_references([
    _make_ref(11172, True, 40, 61),
    _make_ref(11172, True, 37, 78),
    _make_ref(11172, False, 47, 2330),
])

# AuditHistoryEventUpdateEventType (2999): abstract subtype of 2104 with
# five mandatory properties.
_add_object_type_node(2999, 'AuditHistoryEventUpdateEventType', 2104, True, [
    (46, 3025), (46, 3028), (46, 3003), (46, 3029), (46, 3030),
])
for _nid, _name, _dtype, _rank in (
        (3025, 'UpdatedNode', ua.NodeId(ua.ObjectIds.NodeId), -1),
        (3028, 'PerformInsertReplace', NumericNodeId(11293, 0), -1),
        (3003, 'Filter', NumericNodeId(725, 0), -1),
        (3029, 'NewValues', NumericNodeId(920, 0), 1),
        (3030, 'OldValues', NumericNodeId(920, 0), 1),
):
    _add_variable_node(_nid, _name, _dtype, _rank, parent=2999)

# AuditHistoryValueUpdateEventType (3006): abstract subtype of 2104; its
# remaining properties (3031, 3032, 3033) are declared after this section.
_add_object_type_node(3006, 'AuditHistoryValueUpdateEventType', 2104, True, [
    (46, 3026), (46, 3031), (46, 3032), (46, 3033),
])
_add_variable_node(3026, 'UpdatedNode', ua.NodeId(ua.ObjectIds.NodeId), -1,
                   parent=3006)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3031, 0)
node.BrowseName = QualifiedName('PerformInsertReplace', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3006, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("PerformInsertReplace")
attrs.DataType = NumericNodeId(11293, 0)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3031, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3031, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3031, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3006, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3032, 0)
node.BrowseName = QualifiedName('NewValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3006, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("NewValues")
attrs.DataType = NumericNodeId(23, 0)
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3032, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3032, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3032, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3006, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3033, 0)
node.BrowseName = QualifiedName('OldValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3006, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("OldValues")
attrs.DataType = NumericNodeId(23, 0)
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3033, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3033, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3033, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3006, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3012, 0)
node.BrowseName = QualifiedName('AuditHistoryDeleteEventType', 0)
node.NodeClass = NodeClass.ObjectType
node.ParentNodeId = NumericNodeId(2104, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.ObjectTypeAttributes()
attrs.DisplayName = LocalizedText("AuditHistoryDeleteEventType")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3012, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3027, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(3012, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(2104, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3027, 0)
node.BrowseName = QualifiedName('UpdatedNode', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3012, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("UpdatedNode")
attrs.DataType = ua.NodeId(ua.ObjectIds.NodeId)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3027, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3027, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3027, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3012, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3014, 0)
node.BrowseName = QualifiedName('AuditHistoryRawModifyDeleteEventType', 0)
node.NodeClass = NodeClass.ObjectType
node.ParentNodeId = NumericNodeId(3012, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.ObjectTypeAttributes()
attrs.DisplayName = LocalizedText("AuditHistoryRawModifyDeleteEventType")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3014, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3015, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3014, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3016, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3014, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3017, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3014, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3034, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(3014, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3012, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3015, 0)
node.BrowseName = QualifiedName('IsDeleteModified', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3014, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("IsDeleteModified")
attrs.DataType = ua.NodeId(ua.ObjectIds.Boolean)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3015, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3015, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3015, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3014, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3016, 0)
node.BrowseName = QualifiedName('StartTime', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3014, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("StartTime")
attrs.DataType = NumericNodeId(294, 0)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3016, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3016, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3016, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3014, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3017, 0)
node.BrowseName = QualifiedName('EndTime', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3014, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("EndTime")
attrs.DataType = NumericNodeId(294, 0)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3017, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3017, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3017, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3014, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3034, 0)
node.BrowseName = QualifiedName('OldValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3014, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("OldValues")
attrs.DataType = NumericNodeId(23, 0)
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3034, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3034, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3034, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3014, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3019, 0)
node.BrowseName = QualifiedName('AuditHistoryAtTimeDeleteEventType', 0)
node.NodeClass = NodeClass.ObjectType
node.ParentNodeId = NumericNodeId(3012, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.ObjectTypeAttributes()
attrs.DisplayName = LocalizedText("AuditHistoryAtTimeDeleteEventType")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3019, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3020, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3019, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3021, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(3019, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3012, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3020, 0)
node.BrowseName = QualifiedName('ReqTimes', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3019, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("ReqTimes")
attrs.DataType = NumericNodeId(294, 0)
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3020, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3020, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3020, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3019, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3021, 0)
node.BrowseName = QualifiedName('OldValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3019, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("OldValues")
attrs.DataType = NumericNodeId(23, 0)
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3021, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3021, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3021, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3019, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3022, 0)
node.BrowseName = QualifiedName('AuditHistoryEventDeleteEventType', 0)
node.NodeClass = NodeClass.ObjectType
node.ParentNodeId = NumericNodeId(3012, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.ObjectTypeAttributes()
attrs.DisplayName = LocalizedText("AuditHistoryEventDeleteEventType")
attrs.IsAbstract = True
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3022, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3023, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3022, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3024, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(3022, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3012, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3023, 0)
node.BrowseName = QualifiedName('EventIds', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3022, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("EventIds")
attrs.DataType = ua.NodeId(ua.ObjectIds.ByteString)
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3023, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3023, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3023, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3022, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(3024, 0)
node.BrowseName = QualifiedName('OldValues', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(3022, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("OldValues")
attrs.DataType = NumericNodeId(920, 0)
attrs.ValueRank = -1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(3024, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(3024, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(3024, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(3022, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(891, 0)
node.BrowseName = QualifiedName('Annotation', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(22, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("Annotation")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(891, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(22, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(890, 0)
node.BrowseName = QualifiedName('ExceptionDeviationFormat', 0)
node.NodeClass = NodeClass.DataType
node.ParentNodeId = NumericNodeId(29, 0)
node.ReferenceTypeId = NumericNodeId(45, 0)
attrs = ua.DataTypeAttributes()
attrs.DisplayName = LocalizedText("ExceptionDeviationFormat")
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(890, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(7614, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(45, 0)
ref.SourceNodeId = NumericNodeId(890, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(29, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(7614, 0)
node.BrowseName = QualifiedName('EnumStrings', 0)
node.NodeClass = NodeClass.Variable
node.ParentNodeId = NumericNodeId(890, 0)
node.ReferenceTypeId = NumericNodeId(46, 0)
node.TypeDefinition = NumericNodeId(68, 0)
attrs = ua.VariableAttributes()
attrs.DisplayName = LocalizedText("EnumStrings")
attrs.DataType = ua.NodeId(ua.ObjectIds.LocalizedText)
attrs.Value = [LocalizedText('AbsoluteValue'),LocalizedText('PercentOfValue'),LocalizedText('PercentOfRange'),LocalizedText('PercentOfEURange'),LocalizedText('Unknown')]
attrs.ValueRank = 1
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(7614, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(68, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(37, 0)
ref.SourceNodeId = NumericNodeId(7614, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(78, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(46, 0)
ref.SourceNodeId = NumericNodeId(7614, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(890, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(893, 0)
node.BrowseName = QualifiedName('Default Binary', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(891, 0)
node.ReferenceTypeId = NumericNodeId(38, 0)
node.TypeDefinition = NumericNodeId(76, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("Default Binary")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(38, 0)
ref.SourceNodeId = NumericNodeId(893, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(891, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(39, 0)
ref.SourceNodeId = NumericNodeId(893, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(8244, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(893, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(76, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(892, 0)
node.BrowseName = QualifiedName('Default XML', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(891, 0)
node.ReferenceTypeId = NumericNodeId(38, 0)
node.TypeDefinition = NumericNodeId(76, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("Default XML")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(38, 0)
ref.SourceNodeId = NumericNodeId(892, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(891, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(39, 0)
ref.SourceNodeId = NumericNodeId(892, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(8879, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(892, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(76, 0)
refs.append(ref)
server.add_references(refs)
node = ua.AddNodesItem()
node.RequestedNewNodeId = NumericNodeId(15382, 0)
node.BrowseName = QualifiedName('Default JSON', 0)
node.NodeClass = NodeClass.Object
node.ParentNodeId = NumericNodeId(891, 0)
node.ReferenceTypeId = NumericNodeId(38, 0)
node.TypeDefinition = NumericNodeId(76, 0)
attrs = ua.ObjectAttributes()
attrs.DisplayName = LocalizedText("Default JSON")
attrs.EventNotifier = 0
node.NodeAttributes = attrs
server.add_nodes([node])
refs = []
ref = ua.AddReferencesItem()
ref.IsForward = False
ref.ReferenceTypeId = NumericNodeId(38, 0)
ref.SourceNodeId = NumericNodeId(15382, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(891, 0)
refs.append(ref)
ref = ua.AddReferencesItem()
ref.IsForward = True
ref.ReferenceTypeId = NumericNodeId(40, 0)
ref.SourceNodeId = NumericNodeId(15382, 0)
ref.TargetNodeClass = NodeClass.DataType
ref.TargetNodeId = NumericNodeId(76, 0)
refs.append(ref)
server.add_references(refs)
| iirob/python-opcua | opcua/server/standard_address_space/standard_address_space_part11.py | Python | lgpl-3.0 | 125,106 |
from datetime import datetime as dt
from functools import reduce
import transmissionrpc
from config import config
# Feature flag and connection settings for the Transmission RPC interface,
# all read from the project-level config mapping.
TRANSMISSION_ENABLED = config['TRANSMISSION_ENABLED']
TRANS_HOST = config['TRANS_HOST']  # Transmission daemon host
TRANS_PORT = config['TRANS_PORT']  # Transmission RPC port
TRANS_USER = config['TRANS_USER']  # RPC username
TRANS_PASS = config['TRANS_PASS']  # RPC password
# Seed-ratio limits applied below: one for generic public torrents, one for
# torrents on the anime trackers matched in update_nyaa_torrents().
TRANS_PUBLIC_RATIO_LIMIT = config['TRANS_PUBLIC_RATIO_LIMIT']
TRANS_ANIME_RATIO_LIMIT = config['TRANS_ANIME_RATIO_LIMIT']
def update_nyaa_torrents(host, port, user, password, ratio=TRANS_ANIME_RATIO_LIMIT):
    """Apply *ratio* as a per-torrent seed-ratio limit to every public
    torrent whose tracker announce URL mentions a known anime tracker.

    Returns the list of torrent ids that were updated (may be empty).
    """
    client = transmissionrpc.Client(host, port=port, user=user, password=password)
    tracker_keywords = ['nyaa', 'wakku']

    def matches_trackers(torrent):
        # True when any announce URL of the torrent contains a keyword.
        return any(any(kw in tracker['announce'] for kw in tracker_keywords)
                   for tracker in torrent.trackers)

    matched_ids = [t.id for t in client.get_torrents()
                   if not t.isPrivate and matches_trackers(t)]
    # seedRatioMode=1 switches the torrent to its own (per-torrent) limit.
    if matched_ids:
        client.change_torrent(matched_ids, seedRatioLimit=ratio, seedRatioMode=1)
    return matched_ids
def update_global_ratio_public_torrents(host, port, user, password, ratio):
    """Pin *ratio* as a per-torrent seed-ratio limit on every public torrent
    that is still following the global seed-ratio mode.

    Returns the list of torrent ids that were updated (may be empty).
    """
    client = transmissionrpc.Client(host, port=port, user=user, password=password)
    target_ids = [t.id for t in client.get_torrents()
                  if not t.isPrivate and t.seed_ratio_mode == 'global']
    # seedRatioMode=1 switches the torrent to its own (per-torrent) limit.
    if target_ids:
        client.change_torrent(target_ids, seedRatioLimit=ratio, seedRatioMode=1)
    return target_ids
def stop_completed_public_seeding_torrents(host, port, user, password):
    """Stop every public torrent that is seeding under the global ratio mode.

    Returns the list of torrent ids that were stopped (may be empty).
    """
    client = transmissionrpc.Client(host, port=port, user=user, password=password)
    seeding_ids = [t.id for t in client.get_torrents()
                   if not t.isPrivate
                   and t.status == 'seeding'
                   and t.seed_ratio_mode == 'global']
    if seeding_ids:
        client.stop_torrent(seeding_ids)
    return seeding_ids
def delete_completed_public_stopped_torrents(host, port, user, password):
tc = transmissionrpc.Client(host, port=port, user=user, password=password)
# All torrents
torrents = tc.get_torrents()
# Only public, seeding torrents
torrents = filter(lambda t: not t.isPrivate and t.status == 'stopped', torrents)
# Torrents that are at least 2 hours complete
torrents = filter(lambda t: (dt.now() - t.date_done).seconds > 7200, torrents)
# Torrent ids
ids = list(map(lambda t: t.id, torrents))
# Stop torrents
if ids:
tc.remove_torrent(ids, delete_data=True)
return ids
# Run all maintenance passes against the configured Transmission daemon
# and report how many torrents each pass touched.
changed_ids = update_global_ratio_public_torrents(
    TRANS_HOST, TRANS_PORT, TRANS_USER, TRANS_PASS, TRANS_PUBLIC_RATIO_LIMIT)
changed_ids += update_nyaa_torrents(
    TRANS_HOST, TRANS_PORT, TRANS_USER, TRANS_PASS, TRANS_ANIME_RATIO_LIMIT)
stopped_ids = stop_completed_public_seeding_torrents(
    TRANS_HOST, TRANS_PORT, TRANS_USER, TRANS_PASS)
deleted_ids = delete_completed_public_stopped_torrents(
    TRANS_HOST, TRANS_PORT, TRANS_USER, TRANS_PASS)
print("[%s] Torrents changed: %d; stopped: %d; deleted: %d" % (
    dt.now().strftime('%Y-%m-%d %H:%M:%S'),
    len(changed_ids), len(stopped_ids), len(deleted_ids)))
| sgtsquiggs/PlexThrottle | TransmissionCleanUp.py | Python | unlicense | 3,486 |
import pytz
# Zones pinned to the top of the selection list, in this fixed order.
priorities = ('US/Pacific', 'US/Mountain', 'US/Central', 'US/Eastern',
              'Brazil/East', 'UTC')

# Pull the priority zones out of a copy of the full pytz set; .remove()
# deliberately raises KeyError if a priority name ever vanishes from pytz.
remaining = pytz.all_timezones_set.copy()
for zone in priorities:
    remaining.remove(zone)

# Priority zones first, then the rest alphabetically, as (value, label)
# tuples for a selection widget.
all_tz = tuple((tz, tz) for tz in priorities + tuple(sorted(remaining)))
| mixmastamyk/flask-skeleton | src/timezones.py | Python | unlicense | 371 |
# Note: you must have `flac` and `metaflac` available in your path
from silly_beatport_scraper import *
import os
import subprocess
import urllib
# Scrape Beatport metadata for every .wav file in the current directory,
# convert each to FLAC and tag it with the scraped metadata.
all_files = os.listdir(".")
all_files = [f_name for f_name in all_files if "wav" in f_name]
my_scraper = SillyBeatportScraper()
print("{} file(s) to scrape".format(len(all_files)))
print("note: bpm values can be off, better to leave that to analysis software")
for f in all_files:
    if f[-3:].lower() == "wav":
        print('-' * 70)
        print("scraping for: {}".format(f))
        print('-' * 70)
        meta = my_scraper.meta_from_filename(f)
        print('track title: {}'.format(meta['track_title']))
        print('track artists: {}'.format(meta['track_artists']))
        print('genre: {}'.format(meta['genre']))
        print('bpm: {}'.format(meta['bpm']))
        print('label: {}'.format(meta['labels']))
        print('catalogue #: {}'.format(meta['album_info']['catalogue-number']))
        print('release name: {}'.format(meta['release_name']))
        print('release date: {}-{}-{}'.format(
            meta['album_info']['release-date']['year'],
            meta['album_info']['release-date']['month'],
            meta['album_info']['release-date']['day'],
        ))
        try:
            print('track #: {}'.format(meta['track_number']))
        except KeyError:
            # Narrowed from a bare except: only a missing 'track_number'
            # key is expected here; any other error should surface.
            print('exception!')
            print(meta)
        print('album art url: {}'.format(meta['album_art_url']))
        print("")
        # NOTE: f.replace() swaps every occurrence of 'wav' in the name,
        # not just the extension -- preserved from the original behaviour.
        print("fetching album art (saved as: {})".format(f.replace('wav', 'jpg')))
        urllib.urlretrieve(meta['album_art_url'], f.replace('wav', 'jpg'))
        print("")
        print("converting to flac...")
        args = ['flac', '--verify', '--best'] + [f]
        subprocess.Popen(args).wait()
        print("")
        print("tagging...")
        tags = []
        tags += ['Title={}'.format(meta['track_title'])]
        tags += ['Artist={}'.format(meta['track_artists'])]
        tags += ['Album={}'.format(meta['release_name'])]
        tags += ['Year={}'.format(meta['album_info']['release-date']['year'])]
        tags += ['Track={}'.format(meta['track_number'])]
        tags += ['Genre={}'.format(meta['genre'])]
        tags += ['Comment={}'.format("Catalog #{}".format(meta['album_info']['catalogue-number']))]
        # SECURITY FIX: build an argument list and invoke metaflac without a
        # shell.  The old shell=True command string broke on (and could be
        # exploited by) metadata containing quotes or shell metacharacters.
        args = ['metaflac', '--remove-all-tags']
        args += ['--set-tag={}'.format(tag) for tag in tags]
        args += ['--import-picture-from={}'.format(f.replace('wav', 'jpg'))]
        args += [f.replace('wav', 'flac')]
        subprocess.Popen(args).wait()
        print("")
| j-n-f/silly-beatport-scraper | example_scraper.py | Python | unlicense | 2,771 |
# -*- coding: utf-8 -*-
import os
# Required -- a missing key fails fast at import time.
SECRET_KEY = os.environ["SECRET_KEY"]
# Locale code -> display name for the language picker.
LANGUAGES = {
    "en": "English",
    "es": "Español",
}
BABEL_TRANSLATION_DIRECTORIES = "translations"
# hashedassets (asset fingerprinting) paths.
HASHEDASSETS_CATALOG = "/srv/www/hashedassets.yml"
HASHEDASSETS_SRC_DIR = "static/build"
HASHEDASSETS_OUT_DIR = "/srv/www/site/static"
HASHEDASSETS_URL_PREFIX = "/static/"
# Database DSN assembled from the deployment environment; the environment
# name doubles as the database name.
SQLALCHEMY_DATABASE_URI = "postgresql://%s:%s@%s:%s/%s" % (
    os.environ["STARTERKIT_DATABASE_USERNAME"],
    os.environ["STARTERKIT_DATABASE_PASSWORD"],
    os.environ["STARTERKIT_DATABASE_HOSTNAME"],
    os.environ["STARTERKIT_DATABASE_TCP_PORT"],
    os.environ["STARTERKIT_ENVIRONMENT"],
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Sentry is optional; an empty DSN disables reporting.
SENTRY_DSN = os.environ.get("SENTRY_DSN", "")
SENTRY_USER_ATTRS = ["email"]
STARTERKIT_HOMEPAGE_BLUEPRINT_URL_PREFIX = "/"
| carrete/docker-flask-starterkit-mirror | flask-app/starterkit/settings/common.py | Python | unlicense | 810 |
# Package version string; bump on release.
VERSION = "0.1"
# Re-export the public API at package level.
from trello.api import *
| onnodb/CloudBackups | trello/__init__.py | Python | unlicense | 43 |
# coding: utf-8
from __future__ import unicode_literals
import re
import base64
from .common import InfoExtractor
from ..compat import (
compat_urlparse,
compat_parse_qs,
)
from ..utils import (
clean_html,
ExtractorError,
int_or_none,
unsmuggle_url,
smuggle_url,
)
class KalturaIE(InfoExtractor):
    """Extractor for videos hosted on the Kaltura platform.

    Handles real kaltura.com player/preview URLs as well as the internal
    ``kaltura:<partner_id>:<entry_id>`` pseudo-URLs that _extract_url()
    produces for Kaltura embeds found on third-party pages.
    """
    _VALID_URL = r'''(?x)
                (?:
                    kaltura:(?P<partner_id>\d+):(?P<id>[0-9a-z_]+)|
                    https?://
                        (:?(?:www|cdnapi(?:sec)?)\.)?kaltura\.com(?::\d+)?/
                        (?:
                            (?:
                                # flash player
                                index\.php/(?:kwidget|extwidget/preview)|
                                # html5 player
                                html5/html5lib/[^/]+/mwEmbedFrame\.php
                            )
                        )(?:/(?P<path>[^?]+))?(?:\?(?P<query>.*))?
                )
                '''
    # Default API endpoint; a site-specific one may be smuggled in by
    # _extract_url() via the 'service_url' key.
    _SERVICE_URL = 'http://cdnapi.kaltura.com'
    _SERVICE_BASE = '/api_v3/index.php'
    # See https://github.com/kaltura/server/blob/master/plugins/content/caption/base/lib/model/enums/CaptionType.php
    _CAPTION_TYPES = {
        1: 'srt',
        2: 'ttml',
        3: 'vtt',
    }
    _TESTS = [
        {
            'url': 'kaltura:269692:1_1jc2y3e4',
            'md5': '3adcbdb3dcc02d647539e53f284ba171',
            'info_dict': {
                'id': '1_1jc2y3e4',
                'ext': 'mp4',
                'title': 'Straight from the Heart',
                'upload_date': '20131219',
                'uploader_id': 'mlundberg@wolfgangsvault.com',
                'description': 'The Allman Brothers Band, 12/16/1981',
                'thumbnail': 're:^https?://.*/thumbnail/.*',
                'timestamp': int,
            },
        },
        {
            'url': 'http://www.kaltura.com/index.php/kwidget/cache_st/1300318621/wid/_269692/uiconf_id/3873291/entry_id/1_1jc2y3e4',
            'only_matching': True,
        },
        {
            'url': 'https://cdnapisec.kaltura.com/index.php/kwidget/wid/_557781/uiconf_id/22845202/entry_id/1_plr1syf3',
            'only_matching': True,
        },
        {
            'url': 'https://cdnapisec.kaltura.com/html5/html5lib/v2.30.2/mwEmbedFrame.php/p/1337/uiconf_id/20540612/entry_id/1_sf5ovm7u?wid=_243342',
            'only_matching': True,
        },
        {
            # video with subtitles
            'url': 'kaltura:111032:1_cw786r8q',
            'only_matching': True,
        },
        {
            # video with ttml subtitles (no fileExt)
            'url': 'kaltura:1926081:0_l5ye1133',
            'info_dict': {
                'id': '0_l5ye1133',
                'ext': 'mp4',
                'title': 'What Can You Do With Python?',
                'upload_date': '20160221',
                'uploader_id': 'stork',
                'thumbnail': 're:^https?://.*/thumbnail/.*',
                'timestamp': int,
                'subtitles': {
                    'en': [{
                        'ext': 'ttml',
                    }],
                },
            },
            'skip': 'Gone. Maybe https://www.safaribooksonline.com/library/tutorials/introduction-to-python-anon/3469/',
            'params': {
                'skip_download': True,
            },
        },
        {
            'url': 'https://www.kaltura.com/index.php/extwidget/preview/partner_id/1770401/uiconf_id/37307382/entry_id/0_58u8kme7/embed/iframe?&flashvars[streamerType]=auto',
            'only_matching': True,
        },
        {
            'url': 'https://www.kaltura.com:443/index.php/extwidget/preview/partner_id/1770401/uiconf_id/37307382/entry_id/0_58u8kme7/embed/iframe?&flashvars[streamerType]=auto',
            'only_matching': True,
        },
        {
            # unavailable source format
            'url': 'kaltura:513551:1_66x4rg7o',
            'only_matching': True,
        }
    ]

    @staticmethod
    def _extract_url(webpage):
        """Find a Kaltura embed in *webpage* and return it as a
        ``kaltura:<partner_id>:<entry_id>`` URL (possibly smuggled with a
        site-specific service URL), or None if no embed is present."""
        # Embed codes: https://knowledge.kaltura.com/embedding-kaltura-media-players-your-site
        mobj = (
            re.search(
                r"""(?xs)
                    kWidget\.(?:thumb)?[Ee]mbed\(
                    \{.*?
                        (?P<q1>['"])wid(?P=q1)\s*:\s*
                        (?P<q2>['"])_?(?P<partner_id>(?:(?!(?P=q2)).)+)(?P=q2),.*?
                        (?P<q3>['"])entry_?[Ii]d(?P=q3)\s*:\s*
                        (?P<q4>['"])(?P<id>(?:(?!(?P=q4)).)+)(?P=q4)(?:,|\s*\})
                """, webpage)
            or re.search(
                r'''(?xs)
                    (?P<q1>["'])
                        (?:https?:)?//cdnapi(?:sec)?\.kaltura\.com(?::\d+)?/(?:(?!(?P=q1)).)*\b(?:p|partner_id)/(?P<partner_id>\d+)(?:(?!(?P=q1)).)*
                    (?P=q1).*?
                    (?:
                        (?:
                            entry_?[Ii]d|
                            (?P<q2>["'])entry_?[Ii]d(?P=q2)
                        )\s*:\s*|
                        \[\s*(?P<q2_1>["'])entry_?[Ii]d(?P=q2_1)\s*\]\s*=\s*
                    )
                    (?P<q3>["'])(?P<id>(?:(?!(?P=q3)).)+)(?P=q3)
                ''', webpage)
            or re.search(
                r'''(?xs)
                    <(?:iframe[^>]+src|meta[^>]+\bcontent)=(?P<q1>["'])
                      (?:https?:)?//(?:(?:www|cdnapi(?:sec)?)\.)?kaltura\.com/(?:(?!(?P=q1)).)*\b(?:p|partner_id)/(?P<partner_id>\d+)
                      (?:(?!(?P=q1)).)*
                      [?&;]entry_id=(?P<id>(?:(?!(?P=q1))[^&])+)
                      (?:(?!(?P=q1)).)*
                    (?P=q1)
                ''', webpage)
        )
        if mobj:
            embed_info = mobj.groupdict()
            for k, v in embed_info.items():
                embed_info[k] = v.strip()
            url = 'kaltura:%(partner_id)s:%(id)s' % embed_info
            escaped_pid = re.escape(embed_info['partner_id'])
            # If the page loads embedIframeJs from a non-default host, pass
            # that host along so the API calls go to the same service.
            service_url = re.search(
                r'<script[^>]+src=["\']((?:https?:)?//.+?)/p/%s/sp/%s00/embedIframeJs' % (escaped_pid, escaped_pid),
                webpage)
            if service_url:
                url = smuggle_url(url, {'service_url': service_url.group(1)})
        return url

    def _kaltura_api_call(self, video_id, actions, service_url=None, *args, **kwargs):
        """Issue one (possibly multi-request) call against the Kaltura API.

        actions[0] holds the common query parameters; each following action
        dict is flattened into the query as ``<index>:<key>`` pairs per
        Kaltura's "multirequest" convention.  Raises ExtractorError when the
        service reports a KalturaAPIException.
        """
        params = actions[0]
        if len(actions) > 1:
            for i, a in enumerate(actions[1:], start=1):
                for k, v in a.items():
                    params['%d:%s' % (i, k)] = v
        data = self._download_json(
            (service_url or self._SERVICE_URL) + self._SERVICE_BASE,
            video_id, query=params, *args, **kwargs)
        # For a multirequest the per-action results come back as a list;
        # only the first result is inspected for an API error here.
        status = data if len(actions) == 1 else data[0]
        if status.get('objectType') == 'KalturaAPIException':
            raise ExtractorError(
                '%s said: %s' % (self.IE_NAME, status['message']))
        return data

    def _get_video_info(self, video_id, partner_id, service_url=None):
        """Fetch entry metadata, flavor assets and caption assets for one
        entry in a single anonymous-widget-session multirequest."""
        actions = [
            {
                'action': 'null',
                'apiVersion': '3.1.5',
                'clientTag': 'kdp:v3.8.5',
                'format': 1,  # JSON, 2 = XML, 3 = PHP
                'service': 'multirequest',
            },
            {
                'expiry': 86400,
                'service': 'session',
                'action': 'startWidgetSession',
                'widgetId': '_%s' % partner_id,
            },
            {
                'action': 'get',
                'entryId': video_id,
                'service': 'baseentry',
                'ks': '{1:result:ks}',
                'responseProfile:fields': 'createdAt,dataUrl,duration,name,plays,thumbnailUrl,userId',
                'responseProfile:type': 1,
            },
            {
                'action': 'getbyentryid',
                'entryId': video_id,
                'service': 'flavorAsset',
                'ks': '{1:result:ks}',
            },
            {
                'action': 'list',
                'filter:entryIdEqual': video_id,
                'service': 'caption_captionasset',
                'ks': '{1:result:ks}',
            },
        ]
        return self._kaltura_api_call(
            video_id, actions, service_url, note='Downloading video info JSON')

    def _real_extract(self, url):
        """Resolve *url* to an entry, then build formats, subtitles and
        metadata for it."""
        url, smuggled_data = unsmuggle_url(url, {})

        mobj = re.match(self._VALID_URL, url)
        partner_id, entry_id = mobj.group('partner_id', 'id')
        ks = None
        captions = None
        if partner_id and entry_id:
            # kaltura: pseudo-URL -- both ids are known up front.
            _, info, flavor_assets, captions = self._get_video_info(entry_id, partner_id, smuggled_data.get('service_url'))
        else:
            # Player URL -- recover partner/entry ids from path segments
            # and/or the query string.
            path, query = mobj.group('path', 'query')
            if not path and not query:
                raise ExtractorError('Invalid URL', expected=True)
            params = {}
            if query:
                params = compat_parse_qs(query)
            if path:
                splitted_path = path.split('/')
                params.update(dict((zip(splitted_path[::2], [[v] for v in splitted_path[1::2]]))))
            if 'wid' in params:
                partner_id = params['wid'][0][1:]
            elif 'p' in params:
                partner_id = params['p'][0]
            elif 'partner_id' in params:
                partner_id = params['partner_id'][0]
            else:
                raise ExtractorError('Invalid URL', expected=True)
            if 'entry_id' in params:
                entry_id = params['entry_id'][0]
                _, info, flavor_assets, captions = self._get_video_info(entry_id, partner_id)
            elif 'uiconf_id' in params and 'flashvars[referenceId]' in params:
                reference_id = params['flashvars[referenceId]'][0]
                webpage = self._download_webpage(url, reference_id)
                entry_data = self._parse_json(self._search_regex(
                    r'window\.kalturaIframePackageData\s*=\s*({.*});',
                    webpage, 'kalturaIframePackageData'),
                    reference_id)['entryResult']
                info, flavor_assets = entry_data['meta'], entry_data['contextData']['flavorAssets']
                entry_id = info['id']
                # Unfortunately, data returned in kalturaIframePackageData lacks
                # captions so we will try requesting the complete data using
                # regular approach since we now know the entry_id
                try:
                    _, info, flavor_assets, captions = self._get_video_info(
                        entry_id, partner_id)
                except ExtractorError:
                    # Regular scenario failed but we already have everything
                    # extracted apart from captions and can process at least
                    # with this
                    pass
            else:
                raise ExtractorError('Invalid URL', expected=True)
            ks = params.get('flashvars[ks]', [None])[0]

        source_url = smuggled_data.get('source_url')
        if source_url:
            # Base64 "scheme://host" of the embedding page; some CDN URLs
            # require it as a ?referrer= parameter.
            referrer = base64.b64encode(
                '://'.join(compat_urlparse.urlparse(source_url)[:2])
                .encode('utf-8')).decode('utf-8')
        else:
            referrer = None

        def sign_url(unsigned_url):
            # Append the session key (ks) and referrer, when known, so the
            # CDN accepts the request.
            if ks:
                unsigned_url += '/ks/%s' % ks
            if referrer:
                unsigned_url += '?referrer=%s' % referrer
            return unsigned_url

        data_url = info['dataUrl']
        if '/flvclipper/' in data_url:
            data_url = re.sub(r'/flvclipper/.*', '/serveFlavor', data_url)

        formats = []
        for f in flavor_assets:
            # Continue if asset is not ready
            if f.get('status') != 2:
                continue
            # Original format that's not available (e.g. kaltura:1926081:0_c03e1b5g)
            # skip for now.
            if f.get('fileExt') == 'chun':
                continue
            # DRM-protected video, cannot be decrypted
            if f.get('fileExt') == 'wvm':
                continue
            if not f.get('fileExt'):
                # QT indicates QuickTime; some videos have broken fileExt
                if f.get('containerFormat') == 'qt':
                    f['fileExt'] = 'mov'
                else:
                    f['fileExt'] = 'mp4'
            video_url = sign_url(
                '%s/flavorId/%s' % (data_url, f['id']))
            format_id = '%(fileExt)s-%(bitrate)s' % f
            # Source format may not be available (e.g. kaltura:513551:1_66x4rg7o)
            if f.get('isOriginal') is True and not self._is_valid_url(
                    video_url, entry_id, format_id):
                continue
            # audio-only has no videoCodecId (e.g. kaltura:1926081:0_c03e1b5g
            # -f mp4-56)
            vcodec = 'none' if 'videoCodecId' not in f and f.get(
                'frameRate') == 0 else f.get('videoCodecId')
            formats.append({
                'format_id': format_id,
                'ext': f.get('fileExt'),
                'tbr': int_or_none(f['bitrate']),
                'fps': int_or_none(f.get('frameRate')),
                'filesize_approx': int_or_none(f.get('size'), invscale=1024),
                'container': f.get('containerFormat'),
                'vcodec': vcodec,
                'height': int_or_none(f.get('height')),
                'width': int_or_none(f.get('width')),
                'url': video_url,
            })
        if '/playManifest/' in data_url:
            # The playManifest endpoint can also serve an HLS rendition.
            m3u8_url = sign_url(data_url.replace(
                'format/url', 'format/applehttp'))
            formats.extend(self._extract_m3u8_formats(
                m3u8_url, entry_id, 'mp4', 'm3u8_native',
                m3u8_id='hls', fatal=False))

        self._sort_formats(formats)

        subtitles = {}
        if captions:
            for caption in captions.get('objects', []):
                # Continue if caption is not ready
                if caption.get('status') != 2:
                    continue
                if not caption.get('id'):
                    continue
                caption_format = int_or_none(caption.get('format'))
                subtitles.setdefault(caption.get('languageCode') or caption.get('language'), []).append({
                    'url': '%s/api_v3/service/caption_captionasset/action/serve/captionAssetId/%s' % (self._SERVICE_URL, caption['id']),
                    'ext': caption.get('fileExt') or self._CAPTION_TYPES.get(caption_format) or 'ttml',
                })

        return {
            'id': entry_id,
            'title': info['name'],
            'formats': formats,
            'subtitles': subtitles,
            'description': clean_html(info.get('description')),
            'thumbnail': info.get('thumbnailUrl'),
            'duration': info.get('duration'),
            'timestamp': info.get('createdAt'),
            'uploader_id': info.get('userId') if info.get('userId') != 'None' else None,
            'view_count': info.get('plays'),
        }
| hakatashi/youtube-dl | youtube_dl/extractor/kaltura.py | Python | unlicense | 15,137 |
#!/usr/bin/env python
"""tvnamer - Automagical TV episode renamer
Uses data from www.thetvdb.com (via tvdb_api) to rename TV episode files from
"some.show.name.s01e01.blah.avi" to "Some Show Name - [01x01] - The First.avi"
"""
# Package metadata; __version__ is bumped on release.
__version__ = "3.0.0"
__author__ = "dbr/Ben"
import primes
# same logic as p120
# I was hoping since these were primes that
# maybe Fermat's Little Theorem would show up
# ... but it didn't.
def check(p, n, target):
    """Return True once twice the product n * p exceeds target."""
    return target < 2 * n * p
def main():
ps = primes.primes(250000)
i = 0
while not check(ps[i],i+1,10**10):
i += 2
print i+1 # zero indexing, yo
main()
| kbrose/project_euler | p120-129/p123.py | Python | unlicense | 347 |
#!/usr/bin/env python3
#-------------------------------------------------------------------------------
# Solution
# Find critical points, store in a max heap using min heap properties (-height)
#-------------------------------------------------------------------------------
import heapq
class Solution:
    def getSkyline(self, buildings):
        """
        Sweep-line + max-heap skyline.

        Walk every candidate x-coordinate (each building's left and right
        edge) left to right, keep the buildings currently "alive" in a heap
        keyed by negated height (so the tallest is at the top), and record a
        key point whenever the visible height changes.

        :type buildings: List[List[int]]
        :rtype: List[List[int]]
        """
        def addsky(pos, hei):
            # Only record a point when the height actually changes.
            if sky[-1][1] != hei:
                sky.append([pos, hei])
        # Sentinel entry so addsky() always has a previous height to compare.
        sky = [[-1,0]]

        # possible corner positions
        position = set([b[0] for b in buildings] + [b[1] for b in buildings])

        # live buildings
        live = []
        i = 0

        for t in sorted(position):
            # add the new buildings whose left side is lefter than position t
            # (buildings are assumed sorted by left edge, as in the problem)
            while i < len(buildings) and buildings[i][0] <= t:
                heapq.heappush(live, (-buildings[i][2], buildings[i][1]))
                i += 1

            # remove the past buildings whose right side is lefter than position t
            # (lazy deletion: only the heap top needs to be current)
            while live and live[0][1] <= t:
                heapq.heappop(live)

            # pick the highest existing building at this moment
            h = -live[0][0] if live else 0
            addsky(t, h)

        # Drop the sentinel before returning.
        return sky[1:]
#-------------------------------------------------------------------------------
# This does not pass test cases (Memory inefficient)
#-------------------------------------------------------------------------------
# NOTE(review): this second Solution intentionally redefines the one above;
# it is kept for illustration (brute-force height map, O(total width) memory).
class Solution:
    def getSkyline(self, buildings):
        """
        Brute-force skyline via a per-x-coordinate height map.

        Builds an array spanning every integer x up to the rightmost edge,
        paints each building's height onto it, then emits a key point at
        every height change.  Memory use is proportional to the coordinate
        range, which is why it fails large test cases.

        :type buildings: List[List[int]]
        :rtype: List[List[int]]
        """
        if not buildings:
            return []
        if len(buildings) == 1:
            l, r, h = buildings.pop()
            return [[l, h],[r, 0]]
        # Find the right most point
        far_right = 0
        for (_,r,_) in buildings:
            far_right = max(far_right, r)
        # Build a 1 dimension height map to track heights
        height_map = [0]*(far_right+1)
        for (l,r,h) in buildings:
            # Paint [l, r): the right edge itself is exclusive.
            for i in range(l, r):
                if h > height_map[i]:
                    height_map[i] = h

        height = 0
        skyline = []
        for i in range(len(height_map)):
            # Each change in height is a skyline key point.
            if height_map[i] != height:
                height = height_map[i]
                skyline.append([i, height])
        return skyline
#-------------------------------------------------------------------------------
# Testing
| kyle8998/Practice-Coding-Questions | leetcode/218-Hard-The-Skyline-Problem/answer.py | Python | unlicense | 2,601 |
from logging.handlers import BaseRotatingHandler
import string
import time
import datetime
import os
class TimePatternRotatingHandler(BaseRotatingHandler):
    """Rotating log handler that writes to a date-stamped file name.

    ``filename`` is a pattern with one ``%s`` placeholder that is filled
    with the current period's date string (e.g. ``app-%s.log`` ->
    ``app-2020-01-15.log``).  ``when`` selects the rotation period:
    ``'MONTH'`` or ``'DAY'`` (case-insensitive).
    """
    def __init__(self, filename, when, encoding=None, delay=0):
        # str.upper() replaces the Python-2-only string.upper() function.
        self.when = when.upper()
        self.fname_pat = filename
        # Test hook: when set to a datetime it is used instead of "now".
        self.mock_dt = None
        self.computeNextRollover()
        BaseRotatingHandler.__init__(self, self.filename, 'a', encoding, delay)

    def get_now_dt(self):
        """Return the current datetime, honouring the mock_dt test hook."""
        if self.mock_dt is not None:
            return self.mock_dt
        return datetime.datetime.now()

    def computeNextRollover(self):
        """Set self.rolloverAt (epoch seconds), self.dtfmt and the current
        self.filename for the period containing "now".

        Raises ValueError for an unsupported ``when`` value (previously this
        crashed later with an UnboundLocalError).
        """
        now = self.get_now_dt()
        if self.when == 'MONTH':
            dtfmt = '%Y-%m'
            # First day of next month: jump past this month's end (day 1
            # + 40 days always lands in the next month), then clamp.
            dt = (now.replace(day=1) + datetime.timedelta(days=40)).replace(
                day=1, hour=0, minute=0, second=0)
        elif self.when == 'DAY':
            dtfmt = '%Y-%m-%d'
            dt = (now + datetime.timedelta(days=1)).replace(
                hour=0, minute=0, second=0)
        else:
            raise ValueError("unsupported rotation period: %r" % (self.when,))
        self.rolloverAt = time.mktime(dt.timetuple())
        self.dtfmt = dtfmt
        self.filename = os.path.abspath(self.fname_pat % (now.strftime(self.dtfmt)))

    def shouldRollover(self, record):
        """Return 1 when the current time has reached the scheduled
        rollover, else 0 (record content is irrelevant)."""
        now = self.get_now_dt()
        t = time.mktime(now.timetuple())
        if t >= self.rolloverAt:
            return 1
        return 0

    def doRollover(self):
        """Close the current stream and reopen under the new period's name."""
        if self.stream:
            self.stream.close()
        self.computeNextRollover()
        self.baseFilename = self.filename
        self.stream = self._open()
| vls/python_utils | log_handlers.py | Python | unlicense | 1,677 |
#Problem 2 Project Euler
FiboNumSum = 0
first, second, end = 0, 1, 4000000
while second < end:
if second % 2 == 0: FiboNumSum += second
first, second = second, first + second
print FiboNumSum
| kingmak/Project_Euler_Solutions | Euler_002.py | Python | unlicense | 200 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import pika
import time
# Connect to the local RabbitMQ broker and declare the queue that RPC
# requests arrive on (declare is idempotent).
connection = pika.BlockingConnection(pika.ConnectionParameters(
    host='localhost'))
channel = connection.channel()
channel.queue_declare(queue='rpc_queue')
def fib(n):
    """Return the n-th Fibonacci number (fib(0) == 0, fib(1) == 1).

    Iterative implementation: the original naive double recursion was
    exponential in n, which made larger RPC requests unreasonably slow.
    """
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
def on_request(ch, method, props, body):
    """Handle one RPC request: compute fib(int(body)) and publish the
    result to the caller's reply queue, echoing its correlation id."""
    requested = int(body)
    print(" [.] fib(%s)" % requested)
    result = fib(requested)
    ch.basic_publish(
        exchange='',
        routing_key=props.reply_to,
        properties=pika.BasicProperties(
            correlation_id=props.correlation_id),
        body=str(result))
    # Ack only after the reply has been published.
    ch.basic_ack(delivery_tag=method.delivery_tag)
# Fair dispatch: hand this worker at most one unacked message at a time.
channel.basic_qos(prefetch_count=1)
channel.basic_consume(on_request, queue='rpc_queue')
print(" [x] Awaiting RPC requests")
# Blocks forever, dispatching requests to on_request().
channel.start_consuming()
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import http.client as http
import urllib.parse as urlparse
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
import webob.exc
from wsme.rest import json
from glance.api import policy
from glance.api.v2.model.metadef_namespace import Namespace
from glance.api.v2.model.metadef_namespace import Namespaces
from glance.api.v2.model.metadef_object import MetadefObject
from glance.api.v2.model.metadef_property_type import PropertyType
from glance.api.v2.model.metadef_resource_type import ResourceTypeAssociation
from glance.api.v2.model.metadef_tag import MetadefTag
from glance.api.v2 import policy as api_policy
from glance.common import exception
from glance.common import utils
from glance.common import wsgi
from glance.common import wsme_utils
import glance.db
import glance.gateway
from glance.i18n import _, _LE
import glance.notifier
import glance.schema
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class NamespaceController(object):
    def __init__(self, db_api=None, policy_enforcer=None, notifier=None):
        """Wire up DB, policy and notifier dependencies (defaulting to the
        process-wide implementations) plus the schema links that are
        embedded in API responses."""
        self.db_api = db_api or glance.db.get_api()
        self.policy = policy_enforcer or policy.Enforcer()
        self.notifier = notifier or glance.notifier.Notifier()
        self.gateway = glance.gateway.Gateway(db_api=self.db_api,
                                              notifier=self.notifier,
                                              policy_enforcer=self.policy)
        self.ns_schema_link = '/v2/schemas/metadefs/namespace'
        self.obj_schema_link = '/v2/schemas/metadefs/object'
        self.tag_schema_link = '/v2/schemas/metadefs/tag'
    def index(self, req, marker=None, limit=None, sort_key='created_at',
              sort_dir='desc', filters=None):
        """Return a paginated Namespaces collection.

        Results are restricted to namespaces the caller may view (checked
        per-namespace) and each namespace is decorated with its resource
        type associations.  Policy failures map to 403, missing marker
        namespaces to 404.
        """
        try:
            ns_repo = self.gateway.get_metadef_namespace_repo(
                req.context, authorization_layer=False)
            policy_check = api_policy.MetadefAPIPolicy(
                req.context,
                enforcer=self.policy)
            # NOTE(abhishekk): This is just a "do you have permission to
            # list namespace" check. Each namespace is checked against
            # get_metadef_namespace below.
            policy_check.get_metadef_namespaces()

            # NOTE(abhishekk): We also need to fetch resource_types associated
            # with namespaces, so better to check we have permission for the
            # same in advance.
            policy_check.list_metadef_resource_types()

            # Get namespace id
            if marker:
                namespace_obj = ns_repo.get(marker)
                marker = namespace_obj.namespace_id

            database_ns_list = ns_repo.list(
                marker=marker, limit=limit, sort_key=sort_key,
                sort_dir=sort_dir, filters=filters)

            # Keep only the namespaces this user may actually see.
            ns_list = [
                ns for ns in database_ns_list if api_policy.MetadefAPIPolicy(
                    req.context, md_resource=ns, enforcer=self.policy).check(
                    'get_metadef_namespace')]

            rs_repo = (
                self.gateway.get_metadef_resource_type_repo(
                    req.context, authorization_layer=False))
            for db_namespace in ns_list:
                # Get resource type associations
                filters = dict()
                filters['namespace'] = db_namespace.namespace
                repo_rs_type_list = rs_repo.list(filters=filters)
                resource_type_list = [
                    ResourceTypeAssociation.to_wsme_model(
                        resource_type
                    ) for resource_type in repo_rs_type_list]
                if resource_type_list:
                    db_namespace.resource_type_associations = (
                        resource_type_list)

            namespace_list = [Namespace.to_wsme_model(
                db_namespace,
                get_namespace_href(db_namespace),
                self.ns_schema_link) for db_namespace in ns_list]
            namespaces = Namespaces()
            namespaces.namespaces = namespace_list
            # A full page implies more results may follow; expose the
            # pagination cursor.
            if len(namespace_list) != 0 and len(namespace_list) == limit:
                namespaces.next = ns_list[-1].namespace

        except exception.Forbidden as e:
            LOG.debug("User not permitted to retrieve metadata namespaces "
                      "index")
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
        return namespaces
    @utils.mutating
    def create(self, req, namespace):
        """Create a namespace plus any nested resource type associations,
        objects, tags and properties supplied with it.

        All policy checks run up front; if any later step fails, the
        freshly created namespace is rolled back via _cleanup_namespace()
        so no half-created namespace is left behind.
        """
        try:
            namespace_created = False
            # Create Namespace
            ns_factory = self.gateway.get_metadef_namespace_factory(
                req.context, authorization_layer=False)
            ns_repo = self.gateway.get_metadef_namespace_repo(
                req.context, authorization_layer=False)

            # NOTE(abhishekk): Here we are going to check if user is authorized
            # to create namespace, resource_types, objects, properties etc.
            policy_check = api_policy.MetadefAPIPolicy(
                req.context, enforcer=self.policy)
            policy_check.add_metadef_namespace()

            if namespace.resource_type_associations:
                policy_check.add_metadef_resource_type_association()

            if namespace.objects:
                policy_check.add_metadef_object()

            if namespace.properties:
                policy_check.add_metadef_property()

            if namespace.tags:
                policy_check.add_metadef_tag()

            # NOTE(abhishekk): As we are getting rid of auth layer, this
            # is the place where we should add owner if it is not specified
            # in request.
            kwargs = namespace.to_dict()
            if 'owner' not in kwargs:
                kwargs.update({'owner': req.context.owner})

            new_namespace = ns_factory.new_namespace(**kwargs)
            ns_repo.add(new_namespace)
            namespace_created = True

            # Create Resource Types
            if namespace.resource_type_associations:
                rs_factory = (self.gateway.get_metadef_resource_type_factory(
                    req.context, authorization_layer=False))
                rs_repo = self.gateway.get_metadef_resource_type_repo(
                    req.context, authorization_layer=False)
                for resource_type in namespace.resource_type_associations:
                    new_resource = rs_factory.new_resource_type(
                        namespace=namespace.namespace,
                        **resource_type.to_dict())
                    rs_repo.add(new_resource)

            # Create Objects
            if namespace.objects:
                object_factory = self.gateway.get_metadef_object_factory(
                    req.context, authorization_layer=False)
                object_repo = self.gateway.get_metadef_object_repo(
                    req.context, authorization_layer=False)
                for metadata_object in namespace.objects:
                    new_meta_object = object_factory.new_object(
                        namespace=namespace.namespace,
                        **metadata_object.to_dict())
                    object_repo.add(new_meta_object)

            # Create Tags
            if namespace.tags:
                tag_factory = self.gateway.get_metadef_tag_factory(
                    req.context, authorization_layer=False)
                tag_repo = self.gateway.get_metadef_tag_repo(
                    req.context, authorization_layer=False)
                for metadata_tag in namespace.tags:
                    new_meta_tag = tag_factory.new_tag(
                        namespace=namespace.namespace,
                        **metadata_tag.to_dict())
                    tag_repo.add(new_meta_tag)

            # Create Namespace Properties
            if namespace.properties:
                prop_factory = (self.gateway.get_metadef_property_factory(
                    req.context, authorization_layer=False))
                prop_repo = self.gateway.get_metadef_property_repo(
                    req.context, authorization_layer=False)
                for (name, value) in namespace.properties.items():
                    new_property_type = (
                        prop_factory.new_namespace_property(
                            namespace=namespace.namespace,
                            **self._to_property_dict(name, value)
                        ))
                    prop_repo.add(new_property_type)
        except exception.Invalid as e:
            msg = (_("Couldn't create metadata namespace: %s")
                   % encodeutils.exception_to_unicode(e))
            raise webob.exc.HTTPBadRequest(explanation=msg)
        except exception.Forbidden as e:
            self._cleanup_namespace(ns_repo, namespace, namespace_created)
            LOG.debug("User not permitted to create metadata namespace")
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            self._cleanup_namespace(ns_repo, namespace, namespace_created)
            raise webob.exc.HTTPNotFound(explanation=e.msg)
        except exception.Duplicate as e:
            self._cleanup_namespace(ns_repo, namespace, namespace_created)
            raise webob.exc.HTTPConflict(explanation=e.msg)

        # Return the user namespace as we don't expose the id to user
        new_namespace.properties = namespace.properties
        new_namespace.objects = namespace.objects
        new_namespace.resource_type_associations = (
            namespace.resource_type_associations)
        new_namespace.tags = namespace.tags
        return Namespace.to_wsme_model(new_namespace,
                                       get_namespace_href(new_namespace),
                                       self.ns_schema_link)
def _to_property_dict(self, name, value):
# Convert the model PropertyTypes dict to a JSON string
db_property_type_dict = dict()
db_property_type_dict['schema'] = json.tojson(PropertyType, value)
db_property_type_dict['name'] = name
return db_property_type_dict
def _cleanup_namespace(self, namespace_repo, namespace, namespace_created):
if namespace_created:
try:
namespace_obj = namespace_repo.get(namespace.namespace)
namespace_obj.delete()
namespace_repo.remove(namespace_obj)
LOG.debug("Cleaned up namespace %(namespace)s ",
{'namespace': namespace.namespace})
except Exception as e:
msg = (_LE("Failed to delete namespace %(namespace)s."
"Exception: %(exception)s"),
{'namespace': namespace.namespace,
'exception': encodeutils.exception_to_unicode(e)})
LOG.error(msg)
    def show(self, req, namespace, filters=None):
        """Return the full detail of one namespace: objects, resource type
        associations, properties and tags.

        A namespace the caller may not view is reported as 404 (not 403)
        so its existence is not leaked.  When filters contains a
        'resource_type', property names are prefixed accordingly via
        _prefix_property_name().
        """
        try:
            # Get namespace
            ns_repo = self.gateway.get_metadef_namespace_repo(
                req.context, authorization_layer=False)
            try:
                namespace_obj = ns_repo.get(namespace)
                policy_check = api_policy.MetadefAPIPolicy(
                    req.context,
                    md_resource=namespace_obj,
                    enforcer=self.policy)
                policy_check.get_metadef_namespace()
            except (exception.Forbidden, webob.exc.HTTPForbidden):
                LOG.debug("User not permitted to show namespace '%s'",
                          namespace)
                # NOTE (abhishekk): Returning 404 Not Found as the
                # namespace is outside of this user's project
                raise webob.exc.HTTPNotFound()

            # NOTE(abhishekk): We also need to fetch resource_types, objects,
            # properties, tags associated with namespace, so better to check
            # whether user has permissions for the same.
            policy_check.list_metadef_resource_types()
            policy_check.get_metadef_objects()
            policy_check.get_metadef_properties()
            policy_check.get_metadef_tags()

            namespace_detail = Namespace.to_wsme_model(
                namespace_obj,
                get_namespace_href(namespace_obj),
                self.ns_schema_link)
            ns_filters = dict()
            ns_filters['namespace'] = namespace

            # Get objects
            object_repo = self.gateway.get_metadef_object_repo(
                req.context, authorization_layer=False)
            db_metaobject_list = object_repo.list(filters=ns_filters)
            object_list = [MetadefObject.to_wsme_model(
                db_metaobject,
                get_object_href(namespace, db_metaobject),
                self.obj_schema_link) for db_metaobject in db_metaobject_list]
            if object_list:
                namespace_detail.objects = object_list

            # Get resource type associations
            rs_repo = self.gateway.get_metadef_resource_type_repo(
                req.context, authorization_layer=False)
            db_resource_type_list = rs_repo.list(filters=ns_filters)
            resource_type_list = [ResourceTypeAssociation.to_wsme_model(
                resource_type) for resource_type in db_resource_type_list]
            if resource_type_list:
                namespace_detail.resource_type_associations = (
                    resource_type_list)

            # Get properties
            prop_repo = self.gateway.get_metadef_property_repo(
                req.context, authorization_layer=False)
            db_properties = prop_repo.list(filters=ns_filters)
            property_list = Namespace.to_model_properties(db_properties)
            if property_list:
                namespace_detail.properties = property_list

            if filters and filters['resource_type']:
                namespace_detail = self._prefix_property_name(
                    namespace_detail, filters['resource_type'])

            # Get tags
            tag_repo = self.gateway.get_metadef_tag_repo(
                req.context, authorization_layer=False)
            db_metatag_list = tag_repo.list(filters=ns_filters)
            tag_list = [MetadefTag(**{'name': db_metatag.name})
                        for db_metatag in db_metatag_list]
            if tag_list:
                namespace_detail.tags = tag_list

        except exception.Forbidden as e:
            LOG.debug("User not permitted to show metadata namespace "
                      "'%s'", namespace)
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
        return namespace_detail
    def update(self, req, user_ns, namespace):
        """Update the namespace identified by *namespace* with the values
        from *user_ns* (which may rename it).

        Unknown/forbidden namespaces are reported as 404 so their existence
        is not leaked; visibility, protected and owner fall back to the same
        defaults used at creation when omitted.
        """
        namespace_repo = self.gateway.get_metadef_namespace_repo(
            req.context, authorization_layer=False)
        try:
            ns_obj = namespace_repo.get(namespace)
        except (exception.Forbidden, exception.NotFound):
            # NOTE (abhishekk): Returning 404 Not Found as the
            # namespace is outside of this user's project
            msg = _("Namespace %s not found") % namespace
            raise webob.exc.HTTPNotFound(explanation=msg)

        try:
            # NOTE(abhishekk): Here we are just checking if use is authorized
            # to modify the namespace or not
            api_policy.MetadefAPIPolicy(
                req.context,
                md_resource=ns_obj,
                enforcer=self.policy).modify_metadef_namespace()

            # Keep the pre-update name so the repo can locate the row when
            # the namespace is being renamed -- presumably consumed by
            # namespace_repo.save(); verify in the repo implementation.
            ns_obj._old_namespace = ns_obj.namespace
            ns_obj.namespace = wsme_utils._get_value(user_ns.namespace)
            ns_obj.display_name = wsme_utils._get_value(user_ns.display_name)
            ns_obj.description = wsme_utils._get_value(user_ns.description)
            # Following optional fields will default to same values as in
            # create namespace if not specified
            ns_obj.visibility = (
                wsme_utils._get_value(user_ns.visibility) or 'private')
            ns_obj.protected = (
                wsme_utils._get_value(user_ns.protected) or False)
            ns_obj.owner = (
                wsme_utils._get_value(user_ns.owner) or req.context.owner)
            updated_namespace = namespace_repo.save(ns_obj)
        except exception.Invalid as e:
            msg = (_("Couldn't update metadata namespace: %s")
                   % encodeutils.exception_to_unicode(e))
            raise webob.exc.HTTPBadRequest(explanation=msg)
        except exception.Forbidden as e:
            LOG.debug("User not permitted to update metadata namespace "
                      "'%s'", namespace)
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
        except exception.Duplicate as e:
            raise webob.exc.HTTPConflict(explanation=e.msg)

        return Namespace.to_wsme_model(updated_namespace,
                                       get_namespace_href(updated_namespace),
                                       self.ns_schema_link)
def delete(self, req, namespace):
namespace_repo = self.gateway.get_metadef_namespace_repo(
req.context, authorization_layer=False)
try:
namespace_obj = namespace_repo.get(namespace)
except (exception.Forbidden, exception.NotFound):
# NOTE (abhishekk): Returning 404 Not Found as the
# namespace is outside of this user's project
msg = _("Namespace %s not found") % namespace
raise webob.exc.HTTPNotFound(explanation=msg)
try:
# NOTE(abhishekk): Here we are just checking user is authorized to
# delete the namespace or not.
api_policy.MetadefAPIPolicy(
req.context,
md_resource=namespace_obj,
enforcer=self.policy).delete_metadef_namespace()
namespace_obj.delete()
namespace_repo.remove(namespace_obj)
except exception.Forbidden as e:
LOG.debug("User not permitted to delete metadata namespace "
"'%s'", namespace)
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
def delete_objects(self, req, namespace):
ns_repo = self.gateway.get_metadef_namespace_repo(
req.context, authorization_layer=False)
try:
namespace_obj = ns_repo.get(namespace)
except (exception.Forbidden, exception.NotFound):
# NOTE (abhishekk): Returning 404 Not Found as the
# namespace is outside of this user's project
msg = _("Namespace %s not found") % namespace
raise webob.exc.HTTPNotFound(explanation=msg)
try:
# NOTE(abhishekk): This call currently checks whether user
# has permission to delete the namespace or not before deleting
# the objects associated with it.
api_policy.MetadefAPIPolicy(
req.context,
md_resource=namespace_obj,
enforcer=self.policy).delete_metadef_namespace()
namespace_obj.delete()
ns_repo.remove_objects(namespace_obj)
except exception.Forbidden as e:
LOG.debug("User not permitted to delete metadata objects "
"within '%s' namespace", namespace)
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
    def delete_tags(self, req, namespace):
        """Delete all tags defined within the given namespace."""
        ns_repo = self.gateway.get_metadef_namespace_repo(
            req.context, authorization_layer=False)
        try:
            namespace_obj = ns_repo.get(namespace)
        except (exception.Forbidden, exception.NotFound):
            # NOTE (abhishekk): Returning 404 Not Found as the
            # namespace is outside of this user's project
            msg = _("Namespace %s not found") % namespace
            raise webob.exc.HTTPNotFound(explanation=msg)
        try:
            # NOTE(abhishekk): This call currently checks whether user
            # has permission to delete the namespace or not before deleting
            # the objects associated with it.
            policy_check = api_policy.MetadefAPIPolicy(
                req.context,
                md_resource=namespace_obj,
                enforcer=self.policy)
            policy_check.delete_metadef_namespace()
            # NOTE(abhishekk): This call checks whether user
            # has permission to delete the tags or not.
            policy_check.delete_metadef_tags()
            namespace_obj.delete()
            ns_repo.remove_tags(namespace_obj)
        except exception.Forbidden as e:
            LOG.debug("User not permitted to delete metadata tags "
                      "within '%s' namespace", namespace)
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
def delete_properties(self, req, namespace):
ns_repo = self.gateway.get_metadef_namespace_repo(
req.context, authorization_layer=False)
try:
namespace_obj = ns_repo.get(namespace)
except (exception.Forbidden, exception.NotFound):
# NOTE (abhishekk): Returning 404 Not Found as the
# namespace is outside of this user's project
msg = _("Namespace %s not found") % namespace
raise webob.exc.HTTPNotFound(explanation=msg)
try:
# NOTE(abhishekk): This call currently checks whether user
# has permission to delete the namespace or not before deleting
# the objects associated with it.
api_policy.MetadefAPIPolicy(
req.context,
md_resource=namespace_obj,
enforcer=self.policy).delete_metadef_namespace()
namespace_obj.delete()
ns_repo.remove_properties(namespace_obj)
except exception.Forbidden as e:
LOG.debug("User not permitted to delete metadata properties "
"within '%s' namespace", namespace)
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
def _prefix_property_name(self, namespace_detail, user_resource_type):
prefix = None
if user_resource_type and namespace_detail.resource_type_associations:
for resource_type in namespace_detail.resource_type_associations:
if resource_type.name == user_resource_type:
prefix = resource_type.prefix
break
if prefix:
if namespace_detail.properties:
new_property_dict = dict()
for (key, value) in namespace_detail.properties.items():
new_property_dict[prefix + key] = value
namespace_detail.properties = new_property_dict
if namespace_detail.objects:
for object in namespace_detail.objects:
new_object_property_dict = dict()
for (key, value) in object.properties.items():
new_object_property_dict[prefix + key] = value
object.properties = new_object_property_dict
if object.required and len(object.required) > 0:
required = [prefix + name for name in object.required]
object.required = required
return namespace_detail
class RequestDeserializer(wsgi.JSONRequestDeserializer):
    """Deserialize and validate incoming namespace API requests."""

    # Body attributes clients may never set directly.
    _disallowed_properties = ['self', 'schema', 'created_at', 'updated_at']

    def __init__(self, schema=None):
        super(RequestDeserializer, self).__init__()
        self.schema = schema or get_schema()

    def _get_request_body(self, request):
        """Return the JSON body of *request*; raise 400 when absent."""
        output = super(RequestDeserializer, self).default(request)
        if 'body' not in output:
            msg = _('Body expected in request.')
            raise webob.exc.HTTPBadRequest(explanation=msg)
        return output['body']

    @classmethod
    def _check_allowed(cls, body):
        """Reject request bodies that try to set read-only attributes."""
        for key in cls._disallowed_properties:
            if key in body:
                msg = _("Attribute '%s' is read-only.") % key
                raise webob.exc.HTTPForbidden(explanation=msg)

    def index(self, request):
        """Extract and validate paging/sorting/filter query parameters."""
        params = request.params.copy()
        limit = params.pop('limit', None)
        marker = params.pop('marker', None)
        sort_dir = params.pop('sort_dir', 'desc')
        if limit is None:
            limit = CONF.limit_param_default
        # Validate *before* clamping: previously int(limit) ran first, so a
        # non-integer 'limit' raised an unhandled ValueError (HTTP 500)
        # instead of the intended 400 Bad Request.
        limit = min(CONF.api_limit_max, self._validate_limit(limit))
        query_params = {
            'sort_key': params.pop('sort_key', 'created_at'),
            'sort_dir': self._validate_sort_dir(sort_dir),
            'filters': self._get_filters(params),
            'limit': limit,
        }
        if marker is not None:
            query_params['marker'] = marker
        return query_params

    def _validate_sort_dir(self, sort_dir):
        """Return *sort_dir* if it is 'asc' or 'desc'; raise 400 otherwise."""
        if sort_dir not in ['asc', 'desc']:
            msg = _('Invalid sort direction: %s') % sort_dir
            raise webob.exc.HTTPBadRequest(explanation=msg)
        return sort_dir

    def _get_filters(self, filters):
        """Validate filter parameters (currently only 'visibility')."""
        visibility = filters.get('visibility')
        if visibility and visibility not in ['public', 'private']:
            msg = _('Invalid visibility value: %s') % visibility
            raise webob.exc.HTTPBadRequest(explanation=msg)
        return filters

    def _validate_limit(self, limit):
        """Coerce *limit* to a non-negative int; raise 400 when invalid."""
        try:
            limit = int(limit)
        except ValueError:
            msg = _("limit param must be an integer")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        if limit < 0:
            msg = _("limit param must be positive")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        return limit

    def show(self, request):
        """Extract filter parameters for a single-namespace GET."""
        params = request.params.copy()
        return {'filters': self._get_filters(params)}

    def create(self, request):
        """Validate and deserialize a namespace-create request body."""
        body = self._get_request_body(request)
        self._check_allowed(body)
        try:
            self.schema.validate(body)
        except exception.InvalidObject as e:
            raise webob.exc.HTTPBadRequest(explanation=e.msg)
        namespace = json.fromjson(Namespace, body)
        return dict(namespace=namespace)

    def update(self, request):
        """Validate and deserialize a namespace-update request body."""
        body = self._get_request_body(request)
        self._check_allowed(body)
        try:
            self.schema.validate(body)
        except exception.InvalidObject as e:
            raise webob.exc.HTTPBadRequest(explanation=e.msg)
        namespace = json.fromjson(Namespace, body)
        return dict(user_ns=namespace)
class ResponseSerializer(wsgi.JSONResponseSerializer):
    """Render namespace wsme models as JSON HTTP responses."""

    def __init__(self, schema=None):
        super(ResponseSerializer, self).__init__()
        self.schema = schema

    def create(self, response, namespace):
        """201 response carrying the new namespace and its Location."""
        self.__render(json.tojson(Namespace, namespace), response, http.CREATED)
        response.location = get_namespace_href(namespace)

    def show(self, response, namespace):
        """200 response with a single namespace."""
        self.__render(json.tojson(Namespace, namespace), response)

    def index(self, response, result):
        """200 response with a namespace collection plus paging links."""
        params = dict(response.request.params)
        params.pop('marker', None)
        query = urlparse.urlencode(params)
        result.first = "/v2/metadefs/namespaces"
        result.schema = "/v2/schemas/metadefs/namespaces"
        if query:
            result.first = '%s?%s' % (result.first, query)
        if result.next:
            # Re-encode the query with the next-page marker included.
            params['marker'] = result.next
            next_query = urlparse.urlencode(params)
            result.next = '/v2/metadefs/namespaces?%s' % next_query
        self.__render(json.tojson(Namespaces, result), response)

    def update(self, response, namespace):
        """200 response with the updated namespace."""
        self.__render(json.tojson(Namespace, namespace), response, http.OK)

    def delete(self, response, result):
        """Empty 204 response."""
        response.status_int = http.NO_CONTENT

    def delete_objects(self, response, result):
        """Empty 204 response."""
        response.status_int = http.NO_CONTENT

    def delete_properties(self, response, result):
        """Empty 204 response."""
        response.status_int = http.NO_CONTENT

    def delete_tags(self, response, result):
        """Empty 204 response."""
        response.status_int = http.NO_CONTENT

    def __render(self, json_data, response, response_status=None):
        """Serialize *json_data* onto *response* as UTF-8 JSON."""
        response.unicode_body = jsonutils.dumps(json_data, ensure_ascii=False)
        response.content_type = 'application/json'
        if response_status:
            response.status_int = response_status
        return response
def _get_base_definitions():
    """Return the shared JSON-schema ``definitions`` for namespace schemas."""
    return get_schema_definitions()
def get_schema_definitions():
    """Return JSON-schema ``definitions`` shared by metadef schemas.

    The entries largely mirror JSON-Schema draft keywords: helper types
    (``positiveInteger``, ``stringArray``) plus the ``property`` shape
    that namespace/object property maps reference via ``$ref``.
    """
    return {
        "positiveInteger": {
            "type": "integer",
            "minimum": 0
        },
        "positiveIntegerDefault0": {
            "allOf": [
                {"$ref": "#/definitions/positiveInteger"},
                {"default": 0}
            ]
        },
        "stringArray": {
            "type": "array",
            "items": {"type": "string"},
            # "minItems": 1,
            "uniqueItems": True
        },
        "property": {
            "type": "object",
            "additionalProperties": {
                "type": "object",
                "required": ["title", "type"],
                "properties": {
                    "name": {
                        "type": "string",
                        "maxLength": 80
                    },
                    "title": {
                        "type": "string"
                    },
                    "description": {
                        "type": "string"
                    },
                    "operators": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    },
                    "type": {
                        "type": "string",
                        "enum": [
                            "array",
                            "boolean",
                            "integer",
                            "number",
                            "object",
                            "string",
                            None
                        ]
                    },
                    "required": {
                        "$ref": "#/definitions/stringArray"
                    },
                    "minimum": {
                        "type": "number"
                    },
                    "maximum": {
                        "type": "number"
                    },
                    "maxLength": {
                        "$ref": "#/definitions/positiveInteger"
                    },
                    "minLength": {
                        "$ref": "#/definitions/positiveIntegerDefault0"
                    },
                    "pattern": {
                        "type": "string",
                        "format": "regex"
                    },
                    "enum": {
                        "type": "array"
                    },
                    "readonly": {
                        "type": "boolean"
                    },
                    "default": {},
                    "items": {
                        "type": "object",
                        "properties": {
                            "type": {
                                "type": "string",
                                "enum": [
                                    "array",
                                    "boolean",
                                    "integer",
                                    "number",
                                    "object",
                                    "string",
                                    None
                                ]
                            },
                            "enum": {
                                "type": "array"
                            }
                        }
                    },
                    "maxItems": {
                        "$ref": "#/definitions/positiveInteger"
                    },
                    "minItems": {
                        "$ref": "#/definitions/positiveIntegerDefault0"
                    },
                    "uniqueItems": {
                        "type": "boolean",
                        "default": False
                    },
                    "additionalItems": {
                        "type": "boolean"
                    },
                }
            }
        }
    }
def _get_base_properties():
    """Return the JSON-schema ``properties`` map for a metadef namespace."""
    return {
        "namespace": {
            "type": "string",
            "description": _("The unique namespace text."),
            "maxLength": 80,
        },
        "display_name": {
            "type": "string",
            "description": _("The user friendly name for the namespace. Used "
                             "by UI if available."),
            "maxLength": 80,
        },
        "description": {
            "type": "string",
            "description": _("Provides a user friendly description of the "
                             "namespace."),
            "maxLength": 500,
        },
        "visibility": {
            "type": "string",
            "description": _("Scope of namespace accessibility."),
            "enum": ["public", "private"],
        },
        "protected": {
            "type": "boolean",
            "description": _("If true, namespace will not be deletable."),
        },
        "owner": {
            "type": "string",
            "description": _("Owner of the namespace."),
            "maxLength": 255,
        },
        "created_at": {
            "type": "string",
            "readOnly": True,
            "description": _("Date and time of namespace creation"),
            "format": "date-time"
        },
        "updated_at": {
            "type": "string",
            "readOnly": True,
            "description": _("Date and time of the last namespace"
                             " modification"),
            "format": "date-time"
        },
        "schema": {
            'readOnly': True,
            "type": "string"
        },
        "self": {
            'readOnly': True,
            "type": "string"
        },
        "resource_type_associations": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {
                        "type": "string"
                    },
                    "prefix": {
                        "type": "string"
                    },
                    "properties_target": {
                        "type": "string"
                    }
                }
            }
        },
        "properties": {
            "$ref": "#/definitions/property"
        },
        "objects": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {
                        "type": "string"
                    },
                    "description": {
                        "type": "string"
                    },
                    "required": {
                        "$ref": "#/definitions/stringArray"
                    },
                    "properties": {
                        "$ref": "#/definitions/property"
                    },
                }
            }
        },
        "tags": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {
                        "type": "string"
                    }
                }
            }
        },
    }
def get_schema():
    """Build the JSON schema describing a single metadef namespace."""
    return glance.schema.Schema(
        'namespace',
        _get_base_properties(),
        required=Namespace.get_mandatory_attrs(),
        definitions=_get_base_definitions(),
    )
def get_collection_schema():
    """Build the collection schema wrapping the single-namespace schema."""
    return glance.schema.CollectionSchema('namespaces', get_schema())
def get_namespace_href(namespace):
    """Return the v2 API URI for *namespace*."""
    return '/v2/metadefs/namespaces/%s' % namespace.namespace
def get_object_href(namespace_name, metadef_object):
    """Return the v2 API URI for an object within a namespace."""
    return '/v2/metadefs/namespaces/%s/objects/%s' % (
        namespace_name, metadef_object.name)
def get_tag_href(namespace_name, metadef_tag):
    """Return the v2 API URI for a tag within a namespace."""
    return '/v2/metadefs/namespaces/%s/tags/%s' % (
        namespace_name, metadef_tag.name)
def create_resource():
    """Namespaces resource factory method"""
    schema = get_schema()
    return wsgi.Resource(NamespaceController(),
                         RequestDeserializer(schema),
                         ResponseSerializer(schema))
| openstack/glance | glance/api/v2/metadef_namespaces.py | Python | apache-2.0 | 38,490 |
"""Support for monitoring OctoPrint 3D printers."""
from datetime import timedelta
import logging
from typing import cast
from pyoctoprintapi import ApiError, OctoprintClient, PrinterOffline
import voluptuous as vol
from yarl import URL
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_API_KEY,
CONF_BINARY_SENSORS,
CONF_HOST,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
CONF_PATH,
CONF_PORT,
CONF_SENSORS,
CONF_SSL,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import slugify as util_slugify
import homeassistant.util.dt as dt_util
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
def has_all_unique_names(value):
    """Ensure every configured printer has a distinct (slugified) name."""
    slugs = [util_slugify(printer["name"]) for printer in value]
    vol.Schema(vol.Unique())(slugs)
    return value
def ensure_valid_path(value):
    """Validate the path, ensuring it starts and ends with a /."""
    vol.Schema(cv.string)(value)
    # startswith/endswith also cope with an empty string; the previous
    # value[0]/value[-1] indexing raised IndexError for "".
    if not value.startswith("/"):
        value = f"/{value}"
    if not value.endswith("/"):
        value += "/"
    return value
# Entity platforms provided by this integration.
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
DEFAULT_NAME = "OctoPrint"
CONF_NUMBER_OF_TOOLS = "number_of_tools"
CONF_BED = "bed"
# Condition names accepted by the deprecated YAML configuration below.
BINARY_SENSOR_TYPES = [
    "Printing",
    "Printing Error",
]
BINARY_SENSOR_SCHEMA = vol.Schema(
    {
        vol.Optional(
            CONF_MONITORED_CONDITIONS, default=list(BINARY_SENSOR_TYPES)
        ): vol.All(cv.ensure_list, [vol.In(BINARY_SENSOR_TYPES)]),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
SENSOR_TYPES = [
    "Temperatures",
    "Current State",
    "Job Percentage",
    "Time Remaining",
    "Time Elapsed",
]
SENSOR_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All(
            cv.ensure_list, [vol.In(SENSOR_TYPES)]
        ),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
# Deprecated YAML configuration: validated only so existing setups can be
# imported as config entries by async_setup().
CONFIG_SCHEMA = vol.Schema(
    vol.All(
        cv.deprecated(DOMAIN),
        {
            DOMAIN: vol.All(
                cv.ensure_list,
                [
                    vol.Schema(
                        {
                            vol.Required(CONF_API_KEY): cv.string,
                            vol.Required(CONF_HOST): cv.string,
                            vol.Optional(CONF_SSL, default=False): cv.boolean,
                            vol.Optional(CONF_PORT, default=80): cv.port,
                            vol.Optional(CONF_PATH, default="/"): ensure_valid_path,
                            # Following values are not longer used in the configuration of the integration
                            # and are here for historical purposes
                            vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
                            vol.Optional(
                                CONF_NUMBER_OF_TOOLS, default=0
                            ): cv.positive_int,
                            vol.Optional(CONF_BED, default=False): cv.boolean,
                            vol.Optional(CONF_SENSORS, default={}): SENSOR_SCHEMA,
                            vol.Optional(
                                CONF_BINARY_SENSORS, default={}
                            ): BINARY_SENSOR_SCHEMA,
                        }
                    )
                ],
                has_all_unique_names,
            )
        },
    ),
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the OctoPrint component."""
    if DOMAIN not in config:
        return True
    domain_config = config[DOMAIN]
    # YAML configuration is deprecated: forward each configured printer to
    # the config-entry import flow instead of setting it up directly.
    for conf in domain_config:
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN,
                context={"source": SOURCE_IMPORT},
                data={
                    CONF_API_KEY: conf[CONF_API_KEY],
                    CONF_HOST: conf[CONF_HOST],
                    CONF_PATH: conf[CONF_PATH],
                    CONF_PORT: conf[CONF_PORT],
                    CONF_SSL: conf[CONF_SSL],
                },
            )
        )
    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up OctoPrint from a config entry."""
    domain_data = hass.data.setdefault(DOMAIN, {})
    data = entry.data
    client = OctoprintClient(
        data[CONF_HOST],
        async_get_clientsession(hass),
        data[CONF_PORT],
        data[CONF_SSL],
        data[CONF_PATH],
    )
    client.set_api_key(data[CONF_API_KEY])
    coordinator = OctoprintDataUpdateCoordinator(hass, client, entry, 30)
    await coordinator.async_config_entry_first_refresh()
    domain_data[entry.entry_id] = {"coordinator": coordinator, "client": client}
    hass.config_entries.async_setup_platforms(entry, PLATFORMS)
    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(
        entry, PLATFORMS
    ):
        hass.data[DOMAIN].pop(entry.entry_id)
    return unload_ok
class OctoprintDataUpdateCoordinator(DataUpdateCoordinator):
    """Class to manage fetching Octoprint data."""
    config_entry: ConfigEntry
    def __init__(
        self,
        hass: HomeAssistant,
        octoprint: OctoprintClient,
        config_entry: ConfigEntry,
        interval: int,
    ) -> None:
        """Initialize."""
        super().__init__(
            hass,
            _LOGGER,
            name=f"octoprint-{config_entry.entry_id}",
            update_interval=timedelta(seconds=interval),
        )
        self.config_entry = config_entry
        self._octoprint = octoprint
        # Tracks whether the "printer offline" state was already logged, so
        # the debug message is emitted only on the online->offline transition.
        self._printer_offline = False
        self.data = {"printer": None, "job": None, "last_read_time": None}
    async def _async_update_data(self):
        """Update data via API."""
        printer = None
        try:
            job = await self._octoprint.get_job_info()
        except ApiError as err:
            raise UpdateFailed(err) from err
        # If octoprint is on, but the printer is disconnected
        # printer will return a 409, so continue using the last
        # reading if there is one
        try:
            printer = await self._octoprint.get_printer_info()
        except PrinterOffline:
            if not self._printer_offline:
                _LOGGER.debug("Unable to retrieve printer information: Printer offline")
            self._printer_offline = True
        except ApiError as err:
            raise UpdateFailed(err) from err
        else:
            self._printer_offline = False
        return {"job": job, "printer": printer, "last_read_time": dt_util.utcnow()}
    @property
    def device_info(self) -> DeviceInfo:
        """Device info."""
        unique_id = cast(str, self.config_entry.unique_id)
        # URL users can follow to reach the OctoPrint web UI, rebuilt from
        # the config-entry connection settings.
        configuration_url = URL.build(
            scheme=self.config_entry.data[CONF_SSL] and "https" or "http",
            host=self.config_entry.data[CONF_HOST],
            port=self.config_entry.data[CONF_PORT],
            path=self.config_entry.data[CONF_PATH],
        )
        return DeviceInfo(
            identifiers={(DOMAIN, unique_id)},
            manufacturer="OctoPrint",
            name="OctoPrint",
            configuration_url=str(configuration_url),
        )
| mezz64/home-assistant | homeassistant/components/octoprint/__init__.py | Python | apache-2.0 | 7,884 |
from . import font
from .indicator import Indicator, IndicatorOptions
from .airspeed import AirspeedIndicator
from .altitude import AltitudeIndicator
from .attitude import AttitudeIndicator
from .compass import CompassIndicator
from .pfd import PFD
from .joystick import Joystick
from . import base_test
| rbmj/pyflightcontrol | pyflightcontrol/base/__init__.py | Python | apache-2.0 | 305 |
# Copyright 2016 Yanis Guenane <yguenane@redhat.com>
# Author: Yanis Guenane <yguenane@redhat.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import os
import setuptools
from lecm import version
def _get_requirements():
    """Return the dependency list declared in requirements.txt.

    Editable/URL requirements ('-e ...') are skipped because
    ``install_requires`` does not support them, and blank lines are
    dropped so setuptools never receives empty requirement strings.
    """
    requirements_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'requirements.txt')
    with open(requirements_path, 'r') as f:
        requirements = f.read()
    return [dep for dep in requirements.split('\n')
            if dep and not dep.startswith('-e')]
def _get_readme():
    """Read and return the project README (reStructuredText) as text."""
    here = os.path.dirname(os.path.abspath(__file__))
    readme_path = '%s/%s' % (here, 'README.rst')
    with codecs.open(readme_path, 'r', encoding='utf8') as f:
        return f.read()
# Package metadata; the `lecm` console script dispatches to lecm.shell:main.
setuptools.setup(
    name='lecm',
    version=version.__version__,
    packages=setuptools.find_packages(),
    author='Yanis Guenane',
    author_email='yguenane@redhat.com',
    # NOTE: the original used SQL-style quote doubling ("Let''s"), which
    # Python concatenates to "Lets", and a backslash-continued literal that
    # embedded a run of spaces; use a proper apostrophe and clean wrapping.
    description="Tool to manage Let's Encrypt certificates "
                'from configuration file',
    long_description=_get_readme(),
    install_requires=_get_requirements(),
    url='https://github.com/Spredzy/lecm',
    license='Apache v2.0',
    include_package_data=True,
    classifiers=[
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    entry_points={
        'console_scripts': [
            'lecm = lecm.shell:main'
        ],
    }
)
| Spredzy/lecm | setup.py | Python | apache-2.0 | 2,374 |
#!/usr/bin/env python3
# Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code for issue 46 in or-tools."""
from ortools.constraint_solver import pywrapcp
class AssignToStartMin(pywrapcp.PyDecisionBuilder):
    """Decision builder that pins every interval to its earliest start.

    Next() lowers each interval's maximum start to its current minimum
    start, fixing the start times, then returns None to signal that no
    further decisions are needed.
    """
    def __init__(self, intervals):
        pywrapcp.PyDecisionBuilder.__init__(self)
        # Interval variables to fix when this builder runs.
        self.__intervals = intervals
    def Next(self, solver):
        for interval in self.__intervals:
            interval.SetStartMax(interval.StartMin())
        return None
    def DebugString(self):
        return 'CustomDecisionBuilder'
def NoSequence():
    """Solve three disjunctive fixed-duration tasks without sequence vars."""
    print('NoSequence')
    solver = pywrapcp.Solver('Ordo')
    # Build the tasks with a real list comprehension instead of abusing a
    # comprehension for its append() side effects and discarding the result.
    tasks = [
        solver.FixedDurationIntervalVar(0, 25, 5, False, 'Tasks%i' % i)
        for i in range(3)
    ]
    print(tasks)
    disj = solver.DisjunctiveConstraint(tasks, 'Disjunctive')
    solver.Add(disj)
    collector = solver.AllSolutionCollector()
    collector.Add(tasks)
    intervalPhase = solver.Phase(tasks, solver.INTERVAL_DEFAULT)
    solver.Solve(intervalPhase, [collector])
    print(collector.SolutionCount())
    for i in range(collector.SolutionCount()):
        print("Solution ", i)
        print(collector.ObjectiveValue(i))
        print([collector.StartValue(i, tasks[j]) for j in range(3)])
        print([collector.EndValue(i, tasks[j]) for j in range(3)])
def Sequence():
    """Solve the same model through a sequence variable, ranking task 0 first."""
    print('Sequence')
    solver = pywrapcp.Solver('Ordo')
    # Real list comprehension instead of a side-effecting comprehension.
    tasks = [
        solver.FixedDurationIntervalVar(0, 25, 5, False, 'Tasks%i' % i)
        for i in range(3)
    ]
    print(tasks)
    disj = solver.DisjunctiveConstraint(tasks, 'Disjunctive')
    solver.Add(disj)
    sequence = [disj.SequenceVar()]
    sequence[0].RankFirst(0)
    collector = solver.AllSolutionCollector()
    collector.Add(sequence)
    collector.Add(tasks)
    sequencePhase = solver.Phase(sequence, solver.SEQUENCE_DEFAULT)
    intervalPhase = AssignToStartMin(tasks)
    # intervalPhase = solver.Phase(tasks, solver.INTERVAL_DEFAULT)
    mainPhase = solver.Compose([sequencePhase, intervalPhase])
    solver.Solve(mainPhase, [collector])
    print(collector.SolutionCount())
    for i in range(collector.SolutionCount()):
        print("Solution ", i)
        print(collector.ObjectiveValue(i))
        print([collector.StartValue(i, tasks[j]) for j in range(3)])
        print([collector.EndValue(i, tasks[j]) for j in range(3)])
def main():
    """Run both reproductions (with and without sequence variables)."""
    NoSequence()
    Sequence()
if __name__ == '__main__':
    main()
| google/or-tools | examples/tests/issue46.py | Python | apache-2.0 | 3,057 |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# murano documentation build configuration file, created by
# sphinx-quickstart on Sat May 1 15:17:47 2010.
#
# This file is execfile()d with the current directory set to
# its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# Sphinx extensions: os-api-ref renders the REST API request/response
# stanzas; openstackdocstheme supplies the standard OpenStack docs theme.
extensions = [
    'os_api_ref',
    'openstackdocstheme'
]
html_theme = 'openstackdocs'
html_theme_options = {
    "sidebar_mode": "toc",
}
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/murano'
openstackdocs_bug_project = 'murano'
openstackdocs_bug_tag = 'api-ref'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Make the repository root, the doc tree and this directory importable so
# Sphinx can locate local modules and extensions.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
# Rendered by Sphinx into the copyright notice of the generated pages.
copyright = u'2016-present, OpenStack Foundation'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# The reST default role (used for this markup: `text`) to use
# for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# -- Options for man page output ----------------------------------------------
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_use_modindex = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'muranodoc'
# -- Options for LaTeX output -------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index', 'Murano.tex', u'OpenStack Application Catalog API Documentation',
     u'OpenStack Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_use_modindex = True
| openstack/murano | api-ref/source/conf.py | Python | apache-2.0 | 6,670 |
# -*- coding: utf-8 -*-
###############################################################################
#
# GetNewsletterScheduleTime
# Get the scheduled delivery time of a specified Newsletter.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetNewsletterScheduleTime(Choreography):
    """Choreo wrapper that fetches the scheduled delivery time of a SendGrid newsletter."""

    # Temboo library path identifying this Choreo on the Temboo server.
    _CHOREO_PATH = '/Library/SendGrid/NewsletterAPI/Schedule/GetNewsletterScheduleTime'

    def __init__(self, temboo_session):
        """
        Create a new instance of the GetNewsletterScheduleTime Choreo.

        A TembooSession object, containing a valid set of Temboo
        credentials, must be supplied.
        """
        Choreography.__init__(self, temboo_session, self._CHOREO_PATH)

    def new_input_set(self):
        # Factory for the Choreo-specific input container.
        return GetNewsletterScheduleTimeInputSet()

    def _make_result_set(self, result, path):
        # Wrap a raw execution result in the typed result set.
        return GetNewsletterScheduleTimeResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Bind an in-flight execution id to the typed execution wrapper.
        return GetNewsletterScheduleTimeChoreographyExecution(session, exec_id, path)
class GetNewsletterScheduleTimeInputSet(InputSet):
    """
    Input container for the GetNewsletterScheduleTime Choreo.

    Use the setters below to specify input parameter values before
    executing the Choreo.
    """

    def set_APIKey(self, value):
        """Set APIKey: (required, string) The API Key obtained from SendGrid."""
        InputSet._set_input(self, 'APIKey', value)

    def set_APIUser(self, value):
        """Set APIUser: (required, string) The username registered with SendGrid."""
        InputSet._set_input(self, 'APIUser', value)

    def set_Name(self, value):
        """Set Name: (required, string) The newsletter whose delivery schedule is retrieved."""
        InputSet._set_input(self, 'Name', value)

    def set_ResponseFormat(self, value):
        """Set ResponseFormat: (optional, string) Response format, json or xml; defaults to json."""
        InputSet._set_input(self, 'ResponseFormat', value)
class GetNewsletterScheduleTimeResultSet(ResultSet):
    """
    Typed access to the values returned by the GetNewsletterScheduleTime Choreo.

    Used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # NOTE: the parameter name shadows the builtin ``str``; kept as-is
        # because this signature is part of the generated public interface.
        return json.loads(str)

    def get_Response(self):
        """Return the "Response" output (format follows the ResponseFormat input; json by default)."""
        return self._output.get('Response', None)
class GetNewsletterScheduleTimeChoreographyExecution(ChoreographyExecution):
    """Execution handle for GetNewsletterScheduleTime; yields typed result sets."""

    def _make_result_set(self, response, path):
        # Convert the raw server response into the Choreo-specific result set.
        return GetNewsletterScheduleTimeResultSet(response, path)
| egetzel/wecrow | truehand2014/temboo/Library/SendGrid/NewsletterAPI/Schedule/GetNewsletterScheduleTime.py | Python | apache-2.0 | 3,356 |
# -*- encoding: utf-8 -*-
'''
:maintainer: HubbleStack
:maturity: 2016.7.0
:platform: Windows
:requires: SaltStack
'''
from __future__ import absolute_import
import copy
import fnmatch
import logging
import salt.utils
import salt.utils.platform
from salt.exceptions import CommandExecutionError
from distutils.version import LooseVersion
# Module-level logger for this audit module.
log = logging.getLogger(__name__)

# Key under which this module's entries live in the merged audit yaml
# (e.g. win_pkg:whitelist / win_pkg:blacklist).
__virtualname__ = 'win_pkg'
def __virtual__():
    """Only load this audit module on Windows hosts."""
    if salt.utils.platform.is_windows():
        return True
    # Non-Windows platforms get a (False, reason) tuple per salt convention.
    return False, 'This audit module only runs on windows'
def apply_labels(__data__, labels):
    '''
    Return a copy of ``__data__`` restricted to the test cases whose
    ``labels`` list contains every label requested for this audit run.
    When no labels are given, the data is returned unchanged.
    '''
    if not labels:
        return __data__

    wanted = set(labels)
    filtered = {__virtualname__: {}}
    for section in ('blacklist', 'whitelist'):
        if section not in __data__.get(__virtualname__, {}):
            continue
        kept = []
        for case in __data__[__virtualname__].get(section, []):
            # Each case is a one-entry dict: {test_name: {description, labels, ...}}.
            if not (isinstance(case, dict) and case):
                continue
            body = case.get(next(iter(case)))
            if wanted.issubset(set(body.get('labels', []))):
                kept.append(case)
        filtered[__virtualname__][section] = kept
    return filtered
def audit(data_list, tags, labels, debug=False, **kwargs):
    '''
    Audit the locally installed packages against the blacklist/whitelist
    entries in the merged profile yaml and return Success/Failure/Controlled
    results for every tag matching the ``tags`` glob.
    '''
    __data__ = {}
    try:
        __pkgdata__ = __salt__['pkg.list_pkgs']()
    except CommandExecutionError:
        # Package db may not be populated yet; refresh once and retry.
        __salt__['pkg.refresh_db']()
        __pkgdata__ = __salt__['pkg.list_pkgs']()
    for profile, data in data_list:
        _merge_yaml(__data__, data, profile)
    # Drop test cases whose labels don't match the requested run labels.
    __data__ = apply_labels(__data__, labels)
    __tags__ = _get_tags(__data__)
    if debug:
        log.debug('package audit __data__:')
        log.debug(__data__)
        log.debug('package audit __tags__:')
        log.debug(__tags__)
    ret = {'Success': [], 'Failure': [], 'Controlled': []}
    for tag in __tags__:
        if fnmatch.fnmatch(tag, tags):
            for tag_data in __tags__[tag]:
                # Controlled tests are reported but never evaluated.
                if 'control' in tag_data:
                    ret['Controlled'].append(tag_data)
                    continue
                name = tag_data['name']
                audit_type = tag_data['type']
                match_output = tag_data['match_output'].lower()

                # Blacklisted audit (do not include)
                if 'blacklist' in audit_type:
                    if name not in __pkgdata__:
                        ret['Success'].append(tag_data)
                    else:
                        tag_data['failure_reason'] = "Blacklisted package '{0}' is installed " \
                                                     "on the system".format(name)
                        ret['Failure'].append(tag_data)

                # Whitelisted audit (must include)
                if 'whitelist' in audit_type:
                    if name in __pkgdata__:
                        audit_value = __pkgdata__[name]
                        tag_data['found_value'] = audit_value
                        # Compare installed version to the required one using
                        # the test's value_type (equal/less/more).
                        secret = _translate_value_type(audit_value, tag_data['value_type'], match_output)
                        if secret:
                            ret['Success'].append(tag_data)
                        else:
                            tag_data['failure_reason'] = "Version '{0}({1}) of the requisite" \
                                                         " package '{2}' is not installed on" \
                                                         " the system".format(match_output,
                                                                              tag_data['value_type'],
                                                                              name)
                            ret['Failure'].append(tag_data)
                    else:
                        tag_data['failure_reason'] = "Version '{0}({1}) of the requisite package" \
                                                     " '{2}' is not installed on the system" \
                                                     .format(match_output, tag_data['value_type'], name)
                        ret['Failure'].append(tag_data)
    return ret
def _merge_yaml(ret, data, profile=None):
    '''
    Merge the ``blacklist`` and ``whitelist`` sections of ``data`` into
    ``ret`` under this module's top-level key, tagging each entry with the
    profile it came from.

    :param ret: accumulator dict, mutated in place and also returned
    :param data: one profile's parsed yaml
    :param profile: name of the profile the data came from (recorded as
        ``nova_profile`` on each test body)
    '''
    if __virtualname__ not in ret:
        ret[__virtualname__] = {}
    for topkey in ('blacklist', 'whitelist'):
        if topkey in data.get(__virtualname__, {}):
            if topkey not in ret[__virtualname__]:
                ret[__virtualname__][topkey] = []
            # .items() instead of .iteritems() so the module also runs on
            # Python 3 (iteritems() was removed); on Python 2 items() is
            # equivalent for this iteration.
            for key, val in data[__virtualname__][topkey].items():
                if profile and isinstance(val, dict):
                    val['nova_profile'] = profile
                ret[__virtualname__][topkey].append({key: val})
    return ret
def _get_tags(data):
    '''
    Retrieve all the tags for this distro from the yaml.

    Walks the merged blacklist/whitelist data and returns a dict mapping
    each tag to a list of flattened test definitions applicable to this
    host's ``osfullname`` grain.
    '''
    ret = {}
    distro = __grains__.get('osfullname')
    # .items() instead of .iteritems() throughout so the module also runs
    # on Python 3.
    for toplist, toplevel in data.get(__virtualname__, {}).items():
        # win_pkg:whitelist
        for audit_dict in toplevel:
            for audit_id, audit_data in audit_dict.items():
                # win_pkg:whitelist:PasswordComplexity
                tags_dict = audit_data.get('data', {})
                # win_pkg:whitelist:PasswordComplexity:data
                tags = None
                for osfinger in tags_dict:
                    if osfinger == '*':
                        continue
                    osfinger_list = [finger.strip() for finger in osfinger.split(',')]
                    for osfinger_glob in osfinger_list:
                        if fnmatch.fnmatch(distro, osfinger_glob):
                            tags = tags_dict.get(osfinger)
                            break
                    if tags is not None:
                        break
                # If we didn't find a match, check for a '*'
                if tags is None:
                    tags = tags_dict.get('*', [])
                # win_pkg:whitelist:PasswordComplexity:data:Windows 2012
                if isinstance(tags, dict):
                    # malformed yaml, convert to list of dicts
                    tags = [{name: tag} for name, tag in tags.items()]
                for item in tags:
                    for name, tag in item.items():
                        tag_data = {}
                        # Whitelist could have a dictionary, not a string
                        if isinstance(tag, dict):
                            tag_data = copy.deepcopy(tag)
                            tag = tag_data.pop('tag')
                        if tag not in ret:
                            ret[tag] = []
                        # NOTE(review): 'module' is reported as 'win_auditpol'
                        # even though this is the win_pkg module; this looks
                        # copy/pasted -- confirm before changing, since
                        # downstream reporting may key on the value.
                        formatted_data = {'name': name,
                                          'tag': tag,
                                          'module': 'win_auditpol',
                                          'type': toplist}
                        formatted_data.update(tag_data)
                        formatted_data.update(audit_data)
                        formatted_data.pop('data')
                        ret[tag].append(formatted_data)
    return ret
def _translate_value_type(current, value, evaluator):
if 'equal' in value.lower() and LooseVersion(current) == LooseVersion(evaluator):
return True
if 'less' in value.lower() and LooseVersion(current) <= LooseVersion(evaluator):
return True
if 'more' in value.lower() and LooseVersion(current) >= LooseVersion(evaluator):
return True
return False
| hubblestack/hubble-salt | hubblestack_nova/win_pkg.py | Python | apache-2.0 | 8,165 |
# Copyright 2021 The SLOE Logistic Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Builds sloe_logistic package."""
from distutils import core
from distutils.command import build_clib
from pybind11.setup_helpers import build_ext
from pybind11.setup_helpers import Pybind11Extension
# C library built alongside the extension: scipy's Brent root finder,
# linked into the pybind11 module below.
libraries = [
    ("scipy_brentq", {
        "sources": ["third_party/py/scipy/optimize/Zeros/brentq.c",],
    }),
]

# pybind11 C++ extension exposing the MLE parameter integrands.
ext_modules = [
    Pybind11Extension("sloe_logistic.mle_param_integrands", [
        "mle_param_integrands.cc",
    ]),
]

core.setup(
    name="sloe_logistic",
    version="0.0.1",
    description="Implements SLOE method and Logistic Regression Inference",
    long_description="Code to supplement the ICML submission SLOE: A Faster "
    "Method for Statistical Inference in High-Dimensional Logistic Regression.",
    packages=["sloe_logistic", "sloe_logistic.sloe_experiments"],
    # Map package names onto the repo layout (this dir is the package root).
    package_dir={
        "sloe_logistic": ".",
        "sloe_logistic.sloe_experiments": "sloe_experiments/"
    },
    libraries=libraries,
    ext_modules=ext_modules,
    # pybind11's build_ext injects the right C++ flags; build_clib builds
    # the plain-C helper library above.
    cmdclass={
        "build_ext": build_ext,
        "build_clib": build_clib.build_clib,
    },
    zip_safe=False,
)
| google-research/sloe-logistic | setup.py | Python | apache-2.0 | 1,690 |
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Policy for reward prediction and boltzmann exploration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Optional, Text, Tuple, Sequence
import gin
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
import tensorflow_probability as tfp
from tf_agents.bandits.networks import heteroscedastic_q_network
from tf_agents.bandits.policies import constraints as constr
from tf_agents.bandits.specs import utils as bandit_spec_utils
from tf_agents.distributions import shifted_categorical
from tf_agents.policies import tf_policy
from tf_agents.policies import utils as policy_utilities
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import policy_step
from tf_agents.typing import types
@gin.configurable
class BoltzmannRewardPredictionPolicy(tf_policy.TFPolicy):
  """Class to build Reward Prediction Policies with Boltzmann exploration."""

  def __init__(self,
               time_step_spec: types.TimeStep,
               action_spec: types.NestedTensorSpec,
               reward_network: types.Network,
               temperature: types.FloatOrReturningFloat = 1.0,
               boltzmann_gumbel_exploration_constant: Optional[
                   types.Float] = None,
               observation_and_action_constraint_splitter: Optional[
                   types.Splitter] = None,
               accepts_per_arm_features: bool = False,
               constraints: Tuple[constr.NeuralConstraint, ...] = (),
               emit_policy_info: Tuple[Text, ...] = (),
               num_samples_list: Sequence[tf.Variable] = (),
               name: Optional[Text] = None):
    """Builds a BoltzmannRewardPredictionPolicy given a reward network.

    This policy takes a tf_agents.Network predicting rewards and chooses an
    action with weighted probabilities (i.e., using a softmax over the network
    estimates of value for each action).

    Args:
      time_step_spec: A `TimeStep` spec of the expected time_steps.
      action_spec: A nest of BoundedTensorSpec representing the actions.
      reward_network: An instance of a `tf_agents.network.Network`,
        callable via `network(observation, step_type) -> (output, final_state)`.
      temperature: float or callable that returns a float. The temperature used
        in the Boltzmann exploration.
      boltzmann_gumbel_exploration_constant: optional positive float. When
        provided, the policy implements Neural Bandit with Boltzmann-Gumbel
        exploration from the paper:
        N. Cesa-Bianchi et al., "Boltzmann Exploration Done Right", NIPS 2017.
      observation_and_action_constraint_splitter: A function used for masking
        valid/invalid actions with each state of the environment. The function
        takes in a full observation and returns a tuple consisting of 1) the
        part of the observation intended as input to the network and 2) the
        mask. The mask should be a 0-1 `Tensor` of shape
        `[batch_size, num_actions]`. This function should also work with a
        `TensorSpec` as input, and should output `TensorSpec` objects for the
        observation and mask.
      accepts_per_arm_features: (bool) Whether the policy accepts per-arm
        features.
      constraints: iterable of constraints objects that are instances of
        `tf_agents.bandits.agents.NeuralConstraint`.
      emit_policy_info: (tuple of strings) what side information we want to get
        as part of the policy info. Allowed values can be found in
        `policy_utilities.PolicyInfo`.
      num_samples_list: list or tuple of tf.Variable's. Used only in
        Boltzmann-Gumbel exploration. Otherwise, empty.
      name: The name of this policy. All variables in this module will fall
        under that name. Defaults to the class name.

    Raises:
      NotImplementedError: If `action_spec` contains more than one
        `BoundedTensorSpec` or the `BoundedTensorSpec` is not valid.
    """
    policy_utilities.check_no_mask_with_arm_features(
        accepts_per_arm_features, observation_and_action_constraint_splitter)
    flat_action_spec = tf.nest.flatten(action_spec)
    if len(flat_action_spec) > 1:
      raise NotImplementedError(
          'action_spec can only contain a single BoundedTensorSpec.')

    self._temperature = temperature
    action_spec = flat_action_spec[0]
    # Only scalar int32-style discrete action specs are supported.
    if (not tensor_spec.is_bounded(action_spec) or
        not tensor_spec.is_discrete(action_spec) or
        action_spec.shape.rank > 1 or
        action_spec.shape.num_elements() != 1):
      raise NotImplementedError(
          'action_spec must be a BoundedTensorSpec of type int32 and shape (). '
          'Found {}.'.format(action_spec))
    self._expected_num_actions = action_spec.maximum - action_spec.minimum + 1
    self._action_offset = action_spec.minimum
    reward_network.create_variables()
    self._reward_network = reward_network
    self._constraints = constraints

    self._boltzmann_gumbel_exploration_constant = (
        boltzmann_gumbel_exploration_constant)
    self._num_samples_list = num_samples_list
    if self._boltzmann_gumbel_exploration_constant is not None:
      if self._boltzmann_gumbel_exploration_constant <= 0.0:
        raise ValueError(
            'The Boltzmann-Gumbel exploration constant is expected to be ',
            'positive. Found: ', self._boltzmann_gumbel_exploration_constant)
      if self._action_offset > 0:
        raise NotImplementedError('Action offset is not supported when ',
                                  'Boltzmann-Gumbel exploration is enabled.')
      if accepts_per_arm_features:
        raise NotImplementedError(
            'Boltzmann-Gumbel exploration is not supported ',
            'for arm features case.')
      # Boltzmann-Gumbel needs one per-action sample counter.
      if len(self._num_samples_list) != self._expected_num_actions:
        raise ValueError(
            'Size of num_samples_list: ', len(self._num_samples_list),
            ' does not match the expected number of actions:',
            self._expected_num_actions)

    self._emit_policy_info = emit_policy_info
    predicted_rewards_mean = ()
    if policy_utilities.InfoFields.PREDICTED_REWARDS_MEAN in emit_policy_info:
      predicted_rewards_mean = tensor_spec.TensorSpec(
          [self._expected_num_actions])
    bandit_policy_type = ()
    if policy_utilities.InfoFields.BANDIT_POLICY_TYPE in emit_policy_info:
      bandit_policy_type = (
          policy_utilities.create_bandit_policy_type_tensor_spec(shape=[1]))
    if accepts_per_arm_features:
      # The features for the chosen arm is saved to policy_info.
      chosen_arm_features_info = (
          policy_utilities.create_chosen_arm_features_info_spec(
              time_step_spec.observation))
      info_spec = policy_utilities.PerArmPolicyInfo(
          predicted_rewards_mean=predicted_rewards_mean,
          bandit_policy_type=bandit_policy_type,
          chosen_arm_features=chosen_arm_features_info)
    else:
      info_spec = policy_utilities.PolicyInfo(
          predicted_rewards_mean=predicted_rewards_mean,
          bandit_policy_type=bandit_policy_type)

    self._accepts_per_arm_features = accepts_per_arm_features

    super(BoltzmannRewardPredictionPolicy, self).__init__(
        time_step_spec, action_spec,
        policy_state_spec=reward_network.state_spec,
        clip=False,
        info_spec=info_spec,
        emit_log_probability='log_probability' in emit_policy_info,
        observation_and_action_constraint_splitter=(
            observation_and_action_constraint_splitter),
        name=name)

  @property
  def accepts_per_arm_features(self):
    return self._accepts_per_arm_features

  def _variables(self):
    # Policy variables are the reward network's plus any constraint networks'.
    policy_variables = self._reward_network.variables
    for c in self._constraints:
      policy_variables.append(c.variables)
    return policy_variables

  def _get_temperature_value(self):
    # Temperature may be a schedule (callable) or a plain float.
    if callable(self._temperature):
      return self._temperature()
    return self._temperature

  def _distribution(self, time_step, policy_state):
    observation = time_step.observation
    if self.observation_and_action_constraint_splitter is not None:
      observation, _ = self.observation_and_action_constraint_splitter(
          observation)

    predictions, policy_state = self._reward_network(
        observation, time_step.step_type, policy_state)
    batch_size = tf.shape(predictions)[0]

    # Heteroscedastic networks return a struct; plain networks return logits.
    if isinstance(self._reward_network,
                  heteroscedastic_q_network.HeteroscedasticQNetwork):
      predicted_reward_values = predictions.q_value_logits
    else:
      predicted_reward_values = predictions

    predicted_reward_values.shape.with_rank_at_least(2)
    predicted_reward_values.shape.with_rank_at_most(3)
    if predicted_reward_values.shape[
        -1] is not None and predicted_reward_values.shape[
            -1] != self._expected_num_actions:
      # NOTE(review): the check above uses shape[-1] but the message below
      # reports shape[1]; misleading for rank-3 outputs -- confirm upstream.
      raise ValueError(
          'The number of actions ({}) does not match the reward_network output'
          ' size ({}).'.format(self._expected_num_actions,
                               predicted_reward_values.shape[1]))

    mask = constr.construct_mask_from_multiple_sources(
        time_step.observation, self._observation_and_action_constraint_splitter,
        self._constraints, self._expected_num_actions)

    if self._boltzmann_gumbel_exploration_constant is not None:
      logits = predicted_reward_values

      # Apply masking if needed. Overwrite the logits for invalid actions to
      # logits.dtype.min.
      if mask is not None:
        almost_neg_inf = tf.constant(logits.dtype.min, dtype=logits.dtype)
        logits = tf.compat.v2.where(
            tf.cast(mask, tf.bool), logits, almost_neg_inf)

      # Per-action exploration bonus C/sqrt(N_a) scales i.i.d. Gumbel noise
      # (Cesa-Bianchi et al., "Boltzmann Exploration Done Right").
      gumbel_dist = tfp.distributions.Gumbel(loc=0., scale=1.)
      gumbel_samples = gumbel_dist.sample(tf.shape(logits))
      num_samples_list_float = tf.stack(
          [tf.cast(x.read_value(), tf.float32) for x in self._num_samples_list],
          axis=-1)
      exploration_weights = tf.math.divide_no_nan(
          self._boltzmann_gumbel_exploration_constant,
          tf.sqrt(num_samples_list_float))
      final_logits = logits + exploration_weights * gumbel_samples
      actions = tf.cast(
          tf.math.argmax(final_logits, axis=1), self._action_spec.dtype)
      # Log probability is not available in closed form. We treat this as a
      # deterministic policy at the moment.
      log_probability = tf.zeros([batch_size], tf.float32)
    else:
      # Apply the temperature scaling, needed for Boltzmann exploration.
      logits = predicted_reward_values / self._get_temperature_value()

      # Apply masking if needed. Overwrite the logits for invalid actions to
      # logits.dtype.min.
      if mask is not None:
        almost_neg_inf = tf.constant(logits.dtype.min, dtype=logits.dtype)
        logits = tf.compat.v2.where(
            tf.cast(mask, tf.bool), logits, almost_neg_inf)

      if self._action_offset != 0:
        distribution = shifted_categorical.ShiftedCategorical(
            logits=logits,
            dtype=self._action_spec.dtype,
            shift=self._action_offset)
      else:
        distribution = tfp.distributions.Categorical(
            logits=logits,
            dtype=self._action_spec.dtype)

      actions = distribution.sample()
      log_probability = distribution.log_prob(actions)

    bandit_policy_values = tf.fill([batch_size, 1],
                                   policy_utilities.BanditPolicyType.BOLTZMANN)

    if self._accepts_per_arm_features:
      # Saving the features for the chosen action to the policy_info.
      def gather_observation(obs):
        return tf.gather(params=obs, indices=actions, batch_dims=1)

      chosen_arm_features = tf.nest.map_structure(
          gather_observation,
          observation[bandit_spec_utils.PER_ARM_FEATURE_KEY])
      policy_info = policy_utilities.PerArmPolicyInfo(
          log_probability=log_probability if
          policy_utilities.InfoFields.LOG_PROBABILITY in self._emit_policy_info
          else (),
          predicted_rewards_mean=(
              predicted_reward_values if policy_utilities.InfoFields
              .PREDICTED_REWARDS_MEAN in self._emit_policy_info else ()),
          bandit_policy_type=(bandit_policy_values
                              if policy_utilities.InfoFields.BANDIT_POLICY_TYPE
                              in self._emit_policy_info else ()),
          chosen_arm_features=chosen_arm_features)
    else:
      policy_info = policy_utilities.PolicyInfo(
          log_probability=log_probability if
          policy_utilities.InfoFields.LOG_PROBABILITY in self._emit_policy_info
          else (),
          predicted_rewards_mean=(
              predicted_reward_values if policy_utilities.InfoFields
              .PREDICTED_REWARDS_MEAN in self._emit_policy_info else ()),
          bandit_policy_type=(bandit_policy_values
                              if policy_utilities.InfoFields.BANDIT_POLICY_TYPE
                              in self._emit_policy_info else ()))

    return policy_step.PolicyStep(
        tfp.distributions.Deterministic(loc=actions), policy_state, policy_info)
| tensorflow/agents | tf_agents/bandits/policies/boltzmann_reward_prediction_policy.py | Python | apache-2.0 | 13,821 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Minio Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016, 2017, 2018 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
import os
import io
from sys import exit
import uuid
import shutil
import inspect
import json
from random import random
from string import ascii_lowercase
import time
import traceback
from datetime import datetime, timedelta
import urllib3
import certifi
import hashlib
from threading import Thread
from minio import Minio, PostPolicy, CopyConditions
from minio.error import (APINotImplemented, NoSuchBucketPolicy, ResponseError,
PreconditionFailed, BucketAlreadyOwnedByYou,
BucketAlreadyExists, InvalidBucketError)
class LimitedRandomReader(object):
    """
    LimitedRandomReader returns a Reader that upon read
    returns random lowercase-ASCII data, but stops with EOF after *limit*
    bytes.

    LimitedRandomReader is compatible with BufferedIOBase.

    :param limit: Trigger EOF after limit bytes.
    """

    # Pre-encoded one-byte alphabet. Built once at class definition instead
    # of on every read() call (the table is loop-invariant).
    _ALPHABET = [c.encode('ascii') for c in ascii_lowercase]

    def __init__(self, limit):
        self._limit = limit
        self._offset_location = 0

    def read(self, amt=64*1024):
        """
        Similar to :meth:`io.read`, with amt option.

        :param amt: How much of the content to read.
        :return: up to ``amt`` random lowercase bytes; ``b''`` at EOF.
        """
        # If offset has reached the limit, treat it as EOF.
        if self._offset_location == self._limit:
            # return empty bytes to indicate EOF.
            return b''

        # Never hand out more than the remaining budget.
        amt = min(amt, self._limit - self._offset_location)
        data = b''.join(
            [self._ALPHABET[int(random() * 26)] for _ in range(amt)])
        self._offset_location += len(data)
        return data
class LogOutput(object):
    """
    LogOutput is the class for log output. It is required standard for all
    SDK tests controlled by mint.

    Here are its attributes:
            'name': name of the SDK under test, e.g. 'minio-py'
            'function': name of the method/api under test with its signature
                        The following python code can be used to
                        pull args information of a <method> and to
                        put together with the method name:
                        <method>.__name__+'('+', '.join(args_list)+')'
                        e.g. 'remove_object(bucket_name, object_name)'
            'args': method/api arguments with their values, in
                    dictionary form: {'arg1': val1, 'arg2': val2, ...}
            'duration': duration of the whole test in milliseconds,
                        defaults to 0
            'alert': any extra information user is needed to be alerted about,
                     like whether this is a Blocker/Gateway/Server related
                     issue, etc., defaults to None
            'message': descriptive error message, defaults to None
            'error': stack-trace/exception message(only in case of failure),
                     actual low level exception/error thrown by the program,
                     defaults to None
            'status': exit status, possible values are 'PASS', 'FAIL', 'NA',
                      defaults to 'PASS'
    """
    PASS = 'PASS'
    FAIL = 'FAIL'
    NA = 'NA'

    def __init__(self, meth, test_name):
        # inspect.getargspec() was removed in Python 3.11; prefer
        # getfullargspec() and fall back only on interpreters (Python 2)
        # that do not have it.
        try:
            argspec = inspect.getfullargspec(meth)
        except AttributeError:
            argspec = inspect.getargspec(meth)
        # Drop the first parameter (the client/self receiver).
        self.__args_list = argspec.args[1:]
        self.__name = 'minio-py:'+test_name
        self.__function = meth.__name__+'('+', '.join(self.__args_list)+')'
        self.__args = {}
        self.__duration = 0
        self.__alert = ''
        self.__message = None
        self.__error = None
        self.__status = self.PASS
        self.__start_time = time.time()

    @property
    def name(self): return self.__name

    @property
    def function(self): return self.__function

    @property
    def args(self): return self.__args

    @name.setter
    def name(self, val): self.__name = val

    @function.setter
    def function(self, val): self.__function = val

    @args.setter
    def args(self, val): self.__args = val

    def json_report(self, err_msg='', alert='', status=''):
        """Return the mint-format JSON report line for this test.

        Falsy/empty fields are dropped from both args and the report itself.
        """
        self.__args = {k: v for k, v in self.__args.items() if v and v != ''}
        entry = {'name': self.__name,
                 'function': self.__function,
                 'args': self.__args,
                 'duration': int(round((time.time() - self.__start_time)*1000)),
                 'alert': str(alert),
                 'message': str(err_msg),
                 'error': traceback.format_exc() if err_msg and err_msg != '' else '',
                 'status': status if status and status != '' else \
                     self.FAIL if err_msg and err_msg != '' else self.PASS
                 }
        return json.dumps({k: v for k, v in entry.items() if v and v != ''})
def generate_bucket_name():
    """Return a unique bucket name for a single test run."""
    return 'minio-py-test-' + str(uuid.uuid4())
def is_s3(client):
    """True when the client's endpoint is Amazon S3 proper (not a compatible server)."""
    endpoint = client._endpoint_url
    return "s3.amazonaws" in endpoint
def test_make_bucket_default_region(client, log_output):
    """Create, verify and remove a bucket in the default ('us-east-1') region."""
    # default value for log_output.function attribute is;
    # log_output.function = "make_bucket(bucket_name, location)"

    # Get a unique bucket_name
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    # Default location
    log_output.args['location'] = "default value ('us-east-1')"
    try:
        # Create a bucket with default bucket location
        client.make_bucket(bucket_name)
        # Check if bucket was created properly
        log_output.function = 'bucket_exists(bucket_name)'
        client.bucket_exists(bucket_name)
        # Remove bucket
        log_output.function = 'remove_bucket(bucket_name)'
        client.remove_bucket(bucket_name)
    except Exception as err:
        raise Exception(err)
    # Test passes
    log_output.function = 'make_bucket(bucket_name, location)'
    print(log_output.json_report())
def test_make_bucket_with_region(client, log_output):
    """Create, verify and remove a bucket in an explicit non-default region."""
    # default value for log_output.function attribute is;
    # log_output.function = "make_bucket(bucket_name, location)"

    # Get a unique bucket_name
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    # A non-default location
    log_output.args['location'] = location = 'us-west-1'
    try:
        # Create a bucket with default bucket location
        client.make_bucket(bucket_name, location)
        # Check if bucket was created properly
        log_output.function = 'bucket_exists(bucket_name)'
        client.bucket_exists(bucket_name)
        # Remove bucket
        log_output.function = 'remove_bucket(bucket_name)'
        client.remove_bucket(bucket_name)
    except Exception as err:
        raise Exception(err)
    # Test passes
    log_output.function = 'make_bucket(bucket_name, location)'
    print(log_output.json_report())
def test_negative_make_bucket_invalid_name(client, log_output):
    """Invalid bucket names must be rejected with InvalidBucketError."""
    # default value for log_output.function attribute is;
    # log_output.function = "make_bucket(bucket_name, location)"

    # Get a unique bucket_name
    bucket_name = generate_bucket_name()
    # Default location
    log_output.args['location'] = "default value ('us-east-1')"
    # Create an array of invalid bucket names to test
    invalid_bucket_name_list = [bucket_name+'.', '.'+bucket_name, bucket_name+'...'+'abcd']
    for name in invalid_bucket_name_list:
        log_output.args['bucket_name'] = name
        try:
            # Create a bucket
            client.make_bucket(name)
            # Check if bucket was created properly
            log_output.function = 'bucket_exists(bucket_name)'
            client.bucket_exists(name)
            # Remove bucket
            log_output.function = 'remove_bucket(bucket_name)'
            client.remove_bucket(name)
        except InvalidBucketError as err:
            # Expected: the client rejected the invalid name.
            pass
        except Exception as err:
            raise Exception(err)
    # Test passes
    log_output.function = 'make_bucket(bucket_name, location)'
    log_output.args['bucket_name'] = invalid_bucket_name_list
    print(log_output.json_report())
def test_make_bucket_recreate(client, log_output):
    """Re-creating an existing bucket must fail with BucketAlreadyOwnedByYou/Exists."""
    # default value for log_output.function attribute is;
    # log_output.function = "make_bucket(bucket_name, location)"

    # Get a unique bucket_name
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()

    # s3 amazon has a bug and can let a bucket to be recreated for
    # 'us-east-1' region, as opposed to the expected failure behavior.
    # Until this issue is fixed by amazon, the following
    # location manipulation will be used in our testing.
    location = 'us-west-1' if is_s3(client) else 'us-east-1'
    failed_as_expected = False
    try:
        client.make_bucket(bucket_name, location)
        client.make_bucket(bucket_name, location)
    except BucketAlreadyOwnedByYou as err:
        # Expected this exception. Test passes
        failed_as_expected = True
        print(log_output.json_report())
    except BucketAlreadyExists as err:
        # Expected this exception. Test passes
        failed_as_expected = True
        print(log_output.json_report())
    except Exception as err:
        raise Exception(err)

    if not failed_as_expected:
        print(log_output.json_report("Recreating the same bucket SHOULD have failed!"))
        exit()
def test_list_buckets(client, log_output):
    """List all buckets and sanity-check each entry's name and creation date."""
    # default value for log_output.function attribute is;
    # log_output.function = "list_buckets( )"

    # Get a unique bucket_name
    bucket_name = generate_bucket_name()
    try:
        client.make_bucket(bucket_name)
        # List all buckets.
        buckets = client.list_buckets()
        for bucket in buckets:
            # bucket object should be of a valid value.
            if bucket.name and bucket.creation_date:
                continue
            raise ValueError('list_bucket api failure')
    except Exception as err:
        raise Exception(err)
    finally:
        # Clean up the probe bucket even when listing failed.
        client.remove_bucket(bucket_name)
    # Test passes
    print(log_output.json_report())
def test_fput_object_small_file(client, testfile, log_output):
    """Upload a small local file with fput_object, then remove object and bucket.

    On AWS S3 the upload also carries an ``x-amz-storage-class`` metadata
    header.
    """
    # default value for log_output.function attribute is;
    # log_output.function = "fput_object(bucket_name, object_name, file_path, content_type, metadata)"

    # Get a unique bucket_name and object_name
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    log_output.args['object_name'] = object_name = uuid.uuid4().__str__()
    log_output.args['file_path'] = testfile
    log_output.args['metadata'] = metadata = {'x-amz-storage-class': 'STANDARD_IA'}
    try:
        client.make_bucket(bucket_name)
        # upload local small file.
        if is_s3(client):
            # Pass metadata by keyword: the fourth positional parameter of
            # fput_object is content_type, so passing the dict positionally
            # would send it as the content type instead of as metadata.
            client.fput_object(bucket_name, object_name+'-f', testfile,
                               metadata=metadata)
        else:
            client.fput_object(bucket_name, object_name+'-f', testfile)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket_name, object_name+'-f')
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_fput_object_large_file(client, largefile, log_output):
    """Upload a large local file via multipart fput_object().

    Bug fix: on the S3 branch the metadata dict was passed positionally,
    landing in the ``content_type`` parameter of fput_object(); it is now
    passed as the ``metadata`` keyword argument so the storage-class
    header is actually applied.
    """
    # default value for log_output.function attribute is;
    # log_output.function = "fput_object(bucket_name, object_name, file_path, content_type, metadata)"
    # Get a unique bucket_name and object_name
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    log_output.args['object_name'] = object_name = uuid.uuid4().__str__()
    log_output.args['file_path'] = largefile
    log_output.args['metadata'] = metadata = {'x-amz-storage-class': 'STANDARD_IA'}
    # upload local large file through multipart.
    try:
        client.make_bucket(bucket_name)
        if is_s3(client):
            # S3 needs the storage-class header; pass it as metadata.
            client.fput_object(bucket_name, object_name+'-large', largefile,
                               metadata=metadata)
        else:
            client.fput_object(bucket_name, object_name+'-large', largefile)
        # Confirm the multipart upload actually landed.
        client.stat_object(bucket_name, object_name+'-large')
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket_name, object_name+'-large')
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_fput_object_with_content_type(client, testfile, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "fput_object(bucket_name, object_name, file_path, content_type, metadata)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    log_output.args['file_path'] = testfile
    log_output.args['content_type'] = content_type = 'application/octet-stream'
    log_output.args['metadata'] = metadata = {'x-amz-storage-class': 'STANDARD_IA'}
    try:
        client.make_bucket(bucket)
        # Upload the small local file with an explicit content type;
        # S3 additionally gets the storage-class metadata.
        if is_s3(client):
            client.fput_object(bucket, obj+'-f', testfile,
                               content_type, metadata)
        else:
            client.fput_object(bucket, obj+'-f', testfile,
                               content_type)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, obj+'-f')
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_copy_object_no_copy_condition(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "copy_object(bucket_name, object_name, object_source, conditions)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    base_name = uuid.uuid4().__str__()
    log_output.args['object_source'] = src = base_name+'-source'
    log_output.args['object_name'] = dst = base_name+'-copy'
    try:
        client.make_bucket(bucket)
        # Upload a 1KiB streaming source object.
        size = 1024  # 1KiB.
        client.put_object(bucket, src, LimitedRandomReader(size), size)
        # Server-side copy without any copy conditions.
        client.copy_object(bucket, dst, '/' + bucket + '/' + src)
        # The copy must report the same size and (empty) metadata.
        validate_stat_data(client.stat_object(bucket, dst), size, {})
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, src)
            client.remove_object(bucket, dst)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_copy_object_etag_match(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "copy_object(bucket_name, object_name, object_source, conditions)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    base_name = uuid.uuid4().__str__()
    log_output.args['object_source'] = src = base_name+'-source'
    log_output.args['object_name'] = dst = base_name+'-copy'
    try:
        client.make_bucket(bucket)
        # Upload a 1KiB streaming source object.
        size = 1024  # 1KiB.
        client.put_object(bucket, src, LimitedRandomReader(size), size)
        # First, an unconditional server-side copy.
        client.copy_object(bucket, dst, '/' + bucket + '/' + src)
        # Repeat the copy requiring the source etag to match; with the
        # correct etag the copy must succeed.
        source_etag = client.stat_object(bucket, src).etag
        conditions = CopyConditions()
        conditions.set_match_etag(source_etag)
        log_output.args['conditions'] = {'set_match_etag': source_etag}
        client.copy_object(bucket, dst, '/' + bucket + '/' + src,
                           conditions)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, src)
            client.remove_object(bucket, dst)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_copy_object_negative_etag_match(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "copy_object(bucket_name, object_name, object_source, conditions)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    base_name = uuid.uuid4().__str__()
    log_output.args['object_source'] = src = base_name+'-source'
    log_output.args['object_name'] = dst = base_name+'-copy'
    try:
        client.make_bucket(bucket)
        # Upload a 1KiB streaming source object.
        size = 1024  # 1KiB.
        client.put_object(bucket, src, LimitedRandomReader(size), size)
        try:
            # Copy with a bogus etag precondition; the server must
            # reject the request with PreconditionFailed.
            bogus_etag = 'test-etag'
            conditions = CopyConditions()
            conditions.set_match_etag(bogus_etag)
            log_output.args['conditions'] = {'set_match_etag': bogus_etag}
            client.copy_object(bucket, dst, '/' + bucket + '/' + src,
                               conditions)
        except PreconditionFailed as err:
            # Any message other than the canonical one is a failure.
            if err.message != 'At least one of the preconditions you specified did not hold.':
                raise Exception(err)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, src)
            client.remove_object(bucket, dst)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_copy_object_modified_since(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "copy_object(bucket_name, object_name, object_source, conditions)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    base_name = uuid.uuid4().__str__()
    log_output.args['object_source'] = src = base_name+'-source'
    log_output.args['object_name'] = dst = base_name+'-copy'
    try:
        client.make_bucket(bucket)
        # Upload a 1KiB streaming source object.
        size = 1024  # 1KiB.
        client.put_object(bucket, src, LimitedRandomReader(size), size)
        # Require the source to have been modified after April 1st 2014;
        # the object was just written, so the copy must succeed.
        conditions = CopyConditions()
        t = (2014, 4, 1, 0, 0, 0, 0, 0, 0)
        mod_since = datetime.utcfromtimestamp(time.mktime(t))
        conditions.set_modified_since(mod_since)
        log_output.args['conditions'] = {'set_modified_since': mod_since.strftime('%c')}
        client.copy_object(bucket, dst, '/' + bucket + '/' + src,
                           conditions)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, src)
            client.remove_object(bucket, dst)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_copy_object_unmodified_since(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "copy_object(bucket_name, object_name, object_source, conditions)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    base_name = uuid.uuid4().__str__()
    log_output.args['object_source'] = src = base_name+'-source'
    log_output.args['object_name'] = dst = base_name+'-copy'
    try:
        client.make_bucket(bucket)
        # Upload a 1KiB streaming source object.
        size = 1024  # 1KiB.
        client.put_object(bucket, src, LimitedRandomReader(size), size)
        # Require the source to be unmodified since April 1st 2014.
        conditions = CopyConditions()
        t = (2014, 4, 1, 0, 0, 0, 0, 0, 0)
        unmod_since = datetime.utcfromtimestamp(time.mktime(t))
        conditions.set_unmodified_since(unmod_since)
        log_output.args['conditions'] = {'set_unmodified_since': unmod_since.strftime('%c')}
        try:
            # The object was written just now — well after 2014 — so the
            # copy must fail with PreconditionFailed.
            client.copy_object(bucket, dst, '/' + bucket + '/' + src,
                               conditions)
        except PreconditionFailed as err:
            if err.message != 'At least one of the preconditions you specified did not hold.':
                raise Exception(err)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, src)
            client.remove_object(bucket, dst)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_put_object(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "put_object(bucket_name, object_name, data, length, content_type, metadata)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        client.make_bucket(bucket)
        # Upload a 1MiB streaming object and confirm it exists.
        log_output.args['length'] = size_1mb = 1024*1024 # 1MiB.
        log_output.args['data'] = 'LimitedRandomReader(MB_1)'
        client.put_object(bucket, obj, LimitedRandomReader(size_1mb), size_1mb)
        client.stat_object(bucket, obj)
        # Upload an 11MiB streaming object (multipart) with metadata
        # and an explicit content type.
        log_output.args['length'] = size_11mb = 11*1024*1024 # 11MiB.
        log_output.args['data'] = 'LimitedRandomReader(MB_11)'
        log_output.args['metadata'] = metadata = {'x-amz-meta-testing': 'value','test-key':'value2'}
        log_output.args['content_type'] = content_type='application/octet-stream'
        client.put_object(bucket,
                          obj+'-metadata',
                          LimitedRandomReader(size_11mb),
                          size_11mb,
                          content_type,
                          metadata)
        # Stat the uploaded object and verify the stored metadata
        # round-trips (keys come back canonicalized by the server).
        st_obj = client.stat_object(bucket, obj+'-metadata')
        if 'X-Amz-Meta-Testing' not in st_obj.metadata:
            raise ValueError("Metadata key 'x-amz-meta-testing' not found")
        value = st_obj.metadata['X-Amz-Meta-Testing']
        if value != 'value':
            raise ValueError('Metadata key has unexpected'
                             ' value {0}'.format(value))
        if 'X-Amz-Meta-Test-Key' not in st_obj.metadata:
            raise ValueError("Metadata key 'x-amz-meta-test-key' not found")
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, obj)
            client.remove_object(bucket, obj+'-metadata')
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_negative_put_object_with_path_segment(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "put_object(bucket_name, object_name, data, length, content_type, metadata)"
    # Derive unique bucket/object names; the object name deliberately
    # starts with a path segment, which the server must reject.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = "/a/b/c/" + uuid.uuid4().__str__()
    try:
        client.make_bucket(bucket)
        log_output.args['length'] = 0 # Keep 0 bytes body to check for error.
        log_output.args['data'] = ''
        client.put_object(bucket, obj, io.BytesIO(b''), 0)
    except ResponseError:
        # Expected failure: this is the pass condition.
        pass
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, obj)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def validate_stat_data(st_obj, expected_size, expected_meta):
    """Validate the fields of a stat_object() result.

    Checks last_modified type, a non-empty etag, the expected content
    type ('application/octet-stream'), the expected size, that the entry
    is not a directory, and that every returned metadata item appears in
    ``expected_meta``. Raises ValueError on the first mismatch.

    Bug fixes: the size-mismatch message used to hard-code
    'Expected: 11534336' regardless of ``expected_size``, and the
    metadata-mismatch message hard-coded the key 'x-amz-meta-testing'.
    Both now report the actual expected/received values.
    """
    received_modification_time = st_obj.last_modified
    received_etag = st_obj.etag
    received_metadata = st_obj.metadata
    received_content_type = st_obj.content_type
    received_size = st_obj.size
    received_is_dir = st_obj.is_dir
    if not isinstance(received_modification_time, time.struct_time):
        raise ValueError('Incorrect last_modified time type'
                         ', received type: ', type(received_modification_time))
    if not received_etag:
        raise ValueError('No Etag value is returned.')
    if received_content_type != 'application/octet-stream':
        raise ValueError('Incorrect content type. Expected: ',
                         "'application/octet-stream', received: ",
                         received_content_type)
    if received_size != expected_size:
        raise ValueError('Incorrect file size. Expected: ', expected_size,
                         ', received: ', received_size)
    if received_is_dir:
        raise ValueError('Incorrect file type. Expected: is_dir=False',
                         ', received: is_dir=', received_is_dir)
    # Every metadata item returned by the server must be one we expected.
    if not all(i in expected_meta.items() for i in received_metadata.items()):
        raise ValueError('Metadata mismatch. Expected: ', expected_meta,
                         ', received: ', received_metadata)
def test_stat_object(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "stat_object(bucket_name, object_name)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        client.make_bucket(bucket)
        # Upload a 1MiB streaming object and confirm it exists.
        log_output.args['length'] = size_1mb = 1024*1024 # 1MiB.
        log_output.args['data'] = 'LimitedRandomReader(MB_1)'
        client.put_object(bucket, obj, LimitedRandomReader(size_1mb), size_1mb)
        client.stat_object(bucket, obj)
        # Upload an 11MiB streaming object (multipart) with metadata.
        log_output.args['length'] = size_11mb = 11*1024*1024 # 11MiB.
        log_output.args['data'] = 'LimitedRandomReader(MB_11)'
        log_output.args['metadata'] = metadata = {'X-Amz-Meta-Testing': 'value'}
        log_output.args['content_type'] = content_type='application/octet-stream'
        client.put_object(bucket,
                          obj+'-metadata',
                          LimitedRandomReader(size_11mb),
                          size_11mb,
                          content_type,
                          metadata)
        # Validate every field reported by stat_object on the upload.
        validate_stat_data(client.stat_object(bucket, obj+'-metadata'),
                           size_11mb, metadata)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, obj)
            client.remove_object(bucket, obj+'-metadata')
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_remove_object(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "remove_object(bucket_name, object_name)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        client.make_bucket(bucket)
        # Upload a 1KiB object; the removal under test runs in cleanup.
        size = 1024 # 1KiB.
        client.put_object(bucket, obj, LimitedRandomReader(size), size)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, obj)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_get_object(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "get_object(bucket_name, object_name, request_headers)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        # Non-ASCII local filename exercises path encoding.
        newfile = 'newfile جديد'
        size = 1024*1024 # 1MiB.
        client.make_bucket(bucket)
        client.put_object(bucket, obj, LimitedRandomReader(size), size)
        # Download the whole object and stream it to a local file.
        response = client.get_object(bucket, obj)
        with open(newfile, 'wb') as out:
            shutil.copyfileobj(response, out)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            os.remove(newfile)
            client.remove_object(bucket, obj)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_fget_object(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "fget_object(bucket_name, object_name, file_path, request_headers)"
    # Derive unique bucket/object names; the non-ASCII local filename
    # exercises path encoding.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    log_output.args['file_path'] = newfile_f = 'newfile-f 新'
    try:
        size = 1024*1024 # 1MiB.
        client.make_bucket(bucket)
        client.put_object(bucket, obj, LimitedRandomReader(size), size)
        # Download the full object straight to the local path.
        client.fget_object(bucket, obj, newfile_f)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            os.remove(newfile_f)
            client.remove_object(bucket, obj)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_get_partial_object_with_default_length(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "get_partial_object(bucket_name, object_name, offset, length, request_headers)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        newfile = 'newfile'
        size = 1024*1024 # 1MiB.
        tail_len = 1000
        log_output.args['offset'] = offset = size - tail_len
        client.make_bucket(bucket)
        client.put_object(bucket, obj, LimitedRandomReader(size), size)
        # With no explicit length, the read runs from offset to the end.
        response = client.get_partial_object(bucket, obj, offset)
        with open(newfile, 'wb') as out:
            for chunk in response:
                out.write(chunk)
        # The downloaded tail must be exactly tail_len bytes.
        if os.path.getsize('./newfile') != tail_len:
            raise ValueError('Unexpected file size after running ')
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            # NOTE: the downloaded local file is deliberately left on
            # disk, matching the original behavior.
            client.remove_object(bucket, obj)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_get_partial_object(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "get_partial_object(bucket_name, object_name, offset, length, request_headers)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        newfile = 'newfile'
        size = 1024*1024 # 1MiB.
        # Read a slice starting at the midpoint, 1000 bytes short of the end.
        log_output.args['offset'] = offset = int(size/2)
        log_output.args['length'] = length = int(size/2)-1000
        client.make_bucket(bucket)
        client.put_object(bucket, obj, LimitedRandomReader(size), size)
        response = client.get_partial_object(bucket, obj, offset, length)
        with open(newfile, 'wb') as out:
            for chunk in response:
                out.write(chunk)
        # The downloaded slice must be exactly `length` bytes.
        if os.path.getsize('./newfile') != length:
            raise ValueError('Unexpected file size after running ')
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            # NOTE: the downloaded local file is deliberately left on
            # disk, matching the original behavior.
            client.remove_object(bucket, obj)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_list_objects(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "list_objects(bucket_name, prefix, recursive)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        client.make_bucket(bucket)
        size = 1024*1024 # 1MiB.
        client.put_object(bucket, obj+"-1", LimitedRandomReader(size), size)
        client.put_object(bucket, obj+"-2", LimitedRandomReader(size), size)
        # Recursively list everything; touch each attribute so a
        # malformed listing entry raises.
        log_output.args['recursive'] = is_recursive = True
        for entry in client.list_objects(bucket, None, is_recursive):
            (entry.bucket_name, entry.object_name, entry.last_modified,
             entry.etag, entry.size, entry.content_type)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, obj+"-1")
            client.remove_object(bucket, obj+"-2")
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def count_objects(objects):
    # Count listing entries, touching every attribute so a malformed
    # entry raises instead of being silently counted.
    total = 0
    for entry in objects:
        (entry.bucket_name, entry.object_name, entry.last_modified,
         entry.etag, entry.size, entry.content_type)
        total += 1
    return total
def list_objects_api_test(client, bucket_name, expected_no, *argv):
    # argv forwards the optional prefix/recursive arguments of the
    # list_objects api; both are supposed to be passed as strings.
    listed = count_objects(client.list_objects(bucket_name, *argv))
    if listed != expected_no:
        raise ValueError("Listed no of objects ({}), does not match the expected no of objects ({})".format(listed, expected_no))
def test_list_objects_with_prefix(client, log_output):
    """Exercise list_objects() across several prefix/recursive combinations."""
    # default value for log_output.function attribute is;
    # log_output.function = "list_objects(bucket_name, prefix, recursive)"
    # Get a unique bucket_name and object_name
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    log_output.args['object_name'] = object_name = uuid.uuid4().__str__()
    try:
        client.make_bucket(bucket_name)
        MB_1 = 1024*1024 # 1MiB.
        no_of_created_files = 4
        path_prefix = ''
        # Create one 1MiB object per nesting level, each one directory deeper
        for i in range(no_of_created_files):
            str_i = str(i)
            MB_1_reader = LimitedRandomReader(MB_1)
            client.put_object(bucket_name, path_prefix + str_i + '_' + object_name, MB_1_reader, MB_1)
            path_prefix += str_i + '/'
        # Created files and directory structure
        # ._<bucket_name>/
        # |___0_<object_name>
        # |___0/
        # |___1_<object_name>
        # |___1/
        # |___2_<object_name>
        # |___2/
        # |___3_<object_name>
        #
        # Test and verify list_objects api outputs
        # List objects recursively with NO prefix
        # Expect all 4 created objects to be listed
        log_output.args['recursive'] = recursive = 'True'
        log_output.args['prefix'] = prefix = '' # no prefix
        list_objects_api_test(client, bucket_name,
                              no_of_created_files,
                              prefix, recursive)
        # List objects at the top level with no prefix and no recursive option
        # Expect only the top 2 entries ('0_<object_name>' and '0/') to be listed
        log_output.args['recursive'] = recursive = ''
        log_output.args['prefix'] = prefix = ''
        list_objects_api_test(client, bucket_name, 2)
        # List objects for '0' directory/prefix without recursive option
        # Expect 2 entries (directory '0/' and the '0_<object_name>' object) to be listed
        log_output.args['prefix'] = prefix = '0'
        list_objects_api_test(client, bucket_name, 2, prefix)
        # List objects for '0/' directory/prefix without recursive option
        # Expect 2 entries under directory '0/' ('1_<object_name>' and '1/') to be listed
        log_output.args['prefix'] = prefix = '0/'
        list_objects_api_test(client, bucket_name, 2, prefix)
        # List objects for '0/' directory/prefix, recursively
        # Expect 3 objects (the ones nested at every level below '0/') to be listed
        log_output.args['prefix'] = prefix = '0/'
        log_output.args['recursive'] = recursive = 'True'
        list_objects_api_test(client, bucket_name, 3, prefix, recursive)
        # List object with '0/1/2/' directory/prefix, non-recursive
        # Expect the single object under directory '0/1/2/' to be listed
        log_output.args['prefix'] = prefix = '0/1/2/'
        list_objects_api_test(client, bucket_name, 1, prefix)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            # Re-walk the same nesting pattern to remove every created object
            path_prefix = ''
            for i in range(no_of_created_files):
                str_i = str(i)
                client.remove_object(bucket_name, path_prefix + str_i + '_' + object_name)
                path_prefix += str_i + '/'
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    log_output.args['recursive'] = 'Several prefix/recursive combinations are tested'
    log_output.args['prefix'] = 'Several prefix/recursive combinations are tested'
    print(log_output.json_report())
def test_list_objects_with_1001_files(client, log_output):
    """List a bucket holding more objects than one listing page (1000).

    Bug fixes: the reported object-name range was hard-coded to
    '_0 ~ _1000' even though 2000 objects are created — it is now
    derived from ``no_of_created_files``; and ``no_of_created_files``
    is assigned before the try block so the finally clause cannot fail
    with NameError when make_bucket raises.
    """
    # default value for log_output.function attribute is;
    # log_output.function = "list_objects(bucket_name, prefix, recursive)"
    # Get a unique bucket_name and object_name
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    object_name = uuid.uuid4().__str__()
    no_of_created_files = 2000
    log_output.args['object_name'] = object_name + '_0 ~ ' + \
        object_name + '_' + str(no_of_created_files - 1)
    try:
        client.make_bucket(bucket_name)
        KB_1 = 1024 # 1KiB.
        path_prefix = ''
        # Create enough 1KiB files at the same layer to force the
        # listing to span more than one 1000-entry page.
        for i in range(no_of_created_files):
            str_i = str(i)
            KB_1_reader = LimitedRandomReader(KB_1)
            client.put_object(bucket_name, path_prefix + object_name + '_' + str_i, KB_1_reader, KB_1)
        # List objects and check that every created file is returned
        list_objects_api_test(client, bucket_name, no_of_created_files)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            path_prefix = ''
            for i in range(no_of_created_files):
                str_i = str(i)
                client.remove_object(bucket_name, path_prefix + object_name + '_' + str_i)
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_list_objects_v2(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "list_objects(bucket_name, prefix, recursive)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        client.make_bucket(bucket)
        size = 1024*1024 # 1MiB.
        client.put_object(bucket, obj+"-1", LimitedRandomReader(size), size)
        client.put_object(bucket, obj+"-2", LimitedRandomReader(size), size)
        # Recursively list everything via the V2 API; touch each
        # attribute so a malformed listing entry raises.
        log_output.args['recursive'] = is_recursive = True
        for entry in client.list_objects_v2(bucket, None, is_recursive):
            (entry.bucket_name, entry.object_name, entry.last_modified,
             entry.etag, entry.size, entry.content_type)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, obj+"-1")
            client.remove_object(bucket, obj+"-2")
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
# Helper method for test_list_incomplete_uploads
# and test_remove_incomplete_uploads tests
def create_upload_ids(client, b_name, o_name, n):
    # Start 'n' multipart uploads for the same object and return the
    # upload ids handed back by the server, in creation order.
    return [client._new_multipart_upload(b_name, o_name, {})
            for _ in range(n)]
# Helper method for test_list_incomplete_uploads
# and test_remove_incomplete_uploads tests
def collect_incomplete_upload_ids(client, b_name, o_name):
    # Gather the upload ids reported by the (non-recursive)
    # list_incomplete_uploads API for the given object.
    return [upload.upload_id
            for upload in client.list_incomplete_uploads(b_name, o_name, False)]
def test_remove_incomplete_upload(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "remove_incomplete_upload(bucket_name, object_name)"
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        client.make_bucket(bucket)
        # Start three multipart uploads, then abort them all at once.
        create_upload_ids(client, bucket, obj, 3)
        client.remove_incomplete_upload(bucket, obj)
        # Nothing may remain in the incomplete-uploads listing.
        if collect_incomplete_upload_ids(client, bucket, obj):
            raise ValueError("There are still upload ids not removed")
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_presigned_get_object_default_expiry(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "presigned_get_object(bucket_name, object_name, expires, response_headers)"
    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                               ca_certs=certifi.where())
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        client.make_bucket(bucket)
        size = 1024*1024 # 1MiB.
        client.put_object(bucket, obj, LimitedRandomReader(size), size)
        # A presigned GET URL with the default expiry must be fetchable.
        url = client.presigned_get_object(bucket, obj)
        response = http.urlopen('GET', url)
        if response.status != 200:
            raise ResponseError(response,
                                'GET',
                                bucket,
                                obj).get_exception()
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, obj)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_presigned_get_object_expiry_5sec(client, log_output):
    # default value for log_output.function attribute is;
    # log_output.function = "presigned_get_object(bucket_name, object_name, expires, response_headers)"
    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                               ca_certs=certifi.where())
    # Derive unique bucket/object names for this run.
    log_output.args['bucket_name'] = bucket = generate_bucket_name()
    log_output.args['object_name'] = obj = uuid.uuid4().__str__()
    try:
        client.make_bucket(bucket)
        size = 1024 # 1KiB.
        client.put_object(bucket, obj, LimitedRandomReader(size), size)
        # Sign a URL valid for 5 seconds; it must work immediately...
        url = client.presigned_get_object(bucket, obj,
                                          timedelta(seconds=5))
        response = http.urlopen('GET', url)
        if response.status != 200:
            raise ResponseError(response,
                                'GET',
                                bucket,
                                obj).get_exception()
        # ...and must be rejected once the 5-second window has passed.
        time.sleep(5)
        response = http.urlopen('GET', url)
        if response.status == 200:
            raise ValueError('Presigned get url failed to expire!')
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket, obj)
            client.remove_bucket(bucket)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_presigned_get_object_response_headers(client, log_output):
    """Verify presigned GET honours the response_headers overrides for
    Content-Type and Content-Language.

    log_output.function defaults to
    "presigned_get_object(bucket_name, object_name, expires, response_headers)"
    """
    http_pool = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                                    ca_certs=certifi.where())
    bucket_name = generate_bucket_name()
    object_name = uuid.uuid4().__str__()
    log_output.args['bucket_name'] = bucket_name
    log_output.args['object_name'] = object_name
    try:
        client.make_bucket(bucket_name)
        size_1kb = 1024  # 1KiB.
        client.put_object(bucket_name, object_name,
                          LimitedRandomReader(size_1kb), size_1kb)
        content_type = 'text/plain'
        content_language = 'en_US'
        response_headers = {'response-content-type': content_type,
                            'response-content-language': content_language}
        url = client.presigned_get_object(bucket_name, object_name,
                                          timedelta(seconds=5),
                                          response_headers)
        response = http_pool.urlopen('GET', url)
        # Both overridden headers must be reflected in the response.
        headers_ok = (response.status == 200
                      and response.headers['Content-Type'] == content_type
                      and response.headers['Content-Language'] == content_language)
        if not headers_ok:
            raise ResponseError(response, 'GET', bucket_name,
                                object_name).get_exception()
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket_name, object_name)
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_presigned_put_object_default_expiry(client, log_output):
    """Upload an object through a presigned PUT URL created with the
    default expiry, and confirm the object exists afterwards.

    log_output.function defaults to
    "presigned_put_object(bucket_name, object_name, expires)"
    """
    http_pool = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                                    ca_certs=certifi.where())
    bucket_name = generate_bucket_name()
    object_name = uuid.uuid4().__str__()
    log_output.args['bucket_name'] = bucket_name
    log_output.args['object_name'] = object_name
    try:
        client.make_bucket(bucket_name)
        url = client.presigned_put_object(bucket_name, object_name)
        size_1mb = 1024 * 1024  # 1MiB.
        response = http_pool.urlopen('PUT', url, LimitedRandomReader(size_1mb))
        if response.status != 200:
            raise ResponseError(response, 'PUT', bucket_name,
                                object_name).get_exception()
        # The upload must now be visible through the regular API.
        client.stat_object(bucket_name, object_name)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket_name, object_name)
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_presigned_put_object_expiry_5sec(client, log_output):
    """Verify a presigned PUT URL with a 5-second expiry is rejected once
    the expiry has elapsed.

    log_output.function defaults to
    "presigned_put_object(bucket_name, object_name, expires)"
    """
    http_pool = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                                    ca_certs=certifi.where())
    bucket_name = generate_bucket_name()
    object_name = uuid.uuid4().__str__()
    log_output.args['bucket_name'] = bucket_name
    log_output.args['object_name'] = object_name
    size_1kb = 1024  # 1KiB.
    try:
        client.make_bucket(bucket_name)
        url = client.presigned_put_object(bucket_name, object_name,
                                          timedelta(seconds=5))
        # Wait for 5 seconds for the presigned url to expire
        time.sleep(5)
        response = http_pool.urlopen('PUT', url, LimitedRandomReader(size_1kb))
        # An accepted upload through an expired URL is a failure.
        if response.status == 200:
            raise ValueError('Presigned put url failed to expire!')
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket_name, object_name)
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_presigned_post_policy(client, log_output):
    """Exercise presigned_post_policy with a bucket, key prefix and a
    10-day expiry.

    log_output.function defaults to "presigned_post_policy(post_policy)"
    """
    bucket_name = generate_bucket_name()
    no_of_days = 10
    prefix = 'objectPrefix/'
    try:
        client.make_bucket(bucket_name)
        # Build the post policy.
        policy = PostPolicy()
        policy.set_bucket_name(bucket_name)
        policy.set_key_startswith(prefix)
        policy.set_expires(datetime.utcnow() + timedelta(days=no_of_days))
        # post_policy arg is a class. To avoid displaying meaningless value
        # for the class, policy settings are made part of the args for
        # clarity and debugging purposes.
        log_output.args['post_policy'] = {'bucket_name': bucket_name,
                                          'prefix': prefix,
                                          'expires_in_days': no_of_days}
        client.presigned_post_policy(policy)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_thread_safe(client, test_file, log_output):
    """Upload and download the same object from several threads and check
    every downloaded copy against the source file's sha256 digest.

    Fixes over the previous version:
      * upload threads are all started before any is joined, so the
        uploads actually run concurrently (the old loop joined each
        thread immediately, serializing them);
      * the download helper computes its local file name *before* its
        try block, so the cleanup in `finally` can no longer raise
        NameError when client.get_object fails early.
    """
    no_of_threads = 5
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    log_output.args['object_name'] = object_name = uuid.uuid4().__str__()
    try:
        # Digest of the user-provided source file; expected value for
        # every downloaded copy.
        with open(test_file, 'rb') as f:
            test_file_sha_sum = hashlib.sha256(f.read()).hexdigest()
        # Create the bucket
        client.make_bucket(bucket_name)
        # Put/Upload 'no_of_threads' many objects
        # simultaneously using multi-threading
        upload_threads = []
        for _ in range(no_of_threads):
            thrd = Thread(target=client.fput_object,
                          args=(bucket_name, object_name, test_file))
            thrd.start()
            upload_threads.append(thrd)
        for thrd in upload_threads:
            thrd.join()

        # Exceptions raised by get_object_and_check in worker threads.
        exceptions = []

        def get_object_and_check(client, bckt_name, obj_name, no,
                                 expected_sha_sum):
            # Download the object to a file, compute its sha256 and record
            # a mismatch (or any error) in `exceptions`.
            local_file = 'copied_file_' + str(no)
            try:
                obj_data = client.get_object(bckt_name, obj_name)
                with open(local_file, 'wb') as file_data:
                    shutil.copyfileobj(obj_data, file_data)
                with open(local_file, 'rb') as f:
                    copied_file_sha_sum = hashlib.sha256(f.read()).hexdigest()
                # Compare sha-sum values of the source file and the copied one
                if expected_sha_sum != copied_file_sha_sum:
                    raise ValueError(
                        'Sha-sum mismatch on multi-threaded put and get objects')
            except Exception as err:
                exceptions.append(Exception(err))
            finally:
                # Remove downloaded file
                os.path.isfile(local_file) and os.remove(local_file)

        # Get/Download 'no_of_threads' many objects
        # simultaneously using multi-threading
        thrd_list = []
        for i in range(no_of_threads):
            thrd = Thread(target=get_object_and_check,
                          args=(client, bucket_name, object_name, i,
                                test_file_sha_sum))
            thrd.start()
            thrd_list.append(thrd)
        # Wait until all threads to finish
        for t in thrd_list:
            t.join()
        if len(exceptions) > 0:
            raise exceptions[0]
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_object(bucket_name, object_name)
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_get_bucket_policy(client, log_output):
    """A freshly created bucket has no policy: get_bucket_policy must
    raise NoSuchBucketPolicy (backends without policy support report
    Not-Implemented instead).

    log_output.function defaults to "get_bucket_policy(bucket_name)"
    """
    bucket_name = generate_bucket_name()
    log_output.args['bucket_name'] = bucket_name
    try:
        client.make_bucket(bucket_name)
        client.get_bucket_policy(bucket_name)
    except APINotImplemented:
        print(log_output.json_report(alert='Not Implemented',
                                     status=LogOutput.NA))
    except NoSuchBucketPolicy:
        # Expected outcome - test passes.
        print(log_output.json_report())
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
def get_policy_actions(stat):
    """Collect the distinct 'Action' entries from a list of policy
    statements, flatten any nested lists, and strip the 's3:' prefix.

    Deduplication happens before flattening, mirroring how the policies
    in this module repeat whole action groups.
    """
    seen = []
    for statement in stat:
        entry = statement.get('Action')
        if entry not in seen:
            seen.append(entry)
    # Flatten nested action lists into a single sequence.
    flattened = []
    for entry in seen:
        if isinstance(entry, list):
            flattened.extend(entry)
        else:
            flattened.append(entry)
    return [action.replace('s3:', '') for action in flattened]
def policy_validated(client, bucket_name, policy):
    """Return True when the bucket's effective policy grants exactly the
    same set of actions as the expected *policy* dict."""
    fetched = json.loads(client.get_bucket_policy(bucket_name).decode("utf-8"))
    actual = sorted(get_policy_actions(fetched.get('Statement')))
    expected = sorted(get_policy_actions(policy.get('Statement')))
    return expected == actual
def test_set_bucket_policy_readonly(client, log_output):
    """Apply an anonymous read-only bucket policy and verify it
    round-trips through get_bucket_policy.

    log_output.function defaults to "set_bucket_policy(bucket_name, policy)"
    """
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    try:
        client.make_bucket(bucket_name)
        bucket_arn = "arn:aws:s3:::" + bucket_name
        # Anonymous read-only access: locate/list the bucket, read objects.
        policy = {
            "Version": "2012-10-17",
            "Statement": [
                {"Sid": "", "Effect": "Allow", "Principal": {"AWS": "*"},
                 "Action": "s3:GetBucketLocation", "Resource": bucket_arn},
                {"Sid": "", "Effect": "Allow", "Principal": {"AWS": "*"},
                 "Action": "s3:ListBucket", "Resource": bucket_arn},
                {"Sid": "", "Effect": "Allow", "Principal": {"AWS": "*"},
                 "Action": "s3:GetObject", "Resource": bucket_arn + "/*"},
            ]
        }
        client.set_bucket_policy(bucket_name, json.dumps(policy))
        # Validate if the policy is set correctly
        if not policy_validated(client, bucket_name, policy):
            raise ValueError('Failed to set ReadOnly bucket policy')
    except APINotImplemented:
        print(log_output.json_report(alert='Not Implemented',
                                     status=LogOutput.NA))
    except Exception as err:
        raise Exception(err)
    else:
        # Test passes
        print(log_output.json_report())
    finally:
        try:
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
def test_set_bucket_policy_readwrite(client, log_output):
    """Apply an anonymous read-write bucket policy and verify it
    round-trips through get_bucket_policy.

    log_output.function defaults to
    "set_bucket_policy(bucket_name, prefix, policy_access)"
    """
    # Get a unique bucket_name
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    try:
        client.make_bucket(bucket_name)
        # Read-write policy
        policy = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Action": ["s3:GetBucketLocation"],
                    "Sid": "",
                    "Resource": ["arn:aws:s3:::"+bucket_name],
                    "Effect": "Allow",
                    "Principal": {"AWS": "*"}
                },
                {
                    "Action": ["s3:ListBucket"],
                    "Sid": "",
                    "Resource": ["arn:aws:s3:::"+bucket_name],
                    "Effect": "Allow",
                    "Principal": {"AWS": "*"}
                },
                {
                    "Action": ["s3:ListBucketMultipartUploads"],
                    "Sid": "",
                    "Resource": ["arn:aws:s3:::"+bucket_name],
                    "Effect": "Allow",
                    "Principal": {"AWS": "*"}
                },
                {
                    "Action": ["s3:ListMultipartUploadParts",
                               "s3:GetObject",
                               "s3:AbortMultipartUpload",
                               "s3:DeleteObject",
                               "s3:PutObject"],
                    "Sid": "",
                    "Resource": ["arn:aws:s3:::"+bucket_name+"/*"],
                    "Effect": "Allow",
                    "Principal": {"AWS": "*"}
                }
            ]
        }
        # Set read-write policy
        client.set_bucket_policy(bucket_name, json.dumps(policy))
        # Validate if the policy is set correctly
        if not policy_validated(client, bucket_name, policy):
            # Fixed message: this test sets the ReadWrite policy (the old
            # text wrongly said "ReadOnly").
            raise ValueError('Failed to set ReadWrite bucket policy')
    except APINotImplemented:
        print(log_output.json_report(alert='Not Implemented', status=LogOutput.NA))
    except Exception as err:
        raise Exception(err)
    else:
        # Test passes
        print(log_output.json_report())
    finally:
        try:
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
def test_remove_objects(client, log_output):
    """Upload a batch of objects and delete them all with a single
    remove_objects call, failing on any per-object delete error.

    log_output.function defaults to "remove_objects(bucket_name, objects_iter)"
    """
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    try:
        size_1mb = 1024 * 1024  # 1MiB.
        client.make_bucket(bucket_name)
        # Seed the bucket for the multi-object delete test.
        object_names = []
        for i in range(10):
            name = "prefix" + "-{}".format(i)
            client.put_object(bucket_name, name,
                              LimitedRandomReader(size_1mb), size_1mb)
            object_names.append(name)
        log_output.args['objects_iter'] = objects_iter = object_names
        # delete the objects in a single library call.
        for del_err in client.remove_objects(bucket_name, objects_iter):
            raise ValueError("Remove objects err: {}".format(del_err))
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            # Try to clean everything to keep our server intact
            for del_err in client.remove_objects(bucket_name, objects_iter):
                raise ValueError("Remove objects err: {}".format(del_err))
            client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def test_remove_bucket(client, log_output):
    """Create a bucket and verify remove_bucket deletes it. On AWS S3 a
    distinct ('.unique'-suffixed) name and explicit location are used.

    log_output.function defaults to "remove_bucket(bucket_name)"
    """
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    try:
        if is_s3(client):
            log_output.args['location'] = location = 'us-east-1'
            client.make_bucket(bucket_name + '.unique', location)
        else:
            client.make_bucket(bucket_name)
    except Exception as err:
        raise Exception(err)
    finally:
        try:
            # Removing bucket. This operation will only work if your bucket is empty.
            if is_s3(client):
                client.remove_bucket(bucket_name + '.unique')
            else:
                client.remove_bucket(bucket_name)
        except Exception as err:
            raise Exception(err)
    # Test passes
    print(log_output.json_report())
def isFullMode():
    """Return True when the MINT_MODE environment variable requests the
    full test suite (any other value, or unset, selects quick mode)."""
    return os.environ.get("MINT_MODE") == "full"
def main():
    """
    Functional testing of minio python library.

    Reads server credentials from the environment (defaulting to the
    public play.minio.io endpoint), prepares test data files, then runs
    either the full or the quick test list depending on MINT_MODE.

    Fixes: `log_output` is pre-initialized so a failure before the first
    test (e.g. in the Minio() constructor) no longer raises NameError in
    the except handler, and `data_dir == None` now uses `is None`.
    """
    log_output = None  # set before `try` so the handler below can test it
    try:
        access_key = os.getenv('ACCESS_KEY', 'Q3AM3UQ867SPQQA43P2F')
        secret_key = os.getenv('SECRET_KEY',
                               'zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG')
        server_endpoint = os.getenv('SERVER_ENDPOINT', 'play.minio.io:9000')
        secure = os.getenv('ENABLE_HTTPS', '1') == '1'
        if server_endpoint == 'play.minio.io:9000':
            access_key = 'Q3AM3UQ867SPQQA43P2F'
            secret_key = 'zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG'
            secure = True

        client = Minio(server_endpoint, access_key, secret_key, secure=secure)
        # Check if we are running in the mint environment.
        data_dir = os.getenv('DATA_DIR')
        if data_dir is None:
            os.environ['DATA_DIR'] = data_dir = '/mint/data'

        is_mint_env = (os.path.exists(data_dir) and
                       os.path.exists(os.path.join(data_dir, 'datafile-1-MB')) and
                       os.path.exists(os.path.join(data_dir, 'datafile-11-MB')))

        # Enable trace
        # import sys
        # client.trace_on(sys.stderr)

        testfile = 'datafile-1-MB'
        largefile = 'datafile-11-MB'
        if is_mint_env:
            # Choose data files
            testfile = os.path.join(data_dir, 'datafile-1-MB')
            largefile = os.path.join(data_dir, 'datafile-11-MB')
        else:
            # Not in mint: synthesize local data files of the right sizes.
            with open(testfile, 'wb') as file_data:
                shutil.copyfileobj(LimitedRandomReader(1024*1024), file_data)
            with open(largefile, 'wb') as file_data:
                shutil.copyfileobj(LimitedRandomReader(11*1024*1024), file_data)

        if isFullMode():
            log_output = LogOutput(client.make_bucket, 'test_make_bucket_default_region')
            test_make_bucket_default_region(client, log_output)

            log_output = LogOutput(client.make_bucket, 'test_make_bucket_with_region')
            test_make_bucket_with_region(client, log_output)

            log_output = LogOutput(client.make_bucket, 'test_negative_make_bucket_invalid_name')
            test_negative_make_bucket_invalid_name(client, log_output)

            log_output = LogOutput(client.make_bucket, 'test_make_bucket_recreate')
            test_make_bucket_recreate(client, log_output)

            log_output = LogOutput(client.list_buckets, 'test_list_buckets')
            test_list_buckets(client, log_output)

            log_output = LogOutput(client.fput_object, 'test_fput_object_small_file')
            test_fput_object_small_file(client, testfile, log_output)

            log_output = LogOutput(client.fput_object, 'test_fput_object_large_file')
            test_fput_object_large_file(client, largefile, log_output)

            log_output = LogOutput(client.fput_object, 'test_fput_object_with_content_type')
            test_fput_object_with_content_type(client, testfile, log_output)

            log_output = LogOutput(client.copy_object, 'test_copy_object_no_copy_condition')
            test_copy_object_no_copy_condition(client, log_output)

            log_output = LogOutput(client.copy_object, 'test_copy_object_etag_match')
            test_copy_object_etag_match(client, log_output)

            log_output = LogOutput(client.copy_object, 'test_copy_object_negative_etag_match')
            test_copy_object_negative_etag_match(client, log_output)

            log_output = LogOutput(client.copy_object, 'test_copy_object_modified_since')
            test_copy_object_modified_since(client, log_output)

            log_output = LogOutput(client.copy_object, 'test_copy_object_unmodified_since')
            test_copy_object_unmodified_since(client, log_output)

            log_output = LogOutput(client.put_object, 'test_put_object')
            test_put_object(client, log_output)

            log_output = LogOutput(client.put_object, 'test_negative_put_object_with_path_segment')
            test_negative_put_object_with_path_segment(client, log_output)

            log_output = LogOutput(client.stat_object, 'test_stat_object')
            test_stat_object(client, log_output)

            log_output = LogOutput(client.get_object, 'test_get_object')
            test_get_object(client, log_output)

            log_output = LogOutput(client.fget_object, 'test_fget_object')
            test_fget_object(client, log_output)

            log_output = LogOutput(client.get_partial_object, 'test_get_partial_object_with_default_length')
            test_get_partial_object_with_default_length(client, log_output)

            log_output = LogOutput(client.get_partial_object, 'test_get_partial_object')
            test_get_partial_object(client, log_output)

            log_output = LogOutput(client.list_objects, 'test_list_objects')
            test_list_objects(client, log_output)

            log_output = LogOutput(client.list_objects, 'test_list_objects_with_prefix')
            test_list_objects_with_prefix(client, log_output)

            log_output = LogOutput(client.list_objects, 'test_list_objects_with_1001_files')
            test_list_objects_with_1001_files(client, log_output)

            log_output = LogOutput(client.remove_incomplete_upload, 'test_remove_incomplete_upload')
            test_remove_incomplete_upload(client, log_output)

            log_output = LogOutput(client.list_objects_v2, 'test_list_objects_v2')
            test_list_objects_v2(client, log_output)

            log_output = LogOutput(client.presigned_get_object, 'test_presigned_get_object_default_expiry')
            test_presigned_get_object_default_expiry(client, log_output)

            log_output = LogOutput(client.presigned_get_object, 'test_presigned_get_object_expiry_5sec')
            test_presigned_get_object_expiry_5sec(client, log_output)

            log_output = LogOutput(client.presigned_get_object, 'test_presigned_get_object_response_headers')
            test_presigned_get_object_response_headers(client, log_output)

            log_output = LogOutput(client.presigned_put_object, 'test_presigned_put_object_default_expiry')
            test_presigned_put_object_default_expiry(client, log_output)

            log_output = LogOutput(client.presigned_put_object, 'test_presigned_put_object_expiry_5sec')
            test_presigned_put_object_expiry_5sec(client, log_output)

            log_output = LogOutput(client.presigned_post_policy, 'test_presigned_post_policy')
            test_presigned_post_policy(client, log_output)

            log_output = LogOutput(client.put_object, 'test_thread_safe')
            test_thread_safe(client, testfile, log_output)

            log_output = LogOutput(client.get_bucket_policy, 'test_get_bucket_policy')
            test_get_bucket_policy(client, log_output)

            log_output = LogOutput(client.set_bucket_policy, 'test_set_bucket_policy_readonly')
            test_set_bucket_policy_readonly(client, log_output)

            log_output = LogOutput(client.set_bucket_policy, 'test_set_bucket_policy_readwrite')
            test_set_bucket_policy_readwrite(client, log_output)
        else:
            # Quick mode tests
            log_output = LogOutput(client.make_bucket, 'test_make_bucket_default_region')
            test_make_bucket_default_region(client, log_output)

            log_output = LogOutput(client.list_buckets, 'test_list_buckets')
            test_list_buckets(client, log_output)

            log_output = LogOutput(client.put_object, 'test_put_object')
            test_put_object(client, log_output)

            log_output = LogOutput(client.stat_object, 'test_stat_object')
            test_stat_object(client, log_output)

            log_output = LogOutput(client.get_object, 'test_get_object')
            test_get_object(client, log_output)

            log_output = LogOutput(client.list_objects, 'test_list_objects')
            test_list_objects(client, log_output)

            log_output = LogOutput(client.remove_incomplete_upload, 'test_remove_incomplete_upload')
            test_remove_incomplete_upload(client, log_output)

            log_output = LogOutput(client.presigned_get_object, 'test_presigned_get_object_default_expiry')
            test_presigned_get_object_default_expiry(client, log_output)

            log_output = LogOutput(client.presigned_put_object, 'test_presigned_put_object_default_expiry')
            test_presigned_put_object_default_expiry(client, log_output)

            log_output = LogOutput(client.presigned_post_policy, 'test_presigned_post_policy')
            test_presigned_post_policy(client, log_output)

            log_output = LogOutput(client.copy_object, 'test_copy_object_no_copy_condition')
            test_copy_object_no_copy_condition(client, log_output)

            log_output = LogOutput(client.get_bucket_policy, 'test_get_bucket_policy')
            test_get_bucket_policy(client, log_output)

            log_output = LogOutput(client.set_bucket_policy, 'test_set_bucket_policy_readonly')
            test_set_bucket_policy_readonly(client, log_output)

        # Remove all objects.
        log_output = LogOutput(client.remove_object, 'test_remove_object')
        test_remove_object(client, log_output)

        log_output = LogOutput(client.remove_objects, 'test_remove_objects')
        test_remove_objects(client, log_output)

        log_output = LogOutput(client.remove_bucket, 'test_remove_bucket')
        test_remove_bucket(client, log_output)

        # Remove temporary files.
        if not is_mint_env:
            os.remove(testfile)
            os.remove(largefile)
    except Exception as err:
        if log_output is not None:
            print(log_output.json_report(err))
        else:
            # Failed before any test was configured (e.g. client setup).
            print(err)
        exit(1)
if __name__ == "__main__":
    # Execute only if run as a script (not on import).
    main()
| NitishT/minio-py | tests/functional/tests.py | Python | apache-2.0 | 79,042 |
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from connector import channel
from google3.cloud.graphite.mmv2.services.google.identity_toolkit import tenant_pb2
from google3.cloud.graphite.mmv2.services.google.identity_toolkit import tenant_pb2_grpc
from typing import List
class Tenant(object):
    """Declarative resource client for an Identity Toolkit (beta) tenant.

    Mirrors the IdentitytoolkitBetaTenant proto. apply() upserts the
    resource through the DCL gRPC stub and refreshes local fields from
    the server's response; delete() removes it; list() enumerates the
    tenants of a project. (Generated DCL code: fields are only copied
    into requests when their converted value is truthy.)
    """

    def __init__(
        self,
        name: str = None,
        display_name: str = None,
        allow_password_signup: bool = None,
        enable_email_link_signin: bool = None,
        disable_auth: bool = None,
        enable_anonymous_user: bool = None,
        mfa_config: dict = None,
        test_phone_numbers: dict = None,
        project: str = None,
        service_account_file: str = "",
    ):
        # Ensure the shared gRPC channel exists before any RPC is made.
        channel.initialize()
        self.name = name
        self.display_name = display_name
        self.allow_password_signup = allow_password_signup
        self.enable_email_link_signin = enable_email_link_signin
        self.disable_auth = disable_auth
        self.enable_anonymous_user = enable_anonymous_user
        self.mfa_config = mfa_config
        self.test_phone_numbers = test_phone_numbers
        self.project = project
        self.service_account_file = service_account_file

    def apply(self):
        """Create or update the tenant server-side, then refresh this
        object's fields from the applied resource."""
        stub = tenant_pb2_grpc.IdentitytoolkitBetaTenantServiceStub(channel.Channel())
        request = tenant_pb2.ApplyIdentitytoolkitBetaTenantRequest()
        if Primitive.to_proto(self.name):
            request.resource.name = Primitive.to_proto(self.name)

        if Primitive.to_proto(self.display_name):
            request.resource.display_name = Primitive.to_proto(self.display_name)

        if Primitive.to_proto(self.allow_password_signup):
            request.resource.allow_password_signup = Primitive.to_proto(
                self.allow_password_signup
            )

        if Primitive.to_proto(self.enable_email_link_signin):
            request.resource.enable_email_link_signin = Primitive.to_proto(
                self.enable_email_link_signin
            )

        if Primitive.to_proto(self.disable_auth):
            request.resource.disable_auth = Primitive.to_proto(self.disable_auth)

        if Primitive.to_proto(self.enable_anonymous_user):
            request.resource.enable_anonymous_user = Primitive.to_proto(
                self.enable_anonymous_user
            )

        # mfa_config is a message field: set it when present, otherwise
        # clear it explicitly so the server sees an intentional absence.
        if TenantMfaConfig.to_proto(self.mfa_config):
            request.resource.mfa_config.CopyFrom(
                TenantMfaConfig.to_proto(self.mfa_config)
            )
        else:
            request.resource.ClearField("mfa_config")
        if Primitive.to_proto(self.test_phone_numbers):
            request.resource.test_phone_numbers = Primitive.to_proto(
                self.test_phone_numbers
            )

        if Primitive.to_proto(self.project):
            request.resource.project = Primitive.to_proto(self.project)

        request.service_account_file = self.service_account_file

        response = stub.ApplyIdentitytoolkitBetaTenant(request)
        # Refresh every local field from the server's view of the resource.
        self.name = Primitive.from_proto(response.name)
        self.display_name = Primitive.from_proto(response.display_name)
        self.allow_password_signup = Primitive.from_proto(
            response.allow_password_signup
        )
        self.enable_email_link_signin = Primitive.from_proto(
            response.enable_email_link_signin
        )
        self.disable_auth = Primitive.from_proto(response.disable_auth)
        self.enable_anonymous_user = Primitive.from_proto(
            response.enable_anonymous_user
        )
        self.mfa_config = TenantMfaConfig.from_proto(response.mfa_config)
        self.test_phone_numbers = Primitive.from_proto(response.test_phone_numbers)
        self.project = Primitive.from_proto(response.project)

    def delete(self):
        """Delete the tenant identified by this object's fields."""
        stub = tenant_pb2_grpc.IdentitytoolkitBetaTenantServiceStub(channel.Channel())
        request = tenant_pb2.DeleteIdentitytoolkitBetaTenantRequest()
        request.service_account_file = self.service_account_file
        if Primitive.to_proto(self.name):
            request.resource.name = Primitive.to_proto(self.name)

        if Primitive.to_proto(self.display_name):
            request.resource.display_name = Primitive.to_proto(self.display_name)

        if Primitive.to_proto(self.allow_password_signup):
            request.resource.allow_password_signup = Primitive.to_proto(
                self.allow_password_signup
            )

        if Primitive.to_proto(self.enable_email_link_signin):
            request.resource.enable_email_link_signin = Primitive.to_proto(
                self.enable_email_link_signin
            )

        if Primitive.to_proto(self.disable_auth):
            request.resource.disable_auth = Primitive.to_proto(self.disable_auth)

        if Primitive.to_proto(self.enable_anonymous_user):
            request.resource.enable_anonymous_user = Primitive.to_proto(
                self.enable_anonymous_user
            )

        if TenantMfaConfig.to_proto(self.mfa_config):
            request.resource.mfa_config.CopyFrom(
                TenantMfaConfig.to_proto(self.mfa_config)
            )
        else:
            request.resource.ClearField("mfa_config")
        if Primitive.to_proto(self.test_phone_numbers):
            request.resource.test_phone_numbers = Primitive.to_proto(
                self.test_phone_numbers
            )

        if Primitive.to_proto(self.project):
            request.resource.project = Primitive.to_proto(self.project)

        response = stub.DeleteIdentitytoolkitBetaTenant(request)

    @classmethod
    def list(self, project, service_account_file=""):
        """Return the tenants in *project* as a list of proto items."""
        stub = tenant_pb2_grpc.IdentitytoolkitBetaTenantServiceStub(channel.Channel())
        request = tenant_pb2.ListIdentitytoolkitBetaTenantRequest()
        request.service_account_file = service_account_file
        # NOTE(review): capitalized attribute name comes from the generated
        # request proto - confirm against tenant_pb2 if modifying.
        request.Project = project

        return stub.ListIdentitytoolkitBetaTenant(request).items

    def to_proto(self):
        """Convert this object to an IdentitytoolkitBetaTenant proto."""
        resource = tenant_pb2.IdentitytoolkitBetaTenant()
        if Primitive.to_proto(self.name):
            resource.name = Primitive.to_proto(self.name)
        if Primitive.to_proto(self.display_name):
            resource.display_name = Primitive.to_proto(self.display_name)
        if Primitive.to_proto(self.allow_password_signup):
            resource.allow_password_signup = Primitive.to_proto(
                self.allow_password_signup
            )
        if Primitive.to_proto(self.enable_email_link_signin):
            resource.enable_email_link_signin = Primitive.to_proto(
                self.enable_email_link_signin
            )
        if Primitive.to_proto(self.disable_auth):
            resource.disable_auth = Primitive.to_proto(self.disable_auth)
        if Primitive.to_proto(self.enable_anonymous_user):
            resource.enable_anonymous_user = Primitive.to_proto(
                self.enable_anonymous_user
            )
        if TenantMfaConfig.to_proto(self.mfa_config):
            resource.mfa_config.CopyFrom(TenantMfaConfig.to_proto(self.mfa_config))
        else:
            resource.ClearField("mfa_config")
        if Primitive.to_proto(self.test_phone_numbers):
            resource.test_phone_numbers = Primitive.to_proto(self.test_phone_numbers)
        if Primitive.to_proto(self.project):
            resource.project = Primitive.to_proto(self.project)
        return resource
return resource
class TenantMfaConfig(object):
    """Multi-factor-auth configuration for a tenant: an MFA state plus the
    list of enabled second-factor providers."""

    def __init__(self, state: str = None, enabled_providers: list = None):
        self.state = state
        self.enabled_providers = enabled_providers

    @classmethod
    def to_proto(self, resource):
        """Convert a TenantMfaConfig to its proto; falsy input maps to None."""
        if not resource:
            return None

        res = tenant_pb2.IdentitytoolkitBetaTenantMfaConfig()
        if TenantMfaConfigStateEnum.to_proto(resource.state):
            res.state = TenantMfaConfigStateEnum.to_proto(resource.state)
        if TenantMfaConfigEnabledProvidersEnumArray.to_proto(
            resource.enabled_providers
        ):
            res.enabled_providers.extend(
                TenantMfaConfigEnabledProvidersEnumArray.to_proto(
                    resource.enabled_providers
                )
            )
        return res

    @classmethod
    def from_proto(self, resource):
        """Build a TenantMfaConfig from its proto; falsy input maps to None."""
        if not resource:
            return None

        return TenantMfaConfig(
            state=TenantMfaConfigStateEnum.from_proto(resource.state),
            enabled_providers=TenantMfaConfigEnabledProvidersEnumArray.from_proto(
                resource.enabled_providers
            ),
        )
class TenantMfaConfigArray(object):
    """Converts lists of TenantMfaConfig objects to and from lists of
    their proto representations."""

    @classmethod
    def to_proto(self, resources):
        # Falsy input (None / empty list) is passed through unchanged.
        if not resources:
            return resources
        return list(map(TenantMfaConfig.to_proto, resources))

    @classmethod
    def from_proto(self, resources):
        return list(map(TenantMfaConfig.from_proto, resources))
class TenantMfaConfigStateEnum(object):
    """Maps the MFA-config state enum between its short Python string form
    and the prefixed proto enum value."""

    @classmethod
    def to_proto(self, resource):
        # Falsy input (None / "") is passed through unchanged.
        if not resource:
            return resource
        return tenant_pb2.IdentitytoolkitBetaTenantMfaConfigStateEnum.Value(
            "IdentitytoolkitBetaTenantMfaConfigStateEnum%s" % resource
        )

    @classmethod
    def from_proto(self, resource):
        # Falsy input (e.g. the zero enum value) is passed through unchanged.
        if not resource:
            return resource
        # Strip the generated prefix to recover the short name.
        return tenant_pb2.IdentitytoolkitBetaTenantMfaConfigStateEnum.Name(resource)[
            len("IdentitytoolkitBetaTenantMfaConfigStateEnum") :
        ]
class TenantMfaConfigEnabledProvidersEnum(object):
    """Maps a single enabled-provider enum value between its short Python
    string form and the prefixed proto enum value."""

    @classmethod
    def to_proto(self, resource):
        # Falsy input (None / "") is passed through unchanged.
        if not resource:
            return resource
        return tenant_pb2.IdentitytoolkitBetaTenantMfaConfigEnabledProvidersEnum.Value(
            "IdentitytoolkitBetaTenantMfaConfigEnabledProvidersEnum%s" % resource
        )

    @classmethod
    def from_proto(self, resource):
        if not resource:
            return resource
        return tenant_pb2.IdentitytoolkitBetaTenantMfaConfigEnabledProvidersEnum.Name(
            resource
        )[len("IdentitytoolkitBetaTenantMfaConfigEnabledProvidersEnum") :]


class TenantMfaConfigEnabledProvidersEnumArray(object):
    """List converter for the enabled-providers enum.

    This class was referenced by TenantMfaConfig.to_proto/from_proto but
    missing from the module (a NameError at runtime whenever mfa_config
    was used); it is defined here following the same pattern as
    TenantMfaConfigArray.
    """

    @classmethod
    def to_proto(self, resources):
        if not resources:
            return resources
        return [TenantMfaConfigEnabledProvidersEnum.to_proto(r) for r in resources]

    @classmethod
    def from_proto(self, resources):
        return [TenantMfaConfigEnabledProvidersEnum.from_proto(r) for r in resources]
class Primitive(object):
    """Converts primitive values to/from their proto wire form: any falsy
    value serializes as the empty string; deserialization is identity."""

    @classmethod
    def to_proto(self, s):
        return s if s else ""

    @classmethod
    def from_proto(self, s):
        return s
| GoogleCloudPlatform/declarative-resource-client-library | python/services/identitytoolkit/beta/tenant.py | Python | apache-2.0 | 10,862 |
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from django.utils.html import escape
from django.views.decorators.csrf import ensure_csrf_cookie
from django.contrib.auth import authenticate, login, logout
import json
# Create your views here.
@ensure_csrf_cookie
def index(request):
    """Render the landing page. ensure_csrf_cookie guarantees the CSRF
    cookie is set for subsequent AJAX POSTs from this page."""
    return render(request, 'fin/index.html', {})
def table(request,ticker):
    """Render the per-ticker table template 'fin/table_<ticker>.html'.

    NOTE(review): ticker comes from the URL and is interpolated into the
    template path - assumes the URLconf restricts it to safe characters;
    verify.
    """
    template_name='fin/table_'+ticker+'.html'
    # Fill the type of user programmatically - TBD
    return render(request, template_name, {'user_profile':'anonymous'})
    #return render(request, template_name, {'user_profile':'nameduser'})
    #return render(request, template_name, {'user_profile':'premiumuser'})
def jspractice(request):
    """Render the JavaScript practice/scratch page."""
    return render(request, 'fin/js.html', {})
def dfcf_input_modify(request):
    """Debug view: echo back the DFCF input parameters and changeset
    posted by the client as an HTML summary.

    Expects POST fields 'dfcf_ip_params' and 'dfcf_ip_changeset', each a
    JSON object; per-ticker entries carry 'rev_growth' and
    'earnings_growth' fractions. All user-supplied keys and values are
    HTML-escaped before being included in the response (the previous
    version reflected them verbatim - a reflected-XSS hole).
    """
    txt = ""
    for key in request.POST:
        value = request.POST[key]
        txt += escape(str(key)) + "::" + escape(str(value)) + "<br>"
    txt += "<br><br>"
    dat = request.POST['dfcf_ip_params']
    jdat = json.loads(dat)
    for key in jdat:
        value = jdat[key]
        txt += escape(str(key)) + "::" + escape(str(value)) + "<br>"
    txt += "<br><br>"
    for key in jdat:
        rev_growth = float(jdat[key]['rev_growth'])
        ear_growth = float(jdat[key]['earnings_growth'])
        txt += escape(str(key)) + "::" + "revenue grows at" + str(100*rev_growth) + "% <br>"
        txt += escape(str(key)) + "::" + "Earnings grow at" + str(100*ear_growth) + "% <br>"
    txt += "<br><br>Changeset details<br><br>"
    changeset = request.POST['dfcf_ip_changeset']
    jchangeset = json.loads(changeset)
    for key in jchangeset:
        value = jchangeset[key]
        txt += escape(str(key)) + "::" + escape(str(value)) + "<br>"
    txt += "<br><br>"
    txt += escape(repr(request))
    return HttpResponse(txt)
#    return HttpResponse(request.POST['fname'])
# caller should ensure it is a POST etc.
def fin_auth (request):
    """Authenticate the user from POSTed 'username'/'password' fields.

    Logs the user into the session and returns True on success; returns
    False for bad credentials or inactive accounts.
    """
    username = request.POST.get('username')
    password = request.POST.get('password')
    user = authenticate(username=username, password=password)
    if user is not None:
        if user.is_active:
            login(request, user)
            return True
    return False
@ensure_csrf_cookie
def dfcf_input(request, action="none"):
    """Serve the DFCF input-parameters page.

    Flow: explicit ``action == "logout"`` -> anonymous page; an already
    authenticated user -> their per-user template; a plain GET -> anonymous
    page; a POST with valid credentials -> per-user template; a POST with
    bad credentials -> anonymous page (previously this last case fell off
    the end of the function and returned None, which Django rejects).
    """
    template_name = 'fin/dfcf_input_parameters.html'
    u = request.user
    if action == "logout":
        logout(request)
        return render(request, template_name, {'user_profile': 'anonymous'})
    if u.is_authenticated():
        # NOTE(review): template path is built from the username - confirm
        # usernames are restricted so this cannot traverse directories.
        template_name = 'fin/' + u.username + '/dfcf_input_parameters.html'
        return render(request, template_name, {'user_profile': 'anonymous'})
    if request.method != 'POST':
        return render(request, template_name, {'user_profile': 'anonymous'})
    if fin_auth(request):
        template_name = (
            'fin/' + request.POST.get('username') + '/dfcf_input_parameters.html')
        return render(request, template_name, {'user_profile': 'anonymous'})
    # Bug fix: failed login now renders the anonymous page instead of
    # implicitly returning None.
    return render(request, template_name, {'user_profile': 'anonymous'})
    #return render(request, template_name, {'user_profile':'nameduser'})
    #return render(request, template_name, {'user_profile':'premiumuser'})
| saigkrish/finance | PY/views.py | Python | apache-2.0 | 3,119 |
# -*- coding: utf-8 -*-
"""
This module implements a console output writer.
"""
import tensorflow as tf
from niftynet.engine.application_variables import CONSOLE
from niftynet.engine.signal import ITER_STARTED, ITER_FINISHED
class ConsoleLogger(object):
    """
    This class handles iteration events to print output to the console.
    """

    def __init__(self, **_unused):
        """Subscribe the handlers to the iteration start/finish signals.

        NOTE(review): the connections are never disconnected, so every
        instance created stays subscribed - confirm only one is made.
        """
        ITER_STARTED.connect(self.read_console_vars)
        ITER_FINISHED.connect(self.print_console_vars)

    def read_console_vars(self, sender, **msg):
        """
        Event handler to add all console output ops to the iteration message

        :param sender: a niftynet.application instance
        :param msg: an iteration message instance
        :return:
        """
        # Ops collected under CONSOLE are run with the iteration and their
        # values become available for printing at ITER_FINISHED.
        msg['iter_msg'].ops_to_run[CONSOLE] = \
            sender.outputs_collector.variables(CONSOLE)

    def print_console_vars(self, _sender, **msg):
        """
        Printing iteration message with ``tf.logging`` interface.

        :param _sender: unused; present to satisfy the signal signature
        :param msg: an iteration message instance
        :return:
        """
        tf.logging.info(msg['iter_msg'].to_console_string())
| NifTK/NiftyNet | niftynet/engine/handler_console.py | Python | apache-2.0 | 1,161 |
default_app_config = 'providers.edu.iowaresearch.apps.AppConfig'
"""
Example Record
<record>
<header>
<identifier>oai:ir.uiowa.edu:iwp_archive-1227</identifier>
<datestamp>2016-07-05T19:23:14Z</datestamp>
<setSpec>publication:iwp</setSpec>
<setSpec>publication:grad</setSpec>
<setSpec>publication:iwp_archive</setSpec>
<setSpec>publication:harvest</setSpec>
<setSpec>publication:fullharvest</setSpec>
</header>
<metadata>
<oai_dc:dc xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/
http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
<dc:title>Writing Sample</dc:title>
<dc:creator>Gamerro, Carlos</dc:creator>
<dc:description>
Excerpts from The Adventure of the Busts of Eva Perón and The Islands.
</dc:description>
<dc:date>2008-10-01T07:00:00Z</dc:date>
<dc:type>text</dc:type>
<dc:format>application/pdf</dc:format>
<dc:identifier>http://ir.uiowa.edu/iwp_archive/228</dc:identifier>
<dc:identifier>
http://ir.uiowa.edu/cgi/viewcontent.cgi?article=1227&context=iwp_archive
</dc:identifier>
<dc:rights>Copyright © 2008 Carlos Gamerro</dc:rights>
<dc:source>
International Writing Program Archive of Residents' Work
</dc:source>
<dc:language>eng</dc:language>
<dc:publisher>Iowa Research Online</dc:publisher>
</oai_dc:dc>
</metadata>
</record>
"""
| zamattiac/SHARE | providers/edu/iowaresearch/__init__.py | Python | apache-2.0 | 1,863 |
import Queue
class Graph:
    """Adjacency-list digraph (Python 2) with BFS and recursive DFS.

    NOTE(review): ``visited`` is shared state that is never reset, so a
    second bfs()/dfs() call on the same instance sees earlier marks -
    confirm each traversal is meant to use a fresh Graph.
    """

    def __init__(self, number_of_vertices):
        # Stored but not otherwise used by the traversal methods.
        self.number_of_vertices = number_of_vertices
        # vertex label -> list of adjacent vertex labels
        self.vertex_details = {}
        # vertex label -> 0/1 visit flag (only set for edge sources)
        self.visited = {}

    def add_edge(self, vertex_label, edge):
        """Append directed edge vertex_label -> edge, creating the list lazily."""
        # has_key() is the (deprecated) Python 2 dict membership test.
        if self.vertex_details.has_key(vertex_label):
            self.vertex_details[vertex_label].append(edge)
        else:
            self.vertex_details[vertex_label] = []
            self.vertex_details[vertex_label].append(edge)
        self.visited[vertex_label] = 0

    def bfs(self, starting_vertex):
        """Breadth-first traversal; returns vertices in dequeue order."""
        print "Starting breath first search from vertex: ", starting_vertex
        bfs_queue = Queue.Queue()
        bfs_trace = []
        bfs_queue.put(starting_vertex)
        self.visited[starting_vertex] = 1
        while(not bfs_queue.empty()):
            current_vertex = bfs_queue.get()
            bfs_trace.append(current_vertex)
            adjacent_vertices = self.vertex_details[current_vertex]
            for adjacent_vertex in adjacent_vertices:
                if self.visited[adjacent_vertex] == 0:
                    bfs_queue.put(adjacent_vertex)
                    self.visited[adjacent_vertex] = 1
        return bfs_trace

    def dfs(self, vertex):
        """Recursive depth-first traversal; prints vertices in visit order.

        NOTE(review): raises KeyError for a vertex that never appears as an
        edge source (no vertex_details entry) - confirm expected inputs.
        """
        self.visited[vertex] = 1
        print vertex," ",
        adjacent_vertices = self.vertex_details[vertex]
        for adjacent_vertex in adjacent_vertices:
            if self.visited[adjacent_vertex] == 0:
                self.dfs(adjacent_vertex)

    def print_bfs(self, bfs_trace):
        # Convenience printer for the list returned by bfs().
        print bfs_trace
def main():
    """Build the 4-vertex sample digraph and run a DFS from vertex 2."""
    g = Graph(4)
    g.add_edge(0, 1);
    g.add_edge(0, 2);
    g.add_edge(1, 2);
    g.add_edge(2, 0);
    g.add_edge(2, 3);
    g.add_edge(3, 3);
    # bfs_trace = g.bfs(2)
    # g.print_bfs(bfs_trace)
    g.dfs(2)


if __name__ == '__main__':
    main()
| rdppathak/DataStructuresPython | BinaryTree/TreeTraversal.py | Python | apache-2.0 | 1,537 |
print ("I'm not a function")
def my_function():
    """Print a short greeting to show that a zero-argument function runs."""
    message = "Hey I'm a function!"
    print(message)
def brett(val):
    """Print the args message *val* times (no output when val <= 0)."""
    for _ in range(val):
        print("I'm a function with args!")
my_function()
brett(5) | CiscoDevNet/coding-skills-sample-code | coding202-parsing-json/call-functions.py | Python | apache-2.0 | 217 |
#
# Copyright (c) 2017 Sugimoto Takaaki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import urllib
import json
from collections import OrderedDict
# dictionary of api url
# Ordered so the printed columns are always: btc ltc doge xrp eth mona.
d = OrderedDict()
d['btc']='https://api.cryptonator.com/api/ticker/btc-usd'
d['ltc']='https://api.cryptonator.com/api/ticker/ltc-usd'
d['doge']='https://api.cryptonator.com/api/ticker/doge-usd'
d['xrp']='https://api.cryptonator.com/api/ticker/xrp-usd'
d['eth']='https://api.cryptonator.com/api/ticker/eth-usd'
d['mona']='https://api.cryptonator.com/api/ticker/mona-usd'

outputString = ""
for url in d.values():
    # Python 2 urllib: fetch one ticker's JSON and pull out its USD price.
    sock = urllib.urlopen(url)
    jsonString = sock.read()
    sock.close()
    jsonCurrency = json.loads(jsonString)
    price = jsonCurrency['ticker']['price']
    # Accumulate prices into one space-separated line.
    outputString = outputString + price + " "
print outputString
| sugimotokun/VirtualCurrencySplunk | bin/scripts/vc_usd_nt.py | Python | apache-2.0 | 1,321 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for conversion module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import api
from tensorflow.python.autograph.impl import conversion
from tensorflow.python.autograph.pyct import compiler
from tensorflow.python.framework import constant_op
from tensorflow.python.keras.engine import training
from tensorflow.python.platform import test
class ConversionTest(test.TestCase):
  """Tests for autograph's entity-to-AST conversion module."""

  def _simple_program_ctx(self):
    """Builds a minimal recursive-conversion ProgramContext for the tests."""
    return converter.ProgramContext(
        options=converter.ConversionOptions(recursive=True),
        autograph_module=api)

  def test_is_whitelisted_for_graph(self):
    """Plain user functions convert; autograph utils and TF ops are whitelisted."""

    def test_fn():
      return constant_op.constant(1)

    self.assertFalse(conversion.is_whitelisted_for_graph(test_fn))
    self.assertTrue(conversion.is_whitelisted_for_graph(utils))
    self.assertTrue(conversion.is_whitelisted_for_graph(constant_op.constant))

  def test_convert_entity_to_ast_unsupported_types(self):
    """Non-callable entities (e.g. a plain string) raise NotImplementedError."""
    with self.assertRaises(NotImplementedError):
      program_ctx = self._simple_program_ctx()
      conversion.convert_entity_to_ast('dummy', program_ctx)

  def test_convert_entity_to_ast_callable(self):
    """A closure converts to a renamed FunctionDef keeping its free vars."""
    b = 2

    def f(a):
      return a + b

    program_ctx = self._simple_program_ctx()
    nodes, name, info = conversion.convert_entity_to_ast(f, program_ctx)
    fn_node, = nodes
    self.assertIsInstance(fn_node, gast.FunctionDef)
    self.assertEqual('tf__f', name)
    self.assertIs(info.namespace['b'], b)

  def test_convert_entity_to_ast_function_with_defaults(self):
    """Default-argument expressions are replaced by None in the output AST."""
    b = 2
    c = 1

    def f(a, d=c + 1):
      return a + b + d

    program_ctx = self._simple_program_ctx()
    nodes, name, _ = conversion.convert_entity_to_ast(f, program_ctx)
    fn_node, = nodes
    self.assertIsInstance(fn_node, gast.FunctionDef)
    self.assertEqual('tf__f', name)
    self.assertEqual(
        compiler.ast_to_source(fn_node.args.defaults[0]).strip(), 'None')

  def test_convert_entity_to_ast_call_tree(self):
    """Only the entry function is returned; callees are converted separately."""

    def g(a):
      return a

    def f(a):
      return g(a)

    program_ctx = self._simple_program_ctx()
    nodes, _, _ = conversion.convert_entity_to_ast(f, program_ctx)
    f_node, = nodes
    self.assertEqual('tf__f', f_node.name)

  def test_convert_entity_to_ast_class_hierarchy(self):
    """Converting a class with a non-whitelisted base is unsupported."""

    class TestBase(object):

      def __init__(self, x='base'):
        self.x = x

      def foo(self):
        return self.x

      def bar(self):
        return self.x

    class TestSubclass(TestBase):

      def __init__(self, y):
        super(TestSubclass, self).__init__('sub')
        self.y = y

      def foo(self):
        return self.y

      def baz(self):
        return self.y

    program_ctx = self._simple_program_ctx()
    with self.assertRaisesRegex(NotImplementedError, 'classes.*whitelisted'):
      conversion.convert_entity_to_ast(TestSubclass, program_ctx)

  def test_convert_entity_to_ast_class_hierarchy_whitelisted(self):
    """A whitelisted base (keras Model) is imported rather than converted."""

    class TestSubclass(training.Model):

      def __init__(self, y):
        super(TestSubclass, self).__init__()
        self.built = False

      def call(self, x):
        return 3 * x

    program_ctx = self._simple_program_ctx()
    (import_node, class_node), name, _ = conversion.convert_entity_to_ast(
        TestSubclass, program_ctx)
    self.assertEqual(import_node.names[0].name, 'Model')
    self.assertEqual(name, 'TfTestSubclass')
    self.assertEqual(class_node.name, 'TfTestSubclass')

  def test_convert_entity_to_ast_lambda(self):
    """A lambda converts to an Assign of a Lambda under a generated name."""
    b = 2
    f = lambda x: b * x if x > 0 else -x
    program_ctx = self._simple_program_ctx()
    (fn_node,), name, entity_info = conversion.convert_entity_to_ast(
        f, program_ctx)
    self.assertIsInstance(fn_node, gast.Assign)
    self.assertIsInstance(fn_node.value, gast.Lambda)
    self.assertEqual('tf__lambda', name)
    self.assertIs(entity_info.namespace['b'], b)

  def test_convert_entity_to_ast_multiple_lambdas(self):
    """With several distinguishable lambdas on one line, the right one is found."""
    a, b = 1, 2
    f, _ = (lambda x: a * x, lambda y: b * y)
    program_ctx = self._simple_program_ctx()
    (fn_node,), name, entity_info = conversion.convert_entity_to_ast(
        f, program_ctx)
    self.assertIsInstance(fn_node, gast.Assign)
    self.assertIsInstance(fn_node.value, gast.Lambda)
    self.assertEqual('tf__lambda', name)
    self.assertIs(entity_info.namespace['a'], a)

  def test_convert_entity_to_ast_multiple_lambdas_ambiguous_definitions(self):
    """Indistinguishable same-line lambdas cannot be converted."""
    a, b = 1, 2
    f, _ = (lambda x: a * x, lambda x: b * x)
    program_ctx = self._simple_program_ctx()
    with self.assertRaises(ValueError):
      conversion.convert_entity_to_ast(f, program_ctx)

  def test_convert_entity_to_ast_lambda_code_with_garbage(self):
    """Wrapping and comment noise around a lambda does not break conversion."""
    # pylint:disable=g-long-lambda
    f = (  # intentional wrap
        lambda x: (
            x  # intentional wrap
            + 1),)[0]
    # pylint:enable=g-long-lambda
    program_ctx = self._simple_program_ctx()
    (fn_node,), name, _ = conversion.convert_entity_to_ast(f, program_ctx)
    self.assertIsInstance(fn_node, gast.Assign)
    self.assertIsInstance(fn_node.value, gast.Lambda)
    self.assertEqual('tf__lambda', name)

  def test_convert_entity_to_ast_nested_functions(self):
    """Nested defs remain inside the converted outer function."""
    b = 2

    def f(x):

      def g(x):
        return b * x

      return g(x)

    program_ctx = self._simple_program_ctx()
    (fn_node,), name, entity_info = conversion.convert_entity_to_ast(
        f, program_ctx)
    self.assertIsInstance(fn_node, gast.FunctionDef)
    self.assertEqual(fn_node.name, 'tf__f')
    self.assertEqual('tf__f', name)
    self.assertIs(entity_info.namespace['b'], b)
| kevin-coder/tensorflow-fork | tensorflow/python/autograph/impl/conversion_test.py | Python | apache-2.0 | 6,484 |
import os
import shutil
from android_build_system.pre_checks.base import BaseCheck
from android_build_system.config import AAPT, ZIPALIGN
class EnvCheck(BaseCheck):
    """Verifies that the ANDROID_HOME environment variable is set."""

    def __init__(self):
        super().__init__("Env check")

    def _check(self):
        # Membership test is equivalent to .get(...) is not None for environ.
        return "ANDROID_HOME" in os.environ
class AAPTCheck(BaseCheck):
    """Verifies that the aapt binary was resolved by the build config."""

    def __init__(self):
        super().__init__("Binary 'aapt' found")

    def _check(self):
        # AAPT is resolved once in android_build_system.config; None = missing.
        return AAPT is not None
class ZIPALIGNCheck(BaseCheck):
    """Verifies that the zipalign binary was resolved by the build config."""

    def __init__(self):
        super().__init__("Binary 'zipalgn' found")

    def _check(self):
        # ZIPALIGN is resolved once in android_build_system.config; None = missing.
        return ZIPALIGN is not None
class CmdCheck(BaseCheck):
    """Verifies that an arbitrary executable *cmd* is available on PATH."""

    def __init__(self, cmd):
        # Consistency fix: route the message through BaseCheck.__init__ like
        # the sibling checks do, instead of assigning self.message directly
        # and skipping the base initializer entirely.
        super().__init__("Command '{}' found".format(cmd))
        self.cmd = cmd

    def _check(self):
        # shutil.which returns the resolved path or None.
        return shutil.which(self.cmd) is not None
# noinspection PyProtectedMember,PyUnresolvedReferences
class CompoundMutableDict(dict):
    """
    Resource used for mutable compound dictionaries.

    Writes are mirrored into the owning resource's ``_data`` and recorded in
    its ``_dirty`` map under ``self._name`` (supplied by subclasses), so the
    parent knows which fields to send on save.
    """

    # noinspection PyMissingConstructor
    def __init__(self, **kwargs):
        # _parent is the owning resource; api is kept for subclass use.
        self._parent = kwargs.pop('_parent')
        self._api = kwargs.pop('api')
        for k, v in kwargs.items():
            super().__setitem__(k, v)

    def __setitem__(self, key, value):
        super().__setitem__(key, value)
        # Lazily create the per-section dirty bucket on the parent.
        if self._name not in self._parent._dirty:
            self._parent._dirty.update({self._name: {}})
        if key in self._parent._data[self._name]:
            # Existing key: record/mark dirty only on an actual change.
            if self._parent._data[self._name][key] != value:
                self._parent._dirty[self._name][key] = value
                self._parent._data[self._name][key] = value
        else:
            # New key: always store and mark dirty.
            self._parent._data[self._name][key] = value
            self._parent._dirty[self._name][key] = value

    def __repr__(self):
        # Render as a plain dict of the current items.
        values = {}
        for k, _ in self.items():
            values[k] = self[k]
        return str(values)

    __str__ = __repr__

    def update(self, e=None, **f):
        """dict.update() that funnels changes through __setitem__.

        NOTE(review): the ``other[k] != self[k]`` comparison raises KeyError
        for a key absent from self - confirm callers only update existing
        keys.
        """
        other = {}
        if e:
            other.update(e, **f)
        else:
            other.update(**f)
        for k, v in other.items():
            if other[k] != self[k]:
                self[k] = other[k]

    def items(self):
        # Rebuilt as a list (not a view) of (key, self[key]) pairs.
        values = []
        for k in self.keys():
            values.append((k, self[k]))
        return values

    def equals(self, other):
        """Compare via the parents' stored section data, not dict contents."""
        if not type(other) == type(self):
            return False
        return (
            self is other or
            self._parent._data[self._name] == other._parent._data[self._name]
        )
| sbg/sevenbridges-python | sevenbridges/meta/comp_mutable_dict.py | Python | apache-2.0 | 1,715 |
"""
Configurations
--------------
Various setups for different app instances
"""
class Config:
    """Default config (base class for all environment configs)."""
    DEBUG = False
    TESTING = False
    SESSION_STORE = 'session'
    MONGODB_DB = 'default'
    # NOTE(review): hard-coded secret committed to source - override it
    # outside version control for real deployments.
    SECRET_KEY = 'flask+braiiin=<3'
    # API versions considered live.
    LIVE = ['v1']
    STATIC_PATH = 'static'
    HASHING_ROUNDS = 15
    # Keyword arguments handed to the app server on startup.
    INIT = {
        'port': 8006,
        'host': '127.0.0.1',
    }
class ProductionConfig(Config):
    """Production vars: serve on port 80, everything else inherited."""
    INIT = {
        'port': 80,
        'host': '127.0.0.1',
    }
class DevelopmentConfig(Config):
    """For local runs: debug on, separate 'dev' database."""
    DEBUG = True
    MONGODB_DB = 'dev'
class TestConfig(Config):
    """For automated testing: testing mode on, separate 'test' database."""
    TESTING = True
    MONGODB_DB = 'test'
| Braiiin/outline-logic | outline_logic/config.py | Python | apache-2.0 | 729 |
# -*- coding: utf-8 -*-
""" watcher test cases """
from .shell_test_case import ShellTestCase
from zk_shell.watcher import ChildWatcher
class WatcherTestCase(ShellTestCase):
    """ test watcher """

    def test_add_update(self):
        """add() then update() on an existing path prints the child listing."""
        watcher = ChildWatcher(self.client, print_func=self.shell.show_output)
        path = "%s/watch" % self.tests_path
        self.shell.onecmd("create %s ''" % path)
        watcher.add(path, True)
        # update() calls remove() as well, if the path exists.
        watcher.update(path)
        expected = "\n/tests/watch:\n\n"
        # Fix: assertEquals is a deprecated alias (removed in Python 3.12);
        # use the canonical assertEqual.
        self.assertEqual(expected, self.output.getvalue())
| rgs1/zk_shell | zk_shell/tests/test_watcher.py | Python | apache-2.0 | 633 |
# Django settings for mcjsms project.
import os
DEBUG = False
TEMPLATE_DEBUG = DEBUG
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
ADMINS = []
MANAGERS = ADMINS
if DEBUG:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': os.path.join(SITE_ROOT, 'data/dev.sqlite'), # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'mcj_sms', # Or path to database file if using sqlite3.
'USER': 'mcj_sms', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '127.0.0.1', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '3306', # Set to empty string for default. Not used with sqlite3.
'OPTIONS': {
'init_command': 'SET storage_engine=INNODB',
}
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Montreal'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
if DEBUG:
MEDIA_ROOT = os.path.join(SITE_ROOT, 'media/')
else:
MEDIA_ROOT = '/home/ramisayar/public/mcj/mcj2011/media/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/site_media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
if DEBUG:
STATIC_ROOT = os.path.join(SITE_ROOT, 'static')
else:
STATIC_ROOT = '/home/ramisayar/public/mcj/mcj2011/static/'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(SITE_ROOT, 'global_static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '3+pefpl6rsg&#smr*4$f(18nasrr0u)wp_4q=lkn50n-qz0rjt'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'sms.urls'
TEMPLATE_DIRS = (os.path.join(SITE_ROOT, 'templates'),)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
#'django.contrib.admin',
'django.contrib.localflavor',
'django_twilio',
'sms.twilio_sms'
)
# NOTE(review): credentials are blank here - keep the real SID/token out of
# source control (load from the environment or a local settings override).
TWILIO_ACCOUNT_SID = ''
TWILIO_AUTH_TOKEN = ''
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| sayar/mcj2011 | sms/settings.py | Python | apache-2.0 | 5,993 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Set User.last_login automatically in the DB
Revision ID: a65114e48d6f
Revises: 104b4c56862b
Create Date: 2016-06-11 00:28:39.176496
"""
from alembic import op
import sqlalchemy as sa
revision = 'a65114e48d6f'
down_revision = '104b4c56862b'
def upgrade():
    """Give accounts_user.last_login a DB-side default of NOW()."""
    op.alter_column(
        "accounts_user",
        "last_login",
        server_default=sa.func.now(),
    )
def downgrade():
    """Drop the server-side default again (reverse of upgrade)."""
    op.alter_column("accounts_user", "last_login", server_default=None)
| alex/warehouse | warehouse/migrations/versions/a65114e48d6f_set_user_last_login_automatically_in_.py | Python | apache-2.0 | 1,008 |
# coding: utf-8
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the translation changes."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.controllers import acl_decorators
from core.controllers import base
from core.domain import fs_domain
from core.domain import fs_services
from core.domain import user_services
import feconf
import python_utils
import mutagen
from mutagen import mp3
class AudioUploadHandler(base.BaseHandler):
    """Handles audio file uploads (to Google Cloud Storage in production, and
    to the local datastore in dev).
    """

    # The string to prefix to the filename (before tacking the whole thing on
    # to the end of 'assets/').
    _FILENAME_PREFIX = 'audio'

    @acl_decorators.can_voiceover_exploration
    def post(self, exploration_id):
        """Saves an audio file uploaded by a content creator.

        Reads ``raw_audio_file`` from the request and ``filename`` from the
        payload; responds with the stored filename and the audio duration in
        seconds. Raises InvalidInputException for missing audio, a missing or
        disallowed filename extension, undecodable audio, audio exceeding
        MAX_AUDIO_FILE_LENGTH_SEC, or a mime/extension mismatch.
        """
        raw_audio_file = self.request.get('raw_audio_file')
        filename = self.payload.get('filename')
        allowed_formats = list(feconf.ACCEPTED_AUDIO_EXTENSIONS.keys())

        if not raw_audio_file:
            raise self.InvalidInputException('No audio supplied')
        # Split off the extension; rfind covers dotted filenames like a.b.mp3.
        dot_index = filename.rfind('.')
        extension = filename[dot_index + 1:].lower()

        if dot_index == -1 or dot_index == 0:
            raise self.InvalidInputException(
                'No filename extension: it should have '
                'one of the following extensions: %s' % allowed_formats)
        if extension not in feconf.ACCEPTED_AUDIO_EXTENSIONS:
            raise self.InvalidInputException(
                'Invalid filename extension: it should have '
                'one of the following extensions: %s' % allowed_formats)

        tempbuffer = python_utils.string_io()
        tempbuffer.write(raw_audio_file)
        tempbuffer.seek(0)
        try:
            # For every accepted extension, use the mutagen-specific
            # constructor for that type. This will catch mismatched audio
            # types e.g. uploading a flac file with an MP3 extension.
            if extension == 'mp3':
                audio = mp3.MP3(tempbuffer)
            else:
                audio = mutagen.File(tempbuffer)
        except mutagen.MutagenError:
            # The calls to mp3.MP3() versus mutagen.File() seem to behave
            # differently upon not being able to interpret the audio.
            # mp3.MP3() raises a MutagenError whereas mutagen.File()
            # seems to return None. It's not clear if this is always
            # the case. Occasionally, mutagen.File() also seems to
            # raise a MutagenError.
            raise self.InvalidInputException(
                'Audio not recognized as a %s file' % extension)
        tempbuffer.close()

        if audio is None:
            raise self.InvalidInputException(
                'Audio not recognized as a %s file' % extension)
        if audio.info.length > feconf.MAX_AUDIO_FILE_LENGTH_SEC:
            raise self.InvalidInputException(
                'Audio files must be under %s seconds in length. The uploaded '
                'file is %.2f seconds long.' % (
                    feconf.MAX_AUDIO_FILE_LENGTH_SEC, audio.info.length))
        if len(set(audio.mime).intersection(
                set(feconf.ACCEPTED_AUDIO_EXTENSIONS[extension]))) == 0:
            raise self.InvalidInputException(
                'Although the filename extension indicates the file '
                'is a %s file, it was not recognized as one. '
                'Found mime types: %s' % (extension, audio.mime))

        mimetype = audio.mime[0]
        # Fetch the audio file duration from the Mutagen metadata.
        duration_secs = audio.info.length

        # For a strange, unknown reason, the audio variable must be
        # deleted before opening cloud storage. If not, cloud storage
        # throws a very mysterious error that entails a mutagen
        # object being recursively passed around in app engine.
        del audio

        # Audio files are stored to the datastore in the dev env, and to GCS
        # in production.
        file_system_class = fs_services.get_entity_file_system_class()
        fs = fs_domain.AbstractFileSystem(file_system_class(
            feconf.ENTITY_TYPE_EXPLORATION, exploration_id))
        fs.commit(
            '%s/%s' % (self._FILENAME_PREFIX, filename),
            raw_audio_file, mimetype=mimetype)

        self.render_json({'filename': filename, 'duration_secs': duration_secs})
class StartedTranslationTutorialEventHandler(base.BaseHandler):
    """Records that this user has started the state translation tutorial."""

    @acl_decorators.can_play_exploration
    def post(self, unused_exploration_id):
        """Handles POST requests.

        Marks the tutorial as started for the requesting user and returns an
        empty JSON payload.
        """
        user_services.record_user_started_state_translation_tutorial(
            self.user_id)
        self.render_json({})
| prasanna08/oppia | core/controllers/voice_artist.py | Python | apache-2.0 | 5,564 |
from JumpScale import j
base = j.tools.cuisine._getBaseClass()
# TODO: *4 unfinished but ok for now
class CuisineHadoop(base):
    """Installs Apache Hadoop 2.7.2 (with OpenJDK 7) on Ubuntu hosts."""

    def _install(self):
        """Download/unpack Hadoop under /opt and export PATH/JAVA_HOME.

        Raises:
            NotImplementedError: when the target host is not Ubuntu.
        """
        if self._cuisine.core.isUbuntu:
            # Bug fix: the original first line read
            # "apt-get install -y apt-get install openjdk-7-jre", which tries
            # to install packages named "apt-get" and "install".
            C = """\
            apt-get install -y openjdk-7-jre
            cd $tmpDir
            wget -c http://www-us.apache.org/dist/hadoop/common/hadoop-2.7.2/hadoop-2.7.2.tar.gz
            tar -xf hadoop-2.7.2.tar.gz -C /opt/
            """
            C = self._cuisine.bash.replaceEnvironInText(C)
            C = self._cuisine.core.args_replace(C)
            self._cuisine.core.execute_bash(C, profile=True)
            self._cuisine.bash.addPath("/opt/hadoop-2.7.2/bin")
            self._cuisine.bash.addPath("/opt/hadoop-2.7.2/sbin")
            self._cuisine.bash.environSet("JAVA_HOME", "/usr/lib/jvm/java-7-openjdk-amd64")
            self._cuisine.bash.environSet("HADOOP_PREFIX", "/opt/hadoop-2.7.2/")
        else:
            raise NotImplementedError("unsupported platform")

    def install(self):
        """Public entry point; defers to _install()."""
        self._install()
| Jumpscale/jumpscale_core8 | lib/JumpScale/tools/cuisine/apps/CuisineHadoop.py | Python | apache-2.0 | 1,061 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import (
Any,
AsyncIterator,
Awaitable,
Callable,
Sequence,
Tuple,
Optional,
Iterator,
)
from google.cloud.certificate_manager_v1.types import certificate_manager
class ListCertificatesPager:
    """A pager for iterating through ``list_certificates`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.certificate_manager_v1.types.ListCertificatesResponse` object, and
    provides an ``__iter__`` method to iterate through its
    ``certificates`` field.

    If there are more pages, the ``__iter__`` method will make additional
    ``ListCertificates`` requests and continue to iterate
    through the ``certificates`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.certificate_manager_v1.types.ListCertificatesResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """

    def __init__(
        self,
        method: Callable[..., certificate_manager.ListCertificatesResponse],
        request: certificate_manager.ListCertificatesRequest,
        response: certificate_manager.ListCertificatesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.certificate_manager_v1.types.ListCertificatesRequest):
                The initial request object.
            response (google.cloud.certificate_manager_v1.types.ListCertificatesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Re-wrap so page_token mutation below never touches the caller's request.
        self._request = certificate_manager.ListCertificatesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recently fetched response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[certificate_manager.ListCertificatesResponse]:
        # Yield the initial response, then lazily fetch subsequent pages by
        # chaining next_page_token into page_token until it comes back empty.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[certificate_manager.Certificate]:
        # Flatten all pages into a single stream of Certificate messages.
        for page in self.pages:
            yield from page.certificates

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListCertificatesAsyncPager:
    """A pager for iterating through ``list_certificates`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.certificate_manager_v1.types.ListCertificatesResponse` object, and
    provides an ``__aiter__`` method to iterate through its
    ``certificates`` field.

    If there are more pages, the ``__aiter__`` method will make additional
    ``ListCertificates`` requests and continue to iterate
    through the ``certificates`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.certificate_manager_v1.types.ListCertificatesResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[certificate_manager.ListCertificatesResponse]],
        request: certificate_manager.ListCertificatesRequest,
        response: certificate_manager.ListCertificatesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.certificate_manager_v1.types.ListCertificatesRequest):
                The initial request object.
            response (google.cloud.certificate_manager_v1.types.ListCertificatesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Re-wrap so page_token mutation below never touches the caller's request.
        self._request = certificate_manager.ListCertificatesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recently fetched response.
        return getattr(self._response, name)

    @property
    async def pages(
        self,
    ) -> AsyncIterator[certificate_manager.ListCertificatesResponse]:
        # Yield the initial response, then lazily await subsequent pages by
        # chaining next_page_token into page_token until it comes back empty.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[certificate_manager.Certificate]:
        # Flatten all pages into a single async stream of Certificate messages.
        async def async_generator():
            async for page in self.pages:
                for response in page.certificates:
                    yield response

        return async_generator()

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListCertificateMapsPager:
    """Pager for ``list_certificate_maps`` results.

    Wraps an initial
    :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapsResponse`
    and makes it iterable: ``__iter__`` walks the ``certificate_maps`` field,
    issuing further ``ListCertificateMaps`` requests whenever a page carries a
    ``next_page_token``.

    Attribute access is proxied to the most recently fetched response, so the
    usual response attributes remain available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[..., certificate_manager.ListCertificateMapsResponse],
        request: certificate_manager.ListCertificateMapsRequest,
        response: certificate_manager.ListCertificateMapsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API call used to fetch subsequent pages.
            request (google.cloud.certificate_manager_v1.types.ListCertificateMapsRequest):
                The initial request object.
            response (google.cloud.certificate_manager_v1.types.ListCertificateMapsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Re-wrap the request so page-token mutation never leaks to the caller.
        self._request = certificate_manager.ListCertificateMapsRequest(request)
        self._method = method
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[certificate_manager.ListCertificateMapsResponse]:
        yield self._response
        while True:
            token = self._response.next_page_token
            if not token:
                break
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[certificate_manager.CertificateMap]:
        return (item for page in self.pages for item in page.certificate_maps)

    def __repr__(self) -> str:
        return f"{type(self).__name__}<{self._response!r}>"
class ListCertificateMapsAsyncPager:
    """Async pager for ``list_certificate_maps`` results.

    Wraps an initial
    :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapsResponse`
    and makes it asynchronously iterable: ``__aiter__`` walks the
    ``certificate_maps`` field, awaiting further ``ListCertificateMaps`` calls
    whenever a page carries a ``next_page_token``.

    Attribute access is proxied to the most recently fetched response, so the
    usual response attributes remain available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[certificate_manager.ListCertificateMapsResponse]],
        request: certificate_manager.ListCertificateMapsRequest,
        response: certificate_manager.ListCertificateMapsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The awaitable API call used to fetch
                subsequent pages.
            request (google.cloud.certificate_manager_v1.types.ListCertificateMapsRequest):
                The initial request object.
            response (google.cloud.certificate_manager_v1.types.ListCertificateMapsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Re-wrap the request so page-token mutation never leaks to the caller.
        self._request = certificate_manager.ListCertificateMapsRequest(request)
        self._method = method
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[certificate_manager.ListCertificateMapsResponse]:
        yield self._response
        while True:
            token = self._response.next_page_token
            if not token:
                break
            self._request.page_token = token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[certificate_manager.CertificateMap]:
        async def _flatten():
            async for page in self.pages:
                for item in page.certificate_maps:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{type(self).__name__}<{self._response!r}>"
class ListCertificateMapEntriesPager:
    """Pager for ``list_certificate_map_entries`` results.

    Wraps an initial
    :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesResponse`
    and makes it iterable: ``__iter__`` walks the ``certificate_map_entries``
    field, issuing further ``ListCertificateMapEntries`` requests whenever a
    page carries a ``next_page_token``.

    Attribute access is proxied to the most recently fetched response, so the
    usual response attributes remain available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[..., certificate_manager.ListCertificateMapEntriesResponse],
        request: certificate_manager.ListCertificateMapEntriesRequest,
        response: certificate_manager.ListCertificateMapEntriesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API call used to fetch subsequent pages.
            request (google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesRequest):
                The initial request object.
            response (google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Re-wrap the request so page-token mutation never leaks to the caller.
        self._request = certificate_manager.ListCertificateMapEntriesRequest(request)
        self._method = method
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[certificate_manager.ListCertificateMapEntriesResponse]:
        yield self._response
        while True:
            token = self._response.next_page_token
            if not token:
                break
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[certificate_manager.CertificateMapEntry]:
        return (item for page in self.pages for item in page.certificate_map_entries)

    def __repr__(self) -> str:
        return f"{type(self).__name__}<{self._response!r}>"
class ListCertificateMapEntriesAsyncPager:
    """Async pager for ``list_certificate_map_entries`` results.

    Wraps an initial
    :class:`google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesResponse`
    and makes it asynchronously iterable: ``__aiter__`` walks the
    ``certificate_map_entries`` field, awaiting further
    ``ListCertificateMapEntries`` calls whenever a page carries a
    ``next_page_token``.

    Attribute access is proxied to the most recently fetched response, so the
    usual response attributes remain available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[certificate_manager.ListCertificateMapEntriesResponse]],
        request: certificate_manager.ListCertificateMapEntriesRequest,
        response: certificate_manager.ListCertificateMapEntriesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The awaitable API call used to fetch
                subsequent pages.
            request (google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesRequest):
                The initial request object.
            response (google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Re-wrap the request so page-token mutation never leaks to the caller.
        self._request = certificate_manager.ListCertificateMapEntriesRequest(request)
        self._method = method
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[certificate_manager.ListCertificateMapEntriesResponse]:
        yield self._response
        while True:
            token = self._response.next_page_token
            if not token:
                break
            self._request.page_token = token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[certificate_manager.CertificateMapEntry]:
        async def _flatten():
            async for page in self.pages:
                for item in page.certificate_map_entries:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{type(self).__name__}<{self._response!r}>"
class ListDnsAuthorizationsPager:
    """Pager for ``list_dns_authorizations`` results.

    Wraps an initial
    :class:`google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsResponse`
    and makes it iterable: ``__iter__`` walks the ``dns_authorizations``
    field, issuing further ``ListDnsAuthorizations`` requests whenever a page
    carries a ``next_page_token``.

    Attribute access is proxied to the most recently fetched response, so the
    usual response attributes remain available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[..., certificate_manager.ListDnsAuthorizationsResponse],
        request: certificate_manager.ListDnsAuthorizationsRequest,
        response: certificate_manager.ListDnsAuthorizationsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API call used to fetch subsequent pages.
            request (google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsRequest):
                The initial request object.
            response (google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Re-wrap the request so page-token mutation never leaks to the caller.
        self._request = certificate_manager.ListDnsAuthorizationsRequest(request)
        self._method = method
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[certificate_manager.ListDnsAuthorizationsResponse]:
        yield self._response
        while True:
            token = self._response.next_page_token
            if not token:
                break
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[certificate_manager.DnsAuthorization]:
        return (item for page in self.pages for item in page.dns_authorizations)

    def __repr__(self) -> str:
        return f"{type(self).__name__}<{self._response!r}>"
class ListDnsAuthorizationsAsyncPager:
    """Async pager for ``list_dns_authorizations`` results.

    Wraps an initial
    :class:`google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsResponse`
    and makes it asynchronously iterable: ``__aiter__`` walks the
    ``dns_authorizations`` field, awaiting further ``ListDnsAuthorizations``
    calls whenever a page carries a ``next_page_token``.

    Attribute access is proxied to the most recently fetched response, so the
    usual response attributes remain available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[certificate_manager.ListDnsAuthorizationsResponse]],
        request: certificate_manager.ListDnsAuthorizationsRequest,
        response: certificate_manager.ListDnsAuthorizationsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The awaitable API call used to fetch
                subsequent pages.
            request (google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsRequest):
                The initial request object.
            response (google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Re-wrap the request so page-token mutation never leaks to the caller.
        self._request = certificate_manager.ListDnsAuthorizationsRequest(request)
        self._method = method
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[certificate_manager.ListDnsAuthorizationsResponse]:
        yield self._response
        while True:
            token = self._response.next_page_token
            if not token:
                break
            self._request.page_token = token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[certificate_manager.DnsAuthorization]:
        async def _flatten():
            async for page in self.pages:
                for item in page.dns_authorizations:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{type(self).__name__}<{self._response!r}>"
| googleapis/python-certificate-manager | google/cloud/certificate_manager_v1/services/certificate_manager/pagers.py | Python | apache-2.0 | 22,524 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-03-07 02:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration. Do not hand-edit operations after it
    # has been applied to a shared database.
    # Must run after the previous medgointranet migration.
    dependencies = [
        ('medgointranet', '0012_motivoanulacion'),
    ]
    # Adds a boolean flag to the Atencion model recording whether the doctor
    # completed the form; existing rows default to False.
    operations = [
        migrations.AddField(
            model_name='atencion',
            name='formulario',
            field=models.BooleanField(default=False, verbose_name='¿Doctor completo formulario?'),
        ),
    ]
| ElitosGon/medgoproject | medgointranet/migrations/0013_atencion_formulario.py | Python | apache-2.0 | 507 |
from __future__ import print_function
from __future__ import division
import sys
sys.path.insert(1, "../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.model_selection import H2OModelSelectionEstimator as modelSelection
# test modelselection algorithm for regression only. Make sure the result frame contains the correct information. Make
# sure that we can instantiate the best model from model ID, perform scoring with it.
def test_gaussian_result_frame_model_id():
    """Regression-mode modelselection check.

    Verifies that the result frame references models which can be
    re-instantiated from their model ids and used for scoring, that the
    ``allsubsets`` and ``maxr`` modes produce identical predictions, and that
    the best R2 values agree across the attribute accessor, the result frame,
    and the model itself.
    """
    prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
    response_col = "GLEASON"
    predictor_cols = ["AGE", "RACE", "CAPSULE", "DCAPS", "PSA", "VOL", "DPROS"]
    # Train one model per mode on identical data/seed.
    maxr_model = modelSelection(seed=12345, max_predictor_number=7, mode="maxr")
    maxr_model.train(training_frame=prostate, x=predictor_cols, y=response_col)
    allsubsets_model = modelSelection(seed=12345, max_predictor_number=7, mode="allsubsets")
    allsubsets_model.train(training_frame=prostate, x=predictor_cols, y=response_col)
    result_frame_allsubsets = allsubsets_model.result()
    best_r2_allsubsets = allsubsets_model.get_best_R2_values()
    result_frame_maxr = maxr_model.result()
    best_r2_maxr = maxr_model.get_best_R2_values()
    for ind in range(result_frame_allsubsets.nrows):
        # R2 from the accessor attribute.
        best_r2_value_allsubsets = best_r2_allsubsets[ind]
        one_model_allsubsets = h2o.get_model(result_frame_allsubsets["model_id"][ind, 0])
        pred_allsubsets = one_model_allsubsets.predict(prostate)
        print("last element of predictor frame: {0}".format(
            pred_allsubsets[pred_allsubsets.nrows - 1, pred_allsubsets.ncols - 1]))
        assert pred_allsubsets.nrows == prostate.nrows, \
            "expected dataset row: {0}, actual dataset row: {1}".format(pred_allsubsets.nrows, prostate.nrows)
        best_r2_value_maxr = best_r2_maxr[ind]
        one_model_maxr = h2o.get_model(result_frame_maxr["model_id"][ind, 0])
        pred_maxr = one_model_maxr.predict(prostate)
        # allsubsets and maxr must score identically.
        pyunit_utils.compare_frames_local(pred_maxr, pred_allsubsets, prob=1, tol=1e-6)
        # R2 from the result frame vs R2 recomputed from the model.
        frame_r2_allsubsets = result_frame_allsubsets["best_r2_value"][ind, 0]
        model_r2_allsubsets = one_model_allsubsets.r2()
        assert abs(best_r2_value_allsubsets - frame_r2_allsubsets) < 1e-6, \
            "expected best r2: {0}, actual best r2: {1}".format(best_r2_value_allsubsets, frame_r2_allsubsets)
        assert abs(frame_r2_allsubsets - model_r2_allsubsets) < 1e-6, \
            "expected best r2: {0}, actual best r2: {1}".format(model_r2_allsubsets, frame_r2_allsubsets)
        assert abs(best_r2_value_maxr - model_r2_allsubsets) < 1e-6, \
            "expected best r2: {0}, maxr best r2: {1}".format(best_r2_value_maxr, model_r2_allsubsets)
if __name__ == "__main__":
    # Run under the pyunit harness when executed as a script.
    pyunit_utils.standalone_test(test_gaussian_result_frame_model_id)
else:
    # Imported by the test runner: execute directly.
    test_gaussian_result_frame_model_id()
| h2oai/h2o-3 | h2o-py/tests/testdir_algos/modelselection/pyunit_PUBDEV_8346_modelselection_result_frame.py | Python | apache-2.0 | 3,191 |
"""
Django settings for sample project.
Generated by 'django-admin startproject' using Django 1.10.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
from warnings import warn
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$^yawh-48g!2@mq5!bfj3pq0r%ld+xyr+zlpm_q@5k(4$ur1v2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'sample_app',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'sample_project.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')]
        ,
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'sample_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# FIXME: Tests with one or multiple DBs.
# NOTE(review): the default alias uses the custom 'arangodb_driver' backend;
# credentials below are sample-only and must not be reused in production.
DATABASES = {
    'default': {
        'ENGINE': 'arangodb_driver',
        'HOST': 'localhost',
        'PORT': '8529',
        'NAME': 'teste_python',
        'USER': 'root',
        'PASSWORD': 'omoomo',
    }
}
# Alternative multi-database configuration kept for the FIXME above.
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.sqlite3',
#         'NAME': 'defaultbd',
#     },
#     'arangodb': {
#         'ENGINE': 'arangodb_driver',
#         'HOST': 'localhost',
#         'PORT': '8529',
#         'NAME': 'teste_python',
#         'USER': 'root',
#         'PASSWORD': 'omoomo',
#     }
#
# }
# DATABASE_ROUTERS = ['arangodb_driver.router.GraphRouter']
# ARANGODB: Map model types to database names.
# If not defined, "default" maps to "default".
DB_ROUTES = {'graph': 'arangodb'}
# ARANGODB: The name of the property in the model that defines the type of the model (default: 'model_type').
DB_ROUTES_MODEL_TYPE_PROPERTY = 'model_type'  # type: str
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
| pablotcarreira/django-arangodb | sample_project/settings.py | Python | apache-2.0 | 4,023 |
#!/usr/bin/env python
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Analytics Data API sample application demonstrating the usage of
property quota metadata.
See https://developers.google.com/analytics/devguides/reporting/data/v1/rest/v1beta/properties/runReport#body.request_body.FIELDS.return_property_quota
for more information.
"""
# [START analyticsdata_run_report_with_property_quota]
from google.analytics.data_v1beta import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import DateRange
from google.analytics.data_v1beta.types import Dimension
from google.analytics.data_v1beta.types import Metric
from google.analytics.data_v1beta.types import RunReportRequest
def run_sample():
    """Runs the sample."""
    # TODO(developer): Replace this variable with your Google Analytics 4
    # property ID before running the sample.
    run_report_with_property_quota(property_id="YOUR-GA4-PROPERTY-ID")
def run_report_with_property_quota(property_id="YOUR-GA4-PROPERTY-ID"):
    """Runs a report and prints property quota information."""
    client = BetaAnalyticsDataClient()
    report_request = RunReportRequest(
        property=f"properties/{property_id}",
        return_property_quota=True,  # ask the API to echo quota state back
        dimensions=[Dimension(name="country")],
        metrics=[Metric(name="activeUsers")],
        date_ranges=[DateRange(start_date="7daysAgo", end_date="today")],
    )
    response = client.run_report(report_request)

    # [START analyticsdata_run_report_with_property_quota_print_response]
    if response.property_quota:
        quota = response.property_quota
        # Each entry pairs a printable label with its QuotaStatus message.
        quota_statuses = [
            ("Tokens per day", quota.tokens_per_day),
            ("Tokens per hour", quota.tokens_per_hour),
            ("Concurrent requests", quota.concurrent_requests),
            ("Server errors per project per hour", quota.server_errors_per_project_per_hour),
            ("Potentially thresholded requests per hour", quota.potentially_thresholded_requests_per_hour),
        ]
        for label, status in quota_statuses:
            print(f"{label} quota consumed: {status.consumed}, remaining: {status.remaining}.")
    # [END analyticsdata_run_report_with_property_quota_print_response]
# [END analyticsdata_run_report_with_property_quota]
if __name__ == "__main__":
    # Allow running the sample directly from the command line.
    run_sample()
| googleapis/python-analytics-data | samples/snippets/run_report_with_property_quota.py | Python | apache-2.0 | 3,462 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import urlparse
from keystoneclient import client
from keystoneclient import exceptions
# Module-level logger for best-effort discovery failures (see broad excepts below).
_logger = logging.getLogger(__name__)
class Client(client.HTTPClient):
    """Client for the OpenStack Keystone pre-version calls API.
    :param string endpoint: A user-supplied endpoint URL for the keystone
                            service.
    :param integer timeout: Allows customization of the timeout for client
                            http requests. (optional)
    Example::
        >>> from keystoneclient.generic import client
        >>> root = client.Client(auth_url=KEYSTONE_URL)
        >>> versions = root.discover()
        ...
        >>> from keystoneclient.v2_0 import client as v2client
        >>> keystone = v2client.Client(auth_url=versions['v2.0']['url'])
        ...
        >>> user = keystone.users.get(USER_ID)
        >>> user.delete()
    """
    def __init__(self, endpoint=None, **kwargs):
        """ Initialize a new client for the Keystone v2.0 API. """
        super(Client, self).__init__(endpoint=endpoint, **kwargs)
        # Kept so discover() can be called without repeating the URL.
        self.endpoint = endpoint
    def discover(self, url=None):
        """ Discover Keystone servers and return API versions supported.
        :param url: optional url to test (without version)
        Returns::
            {
                'message': 'Keystone found at http://127.0.0.1:5000/',
                'v2.0': {
                    'status': 'beta',
                    'url': 'http://127.0.0.1:5000/v2.0/',
                    'id': 'v2.0'
                },
            }
        """
        if url:
            return self._check_keystone_versions(url)
        else:
            # No URL supplied: probe the conventional local admin port.
            return self._local_keystone_exists()
    def _local_keystone_exists(self):
        """ Checks if Keystone is available on default local port 35357 """
        return self._check_keystone_versions("http://localhost:35357")
    def _check_keystone_versions(self, url):
        """ Calls Keystone URL and detects the available API versions """
        try:
            httpclient = client.HTTPClient()
            resp, body = httpclient.request(url, "GET",
                                    headers={'Accept': 'application/json'})
            if resp.status in (200, 204): # in some cases we get No Content
                try:
                    results = {}
                    if 'version' in body:
                        # Single-version payload.
                        results['message'] = "Keystone found at %s" % url
                        version = body['version']
                        # Stable/diablo incorrect format
                        id, status, version_url = self._get_version_info(
                            version, url)
                        results[str(id)] = {"id": id,
                                            "status": status,
                                            "url": version_url}
                        return results
                    elif 'versions' in body:
                        # Correct format
                        results['message'] = "Keystone found at %s" % url
                        for version in body['versions']['values']:
                            id, status, version_url = self._get_version_info(
                                version, url)
                            results[str(id)] = {"id": id,
                                                "status": status,
                                                "url": version_url}
                        return results
                    else:
                        # 2xx but neither recognized payload shape.
                        results['message'] = "Unrecognized response from %s" \
                            % url
                        return results
                except KeyError:
                    # Payload shape matched but required keys were missing.
                    raise exceptions.AuthorizationFailure()
            elif resp.status == 305:
                # 305 Use Proxy: retry discovery against the redirect target.
                # NOTE(review): assumes resp is dict-like (httplib2 Response)
                # so resp['location'] reads the Location header -- confirm.
                return self._check_keystone_versions(resp['location'])
            else:
                raise exceptions.from_response(resp, body)
        except Exception as e:
            # Broad catch: any failure (including the raises above) is logged
            # and the method implicitly returns None; callers must handle it.
            _logger.exception(e)
    def discover_extensions(self, url=None):
        """ Discover Keystone extensions supported.
        :param url: optional url to test (should have a version in it)
        Returns::
            {
                'message': 'Keystone extensions at http://127.0.0.1:35357/v2',
                'OS-KSEC2': 'OpenStack EC2 Credentials Extension',
            }
        """
        # NOTE(review): unlike discover(), there is no local fallback -- a
        # missing url silently returns None.
        if url:
            return self._check_keystone_extensions(url)
    def _check_keystone_extensions(self, url):
        """ Calls Keystone URL and detects the available extensions """
        try:
            httpclient = client.HTTPClient()
            if not url.endswith("/"):
                url += '/'
            resp, body = httpclient.request("%sextensions" % url, "GET",
                                            headers={'Accept': 'application/json'})
            if resp.status in (200, 204): # in some cases we get No Content
                try:
                    results = {}
                    if 'extensions' in body:
                        if 'values' in body['extensions']:
                            # Parse correct format (per contract)
                            for extension in body['extensions']['values']:
                                alias, name = self._get_extension_info(
                                    extension['extension'])
                                results[alias] = name
                            return results
                        else:
                            # Support incorrect, but prevalent format
                            for extension in body['extensions']:
                                alias, name = self._get_extension_info(
                                    extension)
                                results[alias] = name
                            return results
                    else:
                        results['message'] = "Unrecognized extensions" \
                            " response from %s" % url
                        return results
                except KeyError:
                    raise exceptions.AuthorizationFailure()
            elif resp.status == 305:
                # 305 Use Proxy: retry against the redirect target (see note
                # in _check_keystone_versions about resp['location']).
                return self._check_keystone_extensions(resp['location'])
            else:
                raise exceptions.from_response(resp, body)
        except Exception as e:
            # Best-effort: log and implicitly return None on any failure.
            _logger.exception(e)
    @staticmethod
    def _get_version_info(version, root_url):
        """ Parses version information
        :param version: a dict of a Keystone version response
        :param root_url: string url used to construct
                         the version if no URL is provided.
        :returns: tuple - (verionId, versionStatus, versionUrl)
        """
        id = version['id']
        status = version['status']
        # Default URL: root_url joined with the version id; a 'self' link in
        # the payload overrides it.
        ref = urlparse.urljoin(root_url, id)
        if 'links' in version:
            for link in version['links']:
                if link['rel'] == 'self':
                    ref = link['href']
                    break
        return (id, status, ref)
    @staticmethod
    def _get_extension_info(extension):
        """ Parses extension information
        :param extension: a dict of a Keystone extension response
        :returns: tuple - (alias, name)
        """
        alias = extension['alias']
        name = extension['name']
        return (alias, name)
| rcbops/python-keystoneclient-buildpackage | keystoneclient/generic/client.py | Python | apache-2.0 | 8,106 |
# -*- coding: utf-8 -*-
'''
Manage nspawn containers
.. versionadded:: 2015.8.0
`systemd-nspawn(1)`__ is a tool used to manage lightweight namespace
containers. This execution module provides several functions to help manage
these containers.
.. __: http://www.freedesktop.org/software/systemd/man/systemd-nspawn.html
Minions running systemd >= 219 will place new containers in
``/var/lib/machines``, while those running systemd < 219 will place them in
``/var/lib/container``.
.. note::
``nsenter(1)`` is required to run commands within containers. It should
already be present on any systemd host, as part of the **util-linux**
package.
'''
# Import python libs
from __future__ import absolute_import
import errno
import functools
import logging
import os
import re
import shutil
import time
import tempfile
# Import Salt libs
import salt.defaults.exitcodes
import salt.utils
import salt.utils.systemd
from salt.exceptions import CommandExecutionError, SaltInvocationError
from salt.ext import six
from salt.ext.six.moves import range # pylint: disable=redefined-builtin
log = logging.getLogger(__name__)
__func_alias__ = {
'list_': 'list',
}
__virtualname__ = 'nspawn'
SEED_MARKER = '/nspawn.initial_seed'
WANT = '/etc/systemd/system/multi-user.target.wants/systemd-nspawn@{0}.service'
EXEC_DRIVER = 'nsenter'
def __virtual__():
    '''
    Only load on Linux minions that were booted with systemd and whose
    systemd version can be determined.
    '''
    if __grains__['kernel'] != 'Linux':
        return False
    if not salt.utils.systemd.booted(__context__):
        return False
    if salt.utils.systemd.version() is None:
        log.error('nspawn: Unable to determine systemd version')
        return False
    return __virtualname__
def _sd_version():
    '''
    Return the cached systemd version (0 when not yet cached).

    Kept as a single helper so the handling of this context variable can be
    changed in one place later.
    '''
    detected = salt.utils.systemd.version(__context__)
    return detected
def _ensure_exists(wrapped):
    '''
    Decorator that aborts with a CommandExecutionError when the named
    container does not exist.
    '''
    @functools.wraps(wrapped)
    def _check(name, *args, **kwargs):
        # Bail out early so wrapped functions can assume the container exists
        if not exists(name):
            raise CommandExecutionError(
                'Container \'{0}\' does not exist'.format(name)
            )
        return wrapped(name, *args, **salt.utils.clean_kwargs(**kwargs))
    return _check
def _root(name='', all_roots=False):
    '''
    Return the container root directory (or, with ``all_roots=True``, a list
    of candidate roots). systemd >= 219 stores images in /var/lib/machines;
    older versions use /var/lib/container.
    '''
    if _sd_version() < 219:
        path = os.path.join('/var/lib/container', name)
        return [path] if all_roots else path
    if all_roots:
        return [os.path.join(base, name)
                for base in ('/var/lib/machines', '/var/lib/container')]
    return os.path.join('/var/lib/machines', name)
def _make_container_root(name):
    '''
    Create and return the root directory for a new container.

    Raises CommandExecutionError when the directory already exists or
    cannot be created.
    '''
    path = _root(name)
    if os.path.exists(path):
        __context__['retcode'] = salt.defaults.exitcodes.SALT_BUILD_FAIL
        raise CommandExecutionError(
            'Container {0} already exists'.format(name)
        )
    try:
        os.makedirs(path)
    except OSError as exc:
        raise CommandExecutionError(
            'Unable to make container root directory {0}: {1}'
            .format(name, exc)
        )
    return path
def _build_failed(dst, name):
    '''
    Clean up a partially-built container root and raise a build failure.
    '''
    __context__['retcode'] = salt.defaults.exitcodes.SALT_BUILD_FAIL
    try:
        shutil.rmtree(dst)
    except OSError as exc:
        # A directory that is already gone is fine; anything else is a
        # genuine cleanup failure.
        if exc.errno != errno.ENOENT:
            raise CommandExecutionError(
                'Unable to cleanup container root dir {0}'.format(dst)
            )
    raise CommandExecutionError(
        'Container {0} failed to build'.format(name)
    )
def _bootstrap_arch(name, **kwargs):
    '''
    Bootstrap an Arch Linux container using pacstrap.
    '''
    if not salt.utils.which('pacstrap'):
        raise CommandExecutionError(
            'pacstrap not found, is the arch-install-scripts package '
            'installed?'
        )
    dst = _make_container_root(name)
    ret = __salt__['cmd.run_all'](
        'pacstrap -c -d {0} base'.format(dst), python_shell=False)
    if ret['retcode'] != 0:
        _build_failed(dst, name)
    return ret
def _bootstrap_debian(name, **kwargs):
    '''
    Bootstrap a Debian Linux container via debootstrap (only unstable is
    currently supported).
    '''
    dst = _make_container_root(name)
    ret = __salt__['cmd.run_all'](
        'debootstrap --arch=amd64 unstable {0}'.format(dst),
        python_shell=False)
    if ret['retcode'] != 0:
        _build_failed(dst, name)
    return ret
def _bootstrap_fedora(name, **kwargs):
    '''
    Bootstrap a Fedora container.

    version
        Fedora release to install. Defaults to the host's release when the
        host is Fedora, otherwise to 21.
    '''
    dst = _make_container_root(name)
    if not kwargs.get('version', False):
        if __grains__['os'].lower() == 'fedora':
            version = __grains__['osrelease']
        else:
            version = '21'
    else:
        # Bug fix: honor the caller-supplied version instead of silently
        # discarding it and hard-coding '21'.
        version = kwargs['version']
    cmd = ('yum -y --releasever={0} --nogpg --installroot={1} '
           '--disablerepo="*" --enablerepo=fedora install systemd passwd yum '
           'fedora-release vim-minimal'.format(version, dst))
    ret = __salt__['cmd.run_all'](cmd, python_shell=False)
    if ret['retcode'] != 0:
        _build_failed(dst, name)
    return ret
def _clear_context():
    '''
    Remove all cached nspawn values from __context__.
    '''
    # Materialize the key list first so we don't mutate while iterating.
    for key in list(__context__):
        if not key.startswith('nspawn.'):
            continue
        log.trace('Clearing __context__[\'{0}\']'.format(key))
        __context__.pop(key, None)
def _ensure_running(name):
    '''
    Start the container if it is not already running.

    Returns True when the container is already running, otherwise the
    result of starting it.
    '''
    # Bug fix: the check was inverted ('!='), which returned True for
    # stopped containers (without starting them) and re-started running
    # ones. The docstring also wrongly claimed this raises on a missing
    # container.
    if state(name) == 'running':
        return True
    return start(name)
def _ensure_systemd(version):
    '''
    Raise a CommandExecutionError unless the installed systemd is at least
    the given version.
    '''
    try:
        required = int(version)
    except ValueError:
        raise CommandExecutionError('Invalid version \'{0}\''.format(version))
    try:
        installed = _sd_version()
        log.debug('nspawn: detected systemd {0}'.format(installed))
    except (IndexError, ValueError):
        raise CommandExecutionError('nspawn: Unable to get systemd version')
    if installed < required:
        raise CommandExecutionError(
            'This function requires systemd >= {0} '
            '(Detected version: {1}).'.format(required, installed)
        )
def _machinectl(cmd,
                output_loglevel='debug',
                ignore_retcode=False,
                use_vt=False):
    '''
    Run a machinectl subcommand and return the cmd.run_all result dict.
    '''
    full_cmd = 'machinectl --no-legend --no-pager {0}'.format(cmd)
    return __salt__['cmd.run_all'](full_cmd,
                                   output_loglevel=output_loglevel,
                                   ignore_retcode=ignore_retcode,
                                   use_vt=use_vt)
@_ensure_exists
def _run(name,
         cmd,
         output=None,
         no_start=False,
         stdin=None,
         python_shell=True,
         preserve_state=False,
         output_loglevel='debug',
         ignore_retcode=False,
         use_vt=False,
         keep_env=None):
    '''
    Common logic for nspawn.run functions.

    Dispatches ``cmd`` into the container via container_resource.run and,
    when ``preserve_state`` is set, returns the container to its original
    stopped state afterwards. ``output`` selects which key of the result
    dict to return (None/'all' return the full dict).
    '''
    # Record the state before the run so it can be restored afterwards.
    orig_state = state(name)
    exc = None  # NOTE(review): unused; kept to leave the code byte-identical
    try:
        ret = __salt__['container_resource.run'](
            name,
            cmd,
            container_type=__virtualname__,
            exec_driver=EXEC_DRIVER,
            output=output,
            no_start=no_start,
            stdin=stdin,
            python_shell=python_shell,
            output_loglevel=output_loglevel,
            ignore_retcode=ignore_retcode,
            use_vt=use_vt,
            keep_env=keep_env)
    except Exception:
        raise
    finally:
        # Make sure we stop the container if necessary, even if an exception
        # was raised.
        if preserve_state \
                and orig_state == 'stopped' \
                and state(name) != 'stopped':
            stop(name)
    # None/'all' means the caller wants the full result dict; otherwise
    # return just the requested field (stdout/stderr/retcode).
    if output in (None, 'all'):
        return ret
    else:
        return ret[output]
@_ensure_exists
def pid(name):
    '''
    Returns the PID of a container

    name
        Container name

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.pid arch1
    '''
    pid_val = info(name).get('PID')
    try:
        return int(pid_val)
    except (TypeError, ValueError) as exc:
        # PID missing (None) or not numeric
        raise CommandExecutionError(
            'Unable to get PID for container \'{0}\': {1}'.format(name, exc)
        )
def run(name,
        cmd,
        no_start=False,
        preserve_state=True,
        stdin=None,
        python_shell=True,
        output_loglevel='debug',
        use_vt=False,
        ignore_retcode=False,
        keep_env=None):
    '''
    Run :mod:`cmd.run <salt.modules.cmdmod.run>` within a container.

    name
        Name of the container in which to run the command.
    cmd
        Command to run.
    no_start : False
        If the container is not running, don't start it.
    preserve_state : True
        After running the command, return the container to its previous
        state.
    stdin : None
        Standard input to be used for the command.
    output_loglevel : debug
        Level at which to log the output from the command. Set to ``quiet``
        to suppress logging.
    use_vt : False
        Use SaltStack's utils.vt to stream output to console.
    keep_env : None
        If not passed, only a sane default PATH environment variable will be
        set. If ``True``, all environment variables from the container's
        host will be kept. Otherwise, a comma-separated list (or Python
        list) of environment variable names can be passed, and those
        environment variables will be kept.

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.run mycontainer 'ifconfig -a'
    '''
    passthrough = dict(output=None,
                       no_start=no_start,
                       preserve_state=preserve_state,
                       stdin=stdin,
                       python_shell=python_shell,
                       output_loglevel=output_loglevel,
                       use_vt=use_vt,
                       ignore_retcode=ignore_retcode,
                       keep_env=keep_env)
    return _run(name, cmd, **passthrough)
def run_stdout(name,
               cmd,
               no_start=False,
               preserve_state=True,
               stdin=None,
               python_shell=True,
               output_loglevel='debug',
               use_vt=False,
               ignore_retcode=False,
               keep_env=None):
    '''
    Run :mod:`cmd.run_stdout <salt.modules.cmdmod.run_stdout>` within a
    container and return only the command's stdout.

    name
        Name of the container in which to run the command.
    cmd
        Command to run.
    no_start : False
        If the container is not running, don't start it.
    preserve_state : True
        After running the command, return the container to its previous
        state.
    stdin : None
        Standard input to be used for the command.
    output_loglevel : debug
        Level at which to log the output from the command. Set to ``quiet``
        to suppress logging.
    use_vt : False
        Use SaltStack's utils.vt to stream output to console. Assumes
        ``output=all``.
    keep_env : None
        If not passed, only a sane default PATH environment variable will be
        set. If ``True``, all host environment variables are kept;
        otherwise a comma-separated list (or Python list) of variable names
        to keep.

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.run_stdout mycontainer 'ifconfig -a'
    '''
    passthrough = dict(output='stdout',
                       no_start=no_start,
                       preserve_state=preserve_state,
                       stdin=stdin,
                       python_shell=python_shell,
                       output_loglevel=output_loglevel,
                       use_vt=use_vt,
                       ignore_retcode=ignore_retcode,
                       keep_env=keep_env)
    return _run(name, cmd, **passthrough)
def run_stderr(name,
               cmd,
               no_start=False,
               preserve_state=True,
               stdin=None,
               python_shell=True,
               output_loglevel='debug',
               use_vt=False,
               ignore_retcode=False,
               keep_env=None):
    '''
    Run :mod:`cmd.run_stderr <salt.modules.cmdmod.run_stderr>` within a
    container and return only the command's stderr.

    name
        Name of the container in which to run the command.
    cmd
        Command to run.
    no_start : False
        If the container is not running, don't start it.
    preserve_state : True
        After running the command, return the container to its previous
        state.
    stdin : None
        Standard input to be used for the command.
    output_loglevel : debug
        Level at which to log the output from the command. Set to ``quiet``
        to suppress logging.
    use_vt : False
        Use SaltStack's utils.vt to stream output to console. Assumes
        ``output=all``.
    keep_env : None
        If not passed, only a sane default PATH environment variable will be
        set. If ``True``, all host environment variables are kept;
        otherwise a comma-separated list (or Python list) of variable names
        to keep.

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.run_stderr mycontainer 'ip addr show'
    '''
    passthrough = dict(output='stderr',
                       no_start=no_start,
                       preserve_state=preserve_state,
                       stdin=stdin,
                       python_shell=python_shell,
                       output_loglevel=output_loglevel,
                       use_vt=use_vt,
                       ignore_retcode=ignore_retcode,
                       keep_env=keep_env)
    return _run(name, cmd, **passthrough)
def retcode(name,
            cmd,
            no_start=False,
            preserve_state=True,
            stdin=None,
            python_shell=True,
            output_loglevel='debug',
            use_vt=False,
            ignore_retcode=False,
            keep_env=None):
    '''
    Run :mod:`cmd.retcode <salt.modules.cmdmod.retcode>` within a container
    and return only the command's exit code.

    name
        Name of the container in which to run the command.
    cmd
        Command to run.
    no_start : False
        If the container is not running, don't start it.
    preserve_state : True
        After running the command, return the container to its previous
        state.
    stdin : None
        Standard input to be used for the command.
    output_loglevel : debug
        Level at which to log the output from the command. Set to ``quiet``
        to suppress logging.
    use_vt : False
        Use SaltStack's utils.vt to stream output to console. Assumes
        ``output=all``.
    keep_env : None
        If not passed, only a sane default PATH environment variable will be
        set. If ``True``, all host environment variables are kept;
        otherwise a comma-separated list (or Python list) of variable names
        to keep.

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.retcode mycontainer 'ip addr show'
    '''
    passthrough = dict(output='retcode',
                       no_start=no_start,
                       preserve_state=preserve_state,
                       stdin=stdin,
                       python_shell=python_shell,
                       output_loglevel=output_loglevel,
                       use_vt=use_vt,
                       ignore_retcode=ignore_retcode,
                       keep_env=keep_env)
    return _run(name, cmd, **passthrough)
def run_all(name,
            cmd,
            no_start=False,
            preserve_state=True,
            stdin=None,
            python_shell=True,
            output_loglevel='debug',
            use_vt=False,
            ignore_retcode=False,
            keep_env=None):
    '''
    Run :mod:`cmd.run_all <salt.modules.cmdmod.run_all>` within a container
    and return the full result dict.

    .. note::

        While the command is run within the container, it is initiated from
        the host. Therefore, the PID in the return dict is from the host,
        not from the container.

    name
        Name of the container in which to run the command.
    cmd
        Command to run.
    no_start : False
        If the container is not running, don't start it.
    preserve_state : True
        After running the command, return the container to its previous
        state.
    stdin : None
        Standard input to be used for the command.
    output_loglevel : debug
        Level at which to log the output from the command. Set to ``quiet``
        to suppress logging.
    use_vt : False
        Use SaltStack's utils.vt to stream output to console. Assumes
        ``output=all``.
    keep_env : None
        If not passed, only a sane default PATH environment variable will be
        set. If ``True``, all host environment variables are kept;
        otherwise a comma-separated list (or Python list) of variable names
        to keep.

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.run_all mycontainer 'ip addr show'
    '''
    passthrough = dict(output='all',
                       no_start=no_start,
                       preserve_state=preserve_state,
                       stdin=stdin,
                       python_shell=python_shell,
                       output_loglevel=output_loglevel,
                       use_vt=use_vt,
                       ignore_retcode=ignore_retcode,
                       keep_env=keep_env)
    return _run(name, cmd, **passthrough)
def bootstrap_container(name, dist=None, version=None):
    '''
    Bootstrap a container from package servers. If ``dist`` is None, the OS
    the minion is running as will be created; otherwise the needed
    bootstrapping tools will need to be available on the host.

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.bootstrap_container <name>
    '''
    if not dist:
        dist = __grains__['os'].lower()
        log.debug(
            'nspawn.bootstrap: no dist provided, defaulting to \'{0}\''
            .format(dist)
        )
    # Bug fix: look up the bootstrapper first, then call it outside the
    # try/except. Previously the call itself sat inside 'except KeyError',
    # so a KeyError raised *inside* a _bootstrap_* helper was misreported
    # as an unsupported distribution.
    try:
        bootstrapper = globals()['_bootstrap_{0}'.format(dist)]
    except KeyError:
        raise CommandExecutionError('Unsupported distribution "{0}"'.format(dist))
    return bootstrapper(name, version=version)
def _needs_install(name):
    '''
    Return 1 when salt must be installed in the container, else 0.

    Testing for the executable alone is not enough for custom installs
    where the bootstrap can do much more than installing the bare salt
    binaries, so a missing salt-minion *process* also triggers an install.
    '''
    has_minion = retcode(name, "command -v salt-minion")
    if not has_minion:
        return 1
    if 'salt-minion' not in run_stdout(name, 'ps aux'):
        return 1
    # A minion is running; stop it so it isn't running during bootstrap.
    retcode(name, 'salt-call --local service.stop salt-minion')
    return 0
def bootstrap_salt(name,
                   config=None,
                   approve_key=True,
                   install=True,
                   pub_key=None,
                   priv_key=None,
                   bootstrap_url=None,
                   force_install=False,
                   unconditional_install=False,
                   bootstrap_delay=None,
                   bootstrap_args=None,
                   bootstrap_shell=None):
    '''
    Install and configure salt-minion inside an existing container.

    config
        Minion configuration options passed to seed.mkconfig.
    approve_key
        Pre-approve the minion key on the master.
    install
        Run the bootstrap script when an install is needed.
    bootstrap_url
        Alternate URL for the salt bootstrap script.
    force_install / unconditional_install
        Force (re)installation even if salt appears present / seeded.
    bootstrap_delay
        Seconds to sleep before starting (non-numeric values sleep 5s).
    bootstrap_args / bootstrap_shell
        Arguments and shell used to invoke the bootstrap script.

    CLI Example::

        salt '*' nspawn.bootstrap_salt arch1
    '''
    if bootstrap_delay is not None:
        try:
            time.sleep(bootstrap_delay)
        except TypeError:
            # Bad input, but assume since a value was passed that
            # a delay was desired, and sleep for 5 seconds
            time.sleep(5)
    # Bail out early when the container cannot be queried at all.
    c_info = info(name)
    if not c_info:
        return None
    # default set here as we cannot set them
    # in def as it can come from a chain of procedures.
    if bootstrap_args:
        # custom bootstrap args can be totally customized, and user could
        # have inserted the placeholder for the config directory.
        # For example, some salt bootstrap script do not use at all -c
        if '{0}' not in bootstrap_args:
            bootstrap_args += ' -c {0}'
    else:
        bootstrap_args = '-c {0}'
    if not bootstrap_shell:
        bootstrap_shell = 'sh'
    # NOTE(review): _ensure_running() returns True/start() result, not a
    # state string, so the 'orig_state == stopped' check below can never
    # match -- verify intended behavior.
    orig_state = _ensure_running(name)
    if not orig_state:
        return orig_state
    if not force_install:
        needs_install = _needs_install(name)
    else:
        needs_install = True
    # A marker file inside the container records a previous successful seed.
    seeded = retcode(name, 'test -e \'{0}\''.format(SEED_MARKER)) == 0
    tmp = tempfile.mkdtemp()
    if seeded and not unconditional_install:
        ret = True
    else:
        ret = False
        cfg_files = __salt__['seed.mkconfig'](
            config, tmp=tmp, id_=name, approve_key=approve_key,
            pub_key=pub_key, priv_key=priv_key)
        if needs_install or force_install or unconditional_install:
            if install:
                # Stage config + bootstrap script in temp dirs inside the
                # container, then run the bootstrap script.
                rstr = __salt__['test.rand_str']()
                configdir = '/tmp/.c_{0}'.format(rstr)
                run(name,
                    'install -m 0700 -d {0}'.format(configdir),
                    python_shell=False)
                bs_ = __salt__['config.gather_bootstrap_script'](
                    bootstrap=bootstrap_url)
                dest_dir = os.path.join('/tmp', rstr)
                for cmd in [
                        'mkdir -p {0}'.format(dest_dir),
                        'chmod 700 {0}'.format(dest_dir),
                ]:
                    if run_stdout(name, cmd):
                        log.error(
                            ('tmpdir {0} creation'
                             ' failed ({1}').format(dest_dir, cmd))
                        return False
                copy_to(name,
                        bs_,
                        '{0}/bootstrap.sh'.format(dest_dir),
                        makedirs=True)
                copy_to(name, cfg_files['config'],
                        os.path.join(configdir, 'minion'))
                copy_to(name, cfg_files['privkey'],
                        os.path.join(configdir, 'minion.pem'))
                copy_to(name, cfg_files['pubkey'],
                        os.path.join(configdir, 'minion.pub'))
                bootstrap_args = bootstrap_args.format(configdir)
                cmd = ('{0} {2}/bootstrap.sh {1}'
                       .format(bootstrap_shell,
                               bootstrap_args.replace("'", "''"),
                               dest_dir))
                # log ASAP the forged bootstrap command which can be wrapped
                # out of the output in case of unexpected problem
                log.info('Running {0} in LXC container \'{1}\''
                         .format(cmd, name))
                ret = retcode(name, cmd, output_loglevel='info',
                              use_vt=True) == 0
            else:
                ret = False
        else:
            # Salt binaries already present; just drop in config and keys.
            # NOTE(review): relies on salt.config being importable as an
            # attribute of the already-imported salt package; it is not
            # imported at module top -- verify.
            minion_config = salt.config.minion_config(cfg_files['config'])
            pki_dir = minion_config['pki_dir']
            copy_to(name, cfg_files['config'], '/etc/salt/minion')
            copy_to(name, cfg_files['privkey'], os.path.join(pki_dir, 'minion.pem'))
            copy_to(name, cfg_files['pubkey'], os.path.join(pki_dir, 'minion.pub'))
            run(name,
                'salt-call --local service.enable salt-minion',
                python_shell=False)
            ret = True
    shutil.rmtree(tmp)
    if orig_state == 'stopped':
        stop(name)
    # mark seeded upon successful install
    if ret:
        run(name,
            'touch \'{0}\''.format(SEED_MARKER),
            python_shell=False)
    return ret
def list_all():
    '''
    Lists all nspawn containers

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.list_all
    '''
    containers = []
    if _sd_version() >= 219:
        # machinectl knows about all registered images
        for line in _machinectl('list-images')['stdout'].splitlines():
            fields = line.split()
            if fields:
                containers.append(fields[0])
        return containers
    # Older systemd: every directory under the container root is a container
    rootdir = _root()
    try:
        entries = os.listdir(rootdir)
    except OSError:
        return containers
    for dirname in entries:
        if os.path.isdir(os.path.join(rootdir, dirname)):
            containers.append(dirname)
    return containers
def list_running():
    '''
    Lists running nspawn containers

    .. note::
        ``nspawn.list`` also works to list running containers

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.list_running
        salt myminion nspawn.list
    '''
    running = []
    for line in _machinectl('list')['stdout'].splitlines():
        fields = line.split()
        if fields:
            running.append(fields[0])
    return sorted(running)
# 'machinectl list' shows only running containers, so allow this to work as an
# alias to nspawn.list_running (exposed to callers as 'nspawn.list' via the
# __func_alias__ mapping at the top of this module)
list_ = salt.utils.alias_function(list_running, 'list_')
def list_stopped():
    '''
    Lists stopped nspawn containers

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.list_stopped
    '''
    # Stopped == everything known minus everything currently running
    known = set(list_all())
    running = set(list_running())
    return sorted(known - running)
def exists(name):
    '''
    Returns true if the named container exists

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.exists <name>
    '''
    contextkey = 'nspawn.exists.{0}'.format(name)
    if contextkey not in __context__:
        # Cache the (relatively expensive) lookup in __context__
        __context__[contextkey] = name in list_all()
    return __context__[contextkey]
@_ensure_exists
def state(name):
    '''
    Return state of container (running or stopped)

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.state <name>
    '''
    try:
        stdout = _machinectl('show {0} --property=State'.format(name),
                             ignore_retcode=True)['stdout']
        # Output looks like 'State=running'; take the value after '='
        return stdout.split('=')[-1]
    except IndexError:
        return 'stopped'
def info(name, **kwargs):
    '''
    Return info about a container

    .. note::

        The container must be running for ``machinectl`` to gather
        information about it. If the container is stopped, then this
        function will start it.

    start : False
        If ``True``, then the container will be started to retrieve the
        info. A ``Started`` key will be in the return data if the container
        was started.

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.info arch1
        salt myminion nspawn.info arch1 start=True
    '''
    kwargs = salt.utils.clean_kwargs(**kwargs)
    start_ = kwargs.pop('start', False)
    if kwargs:
        salt.utils.invalid_kwargs(kwargs)
    # NOTE(review): when start=False the container is still started via
    # _ensure_running(); the branches look swapped relative to the
    # docstring -- verify intended semantics.
    if not start_:
        _ensure_running(name)
    elif name not in list_running():
        start(name)
    # Have to parse 'machinectl status' here since 'machinectl show' doesn't
    # contain IP address info or OS info. *shakes fist angrily*
    c_info = _machinectl('status {0}'.format(name))
    if c_info['retcode'] != 0:
        raise CommandExecutionError(
            'Unable to get info for container \'{0}\''.format(name)
        )
    # Better human-readable names. False means key should be ignored.
    key_name_map = {
        'Iface': 'Network Interface',
        'Leader': 'PID',
        'Service': False,
        'Since': 'Running Since',
    }
    ret = {}
    # Matches indented 'Key: value' lines of the status output
    kv_pair = re.compile(r'^\s+([A-Za-z]+): (.+)$')
    # Characters that only appear once the process tree starts
    tree = re.compile(r'[|`]')
    lines = c_info['stdout'].splitlines()
    multiline = False
    cur_key = None
    for idx in range(len(lines)):
        match = kv_pair.match(lines[idx])
        if match:
            key, val = match.groups()
            # Get a better key name if one exists
            key = key_name_map.get(key, key)
            if key is False:
                continue
            elif key == 'PID':
                try:
                    # The Leader line is 'PID (comm)'; keep just the PID
                    val = val.split()[0]
                except IndexError:
                    pass
            cur_key = key
            if multiline:
                multiline = False
            ret[key] = val
        else:
            # Continuation line belonging to the previous key
            if cur_key is None:
                continue
            if tree.search(lines[idx]):
                # We've reached the process tree, bail out
                break
            if multiline:
                ret[cur_key].append(lines[idx].strip())
            else:
                # First continuation: turn the scalar into a list.
                # ('key' still holds the same value as 'cur_key' here.)
                ret[cur_key] = [ret[key], lines[idx].strip()]
                multiline = True
    return ret
@_ensure_exists
def enable(name):
    '''
    Set the named container to be launched at boot

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.enable <name>
    '''
    cmd = 'systemctl enable systemd-nspawn@{0}'.format(name)
    if __salt__['cmd.retcode'](cmd, python_shell=False) == 0:
        return True
    __context__['retcode'] = salt.defaults.exitcodes.EX_UNAVAILABLE
    return False
@_ensure_exists
def disable(name):
    '''
    Set the named container to *not* be launched at boot

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.disable <name>
    '''
    cmd = 'systemctl disable systemd-nspawn@{0}'.format(name)
    if __salt__['cmd.retcode'](cmd, python_shell=False) != 0:
        # Surface the failure to the CLI via the module return code
        __context__['retcode'] = salt.defaults.exitcodes.EX_UNAVAILABLE
        return False
    return True
@_ensure_exists
def start(name):
    '''
    Start the named container

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.start <name>
    '''
    if _sd_version() >= 219:
        result = _machinectl('start {0}'.format(name))
    else:
        result = __salt__['cmd.run_all'](
            'systemctl start systemd-nspawn@{0}'.format(name),
            python_shell=False)
    if result['retcode'] == 0:
        return True
    __context__['retcode'] = salt.defaults.exitcodes.EX_UNAVAILABLE
    return False
# This function is hidden from sphinx docs
@_ensure_exists
def stop(name, kill=False):
    '''
    Compatibility helper providing the logic behind nspawn.poweroff
    (kill=False) and nspawn.terminate (kill=True).
    '''
    if _sd_version() >= 219:
        action = 'terminate' if kill else 'poweroff'
        result = _machinectl('{0} {1}'.format(action, name))
    else:
        result = __salt__['cmd.run_all'](
            'systemctl stop systemd-nspawn@{0}'.format(name),
            python_shell=False)
    if result['retcode'] == 0:
        return True
    __context__['retcode'] = salt.defaults.exitcodes.EX_UNAVAILABLE
    return False
def poweroff(name):
    '''
    Issue a clean shutdown to the container, equivalent to running
    ``machinectl poweroff`` on the named container. Running ``nspawn.stop``
    (see the CLI examples) does the same thing.

    .. note::

        ``machinectl poweroff`` is only supported in systemd >= 219. On
        earlier systemd versions, running this function will simply issue a
        clean shutdown via ``systemctl``.

    CLI Examples:

    .. code-block:: bash

        salt myminion nspawn.poweroff arch1
        salt myminion nspawn.stop arch1
    '''
    return stop(name, kill=False)
def terminate(name):
    '''
    Kill all processes in the container without a clean shutdown, equivalent
    to running ``machinectl terminate`` on the named container. Running
    ``nspawn.stop`` with ``kill=True`` (see the CLI examples) does the same
    thing.

    .. note::

        ``machinectl terminate`` is only supported in systemd >= 219. On
        earlier systemd versions, running this function will simply issue a
        clean shutdown via ``systemctl``.

    CLI Examples:

    .. code-block:: bash

        salt myminion nspawn.terminate arch1
        salt myminion nspawn.stop arch1 kill=True
    '''
    return stop(name, kill=True)
# This function is hidden from sphinx docs
def restart(name):
    '''
    Compatibility alias that simply calls nspawn.reboot.
    '''
    return reboot(name)
@_ensure_exists
def reboot(name, kill=False):
    '''
    Reboot the container by sending a SIGINT to its init process,
    equivalent to running ``machinectl reboot`` on the named container.
    Running ``nspawn.restart`` (see the CLI examples) does the same thing.

    .. note::

        ``machinectl reboot`` is only supported in systemd >= 219. On
        earlier systemd versions, running this function will instead
        restart the container via ``systemctl``.

    CLI Examples:

    .. code-block:: bash

        salt myminion nspawn.reboot arch1
        salt myminion nspawn.restart arch1
    '''
    if _sd_version() >= 219:
        if state(name) != 'running':
            # 'machinectl reboot' will fail on a stopped container, so just
            # start it instead.
            return start(name)
        ret = _machinectl('reboot {0}'.format(name))
    else:
        # 'systemctl restart' did not work reliably (the container did not
        # actually reboot), so stop and start in two separate actions.
        ret = __salt__['cmd.run_all'](
            'systemctl stop systemd-nspawn@{0}'.format(name),
            python_shell=False)
        if ret['retcode'] != 0:
            __context__['retcode'] = salt.defaults.exitcodes.EX_UNAVAILABLE
            return False
        # The start result is checked by the common code below.
        ret = __salt__['cmd.run_all'](
            'systemctl start systemd-nspawn@{0}'.format(name),
            python_shell=False)
    if ret['retcode'] != 0:
        __context__['retcode'] = salt.defaults.exitcodes.EX_UNAVAILABLE
        return False
    return True
@_ensure_exists
def remove(name, stop=False):
    '''
    Remove the named container

    .. warning::

        This function removes all data associated with the container. It
        will not, however, remove the btrfs subvolumes created by pulling
        container images (:mod:`nspawn.pull_raw
        <salt.modules.nspawn.pull_raw>`, :mod:`nspawn.pull_tar
        <salt.modules.nspawn.pull_tar>`, :mod:`nspawn.pull_dkr
        <salt.modules.nspawn.pull_dkr>`).

    stop : False
        If ``True``, the container will be destroyed even if it is
        running/frozen.

    CLI Examples:

    .. code-block:: bash

        salt '*' nspawn.remove foo
        salt '*' nspawn.remove foo stop=True
    '''
    if not stop and state(name) != 'stopped':
        raise CommandExecutionError(
            'Container \'{0}\' is not stopped'.format(name)
        )

    def _failed_remove(name, exc):
        raise CommandExecutionError(
            'Unable to remove container \'{0}\': {1}'.format(name, exc)
        )

    if _sd_version() < 219:
        # Old systemd: the container is just a directory tree
        try:
            shutil.rmtree(os.path.join(_root(), name))
        except OSError as exc:
            _failed_remove(name, exc)
        return True
    ret = _machinectl('remove {0}'.format(name))
    if ret['retcode'] != 0:
        __context__['retcode'] = salt.defaults.exitcodes.EX_UNAVAILABLE
        _failed_remove(name, ret['stderr'])
    return True
# Compatibility between LXC and nspawn: expose remove() under the LXC-style
# name 'destroy' as well.
destroy = salt.utils.alias_function(remove, 'destroy')
@_ensure_exists
def copy_to(name, source, dest, overwrite=False, makedirs=False):
    '''
    Copy a file from the host into a container

    name
        Container name.
    source
        File to be copied to the container (a ``salt://`` URL is cached
        first).
    dest
        Destination on the container. Must be an absolute path.
    overwrite : False
        Unless set to ``True``, an existing file at ``dest`` raises an
        error.
    makedirs : False
        Create the parent directory on the container if it does not already
        exist.

    CLI Example:

    .. code-block:: bash

        salt 'minion' nspawn.copy_to /tmp/foo /root/foo
    '''
    path = source
    try:
        if source.startswith('salt://'):
            cached = __salt__['cp.cache_file'](source)
            if not cached:
                raise CommandExecutionError(
                    'Unable to cache {0}'.format(source)
                )
            path = cached
    except AttributeError:
        # Non-string source (no .startswith attribute)
        raise SaltInvocationError('Invalid source file {0}'.format(source))
    if _sd_version() >= 219:
        # TODO: Use machinectl copy-to
        pass
    return __salt__['container_resource.copy_to'](
        name,
        path,
        dest,
        container_type=__virtualname__,
        exec_driver=EXEC_DRIVER,
        overwrite=overwrite,
        makedirs=makedirs)
# LXC-compatible short alias for copy_to
cp = salt.utils.alias_function(copy_to, 'cp')
# Everything below requres systemd >= 219
# TODO: Write a decorator to keep these functions from being available to older
# systemd versions.
def _pull_image(pull_type, image, name, **kwargs):
    '''
    Common logic for machinectl pull-* commands.

    pull_type
        One of ``raw``, ``tar``, or ``dkr``.
    image
        Image URL/identifier to download.
    name
        Name for the new container.
    '''
    _ensure_systemd(219)
    if exists(name):
        raise SaltInvocationError(
            'Container \'{0}\' already exists'.format(name)
        )
    if pull_type in ('raw', 'tar'):
        valid_kwargs = ('verify',)
    elif pull_type == 'dkr':
        valid_kwargs = ('index',)
    else:
        raise SaltInvocationError(
            'Unsupported image type \'{0}\''.format(pull_type)
        )
    kwargs = salt.utils.clean_kwargs(**kwargs)
    # Fix: kwargs was already cleaned above; the original re-ran
    # clean_kwargs on the cleaned dict when collecting bad kwargs.
    bad_kwargs = dict(
        [(x, y) for x, y in six.iteritems(kwargs)
         if x not in valid_kwargs]
    )
    if bad_kwargs:
        salt.utils.invalid_kwargs(bad_kwargs)
    pull_opts = []
    if pull_type in ('raw', 'tar'):
        verify = kwargs.get('verify', False)
        if not verify:
            pull_opts.append('--verify=no')
        else:
            def _bad_verify():
                raise SaltInvocationError(
                    '\'verify\' must be one of the following: '
                    'signature, checksum'
                )
            try:
                verify = verify.lower()
            except AttributeError:
                _bad_verify()
            else:
                if verify not in ('signature', 'checksum'):
                    _bad_verify()
            pull_opts.append('--verify={0}'.format(verify))
    elif pull_type == 'dkr':
        # No need to validate the index URL, machinectl will take care of
        # this for us.
        if 'index' in kwargs:
            pull_opts.append('--dkr-index-url={0}'.format(kwargs['index']))
    cmd = 'pull-{0} {1} {2} {3}'.format(
        pull_type, ' '.join(pull_opts), image, name
    )
    result = _machinectl(cmd, use_vt=True)
    if result['retcode'] != 0:
        msg = 'Error occurred pulling image. Stderr from the pull command ' \
              '(if any) follows: '
        if result['stderr']:
            msg += '\n\n{0}'.format(result['stderr'])
        raise CommandExecutionError(msg)
    return True
def pull_raw(url, name, verify=False):
    '''
    Execute a ``machinectl pull-raw`` to download a .qcow2 or raw disk
    image, and add it to /var/lib/machines as a new container.

    .. note::

        **Requires systemd >= 219**

    url
        URL from which to download the container.
    name
        Name for the new container.
    verify : False
        Perform signature or checksum verification on the container. See
        the ``machinectl(1)`` man page (section titled "Image Transfer
        Commands") for more information on requirements for image
        verification. Use ``verify=signature`` or ``verify=checksum``; by
        default no verification is performed.

    CLI Examples:

    .. code-block:: bash

        salt myminion nspawn.pull_raw http://ftp.halifax.rwth-aachen.de/fedora/linux/releases/21/Cloud/Images/x86_64/Fedora-Cloud-Base-20141203-21.x86_64.raw.xz fedora21
    '''
    # Delegate to the shared pull implementation
    return _pull_image('raw', url, name, verify=verify)
def pull_tar(url, name, verify=False):
    '''
    Execute a ``machinectl pull-tar`` to download a .tar container image,
    and add it to /var/lib/machines as a new container.

    .. note::

        **Requires systemd >= 219**

    url
        URL from which to download the container

    name
        Name for the new container

    verify : False
        Perform signature or checksum verification on the container. See the
        ``machinectl(1)`` man page (section titled "Image Transfer Commands")
        for more information on requirements for image verification. To perform
        signature verification, use ``verify=signature``. For checksum
        verification, use ``verify=checksum``. By default, no verification will
        be performed.

    CLI Examples:

    .. code-block:: bash

        salt myminion nspawn.pull_tar http://foo.domain.tld/containers/archlinux-2015.02.01.tar.gz arch2
    '''
    return _pull_image('tar', url, name, verify=verify)
def pull_dkr(url, name, index):
    '''
    Download a docker image with ``machinectl pull-dkr`` and register it
    under /var/lib/machines as a new container.

    .. note::

        **Requires systemd >= 219**

    url
        URL from which to download the container

    name
        Name for the new container

    index
        URL of the Docker index server from which to pull (must be an
        ``http://`` or ``https://`` URL).

    CLI Examples:

    .. code-block:: bash

        salt myminion nspawn.pull_dkr centos/centos6 cent6 index=https://get.docker.com
        salt myminion nspawn.pull_docker centos/centos6 cent6 index=https://get.docker.com
    '''
    # 'dkr' selects machinectl's pull-dkr transfer backend.
    image_type = 'dkr'
    return _pull_image(image_type, url, name, index=index)
# Backwards-compatible alias so the function is also reachable as
# ``nspawn.pull_docker`` (matching the CLI example in pull_dkr's docstring).
pull_docker = salt.utils.alias_function(pull_dkr, 'pull_docker')
| smallyear/linuxLearn | salt/salt/modules/nspawn.py | Python | apache-2.0 | 42,749 |
"""Test the Waze Travel Time config flow."""
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.waze_travel_time.const import (
CONF_AVOID_FERRIES,
CONF_AVOID_SUBSCRIPTION_ROADS,
CONF_AVOID_TOLL_ROADS,
CONF_DESTINATION,
CONF_EXCL_FILTER,
CONF_INCL_FILTER,
CONF_ORIGIN,
CONF_REALTIME,
CONF_UNITS,
CONF_VEHICLE_TYPE,
DEFAULT_NAME,
DOMAIN,
)
from homeassistant.const import CONF_REGION, CONF_UNIT_SYSTEM_IMPERIAL
from tests.common import MockConfigEntry
async def test_minimum_fields(hass, validate_config_entry, bypass_setup):
    """Test we get the form."""
    init_result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert init_result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert init_result["errors"] == {}

    # Only the three mandatory fields are supplied.
    user_input = {
        CONF_ORIGIN: "location1",
        CONF_DESTINATION: "location2",
        CONF_REGION: "US",
    }
    create_result = await hass.config_entries.flow.async_configure(
        init_result["flow_id"], user_input
    )
    await hass.async_block_till_done()

    assert create_result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert create_result["title"] == f"{DEFAULT_NAME}: location1 -> location2"
    assert create_result["data"] == user_input
async def test_options(hass, validate_config_entry, mock_update):
    """Test options flow."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        data={
            CONF_ORIGIN: "location1",
            CONF_DESTINATION: "location2",
            CONF_REGION: "US",
        },
    )
    entry.add_to_hass(hass)
    await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()

    # Same dict is submitted as user input and expected back as the
    # options payload.
    options = {
        CONF_AVOID_FERRIES: True,
        CONF_AVOID_SUBSCRIPTION_ROADS: True,
        CONF_AVOID_TOLL_ROADS: True,
        CONF_EXCL_FILTER: "exclude",
        CONF_INCL_FILTER: "include",
        CONF_REALTIME: False,
        CONF_UNITS: CONF_UNIT_SYSTEM_IMPERIAL,
        CONF_VEHICLE_TYPE: "taxi",
    }

    result = await hass.config_entries.options.async_init(entry.entry_id, data=None)
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "init"

    result = await hass.config_entries.options.async_configure(
        result["flow_id"], user_input=dict(options)
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == ""
    assert result["data"] == options
    assert entry.options == options
async def test_import(hass, validate_config_entry, mock_update):
    """Test import for config flow."""
    # After import the location keys land in entry.data and the rest in
    # entry.options.
    location_data = {
        CONF_ORIGIN: "location1",
        CONF_DESTINATION: "location2",
        CONF_REGION: "US",
    }
    option_data = {
        CONF_AVOID_FERRIES: True,
        CONF_AVOID_SUBSCRIPTION_ROADS: True,
        CONF_AVOID_TOLL_ROADS: True,
        CONF_EXCL_FILTER: "exclude",
        CONF_INCL_FILTER: "include",
        CONF_REALTIME: False,
        CONF_UNITS: CONF_UNIT_SYSTEM_IMPERIAL,
        CONF_VEHICLE_TYPE: "taxi",
    }

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_IMPORT},
        data={**location_data, **option_data},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    await hass.async_block_till_done()

    entry = hass.config_entries.async_entries(DOMAIN)[0]
    assert entry.data == location_data
    assert entry.options == option_data
async def test_dupe_id(hass, validate_config_entry, bypass_setup):
    """Test setting up the same entry twice fails."""
    user_input = {
        CONF_ORIGIN: "location1",
        CONF_DESTINATION: "location2",
        CONF_REGION: "US",
    }

    async def _attempt_setup():
        # Run the user flow once with the shared input and return the
        # final result.
        form = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        assert form["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert form["errors"] == {}
        outcome = await hass.config_entries.flow.async_configure(
            form["flow_id"], user_input
        )
        await hass.async_block_till_done()
        return outcome

    first = await _attempt_setup()
    assert first["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY

    # Identical input a second time must abort as a duplicate.
    second = await _attempt_setup()
    assert second["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert second["reason"] == "already_configured"
async def test_invalid_config_entry(hass, invalidate_config_entry):
    """Test we get the form."""
    form_result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert form_result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert form_result["errors"] == {}

    # With the validation fixture invalidated, submitting re-renders the
    # form with a connection error.
    submit_result = await hass.config_entries.flow.async_configure(
        form_result["flow_id"],
        {
            CONF_ORIGIN: "location1",
            CONF_DESTINATION: "location2",
            CONF_REGION: "US",
        },
    )
    assert submit_result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert submit_result["errors"] == {"base": "cannot_connect"}
| w1ll1am23/home-assistant | tests/components/waze_travel_time/test_config_flow.py | Python | apache-2.0 | 6,588 |
# Count (and report) the vowels in a sample string.
vowels = ['a', 'e', 'i', 'o', 'u']
s = 'sudeep'
count = 0
for ch in s:
    # Membership test replaces the original broken lookup, which indexed
    # both strings/lists with a character (``s[i]`` where ``i`` is a char)
    # and raised TypeError at runtime.
    if ch in vowels:
        print('vowel found')
        count = count + 1
print("number of vowels: " + str(count))
| sudeep-melekar/google-pthon-excercises | sudicustom/vowel.py | Python | apache-2.0 | 193 |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from novaclient import exceptions as nova_exceptions
from oslo_log import log as logging
from trove.backup.models import Backup
import trove.common.apischema as apischema
from trove.common.auth import admin_context
from trove.common import exception
from trove.common.i18n import _
from trove.common import wsgi
from trove.extensions.mgmt.instances import models
from trove.extensions.mgmt.instances import views
from trove.extensions.mgmt.instances.views import DiagnosticsView
from trove.extensions.mgmt.instances.views import HwInfoView
from trove.extensions.mysql import models as mysql_models
from trove.instance import models as instance_models
from trove.instance.service import InstanceController
LOG = logging.getLogger(__name__)
class MgmtInstanceController(InstanceController):
    """Admin (management) controller for database instances.

    All handlers require an admin context and return ``wsgi.Result``
    responses.
    """
    schemas = apischema.mgmt_instance

    @classmethod
    def get_action_schema(cls, body, action_schema):
        """Return the sub-schema for the single action named in *body*.

        ``next(iter(body))`` replaces ``body.keys()[0]``, which only works
        on Python 2 (dict views are not indexable on Python 3); the result
        is the same first key.
        """
        action_type = next(iter(body))
        return action_schema.get(action_type, {})

    @admin_context
    def index(self, req, tenant_id, detailed=False):
        """Return all instances."""
        LOG.info(_("req : '%s'\n\n") % req)
        LOG.info(_("Indexing a database instance for tenant '%s'") % tenant_id)
        context = req.environ[wsgi.CONTEXT_KEY]
        # 'deleted' query param is tri-state: True/False filter, None = both.
        deleted = None
        deleted_q = req.GET.get('deleted', '').lower()
        if deleted_q in ['true']:
            deleted = True
        elif deleted_q in ['false']:
            deleted = False
        clustered_q = req.GET.get('include_clustered', '').lower()
        include_clustered = clustered_q == 'true'
        try:
            instances = models.load_mgmt_instances(
                context, deleted=deleted, include_clustered=include_clustered)
        except nova_exceptions.ClientException as e:
            LOG.error(e)
            return wsgi.Result(str(e), 403)
        view_cls = views.MgmtInstancesView
        return wsgi.Result(view_cls(instances, req=req).data(), 200)

    @admin_context
    def show(self, req, tenant_id, id):
        """Return a single instance (with root-enable history)."""
        LOG.info(_("req : '%s'\n\n") % req)
        LOG.info(_("Showing a database instance for tenant '%s'") % tenant_id)
        LOG.info(_("id : '%s'\n\n") % id)
        context = req.environ[wsgi.CONTEXT_KEY]
        deleted_q = req.GET.get('deleted', '').lower()
        include_deleted = deleted_q == 'true'
        server = models.DetailedMgmtInstance.load(context, id,
                                                  include_deleted)
        root_history = mysql_models.RootHistory.load(context=context,
                                                     instance_id=id)
        return wsgi.Result(
            views.MgmtInstanceDetailView(
                server,
                req=req,
                root_history=root_history).data(),
            200)

    @admin_context
    def action(self, req, body, tenant_id, id):
        """Dispatch exactly one management action from the request body."""
        LOG.info("req : '%s'\n\n" % req)
        LOG.info("Committing an ACTION against instance %s for tenant '%s'"
                 % (id, tenant_id))
        if not body:
            raise exception.BadRequest(_("Invalid request body."))
        context = req.environ[wsgi.CONTEXT_KEY]
        instance = models.MgmtInstance.load(context=context, id=id)
        _actions = {
            'stop': self._action_stop,
            'reboot': self._action_reboot,
            'migrate': self._action_migrate,
            'reset-task-status': self._action_reset_task_status
        }
        selected_action = None
        # Reject unknown actions and bodies naming more than one action.
        for key in body:
            if key in _actions:
                if selected_action is not None:
                    msg = _("Only one action can be specified per request.")
                    raise exception.BadRequest(msg)
                selected_action = _actions[key]
            else:
                msg = _("Invalid instance action: %s") % key
                raise exception.BadRequest(msg)
        if selected_action:
            return selected_action(context, instance, body)
        else:
            raise exception.BadRequest(_("Invalid request body."))

    def _action_stop(self, context, instance, body):
        """Stop the database service on the instance."""
        LOG.debug("Stopping MySQL on instance %s." % instance.id)
        instance.stop_db()
        return wsgi.Result(None, 202)

    def _action_reboot(self, context, instance, body):
        """Reboot the instance."""
        LOG.debug("Rebooting instance %s." % instance.id)
        instance.reboot()
        return wsgi.Result(None, 202)

    def _action_migrate(self, context, instance, body):
        """Migrate the instance, optionally to a specific host."""
        LOG.debug("Migrating instance %s." % instance.id)
        LOG.debug("body['migrate']= %s" % body['migrate'])
        host = body['migrate'].get('host', None)
        instance.migrate(host)
        return wsgi.Result(None, 202)

    def _action_reset_task_status(self, context, instance, body):
        """Clear the instance task status and fail its pending backups."""
        LOG.debug("Setting Task-Status to NONE on instance %s." %
                  instance.id)
        instance.reset_task_status()
        LOG.debug("Failing backups for instance %s." % instance.id)
        Backup.fail_for_instance(instance.id)
        return wsgi.Result(None, 202)

    @admin_context
    def root(self, req, tenant_id, id):
        """Return the date and time root was enabled on an instance,
        if ever.
        """
        LOG.info(_("req : '%s'\n\n") % req)
        LOG.info(_("Showing root history for tenant '%s'") % tenant_id)
        LOG.info(_("id : '%s'\n\n") % id)
        context = req.environ[wsgi.CONTEXT_KEY]
        try:
            instance_models.Instance.load(context=context, id=id)
        except exception.TroveError as e:
            LOG.error(e)
            return wsgi.Result(str(e), 404)
        rhv = views.RootHistoryView(id)
        reh = mysql_models.RootHistory.load(context=context, instance_id=id)
        if reh:
            rhv = views.RootHistoryView(reh.id, enabled=reh.created,
                                        user_id=reh.user)
        return wsgi.Result(rhv.data(), 200)

    @admin_context
    def hwinfo(self, req, tenant_id, id):
        """Return a single instance hardware info."""
        LOG.info(_("req : '%s'\n\n") % req)
        LOG.info(_("Showing hardware info for instance '%s'") % id)
        context = req.environ[wsgi.CONTEXT_KEY]
        instance = models.MgmtInstance.load(context=context, id=id)
        hwinfo = instance.get_hwinfo()
        return wsgi.Result(HwInfoView(id, hwinfo).data(), 200)

    @admin_context
    def diagnostics(self, req, tenant_id, id):
        """Return a single instance diagnostics."""
        LOG.info(_("req : '%s'\n\n") % req)
        LOG.info(_("Showing a instance diagnostics for instance '%s'") % id)
        LOG.info(_("id : '%s'\n\n") % id)
        context = req.environ[wsgi.CONTEXT_KEY]
        instance = models.MgmtInstance.load(context=context, id=id)
        diagnostics = instance.get_diagnostics()
        return wsgi.Result(DiagnosticsView(id, diagnostics).data(), 200)

    @admin_context
    def rpc_ping(self, req, tenant_id, id):
        """Checks if instance is reachable via rpc."""
        LOG.info(_("req : '%s'\n\n") % req)
        LOG.info(_("id : '%s'\n\n") % id)
        context = req.environ[wsgi.CONTEXT_KEY]
        instance = models.MgmtInstance.load(context=context, id=id)
        instance.rpc_ping()
        return wsgi.Result(None, 204)
| cp16net/trove | trove/extensions/mgmt/instances/service.py | Python | apache-2.0 | 8,014 |
from players.models import Player
from rest_framework import serializers
from teams.models import Team
class PlayerTeamSerializer(serializers.ModelSerializer):
    """Serializer for nesting a Team object inside a Player."""
    url = serializers.HyperlinkedIdentityField(view_name='team-detail')
    captain = serializers.PrimaryKeyRelatedField(read_only=True)
    creator = serializers.PrimaryKeyRelatedField(read_only=True)

    @staticmethod
    def setup_eager_loading(queryset):
        # Join the captain/creator rows in the same query (avoids N+1).
        return queryset.select_related('captain', 'creator')

    class Meta:
        model = Team
        fields = ('id', 'name', 'captain', 'creator', 'url')
        # Every exposed field is read-only on this nested representation.
        read_only_fields = fields
class BasePlayerSerializer(serializers.ModelSerializer):
    """Shared Player representation; user fields are flattened onto it."""
    # NOTE: the ``steam_friends`` field is intentionally disabled for now.
    url = serializers.HyperlinkedIdentityField(view_name='player-detail')
    steamid = serializers.CharField(source='user.steamid', required=False)
    email = serializers.EmailField(source='user.email')
    username = serializers.CharField(source='user.username')
    avatar = serializers.CharField(source='user.avatar')
    avatarfull = serializers.CharField(source='user.avatarfull')
    last_login = serializers.CharField(source='user.last_login')

    def update(self, instance, validated_data):
        """Apply nested ``user`` fields, then run the normal model update."""
        user_data = validated_data.pop('user', None)
        if user_data:
            for attr in ('email', 'username'):
                if attr in user_data:
                    setattr(instance.user, attr, user_data[attr])
            instance.user.save()
        return super(BasePlayerSerializer, self).update(instance,
                                                        validated_data)

    @staticmethod
    def setup_eager_loading(queryset):
        """Prefetch the m2m relations and join the user row up front."""
        return queryset.prefetch_related(
            'interests', 'languages', 'positions', 'regions', 'teams',
        ).select_related('user')

    class Meta:
        model = Player
        fields = (
            'id', 'url', 'steamid', 'username', 'bio', 'email', 'last_login',
            'regions', 'positions', 'interests', 'languages', 'teams',
            'avatar', 'avatarfull', 'mmr', 'mmr_estimate', 'mmr_last_updated',
        )
        read_only_fields = (
            'id', 'url', 'steamid', 'username', 'last_login',
            'avatar', 'avatarfull', 'mmr', 'mmr_estimate', 'mmr_last_updated',
        )
class FlatPlayerSerializer(BasePlayerSerializer):
    """Player serializer that uses BasePlayerSerializer's field definitions
    unchanged (no extra related-field overrides)."""
    pass
class PlayerSerializer(BasePlayerSerializer):
    """Player serializer rendering related collections as primary keys and
    teams as nested objects via PlayerTeamSerializer."""
    regions = serializers.PrimaryKeyRelatedField(read_only=True, many=True)
    positions = serializers.PrimaryKeyRelatedField(read_only=True, many=True)
    interests = serializers.PrimaryKeyRelatedField(read_only=True, many=True)
    languages = serializers.PrimaryKeyRelatedField(read_only=True, many=True)
    teams = PlayerTeamSerializer(read_only=True, many=True)
| prattl/teamfinder | api/players/api/serializers.py | Python | apache-2.0 | 3,600 |
"""
Support for Wink fans.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/fan.wink/
"""
import logging
from homeassistant.components.fan import (
SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, SUPPORT_DIRECTION,
SUPPORT_SET_SPEED, FanEntity)
from homeassistant.components.wink import DOMAIN, WinkDevice
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['wink']
# Extra speed values beyond the core fan component's constants.
SPEED_AUTO = 'auto'
SPEED_LOWEST = 'lowest'
# Feature flags are a bitmask; combine them with bitwise OR rather than
# arithmetic ``+`` (same value for these distinct bits, clearer intent).
SUPPORTED_FEATURES = SUPPORT_DIRECTION | SUPPORT_SET_SPEED
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Wink platform."""
    import pywink

    registered = hass.data[DOMAIN]['unique_ids']
    for fan in pywink.get_fans():
        # Skip devices that were already added on a previous discovery.
        if fan.object_id() + fan.name() not in registered:
            add_entities([WinkFanDevice(fan, hass)])
class WinkFanDevice(WinkDevice, FanEntity):
    """Representation of a Wink fan."""

    # Ordered speeds this integration understands, slowest-first.
    _ORDERED_SPEEDS = (SPEED_AUTO, SPEED_LOWEST, SPEED_LOW, SPEED_MEDIUM,
                       SPEED_HIGH)

    async def async_added_to_hass(self):
        """Call when entity is added to hass."""
        self.hass.data[DOMAIN]['entities']['fan'].append(self)

    def set_direction(self, direction: str) -> None:
        """Set the direction of the fan."""
        self.wink.set_fan_direction(direction)

    def set_speed(self, speed: str) -> None:
        """Set the speed of the fan."""
        self.wink.set_state(True, speed)

    def turn_on(self, speed: str = None, **kwargs) -> None:
        """Turn on the fan."""
        self.wink.set_state(True, speed)

    def turn_off(self, **kwargs) -> None:
        """Turn off the fan."""
        self.wink.set_state(False)

    @property
    def is_on(self):
        """Return true if the entity is on."""
        return self.wink.state()

    @property
    def speed(self) -> str:
        """Return the current speed, or None if it is not a known value."""
        current = self.wink.current_fan_speed()
        return current if current in self._ORDERED_SPEEDS else None

    @property
    def current_direction(self):
        """Return direction of the fan [forward, reverse]."""
        return self.wink.current_fan_direction()

    @property
    def speed_list(self) -> list:
        """Get the list of available speeds."""
        available = self.wink.fan_speeds()
        return [speed for speed in self._ORDERED_SPEEDS
                if speed in available]

    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        return SUPPORTED_FEATURES
| PetePriority/home-assistant | homeassistant/components/wink/fan.py | Python | apache-2.0 | 3,231 |
import zmq
import random
import time
import zmq_ports as ports
import zmq_topics as topic
from websocket import create_connection
# Receive polling delay in seconds (currently unused in this module).
RCV_DELAY=0.01
# TCP port of the Tornado websocket server the browser bridge connects to.
WSPORT = 9000
########### WEBSOCKET EVENTS
########### WEBSOCKET EVENTS
if __name__ == '__main__':
    # Bridge process: relays browser commands to the commander, and
    # sensor/GPS telemetry back to the browser over a websocket.
    context = zmq.Context()

    # Publisher for connection state and relayed commands.
    comm_publisher = context.socket(zmq.PUB)
    comm_publisher.bind("tcp://*:%s" % ports.COMM_PUB)

    # Subscribe to commander (sensor readings).
    commander_subscriber = context.socket(zmq.SUB)
    commander_subscriber.connect("tcp://localhost:%s" % ports.COMMANDER_PUB)
    commander_subscriber.setsockopt_string(zmq.SUBSCRIBE, topic.SENSOR_TOPIC)

    # Subscribe to the Tornado websocket server (browser commands).
    browser_subscriber = context.socket(zmq.SUB)
    browser_subscriber.connect("tcp://localhost:%s" % ports.TORNADO_PUB)
    browser_subscriber.setsockopt_string(zmq.SUBSCRIBE, topic.COMMAND_TOPIC)

    # Subscribe to gps.
    gps_subscriber = context.socket(zmq.SUB)
    gps_subscriber.connect("tcp://localhost:%s" % ports.GPS_PUB)
    gps_subscriber.setsockopt_string(zmq.SUBSCRIBE, topic.GPS_TOPIC)

    # Websocket for pushing data to the browser. Use the WSPORT constant
    # instead of hard-coding 9000 a second time.
    ws = create_connection("ws://localhost:%d/ws/" % WSPORT)

    connected = False
    while True:
        # ---- browser -> commander -------------------------------------
        while True:
            try:
                msg = browser_subscriber.recv_string(zmq.DONTWAIT)
            except zmq.Again:
                break
            # NOTE(review): str.strip() removes *any* leading/trailing
            # characters found in the topic string, not just the topic
            # prefix — payloads sharing edge characters with the topic
            # would be mangled. TODO confirm the message format.
            msg = msg.strip(str(topic.COMMAND_TOPIC) + " ")
            print("from browser:", msg)
            if msg[0] == "Q":
                # Connection-state message: forward and remember the flag.
                comm_publisher.send_string(
                    "%s %s" % (topic.CONNECTION_TOPIC, msg[1]))
                # NOTE(review): this keeps the raw character, so "0" is
                # still truthy in the checks below — confirm intent.
                connected = msg[1]
            else:
                comm_publisher.send_string(
                    "%s %s" % (topic.COMMAND_TOPIC, msg))

        # ---- commander -> browser: sensor data ------------------------
        while True:
            try:
                msg = commander_subscriber.recv_string(zmq.DONTWAIT)
                msg = msg.strip(str(topic.SENSOR_TOPIC) + " ")
            except zmq.Again:
                break
            if connected:
                ws.send("_" + msg)

        # ---- gps -> browser: position data ----------------------------
        while True:
            try:
                msg = gps_subscriber.recv_string(zmq.DONTWAIT)
                msg = msg.strip(topic.GPS_TOPIC + " ")
            except zmq.Again:
                break
            if connected:
                ws.send("_g," + msg)

        time.sleep(0.005)
    # Unreachable while the loop above runs forever; kept for symmetry.
    ws.close()
| jeryfast/piflyer | piflyer/zmq_comm.py | Python | apache-2.0 | 2,795 |
# Copyright 2012 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from neutron_lib.api.definitions import port as port_def
from neutron_lib.api import validators
from neutron_lib.callbacks import events
from neutron_lib.callbacks import exceptions
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import constants
from neutron_lib import exceptions as n_exc
from neutron_lib.utils import helpers
from neutron_lib.utils import net
from oslo_utils import uuidutils
import six
from sqlalchemy.orm import scoped_session
from neutron._i18n import _
from neutron.common import constants as n_const
from neutron.common import utils
from neutron.db import _model_query as model_query
from neutron.db import _resource_extend as resource_extend
from neutron.db import _utils as db_utils
from neutron.db import api as db_api
from neutron.db.models import securitygroup as sg_models
from neutron.extensions import securitygroup as ext_sg
from neutron.objects import base as base_obj
from neutron.objects import securitygroup as sg_obj
@resource_extend.has_resource_extenders
@registry.has_registry_receivers
class SecurityGroupDbMixin(ext_sg.SecurityGroupPluginBase):
"""Mixin class to add security group to db_base_plugin_v2."""
__native_bulk_support = True
def create_security_group_bulk(self, context, security_groups):
return self._create_bulk('security_group', context,
security_groups)
def _registry_notify(self, res, event, id=None, exc_cls=None, **kwargs):
# NOTE(armax): a callback exception here will prevent the request
# from being processed. This is a hook point for backend's validation;
# we raise to propagate the reason for the failure.
try:
registry.notify(res, event, self, **kwargs)
except exceptions.CallbackFailure as e:
if exc_cls:
reason = (_('cannot perform %(event)s due to %(reason)s') %
{'event': event, 'reason': e})
raise exc_cls(reason=reason, id=id)
    @db_api.retry_if_session_inactive()
    def create_security_group(self, context, security_group, default_sg=False):
        """Create a security group.

        If default_sg is True this call is creating the tenant's default
        security group; when one already exists it is returned instead of
        creating a duplicate.

        :param security_group: API dict wrapped under 'security_group'.
        :returns: the created (or pre-existing default) group as a dict.
        """
        s = security_group['security_group']
        kwargs = {
            'context': context,
            'security_group': s,
            'is_default': default_sg,
        }
        # Backends may veto creation via BEFORE_CREATE callbacks.
        self._registry_notify(resources.SECURITY_GROUP, events.BEFORE_CREATE,
                              exc_cls=ext_sg.SecurityGroupConflict, **kwargs)
        tenant_id = s['tenant_id']
        if not default_sg:
            self._ensure_default_security_group(context, tenant_id)
        else:
            existing_def_sg_id = self._get_default_sg_id(context, tenant_id)
            if existing_def_sg_id is not None:
                # default already exists, return it
                return self.get_security_group(context, existing_def_sg_id)
        with db_api.context_manager.writer.using(context):
            sg = sg_obj.SecurityGroup(
                context, id=s.get('id') or uuidutils.generate_uuid(),
                description=s['description'], project_id=tenant_id,
                name=s['name'], is_default=default_sg)
            sg.create()
            # Seed the default egress (and, for the default group,
            # intra-group ingress) rules for every supported ethertype.
            for ethertype in ext_sg.sg_supported_ethertypes:
                if default_sg:
                    # Allow intercommunication
                    ingress_rule = sg_obj.SecurityGroupRule(
                        context, id=uuidutils.generate_uuid(),
                        project_id=tenant_id, security_group_id=sg.id,
                        direction='ingress', ethertype=ethertype,
                        remote_group_id=sg.id)
                    ingress_rule.create()
                    sg.rules.append(ingress_rule)
                egress_rule = sg_obj.SecurityGroupRule(
                    context, id=uuidutils.generate_uuid(),
                    project_id=tenant_id, security_group_id=sg.id,
                    direction='egress', ethertype=ethertype)
                egress_rule.create()
                sg.rules.append(egress_rule)
            sg.obj_reset_changes(['rules'])
            # fetch sg from db to load the sg rules with sg model.
            # NOTE(yamamoto): Adding rules above bumps the revision
            # of the SG. It would add SG object to the session.
            # Expunge it to ensure the following get_object doesn't
            # use the instance.
            context.session.expunge(model_query.get_by_id(
                context, sg_models.SecurityGroup, sg.id))
            sg = sg_obj.SecurityGroup.get_object(context, id=sg.id)
            secgroup_dict = self._make_security_group_dict(sg)
            kwargs['security_group'] = secgroup_dict
            # PRECOMMIT_CREATE fires inside the transaction so a veto
            # rolls everything back.
            self._registry_notify(resources.SECURITY_GROUP,
                                  events.PRECOMMIT_CREATE,
                                  exc_cls=ext_sg.SecurityGroupConflict,
                                  **kwargs)
        registry.notify(resources.SECURITY_GROUP, events.AFTER_CREATE, self,
                        **kwargs)
        return secgroup_dict
@db_api.retry_if_session_inactive()
def get_security_groups(self, context, filters=None, fields=None,
sorts=None, limit=None,
marker=None, page_reverse=False, default_sg=False):
# If default_sg is True do not call _ensure_default_security_group()
# so this can be done recursively. Context.tenant_id is checked
# because all the unit tests do not explicitly set the context on
# GETS. TODO(arosen) context handling can probably be improved here.
filters = filters or {}
if not default_sg and context.tenant_id:
tenant_id = filters.get('tenant_id')
if tenant_id:
tenant_id = tenant_id[0]
else:
tenant_id = context.tenant_id
self._ensure_default_security_group(context, tenant_id)
pager = base_obj.Pager(
sorts=sorts, limit=limit, marker=marker, page_reverse=page_reverse)
sg_objs = sg_obj.SecurityGroup.get_objects(
context, _pager=pager, validate_filters=False, **filters)
return [self._make_security_group_dict(obj, fields) for obj in sg_objs]
@db_api.retry_if_session_inactive()
def get_security_groups_count(self, context, filters=None):
filters = filters or {}
return sg_obj.SecurityGroup.count(
context, validate_filters=False, **filters)
    @db_api.retry_if_session_inactive()
    def get_security_group(self, context, id, fields=None, tenant_id=None):
        """Return one security group (with its rules) as a dict.

        Tenant id is given to handle the case when creating a security
        group rule on behalf of another user: the context's tenant is
        temporarily swapped for the lookup and always restored.
        """
        if tenant_id:
            # Remember the caller's tenant so it can be restored below.
            tmp_context_tenant_id = context.tenant_id
            context.tenant_id = tenant_id
        try:
            with db_api.context_manager.reader.using(context):
                ret = self._make_security_group_dict(self._get_security_group(
                    context, id), fields)
                ret['security_group_rules'] = self.get_security_group_rules(
                    context, {'security_group_id': [id]})
        finally:
            # Restore the original tenant even on failure.
            if tenant_id:
                context.tenant_id = tmp_context_tenant_id
        return ret
def _get_security_group(self, context, id):
sg = sg_obj.SecurityGroup.get_object(context, id=id)
if sg is None:
raise ext_sg.SecurityGroupNotFound(id=id)
return sg
    @db_api.retry_if_session_inactive()
    def delete_security_group(self, context, id):
        """Delete a security group.

        Refuses when any port is still bound to the group, or when a
        non-admin tries to delete the 'default' group. Emits BEFORE /
        PRECOMMIT / AFTER_DELETE registry events; backends can veto via
        the notify helper (surfaced as SecurityGroupInUse).
        """
        filters = {'security_group_id': [id]}
        with db_api.context_manager.reader.using(context):
            ports = self._get_port_security_group_bindings(context, filters)
            if ports:
                raise ext_sg.SecurityGroupInUse(id=id)
            # confirm security group exists
            sg = self._get_security_group(context, id)
            if sg['name'] == 'default' and not context.is_admin:
                raise ext_sg.SecurityGroupCannotRemoveDefault()
        kwargs = {
            'context': context,
            'security_group_id': id,
            'security_group': sg,
        }
        self._registry_notify(resources.SECURITY_GROUP,
                              events.BEFORE_DELETE,
                              exc_cls=ext_sg.SecurityGroupInUse, id=id,
                              **kwargs)
        with db_api.context_manager.writer.using(context):
            # pass security_group_rule_ids to ensure
            # consistency with deleted rules
            # get security_group_bindings and security_group one more time
            # so that they will be attached for session where sg will be
            # deleted
            ports = self._get_port_security_group_bindings(context, filters)
            sg = self._get_security_group(context, id)
            kwargs['security_group_rule_ids'] = [r['id'] for r in sg.rules]
            kwargs['security_group'] = self._make_security_group_dict(sg)
            self._registry_notify(resources.SECURITY_GROUP,
                                  events.PRECOMMIT_DELETE,
                                  exc_cls=ext_sg.SecurityGroupInUse, id=id,
                                  **kwargs)
            sg.delete()
        # The group dict is stale after deletion; AFTER_DELETE consumers
        # get the ids only.
        kwargs.pop('security_group')
        registry.notify(resources.SECURITY_GROUP, events.AFTER_DELETE,
                        self, **kwargs)
    @db_api.retry_if_session_inactive()
    def update_security_group(self, context, id, security_group):
        """Update a security group's mutable fields.

        Renaming the 'default' group is rejected. BEFORE / PRECOMMIT /
        AFTER_UPDATE registry events are emitted; PRECOMMIT fires inside
        the write transaction so a callback veto rolls the change back.
        """
        s = security_group['security_group']
        kwargs = {
            'context': context,
            'security_group_id': id,
            'security_group': s,
        }
        self._registry_notify(resources.SECURITY_GROUP, events.BEFORE_UPDATE,
                              exc_cls=ext_sg.SecurityGroupConflict, **kwargs)
        with db_api.context_manager.writer.using(context):
            sg = self._get_security_group(context, id)
            if sg.name == 'default' and 'name' in s:
                raise ext_sg.SecurityGroupCannotUpdateDefault()
            # Snapshot the pre-update state for event consumers.
            sg_dict = self._make_security_group_dict(sg)
            kwargs['original_security_group'] = sg_dict
            sg.update_fields(s)
            sg.update()
            sg_dict = self._make_security_group_dict(sg)
            kwargs['security_group'] = sg_dict
            self._registry_notify(
                resources.SECURITY_GROUP,
                events.PRECOMMIT_UPDATE,
                exc_cls=ext_sg.SecurityGroupConflict, **kwargs)
        registry.notify(resources.SECURITY_GROUP, events.AFTER_UPDATE, self,
                        **kwargs)
        return sg_dict
def _make_security_group_dict(self, security_group, fields=None):
res = {'id': security_group['id'],
'name': security_group['name'],
'tenant_id': security_group['tenant_id'],
'description': security_group['description']}
res['security_group_rules'] = [
self._make_security_group_rule_dict(r.db_obj)
for r in security_group.rules
]
resource_extend.apply_funcs(ext_sg.SECURITYGROUPS, res,
security_group.db_obj)
return db_utils.resource_fields(res, fields)
@staticmethod
def _make_security_group_binding_dict(security_group, fields=None):
res = {'port_id': security_group['port_id'],
'security_group_id': security_group['security_group_id']}
return db_utils.resource_fields(res, fields)
@db_api.retry_if_session_inactive()
def _create_port_security_group_binding(self, context, port_id,
security_group_id):
with db_api.context_manager.writer.using(context):
db = sg_models.SecurityGroupPortBinding(port_id=port_id,
security_group_id=security_group_id)
context.session.add(db)
    def _get_port_security_group_bindings(self, context,
                                          filters=None, fields=None):
        """Return port/SG binding dicts matching *filters*."""
        return model_query.get_collection(
            context, sg_models.SecurityGroupPortBinding,
            self._make_security_group_binding_dict,
            filters=filters, fields=fields)
@db_api.retry_if_session_inactive()
def _delete_port_security_group_bindings(self, context, port_id):
with db_api.context_manager.writer.using(context):
query = model_query.query_with_hooks(
context, sg_models.SecurityGroupPortBinding)
bindings = query.filter(
sg_models.SecurityGroupPortBinding.port_id == port_id)
for binding in bindings:
context.session.delete(binding)
@db_api.retry_if_session_inactive()
def create_security_group_rule_bulk(self, context, security_group_rules):
return self._create_bulk('security_group_rule', context,
security_group_rules)
    @db_api.retry_if_session_inactive()
    def create_security_group_rule_bulk_native(self, context,
                                               security_group_rules):
        """Create a batch of rules for one security group in a single
        transaction.

        All rules are validated up front (they must target the same
        group) and duplicates are rejected; AFTER_CREATE events are
        emitted outside the transaction, once per created rule.
        """
        rules = security_group_rules['security_group_rules']
        scoped_session(context.session)
        security_group_id = self._validate_security_group_rules(
            context, security_group_rules)
        with db_api.context_manager.writer.using(context):
            if not self.get_security_group(context, security_group_id):
                raise ext_sg.SecurityGroupNotFound(id=security_group_id)
            self._check_for_duplicate_rules(context, rules)
            ret = []
            for rule_dict in rules:
                res_rule_dict = self._create_security_group_rule(
                    context, rule_dict, validate=False)
                ret.append(res_rule_dict)
        for rdict in ret:
            registry.notify(
                resources.SECURITY_GROUP_RULE, events.AFTER_CREATE, self,
                context=context, security_group_rule=rdict)
        return ret
    @db_api.retry_if_session_inactive()
    def create_security_group_rule(self, context, security_group_rule):
        """Create one rule, then emit the AFTER_CREATE notification."""
        res = self._create_security_group_rule(context, security_group_rule)
        registry.notify(
            resources.SECURITY_GROUP_RULE, events.AFTER_CREATE, self,
            context=context, security_group_rule=res)
        return res
    def _create_security_group_rule(self, context, security_group_rule,
                                    validate=True):
        """Validate, persist and return a single security group rule.

        ``validate=False`` is used by the bulk path, which already
        validated the whole batch up front. BEFORE_CREATE fires outside
        the transaction, PRECOMMIT_CREATE inside it; either may veto by
        raising (surfaced as SecurityGroupConflict).
        """
        if validate:
            self._validate_security_group_rule(context, security_group_rule)
        rule_dict = security_group_rule['security_group_rule']
        remote_ip_prefix = rule_dict.get('remote_ip_prefix')
        if remote_ip_prefix:
            remote_ip_prefix = utils.AuthenticIPNetwork(remote_ip_prefix)
        protocol = rule_dict.get('protocol')
        if protocol:
            # object expects strings only
            protocol = six.text_type(protocol)
        args = {
            'id': (rule_dict.get('id') or uuidutils.generate_uuid()),
            'project_id': rule_dict['tenant_id'],
            'security_group_id': rule_dict['security_group_id'],
            'direction': rule_dict['direction'],
            'remote_group_id': rule_dict.get('remote_group_id'),
            'ethertype': rule_dict['ethertype'],
            'protocol': protocol,
            'remote_ip_prefix': remote_ip_prefix,
            'description': rule_dict.get('description'),
        }
        # Only carry the port bounds when they convert to real integers.
        port_range_min = self._safe_int(rule_dict['port_range_min'])
        if port_range_min is not None:
            args['port_range_min'] = port_range_min
        port_range_max = self._safe_int(rule_dict['port_range_max'])
        if port_range_max is not None:
            args['port_range_max'] = port_range_max
        kwargs = {
            'context': context,
            'security_group_rule': args
        }
        self._registry_notify(resources.SECURITY_GROUP_RULE,
                              events.BEFORE_CREATE,
                              exc_cls=ext_sg.SecurityGroupConflict, **kwargs)
        with db_api.context_manager.writer.using(context):
            if validate:
                self._check_for_duplicate_rules_in_db(context,
                                                      security_group_rule)
            sg_rule = sg_obj.SecurityGroupRule(context, **args)
            sg_rule.create()
            # fetch sg_rule from db to load the sg rules with sg model
            # otherwise a DetachedInstanceError can occur for model extensions
            sg_rule = sg_obj.SecurityGroupRule.get_object(context,
                                                          id=sg_rule.id)
            res_rule_dict = self._make_security_group_rule_dict(sg_rule.db_obj)
            kwargs['security_group_rule'] = res_rule_dict
            self._registry_notify(resources.SECURITY_GROUP_RULE,
                                  events.PRECOMMIT_CREATE,
                                  exc_cls=ext_sg.SecurityGroupConflict, **kwargs)
        return res_rule_dict
    def _get_ip_proto_number(self, protocol):
        """Map a protocol name/alias/number to its IANA number (int)."""
        if protocol is None:
            return
        # According to bug 1381379, protocol is always set to string to avoid
        # problems with comparing int and string in PostgreSQL. Here this
        # string is converted to int to give an opportunity to use it as
        # before.
        if protocol in n_const.IP_PROTOCOL_NAME_ALIASES:
            protocol = n_const.IP_PROTOCOL_NAME_ALIASES[protocol]
        return int(constants.IP_PROTOCOL_MAP.get(protocol, protocol))
    def _get_ip_proto_name_and_num(self, protocol):
        """Return ``[name, number]`` (both strings) for *protocol*.

        Values not present in either mapping are returned duplicated so
        that equality comparisons between rules still work.
        """
        if protocol is None:
            return
        protocol = str(protocol)
        if protocol in constants.IP_PROTOCOL_MAP:
            return [protocol, str(constants.IP_PROTOCOL_MAP.get(protocol))]
        elif protocol in n_const.IP_PROTOCOL_NUM_TO_NAME_MAP:
            return [n_const.IP_PROTOCOL_NUM_TO_NAME_MAP.get(protocol),
                    protocol]
        return [protocol, protocol]
def _safe_int(self, port_range):
if port_range is None:
return
try:
return int(port_range)
except (ValueError, TypeError):
msg = "port range must be an integer"
raise n_exc.InvalidInput(error_message=msg)
    def _validate_port_range(self, rule):
        """Check that port_range is valid."""
        if (rule['port_range_min'] is None and
            rule['port_range_max'] is None):
            return
        if not rule['protocol']:
            raise ext_sg.SecurityGroupProtocolRequiredWithPorts()
        ip_proto = self._get_ip_proto_number(rule['protocol'])
        # Not all firewall_driver support all these protocols,
        # but being strict here doesn't hurt.
        if ip_proto in [constants.PROTO_NUM_DCCP, constants.PROTO_NUM_SCTP,
                        constants.PROTO_NUM_TCP, constants.PROTO_NUM_UDP,
                        constants.PROTO_NUM_UDPLITE]:
            # Port-based protocols: reject port 0 explicitly, then require
            # both bounds present with min <= max.
            if rule['port_range_min'] == 0 or rule['port_range_max'] == 0:
                raise ext_sg.SecurityGroupInvalidPortValue(port=0)
            elif (rule['port_range_min'] is not None and
                  rule['port_range_max'] is not None and
                  rule['port_range_min'] <= rule['port_range_max']):
                pass
            else:
                raise ext_sg.SecurityGroupInvalidPortRange()
        elif ip_proto in [constants.PROTO_NUM_ICMP,
                          constants.PROTO_NUM_IPV6_ICMP]:
            # For ICMP, min/max are reinterpreted as type/code (0..255).
            for attr, field in [('port_range_min', 'type'),
                                ('port_range_max', 'code')]:
                if rule[attr] is not None and not (0 <= rule[attr] <= 255):
                    raise ext_sg.SecurityGroupInvalidIcmpValue(
                        field=field, attr=attr, value=rule[attr])
            # A code (max) without a type (min) is meaningless.
            if (rule['port_range_min'] is None and
                rule['port_range_max'] is not None):
                raise ext_sg.SecurityGroupMissingIcmpType(
                    value=rule['port_range_max'])
    def _validate_ethertype_and_protocol(self, rule):
        """Check if given ethertype and protocol are valid or not"""
        # IPv6-only protocols (named, legacy alias, or numeric) cannot be
        # combined with an IPv4 ethertype.
        if rule['protocol'] in [constants.PROTO_NAME_IPV6_ENCAP,
                                constants.PROTO_NAME_IPV6_FRAG,
                                constants.PROTO_NAME_IPV6_ICMP,
                                constants.PROTO_NAME_IPV6_ICMP_LEGACY,
                                constants.PROTO_NAME_IPV6_NONXT,
                                constants.PROTO_NAME_IPV6_OPTS,
                                constants.PROTO_NAME_IPV6_ROUTE,
                                str(constants.PROTO_NUM_IPV6_ENCAP),
                                str(constants.PROTO_NUM_IPV6_FRAG),
                                str(constants.PROTO_NUM_IPV6_ICMP),
                                str(constants.PROTO_NUM_IPV6_NONXT),
                                str(constants.PROTO_NUM_IPV6_OPTS),
                                str(constants.PROTO_NUM_IPV6_ROUTE)]:
            if rule['ethertype'] == constants.IPv4:
                raise ext_sg.SecurityGroupEthertypeConflictWithProtocol(
                    ethertype=rule['ethertype'], protocol=rule['protocol'])
def _validate_single_tenant_and_group(self, security_group_rules):
"""Check that all rules belong to the same security group and tenant
"""
sg_groups = set()
tenants = set()
for rule_dict in security_group_rules['security_group_rules']:
rule = rule_dict['security_group_rule']
sg_groups.add(rule['security_group_id'])
if len(sg_groups) > 1:
raise ext_sg.SecurityGroupNotSingleGroupRules()
tenants.add(rule['tenant_id'])
if len(tenants) > 1:
raise ext_sg.SecurityGroupRulesNotSingleTenant()
return sg_groups.pop()
    def _validate_security_group_rule(self, context, security_group_rule):
        """Fully validate one rule; return its security_group_id."""
        rule = security_group_rule['security_group_rule']
        self._validate_port_range(rule)
        self._validate_ip_prefix(rule)
        self._validate_ethertype_and_protocol(rule)
        # remote_ip_prefix and remote_group_id are mutually exclusive.
        if rule['remote_ip_prefix'] and rule['remote_group_id']:
            raise ext_sg.SecurityGroupRemoteGroupAndRemoteIpPrefix()
        remote_group_id = rule['remote_group_id']
        # Check that remote_group_id exists for tenant
        if remote_group_id:
            self.get_security_group(context, remote_group_id,
                                    tenant_id=rule['tenant_id'])
        security_group_id = rule['security_group_id']
        # Confirm that the tenant has permission
        # to add rules to this security group.
        self.get_security_group(context, security_group_id,
                                tenant_id=rule['tenant_id'])
        return security_group_id
    def _validate_security_group_rules(self, context, security_group_rules):
        """Validate a whole batch of rules; return their common group id."""
        sg_id = self._validate_single_tenant_and_group(security_group_rules)
        for rule in security_group_rules['security_group_rules']:
            self._validate_security_group_rule(context, rule)
        return sg_id
    def _make_security_group_rule_dict(self, security_group_rule, fields=None):
        """Build an API response dict for one rule DB object."""
        res = {'id': security_group_rule['id'],
               'tenant_id': security_group_rule['tenant_id'],
               'security_group_id': security_group_rule['security_group_id'],
               'ethertype': security_group_rule['ethertype'],
               'direction': security_group_rule['direction'],
               'protocol': security_group_rule['protocol'],
               'port_range_min': security_group_rule['port_range_min'],
               'port_range_max': security_group_rule['port_range_max'],
               'remote_ip_prefix': security_group_rule['remote_ip_prefix'],
               'remote_group_id': security_group_rule['remote_group_id']}
        # Let registered extensions add their own keys.
        resource_extend.apply_funcs(ext_sg.SECURITYGROUPRULES, res,
                                    security_group_rule)
        return db_utils.resource_fields(res, fields)
    def _make_security_group_rule_filter_dict(self, security_group_rule):
        """Build a filter dict used to find duplicates of this rule."""
        sgr = security_group_rule['security_group_rule']
        res = {'tenant_id': [sgr['tenant_id']],
               'security_group_id': [sgr['security_group_id']],
               'direction': [sgr['direction']]}
        # Optional keys are only added when they carry a truthy value.
        include_if_present = ['protocol', 'port_range_max', 'port_range_min',
                              'ethertype', 'remote_group_id']
        for key in include_if_present:
            value = sgr.get(key)
            if value:
                res[key] = [value]
        # protocol field will get corresponding name and number
        value = sgr.get('protocol')
        if value:
            res['protocol'] = self._get_ip_proto_name_and_num(value)
        return res
def _rules_equal(self, rule1, rule2):
"""Determines if two rules are equal ignoring id field."""
rule1_copy = rule1.copy()
rule2_copy = rule2.copy()
rule1_copy.pop('id', None)
rule2_copy.pop('id', None)
return rule1_copy == rule2_copy
    def _check_for_duplicate_rules(self, context, security_group_rules):
        """Reject a bulk request containing duplicate rules.

        Each rule is compared (ids ignored) against every other rule in
        the request and against rules already stored in the database.
        """
        for i in security_group_rules:
            found_self = False
            for j in security_group_rules:
                if self._rules_equal(i['security_group_rule'],
                                     j['security_group_rule']):
                    # i always matches itself once; a second match means
                    # the request contains a duplicate.
                    if found_self:
                        raise ext_sg.DuplicateSecurityGroupRuleInPost(rule=i)
                    found_self = True
            self._check_for_duplicate_rules_in_db(context, i)
    def _check_for_duplicate_rules_in_db(self, context, security_group_rule):
        """Raise SecurityGroupRuleExists if an equivalent rule is stored.

        protocol and remote_ip_prefix are compared with normalization
        (name/number, wildcard prefixes); everything else by equality.
        """
        # Check in database if rule exists
        filters = self._make_security_group_rule_filter_dict(
            security_group_rule)
        rule_dict = security_group_rule['security_group_rule'].copy()
        rule_dict.pop('description', None)
        keys = rule_dict.keys()
        fields = list(keys) + ['id']
        if 'remote_ip_prefix' not in fields:
            fields += ['remote_ip_prefix']
        db_rules = self.get_security_group_rules(context, filters,
                                                 fields=fields)
        # Note(arosen): the call to get_security_group_rules wildcards
        # values in the filter that have a value of [None]. For
        # example, filters = {'remote_group_id': [None]} will return
        # all security group rules regardless of their value of
        # remote_group_id. Therefore it is not possible to do this
        # query unless the behavior of _get_collection()
        # is changed which cannot be because other methods are already
        # relying on this behavior. Therefore, we do the filtering
        # below to check for these corner cases.
        rule_dict.pop('id', None)
        sg_protocol = rule_dict.pop('protocol', None)
        remote_ip_prefix = rule_dict.pop('remote_ip_prefix', None)
        for db_rule in db_rules:
            rule_id = db_rule.pop('id', None)
            # remove protocol and match separately for number and type
            db_protocol = db_rule.pop('protocol', None)
            is_protocol_matching = (
                self._get_ip_proto_name_and_num(db_protocol) ==
                self._get_ip_proto_name_and_num(sg_protocol))
            db_remote_ip_prefix = db_rule.pop('remote_ip_prefix', None)
            duplicate_ip_prefix = self._validate_duplicate_ip_prefix(
                remote_ip_prefix, db_remote_ip_prefix)
            if (is_protocol_matching and duplicate_ip_prefix and
                    rule_dict == db_rule):
                raise ext_sg.SecurityGroupRuleExists(rule_id=rule_id)
def _validate_duplicate_ip_prefix(self, ip_prefix, other_ip_prefix):
if other_ip_prefix is not None:
other_ip_prefix = str(other_ip_prefix)
all_address = ['0.0.0.0/0', '::/0', None]
if ip_prefix == other_ip_prefix:
return True
elif ip_prefix in all_address and other_ip_prefix in all_address:
return True
return False
    def _validate_ip_prefix(self, rule):
        """Check that a valid cidr was specified as remote_ip_prefix
        No need to check that it is in fact an IP address as this is already
        validated by attribute validators.
        Check that rule ethertype is consistent with remote_ip_prefix ip type.
        Add mask to ip_prefix if absent (192.168.1.10 -> 192.168.1.10/32).

        NOTE: mutates the caller's ``rule`` dict in place.
        """
        input_prefix = rule['remote_ip_prefix']
        if input_prefix:
            addr = netaddr.IPNetwork(input_prefix)
            # set input_prefix to always include the netmask:
            rule['remote_ip_prefix'] = str(addr)
            # check consistency of ethertype with addr version
            if rule['ethertype'] != "IPv%d" % (addr.version):
                raise ext_sg.SecurityGroupRuleParameterConflict(
                    ethertype=rule['ethertype'], cidr=input_prefix)
    @db_api.retry_if_session_inactive()
    def get_security_group_rules(self, context, filters=None, fields=None,
                                 sorts=None, limit=None, marker=None,
                                 page_reverse=False):
        """List security group rules matching *filters*, with pagination."""
        filters = filters or {}
        pager = base_obj.Pager(
            sorts=sorts, marker=marker, limit=limit, page_reverse=page_reverse)
        rule_objs = sg_obj.SecurityGroupRule.get_objects(
            context, _pager=pager, validate_filters=False, **filters
        )
        return [
            self._make_security_group_rule_dict(obj.db_obj, fields)
            for obj in rule_objs
        ]
    @db_api.retry_if_session_inactive()
    def get_security_group_rules_count(self, context, filters=None):
        """Return the number of rules matching *filters*."""
        filters = filters or {}
        return sg_obj.SecurityGroupRule.count(
            context, validate_filters=False, **filters)
    @db_api.retry_if_session_inactive()
    def get_security_group_rule(self, context, id, fields=None):
        """Return one rule as a dict; raise NotFound if it does not exist."""
        security_group_rule = self._get_security_group_rule(context, id)
        return self._make_security_group_rule_dict(
            security_group_rule.db_obj, fields)
    def _get_security_group_rule(self, context, id):
        """Fetch the SecurityGroupRule object or raise NotFound."""
        sgr = sg_obj.SecurityGroupRule.get_object(context, id=id)
        if sgr is None:
            raise ext_sg.SecurityGroupRuleNotFound(id=id)
        return sgr
    @db_api.retry_if_session_inactive()
    def delete_security_group_rule(self, context, id):
        """Delete one rule, firing BEFORE/PRECOMMIT/AFTER_DELETE events.

        A registered callback may veto the deletion by raising, which is
        surfaced to the caller as SecurityGroupRuleInUse. AFTER_DELETE
        only fires once the transaction has committed.
        """
        kwargs = {
            'context': context,
            'security_group_rule_id': id
        }
        self._registry_notify(resources.SECURITY_GROUP_RULE,
                              events.BEFORE_DELETE, id=id,
                              exc_cls=ext_sg.SecurityGroupRuleInUse, **kwargs)
        with db_api.context_manager.writer.using(context):
            sgr = self._get_security_group_rule(context, id)
            kwargs['security_group_id'] = sgr['security_group_id']
            self._registry_notify(resources.SECURITY_GROUP_RULE,
                                  events.PRECOMMIT_DELETE,
                                  exc_cls=ext_sg.SecurityGroupRuleInUse, id=id,
                                  **kwargs)
            sgr.delete()
        registry.notify(
            resources.SECURITY_GROUP_RULE, events.AFTER_DELETE, self,
            **kwargs)
    @staticmethod
    @resource_extend.extends([port_def.COLLECTION_NAME])
    def _extend_port_dict_security_group(port_res, port_db):
        """Add the port's security group ids to its API response dict."""
        # Security group bindings will be retrieved from the SQLAlchemy
        # model. As they're loaded eagerly with ports because of the
        # joined load they will not cause an extra query.
        security_group_ids = [sec_group_mapping['security_group_id'] for
                              sec_group_mapping in port_db.security_groups]
        port_res[ext_sg.SECURITYGROUPS] = security_group_ids
        return port_res
    def _process_port_create_security_group(self, context, port,
                                            security_group_ids):
        """Bind *port* to each given group and record them on the dict."""
        if validators.is_attr_set(security_group_ids):
            for security_group_id in security_group_ids:
                self._create_port_security_group_binding(context, port['id'],
                                                         security_group_id)
        # Convert to list as a set might be passed here and
        # this has to be serialized
        port[ext_sg.SECURITYGROUPS] = (security_group_ids and
                                       list(security_group_ids) or [])
    def _get_default_sg_id(self, context, tenant_id):
        """Return the id of *tenant_id*'s default group, or None."""
        default_group = sg_obj.DefaultSecurityGroup.get_object(
            context,
            project_id=tenant_id,
        )
        if default_group:
            return default_group.security_group_id
    @registry.receives(resources.PORT, [events.BEFORE_CREATE,
                                        events.BEFORE_UPDATE])
    @registry.receives(resources.NETWORK, [events.BEFORE_CREATE])
    def _ensure_default_security_group_handler(self, resource, event, trigger,
                                               context, **kwargs):
        """Callback: make sure the owning tenant has a default group."""
        if event == events.BEFORE_UPDATE:
            # On update, the tenant comes from the pre-change resource.
            tenant_id = kwargs['original_' + resource]['tenant_id']
        else:
            tenant_id = kwargs[resource]['tenant_id']
        self._ensure_default_security_group(context, tenant_id)
    def _ensure_default_security_group(self, context, tenant_id):
        """Create a default security group if one doesn't exist.

        :returns: the default security group id for given tenant.
        """
        default_group_id = self._get_default_sg_id(context, tenant_id)
        if default_group_id:
            return default_group_id
        security_group = {
            'security_group':
                {'name': 'default',
                 'tenant_id': tenant_id,
                 'description': _('Default security group')}
        }
        # default_sg=True marks the group so it is found by
        # _get_default_sg_id next time around.
        return self.create_security_group(context, security_group,
                                          default_sg=True)['id']
    def _get_security_groups_on_port(self, context, port):
        """Check that all security groups on port belong to tenant.

        :returns: all security groups IDs on port belonging to tenant.
        """
        port = port['port']
        if not validators.is_attr_set(port.get(ext_sg.SECURITYGROUPS)):
            return
        # Trusted (service) ports bypass security groups entirely.
        if port.get('device_owner') and net.is_port_trusted(port):
            return
        port_sg = port.get(ext_sg.SECURITYGROUPS, [])
        filters = {'id': port_sg}
        tenant_id = port.get('tenant_id')
        if tenant_id:
            filters['tenant_id'] = [tenant_id]
        valid_groups = set(g['id'] for g in
                           self.get_security_groups(context, fields=['id'],
                                                    filters=filters))
        requested_groups = set(port_sg)
        port_sg_missing = requested_groups - valid_groups
        if port_sg_missing:
            raise ext_sg.SecurityGroupNotFound(id=', '.join(port_sg_missing))
        return list(requested_groups)
    def _ensure_default_security_group_on_port(self, context, port):
        """Assign the tenant's default group to ports with no explicit SGs."""
        # we don't apply security groups for dhcp, router
        port = port['port']
        if port.get('device_owner') and net.is_port_trusted(port):
            return
        if not validators.is_attr_set(port.get(ext_sg.SECURITYGROUPS)):
            default_sg = self._ensure_default_security_group(context,
                                                             port['tenant_id'])
            port[ext_sg.SECURITYGROUPS] = [default_sg]
    def _check_update_deletes_security_groups(self, port):
        """Return True if port has as a security group and it's value
        is either [] or not is_attr_set, otherwise return False
        """
        if (ext_sg.SECURITYGROUPS in port['port'] and
                not (validators.is_attr_set(
                         port['port'][ext_sg.SECURITYGROUPS])
                     and port['port'][ext_sg.SECURITYGROUPS] != [])):
            return True
        return False
    def _check_update_has_security_groups(self, port):
        """Return True if port has security_groups attribute set and
        its not empty, or False otherwise.
        This method is called both for port create and port update.
        """
        if (ext_sg.SECURITYGROUPS in port['port'] and
                (validators.is_attr_set(port['port'][ext_sg.SECURITYGROUPS])
                 and port['port'][ext_sg.SECURITYGROUPS] != [])):
            return True
        return False
    def update_security_group_on_port(self, context, id, port,
                                      original_port, updated_port):
        """Update security groups on port.

        This method returns a flag which indicates request notification
        is required and does not perform notification itself.
        It is because another changes for the port may require notification.
        """
        need_notify = False
        port_updates = port['port']
        if (ext_sg.SECURITYGROUPS in port_updates and
                not helpers.compare_elements(
                    original_port.get(ext_sg.SECURITYGROUPS),
                    port_updates[ext_sg.SECURITYGROUPS])):
            # delete the port binding and read it with the new rules
            port_updates[ext_sg.SECURITYGROUPS] = (
                self._get_security_groups_on_port(context, port))
            self._delete_port_security_group_bindings(context, id)
            self._process_port_create_security_group(
                context,
                updated_port,
                port_updates[ext_sg.SECURITYGROUPS])
            need_notify = True
        else:
            # Groups unchanged: carry the originals through to the result.
            updated_port[ext_sg.SECURITYGROUPS] = (
                original_port[ext_sg.SECURITYGROUPS])
        return need_notify
| eayunstack/neutron | neutron/db/securitygroups_db.py | Python | apache-2.0 | 39,022 |
#! /usr/bin/env python
"""
Copyright [1999-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
"""
from postgap.DataModel import *
from pprint import pformat
from postgap.Finemap import *
def summarise(obj, **kwparams):
	"""Dispatch *obj* to the renderer matching its exact type.

	Raises when no renderer is known for the type.
	"""
	if type(obj) == list:
		return summarise_list(obj, **kwparams)
	if type(obj) == GWAS_Cluster:
		return summarise_gwas_cluster(obj, **kwparams)
	if type(obj) == SNP:
		return summarise_snp(obj, **kwparams)
	if type(obj) == GWAS_SNP:
		return summarise_gwas_snp(obj, **kwparams)
	if type(obj) == GWAS_Association:
		return summarise_generic(obj, **kwparams)
	if type(obj) == Cisregulatory_Evidence:
		return summarise_generic(obj, **kwparams)
	if type(obj) == OneDConfigurationSample:
		return str(obj)
	if type(obj) == TwoDConfigurationSample:
		return str(obj)
	raise Exception("Don't know how to summarise a " + str(type(obj)))
def summarise_list(list_of_objects, leadstring = "", **kwparams):
	"""Render each element of *list_of_objects* as an indented bullet item."""
	parts = [leadstring + " * List:\n"]
	for position, element in enumerate(list_of_objects):
		parts.append(leadstring + " ** Item " + str(position) + ":\n")
		parts.append(summarise(element, leadstring = leadstring + " *******") + "\n")
	parts.append(leadstring + " ******")
	return "".join(parts)
def summarise_gwas_cluster(gwas_cluster, leadstring = ""):
	"""Render a GWAS_Cluster: header, then its gwas_snps and ld_snps."""
	string = "GWAS_Cluster\n"
	string += "============\n"
	string += "\n"
	string += " Gwas snps:\n"
	string += " ----------\n"
	string += summarise(gwas_cluster.gwas_snps, leadstring = leadstring + " ")
	string += "\n"
	string += " LD snps:\n"
	string += " --------\n"
	string += summarise(gwas_cluster.ld_snps, leadstring = leadstring + " ")
	return string
def summarise_snp(snp, leadstring = ""):
	"""Return *snp* pretty-printed, preceded by *leadstring*."""
	formatted = pformat(snp)
	return "%s%s" % (leadstring, formatted)
def summarise_generic(obj, leadstring = ""):
	"""Fallback renderer: prefix plus the pprint representation of *obj*."""
	return "{}{}".format(leadstring, pformat(obj))
def summarise_gwas_snp(gwas_snp, leadstring = ""):
	"""Render one GWAS_SNP: its SNP, statistics, and evidence list."""
	string = leadstring + "=============\n"
	string += leadstring + "|GWAS_SNP\n"
	string += leadstring + "| " + "SNP: " + summarise(gwas_snp.snp, leadstring = "") + "\n"
	string += leadstring + "| " + "pvalue: " + str(gwas_snp.pvalue) + "\n"
	string += leadstring + "| " + "z_score: " + str(gwas_snp.z_score) + "\n"
	string += leadstring + "| " + "evidence:\n"
	string += summarise(gwas_snp.evidence, leadstring = leadstring + "| " + " ") + "\n"
	string += leadstring + "============="
	return string
def concatenate(list):
	"""
	Shorthand to concatenate a list of lists, skipping None entries.
	Args: [[]]
	Returntype: []
	"""
	# A flattening comprehension is linear in the total element count;
	# the previous sum(..., []) was quadratic.
	return [item for sublist in list if sublist is not None for item in sublist]
def concatenate_hashes(list):
	"""
	Shorthand to merge a list of dicts, skipping None entries.
	On key collisions, later dicts override earlier ones.
	Args: [{}]
	Returntype: {}
	"""
	# dict.update works on Python 2 and 3; the previous
	# sum(map(lambda X: X.items(), ...), []) breaks on Python 3,
	# where dict views cannot be summed with a list.
	merged = {}
	for mapping in list:
		if mapping is not None:
			merged.update(mapping)
	return merged
def chunks(l, n):
	"""Yield successive n-sized slices of *l*; the final slice may be shorter."""
	for offset in range(0, len(l), n):
		yield l[offset:offset + n]
def isnamedtupleinstance(x):
	"""Heuristically detect collections.namedtuple instances.

	True when x's class directly subclasses tuple and carries a
	'_fields' tuple whose entries are all plain strings.
	"""
	cls = type(x)
	if cls.__bases__ != (tuple,):
		return False
	field_names = getattr(cls, '_fields', None)
	if not isinstance(field_names, tuple):
		return False
	return all(type(name) == str for name in field_names)
def objectToDict(obj):
	"""Recursively convert namedtuples (and the containers holding them)
	into plain dicts, lists and tuples; leaves are returned unchanged."""
	if isinstance(obj, dict):
		converted = {}
		for key, value in obj.items():
			converted[key] = objectToDict(value)
		return converted
	if isinstance(obj, list):
		return [objectToDict(item) for item in obj]
	if isnamedtupleinstance(obj):
		return {key: objectToDict(value)
				for key, value in obj._asdict().items()}
	if isinstance(obj, tuple):
		return tuple(objectToDict(item) for item in obj)
	return obj
| Ensembl/cttv024 | lib/postgap/Utils.py | Python | apache-2.0 | 4,340 |
#!/usr/bin/env python
#
# Copyright 2015 Airbus
# Copyright 2017 Fraunhofer Institute for Manufacturing Engineering and Automation (IPA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class SPLINE:
    # Graphviz 'splines' graph-attribute value: route edges as
    # orthogonal (right-angled) segments.
    Ortho = 'ortho'
| ipa-led/airbus_coop | airbus_docgen/src/airbus_docgen/digraph/spline.py | Python | apache-2.0 | 717 |
# -*- coding:utf-8 -*-
from models.base import SqlBaseModel
class Model(SqlBaseModel):
    """SQL model bound to the ``blog`` database."""
    def __init__(self):
        # Pin the database name so callers need no configuration.
        super(Model, self).__init__(db_name='blog')
# Module-level declarative base shared by the blog models.
# NOTE(review): instantiating Model at import time runs SqlBaseModel's
# setup on import -- confirm that side effect is intended.
Base = Model().Base
| wecatch/app-turbo | demos/models/blog/base.py | Python | apache-2.0 | 188 |
from builtins import object
import abc
class Event(object):
    """Base class for events; subclasses are expected to implement trigger().

    NOTE(review): @abc.abstractmethod is not enforced here because the
    class does not use abc.ABCMeta (or inherit abc.ABC) -- incomplete
    subclasses can still be instantiated; confirm whether that is intended.
    """
    @abc.abstractmethod
    def trigger(self, *args, **kwargs):
        pass
| HelioGuilherme66/robotframework-selenium2library | src/Selenium2Library/utils/events/event.py | Python | apache-2.0 | 140 |