Dataset schema (column: type, value range):

repo_name: stringlengths, 5 to 100
path: stringlengths, 4 to 231
language: stringclasses, 1 value
license: stringclasses, 15 values
size: int64, 6 to 947k
score: float64, 0 to 0.34
prefix: stringlengths, 0 to 8.16k
middle: stringlengths, 3 to 512
suffix: stringlengths, 0 to 8.17k
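The columns above describe a fill-in-the-middle (FIM) code dataset: each row carries repository metadata plus the file text split into prefix, middle, and suffix spans, where the short middle (3-512 characters) is the span a model learns to predict. As a hedged sketch only — the dataset path and the <fim_*> sentinel tokens below are illustrative assumptions, not part of this dump — one row could be turned into a FIM training string like this:

# Minimal sketch of consuming one row of this schema for FIM training.
# Assumes the Hugging Face `datasets` library; DATASET_PATH is hypothetical.
from datasets import load_dataset

DATASET_PATH = "path/to/this-dataset"  # hypothetical

def to_fim_example(row):
    # prefix and suffix bound the hole; middle is the prediction target.
    return "<fim_prefix>{prefix}<fim_suffix>{suffix}<fim_middle>{middle}".format(**row)

ds = load_dataset(DATASET_PATH, split="train")
print(to_fim_example(ds[0])[:200])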
repo_name: tanglei528/horizon
path: openstack_dashboard/test/api_tests/lbaas_tests.py
language: Python
license: apache-2.0
size: 16,445
score: 0

# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from openstack_dashboard import api
from openstack_dashboard.test import helpers as test

from neutronclient.v2_0 import client

neutronclient = client.Client


class LbaasApiTests(test.APITestCase):
    @test.create_stubs({neutronclient: ('create_vip',)})
    def test_vip_create(self):
        vip1 = self.api_vips.first()
        form_data = {'address': vip1['address'],
                     'name': vip1['name'],
                     'description': vip1['description'],
                     'subnet_id': vip1['subnet_id'],
                     'protocol_port': vip1['protocol_port'],
                     'protocol': vip1['protocol'],
                     'pool_id': vip1['pool_id'],
                     'session_persistence': vip1['session_persistence'],
                     'connection_limit': vip1['connection_limit'],
                     'admin_state_up': vip1['admin_state_up']
                     }
        vip = {'vip': self.api_vips.first()}
        neutronclient.create_vip({'vip': form_data}).AndReturn(vip)
        self.mox.ReplayAll()

        ret_val = api.lbaas.vip_create(self.request, **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Vip)

    @test.create_stubs({neutronclient: ('create_vip',)})
    def test_vip_create_skip_address_if_empty(self):
        vip1 = self.api_vips.first()
        vipform_data = {'name': vip1['name'],
                        'description': vip1['description'],
                        'subnet_id': vip1['subnet_id'],
                        'protocol_port': vip1['protocol_port'],
                        'protocol': vip1['protocol'],
                        'pool_id': vip1['pool_id'],
                        'session_persistence': vip1['session_persistence'],
                        'connection_limit': vip1['connection_limit'],
                        'admin_state_up': vip1['admin_state_up']
                        }
        neutronclient.create_vip({'vip': vipform_data}).AndReturn(vipform_data)
        self.mox.ReplayAll()

        form_data = dict(vipform_data)
        form_data['address'] = ""
        ret_val = api.lbaas.vip_create(self.request, **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Vip)

    @test.create_stubs({neutronclient: ('list_vips',)})
    def test_vip_list(self):
        vips = {'vips': [{'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                          'address': '10.0.0.100',
                          'name': 'vip1name',
                          'description': 'vip1description',
                          'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                          'protocol_port': '80',
                          'protocol': 'HTTP',
                          'pool_id': '8913dde8-4915-4b90-8d3e-b95eeedb0d49',
                          'connection_limit': '10',
                          'admin_state_up': True
                          }, ]}
        neutronclient.list_vips().AndReturn(vips)
        self.mox.ReplayAll()

        ret_val = api.lbaas.vip_list(self.request)
        for v in ret_val:
            self.assertIsInstance(v, api.lbaas.Vip)
            self.assertTrue(v.id)

    @test.create_stubs({neutronclient: ('show_vip', 'show_pool'),
                        api.neutron: ('subnet_get', 'port_get')})
    def test_vip_get(self):
        vip = self.api_vips.first()
        neutronclient.show_vip(vip['id']).AndReturn({'vip': vip})
        api.neutron.subnet_get(self.request, vip['subnet_id']
                               ).AndReturn(self.subnets.first())
        api.neutron.port_get(self.request, vip['port_id']
                             ).AndReturn(self.ports.first())
        neutronclient.show_pool(vip['pool_id']
                                ).AndReturn({'pool': self.api_pools.first()})
        self.mox.ReplayAll()

        ret_val = api.lbaas.vip_get(self.request, vip['id'])
        self.assertIsInstance(ret_val, api.lbaas.Vip)
        self.assertIsInstance(ret_val.subnet, api.neutron.Subnet)
        self.assertEqual(vip['subnet_id'], ret_val.subnet.id)
        self.assertIsInstance(ret_val.port, api.neutron.Port)
        self.assertEqual(vip['port_id'], ret_val.port.id)
        self.assertIsInstance(ret_val.pool, api.lbaas.Pool)
        self.assertEqual(self.api_pools.first()['id'], ret_val.pool.id)

    @test.create_stubs({neutronclient: ('update_vip',)})
    def test_vip_update(self):
        form_data = {'address': '10.0.0.100',
                     'name': 'vip1name',
                     'description': 'vip1description',
                     'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                     'protocol_port': '80',
                     'protocol': 'HTTP',
                     'pool_id': '8913dde8-4915-4b90-8d3e-b95eeedb0d49',
                     'connection_limit': '10',
                     'admin_state_up': True
                     }
        vip = {'vip': {'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                       'address': '10.0.0.100',
                       'name': 'vip1name',
                       'description': 'vip1description',
                       'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                       'protocol_port': '80',
                       'protocol': 'HTTP',
                       'pool_id': '8913dde8-4915-4b90-8d3e-b95eeedb0d49',
                       'connection_limit': '10',
                       'admin_state_up': True
                       }}
        neutronclient.update_vip(vip['vip']['id'], form_data).AndReturn(vip)
        self.mox.ReplayAll()

        ret_val = api.lbaas.vip_update(self.request,
                                       vip['vip']['id'], **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Vip)

    @test.create_stubs({neutronclient: ('create_pool',)})
    def test_pool_create(self):
        form_data = {'name': 'pool1name',
                     'description': 'pool1description',
                     'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                     'protocol': 'HTTP',
                     'lb_method': 'ROUND_ROBIN',
                     'admin_state_up': True,
                     'provider': 'dummy'
                     }
        pool = {'pool': {'id': 'abcdef-c3eb-4fee-9763-12de3338041e',
                         'name': 'pool1name',
                         'description': 'pool1description',
                         'subnet_id': '12381d38-c3eb-4fee-9763-12de3338041e',
                         'protocol': 'HTTP',
                         'lb_method': 'ROUND_ROBIN',
                         'admin_state_up': True,
                         'provider': 'dummy'
                         }}
        neutronclient.create_pool({'pool': form_data}).AndReturn(pool)
        self.mox.ReplayAll()

        ret_val = api.lbaas.pool_create(self.request, **form_data)
        self.assertIsInstance(ret_val, api.lbaas.Pool)

    @test.create_stubs({neutronclient: ('list_pools', 'list_vips'),
                        api.neutron: ('subnet_list',)})
    def test_pool_list(self):
        pools = {'pools': self.api_pools.list()}
        subnets = self.subnets.list()
        vips = {'vips': self.api_vips.list()}

        neutronclient.list_pools().AndReturn(pools)
        api.neutron.subnet_list(self.request).AndReturn(subnets)
        neutronclient.list_vips().AndReturn(vips)
        self.mox.ReplayAll()

        ret_val = api.lbaas.pool_list(self.request)
        for v in ret_val:
            self.assertIsInstance(v, api.lbaas.Pool)
repo_name: nevil-brownlee/pypy-libtrace
path: doc/examples/copy-icmp6.py
language: Python
license: gpl-3.0
size: 961
score: 0.004162

# test-icmp6.py: count types of icmp6 packets
# Copyright (C) 2016, Nevil Brownlee, U Auckland | CAIDA | Wand

from plt_testing import *
from array import *

icmp_info = {}  # Empty dictionary

t = get_example_trace('icmp6-sample.pcap')
out_uri = 'pcapfile:icmp6-out.pcap'
of = plt.output_trace(out_uri)
of.start_output()
n = 0
# 1: 28535  2: 20  3: 89094  4: 4413  128: 46447  129: 36085  134: 1382  135: 148720  136: 16188
#t = get_rlt_example_file('icmp6.pcap')
# 1: 371  3: 32  4: 12  128: 37  129: 36  134: 5  135: 468  136: 39

for pkt in t:
    n += 1
    icmp6 = pkt.icmp6
    if not icmp6:
        continue
    it = icmp6.type
    if it in icmp_info:
        icmp_info[it] += 1
    else:
        icmp_info[it] = 1
    if icmp_info[it] <= 4:
        of.write_packet(pkt)

t.close()
of.close_output()

print "%d packets examined\n" % (n)
print "icmp6 types = ",
for type in sorted(icmp_info):
    print "%d: %d " % (type, icmp_info[type]),
print
repo_name: SnowRomance/CMDB
path: app/migrations/0007_auto_20161228_1021.py
language: Python
license: gpl-3.0
size: 869
score: 0.001151

# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-12-28 02:21
from __future__ import unicode_literals

import datetime
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('app', '0006_auto_20161228_1016'),
    ]

    operations = [
        migrations.AlterField(
            model_name='hostrequest',
            name='lease_time',
            field=models.IntegerField(default=30),
        ),
        migrations.AlterField(
            model_name='idc',
            name='create_time',
            field=models.DateField(default=datetime.datetime(2016, 12, 28, 10, 21, 52, 541000), verbose_name='\u521b\u5efa\u65f6\u95f4'),
        ),
        migrations.AlterField(
            model_name='lease',
            name='lease_time',
            field=models.IntegerField(default=30),
        ),
    ]
repo_name: NetASM/NetASM-python
path: netasm/netasm/core/optimize.py
language: Python
license: gpl-2.0
size: 2,688
score: 0.001488

# ################################################################################
# ##
# ##  https://github.com/NetASM/NetASM-python
# ##
# ##  File:
# ##    optimize.py
# ##
# ##  Project:
# ##    NetASM: A Network Assembly Language for Programmable Dataplanes
# ##
# ##  Author:
# ##    Muhammad Shahbaz
# ##
# ##  Copyright notice:
# ##    Copyright (C) 2014 Princeton University
# ##    Network Operations and Internet Security Lab
# ##
# ##  Licence:
# ##    This file is a part of the NetASM development base package.
# ##
# ##    This file is free code: you can redistribute it and/or modify it under
# ##    the terms of the GNU Lesser General Public License version 2.1 as
# ##    published by the Free Software Foundation.
# ##
# ##    This package is distributed in the hope that it will be useful, but
# ##    WITHOUT ANY WARRANTY; without even the implied warranty of
# ##    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# ##    Lesser General Public License for more details.
# ##
# ##    You should have received a copy of the GNU Lesser General Public
# ##    License along with the NetASM source package. If not, see
# ##    http://www.gnu.org/licenses/.

__author__ = 'shahbaz'

from netasm.netasm.core.utilities.profile import time_usage
from netasm.netasm.core.syntax import InstructionCollection as I
from netasm.netasm.core.transformations import dead_code_elimination as dce
from netasm.netasm.core.transformations import redundant_code_elimination as rce
from netasm.netasm.core.transformations import add_code_motion as acm
from netasm.netasm.core.transformations import rmv_code_motion as rcm
# from netasm.netasm.core.transformations import rmv_code_insertion as rci


def _optimize_Code(code):
    code = acm.transform(code)
    code = rcm.transform(code)
    code = dce.transform(code)
    code = rce.transform(code)
    # code = rci.transform(code)
    return code


def optimize_Code(code):
    for instruction in code.instructions:
        if isinstance(instruction, I.CNC):
            codes = I.Codes()
            for _code in instruction.codes:
                codes.append(optimize_Code(_code))
            instruction.codes = codes
        elif isinstance(instruction, I.ATM):
            instruction.code = optimize_Code(instruction.code)
        elif isinstance(instruction, I.SEQ):
            instruction.code = optimize_Code(instruction.code)
    return _optimize_Code(code)


def optimize_Policy(policy):
    policy.code = optimize_Code(policy.code)
    return policy


@time_usage
def optimize_Policy__time_usage(policy):
    return optimize_Policy(policy)
repo_name: smpss91341/2016springcd_aG8
path: users/a/g8/setup.py
language: Python
license: agpl-3.0
size: 297
score: 0.003367

from setuptools import setup

setup(name='KMOL 2016 project',
      version='1.0',
      description='OpenShift App',
      author='KMOL',
      author_email='course@mde.tw',
      url='https://www.python.org/community/sigs/current/distutils-sig',
      install_requires=['Flask>=0.10.1'],
      )
repo_name: elianerpereira/gtg
path: GTG/tests/test_interruptible.py
language: Python
license: gpl-3.0
size: 2,149
score: 0.000465

# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------

""" Tests for interrupting cooperative threads """

from threading import Thread, Event
import time
import unittest

from GTG.tools.interruptible import interruptible, _cancellation_point


class TestInterruptible(unittest.TestCase):
    """ Tests for interrupting cooperative threads """

    def test_interruptible_decorator(self):
        """ Tests for the @interruptible decorator. """
        self.quit_condition = False
        cancellation_point = lambda: _cancellation_point(
            lambda: self.quit_condition)
        self.thread_started = Event()

        @interruptible
        def never_ending(cancellation_point):
            self.thread_started.set()
            while True:
                time.sleep(0.1)
                cancellation_point()

        thread = Thread(target=never_ending, args=(cancellation_point, ))
        thread.start()
        self.thread_started.wait()

        self.quit_condition = True
        countdown = 10
        while thread.is_alive() and countdown > 0:
            time.sleep(0.1)
            countdown -= 1
        self.assertFalse(thread.is_alive())


def test_suite():
    return unittest.TestLoader().loadTestsFromTestCase(TestInterruptible)
repo_name: opennode/nodeconductor
path: waldur_core/users/migrations/0001_squashed_0004.py
language: Python
license: mit
size: 3,908
score: 0.005885

# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import django.utils.timezone
import model_utils.fields
from django.conf import settings
from django.db import migrations, models

import waldur_core.core.fields
import waldur_core.structure.models


class Migration(migrations.Migration):
    replaces = [('users', '0001_initial'),
                ('users', '0002_invitation_error_message'),
                ('users', '0003_invitation_civil_number'),
                ('users', '0004_migrate_to_new_permissions_model')]

    initial = True

    dependencies = [
        ('structure', '0001_squashed_0054'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Invitation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('uuid', waldur_core.core.fields.UUIDField()),
                ('error_message', models.TextField(blank=True)),
                ('customer_role', waldur_core.structure.models.CustomerRole(blank=True, choices=[('owner', 'Owner'), ('support', 'Support')], max_length=30, null=True, verbose_name='organization role')),
                ('project_role', waldur_core.structure.models.ProjectRole(blank=True, choices=[('admin', 'Administrator'), ('manager', 'Manager'), ('support', 'Support')], max_length=30, null=True)),
                ('state', models.CharField(choices=[('accepted', 'Accepted'), ('canceled', 'Canceled'), ('pending', 'Pending'), ('expired', 'Expired')], default='pending', max_length=8)),
                ('link_template', models.CharField(help_text='The template must include {uuid} parameter e.g. http://example.com/invitation/{uuid}', max_length=255)),
                ('email', models.EmailField(help_text='Invitation link will be sent to this email. Note that user can accept invitation with different email.', max_length=254)),
                ('civil_number', models.CharField(blank=True, help_text='Civil number of invited user. If civil number is not defined any user can accept invitation.', max_length=50)),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='invitations', to='structure.Customer', verbose_name='organization')),
                ('project', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='invitations', to='structure.Project')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
repo_name: cmos3511/cmos_linux
path: python/op/op_site/proj_checker/views.py
language: Python
license: gpl-3.0
size: 442
score: 0.004525

from rest_framework import generics

from .models import Proj
from .serializers import ProjSerializer


# Create your views here.
class ProjList(generics.ListCreateAPIView):
    """be report project list"""
    queryset = Proj.objects.all()
    serializer_class = ProjSerializer


class ProjDetail(generics.RetrieveUpdateDestroyAPIView):
    """be report project detail"""
    queryset = Proj.objects.all()
    serializer_class = ProjSerializer
repo_name: zlsa/io_object_mu
path: import_mu.py
language: Python
license: gpl-2.0
size: 13,186
score: 0.003337

# vim:ts=4:et
# ##### BEGIN GPL LICENSE BLOCK #####
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU General Public License
#  as published by the Free Software Foundation; either version 2
#  of the License, or (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software Foundation,
#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

from struct import unpack
import os.path
from math import pi, sqrt

import bpy
from bpy_extras.object_utils import object_data_add
from mathutils import Vector, Matrix, Quaternion

from .mu import MuEnum, Mu, MuColliderMesh, MuColliderSphere, MuColliderCapsule
from .mu import MuColliderBox, MuColliderWheel
from .shader import make_shader
from . import collider, properties


def create_uvs(mu, uvs, mesh, name):
    uvlay = mesh.uv_textures.new(name)
    uvloop = mesh.uv_layers[name]
    for i, uvl in enumerate(uvloop.data):
        v = mesh.loops[i].vertex_index
        uvl.uv = uvs[v]


def create_mesh(mu, mumesh, name):
    mesh = bpy.data.meshes.new(name)
    faces = []
    for sm in mumesh.submeshes:
        faces.extend(sm)
    mesh.from_pydata(mumesh.verts, [], faces)
    if mumesh.uvs:
        create_uvs(mu, mumesh.uvs, mesh, name + ".UV")
    if mumesh.uv2s:
        create_uvs(mu, mumesh.uv2s, mesh, name + ".UV2")
    return mesh


def create_mesh_object(name, mesh, transform):
    obj = bpy.data.objects.new(name, mesh)
    obj.rotation_mode = 'QUATERNION'
    if transform:
        obj.location = Vector(transform.localPosition)
        obj.rotation_quaternion = Quaternion(transform.localRotation)
        obj.scale = Vector(transform.localScale)
    else:
        obj.location = Vector((0, 0, 0))
        obj.rotation_quaternion = Quaternion((1, 0, 0, 0))
        obj.scale = Vector((1, 1, 1))
    bpy.context.scene.objects.link(obj)
    return obj


def copy_spring(dst, src):
    dst.spring = src.spring
    dst.damper = src.damper
    dst.targetPosition = src.targetPosition


def copy_friction(dst, src):
    dst.extremumSlip = src.extremumSlip
    dst.extremumValue = src.extremumValue
    dst.asymptoteSlip = src.asymptoteSlip
    dst.extremumValue = src.extremumValue
    dst.stiffness = src.stiffness


def create_light(mu, mulight, transform):
    ltype = ('SPOT', 'SUN', 'POINT', 'AREA')[mulight.type]
    light = bpy.data.lamps.new(transform.name, ltype)
    light.color = mulight.color[:3]
    light.distance = mulight.range
    light.energy = mulight.intensity
    if ltype == 'SPOT' and hasattr(mulight, "spotAngle"):
        light.spot_size = mulight.spotAngle * pi / 180
    obj = bpy.data.objects.new(transform.name, light)
    obj.rotation_mode = 'QUATERNION'
    obj.location = Vector(transform.localPosition)
    # Blender points spotlights along local -Z, unity along local +Z
    # which is Blender's +Y, so rotate 90 degrees around local X to
    # go from Unity to Blender
    rot = Quaternion((0.5**0.5, 0.5**0.5, 0, 0))
    obj.rotation_quaternion = rot * Quaternion(transform.localRotation)
    obj.scale = Vector(transform.localScale)
    properties.SetPropMask(obj.muproperties.cullingMask, mulight.cullingMask)
    bpy.context.scene.objects.link(obj)
    return obj


property_map = {
    "m_LocalPosition.x": ("location", 0, 1),
    "m_LocalPosition.y": ("location", 2, 1),
    "m_LocalPosition.z": ("location", 1, 1),
    "m_LocalRotation.x": ("rotation_quaternion", 1, -1),
    "m_LocalRotation.y": ("rotation_quaternion", 3, -1),
    "m_LocalRotation.z": ("rotation_quaternion", 2, -1),
    "m_LocalRotation.w": ("rotation_quaternion", 0, 1),
    "m_LocalScale.x": ("scale", 0, 1),
    "m_LocalScale.y": ("scale", 2, 1),
    "m_LocalScale.z": ("scale", 1, 1),
}


def create_fcurve(action, curve):
    try:
        dp, ind, mult = property_map[curve.property]
    except KeyError:
        print("%s: Unknown property: %s" % (curve.path, curve.property))
        return False
    fps = bpy.context.scene.render.fps
    fc = action.fcurves.new(data_path=dp, index=ind)
    fc.keyframe_points.add(len(curve.keys))
    for i, key in enumerate(curve.keys):
        x, y = key.time * fps, key.value * mult
        fc.keyframe_points[i].co = x, y
        fc.keyframe_points[i].handle_left_type = 'FREE'
        fc.keyframe_points[i].handle_right_type = 'FREE'
        if i > 0:
            dist = (key.time - curve.keys[i - 1].time) / 3
            dx, dy = dist * fps, key.tangent[0] * dist * mult
        else:
            dx, dy = 10, 0.0
        fc.keyframe_points[i].handle_left = x - dx, y - dy
        if i < len(curve.keys) - 1:
            dist = (curve.keys[i + 1].time - key.time) / 3
            dx, dy = dist * fps, key.tangent[1] * dist * mult
        else:
            dx, dy = 10, 0.0
        fc.keyframe_points[i].handle_right = x + dx, y + dy
    return True


def create_action(mu, path, clip):
    #print(clip.name)
    actions = {}
    for curve in clip.curves:
        if not curve.path:
            #FIXME need to look into this more as I'm not sure if the animation
            # is broken or if the property is somewhere weird
            continue
        name = ".".join([clip.name, curve.path])
        if name not in actions:
            mu_path = "/".join([path, curve.path])
            try:
                obj = mu.objects[mu_path]
            except KeyError:
                print("Unknown path: %s" % (mu_path))
                continue
            actions[name] = bpy.data.actions.new(name), obj
        act, obj = actions[name]
        if not create_fcurve(act, curve):
            continue
    for name in actions:
        act, obj = actions[name]
        if not obj.animation_data:
            obj.animation_data_create()
        track = obj.animation_data.nla_tracks.new()
        track.name = clip.name
        track.strips.new(act.name, 1.0, act)


def create_collider(mu, muobj):
    col = muobj.collider
    name = muobj.transform.name
    if type(col) == MuColliderMesh:
        name = name + ".collider"
        mesh = create_mesh(mu, col.mesh, name)
    elif type(col) == MuColliderSphere:
        mesh = collider.sphere(name, col.center, col.radius)
    elif type(col) == MuColliderCapsule:
        mesh = collider.capsule(name, col.center, col.radius, col.height, col.direction)
    elif type(col) == MuColliderBox:
        mesh = collider.box(name, col.center, col.size)
    elif type(col) == MuColliderWheel:
        mesh = collider.wheel(name, col.center, col.radius)
    obj = create_mesh_object(name, mesh, None)
    obj.muproperties.isTrigger = False
    if type(col) != MuColliderWheel:
        obj.muproperties.isTrigger = col.isTrigger
    if type(col) == MuColliderMesh:
        obj.muproperties.collider = 'MU_COL_MESH'
    elif type(col) == MuColliderSphere:
        obj.muproperties.collider = 'MU_COL_SPHERE'
        obj.muproperties.radius = col.radius
        obj.muproperties.center = col.center
    elif type(col) == MuColliderCapsule:
        obj.muproperties.collider = 'MU_COL_CAPSULE'
        obj.muproperties.radius = col.radius
        obj.muproperties.height = col.height
        obj.muproperties.direction = properties.dir_map[col.direction]
        obj.muproperties.center = col.center
    elif type(col) == MuColliderBox:
        obj.muproperties.collider = 'MU_COL_BOX'
        obj.muproperties.size = col.size
        obj.muproperties.center = col.center
    elif type(col) == MuColliderWheel:
        obj.muproperties.collider = 'MU_COL_WHEEL'
        obj.muproperties.radius = col.radius
        obj.muproperties.suspensionDistance = col.suspensionDistance
        obj.muproperties.center = col.center
        copy_spring(obj.muproperties.suspensionSpring, col.suspensionSpring)
        copy_friction(
repo_name: mupi/tecsaladeaula
path: core/tests/test_views.py
language: Python
license: agpl-3.0
size: 13,951
score: 0.00423

# -*- coding: utf-8 -*-
import pytest
from model_mommy import mommy

from conftest import create_user
from core.models import Class, Course


@pytest.mark.django_db
def test_lesson(admin_client):
    lesson = mommy.make('Lesson', slug='lesson', status='published')
    response = admin_client.get('/course/' + lesson.course.slug + '/lesson/' + lesson.slug + '/')
    assert response.status_code == 200
    assert lesson.name.encode('utf-8') in response.content


def assign_professor_to_course(course, existing_professor=None, new_professor_username=None, role=None):
    """
    :param existing_professor: Existing object TimtecUser
    :param new_professor_user_name: Non-existing professor username, if not defined, it will be created
    :param course: The course to assign professor to
    :param role: Role to assign to professor
    :return: The created/existing professor
    """
    if existing_professor is not None:
        professor = existing_professor
    else:
        professor = create_user(new_professor_username)
    mommy.make('CourseProfessor', user=professor, course=course, role=role)
    return professor


@pytest.mark.django_db
def test_assistant_professor_cannot_change_class_professor(client):
    course = mommy.make('Course', slug='dbsql', name='Test course name')
    coordinator_professor = assign_professor_to_course(course, new_professor_username='coordinator_professor', role='coordinator')
    assistant_professor = assign_professor_to_course(course, new_professor_username='assistant_professor', role='assistant')
    another_assistant = assign_professor_to_course(course, new_professor_username='another_assistant', role='assistant')

    klass = mommy.make('Class', name='Test class name', course=course, assistant=coordinator_professor)

    client.login(username=assistant_professor.username, password='password')
    response = client.post('/class/' + str(klass.id) + '/', {'name': 'A class', 'assistant': another_assistant.id})
    assert response.status_code == 403


@pytest.mark.django_db
def test_coordinator_professor_can_change_class_professor(client):
    course = mommy.make('Course', slug='dbsql', name='Test course name')
    coordinator_professor = assign_professor_to_course(course, new_professor_username='coordinator_professor', role='coordinator')
    assistant_professor = assign_professor_to_course(course, new_professor_username='assistant_professor', role='assistant')
    another_assistant = assign_professor_to_course(course, new_professor_username='another_assistant', role='assistant')

    klass = mommy.make('Class', name='A class', course=course, assistant=assistant_professor)

    client.login(username=coordinator_professor.username, password='password')
    response = client.post('/class/' + str(klass.id) + '/', {'name': 'A class', 'assistant': another_assistant.id})
    # The page redirects elsewhere on success
    assert response.status_code == 302
    changed_class = Class.objects.get(id=klass.id)
    assert changed_class.assistant == another_assistant


@pytest.mark.django_db
def test_assistant_professor_can_change_other_data_than_professor_on_its_own_class(client):
    course = mommy.make('Course', slug='dbsql', name='Another course')
    assign_professor_to_course(course, new_professor_username='coordinator_professor', role='coordinator')
    assistant_professor = assign_professor_to_course(course, new_professor_username='assistant_professor', role='assistant')

    klass = mommy.make('Class', name='Old class name', course=course, assistant=assistant_professor)

    client.login(username=assistant_professor.username, password='password')
    response = client.post('/class/' + str(klass.id) + '/', {'name': 'New class name', 'assistant': assistant_professor.id})
    # The page redirects elsewhere on success
    assert response.status_code == 302
    changed_class = Class.objects.get(id=klass.id)
    assert changed_class.name == 'New class name'


@pytest.mark.django_db
def test_get_courses_user_has_role(client):
    course = mommy.make('Course', slug='dbsql', name='A course')
    another_course = mommy.make('Course', slug='mysql', name='Another course')
    course_whose_professor_coordinate = mommy.make('Course', slug='coordinatedcourse', name='Course whose professor coordinate')
    another_course_whose_professor_coordinate = mommy.make('Course', slug='anothercoordinatedcourse', name='Another course whose professor coordinate')

    professor1 = assign_professor_to_course(course, new_professor_username='professor1', role='assistant')
    assign_professor_to_course(another_course, existing_professor=professor1, role='assistant')
    assign_professor_to_course(course_whose_professor_coordinate, existing_professor=professor1, role='coordinator')
    assign_professor_to_course(another_course_whose_professor_coordinate, existing_professor=professor1, role='coordinator')

    client.login(username=professor1.username, password='password')
    response = client.get('/my-courses/')
    assert response.status_code == 200
    courses_user_assist = response.context[-1]['courses_user_assist']
    assert courses_user_assist
    courses_user_coordinate = response.context[-1]['courses_user_coordinate']
    assert courses_user_coordinate


@pytest.mark.django_db
def test_cannot_remove_courses_default_class(admin_client):
    course = mommy.make('Course', slug='mysql', name='A course')
    klass = course.default_class

    response = admin_client.post('/class/' + str(klass.id) + '/delete/')
    assert response.status_code == 403
    assert Class.objects.filter(id=klass.id).exists()
    assert Course.objects.filter(id=course.id).exists()


@pytest.mark.django_db
def test_course_average_lessons_users_progress_should_return_zero_with_no_students_on_course():
    course = mommy.make('Course', slug='dbsql', name='A course')
    lesson1 = mommy.make('Lesson', course=course, slug='lesson1')
    mommy.make('Lesson', course=course, slug='lesson2')
    mommy.make('Unit', lesson=lesson1, title='Title 1')

    progress_list = course.avg_lessons_users_progress()
    assert progress_list[0]['slug'] == 'lesson1'
    assert progress_list[0]['progress'] == 0
    assert progress_list[1]['slug'] == 'lesson2'
    assert progress_list[1]['progress'] == 0


@pytest.mark.django_db
def test_user_courses_cannot_show_assistant_and_coordinator_tabs_for_students(client):
    student = create_user('student')

    client.login(username=student.username, password='password')
    response = client.get('/my-courses/')
    assert 'href="#course-as-teacher"' not in response.content
    assert 'href="#course-as-coordinator"' not in response.content


@pytest.mark.django_db
def test_user_courses_must_show_assistant_tab_for_assistant(client):
    course = mommy.make('Course', slug='dbsql', name='A course')
    professor = assign_professor_to_course(course, new_professor_username='assistant_professor', role='assistant')

    client.login(username=professor.username, password='password')
    response = client.get('/my-courses/')
    assert 'href="#course-as-teacher"' in response.content
    assert 'href="#course-as-coordinator"' not in response.content


@pytest.mark.django_db
def test_user_courses_must_show_coordinator_tab_for_coordinator(client):
    course = mommy.make('Course', slug='dbsql', name='A course')
    professor = assign_professor_to_course(course, new_professor_username='coordinator_professor', role='coordinator')

    client.login(username=professor.username, password='password')
    response = client.get('/my-courses/')
    assert 'href="#course-as-teacher"' not in response.content
    assert 'href="#course-as-coordin
repo_name: ProfessorX/Config
path: .PyCharm30/system/python_stubs/-1247971765/apt_pkg/Cdrom.py
language: Python
license: gpl-2.0
size: 1,751
score: 0.006853

# encoding: utf-8
# module apt_pkg
# from /usr/lib/python3/dist-packages/apt_pkg.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
"""
Classes and functions wrapping the apt-pkg library.

The apt_pkg module provides several classes and functions for
accessing the functionality provided by the apt-pkg library.
Typical uses might include reading APT index files and configuration
files and installing or removing packages.
"""

# no imports

from .object import object


class Cdrom(object):
    """
    Cdrom()

    Cdrom objects can be used to identify Debian installation media
    and to add them to /etc/apt/sources.list.
    """
    def add(self, progress):  # real signature unknown; restored from __doc__
        """
        add(progress: apt_pkg.CdromProgress) -> bool

        Add the given CD-ROM to the sources.list. Return True on success;
        raise an error on failure or return False.
        """
        return False

    def ident(self, progress):  # real signature unknown; restored from __doc__
        """
        ident(progress: apt_pkg.CdromProgress) -> str

        Try to identify the CD-ROM and if successful return the hexadecimal
        CDROM-ID (and a integer version suffix separated by -) as a string.
        Otherwise, return None or raise an error.

        The ID is created by hashing all file and directory names on the
        CD-ROM and appending the version.
        """
        return ""

    def __init__(self):  # real signature unknown; restored from __doc__
        pass

    @staticmethod  # known case of __new__
    def __new__(*args, **kwargs):  # real signature unknown
        """ Create and return a new object. See help(type) for accurate signature. """
        pass
repo_name: apsarath/pyNN
path: model/crl/deepcorrnet2.py
language: Python
license: apache-2.0
size: 24,479
score: 0.007394

__author__ = 'Sarath'

from pyNN import *
import time
from pyNN.optimization.optimization import *
from pyNN.util.Initializer import *
import pickle


class DeepCorrNet2(object):

    def init(self, numpy_rng, theano_rng=None, l_rate=0.01, optimization="sgd", tied=False,
             n_visible_left=None, n_visible_right=None, n_hidden=None, n_hidden2=None, n_hidden3=None,
             lamda=5, W_left=None, W_right=None, b_left=None, b_right=None,
             W_left_prime=None, W_right_prime=None, b_prime_left=None, b_prime_right=None,
             W_left2=None, W_right2=None, b_left2=None, b_right2=None,
             W_left_prime2=None, W_right_prime2=None, b_prime_left2=None, b_prime_right2=None,
             W_left3=None, W_right3=None, b3=None,
             W_left_prime3=None, W_right_prime3=None, b_prime_left3=None, b_prime_right3=None,
             input_left=None, input_right=None,
             hidden_activation="sigmoid", output_activation="sigmoid",
             loss_fn="squarrederror", op_folder=None):

        self.numpy_rng = numpy_rng
        if not theano_rng:
            theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
        self.theano_rng = theano_rng

        self.optimization = optimization
        self.l_rate = l_rate

        self.optimizer = get_optimizer(self.optimization, self.l_rate)
        self.Initializer = Initializer(self.numpy_rng)

        self.n_visible_left = n_visible_left
        self.n_visible_right = n_visible_right
        self.n_hidden = n_hidden
        self.n_hidden2 = n_hidden2
        self.n_hidden3 = n_hidden3
        self.lamda = lamda
        self.hidden_activation = hidden_activation
        self.output_activation = output_activation
        self.loss_fn = loss_fn
        self.tied = tied
        self.op_folder = op_folder

        self.W_left = self.Initializer.fan_based_sigmoid("W_left", W_left, n_visible_left, n_hidden)
        self.optimizer.register_variable("W_left", n_visible_left, n_hidden)

        self.W_right = self.Initializer.fan_based_sigmoid("W_right", W_right, n_visible_right, n_hidden)
        self.optimizer.register_variable("W_right", n_visible_right, n_hidden)

        self.W_left2 = self.Initializer.fan_based_sigmoid("W_left2", W_left2, n_hidden, n_hidden2)
        self.optimizer.register_variable("W_left2", n_hidden, n_hidden2)

        self.W_right2 = self.Initializer.fan_based_sigmoid("W_right2", W_right2, n_hidden, n_hidden2)
        self.optimizer.register_variable("W_right2", n_hidden, n_hidden2)

        self.W_left3 = self.Initializer.fan_based_sigmoid("W_left3", W_left3, n_hidden2, n_hidden3)
        self.optimizer.register_variable("W_left3", n_hidden2, n_hidden3)

        self.W_right3 = self.Initializer.fan_based_sigmoid("W_right3", W_right3, n_hidden2, n_hidden3)
        self.optimizer.register_variable("W_right3", n_hidden2, n_hidden3)

        if not tied:
            self.W_left_prime = self.Initializer.fan_based_sigmoid("W_left_prime", W_left_prime, n_hidden, n_visible_left)
            self.optimizer.register_variable("W_left_prime", n_hidden, n_visible_left)
            self.W_right_prime = self.Initializer.fan_based_sigmoid("W_right_prime", W_right_prime, n_hidden, n_visible_right)
            self.optimizer.register_variable("W_right_prime", n_hidden, n_visible_right)
            self.W_left_prime2 = self.Initializer.fan_based_sigmoid("W_left_prime2", W_left_prime2, n_hidden2, n_hidden)
            self.optimizer.register_variable("W_left_prime2", n_hidden2, n_hidden)
            self.W_right_prime2 = self.Initializer.fan_based_sigmoid("W_right_prime2", W_right_prime2, n_hidden2, n_hidden)
            self.optimizer.register_variable("W_right_prime2", n_hidden2, n_hidden)
            self.W_left_prime3 = self.Initializer.fan_based_sigmoid("W_left_prime3", W_left_prime3, n_hidden3, n_hidden2)
            self.optimizer.register_variable("W_left_prime3", n_hidden3, n_hidden2)
            self.W_right_prime3 = self.Initializer.fan_based_sigmoid("W_right_prime3", W_right_prime3, n_hidden3, n_hidden2)
            self.optimizer.register_variable("W_right_prime3", n_hidden3, n_hidden2)
        else:
            self.W_left_prime = self.W_left.T
            self.W_right_prime = self.W_right.T
            self.W_left_prime2 = self.W_left2.T
            self.W_right_prime2 = self.W_right2.T
            self.W_left_prime3 = self.W_left3.T
            self.W_right_prime3 = self.W_right3.T

        self.b_left = self.Initializer.zero_vector("b_left", b_left, n_hidden)
        self.optimizer.register_variable("b_left", 1, n_hidden)
        self.b_right = self.Initializer.zero_vector("b_right", b_right, n_hidden)
        self.optimizer.register_variable("b_right", 1, n_hidden)
        self.b_prime_left = self.Initializer.zero_vector("b_prime_left", b_prime_left, n_visible_left)
        self.optimizer.register_variable("b_prime_left", 1, n_visible_left)
        self.b_prime_right = self.Initializer.zero_vector("b_prime_right", b_prime_right, n_visible_right)
        self.optimizer.register_variable("b_prime_right", 1, n_visible_right)
        self.b_left2 = self.Initializer.zero_vector("b_left2", b_left2, n_hidden2)
        self.optimizer.register_variable("b_left2", 1, n_hidden2)
        self.b_right2 = self.Initializer.zero_vector("b_right2", b_right2, n_hidden2)
        self.optimizer.register_variable("b_right2", 1, n_hidden2)
        self.b_prime_left2 = self.Initializer.zero_vector("b_prime_left2", b_prime_left2, n_hidden)
        self.optimizer.register_variable("b_prime_left2", 1, n_hidden)
        self.b_prime_right2 = self.Initializer.zero_vector("b_prime_right2", b_prime_right2, n_hidden)
        self.optimizer.register_variable("b_prime_right2", 1, n_hidden)
        self.b3 = self.Initializer.zero_vector("b3", b3, n_hidden3)
        self.optimizer.register_variable("b3", 1, n_hidden3)
        self.b_prime_left3 = self.Initializer.zero_vector("b_prime_left3", b_prime_left3, n_hidden2)
        self.optimizer.register_variable("b_prime_left3", 1, n_hidden2)
        self.b_prime_right3 = self.Initializer.zero_vector("b_prime_right3", b_prime_right3, n_hidden2)
        self.optimizer.register_variable("b_prime_right3", 1, n_hidden2)

        if input_left is None:
            self.x_left = T.matrix(name='x_left')
        else:
            self.x_left = input_left

        if input_right is None:
            self.x_right = T.matrix(name='x_right')
        else:
            self.x_right = input_right

        if tied:
            self.params = [self.W_left, self.W_right, self.b_left, self.b_right,
                           self.b_prime_left, self.b_prime_right,
                           self.W_left2, self.W_right2, self.b_left2, self.b_right2,
                           self.b_prime_left2, self.b_prime_right2,
                           self.W_left3, self.W_right3, self.b3,
                           self.b_prime_left3, self.b_prime_right3]
            self.param_names = ["W_left", "W_right", "b_left", "b_right",
                                "b_prime_left", "b_prime_right",
                                "W_left2", "W_right2", "b_left2", "b_right2",
                                "b_prime_left2", "b_prime_right2",
                                "W_left3", "W_right3", "b3",
                                "b_prime_left3", "b_prime_right3"]
        else:
            self.params = [self.W_left, self.W_right, self.b_left, self.b_right,
                           self.b_prime_left, self.b_prime_right,
                           self.W_left_prime, self.W_right_prime,
                           self.W_left2, self.W_right2, self.b_left2, self.b_right2,
                           self.b_prime_left2, self.b_prime_right2,
                           self.W_left_prime2, self.W_right_prime2,
                           self.W_left3, self.W_right3, self.b3,
                           self.b_prime_left3, self.b_prime_right3,
                           self.W_left_prime3, self.W_right_prime3]
            self.param_names = ["W_left", "W_right", "b_left", "b_right",
                                "b_prime_left", "b_prime_right",
                                "W_left_prime", "W_right_prime",
                                "W_left2", "W_right2", "b_left2", "b_right2",
                                "b_prime_left2", "b_prime_right2",
                                "W_left_prime2", "W_right_prime2",
                                "W_left3", "W_right3", "b3",
                                "b_prime_left3", "b_prime_right3",
                                "W_left_prime3", "W_right_prime3"]

        self.proj_from_left = theano.function([self.x_left], self.project_from_left())
        self.proj_from_right = theano.function([self.x_right], self.project_from_right())
        self.recon_from_left = theano.function([self.x_left], self.reconstruct_from_left())
        self.recon_from_right = theano.function([self.x_right], self.reconstruct_from_right())

        self.save_params()

    def train_common(self, mtype="1111"):
        y1_pre = T
repo_name: vasily-v-ryabov/pywinauto
path: pywinauto/tests/miscvalues.py
language: Python
license: bsd-3-clause
size: 4,700
score: 0.001064

# GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
#   contributors may be used to endorse or promote products derived from
#   this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Miscellaneous Control properties Test

**What is checked**
This checks various values related to a control in windows. The values
tested are:

class_name    The class type of the control
style         The Style of the control (GetWindowLong)
exstyle       The Extended Style of the control (GetWindowLong)
help_id       The Help ID of the control (GetWindowLong)
control_id    The Control ID of the control (GetWindowLong)
user_data     The User Data of the control (GetWindowLong)
Visibility    Whether the control is visible or not

**How is it checked**
After retrieving the information for the control we compare it to the same
information from the reference control.

**When is a bug reported**
If the information does not match then a bug is reported.

**Bug Extra Information**
The bug contains the following extra information:

Name        Description
ValueType   What value is incorrect (see above), String
Ref         The reference value converted to a string, String
Loc         The localised value converted to a string, String

**Is Reference dialog needed**
This test will not run if the reference controls are not available.

**False positive bug reports**
Some values can change easily without any bug being caused, for example
User Data is actually meant for programmers to store information for the
control and this can change every time the software is run.

**Test Identifier**
The identifier for this test/bug is "MiscValues"
"""

testname = "MiscValues"

import six


def MiscValuesTest(windows):
    """Return the bugs from checking miscellaneous values of a control"""
    bugs = []
    for win in windows:
        if not win.ref:
            continue

        diffs = {}

        if win.class_name() != win.ref.class_name():
            diffs[u"class_name"] = (win.class_name(), win.ref.class_name())

        if win.style() != win.ref.style():
            diffs[u"style"] = (win.style(), win.ref.style())

        if win.exstyle() != win.ref.exstyle():
            diffs[u"exstyle"] = (win.exstyle(), win.ref.exstyle())

        if win.context_help_id() != win.ref.context_help_id():
            diffs[u"help_id"] = (win.context_help_id(), win.ref.context_help_id())

        if win.control_id() != win.ref.control_id():
            diffs[u"control_id"] = (win.control_id(), win.ref.control_id())

        if win.is_visible() != win.ref.is_visible():
            diffs[u"Visibility"] = (win.is_visible(), win.ref.is_visible())

        if win.user_data() != win.ref.user_data():
            diffs[u"user_data"] = (win.user_data(), win.ref.user_data())

        for diff, vals in diffs.items():
            bugs.append((
                [win, ],
                {
                    "ValueType": diff,
                    "Ref": six.text_type(vals[1]),
                    "Loc": six.text_type(vals[0]),
                },
                testname,
                0,)
            )
    return bugs
repo_name: drallensmith/neat-python
path: neat/reporting.py
language: Python
license: bsd-3-clause
size: 5,924
score: 0.002363

"""
Makes possible reporter classes, which are triggered on particular events and
may provide information to the user, may do something else such as
checkpointing, or may do both.
"""
from __future__ import division, print_function

import time

from neat.math_util import mean, stdev
from neat.six_util import itervalues, iterkeys

# TODO: Add a curses-based reporter.


class ReporterSet(object):
    """
    Keeps track of the set of reporters
    and gives methods to dispatch them at appropriate points.
    """
    def __init__(self):
        self.reporters = []

    def add(self, reporter):
        self.reporters.append(reporter)

    def remove(self, reporter):
        self.reporters.remove(reporter)

    def start_generation(self, gen):
        for r in self.reporters:
            r.start_generation(gen)

    def end_generation(self, config, population, species_set):
        for r in self.reporters:
            r.end_generation(config, population, species_set)

    def post_evaluate(self, config, population, species, best_genome):
        for r in self.reporters:
            r.post_evaluate(config, population, species, best_genome)

    def post_reproduction(self, config, population, species):
        for r in self.reporters:
            r.post_reproduction(config, population, species)

    def complete_extinction(self):
        for r in self.reporters:
            r.complete_extinction()

    def found_solution(self, config, generation, best):
        for r in self.reporters:
            r.found_solution(config, generation, best)

    def species_stagnant(self, sid, species):
        for r in self.reporters:
            r.species_stagnant(sid, species)

    def info(self, msg):
        for r in self.reporters:
            r.info(msg)


class BaseReporter(object):
    """Definition of the reporter interface expected by ReporterSet."""
    def start_generation(self, generation):
        pass

    def end_generation(self, config, population, species_set):
        pass

    def post_evaluate(self, config, population, species, best_genome):
        pass

    def post_reproduction(self, config, population, species):
        pass

    def complete_extinction(self):
        pass

    def found_solution(self, config, generation, best):
        pass

    def species_stagnant(self, sid, species):
        pass

    def info(self, msg):
        pass


class StdOutReporter(BaseReporter):
    """Uses `print` to output information about the run; an example reporter class."""
    def __init__(self, show_species_detail):
        self.show_species_detail = show_species_detail
        self.generation = None
        self.generation_start_time = None
        self.generation_times = []
        self.num_extinctions = 0

    def start_generation(self, generation):
        self.generation = generation
        print('\n ****** Running generation {0} ****** \n'.format(generation))
        self.generation_start_time = time.time()

    def end_generation(self, config, population, species_set):
        ng = len(population)
        ns = len(species_set.species)
        if self.show_species_detail:
            print('Population of {0:d} members in {1:d} species:'.format(ng, ns))
            sids = list(iterkeys(species_set.species))
            sids.sort()
            print("   ID   age  size  fitness  adj fit  stag")
            print("  ====  ===  ====  =======  =======  ====")
            for sid in sids:
                s = species_set.species[sid]
                a = self.generation - s.created
                n = len(s.members)
                f = "--" if s.fitness is None else "{:.1f}".format(s.fitness)
                af = "--" if s.adjusted_fitness is None else "{:.3f}".format(s.adjusted_fitness)
                st = self.generation - s.last_improved
                print(
                    "  {: >4}  {: >3}  {: >4}  {: >7}  {: >7}  {: >4}".format(sid, a, n, f, af, st))
        else:
            print('Population of {0:d} members in {1:d} species'.format(ng, ns))

        elapsed = time.time() - self.generation_start_time
        self.generation_times.append(elapsed)
        self.generation_times = self.generation_times[-10:]
        average = sum(self.generation_times) / len(self.generation_times)
        print('Total extinctions: {0:d}'.format(self.num_extinctions))
        if len(self.generation_times) > 1:
            print("Generation time: {0:.3f} sec ({1:.3f} average)".format(elapsed, average))
        else:
            print("Generation time: {0:.3f} sec".format(elapsed))

    def post_evaluate(self, config, population, species, best_genome):
        # pylint: disable=no-self-use
        fitnesses = [c.fitness for c in itervalues(population)]
        fit_mean = mean(fitnesses)
        fit_std = stdev(fitnesses)
        best_species_id = species.get_species_id(best_genome.key)
        print('Population\'s average fitness: {0:3.5f} stdev: {1:3.5f}'.format(fit_mean, fit_std))
        print(
            'Best fitness: {0:3.5f} - size: {1!r} - species {2} - id {3}'.format(best_genome.fitness,
                                                                                 best_genome.size(),
                                                                                 best_species_id,
                                                                                 best_genome.key))

    def complete_extinction(self):
        self.num_extinctions += 1
        print('All species extinct.')

    def found_solution(self, config, generation, best):
        print('\nBest individual in generation {0} meets fitness threshold - complexity: {1!r}'.format(
            self.generation, best.size()))

    def species_stagnant(self, sid, species):
        if self.show_species_detail:
            print("\nSpecies {0} with {1} members is stagnated: removing it".format(sid, len(species.members)))

    def info(self, msg):
        print(msg)
repo_name: tuwiendsg/MELA
path: MELA-Extensions/MELA-ComplexCostEvaluationService/tests/mela-clients/emulateCost.py
language: Python
license: apache-2.0
size: 719
score: 0.044506

import urllib, urllib2, sys, httplib

url = "/MELA/REST_WS"
#HOST_IP="128.130.172.191:8180"
newName="EventProcessingTopology_STRATEGY_MELA_COST_RECOMMENDATION_EFFICIENCY_Larger_VMs"
HOST_IP="localhost:8480"

if __name__=='__main__':
    connection = httplib.HTTPConnection(HOST_IP)
    description_file = open("./20hstruct_LAST_ADDED_LARGER_VMS.xml", "r")
    body_content = description_file.read()
    headers={
        'Content-Type':'application/xml; charset=utf-8',
        'Accept':'application/json, multipart/related'
    }
    connection.request('PUT', url+'/service/emulate/'+newName, body=body_content, headers=headers,)
    result = connection.getresponse()
    print result.read()
repo_name: rabipanda/tensorflow
path: tensorflow/contrib/tpu/python/ops/tpu_ops.py
language: Python
license: apache-2.0
size: 3,909
score: 0.005628

# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Operations for TPUs."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import platform

from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops

if platform.system() != "Windows":
  # pylint: disable=wildcard-import,unused-import,g-import-not-at-top
  from tensorflow.contrib.tpu.ops import gen_tpu_ops
  from tensorflow.contrib.tpu.ops.gen_tpu_ops import *

  from tensorflow.contrib.util import loader
  from tensorflow.python.platform import resource_loader
  # pylint: enable=wildcard-import,unused-import,g-import-not-at-top

  _tpu_ops = loader.load_op_library(
      resource_loader.get_path_to_datafile("_tpu_ops.so"))

  @ops.RegisterGradient("CrossReplicaSum")
  def _cross_replica_sum_grad(op, grad):
    del op  # Unused
    # The gradient of a cross replica sum is also a cross-replica sum.
    return gen_tpu_ops.cross_replica_sum(grad)

  # This extra type checking exists to give a more helpful error message in
  # the common case that uint8 and int64 values are infed. Remove when both
  # types are supported.
  _SUPPORTED_INFEED_DTYPES = set([
      dtypes.bool, dtypes.int32, dtypes.bfloat16, dtypes.float32
  ])

  def infeed_dequeue(dtype, shape, name=None):
    """A placeholder op for a value that will be fed into the computation.

    Args:
      dtype: A `tf.DType`. The type of elements in the tensor.
      shape: A `tf.TensorShape` or list of `ints`. The shape of the tensor.
      name: A name for the operation (optional).

    Returns:
      A `Tensor` of type `dtype`.
      A tensor that will be provided using the infeed mechanism.

    Raises:
      TypeError: If 'dtype` is not a supported infeed type.
    """
    if dtype not in _SUPPORTED_INFEED_DTYPES:
      raise TypeError(
          "{} is not a supported TPU infeed type. Supported types are: "
          "{}".format(dtype, list(_SUPPORTED_INFEED_DTYPES)))

    return gen_tpu_ops.infeed_dequeue(dtype, shape, name=name)

  # pylint: disable=redefined-outer-name
  def infeed_dequeue_tuple(dtypes, shapes, name=None):
    """A placeholder op for values fed into the TPU simultaneously as a tuple.

    Args:
      dtypes: A list of `tf.DType`s that has length `>= 1`.
        The element types of each element in `outputs`.
      shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`).
        The shapes of each tensor in `outputs`.
      name: A name for the operation (optional).

    Returns:
      A list of `Tensor` objects of type `dtypes`.
      A list of tensors that will be provided using the infeed mechanism.

    Raises:
      TypeError: If a type in 'dtypes` is not a supported infeed type.
    """
    for dtype in dtypes:
      if dtype not in _SUPPORTED_INFEED_DTYPES:
        raise TypeError(
            "{} is not a supported TPU infeed type. Supported types are: "
            "{}".format(dtype, list(_SUPPORTED_INFEED_DTYPES)))
    return gen_tpu_ops.infeed_dequeue_tuple(dtypes, shapes, name=name)
  # pylint: enable=redefined-outer-name

else:
  # We have already built the appropriate libraries into the binary via CMake
  # if we have built contrib, so we don't need this
  pass
repo_name: pprett/statsmodels
path: examples/tsa/ex_dates.py
language: Python
license: bsd-3-clause
size: 1,296
score: 0.001543

"""
Using dates with timeseries models
"""
import statsmodels.api as sm
import numpy as np
import pandas

# Getting started
# ---------------

data = sm.datasets.sunspots.load()

# Right now an annual date series must be datetimes at the end of the year.
from datetime import datetime
dates = sm.tsa.datetools.dates_from_range('1700', length=len(data.endog))

# Using Pandas
# ------------

# Make a pandas TimeSeries or DataFrame
endog = pandas.TimeSeries(data.endog, index=dates)

# and instantiate the model
ar_model = sm.tsa.AR(endog, freq='A')
pandas_ar_res = ar_model.fit(maxlag=9, method='mle', disp=-1)

# Let's do some out-of-sample prediction
pred = pandas_ar_res.predict(start='2005', end='2015')
print pred

# Using explicit dates
# --------------------

ar_model = sm.tsa.AR(data.endog, dates=dates, freq='A')
ar_res = ar_model.fit(maxlag=9, method='mle', disp=-1)
pred = ar_res.predict(start='2005', end='2015')
print pred

# This just returns a regular array, but since the model has date information
# attached, you can get the prediction dates in a roundabout way.
print ar_res._data.predict_dates

# This attribute only exists if predict has been called. It holds the dates
# associated with the last call to predict.
#..TODO: should this be attached to the results instance?
repo_name: shawnadelic/shuup
path: shuup/admin/modules/manufacturers/views/list.py
language: Python
license: agpl-3.0
size: 872
score: 0

# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import Manufacturer


class ManufacturerListView(PicotableListView):
    model = Manufacturer
    default_columns = [
        Column(
            "name", _(u"Name"), sort_field="name", display="name",
            filter_config=TextFilter(
                filter_field="name",
                placeholder=_("Filter by name...")
            )
        ),
    ]
repo_name: alphagov/notifications-api
path: migrations/versions/0133_set_services_sms_prefix.py
language: Python
license: mit
size: 1,045
score: 0

import os
from app import config

"""
Revision ID: 0133_set_services_sms_prefix
Revises: 0132_add_sms_prefix_setting
Create Date: 2017-11-03 15:55:35.657488
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

revision = '0133_set_services_sms_prefix'
down_revision = '0132_add_sms_prefix_setting'

config = config.configs[os.environ['NOTIFY_ENVIRONMENT']]
default_sms_sender = config.FROM_NUMBER


def upgrade():
    op.execute("""
        update services set prefix_sms = True
        where id in (
            select service_id from service_sms_senders
            where is_default = True
            and sms_sender = '{}'
        )
    """.format(default_sms_sender))

    op.execute("""
        update services set prefix_sms = False
        where id in (
            select service_id from service_sms_senders
            where is_default = True
            and sms_sender != '{}'
        )
    """.format(default_sms_sender))


def downgrade():
    op.execute("""
        UPDATE services set prefix_sms = null
    """)
repo_name: BontaVlad/ExpirationDate
path: expirationDate/persons/constants.py
language: Python
license: mit
size: 599
score: 0

from django.utils.translation import ugettext_lazy as _


class PersonGender():
    NOT_SPECIFIED = 0
    MALE = 1
    FEMALE = 2

    GENDER_OPTIONS = (
        (NOT_SPECIFIED, _('Not specified')),
        (MALE, _('Male')),
        (FEMALE, _('Female')),
    )


class PersonReligion():
    NOT_SPECIFIED = 0
    CHRISTIANITY = 1
    ISLAM = 2
    HINDUISM = 3
    BUDDHISM = 4

    RELIGION_OPTIONS = (
        (NOT_SPECIFIED, _('Not specified')),
        (CHRISTIANITY, _('Christianity')),
        (ISLAM, _('Islam')),
        (HINDUISM, _('Hinduism')),
        (BUDDHISM, _('Buddhism')),
    )
ContextLab/hypertools
examples/chemtrails.py
Python
mit
575
0
# -*- coding: utf-8 -*-
"""
=============================
Chemtrails
=============================

The past trajectory of an animated plot can be visualized with the
chemtrails argument. This displays a low opacity version of the trace
behind the current points being plotted. This can be used in conjunction
with the precog argument to plot a low-opacity trace of the entire
timeseries.
"""

# Code source: Andrew Heusser
# License: MIT

# import
import hypertools as hyp

# load example data
geo = hyp.load('weights_avg')

# plot
geo.plot(animate=True, chemtrails=True)
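As the docstring notes, chemtrails combines with precog to also show the entire timeseries at low opacity; that is a one-argument change:

geo.plot(animate=True, chemtrails=True, precog=True)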
jiangzhuo/kbengine
kbe/src/lib/python/Lib/test/test_codecmaps_tw.py
Python
lgpl-3.0
831
0.00361
#
# test_codecmaps_tw.py
#   Codec mapping tests for ROC encodings
#

from test import support
from test import multibytecodec_support
import unittest

class TestBIG5Map(multibytecodec_support.TestBase_Mapping,
                  unittest.TestCase):
    encoding = 'big5'
    mapfileurl = 'http://www.unicode.org/Public/MAPPINGS/OBSOLETE/' \
                 'EASTASIA/OTHER/BIG5.TXT'

class TestCP950Map(multibytecodec_support.TestBase_Mapping,
                   unittest.TestCase):
    encoding = 'cp950'
    mapfileurl = 'http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/' \
                 'WINDOWS/CP950.TXT'
    pass_enctest = [
        (b'\xa2\xcc', '\u5341'),
        (b'\xa2\xce', '\u5345'),
    ]
    codectests = (
        (b"\xFFxy", "replace", "\ufffdxy"),
    )

if __name__ == "__main__":
    unittest.main()
singer-io/singer-python
tests/test_statediff.py
Python
apache-2.0
2,807
0.002494
import unittest

import singer.statediff as statediff
from singer.statediff import Add, Remove, Change


class TestPaths(unittest.TestCase):

    def test_simple_dict(self):
        self.assertEqual(
            [(('a',), 1), (('b',), 2)],
            statediff.paths({'a': 1, 'b': 2}))

    def test_nested_dict(self):
        self.assertEqual(
            [(('a', 'b'), 1), (('a', 'c'), 2), (('d', 'e'), 3)],
            statediff.paths(
                {
                    'a': {
                        'b': 1,
                        'c': 2
                    },
                    'd': {
                        'e': 3
                    }
                }
            )
        )

    def test_simple_array(self):
        self.assertEqual(
            [((0,), 'blue'), ((1,), 'green')],
            statediff.paths(['blue', 'green']))

    def test_nested_array(self):
        self.assertEqual(
            [((0, 0), 'blue'), ((0, 1), 'red'), ((1, 0), 'green')],
            statediff.paths([['blue', 'red'], ['green']]))

    def test_arrays_in_dicts(self):
        self.assertEqual(
            [(('a', 0), 'blue'), (('a', 1), 'red'), (('b', 0), 'green')],
            statediff.paths(
                {
                    'a': ['blue', 'red'],
                    'b': ['green']
                }
            )
        )

    def test_none(self):
        self.assertEqual([], statediff.paths(None))


class TestDiff(unittest.TestCase):

    def test_add(self):
        self.assertEqual(
            [Add(('a',), 1), Add(('b',), 2)],
            statediff.diff({}, {'a': 1, 'b': 2}))

    def test_remove(self):
        self.assertEqual(
            [Remove(('a',), 1), Remove(('b',), 2)],
            statediff.diff({'a': 1, 'b': 2}, {}))

    def test_change(self):
        self.assertEqual(
            [Change(('a',), 1, 100), Change(('b',), 2, 200)],
            statediff.diff({'a': 1, 'b': 2}, {'a': 100, 'b': 200}))

    def test_null_input_for_old(self):
        self.assertEqual(
            [Add(('a',), 1)],
            statediff.diff(None, {'a': 1}))

    def test_null_input_for_new(self):
        self.assertEqual(
            [Remove(('a',), 1)],
            statediff.diff({'a': 1}, None))

    def test_null_input_for_both(self):
        self.assertEqual([], statediff.diff(None, None))

    def test_null_at_leaf(self):
        self.assertEqual(
            [Change(('a',), 1, None),
             Change(('b',), None, 2)],
            statediff.diff({'a': 1, 'b': None}, {'a': None, 'b': 2}))
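A usage sketch grounded in the behaviour these tests pin down (the bookmark payload is invented, and the diff records are assumed to sort like the tuples their positional equality above suggests):

import singer.statediff as statediff
from singer.statediff import Add, Change

old = {'bookmarks': {'users': '2020-01-01'}}
new = {'bookmarks': {'users': '2020-02-01', 'orders': 7}}

assert sorted(statediff.diff(old, new)) == sorted([
    Add(('bookmarks', 'orders'), 7),
    Change(('bookmarks', 'users'), '2020-01-01', '2020-02-01'),
])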
wjo1212/aliyun-log-python-sdk
aliyun/log/logclient_core.py
Python
mit
6,959
0.003592
import six, json

from .logclient_operator import list_more
from .logexception import LogException
from .pluralize import pluralize
from .common_response import *

DEFAULT_MAX_LIST_PAGING_SIZE = 500


def create_entity(entity_name, root_resource=None):
    def fn(self, project, detail):
        """ Create {entity_title}.
        Unsuccessful operation will cause an LogException.

        :type project: string
        :param project: project name

        :type detail: dict/string
        :param detail: json string

        :return: CreateEntityResponse

        :raise: LogException
        """
        params = {}
        resource_path = (root_resource and root_resource.lstrip('/')) or "/" + pluralize(entity_name)
        headers = {"x-log-bodyrawsize": '0', "Content-Type": "application/json"}

        if hasattr(detail, 'to_json'):
            detail = detail.to_json()
            body_str = six.b(json.dumps(detail))
        elif isinstance(detail, six.binary_type):
            body_str = detail
        elif isinstance(detail, six.text_type):
            body_str = detail.encode('utf8')
        else:
            body_str = six.b(json.dumps(detail))

        (resp, header) = self._send("POST", project, body_str, resource_path, params, headers)
        return GetEntityResponse(header, resp)

    fn.__name__ = 'create_' + entity_name
    fn.__doc__ = fn.__doc__.format(entity_title=entity_name.title())
    return fn


def get_entity(entity_name, root_resource=None):
    def fn(self, project, entity):
        """Get {entity_title}.
        Unsuccessful operation will cause an LogException.

        :type project: string
        :param project: project name

        :type entity: string
        :param entity: {entity_name} name

        :return: GetEntityResponse

        :raise: LogException
        """
        headers = dict()
        params = dict()
        resource_path = ((root_resource and root_resource.rstrip('/')) or ('/' + pluralize(entity_name) + '/')) + entity
        (resp, header) = self._send("GET", project, None, resource_path, params, headers)
        return GetEntityResponse(header, resp)

    fn.__name__ = 'get_' + entity_name
    fn.__doc__ = fn.__doc__.format(entity_name=entity_name, entity_title=entity_name.title())
    return fn


def delete_entity(entity_name, root_resource=None):
    def fn(self, project, entity):
        """Delete {entity_title}.
        Unsuccessful operation will cause an LogException.

        :type project: string
        :param project: project name

        :type entity: string
        :param entity: {entity_name} name

        :return: DeleteEntityResponse

        :raise: LogException
        """
        headers = {}
        params = {}
        resource_path = ((root_resource and root_resource.rstrip('/')) or ('/' + pluralize(entity_name) + '/')) + entity
        (resp, header) = self._send("DELETE", project, None, resource_path, params, headers)
        return DeleteEntityResponse(header, resp)

    fn.__name__ = 'delete_' + entity_name
    fn.__doc__ = fn.__doc__.format(entity_name=entity_name, entity_title=entity_name.title())
    return fn


def list_entity(entity_name, root_resource=None, max_batch_size=DEFAULT_MAX_LIST_PAGING_SIZE):
    def fn(self, project, offset=0, size=100):
        """ list the {entity_title}, get first 100 items by default
        Unsuccessful operation will cause an LogException.

        :type project: string
        :param project: the Project name

        :type offset: int
        :param offset: the offset of all the matched names

        :type size: int
        :param size: the max return names count, -1 means all

        :return: ListLogStoreResponse

        :raise: LogException
        """
        # need to use extended method to get more
        if int(size) == -1 or int(size) > max_batch_size:
            return list_more(fn, int(offset), int(size), max_batch_size, project)

        headers = {}
        params = {}
        resource_path = (root_resource and root_resource.lstrip('/')) or "/" + pluralize(entity_name)
        params['offset'] = str(offset)
        params['size'] = str(size)

        (resp, header) = self._send("GET", project, None, resource_path, params, headers)
        return ListEntityResponse(header, resp, resource_name=resource_path.strip('/'))

    fn.__name__ = 'list_' + entity_name
    fn.__doc__ = fn.__doc__.format(entity_title=entity_name.title())
    return fn


def update_entity(entity_name, name_field=None, root_resource=None):
    def fn(self, project, detail):
        """ Update {entity_title}.
        Unsuccessful operation will cause an LogException.

        :type project: string
        :param project: project name

        :type detail: dict/string
        :param detail: json string

        :return: UpdateEntityResponse

        :raise: LogException
        """
        params = {}
        headers = {}

        # parse entity value
        entity = None
        if hasattr(detail, 'to_json'):
            detail = detail.to_json()
            body_str = six.b(json.dumps(detail))
            entity = detail.get(name_field or 'name', '')
        elif isinstance(detail, six.binary_type):
            body_str = detail
        elif isinstance(detail, six.text_type):
            body_str = detail.encode('utf8')
        else:
            body_str = six.b(json.dumps(detail))
            entity = detail.get(name_field or 'name', '')

        if entity is None:
            entity = json.loads(body_str).get(name_field, '')

        assert entity, LogException('InvalidParameter',
                                    'unknown entity name "{0}" in "{1}"'.format(name_field, detail))

        resource_path = ((root_resource and root_resource.rstrip('/')) or ('/' + pluralize(entity_name) + '/')) + entity

        headers['Content-Type'] = 'application/json'
        headers['x-log-bodyrawsize'] = str(len(body_str))

        (resp, headers) = self._send("PUT", project, body_str, resource_path, params, headers)
        return UpdateEntityResponse(headers, resp)

    fn.__name__ = 'update_' + entity_name
    fn.__doc__ = fn.__doc__.format(entity_title=entity_name.title())
    return fn


def make_lcrud_methods(obj, entity_name, name_field=None, root_resource=None):
    setattr(obj, 'list_' + entity_name, list_entity(entity_name, root_resource=root_resource))
    setattr(obj, 'get_' + entity_name, get_entity(entity_name, root_resource=root_resource))
    setattr(obj, 'delete_' + entity_name, delete_entity(entity_name, root_resource=root_resource))
    setattr(obj, 'update_' + entity_name, update_entity(entity_name, root_resource=root_resource, name_field=name_field))
    setattr(obj, 'create_' + entity_name, create_entity(entity_name, root_resource=root_resource))
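A sketch of what the factory attaches (FakeClient and the 'dashboard' entity are invented; only the generated attributes are exercised, not the HTTP path):

class FakeClient(object):
    def _send(self, method, project, body, resource_path, params, headers):
        raise NotImplementedError  # a real client performs the request here

make_lcrud_methods(FakeClient, 'dashboard')

print(sorted(m for m in dir(FakeClient) if 'dashboard' in m))
# ['create_dashboard', 'delete_dashboard', 'get_dashboard',
#  'list_dashboard', 'update_dashboard']
print(FakeClient.create_dashboard.__doc__.strip().splitlines()[0])
# Create Dashboard.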
sejust/pykit
modutil/__init__.py
Python
mit
168
0
from .modutil import (
    submodules,
    submodule_tree,
    submodule_leaf_tree,
)

__all__ = [
    "submodules",
    "submodule_tree",
    "submodule_leaf_tree",
]
eResearchSA/reporting-unified
unified/apis/__init__.py
Python
apache-2.0
10,242
0.000683
import uuid
import requests
import logging
import logging.handlers

import flask_restful
from functools import wraps
from flask import request
from flask_cors import CORS
from flask_restful import Resource, reqparse
from sqlalchemy.orm.relationships import RelationshipProperty

from .. import db, app
from ..models import Input

restapi = flask_restful.Api(app)
cors = CORS(app)

QUERY_PARSER = reqparse.RequestParser()
QUERY_PARSER.add_argument("filter", action="append", help="Filter")
QUERY_PARSER.add_argument("order", help="Ordering", default="id")
QUERY_PARSER.add_argument("page", type=int, default=1, help="Page #")
QUERY_PARSER.add_argument("count", type=int, default=1000, help="Items per page")

# All default time range arguments
RANGE_PARSER = reqparse.RequestParser()
RANGE_PARSER.add_argument("start", type=int, default=0)
RANGE_PARSER.add_argument("end", type=int, default=0)

INPUT_PARSER = reqparse.RequestParser()
INPUT_PARSER.add_argument("name", location="args", required=True)

PACKAGE = ''
if "ERSA_REPORTING_PACKAGE" in app.config:
    PACKAGE = app.config["ERSA_REPORTING_PACKAGE"]

AUTH_TOKEN = None
if "ERSA_AUTH_TOKEN" in app.config:
    AUTH_TOKEN = app.config["ERSA_AUTH_TOKEN"]
if AUTH_TOKEN is not None:
    AUTH_TOKEN = AUTH_TOKEN.lower()

UUID_NAMESPACE = uuid.UUID("aeb7cf1c-a842-4592-82e9-55d2dad00150")

if "LOG_DIR" in app.config:
    LOG_DIR = app.config["LOG_DIR"]
else:
    LOG_DIR = "."

if "LOG_LEVEL" in app.config:
    LOG_LEVEL = getattr(logging, app.config["LOG_LEVEL"].upper(), logging.DEBUG)
else:
    LOG_LEVEL = logging.DEBUG

if "LOG_SIZE" in app.config:
    LOG_SIZE = app.config["LOG_SIZE"]
else:
    LOG_SIZE = 30000000

LOG_FORMAT = '%(asctime)s %(levelname)s %(module)s %(filename)s %(lineno)d: %(message)s'
SAN_MS_DATE = '%Y-%m-%d %H:%M:%S'
LOG_FORMATTER = logging.Formatter(LOG_FORMAT, SAN_MS_DATE)

top_logger = logging.getLogger(__name__)


# Logger is created by the calling module with the calling module's name as log name
# All other modules use this log
def create_logger(module_name):
    log_name = "%s/%s.log" % (LOG_DIR, module_name)
    file_handler = logging.handlers.RotatingFileHandler(log_name, maxBytes=LOG_SIZE)
    file_handler.setFormatter(LOG_FORMATTER)

    logger = logging.getLogger(__name__)
    logger.addHandler(file_handler)
    logger.setLevel(LOG_LEVEL)
    return logger


def identifier(content):
    """A generator for consistent IDs."""
    return str(uuid.uuid5(UUID_NAMESPACE, str(content)))


def is_uuid(id):
    """Verify if a string is an UUID"""
    try:
        v = uuid.UUID(id)
    except ValueError:
        v = None
    return isinstance(v, uuid.UUID)


def github(deps):
    """ Format GitHub dependencies. For example:
    deps = [
        ("eresearchsa/flask-util", "ersa-flask-util", "0.4"),
        ("foo/bar", "my-package-name", "3.141")
    ]
    """
    return ["https://github.com/%s/archive/v%s.tar.gz#egg=%s-%s" %
            (dep[0], dep[2], dep[1], dep[2]) for dep in deps]


def get_or_create(model, **kwargs):
    """Fetch object if returned by filter query, else create new."""
    item = get(model, **kwargs)
    if not item:
        item = model(**kwargs)
        db.session.add(item)
    return item


def get(model, **kwargs):
    """Fetch object by query parameters."""
    return db.session.query(model).filter_by(**kwargs).first()


def commit():
    """Commit session."""
    db.session.commit()


def rollback():
    """Rollback session."""
    db.session.rollback()


def add(item):
    """Add object."""
    db.session.add(item)


def delete(item):
    """Delete object."""
    db.session.delete(item)


def fetch(model, key):
    """Fetch by ID."""
    return db.session.query(model).get(key)


def flush():
    """Flush session."""
    db.session.flush()


def constant_time_compare(val1, val2):
    """
    Borrowed from Django!

    Returns True if the two strings are equal, False otherwise.

    The time taken is independent of the number of characters that match.

    For the sake of simplicity, this function executes in constant time only
    when the two strings have the same length. It short-circuits when they
    have different lengths. Since Django only uses it to compare hashes of
    known expected length, this is acceptable.
    """
    if len(val1) != len(val2):
        return False
    result = 0
    for x, y in zip(val1, val2):
        result |= ord(x) ^ ord(y)
    return result == 0


def require_auth(func):
    """
    Authenticate via the external reporting-auth service.

    For dev/test purposes: if ERSA_AUTH_TOKEN environment variable
    exists, check against that instead.
    """
    @wraps(func)
    def decorated(*args, **kwargs):
        """Check the header."""
        success = False

        try:
            token = str(uuid.UUID(request.headers.get("x-ersa-auth-token", ""))).lower()
        except:  # noqa: E722
            return "", 403

        if AUTH_TOKEN is not None:
            if constant_time_compare(token, AUTH_TOKEN):
                success = True
        else:
            auth_response = requests.get(
                "https://reporting.ersa.edu.au/auth?secret=%s" % token)
            if auth_response.status_code == 200:
                auth_data = auth_response.json()
                for endpoint in auth_data["endpoints"]:
                    if endpoint["name"] == PACKAGE:
                        success = True
                        break

        if success:
            return func(*args, **kwargs)
        else:
            return "", 403

    return decorated


def dynamic_query(model, query, expression):
    """
    Construct query based on: attribute.operation.expression

    For example: foo.eq.42
    """
    key, op, value = expression.split(".", 2)
    column = getattr(model, key, None)
    if isinstance(column.property, RelationshipProperty):
        column = getattr(model, key + "_id", None)

    if op == "in":
        query_filter = column.in_(value.split(","))
    else:
        attr = None
        for candidate in ["%s", "%s_", "__%s__"]:
            if hasattr(column, candidate % op):
                attr = candidate % op
                break
        if value == "null":
            value = None
        query_filter = getattr(column, attr)(value)

    return query.filter(query_filter)


def name_or_id(model, name):
    """Return an _id attribute if one exists."""
    name_id = name + "_id"
    if hasattr(model, name_id):
        return getattr(model, name_id)
    elif hasattr(model, name):
        return getattr(model, name)
    else:
        return None


def do_query(model):
    """Perform a query with request-specified filtering and ordering."""
    args = QUERY_PARSER.parse_args()
    query = model.query

    # filter
    if args["filter"]:
        for query_filter in args["filter"]:
            query = dynamic_query(model, query, query_filter)

    # order
    order = []
    for order_spec in args["order"].split(","):
        if not order_spec.startswith("-"):
            order.append(name_or_id(model, order_spec))
        else:
            order.append(name_or_id(model, order_spec[1:]).desc())
    query = query.order_by(*order)

    # execute
    return query.paginate(args["page"], per_page=args["count"], error_out=False).items


def instance_method(model, method, id, default=[], **kwargs):
    """Get an instance by an id and call the given method of the instance"""
    if not (is_uuid(id) and hasattr(model, method)):
        return default

    rslt = default
    instance = model.query.get(id)
    if instance:
        imethod = getattr(instance, method)
        rslt = imethod(**kwargs)
    return rslt


class QueryResource(Resource):
    """Generic Query"""

    def get_raw(self):
        """Query"""
        try:
            top_logger.debug("Query: %s" % self.query_class.query)
            return do_query(self.query_class)
        except Exception as e:
            top_logger.error("Query %s failed. Detail: %s" %
                             (self.query_class.query, str(e)))
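The filter grammar that dynamic_query()/do_query() accept, sketched against a placeholder Snapshot model (illustrative, not runnable on its own):

# attribute.operation.value, e.g. as query-string filters:
#   GET /snapshot?filter=name.like.%25prod%25&filter=owner.eq.null&order=-id
query = Snapshot.query
query = dynamic_query(Snapshot, query, "name.like.%prod%")  # -> column.like('%prod%')
query = dynamic_query(Snapshot, query, "owner.eq.null")     # -> column.__eq__(None)
query = dynamic_query(Snapshot, query, "id.in.1,2,3")       # -> column.in_(['1', '2', '3'])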
thopiekar/Uranium
UM/Scene/SceneNodeDecorator.py
Python
lgpl-3.0
934
0.009636
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.


##  The point of a SceneNodeDecorator is that it can be added to a SceneNode, where it then provides decorations
#   Decorations are functions of a SceneNodeDecorator that can be called (except for functions already defined
#   in SceneNodeDecorator).
#   \sa SceneNode
class SceneNodeDecorator:
    def __init__(self, node = None):
        super().__init__()
        self._node = node

    def setNode(self, node):
        self._node = node

    def getNode(self):
        return self._node

    ##  Clear all data associated with this decorator. This will be called before the decorator is removed
    def clear(self):
        pass

    def __deepcopy__(self, memo):
        raise NotImplementedError("Subclass {0} of SceneNodeDecorator should implement their own __deepcopy__() method.".format(str(self)))
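A minimal concrete decorator, sketched against just the contract above (the class name and tag payload are invented):

import copy

class TagDecorator(SceneNodeDecorator):
    def __init__(self, tag, node = None):
        super().__init__(node)
        self._tag = tag

    def getTag(self):
        # anything not defined on the base class becomes a "decoration"
        return self._tag

    def clear(self):
        self._tag = None

    def __deepcopy__(self, memo):
        return TagDecorator(copy.deepcopy(self._tag, memo))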
mozilla/addons-server
src/olympia/devhub/tests/test_views_validation.py
Python
bsd-3-clause
22,989
0.000957
import json
from copy import deepcopy
from datetime import datetime

from django.core.files.storage import default_storage as storage
from django.urls import reverse

from unittest import mock
import waffle
from pyquery import PyQuery as pq

from olympia import amo
from olympia.addons.models import Addon, AddonUser
from olympia.amo.tests.test_helpers import get_addon_file
from olympia.amo.tests import addon_factory, TestCase, user_factory
from olympia.devhub.tests.test_tasks import ValidatorTestCase
from olympia.files.models import File, FileUpload, FileValidation
from olympia.files.tests.test_models import UploadMixin
from olympia.files.utils import check_xpi_info, parse_addon
from olympia.reviewers.templatetags.code_manager import code_manager_url
from olympia.users.models import UserProfile


class TestUploadValidation(ValidatorTestCase, UploadMixin, TestCase):
    fixtures = ['base/users']

    def setUp(self):
        super().setUp()
        self.user = UserProfile.objects.get(email='regular@mozilla.com')
        assert self.client.login(email=self.user.email)
        self.validation = {
            'errors': 1,
            'detected_type': 'extension',
            'success': False,
            'warnings': 0,
            'message_tree': {
                'testcases_targetapplication': {
                    '__warnings': 0,
                    '__errors': 1,
                    '__messages': [],
                    '__infos': 0,
                    'test_targetedapplications': {
                        '__warnings': 0,
                        '__errors': 1,
                        '__messages': [],
                        '__infos': 0,
                        'invalid_min_version': {
                            '__warnings': 0,
                            '__errors': 1,
                            '__messages': ['d67edb08018411e09b13c42c0301fe38'],
                            '__infos': 0,
                        },
                    },
                }
            },
            'infos': 0,
            'messages': [
                {
                    'uid': 'd67edb08018411e09b13c42c0301fe38',
                    'tier': 1,
                    'id': [
                        'testcases_targetapplication',
                        'test_targetedapplications',
                        'invalid_min_version',
                    ],
                    'file': 'install.rdf',
                    'message': 'The value of <em:id> is invalid. See '
                               '<a href="https://mozilla.org">mozilla.org</a> '
                               'for more information',
                    'context': ['<em:description>...', '<foo/>'],
                    'type': 'error',
                    'line': 0,
                    'description': [
                        '<iframe>',
                        'Version "3.0b3" isn\'t compatible with '
                        '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}.',
                    ],
                    'signing_help': ['<script>&amp;'],
                }
            ],
            'rejected': False,
        }

    def test_only_safe_html_in_messages(self):
        upload = self.get_upload(
            abspath=get_addon_file('invalid_webextension.xpi'),
            user=self.user,
            with_validation=True,
            validation=json.dumps(self.validation),
        )
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        assert response.status_code == 200
        data = json.loads(response.content)
        msg = data['validation']['messages'][0]

        assert msg['message'] == (
            'The value of &lt;em:id&gt; is invalid. '
            'See <a href="https://mozilla.org" rel="nofollow">mozilla.org</a> '
            'for more information'
        )
        assert msg['description'][0] == '&lt;iframe&gt;'
        assert msg['context'] == (['<em:description>...', '<foo/>'])

    def test_date_on_upload(self):
        upload = self.get_upload(
            abspath=get_addon_file('invalid_webextension.xpi'),
            user=self.user,
            with_validation=True,
            validation=json.dumps(self.validation),
        )
        upload.update(created=datetime.fromisoformat('2010-12-06 14:04:46'))
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex])
        )
        assert response.status_code == 200
        doc = pq(response.content)
        assert doc('td').text() == 'Dec. 6, 2010'

    def test_upload_processed_validation_error(self):
        addon_file = open(get_addon_file('invalid_webextension.xpi'), 'rb')
        response = self.client.post(
            reverse('devhub.upload'), {'name': 'addon.xpi', 'upload': addon_file}
        )
        uuid = response.url.split('/')[-2]
        upload = FileUpload.objects.get(uuid=uuid)
        assert upload.processed_validation['errors'] == 1
        assert upload.processed_validation['messages'][0]['id'] == [
            'validator',
            'unexpected_exception',
        ]

    def test_login_required(self):
        upload = self.get_upload(
            abspath=get_addon_file('invalid_webextension.xpi'),
            user=self.user,
            with_validation=True,
            validation=json.dumps(self.validation),
        )
        url = reverse('devhub.upload_detail', args=[upload.uuid.hex])
        assert self.client.head(url).status_code == 200

        self.client.logout()
        assert self.client.head(url).status_code == 302


class TestUploadErrors(UploadMixin, TestCase):
    fixtures = ('base/addon_3615', 'base/users')

    def setUp(self):
        super().setUp()
        self.user = UserProfile.objects.get(email='regular@mozilla.com')
        self.client.login(email=self.user.email)

    @mock.patch.object(waffle, 'flag_is_active')
    def test_dupe_uuid(self, flag_is_active):
        flag_is_active.return_value = True
        addon = Addon.objects.get(pk=3615)
        data = parse_addon(self.get_upload('webextension.xpi'), user=self.user)
        addon.update(guid=data['guid'])

        dupe_xpi = self.get_upload('webextension.xpi', user=self.user)
        res = self.client.get(
            reverse('devhub.upload_detail', args=[dupe_xpi.uuid, 'json'])
        )
        assert res.status_code == 400, res.content
        data = json.loads(res.content)
        assert data['validation']['messages'] == (
            [
                {
                    'tier': 1,
                    'message': 'Duplicate add-on ID found.',
                    'type': 'error',
                    'fatal': True,
                }
            ]
        )
        assert data['validation']['ending_tier'] == 1

    def test_long_uuid(self):
        """An add-on uuid may be more than 64 chars, see bug 1203915."""
        long_guid = (
            'this_guid_is_longer_than_the_limit_of_64_chars_see_'
            'bug_1201176_but_should_not_fail_see_bug_1203915@xpi'
        )
        xpi_info = check_xpi_info({'guid': long_guid, 'version': '1.0'})
        assert xpi_info['guid'] == long_guid

    def test_mv3_error_added(self):
        validation = deepcopy(amo.VALIDATOR_SKELETON_EXCEPTION_WEBEXT)
        validation['metadata']['manifestVersion'] = 3
        xpi = self.get_upload(
            'webextension_mv3.xpi',
            with_validation=True,
            validation=json.dumps(validation),
            user=self.user,
        )
        res = self.client.get(reverse('devhub.upload_detail', args=[xpi.uuid, 'json']))
        assert b'https://blog.mozilla.org/addons/2021/05/27/manifest-v3-update/' in (
            res.content
        )


class TestFileValidation(TestCase):
    fixtures = ['base/users', 'devhub/addon-validation-1']

    def setUp(self):
        super().setUp()
        assert self.client.login(email='del@icio.us')
        self.user = UserProfile.objects.get(email='del@icio.us')
        self.file_validation = FileValidation.objects.get(pk=1)
        self.file = self.file_validation.file
        with storage.open(self.file.file_path, 'wb') as f:
            f.write(b'<pretend this is an xpi>\n')
zhu913104/KMdriod
gapathplanning.py
Python
mit
5,686
0.006184
""" Visualize Genetic Algorithm to find the shortest path for travel sales problem. Visit my tutorial website for more: https://morvanzhou.github.io/tutorials/ """ import matplotlib.pyplot as plt import numpy as np START_POINT = list(input("請輸入起始點")) PASS_POINT = input("輸入要經過的點") PASS_POINT = PASS_POINT.split(" ") # START_POINT = [17] # PASS_POINT = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16] START_POINT.extend(PASS_POINT) N_CITIES = len(START_POINT) # DNA size CROSS_RATE = 0.1 MUTATE_RATE = 0.01 POP_SIZE = 50000 N_GENERATIONS = 5000 class GA(object): def __init__(self, DNA_size, cross_rate, mutation_rate, pop_size,start_point ): self.DNA_size = DNA_size self.cross_rate = cross_rate self.mutate_rate = mutation_rate self.pop_size = pop_size self.start_point = start_point.pop(0) self.pass_point = start_point self.pop = np.vstack([np.hstack(( self.start_point,np.random.permutation(start_point))).astype(np.int64) for _ in range(pop_size)]) def translateDNA(self, DNA, city_position): # get cities' coord in order line_x = np.empty_like(DNA, dtype=np.float64) line_y = np.empty_like(DNA, dtype=np.float64) for i, d in enumerate(DNA): city_coord = city_position[d] line_x[i, :] = city_coord[:, 0] line_y[i, :] = city_coord[:, 1] return line_x, line_y def get_fitness(self, line_x, line_y): total_distance = np.empty((line_x.shape[0],), dtype=np.float64) for i, (xs, ys) in enumerate(zip(line_x, line_y)): total_distance[i] = np.sum(np.sqrt(np.square(np.diff(xs)) + np.square(np.diff(ys)))) fitness = np.exp(self.DNA_size * 2 / total_distance) return fitness, total_distance def select(self, fitness): idx = np.random.choice(np.arange(self.pop_size), size=self.pop_size, replace=True, p=fitness / fitness.sum()) return self.pop[idx] def crossover(self, parent, pop): if np.random.rand() < self.cross_rate: i_ = np.random.randint(0, self.pop_size, size=1) # select another individual from pop cross_points = np.hstack((False, np.random.randint(0, 2, self.DNA_size).astype(np.bool))) # choose crossover points keep_city = parent[~cross_points] # find the city number swap_city = np.setdiff1d(pop[i_, :], keep_city) parent[:] = np.concatenate((keep_city, swap_city)) return parent def mutate(self, child): for point in range(1,self.DNA_size): if np.random.rand() < self.mutate_rate: swap_point = np.random.randint(1, self.DNA_size) swapA, swapB = child[point], child[swap_point] child[point], child[swap_point] = swapB, swapA return child def evolve(self, fitness): pop = self.select(fitness) pop_copy = pop.copy() for parent in pop: # for every parent child = self.crossover(parent, pop_copy) child = self.mutate(child) parent[:] = child self.pop = pop class TravelSalesPerson(object): def __init__(self, n_cities): self.city_position = np.array([[ 0.36774816, 0.48556132], [0.36641813, 0.0957464], [ 0.51269409, 0.60941519], [0.00644122, 0.55532349], [0.6503008 , 0.35550922], [ 0.15919575, 0.72421738], [0.01457005 , 0.76355109], [ 0.08077499, 0.1413901], [0.45753614, 0.04607823], [0.39487359, 0.55118165], [0.75402671 , 0.40564417], [0.61979506, 0.91658641], [ 0.08871058 , 0.4], [0.03156203, 0.05129652], [0.13118489, 0.80425415], [0.96021151, 0.69831614], [0.6, 0.6], [0.82265218, 0.81566013], [0.326443 , 0.98269306], [0.2298539,0.27029802] ]) # self.city_position = np.random.rand(n_cities, 2) plt.ion() def plotting(self, lx, ly, total_d): plt.cla() color = np.linspace(0,1,20) plt.scatter(self.city_position[:, 0].T, self.city_position[:, 1].T, s=100, c=color) plt.plot(lx.T, ly.T, 'k--',) plt.text(-0.05, -0.05, "Total distance=%.2f" % 
total_d, fontdict={'size': 20
, 'color': 'red'}) plt.xlim((-0.1, 1.1)) plt.ylim((-0.1, 1.1)) plt.p
ause(0.0000000000000000000000000000001) ga = GA(DNA_size=len(PASS_POINT), cross_rate=CROSS_RATE, mutation_rate=MUTATE_RATE, pop_size=POP_SIZE,start_point=START_POINT) env = TravelSalesPerson(N_CITIES) for generation in range(N_GENERATIONS): lx, ly = ga.translateDNA(ga.pop, env.city_position) fitness, total_distance = ga.get_fitness(lx, ly) ga.evolve(fitness) best_idx = np.argmax(fitness) l_pop = ga.pop.tolist() xxx = l_pop[best_idx] print(xxx) print('Gen:', generation, '| best fit: %.2f' % fitness[best_idx], ) env.plotting(lx[best_idx], ly[best_idx], total_distance[best_idx]) plt.ioff() plt.show()
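A numeric aside on the fitness used above, exp(DNA_size * 2 / distance): it turns small differences in route length into large selection-probability gaps for the roulette-wheel select() (toy values):

import numpy as np

d = np.array([10.0, 12.0])     # two candidate route lengths, 17 points
fit = np.exp(17 * 2 / d)
print(fit / fit.sum())         # ~[0.64, 0.36]: the shorter route dominates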
InUrSys/PescArt2.0
GeneratedFiles/ui_codificadores_POT.py
Python
gpl-3.0
3,813
0.0021
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file '/Users/chernomirdinmacuvele/Documents/workspace/PscArt2.0.X/UserInt/ui_codificadores_POT.ui'
#
# Created by: PyQt5 UI code generator 5.8.2
#
# WARNING! All changes made in this file will be lost!

from PyQt5 import QtCore, QtGui, QtWidgets

class Ui_Form(object):
    def setupUi(self, Form):
        Form.setObjectName("Form")
        Form.resize(306, 332)
        self.gridLayout = QtWidgets.QGridLayout(Form)
        self.gridLayout.setObjectName("gridLayout")
        self.label = QtWidgets.QLabel(Form)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        self.LECodigo = QtWidgets.QLineEdit(Form)
        self.LECodigo.setMaxLength(3)
        self.LECodigo.setObjectName("LECodigo")
        self.gridLayout.addWidget(self.LECodigo, 0, 1, 1, 1)
        self.label_3 = QtWidgets.QLabel(Form)
        self.label_3.setObjectName("label_3")
        self.gridLayout.addWidget(self.label_3, 1, 0, 1, 1)
        self.LENome = QtWidgets.QLineEdit(Form)
        self.LENome.setMaxLength(15)
        self.LENome.setObjectName("LENome")
        self.gridLayout.addWidget(self.LENome, 1, 1, 1, 1)
        self.label_4 = QtWidgets.QLabel(Form)
        self.label_4.setObjectName("label_4")
        self.gridLayout.addWidget(self.label_4, 2, 0, 1, 1)
        self.PTEDescricao = QtWidgets.QPlainTextEdit(Form)
        self.PTEDescricao.setObjectName("PTEDescricao")
        self.gridLayout.addWidget(self.PTEDescricao, 2, 1, 1, 1)
        self.label_5 = QtWidgets.QLabel(Form)
        self.label_5.setObjectName("label_5")
        self.gridLayout.addWidget(self.label_5, 3, 0, 1, 1)
        self.PTEComentarios = QtWidgets.QPlainTextEdit(Form)
        self.PTEComentarios.setObjectName("PTEComentarios")
        self.gridLayout.addWidget(self.PTEComentarios, 3, 1, 1, 1)
        self.CHBActivo = QtWidgets.QCheckBox(Form)
        self.CHBActivo.setObjectName("CHBActivo")
        self.gridLayout.addWidget(self.CHBActivo, 4, 1, 1, 1)
        self.splitter = QtWidgets.QSplitter(Form)
        self.splitter.setOrientation(QtCore.Qt.Horizontal)
        self.splitter.setObjectName("splitter")
        self.PBGuardar = QtWidgets.QPushButton(self.splitter)
        self.PBGuardar.setText("")
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/newPrefix/Icons/002-save.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.PBGuardar.setIcon(icon)
        self.PBGuardar.setObjectName("PBGuardar")
        self.PBCancelar = QtWidgets.QPushButton(self.splitter)
        self.PBCancelar.setText("")
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(":/newPrefix/Icons/003-error.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.PBCancelar.setIcon(icon1)
        self.PBCancelar.setObjectName("PBCancelar")
        self.gridLayout.addWidget(self.splitter, 5, 0, 1, 2)

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Codificador"))
        self.label.setText(_translate("Form", "Codigo:"))
        self.LECodigo.setPlaceholderText(_translate("Form", "Ex:AAA"))
        self.label_3.setText(_translate("Form", "Nome:"))
        self.LENome.setPlaceholderText(_translate("Form", "Ex:Qualquer Coisa"))
        self.label_4.setText(_translate("Form", "Descricao:"))
        self.PTEDescricao.setPlaceholderText(_translate("Form", "Ex:O que faz…"))
        self.label_5.setText(_translate("Form", "Comentarios:"))
        self.PTEComentarios.setPlaceholderText(_translate("Form", "Ex:Nota, Obs…"))
        self.CHBActivo.setText(_translate("Form", "Activo"))

import icons_rc
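Generated Ui_* classes carry no widget of their own; the usual pattern composes them onto a live QWidget (a sketch, assuming PyQt5 and the icons_rc resource module are importable):

import sys
from PyQt5 import QtWidgets

app = QtWidgets.QApplication(sys.argv)
form = QtWidgets.QWidget()
ui = Ui_Form()
ui.setupUi(form)   # builds the layout onto `form`
form.show()
sys.exit(app.exec_())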
bashrc/zeronet-debian
src/src/Tor/TorManager.py
Python
gpl-2.0
10,295
0.001651
import logging import re import socket import binascii import sys import os import time import gevent import subprocess import atexit from Config import config from Crypt import CryptRsa from Site import SiteManager from lib.PySocks import socks from gevent.coros import RLock from util import helper from Debug import Debug class TorManager: def __init__(self, fileserver_ip=None, fileserver_port=None): self.privatekeys = {} # Onion: Privatekey self.site_onions = {} # Site address: Onion self.tor_exe = "tools/tor/tor.exe" self.tor_process = None self.log = logging.getLogger("TorManager") self.start_onions = None self.conn = None self.lock = RLock() if config.tor == "disable": self.enabled = False self.start_onions = False self.status = "Disabled" else: self.enabled = True self.status = "Waiting" if fileserver_port: self.fileserver_port = fileserver_port else: self.fileserver_port = config.fileserver_port self.ip, self.port = config.tor_controller.split(":") self.port = int(self.port) self.proxy_ip, self.proxy_port = config.tor_proxy.split(":") self.proxy_port = int(self.proxy_port) # Test proxy port if config.tor != "disable": try: assert self.connect(), "No connection" self.log.debug("Tor proxy port %s check ok" % config.tor_proxy) except Exception, err: self.log.debug("Tor proxy port %s check error: %s" % (config.tor_proxy, err)) self.enabled = False # Change to self-bundled Tor ports
from lib.PySocks import socks self.port = 49051 self.proxy_port = 49050 socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port) if os.
path.isfile(self.tor_exe): # Already, downloaded: sync mode self.startTor() else: # Not downloaded yet: Async mode gevent.spawn(self.startTor) def startTor(self): if sys.platform.startswith("win"): try: if not os.path.isfile(self.tor_exe): self.downloadTor() self.log.info("Starting Tor client %s..." % self.tor_exe) tor_dir = os.path.dirname(self.tor_exe) self.tor_process = subprocess.Popen(r"%s -f torrc" % self.tor_exe, cwd=tor_dir, close_fds=True) for wait in range(1,10): # Wait for startup time.sleep(wait * 0.5) self.enabled = True if self.connect(): break # Terminate on exit atexit.register(self.stopTor) except Exception, err: self.log.error("Error starting Tor client: %s" % Debug.formatException(err)) self.enabled = False return False def stopTor(self): self.log.debug("Stopping...") self.tor_process.terminate() def downloadTor(self): self.log.info("Downloading Tor...") # Check Tor webpage for link download_page = helper.httpRequest("https://www.torproject.org/download/download.html").read() download_url = re.search('href="(.*?tor.*?win32.*?zip)"', download_page).group(1) if not download_url.startswith("http"): download_url = "https://www.torproject.org/download/" + download_url # Download Tor client self.log.info("Downloading %s" % download_url) data = helper.httpRequest(download_url, as_file=True) data_size = data.tell() # Handle redirect if data_size < 1024 and "The document has moved" in data.getvalue(): download_url = re.search('href="(.*?tor.*?win32.*?zip)"', data.getvalue()).group(1) data = helper.httpRequest(download_url, as_file=True) data_size = data.tell() if data_size > 1024: import zipfile zip = zipfile.ZipFile(data) self.log.info("Unpacking Tor") for inner_path in zip.namelist(): if ".." in inner_path: continue dest_path = inner_path dest_path = re.sub("^Data/Tor/", "tools/tor/data/", dest_path) dest_path = re.sub("^Data/", "tools/tor/data/", dest_path) dest_path = re.sub("^Tor/", "tools/tor/", dest_path) dest_dir = os.path.dirname(dest_path) if dest_dir and not os.path.isdir(dest_dir): os.makedirs(dest_dir) if dest_dir != dest_path.strip("/"): data = zip.read(inner_path) if not os.path.isfile(dest_path): open(dest_path, 'wb').write(data) else: self.log.error("Bad response from server: %s" % data.getvalue()) return False def connect(self): if not self.enabled: return False self.site_onions = {} self.privatekeys = {} if "socket_noproxy" in dir(socket): # Socket proxy-patched, use non-proxy one conn = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM) else: conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.log.debug("Connecting to %s:%s" % (self.ip, self.port)) try: with self.lock: conn.connect((self.ip, self.port)) res_protocol = self.send("PROTOCOLINFO", conn) version = re.search('Tor="([0-9\.]+)"', res_protocol).group(1) # Version 0.2.7.5 required because ADD_ONION support assert int(version.replace(".", "0")) >= 20705, "Tor version >=0.2.7.5 required" # Auth cookie file cookie_match = re.search('COOKIEFILE="(.*?)"', res_protocol) if cookie_match: cookie_file = cookie_match.group(1) auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read()) res_auth = self.send("AUTHENTICATE %s" % auth_hex, conn) else: res_auth = self.send("AUTHENTICATE", conn) assert "250 OK" in res_auth, "Authenticate error %s" % res_auth self.status = "Connected (%s)" % res_auth self.conn = conn except Exception, err: self.conn = None self.status = "Error (%s)" % err self.log.error("Tor controller connect error: %s" % err) self.enabled = False return self.conn def disconnect(self): 
self.conn.close() self.conn = None def startOnions(self): self.log.debug("Start onions") self.start_onions = True # Get new exit node ip def resetCircuits(self): res = self.request("SIGNAL NEWNYM") if "250 OK" not in res: self.status = "Reset circuits error (%s)" % res self.log.error("Tor reset circuits error: %s" % res) def addOnion(self): res = self.request("ADD_ONION NEW:RSA1024 port=%s" % self.fileserver_port) match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=RSA1024:(.*?)[\r\n]", res, re.DOTALL) if match: onion_address, onion_privatekey = match.groups() self.privatekeys[onion_address] = onion_privatekey self.status = "OK (%s onion running)" % len(self.privatekeys) SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port)) return onion_address else: self.status = "AddOnion error (%s)" % res self.log.error("Tor addOnion error: %s" % res) return False def delOnion(self, address): res = self.request("DEL_ONION %s" % address) if "250 OK" in res: del self.privatekeys[address] self.status = "OK (%s onion running)" % len(self.private
ubiquitypress/rua
src/manager/admin.py
Python
gpl-2.0
496
0
from django.contrib import admin

from .models import (
    Group,
    GroupMembership,
)


class GroupAdmin(admin.ModelAdmin):
    list_display = ('name', 'group_type', 'active', 'sequence')
    list_filter = ('active',)
    search_fields = ('name',)


class GroupMembershipAdmin(admin.ModelAdmin):
    list_display = ('group', 'user', 'added', 'sequence')


admin_list = [
    (Group, GroupAdmin),
    (GroupMembership, GroupMembershipAdmin),
]

[admin.site.register(*t) for t in admin_list]
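The closing list comprehension runs purely for its register() side effects; an equivalent loop form, for reference:

for model, model_admin in admin_list:
    admin.site.register(model, model_admin)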
thinkopensolutions/odoo-saas-tools
saas_portal_async/__openerp__.py
Python
lgpl-3.0
459
0
# -*- coding: utf-8 -*-
{
    'name': 'SaaS Portal Asynchronous database creation',
    'version': '1.0.0',
    'author': 'IT-Projects LLC',
    "support": "apps@it-projects.info",
    'website': "https://it-projects.info",
    'license': 'GPL-3',
    'category': 'SaaS',
    'depends': [
        'base',
        'saas_portal',
        'connector',
    ],
    'installable': False,
    'application': False,
    'data': [
        'views/wizard.xml',
    ],
}
zaneb/heat-convergence-prototype
scenarios/update_interrupt_create.py
Python
apache-2.0
673
0.001486
def check_resource_count(expected_count):
    test.assertEqual(expected_count, len(reality.all_resources()))

example_template = Template({
    'A': RsrcDef({}, []),
    'B': RsrcDef({'a': '4alpha'}, ['A']),
    'C': RsrcDef({'a': 'foo'}, ['B']),
    'D': RsrcDef({'a': 'bar'}, ['C']),
})
engine.create_stack('foo', example_template)
engine.noop(1)

example_template2 = Template({
    'A': RsrcDef({}, []),
    'B': RsrcDef({'a': '4alpha'}, ['A']),
    'C': RsrcDef({'a': 'blarg'}, ['B']),
    'D': RsrcDef({'a': 'wibble'}, ['C']),
})
engine.update_stack('foo', example_template2)
engine.call(check_resource_count, 2)
engine.noop(11)
engine.call(verify, example_template2)
softglow/samplecheck
samplecheck.py
Python
gpl-3.0
1,713
0.008757
#!/usr/bin/env python3

import argparse
import struct
import sys

SPC_START_OFFSET = 0x100
SPC_RAM_SIZE = 0x10000

INST_TBL = 0x6C00
INST_ENTRY_LEN = 0x6
SAMPLE_TBL = 0x6D00
SAMPLE_ENTRY_LEN = 0x4
SAMPLE_MAX_ID = 0x4F    # completely arbitrary limit

class InstrEntry (object):
    srcn = None
    adsr = None
    gain = None
    pitch_adj = None

    @classmethod
    def decode (cls, entry):
        u = struct.unpack("<BHBH", entry)
        return cls(srcn=u[0], adsr=u[1], gain=u[2], pitch_adj=u[3])

    def __init__ (self, **kwargs):
        self.__dict__.update(kwargs)

    def encode (self):
        return struct.pack("<BHBH", self.srcn, self.adsr, self.gain, self.pitch_adj)

    def __str__ (self):
        m = "InstrEntry<srcn={0:02X} adsr={1:04X} gain={2:02X} pitch_adj={3:04X}>"
        return m.format(self.srcn, self.adsr, self.gain, self.pitch_adj)

def parse_fp (f):
    ram = f.read(SPC_START_OFFSET)  # skip the SPC header block
    ram = f.read(SPC_RAM_SIZE)
    signatures = []
    ptr = INST_TBL
    for inst in range(0x2a):
        entry = InstrEntry.decode(ram[ptr:ptr+INST_ENTRY_LEN])
        ptr += INST_ENTRY_LEN
        if (0 <= entry.srcn <= SAMPLE_MAX_ID):
            signatures.append(entry)
    return signatures

def dump_signature (sig_ary, fn=print):
    for i, v in enumerate(sig_ary):
        fn("{0:2X}: {1}".format(i, str(v)))
    return

def main (args, prog='samplecheck'):
    p = argparse.ArgumentParser(prog=prog)
    p.add_argument("SPC", help="The SPC file to fingerprint")
    args = p.parse_args(args)  # parse the argv slice passed in, not sys.argv

    with open(args.SPC, "rb") as f:
        dump_signature(parse_fp(f))

if __name__ == "__main__":
    main(sys.argv[1:], sys.argv[0])
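For reference, the "<BHBH" layout decoded above (little-endian, no padding):

import struct

#   B  srcn       1 byte
#   H  adsr       2 bytes
#   B  gain       1 byte
#   H  pitch_adj  2 bytes
assert struct.calcsize("<BHBH") == 0x6  # matches INST_ENTRY_LEN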
openstax/openstax-cms
pages/migrations/0006_auto_20201105_1620.py
Python
agpl-3.0
631
0.001585
# Generated by Django 3.0.4 on 2020-11-05 22:20

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('wagtaildocs', '0010_document_file_hash'),
        ('pages', '0005_auto_20201105_1414'),
    ]

    operations = [
        migrations.AlterField(
            model_name='llphpage',
            name='book_cover',
            field=models.ForeignKey(blank=True, help_text='The book cover to be shown on the website.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtaildocs.Document'),
        ),
    ]
hbiyik/tribler
src/tribler-core/tribler_core/modules/metadata_store/community/request.py
Python
lgpl-3.0
1,680
0.002381
from ipv8.requestcache import RandomNumberCache

from tribler_core.utilities.unicode import hexlify


class SearchRequestCache(RandomNumberCache):
    """
    This request cache keeps track of all outstanding search requests within the GigaChannelCommunity.
    """

    def __init__(self, request_cache, uuid, peers):
        super(SearchRequestCache, self).__init__(request_cache, u"remote-search-request")
        self.request_cache = request_cache
        self.requested_peers = {hexlify(peer.mid): False for peer in peers}
        self.uuid = uuid

    @property
    def timeout_delay(self):
        return 30.0

    def on_timeout(self):
        pass

    def process_peer_response(self, peer):
        """
        Returns whether to process this response from the given peer in the community. If the peer
        response has already been processed then it is skipped. Moreover, if all the responses from
        the expected peers are received, the request is removed from the request cache.
        :param peer: Peer
        :return: True if peer has not been processed before, else False
        """
        mid = hexlify(peer.mid)
        if mid in self.requested_peers and not self.requested_peers[mid]:
            self.requested_peers[mid] = True

            # Check if all expected responses are received
            if all(self.requested_peers.values()):
                self.remove_request()
            return True
        return False

    def remove_request(self):
        if self.request_cache.has(self.prefix, self.number):
            try:
                self.request_cache.pop(self.prefix, self.number)
            except KeyError:
                pass
euclidjda/dnn-quant
scripts/deep_mlp_model.py
Python
apache-2.0
7,953
0.020118
# Copyright 2016 Euclidean Technologies Management LLC All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import sys import numpy as np import tensorflow as tf from deep_nn_model import DeepNNModel from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops class DeepMlpModel(DeepNNModel): """ A Deep MLP Model that supports a mult-class output with an arbitrary number of fixed width hidden layers. """ def __init__(self, num_layers, num_inputs, num_hidden, num_outputs, num_unrollings, max_grad_norm=5.0, hidden_dropout=True, input_dropout=False, skip_connections=False, embedding_size=0, optimizer='gd'): """ Initialize the model Args: num_layers: number of hidden layers num_inputs: number input units. this should be less than or or equal to width of feature data in the data file num_hidden: number of hidden units in each hidden layer num_unrollings: the size of the time window processed in each step (see step() function below) batch_size: the size of the data batch processed in each step max_grad_norm: max gardient norm size for gradient clipping input_dropout: perform dropout on input layer """ self._num_unrollings = num_unrollings self._num_inputs = num_inputs total_input_size = num_unrollings * num_inputs batch_size = self._batch_size = tf.placeholder(tf.int32, shape=[]) self._seq_lengths = tf.placeholder(tf.int64, shape=[None]) self._keep_prob = tf.placeholder(tf.float32, shape=[]) self._inputs = list() self._targets = list() self._train_mask = list() # Weights for loss functions per example self._valid_mask = list() # Weights for loss functions per example for _ in range(num_unrollings): self._inputs.append( tf.placeholder(tf.float32, shape=[None,num_inputs]) ) self._targets.append( tf.placeholder(tf.float32, shape=[None,num_outputs]) ) self._train_mask.append(tf.placeholder(tf.float32, shape=[None])) self._valid_mask.append(tf.placeholder(tf.float32, shape=[None])) inputs = tf.reverse_sequence(tf.concat( self._inputs, 1 ), self._seq_lengths*num_inputs, seq_axis=1,batch_axis=0) if input_dropout is True: inputs = self._input_dropout(inputs) num_prev = total_input_size outputs = inputs if embedding_size > 0: time_weights = tf.get_variable("t_weights",[num_unrollings,embedding_size,1]) feature_weights = tf.get_variable("f_weights",[1,embedding_size,num_inputs]) embedding_weights = tf.reshape( time_weights*feature_weights, [num_unrollings*num_inputs, embedding_size] ) biases = tf.get_variable("embedding_biases",[embedding_size]) outputs = tf.nn.relu(tf.nn.xw_plus_b(inputs,embedding_weights,biases)) num_prev = embedding_size for i in range(num_layers): weights = tf.get_variable("hidden_w_%d"%i,[num_prev, num_hidden]) biases = tf.get_variable("hidden_b_%d"%i,[num_hidden]) outputs = tf.nn.relu(tf.nn.xw_plus_b(outputs, weights, biases)) if hidden_dropout is True: 
outputs = tf.nn.dropout(outputs, self._keep_prob) num_prev = num_hidden if skip_connections is True: num_prev = num_inputs+num_prev skip_inputs = tf.slice(inputs, [0, 0], [batch_size, num_inputs] ) outputs = tf.concat( [ skip_inputs, outputs], 1) softmax_b = tf.get_variable("softmax_b", [num_outputs]) softmax_w = tf.get_variable("softmax_w", [num_prev, num_outputs]) logits = tf.nn.xw_plus_b(outputs, softmax_w, softmax_b) targets = tf.unstack(tf.reverse_sequence(tf.reshape( tf.concat(self._targets, 1),[batch_size,num_unrollings,num_outputs] ), self._seq_lengths,seq_axis=1,batch_axis=0),axis=1)[0] agg_loss = tf.nn.softmax_cross_entropy_with_logits(labels=targets,logits=logits) train_mask = tf.unstack(tf.reverse_sequence(tf.transpose( tf.r
eshape( tf.concat(self._train_mask, 0 ), [num_unrollings, batch_size] ) ), self._seq_lengths,seq_axis=1,batch_axis=0),axis=1)[0] valid_mask = tf.unstack(tf.reverse_sequence(tf.transpose( tf.reshape( tf.concat(self._valid_mask, 0), [num_unrollings, batch_s
ize] ) ), self._seq_lengths,seq_axis=1,batch_axis=0),axis=1)[0] train_loss = tf.multiply(agg_loss, train_mask) valid_loss = tf.multiply(agg_loss, valid_mask) self._loss = self._train_loss = train_loss self._valid_loss = valid_loss self._train_evals = tf.reduce_sum( train_mask ) self._valid_evals = tf.reduce_sum( valid_mask ) self._train_cst = tf.reduce_sum( train_loss ) self._valid_cst = tf.reduce_sum( valid_loss ) self._predictions = tf.nn.softmax(logits) self._class_predictions = tf.one_hot(tf.argmax(self._predictions,1), num_outputs, axis=-1) accy = tf.multiply(self._class_predictions, targets) train_accy = tf.multiply(accy,tf.reshape(train_mask, shape=[batch_size,1])) valid_accy = tf.multiply(accy,tf.reshape(valid_mask, shape=[batch_size,1])) self._train_accy = tf.reduce_sum( train_accy ) self._valid_accy = tf.reduce_sum( valid_accy ) self._cost = self._train_cst self._accy = self._train_accy self._evals = self._train_evals self._batch_cst = self._train_cst / (self._train_evals + 1.0) # here is the learning part of the graph tvars = tf.trainable_variables() grads = tf.gradients(self._batch_cst,tvars) if (max_grad_norm > 0): grads, _ = tf.clip_by_global_norm(grads,max_grad_norm) self._lr = tf.Variable(0.0, trainable=False) optim = None if optimizer == 'gd': optim = tf.train.GradientDescentOptimizer(self._lr) elif optimizer == 'adagrad': optim = tf.train.AdagradOptimizer(self._lr) elif optimizer == 'adam': optim = tf.train.AdamOptimizer(self._lr) elif optimizer == 'mo': optim = tf.train.MomentumOptimizer(self._lr) else: raise RuntimeError("Unknown optimizer = %s"%optimizer) self._train_op = optim.apply_gradients(zip(grads, tvars)) def _input_dropout(self,inputs): # This implementation of dropout dropouts an entire feature along the time dim random_tensor = self._keep_prob random_tensor += random_ops.random_uniform([self._batch_size,self._num_inputs], dtype=inputs.dtype) random_tensor = tf.tile(random_tensor,[1,self._num_unrollings]) binary_tensor = math_ops.floor(random_tensor) ret = math_ops.div(inputs, self._keep_prob) * binary_tensor ret.set_shape(inputs.get_shape()) return ret
metacloud/molecule
test/unit/model/v2/test_schema.py
Python
mit
1,235
0
#  Copyright (c) 2015-2018 Cisco Systems, Inc.
#
#  Permission is hereby granted, free of charge, to any person obtaining a copy
#  of this software and associated documentation files (the "Software"), to
#  deal in the Software without restriction, including without limitation the
#  rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
#  sell copies of the Software, and to permit persons to whom the Software is
#  furnished to do so, subject to the following conditions:
#
#  The above copyright notice and this permission notice shall be included in
#  all copies or substantial portions of the Software.
#
#  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
#  FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
#  DEALINGS IN THE SOFTWARE.

from molecule.model import schema_v2


def test_base_config(_config):
    assert {} == schema_v2.validate(_config)
jittat/ku-eng-direct-admission
application/views/__init__.py
Python
agpl-3.0
25
0
from form_views import *
task123/AutoTT
scriptsForTesting/steeringTest.py
Python
mit
823
0.001215
import TCP
import Steering
import Motor
import time

"""
add
print "right speed: " + str(right_speed)
print "left speed: " + str(left_speed)
under
def receive_message(self, type, message):
    if (type == "Gyro" and self.stop == False):
and comment out
self.motors.set_right_speed(right_speed)
self.motors.set_left_speed(left_speed)
to test with driving the Motor class
"""

autoTTCommunication = TCP.AutoTTCommunication(12345)
trip_meter = Motor.TripMeter()
motors = Motor.Motor(trip_meter)
steering = Steering.SteeringWithIOSGyro(motors)
autoTTCommunication.gyro_recv = steering
autoTTCommunication.stop_cont_recv = steering

autoTTCommunication.send_message("Gyro", "0.2")
time.sleep(2)
autoTTCommunication.send_message("Gyro", "0.2")
autoTTCommunication.send_message("Gyro", "0.2")

while True:
    time.sleep(5)
richardtran415/pymatgen
pymatgen/transformations/advanced_transformations.py
Python
mit
88,505
0.001955
# coding: utf-8 # Copyright (c) Pymatgen Development Team. # Distributed under the terms of the MIT License. """ This module implements more advanced transformations. """ import logging import math import warnings from fractions import Fraction from itertools import groupby, product from math import gcd from string import ascii_lowercase from typing import Dict, Optional import numpy as np from monty.dev import requires from monty.fractions import lcm from monty.json import MSONable from pymatgen.analysis.adsorption import AdsorbateSiteFinder from pymatgen.analysis.bond_valence import BVAnalyzer from pymatgen.analysis.energy_models import SymmetryModel from pymatgen.analysis.ewald import EwaldSummation from pymatgen.analysis.gb.grain import GrainBoundaryGenerator from pymatgen.analysis.local_env import MinimumDistanceNN from pymatgen.analysis.structure_matcher import SpinComparator, StructureMatcher from pymatgen.analysis.structure_prediction.substitution_probability import ( SubstitutionPredictor, ) from pymatgen.command_line.enumlib_caller import EnumError, EnumlibAdaptor from pymatgen.command_line.mcsqs_caller import run_mcsqs from pymatgen.core.periodic_table import DummySpecies, Element, Species, get_el_sp from pymatgen.core.structure import Structure from pymatgen.core.surface import SlabGenerator from pymatgen.electronic_structure.core import Spin from pymatgen.io.ase import AseAtomsAdaptor from pymatgen.symmetry.analyzer import SpacegroupAnalyzer from pymatgen.transformations.standard_transformations import ( OrderDisorderedStructureTransformation, SubstitutionTransformation, SupercellTransformation, ) from pymatgen.transformations.transformation_abc import AbstractTransformation try: import hiphive # type: ignore except ImportError: hiphive = None __author__ = "Shyue Ping Ong, Stephen Dacek, Anubhav Jain, Matthew Horton, " "Alex Ganose" logger = logging.getLogger(__name__) class ChargeBalanceTransformation(AbstractTransformation): """ This is a transformation that disorders a structure to make it charge balanced, given an oxidation state-decorated structure. """ def __init__(self, charge_balance_sp): """ Args: charge_balance_sp: specie to add or remove. Currently only removal is supported """ self.charge_balance_sp = str(charge_balance_sp) def apply_transformation(self, structure): """ Applies the transformation. Args: structure: Input Structure Returns: Charge balanced structure. """ charge = structure.charge specie = get_el_sp(self.charge_balance_sp) num_to_remove = charge / specie.oxi_state num_in_structure = structure.composition[specie] removal_fraction = num_to_remove / num_in_structure if removal_fraction < 0: raise ValueError("addition of specie not yet supported by " "ChargeBalanceTransformation") trans = SubstitutionTransformation({self.charge_balance_sp: {self.charge_balance_sp: 1 - removal_fraction}}) return trans.apply_transformation(structure) def __str__(self): return "Charge Balance Transformation : " + "Species to remove = {}".format(str(self.charge_balance_sp)) def __repr__(self): return self.__str__() @property def inverse(self): """Returns: None""" return None @property def is_one_to_many(self): """Returns: False""" return False class SuperTransformation(AbstractTransformation): """ This is a transformation that is inherently one-to-many. It is constructed from a list of transformations and returns one structure for each transformation. The primary use for this class is extending a transmuter object. 
""" def __init__(self, transformations, nstructures_per_trans=1): """ Args: transformations ([transformations]): List of transformations to apply to a structure. One transformation is applied to each output structure. nstructures_per_trans (int): If the transformations are one-to-many and, nstructures_per_trans structures from each transformation are added to the full list. Defaults to 1, i.e., only best structure. """ self._transformations = transformations self.nstructures_per_trans = nstructures_per_trans def apply_transformation(self, structure, return_ranked_list=False): """ Applies the transformation. Args: structure: Input Structure return_ranked_list: Number of structures to return. Returns: Structures with all transformations applied. """ if not return_ranked_list: raise ValueError("SuperTransformation has no single best structure" " output. Must use return_ranked_list") structures = [] for t in self._transformations: if t.is_one_to_many: for d in t.apply_transformation(structure, return_ranked_list=self.nstructures_per_trans): d["transformation"] = t structures.append(d) else: structures.append( { "transformation": t, "structure": t.apply_transformation(structure), } ) return structures def __str__(self): return "Super Transformation : Transformations = " + "{}".format( " ".join([str(t) for t in self._transformations]) ) def __repr__(self): return self.__str__() @property def inverse(self): """Returns: None""" return None @property def is_one_to_many(self): """Returns: True""" return True class MultipleSubstitutionTransformation: """ Performs multiple substitutions on a structure. For example, can do a fractional replacement of Ge in LiGePS with a list of species, creating one structure for each substitution. Ordering is done using a dummy element so only one ordering must be done per substitution oxidation state. Charge balancing of the structure is optionally performed. .. note:: There are no checks to make sure that removal fractions are possible and rounding may occur. Currently charge balancing only works for removal of species. """ def __init__( self, sp_to_replace, r_fraction, substitution_d
ict, charge_balance_species=None, order=True, ): """ Performs multiple fractional substitutions on a transmuter. Args: sp_to_replace: species to be replaced r_fraction: fraction of that specie to replace substitution_dict: dictionary of the format {2: ["Mg", "Ti", "V", "As", "Cr", "Ta", "N", "Nb"], 3: ["Ru", "Fe", "Co", "Ce", "As", "Cr", "Ta", "N", "Nb"], 4: ["Ru",
"V", "Cr", "Ta", "N", "Nb"], 5: ["Ru", "W", "Mn"] } The number is the charge used for each of the list of elements (an element can be present in multiple lists) charge_balance_species: If specified, will balance the charge on the structure using that specie. """ self.sp_to_replace = sp_to_replace self.r_fraction = r_fraction self.substitution_dict = substitution_dict self.charge_balance_species = charge_balance_species self.order = order def apply_transformation(self, structure, return_ranked_list=False): """ Applies the transformation. Args: structure: Input Structure return_ranked_list: Number of structures to return. Returns: Structures with all substitutions applied. """ if not return_ranked_list: raise ValueError( "MultipleSubstitutionTransformation has n
Learn-Android-app/buck
src/com/facebook/buck/json/buck_test.py
Python
apache-2.0
11,264
0.00071
from buck import format_watchman_query_params, glob_internal, LazyBuildEnvPartial from buck import subdir_glob, BuildFileContext from pathlib import Path, PurePosixPath, PureWindowsPath import os import shutil import tempfile import unittest class FakePathMixin(object): def glob(self, pattern): return self.glob_results.get(pattern) def is_file(self): return True class FakePosixPath(FakePathMixin, PurePosixPath): pass class FakeWindowsPath(FakePathMixin, PureWindowsPath): pass def fake_path(fake_path_class, path, glob_results={}): # Path does magic in __new__ with its arg
s; it's hard to add more without # changing that class. So we use a wrapper function to diddle with # F
akePath's members. result = fake_path_class(path) result.glob_results = {} for pattern, paths in glob_results.iteritems(): result.glob_results[pattern] = [result / fake_path_class(p) for p in paths] return result class TestBuckPlatformBase(object): def test_glob_includes_simple(self): search_base = self.fake_path( 'foo', glob_results={'*.java': ['A.java', 'B.java']}) self.assertGlobMatches( ['A.java', 'B.java'], glob_internal( includes=['*.java'], excludes=[], include_dotfiles=False, search_base=search_base)) def test_glob_includes_sort(self): search_base = self.fake_path( 'foo', glob_results={'*.java': ['A.java', 'E.java', 'D.java', 'C.java', 'B.java']}) self.assertGlobMatches( ['A.java', 'B.java', 'C.java', 'D.java', 'E.java'], glob_internal( includes=['*.java'], excludes=[], include_dotfiles=False, search_base=search_base)) def test_glob_includes_multi(self): search_base = self.fake_path( 'foo', glob_results={ 'bar/*.java': ['bar/A.java', 'bar/B.java'], 'baz/*.java': ['baz/C.java', 'baz/D.java'], }) self.assertGlobMatches( ['bar/A.java', 'bar/B.java', 'baz/C.java', 'baz/D.java'], glob_internal( includes=['bar/*.java', 'baz/*.java'], excludes=[], include_dotfiles=False, search_base=search_base)) def test_glob_excludes_double_star(self): search_base = self.fake_path( 'foo', glob_results={ '**/*.java': ['A.java', 'B.java', 'Test.java'], }) self.assertGlobMatches( ['A.java', 'B.java'], glob_internal( includes=['**/*.java'], excludes=['**/*Test.java'], include_dotfiles=False, search_base=search_base)) def test_glob_excludes_multi(self): search_base = self.fake_path( 'foo', glob_results={ 'bar/*.java': ['bar/A.java', 'bar/B.java'], 'baz/*.java': ['baz/C.java', 'baz/D.java'], }) self.assertGlobMatches( ['bar/B.java', 'baz/D.java'], glob_internal( includes=['bar/*.java', 'baz/*.java'], excludes=['*/[AC].java'], include_dotfiles=False, search_base=search_base)) def test_subdir_glob(self): build_env = BuildFileContext(None, None, None, None, None, None, None, None) search_base = self.fake_path( 'foo', glob_results={ 'lib/bar/*.h': ['lib/bar/A.h', 'lib/bar/B.h'], 'lib/baz/*.h': ['lib/baz/C.h', 'lib/baz/D.h'], }) self.assertGlobMatches( { 'bar/B.h': 'lib/bar/B.h', 'bar/A.h': 'lib/bar/A.h', 'baz/D.h': 'lib/baz/D.h', 'baz/C.h': 'lib/baz/C.h', }, subdir_glob([ ('lib', 'bar/*.h'), ('lib', 'baz/*.h')], build_env=build_env, search_base=search_base)) def test_subdir_glob_with_prefix(self): build_env = BuildFileContext(None, None, None, None, None, None, None, None) search_base = self.fake_path( 'foo', glob_results={ 'lib/bar/*.h': ['lib/bar/A.h', 'lib/bar/B.h'], }) self.assertGlobMatches( { 'Prefix/bar/B.h': 'lib/bar/B.h', 'Prefix/bar/A.h': 'lib/bar/A.h', }, subdir_glob([('lib', 'bar/*.h')], prefix='Prefix', build_env=build_env, search_base=search_base)) def test_glob_excludes_relative(self): search_base = self.fake_path( 'foo', glob_results={ '**/*.java': ['foo/A.java', 'foo/bar/B.java', 'bar/C.java'], }) self.assertGlobMatches( ['foo/A.java', 'foo/bar/B.java'], glob_internal( includes=['**/*.java'], excludes=['bar/*.java'], include_dotfiles=False, search_base=search_base)) def test_glob_includes_skips_dotfiles(self): search_base = self.fake_path( 'foo', glob_results={'*.java': ['A.java', '.B.java']}) self.assertGlobMatches( ['A.java'], glob_internal( includes=['*.java'], excludes=[], include_dotfiles=False, search_base=search_base)) def test_glob_includes_does_not_skip_dotfiles_if_include_dotfiles(self): search_base = self.fake_path( 'foo', glob_results={'*.java': ['A.java', '.B.java']}) 
self.assertGlobMatches( ['.B.java', 'A.java'], glob_internal( includes=['*.java'], excludes=[], include_dotfiles=True, search_base=search_base)) def test_lazy_build_env_partial(self): def cobol_binary( name, deps=[], build_env=None): return (name, deps, build_env) testLazy = LazyBuildEnvPartial(cobol_binary) testLazy.build_env = {} self.assertEqual( ('HAL', [1, 2, 3], {}), testLazy.invoke(name='HAL', deps=[1, 2, 3])) testLazy.build_env = {'abc': 789} self.assertEqual( ('HAL', [1, 2, 3], {'abc': 789}), testLazy.invoke(name='HAL', deps=[1, 2, 3])) def test_explicit_exclude_with_file_separator_excludes(self): search_base = self.fake_path( 'foo', glob_results={'java/**/*.java': ['java/Include.java', 'java/Exclude.java']}) self.assertGlobMatches( ['java/Include.java'], glob_internal( includes=['java/**/*.java'], excludes=['java/Exclude.java'], include_dotfiles=False, search_base=search_base)) class TestBuckPosix(TestBuckPlatformBase, unittest.TestCase): @staticmethod def fake_path(*args, **kwargs): return fake_path(FakePosixPath, *args, **kwargs) def assertGlobMatches(self, expected, actual): self.assertEqual(expected, actual) class TestBuckWindows(TestBuckPlatformBase, unittest.TestCase): @staticmethod def fake_path(*args, **kwargs): return fake_path(FakeWindowsPath, *args, **kwargs) def assertGlobMatches(self, expected, actual): # Fix the path separator to make test writing easier fixed_expected = None if isinstance(expected, list): fixed_expected = [] for path in expected: fixed_expected.append(path.replace('/', '\\')) else: fixed_expected = {} for key, value in expected.items(): fixed_expected.update({key.replace('/', '\\'): value.replace('/', '\\')}) self.assertEqual(fixed_expected, actual) c
cpacia/Subspace
subspace/__init__.py
Python
mit
179
0.005587
""" Subspace is a modified implementation of the
Kademlia protocol for `Twist
ed <http://twistedmatrix.com>`_. """ version_info = (0, 2) version = '.'.join(map(str, version_info))
markofu/security_monkey
security_monkey/watchers/rds/rds_security_group.py
Python
apache-2.0
5,590
0.001073
# Copyright 2016 Bridgewater Associates # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ .. module: security_monkey.watchers.rds.rds_security_group :platform: Unix .. version:: $$VERSION$$ .. moduleauthor:: Bridgewater OSS <opensource@bwater.com> """ from security_monkey.decorators import record_exception, iter_account_region from security_monkey.watcher import Watcher from security_monkey.watcher import ChangeItem from security_monkey import app class RDSSecurityGroup(Watcher): index = 'rdssecuritygroup' i_am_singular = 'RDS Security Group' i_am_plural = 'RDS Security Groups' def __init__(self, accounts=None, debug=False): super(RDSSecurityGroup, self).__init__(accounts=accounts, debug=debug) @record_exception() def get_all_dbsecurity_groups(self, **kwargs): from security_monkey.common.sts_connect import connect sgs = [] rds = connect(kwargs['account_name'], 'boto3.rds.client', region=kwargs['region'], assumed_role=kwargs['assumed_role']) marker = None while True: if marker: response = self.wrap_aws_rate_limited_call( rds.describe_db_security_groups, Marker=marker) else: response = self.wrap_aws_rate_limited_call( rds.describe_db_security_groups) sgs.extend(response.get('DBSecurityGroups', [])) if response.get('Marker'): marker = response.get('Marker') else: break return sgs def slurp(self): """ :returns: item_list - list of RDS Security Groups. :returns: exception_map - A
dict where the keys are a tuple containing the location of the exception and the value is the actual exception """ self.prep_for_slurp() @iter_account_region(index=self.index, accounts=self.accounts, service_name='rds') def slurp_items(**kwargs): item_list = [] exception_map = {} kwargs['exception_map
'] = exception_map app.logger.debug("Checking {}/{}/{}".format(self.index, kwargs['account_name'], kwargs['region'])) sgs = self.get_all_dbsecurity_groups(**kwargs) if sgs: app.logger.debug("Found {} {}".format( len(sgs), self.i_am_plural)) for sg in sgs: name = sg.get('DBSecurityGroupName') if self.check_ignore_list(name): continue vpc_id = None if 'VpcId' in sg: vpc_id = sg.get('VpcId') name = "{} (in {})".format(name, vpc_id) item_config = { "name": name, "description": sg.get('DBSecurityGroupDescription'), "owner_id": sg.get('OwnerId'), "region": kwargs['region'], "ec2_groups": [], "ip_ranges": [], "vpc_id": vpc_id } for ipr in sg.get('IPRanges'): ipr_config = { "cidr_ip": ipr.get('CIDRIP'), "status": ipr.get('Status'), } item_config["ip_ranges"].append(ipr_config) item_config["ip_ranges"] = sorted(item_config["ip_ranges"]) for ec2_sg in sg.get('EC2SecurityGroups'): ec2sg_config = { "name": ec2_sg.get('EC2SecurityGroupName'), "owner_id": ec2_sg.get('EC2SecurityGroupOwnerId'), "Status": ec2_sg.get('Status'), } item_config["ec2_groups"].append(ec2sg_config) item_config["ec2_groups"] = sorted( item_config["ec2_groups"]) arn = sg.get('DBSecurityGroupArn') item_config['arn'] = arn item = RDSSecurityGroupItem(region=kwargs['region'], account=kwargs['account_name'], name=name, arn=arn, config=item_config, source_watcher=self) item_list.append(item) return item_list, exception_map return slurp_items() class RDSSecurityGroupItem(ChangeItem): def __init__(self, region=None, account=None, name=None, arn=None, config=None, source_watcher=None): super(RDSSecurityGroupItem, self).__init__( index=RDSSecurityGroup.index, region=region, account=account, name=name, arn=arn, new_config=config if config else {}, source_watcher=source_watcher)
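# The while/Marker loop in get_all_dbsecurity_groups() above is the generic
# AWS list-pagination pattern. A minimal standalone sketch of the same idea,
# assuming a plain boto3 client instead of security_monkey's connect()
# helper and rate-limit wrapper:
import boto3

def iter_db_security_groups(region='us-east-1'):
    rds = boto3.client('rds', region_name=region)
    marker = None
    while True:
        kwargs = {'Marker': marker} if marker else {}
        response = rds.describe_db_security_groups(**kwargs)
        for sg in response.get('DBSecurityGroups', []):
            yield sg
        marker = response.get('Marker')
        if not marker:
            break

# boto3 can also hide the loop entirely via
# rds.get_paginator('describe_db_security_groups').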
yw-fang/readingnotes
machine-learning/Matthes-crash-course/chapt09/scripts/user_03.py
Python
apache-2.0
1,263
0.009516
#!/usr/bin/env python # -*- coding: utf-8 -*- __author__ = 'Yue-Wen FANG' __maintainer__ = "Yue-Wen FANG" __email__ = 'fyuewen@gmail.com' __license__ = 'Apache License 2.0' __creation_date__ = 'Dec. 28, 2018' """ 9-3. Users: Make a class called User. Create two attributes called first_name and last_name, and then create several other attributes that are typically stored in a user profile. Make a method called describe_user() that prints a summary of the user’s information. Make another method called greet_user() that prints a personalized greeting to the user. Create several instances representing different users, and call both methods for each user. """ class User: """ a class for User """ def __init__(self, first_name, last_name, gender, age, email='f@cn'): self.name = first_name + ' ' + last_name sel
f.gender = gender self.age = age self.email = email # if no email is sp
ecified, the default will be used def describe_user(self): print('The profile of ' + self.name + ":") print('Gender: ', self.gender) print('Age: ', self.age) print('Email: ', self.email) Tiantian_Li = User('Tiantian', 'Li', 'Male', '20', email='Li@cn') Tiantian_Li.describe_user()
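# The exercise text above also asks for a greet_user() method, which the
# snippet never defines. A minimal sketch of what it could look like, with
# a second instance for illustration (Jane Doe and GreetingUser are
# invented names, not part of the original):
class GreetingUser(User):
    def greet_user(self):
        # personalized greeting built from the combined name set in __init__
        print('Hello, ' + self.name + '!')

jane = GreetingUser('Jane', 'Doe', 'Female', '28')
jane.describe_user()
jane.greet_user()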
smallyear/linuxLearn
salt/salt/states/boto_vpc.py
Python
apache-2.0
30,718
0.002637
# -*- coding: utf-8 -*- ''' Manage VPCs ================= .. versionadded:: 2015.8.0 Create and destroy VPCs. Be aware that this interacts with Amazon's services, and so may incur charges. This module uses ``boto``, which can be installed via package, or pip. This module accepts explicit vpc credentials but can also utilize IAM roles assigned to the instance through Instance Profiles. Dynamic credentials are then automatically obtained from AWS API and no further configuration is necessary. More information available `here <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html>`_. If IAM roles are not used you need to specify them either in a pillar file or in the minion's config file: .. code-block:: yaml vpc.keyid: GKTADJGHEIQSXMKKRBJ08H vpc.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs It's also possible to specify ``key``, ``keyid`` and ``region`` via a profile, either passed in as a dict, or as a string to pull from pillars or minion config: .. code-block:: yaml myprofile: keyid: GKTADJGHEIQSXMKKRBJ08H key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs region: us-east-1 .. code-block:: yaml Ensure VPC exists: boto_vpc.present: - name: myvpc - cidr_block: 10.10.11.0/24 - dns_hostnames: True - region: us-east-1 - keyid: GKTADJGHEIQSXMKKRBJ08H - key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs Ensure subnet exists: boto_vpc.subnet_present: - name: mysubnet - vpc_id: vpc-123456 - cidr_block: 10.0.0.0/16 - region: us-east-1 - keyid: GKTADJGHEIQSXMKKRBJ08H - key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs Ensure internet gateway exists: boto_vpc.internet_gateway_present: - name: myigw - vpc_name: myvpc - region: us-east-1 - keyid: GKTADJGHEIQSXMKKRBJ08H - key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs Ensure route table exists: boto_vpc.route_table_present: - name: my_route_table - vpc_id: vpc-123456 - routes: - destination_cidr_block: 0.0.0.0/0 instance_id: i-123456 interface_id: eni-123456 - subnet_names: - subnet1 - subnet2 - region: us-east-1 - keyid: GKTADJGHEIQSXMKKRBJ08H - key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs ''' # Import Python Libs from __future__ import absolute_import import logging # Import Salt Libs import salt.utils.dictupdate as dictupdate log = logging.getLogger(__name__) def __virtual__(): ''' Only load if boto is available. ''' return 'boto_vpc' if 'boto_vpc.exists' in __salt__ else False def present(name, cidr_block, instance_tenancy=None, dns_support=None, dns_hostnames=None, tags=None, region=None, key=None, keyid=None, profile=None): ''' Ensure VPC exists. name Name of the VPC. cidr_block The range of IPs in CIDR format, for example: 10.0.0.0/24. Block size must be between /16 and /28 netmask. instance_tenancy Instances launched in this VPC will run on single-tenant or dedicated hardware. dns_support Indicates whether the DNS resolution is supported for the VPC. dns_hostnames Indicates whether the instances launched in the VPC get DNS hostnames. tags A list of tags. region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid.
''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {} } r = __salt__['boto_vpc.exists'](name=name, tags=tags, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: ret['result'] = False ret['comment'] = 'Failed to create VPC: {0}.'.format(r['error']['message']) return ret if not r.get('exists'): if __opts__['test']: ret['comment'] = 'VPC {0} is set to be created.'.format(name) ret['result'] = None return ret r = __salt__['boto_vpc.create'](cidr_block, instance_tenancy, name, dns_support, dns_hostnames, tags, region, key, keyid, profile) if not r.get('created'): ret['result'] = False ret['c
omment'] = 'Failed to create VPC: {0}.'.format(r['error']['message']) return ret _describe = __salt__['boto_vpc.describe'](r['id'], region=region, key=key, keyid=keyid, profile=profile) ret['changes']['old'] = {'vpc': None} ret['changes']['new'] = _describe ret['comment'] = 'VPC {0} created.'.format(name) return ret ret['comment'] = 'VPC present.' return ret def absent(name, tags=None, region=N
one, key=None, keyid=None, profile=None): ''' Ensure VPC with passed properties is absent. name Name of the VPC. tags A list of tags. All tags must match. region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {} } r = __salt__['boto_vpc.get_id'](name=name, tags=tags, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: ret['result'] = False ret['comment'] = 'Failed to delete VPC: {0}.'.format(r['error']['message']) return ret _id = r.get('id') if not _id: ret['comment'] = '{0} VPC does not exist.'.format(name) return ret if __opts__['test']: ret['comment'] = 'VPC {0} is set to be removed.'.format(name) ret['result'] = None return ret r = __salt__['boto_vpc.delete'](name=name, tags=tags, region=region, key=key, keyid=keyid, profile=profile) if not r['deleted']: ret['result'] = False ret['comment'] = 'Failed to delete VPC: {0}.'.format(r['error']['message']) return ret ret['changes']['old'] = {'vpc': _id} ret['changes']['new'] = {'vpc': None} ret['comment'] = 'VPC {0} deleted.'.format(name) return ret def subnet_present(name, cidr_block, vpc_name=None, vpc_id=None, availability_zone=None, tags=None, region=None, key=None, keyid=None, profile=None): ''' Ensure a subnet exists. name Name of the subnet. cidr_block The range of IPs for the subnet, in CIDR format. For example: 10.0.0.0/24. Block size must be between /16 and /28 netmask. vpc_name Name of the VPC in which the subnet should be placed. Either vpc_name or vpc_id must be provided. vpc_id Id of the VPC in which the subnet should be placed. Either vpc_name or vpc_id must be provided. availability_zone AZ in which the subnet should be placed. tags A list of tags. region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {} } r = __salt__['boto_vpc.subnet_exists'](subnet_name=name, tags=tags,
SaschaMester/delicium
tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
Python
bsd-3-clause
6,825
0.016703
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from common.chrome_proxy_benchmark import ChromeProxyBenchmark from integration_tests import chrome_proxy_measurements as measurements from integration_tests import chrome_proxy_pagesets as pagesets from telemetry import benchmark NON_SAFE_BROWSING_BROWSERS = ['mac', 'linux', 'win', 'chromeos', 'android-webview', 'android-webview-shell'] class ChromeProxyClientVersion(ChromeProxyBenchmark): tag = 'client_version' test = measurements.ChromeProxyClientVersion page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.client_version.synthetic' class ChromeProxyClientType(ChromeProxyBenchmark): tag = 'client_type' test = measurements.ChromeProxyClientType page_set = pagesets.ClientTypePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.client_type.client_type' class ChromeProxyLoFi(ChromeProxyBenchmark): tag = 'lo_fi' test = measurements.ChromeProxyLoFi page_set = pagesets.LoFiPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.lo_fi.lo_fi' class ChromeProxyExpDirective(ChromeProxyBenchmark): tag = 'exp_directive' test = measurements.ChromeProxyExpDirective page_set = pagesets.ExpDirectivePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.exp_directive.exp_directive' class ChromeProxyPassThrough(ChromeProxyBenchmark): tag = 'pass_through' test = measurements.ChromeProxyPassThrough page_set = pagesets.PassThroughPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.pass_through.pass_through' class ChromeProxyBypass(ChromeProxyBenchmark): tag = 'bypass' test = measurements.ChromeProxyBypass page_set = pagesets.BypassPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.bypass.bypass' class ChromeProxyCorsBypass(ChromeProxyBenchmark): tag = 'bypass' test = measurements.ChromeProxyCorsBypass page_set = pagesets.CorsBypassPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.bypass.corsbypass' class ChromeProxyBlockOnce(ChromeProxyBenchmark): tag = 'block_once' test = measurements.ChromeProxyBlockOnce page_set = pagesets.BlockOncePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.block_once.block_once' @benchmark.Disabled(*NON_SAFE_BROWSING_BROWSERS) # Safebrowsing is enabled for Android and iOS. class ChromeProxySafeBrowsingOn(ChromeProxyBenchmark): tag = 'safebrowsing_on' test = measurements.ChromeProxySafebrowsingOn # Override CreateStorySet so that we can instantiate SafebrowsingPageSet # with a non default param. def CreateStorySet(self, options): del options # unused return pagesets.SafebrowsingPageSet(expect_timeout=True) @classmethod def Name(cls): return 'chrome_proxy_benchmark.safebrowsing_on.safebrowsing' @benchmark.Enabled(*NON_SAFE_BROWSING_BROWSERS) # Safebrowsing is switched off for Android Webview and all desktop platforms. 
class ChromeProxySafeBrowsingOff(ChromeProxyBenchmark): tag = 'safebrowsing_off' test = measurements.ChromeProxySafebrowsingOff page_set = pagesets.SafebrowsingPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.safebrowsing_off.safebrowsing' class ChromeProxyHTTPFallbackProbeURL(ChromeProxyBenchmark): tag = 'fallback_probe' test = measurements.ChromeProxyHTTPFallbackProbeURL page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.fallback_probe.synthetic' class ChromeProxyHTTPFallbackViaHeader(ChromeProxyBenchmark): tag = 'fallback_viaheader' test = measurements.ChromeProxyHTTPFallbackViaHeader page_set = pagesets.FallbackViaHeaderPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.fallback_viaheader.fallback_viaheader' class ChromeProxyHTTPToDirectFallback(ChromeProxyBenchmark): tag = 'http_to_direct_fallback' test = measurements.ChromeProxyHTTPToDirectFallback page_set = pagesets.HTTPToDirectFallbackPageSet @classmethod def Name(cls): return ('chrome_proxy_benchmark.http_to_direct_fallback.' 'http_to_direct_fallback') class ChromeProxyReenableAfterBypass(ChromeProxyBenchmark): tag = 'reenable_after_bypass' test = measurements.ChromeProxyReenableAfterBypass page_set = pagesets.ReenableAfterBypassPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.reenable_after_bypass.reenable_after_bypass' class ChromeProxySmoke(ChromeProxyBenchmark): tag = 'smoke' test = measurements.ChromeProxySmoke page_set = pagesets.SmokePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.smoke.smoke' class ChromeProxyClientConfig(ChromeProxyBenchmark): tag = 'client_config' test = measurements.ChromeProxyClientConfig page_set = pagesets.SyntheticPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.client_config.synthetic' @benchmark.Enabled('desktop') class ChromeProxyVideoDirect(benchmark.Benchmark): tag = 'video' test = measurements.ChromeProxyVideoValidation page_set = pagesets.VideoDirectPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.video.direct' @benchmark.Enabled('desktop') class ChromeProxyVideoProxied(benchmark.Benchmark): tag = 'video' test = measurements.ChromeProxyVideoValidation page_set = pagesets.VideoProxiedPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.video.proxied' @benchmark.Enabled('desktop') class ChromeProxyVideoCompare(benchmark.Benchmark): """Comparison of direct and proxied video fetches. This benchmark runs the ChromeProxyVideoDirect and ChromeProxyVideoProxied benchmarks, then compares their results. """ tag = 'video' test = measurements.ChromeProxyVideoValidation
page_set = pagesets.VideoComparePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.video.compare' @benchmark.Enabled('desktop') class ChromeProxyVideoFrames(benchmark.Benchmark): """Check for video frames similar to original video.""" tag = 'video' test = measurements.ChromeProxyInstrumentedVideoValidation page_set = pagesets.VideoFramePageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.video.frames' @benchmark.Enab
led('desktop') class ChromeProxyVideoAudio(benchmark.Benchmark): """Check that audio is similar to original video.""" tag = 'video' test = measurements.ChromeProxyInstrumentedVideoValidation page_set = pagesets.VideoAudioPageSet @classmethod def Name(cls): return 'chrome_proxy_benchmark.video.audio'
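# Every benchmark above follows the same recipe: subclass, bind a tag, a
# measurement and a page set, and report a dotted Name() to the harness.
# A hedged sketch of adding one more in that style -- ChromeProxyHeaderCheck
# is a made-up name that simply reuses the existing smoke measurement and
# page set; how the harness discovers it is not shown here:
class ChromeProxyHeaderCheck(ChromeProxyBenchmark):
  tag = 'header_check'
  test = measurements.ChromeProxySmoke
  page_set = pagesets.SmokePageSet

  @classmethod
  def Name(cls):
    return 'chrome_proxy_benchmark.header_check.smoke'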
ThomasMiconi/nupic.research
projects/sequence_prediction/discrete_sequences/plotPerturbExperiment.py
Python
agpl-3.0
3,263
0.005516
#!/usr/bin/env python # ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2015, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate licen
se for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is
distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- """ Plot sequence prediction & perturbation experiment result """ import os from matplotlib import pyplot as plt import matplotlib as mpl from plot import plotAccuracy from plot import computeAccuracy from plot import readExperiment mpl.rcParams['pdf.fonttype'] = 42 plt.ion() plt.close('all') if __name__ == '__main__': experiments = [] experiments.append(os.path.join("tdnn/results", "high-order-distributed-random-perturbed-long-window/seed0.0learning_window3000.0", "0.log")) experiments.append(os.path.join("tm/results", "high-order-distributed-random-perturbed/seed0.0", "0.log")) experiments.append(os.path.join("lstm/results", "high-order-distributed-random-perturbed", "seed0.0learning_window3000.0", "0.log")) # experiments.append(os.path.join("tdnn/results", # "high-order-distributed-random-perturbed/seed0.0learning_window3000.0", # "0.log")) # experiments.append(os.path.join("tdnn/results", # "high-order-distributed-random-perturbed-short-window/seed0.0learning_window3000.0", # "0.log")) for experiment in experiments: data = readExperiment(experiment) (accuracy, x) = computeAccuracy(data['predictions'], data['truths'], data['iterations'], resets=data['resets'], randoms=data['randoms']) # perturbAt = data['sequenceCounter'][10000] plotAccuracy((accuracy, x), data['trains'], window=200, type=type, label='NoiseExperiment', hideTraining=True, lineSize=1.0) # plt.xlim([1200, 1750]) plt.xlabel('# of sequences seen') plt.axvline(x=10000, color='k') plt.legend(['HTM', 'LSTM-3000', 'TDNN'], loc=4) plt.savefig('./result/model_performance_high_order_prediction.pdf')
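# plotAccuracy() above is called with window=200, i.e. the accuracy curve is
# smoothed over a sliding window before plotting. The real helper lives in
# plot.py and is not shown; a minimal numpy stand-in, under the assumption
# that the smoothing is a plain running mean, would be:
import numpy as np

def moving_average(values, window=200):
  # 'valid' keeps only the positions where the full window fits
  kernel = np.ones(window) / float(window)
  return np.convolve(values, kernel, mode='valid')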
DrKylstein/gfxtools
nesimage.py
Python
mit
5,828
0.007378
#! /usr/bin/python3 import sys import argparse import itertools from PIL import Image if __name__ == '__main__': argparser = argparse.ArgumentParser() argparser.add_ar
gument('input') argparser.add_argument('palette',type=argparse.FileType('wb')) argparser.add_argument('pattern',type=argparse.FileType('wb')) argparser.add_argument('name',type=argparse.FileType('wb')) argparser.add_argument('attribute',type=argparse.FileType('wb')) argparser.add_argument('-O','--optimize',acti
on='store_true') args = argparser.parse_args() image = Image.open(args.input) palette = set() tiles = set() attribute_colors = [] for ay in range(15): attribute_colors.append([]) for ax in range(16): colors = set() for ny in [ay*2+i for i in range(2)]: for nx in [ax*2+i for i in range(2)]: for py in [ny*8+i for i in range(8)]: for px in [nx*8+i for i in range(8)]: pixel = image.getpixel((px,py)) colors.add(pixel) if len(colors) > 4: print('Too many colors in attribute {},{}'.format(ax,ay)) exit(1) palette.add(frozenset(colors)) attribute_colors[ay].append(frozenset(colors)) final_palette = set() for line in palette: small = False for other_line in palette: if other_line > line: small = True break if not small: final_palette.add(line) palette = final_palette if len(palette) > 4: print('Too many colors in image, {} sets'.format(len(final_palette))) for line in palette: print(','.join(map(hex,sorted(list(line))))) exit(1) bgs = set() for line in palette: for color in line: valid = True for other_line in palette: if color not in other_line and len(other_line) >= 4: valid = False break if valid: bgs.add(color) if len(bgs) < 1: print('No shared background color!') exit(1) raw_palette = [] bg = sorted(list(bgs))[0] for line in palette: raw = [bg] for color in sorted(list(line)): if color not in raw: raw.append(color) raw_palette.append(raw) lines_p = tuple([tuple(map(lambda i: (bg,*i),itertools.permutations(tuple(line[1:])))) for line in raw_palette]) palette_options = tuple(itertools.product(*lines_p)) pattern_options = [] tile_maps = [] attribute_tables = [] i = 0 for palette_option in palette_options: patterns = set() tile_map = [[None for j in range(32)] for i in range(30)] attribute_table = [] for ay in range(15): attribute_row = [] for ax in range(16): colors = None for line in palette_option: if set(line) >= attribute_colors[ay][ax]: colors = line break attribute_row.append(palette_option.index(colors)) for ny in [ay*2+i for i in range(2)]: for nx in [ax*2+i for i in range(2)]: tile = [] for py in [ny*8+i for i in range(8)]: row = [] for px in [nx*8+i for i in range(8)]: pixel = image.getpixel((px,py)) row.append(colors.index(pixel)) tile.append(tuple(row)) patterns.add(tuple(tile)) tile_map[ny][nx] = tuple(tile) attribute_table.append(attribute_row) print('{} of {}: maybe {} tiles...'.format(i,len(palette_options),len(patterns)).ljust(40), end='\r') i += 1 pattern_options.append(patterns) tile_maps.append(tile_map) attribute_tables.append(attribute_table) if not args.optimize and len(patterns) <= 256: break best_index = pattern_options.index(sorted(pattern_options,key=len)[0]) print() print('{} tiles'.format(len(pattern_options[best_index]))) for l in range(3,-1,-1): print(l) if l < len(palette_options[best_index]): line = palette_options[best_index][l] if len(line) < 4: line += tuple([bg for i in range(4-len(line))]) args.palette.write(bytes(line[::-1])) else: args.palette.write(bytes([bg for i in range(4)])) pattern_table = tuple(pattern_options[best_index]) for row in tile_maps[best_index]: for tile in row: args.name.write(bytes([pattern_table.index(tile)])) for tile in pattern_table: for plane in range(2): for row in tile: bits = 0 for pixel in row: bits <<= 1 if pixel & (plane+1): bits |= 1 args.pattern.write(bytes([bits])) print(len(tile_maps[best_index]),len(tile_maps[best_index][0])) for ay in range(8): for ax in range(8): bits = attribute_tables[best_index][ay*2][ax*2] bits |= attribute_tables[best_index][ay*2][ax*2+1] << 2 if ay < 7: bits |= 
attribute_tables[best_index][ay*2+1][ax*2] << 4 bits |= attribute_tables[best_index][ay*2+1][ax*2+1] << 6 args.attribute.write(bytes([bits]))
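# The nested plane/row loops above implement the NES 2-bits-per-pixel tile
# format: plane 0 stores the low bit of each pixel, plane 1 the high bit,
# so every 8x8 tile becomes exactly 16 bytes. The same packing, extracted
# into a standalone sketch:
def encode_tile(tile):
    """tile: 8 rows of 8 palette indices (0-3) -> 16 bytes of CHR data."""
    out = bytearray()
    for plane in range(2):
        for row in tile:
            bits = 0
            for pixel in row:
                # shift this plane's bit for the pixel into the byte
                bits = (bits << 1) | ((pixel >> plane) & 1)
            out.append(bits)
    return bytes(out)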
ivannotes/luigi
test/namespace_test.py
Python
apache-2.0
1,739
0.00345
# -*- coding: utf-8 -*- # # Copyright 2012-2015 Spotify AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/license
s/LICE
NSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from helpers import unittest import luigi import namespace_test_helper # declares another Foo in namespace mynamespace class Foo(luigi.Task): pass class FooSubclass(Foo): pass class TestNamespacing(unittest.TestCase): def test_vanilla(self): self.assertEqual(Foo.task_namespace, None) self.assertEqual(Foo.task_family, "Foo") self.assertEqual(str(Foo()), "Foo()") self.assertEqual(FooSubclass.task_namespace, None) self.assertEqual(FooSubclass.task_family, "FooSubclass") self.assertEqual(str(FooSubclass()), "FooSubclass()") def test_namespace(self): self.assertEqual(namespace_test_helper.Foo.task_namespace, "mynamespace") self.assertEqual(namespace_test_helper.Foo.task_family, "mynamespace.Foo") self.assertEqual(str(namespace_test_helper.Foo(1)), "mynamespace.Foo(p=1)") self.assertEqual(namespace_test_helper.Bar.task_namespace, "othernamespace") self.assertEqual(namespace_test_helper.Bar.task_family, "othernamespace.Bar") self.assertEqual(str(namespace_test_helper.Bar(1)), "othernamespace.Bar(p=1)")
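# namespace_test_helper (not shown here) presumably assigns the namespaces
# at module level before declaring its tasks. In luigi of this vintage that
# is done with luigi.namespace(); a hedged sketch of what such a helper
# module could contain:
import luigi

luigi.namespace('mynamespace')

class Foo(luigi.Task):
    p = luigi.IntParameter()

luigi.namespace('othernamespace')

class Bar(luigi.Task):
    p = luigi.IntParameter()

luigi.namespace()  # reset so anything declared later has no namespace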
yoavfrancis/KeepItUp
KeepItUp/harddrive_enumerator.py
Python
mit
1,589
0.003776
import win32api import os import sys import subprocess import logging from itertools import izip_longest #itertools recipe def grouper(n, iterable, fillvalue=None): "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx" args = [iter(iterable)] * n return izip_longest(fillvalue=fillvalue, *args) def harddrive_enumerator(): """ Generator to get all (fixed) drive letters in the computer Returns tuples of (
DriveName, VolumeName) - e.g. ("D:", "Samsung Station") """ logger = logging.getLogger("keepitup") drive
sDetailedList = [] if sys.platform == "win32": logger.debug("Enumerating win32 hard drives") getDrivesProc = subprocess.Popen('wmic logicaldisk where drivetype=3 get name, VolumeName /format:list', shell=True, stdout=subprocess.PIPE) output, err = getDrivesProc.communicate() logger.debug("Enumerated hard drives output: %s", output) drivesDetailedList = output.split(os.linesep) elif sys.platform in ["linux2", "darwin"]: logger.debug("Enumerating linux/osx hard drives") raise NotImplementedError() else: logger.error("Cannot enumerate hard drives - unrecognized OS: %s", sys.platform) raise NotImplementedError() for name, volumeName in grouper(2, drivesDetailedList): if "Name=" in name and "VolumeName" in volumeName: name = name[len("Name="):].strip() volumeName = volumeName[len("VolumeName="):].strip() yield name, volumeName
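# The grouper() recipe above is what lets harddrive_enumerator() walk the
# wmic output two lines at a time, pairing each Name= line with the
# VolumeName= line that follows it. For example:
lines = ['Name=C:', 'VolumeName=System', 'Name=D:', 'VolumeName=Data']
pairs = list(grouper(2, lines))
# -> [('Name=C:', 'VolumeName=System'), ('Name=D:', 'VolumeName=Data')]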
oczkers/pdeo
tests/databases/test_sql.py
Python
gpl-3.0
407
0
#!/usr/bin/env py
thon # -*- coding: utf-8 -*- """Tests for pdeo.databases.sql""" import unittest # import responses from pdeo.databases import sql # if version_info[0] == 2: # utf8 for python2 # from codecs import open class PdeoDatabaseSqlTestCase(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def testEntryPoints(self):
sql.Database
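# testEntryPoints above is a bare smoke test: merely evaluating
# sql.Database raises AttributeError if the entry point disappears.
# An equivalent but more explicit form would be:
#     def testEntryPoints(self):
#         self.assertTrue(hasattr(sql, 'Database'))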
yugangw-msft/azure-cli
src/azure-cli/azure/cli/command_modules/resource/parameters.py
Python
mit
474
0.00211
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in
the project root for license information. # -------------------------------------------------------------------------------------------- from enum import Enum class TagUpdateOperation(str, Enum): merge = "M
erge" replace = "Replace" delete = "Delete"
lcrees/twoq
twoq/ordering.py
Python
bsd-3-clause
2,474
0
# -*- coding: utf-8 -*- '''twoq ordering mixins''' from threading import local from itertools import product, groupby from random import choice, shuffle, sample from twoq.support import zip_longest, imap class RandomMixin(local): '''random mixin''' def choice(self): '''random choice of/from incoming things''' with self._context(): return self._append(choice(list(self._iterable))) def sample(self, n): ''' random sampling drawn from `n` incoming things @param n: number of incoming things ''' with self._context(): return self._xtend(sample(list(self._iterable), n)) def shuffle(self): '''randomly order incoming things''' with self._conte
xt(): iterable = list(self._iterable) shuffle(iterable) return self._xtend(iterable) class OrderMixin(local): '''order mixin''' def group(self): ''' group incoming things, optionally using current call for key function
''' call_, list_ = self._call, list with self._context(): return self._xtend(imap( lambda x: [x[0], list_(x[1])], groupby(self._iterable, call_) )) def grouper(self, n, fill=None): ''' split incoming things into sequences of length `n`, using `fill` thing to pad incomplete sequences @param n: number of things @param fill: fill thing (default: None) ''' with self._context(): return self._xtend( zip_longest(fillvalue=fill, *[iter(self._iterable)] * n) ) def reverse(self): '''reverse order of incoming things''' with self._context(): return self._xtend(reversed(list(self._iterable))) def sort(self): ''' sort incoming things, optionally using current call as key function ''' call_ = self._call with self._context(): return self._xtend(sorted(self._iterable, key=call_)) class CombineMixin(local): '''combination mixin''' def product(self, n=1): ''' nested for-each loops repeated `n` times @param n: number of repetitions (default: 1) ''' with self._context(): return self._xtend(product(*self._iterable, repeat=n)) class OrderingMixin(OrderMixin, RandomMixin, CombineMixin): '''ordering mixin'''
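# Note that group() above hands the incoming things straight to
# itertools.groupby(), which only merges *consecutive* items with equal
# keys, so callers generally sort first. A standalone illustration:
from itertools import groupby

words = sorted(['apple', 'banana', 'avocado', 'blueberry'], key=lambda w: w[0])
grouped = [[key, list(items)] for key, items in groupby(words, lambda w: w[0])]
# grouped == [['a', ['apple', 'avocado']], ['b', ['banana', 'blueberry']]]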
deerwalk/voltdb
tests/sqlcoverage/normalizer/not-a-normalizer.py
Python
agpl-3.0
2,364
0.002538
#!/usr/bin/env python # This file is part of VoltDB. # Copyright (C) 2008-2017 VoltDB Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. from NotANormalizer import NotANormalizer from SQLCovera
geReport import generate_html_reports def safecmp(x, y): """Calls the 'standard' safecmp function, which performs a comparison similar to cmp, including iterating over lists, but two None values are considered equal, and a T
ypeError is avoided when a None value and a datetime are corresponding members of a list. """ return NotANormalizer.safecmp(x,y) def normalize(table, sql): """Do nothing other than returning the table. """ return NotANormalizer.normalize(table, sql) def compare_results(suite, seed, statements_path, hsql_path, jni_path, output_dir, report_invalid, report_all, extra_stats, comparison_database, modified_sql_path, max_mismatches=0, within_minutes=0): """Just calls SQLCoverageReport.generate_html_reports(...). """ return generate_html_reports(suite, seed, statements_path, hsql_path, jni_path, output_dir, report_invalid, report_all, extra_stats, comparison_database, modified_sql_path, max_mismatches, within_minutes, cntonly=True)
klapper/nagios-plugins-mq
check_mq_channel.py
Python
mit
5,483
0.010213
#!/usr/bin/python import getopt import sys import pymqi, CMQC, CMQCFC STATE_OK = 0 STATE_WARNING = 1 STATE_CRITICAL = 2 STATE_UNKNOWN = 3 def usage(): print """Usage: rbh_check_mq_channel_status -H <HostName> -g <QMGRName> -p <PortNumber> -a <ChannelName for connection> -t <ChannelName for test>""" def show_help(): usage() print """ Checks MQ channel status -H, --host Host name -g, --qmgr Queue Manager Name -p, --port-number port number (default 1414) -a, --channel-name-conn channel name for connection -t, --channel-name channel name for test example: rbh_check_mq_channel_status.py -H host1 -g QM1 -a SYSTEM.ADMIN.SVRCONN -t nameofth
echannel """ def exit_with_state(exit_code): global qmgr try: qmgr.disconnect() except: pass sys.exit(exit_code) def main(): try: opts, args = getopt.getopt(sys.argv[1:], "hH:g:p:a:t:", ["help", "host","qmgr=","port=","channel-name=","channel-name-conn="]) except
getopt.GetoptError, err: print str(err) # will print something like "option -a not recognized" usage() sys.exit(2) hostName=None qmgrName=None portNumber=1414 channelNameConn=None channelNameTest=None for o, a in opts: if o in ("-h", "--help"): show_help() sys.exit() elif o in ("-H", "--host"): hostName = a elif o in ("-g", "--qmgr"): qmgrName = a elif o in ("-p", "--port"): portNumber = int(a) elif o in ("-a", "--channel-name-conn"): channelNameConn = a elif o in ("-t", "--channel-name"): channelNameTest = a else: assert False, "unhandled option" if not (hostName and portNumber and channelNameTest and qmgrName and channelNameConn): usage() exit_with_state(STATE_UNKNOWN) # if len(channelNameConn) > MQ_CHANNEL_NAME_LENGTH: # print "UNKNOWN - Channel name are too long." conn_info="%s(%s)" % (hostName,portNumber) global qmgr try: qmgr = pymqi.connect(qmgrName,channelNameConn,conn_info) except pymqi.MQMIError, e: print "UNKNOWN - unable to connect to Qmanager, reason: %s" % (e) exit_with_state(STATE_UNKNOWN) channel_name = '' try: pcf = pymqi.PCFExecute(qmgr) channel_names = pcf.MQCMD_INQUIRE_CHANNEL({CMQCFC.MQCACH_CHANNEL_NAME: channelNameTest}) if channel_names[0]: channel_name = channel_names[0][CMQCFC.MQCACH_CHANNEL_NAME].rstrip() channel_type = channel_names[0][CMQCFC.MQIACH_CHANNEL_TYPE] else: print("CRITICAL - Channel %s does not exists." % (channelNameTest)) exit_with_state(STATE_UNKNOWN) except pymqi.MQMIError,e : print("UNKNOWN - Can not list MQ channels. reason: %s" % (e)) exit_with_state(STATE_UNKNOWN) status_available = True try: attrs = "MQCACH_CHANNEL_NAME MQIACH_BYTES_RCVD MQIACH_BYTES_SENT" pcf = pymqi.PCFExecute(qmgr) channels = pcf.MQCMD_INQUIRE_CHANNEL_STATUS({CMQCFC.MQCACH_CHANNEL_NAME: channelNameTest}) except pymqi.MQMIError, e: if e.comp == CMQC.MQCC_FAILED and e.reason == CMQCFC.MQRCCF_CHL_STATUS_NOT_FOUND: status_available = False pass else: print "UNKNOWN - Can not get status information, reason: %s" % (e) exit_with_state(STATE_UNKNOWN) infomsg = {CMQCFC.MQCHS_INACTIVE:"Channel is inactive", CMQCFC.MQCHS_BINDING:"Channel is negotiating with the partner.", CMQCFC.MQCHS_STARTING:"Channel is waiting to become active.", CMQCFC.MQCHS_RUNNING:"Channel is transferring or waiting for messages.", CMQCFC.MQCHS_PAUSED:"Channel is paused.", CMQCFC.MQCHS_STOPPING:"Channel is in process of stopping.", CMQCFC.MQCHS_RETRYING:"Channel is reattempting to establish connection.", CMQCFC.MQCHS_STOPPED:"Channel is stopped.", CMQCFC.MQCHS_REQUESTING:"Requester channel is requesting connection.", CMQCFC.MQCHS_INITIALIZING:"Channel is initializing."} if status_available: status = channels[0][CMQCFC.MQIACH_CHANNEL_STATUS] msg = "Channel: %s state is %s (%s)" % (channel_name,status,infomsg[status]) if (status == CMQCFC.MQCHS_RUNNING or (status == CMQCFC.MQCHS_INACTIVE and not channel_type in (CMQC.MQCHT_REQUESTER,CMQC.MQCHT_CLUSSDR))): print("OK - %s" % (msg)) exit_with_state(STATE_OK) if status in (CMQCFC.MQCHS_PAUSED,CMQCFC.MQCHS_STOPPED): print("CRITICAL - %s" % (msg)) exit_with_state(STATE_CRITICAL) else: print("WARNING - %s" % (msg)) exit_with_state(STATE_WARNING) else: if channel_type in (CMQC.MQCHT_REQUESTER,CMQC.MQCHT_CLUSSDR): print("CRITICAL - Channel %s is defined, but status is not available. As this channel is defined as CLUSDR or REQUESTER type channel, therefore it should be running." % (channelNameTest)) exit_with_state(STATE_CRITICAL) else: print("OK - Channel %s is defined, but status is not available. This may indicate that the channel has not been used." 
% (channelNameTest)) exit_with_state(STATE_OK) if __name__ == "__main__": main()
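# The STATE_* constants follow the standard Nagios plugin protocol: the
# process exit code (0/1/2/3) carries the severity and the first line of
# stdout carries the human-readable message. A minimal check skeleton in
# the same Python 2 style as the plugin above:
import sys

STATE_OK, STATE_WARNING, STATE_CRITICAL, STATE_UNKNOWN = 0, 1, 2, 3

def report(state, label, message):
    print "%s - %s" % (label, message)
    sys.exit(state)

# e.g. report(STATE_OK, "OK", "Channel is transferring or waiting for messages.")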
tcstewar/opengl_texture_rendering
sparkle/main_raster.py
Python
gpl-2.0
3,315
0.00181
import time from PyQt4 import QtGui, QtCore, QtOpenGL from PyQt4.QtOpenGL import QGLWidget import OpenGL.GL as gl import OpenGL.arrays.vbo as glvbo import numpy as np import raster import slider import draw_texture import qt_helpers raster_width = 1024 raster_height = 64 raster_n_neurons = 64 spikes_per_frame = 5 class GLPlotWidget(QGLWidget): # default window size width, height = 600, 600 t_last_msg = time.time() spike_count = 0 last_time = None def initializeGL(self): # program for drawing spikes self.raster = raster.RasterProgram(raster_width, raster_height, raster_n_neurons) self.raster.link() # program for fading sparkleplot self.slider = slider.SlideProgram(raster_width, raster_height) self.slider.link() # program for rendering a texture on the screen self.draw_texture = draw_texture.DrawTextureProgram() self.draw_texture.link() def paintGL(self): now = time.time() if self.last_time is None: decay = 0.0 self.dt = None else: dt = now - self.last_time if self.dt is None: self.dt = dt else: #self.dt = dt self.dt = (0.9) * self.dt + (0.1) * dt self.last_time = now if self.dt is not None: self.slider.swap_frame_buffer(int(self.dt/0.001)) self.slider.paint_slid() #data = self.data data = np.random.randint(raster_n_neurons, size=spikes_per_frame).astype('int32') # generate spike data self.spike_count += len(data) # paint the spikes onto the sparkle plot self.slider.swap_frame_buffer(0, False) self.raster.paint_spikes(data) # switch to rendering on the screen gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0) gl.glViewport(0, 0, self.width, self.height) # draw the sparkle plot on the screen self.draw_texture.paint(self.slider.get_current_texture()) # print out spike rate now = time.time() if now > self.t_last_msg + 1: dt = now - self.t_last_msg rate = self.spike_count * 0.000001 / dt print 'Mspikes per second = %g' % rate self.spike_count = 0 self.t_last_msg = now # flag a redraw self.update() def resizeGL(self, width, height): """Called upon window resizing: reinitialize the viewport.""" # update the window size self.width, self.height = width, height # paint within the whole window gl.glViewport(0, 0, width, height) if __name__ == '__main__': # define a Qt window with an OpenGL widget inside it class TestWindow(QtGui.QMainWindow): def __init__(self): super(TestWind
ow, self).__init__() # initialize the GL widget self.widget = GLPlotWidget() # put the window at the screen position (100, 100) self.setGeometry(100, 100, self.widget.width, self.widget.height) self.setCentralWidget(self.widget) self.show() # sh
ow the window win = qt_helpers.create_window(TestWindow)
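# The paint loop above smooths the frame interval with an exponential
# moving average (self.dt = 0.9 * self.dt + 0.1 * dt) before turning it
# into a slide amount. The same filter in isolation:
def ema(previous, sample, alpha=0.1):
    # alpha weights the newest sample; None means no estimate yet
    if previous is None:
        return sample
    return (1.0 - alpha) * previous + alpha * sample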
Ictp/indico
indico/tests/python/unit/util.py
Python
gpl-3.0
5,388
0.002598
# -*- coding: utf-8 -*- ## ## ## This file is part of Indico. ## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN). ## ## Indico is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 3 of the ## License, or (at your option) any later version. ## ## Indico is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Indico;if not, see <http://www.gnu.org/licenses/>. """ Some utils for unit tests """ # system imports from flask import session from functools import wraps import unittest import new import contextlib # indico imports from indico.util.contextManager import ContextManager from indico.util.fossilize import clearCache from indico.util.i18n import setLocale # indico legacy imports from MaKaC.common.logger import Logger from indico.web.flask.app import make_app loadedFeatures = [] class FeatureLoadingObject(object): def __init__(self): self._activeFeatures = [] def _configFeature(self, ftr, obj): global loadedFeatures if type(ftr) == str: modName, ftrName = ftr.split('.') ftrClsName = "%s_Feature" % ftrName mod = __import__('indico.tests.python.unit.%s' % modName, globals(), locals(), [ftrClsName]) ftr = mod.__dict__[ftrClsName] else: pass for name, func in ftr.__dict__.iteritems(): if name.startswith('_action_'): setattr(obj, name[7:], new.instancemethod(func, obj, obj.__class__)) elif name.startswith('_context_'): setattr(obj, name, new.instancemethod(func, obj, obj.__class__)) ftrObj = ftr() if ftr not in loadedFeatures: ftrObj.start(obj) loadedFeatures.append(ftr) return ftrObj def _configFeatures(self, obj): # process requirements for ftr in self._requires: ftrObj = self._configFeature(ftr, obj) self._activeFeatures.append(ftrObj) def _unconfigFeatures(self, obj): global loadedFeatures for ftr in self._activeFeatures[::-1]: ftr.destroy(obj) del loadedFeatures[:] del self._activeFeatures[:] class IndicoTestFeature(FeatureLoadingObject): _requires = [] def start(self, obj): self._configFeatures(obj) def destroy(self, obj): self._unconfigFeatures(obj) def with_context(context): """ Decorator """ def wrapper(method): @wraps(method) def testWrapped(self, *args, **kwargs): with self._context(context): return method(self, *args, **kwargs) return testWrapped return wrapper class ContextManager_Feature(IndicoTestFeature): """ Creates a context manager """ def start(self, obj): super(ContextManager_Feature, self).start(obj) # create the context ContextManager.destroy() def destroy(self, obj): super(ContextManager_Feature, self).destroy(obj) ContextManager.destroy() class RequestEnvironment_Feature(IndicoTestFeature): """ Creates an environment that should be similar to a regular request """ def _action_endRequest(self): self._do._notify('requestFinished') def _action_startRequest(self): self._do._notify('requestStarted') def _action_make_app_request_context(self): app = make_app() env = { 'environ_base': { 'REMOTE_ADDR': '127.0.0.1' } } return app.test_request_context(**env) def _action_mock_session_user(self): # None of the current tests actually require a user in th
e session. # If this changes, assign an avatar mock object here session.user = None def _context_request(self
): self._startRequest() with self._make_app_request_context(): self._mock_session_user() setLocale('en_GB') yield self._endRequest() class IndicoTestCase(unittest.TestCase, FeatureLoadingObject): """ IndicoTestCase is a normal TestCase on steroids. It allows you to load "features" that will empower your test classes """ _requires = [] def __init__(self, *args, **kwargs): unittest.TestCase.__init__(self, *args, **kwargs) FeatureLoadingObject.__init__(self) def setUp(self): setLocale('en_GB') Logger.removeHandler('smtp') clearCache() # init/clear fossil cache self._configFeatures(self) def tearDown(self): self._unconfigFeatures(self) @contextlib.contextmanager def _context(self, *contexts, **kwargs): ctxs = [] res = [] for ctxname in contexts: ctx = getattr(self, '_context_%s' % ctxname)(**kwargs) res.append(ctx.next()) ctxs.append(ctx) yield res if len(res) > 1 else res[0] for ctx in ctxs[::-1]: ctx.next()
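# Tying the pieces above together: a concrete test class lists its features
# in _requires and wraps test methods with @with_context so each one runs
# between _startRequest() and _endRequest(). A hedged sketch (the feature
# string assumes this module is importable as indico.tests.python.unit.util,
# matching the lookup in _configFeature):
class MySampleTest(IndicoTestCase):
    _requires = ['util.RequestEnvironment']

    @with_context('request')
    def testInsideRequest(self):
        self.assertTrue(True)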
jbassen/edx-platform
lms/djangoapps/courseware/tabs.py
Python
agpl-3.0
10,813
0.001665
""" This module is essentially a broker to xmodule/tabs.py -- it was originally introduced to perform some LMS-specific tab display gymnastics for the Entrance Exams feature """ from django.conf import settings from django.utils.translation import ugettext as _, ugettext_noop from courseware.access import has_access from courseware.entrance_exams import user_must_complete_entrance_exam from student.models import UserProfile from openedx.core.lib.course_tabs import CourseTabPluginManager from student.models import CourseEnrollment from xmodule.tabs import CourseTab, CourseTabList, key_checker from xmodule.tabs import StaticTab class EnrolledTab(CourseTab): """ A base class for any view types that require a user to be enrolled. """ @classmethod def is_enabled(cls, course, user=None): if user is None: return True return bool(CourseEnrollment.is_enrolled(user, course.id) or has_access(user, 'staff', course, course.id)) class CoursewareTab(EnrolledTab): """ The main courseware view. """ type = 'courseware' title = ugettext_noop('Courseware') priority = 10 view_name = 'courseware' is_movable = False is_default = False is_visible_to_sneak_peek = True class CourseInfoTab(CourseTab): """ The course info view. """ type = 'course_info' title = ugettext_noop('Course Info') priority = 20 view_name = 'info' tab_id = 'info' is_movable = False is_default = False is_visible_to_sneak_peek = True @classmethod def is_enabled(cls, course, user=None): return True class SyllabusTab(EnrolledTab): """ A tab for the course syllabus. """ type = 'syllabus' title = ugettext_noop('Syllabus') priority = 30 view_name = 'syllabus' allow_multiple = True is_default = False is_visible_to_sneak_peek = True @classmethod def is_enabled(cls, course, user=None): if not super(SyllabusTab, cls).is_enabled(course, user=user): return False return getattr(course, 'syllabus_present', False) class ProgressTab(EnrolledTab): """ The course progress view. """ type = 'progress' title = ugettext_noop('Progress') priority = 40 view_name = 'progress' is_hideable = True is_default = False @classmethod def is_enabled(cls, course, user=None): # pylint: disable=unused-argument if not super(ProgressTab, cls).is_enabled(course, user=user): return False return not course.hide_progress_tab class TextbookTabsBase(CourseTab): """ Abstract class for textbook collection tabs classes. """ # Translators: 'Textbooks' refers to the tab in the course that leads to the course' textbooks title = ugettext_noop("Textbooks") is_collection = True is_default = False @classmethod def is_enabled(cls, course, user=None): # pylint: disable=unused-argument return user is None or user.is_authenticated() @classmethod def items(cls, course): """ A generator for iterating through all the SingleTextbookTab book objects associated with this collection of textbooks. """ raise NotImplementedError() class TextbookTabs(TextbookTabsBase): """ A tab representing the collection of all textbook tabs. """ type = 'textbooks' priority = None view_name = 'book' @classmethod def is_enabled(cls, course, user=None): # pylint: disable=unused-argument parent_is_enabled = super(TextbookTabs, cls).is_enabled(course, user) return settings.F
EATURES.get('ENABLE_TEXTBOOK') and parent_is_enabled @classmethod def items(cls, course):
for index, textbook in enumerate(course.textbooks): yield SingleTextbookTab( name=textbook.title, tab_id='textbook/{0}'.format(index), view_name=cls.view_name, index=index ) class PDFTextbookTabs(TextbookTabsBase): """ A tab representing the collection of all PDF textbook tabs. """ type = 'pdf_textbooks' priority = None view_name = 'pdf_book' @classmethod def items(cls, course): for index, textbook in enumerate(course.pdf_textbooks): yield SingleTextbookTab( name=textbook['tab_title'], tab_id='pdftextbook/{0}'.format(index), view_name=cls.view_name, index=index ) class HtmlTextbookTabs(TextbookTabsBase): """ A tab representing the collection of all Html textbook tabs. """ type = 'html_textbooks' priority = None view_name = 'html_book' @classmethod def items(cls, course): for index, textbook in enumerate(course.html_textbooks): yield SingleTextbookTab( name=textbook['tab_title'], tab_id='htmltextbook/{0}'.format(index), view_name=cls.view_name, index=index ) class LinkTab(CourseTab): """ Abstract class for tabs that contain external links. """ link_value = '' def __init__(self, tab_dict=None, name=None, link=None): self.link_value = tab_dict['link'] if tab_dict else link def link_value_func(_course, _reverse_func): """ Returns the link_value as the link. """ return self.link_value self.type = tab_dict['type'] tab_dict['link_func'] = link_value_func super(LinkTab, self).__init__(tab_dict) def __getitem__(self, key): if key == 'link': return self.link_value else: return super(LinkTab, self).__getitem__(key) def __setitem__(self, key, value): if key == 'link': self.link_value = value else: super(LinkTab, self).__setitem__(key, value) def to_json(self): to_json_val = super(LinkTab, self).to_json() to_json_val.update({'link': self.link_value}) return to_json_val def __eq__(self, other): if not super(LinkTab, self).__eq__(other): return False return self.link_value == other.get('link') @classmethod def is_enabled(cls, course, user=None): # pylint: disable=unused-argument return True class ExternalDiscussionCourseTab(LinkTab): """ A course tab that links to an external discussion service. """ type = 'external_discussion' # Translators: 'Discussion' refers to the tab in the courseware that leads to the discussion forums title = ugettext_noop('Discussion') priority = None is_default = False @classmethod def validate(cls, tab_dict, raise_error=True): """ Validate that the tab_dict for this course tab has the necessary information to render. """ return (super(ExternalDiscussionCourseTab, cls).validate(tab_dict, raise_error) and key_checker(['link'])(tab_dict, raise_error)) @classmethod def is_enabled(cls, course, user=None): # pylint: disable=unused-argument if not super(ExternalDiscussionCourseTab, cls).is_enabled(course, user=user): return False return course.discussion_link class ExternalLinkCourseTab(LinkTab): """ A course tab containing an external link. """ type = 'external_link' priority = None is_default = False # An external link tab is not added to a course by default allow_multiple = True @classmethod def validate(cls, tab_dict, raise_error=True): """ Validate that the tab_dict for this course tab has the necessary information to render. """ return (super(ExternalLinkCourseTab, cls).validate(tab_dict, raise_error) and key_checker(['link', 'name'])(tab_dict, raise_error)) class SingleTextbookTab(CourseTab): """ A tab representing a single textbook. It is created temporarily when enumerating all textbooks within a Textbook collection tab. It should not be serialized or persisted. 
""" type = 'single_textbook' is_movable = False is
CMUSV-VisTrails/WorkflowRecommendation
vistrails/packages/persistence_exp/__init__.py
Python
bsd-3-clause
2,337
0.0184
############################################################################### ## ## Copyright (C) 2006-2011, University of Utah. ## All rights reserved. ## Contact: contact@vistrails.org ## ## This file is part of VisTrails. ## ## "Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are met: ## ## - Redistributions of source code must retain the above copyright notice, ## this list of conditions and the following disclaimer. ## - Redistributions in binary form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in the ## documentation and/or other materials provided with the distribution. ## - Neither the name of the University of Utah nor the
names of its ## contributors may be used to endorse or promote products derived from ## this software without specific prior written permission. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" ## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, ## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR ## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRI
GHT HOLDER OR ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, ## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; ## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, ## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR ## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." ## ############################################################################### from core.configuration import ConfigurationObject identifier = 'edu.utah.sci.vistrails.persistence.exp' version = '0.2.0' name = 'Persistence' configuration = ConfigurationObject(global_db=(None, str), local_db=(None, str), git_bin=(None, str), search_dbs=(None, str), compress_by_default=False, debug=False)
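The `ConfigurationObject(...)` call above maps keyword arguments to either plain defaults or `(default, type)` pairs. A rough stand-in showing the idea (illustrative only; `SimpleConfig` is not the VisTrails API):

```python
# Minimal stand-in for a typed-defaults configuration object: a
# (default, expected_type) tuple means "unset, but typed".
class SimpleConfig:
    def __init__(self, **options):
        for key, value in options.items():
            if isinstance(value, tuple) and len(value) == 2:
                value = value[0]  # keep only the default; drop the type tag
            setattr(self, key, value)

cfg = SimpleConfig(global_db=(None, str), compress_by_default=False, debug=False)
assert cfg.global_db is None and cfg.debug is False
```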
probcomp/bdbcontrib
examples/satellites/build_bdbs.py
Python
apache-2.0
9,056
0.002761
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2016, MIT Probabilistic Computing Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Analyze and save .bdb files for Satellites. This script simultaneously serves two purposes: - To prepare an analyzed .bdb file for distribution to clients of the Satellites demo - To prepare a series of .bdb files for consumption by the stability probing script probe.py. Each produced file is named with a 'satellites' prefix. The file name additionally captures - a date stamp, - the running user, - the number of models [analysis snapshots only], and - the number of analysis iterations [analysis snapshots only]. For each foo.bdb, this program saves a foo-meta.txt containing the following information: - name and sha256sum of the file described; - # models; # iterations; time taken; initial entropy; parallelism level; date stamp; user stamp; - crosscat version; bayeslite version; bdbcontrib version (including a full copy of this driver script); and - logscore history plot [distributable .bdb only]. """ # Speed rules of thumb: # - 64 models and 1500 iterations took ~30 minutes on probcomp in late September # - 64 * 5 models and 300 iterations took ~18 minutes on probcomp 10/5/15 # - 64 * 50 models and 30 iterations took ~36 minutes on probcomp 10/5/15 import argparse import datetime import hashlib import logging import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import os import subprocess import sys import time import bayeslite import bayeslite.metamodels.crosscat import bdbcontrib import bdbcontrib.crosscat_utils import bdbcontrib.bql_utils import crosscat import crosscat.MultiprocessingEngine as ccme from bdbcontrib.experiments.probe import log def doit(out_dir, num_models, num_iters, checkpoint_freq, seed): then = time.time() timestamp = datetime.datetime.fromtimestamp(then).strftime('%Y-%m-%d') user = subprocess.check_output(["whoami"]).strip() host = subprocess.check_output(["hostname"]).strip() filestamp = '-' + timestamp + '-' + user def out_file_name(base, ext): return out_dir + '/' + base + filestamp + ext csv_file = os.path.join(os.path.dirname(__file__), 'satellites.csv') bdb_file = out_file_name('satellites', '.bdb') # so we can build bdb models os.environ['BAYESDB_WIZARD_MODE']='1' if not os.path.isdir(out_dir): os.makedirs(out_dir) if os.path.exists(bdb_file): print 'Error: File', bdb_file, 'already exists. Please remove it.' 
sys.exit(1) # create database mapped to filesystem log('opening bdb on disk: %s' % bdb_file) bdb = bayeslite.bayesdb_open(pathname=bdb_file, builtin_metamodels=False) def execute(bql): log("executing %s" % bql) bdb.execute(bql) # read csv into table log('reading data from %s' % csv_file) bayeslite.bayesdb_read_csv_file(bdb, 'satellites', csv_file, header=True, create=True, ifnotexists=True) # Add a "not applicable" orbit sub-type log('adding "not applicable" orbit sub-type') bdb.sql_execute('''UPDATE satellites SET type_of_orbit = 'N/A' WHERE (class_of_orbit = 'GEO' OR class_of_orbit = 'MEO') AND type_of_orbit = 'NaN' ''') # nullify "NaN" log('nullifying NaN') bdbcontrib.bql_utils.nullify(bdb, 'satellites', 'NaN') # register crosscat metamodel cc = ccme.MultiprocessingEngine(seed=seed) ccmm = bayeslite.metamodels.crosscat.CrosscatMetamodel(cc) bayeslite.bayesdb_register_metamodel(bdb, ccmm) # create the crosscat generator using execute(''' CREATE GENERATOR satellites_cc FOR satellites USING crosscat ( GUESS(*), name IGNORE, Country_of_Operator CATEGORICAL, Operator_Owner CATEGORICAL, Users CATEGORICAL, Purpose CATEGORICAL, Class_of_Orbit CATEGORICAL, Type_of_Orbit CATEGORICAL, Perigee_km NUMERICAL, Apogee_km NUMERICAL, Eccentricity NUMERICAL, Period_minutes NUMERICAL, Launch_Mass_kg NUMERICAL, Dry_Mass_kg NUMERICAL, Power_watts NUMERICAL, Date_of_Launch NUMERICAL, Anticipated_Lifetime NUMERICAL, Contractor CATEGORICAL, Country_of_Contractor CATEGORICAL, Launch_Site CATEGORICAL, Launch_Vehicle CATEGORICAL, Source_Used_for_Orbital_Data CATEGORICAL, longitude_radians_of_geo NUMERICAL, Inclination_radians NUMERICAL ) ''') execute('INITIALIZE %d MODELS FOR satellites_cc' % (num_models,)) cur_iter_ct = 0 def snapshot(): log('vacuuming') bdb.sql_execute('vacuum') cur_infix = '-%dm-%di' % (num_models, cur_iter_ct) save_file_name = out_file_name('satellites', cur_infix + '.bdb') meta_file_name = out_file_name('satellites', cur_infix + '-meta.txt') log('recording snapshot ' + save_file_name) os.system("cp %s %s" % (bdb_file, save_file_name)) report(save_file_name, meta_file_name) def record_metadata(f, saved_file_name, sha_sum, total_time, plot_file_name=None): f.write("DB file " + saved_file_name + "\n") f.write(sha_sum) f.write("built from " + csv_file + "\n") f.write("by %s@%s\n" % (user, host)) f.write("at seed %s\n" % seed) f.write("in %3.2f seconds\n" % total_time) f.write("with %s models analyzed for %s iterations\n" % (num_models, num_iters)) f.write("by bayeslite %s, with crosscat %s and bdbcontrib %s\n" % (bayeslite.__version__, crosscat.__version__, bdbcontrib.__version__)) if plot_file_name is not None: f.write("diagnostics recorded to %s\n" % plot_file_name) f.flush() def report(saved_file_name, metadata_file, echo=False, plot_file_name=None): sha256 = hashlib.sha256() with open(saved_file_name, 'rb') as fd: for chunk in iter(lambda: fd.read(65536), ''): sha256.update(chunk) sha_sum = sha256.hexdigest() + '\n' total_time = time.time() - then with open(metadata_file, 'w') as fd: record_metadata(fd, saved_file_name, sha_sum, total_time, plot_file_name) fd.write('using script ') fd.write('-' * 57) fd.write('\n') fd.flush() os.system("cat %s >> %s" % (__file__, metadata_file)) if echo: record_metadata(sys.stdout, saved_file_name, sha_sum, total_time, plot_file_name) def final_report(): # create a diagnostics plot plot_file_name
= out_file_name('satellites', '-logscores.pdf') log('writing diagnostic plot to %s' % plot_file_name) _fig = bdbcontrib.crosscat_utils.plot_crosscat_chain_diagnostics( bdb, 'logscore',
'satellites_cc') plt.savefig(plot_file_name) final_metadata_file = out_file_name('satellites', '-meta.txt') report(bdb_file, final_metadata_file, echo=True, plot_file_name=plot_file_name) snapshot() while cur_iter_ct < num_iters: execute('ANALYZE satellites_cc FOR %d ITERATIONS WAIT' % checkpoint_freq) cur_iter_ct += checkpoint_freq snapshot() final_report() log('closing bdb %s' % bdb_file) bdb.close() os.system("cd %s && ln -s satellites%s.bdb satellites.bdb" % (out_dir, filestam
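The `report()` helper above hashes each saved `.bdb` in 64 KiB chunks so large files never load fully into memory. The same step in isolation, in Python 3 spelling (so the `iter()` sentinel is `b''` rather than the `''` this Python 2 script uses; the path is hypothetical):

```python
# Chunked SHA-256 of a file, as in report() above.
import hashlib

def sha256_of(path, chunk_size=65536):
    digest = hashlib.sha256()
    with open(path, 'rb') as fd:
        for chunk in iter(lambda: fd.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()
```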
sergiusens/snapcraft
snapcraft/internal/project_loader/grammar_processing/_part_grammar_processor.py
Python
gpl-3.0
4,927
0
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- # # Copyright (C) 2017 Canonical Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from typing import Any, Dict, Set from snapcraft import project from snapcraft.internal.project_loader import grammar from snapcraft.internal import pluginhandler, repo from ._package_transformer import package_transformer class PartGrammarProcessor: """Process part properties that support grammar. Stage packages example: >>> from unittest import mock >>> import snapcraft >>> # Pretend that all packages are valid >>> repo = mock.Mock() >>> repo.is_valid.return_value = True >>> plugin = mock.Mock() >>> plugin.stage_packages = [{'try': ['foo']}] >>> processor = PartGrammarProcessor( ... plugin=plugin, ... properties={}, ... project=snapcraft.project.Project(), ... repo=repo) >>> processor.get_stage_packages() {'foo'} Build packages example: >>> from unittest import mock >>> import snapcraft >>> # Pretend that all packages are valid >>> repo = mock.Mock() >>> repo.is_valid.return_value = True >>> plugin = mock.Mock() >>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor( ... plugin=plugin, ... properties={}, ... project=snapcraft.project.Project(),
... repo=repo) >>> processor.get_build_packages() {'foo'} Source example: >>> from unittest import mock >>> import snapcraft >>> plugin = mock.Mock() >>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']} >>> processor = PartGrammarProcessor( ... plugin=plugin, ... properties=plugin.properties, ... project=snapcraft.project.Project(), ... repo=None) >>> processor.get_source() 'foo' """ def __init__( self, *, plugin: pluginhandler.PluginHandler, properties: Dict[str, Any], project: project.Project, repo: "repo.Ubuntu" ) -> None: self._project = project self._repo = repo self._build_snap_grammar = getattr(plugin, "build_snaps", []) self.__build_snaps = set() # type: Set[str] self._build_package_grammar = getattr(plugin, "build_packages", []) self.__build_packages = set() # type: Set[str] self._stage_package_grammar = getattr(plugin, "stage_packages", []) self.__stage_packages = set() # type: Set[str] source_grammar = properties.get("source", [""]) if not isinstance(source_grammar, list): self._source_grammar = [source_grammar] else: self._source_grammar = source_grammar self.__source = "" def get_source(self) -> str: if not self.__source: # The grammar is array-based, even though we only support a single # source. processor = grammar.GrammarProcessor( self._source_grammar, self._project, lambda s: True ) source_array = processor.process() if len(source_array) > 0: self.__source = source_array.pop() return self.__source def get_build_snaps(self) -> Set[str]: if not self.__build_snaps: processor = grammar.GrammarProcessor( self._build_snap_grammar, self._project, repo.snaps.SnapPackage.is_valid_snap, ) self.__build_snaps = processor.process() return self.__build_snaps def get_build_packages(self) -> Set[str]: if not self.__build_packages: processor = grammar.GrammarProcessor( self._build_package_grammar, self._project, self._repo.build_package_is_valid, transformer=package_transformer, ) self.__build_packages = processor.process() return self.__build_packages def get_stage_packages(self) -> Set[str]: if not self.__stage_packages: processor = grammar.GrammarProcessor( self._stage_package_grammar, self._project, self._repo.is_valid, transformer=package_transformer, ) self.__stage_packages = processor.process() return self.__stage_packages
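Each `get_*` accessor above memoizes its processed result, using the empty set as the "not computed yet" marker. A minimal sketch of that caching shape with the grammar processor stubbed out as a plain callable (note it shares the original's caveat: a legitimately empty result is recomputed on every call):

```python
# Compute-once accessor, mirroring get_stage_packages() and friends.
from typing import Callable, Set

class LazyPackages:
    def __init__(self, compute: Callable[[], Set[str]]) -> None:
        self._compute = compute
        self._packages: Set[str] = set()

    def get(self) -> Set[str]:
        if not self._packages:  # empty set doubles as "not yet computed"
            self._packages = self._compute()
        return self._packages

pkgs = LazyPackages(lambda: {'foo', 'bar'})
assert pkgs.get() == {'foo', 'bar'}
```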
GoogleCloudPlatform/professional-services
examples/bq-email-exports/tests/send_email_function/test_main.py
Python
apache-2.0
2,421
0.000413
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIO
NS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Unit tests for send email function""" import os import sys import pytest sys.path.append(os.path.realpath(os.path.dirname(__file__) + "../..")) from send_email_function import main @pytest.fixture def mock_env(monkeypatch): """Setting mock envir
onment variables""" monkeypatch.setenv("SIGNED_URL", "True") monkeypatch.setenv("FROM_EMAIL", "sender@gmail.com") monkeypatch.setenv("TO_EMAILS", "recepient@gmail.com") monkeypatch.setenv("EMAIL_SUBJECT", "BigQuery email export") monkeypatch.setenv("SENDGRID_API_KEY", "SG.key") monkeypatch.setenv("SIGNED_URL_EXPIRATION", "24") def test_raise_exception(): """Tests that KeyError exception is raised when no env vars are set""" with pytest.raises(KeyError): main.get_env('SIGNED_URL') @pytest.mark.parametrize("test_input,expected", [("SIGNED_URL", "True"), ("FROM_EMAIL", "sender@gmail.com"), ("TO_EMAILS", "recepient@gmail.com"), ("EMAIL_SUBJECT", "BigQuery email export"), ("SENDGRID_API_KEY", "SG.key"), ("SIGNED_URL_EXPIRATION", "24")]) def test_get_env(mock_env, test_input, expected): """Tests reading of env vars""" assert main.get_env(test_input) == expected @pytest.mark.parametrize( "test_input,expected", [("gs://bucket/object.txt", "https://storage.cloud.google.com/bucket/object.txt"), ("gs://bucket/dir/object.txt", "https://storage.cloud.google.com/bucket/dir/object.txt"), ("gs://bucket/dir/subdir/object.json", "https://storage.cloud.google.com/bucket/dir/subdir/object.json")]) def test_get_auth_url(test_input, expected): """Tests creation of authenticated GCS URL""" assert main.get_auth_url(test_input) == expected
github/codeql
python/ql/test/library-tests/taint/dataflow/test.py
Python
mit
2,191
0.016431
def test1(): SINK(SOURCE) def test2(): s = SOURCE SINK(s) def
source(): return SOURCE def sink(arg): SINK(arg) def test3(): t = source() SINK(t) def test4(): t = SOURCE sink(t) def test5(): t = source() sink(t) def test6(cond): if cond: t = "Safe" else: t = SOURCE if cond: SINK(t) def test7(cond): if cond: t = SOURC
E else: t = "Safe" if cond: SINK(t) def source2(arg): return source(arg) def sink2(arg): sink(arg) def sink3(cond, arg): if cond: sink(arg) def test8(cond): t = source2() sink2(t) #False positive def test9(cond): if cond: t = "Safe" else: t = SOURCE sink3(cond, t) def test10(cond): if cond: t = SOURCE else: t = "Safe" sink3(cond, t) def hub(arg): return arg def test11(): t = SOURCE t = hub(t) SINK(t) def test12(): t = "safe" t = hub(t) SINK(t) import module def test13(): t = module.dangerous SINK(t) def test14(): t = module.safe SINK(t) def test15(): t = module.safe2 SINK(t) def test16(): t = module.dangerous_func() SINK(t) def test20(cond): if cond: t = CUSTOM_SOURCE else: t = SOURCE if cond: CUSTOM_SINK(t) else: SINK(t) def test21(cond): if cond: t = CUSTOM_SOURCE else: t = SOURCE if not cond: CUSTOM_SINK(t) else: SINK(t) def test22(cond): if cond: t = CUSTOM_SOURCE else: t = SOURCE t = TAINT_FROM_ARG(t) if cond: CUSTOM_SINK(t) else: SINK(t) from module import dangerous as unsafe SINK(unsafe) def test23(): with SOURCE as t: SINK(t) def test24(): s = SOURCE SANITIZE(s) SINK(s) def test_update_extend(x, y): l = [SOURCE] d = {"key" : SOURCE} x.extend(l) y.update(d) SINK(x[0]) SINK(y["key"]) l2 = list(l) d2 = dict(d) def test_truth(): t = SOURCE if t: SINK(t) else: SINK(t) if not t: SINK(t) else: SINK(t)
TheWeiTheTruthAndTheLight/senior-design
src/json_io.py
Python
mit
7,324
0.005188
""" json_io.py Functions related to reading/writing/mapping json """ import json import ijson from re import sub from datetime import datetime from os import listdir, SEEK_END from sys import exc_info, stdout from random import choice, randrange from nlp import feature TWEET_LINK_RE = "https://t.co/(\w)+" TWEET_HANDLE_RE = "@(\w)+" def list_from_json(json_file): """Return a list corresponding to contents of json file""" with open(json_file, 'r') as fp: return json.load(fp) def list_to_json(lst, path, old_format=True): """Save a list of tweets to a json file at corresponding path. old_format (optional, default=true): dump using sorted keys,
indenting. Set to false for streaming friendlier format """ if old_format: with open(path, 'w') as fp: json.dump(lst, fp, sort_keys=True, indent=4) else: with open(path, 'w') as fp: for i, tweet in enumerate(lst): js
on.dump({"text": tweet["text"], "id": tweet['id'], "media": tweet["media"], "urls": tweet["urls"]}, fp) if i != len(lst) - 1: fp.write('\n') def merge_json_filenames(json_lst): """ Return filename encapsulating date range of passed in jsons ex: merge_json_filnames(["path/to/jsons/2017-01-27_2017-02-04.json", "path/to/jsons/2017-02-02_2017-02-09.json"]) returns "2017-01-27_2017-02-09.json" """ # Get earliest and latest date of jsons for naming purposes of merged file. parse_date_from_filename = lambda fn: fn.split('/')[-1].split('.')[0].split('_') sorted_dates = sorted([datetime.strptime(date, "%Y-%m-%d") for fn in json_lst for date in parse_date_from_filename(fn)]) from_date = datetime.strftime(sorted_dates[0], "%Y-%m-%d") to_date = datetime.strftime(sorted_dates[-1], "%Y-%m-%d") return "{}_{}.json".format(from_date, to_date) def tweet_map(json_file, tweet_func, save=False): """ Apply a function to each tweet in a json file json_file - path to tweet json file tweet_func - function that takes in a 'tweet' object, and returns a 'tweet' object save (optional) - overwrite json_file with modified json returns list where each tweet has tweet_func applied to it """ mapped_tweets = [] with open(json_file, 'r') as f: # stream through f using ijson.items for tweet in ijson.items(f, "item"): mapped_tweets.append(tweet_func(tweet)) if save: list_to_json(mapped_tweets, json_file) return mapped_tweets def tweet_map(tweets, tweet_func): """ Apply a function to each tweet in a list of tweets """ return [tweet_func(tweet) for tweet in tweets] def tweet_iterate(json_file, key=None): """ Stream through objects in a json file json_file - path to tweet json file key (optional) - single key value of interest (ex: return only "text" field, or only "id" field of each tweet) """ with open(json_file, 'r') as f: if key: for tweet in ijson.items(f, "item.{}".format(key)): yield tweet else: for tweet in ijson.items(f, "item"): yield tweet def replaceLinksMentions(tweet): """ Take tweet and return tweet with new field "ner_text" where links and handles are replaced by tokens """ # replace embedded urls/media with [url], [media], or [url_media] ner_text = tweet["text"] if tweet["media"] or tweet["urls"]: if tweet['media'] and tweet['urls']: replacement_word = 'UrlMediaTOK' elif tweet['media']: replacement_word = "MediaTOK" else: replacement_word = "UrlTok" # replace twitter links with appropriate tag ner_text = sub(TWEET_LINK_RE, replacement_word, ner_text) # replace handles with appropriate tag ner_text = sub(TWEET_HANDLE_RE, "NameTOK", ner_text) tweet["ner_text"] = ner_text return tweet def fileName(features_path, source, sarcastic, i=None): return features_path + source + ('sarcastic-' if sarcastic else 'serious-') + str(i) + ".json" def openFiles(features_path, sarcastic, source, n, mode='a'): """ takes in a directory path, a sarcastic boolean value, a source type and n Returns n file pointers in the specified (defaul append) mode with a large buffer located in the feature_path directory. feature_path= feats sarcastic = True source = tweet- n=5 Will create files like so: feats/tweet-sarcastic-0.json feats/tweet-sarcastic-1.json ... feats/tweet-sarcastic-5.json """ return [open(fileName(features_path, source, sarcastic, i), mode, buffering=2**24) for i in range(n)] def closeFiles(openFiles): """ Takes in a list of open file pointers flushes the buffer (done in file.close()) and closes the files. 
""" for file in openFiles: file.close() def processRandomizeJson(sarcastic, json_path, features_path, source, n, cleanTokens): """ takes in a sarcastic boolean, a path to json files, a path to store processed features, a source type an the number of files to create For each json file in the json_path directory it processes the features and saves it randomly to 1 of n files constructed using the openFiles function Periodically prints the file and time it took to process as well as the number of items processed so far. """ files = openFiles(features_path, sarcastic, source, n, mode='a') try: totalCount = 0 for filename in listdir(json_path): startTime = datetime.now() for line in open(json_path+filename): text = json.loads(line)['text'] features = feature(text, cleanTokens) featuresJson = json.dumps(features) + '\n' choice(files).write(featuresJson) totalCount += 1 stopTime = datetime.now() print("File %s\ttime:\t%s" % (filename, (stopTime - startTime))) print("Processed %d json lines"%totalCount) stdout.flush() closeFiles(files) except: closeFiles(files) print("Unexpected error:\n") for e in exc_info(): print(e) def loadProcessedFeatures(features_path, source, sarcastic, n=0, feature_filename=None, random=True, reduce=0): if feature_filename: with open(feature_path+feature_filename) as file: for line in file: yield (json.loads(line), sarcastic) elif random: with open(fileName(features_path, source, sarcastic, randrange(n))) as file: for line in file: yield (json.loads(line), sarcastic) else: if reduce != 0: cache = [] files = openFiles(features_path, sarcastic, source, n, mode='r') for file in files: for line in file: cache.append(line) if len(cache)==reduce: yield (json.loads(choice(cache)), sarcastic) cache = [] else: files = openFiles(features_path, sarcastic, source, n, mode='r') for file in files: for line in file: yield (json.loads(line), sarcastic)
demisto/content
Packs/CrowdStrikeIntel/Integrations/CrowdStrikeFalconIntel_v2/CrowdStrikeFalconIntel_v2.py
Python
mit
23,184
0.002459
import demistomock as demisto from CommonServerPython import * from CommonServerUserPython import * from datetime import datetime, timezone from typing import Union, Any, Dict from dateparser import parse import urllib3 import traceback # Disable insecure warnings urllib3.disable_warnings() ''' GLOBAL VARIABLES ''' MALICIOUS_DICTIONARY: Dict[Any, int] = { 'low': Common.DBotScore.GOOD, 'medium': Common.DBotScore.SUSPICIOUS, 'high': Common.DBotScore.BAD } MALICIOUS_THRESHOLD = MALICIOUS_DICTIONARY.get(demisto.params().get('threshold', 'high')) ''' CLIENT ''' class Client: """ The integration's client """ def __init__(self, params: Dict[str, str]): self.cs_client: CrowdStrikeClient = CrowdStrikeClient(params=params) self.query_params: Dict[str, str] = {'offset': 'offset', 'limit': 'limit', 'sort': 'sort', 'free_search': 'q'} self.date_params: Dict[str, Dict[str, str]] = { 'created_date': {'operator': '', 'api_key': 'created_date'}, 'last_updated_date': {'operator': '', 'api_key': 'last_updated'}, 'max_last_modified_date': {'operator': '<=', 'api_key': 'last_modified_date'}, 'min_last_activity_date': {'operator': '>=', 'api_key': 'first_activity_date'}, 'max_last_activity_date': {'operator': '<=', 'api_key': 'last_activity_date'}, } def build_request_params(self, args: Dict[str, Any]) -> Dict[str, Any]: """ Build the params dict for the request :param args: Cortex XSOAR args :return: The params dict """ params: Dict[str, Any] = {key: args.get(arg) for arg, key in self.query_params.items()} query = args.get('query')
params['filter'] = query if q
uery else self.build_filter_query(args) return assign_params(**params) def build_filter_query(self, args: Dict[str, str]) -> str: """ Builds the filter query in Falcon Query Language (FQL) :param args: Cortex XSOAR args :return: The query """ filter_query: str = str() for key in args: if key not in self.query_params: if key not in self.date_params: values: List[str] = argToList(args[key], ',') for value in values: filter_query += f"{key}:'{value}'+" else: operator: Optional[str] = self.date_params.get(key, {}).get('operator') api_key: Optional[str] = self.date_params.get(key, {}).get('api_key') # Parsing date argument of ISO format or free language into datetime object, # replacing TZ with UTC, taking its timestamp format and rounding it up. filter_query += f"{api_key}:" \ f"{operator}{int(parse(args[key]).replace(tzinfo=timezone.utc).timestamp())}+" if filter_query.endswith('+'): filter_query = filter_query[:-1] return filter_query def get_indicator(self, indicator_value: str, indicator_type: str) -> Dict[str, Any]: # crowdstrike do not allow passing single quotes - so we encode them # we are not encoding the entire indicator value, as the other reserved chars (such as + and &) are allowed indicator_value = indicator_value.replace("'", "%27") args: Dict[str, Any] = { 'indicator': indicator_value, 'limit': 1 } if indicator_type == 'hash': args['type'] = get_indicator_hash_type(indicator_value) elif indicator_type == 'ip': args['type'] = 'ip_address' else: args['type'] = indicator_type params: Dict[str, Any] = self.build_request_params(args) return self.cs_client.http_request(method='GET', url_suffix='intel/combined/indicators/v1', params=params) def cs_actors(self, args: Dict[str, str]) -> Dict[str, Any]: params: Dict[str, Any] = self.build_request_params(args) return self.cs_client.http_request(method='GET', url_suffix='intel/combined/actors/v1', params=params) def cs_indicators(self, args: Dict[str, str]) -> Dict[str, Any]: params: Dict[str, Any] = self.build_request_params(args) return self.cs_client.http_request(method='GET', url_suffix='intel/combined/indicators/v1', params=params) def cs_reports(self, args: Dict[str, str]) -> Dict[str, Any]: params: Dict[str, Any] = self.build_request_params(args) return self.cs_client.http_request(method='GET', url_suffix='intel/combined/reports/v1', params=params) ''' HELPER FUNCTIONS ''' def get_dbot_score_type(indicator_type: str) -> Union[Exception, DBotScoreType, str]: """ Returns the dbot score type :param indicator_type: The indicator type :return: The dbot score type """ if indicator_type == 'ip': return DBotScoreType.IP elif indicator_type == 'domain': return DBotScoreType.DOMAIN elif indicator_type == 'file' or indicator_type == 'hash': return DBotScoreType.FILE elif indicator_type == 'url': return DBotScoreType.URL else: raise DemistoException('Indicator type is not supported.') def get_score_from_resource(r: Dict[str, Any]) -> int: """ Calculates the DBotScore for the resource :param r: The resource :return: The DBotScore """ malicious_confidence: int = MALICIOUS_DICTIONARY.get(r.get('malicious_confidence'), 0) if malicious_confidence == 3 or MALICIOUS_THRESHOLD == 1: score = 3 elif malicious_confidence == 2 or MALICIOUS_THRESHOLD == 2: score = 2 else: score = 1 return score def get_indicator_hash_type(indicator_value: str) -> Union[str, Exception]: """ Calculates the type of the hash :param indicator_value: The hash value :return: The hash type """ length: int = len(indicator_value) if length == 32: return 'hash_md5' elif length == 40: 
return 'hash_sha1' elif length == 64: return 'hash_sha256' else: raise DemistoException(f'Invalid hash. Hash length is: {length}. Please provide either MD5 (32 length)' f', SHA1 (40 length) or SHA256 (64 length) hash.') def get_indicator_object(indicator_value: Any, indicator_type: str, dbot_score: Common.DBotScore) \ -> Union[Common.IP, Common.URL, Common.File, Common.Domain, None]: """ Returns the corresponding indicator common object :param indicator_value: The indicator value :param indicator_type: The indicator value :param dbot_score: The indicator DBotScore :return: The indicator common object """ if indicator_type == 'ip': return Common.IP( ip=indicator_value, dbot_score=dbot_score ) elif indicator_type == 'url': return Common.URL( url=indicator_value, dbot_score=dbot_score ) elif indicator_type == 'hash': hash_type: Union[str, Exception] = get_indicator_hash_type(indicator_value) if hash_type == 'hash_md5': return Common.File( md5=indicator_value, dbot_score=dbot_score ) elif hash_type == 'hash_sha1': return Common.File( sha1=indicator_value, dbot_score=dbot_score ) else: return Common.File( sha256=indicator_value, dbot_score=dbot_score ) elif indicator_type == 'domain': return Common.Domain( domain=indicator_value, dbot_score=dbot_score ) else: return None def build_indicator(indicator_value: str, indicator_type: str, title: str, client: Client) -> List[CommandResults]: """ Builds an indicator entry :param indicator_value: The indicator value :param indicator_type: The indicator type :param title: The title to show to the user :param client: The integration's client :return: The indicator entry ""
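`get_indicator_hash_type()` above dispatches purely on digest length (32 for MD5, 40 for SHA1, 64 for SHA256). The same rule as a lookup table (a sketch, not the integration's code):

```python
# Length-keyed hash-type dispatch, equivalent to the if/elif chain above.
HASH_TYPE_BY_LENGTH = {32: 'hash_md5', 40: 'hash_sha1', 64: 'hash_sha256'}

def hash_type(value: str) -> str:
    try:
        return HASH_TYPE_BY_LENGTH[len(value)]
    except KeyError:
        raise ValueError(f'Invalid hash length: {len(value)}')

assert hash_type('d' * 64) == 'hash_sha256'
```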
HybridF5/tempest_debug
tempest/services/identity/v2/json/endpoints_client.py
Python
apache-2.0
1,870
0
# Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_serialization import jsonutils as json from tempest.lib.common import rest_client class EndpointsClient(rest_client.RestClient): api_version = "v2.0" def create_endpoint(self, service_id, region_id, **kwargs): """Create an endpoint for service.""" post_body = { 'service_id': service_id, 'region': region_id, 'publicurl': kwargs.get('publicurl'), 'adminurl': kwargs.get('adminurl'), 'internalurl': kwargs.get('internalurl')
} post_body = json.dumps({'endpoint': post_body}) resp, body = self.post('/endpoints', post_body) self.expected_success(200, resp.status) body = json
.loads(body) return rest_client.ResponseBody(resp, body) def list_endpoints(self): """List Endpoints - Returns Endpoints.""" resp, body = self.get('/endpoints') self.expected_success(200, resp.status) body = json.loads(body) return rest_client.ResponseBody(resp, body) def delete_endpoint(self, endpoint_id): """Delete an endpoint.""" url = '/endpoints/%s' % endpoint_id resp, body = self.delete(url) self.expected_success(204, resp.status) return rest_client.ResponseBody(resp, body)
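`create_endpoint()` above is mostly request-body assembly before the POST. Just that serialization step, with the HTTP layer stubbed out (the helper name is invented):

```python
# Body construction as in create_endpoint(), minus the REST client.
import json

def endpoint_body(service_id, region_id, **kwargs):
    post_body = {
        'service_id': service_id,
        'region': region_id,
        'publicurl': kwargs.get('publicurl'),
        'adminurl': kwargs.get('adminurl'),
        'internalurl': kwargs.get('internalurl'),
    }
    return json.dumps({'endpoint': post_body})

body = endpoint_body('svc-1', 'RegionOne', publicurl='http://example.com:5000')
assert json.loads(body)['endpoint']['region'] == 'RegionOne'
```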
austinharris/gem5-riscv
src/arch/x86/isa/insts/general_purpose/input_output/string_io.py
Python
bsd-3-clause
4,418
0
# Copyright (c) 2007-2008 The Hewlett-Packard Development Company # All rights r
eserved. # # The license below extends only to copyright in the
software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# # Authors: Gabe Black microcode = ''' def macroop INS_M_R { # Find the constant we need to either add or subtract from rdi ruflag t0, 10 movi t3, t3, dsz, flags=(CEZF,), dataSize=asz subi t4, t0, dsz, dataSize=asz mov t3, t3, t4, flags=(nCEZF,), dataSize=asz zexti t2, reg, 15, dataSize=8 mfence ld t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8, \ nonSpec=True st t6, es, [1, t0, rdi] mfence add rdi, rdi, t3, dataSize=asz }; def macroop INS_E_M_R { and t0, rcx, rcx, flags=(EZF,), dataSize=asz br label("end"), flags=(CEZF,) # Find the constant we need to either add or subtract from rdi ruflag t0, 10 movi t3, t3, dsz, flags=(CEZF,), dataSize=asz subi t4, t0, dsz, dataSize=asz mov t3, t3, t4, flags=(nCEZF,), dataSize=asz zexti t2, reg, 15, dataSize=8 mfence topOfLoop: ld t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8, \ nonSpec=True st t6, es, [1, t0, rdi] subi rcx, rcx, 1, flags=(EZF,), dataSize=asz add rdi, rdi, t3, dataSize=asz br label("topOfLoop"), flags=(nCEZF,) end: mfence fault "NoFault" }; def macroop OUTS_R_M { # Find the constant we need to either add or subtract from rdi ruflag t0, 10 movi t3, t3, dsz, flags=(CEZF,), dataSize=asz subi t4, t0, dsz, dataSize=asz mov t3, t3, t4, flags=(nCEZF,), dataSize=asz zexti t2, reg, 15, dataSize=8 mfence ld t6, ds, [1, t0, rsi] st t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8, \ nonSpec=True mfence add rsi, rsi, t3, dataSize=asz }; def macroop OUTS_E_R_M { and t0, rcx, rcx, flags=(EZF,), dataSize=asz br label("end"), flags=(CEZF,) # Find the constant we need to either add or subtract from rdi ruflag t0, 10 movi t3, t3, dsz, flags=(CEZF,), dataSize=asz subi t4, t0, dsz, dataSize=asz mov t3, t3, t4, flags=(nCEZF,), dataSize=asz zexti t2, reg, 15, dataSize=8 mfence topOfLoop: ld t6, ds, [1, t0, rsi] st t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8, \ nonSpec=True subi rcx, rcx, 1, flags=(EZF,), dataSize=asz add rsi, rsi, t3, dataSize=asz br label("topOfLoop"), flags=(nCEZF,) end: mfence fault "NoFault" }; '''
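The `ruflag t0, 10 / movi / subi / mov` prologue in each macroop computes the string-op stride from the direction flag (bit 10 of RFLAGS): plus the element size when DF is clear, minus it when DF is set. The same arithmetic in Python (names hypothetical):

```python
# Stride selection for x86 string I/O ops, as the microcode prologue computes it.
def string_stride(df_flag: bool, element_size: int) -> int:
    return -element_size if df_flag else element_size

assert string_stride(False, 2) == 2   # DF=0: rdi/rsi advance forward
assert string_stride(True, 2) == -2   # DF=1: rdi/rsi walk backward
```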
panrobot/ishneECGviewer
ecgReader.py
Python
gpl-2.0
7,145
0.012596
import sys from datetime import datetime, timedelta from array import array from numpy import hsplit, asarray class ECG: '''Checks validity of selected .ecg file. If it is valid .ecg file creates an instance with all the data stored in .ecg file''' def __init__(self, filename, enc='cp1250'): '''Default encoding is set to cp1250 - set accordingly to your needs''' self.leadNamesDict = {0:'Unknown', 1:'Bipolar', 2:'X biploar', 3:'Y bipolar', 4:'Z biploar', \ 5:'I', 6:'II', 7:'III', 8:'VR', 9:'VL', 10:'VF', \ 11:'V1', 12:'V2', 13:'V3', 14:'V4', 15:'V5', 16:'V6', \ 17:'ES', 18:'AS', 19:'AI'} self.fn = filename self.enc = enc if not self.fn: NoneFileSpecified() with open(self.fn, mode='rb') as ecgFile: self.magicNumber = ecgFile.read(8).decode(self.enc) if self.magicNumber != 'ISHNE1.0': raise Exception('File does not have \'ISHNE1.0\' string in the first 8 bytes') self.crc = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True) self.headerFixedLength = 512 self.headerVariableLength = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True) #get back to 10th byte where header starts ecgFile.seek(10) self.headerWhole = ecgFile.read(self.headerFixedLength + self.headerVariableLength) crc = int(self.compute_crc(self.headerWhole),2) if (crc != self.crc): raise Exception('CRC check for file failed. Computed CRC: {0}, CRC in file: {1}'.format(crc, self.crc)) #get back to 14th byte just after headerVariableLength ecgFile.seek(14) self.channelNumberOfSamples = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True) self.headerVariableOffset = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True) self.ecgBytesBlockOffset = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True) self.fileVersion = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True) self.patientFirstName = ecgFile.read(40).decode(self.enc) self.patientFirstName = self.patientFirstName.split('\x00', 1)[0] self.patientLastName = ecgFile.read(40).decode(self.enc) self.patientLastName = self.patientLastName.split('\x00', 1)[0] self.patientID = ecgFile.read(20).decode(self.enc) self.patientID = self.patientID.split('\x00', 1)[0] self.patientSex = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True) self.patientRace = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True) #patient date of birth as [dd,mm,yy] dob = list() for i in range(0,3): dob.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)) self.patientDateOfBirth = datetime(dob[2], dob[1], dob[0]) # date of test recording as [dd,mm,yy] dor = list() for i in range(0,3): dor.append(int.from_bytes(ecgFile.read(2), by
teorder='little', signed=True)) #date of file creation as [dd,mm,yy] dof = list() for i in rang
e(0,3): dof.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)) self.dateOfFileCreation = datetime(dor[2], dor[1], dor[0]) #testStart - time of test begining HH:MM:SS testStart = list() for i in range(0,3): testStart.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)) self.datetimeStartOfTest = datetime(dor[2],dor[1],dor[0],testStart[0],testStart[1],testStart[2]) self.numberOfLeads = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True) self.leadsSpecs = list() self.leadsNames = list() for i in range(0,12): spec = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True) self.leadsSpecs.append(spec) self.leadsNames.append(self.leadNamesDict[spec]) self.leadsQuality = list() for i in range(0,12): self.leadsQuality.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)) self.leadsResolution = list() for i in range(0,12): self.leadsResolution.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=False)) self.pacemaker = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True) self.recorderType = ecgFile.read(40).decode(self.enc) self.recorderType = self.recorderType.split('\x00', 1)[0] self.samplingRate = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True) self.datetimeEndOfTest = self.datetimeStartOfTest + timedelta(seconds=int(self.channelNumberOfSamples/self.samplingRate)) self.fileProperiaty = ecgFile.read(80).decode(self.enc) self.fileProperiaty = self.fileProperiaty.split('\x00', 1)[0] self.fileCopyright = ecgFile.read(80).decode(self.enc) self.fileCopyright = self.fileCopyright.split('\x00', 1)[0] self.reserved = ecgFile.read(80).decode(self.enc) self.reserved = self.reserved.split('\x00', 1)[0] self.reserved = ecgFile.read(80).decode(self.enc) self.reserved = self.reserved.split('\x00', 1)[0] self.headerVariable = ecgFile.read(self.headerVariableLength).decode(self.enc) if len(self.headerVariable) > 0: self.headerVariable = self.headerVariable.split('\x00', 1)[0] ecgFile.seek(self.ecgBytesBlockOffset) ecgBytes = array('h') ecgBytes.fromfile(ecgFile, self.channelNumberOfSamples * self.numberOfLeads) ecgBytesArray = asarray(ecgBytes) ecgBytesArray = ecgBytesArray.reshape(-1,self.numberOfLeads) self.ecgInChannels = hsplit(ecgBytesArray, self.numberOfLeads) def compute_crc(self, data: bytes): rol = lambda val, r_bits, max_bits: \ (val << r_bits%max_bits) & (2**max_bits-1) | \ ((val & (2**max_bits-1)) >> (max_bits-(r_bits%max_bits))) b = bytearray() data = bytearray(data) crc=0xFFFF crchi, crclo = divmod(crc, 0x100) for a in data: a = a ^ crchi crchi = a a = a >> 4 a = a ^ crchi crchi = crclo crclo = a a = rol(a,4,8) b=a a = rol(a,1,8) a = a & 0x1F crchi = a ^ crchi a = b & 0xF0 crchi = a ^ crchi b = rol(b,1,8) b = b & 0xE0 crclo = b ^ crclo checksum = bin(crchi) + bin(crclo) checksum = checksum[:9] + '0' + checksum[11:] return checksum class NoneFileSpecified(Exception): '''Filename can not be empty'''
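Every fixed header field above is decoded with `int.from_bytes(..., byteorder='little')`; `struct.unpack` is an equivalent, more compact spelling for runs of adjacent fields. A self-contained illustration against a fabricated 6-byte buffer:

```python
# Little-endian int32 + int16, matching the header-variable-length and
# file-version reads in the ECG constructor.
import struct

raw = (512).to_bytes(4, 'little') + (10).to_bytes(2, 'little')
var_header_len, file_version = struct.unpack('<ih', raw)
assert (var_header_len, file_version) == (512, 10)
```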
ecreall/lagendacommun
lac/content/processes/social_applications_management/definition.py
Python
agpl-3.0
3,478
0.008051
# Copyright (c) 2014 by Ecreall under licence AGPL terms # available on http://www.gnu.org/licenses/agpl.html # licence: AGPL # author: Amen Souissi from dace.processdefinition.processdef import ProcessDefinition from dace.processdefinition.activitydef import ActivityDefinition from dace.processdefinition.gatewaydef import ( ExclusiveGatewayDefinition, ParallelGatewayDefinition) from dace.processdefinition.transitiondef import TransitionDefinition from dace.processdefinition.eventdef import ( StartEventDefinition, EndEventDefinition) from dace.objectofcollaboration.services.processdef_container import ( process_definition) from pontus.core import VisualisableElement from .behaviors import ( Addapplications, AddFacebookApplication, AddTwitterApplication, AddGoogleApplication, SeeApplication, EditApplication, RemoveApplication ) from lac import _ @process_definition(name='socialapplicationsprocess', id='socialapplicationsprocess') class SocialApplicationsProcess(ProcessDefinition, VisualisableElement): isUnique = True def __init__(self, **kwargs): super(SocialApplicationsProcess, self).__init__(**kwargs) self.title = _('Social applications process') self.description = _('Social applications process') def _init_definition(self): self.defineNodes( start = StartEventDefinition(), pg = ParallelGatewayDefinition(), addapplication = ActivityDefinition(contexts=[Addapplications, AddFacebookApplication, AddTwitterApplication, AddGoogleAppli
cation], description=_("Add a social application"), title=_("Add a social application"), groups=[]),
seeapplication = ActivityDefinition(contexts=[SeeApplication], description=_("See the application"), title=_("See the application"), groups=[]), editapplication = ActivityDefinition(contexts=[EditApplication], description=_("Edit the application"), title=_("Edit"), groups=[]), removeapplication = ActivityDefinition(contexts=[RemoveApplication], description=_("Remove the application"), title=_("Remove"), groups=[]), eg = ExclusiveGatewayDefinition(), end = EndEventDefinition(), ) self.defineTransitions( TransitionDefinition('start', 'pg'), TransitionDefinition('pg', 'addapplication'), TransitionDefinition('addapplication', 'eg'), TransitionDefinition('pg', 'seeapplication'), TransitionDefinition('seeapplication', 'eg'), TransitionDefinition('pg', 'editapplication'), TransitionDefinition('editapplication', 'eg'), TransitionDefinition('pg', 'removeapplication'), TransitionDefinition('removeapplication', 'eg'), TransitionDefinition('eg', 'end'), )
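`_init_definition()` above wires nodes with explicit `TransitionDefinition` pairs: start, then a parallel gateway fanning out to the activities, which reconverge on an exclusive gateway before the end node. The same wiring reduced to an edge list (illustrative, not the dace/pontus API):

```python
# Process graph as plain (source, destination) edges.
transitions = [
    ('start', 'pg'),
    ('pg', 'addapplication'), ('addapplication', 'eg'),
    ('pg', 'seeapplication'), ('seeapplication', 'eg'),
    ('eg', 'end'),
]

def successors(node):
    return [dst for src, dst in transitions if src == node]

assert successors('pg') == ['addapplication', 'seeapplication']
```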
aluminiumgeek/goodbye-mihome
mihome.py
Python
bsd-2-clause
4,258
0.001879
import binascii import code import importlib import json import psycopg2 import readline import socket import struct import sys import time from Crypto.Cipher import AES from datetime import datetime from multiprocessing import Process from threading import Thread import config from plugins import sensor_ht, magnet, yeelight from utils import get_store from web.w import run_app as web_app conn = psycopg2.connect("dbname={} user={} password={}".format(config.DBNAME, config.DBUSER, config.DBPASS)) cursor = conn.cursor() MULTICAST = { 'mihome': ('224.0.0.50', 9898), 'yeelight': ('239.255.255.250', 1982) } SOCKET_BUFSIZE = 1024 IV = bytes([0x17, 0x99, 0x6d, 0x09, 0x3d, 0x28, 0xdd, 0xb3, 0xba, 0x69, 0x5a, 0x2e, 0x6f, 0x58, 0x56, 0x2e]) def receiver(service='mihome'): from plugins import gateway assert service in MULTICAST, 'No such service' store = get_store() address, port = MULTICAST.get(service) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind(("0.0.0.0", port)) mreq = struct.pack("=4sl", socket.inet_aton(address), socket.INADDR_ANY) sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 32) sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, SOCKET_BUFSIZE) sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq) sock.settimeout(20) # 2x of heartbeat period current = {} while True: try: data, _ = sock.recvfrom(SOCKET_BUFSIZE) # buffer size is 1024 bytes except socket.timeout: continue print(datetime.now().isoformat(), data) if service == 'mihome': message = json.loads(data.decode()) data = json.loads(message['data']) if message.get('model') in ('sensor_ht', 'weather.v1') and not sensor_ht.process(conn, cursor, current, message, data): continue elif message.get('model') == 'magnet': magnet.process(store, message, data) elif message.get('model') == 'gateway': gateway.process(store, message, data) current = {} elif service == 'yeelight': yeelight.process(data.decode()) def send_command(command, timeout=10): _, port = MULTICAST.get('mihome') if isinstance(command.get('data'), dict): command['data'] = json.dumps(command['data']) address = get_store().get('gateway_addr') if address is None: print("Didn't receive any heartbeat from gateway yet. Delaying request for 10 seconds.") time.sleep(10) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.settimeout(timeout) sock.connect((address, port)) sock.send(json.dumps(command).encode('ascii')) data = None try: data, addr = sock.recvfrom(SOCKET_BUFSIZE) except ConnectionRefusedError: print("send_command :: recvfrom() connection refused: {}:{}".format(address.decode(), port)) except socket.timeout: print("send_command :: recvfrom()
timed out: {}:{}".format(address.decode(), port)) finally: sock.close() return data def get_key(): """Get current gateway key""" cipher = AES.new(config.MIHOME_GATEWAY_PASSWORD, AES.MODE_CBC, IV) encrypted = cipher.encrypt(get_store().get('gateway_token')) return binascii.hexlify(encrypted) if __name__ == '__main__': if len(sys.argv) > 1 and sys.argv[1] == 'shell': vars = globals().copy() vars.update(locals()) sh
ell = code.InteractiveConsole(vars) shell.interact() sys.exit() Thread(target=web_app).start() for app_name in config.ENABLED_APPS: try: app = importlib.import_module('apps.{}'.format(app_name)) except ImportError as e: print('Could not import app "{}": {}'.format(app_name, e)) continue kwargs = {'store': get_store(), 'conn': conn, 'cursor': cursor} Process(target=app.run, kwargs=kwargs).start() print('Loaded app: {}'.format(app_name)) for service in MULTICAST: Process(target=receiver, args=(service,)).start() # Discover Yeelight bulbs yeelight.discover()
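`receiver()` above joins an IPv4 multicast group with `IP_ADD_MEMBERSHIP` before looping on `recvfrom`. The join in isolation, using the script's Mi Home defaults as example values:

```python
# Minimal multicast group join, as in receiver().
import socket
import struct

ADDRESS, PORT = '224.0.0.50', 9898

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('0.0.0.0', PORT))
mreq = struct.pack('=4sl', socket.inet_aton(ADDRESS), socket.INADDR_ANY)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
sock.settimeout(20)  # 2x the gateway heartbeat period, as in receiver()
```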
nigeljonez/newpyfibot
modules/module_oraakkeli.py
Python
bsd-3-clause
459
0.010893
import urllib def command_oraakkeli(bot, user, channel, args): """Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)""" if not args: return args = urllib.quote_plus(args) answer = getUrl("
http://www.lintukoto.net/viihde/oraakkeli/index.php
?kysymys=%s&html=0" % args).getContent() answer = unicode(answer) answer = answer.encode("utf-8") return bot.say(channel, "Oraakkeli vastaa: %s" % answer)
nvladimus/zebrascope_targets
MultiviewRegistration/PymageJ-devel/pymagej/roi.py
Python
mit
24,830
0.002416
""" PymageJ Copyright (C) 2015 Jochem Smit This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """ import numpy as np import struct import re from collections import namedtuple import os import warnings #todo figure out if x,y coords should be relative or absolute! -> relative # http://rsb.info.nih.gov/ij/developer/source/ij/io/RoiDecoder.java.html # http://rsb.info.nih.gov/ij/developer/source/ij/io/RoiEncoder.java.html # clippers polynomial? # Base class for all ROI classes class ROIObject(object): header = {} # todo overwrite get/set and print warnings when not supported by imagej def __init__(self, name=None): self.name = name def area(self): raise NotImplementedError('Area not implemented') class ROIPolygon(ROIObject): type = 'polygon' def __init__(self, top, left, x_coords, y_coords, *args, **kwargs): self.top = top self.left = left self.x_coords = np.array(x_coords) self.y_coords = np.array(y_coords) super(ROIPolygon, self).__init__(*args, **kwargs) @property def bottom(self): return self.y_coords.max() + self.top @property def right(self): return self.x_coords.max() + self.left @property def width(self): return self.x_coords.max() - self.x_coords.min() + 1 @property def height(self): return self.y_coords.max() - self.y_coords.min() + 1 @property def area(self): raise NotImplementedError('Area of polygon ROI is not implemented') def __len__(self): return len(self.x_coords) class ROIRect(ROIObject): type = 'rect' def __init__(self, top, left, bottom, right, arc=0, *args, **kwargs): self.top = top self.left = left self.bottom = bottom self.right = right self.arc = arc super(ROIRect, self).__init__(*args, **kwargs) @property def width(self): return self.right - self.left @property def height(self): return self.bottom - self.top @property def area(self): if self.arc == 0: return self.width * self.height else: warnings.warn(r"Rounded rectangle area value differs from ImageJ value as it first 'smooths' to pixels") return self.width * self.height - ((4 - np.pi)*(self.arc/2.)**2) class ROIOval(ROIObject): type = 'oval' def __init__(self, top, left, bottom, right, *args, **kwargs): self.top = top self.left = left self.bottom = bottom self.right = right super(ROIOval, self).__init__(*args, **kwargs) @property def width(self): return self.right - self.left @property def height(self): return self.bottom - self.top @property def area(self): warnings.warn(r"Oval area value differs from ImageJ value as it first 'smooths' to pixels") return self.width*self.height*np.pi*0.25 class ROILine(ROIObject): type = 'line' def __init__(self, x1, y1, x2, y2, *args, **kwargs): self.x1 = x1 self.y1 = y1 self.x2 = x2 self.y2 = y2 super(ROILine, self).__init__(*args, **kwargs) @property def area(self): return 0 class ROIFreeLine(ROIObject): type = 'freeline' def __init__(self, top, left, x_coords, y_coords, *args, **kwargs): assert (len(x_coords) == len(y_coords)) self.top = top self.left = left 
self.x_coords = np.array(x_coords) self.y_coords = np.array(y_coords) super(ROIFreeLine, self).__init__(*args, **kwargs) @property def bottom(self): return self.y_coords.max() + self.top @property def right(self): return self.x_coords.max() + self.left @property def width(self): return self.x_coords.max() - self.x_coords.min() + 1 @property def height(self): return self.y_coords.max() - self.y_coords.min() + 1 @property def area(self): raise NotImplementedError("Freeline area not implemented") def __len__(self): return len(self.x_coords) class ROIPolyline(ROIObject): type = 'polyline' def __init__(self, top, left, x_coords, y_coords, *args, **kwargs): assert(len(x_coords) == len(y_coords)) self.top = top self.left = left self.x_coords = np.array(x_coords) self.y_coords = np.array(y_coords) super(ROIPolyline, self).__init__(*args, **kwargs) @property def bottom(self): return self.y_coords.max() + self.top @property def right(self): return self.x_coords.max() + self.left
@property def width(self): return self.x_coords.max() - self.x_coords.min() + 1 @property def height(self): return self.y_coords.max() - self.y_coords.min() + 1 @property def area(self
): raise NotImplementedError("Freeline area not implemented") def __len__(self): return len(self.x_coords) class ROINoRoi(ROIObject): type = 'no_roi' @property def area(self): return 0 class ROIFreehand(ROIObject): type = 'freehand' def __init__(self, top, left, x_coords, y_coords, *args, **kwargs): self.top = top self.left = left self.x_coords = np.array(x_coords) self.y_coords = np.array(y_coords) super(ROIFreehand, self).__init__(*args, **kwargs) @property def bottom(self): return self.y_coords.max() + self.top @property def right(self): return self.x_coords.max() + self.left @property def width(self): return self.x_coords.max() - self.x_coords.min() + 1 @property def height(self): return self.y_coords.max() - self.y_coords.min() + 1 @property def area(self): raise NotImplementedError('Area of freehand ROI is not implemented') def __len__(self): return len(self.x_coords) class ROITraced(ROIObject): type = 'traced' def __init__(self, top, left, bottom, right, x_coords, y_coords, *args, **kwargs): self.top = top self.left = left self.bottom = bottom self.right = right self.x_coords = x_coords self.y_coords = y_coords super(ROITraced, self).__init__(*args, **kwargs) @property def width(self): return self.x_coords.max() - self.x_coords.min() + 1 @property def height(self): return self.y_coords.max() - self.y_coords.min() + 1 @property def area(self): raise NotImplementedError('Area of traced ROI is not implemented') class ROIAngle(ROIObject): @property def area(self): return 0 class ROIPoint(ROIObject): @property def area(self): return 0 HeaderTuple = namedtuple('Header_variables', 'type size offset') class ROIFileObject(object): header1_fields = [ # 'VAR_NAME', 'type', offset' ['MAGIC', '4s', 0], ['VERSION_OFFSET', 'h', 4], ['TYPE', 'b', 6], ['TOP', 'h', 8], ['LEFT', 'h', 10], ['BOTTOM', 'h', 12], ['RIGHT', 'h', 14], ['N_COORDINATES', 'h', 16], ['X1', 'f', 18], ['Y1', 'f', 22], ['X2', 'f', 26], ['Y2', 'f', 30], ['XD', 'f', 18], # D vars for sub pixel resolution ROIs ['YD', 'f', 22], ['WIDTH', 'f', 26], ['HEIGHT', 'f', 30], ['STROKE_WIDTH', 'h', 34], ['SHAPE_ROI_SIZE', 'i', 36], ['STROKE_COLOR'
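Tables like `header1_fields` above pair each field name with a struct code and a fixed byte offset, which is enough to drive decoding directly. A self-contained sketch against a fabricated buffer (ImageJ `.roi` headers are big-endian, hence the `>` prefix):

```python
# Table-driven header decoding over a fabricated 20-byte big-endian buffer.
import struct

FIELDS = [('MAGIC', '4s', 0), ('VERSION_OFFSET', 'h', 4), ('TYPE', 'b', 6),
          ('TOP', 'h', 8), ('LEFT', 'h', 10)]

buf = (b'Iout' + struct.pack('>h', 227) + struct.pack('>b', 1) + b'\x00'
       + struct.pack('>hh', 5, 7) + bytes(8))

header = {name: struct.unpack_from('>' + code, buf, off)[0]
          for name, code, off in FIELDS}
assert header['MAGIC'] == b'Iout' and (header['TOP'], header['LEFT']) == (5, 7)
```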
fbrei/aind
planning/my_air_cargo_problems.py
Python
mit
11,755
0.003318
from aimacode.logic import PropKB from aimacode.planning import Action from aimacode.search import ( Node, Problem, ) from aimacode.utils import expr from lp_utils import ( FluentState, encode_state, decode_state, ) from my_planning_graph import PlanningGraph class AirCargoProblem(Problem): def __init__(self, cargos, planes, airports, initial: FluentState, goal: list): """ :param cargos: list of str cargos in the problem :param planes: list of str planes in the problem :param airports: list of str airports in the problem :param initial: FluentState object positive and negative literal fluents (as expr) describing initial state :param goal: list of expr literal fluents required for goal test """ self.state_map = initial.pos + initial.neg self.initial_state_TF = encode_state(initial, self.state_map) Problem.__init__(self, self.initial_state_TF, goal=goal) self.cargos = cargos self.planes = planes self.airports = airports self.actions_list = self.get_actions() def get_actions(self): ''' This method creates concrete actions (no variables) for all actions in the problem domain action schema and turns them into complete Action objects as defined in the aimacode.planning module. It is computationally expensive to call this method directly; however, it is called in the constructor and the results cached in the `actions_list` property. Returns: ---------- list<Action> list of Action objects ''' # creates concrete Action objects based on the domain action schema for: Load, Unload, and Fly # concrete actions definition: specific literal action that does not include variables as with the schema # for example, the action schema 'Load(c, p, a)' can represent the concrete actions 'Load(C1, P1, SFO)' # or 'Load(C2, P2, JFK)'. The actions for the planning problem must be concrete because the problems in # forward search and Planning Graphs must use Propositional Logic def load_actions(): '''Create all concrete Load actions and return a list :return: list of Action objects ''' loads = [] for a in self.airports:
for p in self.planes: for c in self.cargos: pr
econd_pos = [ expr("At({}, {})".format(c,a)), expr("At({}, {})".format(p,a)), ] precond_neg = [] effect_add = [ expr("In({}, {})".format(c,p)), ] effect_rem = [ expr("At({}, {})".format(c,a)) ] load = Action(expr("Load({}, {}, {})".format(c,p,a)), [precond_pos, precond_neg], [effect_add, effect_rem] ) loads.append(load) return loads def unload_actions(): '''Create all concrete Unload actions and return a list :return: list of Action objects ''' unloads = [] for a in self.airports: for p in self.planes: for c in self.cargos: precond_pos = [ expr("In({}, {})".format(c,p)), expr("At({}, {})".format(p,a)), ] precond_neg = [] effect_add = [ expr("At({}, {})".format(c,a)), ] effect_rem = [ expr("In({}, {})".format(c,p)) ] unload = Action( expr("Unload({}, {}, {})".format(c,p,a)), [precond_pos, precond_neg], [effect_add, effect_rem] ) unloads.append(unload) return unloads def fly_actions(): '''Create all concrete Fly actions and return a list :return: list of Action objects ''' flys = [] for fr in self.airports: for to in self.airports: if fr != to: for p in self.planes: precond_pos = [expr("At({}, {})".format(p, fr)), ] precond_neg = [] effect_add = [expr("At({}, {})".format(p, to))] effect_rem = [expr("At({}, {})".format(p, fr))] fly = Action(expr("Fly({}, {}, {})".format(p, fr, to)), [precond_pos, precond_neg], [effect_add, effect_rem]) flys.append(fly) return flys return load_actions() + unload_actions() + fly_actions() def actions(self, state: str) -> list: """ Return the actions that can be executed in the given state. :param state: str state represented as T/F string of mapped fluents (state variables) e.g. 'FTTTFF' :return: list of Action objects """ possible_actions = [] actual_state = decode_state(state, self.state_map) for a in self.actions_list: sat = True for p in a.precond_pos: if p not in actual_state.pos: sat = False break if sat: for n in a.precond_neg: if n not in actual_state.neg: sat = False break if sat: possible_actions.append(a) return possible_actions def result(self, state: str, action: Action): """ Return the state that results from executing the given action in the given state. The action must be one of self.actions(state). :param state: state entering node :param action: Action applied :return: resulting state after action """ actual_state = decode_state(state,self.state_map) pos_list = action.effect_add + [ p for p in actual_state.pos if p not in action.effect_rem] neg_list = action.effect_rem + [ p for p in actual_state.neg if p not in action.effect_add] new_state = FluentState(pos_list, neg_list) return encode_state(new_state, self.state_map) def goal_test(self, state: str) -> bool: """ Test the state to see if goal is reached :param state: str representing state :return: bool """ actual_state = decode_state(state,self.state_map) for s in self.goal: if s not in actual_state.pos: return False return True def h_1(self, node: Node): # note that this is not a true heuristic h_const = 1 return h_const def h_pg_levelsum(self, node: Node): ''' This heuristic uses a planning graph representation of the problem state space to estimate the sum of all actions that must be carried out from the current state in order to satisfy each individual goal condition. 
        '''
        # requires implemented PlanningGraph class
        pg = PlanningGraph(self, node.state)
        pg_levelsum = pg.h_levelsum()
        return pg_levelsum

    def h_ignore_preconditions(self, node: Node):
        '''This heuristic estimates the minimum number of actions that must be
        carried out from the current state in order to satisfy all of the goal
        conditions by ignoring the preconditions required for an action to be
        executed.
        '''
        # TODO implement (see Russell-Norvig Ed-3 10.2.3 or Russell-Norvig Ed-2 11.2)
        count = 0
        goal_tf = encode_state(F
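The record cuts off inside the TODO body of `h_ignore_preconditions`. Under the usual reading of Russell-Norvig 10.2.3, the heuristic simply counts goal fluents not yet satisfied; the following is a minimal sketch of the method, assuming the `decode_state` helper and imports from the file above, and not the author's actual completion.

def h_ignore_preconditions(self, node: Node):
    # Hypothetical completion: with preconditions ignored, each unmet goal
    # fluent can be achieved by one action, so the estimate is the number of
    # goal fluents not yet true in the current state.
    current = decode_state(node.state, self.state_map)
    return sum(1 for goal in self.goal if goal not in current.pos)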
shoopio/shoop
shuup_tests/campaigns/test_catalog_campaign_admin.py
Python
agpl-3.0
7,668
0.002347
# This file is part of Shuup. # # Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. # test that admin actually saves catalog from __future__ import unicode_literals import datetime import pytest import pytz from django.test import override_settings from shuup.apps.provides import override_provides from shuup.campaigns.admin_module.form_parts import CatalogBaseFormPart from shuup.campaigns.admin_module.vi
ews import CatalogCampaignEditView from shuup.campaigns.models.campaigns import CatalogCampaign from shuup.testing.factories import get_default_shop from shuup.testing.utils import apply_request_middleware DEFAULT_CONDITION_FORMS = [ "shuup.campaigns.admin_module.forms:ContactGroup
ConditionForm", "shuup.campaigns.admin_module.forms:ContactConditionForm", ] DEFAULT_FILTER_FORMS = [ "shuup.campaigns.admin_module.forms:ProductTypeFilterForm", "shuup.campaigns.admin_module.forms:ProductFilterForm", "shuup.campaigns.admin_module.forms:CategoryFilterForm", ] DEFAULT_EFFECT_FORMS = [ "shuup.campaigns.admin_module.forms:ProductDiscountAmountForm", "shuup.campaigns.admin_module.forms:ProductDiscountPercentageForm", ] def get_form_parts(request, view, object): with override_provides("campaign_context_condition", DEFAULT_CONDITION_FORMS): with override_provides("campaign_catalog_filter", DEFAULT_FILTER_FORMS): with override_provides("campaign_product_discount_effect_form", DEFAULT_EFFECT_FORMS): initialized_view = view(request=request, kwargs={"pk": object.pk}) return initialized_view.get_form_parts(object) @pytest.mark.django_db def test_admin_campaign_edit_view_works(rf, admin_user): shop = get_default_shop() view_func = CatalogCampaignEditView.as_view() request = apply_request_middleware(rf.get("/"), user=admin_user) campaign = CatalogCampaign.objects.create(name="test campaign", active=True, shop=shop) response = view_func(request, pk=campaign.pk) assert campaign.name in response.rendered_content response = view_func(request, pk=None) assert response.rendered_content @pytest.mark.django_db def test_campaign_new_mode_view_formsets(rf, admin_user): view = CatalogCampaignEditView get_default_shop() request = apply_request_middleware(rf.get("/"), user=admin_user) form_parts = get_form_parts(request, view, view.model()) assert len(form_parts) == 1 assert issubclass(form_parts[0].__class__, CatalogBaseFormPart) @pytest.mark.django_db def test_campaign_edit_view_formsets(rf, admin_user): view = CatalogCampaignEditView shop = get_default_shop() object = CatalogCampaign.objects.create(name="test campaign", active=True, shop=shop) request = apply_request_middleware(rf.get("/"), user=admin_user) form_parts = get_form_parts(request, view, object) # form parts should include forms plus one for the base form assert len(form_parts) == (len(DEFAULT_CONDITION_FORMS) + len(DEFAULT_FILTER_FORMS) + len(DEFAULT_EFFECT_FORMS) + 1) @pytest.mark.django_db def test_campaign_creation(rf, admin_user): """ To make things little bit more simple let's use only english as a language. """ with override_settings(LANGUAGES=[("en", "en")]): view = CatalogCampaignEditView.as_view() data = { "base-name": "Test Campaign", "base-public_name__en": "Test Campaign", "base-shop": get_default_shop().id, "base-active": True, "base-basket_line_text": "Test campaign activated!", } campaigns_before = CatalogCampaign.objects.count() request = apply_request_middleware(rf.post("/", data=data), user=admin_user) response = view(request, pk=None) assert response.status_code in [200, 302] assert CatalogCampaign.objects.count() == (campaigns_before + 1) @pytest.mark.django_db def test_campaign_edit_save(rf, admin_user): """ To make things little bit more simple let's use only english as a language. 
""" with override_settings(LANGUAGES=[("en", "en")]): shop = get_default_shop() object = CatalogCampaign.objects.create(name="test campaign", active=True, shop=shop) object.save() view = CatalogCampaignEditView.as_view() new_name = "Test Campaign" new_end_datetime = datetime.datetime(year=2016, month=6, day=20) assert object.name != new_name assert object.end_datetime is None data = { "base-name": new_name, "base-public_name__en": "Test Campaign", "base-shop": get_default_shop().id, "base-active": True, "base-basket_line_text": "Test campaign activated!", "base-start_datetime": datetime.datetime(year=2016, month=6, day=19), "base-end_datetime": new_end_datetime, } methods_before = CatalogCampaign.objects.count() # Conditions, effects and effects is tested separately with override_provides("campaign_context_condition", []): with override_provides("campaign_catalog_filter", []): with override_provides("campaign_product_discount_effect_form", []): request = apply_request_middleware(rf.post("/", data=data), user=admin_user) response = view(request, pk=object.pk) assert response.status_code in [200, 302] assert CatalogCampaign.objects.count() == methods_before updated_object = CatalogCampaign.objects.get(pk=object.pk) assert updated_object.name == new_name assert updated_object.end_datetime == new_end_datetime.replace(tzinfo=pytz.UTC) @pytest.mark.django_db def test_campaign_end_date(rf, admin_user): """ To make things little bit more simple let's use only english as a language. """ with override_settings(LANGUAGES=[("en", "en")]): shop = get_default_shop() old_name = "test_campaign" object = CatalogCampaign.objects.create(name=old_name, active=True, shop=shop) object.save() view = CatalogCampaignEditView.as_view() new_name = "Test Campaign" assert object.name != new_name data = { "base-name": new_name, "base-public_name__en": "Test Campaign", "base-shop": get_default_shop().id, "base-active": True, "base-basket_line_text": "Test campaign activated!", "base-start_datetime": datetime.datetime(year=2016, month=6, day=19), "base-end_datetime": datetime.datetime(year=2016, month=6, day=10), } methods_before = CatalogCampaign.objects.count() # Conditions, effects and effects is tested separately with override_provides("campaign_context_condition", []): with override_provides("campaign_catalog_filter", []): with override_provides("campaign_product_discount_effect_form", []): request = apply_request_middleware(rf.post("/", data=data), user=admin_user) response = view(request, pk=object.pk) assert response.status_code in [200, 302] content = response.render().content.decode("utf-8") assert "Campaign end date can&#39;t be before a start date." in content assert CatalogCampaign.objects.count() == methods_before assert CatalogCampaign.objects.get(pk=object.pk).name == old_name
dud225/incubator-airflow
airflow/operators/pig_operator.py
Python
apache-2.0
1,716
0.001166
import logging
import re

from airflow.hooks import PigCliHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults


class PigOperator(BaseOperator):
    """
    Executes pig script.

    :param pig: the pig latin script to be executed
    :type pig: string
    :param pig_cli_conn_id: reference to the Hive database
    :type pig_cli_conn_id: string
    :param pigparams_jinja_translate: when True, pig params-type templating
        ${var} gets translated into jinja-type templating {{ var }}. Note that
        you may want to use this along with the
        ``DAG(user_defined_macros=myargs)`` parameter. View the DAG
        object documentation for more details.
    :type pigparams_jinja_translate: boolean
    """

    template_fields = ('pig',)
    template_ext = ('.pig', '.piglatin',)
    ui_color = '#f0e4ec'

    @apply_defaults
    def __init__(
            self, pig,
            pig_cli_conn_id='pig_cli_default',
            pigparams_jinja_translate=False,
            *args, **kwargs):
        super(PigOperator, self).__init__(*args, **kwargs)
        self.pigparams_jinja_translate = pigparams_jinja_translate
        self.pig = pig
        self.pig_cli_conn_id = pig_cli_conn_id

    def get_hook(self):
        return PigCliHook(pig_cli_conn_id=self.pig_cli_conn_id)

    def prepare_template(self):
        if self.pigparams_jinja_translate:
            self.pig = re.sub(
                "(\$([a-zA-Z_][a-zA-Z0-9_]*))", "{{ \g<2> }}", self.pig)

    def execute(self, context):
        logging.info('Executing: ' + self.pig)
        self.hook = self.get_hook()
        self.hook.run_cli(pig=self.pig)

    def on_kill(self):
        self.hook.kill()
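For context, here is a sketch of how this operator might be wired into a DAG. The dag id, start date, and pig script are illustrative assumptions; only the `PigOperator` keyword arguments come from the class above.

from datetime import datetime

from airflow.models import DAG

dag = DAG(
    dag_id='pig_example',  # illustrative name
    start_date=datetime(2016, 1, 1),
)

# Assumes a 'pig_cli_default' connection is configured in Airflow.
dump_data = PigOperator(
    task_id='dump_data',
    pig="data = LOAD '/tmp/input.txt'; DUMP data;",  # illustrative script
    pig_cli_conn_id='pig_cli_default',
    dag=dag,
)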
googleapis/python-service-control
samples/generated_samples/servicecontrol_v1_generated_service_controller_check_async.py
Python
apache-2.0
1,437
0.000696
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for Check
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.

# To install the latest published package dependency, execute the following:
#   python3 -m pip install google-cloud-service-control


# [START servicecontrol_v1_generated_ServiceController_Check_async]
from google.cloud import servicecontrol_v1


async def sample_check():
    # Create a client
    client = servicecontrol_v1.ServiceControllerAsyncClient()

    # Initialize request argument(s)
    request = servicecontrol_v1.CheckRequest(
    )

    # Make the request
    response = await client.check(request=request)

    # Handle the response
    print(response)

# [END servicecontrol_v1_generated_ServiceController_Check_async]
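Note that the generated snippet only defines the coroutine. Run as a script, it would need an event loop to drive it, for example (assuming Python 3.7+):

import asyncio

# Drive the coroutine defined above; asyncio.run creates and closes the loop.
asyncio.run(sample_check())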
0x00/web.py-jinja2-pyjade-bootstrap
app.py
Python
apache-2.0
331
0.036254
import sys sys.path.append("helper") import web from helper import session web.config.debug = False urls = ( "/", "c
ontroller.start.index
", "/1", "controller.start.one", "/2", "controller.start.two", ) app = web.application(urls, globals()) sessions = session.Sessions() if __name__ == "__main__": app.run()
rajarammallya/melange
melange/tests/unit/test_extensions.py
Python
apache-2.0
1,657
0.000604
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from lxml import etree
import routes
import unittest
import webtest

from melange import tests
from melange.common import config
from melange.common import wsgi


class TestExtensions(unittest.TestCase):

    def test_extension_loads_with_melange_xmlns(self):
        options = {'config_file': tests.test_config_file()}
        conf, app = config.Config.load_paste_app('melangeapi', options, None)

        test_app = webtest.TestApp(app)
        response = test_app.get("/extensions.xml")
        root = etree.XML(response.body)
        self.assertEqual(root.tag.split('extensions')[0],
                         "{http://docs.openstack.org/melange}")


class ExtensionsTestApp(wsgi.Router):

    def __init__(self):
        mapper = routes.Mapper()
        super(ExtensionsTestApp, self).__init__(mapper)


def app_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)
    return ExtensionsTestApp()
ericjang/pyN
pyn_examples/6_AxEx_neuron.py
Python
bsd-2-clause
496
0.042339
import os, sys
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, parentdir)

from pyN import *

single_neuron = AdExPopulation(name='neuron', N=1)
brain = Network(populations=[single_neuron])
stim = [{'start': 10, 'stop': 100, 'mV': 14, 'neurons': [0]}]
results = brain.simulate(experiment_name='Single AdEx Neuron',
                         T=100, dt=0.25, integration_time=30,
                         I_ext={'neuron': stim},
                         save_data='../data/',
                         properties_to_save=['v', 'w', 'psc', 'I_ext'])
save_plots(results, './')
tokyo-jesus/wampum
user/private_views/_change_password.py
Python
gpl-3.0
152
0.013158
# -*- coding: utf-8 -*-
from django.views.generic import FormView


class ChangePassword(FormView):
    pass


change_password = ChangePassword.as_view()
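The view above is a bare stub. Below is a sketch of what a working version might declare, assuming a recent Django; the form class, template path, and redirect target are assumptions, not taken from the repo.

from django.contrib.auth.forms import PasswordChangeForm
from django.urls import reverse_lazy
from django.views.generic import FormView


class ChangePassword(FormView):
    form_class = PasswordChangeForm               # assumed form
    template_name = "user/change_password.html"   # assumed template path
    success_url = reverse_lazy("home")            # assumed redirect target

    def get_form_kwargs(self):
        # PasswordChangeForm needs the user whose password is being changed.
        kwargs = super(ChangePassword, self).get_form_kwargs()
        kwargs["user"] = self.request.user
        return kwargs

    def form_valid(self, form):
        form.save()
        return super(ChangePassword, self).form_valid(form)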
roshantha9/AbstractManycoreSim
src/util_scripts/ccpbased_remapping_random_params_generation.py
Python
gpl-3.0
2,693
0.026365
import random
import numpy as np
import pprint
import sys


def generate_random_params(max_rumtime_mins=2880, param_seed=1234):

    random.seed(param_seed)
    np.random.seed(param_seed)

    # duration
    TIME_TAKEN_FOR_ONE_SIMULATION = 40  # minutes

    # params we are concerned about
    #CLUSTER_SIZE = ["2-2"]
    CLUSTER_SIZE = ["5-5", "2-5"]
    REMAP_PERIOD = [0.96, 2.4, 4.8, 7.2]
    LATENESS_RATIO = [0.1, 0.2, 0.4, 0.6, 0.8, 1.0]
    LOAN_AMOUNT = [1, 2, 5, 8, 10]

    list_of_patterns = {}
    total_cum_time = 0
    total_permutations = len(CLUSTER_SIZE) * \
                         len(REMAP_PERIOD) * \
                         len(LOAN_AMOUNT)

    #print total_permutations
    #sys.exit()

    while ((total_cum_time < max_rumtime_mins) and
           (len(list_of_patterns) < total_permutations)):

        rand_CLUSTER_SIZE = random.choice(CLUSTER_SIZE)
        rand_REMAP_PERIOD = random.choice(REMAP_PERIOD)
        rand_LATENESS_RATIO = random.choice(LATENESS_RATIO)
        rand_LOAN_AMOUNT = random.choice(LOAN_AMOUNT)

        perm_key = "perm_" + \
                   str(rand_CLUSTER_SIZE) + "_" + \
                   str(rand_REMAP_PERIOD) + "_" + \
                   str(rand_LOAN_AMOUNT) + "_"

        fname_param_prefix = str(rand_CLUSTER_SIZE) + \
                             "_" + str(rand_REMAP_PERIOD) + \
                             "_" + str(rand_LOAN_AMOUNT)

        if (perm_key not in list_of_patterns):
            list_of_patterns[perm_key] = {
                "params" : {
                    "rand_CLUSTER_SIZE" : rand_CLUSTER_SIZE,
                    "rand_REMAP_PERIOD" : rand_REMAP_PERIOD,
                    "rand_LOAN_AMOUNT" : rand_LOAN_AMOUNT,
                    "rand_LATENESS_RATIO" : -1.0,
                },
                "fname_param_prefix" : fname_param_prefix
            }
            total_cum_time += TIME_TAKEN_FOR_ONE_SIMULATION
        else:
            #ignore
            i = 1

    return list_of_patterns

#print "finished!"
#random_params = generate_random_params(max_rumtime_mins=5760)
#print pprint.pprint(random_params)
#print len(random_params)
graphql-python/gql
tests/test_graphqlws_exceptions.py
Python
mit
7,525
0.00093
import asyncio from typing import List import pytest from gql import Client, gql from gql.transport.exceptions import ( TransportClosed, TransportProtocolError, TransportQueryError, ) from .conftest import WebSocketServerHelper # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets invalid_query_str = """ query getContinents { continents { code bloh } } """ invalid_query1_server_answer = ( '{{"type":"next","id":"{query_id}",'
'"payload":{{"errors":[' '{{"message":"Cannot query field \\"bloh\\" on type \\"Continent\\".",' '"locations":[{{"line":4,"column":5}}],' '"ex
tensions":{{"code":"INTERNAL_SERVER_ERROR"}}}}]}}}}' ) invalid_query1_server = [invalid_query1_server_answer] @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [invalid_query1_server], indirect=True) @pytest.mark.parametrize("query_str", [invalid_query_str]) async def test_graphqlws_invalid_query( event_loop, client_and_graphqlws_server, query_str ): session, server = client_and_graphqlws_server query = gql(query_str) with pytest.raises(TransportQueryError) as exc_info: await session.execute(query) exception = exc_info.value assert isinstance(exception.errors, List) error = exception.errors[0] assert error["extensions"]["code"] == "INTERNAL_SERVER_ERROR" invalid_subscription_str = """ subscription getContinents { continents { code bloh } } """ async def server_invalid_subscription(ws, path): await WebSocketServerHelper.send_connection_ack(ws) await ws.recv() await ws.send(invalid_query1_server_answer.format(query_id=1)) await WebSocketServerHelper.send_complete(ws, 1) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_invalid_subscription], indirect=True ) @pytest.mark.parametrize("query_str", [invalid_subscription_str]) async def test_graphqlws_invalid_subscription( event_loop, client_and_graphqlws_server, query_str ): session, server = client_and_graphqlws_server query = gql(query_str) with pytest.raises(TransportQueryError) as exc_info: async for result in session.subscribe(query): pass exception = exc_info.value assert isinstance(exception.errors, List) error = exception.errors[0] assert error["extensions"]["code"] == "INTERNAL_SERVER_ERROR" async def server_no_ack(ws, path): await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_no_ack], indirect=True) @pytest.mark.parametrize("query_str", [invalid_query_str]) async def test_graphqlws_server_does_not_send_ack( event_loop, graphqlws_server, query_str ): from gql.transport.websockets import WebsocketsTransport url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql" sample_transport = WebsocketsTransport(url=url, ack_timeout=1) with pytest.raises(asyncio.TimeoutError): async with Client(transport=sample_transport): pass invalid_query_server_answer = ( '{"id":"1","type":"error","payload":[{"message":"Cannot query field ' '\\"helo\\" on type \\"Query\\". Did you mean \\"hello\\"?",' '"locations":[{"line":2,"column":3}]}]}' ) async def server_invalid_query(ws, path): await WebSocketServerHelper.send_connection_ack(ws) result = await ws.recv() print(f"Server received: {result}") await ws.send(invalid_query_server_answer) await WebSocketServerHelper.wait_connection_terminate(ws) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_invalid_query], indirect=True) async def test_graphqlws_sending_invalid_query(event_loop, client_and_graphqlws_server): session, server = client_and_graphqlws_server query = gql("{helo}") with pytest.raises(TransportQueryError) as exc_info: await session.execute(query) exception = exc_info.value assert isinstance(exception.errors, List) error = exception.errors[0] assert ( error["message"] == 'Cannot query field "helo" on type "Query". Did you mean "hello"?' 
) not_json_answer = ["BLAHBLAH"] missing_type_answer = ["{}"] missing_id_answer_1 = ['{"type": "next"}'] missing_id_answer_2 = ['{"type": "error"}'] missing_id_answer_3 = ['{"type": "complete"}'] data_without_payload = ['{"type": "next", "id":"1"}'] error_without_payload = ['{"type": "error", "id":"1"}'] error_with_payload_not_a_list = ['{"type": "error", "id":"1", "payload": "NOT A LIST"}'] payload_is_not_a_dict = ['{"type": "next", "id":"1", "payload": "BLAH"}'] empty_payload = ['{"type": "next", "id":"1", "payload": {}}'] sending_bytes = [b"\x01\x02\x03"] @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [ not_json_answer, missing_type_answer, missing_id_answer_1, missing_id_answer_2, missing_id_answer_3, data_without_payload, error_without_payload, payload_is_not_a_dict, error_with_payload_not_a_list, empty_payload, sending_bytes, ], indirect=True, ) async def test_graphqlws_transport_protocol_errors( event_loop, client_and_graphqlws_server ): session, server = client_and_graphqlws_server query = gql("query { hello }") with pytest.raises(TransportProtocolError): await session.execute(query) async def server_without_ack(ws, path): # Sending something else than an ack await WebSocketServerHelper.send_complete(ws, 1) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_without_ack], indirect=True) async def test_graphqlws_server_does_not_ack(event_loop, graphqlws_server): from gql.transport.websockets import WebsocketsTransport url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) with pytest.raises(TransportProtocolError): async with Client(transport=sample_transport): pass async def server_closing_directly(ws, path): await ws.close() @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_closing_directly], indirect=True) async def test_graphqlws_server_closing_directly(event_loop, graphqlws_server): import websockets from gql.transport.websockets import WebsocketsTransport url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) with pytest.raises(websockets.exceptions.ConnectionClosed): async with Client(transport=sample_transport): pass async def server_closing_after_ack(ws, path): await WebSocketServerHelper.send_connection_ack(ws) await ws.close() @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_closing_after_ack], indirect=True) async def test_graphqlws_server_closing_after_ack( event_loop, client_and_graphqlws_server ): import websockets session, server = client_and_graphqlws_server query = gql("query { hello }") with pytest.raises(websockets.exceptions.ConnectionClosed): await session.execute(query) await session.transport.wait_closed() with pytest.raises(TransportClosed): await session.execute(query)
namhyung/uftrace
tests/t036_replay_filter_N.py
Python
gpl-2.0
1,056
0.000947
#!/usr/bin/env python

from runtest import TestBase

class TestCase(TestBase):
    def __init__(self):
        TestBase.__init__(self, 'namespace', lang="C++", result="""
# DURATION    TID     FUNCTION
            [  7102] | main() {
   2.697 us [  7102] |   operator new();
   0.842 us [  7102] |   ns::ns1::foo::foo();
            [  7102] |   ns::ns1::foo::bar() {
            [  7102] |     ns::ns1::foo::bar1() {
   1.926 us [  7102] |       ns::ns1::foo::bar2();
   2.169 us [  7102] |     } /* ns::ns1::foo::bar1 */
   1.215 us [  7102] |     free();
   3.897 us [  7102] |   } /* ns::ns1::foo::bar */
   1.865 us [  7102] |   operator delete();
   0.274 us [  7102] |   operator new();
   0.115 us [  7102] |   ns::ns2::foo::foo();
   1.566 us [  7102] |   ns::ns2::foo::bar();
   0.168 us [  7102] |   operator delete();
  78.921 us [  7102] | } /* main */
""")

    def prepare(self):
        self.subcmd = 'record'
        return self.runcmd()

    def setup(self):
        self.subcmd = 'replay'
        self.option = '-N "bar3$" -Tns::ns2::foo::bar@depth=1'
amelmquist/chrono
src/demos/python/demo_particleclones.py
Python
bsd-3-clause
5,706
0.015247
#------------------------------------------------------------------------------- # Name: demo_python_3 # # This file shows how to create and populate the ChParticleClones object. # Also, shows how to use POV ray for postprocessing, thanks to the # utility functions in the unit_POSTPROCESS of Chrono::Engine. # #------------------------------------------------------------------------------- #!/usr/bin/env python def main(): pass if __name__ == '__main__': main() # Load the Chrono::Engine unit and the postprocessing unit!!! import ChronoEngine_python_core as chrono import ChronoEngine_python_postprocess as postprocess # We will create two directories for saving some files, we need this: import os # Create a physical system, my_system = chrono.ChSystemNSC() # Set the default margins for collision detection, this is epecially # important for very large or very small objects. chrono.ChCollisionModel.SetDefaultSuggestedEnvelope(0.001) chrono.ChCollisionModel.SetDefaultSuggestedMargin(0.001) # Create the set of the particle clones (many rigid bodies that # share the same mass and collision shape, so they are memory efficient # in case you want to simulate granular material) body_particles = chrono.ChParticlesClones() body_particles.SetMass(0.01); inertia = 2/5*(pow(0.005,2))*0.01; body_particles.SetInertiaXX(chrono.ChVectorD(inertia,inertia,inertia)); # Collision shape (shared by all particle clones) Must be defined BEFORE adding particles body_particles.GetCollisionModel().ClearModel() body_particles.GetCollisionModel().AddSphere(0.005) body_particles.GetCollisionModel().BuildModel() body_particles.SetCollide(True) # add particles for ix in range(0,5): for iy in range(0,5): for iz in range(0,3): body_particles.AddParticle(chrono.ChCoordsysD(chrono.ChVectorD(ix/100,0.1+iy/100, iz/100))) # Visualization shape (shared by all particle clones) body_particles_shape = chrono.ChSphereShape() body_particles_shape.GetSphereGeometry().rad = 0.005 body_particles.GetAssets().push_back(body_particles_shape) my_system.Add(body_particles) # Create the floor: a simple fixed rigid body with a collision shape # and a visualization shape body_floor = chrono.ChBody() body_floor.SetBodyFixed(True) # Collision shape body_floor.GetCollisionModel().ClearModel() body_floor.GetCollisionModel().AddBox(0.1, 0.02, 0.1) # hemi sizes body_floor.GetCollisionModel().BuildModel() body_floor.SetCollide(True) # Visualization shape body_floor_shape = chrono.ChBoxShape() body_floor_shape.GetBoxGeometry().Size = chrono.ChVectorD(0.1, 0.02, 0.1) body_floor_shape.SetColor(chrono.ChColor(0.5,0.5,0.5)) body_floor.GetAssets().push_back(body_floor_shape) my_system.Add(body_floor) # Create boxes that fall # This is just for fun. 
for ix in range(0,2): for iz in range(0,4): body_brick = chrono.ChBody() body_brick.SetPos(chrono.ChVectorD(0.05+ix*0.021,0.04,0+iz*0.021)) body_brick.SetMass(0.02); inertia = 2/5*(pow(0.01,2))*0.02; body_brick.SetInertiaXX(chrono.ChVectorD(inertia,inertia,inertia)); # Collision shape body_brick.GetCollisionModel().ClearModel() body_brick.GetCollisionModel().AddBox(0.01, 0.01, 0.01) # hemi sizes body_brick.GetCollisionModel().BuildModel() body_brick.SetCollide(True) # Visualization shape body_brick_shape = chrono.ChBoxShape() body_brick_shape.GetBoxGeometry().Size = chrono.ChVectorD(0.01, 0.01, 0.01) body_brick.GetAssets().push_back(body_brick_shape) my_system.Add(body_brick) # --------------------------------------------------------------------- # # Render a short animation by generating scripts # to be used with POV-Ray # pov_exporter = postprocess.ChPovRay(my_system) # Sets some file names for in-out processes. pov_exporter.SetTemplateFile ("../../../data/_template_POV.pov") pov_exporter.SetOutputScriptFile ("rendering_frames.pov") if not os.path.exists("output"): os.mkdir("output") if not os.path.exists("anim"): os.mkdir("anim") pov_exporter.SetOutputDataFilebase("output
/my_state") pov_exporter.SetPictureFilebase("anim/picture") pov_exporter.SetCamera(chrono.ChVectorD(0.2,0.3,0.5), chrono.ChVectorD(0,0,0), 35) pov_exporter.SetLight(chrono.ChVectorD(-2,2,-1), chrono.ChColor(1.1,1.2,1.2), True) pov_exporter.SetPictureSize(640,480) pov_exporter.SetAmbientLight(chrono.ChColor(2,2,2)) # Add additional POV objects/lights/materials in the following way pov_exporter.SetCustomPOVcommandsScript( ''
' light_source{ <1,3,1.5> color rgb<1.1,1.1,1.1> } Grid(0.05,0.04, rgb<0.7,0.7,0.7>, rgbt<1,1,1,1>) ''') # Tell which physical items you want to render pov_exporter.AddAll() # Tell that you want to render the contacts pov_exporter.SetShowContacts(True, postprocess.ChPovRay.SYMBOL_VECTOR_SCALELENGTH, 0.2, # scale 0.0007, # width 0.1, # max size True,0,0.5 ) # colormap on, blue at 0, red at 0.5 # 1) Create the two .pov and .ini files for POV-Ray (this must be done # only once at the beginning of the simulation). pov_exporter.ExportScript() #my_system.SetSolverType(chrono.ChSolver.Type_PMINRES) my_system.SetMaxItersSolverSpeed(50) # Perform a short simulation while (my_system.GetChTime() < 0.7) : my_system.DoStepDynamics(0.005) print ('time=', my_system.GetChTime() ) # 2) Create the incremental nnnn.dat and nnnn.pov files that will be load # by the pov .ini script in POV-Ray (do this at each simulation timestep) pov_exporter.ExportData()
bazelbuild/rules_python
gazelle/testdata/relative_imports/package2/module4.py
Python
apache-2.0
40
0
def function4():
    return "function4"
intelligent-agent/redeem
tests/gcode/test_M201.py
Python
gpl-3.0
3,414
0.010838
from __future__ import absolute_import from .MockPrinter import MockPrinter import mock from random import random class M201_Tests(MockPrinter): def setUp(self): self.printer.path_planner.native_planner.setAcceleration = mock.Mock() self.printer.axis_config = self.printer.AXIS_CONFIG_XY self.printer.speed_factor = 1.0 def exercise(self): values = {} gcode = "M201" for i, v in enumerate(self.printer.acceleration): axis = self.printer.AXES[i] values[axis] = round(random() * 9000.0, 0) gcode += " {:s}{:.0f}".format(axis, values[axis]) self.execute_gcode(gcode) return { "values": values, "call_args": self.printer.path_planner.native_planner.setAcceleration.call_args[0][0] } def test_gcodes_M201_all_axes_G21_mm(self): test_data = self.exercise() for i, axis in enumerate(self.printer.AXES): expected = round(test_data["values"][axis] * self.printer.factor / 3600.0, 4) result = test_data["call_args"][i] self.assertEqual(expected, result, axis + ": expected {:.0f} but got {:.0f}".format(expected, result)) def test_gcodes_M201_all_axes_G20_inches(self): self.printer.factor = 25.4 test_data = self.exercise() for i, axis in enumerate(self.printer.AXES): expected = round(test_data["values"][axis] * self.printer.factor / 3600.0, 4) result = test_data["call_args
"][i] self.assertEqual(expected, result, axis + ": expected {:.0f} but got {:.0f}".format(expected, result)) def test_gcodes_M201_CoreXY(self): self.printer.axis_config = self.printer.AXIS_CONFIG_CORE_XY while True: # account for remote possibility of two equal random numbers for X and Y test_data = self.exercise() if test_data["values"]["X"]
!= test_data["values"]["Y"]: break self.assertEqual( test_data["call_args"][0], test_data["call_args"][1], "For CoreXY mechanics, X & Y values must match. But X={}, Y={} (mm/min / 3600)".format( test_data["call_args"][0], test_data["call_args"][1])) def test_gcodes_M201_H_belt(self): self.printer.axis_config = self.printer.AXIS_CONFIG_H_BELT while True: # account for remote possibility of two equal random numbers for X and Y test_data = self.exercise() if test_data["values"]["X"] != test_data["values"]["Y"]: break self.assertEqual( test_data["call_args"][0], test_data["call_args"][1], "For H-Belt mechanics, X & Y values must match. But X={}, Y={} (mm/min / 3600)".format( test_data["call_args"][0], test_data["call_args"][1])) def test_gcodes_M201_Delta(self): self.printer.axis_config = self.printer.AXIS_CONFIG_DELTA while True: # account for super, ultra-duper remote possibility of three equal random numbers for X , Y and Z test_data = self.exercise() if (test_data["values"]["X"] + test_data["values"]["Y"] + test_data["values"]["Y"]) != ( test_data["values"]["X"] * 3): break self.assertEqual( test_data["call_args"][0] + test_data["call_args"][1] + test_data["call_args"][2], test_data["call_args"][0] * 3, "For CoreXY mechanics, X & Y values must match. But X={}, Y={} (mm/min / 3600)".format( test_data["call_args"][0], test_data["call_args"][1], test_data["call_args"][2]))
Hybrid-Cloud/badam
patches_tool/aws_patch/aws_deps/libcloud/test/dns/test_google.py
Python
apache-2.0
7,924
0.000883
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and import sys import unittest from libcloud.utils.py3 import httplib from libcloud.dns.types import ZoneDoesNotExistError from libcloud.dns.types import RecordDoesNotExistError from libcloud.dns.drivers.google import GoogleDNSDriver from libcloud.common.google import (GoogleBaseAuthConnection, GoogleInstalledAppAuthConnection, GoogleBaseConnection) from libcloud.test.common.test_google import GoogleAuthMockHttp from libcloud.test import MockHttpTestCase, LibcloudTestCase from libcloud.test.file_fixtures import DNSFileFixtures from libcloud.test.secrets import DNS_PARAMS_GOOGLE, DNS_KEYWORD_PARAMS_GOOGLE class GoogleTests(LibcloudTestCase): GoogleBaseConnection._get_token_info_from_file = lambda x: None GoogleBaseConnection._write_token_info_to_file = lambda x: None GoogleInstalledAppAuthConnection.get_code = lambda x: '1234' def setUp(self): GoogleDNSMockHttp.test = self GoogleDNSDriver.connectionCls.conn_classes = (GoogleDNSMockHttp, GoogleDNSMockHttp) GoogleBaseAuthConnection.conn_classes = (GoogleAuthMockHttp, GoogleAuthMockHttp) GoogleDNSMockHttp.type = None kwargs = DNS_KEYWORD_PARAMS_GOOGLE.copy() kwargs['auth_type'] = 'IA' self.driver = GoogleDNSDriver(*DNS_PARAMS_GOOGLE, **kwargs) def test_default_scopes(self): self.assertEqual(self.driver.scopes, None) def test_list_zones(self): zones = self.driver.list_zones() self.assertEqual(len(zones), 2) def test_list_records(self): zone = self.driver.list_zones()[0] records = self.driver.list_records(zone=zone) self.assertEqual(len(records), 3) def test_get_zone(self): zone = self.driver.get_zone('example-com') self.assertEqual(zone.id, 'example-com') self.assertEqual(zone.domain, 'example.com.') def test_get_zone_does_not_exist(self): GoogleDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST' try: self.driver.get_zone('example-com') except ZoneDoesNotExistError: e = sys.exc_info()[1] self.assertEqual(e.zone_id, 'example-com') else: self.fail('Exception not thrown') def test_get_record(self): GoogleDNSMockHttp.type = 'FILTER_ZONES' zone = self.driver.list_zones()[0] record = self.driver.get_record(zone.id, "A:foo.example.com.") self.assertEqual(record.id, 'A:foo.example.com.') self.assertEqual(record.name, 'foo.example.com.') self.assertEqual(record.type, 'A') self.assertEqual(record.zone.id, 'example-com') def test_get_record_zone_does_not_exist(self): GoogleDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST' try: self.driver.get_record('example-com', 'a:a') except ZoneDoesNotExistError: e = sys.exc_info()[1] self.assertEqual(e.zone_id, 'example-com') else: self.fail('Exception not thrown') def test_get_record_record_does_not_exist(self): GoogleDNSMockHttp.type = 'RECORD_DOES_NOT_EXIST' try: self.driver.get_record('example-com', "A:foo") except RecordDoesNotExistError: e = sys.exc_info()[1] self.assertEqual(e.record_id, 'A:foo') else: 
self.fail('Exception not thrown') def test_create_zone(self): extra = {'description': 'new domain for example.org'} zone = self.driver.create_zone('example.org.', extra) self.assertEqual(zone.domain, 'example.org.') self.assertEqual(zone.extra['description'], extra['description']) self.assertEqual(len(zone.extra['nameServers']), 4) def test_delete_zone(self): zone = self.driver.get_zone('example-com') res = self.driver.delete_zone(zone) self.assertTrue(res) class GoogleDNSMockHttp(MockHttpTestCase): fixtures = DNSFileFixtures('google') def _dns_v1beta1_projects_project_name_managedZones(self, method, url, body, headers): if method == 'POST': body = self.fixtures.load('zone_create.json') else: body = self.fixtures.load('zone_list.json') return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _dns_v1beta1_projects_project_name_managedZones_FILTER_ZONES( self, method, url, body, headers): body = self.fixtures.load('zone_list.json') return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _dns_v1beta1_projects_project_name_managedZones_example_com_rrsets_FILTER_ZONES( self, method, url, body, headers): body = self.fixtures.load('record.json') return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _dns_v1beta1_projects_project_name_managedZones_example_com_rrsets( self, method, url, body, headers): body = self.fixtures.load('records_list.json') return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _dns_v1beta1_projects_project_name_managedZones_example_com( self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('managed_zones_1.json') elif method == 'DELETE': body = None return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _dns_v1beta1_projects_project_name_managedZones_example_com_ZONE_DOES_NOT_EXIST( self, method, url, body, headers): body = self.fixtures.load('get_zone_does_not_exists.json') return (httplib.NOT_FOUND, body, {}, httplib.responses[httplib.NOT_FOUND]) def _dns_v1beta1_projects_project_name_managedZones_example_com_RECORD_DOES_NOT_EXIST( self, method, url, body, headers): body = self.fixtures.load('managed_zones_1.json') return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _dns_v1beta1_projects_project_name_managedZones_example_com_rrsets_RECORD_DOES_NOT_EXIST( self, method, url, body, headers): body = self.fixtures.load('no_record.json') return (httplib.OK, body, {}, httplib.responses[httplib.OK]) def _dns_v1beta1_projects_project_name_managedZones_example_com_rrsets_ZONE_DOES_NOT_EXIST( self, method, url, body, headers): body = self.fixtures.load('get_zone_does_not_exists.json') return (httplib.NOT_FOUND, body, {}, httplib.responses[httplib.NOT_FOUND]) def _dns_v1beta1_proje
cts_project_name_managedZones_example_com_FILTER_ZONES( self, method, url, body, headers): body = self.fixtures.load('zone.json') retu
rn (httplib.OK, body, {}, httplib.responses[httplib.OK]) if __name__ == '__main__': sys.exit(unittest.main())
arnaudsj/milk
milk/supervised/knn.py
Python
mit
1,620
0.006173
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2011, Luis Pedro Coelho <luis@luispedro.org>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# License: MIT. See COPYING.MIT file in the milk distribution

from __future__ import division
from collections import defaultdict
import numpy as np
from .base import supervised_model

__all__ = [
    'kNN',
    ]

class kNN(object):
    '''
    k-Nearest Neighbour Classifier

    Naive implementation of a k-nearest neighbour classifier.

    C = kNN(k)

    Attributes:
    -----------
    k : integer
        number of neighbours to consider
    '''

    def __init__(self, k=1):
        self.k = k

    def train(self, features, labels, normalisedlabels=False, copy_features=False):
        features = np.asanyarray(features)
        labels = np.asanyarray(labels)
        if copy_features:
            features = features.copy()
            labels = labels.copy()
        features2 = np.sum(features**2, axis=1)
        return kNN_model(self.k, features, features2, labels)

class kNN_model(supervised_model):
    def __init__(self, k, features, features2, labels):
        self.k = k
        self.features = features
        self.f2 = features2
        self.labels = labels

    def apply(self, features):
        features = np.asanyarray(features)
        diff2 = np.dot(self.features, (-2.) * features)
        diff2 += self.f2
        neighbours = diff2.argsort()[:self.k]
        labels = self.labels[neighbours]
        votes = defaultdict(int)
        for L in labels:
            votes[L] += 1
        v, L = max((v, L) for L, v in votes.items())
        return L
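For reference, here is a minimal usage sketch of the classifier above, assuming the milk package is importable; the sample data is illustrative and `apply` classifies one feature vector at a time.

import numpy as np
from milk.supervised.knn import kNN

features = np.array([[0.0, 0.0],
                     [0.1, 0.2],
                     [1.0, 1.0],
                     [0.9, 1.1]])
labels = np.array([0, 0, 1, 1])

learner = kNN(k=3)
model = learner.train(features, labels)

# Two of the three nearest training points carry label 1.
print(model.apply(np.array([0.95, 1.0])))  # -> 1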
johnmcilwain/cdnetworks
api_example_python.py
Python
apache-2.0
5,957
0.008058
# ********************************************************************************************************************************* # # Name: api_example_python.py # # Desc: full api example # # Auth: john mcilwain (jmac) - (jmac@cdnetworks.com) # # Ver : .90 # # License: # # This sample code is provided on an "AS IS" basis. THERE ARE NO # # WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION THE IMPLIED # # WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR # # PURPOSE, REGARDING THE SOFTWARE OR ITS USE AND OPERATION ALONE OR IN # # COMBINATION WITH YOUR PRODUCTS. # # ********************************************************************************************************************************* # import os import sys import json import pygal import re import urllib.request USER = os.popen('cat ./_user.db').read() # Create _user.db with your username inside PASS = os.popen('cat ./_pass.db').read() # Create _pass.db with your password inside SVCGRP = 'YourServiceGRP' # Change to your desired SERVICE GROUP APIKEY = 'YourDomainPAD'
# Change to your desired APIKEY (website) TRAFFICDATA = '&fromDate=20170201&toDate=20170201&timeInterval=1' # Change to your desired graph date/time GRAPHFILE = 'api_example_python_graph' # Change to your desired graph filename APIENDPOINT
= 'https://openapi.cdnetworks.com/api/rest/' # Don't change APIFORMAT = '&output=json' # Don't change API_SUCCESS = 0 # Don't change # Command: LOGIN : send login, receive list of service groups (logial grouping, like a directory) print('Control Groups') url = APIENDPOINT + 'login?user=' + USER + '&pass=' + PASS + APIFORMAT; print('\tURL: ' + APIENDPOINT + 'login?user=xxx&pass=xxx') parsed = json.load(urllib.request.urlopen(url)) retval = parsed['loginResponse']['resultCode'] print('\tloginResponse: resultCode = %s' % retval) # Loop through and find SVCGRP specific Service Group sessToken = ''; sessions = parsed['loginResponse']['session'] for session in sessions: if session['svcGroupName'] == SVCGRP: print('\tFound: %s' % session['svcGroupName']) print('\t\tSelected: %s' % session['sessionToken']) sessToken = session['sessionToken'] break # Command: APIKEYLIST : get list of APIs for service groups print('\nAPI Key List') url = APIENDPOINT + 'getApiKeyList?sessionToken=' + sessToken + APIFORMAT; print('\tURL: %s' % url) parsed = json.load(urllib.request.urlopen(url)) retval = parsed['apiKeyInfo']['returnCode'] if retval != API_SUCCESS: print('API Failed, code: %s' % retval) sys.exit() print('\tapiKeyInfo: returnCode = %s' % retval) # Loop through and find the APIKEY specific API Key apiKey = '' apikeys = parsed['apiKeyInfo']['apiKeyInfoItem'] for apikey in apikeys: if apikey['serviceName'] == APIKEY: print('\tFound: %s' % apikey['serviceName']) print('\t\tSelected: %s' % apikey['apiKey']) apiKey = apikey['apiKey'] break # Command: EDGE TRAFFIC : get edge traffic raw data print('\nTraffic/Edge') url = APIENDPOINT + 'traffic/edge?sessionToken=' + sessToken + '&apiKey=' + apiKey + TRAFFICDATA + APIFORMAT; print('\tURL: %s' % url) parsed = json.load(urllib.request.urlopen(url)) retval = parsed['trafficResponse']['returnCode'] if retval != API_SUCCESS: print('API Failed, code: %s' % retval) sys.exit() print('\tapiKeyInfo: returnCode = %s' % retval) # Show all Traffic details chartListTimes = [] chartListTrans = [] trafficItems = parsed['trafficResponse']['trafficItem'] for item in trafficItems: print('\tFound: %s' % item['dateTime']) print('\tFound: %s' % item['dataTransferred']) chartListTimes.append(item['dateTime']) chartListTrans.append(item['dataTransferred']) # Generate and save graph (create nice looking labels first) chartListTimesPretty = [] for date in chartListTimes: #format with hyphens: 201702011700 chartListTimesPretty.append( "%s-%s-%s-%s" % (str(date)[:4], str(date)[4:6], str(date)[6:8], str(date)[8:])) bar_chart = pygal.Bar(width=1024, height=768) bar_chart.title = "Edge Traffic" bar_chart.x_title = "Date/Time" bar_chart.y_title = "Data Transferred (bytes)" bar_chart.x_label_rotation = 270 bar_chart.legend_at_bottom = 1 bar_chart.x_labels = chartListTimesPretty bar_chart.add(APIKEY, chartListTrans) bar_chart.render_to_file(GRAPHFILE + '.svg') bar_chart.render_to_png(GRAPHFILE + '.png') # Command: LOGOUT : send token to invalidate print('\nLogout') url = APIENDPOINT + 'logout?sessionToken=' + sessToken + APIFORMAT print('\tURL: %s' % url) parsed = json.load(urllib.request.urlopen(url)) retval = parsed['logoutResponse']['resultCode'] # Ignoring retval print('\tlogout: resultCode = %s' % retval)
habagat/hugot214
LexerParser.py
Python
gpl-3.0
9,650
0.023316
from ply import lex, yacc class Lexer: reserved = { #'ikawna' : 'IF', #'syapala' : 'ELSE', #'akona' : 'ELSEIF', #'kamipa' : 'WHILE', #'ibalik' : 'RETURN', 'tayona' : 'MAIN', 'ayokona' : 'END', 'nbsb' : 'READ', 'pda' : 'PRINT', #'paasa' : 'FOR', #'habang' : 'DO', 'solo' : 'INTN', 'pafall' : 'FLOATN', 'feelingera' : 'CHARN', 'assumera' : 'STRINGN', 'friendzone' : 'BOOLN', #'lovemosya' : 'LT', #'lovekita' : 'GT',
#'maslovemosya' : 'LEQ', #'maslovekita' : 'GEQ', #'pataskami' : 'EQ', #'lamangsiya' : 'NEQ', #'basted' : 'NOT', #'ot' : 'OR', #'at' : 'AND', } tokens = [ 'INT','FLOAT', 'EOL','ID','STRING', 'PLUS','MINUS','MUL','DIV','MOD','ASSIGN', 'OPENPAR','CLOSEPAR', 'OPENCURLY','CLOSECURLY', 'C
OMMA', #'OPENBRACE','CLOSEBRACE' ] + list(reserved.values()) #tokens += reserved.values() def t_ID(self, t): r'[a-zA-Z_][a-zA-Z_0-9]*' t.type = Lexer.reserved.get(t.value,'ID') # Check for reserved words # redis is case sensitive in hash keys but we want the sql to be case insensitive, # so we lowercase identifiers t.value = t.value.lower() return t # Read in a float. This rule has to be done before the int rule. def t_FLOAT(self, t): r'-?\d+\.\d*(e-?\d+)?' t.value = float(t.value) return t def t_INT(self, t): r'\d+' try: t.value = int(t.value) except ValueError: print("Integer value too large %d", t.value) t.value = 0 return t def t_STRING(self, t): # TODO: unicode... # Note: this regex is from pyparsing, # TODO: may be better to refer to http://docs.python.org/reference/lexical_analysis.html '(?:"(?:[^"\\n\\r\\\\]|(?:"")|(?:\\\\x[0-9a-fA-F]+)|(?:\\\\.))*")|(?:\'(?:[^\'\\n\\r\\\\]|(?:\'\')|(?:\\\\x[0-9a-fA-F]+)| (?:\\\\.))*\')' t.value = eval(t.value) t.value[1:-1] return t # Tokens t_COMMA = r'\,' t_EOL = r';' #t_QUOTE = r'\"' t_OPENCURLY = r'\{' t_CLOSECURLY = r'\}' #t_OPENBRACE = r'\[' #t_CLOSEBRACE = r'\]' t_PLUS = r'\+' t_MINUS = r'-' t_MUL = r'\*' t_DIV = r'/' t_MOD = r'%' t_ASSIGN = r'=' t_OPENPAR = r'\(' t_CLOSEPAR = r'\)' # Ignored characters t_ignore = " \t" ''' literals = [ '{', '}' ] def t_lbrace(self, t): r'\{' t.type = '{' # Set token type to the expected literal return t def t_rbrace(self, t): r'\}' t.type = '}' # Set token type to the expected literal return t ''' def t_COMMENT(self,t): r'\#.*' pass # No return value. Token discarded def t_newline(self,t): r'\n+' t.lexer.lineno += t.value.count("\n") ''' # Compute column. # input is the input text string # token is a token instance def find_column(input,token): last_cr = input.rfind('\n',0,token.lexpos) if last_cr < 0: last_cr = 0 column = (token.lexpos - last_cr) + 1 return column ''' def t_error(self,t): print("Illegal character '%s'" % t.value[0]) t.lexer.skip(1) ''' # EOF handling rule def t_eof(self, t): # Get more input (Example) more = raw_input('... 
') if more: self.lexer.input(more) #return self.lexer.token() return None ''' # Build the lexer def build(self, **kwargs): self.lexer = lex.lex(module=self, **kwargs) return self.lexer ''' # Test it output def test(self,data): self.lexer.input(data) while True: tok = self.lexer.token() if not tok: break print(tok) ''' # Build the lexer and try it out #m = Lexer() #m.build() #m.test("solo x = 4") #m.test("ayokona 0;") # Build the lexer #m.test("pda () { x1 = [ 4 + 3 ] ; }") # Test it #m.test("#\"hello\"") #m.test("\"Hello World\"") #m.test("\'Hi Universe!\'") #m.test(" syapala() { \'Hi Universe!\' }") variableNames=[] statementlist=[] # dictionary of names names = { } class Parser: tokens = Lexer.tokens # Parsing Rules precedence = ( ('left','PLUS','MINUS'), ('left','MUL','DIV', 'MOD'), ('right','UMINUS'), # ('left', 'OR'), # ('left', 'AND'), # ('left', 'EQ', 'NEQ', 'LT', 'GT') ) def p_program_start_start(self, t): 'progStart : programHeading OPENCURLY decl statement endprog CLOSECURLY' t[0] = 0 def p_program_main(self, t): 'programHeading : MAIN OPENPAR CLOSEPAR' t[0] = 0 def p_program_decl(self, t): 'decl : type ID nextdecl EOL decl' variableNames.append(t[2]) names[t[2]]= '' #print(names) def p_program_vempty(self, t): 'decl : empty' def p_program_decl_value(self, t): 'decl : type ID ASSIGN value nextdecl EOL decl ' #print(variableNames) #variableNames.append(t[2]) #for j in range(0, len(variableNames)): #if variableNames[j]==t[2]: # print(variableNames[j]) # print("variable already exist") # break #else: variableNames.append(t[2]) names[t[2]] = t[4] def p_program_nextdecl(self, t): 'nextdecl : COMMA ID nextdecl' variableNames.append(t[2]) names[t[2]]= '' def p_program_declassign(self, t): 'nextdecl : COMMA ID ASSIGN value nextdecl ' variableNames.append(t[2]) names[t[2]]=t[4] def p_program_emptydecl(self, t): 'nextdecl : empty' def p_program_number(self, t): '''value : INT | FLOAT''' t[0] = t[1] def p_program_type(self, t): '''type : INTN | FLOATN | CHARN | STRINGN | BOOLN''' t[0] = t[1] #def p_program_assignvar(self, t): # 'assignvar : ID ASSIGN expression EOL assignvar' # names[t[1]] = t[3] #def p_program_varempty(self, t): # 'assignvar : empty' def p_program_print(self, t): '''statement : PRINT OPENPAR STRING CLOSEPAR EOL statement | PRINT OPENPAR statement CLOSEPAR EOL statement''' #state = i + 1 statementlist.append(t[3]) #print(t[3]) #print(statementlist) #print(state) def p_program_input(self, t): 'statement : READ OPENPAR ID CLOSEPAR EOL statement' #x = input() statementlist.append("SCANVALUE") names[t[3]] = 'SCANVAL' #statementlist.append(t[3]) #def p_program_if(self, t): # 'statement : FOR OPENPAR logicOp CLOSEPAR OPENCURLY statement CLOSECURLY' #def p_program_cond(self, t): # 'logicOp : ID EQ ID' def p_statement_assign(self, t): 'statement : ID ASSIGN expression EOL statement' #print(t[3]) #if(names[t[1]] == ''): names[t[1]] = t[3] def p_statement_emptyState(self, t): 'statement : empty' pass def p_statement_expr(self, t): 'statement : expression' t[0] = t[1] #print(t[1]) # prints the value of evaluated expression def p_expression_binop(self, t): '''expression : expression PLUS expression | expression MINUS expression | expression MUL expression | expression DIV expression | expression MOD expression''' if t[2] == '+' : t[0] = t[1
mffiedler/svt
openshift_tooling/node_tuning_operator/nto_test_custom_tuning.py
Python
apache-2.0
5,119
0.003907
#!/usr/bin/env python from utils import * ######################################################## # Test: Node Tuning Operator: custom tuning is working # #####################################################
###
# Changes:
#
# skordas:
#
# Updating Test Case to work with OCP 4.4
#
########################################################


def cleanup():
    print("Cleaning after test")
    execute_command("oc delete tuned nf-conntrack-max -n openshift-cluster-node-tuning-operator")
    execute_command("oc delete project my-logging-project")


def test():
    # Test execution:
    print_title("Node Tuning Operator: Custom tuning is working")

    # Getting all nodes
    print_step("Gathering information about nodes")
    nodes = execute_command("oc get nodes --no-headers -o=custom-columns=NAME:.metadata.name").split("\n")
    del nodes[-1]  # split() leaves an extra empty field after the trailing newline
    passed("List of nodes:\n" + str(nodes))

    # Getting all tuned pods in project
    print_step("Getting information about tuned pods")
    tuned_pods = execute_command("oc get pods -n openshift-cluster-node-tuning-operator --no-headers -o=custom-columns=NAME:.metadata.name | grep tuned").split("\n")
    del tuned_pods[-1]  # split() leaves an extra empty field after the trailing newline
    passed("List of tuned pods:\n" + str(tuned_pods))

    # Creating test project
    print_step("Create project and get information where app is running")
    execute_command("oc new-project my-logging-project")
    execute_command("oc create -f https://raw.githubusercontent.com/hongkailiu/svt-case-doc/master/files/pod_test.yaml")
    countdown(10)
    execute_command("oc label pod web -n my-logging-project tuned.openshift.io/elasticsearch=")
    # Getting node where pod with 'web' name is running
    node_where_app_is_running = execute_command("oc get pod web --no-headers -o=custom-columns=NODE:.spec.nodeName").rstrip()
    tuned_operator_pod = execute_command("oc get pods -n openshift-cluster-node-tuning-operator -o wide | grep {} | cut -d ' ' -f 1".format(node_where_app_is_running)).rstrip()

    # Creating a new profile
    print_step("Create new profile: nf-conntrack-max")
    execute_command("oc create -f content/tuned-nf-conntrack-max.yml")

    # Verification if new tuned exists
    print_step("Verify if new tuned exists")
    number_of_tuned_profiles = int(execute_command("oc get tuned -n openshift-cluster-node-tuning-operator | grep -c nf-conntrack-max"))
    print("Number of tuned nf-conntrack-max: {}".format(number_of_tuned_profiles))
    if number_of_tuned_profiles == 1:
        passed(None)
    else:
        fail("There should be one tuned nf-conntrack-max but it was: {}".format(number_of_tuned_profiles), cleanup)
        return False

    # Verification if correct tuned applied on node
    print_step("Verify if correct profile is active on node")
    tuned_profile_actual = execute_command("oc get profiles.tuned.openshift.io {} -n openshift-cluster-node-tuning-operator -o json | jq -r '.spec.config.tunedProfile'".format(node_where_app_is_running)).rstrip()
    if tuned_profile_actual.replace(" ", "") == "nf-conntrack-max":
        passed(None)
    else:
        fail("Expected value:\nnf-conntrack-max\nActual value:\n{}".format(tuned_profile_actual), cleanup)
        return False

    # Checking the net.netfilter.nf_conntrack_max value on all nodes:
    print_step("Check all nodes for net.netfilter.nf_conntrack_max value")
    for node in nodes:
        conntrack_max = execute_command_on_node(node, "sysctl net.netfilter.nf_conntrack_max | cut -d ' ' -f 3 | sed 's/\t/ /g'").rstrip()
        print("Node: {}".format(node))
        print("net.netfilter.nf_conntrack_max: {}".format(conntrack_max))
        if (node == node_where_app_is_running and conntrack_max != "1048578") or (node != node_where_app_is_running and conntrack_max == "1048578"):
            fail("On node {} net.netfilter.nf_conntrack_max is {}".format(node, conntrack_max), cleanup)
            return False
    passed(None)

    # Checking logs on every pod:
    print_step("Check logs on every pod")
    for pod in tuned_pods:
        log = execute_command("oc logs {} -n openshift-cluster-node-tuning-operator | grep profile | tail -n1".format(pod)).rstrip()
        print("Pod: {}".format(pod))
        print('Log: {}'.format(log))
        if (pod == tuned_operator_pod and "nf-conntrack-max" not in log) or (pod != tuned_operator_pod and "nf-conntrack-max" in log):
            fail("On pod: {} found log: {}".format(pod, log), cleanup)
            return False
    passed(None)

    # Cleaning after test
    print_step("Cleaning after test")
    cleanup()
    number_of_tuned_profiles = int(execute_command("oc get tuned -n openshift-cluster-node-tuning-operator | grep -c nf-conntrack-max"))
    if number_of_tuned_profiles == 0:
        passed(None)
    else:
        fail("There shouldn't be any tuned nf-conntrack-max, but there was: {}".format(number_of_tuned_profiles), cleanup)
        return False

    # All steps passed
    return True


if __name__ == "__main__":
    test()
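The script above leans on a handful of helpers (execute_command, execute_command_on_node, print_title, print_step, passed, fail, countdown) defined elsewhere in the suite and not shown in this record. A minimal sketch of the surface the test needs, assuming execute_command shells out and returns stdout and execute_command_on_node wraps "oc debug"; these names and behaviors are assumptions, not the suite's actual implementation:

import subprocess
import time


def execute_command(command):
    # Run a shell command and return its stdout decoded as text.
    return subprocess.check_output(command, shell=True).decode("utf-8")


def execute_command_on_node(node, command):
    # Assumption: one plausible approach is to run the command on the host
    # through a debug pod; the real helper may do this differently.
    return execute_command('oc debug node/{} -- chroot /host sh -c "{}"'.format(node, command))


def print_title(title):
    print("=" * len(title))
    print(title)
    print("=" * len(title))


def print_step(step):
    print("--> " + step)


def passed(message):
    if message:
        print(message)
    print("PASSED")


def fail(message, cleanup_fn):
    # Report the failure and run the cleanup callback before the caller returns False.
    print("FAILED: " + message)
    cleanup_fn()


def countdown(seconds):
    time.sleep(seconds)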
greencoder/hopefullysunny-django
forecasts/management/commands/send_daily_forecasts.py
Python
mit
1,298
0.007704
# -*- coding: utf-8 -*-

import sys
import datetime

from django.core.management.base import BaseCommand, CommandError
from django.core.cache import cache

from registrations.models import Registration
from forecasts import handlers
from forecasts.lib.forecast import Forecast


class Command(BaseCommand):

    def handle(self, *args, **options):

        print "Starting Daily Email Run: %s" % datetime.datetime.now()

        for registration in Registration.objects.filter(status=1, latitude__isnull=False, longitude__isnull=False):

            cache_key = "%.2f,%.2f" % (registration.latitude, registration.longitude)
            forecasts_list = cache.get(cache_key)

            # If we don't have a value, it was not found in the cache. Look up and cache it.
            if not forecasts_list:
                forecasts_list = Forecast.get_forecast(registration.latitude, registration.longitude)
                print "Caching the fetched forecasts for %s" % cache_key
                cache.set(cache_key, forecasts_list, 3600)

            success = handlers.send_forecast_email(registration, forecasts_list)

            if success:
                print "Forecast Email Sent: %s" % registration.email
            else:
                print "Error sending email."
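This handle() loop is a textbook cache-aside pattern: key the cache on the rounded coordinates, fall through to the expensive forecast lookup on a miss, then store with a one-hour timeout so registrations that round to the same 0.01-degree cell share a single fetch. The same pattern in a self-contained sketch without Django's cache backend; fetch_forecast here is a hypothetical stand-in for Forecast.get_forecast:

import time

_cache = {}  # key -> (expires_at, value)


def get_or_fetch(lat, lon, fetch_forecast, ttl=3600):
    key = "%.2f,%.2f" % (lat, lon)
    entry = _cache.get(key)
    if entry and entry[0] > time.time():
        return entry[1]  # cache hit, still fresh
    value = fetch_forecast(lat, lon)  # miss or expired: do the expensive lookup
    _cache[key] = (time.time() + ttl, value)
    return value


# Usage: the second call for the same rounded cell is served from the cache.
forecast = get_or_fetch(39.74, -104.99, lambda lat, lon: ["sunny"])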
PhilLidar-DAD/geonode
geonode/layers/admin.py
Python
gpl-3.0
3,475
0.000288
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################

from django.contrib import admin

from geonode.base.admin import MediaTranslationAdmin, ResourceBaseAdminForm
from geonode.layers.models import Layer, Attribute, Style
from geonode.layers.models import LayerFile, UploadSession

import autocomplete_light


class LayerAdminForm(ResourceBaseAdminForm):

    class Meta:
        model = Layer


class AttributeInline(admin.TabularInline):
    model = Attribute


class LayerAdmin(MediaTranslationAdmin):
    list_display = (
        'id',
        'typename',
        'service_type',
        'title',
        'Floodplains',
        'SUC',
        'date',
        'category')
    list_display_links = ('id',)
    list_editable = ('title', 'category')
    list_filter = ('owner', 'category', 'restriction_code_type__identifier',
                   'date', 'date_type')

    # def get_queryset(self, request):
    #     return super(LayerAdmin,
    #                  self).get_queryset(request).prefetch_related('floodplain_tag', 'SUC_tag')

    def Floodplains(self, obj):
        return u", ".join(o.name for o in obj.floodplain_tag.all())

    def SUC(self, obj):
        return u", ".join(o.name for o in obj.SUC_tag.all())

    # def get_queryset(self, request):
    #     return super(LayerAdmin, self).get_queryset(request).prefetch_related('SUC_tag')

    # def SUC(self, obj):
    #     return u", ".join(o.name for o in obj.SUC_tag.all())

    inlines = [AttributeInline]
    search_fields = ('typename', 'title', 'abstract', 'purpose',)
    filter_horizontal = ('contacts',)
    date_hierarchy = 'date'
    readonly_fields = ('uuid', 'typename', 'workspace')
    form = LayerAdminForm


class AttributeAdmin(admin.ModelAdmin):
    model = Attribute
    list_display_links = ('id',)
    list_display = (
        'id',
        'layer',
        'attribute',
        'description',
        'attribute_label',
        'attribute_type',
        'display_order')
    list_filter = ('layer', 'attribute_type')
    search_fields = ('attribute', 'attribute_label',)


class StyleAdmin(admin.ModelAdmin):
    model = Style
    list_display_links = ('sld_title',)
    list_display = ('id', 'name', 'sld_title', 'workspace', 'sld_url')
    list_filter = ('workspace',)
    search_fields = ('name', 'workspace',)


class LayerFileInline(admin.TabularInline):
    model = LayerFile


class UploadSessionAdmin(admin.ModelAdmin):
    model = UploadSession
    list_display = ('date', 'user', 'processed')
    inlines = [LayerFileInline]


admin.site.register(Layer, LayerAdmin)
admin.site.register(Attribute, AttributeAdmin)
admin.site.register(Style, StyleAdmin)
admin.site.register(UploadSession, UploadSessionAdmin)
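The commented-out get_queryset overrides hint at the real cost of the Floodplains and SUC columns: each rendered changelist row triggers two extra queries against the tag tables. A sketch of re-enabling the override, assuming floodplain_tag and SUC_tag are related managers as the column methods imply; this runs inside the Django project above, not standalone:

class LayerAdmin(MediaTranslationAdmin):
    list_display = ('id', 'title', 'Floodplains', 'SUC')

    def get_queryset(self, request):
        # Two prefetches for the whole changelist instead of two queries per row.
        qs = super(LayerAdmin, self).get_queryset(request)
        return qs.prefetch_related('floodplain_tag', 'SUC_tag')

    def Floodplains(self, obj):
        # .all() is served from the prefetch cache, so no extra query is issued.
        return u", ".join(o.name for o in obj.floodplain_tag.all())

    def SUC(self, obj):
        return u", ".join(o.name for o in obj.SUC_tag.all())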
ChristinaZografou/sympy
sympy/polys/tests/test_polytools.py
Python
bsd-3-clause
106,569
0.001314
"""Tests for user-friendly public interface to polynomial functions. """ from sympy.polys.polytools import ( Poly, PurePoly, poly, parallel_poly_from_expr, degree, degree_list, LC, LM, LT, pdiv, prem, pquo, pexquo, div, rem, quo, exquo, half_gcdex, gcdex, invert, subresultants, resultant, discriminant, terms_gcd, cofactors, gcd, gcd_list, lcm, lcm_list, trunc, monic, content, primitive, compose, decompose, sturm, gff_list, gff, sqf_norm, sqf_part, sqf_list, sqf, factor_list, factor, intervals, refine_root, count_roots, real_roots, nroots, ground_roots, nth_power_roots_poly,
cancel, reduced, groebner, GroebnerBasis, is_zero_dimensional, _tor
ational_factor_list, to_rational_coeffs) from sympy.polys.polyerrors import ( MultivariatePolynomialError, ExactQuotientFailed, PolificationFailed, ComputationFailed, UnificationFailed, RefinementFailed, GeneratorsNeeded, GeneratorsError, PolynomialError, CoercionFailed, DomainError, OptionError, FlagError) from sympy.polys.polyclasses import DMP from sympy.polys.fields import field from sympy.polys.domains import FF, ZZ, QQ, RR, EX from sympy.polys.domains.realfield import RealField from sympy.polys.orderings import lex, grlex, grevlex from sympy import ( S, Integer, Rational, Float, Mul, Symbol, sqrt, Piecewise, Derivative, exp, sin, tanh, expand, oo, I, pi, re, im, rootof, Eq, Tuple, Expr, diff) from sympy.core.basic import _aresame from sympy.core.compatibility import iterable from sympy.core.mul import _keep_coeff from sympy.utilities.pytest import raises, XFAIL from sympy.simplify import simplify from sympy.abc import a, b, c, d, p, q, t, w, x, y, z from sympy import MatrixSymbol def _epsilon_eq(a, b): for x, y in zip(a, b): if abs(x - y) > 1e-10: return False return True def _strict_eq(a, b): if type(a) == type(b): if iterable(a): if len(a) == len(b): return all(_strict_eq(c, d) for c, d in zip(a, b)) else: return False else: return isinstance(a, Poly) and a.eq(b, strict=True) else: return False def test_Poly_from_dict(): K = FF(3) assert Poly.from_dict( {0: 1, 1: 2}, gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_dict( {0: 1, 1: 5}, gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_dict( {(0,): 1, (1,): 2}, gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_dict( {(0,): 1, (1,): 5}, gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_dict({(0, 0): 1, (1, 1): 2}, gens=( x, y), domain=K).rep == DMP([[K(2), K(0)], [K(1)]], K) assert Poly.from_dict({0: 1, 1: 2}, gens=x).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_dict( {0: 1, 1: 2}, gens=x, field=True).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_dict( {0: 1, 1: 2}, gens=x, domain=ZZ).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_dict( {0: 1, 1: 2}, gens=x, domain=QQ).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_dict( {(0,): 1, (1,): 2}, gens=x).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_dict( {(0,): 1, (1,): 2}, gens=x, field=True).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_dict( {(0,): 1, (1,): 2}, gens=x, domain=ZZ).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_dict( {(0,): 1, (1,): 2}, gens=x, domain=QQ).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_dict({(1,): sin(y)}, gens=x, composite=False) == \ Poly(sin(y)*x, x, domain='EX') assert Poly.from_dict({(1,): y}, gens=x, composite=False) == \ Poly(y*x, x, domain='EX') assert Poly.from_dict({(1, 1): 1}, gens=(x, y), composite=False) == \ Poly(x*y, x, y, domain='ZZ') assert Poly.from_dict({(1, 0): y}, gens=(x, z), composite=False) == \ Poly(y*x, x, z, domain='EX') def test_Poly_from_list(): K = FF(3) assert Poly.from_list([2, 1], gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_list([5, 1], gens=x, domain=K).rep == DMP([K(2), K(1)], K) assert Poly.from_list([2, 1], gens=x).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_list([2, 1], gens=x, field=True).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_list([2, 1], gens=x, domain=ZZ).rep == DMP([ZZ(2), ZZ(1)], ZZ) assert Poly.from_list([2, 1], gens=x, domain=QQ).rep == DMP([QQ(2), QQ(1)], QQ) assert Poly.from_list([0, 1.0], gens=x).rep == DMP([RR(1.0)], RR) assert Poly.from_list([1.0, 0], gens=x).rep == DMP([RR(1.0), RR(0.0)], RR) 
raises(MultivariatePolynomialError, lambda: Poly.from_list([[]], gens=(x, y))) def test_Poly_from_poly(): f = Poly(x + 7, x, domain=ZZ) g = Poly(x + 2, x, modulus=3) h = Poly(x + y, x, y, domain=ZZ) K = FF(3) assert Poly.from_poly(f) == f assert Poly.from_poly(f, domain=K).rep == DMP([K(1), K(1)], K) assert Poly.from_poly(f, domain=ZZ).rep == DMP([1, 7], ZZ) assert Poly.from_poly(f, domain=QQ).rep == DMP([1, 7], QQ) assert Poly.from_poly(f, gens=x) == f assert Poly.from_poly(f, gens=x, domain=K).rep == DMP([K(1), K(1)], K) assert Poly.from_poly(f, gens=x, domain=ZZ).rep == DMP([1, 7], ZZ) assert Poly.from_poly(f, gens=x, domain=QQ).rep == DMP([1, 7], QQ) assert Poly.from_poly(f, gens=y) == Poly(x + 7, y, domain='ZZ[x]') raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=K)) raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=ZZ)) raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=QQ)) assert Poly.from_poly(f, gens=(x, y)) == Poly(x + 7, x, y, domain='ZZ') assert Poly.from_poly( f, gens=(x, y), domain=ZZ) == Poly(x + 7, x, y, domain='ZZ') assert Poly.from_poly( f, gens=(x, y), domain=QQ) == Poly(x + 7, x, y, domain='QQ') assert Poly.from_poly( f, gens=(x, y), modulus=3) == Poly(x + 7, x, y, domain='FF(3)') K = FF(2) assert Poly.from_poly(g) == g assert Poly.from_poly(g, domain=ZZ).rep == DMP([1, -1], ZZ) raises(CoercionFailed, lambda: Poly.from_poly(g, domain=QQ)) assert Poly.from_poly(g, domain=K).rep == DMP([K(1), K(0)], K) assert Poly.from_poly(g, gens=x) == g assert Poly.from_poly(g, gens=x, domain=ZZ).rep == DMP([1, -1], ZZ) raises(CoercionFailed, lambda: Poly.from_poly(g, gens=x, domain=QQ)) assert Poly.from_poly(g, gens=x, domain=K).rep == DMP([K(1), K(0)], K) K = FF(3) assert Poly.from_poly(h) == h assert Poly.from_poly( h, domain=ZZ).rep == DMP([[ZZ(1)], [ZZ(1), ZZ(0)]], ZZ) assert Poly.from_poly( h, domain=QQ).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ) assert Poly.from_poly(h, domain=K).rep == DMP([[K(1)], [K(1), K(0)]], K) assert Poly.from_poly(h, gens=x) == Poly(x + y, x, domain=ZZ[y]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, domain=ZZ)) assert Poly.from_poly( h, gens=x, domain=ZZ[y]) == Poly(x + y, x, domain=ZZ[y]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, domain=QQ)) assert Poly.from_poly( h, gens=x, domain=QQ[y]) == Poly(x + y, x, domain=QQ[y]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, modulus=3)) assert Poly.from_poly(h, gens=y) == Poly(x + y, y, domain=ZZ[x]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, domain=ZZ)) assert Poly.from_poly( h, gens=y, domain=ZZ[x]) == Poly(x + y, y, domain=ZZ[x]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, domain=QQ)) assert Poly.from_poly( h, gens=y, domain=QQ[x]) == Poly(x + y, y, domain=QQ[x]) raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, modulus=3)) assert Poly.from_poly(h, gens=(x, y)) == h assert Poly.from_poly( h, gens=(x, y), domain=ZZ).rep == DMP([[ZZ(1)], [ZZ(1), ZZ(0)]], ZZ) assert Poly.from_poly( h, gens=(x, y), domain=QQ).rep == DMP([[QQ(1)], [QQ(1), QQ(0)]], QQ) assert Poly.from_poly( h, gen
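The constructor semantics these assertions exercise can be summarized in a few runnable lines (requires sympy; the behavior shown follows directly from the tests above):

from sympy import Poly
from sympy.abc import x, y

# from_dict keys are exponents (or exponent tuples), values are coefficients.
p = Poly.from_dict({0: 1, 1: 2}, gens=x)
print(p)  # Poly(2*x + 1, x, domain='ZZ')

# from_list takes coefficients in descending degree order.
q = Poly.from_list([2, 1], gens=x)
print(p == q)  # True

# With composite=False, a non-ground coefficient forces the EX domain.
r = Poly.from_dict({(1,): y}, gens=x, composite=False)
print(r.get_domain())  # EX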
elkeschaper/hts
hts/plate/plate.py
Python
gpl-2.0
35,353
0.006082
# (C) 2015, 2016 Elke Schaper

"""
    :synopsis: The Plate Class.

    .. moduleauthor:: Elke Schaper <elke.schaper@isb-sib.ch>
"""

import itertools
import logging
import pickle
import random
import re
import string

import GPy
import numpy as np
import pylab
import scipy.stats

import hts.data_tasks.gaussian_processes
from hts.data_tasks import prediction
from hts.plate_data import plate_data, data_issue, meta_data, plate_layout, readout

KNOWN_DATA_TYPES = ["plate_layout", "readout", "data_issue", "config_data"]
LETTERS = list(string.ascii_uppercase) + ["".join(i) for i in itertools.product(string.ascii_uppercase, string.ascii_uppercase)]
MAX_WIDTH = 48
MAX_HEIGHT = 32
TRANSLATE_HUMANREADABLE_COORDINATE = {(LETTERS[cc[0]], str(cc[1] + 1)): cc
                                      for cc in itertools.product(range(MAX_HEIGHT), range(MAX_WIDTH))}
TRANSLATE_COORDINATE_HUMANREADABLE = {cc: (LETTERS[cc[0]], str(cc[0] + 1), str(cc[1] + 1))
                                      for cc in itertools.product(range(MAX_HEIGHT), range(MAX_WIDTH))}
LOG = logging.getLogger(__name__)


## TODO: Instead of creating a matrix for both coordinates, simply create a list each to save memory.
def translate_coordinate_humanreadable(coordinate, pattern=None):
    coordinate_human = TRANSLATE_COORDINATE_HUMANREADABLE[coordinate]
    if pattern:
        return pattern.format(coordinate_human[0], int(coordinate_human[2]))
    else:
        return coordinate_human


def translate_humanreadable_coordinate(humanreadable):
    pattern = re.compile('([a-zA-Z]+)0*(\d+)')
    match = re.match(pattern, humanreadable)
    if not match:
        LOG.error("pattern: {} did not match {}".format(pattern, humanreadable))
    humanreadable = (match.group(1), match.group(2))
    return TRANSLATE_HUMANREADABLE_COORDINATE[humanreadable]


class Plate:
    """``Plate`` describes all information connected to the readout_dict of a high throughput screen.
    This could be either several readouts of a plate, or the same readout across several plates.

    Attributes:
        name (str): Name of the plate
        width (int): Width of the plate
        height (int): Height of the plate
        KNOWN_DATA_TYPES[i] (subclass of plate_data.PlateData): The data associated to this Plate,
            e.g. a plate layout, or readouts.
    """

    def __str__(self):
        """Create string for Plate instance."""
        if hasattr(self, "name"):
            name = self.name
        else:
            name = "<not named>"
        try:
            readout_dict = ("<Plate instance>\nname: {}\nread_outs: {}"
                            "\nNumber of read_outs: {}\nwidth: {}\nheight: {}".format(
                                name, str(self.readout.data.keys()), len(self.readout.data),
                                self.width, self.height))
        except:
            readout_dict = "<Plate instance>"
            LOG.warning("Could not create string of Plate instance.")
        return readout_dict

    def __init__(self, data, name, **kwargs):
        LOG.debug(data)

        self.name = name

        for data_type in KNOWN_DATA_TYPES:
            if data_type in data:
                if not isinstance(data[data_type], plate_data.PlateData):
                    raise Exception(
                        "type of {} data is {}, not plate_data.PlateData.".format(data_type, type(data[data_type])))
                setattr(self, data_type, data[data_type])
            else:
                setattr(self, data_type, None)

        if "height" in kwargs:
            self.height = kwargs.pop("height")
        if "width" in kwargs:
            self.width = kwargs.pop("width")

        # You are using this construct in many an __init__ . Consider turning into decorator.
        for key, value in kwargs.items():
            if not hasattr(self, key):
                setattr(self, key, value)

        """ FORMERLY:
        # Make sure all readouts are equal in height and width.
        plate_heights = [i.height for i in self.readout.data.values()]
        plate_widths = [i.width for i in self.readout.data.values()]
        if len(set(plate_heights)) != 1 or len(set(plate_widths)) != 1:
            raise Exception("Plate widths and lengths in the parsed output "
                            "files are not all equal: plate_heights: {}, plate_widths: {} "
                            "".format(plate_heights, plate_widths))
        """

    def create(format, name=None, **kwargs):
        """ Create ``Plate`` instance.

        Create ``Plate`` instance.

        Args:
            path (str): Path to input file or directory
            format (str):  Format of the input file, at current not specified
        """
        if format == "config":
            data = {}
            if "meta_data" in kwargs:
                data["meta_data"] = meta_data.MetaData.create(**kwargs["meta_data"])
            if "plate_layout" in kwargs:
                data["plate_layout"] = plate_layout.PlateLayout.create(**kwargs["plate_layout"])
            if "data_issue" in kwargs:
                data["data_issue"] = data_issue.DataIssue.create(**kwargs["data_issue"])
            if "readout" in kwargs:
                data["readout"] = readout.Readout.create(**kwargs["readout"])
            height = len(next(iter(next(iter(data.values())).data.values())))
            width = len(next(iter(next(iter(data.values())).data.values()))[0])
            if not name:
                name = next(iter(data.values())).name
            return Plate(data=data, height=height, width=width, name=name)
        elif format == 'pickle':
            with open(kwargs["path"], 'rb') as fh:
                return pickle.load(fh)
        else:
            raise Exception("Format: {} is not implemented in "
                            "Plate.create()".format(format))

    def add_data(self, data_type, data, force=False, tag=None):
        """ Add `data` of `data_type` to `self.config_data`

        Add `data` of `data_type` to `self.config_data`
        """
        if data_type == "meta_data" and not isinstance(data, meta_data.MetaData):
            raise Exception('data is not of type config_data.MetaData, but {}'.format(type(data)))
        elif data_type == "plate_layout" and not isinstance(data, plate_layout.PlateLayout):
            raise Exception('data is not of type plate_layout.PlateLayout, but {}'.format(type(data)))
        elif data_type == "data_issue" and not isinstance(data, data_issue.DataIssue):
            raise Exception('data is not of type data_issue.DataIssue, but {}'.format(type(data)))
        elif data_type == "readout" and not isinstance(data, readout.Readout):
            raise Exception('data is not of type readout.Readout, but {}'.format(type(data)))

        if force or not hasattr(self, data_type) or not isinstance(getattr(self, data_type), plate_data.PlateData):
            setattr(self, data_type, data)
        else:
            getattr(self, data_type).add_data(data=data, tag=tag)

    def write(self, format, path=None, return_string=None, *args):
        """ Serialize and write ``Plate`` instances.

        Serialize ``Plate`` instance using the stated ``format``.

        Args:
            format (str):  The output format: Currently only "pickle".
            path (str): Path to output file

        .. todo:: Write checks for ``format`` and ``path``.
        """
        if format == 'pickle':
            # pickle is a binary format: write it directly and return early,
            # instead of falling through to a text-mode write of an undefined
            # `output` variable as the original code did.
            if path:
                with open(path, 'wb') as fh:
                    pickle.dump(self, fh)
            if return_string:
                return pickle.dumps(self)
            return
        else:
            raise Exception('Format is unknown: {}'.format(format))

    def filter(self, value_data_type, value_data_tag, value_type=None, condition_data_type=None, conditi
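Stepping back to the two module-level lookup tables near the top of this file: they implement the usual A1-style well naming, with row letters (A, B, ..., Z, AA, AB, ...) and 1-based column numbers mapped to 0-based (row, column) tuples, and the regex tolerates zero padding so 'B07' and 'B7' are equivalent. A self-contained sketch of the same translation, independent of the lookup tables:

import itertools
import re
import string

LETTERS = list(string.ascii_uppercase) + [
    "".join(i) for i in itertools.product(string.ascii_uppercase, repeat=2)]


def humanreadable_to_coordinate(well):
    # 'B07' -> (1, 6): row letter to 0-based row index, 1-based column to 0-based.
    match = re.match(r'([a-zA-Z]+)0*(\d+)', well)
    row, column = match.group(1).upper(), int(match.group(2))
    return LETTERS.index(row), column - 1


print(humanreadable_to_coordinate("B07"))   # (1, 6)
print(humanreadable_to_coordinate("AA12"))  # (26, 11)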
opendroid-Team/enigma2-4.1
lib/python/Tools/KeyBindings.py
Python
gpl-2.0
4,901
0.030402
keyBindings = {}

from keyids import KEYIDS
from Components.config import config
from Components.RcModel import rc_model

keyDescriptions = [{
    KEYIDS["BTN_0"]: ("UP", "fp"),
    KEYIDS["BTN_1"]: ("DOWN", "fp"),
    KEYIDS["KEY_OK"]: ("OK", ""),
    KEYIDS["KEY_UP"]: ("UP",),
    KEYIDS["KEY_DOWN"]: ("DOWN",),
    KEYIDS["KEY_POWER"]: ("POWER",),
    KEYIDS["KEY_RED"]: ("RED",),
    KEYIDS["KEY_BLUE"]: ("BLUE",),
    KEYIDS["KEY_GREEN"]: ("GREEN",),
    KEYIDS["KEY_YELLOW"]: ("YELLOW",),
    KEYIDS["KEY_MENU"]: ("MENU",),
    KEYIDS["KEY_LEFT"]: ("LEFT",),
    KEYIDS["KEY_RIGHT"]: ("RIGHT",),
    KEYIDS["KEY_VIDEO"]: ("PVR",),
    KEYIDS["KEY_INFO"]: ("INFO",),
    KEYIDS["KEY_AUDIO"]: ("YELLOW",),
    KEYIDS["KEY_TV"]: ("TV",),
    KEYIDS["KEY_RADIO"]: ("RADIO",),
    KEYIDS["KEY_TEXT"]: ("TEXT",),
    KEYIDS["KEY_NEXT"]: ("ARROWRIGHT",),
    KEYIDS["KEY_PREVIOUS"]: ("ARROWLEFT",),
    KEYIDS["KEY_PREVIOUSSONG"]: ("REWIND",),
    KEYIDS["KEY_PLAYPAUSE"]: ("PLAYPAUSE",),
    KEYIDS["KEY_PLAY"]: ("PLAYPAUSE",),
    KEYIDS["KEY_NEXTSONG"]: ("FASTFORWARD",),
    KEYIDS["KEY_CHANNELUP"]: ("BOUQUET+",),
    KEYIDS["KEY_CHANNELDOWN"]: ("BOUQUET-",),
    KEYIDS["KEY_0"]: ("0",),
    KEYIDS["KEY_1"]: ("1",),
    KEYIDS["KEY_2"]: ("2",),
    KEYIDS["KEY_3"]: ("3",),
    KEYIDS["KEY_4"]: ("4",),
    KEYIDS["KEY_5"]: ("5",),
    KEYIDS["KEY_6"]: ("6",),
    KEYIDS["KEY_7"]: ("7",),
    KEYIDS["KEY_8"]: ("8",),
    KEYIDS["KEY_9"]: ("9",),
    KEYIDS["KEY_EXIT"]: ("EXIT",),
    KEYIDS["KEY_STOP"]: ("STOP",),
    KEYIDS["KEY_RECORD"]: ("RECORD",)
}, {
    KEYIDS["BTN_0"]: ("UP", "fp"),
    KEYIDS["BTN_1"]: ("DOWN", "fp"),
    KEYIDS["KEY_OK"]: ("OK", ""),
    KEYIDS["KEY_UP"]: ("UP",),
    KEYIDS["KEY_DOWN"]: ("DOWN",),
    KEYIDS["KEY_POWER"]: ("POWER",),
    KEYIDS["KEY_RED"]: ("RED",),
    KEYIDS["KEY_BLUE"]: ("BLUE",),
    KEYIDS["KEY_GREEN"]: ("GREEN",),
    KEYIDS["KEY_YELLOW"]: ("YELLOW",),
    KEYIDS["KEY_MENU"]: ("MENU",),
    KEYIDS["KEY_LEFT"]: ("LEFT",),
    KEYIDS["KEY_RIGHT"]: ("RIGHT",),
    KEYIDS["KEY_VIDEO"]: ("VIDEO",),
    KEYIDS["KEY_INFO"]: ("INFO",),
    KEYIDS["KEY_AUDIO"]: ("AUDIO",),
    KEYIDS["KEY_TV"]: ("TV",),
    KEYIDS["KEY_RADIO"]: ("RADIO",),
    KEYIDS["KEY_TEXT"]: ("TEXT",),
    KEYIDS["KEY_NEXT"]: ("ARROWRIGHT",),
    KEYIDS["KEY_PREVIOUS"]: ("ARROWLEFT",),
    KEYIDS["KEY_PREVIOUSSONG"]: ("RED", "SHIFT"),
    KEYIDS["KEY_PLAYPAUSE"]: ("YELLOW", "SHIFT"),
    KEYIDS["KEY_PLAY"]: ("GREEN", "SHIFT"),
    KEYIDS["KEY_NEXTSONG"]: ("BLUE", "SHIFT"),
    KEYIDS["KEY_CHANNELUP"]: ("BOUQUET+",),
    KEYIDS["KEY_CHANNELDOWN"]: ("BOUQUET-",),
    KEYIDS["KEY_0"]: ("0",),
    KEYIDS["KEY_1"]: ("1",),
    KEYIDS["KEY_2"]: ("2",),
    KEYIDS["KEY_3"]: ("3",),
    KEYIDS["KEY_4"]: ("4",),
    KEYIDS["KEY_5"]: ("5",),
    KEYIDS["KEY_6"]: ("6",),
    KEYIDS["KEY_7"]: ("7",),
    KEYIDS["KEY_8"]: ("8",),
    KEYIDS["KEY_9"]: ("9",),
    KEYIDS["KEY_EXIT"]: ("EXIT",),
    KEYIDS["KEY_STOP"]: ("TV", "SHIFT"),
    KEYIDS["KEY_RECORD"]: ("RADIO", "SHIFT")
}, {
    KEYIDS["BTN_0"]: ("UP", "fp"),
    KEYIDS["BTN_1"]: ("DOWN", "fp"),
    KEYIDS["KEY_OK"]: ("OK", ""),
    KEYIDS["KEY_UP"]: ("UP",),
    KEYIDS["KEY_DOWN"]: ("DOWN",),
    KEYIDS["KEY_POWER"]: ("POWER",),
    KEYIDS["KEY_RED"]: ("RED",),
    KEYIDS["KEY_BLUE"]: ("BLUE",),
    KEYIDS["KEY_GREEN"]: ("GREEN",),
    KEYIDS["KEY_YELLOW"]: ("YELLOW",),
    KEYIDS["KEY_MENU"]: ("MENU",),
    KEYIDS["KEY_LEFT"]: ("LEFT",),
    KEYIDS["KEY_RIGHT"]: ("RIGHT",),
    KEYIDS["KEY_VIDEO"]: ("PVR",),
    KEYIDS["KEY_INFO"]: ("INFO",),
    KEYIDS["KEY_AUDIO"]: ("AUDIO",),
    KEYIDS["KEY_TV"]: ("TV",),
    KEYIDS["KEY_RADIO"]: ("RADIO",),
    KEYIDS["KEY_TEXT"]: ("TEXT",),
    KEYIDS["KEY_NEXT"]: ("ARROWRIGHT",),
    KEYIDS["KEY_PREVIOUS"]: ("ARROWLEFT",),
    KEYIDS["KEY_PREVIOUSSONG"]: ("REWIND",),
    KEYIDS["KEY_PLAYPAUSE"]: ("PAUSE",),
    KEYIDS["KEY_PLAY"]: ("PLAY",),
    KEYIDS["KEY_NEXTSONG"]: ("FASTFORWARD",),
    KEYIDS["KEY_CHANNELUP"]: ("BOUQUET+",),
    KEYIDS["KEY_CHANNELDOWN"]: ("BOUQUET-",),
    KEYIDS["KEY_0"]: ("0",),
    KEYIDS["KEY_1"]: ("1",),
    KEYIDS["KEY_2"]: ("2",),
    KEYIDS["KEY_3"]: ("3",),
    KEYIDS["KEY_4"]: ("4",),
    KEYIDS["KEY_5"]: ("5",),
    KEYIDS["KEY_6"]: ("6",),
    KEYIDS["KEY_7"]: ("7",),
    KEYIDS["KEY_8"]: ("8",),
    KEYIDS["KEY_9"]: ("9",),
    KEYIDS["KEY_EXIT"]: ("EXIT",),
    KEYIDS["KEY_STOP"]: ("STOP",),
    KEYIDS["KEY_RECORD"]: ("RECORD",)
},
]


def addKeyBinding(domain, key, context, action, flags):
    keyBindings.setdefault((context, action), []).append((key, domain, flags))


# returns a list of (key, flags) for a specified action
def queryKeyBinding(context, action):
    if (context, action) in keyBindings:
        return [(x[0], x[2]) for x in keyBindings[(context, action)]]
    else:
        return []


def getKeyDescription(key):
    if rc_model.rcIsDefault():
        idx = config.misc.rcused.value
    else:
        idx = 2
    if key in keyDescriptions[idx]:
        return keyDescriptions[idx].get(key, [])


def removeKeyBindings(domain):
    # remove all entries of domain 'domain'
    for x in keyBindings:
        keyBindings[x] = filter(lambda e: e[1] != domain, keyBindings[x])
ksmaheshkumar/gitfs
gitfs/views/read_only.py
Python
apache-2.0
1,744
0
# Copyright 2014 PressLabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from errno import EROFS

from fuse import FuseOSError, ENOTSUP

from .view import View


class ReadOnlyView(View):
    def getxattr(self, path, name, *args):
        raise FuseOSError(ENOTSUP)

    def open(self, path, flags):
        write_flags = (os.O_WRONLY | os.O_RDWR | os.O_APPEND |
                       os.O_TRUNC | os.O_CREAT)
        if write_flags & flags:
            raise FuseOSError(EROFS)
        return 0

    def create(self, path, fh):
        raise FuseOSError(EROFS)

    def write(self, path, fh):
        raise FuseOSError(EROFS)

    def opendir(self, path):
        return 0

    def releasedir(self, path, fi):
        return 0

    def flush(self, path, fh):
        return 0

    def release(self, path, fh):
        return 0

    def access(self, path, amode):
        if amode & os.W_OK:
            raise FuseOSError(EROFS)
        return 0

    def mkdir(self, path, mode):
        raise FuseOSError(EROFS)

    def utimens(self, path, times=None):
        raise FuseOSError(EROFS)

    def chown(self, path, uid, gid):
        raise FuseOSError(EROFS)

    def chmod(self, path, mode):
        raise FuseOSError(EROFS)
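The interesting guard here is in open(): FUSE passes the raw flags word, and OR-ing all write-capable bits into one mask reduces the rejection to a single bitwise test (O_TRUNC and O_CREAT count as writes even on a read open). The same check in isolation, runnable without FUSE:

import os

WRITE_FLAGS = os.O_WRONLY | os.O_RDWR | os.O_APPEND | os.O_TRUNC | os.O_CREAT


def is_write_open(flags):
    # Any overlap with a write-capable bit means the open would mutate state.
    return bool(WRITE_FLAGS & flags)


print(is_write_open(os.O_RDONLY))               # False
print(is_write_open(os.O_RDONLY | os.O_TRUNC))  # True: truncation writes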
nayas360/pyterm
bin/set.py
Python
mit
1,471
0
# set command to set global variables

from lib.utils import *


def _help():
    usage = '''
Usage: set [options] (var) [value]

[options]:
    -h          Print this help.
    -del (var)  Delete variable (var) if defined.

where (var) is a valid global variable
if [value] is not given, current value is returned
'''
    print(usage)


def main(argv):
    if '-h' in argv:
        _help()
        return

    # The shell doesn't send the command name in the arg list,
    # so the next line is not needed any more
    # argv.pop(0)  # remove arg

    # to show all vars
    if len(argv) < 1:
        for i in prop.vars():
            print(i, ' = ', prop.get(i))
        return

    if '-del' in argv:
        try:
            var = argv[1]
            # detect system vars
            if var == 'save_state' or var == 'c_char':
                err(4, add='Can\'t delete system variable "' + var + '"')
                return
            prop.delete(var)
            return
        except IndexError:
            err(4, add='variable name was missing')
            return

    var = argv[0]
    if len(argv) < 2:
        val = prop.get(var)
        if val == NULL:
            err(4, var)
            return
        print(val)
        return

    # remove name of var
    argv.pop(0)
    # make the rest the val
    val = make_s(argv)
    try:
        prop.set(var, val)
    except ValueError:
        err(4, add="can't create this variable")
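This command leans on prop, NULL, err, and make_s from lib.utils, none of which appear in this record. A rough sketch of the minimal surface the script needs, purely as an assumption about what the real pyterm helpers provide:

NULL = object()  # assumed sentinel for "variable not defined"


class _Props:
    def __init__(self):
        self._vars = {}

    def vars(self):
        return self._vars.keys()

    def get(self, name):
        return self._vars.get(name, NULL)

    def set(self, name, value):
        self._vars[name] = value

    def delete(self, name):
        self._vars.pop(name, None)


prop = _Props()


def make_s(parts):
    # Join the remaining argv tokens back into a single value string.
    return ' '.join(parts)


def err(code, msg=None, add=None):
    # The real implementation likely maps codes to messages; this just prints.
    print("error {}: {}".format(code, add if add is not None else msg))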