content
stringlengths 5
1.05M
|
|---|
try:
from graphql_example.model import Author, Book
except ModuleNotFoundError:
from model import Author, Book
from mimesis import Generic
# fake data generator
generate = Generic('en')
def author_factory(**replace):
    """Build an :class:`Author` populated with fake data.

    Any keyword argument overrides the generated default for that field.
    """
    fields = {
        'first_name': generate.personal.name(),
        'last_name': generate.personal.last_name(),
        'age': generate.personal.age(),
        'books': None,
    }
    fields.update(replace)
    return Author(**fields)
def book_factory(**replace):
    """Build a :class:`Book` populated with fake data.

    Any keyword argument overrides the generated default for that field.
    """
    fields = {
        'title': generate.text.title(),
        'author': author_factory(),
        'published': generate.datetime.date(),
    }
    fields.update(replace)
    return Book(**fields)
|
# Copyright (c) 2020 - 2021 Persanix LLC. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Generic, TypeVar, Optional
from pydantic.generics import GenericModel
from endrpi.model.message import MessageData
# Pydantic generic
T = TypeVar('T')
class ActionResult(GenericModel, Generic[T]):
    """
    Interface used to represent the result of performing a generic action (i.e. running a command).
    Example of a successful generic temperature action result:
    ActionResult[str](success=True, data='System temperature: 50F', error=None)
    Example of a failed generic temperature action result:
    ActionResult[str](success=False, data=None, error='Requires elevated privilege.')
    """
    # True when the action completed successfully, False otherwise.
    success: bool
    # Payload of a successful action; None on failure (see examples above).
    data: Optional[T]
    # Error details for a failed action; None on success.
    error: Optional[MessageData]
def success_action_result(data: Optional[T] = None) -> ActionResult:
    """Returns an :class:`ActionResult` preconfigured for successful actions.

    :param data: Optional payload to attach to the successful result.
    """
    # The annotation previously used the builtin function ``any``, which is
    # not a type; ``Optional[T]`` (both already imported/defined above) is
    # the intended annotation and keeps the default of None valid.
    return ActionResult(success=True, data=data, error=None)
def error_action_result(message: str) -> ActionResult:
    """Returns an :class:`ActionResult` preconfigured for failed actions"""
    return ActionResult(
        success=False,
        data=None,
        error=MessageData(message=message),
    )
|
from fparser.Fortran2003 import *
from fparser.api import get_reader
from nose.tools import assert_equal
def assertRaises(exc, cls, s):
    """Assert that calling ``cls(s)`` raises ``exc``.

    The failure is raised *after* the ``try`` block so it cannot be
    accidentally swallowed by the ``except`` clause when ``exc`` is
    :class:`AssertionError` (or one of its base classes) -- the original
    version raised inside the ``try`` and would silently pass in that case.
    """
    try:
        cls(s)
    except exc:
        return
    raise AssertionError('Expected %s but got nothing' % exc)
###############################################################################
############################### SECTION 2 ####################################
###############################################################################
def test_Program(): # R201
    """A Program node is built from multiple subroutines; a bare ``end``
    is printed back as ``END SUBROUTINE <name>``."""
    cls = Program
    reader = get_reader('''\
subroutine foo
end subroutine foo
subroutine bar
end
''')
    a = cls(reader)
    assert isinstance(a, cls),`a`
    assert_equal(str(a), 'SUBROUTINE foo\nEND SUBROUTINE foo\nSUBROUTINE bar\nEND SUBROUTINE bar')
    # Subroutine with an alternate-return dummy argument '*'; spacing
    # around the parenthesis is normalised on output.
    reader = get_reader('''\
subroutine foo (*)
end subroutine foo
''')
    a = cls(reader)
    assert isinstance(a, cls),`a`
    assert_equal(str(a), 'SUBROUTINE foo(*)\nEND SUBROUTINE foo')
def test_Specification_Part(): # R204
reader = get_reader('''\
integer a''')
cls = Specification_Part
a = cls(reader)
assert isinstance(a, cls),`a`
assert_equal(str(a),'INTEGER :: a')
assert_equal(repr(a), "Specification_Part(Type_Declaration_Stmt(Intrinsic_Type_Spec('INTEGER', None), None, Entity_Decl(Name('a'), None, None, None)))")
a = cls(get_reader('''
type a
end type a
type b
end type b
'''))
assert isinstance(a, cls),`a`
assert_equal(str(a),'TYPE :: a\nEND TYPE a\nTYPE :: b\nEND TYPE b')
###############################################################################
############################### SECTION 3 ####################################
###############################################################################
def test_Name(): # R304
    """A bare identifier parses as Name through every entry point tried
    here -- Designator, Constant and Expr all yield a Name instance."""
    a = Name('a')
    assert isinstance(a,Name),`a`
    a = Name('a2')
    assert isinstance(a,Name),`a`
    a = Designator('a')
    assert isinstance(a,Name),`a`
    a = Constant('a')
    assert isinstance(a,Name),`a`
    a = Expr('a')
    assert isinstance(a,Name),`a`
def test_Literal_Constant(): # R305
cls = Constant
a = cls('.false.')
assert isinstance(a, Logical_Literal_Constant), `a`
assert str(a)=='.FALSE.'
def test_Literal_Constant(): # R306
cls = Literal_Constant
a = cls('.false.')
assert isinstance(a, Logical_Literal_Constant), `a`
assert str(a)=='.FALSE.'
###############################################################################
############################### SECTION 4 ####################################
###############################################################################
def test_Type_Param_Value(): # 402
cls = Type_Param_Value
a = cls('*')
assert isinstance(a,cls),`a`
assert_equal(str(a),'*')
assert_equal(repr(a),"Type_Param_Value('*')")
a = cls(':')
assert isinstance(a,cls),`a`
assert_equal(str(a),':')
a = cls('1+2')
assert isinstance(a,Level_2_Expr),`a`
assert_equal(str(a),'1 + 2')
def test_Intrinsic_Type_Spec(): # R403
cls = Intrinsic_Type_Spec
a = cls('INTEGER')
assert isinstance(a,cls),`a`
assert_equal(str(a),'INTEGER')
assert_equal(repr(a), "Intrinsic_Type_Spec('INTEGER', None)")
a = cls('Integer*2')
assert isinstance(a,cls),`a`
assert_equal(str(a),'INTEGER*2')
a = cls('real*2')
assert isinstance(a,cls),`a`
assert_equal(str(a),'REAL*2')
a = cls('logical*2')
assert isinstance(a,cls),`a`
assert_equal(str(a),'LOGICAL*2')
a = cls('complex*2')
assert isinstance(a,cls),`a`
assert_equal(str(a),'COMPLEX*2')
a = cls('character*2')
assert isinstance(a,cls),`a`
assert_equal(str(a),'CHARACTER*2')
a = cls('double complex')
assert isinstance(a,cls),`a`
assert_equal(str(a),'DOUBLE COMPLEX')
a = cls('double precision')
assert isinstance(a,cls),`a`
assert_equal(str(a),'DOUBLE PRECISION')
def test_Kind_Selector(): # R404
cls = Kind_Selector
a = cls('(1)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(KIND = 1)')
assert_equal(repr(a),"Kind_Selector('(', Int_Literal_Constant('1', None), ')')")
a = cls('(kind=1+2)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(KIND = 1 + 2)')
a = cls('* 1')
assert isinstance(a,cls),`a`
assert_equal(str(a),'*1')
def test_Signed_Int_Literal_Constant(): # R405
cls = Signed_Int_Literal_Constant
a = cls('1')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1')
assert_equal(repr(a),"%s('1', None)" % (cls.__name__))
a = cls('+ 21_2')
assert isinstance(a,cls),`a`
assert_equal(str(a),'+21_2')
assert_equal(repr(a),"%s('+21', '2')" % (cls.__name__))
a = cls('-21_SHORT')
assert isinstance(a,cls),`a`
assert_equal(str(a),'-21_SHORT')
a = cls('21_short')
assert isinstance(a,cls),`a`
assert_equal(str(a),'21_short')
a = cls('+1976354279568241_8')
assert isinstance(a,cls),`a`
assert_equal(str(a),'+1976354279568241_8')
def test_Int_Literal_Constant(): # R406
cls = Int_Literal_Constant
a = cls('1')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1')
assert_equal(repr(a),"%s('1', None)" % (cls.__name__))
a = cls('21_2')
assert isinstance(a,cls),`a`
assert_equal(str(a),'21_2')
assert_equal(repr(a),"%s('21', '2')" % (cls.__name__))
a = cls('21_SHORT')
assert isinstance(a,cls),`a`
assert_equal(str(a),'21_SHORT')
a = cls('21_short')
assert isinstance(a,cls),`a`
assert_equal(str(a),'21_short')
a = cls('1976354279568241_8')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1976354279568241_8')
def test_Binary_Constant(): # R412
cls = Boz_Literal_Constant
bcls = Binary_Constant
a = cls('B"01"')
assert isinstance(a,bcls),`a`
assert_equal(str(a),'B"01"')
assert_equal(repr(a),"%s('B\"01\"')" % (bcls.__name__))
def test_Octal_Constant(): # R413
cls = Boz_Literal_Constant
ocls = Octal_Constant
a = cls('O"017"')
assert isinstance(a,ocls),`a`
assert_equal(str(a),'O"017"')
assert_equal(repr(a),"%s('O\"017\"')" % (ocls.__name__))
def test_Hex_Constant(): # R414
cls = Boz_Literal_Constant
zcls = Hex_Constant
a = cls('Z"01A"')
assert isinstance(a,zcls),`a`
assert_equal(str(a),'Z"01A"')
assert_equal(repr(a),"%s('Z\"01A\"')" % (zcls.__name__))
def test_Signed_Real_Literal_Constant(): # R416
cls = Signed_Real_Literal_Constant
a = cls('12.78')
assert isinstance(a,cls),`a`
assert_equal(str(a),'12.78')
assert_equal(repr(a),"%s('12.78', None)" % (cls.__name__))
a = cls('+12.78_8')
assert isinstance(a,cls),`a`
assert_equal(str(a),'+12.78_8')
assert_equal(repr(a),"%s('+12.78', '8')" % (cls.__name__))
a = cls('- 12.')
assert isinstance(a,cls),`a`
assert_equal(str(a),'-12.')
a = cls('1.6E3')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1.6E3')
a = cls('+1.6E3_8')
assert isinstance(a,cls),`a`
assert_equal(str(a),'+1.6E3_8')
a = cls('1.6D3')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1.6D3')
a = cls('-1.6E-3')
assert isinstance(a,cls),`a`
assert_equal(str(a),'-1.6E-3')
a = cls('1.6E+3')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1.6E+3')
a = cls('3E4')
assert isinstance(a,cls),`a`
assert_equal(str(a),'3E4')
a = cls('.123')
assert isinstance(a,cls),`a`
assert_equal(str(a),'.123')
a = cls('+1.6E-3')
assert isinstance(a,cls),`a`
assert_equal(str(a),'+1.6E-3')
a = cls('10.9E7_QUAD')
assert isinstance(a,cls),`a`
assert_equal(str(a),'10.9E7_QUAD')
a = cls('-10.9e-17_quad')
assert isinstance(a,cls),`a`
assert_equal(str(a),'-10.9E-17_quad')
def test_Real_Literal_Constant(): # R417
cls = Real_Literal_Constant
a = cls('12.78')
assert isinstance(a,cls),`a`
assert_equal(str(a),'12.78')
assert_equal(repr(a),"%s('12.78', None)" % (cls.__name__))
a = cls('12.78_8')
assert isinstance(a,cls),`a`
assert_equal(str(a),'12.78_8')
assert_equal(repr(a),"%s('12.78', '8')" % (cls.__name__))
a = cls('12.')
assert isinstance(a,cls),`a`
assert_equal(str(a),'12.')
a = cls('1.6E3')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1.6E3')
a = cls('1.6E3_8')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1.6E3_8')
a = cls('1.6D3')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1.6D3')
a = cls('1.6E-3')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1.6E-3')
a = cls('1.6E+3')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1.6E+3')
a = cls('3E4')
assert isinstance(a,cls),`a`
assert_equal(str(a),'3E4')
a = cls('.123')
assert isinstance(a,cls),`a`
assert_equal(str(a),'.123')
a = cls('1.6E-3')
assert isinstance(a,cls),`a`
assert_equal(str(a),'1.6E-3')
a = cls('10.9E7_QUAD')
assert isinstance(a,cls),`a`
assert_equal(str(a),'10.9E7_QUAD')
a = cls('10.9e-17_quad')
assert isinstance(a,cls),`a`
assert_equal(str(a),'10.9E-17_quad')
a = cls('0.0D+0')
assert isinstance(a,cls),`a`
assert_equal(str(a),'0.0D+0')
def test_Char_Selector(): # R424
cls = Char_Selector
a = cls('(len=2, kind=8)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(LEN = 2, KIND = 8)')
assert_equal(repr(a),"Char_Selector(Int_Literal_Constant('2', None), Int_Literal_Constant('8', None))")
a = cls('(2, kind=8)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(LEN = 2, KIND = 8)')
a = cls('(2, 8)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(LEN = 2, KIND = 8)')
a = cls('(kind=8)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(KIND = 8)')
a = cls('(kind=8,len=2)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(LEN = 2, KIND = 8)')
def test_Complex_Literal_Constant(): # R421
cls = Complex_Literal_Constant
a = cls('(1.0, -1.0)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(1.0, -1.0)')
assert_equal(repr(a),"Complex_Literal_Constant(Signed_Real_Literal_Constant('1.0', None), Signed_Real_Literal_Constant('-1.0', None))")
a = cls('(3,3.1E6)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(3, 3.1E6)')
a = cls('(4.0_4, 3.6E7_8)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(4.0_4, 3.6E7_8)')
a = cls('( 0., PI)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(0., PI)')
def test_Type_Name(): # C424
cls = Type_Name
a = cls('a')
assert isinstance(a,cls),`a`
assert_equal(str(a),'a')
assert_equal(repr(a),"Type_Name('a')")
assertRaises(NoMatchError,cls,'integer')
assertRaises(NoMatchError,cls,'doubleprecision')
def test_Length_Selector(): # R425
cls = Length_Selector
a = cls('( len = *)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(LEN = *)')
assert_equal(repr(a),"Length_Selector('(', Type_Param_Value('*'), ')')")
a = cls('*2,')
assert isinstance(a,cls),`a`
assert_equal(str(a),'*2')
def test_Char_Length(): # R426
cls = Char_Length
a = cls('(1)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(1)')
assert_equal(repr(a),"Char_Length('(', Int_Literal_Constant('1', None), ')')")
a = cls('1')
assert isinstance(a,Int_Literal_Constant),`a`
assert_equal(str(a),'1')
a = cls('(*)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(*)')
a = cls('(:)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(:)')
def test_Char_Literal_Constant(): # R427
cls = Char_Literal_Constant
a = cls('NIH_"DO"')
assert isinstance(a,cls),`a`
assert_equal(str(a),'NIH_"DO"')
assert_equal(repr(a),'Char_Literal_Constant(\'"DO"\', \'NIH\')')
a = cls("'DO'")
assert isinstance(a,cls),`a`
assert_equal(str(a),"'DO'")
assert_equal(repr(a),'Char_Literal_Constant("\'DO\'", None)')
a = cls("'DON''T'")
assert isinstance(a,cls),`a`
assert_equal(str(a),"'DON''T'")
a = cls('"DON\'T"')
assert isinstance(a,cls),`a`
assert_equal(str(a),'"DON\'T"')
a = cls('""')
assert isinstance(a,cls),`a`
assert_equal(str(a),'""')
a = cls("''")
assert isinstance(a,cls),`a`
assert_equal(str(a),"''")
a = cls('"hey ha(ada)\t"')
assert isinstance(a,cls),`a`
assert_equal(str(a),'"hey ha(ada)\t"')
def test_Logical_Literal_Constant(): # R428
cls = Logical_Literal_Constant
a = cls('.TRUE.')
assert isinstance(a,cls),`a`
assert_equal(str(a),'.TRUE.')
assert_equal(repr(a),"%s('.TRUE.', None)" % (cls.__name__))
a = cls('.True.')
assert isinstance(a,cls),`a`
assert_equal(str(a),'.TRUE.')
a = cls('.FALSE.')
assert isinstance(a,cls),`a`
assert_equal(str(a),'.FALSE.')
a = cls('.false.')
assert isinstance(a,cls),`a`
assert_equal(str(a),'.FALSE.')
a = cls('.TRUE._HA')
assert isinstance(a,cls),`a`
assert_equal(str(a),'.TRUE._HA')
def test_Derived_Type_Stmt(): # R430
cls = Derived_Type_Stmt
a = cls('type a')
assert isinstance(a, cls),`a`
assert_equal(str(a),'TYPE :: a')
assert_equal(repr(a),"Derived_Type_Stmt(None, Type_Name('a'), None)")
a = cls('type ::a(b,c)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'TYPE :: a(b, c)')
a = cls('type, private, abstract::a(b,c)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'TYPE, PRIVATE, ABSTRACT :: a(b, c)')
def test_Type_Name(): # C423
cls = Type_Name
a = cls('a')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a')
assert_equal(repr(a),"Type_Name('a')")
def test_Type_Attr_Spec(): # R431
cls = Type_Attr_Spec
a = cls('abstract')
assert isinstance(a, cls),`a`
assert_equal(str(a),'ABSTRACT')
assert_equal(repr(a),"Type_Attr_Spec('ABSTRACT', None)")
a = cls('bind (c )')
assert isinstance(a, cls),`a`
assert_equal(str(a),'BIND(C)')
a = cls('extends(a)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'EXTENDS(a)')
a = cls('private')
assert isinstance(a, Access_Spec),`a`
assert_equal(str(a),'PRIVATE')
def test_End_Type_Stmt(): # R433
cls = End_Type_Stmt
a = cls('end type')
assert isinstance(a, cls),`a`
assert_equal(str(a),'END TYPE')
assert_equal(repr(a),"End_Type_Stmt('TYPE', None)")
a = cls('end type a')
assert isinstance(a, cls),`a`
assert_equal(str(a),'END TYPE a')
def test_Sequence_Stmt(): # R434
    """A SEQUENCE statement parses and is printed back in upper case."""
    cls = Sequence_Stmt
    a = cls('sequence')
    assert isinstance(a, cls),`a`
    assert_equal(str(a),'SEQUENCE')
    assert_equal(repr(a),"Sequence_Stmt('SEQUENCE')")
def test_Type_Param_Def_Stmt(): # R435
cls = Type_Param_Def_Stmt
a = cls('integer ,kind :: a')
assert isinstance(a, cls),`a`
assert_equal(str(a),'INTEGER, KIND :: a')
assert_equal(repr(a),"Type_Param_Def_Stmt(None, Type_Param_Attr_Spec('KIND'), Name('a'))")
a = cls('integer*2 ,len :: a=3, b=2+c')
assert isinstance(a, cls),`a`
assert_equal(str(a),'INTEGER*2, LEN :: a = 3, b = 2 + c')
def test_Type_Param_Decl(): # R436
cls = Type_Param_Decl
a = cls('a=2')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a = 2')
assert_equal(repr(a),"Type_Param_Decl(Name('a'), '=', Int_Literal_Constant('2', None))")
a = cls('a')
assert isinstance(a, Name),`a`
assert_equal(str(a),'a')
def test_Type_Param_Attr_Spec(): # R437
cls = Type_Param_Attr_Spec
a = cls('kind')
assert isinstance(a, cls),`a`
assert_equal(str(a),'KIND')
assert_equal(repr(a),"Type_Param_Attr_Spec('KIND')")
a = cls('len')
assert isinstance(a, cls),`a`
assert_equal(str(a),'LEN')
def test_Component_Attr_Spec(): # R441
cls = Component_Attr_Spec
a = cls('pointer')
assert isinstance(a, cls),`a`
assert_equal(str(a),'POINTER')
assert_equal(repr(a),"Component_Attr_Spec('POINTER')")
a = cls('allocatable')
assert isinstance(a, cls),`a`
assert_equal(str(a),'ALLOCATABLE')
a = cls('dimension(a)')
assert isinstance(a, Dimension_Component_Attr_Spec),`a`
assert_equal(str(a),'DIMENSION(a)')
a = cls('private')
assert isinstance(a, Access_Spec),`a`
assert_equal(str(a),'PRIVATE')
def test_Component_Decl(): # R442
cls = Component_Decl
a = cls('a(1)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a(1)')
assert_equal(repr(a),"Component_Decl(Name('a'), Explicit_Shape_Spec(None, Int_Literal_Constant('1', None)), None, None)")
a = cls('a(1)*(3)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a(1)*(3)')
a = cls('a(1)*(3) = 2')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a(1)*(3) = 2')
a = cls('a(1) => NULL')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a(1) => NULL')
def test_Proc_Component_Def_Stmt(): # R445
cls = Proc_Component_Def_Stmt
a = cls('procedure(), pointer :: a')
assert isinstance(a, cls),`a`
assert_equal(str(a),'PROCEDURE(), POINTER :: a')
a = cls('procedure(real*8), pointer, pass(n) :: a, b')
assert isinstance(a, cls),`a`
assert_equal(str(a),'PROCEDURE(REAL*8), POINTER, PASS(n) :: a, b')
def test_Type_Bound_Procedure_Part(): # R448
cls = Type_Bound_Procedure_Part
a = cls(get_reader('''
contains
procedure, pass :: length => point_length
'''))
assert isinstance(a, cls),`a`
assert_equal(str(a),'CONTAINS\nPROCEDURE, PASS :: length => point_length')
def test_Proc_Binding_Stmt(): # R450
cls = Proc_Binding_Stmt
a = cls('procedure, pass :: length => point_length')
assert isinstance(a, Specific_Binding),`a`
assert_equal(str(a),'PROCEDURE, PASS :: length => point_length')
def test_Specific_Binding(): # R451
cls = Specific_Binding
a = cls('procedure, pass :: length => point_length')
assert isinstance(a, cls),`a`
assert_equal(str(a),'PROCEDURE, PASS :: length => point_length')
def test_Generic_Binding(): # R452
cls = Generic_Binding
a = cls('generic :: a => b')
assert isinstance(a, cls),`a`
assert_equal(str(a),'GENERIC :: a => b')
a = cls('generic, private :: read(formatted) => b,c')
assert isinstance(a, cls),`a`
assert_equal(str(a),'GENERIC, PRIVATE :: READ(FORMATTED) => b, c')
def test_Final_Binding(): # R454
cls = Final_Binding
a = cls('final a, b')
assert isinstance(a,cls),`a`
assert_equal(str(a),'FINAL :: a, b')
assert_equal(repr(a),"Final_Binding('FINAL', Final_Subroutine_Name_List(',', (Name('a'), Name('b'))))")
a = cls('final::a')
assert isinstance(a,cls),`a`
assert_equal(str(a),'FINAL :: a')
def test_Derived_Type_Spec(): # R455
cls = Derived_Type_Spec
a = cls('a(b)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'a(b)')
assert_equal(repr(a),"Derived_Type_Spec(Type_Name('a'), Name('b'))")
a = cls('a(b,c,g=1)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'a(b, c, g = 1)')
a = cls('a')
assert isinstance(a,Name),`a`
assert_equal(str(a),'a')
a = cls('a()')
assert isinstance(a,cls),`a`
assert_equal(str(a),'a()')
def test_Type_Param_Spec(): # R456
cls = Type_Param_Spec
a = cls('a=1')
assert isinstance(a,cls),`a`
assert_equal(str(a),'a = 1')
assert_equal(repr(a),"Type_Param_Spec(Name('a'), Int_Literal_Constant('1', None))")
a = cls('k=a')
assert isinstance(a,cls),`a`
assert_equal(str(a),'k = a')
a = cls('k=:')
assert isinstance(a,cls),`a`
assert_equal(str(a),'k = :')
def test_Type_Param_Spec_List(): # R456-list
cls = Type_Param_Spec_List
a = cls('a,b')
assert isinstance(a,cls),`a`
assert_equal(str(a),'a, b')
assert_equal(repr(a),"Type_Param_Spec_List(',', (Name('a'), Name('b')))")
a = cls('a')
assert isinstance(a,Name),`a`
a = cls('k=a,c,g=1')
assert isinstance(a,cls),`a`
assert_equal(str(a),'k = a, c, g = 1')
def test_Structure_Constructor_2(): # R457.b
cls = Structure_Constructor_2
a = cls('k=a')
assert isinstance(a,cls),`a`
assert_equal(str(a),'k = a')
assert_equal(repr(a),"Structure_Constructor_2(Name('k'), Name('a'))")
a = cls('a')
assert isinstance(a,Name),`a`
assert_equal(str(a),'a')
def test_Structure_Constructor(): # R457
cls = Structure_Constructor
a = cls('t()')
assert isinstance(a,cls),`a`
assert_equal(str(a),'t()')
assert_equal(repr(a),"Structure_Constructor(Type_Name('t'), None)")
a = cls('t(s=1, a)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'t(s = 1, a)')
a = cls('a=k')
assert isinstance(a,Structure_Constructor_2),`a`
assert_equal(str(a),'a = k')
assert_equal(repr(a),"Structure_Constructor_2(Name('a'), Name('k'))")
a = cls('a')
assert isinstance(a,Name),`a`
assert_equal(str(a),'a')
def test_Component_Spec(): # R458
cls = Component_Spec
a = cls('k=a')
assert isinstance(a,cls),`a`
assert_equal(str(a),'k = a')
assert_equal(repr(a),"Component_Spec(Name('k'), Name('a'))")
a = cls('a')
assert isinstance(a,Name),`a`
assert_equal(str(a),'a')
a = cls('a % b')
assert isinstance(a, Proc_Component_Ref),`a`
assert_equal(str(a),'a % b')
a = cls('s =a % b')
assert isinstance(a, Component_Spec),`a`
assert_equal(str(a),'s = a % b')
def test_Component_Spec_List(): # R458-list
cls = Component_Spec_List
a = cls('k=a, b')
assert isinstance(a,cls),`a`
assert_equal(str(a),'k = a, b')
assert_equal(repr(a),"Component_Spec_List(',', (Component_Spec(Name('k'), Name('a')), Name('b')))")
a = cls('k=a, c')
assert isinstance(a,cls),`a`
assert_equal(str(a),'k = a, c')
def test_Enum_Def(): # R460
cls = Enum_Def
a = cls(get_reader('''
enum, bind(c)
enumerator :: red = 4, blue = 9
enumerator yellow
end enum
'''))
assert isinstance(a,cls),`a`
assert_equal(str(a),'ENUM, BIND(C)\n ENUMERATOR :: red = 4, blue = 9\n ENUMERATOR :: yellow\nEND ENUM')
def test_Enum_Def_Stmt(): # R461
cls = Enum_Def_Stmt
a = cls('enum, bind(c)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'ENUM, BIND(C)')
def test_Array_Constructor(): # R465
cls = Array_Constructor
a = cls('(/a/)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(/a/)')
assert_equal(repr(a),"Array_Constructor('(/', Name('a'), '/)')")
a = cls('[a]')
assert isinstance(a,cls),`a`
assert_equal(str(a),'[a]')
assert_equal(repr(a),"Array_Constructor('[', Name('a'), ']')")
a = cls('[integer::a]')
assert isinstance(a,cls),`a`
assert_equal(str(a),'[INTEGER :: a]')
a = cls('[integer::a,b]')
assert isinstance(a,cls),`a`
assert_equal(str(a),'[INTEGER :: a, b]')
def test_Ac_Spec(): # R466
cls = Ac_Spec
a = cls('integer ::')
assert isinstance(a,cls),`a`
assert_equal(str(a),'INTEGER ::')
assert_equal(repr(a),"Ac_Spec(Intrinsic_Type_Spec('INTEGER', None), None)")
a = cls('integer :: a,b')
assert isinstance(a,cls),`a`
assert_equal(str(a),'INTEGER :: a, b')
a = cls('a,b')
assert isinstance(a,Ac_Value_List),`a`
assert_equal(str(a),'a, b')
a = cls('integer :: a, (a, b, n = 1, 5)')
assert isinstance(a,cls),`a`
assert_equal(str(a),'INTEGER :: a, (a, b, n = 1, 5)')
def test_Ac_Value_List(): # R469-list
cls = Ac_Value_List
a = cls('a, b')
assert isinstance(a,cls),`a`
assert_equal(str(a),'a, b')
assert_equal(repr(a),"Ac_Value_List(',', (Name('a'), Name('b')))")
a = cls('a')
assert isinstance(a,Name),`a`
assert_equal(str(a),'a')
def test_Ac_Implied_Do(): # R470
cls = Ac_Implied_Do
a = cls('( a, b, n = 1, 5 )')
assert isinstance(a,cls),`a`
assert_equal(str(a),'(a, b, n = 1, 5)')
assert_equal(repr(a),"Ac_Implied_Do(Ac_Value_List(',', (Name('a'), Name('b'))), Ac_Implied_Do_Control(Name('n'), [Int_Literal_Constant('1', None), Int_Literal_Constant('5', None)]))")
def test_Ac_Implied_Do_Control(): # R471
cls = Ac_Implied_Do_Control
a = cls('n = 3, 5')
assert isinstance(a,cls),`a`
assert_equal(str(a),'n = 3, 5')
assert_equal(repr(a),"Ac_Implied_Do_Control(Name('n'), [Int_Literal_Constant('3', None), Int_Literal_Constant('5', None)])")
a = cls('n = 3+1, 5, 1')
assert isinstance(a,cls),`a`
assert_equal(str(a),'n = 3 + 1, 5, 1')
###############################################################################
############################### SECTION 5 ####################################
###############################################################################
def test_Type_Declaration_Stmt(): # R501
cls = Type_Declaration_Stmt
a = cls('integer a')
assert isinstance(a, cls),`a`
assert_equal(str(a), 'INTEGER :: a')
assert_equal(repr(a), "Type_Declaration_Stmt(Intrinsic_Type_Spec('INTEGER', None), None, Entity_Decl(Name('a'), None, None, None))")
a = cls('integer ,dimension(2):: a*3')
assert isinstance(a, cls),`a`
assert_equal(str(a), 'INTEGER, DIMENSION(2) :: a*3')
a = cls('real a')
assert isinstance(a, cls),`a`
assert_equal(str(a), 'REAL :: a')
assert_equal(repr(a), "Type_Declaration_Stmt(Intrinsic_Type_Spec('REAL', None), None, Entity_Decl(Name('a'), None, None, None))")
a = cls('REAL A( LDA, * ), B( LDB, * )')
assert isinstance(a, cls),`a`
assert_equal(str(a), 'REAL :: A(LDA, *), B(LDB, *)')
a = cls('DOUBLE PRECISION ALPHA, BETA')
assert isinstance(a, cls),`a`
assert_equal(str(a), 'DOUBLE PRECISION :: ALPHA, BETA')
a = cls('logical,parameter:: T=.true.')
assert isinstance(a, cls),`a`
assert_equal(str(a), 'LOGICAL, PARAMETER :: T = .TRUE.')
a = cls('character(n),private:: x(n)')
assert isinstance(a, cls),`a`
assert_equal(str(a), 'CHARACTER(LEN = n), PRIVATE :: x(n)')
a = cls('character(lenmax),private:: x(n)')
assert isinstance(a, cls),`a`
assert_equal(str(a), 'CHARACTER(LEN = lenmax), PRIVATE :: x(n)')
def test_Declaration_Type_Spec(): # R502
cls = Declaration_Type_Spec
a = cls('Integer*2')
assert isinstance(a, Intrinsic_Type_Spec),`a`
assert_equal(str(a), 'INTEGER*2')
a = cls('type(foo)')
assert isinstance(a, cls),`a`
assert_equal(str(a), 'TYPE(foo)')
assert_equal(repr(a), "Declaration_Type_Spec('TYPE', Type_Name('foo'))")
def test_Attr_Spec(): # R503
cls = Attr_Spec
a = cls('allocatable')
assert isinstance(a, cls),`a`
assert_equal(str(a), 'ALLOCATABLE')
a = cls('dimension(a)')
assert isinstance(a, Dimension_Attr_Spec),`a`
assert_equal(str(a),'DIMENSION(a)')
def test_Dimension_Attr_Spec(): # R503.d
cls = Dimension_Attr_Spec
a = cls('dimension(a)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'DIMENSION(a)')
assert_equal(repr(a),"Dimension_Attr_Spec('DIMENSION', Explicit_Shape_Spec(None, Name('a')))")
def test_Intent_Attr_Spec(): # R503.f
cls = Intent_Attr_Spec
a = cls('intent(in)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'INTENT(IN)')
assert_equal(repr(a),"Intent_Attr_Spec('INTENT', Intent_Spec('IN'))")
def test_Entity_Decl(): # 504
cls = Entity_Decl
a = cls('a(1)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a(1)')
assert_equal(repr(a),"Entity_Decl(Name('a'), Explicit_Shape_Spec(None, Int_Literal_Constant('1', None)), None, None)")
a = cls('a(1)*(3)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a(1)*(3)')
a = cls('a(1)*(3) = 2')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a(1)*(3) = 2')
a = cls('a = 2')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a = 2')
a = cls('a=2')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a = 2')
a = cls('a = "abc "')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a = "abc "')
a = cls('a = .true.')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a = .TRUE.')
def test_Target_Entity_Decl():
cls = Target_Entity_Decl
a = cls('a(1)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a(1)')
assert_equal(repr(a),"Target_Entity_Decl(Name('a'), Explicit_Shape_Spec(None, Int_Literal_Constant('1', None)), None, None)")
def test_Access_Spec(): # R508
cls = Access_Spec
a = cls('private')
assert isinstance(a, cls),`a`
assert_equal(str(a),'PRIVATE')
assert_equal(repr(a),"Access_Spec('PRIVATE')")
a = cls('public')
assert isinstance(a, cls),`a`
assert_equal(str(a),'PUBLIC')
def test_Language_Binding_Spec(): # R509
cls = Language_Binding_Spec
a = cls('bind(c)')
assert isinstance(a, cls),`a`
assert_equal(str(a),'BIND(C)')
assert_equal(repr(a),'Language_Binding_Spec(None)')
a = cls('bind(c, name="hey")')
assert isinstance(a, cls),`a`
assert_equal(str(a),'BIND(C, NAME = "hey")')
def test_Explicit_Shape_Spec(): # R511
cls = Explicit_Shape_Spec
a = cls('a:b')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a : b')
assert_equal(repr(a),"Explicit_Shape_Spec(Name('a'), Name('b'))")
a = cls('a')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a')
def test_Upper_Bound(): # R513
cls = Upper_Bound
a = cls('a')
assert isinstance(a, Name),`a`
assert_equal(str(a),'a')
assertRaises(NoMatchError,cls,'*')
def test_Assumed_Shape_Spec(): # R514
cls = Assumed_Shape_Spec
a = cls(':')
assert isinstance(a, cls),`a`
assert_equal(str(a),':')
assert_equal(repr(a),'Assumed_Shape_Spec(None, None)')
a = cls('a :')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a :')
def test_Deferred_Shape_Spec(): # R515
cls = Deferred_Shape_Spec
a = cls(':')
assert isinstance(a, cls),`a`
assert_equal(str(a),':')
assert_equal(repr(a),'Deferred_Shape_Spec(None, None)')
def test_Assumed_Size_Spec(): # R516
cls = Assumed_Size_Spec
a = cls('*')
assert isinstance(a, cls),`a`
assert_equal(str(a),'*')
assert_equal(repr(a),'Assumed_Size_Spec(None, None)')
a = cls('1:*')
assert isinstance(a, cls),`a`
assert_equal(str(a),'1 : *')
a = cls('a,1:*')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a, 1 : *')
a = cls('a:b,1:*')
assert isinstance(a, cls),`a`
assert_equal(str(a),'a : b, 1 : *')
def test_Access_Stmt(): # R518
cls = Access_Stmt
a = cls('private')
assert isinstance(a, cls),`a`
assert_equal(str(a),'PRIVATE')
assert_equal(repr(a),"Access_Stmt('PRIVATE', None)")
a = cls('public a,b')
assert isinstance(a, cls),`a`
assert_equal(str(a),'PUBLIC :: a, b')
a = cls('public ::a')
assert isinstance(a, cls),`a`
assert_equal(str(a),'PUBLIC :: a')
def test_Data_Stmt(): #R524
    """Parse DATA statements and check their canonical string form."""
    cls = Data_Stmt
    a = cls('DATA YOURNAME % AGE, YOURNAME % NAME / 35, "FRED BROWN" /')
    # repr(a) (not Py2-only backticks) so assertion messages work on Python 3 too.
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DATA YOURNAME % AGE, YOURNAME % NAME / 35, "FRED BROWN" /')
    a = cls('DATA NAME / "JOHN DOE" / MILES / 10 * 0 /')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DATA NAME / "JOHN DOE" /, MILES / 10 * 0 /')
    a = cls('DATA MYNAME / PERSON (21, \'JOHN SMITH\') /')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DATA MYNAME / PERSON(21, \'JOHN SMITH\') /')

def test_Data_Stmt_Set(): #R525
    """Parse a single data-stmt-set."""
    cls = Data_Stmt_Set
    a = cls('MILES / 10 * "2/3" /')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'MILES / 10 * "2/3" /')

def test_Data_Implied_Do(): # R527
    """Parse a (nested) data implied-do loop."""
    cls = Data_Implied_Do
    a = cls('((SKEW (K, J), J = 1, K), K = 1, 100)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '((SKEW(K, J), J = 1, K), K = 1, 100)')

def test_Parameter_Stmt(): # R538
    """Parse PARAMETER statements; also pins the node repr."""
    cls = Parameter_Stmt
    a = cls('parameter(a=1)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'PARAMETER(a = 1)')
    assert_equal(repr(a), "Parameter_Stmt('PARAMETER', Named_Constant_Def(Name('a'), Int_Literal_Constant('1', None)))")
    a = cls('parameter(a=1, b=a+2)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'PARAMETER(a = 1, b = a + 2)')
    a = cls('PARAMETER ( ONE = 1.0D+0, ZERO = 0.0D+0 )')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'PARAMETER(ONE = 1.0D+0, ZERO = 0.0D+0)')

def test_Named_Constant_Def(): # R539
    """Parse a named-constant definition."""
    cls = Named_Constant_Def
    a = cls('a=1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a = 1')
    assert_equal(repr(a), "Named_Constant_Def(Name('a'), Int_Literal_Constant('1', None))")

def test_Pointer_Decl(): # R541
    """Parse pointer declarations with deferred-shape specs."""
    cls = Pointer_Decl
    a = cls('a(:)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a(:)')
    assert_equal(repr(a), "Pointer_Decl(Name('a'), Deferred_Shape_Spec(None, None))")
    a = cls('a(:,:)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a(:, :)')

def test_Target_Stmt(): # R546
    """Parse TARGET statements, with and without '::'."""
    cls = Target_Stmt
    a = cls('target a, b(1000, 1000)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'TARGET :: a, b(1000, 1000)')
    a = cls('target :: a, c')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'TARGET :: a, c')

def test_Value_Stmt(): # R547
    """Parse VALUE statements, with and without '::'."""
    cls = Value_Stmt
    a = cls('value a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'VALUE :: a')
    a = cls('value:: a, c')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'VALUE :: a, c')

def test_Volatile_Stmt(): # R548
    """Parse VOLATILE statements, with and without '::'."""
    cls = Volatile_Stmt
    a = cls('volatile a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'VOLATILE :: a')
    a = cls('volatile :: a, c')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'VOLATILE :: a, c')

def test_Implicit_Stmt(): # R549
    """Parse IMPLICIT NONE and implicit-spec lists."""
    cls = Implicit_Stmt
    a = cls('implicitnone')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'IMPLICIT NONE')
    assert_equal(repr(a), "Implicit_Stmt('NONE')")
    a = cls('implicit real(a-d), double precision(r-t,x), type(a) (y-z)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'IMPLICIT REAL(A - D), DOUBLE PRECISION(R - T, X), TYPE(a)(Y - Z)')

def test_Implicit_Spec(): # R550
    """Parse a single implicit-spec."""
    cls = Implicit_Spec
    a = cls('integer (a-z)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'INTEGER(A - Z)')
    assert_equal(repr(a), "Implicit_Spec(Intrinsic_Type_Spec('INTEGER', None), Letter_Spec('A', 'Z'))")
    a = cls('double complex (r,d-g)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DOUBLE COMPLEX(R, D - G)')

def test_Letter_Spec(): # R551
    """Parse letter-spec ranges and single letters."""
    cls = Letter_Spec
    a = cls('a-z')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'A - Z')
    assert_equal(repr(a), "Letter_Spec('A', 'Z')")
    a = cls('d')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'D')

def test_Namelist_Stmt(): # R552
    """Parse NAMELIST statements with one or several groups."""
    cls = Namelist_Stmt
    a = cls('namelist / nlist / a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'NAMELIST /nlist/ a')
    a = cls('namelist / nlist / a, /mlist/ b,c /klist/ d,e')
    assert_equal(str(a), 'NAMELIST /nlist/ a, /mlist/ b, c, /klist/ d, e')

def test_Equivalence_Stmt(): # R554
    """Parse EQUIVALENCE statements and equivalence sets."""
    cls = Equivalence_Stmt
    a = cls('equivalence (a, b ,z)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'EQUIVALENCE(a, b, z)')
    assert_equal(repr(a), "Equivalence_Stmt('EQUIVALENCE', Equivalence_Set(Name('a'), Equivalence_Object_List(',', (Name('b'), Name('z')))))")
    a = cls('equivalence (a, b ,z),(b,l)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'EQUIVALENCE(a, b, z), (b, l)')

def test_Common_Stmt(): # R557
    """Parse COMMON statements: blank and named common blocks."""
    cls = Common_Stmt
    a = cls('common a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'COMMON // a')
    assert_equal(repr(a), "Common_Stmt([(None, Name('a'))])")
    a = cls('common // a,b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'COMMON // a, b')
    a = cls('common /name/ a,b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'COMMON /name/ a, b')
    a = cls('common /name/ a,b(4,5) // c, /ljuks/ g(2)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'COMMON /name/ a, b(4, 5) // c /ljuks/ g(2)')

def test_Common_Block_Object(): # R558
    """Parse a common-block object; a bare name degrades to Name."""
    cls = Common_Block_Object
    a = cls('a(2)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a(2)')
    assert_equal(repr(a), "Common_Block_Object(Name('a'), Explicit_Shape_Spec(None, Int_Literal_Constant('2', None)))")
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a), 'a')
###############################################################################
############################### SECTION 6 ####################################
###############################################################################
def test_Substring(): # R609
    """Parse substrings with substring-range designators."""
    cls = Substring
    a = cls('a(:)')
    # repr(a) (not Py2-only backticks) so assertion messages work on Python 3 too.
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a(:)')
    assert_equal(repr(a), "Substring(Name('a'), Substring_Range(None, None))")
    a = cls('a(1:2)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a(1 : 2)')
    assert_equal(repr(a), "Substring(Name('a'), Substring_Range(Int_Literal_Constant('1', None), Int_Literal_Constant('2', None)))")

def test_Substring_Range(): # R611
    """Parse substring-range with open/closed bounds and expressions."""
    cls = Substring_Range
    a = cls(':')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), ':')
    assert_equal(repr(a), "Substring_Range(None, None)")
    a = cls('a+1:')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a + 1 :')
    a = cls('a+1: c/foo(g)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a + 1 : c / foo(g)')
    a = cls('a:b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a : b')
    assert_equal(repr(a), "Substring_Range(Name('a'), Name('b'))")
    a = cls('a:')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a :')
    a = cls(':b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), ': b')

def test_Data_Ref(): # R612
    """Parse data-refs; a bare name degrades to Name."""
    cls = Data_Ref
    a = cls('a%b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a % b')
    assert_equal(repr(a), "Data_Ref('%', (Name('a'), Name('b')))")
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a), 'a')

def test_Part_Ref(): # R613
    """A bare part-ref degrades to Name."""
    cls = Part_Ref
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a), 'a')

def test_Type_Param_Inquiry(): # R615
    """Parse a type-parameter inquiry (a % b)."""
    cls = Type_Param_Inquiry
    a = cls('a % b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a % b')
    assert_equal(repr(a), "Type_Param_Inquiry(Name('a'), '%', Name('b'))")

def test_Array_Section(): # R617
    """Parse array sections with a substring-range selector."""
    cls = Array_Section
    a = cls('a(:)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a(:)')
    assert_equal(repr(a), "Array_Section(Name('a'), Substring_Range(None, None))")
    a = cls('a(2:)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a(2 :)')

def test_Section_Subscript(): # R619
    """A section subscript is a triplet or degrades to Name."""
    cls = Section_Subscript
    a = cls('1:2')
    assert isinstance(a, Subscript_Triplet), repr(a)
    assert_equal(str(a), '1 : 2')
    a = cls('zzz')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a), 'zzz')

def test_Section_Subscript_List(): # R619-list
    """Parse section-subscript lists; a lone triplet is not wrapped."""
    cls = Section_Subscript_List
    a = cls('a,2')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a, 2')
    assert_equal(repr(a), "Section_Subscript_List(',', (Name('a'), Int_Literal_Constant('2', None)))")
    a = cls('::1')
    assert isinstance(a, Subscript_Triplet), repr(a)
    assert_equal(str(a), ': : 1')
    a = cls('::1, 3')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), ': : 1, 3')

def test_Subscript_Triplet(): # R620
    """Parse subscript triplets with optional bounds and stride."""
    cls = Subscript_Triplet
    a = cls('a:b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a : b')
    assert_equal(repr(a), "Subscript_Triplet(Name('a'), Name('b'), None)")
    a = cls('a:b:1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a : b : 1')
    a = cls(':')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), ':')
    a = cls('::5')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), ': : 5')
    a = cls(':5')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), ': 5')
    a = cls('a+1 :')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a + 1 :')

def test_Allocate_Stmt(): # R623
    """Parse ALLOCATE statements with type-specs and alloc-opts."""
    cls = Allocate_Stmt
    a = cls('allocate(a,b)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'ALLOCATE(a, b)')
    a = cls('allocate(real::a)')
    assert_equal(str(a), 'ALLOCATE(REAL::a)')
    a = cls('allocate(real(kind=8)::a, stat=b, source=c//d)')
    assert_equal(str(a), 'ALLOCATE(REAL(KIND = 8)::a, STAT = b, SOURCE = c // d)')

def test_Alloc_Opt(): # R624
    """Parse a single alloc-opt (STAT=...)."""
    cls = Alloc_Opt
    a = cls('stat=a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'STAT = a')
    assert_equal(repr(a), "Alloc_Opt('STAT', Name('a'))")

def test_Nullify_Stmt(): # R633
    """Parse NULLIFY statements."""
    cls = Nullify_Stmt
    a = cls('nullify (a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'NULLIFY(a)')
    assert_equal(repr(a), "Nullify_Stmt('NULLIFY', Name('a'))")
    a = cls('nullify (a,c)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'NULLIFY(a, c)')

def test_Deallocate_Stmt(): # R635
    """Parse DEALLOCATE statements with dealloc-opts."""
    cls = Deallocate_Stmt
    a = cls('deallocate (a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DEALLOCATE(a)')
    a = cls('deallocate (a,stat=b)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DEALLOCATE(a, STAT = b)')
    a = cls('deallocate (a,c,stat=b,errmsg=d)')
    assert_equal(str(a), 'DEALLOCATE(a, c, STAT = b, ERRMSG = d)')
###############################################################################
############################### SECTION 7 ####################################
###############################################################################
def test_Primary(): # R701
    """A primary dispatches to the matching literal/designator class."""
    cls = Primary
    a = cls('a')
    # repr(a) (not Py2-only backticks) so assertion messages work on Python 3 too.
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a), 'a')
    a = cls('(a)')
    assert isinstance(a, Parenthesis), repr(a)
    assert_equal(str(a), '(a)')
    a = cls('1')
    assert isinstance(a, Int_Literal_Constant), repr(a)
    assert_equal(str(a), '1')
    a = cls('1.')
    assert isinstance(a, Real_Literal_Constant), repr(a)
    assert_equal(str(a), '1.')
    a = cls('(1, n)')
    assert isinstance(a, Complex_Literal_Constant), repr(a)
    assert_equal(str(a), '(1, n)')
    a = cls('.true.')
    assert isinstance(a, Logical_Literal_Constant), repr(a)
    assert_equal(str(a), '.TRUE.')
    a = cls('"hey a()c"')
    assert isinstance(a, Char_Literal_Constant), repr(a)
    assert_equal(str(a), '"hey a()c"')
    a = cls('b"0101"')
    assert isinstance(a, Binary_Constant), repr(a)
    assert_equal(str(a), 'B"0101"')
    a = cls('o"0107"')
    assert isinstance(a, Octal_Constant), repr(a)
    assert_equal(str(a), 'O"0107"')
    a = cls('z"a107"')
    assert isinstance(a, Hex_Constant), repr(a)
    assert_equal(str(a), 'Z"A107"')
    a = cls('a % b')
    assert isinstance(a, Data_Ref), repr(a)
    assert_equal(str(a), 'a % b')
    a = cls('a(:)')
    assert isinstance(a, Array_Section), repr(a)
    assert_equal(str(a), 'a(:)')
    a = cls('0.0E-1')
    assert isinstance(a, Real_Literal_Constant), repr(a)
    assert_equal(str(a), '0.0E-1')

def test_Parenthesis(): # R701.h
    """Parse parenthesized (possibly nested) expressions."""
    cls = Parenthesis
    a = cls('(a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '(a)')
    assert_equal(repr(a), "Parenthesis('(', Name('a'), ')')")
    a = cls('(a+1)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '(a + 1)')
    a = cls('((a))')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '((a))')
    a = cls('(a+(a+c))')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '(a + (a + c))')

def test_Level_1_Expr(): # R702
    """Parse defined-unary-op expressions (.HEY. a)."""
    cls = Level_1_Expr
    a = cls('.hey. a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '.HEY. a')
    assert_equal(repr(a), "Level_1_Expr('.HEY.', Name('a'))")
    #assertRaises(NoMatchError,cls,'.not. a')
    a = cls('.false.')
    assert isinstance(a, Logical_Literal_Constant), repr(a)

def test_Mult_Operand(): # R704
    """Parse exponentiation (mult-operand) expressions."""
    cls = Mult_Operand
    a = cls('a**b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a ** b')
    assert_equal(repr(a), "Mult_Operand(Name('a'), '**', Name('b'))")
    a = cls('a**2')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a ** 2')
    a = cls('(a+b)**2')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '(a + b) ** 2')
    a = cls('0.0E-1')
    assert isinstance(a, Real_Literal_Constant), repr(a)
    assert_equal(str(a), '0.0E-1')

def test_Add_Operand(): # R705
    """Parse * and / (add-operand) expressions."""
    cls = Add_Operand
    a = cls('a*b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a * b')
    assert_equal(repr(a), "Add_Operand(Name('a'), '*', Name('b'))")
    a = cls('a/b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a / b')
    a = cls('a**b')
    assert isinstance(a, Mult_Operand), repr(a)
    assert_equal(str(a), 'a ** b')
    a = cls('0.0E-1')
    assert isinstance(a, Real_Literal_Constant), repr(a)
    assert_equal(str(a), '0.0E-1')

def test_Level_2_Expr(): # R706
    """Parse additive expressions, including unary-sign forms."""
    cls = Level_2_Expr
    a = cls('a+b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a + b')
    assert_equal(repr(a), "Level_2_Expr(Name('a'), '+', Name('b'))")
    a = cls('a-b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a - b')
    a = cls('a+b+c')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a + b + c')
    a = cls('+a')
    assert isinstance(a, Level_2_Unary_Expr), repr(a)
    assert_equal(str(a), '+ a')
    a = cls('+1')
    assert isinstance(a, Level_2_Unary_Expr), repr(a)
    assert_equal(str(a), '+ 1')
    a = cls('+a+b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '+ a + b')
    a = cls('0.0E-1')
    assert isinstance(a, Real_Literal_Constant), repr(a)
    assert_equal(str(a), '0.0E-1')

def test_Level_2_Unary_Expr():
    """Parse unary + / - expressions."""
    cls = Level_2_Unary_Expr
    a = cls('+a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '+ a')
    assert_equal(repr(a), "Level_2_Unary_Expr('+', Name('a'))")
    a = cls('-a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '- a')
    a = cls('+1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '+ 1')
    a = cls('0.0E-1')
    assert isinstance(a, Real_Literal_Constant), repr(a)
    assert_equal(str(a), '0.0E-1')

def test_Level_3_Expr(): # R710
    """Parse string concatenation (//) expressions."""
    cls = Level_3_Expr
    a = cls('a//b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a // b')
    assert_equal(repr(a), "Level_3_Expr(Name('a'), '//', Name('b'))")
    a = cls('"a"//"b"')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '"a" // "b"')

def test_Level_4_Expr(): # R712
    """Parse relational expressions in both .OP. and symbolic forms."""
    cls = Level_4_Expr
    a = cls('a.eq.b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a .EQ. b')
    assert_equal(repr(a), "Level_4_Expr(Name('a'), '.EQ.', Name('b'))")
    a = cls('a.ne.b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a .NE. b')
    a = cls('a.lt.b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a .LT. b')
    a = cls('a.gt.b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a .GT. b')
    a = cls('a.ge.b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a .GE. b')
    a = cls('a==b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a == b')
    a = cls('a/=b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a /= b')
    a = cls('a<b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a < b')
    a = cls('a<=b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a <= b')
    a = cls('a>=b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a >= b')
    a = cls('a>b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a > b')

def test_And_Operand(): # R714
    """Parse .NOT. expressions."""
    cls = And_Operand
    a = cls('.not.a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '.NOT. a')
    assert_equal(repr(a), "And_Operand('.NOT.', Name('a'))")

def test_Or_Operand(): # R715
    """Parse .AND. expressions."""
    cls = Or_Operand
    a = cls('a.and.b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a .AND. b')
    assert_equal(repr(a), "Or_Operand(Name('a'), '.AND.', Name('b'))")

def test_Equiv_Operand(): # R716
    """Parse .OR. expressions."""
    cls = Equiv_Operand
    a = cls('a.or.b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a .OR. b')
    assert_equal(repr(a), "Equiv_Operand(Name('a'), '.OR.', Name('b'))")

def test_Level_5_Expr(): # R717
    """Parse .EQV./.NEQV. expressions; .EQ. degrades to Level_4_Expr."""
    cls = Level_5_Expr
    a = cls('a.eqv.b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a .EQV. b')
    assert_equal(repr(a), "Level_5_Expr(Name('a'), '.EQV.', Name('b'))")
    a = cls('a.neqv.b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a .NEQV. b')
    a = cls('a.eq.b')
    assert isinstance(a, Level_4_Expr), repr(a)
    assert_equal(str(a), 'a .EQ. b')

def test_Expr(): # R722
    """Parse general expressions; scalars degrade to literal classes."""
    cls = Expr
    a = cls('a .op. b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a .OP. b')
    assert_equal(repr(a), "Expr(Name('a'), '.OP.', Name('b'))")
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a), 'a')
    a = cls('3.e2')
    assert isinstance(a, Real_Literal_Constant), repr(a)
    a = cls('0.0E-1')
    assert isinstance(a, Real_Literal_Constant), repr(a)
    assert_equal(str(a), '0.0E-1')
    a = cls('123')
    assert isinstance(a, Int_Literal_Constant), repr(a)
    assert_equal(str(a), '123')
    a = cls('.false.')
    assert isinstance(a, Logical_Literal_Constant), repr(a)
    assert_equal(str(a), '.FALSE.')
    assertRaises(NoMatchError, Scalar_Int_Expr, 'a,b')

def test_Logical_Expr(): # R724
    """A compound logical expression matches as an Equiv_Operand."""
    cls = Logical_Expr
    a = cls('(f0 .lt. f1) .and. abs(x1-x0) .gt. abs(x2) .or. .not. root')
    assert isinstance(a, Equiv_Operand), repr(a)
    assert_equal(str(a), '(f0 .LT. f1) .AND. abs(x1 - x0) .GT. abs(x2) .OR. .NOT. root')

def test_Logical_Initialization_Expr(): # R733
    """A logical initialization expression degrades to the literal."""
    cls = Logical_Initialization_Expr
    a = cls('.false.')
    assert isinstance(a, Logical_Literal_Constant), repr(a)
    assert str(a) == '.FALSE.'

def test_Assignment_Stmt(): # R734
    """Parse assignment statements, incl. spaced D-exponent reals."""
    cls = Assignment_Stmt
    a = cls('a = b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a = b')
    assert_equal(repr(a), "Assignment_Stmt(Name('a'), '=', Name('b'))")
    a = cls('a(3:4) = b+c')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a(3 : 4) = b + c')
    a = cls('a%c = b+c')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a % c = b + c')
    a = cls('a = .FALSE.')
    assert isinstance(a, cls), repr(a)
    assert_equal(repr(a), "Assignment_Stmt(Name('a'), '=', Logical_Literal_Constant('.FALSE.', None))")
    a = cls('a(n)(k:m) = 5')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a(n)(k : m) = 5')
    # '1 d - 8' must be recognized as the real literal 1D-8, not arithmetic.
    a = cls('b = a + 1 d - 8')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'b = a + 1D-8')
    a = cls('b = a + 1 d - 8 + 1.1e+3')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'b = a + 1D-8 + 1.1E+3')

def test_Pointer_Assignment_Stmt(): # R735
    """Parse pointer assignments; the tabulated cases round-trip exactly."""
    cls = Pointer_Assignment_Stmt
    a = cls('new_node % left => current_node')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'new_node % left => current_node')
    a = cls('simple_name => target_structure % substruct % component')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'simple_name => target_structure % substruct % component')
    for stmt in '''\
PTR => NULL()
ROW => MAT2D(N, :)
WINDOW => MAT2D(I - 1 : I + 1, J - 1 : J + 1)
POINTER_OBJECT => POINTER_FUNCTION(ARG_1, ARG_2)
EVERY_OTHER => VECTOR(1 : N : 2)
WINDOW2(0 :, 0 :) => MAT2D(ML : MU, NL : NU)
P => BESSEL
STRUCT % COMPONENT => BESSEL'''.split('\n'):
        a = cls(stmt)
        assert isinstance(a, cls), repr(a)
        assert_equal(str(a), stmt)

def test_Proc_Component_Ref(): # R741
    """Parse a procedure-component reference."""
    cls = Proc_Component_Ref
    a = cls('a % b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'a % b')
    assert_equal(repr(a), "Proc_Component_Ref(Name('a'), '%', Name('b'))")

def test_Where_Stmt(): # R743
    """Parse a single-statement WHERE."""
    cls = Where_Stmt
    a = cls('where (a) c=2')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'WHERE (a) c = 2')
    assert_equal(repr(a), "Where_Stmt(Name('a'), Assignment_Stmt(Name('c'), '=', Int_Literal_Constant('2', None)))")

def test_Where_Construct(): # R745
    """Parse WHERE constructs, incl. ELSEWHERE branches and names."""
    cls = Where_Construct
    a = cls(get_reader('''
where (pressure <= 1.0)
pressure = pressure + inc_pressure
temp = temp - 5.0
elsewhere
raining = .true.
end where
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'WHERE (pressure <= 1.0)\n  pressure = pressure + inc_pressure\n  temp = temp - 5.0\nELSEWHERE\n  raining = .TRUE.\nEND WHERE')
    a = cls(get_reader('''
where (cond1)
elsewhere (cond2)
end where
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'WHERE (cond1)\nELSEWHERE(cond2)\nEND WHERE')
    a = cls(get_reader('''
n:where (cond1)
elsewhere (cond2) n
elsewhere n
end where n
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'n:WHERE (cond1)\nELSEWHERE(cond2) n\nELSEWHERE n\nEND WHERE n')

def test_Where_Construct_Stmt(): # R745
    """Parse the opening statement of a WHERE construct."""
    cls = Where_Construct_Stmt
    a = cls('where (a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'WHERE (a)')
    assert_equal(repr(a), "Where_Construct_Stmt(Name('a'))")

def test_Forall_Construct(): # R752
    """Parse FORALL constructs, named and unnamed."""
    cls = Forall_Construct
    a = cls(get_reader('''
forall (i = 1:10, j = 1:10, b(i, j) /= 0.0)
a(i, j) = real (i + j - 2)
b(i, j) = a(i, j) + b(i, j) * real (i * j)
end forall
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'FORALL(i = 1 : 10, j = 1 : 10, b(i, j) /= 0.0)\n  a(i, j) = real(i + j - 2)\n  b(i, j) = a(i, j) + b(i, j) * real(i * j)\nEND FORALL')
    a = cls(get_reader('''
n: forall (x = 1:5:2, j = 1:4)
a(x, j) = j
end forall n
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'n:FORALL(x = 1 : 5 : 2, j = 1 : 4)\n  a(x, j) = j\nEND FORALL n')

def test_Forall_Header(): # R754
    """Parse a forall-header (triplet specs plus optional mask)."""
    cls = Forall_Header
    a = cls('(n=1:2, a+1)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '(n = 1 : 2, a + 1)')
    a = cls('(n=1:2, m=1:x-1:z(a))')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '(n = 1 : 2, m = 1 : x - 1 : z(a))')

def test_Forall_Triplet_Spec(): # R755
    """Parse a single forall triplet spec with optional stride."""
    cls = Forall_Triplet_Spec
    a = cls('n = 1: 2')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'n = 1 : 2')
    a = cls('n = f(x): 2-b:a+1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'n = f(x) : 2 - b : a + 1')
###############################################################################
############################### SECTION 8 ####################################
###############################################################################
def test_If_Construct(): # R802
    """Parse IF constructs: plain, named, nested, ELSE/ELSE IF chains."""
    cls = If_Construct
    a = cls(get_reader('''
if (expr) then
a = 1
end if
'''))
    # repr(a) (not Py2-only backticks) so assertion messages work on Python 3 too.
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'IF (expr) THEN\n  a = 1\nEND IF')
    a = cls(get_reader('''
name: if (expr) then
a = 1
end if name
'''))
    assert_equal(str(a), 'name:IF (expr) THEN\n  a = 1\nEND IF name')
    a = cls(get_reader('''
if (expr) then
a = 1
if (expr2) then
a = 2
endif
a = 3
end if
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'IF (expr) THEN\n  a = 1\n  IF (expr2) THEN\n    a = 2\n  END IF\n  a = 3\nEND IF')
    a = cls(get_reader('''
if (expr) then
a = 1
else if (expr2) then
a = 2
end if
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'IF (expr) THEN\n  a = 1\nELSE IF (expr2) THEN\n  a = 2\nEND IF')
    a = cls(get_reader('''
if (expr) then
a = 1
else
a = 2
end if
'''))
    assert_equal(str(a), 'IF (expr) THEN\n  a = 1\nELSE\n  a = 2\nEND IF')
    a = cls(get_reader('''
if (expr) then
a = 1
else if (expr2) then
a = 2
else
a = 3
end if
'''))
    assert_equal(str(a), 'IF (expr) THEN\n  a = 1\nELSE IF (expr2) THEN\n  a = 2\nELSE\n  a = 3\nEND IF')
    a = cls(get_reader('''
named: if (expr) then
a = 1
else named
a = 2
end if named
'''))
    assert_equal(str(a), 'named:IF (expr) THEN\n  a = 1\nELSE named\n  a = 2\nEND IF named')
    a = cls(get_reader('''
named: if (expr) then
a = 1
named2: if (expr2) then
a = 2
end if named2
end if named
'''))
    assert_equal(str(a), 'named:IF (expr) THEN\n  a = 1\n  named2:IF (expr2) THEN\n    a = 2\n  END IF named2\nEND IF named')
    a = cls(get_reader('''
if (expr) then
a = 1
else if (expr2) then
a = 2
else if (expr3) then
a = 3
end if
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'IF (expr) THEN\n  a = 1\nELSE IF (expr2) THEN\n  a = 2\nELSE IF (expr3) THEN\n  a = 3\nEND IF')
    # Smoke test only (no assertions): legacy fixed-form style content with
    # labelled DO loops and '.'-continuation inside an IF construct.
    a = cls(get_reader('''
if (dxmx .gt. 0d0) then
diff = 0
do 80 k = 1, n
80 diff = max(diff,abs(xnew(k)-xin(k)))
if (diff .gt. dxmx) then
betx = dxmx/diff
call awrit3(' broyj: max shift = %1;3g'//
. ' is larger than dxmx = %1;3g. Scale by %1;3g',
. ' ',80,i1mach(2),diff,dxmx,dxmx/diff)
do 82 k = 1, n
82 xnew(k) = xin(k) + betx*(xnew(k)-xin(k))
endif
endif
'''))

def test_if_nonblock_do():
    """Nonblock DO loops sharing a terminal label inside an IF body."""
    cls = If_Construct
    a = cls(get_reader('''
if (expr) then
do 20 i = 1, 3
a = 1
do 20 j = 1, 3
a = 2
do 20 k = 1, 3
a = 3
20 rotm(i,j) = r2(j,i)
endif
'''))
    assert isinstance(a, cls), repr(a)
    assert len(a.content) == 3, repr(a)
    a = a.content[1]
    assert isinstance(a, Action_Term_Do_Construct), repr(a)
    assert_equal(str(a), 'DO 20 , i = 1, 3\n  a = 1\n  DO 20 , j = 1, 3\n    a = 2\n    DO 20 , k = 1, 3\n      a = 3\n20 rotm(i, j) = r2(j, i)')
    a = cls(get_reader('''
if (expr) then
do 50 i = n, m, -1
50 call foo(a)
endif'''))
    assert isinstance(a, cls), repr(a)
    assert len(a.content) == 3, repr(a)
    a = a.content[1]
    assert isinstance(a, Action_Term_Do_Construct), repr(a)

def test_Case_Construct(): # R808
    """Parse a SELECT CASE construct with open-ended ranges."""
    cls = Case_Construct
    a = cls(get_reader('''
select case (n)
case (:-1)
signum = -1
case (0)
signum = 0
case (1:)
signum = 1
end select
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'SELECT CASE (n)\nCASE (: - 1)\n  signum = - 1\nCASE (0)\n  signum = 0\nCASE (1 :)\n  signum = 1\nEND SELECT')

def test_Case_Selector(): # R813
    """Parse case selectors: DEFAULT, values and value ranges."""
    cls = Case_Selector
    a = cls('default')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DEFAULT')
    a = cls('(2)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '(2)')
    a = cls('(2:3, c+2:, :-a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '(2 : 3, c + 2 :, : - a)')

def test_Associate_Construct(): # R816
    """Parse ASSOCIATE constructs, named and with several associations."""
    cls = Associate_Construct
    a = cls(get_reader('''
ASSOCIATE ( Z => EXP(-(X**2+Y**2)) * COS(THETA) )
PRINT *, A+Z, A-Z
END ASSOCIATE
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'ASSOCIATE(Z => EXP(- (X ** 2 + Y ** 2)) * COS(THETA))\n  PRINT *, A + Z, A - Z\nEND ASSOCIATE')
    a = cls(get_reader('''
name:ASSOCIATE ( XC => AX%B(I,J)%C )
XC%DV = XC%DV + PRODUCT(XC%EV(1:N))
END ASSOCIATE name
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'name:ASSOCIATE(XC => AX % B(I, J) % C)\n  XC % DV = XC % DV + PRODUCT(XC % EV(1 : N))\nEND ASSOCIATE name')
    a = cls(get_reader('''
ASSOCIATE ( W => RESULT(I,J)%W, ZX => AX%B(I,J)%D, ZY => AY%B(I,J)%D )
W = ZX*X + ZY*Y
END ASSOCIATE
'''))
    assert_equal(str(a), 'ASSOCIATE(W => RESULT(I, J) % W, ZX => AX % B(I, J) % D, ZY => AY % B(I, J) % D)\n  W = ZX * X + ZY * Y\nEND ASSOCIATE')

def test_Select_Type_Construct(): # R821
    """Parse a named SELECT TYPE construct with CLASS IS/TYPE IS guards."""
    cls = Select_Type_Construct
    a = cls(get_reader('''
n:SELECT TYPE ( A => P_OR_C )
CLASS IS ( POINT )
PRINT *, A%X, A%Y ! This block gets executed
TYPE IS ( POINT_3D )
PRINT *, A%X, A%Y, A%Z
END SELECT n
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'n:SELECT TYPE(A=>P_OR_C)\n  CLASS IS (POINT)\n  PRINT *, A % X, A % Y\n  TYPE IS (POINT_3D)\n  PRINT *, A % X, A % Y, A % Z\nEND SELECT n')

def test_Select_Type_Stmt(): # R822
    """Parse the opening statement of a SELECT TYPE construct."""
    cls = Select_Type_Stmt
    a = cls('select type(a=>b)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'SELECT TYPE(a=>b)')
    a = cls('select type(a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'SELECT TYPE(a)')

def test_Type_Guard_Stmt(): # R823
    """Parse type-guard statements: TYPE IS, CLASS IS, CLASS DEFAULT."""
    cls = Type_Guard_Stmt
    a = cls('type is (real*8)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'TYPE IS (REAL*8)')
    a = cls('class is (mytype) name')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'CLASS IS (mytype) name')
    a = cls('classdefault')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'CLASS DEFAULT')

def test_Block_Label_Do_Construct(): # R826_1
    """Parse labelled block DO constructs, including nesting."""
    cls = Block_Label_Do_Construct
    a = cls(get_reader('''
do 12
a = 1
12 continue
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DO 12\n  a = 1\n12 CONTINUE')
    a = cls(get_reader('''
do 12
do 13
a = 1
13 continue
12 continue
'''))
    assert_equal(str(a), 'DO 12\n  DO 13\n    a = 1\n13 CONTINUE\n12 CONTINUE')
    assert len(a.content) == 3, repr(len(a.content))
    assert_equal(str(a.content[1]), 'DO 13\n  a = 1\n13 CONTINUE')

def test_Block_Nonlabel_Do_Construct(): # # R826_2
    """Parse nonlabelled block DO constructs, named and nested."""
    cls = Block_Nonlabel_Do_Construct
    a = cls(get_reader('''
do i=1,10
a = 1
end do
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DO , i = 1, 10\n  a = 1\nEND DO')
    a = cls(get_reader('''
foo:do i=1,10
a = 1
end do foo
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'foo:DO , i = 1, 10\n  a = 1\nEND DO foo')
    a = cls(get_reader('''
do j=1,2
foo:do i=1,10
a = 1
end do foo
end do
'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DO , j = 1, 2\n  foo:DO , i = 1, 10\n    a = 1\n  END DO foo\nEND DO')

def test_Label_Do_Stmt(): # R828
    """Parse a labelled DO statement."""
    cls = Label_Do_Stmt
    a = cls('do 12')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'DO 12')
    assert_equal(repr(a), "Label_Do_Stmt(None, Label('12'), None)")

def test_Nonblock_Do_Construct(): # R835
    """Nonblock DO constructs match as Action_Term_Do_Construct."""
    cls = Nonblock_Do_Construct
    a = cls(get_reader('''
do 20 i = 1, 3
20 rotm(i,j) = r2(j,i)
'''))
    assert isinstance(a, Action_Term_Do_Construct), repr(a)
    assert_equal(str(a), 'DO 20 , i = 1, 3\n20 rotm(i, j) = r2(j, i)')
    a = cls(get_reader('''
do 20 i = 1, 3
k = 3
do 20 j = 1, 3
l = 3
20 rotm(i,j) = r2(j,i)
'''))
    assert isinstance(a, Action_Term_Do_Construct), repr(a)
    assert_equal(str(a), 'DO 20 , i = 1, 3\n  k = 3\n  DO 20 , j = 1, 3\n    l = 3\n20 rotm(i, j) = r2(j, i)')
    a = cls(get_reader('''
do 20 i = 1, 3
20 rotm(i,j) = r2(j,i)
'''))
    assert isinstance(a, Action_Term_Do_Construct), repr(a)
    assert_equal(str(a), 'DO 20 , i = 1, 3\n20 rotm(i, j) = r2(j, i)')
    a = cls(get_reader('''
do 50 i = n, m, -1
50 call foo(a)
'''))
    assert isinstance(a, Action_Term_Do_Construct), repr(a)
    assert_equal(str(a), 'DO 50 , i = n, m, - 1\n50 CALL foo(a)')

def test_Continue_Stmt(): # R848
    """Parse a CONTINUE statement."""
    cls = Continue_Stmt
    a = cls('continue')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'CONTINUE')
    assert_equal(repr(a), "Continue_Stmt('CONTINUE')")

def test_Stop_Stmt(): # R849
    """Parse STOP with no code, a numeric code, and a string code."""
    cls = Stop_Stmt
    a = cls('stop')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'STOP')
    a = cls('stop 123')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'STOP 123')
    a = cls('stop \'hey you\'')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), "STOP 'hey you'")
###############################################################################
############################### SECTION 9 ####################################
###############################################################################
def test_Io_Unit(): # R901
    """An io-unit is '*' or degrades to a Name."""
    cls = Io_Unit
    a = cls('*')
    # repr(a) (not Py2-only backticks) so assertion messages work on Python 3 too.
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '*')
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a), 'a')

def test_Read_Stmt(): # R910
    """Parse READ statements; bare unit numbers gain 'UNIT ='."""
    cls = Read_Stmt
    a = cls('read(123)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'READ(UNIT = 123)')
    a = cls('read(123) a')
    assert_equal(str(a), 'READ(UNIT = 123) a')
    a = cls('read(123) a( 2)')
    assert_equal(str(a), 'READ(UNIT = 123) a(2)')
    a = cls('read*, a( 2), b')
    assert_equal(str(a), 'READ *, a(2), b')

def test_Write_Stmt(): # R911
    """Parse a WRITE statement and pin its node repr."""
    cls = Write_Stmt
    a = cls('write (123)"hey"')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'WRITE(UNIT = 123) "hey"')
    assert_equal(repr(a), 'Write_Stmt(Io_Control_Spec_List(\',\', (Io_Control_Spec(\'UNIT\', Int_Literal_Constant(\'123\', None)),)), Char_Literal_Constant(\'"hey"\', None))')

def test_Print_Stmt(): # R912
    """Parse PRINT with a format label and with list-directed '*'."""
    cls = Print_Stmt
    a = cls('print 123')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'PRINT 123')
    assert_equal(repr(a), "Print_Stmt(Label('123'), None)")
    a = cls('print *,"a=",a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'PRINT *, "a=", a')

def test_Io_Control_Spec(): # R913
    """Parse a single io-control-spec (END=label)."""
    cls = Io_Control_Spec
    a = cls('end=123')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'END = 123')
    assert_equal(repr(a), "Io_Control_Spec('END', Label('123'))")

def test_Io_Control_Spec_List(): # R913-list
    """Parse io-control-spec lists; positional unit/fmt gain keywords."""
    cls = Io_Control_Spec_List
    a = cls('end=123')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'END = 123')
    assert_equal(repr(a), "Io_Control_Spec_List(',', (Io_Control_Spec('END', Label('123')),))")
    a = cls('123')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'UNIT = 123')
    a = cls('123,*')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'UNIT = 123, FMT = *')
    a = cls('123,fmt=a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'UNIT = 123, FMT = a')
    if 0:
        # see todo note in Io_Control_Spec_List
        a = cls('123,a')
        assert isinstance(a, cls), repr(a)
        assert_equal(str(a), 'UNIT = 123, NML = a')

def test_Format(): # R914
    """A format is '*', a Name, or a Label."""
    cls = Format
    a = cls('*')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '*')
    assert_equal(repr(a), "Format('*')")
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a), 'a')
    a = cls('123')
    assert isinstance(a, Label), repr(a)
    assert_equal(str(a), '123')

def test_Io_Implied_Do(): # R917
    """Parse (nested) io implied-do loops."""
    cls = Io_Implied_Do
    a = cls('(a, i=1,2)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '(a, i = 1, 2)')
    a = cls('((i+j,j=3,4,1), i=1,2)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), '((i + j, j = 3, 4, 1), i = 1, 2)')

def test_Io_Implied_Do_Control(): # R919
    """Parse an io implied-do control with optional stride."""
    cls = Io_Implied_Do_Control
    a = cls('i=1,2')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'i = 1, 2')
    a = cls('i=f(2),2-1,a+2')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'i = f(2), 2 - 1, a + 2')

def test_Wait_Stmt(): # R921
    """Parse a WAIT statement with a positional unit."""
    cls = Wait_Stmt
    a = cls('wait (123)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'WAIT(UNIT = 123)')

def test_Wait_Spec(): # R922
    """Parse wait-specs; positional value becomes UNIT."""
    cls = Wait_Spec
    a = cls('123')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'UNIT = 123')
    assert_equal(repr(a), "Wait_Spec('UNIT', Int_Literal_Constant('123', None))")
    a = cls('err=1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'ERR = 1')

def test_Backspace_Stmt(): # R923
    """Parse BACKSPACE in both short and position-spec-list forms."""
    cls = Backspace_Stmt
    a = cls('backspace 1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'BACKSPACE 1')
    a = cls('backspace (unit=1,err=2)')
    assert_equal(str(a), 'BACKSPACE(UNIT = 1, ERR = 2)')

def test_Endfile_Stmt(): # R924
    """Parse ENDFILE in both short and position-spec-list forms."""
    cls = Endfile_Stmt
    a = cls('endfile 1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'ENDFILE 1')
    a = cls('endfile (unit=1,err=2)')
    assert_equal(str(a), 'ENDFILE(UNIT = 1, ERR = 2)')

def test_Rewind_Stmt(): # R925
    """Parse REWIND in both short and position-spec-list forms."""
    cls = Rewind_Stmt
    a = cls('rewind 1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'REWIND 1')
    a = cls('rewind (unit=1,err=2)')
    assert_equal(str(a), 'REWIND(UNIT = 1, ERR = 2)')

def test_Position_Spec(): # R926
    """Parse position-specs; positional value becomes UNIT."""
    cls = Position_Spec
    a = cls('1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'UNIT = 1')
    a = cls('unit=1')
    assert_equal(str(a), 'UNIT = 1')
    a = cls('err=2')
    assert_equal(str(a), 'ERR = 2')
    a = cls('iomsg=a')
    assert_equal(str(a), 'IOMSG = a')
    a = cls('iostat=a')
    assert_equal(str(a), 'IOSTAT = a')

def test_Flush_Stmt(): # R927
    """Parse FLUSH in both short and flush-spec-list forms."""
    cls = Flush_Stmt
    a = cls('flush 1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'FLUSH 1')
    a = cls('flush (unit=1,err=2)')
    assert_equal(str(a), 'FLUSH(UNIT = 1, ERR = 2)')

def test_Flush_Spec(): # R928
    """Parse flush-specs; positional value becomes UNIT."""
    cls = Flush_Spec
    a = cls('1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'UNIT = 1')
    a = cls('unit=1')
    assert_equal(str(a), 'UNIT = 1')
    a = cls('err=2')
    assert_equal(str(a), 'ERR = 2')
    a = cls('iomsg=a')
    assert_equal(str(a), 'IOMSG = a')
    a = cls('iostat=a')
    assert_equal(str(a), 'IOSTAT = a')

def test_Inquire_Stmt(): # R929
    """Parse INQUIRE in spec-list and IOLENGTH forms."""
    cls = Inquire_Stmt
    a = cls('inquire(1,file=a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'INQUIRE(UNIT = 1, FILE = a)')
    a = cls('inquire(iolength=n) a, b')
    assert_equal(str(a), 'INQUIRE(IOLENGTH=n) a, b')

def test_Inquire_Spec(): # R930
    """Parse inquire-specs; positional value becomes UNIT."""
    cls = Inquire_Spec
    a = cls('1')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'UNIT = 1')
    a = cls('file=fn')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'FILE = fn')
    a = cls('access=a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'ACCESS = a')
###############################################################################
############################### SECTION 10 ####################################
###############################################################################
# Tests for Fortran 2003 FORMAT classes (R1001-R1003 plus the item list).
# FIX: Python-2-only backtick repr syntax replaced with repr() (valid in 2 and 3).
def test_Format_Stmt(): # R1001
    cls = Format_Stmt
    a = cls('format (3f9.4)')
    assert isinstance(a, cls), repr(type(a))
    assert_equal(str(a),'FORMAT(3F9.4)')
    a = cls("format (' ',3f9.4)")
    assert isinstance(a, cls), repr(type(a))
    assert_equal(str(a),"FORMAT(' ', 3F9.4)")
    a = cls('format(i6,f12.6,2x,f12.6)')
    assert isinstance(a, cls), repr(type(a))
    assert_equal(str(a),'FORMAT(I6, F12.6, 2X, F12.6)')
    a = cls("format(' Enter smth',$)")
    assert isinstance(a, cls), repr(type(a))
    assert_equal(str(a),"FORMAT(' Enter smth', $)")
    a = cls("format(/'a' /'b')")
    assert isinstance(a, cls), repr(type(a))
    assert_equal(str(a),"FORMAT(/, 'a', /, 'b')")
    a = cls("format('a:':' b')")
    assert isinstance(a, cls), repr(type(a))
    assert_equal(str(a),"FORMAT('a:', :, ' b')")
    # Early return: the remaining (unbalanced-paren) case is disabled.
    return
    a = cls("format('text=',' '")
    assert_equal(str(a),'')
def test_Format_Specification(): # R1002
    cls = Format_Specification
    a = cls('(3f9.4, 2f8.1)')
    assert isinstance(a, cls), repr(type(a))
    assert_equal(str(a),'(3F9.4, 2F8.1)')
    a = cls("(' ', 2f8.1)")
    assert isinstance(a, cls), repr(type(a))
    assert_equal(str(a),"(' ', 2F8.1)")
def test_Format_Item(): # R1003
    cls = Format_Item
    a = cls('3f9.4')
    assert isinstance(a, cls), repr(type(a))
    assert_equal(str(a),'3F9.4')
    a = cls("' '")
    assert isinstance(a, Char_Literal_Constant), repr(type(a))
    assert_equal(str(a),"' '")
    a = cls('i4/')
    assert isinstance(a, Format_Item_C1002), repr(type(a))
    assert_equal(str(a),'I4, /')
    a = cls('3f12.6/')
    assert_equal(str(a),'3F12.6, /')
    a = cls("/' '")
    assert_equal(str(a),"/, ' '")
    a = cls("' '/")
    assert_equal(str(a),"' ', /")
    a = cls("' '/' '")
    assert_equal(str(a),"' ', /, ' '")
def test_Format_Item_List():
    cls = Format_Item_List
    a = cls('3f9.4')
    assert isinstance(a, Format_Item), repr(type(a))
    assert_equal(str(a),'3F9.4')
    a = cls('3f9.4, 2f8.1')
    assert isinstance(a, Format_Item_List), repr(type(a))
    assert_equal(str(a),'3F9.4, 2F8.1')
    a = cls("' ', 2f8.1")
    assert isinstance(a, Format_Item_List), repr(type(a))
    assert_equal(str(a),"' ', 2F8.1")
    a = cls("' ', ' '")
    assert_equal(str(a),"' ', ' '")
###############################################################################
############################### SECTION 11 ####################################
###############################################################################
# Tests for Fortran 2003 section 11 classes (main program, module, use, etc.).
# FIX: Python-2-only backtick repr syntax replaced with repr() (valid in 2 and 3).
def test_Main_Program(): # R1101
    cls = Main_Program
    a = cls(get_reader('''
    program a
    end
    '''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'PROGRAM a\nEND PROGRAM a')
    a = cls(get_reader('''
    program a
    real b
    b = 1
    contains
    subroutine foo
    end
    end
    '''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'PROGRAM a\n  REAL :: b\n  b = 1\n  CONTAINS\n  SUBROUTINE foo\n  END SUBROUTINE foo\nEND PROGRAM a')
    a = Main_Program0(get_reader('''
    end
    '''))
    assert isinstance(a, Main_Program0), repr(a)
    assert_equal(str(a),'END PROGRAM')
    a = Main_Program0(get_reader('''
    contains
    function foo()
    end
    end
    '''))
    assert isinstance(a, Main_Program0), repr(a)
    assert_equal(str(a),'CONTAINS\nFUNCTION foo()\nEND FUNCTION\nEND PROGRAM')
def test_Module(): # R1104
    cls = Module
    a = cls(get_reader('''
    module m
    end
    '''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'MODULE m\nEND MODULE m')
    a = cls(get_reader('''
    module m
    type a
    end type
    type b
    end type b
    end
    '''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'MODULE m\n  TYPE :: a\n  END TYPE a\n  TYPE :: b\n  END TYPE b\nEND MODULE m')
def test_Module_Subprogram_Part(): # R1107
    cls = Module_Subprogram_Part
    a = cls(get_reader('''
    contains
    subroutine foo(a)
    real a
    a = 1.0
    end
    '''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'CONTAINS\nSUBROUTINE foo(a)\n  REAL :: a\n  a = 1.0\nEND SUBROUTINE foo')
def test_Use_Stmt(): # R1109
    cls = Use_Stmt
    a = cls('use a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'USE :: a')
    assert_equal(repr(a),"Use_Stmt(None, Name('a'), '', None)")
    a = cls('use :: a, c=>d')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'USE :: a, c => d')
    a = cls('use :: a, operator(.hey.)=>operator(.hoo.)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'USE :: a, OPERATOR(.HEY.) => OPERATOR(.HOO.)')
    a = cls('use, intrinsic :: a, operator(.hey.)=>operator(.hoo.), c=>g')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'USE, INTRINSIC :: a, OPERATOR(.HEY.) => OPERATOR(.HOO.), c => g')
def test_Module_Nature(): # R1110
    cls = Module_Nature
    a = cls('intrinsic')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'INTRINSIC')
    assert_equal(repr(a),"Module_Nature('INTRINSIC')")
    a = cls('non_intrinsic')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'NON_INTRINSIC')
def test_Rename(): # R1111
    cls = Rename
    a = cls('a=>b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'a => b')
    a = cls('operator(.foo.)=>operator(.bar.)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'OPERATOR(.FOO.) => OPERATOR(.BAR.)')
def test_Block_Data(): # R1116
    cls = Block_Data
    a = cls(get_reader('''
    block data a
    real b
    end block data
    '''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'BLOCK DATA a\n  REAL :: b\nEND BLOCK DATA a')
###############################################################################
############################### SECTION 12 ####################################
###############################################################################
# Tests for Fortran 2003 interface-block classes (R1201-R1205).
# FIX: Python-2-only backtick repr syntax replaced with repr() (valid in 2 and 3).
def test_Interface_Block(): # R1201
    cls = Interface_Block
    a = cls(get_reader('''\
    interface
    end interface'''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'INTERFACE\nEND INTERFACE')
    a = cls(get_reader('''\
    abstract interface
    procedure a
    module procedure b,c
    end interface
    '''))
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'ABSTRACT INTERFACE\n  MODULE PROCEDURE a\n  MODULE PROCEDURE b, c\nEND INTERFACE')
def test_Interface_Specification(): # R1202
    cls = Interface_Specification
    a = cls(get_reader('''
    function foo()
    end
    '''))
    assert isinstance(a, Function_Body), repr(a)
    assert_equal(str(a),'FUNCTION foo()\nEND FUNCTION')
def test_Interface_Stmt(): # R1203
    cls = Interface_Stmt
    a = cls('interface')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'INTERFACE')
    a = cls('interface assignment(=)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'INTERFACE ASSIGNMENT(=)')
    a = cls('abstract interface')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'ABSTRACT INTERFACE')
def test_End_Interface_Stmt(): # R1204
    cls = End_Interface_Stmt
    a = cls('end interface')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'END INTERFACE')
    a = cls('end interface read(formatted)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'END INTERFACE READ(FORMATTED)')
def test_Interface_Body(): # R1205
    cls = Interface_Body
    a = cls(get_reader('''\
    subroutine foo
    end subroutine foo
    '''))
    assert isinstance(a, Subroutine_Body), repr(a)
    assert_equal(str(a),'SUBROUTINE foo\nEND SUBROUTINE foo')
    a = cls(get_reader('''\
    function foo(a) result(c)
    real a, c
    end
    '''))
    assert isinstance(a, Function_Body), repr(a)
    assert_equal(str(a),'FUNCTION foo(a) RESULT(c)\n  REAL :: a, c\nEND FUNCTION')
def test_Subroutine_Body():
    # placeholder: Subroutine_Body is exercised via test_Interface_Body
    pass
def test_Function_Body():
    # placeholder: Function_Body is exercised via test_Interface_Body
    pass
# Tests for Fortran 2003 generic/procedure declaration classes (R1206-R1213).
# FIX: Python-2-only backtick repr syntax replaced with repr() (valid in 2 and 3).
def test_Procedure_Stmt(): # R1206
    cls = Procedure_Stmt
    a = cls('module procedure a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'MODULE PROCEDURE a')
    a = cls('procedure a, b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'MODULE PROCEDURE a, b')
def test_Generic_Spec(): # R1207
    cls = Generic_Spec
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a), 'a')
    a = cls('read(formatted)')
    assert isinstance(a, Dtio_Generic_Spec), repr(a)
    assert_equal(str(a), 'READ(FORMATTED)')
    a = cls('assignment ( = )')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'ASSIGNMENT(=)')
    # Early return: OPERATOR generic-spec support is still TODO.
    return # TODO
    a = cls('operator(.foo.)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'OPERATOR(.foo.)')
def test_Dtio_Generic_Spec(): # R1208
    cls = Dtio_Generic_Spec
    a = cls('read ( formatted )')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'READ(FORMATTED)')
    a = cls('write ( formatted )')
    assert_equal(str(a), 'WRITE(FORMATTED)')
    a = cls('read ( unformatted )')
    assert_equal(str(a), 'READ(UNFORMATTED)')
    a = cls('write ( unformatted )')
    assert_equal(str(a), 'WRITE(UNFORMATTED)')
def test_Import_Stmt(): # R1209
    cls = Import_Stmt
    a = cls('import :: a, b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'IMPORT :: a, b')
    a = cls('import a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'IMPORT :: a')
def test_External_Stmt(): # R1210
    cls = External_Stmt
    a = cls('external :: a, b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'EXTERNAL :: a, b')
    a = cls('external a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'EXTERNAL :: a')
def test_Procedure_Declaration_Stmt(): # R1211
    cls = Procedure_Declaration_Stmt
    a = cls('procedure () a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'PROCEDURE() a')
    a = cls('procedure (n) a')
    assert_equal(str(a), 'PROCEDURE(n) a')
    a = cls('procedure (real*8) a')
    assert_equal(str(a), 'PROCEDURE(REAL*8) a')
    a = cls('procedure (real(kind=8)) a')
    assert_equal(str(a), 'PROCEDURE(REAL(KIND = 8)) a')
    a = cls('procedure (real*8) :: a')
    assert_equal(str(a), 'PROCEDURE(REAL*8) a')
    a = cls('procedure (real*8), intent(in), bind(c) :: a, b')
    assert_equal(str(a), 'PROCEDURE(REAL*8), INTENT(IN), BIND(C) :: a, b')
def test_Proc_Attr_Spec(): # R1213
    cls = Proc_Attr_Spec
    a = cls('intent(in)')
    assert isinstance(a, cls)
    assert_equal(str(a),'INTENT(IN)')
    a = cls('optional')
    assert isinstance(a, cls)
    assert_equal(str(a),'OPTIONAL')
    a = cls('save')
    assert isinstance(a, cls)
    assert_equal(str(a),'SAVE')
    a = cls('private')
    assert isinstance(a, Access_Spec), repr(type(a))
    assert_equal(str(a),'PRIVATE')
    a = cls('bind(c)')
    assert isinstance(a, Language_Binding_Spec), repr(a)
    assert_equal(str(a),'BIND(C)')
# Tests for Fortran 2003 procedure-reference classes (R1214-R1222).
# FIX: Python-2-only backtick repr syntax replaced with repr() (valid in 2 and 3).
def test_Proc_Decl(): # R1214
    cls = Proc_Decl
    a = cls('a => NULL')
    assert isinstance(a, cls)
    assert_equal(str(a),'a => NULL')
    a = cls('a')
    assert isinstance(a, Name), repr(type(a))
    assert_equal(str(a),'a')
def test_Intrinsic_Stmt(): # R1216
    cls = Intrinsic_Stmt
    a = cls('intrinsic :: a, b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'INTRINSIC :: a, b')
    a = cls('intrinsic a, b')
    assert_equal(str(a),'INTRINSIC :: a, b')
    a = cls('intrinsic a')
    assert_equal(str(a),'INTRINSIC :: a')
def test_Function_Reference(): # R1217
    cls = Function_Reference
    a = cls('f()')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'f()')
    assert_equal(repr(a),"Function_Reference(Name('f'), None)")
    a = cls('f(2,k=1,a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'f(2, k = 1, a)')
def test_Call_Stmt(): # R1218
    cls = Call_Stmt
    a = cls('call a')
    assert isinstance(a, cls)
    assert_equal(str(a), 'CALL a')
    a = cls('call a()')
    assert_equal(str(a), 'CALL a')
    a = cls('call a(b,c)')
    assert_equal(str(a), 'CALL a(b, c)')
def test_Procedure_Designator(): # R1219
    cls = Procedure_Designator
    a = cls('a%b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'a % b')
    assert_equal(repr(a),"Procedure_Designator(Name('a'), '%', Name('b'))")
def test_Actual_Arg_Spec(): # R1220
    cls = Actual_Arg_Spec
    a = cls('k=a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'k = a')
    assert_equal(repr(a),"Actual_Arg_Spec(Name('k'), Name('a'))")
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a),'a')
def test_Actual_Arg_Spec_List():
    cls = Actual_Arg_Spec_List
    a = cls('a,b')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'a, b')
    assert_equal(repr(a),"Actual_Arg_Spec_List(',', (Name('a'), Name('b')))")
    a = cls('a = k')
    assert isinstance(a, Actual_Arg_Spec), repr(a)
    assert_equal(str(a),'a = k')
    a = cls('a = k,b')
    assert isinstance(a, Actual_Arg_Spec_List), repr(a)
    assert_equal(str(a),'a = k, b')
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a),'a')
def test_Alt_Return_Spec(): # R1222
    cls = Alt_Return_Spec
    a = cls('* 123')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'*123')
    assert_equal(repr(a),"Alt_Return_Spec(Label('123'))")
# Tests for Fortran 2003 function-subprogram classes (R1223-R1229).
# FIX: Python-2-only backtick repr syntax replaced with repr() (valid in 2 and 3).
def test_Function_Subprogram(): # R1223
    reader = get_reader('''\
    function foo()
    end function foo''')
    cls = Function_Subprogram
    a = cls(reader)
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'FUNCTION foo()\nEND FUNCTION foo')
    assert_equal(repr(a),"Function_Subprogram(Function_Stmt(None, Name('foo'), None, None), End_Function_Stmt('FUNCTION', Name('foo')))")
    reader = get_reader('''\
    pure real function foo(a) result(b) bind(c)
    integer a
    end function foo''')
    cls = Function_Subprogram
    a = cls(reader)
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'PURE REAL FUNCTION foo(a) RESULT(b) BIND(C)\n  INTEGER :: a\nEND FUNCTION foo')
def test_Function_Stmt(): # R1224
    cls = Function_Stmt
    a = cls('function foo()')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'FUNCTION foo()')
    assert_equal(repr(a),"Function_Stmt(None, Name('foo'), None, None)")
    a = cls('function foo(a,b)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'FUNCTION foo(a, b)')
    assert_equal(repr(a),"Function_Stmt(None, Name('foo'), Dummy_Arg_List(',', (Name('a'), Name('b'))), None)")
    a = cls('function foo(a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'FUNCTION foo(a)')
    a = cls('real function foo(a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'REAL FUNCTION foo(a)')
    a = cls('real recursive function foo(a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'REAL RECURSIVE FUNCTION foo(a)')
    a = cls('real function foo(a) bind(c)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'REAL FUNCTION foo(a) BIND(C)')
    a = cls('real function foo(a) result (b)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'REAL FUNCTION foo(a) RESULT(b)')
    a = cls('real function foo(a) bind(c) result(b)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'REAL FUNCTION foo(a) RESULT(b) BIND(C)')
def test_Dummy_Arg_Name(): # R1226
    cls = Dummy_Arg_Name
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a),'a')
def test_Prefix(): # R1227
    cls = Prefix
    a = cls('pure recursive')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'PURE RECURSIVE')
    assert_equal(repr(a), "Prefix(' ', (Prefix_Spec('PURE'), Prefix_Spec('RECURSIVE')))")
    a = cls('integer * 2 pure')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'INTEGER*2 PURE')
def test_Prefix_Spec(): # R1228
    cls = Prefix_Spec
    a = cls('pure')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'PURE')
    assert_equal(repr(a),"Prefix_Spec('PURE')")
    a = cls('elemental')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'ELEMENTAL')
    a = cls('recursive')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'RECURSIVE')
    a = cls('integer * 2')
    assert isinstance(a, Intrinsic_Type_Spec), repr(a)
    assert_equal(str(a),'INTEGER*2')
def test_Suffix(): # R1229
    cls = Suffix
    a = cls('bind(c)')
    assert isinstance(a, Language_Binding_Spec), repr(a)
    assert_equal(str(a),'BIND(C)')
    assert_equal(repr(a),"Language_Binding_Spec(None)")
    a = cls('result(a)')
    assert isinstance(a, Suffix), repr(a)
    assert_equal(str(a),'RESULT(a)')
    a = cls('bind(c) result(a)')
    assert isinstance(a, Suffix), repr(a)
    assert_equal(str(a),'RESULT(a) BIND(C)')
    a = cls('result(a) bind(c)')
    assert isinstance(a, Suffix), repr(a)
    assert_equal(str(a),'RESULT(a) BIND(C)')
# Tests for Fortran 2003 subroutine classes (R1230-R1237).
# FIX: Python-2-only backtick repr syntax replaced with repr() (valid in 2 and 3).
def test_End_Function_Stmt(): # R1230
    cls = End_Function_Stmt
    a = cls('end')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'END FUNCTION')
    a = cls('endfunction')
    assert_equal(str(a), 'END FUNCTION')
    a = cls('endfunction foo')
    assert_equal(str(a), 'END FUNCTION foo')
def test_Subroutine_Subprogram(): # R1231
    reader = get_reader('''\
    subroutine foo
    end subroutine foo''')
    cls = Subroutine_Subprogram
    a = cls(reader)
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'SUBROUTINE foo\nEND SUBROUTINE foo')
    assert_equal(repr(a),"Subroutine_Subprogram(Subroutine_Stmt(None, Name('foo'), None, None), End_Subroutine_Stmt('SUBROUTINE', Name('foo')))")
    reader = get_reader('''\
    subroutine foo
    integer a
    end subroutine foo''')
    cls = Subroutine_Subprogram
    a = cls(reader)
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'SUBROUTINE foo\n  INTEGER :: a\nEND SUBROUTINE foo')
def test_Subroutine_Stmt(): # R1232
    cls = Subroutine_Stmt
    a = cls('subroutine foo')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'SUBROUTINE foo')
    assert_equal(repr(a),"Subroutine_Stmt(None, Name('foo'), None, None)")
    a = cls('pure subroutine foo')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'PURE SUBROUTINE foo')
    a = cls('pure subroutine foo(a,b)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'PURE SUBROUTINE foo(a, b)')
    a = cls('subroutine foo() bind(c)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'SUBROUTINE foo BIND(C)')
    a = cls('subroutine foo(a)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'SUBROUTINE foo(a)')
    a = cls('subroutine foo(a, b)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'SUBROUTINE foo(a, b)')
    a = cls('subroutine foo(a,*)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'SUBROUTINE foo(a, *)')
    a = cls('subroutine foo(*)')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'SUBROUTINE foo(*)')
def test_Dummy_Arg(): # R1233
    cls = Dummy_Arg
    a = cls('a')
    assert isinstance(a, Name), repr(a)
    assert_equal(str(a),'a')
    a = cls('*')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'*')
def test_End_Subroutine_Stmt(): # R1234
    cls = End_Subroutine_Stmt
    a = cls('end subroutine foo')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'END SUBROUTINE foo')
    assert_equal(repr(a),"End_Subroutine_Stmt('SUBROUTINE', Name('foo'))")
    a = cls('end')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'END SUBROUTINE')
    a = cls('endsubroutine')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'END SUBROUTINE')
def test_Entry_Stmt(): # R1235
    cls = Entry_Stmt
    a = cls('entry a')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'ENTRY a()')
    a = cls('entry a()')
    assert_equal(str(a), 'ENTRY a()')
    a = cls('entry a(b, c)')
    assert_equal(str(a), 'ENTRY a(b, c)')
    a = cls('entry a(b, c) bind(c)')
    assert_equal(str(a), 'ENTRY a(b, c) BIND(C)')
def test_Return_Stmt(): # R1236
    cls = Return_Stmt
    a = cls('return')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a), 'RETURN')
    assert_equal(repr(a), 'Return_Stmt(None)')
def test_Contains(): # R1237
    cls = Contains_Stmt
    a = cls('Contains')
    assert isinstance(a, cls), repr(a)
    assert_equal(str(a),'CONTAINS')
    assert_equal(repr(a),"Contains_Stmt('CONTAINS')")
# Module self-audit (Python 2 script): scan every name defined so far and
# report which Base subclasses still need a match() implementation and/or a
# matching test_* function above.  Kept byte-identical (Python 2 `print`).
if 1:
    nof_needed_tests = 0  # classes that still lack a test_* function
    nof_needed_match = 0  # classes that still lack a match() implementation
    total_needs = 0  # classes with use_names (the ones that need both)
    total_classes = 0  # all non-*Base subclasses of Base seen in this module
    for name in dir():
        obj = eval(name)
        # only consider classes derived from the parser's Base class
        if not isinstance(obj, ClassType): continue
        if not issubclass(obj, Base): continue
        clsname = obj.__name__
        if clsname.endswith('Base'): continue
        total_classes += 1
        subclass_names = obj.__dict__.get('subclass_names',None)
        use_names = obj.__dict__.get('use_names',None)
        if not use_names: continue
        # match must be defined on the class itself, not inherited
        match = obj.__dict__.get('match',None)
        try:
            test_cls = eval('test_%s' % (clsname))
        except NameError:
            test_cls = None
        total_needs += 1
        if match is None:
            if test_cls is None:
                print 'Needs tests:', clsname
                print 'Needs match implementation:', clsname
                nof_needed_tests += 1
                nof_needed_match += 1
            else:
                print 'Needs match implementation:', clsname
                nof_needed_match += 1
        else:
            if test_cls is None:
                print 'Needs tests:', clsname
                nof_needed_tests += 1
        continue
    # summary report
    print '-----'
    print 'Nof match implementation needs:',nof_needed_match,'out of',total_needs
    print 'Nof tests needs:',nof_needed_tests,'out of',total_needs
    print 'Total number of classes:',total_classes
    print '-----'
|
import importlib
def DetourBackend(filename, data_fallback=None, base_address=None, try_pdf_removal=True, replace_note_segment=False, try_without_cfg=False, variant=None):
    """Factory: sniff the binary's magic and ELF e_machine field and return an
    instance of the matching architecture-specific DetourBackend class."""
    def _load(module, class_name):
        # Import lazily so only the selected architecture backend is imported.
        return getattr(importlib.import_module("patcherex.backends.detourbackends." + module), class_name)

    with open(filename, "rb") as f:
        start_bytes = f.read(0x14)

    if start_bytes.startswith(b"\x7fCGC"):
        backend_class = _load("cgc", "DetourBackendCgc")
        return backend_class(filename, data_fallback=data_fallback, try_pdf_removal=try_pdf_removal)

    if start_bytes.startswith(b"\x7fELF"):
        # e_machine lives at offset 0x12 of the ELF header (see glibc/elf/elf.h)
        machine = start_bytes[0x12:0x14]
        if machine == b"\x03\x00":  # EM_386
            backend_class = _load("i386", "DetourBackendi386")
        elif machine == b"\x3e\x00":  # EM_X86_64 (also handled by the i386 backend)
            backend_class = _load("i386", "DetourBackendi386")
        elif machine == b"\xb7\x00":  # EM_AARCH64
            backend_class = _load("aarch64", "DetourBackendAarch64")
        elif machine == b"\x28\x00":  # EM_ARM
            if variant == "stm32":
                backend_class = _load("arm_stm32", "DetourBackendArmStm32")
            else:
                backend_class = _load("arm", "DetourBackendArm")
        elif machine in (b"\x08\x00", b"\x00\x08"):  # EM_MIPS (little/big endian)
            backend_class = _load("mips", "DetourBackendMips")
        elif machine in (b"\x14\x00", b"\x00\x14", b"\x15\x00", b"\x00\x15"):  # EM_PPC / EM_PPC64
            backend_class = _load("ppc", "DetourBackendPpc")
        elif machine == b"\x53\x00":  # EM_AVR
            backend_class = _load("avr", "DetourBackendAVR")
        else:
            raise Exception("Unsupported architecture.")
        return backend_class(filename, base_address=base_address, replace_note_segment=replace_note_segment, try_without_cfg=try_without_cfg)

    raise Exception("Unsupported file type.")
def init_backend(program_name, options):
    """Convenience wrapper: build a DetourBackend for *program_name*,
    expanding the *options* dict as keyword arguments."""
    backend = DetourBackend(program_name, **options)
    return backend
|
# Python program to generate a WordCloud from the CONTENT column of a CSV file.
# FIX: the old header comment claimed the script reads 'Youtube04-Eminem.csv'
# while it actually reads 'wc_test.csv'; token accumulation now uses a list +
# join instead of quadratic string concatenation, and tokens are lower-cased
# via direct iteration instead of an index loop.
# importing all necessary modules
from wordcloud import WordCloud, STOPWORDS
import matplotlib.pyplot as plt
import pandas as pd
# Read the input CSV (space-delimited, UTF-8); it must provide a CONTENT column.
df = pd.read_csv("wc_test.csv", encoding ="utf-8", delimiter=' ')
stopwords = set(STOPWORDS)
# Collect every token of every comment, lower-cased, into one space-separated
# string for WordCloud.generate().
tokens = []
for val in df.CONTENT:
    # cells may be non-strings (e.g. NaN), so coerce to str before splitting
    tokens.extend(token.lower() for token in str(val).split())
comment_words = " ".join(tokens) + " "
wordcloud = WordCloud(width = 800, height = 800,
                background_color ='white',
                stopwords = stopwords,
                min_font_size = 10).generate(comment_words)
# plot the WordCloud image
plt.figure(figsize = (8, 8), facecolor = None)
plt.imshow(wordcloud)
plt.axis("off")
plt.tight_layout(pad = 0)
plt.show()
|
import torch
from jet20.backend.const import LINEAR,QUADRATIC
class LeConstraitConflict(Exception):
    """Raised when a set of less-than-or-equal constraints cannot be satisfied."""
class EqConstraitConflict(Exception):
    """Raised when a set of equality constraints cannot be satisfied."""
class Constraints(object):
    """Abstract interface for a constraint system.

    Concrete subclasses evaluate the constraint residual via __call__,
    check feasibility via validate(), report their kind/size, and support
    dtype/device conversion (float/double/to).
    """

    def __call__(self, x):
        # residual of the constraint system evaluated at point x
        raise NotImplementedError("")

    def validate(self, x, *args, **kwargs):
        # whether x satisfies the constraints
        raise NotImplementedError("")

    def type(self):
        # constraint kind constant (e.g. LINEAR, QUADRATIC)
        raise NotImplementedError("")

    def size(self):
        # number of scalar constraints
        raise NotImplementedError("")

    def float(self):
        raise NotImplementedError("")

    def double(self):
        raise NotImplementedError("")

    def to(self, device):
        raise NotImplementedError("")
class LinearConstraints(Constraints):
    """Linear constraint system described by a matrix A and vector b;
    the residual at x is A @ x - b."""

    def __init__(self, A, b):
        super(LinearConstraints, self).__init__()
        self.A = A
        self.b = b

    def __call__(self, x):
        # signed residual: A @ x - b
        return self.A @ x - self.b

    def validate(self, x, *args, **kwargs):
        # feasibility semantics depend on the concrete subclass (==, <=, ...)
        raise NotImplementedError("")

    def type(self):
        return LINEAR

    def size(self):
        # one scalar constraint per row of A
        return self.A.size(0)

    def _rebuild(self, A, b):
        # construct a new instance of the *concrete* subclass
        return self.__class__(A, b)

    def float(self):
        return self._rebuild(self.A.float(), self.b.float())

    def double(self):
        return self._rebuild(self.A.double(), self.b.double())

    def to(self, device):
        return self._rebuild(self.A.to(device), self.b.to(device))
class LinearEqConstraints(LinearConstraints):
    """Equality constraints A @ x == b (within a numeric tolerance)."""

    def __init__(self, A, b):
        super(LinearEqConstraints, self).__init__(A, b)

    def validate(self, x, tolerance=1e-8):
        # feasible iff every |A @ x - b| component is within the tolerance
        residual = torch.abs(self(x))
        violated = residual > tolerance
        return violated.float().sum() == 0
class LinearLeConstraints(LinearConstraints):
    """Inequality constraints A @ x <= b."""

    def __init__(self, A, b):
        super(LinearLeConstraints, self).__init__(A, b)

    def validate(self, x):
        # feasible iff no residual component is strictly positive
        violated = self(x) > 0
        return violated.float().sum() == 0
|
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 19 16:44:37 2017
@author: Usman
"""
import random,time,numpy as np,threading
from matplotlib import pyplot as plt
def multiply(rows, columns, matrix, matrix2, matrix3):
    """Sequential square-matrix multiplication: matrix3 = matrix @ matrix2.

    rows/columns are the (square) dimensions; matrix3 is written in place.
    FIX: hoisted the repeated int() conversions out of the loops and replaced
    the non-idiomatic range(0, n, 1) form with range(n). Behavior unchanged.
    """
    rows = int(rows)
    columns = int(columns)
    for i in range(rows):
        for j in range(columns):
            value = 0
            for k in range(columns):
                value += matrix[i][k] * matrix2[k][j]
            matrix3[i][j] = value
    #print ("Sequential: ",matrix3)
def multiparalell(min_row_matA, max_row_matA, min_col_matB, max_col_matB, columns, lock, i, matrix, matrix2, matrix3):
    """Thread worker: computes rows [min_row_matA, max_row_matA) of
    matrix3 = matrix @ matrix2 for square matrices of width *columns*.

    NOTE(review): the lock serialises the whole computation, so the worker
    threads effectively run one at a time; it is kept to preserve the original
    behaviour. The row ranges are disjoint, so it is not needed for correctness.
    FIX: manual acquire/try/finally/release replaced with the `with lock:`
    context manager (equivalent, leak-proof); ranges made idiomatic.
    """
    with lock:
        #print ("Before Matrix: ",matrix3)
        for row in range(int(min_row_matA), int(max_row_matA)):
            for col in range(columns):
                value = 0
                for k in range(columns):
                    value += matrix[row][k] * matrix2[k][col]
                matrix3[row][col] = value
        #print ("Paralell Matrix: ",matrix3)
def main():
    """Benchmark threaded vs sequential NxN matrix multiplication and plot both times.

    FIXES: time.clock() (deprecated since 3.3, removed in Python 3.8) replaced
    with time.perf_counter(); the join loop previously executed `t.join()` for
    every index, i.e. it only ever joined the *last* thread — now every thread
    is joined.
    """
    rows = int(input("Input the dimensions for NxN matrix:"))
    nthreads = 4  # input("Input the number of threads: ")
    columns = rows
    min_row_matA = 0  # variables used to divide the matrix into chunks
    max_row_matA = 0
    min_col_matB = 0
    max_col_matB = 0
    threads = []
    step = int(rows) / int(nthreads)  # rows of the first matrix per thread
    lock = threading.Lock()
    final_chunk = int(step) * int(nthreads)
    # square matrices: two all-ones inputs and a zeroed output
    matrix = [[1 for i in range(int(rows))] for i in range(int(columns))]
    matrix2 = [[1 for i in range(int(rows))] for i in range(int(columns))]
    matrix3 = [[0 for i in range(int(rows))] for i in range(int(columns))]
    for i in range(0, int(rows), int(step)):
        if i + int(step) <= rows and max_row_matA != rows:
            min_row_matA = i  # next chunk of rows for this thread
            max_row_matA = i + int(step)
            min_col_matB = i
            max_col_matB = i + int(step)
        if rows % int(nthreads) != 0 and i + int(step) == final_chunk:
            # the last thread absorbs the leftover rows when rows % nthreads != 0
            max_row_matA = max_row_matA + (rows - final_chunk)
            max_col_matB = max_col_matB + (rows - final_chunk)
        time.sleep(0.5)
        # NOTE(review): `start` is reset on every iteration, so the reported
        # parallel time only covers the last thread; kept to preserve the
        # original measurement semantics.
        start = time.perf_counter()
        t = threading.Thread(target=multiparalell, args=(int(min_row_matA), int(max_row_matA), int(min_col_matB), int(max_col_matB), columns, lock, i, matrix, matrix2, matrix3))
        t.start()
        threads.append(t)
    for worker in threads:
        worker.join()  # BUG FIX: previously only the last-created thread was joined
    end = time.perf_counter()
    startSeq = time.perf_counter()
    multiply(rows, columns, matrix, matrix2, matrix3)
    endSeq = time.perf_counter()
    print("Process Time for Sequential multiplication: ", round(endSeq - startSeq, 4))
    print("Sequential Time - Paralell Time :", round((endSeq - startSeq) - (end - start), 4))
    if (endSeq - startSeq) > (end - start):
        print("Paralell Mechanism was", round(((endSeq - startSeq) - (end - start)) / (end - start), 4), "% Faster")
    if (endSeq - startSeq) < (end - start):
        print("Sequential Mechanism was", round(((end - start) - (endSeq - startSeq)) / (endSeq - startSeq), 4), "% Faster")
    if (endSeq - startSeq) == (end - start):
        print("Sequential and Paralell were same")
    x_axis = ["Seq Mech Time", "Par Mech Time"]
    y_axis = [round(endSeq - startSeq, 4), round(end - start, 4)]
    ind = np.arange(len(x_axis))
    print("Graph shows the times for Paralell and Sequential Mechanisms")
    # NOTE(review): no plt.show() here in the original; the figure is built but
    # never displayed unless the caller shows it.
    plt.bar(ind, y_axis)
    plt.xticks(ind, x_axis)
# Run the benchmark only when executed as a script, not on import.
if __name__ == "__main__":
    main()
|
import os
import re
import smtplib
import ssl
from contextlib import contextmanager
from datetime import datetime
from email.headerregistry import Address
from email.message import EmailMessage
from email.utils import format_datetime, make_msgid
from typing import Any, Dict, Generator, List, Optional, Tuple, Union
from lxml import html as lxml_html # type: ignore
from lxml.html.clean import clean_html # type: ignore
from ...shared.env import is_truthy
from ...shared.exceptions import ActionException
# Map of failed recipient address -> (SMTP error code, error message),
# mirroring what smtplib's sendmail/send_message return.
SendErrors = Dict[str, Tuple[int, bytes]]
class ConnectionSecurity:
    """constants for email connection ssl/tls"""

    NONE = "NONE"
    SSLTLS = "SSL/TLS"
    STARTTLS = "STARTTLS"

    @classmethod
    def list(cls) -> List[str]:
        """Return the values of all public, non-callable class attributes."""
        values = []
        for attr in dir(cls):
            if attr.startswith("_"):
                continue
            value = getattr(cls, attr)
            if not callable(value):
                values.append(value)
        return values
class EmailSettings:
    """SMTP configuration read from environment variables at import time."""

    # SMTP server coordinates; default port 465 matches implicit SSL/TLS
    host: str = os.environ.get("EMAIL_HOST", "localhost")
    port: int = int(os.environ.get("EMAIL_PORT", "465"))
    # credentials; login is only attempted when both are non-empty
    user: str = os.environ.get("EMAIL_HOST_USER", "")
    password: str = os.environ.get("EMAIL_HOST_PASSWORD", "")
    # one of ConnectionSecurity's values: "NONE", "STARTTLS" or "SSL/TLS"
    connection_security: str = os.environ.get("EMAIL_CONNECTION_SECURITY", "SSL/TLS")
    # socket timeout for the SMTP connection, in seconds
    timeout: int = int(os.environ.get("EMAIL_TIMEOUT", "5"))
    # when true, the SSL context skips server-certificate verification
    accept_self_signed_certificate: bool = is_truthy(
        os.environ.get("EMAIL_ACCEPT_SELF_SIGNED_CERTIFICATE", "false")
    )
    default_from_email = os.environ.get("DEFAULT_FROM_EMAIL", "noreply@example.com")

    @classmethod
    def check_settings(cls) -> None:
        """Raise ActionException if the configured connection security is unknown."""
        if cls.connection_security not in ConnectionSecurity.list():
            raise ActionException(
                'Email-configuration: Choose one of "NONE", "STARTTLS" or "SSL/TLS" for EMAIL_CONNECTION_SECURITY environment variable'
            )
# Validate the configuration once at import time (fail fast on a bad env).
EmailSettings.check_settings()
class EmailMixin:
@staticmethod
def check_email(email: str) -> bool:
"""returns True with valid email, else False"""
email_regex = r"[A-Z0-9._+\-ÄÖÜ]+@[A-Z0-9.\-ÄÖÜ]+\.[A-ZÄÖÜ]{2,}"
return bool(re.fullmatch(email_regex, email, flags=(re.IGNORECASE)))
@staticmethod
def get_ssl_default_context() -> ssl.SSLContext:
if EmailSettings.accept_self_signed_certificate:
return ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
else:
return ssl.create_default_context()
@staticmethod
@contextmanager
def get_mail_connection() -> Generator[
Union[smtplib.SMTP, smtplib.SMTP_SSL], None, None
]:
connection: Optional[Union[smtplib.SMTP, smtplib.SMTP_SSL]] = None
try:
if EmailSettings.connection_security == ConnectionSecurity.SSLTLS:
connection = smtplib.SMTP_SSL(
EmailSettings.host,
EmailSettings.port,
context=EmailMixin.get_ssl_default_context(),
timeout=EmailSettings.timeout,
)
elif EmailSettings.connection_security == ConnectionSecurity.STARTTLS:
connection = smtplib.SMTP( # type: ignore
EmailSettings.host,
EmailSettings.port,
timeout=EmailSettings.timeout,
)
connection.starttls(context=EmailMixin.get_ssl_default_context())
else:
connection = smtplib.SMTP( # type: ignore
EmailSettings.host,
EmailSettings.port,
timeout=EmailSettings.timeout,
)
if EmailSettings.user and EmailSettings.password:
connection.login(EmailSettings.user, EmailSettings.password)
yield connection
finally:
if connection:
connection.close()
    @staticmethod
    def send_email(
        client: Union[smtplib.SMTP, smtplib.SMTP_SSL],
        from_: Union[str, Address],
        to: Union[str, List[str]],
        subject: str,
        content: str,
        contentplain: str = "",
        reply_to: str = "",
        html: bool = True,
    ) -> SendErrors:
        """
        Construct and send the email on the given connection.
        Default is a html-email with 'html' set to True.
        Then 'content' is expected to be the html-body of the mail
        and 'contentplain' the alternative plaintext.
        If 'contentplain' is an empty string, the method will build a
        plaintext-content from 'content'. If 'contentplain' is
        None, the client will send a html-only mail.
        With 'html' set to False, 'content' will be used as plaintext.
        Return value: The method only returns, if the mail was sent to
        minimum 1 recipient. Otherwise the method throws an exception.
        The return value is a dict of errors, where the key is a
        recipients email address and the value a tuple of SMTP-error-code
        and error message.
        You only have to check the methods return code, if there
        could be more than one recipient in the to-address of an email,
        see smtplib's method sendmail.
        """
        message = EmailMessage()
        if html:
            if contentplain == "":
                # derive the plaintext alternative from the html body
                tree = lxml_html.fromstring(content)
                contentplain = clean_html(tree).text_content().strip()
            if contentplain:
                message.set_content(contentplain)
            # also reached when contentplain is None -> html-only mail
            message.add_alternative(content, subtype="html")
        else:
            message.set_content(content) if content else message.set_content(
                contentplain
            )
        message["From"] = from_
        message["To"] = to
        message.preamble = "You will not see this in a MIME-aware mail reader.\n"
        message.add_header("Subject", subject)
        message.add_header("Date", format_datetime(datetime.now()))
        message.add_header("Message-ID", make_msgid(domain=EmailSettings.host))
        if reply_to:
            message.add_header("Reply-To", reply_to)
        # raises if every recipient was refused; partial failures come back
        # in the returned dict (see smtplib.SMTP.send_message)
        return client.send_message(message)
@staticmethod
def send_email_safe(
client: Union[smtplib.SMTP, smtplib.SMTP_SSL],
logger: Any,
from_: Union[str, Address],
to: Union[str, List[str]],
subject: str,
content: str,
contentplain: str = "",
reply_to: str = "",
html: bool = True,
) -> Tuple[bool, SendErrors]:
try:
return True, EmailMixin.send_email(
client, from_, to, subject, content, contentplain, reply_to, html
)
except smtplib.SMTPRecipientsRefused as e:
logger.error(f"SMTPRecipientsRefused: {str(e)}")
return False, {}
except smtplib.SMTPServerDisconnected as e:
logger.error(f"SMTPServerDisconnected: {str(e)}")
return False, {}
except smtplib.SMTPDataError as e:
logger.error(f"SMTPDataError: {str(e)}")
return False, {}
return True, {}
|
from __future__ import annotations
import datetime
import inspect
from typing import Any, ClassVar, Optional
from basic_notion.base import NotionItemBaseMetaclass, NotionItemBase
from basic_notion.schema import Schema
from basic_notion.property_schema import PropertySchema, TextListSchema
from basic_notion.attr import ItemAttrDescriptor
from basic_notion.property import PropertyList, TextProperty
from basic_notion.field import NotionField
from basic_notion.utils import deserialize_date
def _make_schema_for_block_cls(page_cls: type) -> Schema:
    """Collect every PropertySchema attribute of *page_cls* into a Schema."""
    members = inspect.getmembers(page_cls)
    schema_props = {
        attr_name: attr
        for attr_name, attr in members
        if isinstance(attr, PropertySchema)
    }
    return Schema(schema_props)
class NotionBlockMetaclass(NotionItemBaseMetaclass):
    """Metaclass for NotionBlock that adds `schema` to its attributes"""

    def __new__(cls, name: str, bases: tuple[type, ...], dct: dict):
        created = super().__new__(cls, name, bases, dct)
        # Derive the property schema from the freshly created class body.
        created.__notion_schema__ = _make_schema_for_block_cls(created)  # type: ignore
        return created
class NotionBlock(NotionItemBase, metaclass=NotionBlockMetaclass):
    """Base class for Notion block objects (paragraphs, headings, etc.)."""

    __notion_schema__: Schema = None  # type: ignore # defined in metaclass
    OBJECT_TYPE_KEY_STR = 'object'
    OBJECT_TYPE_STR = 'block'
    # The block-type discriminator ('paragraph', 'heading_1', ...);
    # every concrete subclass must set this.
    BLOCK_TYPE_STR: ClassVar[str]

    id: ItemAttrDescriptor[str] = ItemAttrDescriptor()
    type: ItemAttrDescriptor[str] = ItemAttrDescriptor(editable=False)
    archived: ItemAttrDescriptor[bool] = ItemAttrDescriptor(editable=True)
    # Creation/edit timestamps, both as parsed datetimes and raw strings.
    created_time: ItemAttrDescriptor[Optional[datetime.datetime]] = ItemAttrDescriptor(
        derived=True, get_converter=deserialize_date)
    created_time_str: ItemAttrDescriptor[str] = ItemAttrDescriptor(derived=True)
    last_edited_time: ItemAttrDescriptor[Optional[datetime.datetime]] = ItemAttrDescriptor(
        derived=True, get_converter=deserialize_date)
    last_edited_time_str: ItemAttrDescriptor[str] = ItemAttrDescriptor(derived=True)

    @property
    def _custom_data(self) -> dict:
        """The type-specific payload, i.e. ``data['<block type>']``."""
        return self.data[self.BLOCK_TYPE_STR]

    @classmethod
    @property
    def schema(cls) -> Schema:
        # NOTE(review): stacking @classmethod over @property relies on
        # 3.9-3.12 behavior and was removed in Python 3.13 — confirm the
        # supported interpreter range.
        return cls.__notion_schema__

    @classmethod
    def _make_inst_prop_dict(cls, kwargs: dict[str, Any]) -> dict:
        """Serialize the kwargs that match schema fields into property payloads."""
        data = {}
        for name, prop_sch in cls.schema.items():  # type: ignore
            if name not in kwargs:
                continue
            data[prop_sch.property_name] = prop_sch.make_prop_from_value(value=kwargs[name]).data
        return data

    @classmethod
    def _make_inst_dict(cls, kwargs: dict[str, Any]) -> dict:
        """Build the full API payload for a new block of this type."""
        data = super()._make_inst_dict(kwargs)
        data['type'] = cls.BLOCK_TYPE_STR
        data[cls.BLOCK_TYPE_STR] = data.get(cls.BLOCK_TYPE_STR, {})
        data[cls.BLOCK_TYPE_STR].update(cls._make_inst_prop_dict(kwargs))
        return data

    @classmethod
    def make_as_children_data(cls, block_id: str, **kwargs) -> dict:
        """Payload for appending a freshly made block as a child of *block_id*."""
        return {
            'block_id': block_id,
            'children': [cls.make(**kwargs).data],
        }
class TextListField(NotionField[TextListSchema, PropertyList[TextProperty]]):
    """Field descriptor for a list of rich-text properties on a block."""
    __slots__ = ()
    PROP_SCHEMA_CLS = TextListSchema
    PROP_CLS = TextProperty
    IS_LIST = True
class _BlockWithText(NotionBlock):
    # NOTE(review): appears unused — none of the concrete block classes
    # below inherit from it, and it duplicates ParagraphBlock exactly.
    # Possibly an abandoned attempt at a shared text-bearing base class.
    BLOCK_TYPE_STR = 'paragraph'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))
# Concrete block types.  Each sets BLOCK_TYPE_STR to the Notion API type
# name and declares the fields that type carries under that key.

class ParagraphBlock(NotionBlock):
    BLOCK_TYPE_STR = 'paragraph'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))

class Heading1Block(NotionBlock):
    BLOCK_TYPE_STR = 'heading_1'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))

class Heading2Block(NotionBlock):
    BLOCK_TYPE_STR = 'heading_2'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))

class Heading3Block(NotionBlock):
    BLOCK_TYPE_STR = 'heading_3'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))

class CalloutBlock(NotionBlock):
    BLOCK_TYPE_STR = 'callout'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))
    # TODO: icon

class BulletedListItemBlock(NotionBlock):
    BLOCK_TYPE_STR = 'bulleted_list_item'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))

class NumberedListItemBlock(NotionBlock):
    BLOCK_TYPE_STR = 'numbered_list_item'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))

class ToDoBlock(NotionBlock):
    BLOCK_TYPE_STR = 'to_do'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))
    # Checkbox state, stored under to_do.checked.
    checked: ItemAttrDescriptor[str] = ItemAttrDescriptor(key=(BLOCK_TYPE_STR, 'checked'), editable=True)

class ToggleBlock(NotionBlock):
    BLOCK_TYPE_STR = 'toggle'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))

class CodeBlock(NotionBlock):
    BLOCK_TYPE_STR = 'code'
    text: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))
    # NOTE(review): caption shares the same root key as text here — verify
    # this matches the API's code-block payload layout.
    caption: TextListField = TextListField(root_key=(BLOCK_TYPE_STR,))
    language: ItemAttrDescriptor[str] = ItemAttrDescriptor(key=(BLOCK_TYPE_STR, 'language'), editable=True)

class ChildPageBlock(NotionBlock):
    BLOCK_TYPE_STR = 'child_page'
    title: ItemAttrDescriptor[str] = ItemAttrDescriptor(key=(BLOCK_TYPE_STR, 'title'), editable=True)

class ChildDatabaseBlock(NotionBlock):
    BLOCK_TYPE_STR = 'child_database'
    title: ItemAttrDescriptor[str] = ItemAttrDescriptor(key=(BLOCK_TYPE_STR, 'title'), editable=True)
|
# Copyright (c) 2014 Dark Secret Software Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import logging
import six
logger = logging.getLogger(__name__)
class NoOpGroup(object):
    """Do-nothing stand-in for :class:`Group` that records no statistics."""

    def match(self):
        """Report a match; always succeeds, nothing is counted."""
        return True

    def mismatch(self, reason):
        """Report a mismatch; *reason* is discarded."""
        return False

    def check(self, value, reason):
        """Pass *value* straight through without recording anything."""
        return value
class Group(object):
    """Accumulates match/mismatch statistics for one named criteria group."""

    def __init__(self, name):
        self._name = name  # Group name
        self._match = 0
        self._mismatch = 0
        self._reasons = {}  # reason -> occurrence count

    def match(self):
        """Count one successful check.  Always returns True."""
        self._match += 1
        return True

    def mismatch(self, reason):
        """Count one failed check under *reason*.  Always returns False."""
        self._reasons[reason] = self._reasons.get(reason, 0) + 1
        self._mismatch += 1
        return False

    def check(self, value, reason):
        """Record *value*'s truthiness and return it as a bool."""
        return self.match() if value else self.mismatch(reason)
@six.add_metaclass(abc.ABCMeta)
class BaseDebugger(object):
    """Abstract interface for collecting match/mismatch debug statistics."""
    @abc.abstractmethod
    def reset(self):
        """Clear all collected state."""
        pass
    @abc.abstractmethod
    def get_group(self, name):
        """Return the statistics group registered under *name*."""
        pass
    @abc.abstractmethod
    def bump_counter(self, name, inc=1):
        """Increment the named counter by *inc*."""
        pass
    @abc.abstractmethod
    def get_debug_level(self):
        """Return the numeric debug level (0 means disabled)."""
        pass
class NoOpDebugger(BaseDebugger):
    """Debugger that ignores everything; used when debugging is disabled."""

    def __init__(self, *args, **kwargs):
        # One shared, stateless group serves every get_group() call.
        self.noop_group = NoOpGroup()

    def reset(self):
        """Nothing to clear."""

    def get_group(self, name):
        """Return the shared no-op group regardless of *name*."""
        return self.noop_group

    def bump_counter(self, name, inc=1):
        """Ignore counter updates."""

    def get_debug_level(self):
        """No-op debuggers always report level 0."""
        return 0
class DetailedDebugger(BaseDebugger):
    """Collects named Groups and counters for a single trigger definition."""

    def __init__(self, name, debug_level):
        super(DetailedDebugger, self).__init__()
        self._name = name
        self._debug_level = debug_level
        self.reset()

    def reset(self):
        """Drop every group and counter collected so far."""
        # If it's not a match or a mismatch it was a fatal error.
        self._groups = {}
        self._counters = {}

    def get_group(self, name):
        """Return the Group called *name*, creating it on first use."""
        return self._groups.setdefault(name, Group(name))

    def bump_counter(self, name, inc=1):
        """Increase counter *name* by *inc*, starting from 0."""
        self._counters[name] = self._counters.get(name, 0) + inc

    def get_debug_level(self):
        """Return the level this debugger was configured with."""
        return self._debug_level
class DebugManager(object):
    """Creates, caches and reports per-trigger-definition debuggers."""

    def __init__(self):
        self._debuggers = {}  # trigger name -> debugger instance

    def get_debugger(self, trigger_def):
        """Return the cached debugger for *trigger_def*, creating it on
        first use.  A None trigger_def maps to a full-debug placeholder."""
        name = "n/a"
        level = 2  # Default these unknowns to full debug.
        if trigger_def is not None:
            name = trigger_def.name
            level = trigger_def.debug_level
        debugger = self._debuggers.get(name)
        if not debugger:
            # Level 0 means debugging is off for this trigger.
            if level > 0:
                debugger = DetailedDebugger(name, level)
            else:
                debugger = NoOpDebugger()
            self._debuggers[name] = debugger
        return debugger

    def dump_group(self, debugger, group_name):
        """Log one group's pass/fail totals; per-reason counts at level > 1."""
        group = debugger.get_group(group_name)
        logger.info("%s Criteria: %d checks, %d passed" %
                    (group._name,
                     group._match + group._mismatch, group._match))
        if debugger.get_debug_level() > 1:
            for kv in group._reasons.items():
                logger.info("  - %s = %d" % kv)

    def dump_counters(self, debugger):
        """Log every counter the debugger has accumulated."""
        for kv in debugger._counters.items():
            logger.info("Counter \"%s\" = %d" % kv)

    def dump_debuggers(self):
        """Log stats for all debuggers with debugging enabled, then reset
        each one so counts start fresh for the next reporting window."""
        for debugger in self._debuggers.values():
            if debugger.get_debug_level() == 0:
                continue
            logger.info("---- Trigger Definition: %s ----" % debugger._name)
            for name in debugger._groups.keys():
                self.dump_group(debugger, name)
            self.dump_counters(debugger)
            debugger.reset()
            logger.info("----------------------------")
|
from pathlib import Path
from datetime import datetime
from Players.BasePlayers import BaseExItPlayer
from keras.models import load_model as load_keras_model
from Games.GameLogic import GameResult
from tqdm import tqdm, trange
import os.path
import re
from copy import deepcopy
# ******************** GENERAL ********************
def create_path_folders_if_needed(*args):
    """Create the folder chain ./args[0]/args[1]/... if it is missing.

    Returns the resulting relative path string (no trailing slash),
    e.g. create_path_folders_if_needed("Elo", "Game") -> "./Elo/Game".
    """
    path = "./" + "/".join(args)
    # exist_ok avoids the check-then-create race of the previous
    # Path.exists()/makedirs loop, and makedirs creates parents anyway.
    os.makedirs(path, exist_ok=True)
    return path
def write_ex_it_model_info(file, ex_it_algorithm):
    """Append the apprentice network's hyper-parameters to the open *file*."""
    apprentice = ex_it_algorithm.apprentice
    file.write("    optimizer = " + type(apprentice.optimizer).__name__ + "\n")
    file.write("    n_neurons = " + str(apprentice.n_neurons) + "\n")
    file.write("    n_layers = " + str(apprentice.n_layers) + "\n")
    file.write("    dropout_rate = " + str(apprentice.dropout_rate) + "\n")
def load_trained_models(game_class, raw_players, versions):
    """ Load trained model into the players.

    For every ExIt player, one new player instance is created per requested
    version, with that version's stored Keras model loaded into its
    apprentice.  Non-ExIt players are passed through unchanged.
    """
    players = []
    # Progress bar length: one tick per (ExIt player, version) pair.
    num_ex_it_players = len([1 for p in raw_players if isinstance(p, BaseExItPlayer)])
    progress_bar = tqdm(range(len(versions) * num_ex_it_players))
    progress_bar.set_description("load_trained_model")
    for p in raw_players:
        if isinstance(p, BaseExItPlayer):
            for v in versions:
                # Stored model files are 1-based; `versions` is 0-based.
                version = v+1
                new_player = p.new()
                new_player.__name__ = new_player.__name__ + "_" + str(version)
                trained_model = load_model(
                    game_name=game_class.__name__,
                    ex_it_algorithm=new_player.ex_it_algorithm,
                    iteration=str(version)
                )
                new_player.ex_it_algorithm.apprentice.set_model(trained_model)
                players.append(new_player)
                progress_bar.update(1)
        else:
            players.append(p)
    progress_bar.close()
    return players
# ******************** ELO ********************
def create_elo_folders(game_class):
    """ Create Elo folders and return the base path.
        Also ensures that no overwriting is taking place. """
    base_path = create_path_folders_if_needed("Elo", game_class.__name__)
    pgn_file = Path(base_path + "/tournament.pgn")
    if pgn_file.exists():
        raise Exception("'" + base_path + "/tournament.pgn' already exist. ")
    return base_path
def create_elo_meta_file(base_path, game_class, raw_players, num_matches, num_versions, randomness, total_train_time):
    """Write the Elo tournament's meta.txt (fails if it already exists)."""
    with open(base_path + "/meta.txt", 'x') as file:
        file.write("Datetime = " + str(datetime.now().strftime('%Y-%m-%d___%H:%M:%S')) + "\n")
        file.write("Game = " + game_class.__name__ + "\n")
        file.write("Randomness = " + str(randomness) + "\n")
        file.write("Number of versions = " + str(num_versions) + "\n")
        file.write("Number of matches = " + str(num_matches) + "\n")
        file.write("\n")
        file.write("Training time pr version:\n")
        # One comma-separated row of names, then one of their train times.
        file.write(", ".join(str(name) for name, _ in total_train_time) + "\n")
        file.write(", ".join(str(time) for _, time in total_train_time) + "\n")
        file.write("\n")
        file.write("Players: \n")
        for p in raw_players:
            file.write(" - " + p.__name__ + "\n")
def save_game_to_pgn(base_path, game_handler, p1, p2):
    """ Convert game match into pgn format and writes it to file """
    headers = [
        ("Game", game_handler.game_class.__name__),
        ("White", p1.__name__),
        ("Black", p2.__name__),
        ("Result", game_handler.result_text),
    ]
    with open(base_path + "/tournament.pgn", 'a') as file:
        for tag, value in headers:
            file.write("[" + tag + " \"" + str(value) + "\"]\n")
        file.write("\n")
        file.write(game_handler.move_text)
        # Three trailing newlines separate consecutive games.
        file.write("\n\n\n")
def read_ratings(game_class):
    """Parse ./Elo/<game>/ratings.txt into a list of per-player rating dicts.

    Returns a list of {player_name: {"elo": [...], "uncertainty-": [...],
    "uncertainty+": [...]}} entries, sorted by each player's best elo,
    descending.  Numbered ExIt model versions are merged under one base
    name, contributing one list element per version.
    """
    with open("./Elo/" + game_class.__name__ + "/ratings.txt", 'r') as file:
        tournament = {}
        lines = []
        for i, line in enumerate(file):
            # First line is the ratings-table header.
            if i == 0:
                continue
            words = line.split()
            if words[1].startswith("ExIt"):
                # The trailing number in the name is the model version.
                version = re.findall(r'\d+', str(words[1]))[-1]
                version = int(version)
            else:
                version = 1
            lines.append((version, words))
        # Make sure versions are added in order.
        lines.sort()
        for version, words in lines:
            def add_info(player_name, info):
                # Append this version's values to an existing entry, or
                # start a new entry with a copy of the parsed info.
                if player_name in tournament.keys():
                    for key, value in tournament[player_name].items():
                        tournament[player_name][key].append(info[key][0])
                else:
                    tournament[player_name] = deepcopy(info)
            info = {
                "elo": [float(words[2])],
                "uncertainty-": [float(words[3])],
                "uncertainty+": [-float(words[4])],
                #"games": [int(words[5])],
                #"score": [float(words[6][:-1])],
                #"oppo": [float(words[7])],
                #"draws": [float(words[8][:-1])]
            }
            if words[1].startswith("ExIt"):
                # This will remove the model number and one underline "_" from the name.
                player_name = words[1][0:-len(str(version))-1]
                add_info(player_name, info)
            else:
                player_name = words[1]
                add_info(player_name, info)
    # Convert to list and sort:
    tournament = list([(max(value["elo"]), {key: value}) for key, value in tournament.items()])
    # Custom sorting is required due to unreliable results from "sorted" and ".sort"...
    t3 = []
    for _ in range(len(tournament)):
        # Selection sort on best elo, descending.
        ind_max = -1
        val = float("-inf")
        for i, (elo, dic) in enumerate(tournament):
            if elo > val:
                ind_max = i
                val = elo
        t3.append(tournament[ind_max])
        tournament.pop(ind_max)
    tournament = [dic for elo, dic in t3]
    return tournament
def load_train_version_time(game_class, raw_players):
    """Return [(player_name, time_per_version_or_None)] for *raw_players*.

    For each ExIt player the per-version training time is read from line 4
    of its training meta.txt; non-ExIt players get None.
    """
    version_times = []
    for p in raw_players:
        vt = None
        if isinstance(p, BaseExItPlayer):
            base_path = "./Trained_models/" + game_class.__name__ + "/" + p.__name__
            with open(base_path + "/meta.txt", 'r') as file:
                lines = [line for line in file]
            # Line 4 is "Training time pr version = <float>".  Raw string
            # so "\d" is a regex class, not an (invalid) string escape.
            vt = re.findall(r"\d+\.\d+", str(lines[3]))[0]
        version_times.append((p.__name__, vt))
    return version_times
def load_elo_version_time(game_class):
    """Read per-player training times back from the Elo meta file.

    Line 8 of meta.txt holds the comma-separated player names and line 9
    their times; the literal string "None" maps to 0.
    """
    path = "./Elo/" + game_class.__name__ + "/meta.txt"
    with open(path, 'r') as file:
        lines = file.readlines()
    players = lines[7].rstrip().split(", ")
    times = lines[8].rstrip().split(", ")
    return {
        p: (0 if vt == "None" else float(vt))
        for p, vt in zip(players, times)
    }
# ******************** TRAINING ********************
def save_model(model, base_path, version):
    """ Store the model as a HDF5 file """
    target = "{}/{}.h5".format(base_path, version)
    model.save(target)
def load_model(game_name, ex_it_algorithm, iteration):
    """Load a stored apprentice model, wiring in its custom loss if used."""
    path = "./Trained_models/{}/{}/{}.h5".format(
        game_name, ex_it_algorithm.__name__, iteration)
    # Hard-coding the custom loss.
    custom_objects = None
    if ex_it_algorithm.apprentice.use_custom_loss:
        from ExIt.Apprentice.Nn import custom_loss
        custom_objects = {'custom_loss': custom_loss}
    return load_keras_model(path, custom_objects=custom_objects)
def create_training_folders(game_class, p):
    """Create ./Trained_models/<game>/<player>, refusing to overwrite."""
    base_path = create_path_folders_if_needed("Trained_models", game_class.__name__, p.__name__)
    meta_file = Path(base_path + "/meta.txt")
    if meta_file.exists():
        raise Exception("'" + base_path + "/meta.txt' already exist. ")
    return base_path
def create_training_meta_file(base_path, p: BaseExItPlayer, search_time, training_timer):
    """Write training meta.txt for player *p* (fails if it already exists)."""
    alg = p.ex_it_algorithm
    per_version = training_timer.time_limit / training_timer.num_versions
    with open(base_path + "/meta.txt", 'x') as file:
        file.write("Datetime = " + str(datetime.now().strftime('%Y-%m-%d___%H:%M:%S')) + "\n")
        file.write("Training time = " + str(training_timer.time_limit) + "\n")
        file.write("Number of versions = " + str(training_timer.num_versions) + "\n")
        file.write("Training time pr version = " + str(per_version) + "\n")
        file.write("Search_time = " + str(search_time) + "\n")
        file.write("\n")
        file.write("Policy = " + str(alg.policy.value) + "\n")
        file.write("State branch degree = " + str(alg.state_branch_degree) + "\n")
        file.write("Dataset type = " + type(alg.memory).__name__ + "\n")
        file.write("\n")
        file.write(p.__name__ + "\n")
        write_ex_it_model_info(file, alg)
# ******************** COMPARISON 1v1 ********************
def get_comparison_base_path(folder):
    """Return the results directory path for one 1v1 comparison run."""
    return "/".join(['./Comparison1v1', folder, ''])
def create_comparison_folders():
    """Create a timestamped Comparison1v1 results folder; return its path."""
    stamp = str(datetime.now().strftime('%Y-%m-%d___%H-%M-%S'))
    return create_path_folders_if_needed("Comparison1v1", stamp)
def create_comparison_meta_file(game_class, base_path, players, num_matches,
                                training_timer, search_time, versions=None):
    """Write the comparison run's meta.txt describing setup and players."""
    with open(base_path + "/meta.txt", 'x') as file:
        file.write("Game = " + str(game_class.__name__) + "\n")
        file.write("Date time = " + str(datetime.now().strftime('%Y-%m-%d___%H:%M:%S')) + "\n")
        file.write("Number of matches = " + str(num_matches) + "\n")
        # Timing details only when both a timer and a search time exist.
        if (training_timer, search_time) != (None, None):
            per_version = training_timer.time_limit / training_timer.num_versions
            file.write("Training time = " + str(training_timer.time_limit) + "\n")
            file.write("Number of versions = " + str(training_timer.num_versions) + "\n")
            file.write("Time pr version = " + str(per_version) + "\n")
            file.write("Search time = " + str(search_time) + "\n")
        if versions is not None:
            file.write("versions = " + str([v + 1 for v in versions]) + "\n")
        file.write("\n")
        last = len(players) - 1
        for i, p in enumerate(players):
            file.write(p.__name__ + "\n")
            if isinstance(p, BaseExItPlayer):
                write_ex_it_model_info(file, p.ex_it_algorithm)
            if i != last:
                file.write("\n")
def create_comparison_files(base_path, players):
    """Create one empty result csv (header only) per player; no overwrite."""
    header = "iteration,win,loss,draw" + "\n"
    for p in players:
        with open(base_path + "/" + p.__name__ + ".csv", 'x') as file:
            file.write(header)
def save_comparison_result(base_path, results: [GameResult], version, p):
    """ Saves results to disk.
    NB: This function assumes that results_list is ordered by player index """
    row = str(version) + "," + GameResult.get_string_results(results) + "\n"
    with open(base_path + "/" + p.__name__ + ".csv", 'a') as file:
        file.write(row)
|
import numpy as np
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.core.cache import cache
from django.views.decorators.cache import cache_page
import django
from svweb.models import LebOrigin
from svweb.plotting_utils import view_wave
from matplotlib.figure import Figure
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from sigvisa.plotting.event_heatmap import EventHeatmap
from sigvisa import Sigvisa
from sigvisa.database.dataset import *
import sigvisa.utils.geog as geog
from sigvisa.signals.io import fetch_waveform
from sigvisa.source.event import get_event
from sigvisa.graph.sigvisa_graph import predict_phases_sta
from sigvisa.models.ttime import tt_predict
import hashlib
def event_view(request, evid):
    """Show one LEB event with its associated arrival detections.

    Detections are listed in descending SNR order; ``?ss_only=t`` restricts
    the table to single-station (non-array) sites.
    """
    ss_only = request.GET.get('ss_only', 'false').lower().startswith('t')
    ev = LebOrigin.objects.get(evid=evid)
    s = Sigvisa()
    cursor = s.dbconn.cursor()
    sql_query = "select iarr.sta, iarr.arid, iarr.time, iarr.deltim, iarr.azimuth, iarr.delaz, iarr.slow, iarr.delslo, iarr.snr, iarr.iphase, iarr.amp, iarr.per from leb_arrival iarr, leb_assoc iass, leb_origin ior where iarr.snr > 0 and iarr.arid=iass.arid and iass.orid=ior.orid and ior.evid=%d order by iarr.snr desc" % (
        int(evid),)
    cursor.execute(sql_query)
    rawdets = cursor.fetchall()
    dets = []
    for rd in rawdets:
        try:
            sta = rd[0]
            try:
                a = s.earthmodel.site_info(sta, 0)
                site_ll = tuple(a[0:2])
            except KeyError:
                # Unknown station: skip this detection.
                continue
            # site_info[3] == 1 appears to mark an array station — confirm.
            site_type = 'ar' if s.earthmodel.site_info(sta, 0)[3] == 1 else 'ss'
            if ss_only and site_type != 'ss':
                continue
            ev_ll = (ev.lon, ev.lat)
            dist = geog.dist_km(site_ll, ev_ll)
            azimuth = geog.azimuth(site_ll, ev_ll)
            phase = rd[DET_PHASE_COL]
            det = {
                'sta': sta,
                'arr': site_type,
                'phase': phase,
                'snr': rd[DET_SNR_COL],
                'dist': dist,
                'time': rd[DET_TIME_COL],
                'amp': rd[DET_AMP_COL],
                'det_azi': rd[DET_AZI_COL],
                'true_azi': azimuth,
                'slo': rd[DET_SLO_COL],
            }
            dets.append(det)
        except:
            # NOTE(review): this bare except re-raises, which makes the
            # 'continue' below unreachable — presumably one of the two
            # statements was meant to be removed.
            raise
            continue
    return render_to_response('svweb/event.html', {
        'ev': ev,
        'dets': dets,
    }, context_instance=RequestContext(request))
def event_context_img_view(request, evid):
    """Render a 100-degree-wide heatmap PNG centered on the event location."""
    ev = LebOrigin.objects.get(evid=evid)
    sigvisa = Sigvisa()
    heatmap = EventHeatmap(f=None, calc=False, center=(ev.lon, ev.lat), width=100)
    heatmap.add_stations(sigvisa.sitenames)
    heatmap.set_true_event(ev.lon, ev.lat)
    figure = Figure(figsize=(6, 6), dpi=144)
    figure.patch.set_facecolor('white')
    ax = figure.add_subplot(1, 1, 1)
    heatmap.plot(axes=ax, colorbar=False, offmap_station_arrows=False)
    figure.subplots_adjust(bottom=0.05, top=1, left=0, right=0.9)
    response = django.http.HttpResponse(content_type='image/png')
    FigureCanvas(figure).print_png(response)
    return response
def event_wave_view(request, evid):
    """Plot the filtered waveform around an event's predicted phase
    arrivals at one station/channel."""
    sta = str(request.GET.get('sta', None))
    chan = str(request.GET.get('chan', None))
    filter_str = str(request.GET.get('filter_str', None))
    ev = LebOrigin.objects.get(evid=evid)
    event = get_event(evid=ev.evid)
    phases = predict_phases_sta(ev=event, sta=sta, phases="auto")
    # Predicted arrival time per phase: event time + modeled travel time.
    phase_atimes = dict([(phase, event.time + tt_predict(event=event, sta=sta, phase=phase)) for phase in phases])
    # Window: 10 s before the earliest arrival to 200 s after the latest.
    # NOTE(review): np.min/np.max over dict .values() assumes Python 2,
    # where values() returns a list — confirm before porting to Python 3.
    stime = np.min(phase_atimes.values()) - 10
    etime = np.max(phase_atimes.values()) + 200
    wave = fetch_waveform(station=sta, chan=chan, stime=stime, etime=etime).filter(filter_str)
    return view_wave(request=request, wave=wave, color='black', linewidth=1.0, plot_predictions=phase_atimes)
def events_in_region(left_lon, right_lon, top_lat, bottom_lat, start_time, end_time, min_mb, max_mb, min_depth, max_depth, detected_at_station=None):
    """Query LEB origins inside a lon/lat/time/magnitude/depth box.

    Results are memoized in the Django cache for a year, keyed by the SHA1
    of the SQL text.  left_lon > right_lon selects a box wrapping the
    +/-180 meridian.

    SECURITY NOTE(review): the query is built with % string interpolation;
    detected_at_station is a caller-supplied string and is not escaped, so
    this is SQL-injectable — should use DB-API parameter binding.
    """
    s = Sigvisa()
    # get all events arriving at this station in an arbitrary three-month
    # period (which happens to be March-May 2009)
    cursor = s.dbconn.cursor()
    if left_lon < right_lon:
        lon_cond = "lebo.lon between %f and %f" % (left_lon, right_lon)
    else:
        # Region crosses the dateline.
        lon_cond = "( (lon > %f) or (lon < %f) )" % (left_lon, right_lon)
    lat_cond = "lebo.lat between %f and %f" % (bottom_lat, top_lat)
    if not detected_at_station:
        sql_query = "select lebo.lon, lebo.lat, lebo.depth, lebo.time, lebo.mb, lebo.evid from leb_origin lebo where lebo.time between %f and %f and %s and %s and lebo.mb between %f and %f and lebo.depth between %f and %f" % (start_time, end_time, lon_cond, lat_cond, min_mb, max_mb, min_depth, max_depth)
    else:
        sql_query = "select distinct lebo.lon, lebo.lat, lebo.depth, lebo.time, lebo.mb, lebo.evid from leb_origin lebo, leb_assoc leba, leb_arrival l where lebo.time between %f and %f and %s and %s and lebo.mb between %f and %f and lebo.depth between %f and %f and l.sta='%s' and l.arid=leba.arid and leba.orid=lebo.orid" % (start_time, end_time, lon_cond, lat_cond, min_mb, max_mb, min_depth, max_depth, detected_at_station)
    # NOTE(review): hashlib.sha1 on a str works on Python 2 only; Python 3
    # would require encoding the query first.
    cache_key = hashlib.sha1(sql_query).hexdigest()
    evs = cache.get(cache_key)
    if not evs:
        cursor.execute(sql_query)
        evs = cursor.fetchall()
        # Cache essentially forever (one year).
        cache.set(cache_key, evs, 60 * 60 * 24 * 365)
    cursor.close()
    return evs
def regional_event_view(request):
    """List LEB events matching the region/time/magnitude query parameters.

    NOTE(review): Python 2 module (see the print statement below).
    """
    left_lon = float(request.GET.get('left_lon', -180))
    right_lon = float(request.GET.get('right_lon', 180))
    top_lat = float(request.GET.get('top_lat', 90))
    bottom_lat = float(request.GET.get('bottom_lat', -90))
    start_time = float(request.GET.get('start_time', 1238889600))
    end_time = float(request.GET.get('end_time', 1245456000))
    min_mb = float(request.GET.get('min_mb', 2.5))
    max_mb = float(request.GET.get('max_mb', 99))
    min_depth = float(request.GET.get('min_depth', 0))
    max_depth = float(request.GET.get('max_depth', 999))
    detected_at_station = str(request.GET.get('detected_at_station', ''))
    evs = events_in_region(left_lon, right_lon, top_lat, bottom_lat, start_time, end_time, min_mb, max_mb, min_depth, max_depth, detected_at_station=detected_at_station)
    # NOTE(review): leftover debug output — consider removing or logging.
    print left_lon, right_lon, top_lat
    return render_to_response('svweb/event_region.html', {
        'evs': evs,
        'left_lon': left_lon,
        'right_lon': right_lon,
        'top_lat': top_lat,
        'bottom_lat': bottom_lat,
        'start_time': start_time,
        'end_time': end_time,
        'min_mb': min_mb,
        'max_mb': max_mb,
        'min_depth': min_depth,
        'max_depth': max_depth,
        'detected_at_station': detected_at_station,
        'n_evs': len(evs),
    }, context_instance=RequestContext(request))
@cache_page(60 * 60)
def regional_event_image_view(request):
    """Render a PNG heatmap of all events matching the query parameters."""
    def _float_param(name, default):
        # All numeric filters arrive as GET strings.
        return float(request.GET.get(name, default))

    left_lon = _float_param('left_lon', -180)
    right_lon = _float_param('right_lon', 180)
    top_lat = _float_param('top_lat', 90)
    bottom_lat = _float_param('bottom_lat', -90)
    start_time = _float_param('start_time', 1238889600)
    end_time = _float_param('end_time', 1245456000)
    min_mb = _float_param('min_mb', 2.5)
    max_mb = _float_param('max_mb', 99)
    min_depth = _float_param('min_depth', 0)
    max_depth = _float_param('max_depth', 999)
    detected_at_station = str(request.GET.get('detected_at_station', ''))

    heatmap = EventHeatmap(f=None, calc=False, top_lat=top_lat,
                           bottom_lat=bottom_lat, left_lon=left_lon,
                           right_lon=right_lon)
    heatmap.add_stations(Sigvisa().sitenames)
    evs = events_in_region(left_lon, right_lon, top_lat, bottom_lat,
                           start_time, end_time, min_mb, max_mb,
                           min_depth, max_depth,
                           detected_at_station=detected_at_station)
    heatmap.add_events(evs)

    figure = Figure(dpi=288)
    figure.patch.set_facecolor('white')
    ax = figure.add_subplot(1, 1, 1)
    heatmap.plot(axes=ax, event_alpha=0.2, colorbar=False,
                 offmap_station_arrows=False, label_stations=False)
    figure.subplots_adjust(bottom=0.05, top=1, left=0, right=0.9)
    response = django.http.HttpResponse(content_type='image/png')
    FigureCanvas(figure).print_png(response, bbox_inches="tight")
    return response
|
from utils import *
from ImageSplit import *
from keras.utils.np_utils import to_categorical
from keras.applications import InceptionV3
from keras import models, layers
#Normalize (Rescale data to same range)
# X_train/X_test come from the ImageSplit star-import; presumably 0-255
# pixel values — TODO confirm against ImageSplit.
X_train, X_test = X_train/255, X_test/255
#one hot encode the labels
# NOTE(review): only y_train is one-hot encoded here; y_test is left as-is.
y_train = to_categorical(y_train, num_classes= 2)
def fine_tuned_inception():
    """Build a binary classifier on top of ImageNet-pretrained InceptionV3.

    All but the last four pretrained layers are frozen; a dense head with
    heavy dropout and a 2-way softmax is stacked on top.
    """
    base = InceptionV3(
        include_top=False,
        weights='imagenet',
        input_shape=(IMAGE_SIZE, IMAGE_SIZE, 3),
    )
    # Freeze everything except the last four layers for fine-tuning.
    for frozen_layer in base.layers[:-4]:
        frozen_layer.trainable = False
    classifier = models.Sequential()
    classifier.add(base)
    classifier.add(layers.Flatten())
    classifier.add(layers.Dense(1024, activation='relu'))
    classifier.add(layers.Dropout(0.8))
    classifier.add(layers.Dense(2, activation='softmax'))
    return classifier
#InceptionV3 model
#Accuracy ==> 84.2
#Precision ==> 83%, 86%
#Recall ==> 89%, 78%
|
#filename show.py
#coding:utf-8
from django.template.loader import get_template
from django.template import Context
from django.http import HttpResponse, Http404
from django.shortcuts import render_to_response
import datetime
from django.utils.translation import ugettext
import urllib
from xml.dom import minidom
def index(request):
    """Plain-text landing view for the show app."""
    return HttpResponse('this is show')
def my_image(request):
    """Serve a fixed JPEG from disk.

    NOTE(review): the absolute Windows path is machine-specific; consider
    moving it into settings.
    """
    # 'with' guarantees the handle is closed (the original leaked it).
    with open("F:/Django-1.5.1/mysite/mysite/2000.jpg", "rb") as f:
        image_data = f.read()
    return HttpResponse(image_data, mimetype="image/jpg")
def my_tran(request):
    """Return a translated greeting (exercises ugettext)."""
    return HttpResponse(ugettext("Welcome to my site."))
def current_time(request):
    """Render current_time.html with the current timestamp.

    The previous version also rendered the template by hand with a
    misspelled 'currxent_time' context key and discarded the result;
    that dead code (and the commented-out return) is removed.
    """
    now = datetime.datetime.now()
    return render_to_response('current_time.html', {'current_time': now})
def showbase(request):
    """Render the base show page with the current time."""
    now = datetime.datetime.now()
    return render_to_response('show.html', {'current_time': now})
def weatherinfo(request):
    """Fetch Wuhan weather from the webxml.com.cn webservice endpoint and
    return the raw XML response lines.

    NOTE(review): Python 2 urllib; the Chinese city name in the URL is not
    percent-encoded — this relies on the server accepting raw bytes.
    """
    page = urllib.urlopen("http://www.webxml.com.cn/webservices/weatherwebservice.asmx/getWeatherbyCityName?theCityName=武汉")
    body = page.readlines()
    page.close()
    return HttpResponse(body)
    #return render_to_response('weather.html',{'data': body})
def javaplayer(request):
    """Render the embedded Java player page."""
    return render_to_response('javaplayer.html')
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import time
from oslo.config import cfg
from cinder.openstack.common.gettextutils import _
from cinder.openstack.common import log as logging
from cinder.openstack.common import timeutils
# Operator toggle: whether tasks flagged external_process_ok still run in
# this process.
periodic_opts = [
    cfg.BoolOpt('run_external_periodic_tasks',
                default=True,
                help=('Some periodic tasks can be run in a separate process. '
                      'Should we run them here?')),
]
CONF = cfg.CONF
CONF.register_opts(periodic_opts)
LOG = logging.getLogger(__name__)
# Fallback spacing (seconds) for tasks that do not specify one.
DEFAULT_INTERVAL = 60.0
class InvalidPeriodicTaskArg(Exception):
    # Raised when the removed 'ticks_between_runs' argument is used.
    message = _("Unexpected argument for periodic task creation: %(arg)s.")
def periodic_task(*args, **kwargs):
    """Decorator to indicate that a method is a periodic task.

    This decorator can be used in two ways:

    1. Without arguments '@periodic_task', this will be run on every cycle
       of the periodic scheduler.

    2. With arguments:
       @periodic_task(spacing=N [, run_immediately=[True|False]])
       this will be run on approximately every N seconds. If this number is
       negative the periodic task will be disabled. If the run_immediately
       argument is provided and has a value of 'True', the first run of the
       task will be shortly after task scheduler starts. If
       run_immediately is omitted or set to 'False', the first time the
       task runs will be approximately N seconds after the task scheduler
       starts.
    """
    def decorator(func):
        # Reject the pre-'spacing' calling convention outright.
        if 'ticks_between_runs' in kwargs:
            raise InvalidPeriodicTaskArg(arg='ticks_between_runs')

        func._periodic_task = True
        func._periodic_external_ok = kwargs.pop('external_process_ok', False)
        # Tasks marked external-ok are disabled in-process unless the
        # operator asked for them to run here.
        if func._periodic_external_ok and not CONF.run_external_periodic_tasks:
            func._periodic_enabled = False
        else:
            func._periodic_enabled = kwargs.pop('enabled', True)

        func._periodic_spacing = kwargs.pop('spacing', 0)
        func._periodic_immediate = kwargs.pop('run_immediately', False)
        # An immediate task starts with no recorded last run.
        func._periodic_last_run = (None if func._periodic_immediate
                                   else timeutils.utcnow())
        return func

    # Bare usage ('@periodic_task') passes the function itself as the only
    # positional argument; parenthesized usage supplies kwargs and the
    # interpreter then applies the returned decorator to the function.
    return decorator if kwargs else decorator(args[0])
class _PeriodicTasksMeta(type):
    """Metaclass that collects methods decorated with @periodic_task into
    per-class registries (_periodic_tasks, _periodic_last_run,
    _periodic_spacing) consumed by PeriodicTasks.run_periodic_tasks."""

    def __init__(cls, names, bases, dict_):
        """Metaclass that allows us to collect decorated periodic tasks."""
        super(_PeriodicTasksMeta, cls).__init__(names, bases, dict_)
        # NOTE(sirp): if the attribute is not present then we must be the base
        # class, so, go ahead an initialize it. If the attribute is present,
        # then we're a subclass so make a copy of it so we don't step on our
        # parent's toes.
        try:
            cls._periodic_tasks = cls._periodic_tasks[:]
        except AttributeError:
            cls._periodic_tasks = []
        try:
            cls._periodic_last_run = cls._periodic_last_run.copy()
        except AttributeError:
            cls._periodic_last_run = {}
        try:
            cls._periodic_spacing = cls._periodic_spacing.copy()
        except AttributeError:
            cls._periodic_spacing = {}
        # Only this class's own attributes are scanned; tasks inherited from
        # a parent were already registered when the parent class was created
        # and arrive via the copies made above.
        for value in cls.__dict__.values():
            if getattr(value, '_periodic_task', False):
                task = value
                name = task.__name__
                if task._periodic_spacing < 0:
                    LOG.info(_('Skipping periodic task %(task)s because '
                               'its interval is negative'),
                             {'task': name})
                    continue
                if not task._periodic_enabled:
                    LOG.info(_('Skipping periodic task %(task)s because '
                               'it is disabled'),
                             {'task': name})
                    continue
                # A periodic spacing of zero indicates that this task should
                # be run every pass.  NOTE: this mutates the attribute on the
                # function object itself, not just the class registry.
                if task._periodic_spacing == 0:
                    task._periodic_spacing = None
                cls._periodic_tasks.append((name, task))
                cls._periodic_spacing[name] = task._periodic_spacing
                cls._periodic_last_run[name] = task._periodic_last_run
class PeriodicTasks(object):
    # NOTE(review): Python 2 style metaclass declaration; under Python 3 this
    # attribute has no effect and `class PeriodicTasks(metaclass=...)` would
    # be required instead — confirm the target interpreter.
    __metaclass__ = _PeriodicTasksMeta

    def run_periodic_tasks(self, context, raise_on_error=False):
        """Tasks to be run at a periodic interval.

        Runs every registered task that is due (or nearly due, within 0.2s)
        and returns the number of seconds the caller may idle before the
        next task becomes due.
        """
        idle_for = DEFAULT_INTERVAL
        for task_name, task in self._periodic_tasks:
            full_task_name = '.'.join([self.__class__.__name__, task_name])
            now = timeutils.utcnow()
            spacing = self._periodic_spacing[task_name]
            last_run = self._periodic_last_run[task_name]
            # If a periodic task is _nearly_ due, then we'll run it early
            if spacing is not None and last_run is not None:
                due = last_run + datetime.timedelta(seconds=spacing)
                if not timeutils.is_soon(due, 0.2):
                    idle_for = min(idle_for, timeutils.delta_seconds(now, due))
                    continue
            if spacing is not None:
                idle_for = min(idle_for, spacing)
            # locals() supplies the %(full_task_name)s mapping for the message.
            LOG.debug(_("Running periodic task %(full_task_name)s"), locals())
            self._periodic_last_run[task_name] = timeutils.utcnow()
            try:
                task(self, context)
            except Exception as e:
                if raise_on_error:
                    raise
                LOG.exception(_("Error during %(full_task_name)s: %(e)s"),
                              locals())
            # Yield control so other threads/greenthreads can run between tasks.
            time.sleep(0)
        return idle_for
|
# SQLAlchemy smoke test: create an in-memory SQLite engine (with SQL
# echoing enabled) and report the installed library version.
import sqlalchemy
from sqlalchemy import create_engine

engine = create_engine('sqlite:///:memory:', echo=True)
print(sqlalchemy.__version__)
|
#!/usr/bin/env python
# coding:utf-8
"""
Author: coreyjones --<>
Purpose: Download YouTube Audio and Video Files.
Created: 12/12/2018
"""
import logging
import os

import pafy

url = "https://www.youtube.com/watch?v=SLsTskih7_I"  # YouTube video link
# dlcheck: set to True to actually download; False performs a dry run.
dlcheck = False
video = pafy.new(url)
downloadFolder = 'C:\\youtube_videoDownlods'
loggingFile = '%s\\Program_Log' % (downloadFolder)

# Video metadata fetched by pafy from the video page.
vidTitle = video.title
vidRating = video.rating
vidViews = video.viewcount
vidAuthor = video.author
vidLength = video.length
vidDuration = video.duration
vidLikes = video.likes
vidDislikes = video.dislikes
vidDesc = video.description

# Create the debug log file on first run.
if not os.path.exists(loggingFile):
    swap = open(loggingFile, mode='w+')
    swap.write('BEGIN DEBUG LOG' + '\n\n\n\n')
    swap.close()
# BUG FIX: the original failure message claimed the *download folder* was
# missing, but the assertion actually checks the logging file path.
assert os.path.exists(loggingFile), 'Logging file is missing; doesn\'t exist: ' + str(loggingFile)

logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
# To log to a file instead of the console:
# logging.basicConfig(filename=loggingFile, level=logging.DEBUG,
#                     format='%(asctime)s - %(levelname)s - %(message)s')

os.chdir(downloadFolder)
pwd = os.getcwd()
logging.info('PWD: %s' % (pwd))

# List the default (per-resolution) streams, then every available stream.
streams = video.streams
for s in streams:
    print(s)
    print(s.resolution, s.extension, s.get_filesize(), s.url)
allstreams = video.allstreams
for s in allstreams:
    print(s.mediatype, s.extension, s.quality)

best = video.getbest()  # NOTE(review): pafy may not pick the absolute best file
bestPref = video.getbest(preftype='webm')  # mv4, webm, flv, 3gp
# best.url / bestPref.url can be fed to VLC for streaming instead of download.

filename_path = "%s\\%s." % (downloadFolder, vidTitle)
if not os.path.exists(filename_path):
    logging.info('filepath: %s' % (filename_path))
logging.info('File to Download: %s' % (filename_path))
# BUG FIX: idiomatic truthiness test instead of '== True'.
if dlcheck:
    filename = best.download(filepath=filename_path + best.extension)
# NOTE(review): the summary below prints unconditionally, so it claims the
# download "has finished" even on a dry run — preserved as-is.
print("Best video file available to download is: %s and has finished downloading." % (best))
print("Video Title: %s" % (vidTitle))
print("Video Author: %s" % (vidAuthor))
print("Views: %s" % (vidViews))
print("Likes: %s Dislikes: %s\n\n" % (vidLikes, vidDislikes))
print("Description: %s" % (vidDesc))

# Reference notes on downloading audio-only streams with pafy (data string,
# preserved verbatim).
audioHelp = """
#audiostreams = video.audiostreams
#for a in audiostreams:
#print(a.bitrate, a.extension, a.get_filesize())
256k m4a 331379079
192k ogg 172524223
128k m4a 166863001
128k ogg 108981120
48k m4a 62700449
Downlad the 2nd audio stream from above list.
audiostreams[1].download()
#bestAudio = video.getbestaudio()
#bestAudioPref = video.getbestaudio(preftype="m4a")
#bestBitRate = bestAudio.bitrate
#print(bestAudio)
#print(bestBitRate)
"""
|
# An empty __init__ file to get pytest to detect this as a valid test folder.
# NOTE: the module-level ``pass`` below is redundant but harmless.
pass
|
"""Batch driver: run Cuckoo Search over a set of benchmark problems,
seeds taken from a profile file, writing per-seed CSV results to ./tmp."""
import argparse
import os

import yaml

from PyBenchFCN import Factory
from CuckooSearch import evolve

os.makedirs("./tmp", exist_ok=True)

parser = argparse.ArgumentParser()
parser.add_argument('prob', type=argparse.FileType('r'))
parser.add_argument('prof', type=argparse.FileType('r'))
args = parser.parse_args()

problems = yaml.safe_load(args.prob)
prof = yaml.safe_load(args.prof)
# BUG FIX: close the epsilon config file instead of leaking the handle.
with open("epsilons.yml", "r") as eps_file:
    epsilons = yaml.safe_load(eps_file)

for problem_name in problems["names"]:
    os.makedirs(f"./tmp/{problem_name}", exist_ok=True)
    problem = Factory.set_sop(problem_name, prof["n_var"])
    # One independent run per seed in the configured [start, end) range.
    for seed in range(prof["seed"][0], prof["seed"][1]):
        X, F, c_eval = \
            evolve(problem, n_eval=prof["n_eval"], n_pop=prof["n_pop"],
                   levy_mode=prof["levy_mode"], replace_mode=prof["replace_mode"],
                   alpha=prof["alpha"], beta=prof["beta"], pa=prof["pa"],
                   adapt_params=prof["adapt_params"], adapt_strategy=prof["adapt_strategy"],
                   params_of_adapt_strategy=prof["params_of_adapt_strategy"],
                   indicator=prof["indicator"], n_step=prof["n_step"],
                   epsilon=epsilons["CS"][problem_name], seed=seed, is_print=prof["is_print"],
                   file=f"./tmp/{problem_name}/{prof['file_prefix']}{seed}.csv")
|
from __future__ import absolute_import
from duo_client.https_wrapper import CertValidatingHTTPSConnection
import unittest
import mock
import ssl
class TestSSLContextCreation(unittest.TestCase):
    """ Test that the SSL context used to wrap sockets is configured correctly """

    def test_no_ca_certs(self):
        # Without CA certs, certificate verification must be disabled.
        conn = CertValidatingHTTPSConnection('fake host')
        self.assertEqual(conn.default_ssl_context.verify_mode, ssl.CERT_NONE)

    @mock.patch('ssl.SSLContext.load_verify_locations')
    def test_with_ca_certs(self, mock_load):
        # With a CA bundle, verification must be required and the bundle
        # loaded via load_verify_locations(cafile=...).
        mock_load.return_value = None
        conn = CertValidatingHTTPSConnection('fake host', ca_certs='cafilepath')
        self.assertEqual(conn.default_ssl_context.verify_mode, ssl.CERT_REQUIRED)
        mock_load.assert_called_with(cafile='cafilepath')

    @mock.patch('ssl.SSLContext.load_cert_chain')
    def test_with_certfile(self, mock_load):
        # A client certificate is passed to load_cert_chain with no key file.
        mock_load.return_value = None
        CertValidatingHTTPSConnection('fake host', cert_file='certfilepath')
        mock_load.assert_called_with('certfilepath', None)

    def test_ssl2_ssl3_off(self):
        # The insecure SSLv2/SSLv3 protocols must be disabled on the context.
        conn = CertValidatingHTTPSConnection('fake host')
        self.assertEqual(conn.default_ssl_context.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
        self.assertEqual(conn.default_ssl_context.options & ssl.OP_NO_SSLv3, ssl.OP_NO_SSLv3)
|
# Here we choose which app implementation to use by exporting it as `app`
from app.examples.library.app import app
|
"""Constants used by the legacy Nest component."""
DOMAIN = "nest"
DATA_NEST = "nest"
DATA_NEST_CONFIG = "nest_config"
SIGNAL_NEST_UPDATE = "nest_update"
|
from ..core import Field, SystemObject
from ..core.api.special_values import Autogenerate
from ..core.bindings import RelatedObjectBinding
from ..core.translators_and_types import MunchType
class Plugin(SystemObject):
    """API object for a system plugin: declarative field schema plus
    heartbeat helpers."""

    # Declarative schema mapping API attributes to typed Field descriptors.
    FIELDS = [
        Field("id", type=int, is_identity=True, is_filterable=True, is_sortable=True),
        Field("type", type=str, is_filterable=True, creation_parameter=True, cached=True),
        Field("name", type=str, is_filterable=True, mutable=True, creation_parameter=True, is_sortable=True,
              default=Autogenerate("plugin_{uuid}")),
        Field("version", type=str, is_filterable=True, creation_parameter=True, mutable=True),
        Field("api_redirect_suffix", type=str, is_sortable=True, creation_parameter=True,
              mutable=True, is_filterable=True),
        Field("management_url", type=str, creation_parameter=True, mutable=True),
        Field("max_sec_without_heartbeat", type=int, creation_parameter=True, mutable=True),
        Field("last_heartbeat", type=int, mutable=True, is_sortable=True, is_filterable=True),
        Field("heartbeat_valid", type=bool, is_filterable=True),
        # add_updater=False: heartbeat is sent via send_heartbeat, not a setter.
        Field("heartbeat", type=MunchType, mutable=True, add_updater=False),
        Field("tenant", api_name="tenant_id", binding=RelatedObjectBinding('tenants'), cached=True,
              type='infinisdk.infinibox.tenant:Tenant', feature_name='tenants', is_filterable=True, is_sortable=True),
    ]

    def send_heartbeat(self, data):
        """PUT ``data`` to this plugin's ``.../heartbeat`` endpoint and
        return the API response."""
        return self.system.api.put(self.get_this_url_path().add_path('heartbeat'), data=data)

    @classmethod
    def is_supported(cls, system):
        """Return True when the target system supports tenants (the feature
        this object type depends on)."""
        return system.compat.has_tenants()
|
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 05 23:55:12 2018
@author: Kazushige Okayasu, Hirokatsu Kataoka
"""
import datetime
import os
import random
import time
import torch
import torchvision
import torch.nn as nn
import torch.optim as optim
import torch.backends.cudnn as cudnn
import torchvision.models as models
import torchvision.datasets as datasets
import torchvision.transforms as transforms
from args import conf
from alex import bn_alexnet
from vgg import vgg16_bn, vgg19_bn
from resnet import *
from resnext import *
from densenet import *
from train_val import train, validate
def worker_init_fn(worker_id):
    """Seed Python's RNG deterministically per DataLoader worker so that
    runs with the same worker layout are reproducible."""
    random.seed(worker_id)
def model_select(args):
    """Instantiate the network named by ``args.usenet`` and move it to the
    global ``device``.

    Args:
        args: parsed options; uses ``args.usenet`` (architecture name) and
            ``args.numof_classes`` (classifier output dimension).

    Returns:
        The freshly constructed (non-pretrained) model on ``device``.

    Raises:
        ValueError: if ``args.usenet`` names an unknown architecture.
            (Previously the if/elif chain fell through and silently
            returned None, which crashed later at optimizer creation.)
    """
    # Dispatch table replaces the long if/elif chain: every branch was
    # identical apart from the constructor being called.
    architectures = {
        "bn_alexnet": bn_alexnet,
        "vgg16": vgg16_bn,
        "vgg19": vgg19_bn,
        "resnet18": resnet18,
        "resnet34": resnet34,
        "resnet50": resnet50,
        "resnet101": resnet101,
        "resnet152": resnet152,
        "resnet200": resnet200,
        "resnext101": resnext101,
        "densenet161": densenet161,
    }
    try:
        constructor = architectures[args.usenet]
    except KeyError:
        raise ValueError("Unknown network architecture: %s" % args.usenet)
    return constructor(pretrained=False, num_classes=args.numof_classes).to(device)
if __name__== "__main__":
# Processing time
starttime = time.time()
# Option
args = conf()
print(args)
# GPUs
use_cuda = not args.no_cuda and torch.cuda.is_available()
device = torch.device("cuda" if use_cuda else "cpu")
#to deterministic
cudnn.deterministic = True
random.seed(args.seed)
torch.manual_seed(args.seed)
# Training settings
normalize = transforms.Normalize(mean=[0.2, 0.2, 0.2], std=[0.5, 0.5, 0.5])
train_transform = transforms.Compose([transforms.RandomCrop((args.crop_size,args.crop_size)),
transforms.ToTensor(), normalize])
train_dataset = datasets.ImageFolder(args.path2traindb, transform=train_transform)
train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True,
num_workers=args.num_workers, pin_memory=True, drop_last=True, worker_init_fn=worker_init_fn)
# Basically, the FractalDB pre-training doesn't require validation phase
if args.val:
val_transform = transforms.Compose([transforms.Resize((args.crop_size,args.crop_size), interpolation=2),
transforms.ToTensor(), normalize])
val_dataset = datasets.ImageFolder(args.path2valdb, transform=val_transform)
val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=args.batch_size, shuffle=False,
num_workers=args.num_workers, pin_memory=True, drop_last=False, worker_init_fn=worker_init_fn)
# Model & optimizer
model = model_select(args)
optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum,weight_decay=args.weight_decay)
criterion = nn.CrossEntropyLoss().to(device)
scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[30,60], gamma=0.1)
# optionally resume from a checkpoint
if args.resume:
assert os.path.isfile(args.resume), "=> no checkpoint found at '{}'".format(args.resume)
print("=> loading checkpoint '{}'".format(args.resume))
checkpoint = torch.load(args.resume, map_location=lambda storage, loc: storage)
args.start_epoch = checkpoint['epoch']
model.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer'])
scheduler.load_state_dict(checkpoint['scheduler'])
print("=> loaded checkpoint '{}' (epoch {})".format(args.resume, checkpoint['epoch']))
if not args.no_multigpu:
model = nn.DataParallel(model)
# FractalDB Pre-training
iteration = (args.start_epoch-1)*len(train_loader)
for epoch in range(args.start_epoch, args.epochs + 1):
train(args, model, device, train_loader, optimizer, criterion, epoch)
scheduler.step()
iteration += len(train_loader)
if args.val:
validation_loss = validate(args, model, device, val_loader, criterion, iteration)
if epoch % args.save_interval == 0:
if args.no_multigpu:
model_state = model.cpu().state_dict()
else:
model_state = model.module.cpu().state_dict()
saved_weight = "{}/{}_{}_epoch{}.pth.tar".format(args.path2weight, args.dataset, args.usenet, epoch)
torch.save(model_state, saved_weight.replace('.tar',''))
checkpoint = "{}/{}_{}_checkpoint.pth.tar".format(args.path2weight, args.dataset, args.usenet)
torch.save({'epoch': epoch + 1,
'state_dict': model_state,
'optimizer' : optimizer.state_dict(),
'scheduler' : scheduler.state_dict(),}, checkpoint)
model = model.to(device)
torch.save(model_state, saved_weight.replace('.tar',''))
# Processing time
endtime = time.time()
interval = endtime - starttime
print("elapsed time = {0:d}h {1:d}m {2:d}s".format(int(interval/3600), int((interval%3600)/60), int((interval%3600)%60)))
|
"""Mono-objective available algorithms
"""
# main imports
import logging
# module imports
from macop.algorithms.base import Algorithm
class HillClimberFirstImprovment(Algorithm):
    """Hill Climber First Improvement used as quick exploration optimisation algorithm

    - First, this algorithm does a neighborhood exploration of a new generated solution (by doing operation on the current solution obtained) in order to find a better solution from the neighborhood space;
    - Then replaces the current solution by the first one from the neighborhood space which is better than the current solution;
    - And does these steps until a number of evaluations (stopping criterion) is reached.

    Attributes:
        initialiser: {function} -- basic function strategy to initialise solution
        evaluator: {:class:`~macop.evaluators.base.Evaluator`} -- evaluator instance in order to obtain fitness (mono or multiple objectives)
        operators: {[:class:`~macop.operators.base.Operator`]} -- list of operators to use when launching algorithm
        policy: {:class:`~macop.policies.base.Policy`} -- Policy class implementation strategy to select operators
        validator: {function} -- basic function to check if solution is valid or not under some constraints
        maximise: {bool} -- specify kind of optimisation problem
        currentSolution: {:class:`~macop.solutions.base.Solution`} -- current solution managed for current evaluation
        bestSolution: {:class:`~macop.solutions.base.Solution`} -- best solution found so far during running algorithm
        callbacks: {[:class:`~macop.callbacks.base.Callback`]} -- list of Callback class implementations to do some instructions every number of evaluations and `load` when initialising algorithm
        parent: {:class:`~macop.algorithms.base.Algorithm`} -- parent algorithm reference in case of inner Algorithm instance (optional)

    Example:

    >>> import random
    >>>
    >>> # operators import
    >>> from macop.operators.discrete.crossovers import SimpleCrossover
    >>> from macop.operators.discrete.mutators import SimpleMutation
    >>>
    >>> # policy import
    >>> from macop.policies.classicals import RandomPolicy
    >>>
    >>> # solution and algorithm imports
    >>> from macop.solutions.discrete import BinarySolution
    >>> from macop.algorithms.mono import HillClimberFirstImprovment
    >>>
    >>> # evaluator import
    >>> from macop.evaluators.discrete.mono import KnapsackEvaluator
    >>>
    >>> # evaluator initialization (worths objects passed into data)
    >>> problem_size = 20
    >>> worths = [ random.randint(0, 20) for i in range(problem_size) ]
    >>> evaluator = KnapsackEvaluator(data={'worths': worths})
    >>>
    >>> # validator specification (based on weights of each objects)
    >>> weights = [ random.randint(5, 30) for i in range(problem_size) ]
    >>> validator = lambda solution: True if sum([weights[i] for i, value in enumerate(solution.data) if value == 1]) < 200 else False
    >>>
    >>> # initialiser function for binary solution using specific solution size
    >>> initialiser = lambda x=20: BinarySolution.random(x, validator)
    >>>
    >>> # operators list with crossover and mutation
    >>> operators = [SimpleCrossover(), SimpleMutation()]
    >>> policy = RandomPolicy(operators)
    >>> algo = HillClimberFirstImprovment(initialiser, evaluator, operators, policy, validator, maximise=True, verbose=False)
    >>>
    >>> # run the algorithm
    >>> solution = algo.run(100)
    >>> solution._score
    128
    """

    def run(self, evaluations):
        """
        Run the local search algorithm

        Args:
            evaluations: {int} -- number of Local search evaluations

        Returns:
            {:class:`~macop.solutions.base.Solution`}: best solution found
        """
        # by default use of mother method to initialise variables
        super().run(evaluations)
        # initialise current solution and best solution
        self.initRun()
        solutionSize = self._currentSolution.size
        # local search algorithm implementation
        while not self.stop():
            # explore up to solutionSize neighbours, accepting the FIRST
            # improving one found (first-improvement strategy)
            for _ in range(solutionSize):
                # update current solution using policy
                newSolution = self.update(self._currentSolution)
                # if better solution than currently, replace it and stop current exploration (first improvement)
                if self.isBetter(newSolution):
                    self._bestSolution = newSolution
                    break
                # increase number of evaluations
                self.increaseEvaluation()
                self.progress()
                logging.info(
                    f"---- Current {newSolution} - SCORE {newSolution.fitness}"
                )
                # stop algorithm if necessary
                if self.stop():
                    break
            # set new current solution using best solution found in this neighbor search
            self._currentSolution = self._bestSolution
        logging.info(
            f"End of {type(self).__name__}, best solution found {self._bestSolution}"
        )
        return self._bestSolution
class HillClimberBestImprovment(Algorithm):
    """Hill Climber Best Improvement used as exploitation optimisation algorithm

    - First, this algorithm does a neighborhood exploration of a new generated solution (by doing operation on the current solution obtained) in order to find the best solution from the neighborhood space;
    - Then replaces the current solution by the best solution found from the neighborhood space;
    - And does these steps until a number of evaluations (stopping criterion) is reached.

    Attributes:
        initialiser: {function} -- basic function strategy to initialise solution
        evaluator: {:class:`~macop.evaluators.base.Evaluator`} -- evaluator instance in order to obtain fitness (mono or multiple objectives)
        operators: {[:class:`~macop.operators.base.Operator`]} -- list of operators to use when launching algorithm
        policy: {:class:`~macop.policies.base.Policy`} -- Policy class implementation strategy to select operators
        validator: {function} -- basic function to check if solution is valid or not under some constraints
        maximise: {bool} -- specify kind of optimisation problem
        currentSolution: {:class:`~macop.solutions.base.Solution`} -- current solution managed for current evaluation
        bestSolution: {:class:`~macop.solutions.base.Solution`} -- best solution found so far during running algorithm
        callbacks: {[:class:`~macop.callbacks.base.Callback`]} -- list of Callback class implementations to do some instructions every number of evaluations and `load` when initialising algorithm
        parent: {:class:`~macop.algorithms.base.Algorithm`} -- parent algorithm reference in case of inner Algorithm instance (optional)

    Example:

    >>> import random
    >>>
    >>> # operators import
    >>> from macop.operators.discrete.crossovers import SimpleCrossover
    >>> from macop.operators.discrete.mutators import SimpleMutation
    >>>
    >>> # policy import
    >>> from macop.policies.classicals import RandomPolicy
    >>>
    >>> # solution and algorithm imports
    >>> from macop.solutions.discrete import BinarySolution
    >>> from macop.algorithms.mono import HillClimberBestImprovment
    >>>
    >>> # evaluator import
    >>> from macop.evaluators.discrete.mono import KnapsackEvaluator
    >>>
    >>> # evaluator initialization (worths objects passed into data)
    >>> problem_size = 20
    >>> worths = [ random.randint(0, 20) for i in range(problem_size) ]
    >>> evaluator = KnapsackEvaluator(data={'worths': worths})
    >>>
    >>> # validator specification (based on weights of each objects)
    >>> weights = [ random.randint(5, 30) for i in range(problem_size) ]
    >>> validator = lambda solution: True if sum([weights[i] for i, value in enumerate(solution.data) if value == 1]) < 200 else False
    >>>
    >>> # initialiser function for binary solution using specific solution size
    >>> initialiser = lambda x=20: BinarySolution.random(x, validator)
    >>>
    >>> # operators list with crossover and mutation
    >>> operators = [SimpleCrossover(), SimpleMutation()]
    >>> policy = RandomPolicy(operators)
    >>> algo = HillClimberBestImprovment(initialiser, evaluator, operators, policy, validator, maximise=True, verbose=False)
    >>>
    >>> # run the algorithm
    >>> solution = algo.run(100)
    >>> solution._score
    104
    """

    def run(self, evaluations):
        """
        Run the local search algorithm

        Args:
            evaluations: {int} -- number of Local search evaluations

        Returns:
            {:class:`~macop.solutions.base.Solution`}: best solution found
        """
        # by default use of mother method to initialise variables
        super().run(evaluations)
        # initialise current solution and best solution
        self.initRun()
        solutionSize = self._currentSolution.size
        # local search algorithm implementation
        while not self.stop():
            # explore the whole neighbourhood (solutionSize candidates) and
            # keep the BEST improving one (best-improvement strategy)
            for _ in range(solutionSize):
                # update current solution using policy
                newSolution = self.update(self._currentSolution)
                # if better solution than currently, replace it
                if self.isBetter(newSolution):
                    self._bestSolution = newSolution
                # increase number of evaluations
                self.increaseEvaluation()
                self.progress()
                logging.info(
                    f"---- Current {newSolution} - SCORE {newSolution.fitness}"
                )
                # stop algorithm if necessary
                if self.stop():
                    break
            # set new current solution using best solution found in this neighbor search
            self._currentSolution = self._bestSolution
        logging.info(
            f"End of {type(self).__name__}, best solution found {self._bestSolution}"
        )
        return self._bestSolution
class IteratedLocalSearch(Algorithm):
    """Iterated Local Search (ILS) used to avoid local optima and increase the EvE (Exploration vs Exploitation) compromise

    - A number of evaluations (`ls_evaluations`) is dedicated to the local search process, here the `HillClimberFirstImprovment` algorithm;
    - Starting with the new generated solution, the local search algorithm will return a new solution;
    - If the obtained solution is better than the best solution known into `IteratedLocalSearch`, then the solution is replaced;
    - Restart this process until the stopping criterion (number of expected evaluations) is reached.

    Attributes:
        initialiser: {function} -- basic function strategy to initialise solution
        evaluator: {function} -- basic function in order to obtain fitness (mono or multiple objectives)
        operators: {[:class:`~macop.operators.base.Operator`]} -- list of operators to use when launching algorithm
        policy: {:class:`~macop.policies.base.Policy`} -- Policy class implementation strategy to select operators
        validator: {function} -- basic function to check if solution is valid or not under some constraints
        maximise: {bool} -- specify kind of optimisation problem
        currentSolution: {:class:`~macop.solutions.base.Solution`} -- current solution managed for current evaluation
        bestSolution: {:class:`~macop.solutions.base.Solution`} -- best solution found so far during running algorithm
        localSearch: {:class:`~macop.algorithms.base.Algorithm`} -- current local search into ILS
        callbacks: {[:class:`~macop.callbacks.base.Callback`]} -- list of Callback class implementations to do some instructions every number of evaluations and `load` when initialising algorithm
        parent: {:class:`~macop.algorithms.base.Algorithm`} -- parent algorithm reference in case of inner Algorithm instance (optional)

    Example:

    >>> import random
    >>>
    >>> # operators import
    >>> from macop.operators.discrete.crossovers import SimpleCrossover
    >>> from macop.operators.discrete.mutators import SimpleMutation
    >>>
    >>> # policy import
    >>> from macop.policies.classicals import RandomPolicy
    >>>
    >>> # import for solution and algorithm
    >>> from macop.solutions.discrete import BinarySolution
    >>> from macop.algorithms.mono import IteratedLocalSearch
    >>> from macop.algorithms.mono import HillClimberFirstImprovment
    >>>
    >>> # evaluator import
    >>> from macop.evaluators.discrete.mono import KnapsackEvaluator
    >>>
    >>> # evaluator initialization (worths objects passed into data)
    >>> problem_size = 20
    >>> worths = [ random.randint(0, 20) for i in range(problem_size) ]
    >>> evaluator = KnapsackEvaluator(data={'worths': worths})
    >>>
    >>> # validator specification (based on weights of each objects)
    >>> weights = [ random.randint(5, 30) for i in range(problem_size) ]
    >>> validator = lambda solution: True if sum([weights[i] for i, value in enumerate(solution.data) if value == 1]) < 200 else False
    >>>
    >>> # initialiser function with lambda function
    >>> initialiser = lambda x=20: BinarySolution.random(x, validator)
    >>>
    >>> # operators list with crossover and mutation
    >>> operators = [SimpleCrossover(), SimpleMutation()]
    >>> policy = RandomPolicy(operators)
    >>> local_search = HillClimberFirstImprovment(initialiser, evaluator, operators, policy, validator, maximise=True, verbose=False)
    >>> algo = IteratedLocalSearch(initialiser, evaluator, operators, policy, validator, localSearch=local_search, maximise=True, verbose=False)
    >>>
    >>> # run the algorithm using specific number of evaluations for local search
    >>> solution = algo.run(100, ls_evaluations=10)
    >>> solution._score
    137
    """

    def __init__(self,
                 initialiser,
                 evaluator,
                 operators,
                 policy,
                 validator,
                 localSearch,
                 maximise=True,
                 parent=None,
                 verbose=True):
        """Iterated Local Search Algorithm initialisation with use of specific LocalSearch {:class:`~macop.algorithms.base.Algorithm`} instance

        Args:
            initialiser: {function} -- basic function strategy to initialise solution
            evaluator: {:class:`~macop.evaluators.base.Evaluator`} -- evaluator instance in order to obtain fitness (mono or multiple objectives)
            operators: {[:class:`~macop.operators.base.Operator`]} -- list of operators to use when launching algorithm
            policy: {:class:`~macop.policies.base.Policy`} -- Policy implementation strategy to select operators
            validator: {function} -- basic function to check if solution is valid or not under some constraints
            localSearch: {:class:`~macop.algorithms.base.Algorithm`} -- current local search into ILS
            maximise: {bool} -- specify kind of optimisation problem
            parent: {:class:`~macop.algorithms.base.Algorithm`} -- parent algorithm reference in case of inner Algorithm instance (optional)
            verbose: {bool} -- verbose or not information about the algorithm
        """
        super().__init__(initialiser, evaluator, operators, policy, validator,
                         maximise, parent, verbose)
        # specific local search associated with current algorithm
        self._localSearch = localSearch
        # need to attach current algorithm as parent
        self._localSearch.setParent(self)

    def run(self, evaluations, ls_evaluations=100):
        """
        Run the iterated local search algorithm using local search (EvE compromise)

        Args:
            evaluations: {int} -- number of global evaluations for ILS
            ls_evaluations: {int} -- number of Local search evaluations (default: 100)

        Returns:
            {:class:`~macop.solutions.base.Solution`}: best solution found
        """
        # by default use of mother method to initialise variables
        super().run(evaluations)
        # add same callbacks
        for callback in self._callbacks:
            self._localSearch.addCallback(callback)
        # enable resuming for ILS
        self.resume()
        # initialise current solution
        self.initRun()
        # local search algorithm implementation
        while not self.stop():
            # create and search solution from local search
            newSolution = self._localSearch.run(ls_evaluations)
            # if better solution than currently, replace it
            if self.isBetter(newSolution):
                self._bestSolution = newSolution
            # number of evaluations increased from LocalSearch
            # increase number of evaluations and progress are then not necessary there
            #self.increaseEvaluation()
            #self.progress()
            self.information()
        logging.info(
            f"End of {type(self).__name__}, best solution found {self._bestSolution}"
        )
        self.end()
        return self._bestSolution
|
print("this a line of error")
print("this is a second line")
exit(1)
|
from __future__ import unicode_literals
from math import sqrt
import subprocess
import time
import os
import logging
import codecs
from collections import defaultdict
from functools import partial
#from traceback import print_exc
from lxml import etree
import enchant
from py4j.java_gateway import JavaGateway
from django.conf import settings
from django.db import transaction
from django.db.models import F, Q
from codeutil.parser import is_valid_match, find_parent_reference,\
create_match
from codeutil.xml_element import XMLStrategy, XML_LANGUAGE, is_xml_snippet,\
is_xml_lines
from codeutil.java_element import ClassMethodStrategy, MethodStrategy,\
FieldStrategy, OtherStrategy, AnnotationStrategy, SQLFilter,\
BuilderFilter, JAVA_LANGUAGE, is_java_snippet, is_java_lines,\
is_exception_trace_lines, JAVA_EXCEPTION_TRACE, clean_java_name,\
can_merge_java, MacroFilter
from codeutil.other_element import FileStrategy, IgnoreStrategy,\
IGNORE_KIND, EMAIL_PATTERN_RE, URL_PATTERN_RE, OTHER_LANGUAGE,\
is_empty_lines, is_log_lines, LOG_LANGUAGE
from codeutil.reply_element import REPLY_LANGUAGE, is_reply_lines,\
is_reply_header, STOP_LANGUAGE, is_rest_reply
from docutil.str_util import tokenize, find_sentence, find_paragraph, split_pos
from docutil.cache_util import get_value, get_codebase_key
from docutil.commands_util import mkdir_safe, import_clazz, download_html_tree
from docutil.progress_monitor import CLILockProgressMonitor, CLIProgressMonitor
from docutil import cache_util
from project.models import ProjectRelease, Project
from project.actions import CODEBASE_PATH
from codebase.models import CodeBase, CodeElementKind, CodeElement,\
SingleCodeReference, CodeSnippet, CodeElementFilter, ReleaseLinkSet
from codebase.parser.java_diff import JavaDiffer
# Eclipse project scaffolding: file and directory names used when creating
# a local Java project for a codebase.
PROJECT_FILE = '.project'
CLASSPATH_FILE = '.classpath'
BIN_FOLDER = 'bin'
SRC_FOLDER = 'src'
LIB_FOLDER = 'lib'
# Pluggable component registries: defaults merged with custom overrides
# declared in the Django settings.
PARSERS = dict(settings.CODE_PARSERS, **settings.CUSTOM_CODE_PARSERS)
SNIPPET_PARSERS = dict(
    settings.CODE_SNIPPET_PARSERS,
    **settings.CUSTOM_CODE_SNIPPET_PARSERS)
LINKERS = dict(settings.LINKERS, **settings.CUSTOM_LINKERS)
# Cache key prefixes for memoized code words and filters (see get_value).
PREFIX_CODEBASE_CODE_WORDS = settings.CACHE_MIDDLEWARE_KEY_PREFIX +\
    'cb_codewords'
PREFIX_PROJECT_CODE_WORDS = settings.CACHE_MIDDLEWARE_KEY_PREFIX +\
    'project_codewords'
PREFIX_CODEBASE_FILTERS = settings.CACHE_MIDDLEWARE_KEY_PREFIX +\
    'cb_filters'
# Parent-kind lookup tables: map a child code element kind to the kind of
# its enclosing element (see find_parent_reference usage below).
JAVA_KINDS_HIERARCHY = {'field': 'class',
                        'method': 'class',
                        'method parameter': 'method'}
XML_KINDS_HIERARCHY = {'xml attribute': 'xml element',
                       'xml attribute value': 'xml attribute'}
ALL_KINDS_HIERARCHIES = dict(JAVA_KINDS_HIERARCHY, **XML_KINDS_HIERARCHY)
# Constants used by filter
# XPath helpers for scraping javadoc pages in create_filter_file.
xtext = etree.XPath("string()")
xpackage = etree.XPath("//h2")
xmember_tables = etree.XPath("//body/table")
xmembers = etree.XPath("tr/td[1]")
logger = logging.getLogger("recodoc.codebase.actions")
def start_eclipse():
    """Launch the Eclipse instance configured in settings.

    Waits a few seconds for the workbench to boot, verifies the py4j
    bridge with check_eclipse(), and returns the PID of the new process.
    """
    process = subprocess.Popen([settings.ECLIPSE_COMMAND])
    print('Process started: {0}'.format(process.pid))
    # Give the workbench time to come up before probing the bridge.
    time.sleep(7)
    check_eclipse()
    return process.pid
def stop_eclipse():
    """Ask the running Eclipse instance to close, then drop the gateway.

    Both steps are best-effort: any failure (e.g. Eclipse not running)
    is silently ignored.
    """
    gateway = JavaGateway()
    try:
        gateway.entry_point.closeEclipse()
        time.sleep(1)
        gateway.shutdown()
    except Exception:
        pass
    finally:
        # Always try to release the local gateway connection.
        try:
            gateway.close()
        except Exception:
            pass
def check_eclipse():
    '''Check that Eclipse is started and that recodoc can communicate with
    it.'''
    gateway = JavaGateway()
    connected = False
    try:
        # A positive listening port means the bridge server is up.
        connected = gateway.entry_point.getServer().getListeningPort() > 0
    except Exception:
        connected = False
    if connected:
        print('Connection to Eclipse: OK')
    else:
        print('Connection to Eclipse: ERROR')
    gateway.close()
    return connected
def get_codebase_path(pname, bname='', release='', root=False):
    """Build the filesystem path of a codebase checkout.

    When root is True, return the project's codebase root directory
    instead of the per-(project, codebase, release) directory.
    """
    base = os.path.join(settings.PROJECT_FS_ROOT, pname, CODEBASE_PATH)
    if root:
        return base
    return os.path.join(base, pname + bname + release)
def create_code_db(pname, bname, release):
    """Create and persist a CodeBase row for the given project release."""
    release_obj = ProjectRelease.objects.filter(
        project__dir_name=pname).filter(release=release)[0]
    code_base = CodeBase(name=bname, project_release=release_obj)
    code_base.save()
    return code_base
def create_code_local(pname, bname, release):
    '''Create an Eclipse Java Project on the filesystem.'''
    # The Eclipse project name combines project, codebase and release names.
    project_key = pname + bname + release
    codebase_path = get_codebase_path(pname, bname, release)
    mkdir_safe(codebase_path)
    # Write the Eclipse .project descriptor (Java nature + Java builder).
    with open(os.path.join(codebase_path, PROJECT_FILE), 'w') as project_file:
        project_file.write("""<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>{0}</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>
""".format(project_key))
    # Write the .classpath: sources in src/, output in bin/, default JRE.
    with open(os.path.join(codebase_path, CLASSPATH_FILE), 'w') as \
            classpath_file:
        classpath_file.write("""<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="output" path="bin"/>
</classpath>
""")
    # Standard Eclipse project layout.
    mkdir_safe(os.path.join(codebase_path, SRC_FOLDER))
    mkdir_safe(os.path.join(codebase_path, BIN_FOLDER))
    mkdir_safe(os.path.join(codebase_path, LIB_FOLDER))
def link_eclipse(pname, bname, release):
    '''Add the Java Project created with create_code_local to the Eclipse
    workspace.'''
    project_key = pname + bname + release
    codebase_path = get_codebase_path(pname, bname, release)
    gateway = JavaGateway()
    jvm = gateway.jvm
    workspace = jvm.org.eclipse.core.resources.ResourcesPlugin.getWorkspace()
    descriptor_path = jvm.org.eclipse.core.runtime.Path(
        os.path.join(codebase_path, PROJECT_FILE))
    description = workspace.loadProjectDescription(descriptor_path)
    project = workspace.getRoot().getProject(project_key)
    monitor = jvm.org.eclipse.core.runtime.NullProgressMonitor()
    # gateway.jvm.py4j.GatewayServer.turnLoggingOn()
    # To avoid workbench problem (don't know why it needs some time).
    time.sleep(1)
    project.create(description, monitor)
    project.open(monitor)
    gateway.close()
def list_code_db(pname):
    """Return display strings 'pk: project (release)' for every CodeBase
    registered for the given project."""
    queryset = CodeBase.objects.filter(
        project_release__project__dir_name=pname)
    return ['{0}: {1} ({2})'.format(
                code_base.pk,
                code_base.project_release.project.dir_name,
                code_base.project_release.release)
            for code_base in queryset]
def list_code_local(pname):
    """List the codebase directories present on disk for a project."""
    code_path = os.path.join(
        settings.PROJECT_FS_ROOT, pname, CODEBASE_PATH)
    return [entry for entry in os.listdir(code_path)
            if os.path.isdir(os.path.join(code_path, entry))]
@transaction.commit_on_success
def create_code_element_kinds():
    """Populate the CodeElementKind table with every kind recodoc knows.

    Runs in a single transaction so the table is either fully populated
    or left untouched.
    """
    # (kind name, extra model flags) pairs; flags not listed use the
    # model's defaults.
    specs = [
        # Non-type container
        ('package', {'is_type': False}),
        # Types
        ('class', {'is_type': True}),
        ('annotation', {'is_type': True}),
        ('enumeration', {'is_type': True}),
        # Members
        ('method', {}),
        ('method family', {}),
        ('method parameter', {'is_attribute': True}),
        ('field', {}),
        ('enumeration value', {}),
        ('annotation field', {}),
        # XML
        ('xml type', {'is_type': True}),
        ('xml element', {}),
        ('xml attribute', {'is_attribute': True}),
        ('xml attribute value', {'is_value': True}),
        ('xml element type', {'is_type': True}),
        ('xml attribute type', {'is_type': True}),
        ('xml attribute value type', {'is_type': True}),
        ('property type', {'is_type': True}),
        ('property name', {}),
        ('property value', {'is_value': True}),
        # Files
        ('xml file', {'is_file': True}),
        ('ini file', {'is_file': True}),
        ('conf file', {'is_file': True}),
        ('properties file', {'is_file': True}),
        ('log file', {'is_file': True}),
        ('jar file', {'is_file': True}),
        ('java file', {'is_file': True}),
        ('python file', {'is_file': True}),
        ('hbm file', {'is_file': True}),
        # Fallback
        ('unknown', {}),
    ]
    for kind_name, flags in specs:
        CodeElementKind(kind=kind_name, **flags).save()
@transaction.autocommit
def parse_code(pname, bname, release, parser_name, opt_input=None):
    '''
    autocommit is necessary here to prevent goofs. Parsers can be
    multi-threaded and transaction management in django uses thread local...
    '''
    project_key = pname + bname + release
    release_obj = ProjectRelease.objects.filter(
        project__dir_name=pname).filter(release=release)[0]
    codebase = CodeBase.objects.filter(
        project_release=release_obj).filter(name=bname)[0]
    # Resolve the parser class from its registry name and run it.
    parser_cls = import_clazz(PARSERS[parser_name])
    parser_cls(codebase, project_key, opt_input).parse(
        CLILockProgressMonitor())
    return codebase
@transaction.autocommit
def parse_snippets(pname, source, parser_name):
    """Parse a project's code snippets from `source` with the registered
    snippet parser (autocommit: snippet parsers may be multi-threaded)."""
    project = Project.objects.get(dir_name=pname)
    snippet_parser_cls = import_clazz(SNIPPET_PARSERS[parser_name])
    snippet_parser_cls(project, source).parse(CLILockProgressMonitor())
def clear_snippets(pname, language, source):
    """Delete a project's code references whose snippets match the given
    language and source."""
    project = Project.objects.get(dir_name=pname)
    doomed = (SingleCodeReference.objects
              .filter(snippet__language=language)
              .filter(source=source)
              .filter(project=project))
    print('Snippets to delete: %i' % doomed.count())
    doomed.delete()
def clear_code_elements(pname, bname, release, parser_name='-1'):
    """Delete the code elements of a codebase.

    parser_name narrows the deletion to one parser; the sentinel '-1'
    removes elements from every parser.
    """
    release_obj = ProjectRelease.objects.filter(
        project__dir_name=pname).filter(release=release)[0]
    codebase = CodeBase.objects.filter(
        project_release=release_obj).filter(name=bname)[0]
    elements = CodeElement.objects.filter(codebase=codebase)
    if parser_name != '-1':
        elements = elements.filter(parser=parser_name)
    elements.delete()
def diff_codebases(pname, bname, release1, release2):
    """Diff two releases of the same codebase and return the diff result."""
    def _load_codebase(release):
        # Resolve (project, release, codebase name) to a CodeBase row.
        release_obj = ProjectRelease.objects.filter(
            project__dir_name=pname).filter(release=release)[0]
        return CodeBase.objects.filter(
            project_release=release_obj).filter(name=bname)[0]
    codebase_from = _load_codebase(release1)
    codebase_to = _load_codebase(release2)
    # Maybe later, this will be more generic
    return JavaDiffer().diff(codebase_from, codebase_to)
def create_filter_file(file_path, url):
    """Append (creating if needed) a filter file listing the fully
    qualified members found on the javadoc page at `url`."""
    target = os.path.join(settings.PROJECT_FS_ROOT, file_path)
    mode = 'a' if os.path.exists(target) else 'w'
    with open(target, mode) as afile:
        tree = download_html_tree(url)
        package_name = get_package_name(tree)
        # First and last tables are skipped — presumably they are not
        # member tables on the javadoc page.
        for table in xmember_tables(tree)[1:-1]:
            for member in xmembers(table):
                member_string = "{0}.{1}".format(package_name, xtext(member))
                afile.write(member_string + '\n')
                print(member_string)
def add_filter(pname, bname, release, filter_files):
    """Load filter FQNs from files and save them as CodeElementFilter rows.

    filter_files is a comma-separated list of file names (without the .txt
    extension) relative to PROJECT_FS_ROOT; every line of each file becomes
    one filter for the codebase identified by (pname, bname, release).
    """
    prelease = ProjectRelease.objects.filter(project__dir_name=pname).\
        filter(release=release)[0]
    codebase = CodeBase.objects.filter(project_release=prelease).\
        filter(name=bname)[0]
    # count: filter files processed; countfilter: individual filters saved.
    count = countfilter = 0
    for filterfile in filter_files.split(','):
        file_path = os.path.join(settings.PROJECT_FS_ROOT,
                filterfile.strip() + '.txt')
        with open(file_path) as afile:
            for line in afile.readlines():
                code_filter = CodeElementFilter(
                        codebase=codebase,
                        fqn=line.strip())
                code_filter.save()
                countfilter += 1
        count += 1
    print('Added {0} filter groups and {1} individual filters.'
            .format(count, countfilter))
def add_a_filter(pname, bname, release, filter_fqn, include_snippet=True,
        one_ref_only=False, include_member=False):
    """Create a single CodeElementFilter with the given FQN and flags for
    the codebase identified by (pname, bname, release)."""
    release_obj = ProjectRelease.objects.filter(
        project__dir_name=pname).filter(release=release)[0]
    codebase = CodeBase.objects.filter(
        project_release=release_obj).filter(name=bname)[0]
    CodeElementFilter(
        codebase=codebase,
        fqn=filter_fqn,
        include_snippet=include_snippet,
        one_ref_only=one_ref_only,
        include_member=include_member).save()
def link_code(pname, bname, release, linker_name, source, source_release=None,
        local_object_id=None, filtered_ids_path=None, filtered_ids_level=None):
    """Run the named linker over the references of a project release.

    source_release optionally restricts the linked references to one source
    release ('-1' and None both mean "no restriction"). filtered_ids_path /
    filtered_ids_level (see compute_f_ids) select a subset of reference ids
    to keep. Cache statistics and wall time are reported on the progress
    monitor before and after linking.
    """
    project = Project.objects.get(dir_name=pname)
    prelease = ProjectRelease.objects.filter(project=project).\
        filter(release=release)[0]
    # '-1' is the CLI sentinel for "no source release".
    if source_release is not None and source_release != '-1':
        srelease = ProjectRelease.objects.filter(project=project).\
            filter(release=source_release)[0]
    else:
        srelease = None
    codebase = CodeBase.objects.filter(project_release=prelease).\
        filter(name=bname)[0]
    (f_ids, f_ids_level) = compute_f_ids(filtered_ids_path, filtered_ids_level)
    if f_ids is not None:
        count = len(f_ids)
    else:
        count = 0
    # Resolve the linker class from its registry name.
    linker_cls_name = LINKERS[linker_name]
    linker_cls = import_clazz(linker_cls_name)
    linker = linker_cls(project, prelease, codebase, source, srelease,
            (f_ids, f_ids_level))
    progress_monitor = CLIProgressMonitor(min_step=1.0)
    # Report cache effectiveness before and after the run.
    progress_monitor.info('Cache Count {0} miss of {1}'
            .format(cache_util.cache_miss, cache_util.cache_total))
    progress_monitor.info('Ref ids to keep: {0}'.format(count))
    start = time.clock()
    linker.link_references(progress_monitor, local_object_id)
    stop = time.clock()
    progress_monitor.info('Cache Count {0} miss of {1}'
            .format(cache_util.cache_miss, cache_util.cache_total))
    progress_monitor.info('Time: {0}'.format(stop - start))
def clear_links(pname, release, source='-1'):
    """Delete the link sets of a project release; the sentinel '-1' keeps
    links from every source."""
    release_obj = ProjectRelease.objects.filter(
        project__dir_name=pname).filter(release=release)[0]
    links = ReleaseLinkSet.objects.filter(project_release=release_obj)
    if source != '-1':
        links = links.filter(code_reference__source=source)
    links.delete()
def restore_kinds(pname, release='-1', source='-1'):
    """Reset kind_hint to original_kind_hint for a project's references,
    optionally narrowed to one release and/or one source ('-1' = all)."""
    project = Project.objects.get(dir_name=pname)
    references = SingleCodeReference.objects.filter(project=project)
    if release != '-1':
        release_obj = ProjectRelease.objects.filter(project=project).\
            filter(release=release)[0]
        references = references.filter(project_release=release_obj)
    if source != '-1':
        references = references.filter(source=source)
    total = references.count()
    progress_monitor = CLIProgressMonitor(min_step=1.0)
    progress_monitor.start('Restoring {0} references'.format(total), total)
    # Single SQL UPDATE; much faster than loading and saving each row.
    references.update(kind_hint=F('original_kind_hint'))
    progress_monitor.done()
def recommend_filters(pname, bname, release, nofilter=False):
    """Print filter recommendations (word-like types, acronyms, fields)
    for a codebase; nofilter=True ignores already-existing filters."""
    release_obj = ProjectRelease.objects.filter(
        project__dir_name=pname).filter(release=release)[0]
    codebase = CodeBase.objects.filter(
        project_release=release_obj).filter(name=bname)[0]
    simple_filters = [] if nofilter else get_filters(codebase)[0]
    dictionary = enchant.Dict('en-US')
    single_types = recommend_single_types(codebase, simple_filters,
                                          dictionary)
    acronyms = recommend_acronyms(codebase, simple_filters)
    single_fields = recommend_single_fields(codebase, simple_filters)
    print_recommendations(single_types, acronyms, single_fields)
### ACTIONS USED BY OTHER ACTIONS ###
class LogEntry(object):
    """Aggregated statistics about one code-like term in a linker log."""

    def __init__(self):
        # Candidate element counts before and after filtering.
        self.origin_size = 0
        self.final_size = 0
        self.custom_filtered = False
        # filter name -> (activated, number), as parsed from the log.
        self.filters = {}
        self.from_snippet = False
        self.unique_types = 0
        # Raw dotted names seen; condensed by compute_unique_types().
        self.temp_types = []

    def compute_unique_types(self):
        """Count distinct containers by dropping the last dotted segment
        of each recorded name."""
        containers = {name[:name.rfind('.')] for name in self.temp_types}
        self.unique_types = len(containers)
def analyze_all_logs(base_dir, project, version, source):
    """Run every log analysis (class, post-class, method, field), printing
    a blank line between consecutive reports."""
    analyses = (analyze_class_log, analyze_post_log, analyze_method_log,
                analyze_field_log)
    for position, analysis in enumerate(analyses):
        if position:
            print()
        analysis(base_dir, project, version, source)
def analyze_post_log(base_dir, project, version, source):
    """Summarize the javapostclass linker log: number of types seen and
    how many were filtered by each rationale."""
    path = '{0}/linking-type-{1}-{2}-javapostclass-{3}.log'.format(base_dir,
            project, version, source)
    totals = {'count': 0, 'high_freq': 0, 'depth': 0}
    with codecs.open(path, 'r', 'utf-8') as finput:
        for raw_line in finput:
            line = raw_line.strip()
            if line.startswith('Type'):
                totals['count'] += 1
            elif line == 'Rationale: highest_frequency':
                totals['high_freq'] += 1
            elif line == 'Rationale: heuristic_depth':
                totals['depth'] += 1
    print('Report for post-class')
    print('Count: {0}'.format(totals['count']))
    print('Filtered: {0}'.format(totals['high_freq'] + totals['depth']))
    print('Heuristic depth: {0}'.format(totals['depth']))
    print('Highest Frequency: {0}'.format(totals['high_freq']))
def analyze_class_log(base_dir, project, version, source):
    """Aggregate and report linker stats for every class-level log file."""
    name_parts = (base_dir, project, version, source)
    files = [
        '{0}/linking-annotation-{1}-{2}-javaclass-{3}.log'.format(*name_parts),
        '{0}/linking-enumeration-{1}-{2}-javaclass-{3}.log'.format(*name_parts),
        '{0}/linking-class-{1}-{2}-javaclass-{3}.log'.format(*name_parts),
        '{0}/linking-generic-class-{1}-{2}-javageneric-{3}.log'.format(
            *name_parts),
    ]
    log_stats(process_log_files(files), 'class')
def analyze_method_log(base_dir, project, version, source):
    """Aggregate and report linker stats for every method-level log file."""
    name_parts = (base_dir, project, version, source)
    files = [
        '{0}/linking-method-{1}-{2}-javamethod-{3}.log'.format(*name_parts),
        '{0}/linking-generic-method-{1}-{2}-javageneric-{3}.log'.format(
            *name_parts),
    ]
    log_stats(process_log_files(files), 'method')
def analyze_field_log(base_dir, project, version, source):
    """Aggregate and report linker stats for every field-level log file."""
    name_parts = (base_dir, project, version, source)
    files = [
        '{0}/linking-annotation-field-{1}-{2}-javafield-{3}.log'.format(
            *name_parts),
        '{0}/linking-enumeration value-{1}-{2}-javafield-{3}.log'.format(
            *name_parts),
        '{0}/linking-field-{1}-{2}-javafield-{3}.log'.format(*name_parts),
        '{0}/linking-generic-field-{1}-{2}-javageneric-{3}.log'.format(
            *name_parts),
    ]
    log_stats(process_log_files(files), 'field')
def log_stats(log_entries, title):
    """Print an aggregate report for a list of LogEntry objects.

    Statistics are accumulated with record_stat_entry (count, running
    mean, sum of squared deviations, max), once over entries that matched
    at least one element, and once (the *0 dicts) over ALL entries
    including those that matched nothing. Reported sizes are printed as
    mean:stddev[:max].
    """
    # Entries whose original candidate set was non-empty.
    nonzero = 0
    # Per-filter activation counts.
    filters = defaultdict(int)
    # Running stats over non-empty entries: count / mean / M2 / max.
    count = defaultdict(int)
    m = defaultdict(int)
    s = defaultdict(int)
    maxv = defaultdict(int)
    # Same stats over ALL entries (the "with 0" figures).
    count0 = defaultdict(int)
    m0 = defaultdict(int)
    s0 = defaultdict(int)
    for log_entry in log_entries:
        if log_entry.origin_size > 0:
            nonzero += 1
            # Original
            record_stat_entry(count, m, s, maxv, 'original',
                    log_entry.origin_size)
            record_stat_entry(count, m, s, maxv, 'finalsize',
                    log_entry.final_size)
            record_stat_entry(count, m, s, maxv, 'unique',
                    log_entry.unique_types)
            # "hard" = ambiguous: several candidate types survived linking.
            if log_entry.unique_types > 1 and log_entry.final_size > 0:
                count['hard'] += 1
            if log_entry.final_size > 0:
                count['linked'] += 1
            if log_entry.from_snippet:
                count['snippet'] += 1
            if log_entry.custom_filtered:
                count['custom'] += 1
            for filter in log_entry.filters:
                if log_entry.filters[filter][0]:
                    filters[filter] += 1
                else:
                    # To ensure that all filters are reported
                    filters[filter] += 0
        record_stat_entry(count0, m0, s0, None, 'original',
                log_entry.origin_size)
        if log_entry.from_snippet:
            count0['snippet'] += 1
    print('Report for {0}'.format(title))
    print('Number of code-like terms: {0}'.format(len(log_entries)))
    print('Number of code-like terms that matched at least one elem: {0}'
            .format(nonzero))
    print('Number of code-like terms linked: {0}'.format(count['linked']))
    print('Number of code-like terms difficult to link: {0}'.format(count['hard']))
    print('Number of code-like terms from snippets: {0}'
            .format(count['snippet']))
    print('Number of code-like terms from snippets with 0: {0}'
            .format(count0['snippet']))
    print('Number of code-like terms custom filtered: {0}'
            .format(count['custom']))
    # The printed triples are mean : standard deviation : max.
    print('Original size: {0}:{1}:{2}'.format(m['original'], sqrt(s['original']
        / float(max(1, count['original']))), maxv['original']))
    print('Original size with 0: {0}:{1}'.format(m0['original'],
        sqrt(s0['original'] / float(max(1, count0['original'])))))
    print('Final size: {0}:{1}:{2}'.format(m['finalsize'], sqrt(s['finalsize']
        / float(max(1, count['finalsize']))), maxv['finalsize']))
    print('Unique Types: {0}:{1}:{2}'.format(m['unique'], sqrt(s['unique']
        / float(max(1, count['unique']))), maxv['unique']))
    print('Filters:')
    for filter in filters:
        print('{0}: {1}'.format(filter, filters[filter]))
    # original
    # original no zero
    # final no zero
    # unique no zero
    # snippet
    # snippet no zero
    # custom no zero
    # filters: activated no zero
def record_stat_entry(count, m, s, maxv, key, val):
    """Fold `val` into the running statistics for `key`.

    Uses Welford's online algorithm: `count` holds the sample count, `m`
    the running mean, and `s` the running sum of squared deviations.
    `maxv` tracks the maximum and is skipped when None.
    """
    count[key] += 1
    previous_mean = m[key]
    m[key] = previous_mean + (val - previous_mean) / float(count[key])
    s[key] += (val - previous_mean) * (val - m[key])
    if maxv is not None and val > maxv[key]:
        maxv[key] = val
def process_log_files(files):
    """Parse linker log files into a list of LogEntry objects.

    The logs are line-oriented: each 'Type '/'Method '/'Field ' header
    starts a new entry, followed by key/value lines and an optional
    'Filtering' section listing per-filter results. An entry whose
    'Ref pk:' was already seen earlier (in any file) is skipped so that a
    reference is only counted once. Files that do not exist are ignored.
    """
    log_entries = []
    entry = None
    # True while inside a 'Filtering' section of the current entry.
    filtering = False
    # Ref pks already processed, shared across all files.
    visited = set()
    # True when the current entry is a duplicate and must be dropped.
    skip = False
    for f in files:
        if not os.path.exists(f):
            continue
        with codecs.open(f, 'r', 'utf-8') as finput:
            for line in finput:
                line = line.strip()
                size = len(line)
                if line.startswith('Type ') or line.startswith('Method ') or \
                        line.startswith('Field '):
                    # New entry header: flush the previous entry.
                    filtering = False
                    if entry is not None and not skip:
                        entry.compute_unique_types()
                        log_entries.append(entry)
                    entry = LogEntry()
                    skip = False
                elif line.startswith('Original Size:'):
                    entry.origin_size = int(line[15:].strip())
                elif line.startswith('Final Size:'):
                    entry.final_size = int(line[12:].strip())
                elif line.startswith('Snippet'):
                    entry.from_snippet = line.find('True') > -1
                elif line.startswith('Custom Filtered'):
                    entry.custom_filtered = line.find('True') > -1
                elif line.startswith('Ref pk:'):
                    ref = line[8:]
                    if ref in visited:
                        skip = True
                    else:
                        visited.add(ref)
                elif line.startswith('Filtering'):
                    filtering = True
                elif line.startswith('Element:'):
                    filtering = False
                elif line.startswith('Original:'):
                    filtering = False
                    entry.temp_types.append(line[10:].strip())
                elif filtering and size > 0:
                    # Filter line format: '<name>: ... True/False ... -<n>'.
                    index = line.find(':')
                    if index < 0:
                        continue
                    name = line[:index].strip()
                    index2 = line.rfind('-')
                    activated = line[index:index2].find('True') > -1
                    number = int(line[index2+1:].strip())
                    entry.filters[name] = (activated, number)
    # Flush the last entry of the last file.
    # NOTE(review): unlike the in-loop flush, this path does not call
    # entry.compute_unique_types() — confirm whether that is intended.
    if entry is not None and not skip:
        log_entries.append(entry)
        entry = None
        filtering = False
        skip = False
    return log_entries
def recommend_single_types(codebase, simple_filters, d):
    """Return simple names of single-token types that spell an English
    word (per dictionary `d`) and are not already filtered."""
    candidates = set()
    type_elements = CodeElement.objects.\
        filter(codebase=codebase).\
        filter(kind__is_type=True).\
        iterator()
    for element in type_elements:
        simple_name = element.simple_name
        if len(tokenize(simple_name)) != 1:
            continue
        lowered = simple_name.lower()
        if lowered not in simple_filters and d.check(lowered):
            candidates.add(simple_name)
    return candidates
def recommend_acronyms(codebase, simple_filters):
    """Return simple names of types that are all-uppercase (underscores
    ignored) and are not already filtered."""
    acronyms = set()
    type_elements = CodeElement.objects.\
        filter(codebase=codebase).\
        filter(kind__is_type=True).\
        iterator()
    for element in type_elements:
        simple_name = element.simple_name
        if simple_name.lower() in simple_filters:
            continue
        if simple_name.replace('_', '').isupper():
            acronyms.add(simple_name)
    return acronyms
def recommend_single_fields(codebase, simple_filters):
    """Return simple names of field-like elements of `codebase` that look
    like constants/acronyms and are not already filtered.

    Fix: the query previously ignored the `codebase` parameter entirely
    and scanned the fields of EVERY codebase; it now restricts the query
    like the sibling recommenders (recommend_single_types,
    recommend_acronyms).
    """
    single_fields = set()
    fields = CodeElement.objects.\
        filter(codebase=codebase).\
        filter(Q(kind__kind='field') |
               Q(kind__kind='enumeration value') |
               Q(kind__kind='annotation field')).iterator()
    for element in fields:
        simple_name = element.simple_name
        # NOTE(review): replace('_', 'a') makes any name containing an
        # underscore fail isupper(), so only underscore-free uppercase
        # names are recommended. The acronym recommender uses
        # replace('_', '') instead — confirm this difference is intended.
        if simple_name.replace('_', 'a').isupper() and \
                simple_name.lower() not in simple_filters:
            single_fields.add(simple_name)
    return single_fields
def print_recommendations(single_types, acronyms, single_fields):
    """Print the three filter-recommendation sections to stdout."""
    print('FILTER RECOMMENDATIONS')
    sections = (
        ('\nSINGLE TYPE THAT LOOK LIKE WORDS', single_types),
        ('\nSINGLE TYPES THAT LOOK LIKE ACRONYMS', acronyms),
        ('\nFIELDS THAT LOOK LIKE ACRONYMS/WORDS', single_fields),
    )
    for header, names in sections:
        print(header)
        for name in names:
            print(name)
def compute_f_ids(filtered_ids_path, filtered_ids_level):
    """Read the filtered reference ids and normalize the level flag.

    Returns (ids, level): ids is a set of ints read one per line from
    filtered_ids_path (None when no path is given); level is 'global' for
    'g', 'local' for any other non-None value, else None.
    """
    if filtered_ids_level is None:
        f_level = None
    elif filtered_ids_level == 'g':
        f_level = 'global'
    else:
        f_level = 'local'

    f_ids = None
    if filtered_ids_path is not None:
        with codecs.open(filtered_ids_path, 'r', 'utf8') as f:
            f_ids = {int(line.strip()) for line in f}
    return (f_ids, f_level)
def compute_filters(codebase):
    """Build the filter lookup tables for a codebase.

    Returns (simple_filters, fqn_filters): the first maps a lowercased
    simple name to the list of filters sharing it, the second maps a
    lowercased FQN to its filter.
    """
    all_filters = CodeElementFilter.objects.filter(codebase=codebase).all()
    simple_filters = defaultdict(list)
    fqn_filters = {}
    for cfilter in all_filters:
        simple_name = clean_java_name(cfilter.fqn)[0].lower()
        simple_filters[simple_name].append(cfilter)
        fqn_filters[clean_java_name(cfilter.fqn.lower())[1]] = cfilter
    return (simple_filters, fqn_filters)
def get_filters(codebase):
    """Cached wrapper around compute_filters, keyed on the codebase."""
    return get_value(
        PREFIX_CODEBASE_FILTERS,
        get_codebase_key(codebase),
        compute_filters,
        [codebase])
def get_package_name(tree):
    """Extract the package name from a javadoc page's first <h2> heading
    by dropping the leading 'Package ' label."""
    heading_text = xtext(xpackage(tree)[0]).strip()
    return heading_text[len('Package '):]
def compute_code_words(codebase):
    """Compute the set of lowercased "code words" for a codebase.

    A type's simple name counts as a code word when it is multi-token, or
    when its single token is not an English dictionary word.
    """
    code_words = set()
    d = enchant.Dict('en-US')
    type_elements = CodeElement.objects.\
        filter(codebase=codebase).\
        filter(kind__is_type=True).\
        iterator()
    for element in type_elements:
        simple_name = element.simple_name
        lowered = simple_name.lower()
        # Short-circuit: the dictionary is consulted only for single
        # tokens, as in the original logic.
        if len(tokenize(simple_name)) > 1 or not d.check(lowered):
            code_words.add(lowered)
    logger.debug('Computed {0} code words for codebase {1}'.format(
        len(code_words), str(codebase)))
    return code_words
def compute_project_code_words(codebases):
    """Union of the cached per-codebase code-word sets."""
    code_words = set()
    for codebase in codebases:
        cached = get_value(PREFIX_CODEBASE_CODE_WORDS,
                           get_codebase_key(codebase),
                           compute_code_words,
                           [codebase])
        code_words.update(cached)
    return code_words
def get_project_code_words(project):
    """Cached union of code words across all codebases of a project."""
    all_codebases = CodeBase.objects.filter(
        project_release__project=project).all()
    return get_value(
        PREFIX_PROJECT_CODE_WORDS,
        project.pk,
        compute_project_code_words,
        [all_codebases])
def get_default_kind_dict():
    """Fetch the commonly used CodeElementKind rows, keyed by kind name."""
    kind_names = [
        'unknown', 'class', 'annotation', 'method', 'field',
        'xml element', 'xml attribute', 'xml attribute value',
        'xml file', 'hbm file', 'ini file', 'conf file',
        'properties file', 'log file', 'jar file', 'java file',
        'python file',
    ]
    return {name: CodeElementKind.objects.get(kind=name)
            for name in kind_names}
def get_java_strategies():
    """Map a kind hint to the ordered list of matching strategies to try;
    'unknown' gets the full battery."""
    all_strategies = [
        FileStrategy(), XMLStrategy(), ClassMethodStrategy(),
        MethodStrategy(), FieldStrategy(), AnnotationStrategy(),
        OtherStrategy(), IgnoreStrategy([EMAIL_PATTERN_RE, URL_PATTERN_RE])
    ]
    return {
        'method': [ClassMethodStrategy(), MethodStrategy()],
        'class': [AnnotationStrategy(), OtherStrategy()],
        'unknown': all_strategies,
    }
def get_default_filters():
    """Per-language snippet filters; only Java defines any by default."""
    return {
        JAVA_LANGUAGE: [SQLFilter(), BuilderFilter(), MacroFilter()],
        XML_LANGUAGE: [],
        OTHER_LANGUAGE: [],
    }
def classify_code_snippet(text, filters):
    """Detect a snippet's language (XML, Java, or other) and persist it
    as a CodeSnippet; returns None when classification or saving fails."""
    code = None
    try:
        if is_xml_snippet(text)[0]:
            language = XML_LANGUAGE
        elif is_java_snippet(text, filters[JAVA_LANGUAGE])[0]:
            language = JAVA_LANGUAGE
        else:
            language = OTHER_LANGUAGE
        code = CodeSnippet(language=language, snippet_text=text)
        code.save()
    except Exception:
        # Best-effort: log and fall through to return None.
        logger.exception('Error while classifying snippet.')
    return code
def parse_text_code_words(text, code_words):
    """Return one match per word of `text` found in `code_words`, each
    tagged as a class reference."""
    # Because there is a chance that the FQN will match, keep the
    # priority low. Because at this stage, we force it to choose one
    # kind only, every code-word match is reported as a 'class'.
    priority = 1
    return [create_match((start, end, 'class', priority))
            for (word, start, end) in split_pos(text)
            if word in code_words]
def process_children_matches(text, matches, children, index, single_refs,
        kinds, kinds_hierarchies, save_index, find_context):
    """Persist one SingleCodeReference per child match of a parent match.

    Each child is attached to its parent reference (resolved through the
    kind hierarchies), tagged with its position among the siblings, and
    appended to single_refs.
    """
    for i, child in enumerate(children):
        # child is (start, end, kind); slice the matched text back out.
        content = text[child[0]:child[1]]
        parent_reference = find_parent_reference(child[2], single_refs,
                kinds_hierarchies)
        child_reference = SingleCodeReference(
                content=content,
                kind_hint=kinds[child[2]],
                original_kind_hint=kinds[child[2]],
                child_index=i,
                parent_reference=parent_reference)
        if save_index:
            # index is the position of the parent match in the text.
            child_reference.index = index
        if find_context:
            child_reference.sentence = find_sentence(text, child[0],
                    child[1])
            child_reference.paragraph = find_paragraph(text, child[0],
                    child[1])
        child_reference.save()
        single_refs.append(child_reference)
def process_matches(text, matches, single_refs, kinds, kinds_hierarchies,
        save_index, find_context, existing_refs):
    """Persist SingleCodeReference rows for every valid match in `matches`.

    Matches whose content is listed in `existing_refs` are skipped and
    consumed from that list; IGNORE_KIND matches only raise the returned
    `avoided` flag. Returns True when at least one match was ignored.

    Fix: the skip-existing branch previously assigned
    existing_refs.index(content) to `index`, clobbering the running match
    counter and corrupting the `.index` stored on subsequent references;
    it now uses a dedicated local variable.
    """
    filtered = set()
    index = 0
    avoided = False
    for match in matches:
        if not is_valid_match(match, matches, filtered):
            filtered.add(match)
            continue
        (parent, children) = match
        content = text[parent[0]:parent[1]]
        if parent[2] == IGNORE_KIND:
            avoided = True
            continue
        # This is a list of refs to avoid: skip (and consume) content we
        # were told already exists.
        try:
            existing_pos = existing_refs.index(content)
            del existing_refs[existing_pos]
            continue
        except ValueError:
            # That's ok, we can proceed!
            pass
        main_reference = SingleCodeReference(
            content=content,
            original_kind_hint=kinds[parent[2]],
            kind_hint=kinds[parent[2]])
        if save_index:
            main_reference.index = index
        if find_context:
            main_reference.sentence = find_sentence(text, parent[0],
                    parent[1])
            main_reference.paragraph = find_paragraph(text, parent[0],
                    parent[1])
        main_reference.save()
        single_refs.append(main_reference)
        # Process children
        process_children_matches(text, matches, children, index,
                single_refs, kinds, kinds_hierarchies, save_index,
                find_context)
        index += 1
    return avoided
def parse_single_code_references(text, kind_hint, kind_strategies, kinds,
        kinds_hierarchies=ALL_KINDS_HIERARCHIES, save_index=False,
        strict=False, find_context=False, code_words=None, existing_refs=None):
    """Find and persist all single code references in `text`.

    The strategies registered for the kind hint (falling back to
    'unknown') produce candidate matches, optionally augmented with
    code-word matches. When nothing matched, nothing was deliberately
    ignored, and strict is False, the whole text is saved as one
    reference of `kind_hint`.
    """
    single_refs = []
    if existing_refs is None:
        existing_refs = []
    kind_text = kind_hint.kind
    if kind_text not in kind_strategies:
        kind_text = 'unknown'
    matches = []
    for strategy in kind_strategies[kind_text]:
        matches.extend(strategy.match(text))
    if code_words is not None:
        matches.extend(parse_text_code_words(text, code_words))
    # Sort by start offset so reference indices follow text order.
    matches.sort(key=lambda match: match[0][0])
    avoided = process_matches(text, matches, single_refs, kinds,
            kinds_hierarchies, save_index, find_context, existing_refs)
    if not single_refs and not avoided and not strict:
        # Fall back to treating the whole text as one reference.
        fallback = SingleCodeReference(content=text, kind_hint=kind_hint,
                original_kind_hint=kind_hint)
        fallback.save()
        single_refs.append(fallback)
    return single_refs
def get_default_p_classifiers(include_stop=True):
    """Ordered (predicate, language) pairs used to classify paragraphs;
    include_stop adds the rest-of-reply stop classifier."""
    p_classifiers = [
        (is_empty_lines, REPLY_LANGUAGE),
        (is_reply_lines, REPLY_LANGUAGE),
        (is_reply_header, REPLY_LANGUAGE),
    ]
    if include_stop:
        p_classifiers.append((is_rest_reply, STOP_LANGUAGE))
    java_filters = get_default_filters()[JAVA_LANGUAGE]
    p_classifiers.extend([
        (partial(is_java_lines, filters=java_filters), JAVA_LANGUAGE),
        (is_exception_trace_lines, JAVA_EXCEPTION_TRACE),
        (is_log_lines, LOG_LANGUAGE),
        (is_xml_lines, XML_LANGUAGE),
    ])
    return p_classifiers
def get_default_s_classifiers():
    """Per-language snippet merge predicates; only Java defines one."""
    return {JAVA_LANGUAGE: can_merge_java}
def restore_original_kind(path, kind_str):
    """Set original_kind_hint to `kind_str` for every reference whose pk
    appears as a 'Ref pk: N' line in the log file at `path`."""
    kind = CodeElementKind.objects.get(kind=kind_str)
    with codecs.open(path, 'r', 'utf8') as f:
        for line in f:
            stripped = line.strip()
            if not stripped.startswith('Ref pk:'):
                continue
            pk = int(stripped[8:].strip())
            reference = SingleCodeReference.objects.get(pk=pk)
            reference.original_kind_hint = kind
            reference.save()
|
#!/usr/bin/env python2
import unittest
from hypothesis import given
from hypothesis.strategies import text
from comp_ui import _PromptLen
class PromptTest(unittest.TestCase):
    """Property-based tests for the prompt-length helper."""

    @given(text())
    def testNeverPanics(self, s):
        # _PromptLen must return a non-negative value for ANY unicode
        # string hypothesis can generate.
        self.assertIs(_PromptLen(s) >= 0, True)
# Run the test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
|
from __future__ import annotations
import logging
import re
from abc import ABC, abstractmethod
from typing import Sequence
from home_connect_async import Appliance, Events
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
class EntityBase(ABC):
    """Base class with common methods for all the entities """
    # Entities are pushed updates via callbacks, so HA must not poll them.
    should_poll = False
    # The Home Connect appliance this entity belongs to.
    _appliance: Appliance = None

    def __init__(self, appliance:Appliance, key:str=None, conf:dict=None) -> None:
        """Initialize the sensor.

        appliance: the owning Home Connect appliance.
        key: the Home Connect data key this entity exposes (also used as an
             event name when registering callbacks).
        conf: optional per-entity configuration dict.
        """
        self._appliance = appliance
        self._key = key
        self._conf = conf if conf else {}
        self.entity_id = f'home_connect.{self.unique_id}'

    @property
    def haId(self) -> str:
        """ The haID of the appliance, normalized for use in entity IDs """
        return self._appliance.haId.lower().replace('-','_')

    @property
    def device_info(self):
        """Return information to link this entity with the correct device."""
        return {
            "identifiers": {(DOMAIN, self.haId)},
            "name": self._appliance.name,
            "manufacturer": self._appliance.brand,
            "model": self._appliance.vib,
        }

    @property
    def device_class(self) -> str:
        """ Return the device class from the entity config, if defined """
        if self._conf:
            return self._conf.get('class')
        else:
            return None

    @property
    def unique_id(self) -> str:
        """ The unique ID of the entity """
        return f"{self.haId}_{self._key.lower().replace('.','_')}"

    @property
    def name_ext(self) -> str|None:
        """ Provide the suffix of the name, can be overridden by sub-classes to provide a custom or translated display name """
        return None

    @property
    def name(self) -> str:
        """ The display name: "<brand> <appliance> - <suffix>" """
        appliance_name = self._appliance.name if self._appliance.name else self._appliance.type
        name = self.name_ext if self.name_ext else self.pretty_enum(self._key)
        return f"{self._appliance.brand} {appliance_name} - {name}"

    # This property is important to let HA know if this entity is online or not.
    # If an entity is offline (return False), the UI will reflect this.
    @property
    def available(self) -> bool:
        """ Availability of the entity """
        return self._appliance.connected

    @property
    def program_option_available(self) -> bool:
        """ Helper to be used for program options controls """
        # The option is editable only when: connected, a program is selected,
        # the option belongs to it, the selected program is in the available
        # set, no program is running, and remote control is not disabled.
        # NOTE(review): the chain returns its first falsy operand, which is
        # not necessarily a strict bool despite the annotation.
        return self._appliance.connected \
            and self._appliance.selected_program \
            and (self._key in self._appliance.selected_program.options) \
            and self._appliance.available_programs \
            and (self._appliance.selected_program.key in self._appliance.available_programs) \
            and not self._appliance.active_program \
            and (self._key in self._appliance.available_programs[self._appliance.selected_program.key].options) \
            and (
                "BSH.Common.Status.RemoteControlActive" not in self._appliance.status or
                self._appliance.status["BSH.Common.Status.RemoteControlActive"]
            )

    async def async_added_to_hass(self):
        """Run when this Entity has been added to HA."""
        # Always listen for connection/data changes; also listen for this
        # entity's own key when it has one.
        events = [Events.CONNECTION_CHANGED, Events.DATA_CHANGED]
        if self._key:
            events.append(self._key)
        self._appliance.register_callback(self.async_on_update, events)

    async def async_will_remove_from_hass(self):
        """Entity being removed from hass."""
        # Deregister exactly the events registered in async_added_to_hass.
        events = [Events.CONNECTION_CHANGED, Events.DATA_CHANGED]
        if self._key:
            events.append(self._key)
        self._appliance.deregister_callback(self.async_on_update, events)

    @abstractmethod
    async def async_on_update(self, appliance:Appliance, key:str, value) -> None:
        """Handle an update pushed by the appliance; implemented by sub-classes."""
        pass

    def pretty_enum(self, val:str) -> str:
        """ Extract a display string from a Home Connect enum string """
        # Keep the last dotted segment and split its CamelCase (and digit
        # runs) into space-separated words.
        name = val.split('.')[-1]
        parts = re.findall('[A-Z0-9]+[^A-Z]*', name)
        return ' '.join(parts)
class EntityManager():
    """ Helper class that deduplicates entity registration.

    Duplication can happen because there is a race condition between the task
    that loads data from the Home Connect service and the initialization of
    the platforms. Entities are staged with add() and handed to Home
    Assistant exactly once when register() is called.
    """
    def __init__(self, async_add_entities:AddEntitiesCallback):
        self._existing_ids = set()
        self._pending_entities:dict[str, Entity] = {}
        self._entity_appliance_map = {}
        self._async_add_entities = async_add_entities

    def add(self, entity:Entity) -> None:
        """ Stage an entity unless its unique_id is already known or pending """
        if not entity:
            return
        uid = entity.unique_id
        if uid in self._existing_ids or uid in self._pending_entities:
            return
        self._pending_entities[uid] = entity

    def register(self) -> None:
        """ Hand all pending entities over to Home Assistant """
        staged = list(self._pending_entities.values())
        staged_ids = set(self._pending_entities)
        # Remember which unique_ids belong to which appliance so that
        # remove_appliance() can forget them later.
        for entity in staged:
            self._entity_appliance_map.setdefault(entity.haId, set()).add(entity.unique_id)
        self._async_add_entities(staged)
        self._existing_ids |= staged_ids
        self._pending_entities = {}

    def remove_appliance(self, appliance:Appliance):
        """ Remove an appliance and all its registered entities """
        removed = self._entity_appliance_map.pop(appliance.haId, None)
        if removed:
            self._existing_ids -= removed
|
import FWCore.ParameterSet.Config as cms
# EDAnalyzer that fetches a DQM XML file payload from the event setup.
dqmXMLFileGetter=cms.EDAnalyzer("DQMXMLFileEventSetupAnalyzer",
    # Label of the XML payload to retrieve.
    labelToGet = cms.string('GenericXML')
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
from Axon.Component import component
from Axon.Ipc import producerFinished, shutdownMicroprocess
from CreatePSI import SerialiseEITSection
class CreateEventInformationTable(component):
    """Kamaelia component that serialises EIT sections arriving on "inbox"
    and forwards the serialised bytes on "outbox".
    """
    def __init__(self):
        super(CreateEventInformationTable,self).__init__()
        self.serialiser = SerialiseEITSection()

    def shutdown(self):
        """Forward control messages to "signal"; return True once a
        producerFinished/shutdownMicroprocess is seen.

        Note: producerFinished and shutdownMicroprocess were previously
        referenced without being imported, raising NameError at runtime;
        they are now imported from Axon.Ipc at the top of the file.
        """
        while self.dataReady("control"):
            msg = self.recv("control")
            self.send(msg,"signal")
            if isinstance(msg, (shutdownMicroprocess, producerFinished)):
                return True
        return False

    def main(self):
        # Serialise every waiting section, then yield control back to the
        # scheduler until more data (or a shutdown request) arrives.
        while not self.shutdown():
            while self.dataReady("inbox"):
                section = self.recv("inbox")
                serialisedSection = self.serialiser.serialise(section)
                self.send(serialisedSection,"outbox")
            self.pause()
            yield 1
if __name__ == "__main__":
    from Kamaelia.Chassis.Pipeline import Pipeline
    from Kamaelia.File.Reading import RateControlledFileReader
    from Kamaelia.Device.DVB.Core import DVB_Demuxer
    from Kamaelia.Util.Console import ConsoleEchoer
    from Kamaelia.Device.DVB.Parse.ParseEventInformationTable import ParseEventInformationTable_Subset
    from Kamaelia.Device.DVB.Parse.PrettifyTables import PrettifyEventInformationTable
    from Kamaelia.Device.DVB.SoftDemux import DVB_SoftDemuxer
    from Kamaelia.Device.DVB.Parse.ReassemblePSITables import ReassemblePSITables
    from Kamaelia.Util.PureTransformer import PureTransformer
    from Kamaelia.Chassis.Graphline import Graphline
    from Kamaelia.Util.Comparator import Comparator
    from Kamaelia.Util.Splitter import Plug, PlugSplitter
    from Kamaelia.Util.PassThrough import PassThrough
    from Kamaelia.File.Writing import SimpleFileWriter

    # Round-trip test: parse EIT data from a recorded transport stream,
    # re-serialise it with CreateEventInformationTable, re-parse it, and
    # write both parses to files so they can be compared with diff.
    TS_FILE = "/home/matteh/dvb/2008-05-16 11.27.13 MUX1_EIT_TOT_TDT.ts"

    # def AddInVersion():
    #     def transf(x):
    #         x["version"] = 0
    #         return x
    #     return PureTransformer(transf)

    print "run a diff over the two output files to compare the results"

    # Splitter feeds the parsed EIT to both output branches below.
    splitter = PlugSplitter()

    # PID 0x12 carries the EIT; reassemble its PSI tables and parse all four
    # EIT sub-tables (actual/other x present-following/schedule).
    Pipeline(
        RateControlledFileReader(TS_FILE, readmode="bytes", rate=1000*1000, chunksize=188),
        DVB_SoftDemuxer( {0x12:["outbox"]} ),
        ReassemblePSITables(),
        ParseEventInformationTable_Subset( \
            actual_presentFollowing = True,
            other_presentFollowing = True,
            actual_schedule = True,
            other_schedule = True,
            ),
        splitter
    ).activate()

    # Branch 1: prettify the original parse straight to disk.
    Plug(splitter, Pipeline(
        PrettifyEventInformationTable(),
        SimpleFileWriter("original_eit_parsed.text"),
    )).activate()

    # Branch 2: re-serialise, re-parse, prettify to a second file.
    Plug(splitter, Pipeline(
        CreateEventInformationTable(),
        ParseEventInformationTable_Subset( \
            actual_presentFollowing = True,
            other_presentFollowing = True,
            actual_schedule = True,
            other_schedule = True,
            ),
        PrettifyEventInformationTable(),
        SimpleFileWriter("regenerated_eit_parsed.text"),
    )).run()
|
#-*-coding: utf-8-*-
#todo p.246 ~ p.249
#todo code 6-1 ~ code 6-4
#todo 6.1.1 단어와 문자의 원-핫 인코딩
# 단어 수준의 원-핫 인코딩하기
# Word-level one-hot encoding, done by hand (no library).
import numpy as np

samples = ['The cat sat on the mat.', 'The dog ate my homework.']

# Assign each distinct word an index starting at 1 (index 0 stays unused).
token_index = {}
for sample in samples:
    for word in sample.split():
        token_index.setdefault(word, len(token_index) + 1)

max_length = 10  # keep at most this many words per sample
vocab_size = max(token_index.values()) + 1
results = np.zeros(shape=(len(samples), max_length, vocab_size))
for i, sample in enumerate(samples):
    for j, word in enumerate(sample.split()[:max_length]):
        results[i, j, token_index.get(word)] = 1.
print(results.shape)
# Character-level one-hot encoding.
import string

samples = ['The cat sat on the mat.', 'The dog ate my homework.']
characters = string.printable  # all printable ASCII characters
# Rank every character from 1 upwards; index 0 stays unused.
token_index = {ch: rank for rank, ch in enumerate(characters, start=1)}

max_length = 50  # keep at most this many characters per sample
results = np.zeros((len(samples), max_length, max(token_index.values()) + 1))
for i, sample in enumerate(samples):
    for j, character in enumerate(sample):
        results[i, j, token_index.get(character)] = 1.
# Word-level one-hot encoding with Keras.
from keras.preprocessing.text import Tokenizer

samples = ['The cat sat on the mat.', 'The dog ate my homework.']
# Keep only the 1,000 most frequent words.
tokenizer = Tokenizer(num_words=1000)
tokenizer.fit_on_texts(samples)  # build the word index
sequences = tokenizer.texts_to_sequences(samples)  # strings -> lists of integer indices
one_hot_results = tokenizer.texts_to_matrix(samples, mode='binary')  # one-hot binary vectors directly
word_index = tokenizer.word_index
print('%s개의 고유한 토큰을 찾았습니다.' % len(word_index))
# Word-level one-hot encoding using the hashing trick.
import numpy as np

samples = ['The cat sat on the mat.', 'The dog ate my homework.']
dimensionality = 1000  # hash-space size; collisions grow as the vocabulary nears this
max_length = 10        # maximum number of words kept per sentence
results = np.zeros((len(samples), max_length, dimensionality))
for i, sample in enumerate(samples):
    for j, word in enumerate(sample.split()[:max_length]):
        # hash() can be negative, so fold through abs before the modulus.
        slot = abs(hash(word)) % dimensionality
        results[i, j, slot] = 1.
|
import sys
import json
from facebook_scraper import get_posts
# Fetch a single Facebook post by URL (first CLI argument) and dump it as JSON.
post = next(get_posts(post_urls=[sys.argv[1]], cookies="src/services/cookies.txt"))
# Drop 'time' before dumping -- presumably a datetime that json.dumps cannot
# serialise; confirm against facebook_scraper's post schema.
post.pop('time')
print(json.dumps(post))
|
import cv2
import numpy as np
import socket
import SocketServer
# KERAS stuff
from keras.layers import Dense, Activation
from keras.models import Sequential
import keras.models
SIGMA = 0.25
class NeuralNetwork(object):
    """Wraps a pre-trained Keras model that maps a camera frame to one of
    four steering classes (as a one-hot vector)."""
    def __init__(self):
        # Loads the trained model from disk at construction time.
        self.model = keras.models.load_model('nn_h5/nn.h5')

    def preprocess(self, frame):
        # Flatten to a single 1x38400 row -- assumes a 320x120 grayscale
        # input (the lower half of a 320x240 frame; confirm against the
        # capture code) -- and scale pixel values to [0, 1].
        image_array = frame.reshape(1, 38400).astype(np.float32)
        image_array = image_array / 255.
        return image_array

    def predict(self, image):
        """Return a length-4 one-hot vector marking the most likely class."""
        image_array = self.preprocess(image)
        y_hat = self.model.predict(image_array)
        i_max = np.argmax(y_hat)
        y_hat_final = np.zeros((1,4))
        np.put(y_hat_final, i_max, 1)
        return y_hat_final[0]
class RCDriver(object):
    """Turns a one-hot steering prediction into a drive command.

    The real car-control calls are commented out, so only the chosen
    direction is printed.
    """
    def steer(self, prediction):
        prediction = np.asarray(prediction)
        # FORWARD
        if np.array_equal(prediction, [0., 0., 1., 0.]):
            # car.forward(100)
            # car.pause(500)
            print("Forward")
        # FORWARD-LEFT
        elif np.array_equal(prediction, [1., 0., 0., 0.]):
            # car.left(300)
            # car.forward_left(200)
            # car.left(700)
            # car.pause(500)
            print("Left")
        # FORWARD-RIGHT
        elif np.array_equal(prediction, [0., 1., 0., 0.]):
            # car.right(300)
            # car.forward_right(200)
            # car.right(700)
            # car.pause(500)
            print("Right")
class VideoStreamHandler(object):
model = NeuralNetwork()
driver = RCDriver()
def __init__(self):
self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server_socket.bind(('172.14.1.126', 8000)) # The IP address of your computer (Paul's MacBook Air). This script should run before the one on the Pi.
print 'Listening...'
self.server_socket.listen(0)
# Accept a single connection ('rb' is 'read binary')
self.connection = self.server_socket.accept()[0].makefile('rb')
# Establish a condition that RaspPi should be sending images.
self.send_inst = True
# Start handling video feed, predict, and drive
self.handle()
def auto_canny(self, blurred):
# Compute the median of the single channel pixel intensities
global SIGMA
v = np.median(blurred)
# Apply automatic Canny edge detection using the computed median of the image
lower = int(max(0, (1.0 - SIGMA) * v))
upper = int(min(255, (1.0 + SIGMA) * v))
edged = cv2.Canny(blurred, lower, upper)
return edged
def handle(self):
model = NeuralNetwork()
driver = RCDriver()
# Stream video frames one by one.
try:
stream_bytes = ' '
while self.send_inst:
stream_bytes += self.connection.read(1024)
first = stream_bytes.find('\xff\xd8') # ? What is this string and where did it come from?
last = stream_bytes.find('\xff\xd9')
if first != -1 and last != -1:
jpg = stream_bytes[first:last + 2]
stream_bytes = stream_bytes[last + 2:]
gray = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.IMREAD_GRAYSCALE)
# image = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.IMREAD_UNCHANGED)
# Lower half of the grayscale image
roi = gray[120:240, :]
# Apply GuassianBlur (reduces noise)
blurred = cv2.GaussianBlur(roi, (3, 3), 0)
# Apply Canny filter
auto = self.auto_canny(blurred)
# Show streaming images
cv2.imshow('What the model sees', auto)
# cv2.imshow('Original', image)
# Neural network model makes prediciton
prediction = model.predict(auto)
# Send prediction to driver to tell it how to steer
driver.steer(prediction)
if cv2.waitKey(1) & 0xFF == ord('q'):
self.send_inst = False
break
cv2.destroyAllWindows()
finally:
self.connection.close()
self.server_socket.close
print 'Connection closed'
print 'Socket closed'
if __name__ == '__main__':
    # Construction blocks in __init__ until the Pi connects, then streams.
    print '\n \"Hold on to your butts.\" \n'
    VideoStreamHandler()
|
import json
import time
from pprint import pprint
import keyboard
# with open('bindings.json') as f:
# data = json.load(f)
#
# action = data['spaceship_general']['v_exit']
# pprint(action)
def run():
    """Trigger the self-destruct key sequence, then eject."""
    self_destruct()
    eject()
def self_destruct():
    """Tap alt+backspace three times, holding each press for ~0.1 s."""
    for _ in range(3):
        keyboard.press('alt+backspace')
        time.sleep(0.1)
        keyboard.release('alt+backspace')
def eject():
    """Send a single alt+f keypress (the eject binding)."""
    keyboard.press_and_release('alt+f')
if __name__ == '__main__':
    # Guard the call so importing this module does not immediately fire
    # the key macros.
    run()
|
from random import randint
import time
# Print 59 blank lines to push any previous output off-screen.
for _ in range(1, 60):
    print('')

# Falling-snow animation: each frame prints one character (or a greeting)
# at a random horizontal offset, ~6-7 frames per second.
for frame in range(1, 1000):
    # Idiom fix: build the random indentation with string multiplication
    # instead of the old character-by-character while loop.
    offset = ' ' * randint(1, 500)
    if frame % 10 == 0:
        print(offset + 'Happy New Year')
    elif frame % 91 == 0:
        # BUG FIX: the message previously read 'Merry Chritmas'.
        print(offset + 'Merry Christmas')
    else:
        # Roughly 2 in 3 frames show '*', the rest 'X'.
        if randint(0, 2) >= 1:
            print(offset + "*")
        else:
            print(offset + "X")
    time.sleep(0.15)
|
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
from dsGameSolver.gameSolver import dsSolve
alpha = 0.25   ## productivity parameter
beta = 0.95    ## discount factor
gamma = 2      ## relative risk aversion
delta = 0      ## capital depreciation


## utility function (CRRA)
def u(c):
    return c ** (1 - gamma) / (1 - gamma)


## production function
def f(k):
    return ((1 - beta) / (alpha * beta)) * k ** alpha


## state space K[a] (capital stock)
k_min, k_max, k_step = 0.4, 1.6, 0.1
num_k = int(1 + (k_max - k_min) / k_step)
K = np.linspace(k_min, k_max, num_k)

## action space C[s,a]: consumption implied by moving from capital K[s] to
## K[a]; infeasible (negative) choices are marked NaN
C = np.nan * np.ones((num_k, num_k))
for s in range(num_k):
    C[s] = (1 - delta) * K[s] + f(K[s]) - K
C[C < 0] = np.nan

## number of feasible actions in each state (feasible choices form a
## prefix of each row, since C[s] decreases in the target capital)
nums_a = np.zeros(num_k, dtype=np.int32)
for s in range(num_k):
    nums_a[s] = np.count_nonzero(~np.isnan(C[s]))

## single-player payoff: utility of each feasible consumption level
payoffMatrices = []
for s in range(num_k):
    row = np.array([[u(C[s, a]) for a in range(nums_a[s])]])
    payoffMatrices.append(row)

## deterministic transitions: choosing action a moves the state to a,
## i.e. a rectangular identity matrix per state
transitionMatrices = []
for s in range(num_k):
    transitionMatrices.append(np.eye(nums_a[s], num_k))
# Solve the dynamic program as a one-player dynamic stochastic game.
equilibrium = dsSolve(
    payoffMatrices, transitionMatrices, beta,
    showProgress=True, plotPath=True)
# Sample solver output for reference:
# Dynamic stochastic game with 13 states, 1 players and 109 actions.
# Initial value for homotopy continuation successfully found.
# ==================================================
# Start homotopy continuation
# Step 7006: t = 29629.89, s = 121765.69, ds = 1000.00
# Final Result: max|y-y_|/ds = 0.0E+00, max|H| = 4.9E-09
# Time elapsed = 0:01:37
# End homotopy continuation
# ==================================================

# Extract the optimal policy (consumption) and the value function per state.
policies = np.nan * np.ones(num_k)
values = np.nan * np.ones(num_k)
for s in range(num_k):
    ## get optimal actions from pure-strategy equilibrium
    a = np.where((np.round(equilibrium['strategies'][s,0]) == 1))[0]
    policies[s] = C[s, a]
    values[s] = equilibrium['stateValues'][s,0]
# Plot policy and value functions side by side.
fig = plt.figure(figsize=(12,4))
panels = [
    (121, 'Policy Function', r'consumption $c_{t}$', policies),
    (122, 'Value Function', r'present value of utility $V(k_{t})$', values),
]
for position, title, ylabel, series in panels:
    ax = fig.add_subplot(position)
    ax.set_title(title)
    ax.set_xlabel(r'capital stock $k_{t}$')
    ax.set_ylabel(ylabel)
    ax.plot(K, series)
    ax.grid()
plt.show()
"""
## get plot of policy and value functions
fig.savefig('OptimalGrowthModel_functions.pdf', bbox_inches='tight')
## get plot of path
from dsGameSolver.gameClass import dsGame
game = dsGame(payoffMatrices, transitionMatrices, beta)
game.init()
game.solve(trackingMethod='normal', showProgress=True)
fig = game.plot()
fig.savefig('OptimalGrowthModel_path.pdf', bbox_inches='tight')
"""
## ============================================================================
## end of file
## ============================================================================
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pytorch_lightning.metrics.functional.classification import (
accuracy,
auc,
auroc,
average_precision,
confusion_matrix,
dice_score,
f1_score,
fbeta_score,
multiclass_precision_recall_curve,
multiclass_roc,
precision,
precision_recall,
precision_recall_curve,
recall,
roc,
stat_scores,
stat_scores_multiple_classes,
to_categorical,
to_onehot,
iou,
)
from pytorch_lightning.metrics.functional.nlp import bleu_score
from pytorch_lightning.metrics.functional.regression import (
mae,
mse,
psnr,
rmse,
rmsle,
ssim
)
from pytorch_lightning.metrics.functional.self_supervised import (
embedding_similarity
)
|
import cv2
import numpy as np
import Queue
class Digit(object):
    '''
    Extracts the digit from a cell.
    Implements the classic `Largest connected component` algorithm.
    '''
    def __init__(self, image):
        self.graph = image.copy()
        # NOTE(review): numpy shape is (rows, cols); rows are bound to W and
        # cols to H here, while buildDigit/bfs use H for the first index and
        # W for the second. This only lines up when the cells are square --
        # confirm the input cells are square.
        self.W, self.H = self.graph.shape
        self.visited = [[False for _ in xrange(
            self.H)] for _ in xrange(self.W)]
        # digit[i][j] holds the component id of pixel (i, j), or None.
        self.digit = [[None for _ in xrange(self.H)] for _ in xrange(self.W)]
        self.buildDigit()

    def buildDigit(self):
        """Label connected components seeded from the central region, then
        keep only the largest one as a binary (0/255) image."""
        componentId = 0
        # Seed flood fills only from the central half of the cell, where
        # the digit is expected to sit (avoids grid-line components).
        A, C = self.H / 4, 3 * self.H / 4 + 1
        B, D = self.W / 4, 3 * self.W / 4 + 1
        for i in xrange(A, C):
            for j in xrange(B, D):
                if not self.visited[i][j]:
                    self.bfs(i, j, componentId)
                    componentId += 1
        # Count pixels per component and keep the largest.
        componentSizes = [0 for _ in xrange(componentId)]
        for row in self.digit:
            for cell in row:
                if cell is not None:
                    componentSizes[cell] += 1
        largest = componentSizes.index(max(componentSizes))
        for i in xrange(self.H):
            for j in xrange(self.W):
                self.digit[i][j] = 255 if self.digit[i][j] == largest else 0
        self.digit = np.asarray(self.digit, dtype=np.uint8)

    def bfs(self, i, j, num):
        """Breadth-first flood fill labelling white (255) pixels with num."""
        q = Queue.Queue()
        q.put((i, j))
        while not q.empty():
            i, j = q.get()
            # Skip out-of-range coordinates and non-white pixels.
            inValidRow = i not in xrange(0, self.H)
            inValidCol = j not in xrange(0, self.W)
            invalidCell = inValidRow or inValidCol
            invalidPixel = invalidCell or self.graph[i][j] != 255
            if invalidPixel or self.visited[i][j]:
                continue
            self.digit[i][j] = num
            self.visited[i][j] = True
            # Enqueue all 8 neighbours (and the pixel itself, harmlessly --
            # it is filtered by the visited check above).
            for di in [-1, 0, 1]:
                for dj in [-1, 0, 1]:
                    q.put((i + di, j + dj))
|
#!/usr/bin/python
'''
NAMEGEN.PY V1.0
Got it working, generating the names from one or many of several databases,
being Goblin, Western (m+f), Elf (m+f), Orc, and Greek Gods. Also is cross-
platform (only checks for which backslash it should use), which should save time
between two files. Currently the names are generated and scored through bigrams,
but I would like to have a better testing so that less nonsensical names (like
RONONONOR) aren't generated. Also added the number_names parameter to be edited
by the user.
NAMEGEN.PY V1.01
Added japanese names, reshaped gui to be much neater.
NAMEGEN.PY V1.1
Adding stat generation and bonuses for your selected race, which you can
(obviously) now select.
NAMEGEN.PY V1.2
Reshaped the GUI again, still not entirely happy, but it will do. Added gender,
height, weight, age, and language generators, which can all be locked and
entered by the user so that they are not generated. All of those metrics are
affectedby the race of the character, too, and differ between male and female
(height and weight, anyway). Height and weight are generated from a gaussian
(normal) distribution, and age is generated by a uniform distribution.
NAMEGEN.PY V1.2.1
Added modifiers for the stats, as well as AC.
'''
import random, os, sys
import platform
import tkFont
import itertools, numpy as np
from Tkinter import *
import ttk
# Uppercase alphabet used for n-gram smoothing and the first-letter index.
alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
vowels = {"a", "e", "i", "o", "u", "A", "E", "I", "O", "U"}
# Pick the path separator for the current OS.
system = platform.system()
if system == 'Windows':
    folderslash = '\\'
else:
    folderslash = '/'
DEFAULT_NUMBER_NAMES = 1
BASE_ARMOUR = 10  # base AC before the Dex modifier is added
# Name databases shipped as namedb/<folder>_names.txt
folders = ['Western_female','Western_male','elf_female','elf_male','goblin','orc','dwarf','greek','japanese_male','japanese_female']
classes = ['human','elf','goblin','half-orc','dwarf','halfling','dragonborn','half-giant','tiefling']
stat_names = ['Str', 'Dex', 'Con', 'Int', 'Wis', 'Char']
info_labels =['Name','Age','Height','Weight','Languages','Armor Class']
default_stats = [15,14,13,12,10,8]  # fallback array when the rolled stats are too low
# Per-race stat bonuses, ordered as stat_names (Str..Char).
stat_bonuses = [[1, 1, 1, 1, 1, 1], #human
                [0, 2, 0, 0, 0, 0], #elf
                [0, 2, 0, 0, 0, 0], #goblin
                [2, 1, 0, 0, 0, 0], #half-orc
                [0, 0, 2, 0, 0, 0], #dwarf
                [0, 2, 0, 0, 0, 0], #halfing
                [2, 0, 0, 0, 0, 1],#dragonborn
                [2, 0, 0, 0, 0, 0], #half-giant
                [0, 0, 0, 1, 0, 2] #tiefling
                ]
# Per-race stat priority (1 = most important) used to assign rolled stats.
stat_preferences = [[1, 1, 1, 1, 1, 1], #human
                    [5, 1, 3, 4, 2, 5], #elf
                    [3, 1, 2, 3, 3, 3], #goblin
                    [1, 2, 3, 4, 4, 4], #half-orc
                    [2, 3, 1, 4, 6, 5], #dwarf
                    [6, 1, 5, 4, 3, 2], #halfing
                    [1, 5, 4, 6, 3, 2],#dragonborn
                    [1, 4, 2, 6, 5, 3], #half-giant
                    [6, 3, 4, 2, 5, 1] #tiefling
                    ]
# [min, max] age range per race (sampled uniformly in getinfo).
age = [[18, 60], #human
       [100,700], #elf
       [3,15], #goblin
       [14,55] , #half-orc
       [50,300], #dwarf
       [20, 120], #halfing
       [15,60],#dragonborn
       [20, 70], #half-giant
       [18, 70] #tiefling
       ]
# [min, max] height range per race, in feet (formatted as "%ift %i").
height = [[5, 6.25], #human
          [5.2, 6.5], #elf
          [3, 4], #goblin
          [5.2,6.5], #half-orc
          [4,5], #dwarf
          [2.5, 3.5], #halfing
          [6, 7],#dragonborn
          [7, 10], #half-giant
          [5, 6] #tiefling
          ]
# [min, max] weight range per race, in pounds (formatted as "%ilbs").
weight = [[100, 220], #human
          [80, 150], #elf
          [30, 55], #goblin
          [130, 250], #half-orc
          [100,220], #dwarf
          [30,60], #halfing
          [150,300],#dragonborn
          [250,500], #half-giant
          [85,180] #tiefling
          ]
# Movement speed per race.
speed = [30, #human
         30, #elf
         30, #goblin
         30, #half-orc
         25, #dwarf
         25, #halfing
         30,#dragonborn
         30, #half-giant
         30 #tiefling
         ]
# Known languages per race.
languages = [['Common'], #human
             ['Common','Elvish'], #elf
             ['Common','Goblin'], #goblin
             ['Common','Orc'], #half-orc
             ['Common','Dwarvish'], #dwarf
             ['Common','Halfling'], #halfing
             ['Common','Draconic'],#dragonborn
             ['Common','Giant'], #half-giant
             ['Common','Infernal'] #tiefling
             ]
def resource_path(relative_path):
    """ Get absolute path to resource, works for dev and for PyInstaller """
    # PyInstaller unpacks bundled data into a temp folder and records its
    # path in sys._MEIPASS; during development fall back to the current
    # working directory.
    base_path = getattr(sys, '_MEIPASS', os.path.abspath(""))
    return os.path.join(base_path, relative_path)
def find_ngrams(input_list, n):
    """Return every length-n substring of the concatenation of input_list."""
    text = ''.join(input_list)
    return [text[start:start + n] for start in range(len(text) - n + 1)]
class Application(Frame):
""" Gui application for this stuff"""
    def __init__(self,master):
        """Initialise the frame"""
        Frame.__init__(self,master)
        self.grid()
        self.create_widgets()
        self.loadfile = ''
        # Maps each first letter to the list of n-grams starting with it
        # (filled in by loadbigrams/loadtrigrams).
        self.name = dict((e1,[]) for e1 in alphabet)
        # Per-race lookup tables built from the module-level data rows.
        self.race_bonuses = dict((classes[i],stat_bonuses[i]) for i in range(len(classes)))
        self.race_preference = dict((classes[i],stat_preferences[i]) for i in range(len(classes)))
        self.race_age =dict((classes[i],age[i]) for i in range(len(classes)))
        self.race_height =dict((classes[i],height[i]) for i in range(len(classes)))
        self.race_weight =dict((classes[i],weight[i]) for i in range(len(classes)))
        self.race_speed =dict((classes[i],speed[i]) for i in range(len(classes)))
        self.race_languages =dict((classes[i],languages[i]) for i in range(len(classes)))
        self.player_info = [0] * len(info_labels)
        #next is the list of functions: going in order of name(nothing), age, height, weight, languages, armour class
        # Each lambda takes the gender scale factor set in getinfo (1 for
        # male, 0.95 for female). Age is uniform in the race range (scale is
        # ignored there); height and weight are Gaussian around the race
        # mean -- NOTE(review): the weight mean is also shifted by how far
        # the generated height deviates from the race's mean height.
        self.info_function_list = ['',
            lambda scale:random.uniform(*self.race_age[self.race_var.get()]),
            lambda scale:random.gauss(np.mean(self.race_height[self.race_var.get()]),float(np.diff(self.race_height[self.race_var.get()]))/4)*scale,
            lambda scale:random.gauss(scale*np.mean(self.race_weight[self.race_var.get()])+(self.player_info[2]-np.mean(self.race_height[self.race_var.get()]))*30,np.diff (self.race_weight[self.race_var.get()])/4)*scale,
            lambda scale:self.race_languages[self.race_var.get()],
            lambda scale:BASE_ARMOUR + self.player_mod[1]
            ]
    def create_widgets(self):
        """Create buttons that do stuff"""
        #create first button
        self.instruction = Label(self, text = 'Step 1: Choose name ethnicity:')
        self.instruction.grid(row = 0, column = 0, columnspan = 3, sticky = W)
        # Bold, underlined variant of the default font, reused for the
        # step headers and action buttons.
        f = tkFont.Font(self.instruction, self.instruction.cget("font"))
        f.configure(underline = True,weight = 'bold')
        self.instruction.configure(font=f)
        self.instruction1 = Label(self, text = 'Preset styles:')
        self.instruction1.grid(row = 1, column = 0, sticky = W)
        # One checkbox per shipped name database; a checked box holds the
        # database's file path as its value, an unchecked one ''.
        self.cb_var = []
        for i,k in enumerate(folders):
            var = StringVar()
            self.cb_var.append(var)
            l = Checkbutton(self,text=k,variable=self.cb_var[i],onvalue=resource_path('namedb'+folderslash+k+'_names.txt'),offvalue='')
            #print(int(i%np.floor(len(folders)/4)+2),int(np.floor(i/np.floor(len(folders)/4))))
            # Lay the checkboxes out in a grid of roughly 4 columns.
            currentrow = int(i%np.floor(len(folders)/4)+2)
            currentcol = int(np.floor(i/np.floor(len(folders)/4)))
            l.grid(row = currentrow,column = currentcol,sticky = W)
        currentrow+=1
        self.instruction2 = Label(self, text = ' OR ')
        self.instruction2.grid(row = currentrow, column = 0,columnspan=2, sticky = W)
        currentrow+=1
        self.instruction3 = Label(self, text = 'Your own file location:')
        self.instruction3.grid(row = currentrow, column = 0, sticky = W)
        currentrow+=1
        # Free-text entry for a user-supplied name database file.
        self.flocation = Entry(self)
        self.flocation.grid(row = currentrow, column = 0,columnspan=2, sticky = W)
        self.load_button = Button(self,text = 'Step 2: Load Data',command = self.loadngrams)
        self.load_button.grid(row = currentrow, column = 3,columnspan=3, sticky = W)
        currentrow+=1
        self.load_button.configure(font=f)
        self.race_var = StringVar()
        self.instruction4 = Label(self, text = 'Step 3: Select race and gender:')
        self.instruction4.grid(row = currentrow, column = 0,columnspan=2,sticky = W)
        currentrow+=1
        self.race = ttk.Combobox(self,values=classes, textvariable = self.race_var)
        self.race.current(0)
        self.race.grid(row = currentrow, column = 0,columnspan=2,sticky=W)
        currentrow+=1
        self.instruction4.configure(font=f)
        self.gender_var = StringVar()
        self.gender = ttk.Combobox(self,values=['Male','Female'], textvariable = self.gender_var)
        self.gender.current(0)
        self.gender.grid(row = currentrow, column = 0,columnspan=2,sticky=W)
        currentrow+=1
        #self.instruction4 = Label(self, text = 'Number required:')
        #self.instruction4.grid(row = currentrow+5, column = 0, sticky = W)
        #self.number = Entry(self,width=10)
        self.submit_button = Button(self,text = 'Step 4: Generate!',command = self.getnames)
        self.submit_button.grid(row = currentrow, column = 1,columnspan = 2, sticky = W)
        self.submit_button.configure(font=f)
        self.lock = Label(self, text = 'Lock')
        self.lock.grid(row=currentrow,column=3,sticky=W)
        currentrow+=1
        # Output fields for the character info rows, each with a 'lock'
        # checkbox that prevents regeneration of that value.
        self.char_info = []
        self.char_labels = []
        self.char_info_lock = []
        currentrow = 12
        for i,k in enumerate(info_labels):
            self.char_info.append(Entry(self))
            self.char_info[i].grid(row = currentrow, column = 1,columnspan=2, sticky = E)
            self.char_labels.append(Label(self, text = k+': '))
            self.char_labels[i].grid(row = currentrow, column = 0,sticky = E)
            var = IntVar()
            self.char_info_lock.append(var)
            l = Checkbutton(self,variable=self.char_info_lock[i],onvalue=1,offvalue=0)
            l.grid(row = currentrow, column = 3,sticky = W)
            currentrow+=1
        # Output fields for the six ability scores.
        self.stats = []
        self.stat_labels = []
        for i,k in enumerate(stat_names):
            self.stat_labels.append(Label(self, text = k+': '))
            self.stat_labels[i].grid(row = currentrow, column = 0, sticky = E)
            self.stats.append(Entry(self))
            self.stats[i].grid(row = currentrow, column = 1,columnspan=2, sticky = E)
            currentrow+=1
def loadngrams(self):
if self.flocation.get()!="":
self.style = self.flocation.get()
text_file = open(self.flocation.get(), "r")
lines = text_file.readlines()
self.lines = [i.upper() for i in lines]
#print(self.lines)
text_file.close()
else:
self.lines = []
for i in self.cb_var:
if i.get():
text_file = open(i.get(),'r')
lines = text_file.readlines()
self.lines.append([i.upper() for i in lines])
text_file.close()
self.lines = list(itertools.chain.from_iterable(self.lines))
self.loadbigrams()
#self.text.delete(0.0,END)
message = 'Bigrams loaded!' + self.flocation.get() + '\n'
#self.text.insert(0.0,message)
self.loadtrigrams()
message = 'Trigrams loaded!' + self.flocation.get() + '\n'
#self.text.insert(0.0,message)
def loadbigrams(self):
bg = find_ngrams(self.lines,2)
self.bg_new = [i.upper() for i in bg if i[0]!='\n']
[self.name[i[0]].append(i) for i in self.bg_new]
self.bgl = [(g[0], (float(len(list(g[1])))/len(self.bg_new))) for g in itertools.groupby(sorted(self.bg_new))]
self.bscores = dict(self.bgl)
min_bscore = min(self.bscores.values())
for i in alphabet:
for j in alphabet:
key = i+j
if key not in self.bscores.keys():
self.bscores[key] = min_bscore
def loadtrigrams(self):
tg = find_ngrams(self.lines,3)
self.tg_new = [i.upper() for i in tg if i[0]!='\n']
[self.name[i[0]].append(i) for i in self.bg_new]
self.tgl = [(g[0], (float(len(list(g[1])))/len(self.tg_new))) for g in itertools.groupby(sorted(self.tg_new))]
self.tscores = dict(self.tgl)
min_tscore = min(self.tscores.values())
for i in alphabet:
for j in alphabet:
for k in alphabet:
key = i+j+k
if key not in self.tscores.keys():
self.tscores[key] = min_tscore
    def getnames(self):
        """Generate names by random bigram walks, keep the ones that score
        above a threshold, then fill the character-info widgets."""
        # Acceptance thresholds: mean bigram/trigram score of the source names.
        self.thresh2 = np.mean([np.mean([self.bscores[new_name[i:i+2]] for i in range(len(new_name)-1)]) for new_name in self.lines])
        self.thresh3 = np.mean([np.mean([self.tscores[new_name[i:i+3]] for i in range(len(new_name)-2)]) for new_name in self.lines])
        #print(self.thresh)
        #if self.number.get():
        #    number_names = int(self.number.get())
        # else:
        number_names = DEFAULT_NUMBER_NAMES
        #print(number_names)
        self.resultant_names = []
        self.scores = []
        #self.text.delete(0.0,END)
        while len(self.resultant_names) < number_names:
            # Seed with a random bigram that doesn't end a name.
            new_name = self.bg_new[random.randint(0,len(self.bg_new)-1)]
            while new_name[1] == '\n':
                new_name = self.bg_new[random.randint(0,len(self.bg_new)-1)]
            end_name = False
            while 1:
                # Random target length between ~3 and 10 characters.
                if random.randint(3,10) < len(new_name)+1:
                    break
                # Extend with a random n-gram continuing from the last letter.
                hi = self.name[new_name[-1]][random.randint(0,len(self.name[new_name[-1]])-1)]
                while hi[1] == '\n':
                    hi = self.name[new_name[-1]][random.randint(0,len(self.name[new_name[-1]])-1)]
                new_name = new_name + hi[1]
                #print(new_name)
            score3 = np.mean([self.tscores[new_name[i:i+3]] for i in range(len(new_name)-2)])
            score2 = np.mean([self.bscores[new_name[i:i+2]] for i in range(len(new_name)-1)])
            #print(new_name)
            # Accept the candidate if its bigram score is close enough to
            # the corpus average (trigram score is recorded but not gated).
            if score2 > self.thresh2/1.4:
                self.resultant_names.append(new_name)
                self.scores.append([score2, score3])
        self.scores, self.resultant_names = (list(t) for t in zip(*sorted(zip(self.scores, self.resultant_names))))
        for n in self.resultant_names:
            self.getinfo()
            #THIS IS WHERE THE PRINTING HAPPENS!!
            self.player_info[0] = n
            #print the player information first
            print_str = ["%s","%i","%ift %i","%ilbs","%s","%i"]
            for i,k in enumerate(info_labels):
                # Only regenerate fields whose 'lock' checkbox is unchecked.
                if self.char_info_lock[i].get() == 0:
                    self.char_info[i].delete(0,END)
                    if i==2: #height: whole feet plus fractional part as inches
                        self.char_info[i].insert(0,print_str[i]%(self.player_info[i],(self.player_info[i]%1)*12))
                    else:
                        self.char_info[i].insert(0,print_str[i]%(self.player_info[i]))
            #then go through the player stats
            for i,k in enumerate(self.player_stats):
                self.stats[i].delete(0,END)
                self.stats[i].insert(0,"%i\t(%i)"%(self.player_stats[i],self.player_mod[i]))
def getinfo(self):
player_stats = [sum(sorted(np.random.randint(6, size=4)+1)[1:]) for i in range(6)]
if sum(player_stats) < sum(default_stats) - 1:
player_stats = default_stats
#print(player_stats)
player_pref = self.race_preference[self.race_var.get()][:]
for j in range(1,len(stat_preferences[0])):
inds = [i for i,val in enumerate(self.race_preference[self.race_var.get()]) if val==j]
if len(inds) > 1:
matching_pref = range(len(inds))
random.shuffle(matching_pref)
for i,k, in enumerate(inds):
player_pref[k] = player_pref[k] + matching_pref[i]
#have to sort them according to the race, now
player_stats = [sorted(player_stats,reverse=True)[i-1] for i in player_pref]
self.player_stats = [sum(x) for x in zip(player_stats, self.race_bonuses[self.race_var.get()])]
self.player_mod = [np.floor(i/2)-5 for i in self.player_stats]
self.player_gender = self.gender_var.get()
if self.player_gender == 'Male':
scale = 1
else:
scale = 0.95
for i in range(1,len(info_labels)):
if self.char_info_lock[i].get() == 0:
self.player_info[i] = self.info_function_list[i](scale)
else:
self.player_info[i] = self.char_info[i].get()
#print(self.player_info)
#print(self.player_stats)
# Build the Tk root window, attach the application frame, and start the
# GUI event loop (blocks until the window is closed).
root = Tk()
root.title('NPC generator')
root.geometry("500x510")
app = Application(root)
root.mainloop()
|
# Otter-grader test specification for question q3a: two public doctest
# cases checking the rounded mean train/validation scores.
test = {
    'name': 'q3a',
    'points': 3,
    'suites': [
        {
            'cases': [
                {
                    'code': '>>> print(np.mean(train_scores,axis=1).round(3))\n'
                            '[0.954 0.958 0.959 0.962 0.963 0.967 0.97 0.973 0.979 0.981]\n',
                    'hidden': False,
                    'locked': False,
                },
                {
                    'code': '>>> print(np.mean(val_scores,axis=1).round(3))\n'
                            '[0.947 0.949 0.949 0.952 0.952 0.949 0.954 0.958 0.954 0.954]\n',
                    'hidden': False,
                    'locked': False,
                },
            ],
            'scored': True,
            'setup': '',
            'teardown': '',
            'type': 'doctest',
        },
    ],
}
|
import os
import time
import socket
import struct
import hashlib
from pathlib import Path
from enum import Enum
import numpy as np
class OP(Enum):
    """TFTP packet opcodes (RFC 1350)."""
    RRQ = 1  # read request
    WRQ = 2  # write request
    DAT = 3  # data block
    ACK = 4  # acknowledgement
    ERR = 5  # error
class TFTPClient:
    """Test-oriented TFTP client used to exercise a server under test.

    Every high-level method either returns True or raises ValueError on a
    protocol violation, so the methods double as assertions. ``remote`` is
    the server's (host, port) address; ``basedir`` is the local directory
    that mirrors the files the server serves, used to verify transfers.
    """
    def __init__(self, remote, basedir):
        self.remote = remote
        self.basedir = Path(basedir)
    def createRequest(self, op, fn, mode=b'octet'):
        """Build an RRQ/WRQ packet: opcode, filename, NUL, mode, NUL."""
        return struct.pack('!H', op.value) + b'%b\x00%b\x00' % (fn, mode)
    def createRequestBadOp(self, op, fn, mode=b'octet'):
        """Like createRequest, but takes a raw (possibly invalid) int opcode."""
        return struct.pack('!H', op) + b'%b\x00%b\x00' % (fn, mode)
    def createACK(self, bn):
        """Build an ACK packet for block number ``bn``."""
        return struct.pack('!HH', OP.ACK.value, bn)
    def createDATBuf(self, buf, bn):
        """Build a DAT packet carrying ``buf`` as block ``bn``."""
        return struct.pack('!HH', OP.DAT.value, bn) + buf
    def parsePacket(self, buf):
        """Decode a raw packet into a dict.

        Always contains 'op'; ACK adds 'bn', DAT adds 'bn' and 'data',
        ERR adds 'code' and 'msg' (trailing NUL stripped).
        """
        rv = {'op': OP(struct.unpack('!H', buf[0:2])[0])}
        if rv['op'] == OP.ACK:
            rv['bn'] = struct.unpack('!H', buf[2:4])[0]
        if rv['op'] == OP.DAT:
            rv['bn'] = struct.unpack('!H', buf[2:4])[0]
            rv['data'] = buf[4:]
        if rv['op'] == OP.ERR:
            rv['code'] = struct.unpack('!H', buf[2:4])[0]
            rv['msg'] = str(buf[4:-1], 'utf-8')
        return rv
    def fileBufEq(self, fn, buf):
        """Return True if local file ``fn`` holds exactly the bytes ``buf``.

        BUG FIX: previously opened the file with a bare open() whose handle
        was never closed; Path.read_bytes() reads and closes it.
        """
        fc = (self.basedir / os.fsdecode(fn)).read_bytes()
        fc_h = hashlib.sha256()
        fc_h.update(fc)
        buf_h = hashlib.sha256()
        buf_h.update(buf)
        return fc_h.digest() == buf_h.digest()
    def newSocket(self):
        """Create a UDP socket with a 10-second receive timeout."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.settimeout(10)
        return sock
    def checkACK(self, op, e):
        """Raise unless ``op`` is ACK and block-number error flag ``e`` is falsy."""
        if op != OP.ACK:
            raise ValueError(f'Opcode should be ACK is {op}.')
        if e:
            raise ValueError(f'Invalid block number.')
    def getFile(self, fn, mode=b'octet'):
        """Read ``fn`` from the server and verify it matches the local copy."""
        buf = b''
        with self.newSocket() as sock:
            req = self.createRequest(OP.RRQ, fn, mode)
            sock.sendto(req, self.remote)
            ebn = 1  # expected block number
            while True:
                # Subsequent packets come from the server's transfer port
                # (ca), not the well-known port the request went to.
                resp, ca = sock.recvfrom(1024)
                pkt = self.parsePacket(resp)
                if pkt['op'] != OP.DAT:
                    raise ValueError(f'Opcode should be DAT, is {pkt["op"]}.')
                if pkt['bn'] != ebn:
                    raise ValueError(f'Block num should be {ebn}, is {pkt["bn"]}.')
                buf += pkt['data']
                req = self.createACK(pkt['bn'])
                sock.sendto(req, ca)
                # A short (<512 byte) data block terminates the transfer.
                if len(pkt['data']) < 512:
                    break
                ebn += 1
        if not self.fileBufEq(fn, buf):
            raise ValueError('File and buffer are not the same.')
        return True
    def getMultiBlockFileFailAck(self, fn, n, mode=b'octet'):
        """Read ``fn`` while withholding each ACK ``n`` times.

        The server must retransmit every DAT block n extra times before the
        ACK is finally sent; verifies retransmission handling.
        """
        buf = b''
        with self.newSocket() as sock:
            req = self.createRequest(OP.RRQ, fn, mode)
            sock.sendto(req, self.remote)
            ebn = 1
            while True:
                # Receive the same block n+1 times; only ACK the last copy.
                for i in range(n + 1):
                    resp, ca = sock.recvfrom(1024)
                    pkt = self.parsePacket(resp)
                    if pkt['op'] != OP.DAT:
                        raise ValueError(f'Opcode should be DAT is {pkt["op"]}.')
                    if pkt['bn'] != ebn:
                        raise ValueError(f'Block num should be {ebn} is {pkt["bn"]}.')
                    if i != n:
                        continue
                    buf += pkt['data']
                    req = self.createACK(pkt['bn'])
                    sock.sendto(req, ca)
                if len(pkt['data']) < 512:
                    break
                ebn += 1
        if not self.fileBufEq(fn, buf):
            raise ValueError('Files are not the same.')
        return True
    def getFileNotExists(self, fn, mode=b'octet'):
        """Request a missing file; expect ERR with code 1 (file not found)."""
        with self.newSocket() as sock:
            req = self.createRequest(OP.RRQ, fn, mode)
            sock.sendto(req, self.remote)
            resp, ca = sock.recvfrom(1024)
            pkt = self.parsePacket(resp)
            if pkt['op'] != OP.ERR:
                raise ValueError(f'Expected OP.ERR got {pkt["op"]}')
            if pkt['code'] != 1:
                raise ValueError(f'Expected error code 1 got {pkt["code"]}')
        return True
    def sendBadOp(self, op, mode=b'octet'):
        """Send an illegal opcode; expect ERR code 4, or silence."""
        with self.newSocket() as sock:
            req = self.createRequestBadOp(op, b'fn', mode)
            sock.sendto(req, self.remote)
            try:
                resp, ca = sock.recvfrom(1024)
                pkt = self.parsePacket(resp)
                if pkt['op'] != OP.ERR:
                    raise ValueError(f'Expected OP.ERR got {pkt["op"]}.')
                if pkt['code'] != 4:
                    raise ValueError(f'Expected error code 4 got {pkt["code"]}.')
            except socket.timeout:
                pass  # Ok to not respond to a bad request
        return True
    def putFileBytes(self, fn, sz, mode=b'octet'):
        """Write ``sz`` random bytes to ``fn`` as a single DAT block."""
        with self.newSocket() as sock:
            req = self.createRequest(OP.WRQ, fn, mode)
            sock.sendto(req, self.remote)
            resp, ca = sock.recvfrom(1024)
            pkt = self.parsePacket(resp)
            # A WRQ is acknowledged with ACK block 0.
            self.checkACK(pkt['op'], pkt['bn'] != 0)
            sbuf = np.random.bytes(sz)
            req = self.createDATBuf(sbuf, 1)
            sock.sendto(req, ca)
            try:
                resp, ca = sock.recvfrom(1024)
                pkt = self.parsePacket(resp)
                self.checkACK(pkt['op'], pkt['bn'] != 1)
            except socket.timeout:
                raise ValueError('Timeout waiting for ACK.')
        # Give the server time to flush the file to disk before comparing.
        time.sleep(5)
        if not self.fileBufEq(fn, sbuf):
            raise ValueError('Files are not the same.')
        return True
    def putFileBlocks(self, fn, sz, mode=b'octet'):
        """Write ``sz`` full 512-byte random blocks to ``fn``, then an
        empty terminating block."""
        fc = b''
        with self.newSocket() as sock:
            req = self.createRequest(OP.WRQ, fn, mode)
            sock.sendto(req, self.remote)
            resp, ca = sock.recvfrom(1024)
            pkt = self.parsePacket(resp)
            self.checkACK(pkt['op'], pkt['bn'] != 0)
            for blk in range(1, sz + 1):
                sbuf = np.random.bytes(512)
                req = self.createDATBuf(sbuf, blk)
                sock.sendto(req, ca)
                try:
                    resp, ca = sock.recvfrom(1024)
                    pkt = self.parsePacket(resp)
                    self.checkACK(pkt['op'], pkt['bn'] != blk)
                except socket.timeout:
                    raise ValueError('Timeout waiting for ACK.')
                fc += sbuf
            # Zero-length DAT marks end of transfer (file size % 512 == 0).
            req = self.createDATBuf(b'', sz + 1)
            sock.sendto(req, ca)
            try:
                resp, ca = sock.recvfrom(1024)
                pkt = self.parsePacket(resp)
                self.checkACK(pkt['op'], pkt['bn'] != sz + 1)
            except socket.timeout:
                raise ValueError('Timeout waiting for ACK.')
        # Give the server time to flush the file to disk before comparing.
        time.sleep(5)
        if not self.fileBufEq(fn, fc):
            raise ValueError('Files are not the same.')
        return True
|
# EPICS process-variable configuration for a motorized slit gap.
EPICS_enabled = True  # toggle EPICS communication on/off
description = 'Sample slits vert. gap'
# PV prefix of the motor record (presumably beamline 14IDB, motor 27).
prefix = '14IDB:m27'
# Target gap value in the motor's engineering units — TODO confirm units.
target = 0.070125
|
from __future__ import absolute_import, division, print_function
import numpy as np
import pandas as pd
import dask.dataframe as dd
from dask.array import Array
from xarray import DataArray
from collections import OrderedDict
from .utils import Dispatcher, ngjit, calc_res, calc_bbox, orient_array, compute_coords, get_indices, dshape_from_pandas, dshape_from_dask, categorical_in_dtypes
from .resampling import (resample_2d, US_NEAREST, US_LINEAR, DS_FIRST, DS_LAST,
DS_MEAN, DS_MODE, DS_VAR, DS_STD, DS_MIN, DS_MAX)
class Expr(object):
    """Base class for expression-like objects.

    Equality and hashing are structural: two expressions are equal exactly
    when they share the same concrete type and their ``inputs`` tuples
    match. Subclasses must provide an ``inputs`` attribute/property
    containing a tuple of everything that fully defines the expression.
    """
    def __hash__(self):
        return hash((type(self), self.inputs))

    def __eq__(self, other):
        if type(self) is not type(other):
            return False
        return self.inputs == other.inputs

    def __ne__(self, other):
        return not (self == other)
class Axis(object):
    """Interface for implementing axis transformations.
    Instances hold implementations of transformations to and from axis space.
    The default implementation is equivalent to:
    >>> def forward_transform(data_x):
    ...     scale * mapper(data_x) + t
    >>> def inverse_transform(axis_x):
    ...     inverse_mapper((axis_x - t)/s)
    Where ``mapper`` and ``inverse_mapper`` are elementwise functions mapping
    to and from axis-space respectively, and ``scale`` and ``transform`` are
    parameters describing a linear scale and translate transformation, computed
    by the ``compute_scale_and_translate`` method.
    """
    def compute_scale_and_translate(self, range, n):
        """Compute the scale and translate parameters for a linear transformation
        ``output = s * input + t``, mapping from data space to axis space.
        Parameters
        ----------
        range : tuple
            A tuple representing the range ``[min, max]`` along the axis, in
            data space. Both min and max are inclusive.
        n : int
            The number of bins along the axis.
        Returns
        -------
        s, t : floats
        """
        # Map the data-space range through the axis mapper (identity for
        # linear, log10 for log) before fitting the linear transform.
        start, end = map(self.mapper, range)
        s = n/(end - start)
        t = -start * s
        return s, t
    def compute_index(self, st, n):
        """Compute a 1D array representing the axis index.
        Parameters
        ----------
        st : tuple
            A tuple of ``(scale, translate)`` parameters.
        n : int
            The number of bins along the dimension.
        Returns
        -------
        index : ndarray
        """
        # Bin centers: pixel coordinates offset by half a bin, mapped back
        # to data space through the inverse transform.
        px = np.arange(n)+0.5
        s, t = st
        return self.inverse_mapper((px - t)/s)
    # NOTE: mapper/inverse_mapper are abstract hooks declared without
    # ``self``; concrete subclasses override them with @staticmethod
    # implementations, which is what compute_* call through ``self.``.
    def mapper(val):
        """A mapping from data space to axis space"""
        raise NotImplementedError
    def inverse_mapper(val):
        """A mapping from axis space to data space"""
        raise NotImplementedError
    def validate(self, range):
        """Given a range (low,high), raise an error if the range is invalid for this axis"""
        pass
class LinearAxis(Axis):
    """A linear Axis"""
    # Identity transforms: data space and axis space coincide.
    @staticmethod
    @ngjit
    def mapper(val):
        return val
    @staticmethod
    @ngjit
    def inverse_mapper(val):
        return val
class LogAxis(Axis):
    """A base-10 logarithmic Axis"""
    @staticmethod
    @ngjit
    def mapper(val):
        return np.log10(val)
    @staticmethod
    @ngjit
    def inverse_mapper(val):
        return 10**val
    def validate(self, range):
        # log10 of a non-positive value is NaN/-inf, so a finite mapped
        # range guarantees both endpoints were > 0.
        low, high = map(self.mapper, range)
        if not (np.isfinite(low) and np.isfinite(high)):
            raise ValueError('Range values must be >0 for a LogAxis')
# Registry mapping the user-facing axis-type strings to Axis instances.
_axis_lookup = {'linear': LinearAxis(), 'log': LogAxis()}
class Canvas(object):
    """An abstract canvas representing the space in which to bin.
    Parameters
    ----------
    plot_width, plot_height : int, optional
        Width and height of the output aggregate in pixels.
    x_range, y_range : tuple, optional
        A tuple representing the bounds inclusive space ``[min, max]`` along
        the axis.
    x_axis_type, y_axis_type : str, optional
        The type of the axis. Valid options are ``'linear'`` [default], and
        ``'log'``.
    """
    def __init__(self, plot_width=600, plot_height=600,
                 x_range=None, y_range=None,
                 x_axis_type='linear', y_axis_type='linear'):
        self.plot_width = plot_width
        self.plot_height = plot_height
        # Normalize ranges to tuples; None means "infer from the data".
        self.x_range = None if x_range is None else tuple(x_range)
        self.y_range = None if y_range is None else tuple(y_range)
        self.x_axis = _axis_lookup[x_axis_type]
        self.y_axis = _axis_lookup[y_axis_type]
    def points(self, source, x, y, agg=None):
        """Compute a reduction by pixel, mapping data to pixels as points.
        Parameters
        ----------
        source : pandas.DataFrame, dask.DataFrame
            The input datasource.
        x, y : str
            Column names for the x and y coordinates of each point.
        agg : Reduction, optional
            Reduction to compute. Default is ``count()``.
        """
        from .glyphs import Point
        from .reductions import count as count_rdn
        if agg is None:
            agg = count_rdn()
        return bypixel(source, self, Point(x, y), agg)
    def line(self, source, x, y, agg=None):
        """Compute a reduction by pixel, mapping data to pixels as a line.
        For aggregates that take in extra fields, the interpolated bins will
        receive the fields from the previous point. In pseudocode:
        >>> for i in range(len(rows) - 1):    # doctest: +SKIP
        ...     row0 = rows[i]
        ...     row1 = rows[i + 1]
        ...     for xi, yi in interpolate(row0.x, row0.y, row1.x, row1.y):
        ...         add_to_aggregate(xi, yi, row0)
        Parameters
        ----------
        source : pandas.DataFrame, dask.DataFrame
            The input datasource.
        x, y : str
            Column names for the x and y coordinates of each vertex.
        agg : Reduction, optional
            Reduction to compute. Default is ``any()``.
        """
        from .glyphs import Line
        from .reductions import any as any_rdn
        if agg is None:
            agg = any_rdn()
        return bypixel(source, self, Line(x, y), agg)
    def trimesh(self, vertices, simplices, mesh=None, agg=None, interp=True):
        """Compute a reduction by pixel, mapping data to pixels as a triangle.
        >>> import datashader as ds
        >>> verts = pd.DataFrame({'x': [0, 5, 10],
        ...                       'y': [0, 10, 0],
        ...                       'weight': [1, 5, 3]},
        ...                      columns=['x', 'y', 'weight'])
        >>> tris = pd.DataFrame({'v0': [2], 'v1': [0], 'v2': [1]},
        ...                     columns=['v0', 'v1', 'v2'])
        >>> cvs = ds.Canvas(x_range=(verts.x.min(), verts.x.max()),
        ...                 y_range=(verts.y.min(), verts.y.max()))
        >>> cvs.trimesh(verts, tris)
        Parameters
        ----------
        vertices : pandas.DataFrame, dask.DataFrame
            The input datasource for triangle vertex coordinates. These can be
            interpreted as the x/y coordinates of the vertices, with optional
            weights for value interpolation. Columns should be ordered
            corresponding to 'x', 'y', followed by zero or more (optional)
            columns containing vertex values. The rows need not be ordered.
            The column data types must be floating point or integer.
        simplices : pandas.DataFrame, dask.DataFrame
            The input datasource for triangle (simplex) definitions. These can
            be interpreted as rows of ``vertices``, aka positions in the
            ``vertices`` index. Columns should be ordered corresponding to
            'vertex0', 'vertex1', and 'vertex2'. Order of the vertices can be
            clockwise or counter-clockwise; it does not matter as long as the
            data is consistent for all simplices in the dataframe. The
            rows need not be ordered. The data type for the first
            three columns in the dataframe must be integer.
        agg : Reduction, optional
            Reduction to compute. Default is ``mean()``.
        mesh : pandas.DataFrame, optional
            An ordered triangle mesh in tabular form, used for optimization
            purposes. This dataframe is expected to have come from
            ``datashader.utils.mesh()``. If this argument is not None, the first
            two arguments are ignored.
        interp : boolean, optional
            Specify whether to do bilinear interpolation of the pixels within each
            triangle. This can be thought of as a "weighted average" of the vertex
            values. Defaults to True.
        """
        from .glyphs import Triangles
        from .reductions import mean as mean_rdn
        from .utils import mesh as create_mesh
        source = mesh
        # Validation is done inside the [pd]d_mesh utility functions
        if source is None:
            source = create_mesh(vertices, simplices)
        # Weights may live on the vertices (3rd+ column) or, failing that,
        # on the simplices (4th column).
        verts_have_weights = len(vertices.columns) > 2
        if verts_have_weights:
            weight_col = vertices.columns[2]
        else:
            weight_col = simplices.columns[3]
        if agg is None:
            agg = mean_rdn(weight_col)
        elif agg.column is None:
            agg.column = weight_col
        cols = source.columns
        x, y, weights = cols[0], cols[1], cols[2:]
        return bypixel(source, self, Triangles(x, y, weights, weight_type=verts_have_weights, interp=interp), agg)
    def raster(self,
               source,
               layer=None,
               upsample_method='linear',
               downsample_method='mean',
               nan_value=None):
        """Sample a raster dataset by canvas size and bounds.
        Handles 2D or 3D xarray DataArrays, assuming that the last two
        array dimensions are the y- and x-axis that are to be
        resampled. If a 3D array is supplied a layer may be specified
        to resample to select the layer along the first dimension to
        resample.
        Missing values (those having the value indicated by the
        "nodata" attribute of the raster) are replaced with `NaN` if
        floats, and 0 if int.
        Parameters
        ----------
        source : xarray.DataArray
            input datasource most likely obtain from `xr.open_rasterio()`.
        layer : int
            source layer number : optional default=None
        upsample_method : str, optional default=linear
            resample mode when upsampling raster.
            options include: nearest, linear.
        downsample_method : str, optional default=mean
            resample mode when downsampling raster.
            options include: first, last, mean, mode, var, std
        nan_value : int or float, optional
            Optional nan_value which will be masked out when applying
            the resampling.
        Returns
        -------
        data : xarray.Dataset
        """
        upsample_methods = dict(nearest=US_NEAREST,
                                linear=US_LINEAR)
        downsample_methods = dict(first=DS_FIRST,
                                  last=DS_LAST,
                                  mean=DS_MEAN,
                                  mode=DS_MODE,
                                  var=DS_VAR,
                                  std=DS_STD,
                                  min=DS_MIN,
                                  max=DS_MAX)
        if upsample_method not in upsample_methods.keys():
            raise ValueError('Invalid upsample method: options include {}'.format(list(upsample_methods.keys())))
        if downsample_method not in downsample_methods.keys():
            raise ValueError('Invalid downsample method: options include {}'.format(list(downsample_methods.keys())))
        # Resolution and bounding box of the source raster.
        res = calc_res(source)
        ydim, xdim = source.dims[-2:]
        xvals, yvals = source[xdim].values, source[ydim].values
        left, bottom, right, top = calc_bbox(xvals, yvals, res)
        array = orient_array(source, res, layer)
        dtype = array.dtype
        if nan_value is not None:
            # Mask out missing values so the resampler ignores them.
            mask = array==nan_value
            array = np.ma.masked_array(array, mask=mask, fill_value=nan_value)
            fill_value = nan_value
        else:
            fill_value = np.NaN
        # window coordinates: intersection of the canvas ranges with the
        # raster's bounding box.
        xmin = max(self.x_range[0], left)
        ymin = max(self.y_range[0], bottom)
        xmax = min(self.x_range[1], right)
        ymax = min(self.y_range[1], top)
        width_ratio = (xmax - xmin) / (self.x_range[1] - self.x_range[0])
        height_ratio = (ymax - ymin) / (self.y_range[1] - self.y_range[0])
        if np.isclose(width_ratio, 0) or np.isclose(height_ratio, 0):
            raise ValueError('Canvas x_range or y_range values do not match closely-enough with the data source to be able to accurately rasterize. Please provide ranges that are more accurate.')
        # Output size of the overlapping window, in pixels.
        w = int(np.ceil(self.plot_width * width_ratio))
        h = int(np.ceil(self.plot_height * height_ratio))
        cmin, cmax = get_indices(xmin, xmax, xvals, res[0])
        rmin, rmax = get_indices(ymin, ymax, yvals, res[1])
        kwargs = dict(w=w, h=h, ds_method=downsample_methods[downsample_method],
                      us_method=upsample_methods[upsample_method], fill_value=fill_value)
        if array.ndim == 2:
            source_window = array[rmin:rmax+1, cmin:cmax+1]
            if isinstance(source_window, Array):
                source_window = source_window.compute()
            # var/std need floating point accumulation.
            if downsample_method in ['var', 'std']:
                source_window = source_window.astype('f')
            data = resample_2d(source_window, **kwargs)
            layers = 1
        else:
            # 3D case: resample each layer independently, then stack.
            source_window = array[:, rmin:rmax+1, cmin:cmax+1]
            if downsample_method in ['var', 'std']:
                source_window = source_window.astype('f')
            arrays = []
            for arr in source_window:
                if isinstance(arr, Array):
                    arr = arr.compute()
                arrays.append(resample_2d(arr, **kwargs))
            data = np.dstack(arrays)
            layers = len(arrays)
        if w != self.plot_width or h != self.plot_height:
            # The raster covers only part of the canvas: pad the window out
            # to the full plot size with fill_value, split left/right and
            # top/bottom proportionally to the uncovered range.
            num_height = self.plot_height - h
            num_width = self.plot_width - w
            lpad = xmin - self.x_range[0]
            rpad = self.x_range[1] - xmax
            lpct = lpad / (lpad + rpad) if lpad + rpad > 0 else 0
            left = int(np.ceil(num_width * lpct))
            right = num_width - left
            lshape, rshape = (self.plot_height, left), (self.plot_height, right)
            if layers > 1:
                lshape, rshape = lshape + (layers,), rshape + (layers,)
            left_pad = np.full(lshape, fill_value, source_window.dtype)
            right_pad = np.full(rshape, fill_value, source_window.dtype)
            tpad = ymin - self.y_range[0]
            bpad = self.y_range[1] - ymax
            tpct = tpad / (tpad + bpad) if tpad + bpad > 0 else 0
            top = int(np.ceil(num_height * tpct))
            bottom = num_height - top
            tshape, bshape = (top, w), (bottom, w)
            if layers > 1:
                tshape, bshape = tshape + (layers,), bshape + (layers,)
            top_pad = np.full(tshape, fill_value, source_window.dtype)
            bottom_pad = np.full(bshape, fill_value, source_window.dtype)
            data = np.concatenate((top_pad, data, bottom_pad), axis=0)
            data = np.concatenate((left_pad, data, right_pad), axis=1)
        # Reorient array to original orientation
        if res[1] > 0: data = data[::-1]
        if res[0] < 0: data = data[:, ::-1]
        # Restore nan_value from masked array
        if nan_value is not None:
            data = data.filled()
        # Restore original dtype
        if dtype != data.dtype:
            data = data.astype(dtype)
        # Compute DataArray metadata
        xs, ys = compute_coords(self.plot_width, self.plot_height, self.x_range, self.y_range, res)
        coords = {xdim: xs, ydim: ys}
        dims = [ydim, xdim]
        attrs = dict(res=res[0])
        # NOTE(review): relies on xarray's private _file_obj attribute to
        # propagate "nodata" — verify against the pinned xarray version.
        if source._file_obj is not None:
            attrs['nodata'] = source._file_obj.nodata
        # Handle DataArray with layers
        if data.ndim == 3:
            # Move the stacked-layer axis back to the front.
            data = data.transpose([2, 0, 1])
            layer_dim = source.dims[0]
            coords[layer_dim] = source.coords[layer_dim]
            dims = [layer_dim]+dims
        return DataArray(data, coords=coords, dims=dims, attrs=attrs)
    def validate(self):
        """Check that parameter settings are valid for this object"""
        self.x_axis.validate(self.x_range)
        self.y_axis.validate(self.y_range)
def bypixel(source, canvas, glyph, agg):
    """Compute an aggregate grouped by pixel sized bins.
    Aggregate input data ``source`` into a grid with shape and axis matching
    ``canvas``, mapping data to bins by ``glyph``, and aggregating by reduction
    ``agg``.
    Parameters
    ----------
    source : pandas.DataFrame, dask.DataFrame
        Input datasource
    canvas : Canvas
    glyph : Glyph
    agg : Reduction
    """
    # Avoid datashape.Categorical instantiation bottleneck
    # by only retaining the necessary columns:
    # https://github.com/bokeh/datashader/issues/396
    if categorical_in_dtypes(source.dtypes.values):
        # Preserve column ordering without duplicates
        cols_to_keep = OrderedDict({col: False for col in source.columns})
        cols_to_keep[glyph.x] = True
        cols_to_keep[glyph.y] = True
        if hasattr(glyph, 'z'):
            cols_to_keep[glyph.z] = True
        if hasattr(agg, 'values'):
            # Compound reductions hold sub-reductions in ``values``.
            for subagg in agg.values:
                if subagg.column is not None:
                    cols_to_keep[subagg.column] = True
        elif agg.column is not None:
            cols_to_keep[agg.column] = True
        src = source[[col for col, keepit in cols_to_keep.items() if keepit]]
    else:
        src = source
    if isinstance(src, pd.DataFrame):
        dshape = dshape_from_pandas(src)
    elif isinstance(src, dd.DataFrame):
        dshape = dshape_from_dask(src)
    else:
        raise ValueError("source must be a pandas or dask DataFrame")
    schema = dshape.measure
    glyph.validate(schema)
    agg.validate(schema)
    canvas.validate()
    # Note: ``src`` exists only to make the dshape computation cheap; the
    # full ``source`` is what gets aggregated.
    return bypixel.pipeline(source, schema, canvas, glyph, agg)
# Dispatcher that backends register their bypixel implementations on.
bypixel.pipeline = Dispatcher()
|
# importing modules
import helper
import random
import pathlib
import json
# setup
root = pathlib.Path(__file__).parent.parent.resolve()
# Pick one quote at random from the configured quote list.
with open(root / "config/quotes.json", 'r') as filehandle:
    random_quote = random.choice(json.load(filehandle))
# Put the attribution (text after "-") on its own rendered line.
random_quote = random_quote.replace("-", "<br/> -")
# processing
if __name__ == "__main__":
    index_page = root / "index.html"
    # read_text/write_text open AND close the file; the previous bare
    # open().read() / open("w").write() calls leaked both handles.
    index_contents = index_page.read_text()
    final_output = helper.replace_chunk(index_contents, "quote_marker", f"<blockquote>\n{random_quote}\n</blockquote>\n")
    index_page.write_text(final_output)
|
from .data import *
from .core import *
from .cam import *
from .models.inception_time import *
|
from django.conf import settings
from django.core import management
from django.core.management.base import BaseCommand
from playlists.management import playlist
from playlists.models import Playlist
from talks.models import Talk
from youtube_data_api3.video import get_video_youtube_url
from youtube_data_api3.playlist import get_playlist_code
from youtube_data_api3.playlist import fetch_playlist_data
from youtube_data_api3.playlist import fetch_playlist_items
class Command(BaseCommand):
    """Create a Playlist (and its Talks) from a YouTube playlist URL.

    Delegates to the ``update_playlist`` command when the playlist already
    exists, and to ``create_talk``/``update_talk`` for each video found.
    """
    help = 'Create Playlist into the database, given its Youtube URL'
    def add_arguments(self, parser):
        # Single positional argument: the full YouTube playlist URL.
        parser.add_argument('youtube_url_playlist', type=str)
    def handle(self, *args, **options):
        """Entry point: validate the URL, fetch playlist data, create rows."""
        youtube_url_playlist = options['youtube_url_playlist']
        # Get code from youtube url
        playlist_code = ""
        try:
            playlist_code = get_playlist_code(youtube_url_playlist)
        except Exception:
            msg = "ERROR: Invalid URL playlist {:s}".format(youtube_url_playlist)
            self.stdout.write(self.style.ERROR(msg))
            return
        # Check if the playlist is already on the database
        if Playlist.objects.filter(code=playlist_code).exists():
            msg = "ERROR: Playlist {:s} is already present on the database".format(playlist_code)
            self.stdout.write(self.style.NOTICE(msg))
            # Call to update command instead
            management.call_command("update_playlist", youtube_url_playlist)
            return
        msg = "Creating playlist code:{:s}".format(playlist_code)
        self.stdout.write(msg)
        # Fetch channel data from Youtube API
        playlist_json_data = fetch_playlist_data(settings.YOUTUBE_API_KEY, playlist_code)
        # If no data is received do nothing
        if playlist_json_data is None:
            msg = "ERROR: Youtube Data API does not return anything for playlist {:s}".format(playlist_code)
            self.stdout.write(self.style.ERROR(msg))
            return
        playlist_obj = playlist.create_playlist(playlist_json_data)
        msg = "Playlist id:{:d} - title:{:s} created successfully".format(playlist_obj.id, playlist_obj.title)
        self.stdout.write(self.style.SUCCESS(msg))
        # Fetch playlist items data from Youtube API
        youtube_playlist_items_data = fetch_playlist_items(settings.YOUTUBE_API_KEY, playlist_obj.code)
        # If no data is received do nothing
        if youtube_playlist_items_data is None:
            msg = "ERROR: Youtube Data API does not return anything for playlist items {:s}".format(playlist_obj.code)
            self.stdout.write(self.style.ERROR(msg))
            return
        print("{:d} talks on playlist {:s}".format(len(youtube_playlist_items_data), playlist_obj.code))
        # Create each missing talk; refresh the ones that already exist.
        for video_code in youtube_playlist_items_data:
            youtube_video_url = get_video_youtube_url(video_code)
            if not Talk.objects.filter(code=video_code).exists():
                management.call_command("create_talk", youtube_video_url, "--playlist={:s}".format(playlist_obj.code))
            else:
                management.call_command("update_talk", youtube_video_url, "--playlist={:s}".format(playlist_obj.code))
|
import random
import time
from flask import Flask, request, abort
from imgurpython import ImgurClient
from linebot import (
LineBotApi, WebhookHandler
)
from linebot.exceptions import (
InvalidSignatureError
)
from linebot.models import *
import tempfile, os
from config import client_id, client_secret, album_id, album_id_lucky, access_token, refresh_token, line_channel_access_token, \
line_channel_secret
import json
# Flask app plus LINE SDK client/handler, configured from config.py.
app = Flask(__name__)
line_bot_api = LineBotApi(line_channel_access_token)
handler = WebhookHandler(line_channel_secret)
# Scratch directory for downloaded message attachments.
static_tmp_path = os.path.join(os.path.dirname(__file__), 'static', 'tmp')
@app.route("/callback", methods=['POST'])
def callback():
    """LINE webhook endpoint: verify the signature, dispatch the events."""
    signature = request.headers['X-Line-Signature']
    payload = request.get_data(as_text=True)
    app.logger.info("Request body: " + payload)
    try:
        handler.handle(payload, signature)
    except InvalidSignatureError:
        # Signature mismatch means the request did not come from LINE.
        abort(400)
    return 'ok'
@handler.add(MessageEvent, message=(ImageMessage, TextMessage, VideoMessage, AudioMessage))
def handle_message(event):
    """Dispatch incoming LINE messages by type.

    Images are downloaded and uploaded to an Imgur album; video/audio are
    politely rejected; the text keyword "啾咪神之手" runs a two-winner lucky
    draw from a second Imgur album.
    """
    if isinstance(event.message, ImageMessage):
        # Tag the upload with the sender's display name.
        msgSource = line_bot_api.get_profile(event.source.user_id)
        userDisplayName = msgSource.display_name
        ext = 'jpg'
        # Download the image content into a temp file, then rename it with
        # the proper extension.
        message_content = line_bot_api.get_message_content(event.message.id)
        with tempfile.NamedTemporaryFile(dir=static_tmp_path, prefix=ext + '-', delete=False) as tf:
            for chunk in message_content.iter_content():
                tf.write(chunk)
            tempfile_path = tf.name
        dist_path = tempfile_path + '.' + ext
        dist_name = os.path.basename(dist_path)
        os.rename(tempfile_path, dist_path)
        try:
            client = ImgurClient(client_id, client_secret, access_token, refresh_token)
            config = {
                'album': album_id,
                'name': userDisplayName,
                'title': userDisplayName,
                'description': userDisplayName + ' uploaded'
            }
            path = os.path.join('static', 'tmp', dist_name)
            client.upload_from_path(path, config=config, anon=False)
            # Remove the local copy once it is on Imgur.
            os.remove(path)
            print(path)
            line_bot_api.reply_message(
                event.reply_token,
                TextSendMessage(text='您的照片上傳成功,請保留相片至婚禮結束,謝謝您!'))
        # NOTE(review): bare except hides the real failure (auth, network,
        # file I/O) — consider narrowing to Exception and logging it.
        except:
            line_bot_api.reply_message(
                event.reply_token,
                TextSendMessage(text='您的照片上傳失敗,請重新試試!'))
        return 0
    elif isinstance(event.message, VideoMessage):
        ext = 'mp4'
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(text='您上傳的影片無法投影在相片牆'))
    elif isinstance(event.message, AudioMessage):
        ext = 'm4a'
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(text='您上傳的聲音訊息無法投影在相片牆'))
    elif isinstance(event.message, TextMessage):
        if event.message.text == "啾咪神之手":
            # Lucky draw: pick two photos with distinct uploader titles
            # from the lucky-draw album.
            imageSize = "h"
            client = ImgurClient(client_id, client_secret)
            images = client.get_album_images(album_id_lucky)
            index = [0,0]
            imgurNameA = "A"
            imgurNameB = "A"
            # Re-draw until the two winners are different people.
            while imgurNameA == imgurNameB:
                index = [random.randint(0, len(images) - 1) for _ in range(2)]
                imgurNameA = images[index[0]].title
                imgurNameB = images[index[1]].title
            print(index, imgurNameA, imgurNameB)
            imgurNameList = [imgurNameA, imgurNameB]
            imgurNameListIndex = 0
            image_message_list = []
            for i in index:
                # Insert the Imgur size suffix before ".jpg" to request the
                # "huge thumbnail" rendition.
                imgurLink = images[i].link
                linkIndex = imgurLink.find('.jpg')
                trgUrl = imgurLink[:linkIndex] + imageSize + imgurLink[linkIndex:]
                # image_message_list.append(ImageSendMessage(
                #     original_content_url=trgUrl,
                #     preview_image_url=trgUrl
                # ))
                image_message_list.append(FlexSendMessage(
                    alt_text = 'hello',
                    contents = {
                        "type": "bubble",
                        "hero": {
                            "type": "image",
                            "url": trgUrl,
                            "size": "full",
                            "aspectRatio": "30:30",
                            "aspectMode": "cover"
                        },
                        "body": {
                            "type": "box",
                            "layout": "vertical",
                            "contents": [
                                {
                                    "type": "text",
                                    "text": imgurNameList[imgurNameListIndex],
                                    "size": "3xl",
                                    "weight": "bold",
                                    "color": "#0000E3"
                                },
                                {
                                    "type": "text",
                                    "text": '恭喜幸運中獎!!!',
                                    "size": "xl",
                                    "weight": "bold",
                                    "color": "#FF0000"
                                }
                            ]
                        }
                    }
                ))
                imgurNameListIndex += 1
            print(image_message_list)
            # line_bot_api.reply_message(
            #     event.reply_token, [
            #         image_message_list[0],
            #         TextSendMessage(text='恭喜{'+ imgurNameA + '}幸運中獎!!!'),
            #         image_message_list[1],
            #         TextSendMessage(text='恭喜{'+ imgurNameB + '}幸運中獎!!!'),
            #     ])
            line_bot_api.reply_message(
                event.reply_token, [
                    image_message_list[0],
                    image_message_list[1],
                ])
            return 0
        # test message (scratch code kept for reference)
        # elif event.message.text == '婚禮資訊':
        #     with open('./asset/weddingInfo.json','r') as winfo:
        #         weddingInfo = json.load(winfo)
        #     print(weddingInfo)
        #     flex_message = FlexSendMessage(
        #         alt_text='婚禮資訊',
        #         contents = weddingInfo
        #     )
        #     print(flex_message)
        #     line_bot_api.reply_message(
        #         event.reply_token, [
        #             flex_message,
        #             TextSendMessage(text=' yoyo'),
        #             TextSendMessage(text='請傳一張圖片給我')
        #         ])
        # end of scratch code
        else:
            # Any other text message is ignored.
            # line_bot_api.reply_message(
            #     event.reply_token, [
            #         TextSendMessage(text=' yoyo'),
            #         TextSendMessage(text='請傳一張圖片給我')
            #     ])
            return 0
@handler.add(PostbackEvent)
def handle_postback(event):
    """Answer rich-menu postbacks with Flex messages loaded from JSON assets."""
    print(event)
    if(event.postback.data=='action=weddingInfo'):
        # Wedding information card.
        with open('./asset/weddingInfo.json','r') as winfo:
            weddingInfo = json.load(winfo)
        flex_message = FlexSendMessage(
            alt_text = '婚禮資訊',
            contents = weddingInfo
        )
        line_bot_api.reply_message(
            event.reply_token, [
                flex_message
            ])
        return 0
    elif(event.postback.data=='action=trafficInfo'):
        # Directions / traffic information card.
        with open('./asset/trafficInfo.json','r') as trffo:
            trafficInfo = json.load(trffo)
        flex_message = FlexSendMessage(
            alt_text = '交通資訊',
            contents = trafficInfo
        )
        line_bot_api.reply_message(
            event.reply_token, [
                flex_message
            ])
        return 0
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
|
#!/bin/python3
import requests
import argparse
from html.parser import HTMLParser
class WFarm():
    """Spider a website and harvest its visible words into a wordlist file.

    NOTE: instantiating this class runs the whole program (argument parsing,
    crawling, and output writing all happen in ``__init__``).
    """
    class CustomHTMLParser(HTMLParser):
        """Per-page parser: collects candidate words and links to visit next."""
        def __init__(self, nextUrls, url, args, wordsFound):
            HTMLParser.__init__(self)
            self.nextUrls = nextUrls      # shared set: frontier for the next depth pass
            self.url = url                # page URL, used to resolve relative links
            self.args = args              # parsed CLI options (min/max/c/l)
            self.wordsFound = wordsFound  # shared set of harvested words
        def handle_starttag(self, tag, attrs):
            # Queue anchor hrefs: absolute links as-is, site-relative links
            # resolved by prefixing the current page's URL.
            if (tag == 'a'):
                for attr in attrs:
                    if (attr[0] == 'href'):
                        link = attr[1]
                        if (link.startswith(tuple(['http', 'https']))):
                            self.nextUrls.add(link)
                        elif (link.startswith('/')):
                            self.nextUrls.add(self.url + link)
        def handle_data(self, data):
            # Harvest words from a text node until the -l word limit is hit.
            words = data.split(' ')
            for word in words:
                if (len(self.wordsFound) >= int(self.args.l)):
                    return
                # Strip common punctuation and whitespace from the candidate.
                word = word.strip()
                word = word.replace(',', '')
                word = word.replace('?', '')
                word = word.replace('{', '')
                word = word.replace('}', '')
                word = word.replace('(', '')
                word = word.replace(')', '')
                word = word.replace('\n', '')
                word = word.replace('\t', '')
                # Keep only words whose length is within [-min, -max].
                if (len(word) >= int(self.args.min) and len(word) <= int(self.args.max)):
                    self.wordsFound.add(word)
                    if (self.args.c):
                        # Also record upper/lower-case variants when -c is set.
                        self.wordsFound.add(word.upper())
                        self.wordsFound.add(word.lower())
    def __init__(self):
        super().__init__()
        self.parser = argparse.ArgumentParser()
        self.wordsFound = set()    # all harvested words
        self.foundUrls = set()     # URLs being visited in the current pass
        self.nextUrls = set()      # frontier discovered for the next pass
        self.totalUrls = 1
        self.attemptCount = 0
        # Running the pipeline from the constructor: parse CLI, crawl, write.
        self.parseArgs()
        self.spiderUrls()
        self.writeWordlist()
    def parseArgs(self):
        """Define and parse the command-line interface."""
        self.parser.add_argument('url', help='url to spider.')
        self.parser.add_argument('-min', help='minimum length of word. Default is 4.', default=4)
        self.parser.add_argument('-max', help='maximum length of word. Default is 10.', default=10)
        self.parser.add_argument('-d', help='depth of urls found on page to visit. Default is 2.', default=2)
        self.parser.add_argument('-o', help='path of output file of wordlist.', default='wordlist.txt')
        self.parser.add_argument('-c', help='include both upper and lower case.', action='store_true')
        self.parser.add_argument('-l', help='max limit of words to add to wordlist. Default is 200k', default=200000)
        self.args = self.parser.parse_args()
    def spiderUrls(self):
        """Breadth-first crawl up to -d levels deep, harvesting words per page."""
        try:
            self.nextUrls.add(self.args.url)
            for depth in range(int(self.args.d)):
                if (len(self.nextUrls) == 0):
                    return
                self.totalUrls += len(self.nextUrls)
                # Swap the frontier: links found last pass become this pass's work.
                self.foundUrls = self.nextUrls.copy()
                self.nextUrls.clear()
                for url in self.foundUrls:
                    self.attemptCount += 1
                    print('{}/{}'.format(self.attemptCount, self.totalUrls), end='\r')
                    print('Trying: {} .... '.format(url), end='')
                    self.getWordsFromUrl(url)
        except KeyboardInterrupt:
            # Ctrl-C stops crawling; __init__ still writes the partial wordlist.
            print('\nProgram terminated by user!')
    def getWordsFromUrl(self, url):
        """Fetch one page and feed it to the parser; exits once -l is reached."""
        try:
            resp = requests.get(url, headers={'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:45.0) Gecko/20100101 Firefox/45.0'})
            print('[{}]'.format(resp.status_code))
            if (resp.status_code == 200):
                htmlParser = self.CustomHTMLParser(self.nextUrls, url, self.args, self.wordsFound)
                htmlParser.feed(resp.text)
                if (len(self.wordsFound) >= int(self.args.l)):
                    print('Maximum word limit reached!')
                    self.writeWordlist()
                    # exit() raises SystemExit, which is not caught by the
                    # `except Exception` below, so the program terminates here.
                    exit()
        except Exception as e:
            # Best-effort crawl: a single failing URL must not abort the run.
            print('A problem occured! Failed to get this url!')
    def writeWordlist(self):
        """Dump the harvested words, one per line, to the -o output path."""
        print('Writing wordlist to file....')
        with open(self.args.o, 'w') as wordlistFile:
            wordlistFile.write('\n'.join(self.wordsFound))
# Instantiation runs the whole program (see WFarm.__init__).
WFarm()
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import binascii
import os
import shutil
import struct
import tempfile
import unittest
from swift.cli import relinker
from swift.common import exceptions, ring, utils
from swift.common import storage_policy
from swift.common.storage_policy import (
StoragePolicy, StoragePolicyCollection, POLICIES)
from swift.obj.diskfile import write_metadata
from test.unit import FakeLogger, skip_if_no_xattrs
class TestRelinker(unittest.TestCase):
    """Tests for swift.cli.relinker's relink/cleanup partition-power tools."""
    def setUp(self):
        """Build a one-device object tree containing a single object on disk."""
        skip_if_no_xattrs()
        self.logger = FakeLogger()
        self.testdir = tempfile.mkdtemp()
        self.devices = os.path.join(self.testdir, 'node')
        shutil.rmtree(self.testdir, ignore_errors=1)
        os.mkdir(self.testdir)
        os.mkdir(self.devices)
        self.rb = ring.RingBuilder(8, 6.0, 1)
        for i in range(6):
            ip = "127.0.0.%s" % i
            self.rb.add_dev({'id': i, 'region': 0, 'zone': 0, 'weight': 1,
                             'ip': ip, 'port': 10000, 'device': 'sda1'})
        self.rb.rebalance(seed=1)
        self.existing_device = 'sda1'
        os.mkdir(os.path.join(self.devices, self.existing_device))
        self.objects = os.path.join(self.devices, self.existing_device,
                                    'objects')
        os.mkdir(self.objects)
        self._hash = utils.hash_path('a/c/o')
        digest = binascii.unhexlify(self._hash)
        # Partition under the current power (top 8 bits of the hash) ...
        part = struct.unpack_from('>I', digest)[0] >> 24
        # ... and under the increased power (one extra bit of the hash).
        self.next_part = struct.unpack_from('>I', digest)[0] >> 23
        self.objdir = os.path.join(
            self.objects, str(part), self._hash[-3:], self._hash)
        os.makedirs(self.objdir)
        self.object_fname = "1278553064.00000.data"
        self.objname = os.path.join(self.objdir, self.object_fname)
        with open(self.objname, "wb") as dummy:
            dummy.write(b"Hello World!")
            write_metadata(dummy, {'name': '/a/c/o', 'Content-Length': '12'})
        test_policies = [StoragePolicy(0, 'platin', True)]
        storage_policy._POLICIES = StoragePolicyCollection(test_policies)
        # Location the object should occupy after the partition-power increase.
        self.expected_dir = os.path.join(
            self.objects, str(self.next_part), self._hash[-3:], self._hash)
        self.expected_file = os.path.join(self.expected_dir, self.object_fname)
    def _save_ring(self):
        """Serialize the ring for every policy and force relinker to reload it."""
        rd = self.rb.get_ring()
        for policy in POLICIES:
            rd.save(os.path.join(
                self.testdir, '%s.ring.gz' % policy.ring_name))
            # Enforce ring reloading in relinker
            policy.object_ring = None
    def tearDown(self):
        shutil.rmtree(self.testdir, ignore_errors=1)
        storage_policy.reload_storage_policies()
    def test_relink(self):
        """relink creates a hard link for the object in the next-power location."""
        self.rb.prepare_increase_partition_power()
        self._save_ring()
        relinker.relink(self.testdir, self.devices, True)
        self.assertTrue(os.path.isdir(self.expected_dir))
        self.assertTrue(os.path.isfile(self.expected_file))
        stat_old = os.stat(os.path.join(self.objdir, self.object_fname))
        stat_new = os.stat(self.expected_file)
        # Same inode: relink hard-links rather than copies.
        self.assertEqual(stat_old.st_ino, stat_new.st_ino)
    def _common_test_cleanup(self, relink=True):
        # Create a ring that has prev_part_power set
        self.rb.prepare_increase_partition_power()
        self.rb.increase_partition_power()
        self._save_ring()
        os.makedirs(self.expected_dir)
        if relink:
            # Create a hardlink to the original object name. This is expected
            # after a normal relinker run
            os.link(os.path.join(self.objdir, self.object_fname),
                    self.expected_file)
    def test_cleanup(self):
        """cleanup removes the old location once the new hard link exists."""
        self._common_test_cleanup()
        self.assertEqual(0, relinker.cleanup(self.testdir, self.devices, True))
        # Old objectname should be removed, new should still exist
        self.assertTrue(os.path.isdir(self.expected_dir))
        self.assertTrue(os.path.isfile(self.expected_file))
        self.assertFalse(os.path.isfile(
            os.path.join(self.objdir, self.object_fname)))
    def test_cleanup_not_yet_relinked(self):
        """cleanup refuses (rc 1) to remove an object that was never relinked."""
        self._common_test_cleanup(relink=False)
        self.assertEqual(1, relinker.cleanup(self.testdir, self.devices, True))
        self.assertTrue(os.path.isfile(
            os.path.join(self.objdir, self.object_fname)))
    def test_cleanup_deleted(self):
        """A tombstone in the new location is accepted by cleanup."""
        self._common_test_cleanup()
        # Pretend the object got deleted inbetween and there is a tombstone
        fname_ts = self.expected_file[:-4] + "ts"
        os.rename(self.expected_file, fname_ts)
        self.assertEqual(0, relinker.cleanup(self.testdir, self.devices, True))
    def test_cleanup_doesnotexist(self):
        """A missing file in the new location is reported as an error (rc 1)."""
        self._common_test_cleanup()
        # Pretend the file in the new place got deleted inbetween
        os.remove(self.expected_file)
        self.assertEqual(
            1, relinker.cleanup(self.testdir, self.devices, True, self.logger))
        self.assertEqual(self.logger.get_lines_for_level('warning'),
                         ['Error cleaning up %s: %s' % (self.objname,
                          repr(exceptions.DiskFileNotExist()))])
    def test_cleanup_non_durable_fragment(self):
        """Non-durable EC fragments are tolerated when the new location exists."""
        self._common_test_cleanup()
        # Actually all fragments are non-durable and raise and DiskFileNotExist
        # in EC in this test. However, if the counterpart exists in the new
        # location, this is ok - it will be fixed by the reconstructor later on
        storage_policy._POLICIES[0].policy_type = 'erasure_coding'
        self.assertEqual(
            0, relinker.cleanup(self.testdir, self.devices, True, self.logger))
        self.assertEqual(self.logger.get_lines_for_level('warning'), [])
    def test_cleanup_quarantined(self):
        """Corrupt data in the new location is quarantined and reported (rc 1)."""
        self._common_test_cleanup()
        # Pretend the object in the new place got corrupted
        with open(self.expected_file, "wb") as obj:
            obj.write(b'trash')
        self.assertEqual(
            1, relinker.cleanup(self.testdir, self.devices, True, self.logger))
        self.assertIn('failed audit and was quarantined',
                      self.logger.get_lines_for_level('warning')[0])
|
from flask import *
from flask_bootstrap import *
from flask_moment import Moment
from datetime import datetime
from flask_mail import *
import os
# NOTE(review): hardcoded admin password -- should come from config/env, not source.
localpass = "admin"
app = Flask(__name__)
Bootstrap(app)
Moment(app)
print('no yeet')  # leftover debug output
# NOTE(review): hardcoded secret key -- must be random and kept out of source.
app.config['SECRET_KEY'] = "SomeSecretText"
# Seed "user database": a plain in-memory dict keyed by email.
# NOTE(review): passwords are stored in plaintext in source -- do not ship this.
users = {"kairan.quazi@gmail.com": {'first_name': 'Kairan', 'last_name': 'Quazi', 'email': 'kairan.quazi@gmail.com',
                                    'password': "@Kairan69!"}}
# NOTE(review): this module-level dict shadows flask.session (imported via *);
# its state is shared across ALL visitors, not per-user. Confirm intent.
session = {}
print('yeey')  # leftover debug output
# SMTP configuration. NOTE(review): mail credentials committed to source.
mail_settings = {
    "MAIL_SERVER": 'smtp.gmail.com',
    "MAIL_PORT": 465,
    "MAIL_USE_TLS": False,
    "MAIL_USE_SSL": True,
    "MAIL_USERNAME": 'jeffthefreshavocado',
    "MAIL_PASSWORD": 'chickennuggets'
}
app.config.update(mail_settings)
mail = Mail(app)
@app.route('/', methods=['POST', 'GET'])
def registration():
    """Render the registration page (also dumps the user table for debugging)."""
    print(users)  # debug: show currently registered users
    now = datetime.utcnow()
    return render_template('registration.html', current_time=now)
@app.route('/registeruser', methods=['GET', 'POST'])
def register():
    """Handle the registration form; reject emails that are already registered."""
    first_name = request.form['first_name']
    last_name = request.form['last_name']
    email = request.form['email']
    password = request.form['password']
    if email not in users:
        users[email] = {
            'first_name': first_name,
            'last_name': last_name,
            'email': email,
            'password': password,
        }
        return redirect('/login')
    flash('User registered already, please login')
    return render_template('registration.html', current_time=datetime.utcnow())
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Render the login form."""
    return render_template('login.html')
@app.route('/loginuser', methods=['GET', 'POST'])
def loginuser():
    """Validate credentials, send a welcome email, and open a session.

    Fix: removed the unused ``first_name`` local variable.
    NOTE(review): passwords are compared in plaintext, and ``session`` is the
    module-level dict shared by all visitors -- both need a proper fix.
    """
    email = request.form['email']
    password = request.form['password']
    if email not in users:
        flash('Please register')
        return redirect('/invalid')
    elif users[email]['password'] == password:
        with app.app_context():
            msg = Message(subject="Hello",
                          sender='jeffthefreshavocado@gmail.com',
                          recipients=['jeffthefreshavocado@gmail.com', email],
                          body="Hiiiii, this is Kairan. My Discord server is https://discord.gg/QetKWzc")
            mail.send(msg)
        session['user'] = users[email]
        return redirect('/home')
    else:
        flash('wrong password')
        return redirect('/login')
@app.route('/home')
def home():
    """Show the member page; bounce to /login when not authenticated."""
    # NOTE(review): ``session`` is the module-level dict defined above, not
    # flask.session -- login state is shared across every visitor.
    if 'user' not in session:
        return redirect('/login')
    return render_template('displaycontent.html', current_time=datetime.utcnow())
@app.route('/invalid')
def invalid():
    """Shown when login is attempted with an unregistered email."""
    return render_template('invalid.html')
@app.route('/logout', methods=['GET', 'POST'])
def logout():
    """Log the current user out and return to the login page.

    BUG FIX: ``del session['user']`` raised KeyError (-> HTTP 500) whenever no
    user was logged in; ``pop`` with a default is a safe no-op in that case.
    """
    session.pop('user', None)
    return redirect('/login')
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page."""
    return render_template('error404.html', error=e, type=404), 404
@app.route("/registration")
def yaaaaaa():
    """Alias route: /registration redirects to the root registration page."""
    return redirect('/')
@app.errorhandler(400)
def bad_request(e):
    """Render the custom 400 page."""
    return render_template('error400.html', error=e, type=400), 400
@app.errorhandler(500)
def internal_server_error(e):
    """Render the custom 500 page."""
    return render_template('error500.html', error=e, type=500), 500
@app.route("/exit/<passss>",methods=["GET"])
def exitt(passss):
    """Shut the process down when the URL password matches ``localpass``.

    BUG FIX: a wrong password previously fell through returning None, which
    Flask turns into an HTTP 500; now an explicit 403 response is returned.
    NOTE(review): the password travels in the URL (logged by proxies) and
    defaults to "admin" -- this remote-shutdown endpoint is dangerous as-is.
    """
    if passss == localpass:
        # exit() raises SystemExit, terminating the development server.
        exit()
    return "invalid password", 403
@app.route("/changepass/<oldpass>/<newpass>")
def changepass(oldpass,newpass):
    """Change the module-level ``localpass`` when the old password matches.

    BUG FIX: the assignment ``localpass = newpass`` made ``localpass`` local
    to the function, so reading it in the comparison raised UnboundLocalError
    on every request; declaring it ``global`` restores the intended behavior.
    Also returns an explicit 403 instead of None (HTTP 500) on a mismatch.
    """
    global localpass
    if oldpass == localpass:
        localpass = newpass
        return "changed"
    return "invalid password", 403
if __name__ == '__main__':
    # Development entry point; debug=True must not be enabled in production.
    app.run(debug=True,port = 80)
|
#!/usr/bin/env python3
import time
import sys
import threading
import copy
import random
from optparse import OptionParser
from PyQt5.QtCore import Qt, QTimer
from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QInputDialog
from PyQt5.QtWidgets import QLabel, QTextEdit, QFrame
from PyQt5.QtWidgets import QPushButton, QSlider, QHBoxLayout, QVBoxLayout
from PyQt5.QtGui import QImage, QPixmap, QPainter, QColor
# Gym environment used by the Baby AI Game
import gym
import gym_minigrid
from gym_minigrid import minigrid
import babyai
class ImgWidget(QLabel):
    """
    Widget to intercept clicks on the full image view
    """
    def __init__(self, window):
        super().__init__()
        # Parent AIGameWindow that receives the click coordinates.
        self.window = window
    def mousePressEvent(self, event):
        # Forward the click position to the window's pointing-and-naming logic.
        self.window.imageClick(event.x(), event.y())
class AIGameWindow(QMainWindow):
    """Application window for the baby AI game"""
    def __init__(self, env):
        super().__init__()
        self.initUI()
        # By default, manual stepping only
        self.fpsLimit = 0
        self.env = env
        self.lastObs = None
        self.resetEnv()
        # Timer driving automatic stepping when a frame rate is selected.
        self.stepTimer = QTimer()
        self.stepTimer.setInterval(0)
        self.stepTimer.setSingleShot(False)
        self.stepTimer.timeout.connect(self.stepClicked)
        # Pointing and naming data
        self.pointingData = []
    def initUI(self):
        """Create and connect the UI elements"""
        self.resize(512, 512)
        self.setWindowTitle('Baby AI Game')
        # Full render view (large view)
        self.imgLabel = ImgWidget(self)
        self.imgLabel.setFrameStyle(QFrame.Panel | QFrame.Sunken)
        leftBox = QVBoxLayout()
        leftBox.addStretch(1)
        leftBox.addWidget(self.imgLabel)
        leftBox.addStretch(1)
        # Area on the right of the large view
        rightBox = self.createRightArea()
        # Arrange widgets horizontally
        hbox = QHBoxLayout()
        hbox.addLayout(leftBox)
        hbox.addLayout(rightBox)
        # Create a main widget for the window
        mainWidget = QWidget(self)
        self.setCentralWidget(mainWidget)
        mainWidget.setLayout(hbox)
        # Show the application window
        self.show()
        self.setFocus()
    def createRightArea(self):
        """Build the right column: agent view, steps counter, mission box, buttons."""
        # Agent render view (partially observable)
        self.obsImgLabel = QLabel()
        self.obsImgLabel.setFrameStyle(QFrame.Panel | QFrame.Sunken)
        miniViewBox = QHBoxLayout()
        miniViewBox.addStretch(1)
        miniViewBox.addWidget(self.obsImgLabel)
        miniViewBox.addStretch(1)
        self.missionBox = QTextEdit()
        self.missionBox.setMinimumSize(500, 100)
        self.missionBox.textChanged.connect(self.missionEdit)
        buttonBox = self.createButtons()
        self.stepsLabel = QLabel()
        self.stepsLabel.setFrameStyle(QFrame.Panel | QFrame.Sunken)
        self.stepsLabel.setAlignment(Qt.AlignCenter)
        self.stepsLabel.setMinimumSize(60, 10)
        resetBtn = QPushButton("Reset")
        resetBtn.clicked.connect(self.resetEnv)
        stepsBox = QHBoxLayout()
        stepsBox.addStretch(1)
        stepsBox.addWidget(QLabel("Steps remaining"))
        stepsBox.addWidget(self.stepsLabel)
        stepsBox.addWidget(resetBtn)
        stepsBox.addStretch(1)
        hline2 = QFrame()
        hline2.setFrameShape(QFrame.HLine)
        hline2.setFrameShadow(QFrame.Sunken)
        # Stack everything up in a vertical layout
        vbox = QVBoxLayout()
        vbox.addLayout(miniViewBox)
        vbox.addLayout(stepsBox)
        vbox.addWidget(hline2)
        vbox.addWidget(QLabel("Mission"))
        vbox.addWidget(self.missionBox)
        vbox.addLayout(buttonBox)
        return vbox
    def createButtons(self):
        """Create the row of UI buttons"""
        stepButton = QPushButton("Step")
        stepButton.clicked.connect(self.stepClicked)
        minusButton = QPushButton("- Reward")
        minusButton.clicked.connect(self.minusReward)
        plusButton = QPushButton("+ Reward")
        plusButton.clicked.connect(self.plusReward)
        # Frame-rate slider: 0 = manual stepping, 100 = as fast as possible.
        slider = QSlider(Qt.Horizontal, self)
        slider.setFocusPolicy(Qt.NoFocus)
        slider.setMinimum(0)
        slider.setMaximum(100)
        slider.setValue(0)
        slider.valueChanged.connect(self.setFrameRate)
        self.fpsLabel = QLabel("Manual")
        self.fpsLabel.setFrameStyle(QFrame.Panel | QFrame.Sunken)
        self.fpsLabel.setAlignment(Qt.AlignCenter)
        self.fpsLabel.setMinimumSize(80, 10)
        # Assemble the buttons into a horizontal layout
        hbox = QHBoxLayout()
        hbox.addStretch(1)
        hbox.addWidget(stepButton)
        hbox.addWidget(slider)
        hbox.addWidget(self.fpsLabel)
        hbox.addStretch(1)
        hbox.addWidget(minusButton)
        hbox.addWidget(plusButton)
        hbox.addStretch(1)
        return hbox
    def keyPressEvent(self, e):
        """Map arrow/page/space keys to agent actions; Backspace resets, Esc quits."""
        # Manual agent control
        actions = self.env.unwrapped.actions
        if e.key() == Qt.Key_Left:
            self.stepEnv(actions.left)
        elif e.key() == Qt.Key_Right:
            self.stepEnv(actions.right)
        elif e.key() == Qt.Key_Up:
            self.stepEnv(actions.forward)
        elif e.key() == Qt.Key_PageUp:
            self.stepEnv(actions.pickup)
        elif e.key() == Qt.Key_PageDown:
            self.stepEnv(actions.drop)
        elif e.key() == Qt.Key_Space:
            self.stepEnv(actions.toggle)
        elif e.key() == Qt.Key_Backspace:
            self.resetEnv()
        elif e.key() == Qt.Key_Escape:
            self.close()
    def mousePressEvent(self, event):
        """
        Clear the focus of the text boxes and buttons if somewhere
        else on the window is clicked
        """
        # Set the focus on the full render image
        self.imgLabel.setFocus()
        QMainWindow.mousePressEvent(self, event)
    def imageClick(self, x, y):
        """
        Pointing and naming logic
        """
        # Set the focus on the full render image
        self.imgLabel.setFocus()
        env = self.env.unwrapped
        imgW = self.imgLabel.size().width()
        imgH = self.imgLabel.size().height()
        # Map the pixel click position to grid-cell coordinates.
        i = (env.grid.width * x) // imgW
        j = (env.grid.height * y) // imgH
        assert i < env.grid.width
        assert j < env.grid.height
        print('grid clicked: i=%d, j=%d' % (i, j))
        desc, ok = QInputDialog.getText(self, 'Pointing & Naming', 'Enter Description:')
        desc = str(desc)
        if not ok or len(desc) == 0:
            return
        pointObj = env.grid.get(i, j)
        if pointObj is None:
            return
        print('description: "%s"' % desc)
        print('object: %s %s' % (pointObj.color, pointObj.type))
        viewSz = minigrid.AGENT_VIEW_SIZE
        NUM_TARGET = 50
        numItrs = 0
        numPos = 0
        numNeg = 0
        # Collect up to NUM_TARGET positive (object visible) and NUM_TARGET
        # negative (object not visible) observation examples, capped at 300 tries.
        while (numPos < NUM_TARGET or numNeg < NUM_TARGET) and numItrs < 300:
            env2 = copy.deepcopy(env)
            # Randomly place the agent around the selected point
            x, y = i, j
            x += random.randint(-viewSz, viewSz)
            y += random.randint(-viewSz, viewSz)
            x = max(0, min(x, env2.grid.width - 1))
            y = max(0, min(y, env2.grid.height - 1))
            env2.agent_pos = (x, y)
            env2.agent_dir = random.randint(0, 3)
            # Don't want to place the agent on top of something
            if env2.grid.get(*env2.agent_pos) != None:
                continue
            agent_sees = env2.agent_sees(i, j)
            obs = env2.gen_obs()
            img = obs['image'] if isinstance(obs, dict) else obs
            obsGrid = minigrid.Grid.decode(img)
            datum = {
                'desc': desc,
                'img': img,
                'pos': (i, j),
                'present': agent_sees
            }
            if agent_sees and numPos < NUM_TARGET:
                self.pointingData.append(datum)
                numPos += 1
            if not agent_sees and numNeg < NUM_TARGET:
                # Don't want identical object in mismatch examples
                if (pointObj.color, pointObj.type) not in obsGrid:
                    self.pointingData.append(datum)
                    numNeg += 1
            numItrs += 1
        print('positive examples: %d' % numPos)
        print('negative examples: %d' % numNeg)
        print('total examples: %d' % len(self.pointingData))
    def missionEdit(self):
        # The agent will get the mission as an observation
        # before performing the next action
        text = self.missionBox.toPlainText()
        self.lastObs['mission'] = text
    def plusReward(self):
        """Give the agent a +1 reward (button callback)."""
        print('+reward')
        self.env.setReward(1)
    def minusReward(self):
        """Give the agent a -1 reward (button callback)."""
        print('-reward')
        self.env.setReward(-1)
    def stepClicked(self):
        """Advance the environment by one random step (button/timer callback)."""
        self.stepEnv(action=None)
    def setFrameRate(self, value):
        """Set the frame rate limit. Zero for manual stepping."""
        print('Set frame rate: %s' % value)
        self.fpsLimit = int(value)
        if value == 0:
            self.fpsLabel.setText("Manual")
            self.stepTimer.stop()
        elif value == 100:
            self.fpsLabel.setText("Fastest")
            self.stepTimer.setInterval(0)
            self.stepTimer.start()
        else:
            self.fpsLabel.setText("%s FPS" % value)
            self.stepTimer.setInterval(int(1000 / self.fpsLimit))
            self.stepTimer.start()
    def resetEnv(self):
        """Reset the environment and refresh all views."""
        obs = self.env.reset()
        self.lastObs = obs
        self.showEnv(obs)
    def showEnv(self, obs):
        """Refresh the full view, the agent view, the mission text and step count."""
        unwrapped = self.env.unwrapped
        # Render and display the environment
        pixmap = self.env.render(mode='pixmap')
        self.imgLabel.setPixmap(pixmap)
        # Render and display the agent's view
        image = obs['image']
        obsPixmap = unwrapped.get_obs_render(image)
        self.obsImgLabel.setPixmap(obsPixmap)
        # Update the mission text
        mission = obs['mission']
        self.missionBox.setPlainText(mission)
        # Set the steps remaining
        stepsRem = unwrapped.steps_remaining
        self.stepsLabel.setText(str(stepsRem))
    def stepEnv(self, action=None):
        # If no manual action was specified by the user
        if action == None:
            action = random.randint(0, self.env.action_space.n - 1)
        obs, reward, done, info = self.env.step(action)
        self.showEnv(obs)
        self.lastObs = obs
        if done:
            self.resetEnv()
def main(argv):
    """Parse CLI options, create the gym environment, and run the Qt app."""
    parser = OptionParser()
    parser.add_option(
        "--env-name",
        help="gym environment to load",
        default='MiniGrid-MultiRoom-N6-v0'
    )
    (options, args) = parser.parse_args()
    # Load the gym environment
    env = gym.make(options.env_name)
    # Create the application window
    app = QApplication(sys.argv)
    window = AIGameWindow(env)
    # Run the application
    sys.exit(app.exec_())
if __name__ == '__main__':
    main(sys.argv)
|
#!/usr/bin/env python
import sys, os
import numpy as np
import torch
import matplotlib.pyplot as plt
plt.style.use('seaborn-paper')
import yaml
import glob
# Result-file keys that receive mean/std summaries in the printed reports.
fields = ['coll_intensity', 'gp_mse', 'in_collision', 'max_penetration', 'num_iters', 'ext_cost_per_iter', 'task_loss_per_iter',
          'constraint_violation']
filetype = '.yaml'  # extension of the per-epoch result files
def print_stats(all_results, best_fixed):
    """Print mean/std summaries of every tracked field for each epoch."""
    for epoch in sorted(all_results):
        print('####################### Epoch = {} ##############'.format(epoch))
        data = all_results[epoch]
        for field in fields:
            if field not in data:
                continue
            values = data[field]
            if field in ('ext_cost_per_iter', 'task_loss_per_iter'):
                # Per-iteration series: summarize each trial's final iteration.
                values = [series[-1] for series in values]
            print(f'avg {field} = {np.mean(values)}')
            print(f'std {field} = {np.std(values)}')
def print_stats_succ(data, file_name):
    """Print mean/std summaries over the successful (collision-free) trials only."""
    print('####################### {} successful only ##############'.format(file_name))
    # Indices of trials that finished without a collision.
    succ_ids = np.where(np.array(data['in_collision']) == False)[0]  # noqa: E712
    for field in fields:
        if field not in data:
            continue
        succ_data = np.array(data[field])[succ_ids]
        if field in ('ext_cost_per_iter', 'task_loss_per_iter'):
            # Per-iteration series: keep only each trial's final value.
            succ_data = [series[-1] for series in succ_data]
        print(f'avg {field} = {np.mean(succ_data)}')
        print(f'std {field} = {np.std(succ_data)}')
def plot_stats(all_results, best_fixed):
    """Plot per-epoch summary curves against the best fixed-covariance baseline.

    BUG FIX: the baseline summary used a Python 2 ``print`` statement, which
    is a SyntaxError under this file's python3 shebang; it is now print().
    """
    sorted_epochs = sorted(all_results.keys())
    num_unsolved = []
    num_iters = []
    gp_mse = []
    loss = []
    pos_loss = []
    vel_loss = []
    for epoch in sorted_epochs:
        data = all_results[epoch]
        num_unsolved.append(np.mean(data['in_collision']))
        num_iters.append(np.mean(data['num_iters']))
        gp_mse.append(np.mean(data['gp_mse']))
        mean_loss_env = []
        mean_pos_loss_env = []
        mean_vel_loss_env = []
        # Keep only the final-iteration value of each per-trial loss series.
        for d in data['loss_per_iter']:
            mean_loss_env.append(d[-1])
        loss.append(np.mean(mean_loss_env))
        for d in data['pos_loss_per_iter']:
            mean_pos_loss_env.append(d[-1])
        pos_loss.append(np.mean(mean_pos_loss_env))
        for d in data['vel_loss_per_iter']:
            mean_vel_loss_env.append(d[-1])
        vel_loss.append(np.mean(mean_vel_loss_env))
    # Horizontal baselines: best fixed covariance repeated across all epochs.
    num_unsolved_best = [np.mean(best_fixed['in_collision'])]*len(sorted_epochs)
    num_iters_best = [np.mean(best_fixed['num_iters'])]*len(sorted_epochs)
    gp_mse_best = [np.mean(best_fixed['gp_mse'])]*len(sorted_epochs)
    print("Best fixed covariance. In collision = {}, Avg. num iters = {}, GP_MSE = {}, Coll intensity = {}".format(
        np.mean(best_fixed['in_collision']),
        np.mean(best_fixed['num_iters']),
        np.mean(best_fixed['gp_mse']),
        np.mean(best_fixed['coll_intensity'])))
    fig, (ax1, ax2, ax3, ax4, ax5, ax6) = plt.subplots(nrows=6,ncols=1)
    ax1.plot(sorted_epochs, num_unsolved)
    ax1.plot(sorted_epochs, num_unsolved_best, 'r--')
    ax1.set_xlabel('epoch')
    ax1.set_ylabel('Mean unsolved')
    ax1.set_xlim(-1, sorted_epochs[-1])
    ax2.plot(sorted_epochs, gp_mse)
    ax2.plot(sorted_epochs, gp_mse_best, 'r--')
    ax2.set_xlabel('epoch')
    ax2.set_ylabel('Mean gp mse')
    ax2.set_xlim(-1, sorted_epochs[-1])
    ax3.plot(sorted_epochs, num_iters)
    ax3.plot(sorted_epochs, num_iters_best, 'r--')
    ax3.set_xlabel('epoch')
    ax3.set_ylabel('Mean iters')
    ax3.set_xlim(-1, sorted_epochs[-1])
    ax4.plot(sorted_epochs, loss)
    ax4.set_xlabel('epoch')
    ax4.set_ylabel('Mean regression loss')
    ax4.set_xlim(-1, sorted_epochs[-1])
    ax5.plot(sorted_epochs, pos_loss)
    ax5.set_xlabel('epoch')
    ax5.set_ylabel('Mean regression loss (position only)')
    ax5.set_xlim(-1, sorted_epochs[-1])
    ax6.plot(sorted_epochs, vel_loss)
    ax6.set_xlabel('epoch')
    ax6.set_ylabel('Mean regression loss (velocity only)')
    ax6.set_xlim(-1, sorted_epochs[-1])
    plt.show()
def main(args):
    """Load per-epoch validation results plus the best fixed-covariance
    baseline from ``args.in_folder``, then print and plot their summaries."""
    in_folder = os.path.abspath(args.in_folder)
    if args.env_type is None:
        files = glob.glob(in_folder + "/epoch_*_valid_results.yaml")
        # Index of the epoch number within the '_'-split file name.
        epoch_id = -3
        init_file = os.path.join(in_folder, 'init_planner_valid_results.yaml')
        print(files)
    else:
        files = glob.glob(in_folder + "/epoch_*_valid_results_"+args.env_type+".yaml")
        init_file = os.path.join(in_folder, 'init_planner_valid_results' + '.yaml')
        epoch_id = -4
    all_results = {}
    with open(init_file, 'r') as fp:
        # NOTE(review): yaml.load without an explicit Loader is deprecated and
        # unsafe on untrusted input; these are locally generated result files.
        data = yaml.load(fp)
    # Epoch -1 holds the initial (untrained) planner results.
    all_results[-1] = data
    for file in files:
        epoch_num = int(file.split('_')[epoch_id])
        with open(file, 'r') as fp:
            data = yaml.load(fp)
        all_results[epoch_num] = data
    best_file = os.path.abspath(os.path.join(in_folder, args.best_cov_file+".yaml"))
    with open(best_file, 'r') as fp:
        best_fixed = yaml.load(fp)
    print_stats(all_results, best_fixed)
    plot_stats(all_results, best_fixed)
    plt.show()
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='PyTorch Example')
    parser.add_argument('--in_folder', type=str, required=True, help='folder with results file')
    parser.add_argument('--best_cov_file', type=str)
    parser.add_argument('--env_type', type=str)
    args = parser.parse_args()
    main(args)
|
from django.conf import settings
class NoDefault:
    """Sentinel: marks a setting that has no default and must be configured."""
    pass
# Defaults for dotted setting names; names absent here are mandatory.
JWT_DEFAULT_SETTINGS = {
    'JWT_CLIENT.RENAME_ATTRIBUTES': {},
    'JWT_CLIENT.DEFAULT_ATTRIBUTES': {},
    'JWT_CLIENT.CREATE_USER': False,
    'JWT_CLIENT.COOKIE_NAME': 'id_token',
    'JWT_SERVER.JWK_EXPIRATION_TIME': 3600,
    'JWT_SERVER.JWT_EXPIRATION_TIME': 14400,
    'LOGOUT_URL': 'logout',
}
def convert_url(url):
    """Normalize a URL by dropping a single trailing slash, if present."""
    return url[:-1] if url.endswith('/') else url
# Per-setting post-processing applied by get_setting after lookup.
JWT_DEFAULT_CONVERTERS = {
    'JWT_CLIENT.OPENID2_URL': convert_url,
}
def get_setting(names):
    """Resolve a dotted setting name against Django settings.

    Walks ``names`` (e.g. ``'JWT_CLIENT.COOKIE_NAME'``) one component at a
    time, using dict lookup or attribute access as appropriate and falling
    back to JWT_DEFAULT_SETTINGS. Raises when a mandatory setting is absent.
    The final value is passed through the matching JWT_DEFAULT_CONVERTERS
    entry (identity by default).
    """
    default = JWT_DEFAULT_SETTINGS.get(names, NoDefault)
    value = settings
    for name in names.split('.'):
        if isinstance(value, dict):
            value = value.get(name, default)
        else:
            value = getattr(value, name, default)
    # Identity check for the sentinel: '==' could invoke arbitrary __eq__ on
    # user-supplied setting values and mis-detect the missing case.
    if value is NoDefault:
        raise Exception('Setting %s is required' % names)
    converter = JWT_DEFAULT_CONVERTERS.get(names, lambda x: x)
    value = converter(value)
    return value
def get_domain_from_url(url):
    """Return the scheme + authority prefix, e.g. 'https://host' from 'https://host/path'."""
    parts = url.split('/')
    return '/'.join(parts[:3])
|
from datetime import datetime
class PriceOfferTransformPipeline(object):
    """Scrapy-style pipeline: cast price fields to float and stamp a missing time."""

    def process_item(self, item, spider):
        """Normalize one scraped item in place and return it."""
        price = item['price']  # mandatory field: KeyError if absent, as before
        if price:
            item['price'] = float(price)
        # Optional attributes
        discounted = item.get('discounted_price')
        if discounted:
            item['discounted_price'] = float(discounted)
        if not item.get('time'):
            item['time'] = datetime.utcnow().isoformat()
        return item
|
"""Solution to Project Euler Problem 1
https://projecteuler.net/problem=1
"""
NUMBERS = 3, 5
MAXIMUM = 1000


def compute(*numbers, maximum=MAXIMUM):
    """Compute the sum of the multiples of `numbers` below `maximum`."""
    if not numbers:
        numbers = NUMBERS
    multiples = set()
    for number in numbers:
        multiples.update(range(0, maximum, number))
    return sum(multiples)
|
from objects.modulebase import ModuleBase
from objects.permissions import PermissionKickMembers
from utils.funcs import find_user, request_reaction_confirmation
class Module(ModuleBase):
    """Moderation command: kick a guild member after interactive confirmation."""
    usage_doc = '{prefix}{aliases} <user> [reason]'
    short_doc = 'Kick user from server'
    name = 'kick'
    aliases = (name, )
    category = 'Moderation'
    min_args = 1
    guild_only = True
    bot_perms = (PermissionKickMembers(), )
    user_perms = (PermissionKickMembers(), )
    async def on_call(self, ctx, args, **flags):
        """Resolve the target, enforce role hierarchy, confirm via reaction,
        DM the target, then perform the kick.

        Fix: corrected the user-facing typo 'Successefully' -> 'Successfully'.
        """
        guild_member = await find_user(args[1], ctx.message, strict_guild=True)
        if not guild_member:
            return '{warning} User not found'
        # NOTE(review): args appears to be a sliceable command-args object;
        # confirm args[2:] stringifies as the free-form reason text.
        reason = args[2:] or ''
        if guild_member == ctx.guild.owner:
            return '{warning} Can\'t kick guild owner'
        # Role-hierarchy checks: both the bot and the invoker must outrank the target.
        if ctx.me.top_role <= guild_member.top_role:
            return '{warning} My top role is lower or equal to member\'s top role, can\'t kick'
        if ctx.author.top_role <= guild_member.top_role and ctx.guild.owner != ctx.author:
            return '{warning} Your top role is lower or equal to member\'s top role, can\'t kick'
        kick_msg = await ctx.send(
            (
                f'Are you sure you want to kick **{guild_member}** ?' +
                (f'\nReason:```\n{reason}```' if reason else '\n') +
                f'React with ✅ to continue'
            )
        )
        if await request_reaction_confirmation(kick_msg, ctx.author):
            # DM the member before kicking: afterwards we may no longer share a guild.
            kick_notification = await self.bot.send_message(
                guild_member,
                f'You were kicked from **{ctx.guild.name}**\n' +
                (f'Reason:```\n{reason}```' if reason else 'No reason given')
            )
            try:
                await ctx.guild.kick(
                    guild_member, reason=reason + f' kicked by {ctx.author}')
            except Exception:
                # Kick failed: retract the premature notification, then re-raise.
                await self.bot.delete_message(kick_notification)
                raise
            await self.bot.edit_message(
                kick_msg,
                content=(
                    f'Successfully kicked **{guild_member}** [{guild_member.id}]' +
                    (f'\nReason: ```\n{reason}```' if reason else '')
                )
            )
        else:
            await self.bot.edit_message(kick_msg, content=f'Cancelled kick of **{guild_member}**')
|
from java.awt.geom import AffineTransform
from org.geotools.referencing.operation.matrix import AffineTransform2D
from org.geotools.data import WorldFileReader, WorldFileWriter
from geoscript import util
class WorldFile(object):
  """
  World file reader and writer.

  A world file stores the six affine-transform coefficients mapping raster
  pixel coordinates to world coordinates (backed by GeoTools readers/writers).
  """
  def __init__(self, file):
    # file: path (or file-like value) accepted by util.toFile
    self.file = file
  def read(self):
    """Return a WorldFileReader for the backing file; raise if it is missing."""
    f = util.toFile(self.file)
    if f is None or not f.exists():
      raise Exception('No such file %s' % self.file)
    return WorldFileReader(f)
  def pixelSize(self):
    """Return the (x, y) pixel size read from the world file."""
    wf = self.read()
    return (wf.getXPixelSize(), wf.getYPixelSize())
  def rotation(self):
    """Return the (x, y) rotation coefficients read from the world file."""
    wf = self.read()
    return (wf.getRotationX(), wf.getRotationY())
  def ulc(self):
    """Return the (x, y) coordinate of the upper-left pixel center."""
    wf = self.read()
    return (wf.getXULC(), wf.getYULC())
  def write(self, bbox, size):
    """Write a world file mapping a raster of ``size`` (w, h) pixels onto ``bbox``."""
    # Negative y scale: raster rows grow downward while world y grows upward.
    scx, scy = bbox.width / size[0], -1*bbox.height / size[1]
    # Half-pixel offset places coordinates at the center of the corner pixel.
    at = AffineTransform(scx, 0, 0, scy, bbox.west+scx/2.0, bbox.north+scy/2.0)
    f = util.toFile(self.file)
    WorldFileWriter(f, at)
|
from ipykernel.kernelbase import Kernel
from iarm.arm import Arm
import re
import warnings
import iarm.exceptions
class ArmKernel(Kernel):
implementation = 'IArm'
implementation_version = '0.1.0'
language = 'ARM'
language_version = iarm.__version__
language_info = {
'name': 'ARM Coretex M0+ Thumb Assembly',
'mimetype': 'text/x-asm',
'file_extension': '.s'
}
banner = "Interpreted ARM"
    def __init__(self, *args, **kwargs):
        """Create the ARM interpreter and register the line-magic handlers."""
        super().__init__(*args, **kwargs)
        self.interpreter = Arm(1024)  # 1K memory
        # Magic-name -> handler dispatch table (several aliases share a handler).
        self.magics = {
            'run': self.magic_run,
            'register': self.magic_register,
            'reg': self.magic_register,
            'memory': self.magic_memory,
            'mem': self.magic_memory,
            'signed': self.magic_signed_rep,
            'unsigned': self.magic_unsigned_rep,
            'hex': self.magic_hex_rep,
            'help': self.magic_help,
            'generate_random': self.magic_generate_random,
            'postpone_execution': self.magic_postpone_execution
        }
        self.number_representation = ''
        self.magic_unsigned_rep('')  # Default to unsigned representation
    def convert_representation(self, i):
        """
        Return the proper representation for the given integer
        """
        if self.number_representation == 'unsigned':
            return i
        elif self.number_representation == 'signed':
            # If the sign bit (bit_width - 1) is set, convert the raw unsigned
            # value to its negative two's-complement equivalent.
            if i & (1 << self.interpreter._bit_width - 1):
                return -((~i + 1) & (2**self.interpreter._bit_width - 1))
            else:
                return i
        elif self.number_representation == 'hex':
            return hex(i)
        # NOTE(review): silently returns None for any other representation
        # value -- confirm that this branch is unreachable in practice.
def magic_generate_random(self, line):
"""
Set the generate random flag, unset registers and memory will return a random value.
Usage:
Call the magic by itself or with `true` to have registers and memory return a random value
if they are unset and read from, much like how real hardware would work.
Defaults to False, or to not generate random values
`%generate_random`
or
`%generate_random true`
or
`%generate_random false`
"""
line = line.strip().lower()
if not line or line == 'true':
self.interpreter.generate_random = True
elif line == 'false':
self.interpreter.generate_random = False
else:
stream_content = {'name': 'stderr', 'text': "unknwon value '{}'".format(line)}
self.send_response(self.iopub_socket, 'stream', stream_content)
return {'status': 'error',
'execution_count': self.execution_count,
'ename': ValueError.__name__,
'evalue': "unknwon value '{}'".format(line),
'traceback': '???'}
def magic_postpone_execution(self, line):
    """
    Postpone execution of instructions until explicitly run

    Usage:
    Call this magic with `true` or nothing to postpone execution,
    or call with `false` to execute each instruction when evaluated.
    This defaults to True.
    Note that each cell is executed only after all lines in
    the cell have been evaluated properly.

    `%postpone_execution`
    or
    `%postpone_execution true`
    or
    `%postpone_execution false`

    Returns None on success, or an error-status dict for an unrecognized
    argument.
    """
    line = line.strip().lower()
    if not line or line == 'true':
        self.interpreter.postpone_execution = True
    elif line == 'false':
        self.interpreter.postpone_execution = False
    else:
        # Bug fix: the error message previously read "unknwon".
        message = "unknown value '{}'".format(line)
        stream_content = {'name': 'stderr', 'text': message}
        self.send_response(self.iopub_socket, 'stream', stream_content)
        return {'status': 'error',
                'execution_count': self.execution_count,
                'ename': ValueError.__name__,
                'evalue': message,
                'traceback': '???'}
def magic_signed_rep(self, line):
    """
    Display all subsequent output values in signed (two's-complement) form.

    Usage:
    Just call this magic
    `%signed`
    """
    self.number_representation = 'signed'
def magic_unsigned_rep(self, line):
    """
    Display all subsequent output values in unsigned form.

    Usage:
    Just call this magic
    `%unsigned`
    """
    self.number_representation = 'unsigned'
def magic_hex_rep(self, line):
    """
    Display all subsequent output values in hexadecimal form.

    Usage:
    Just call this magic
    `%hex`
    """
    self.number_representation = 'hex'
def magic_register(self, line):
    """
    Print out the current value of a register

    Usage:
    Pass in the register, or a list of registers separated by spaces
    A range of registers can be entered by separating them by a hyphen

    `%reg R1`
    or
    `%reg R0 R5 R6`
    or
    `%reg R8-R12`
    """
    message = ""
    # Commas are optional separators; strip them before splitting on spaces.
    for reg in [i.strip() for i in line.replace(',', '').split()]:
        if '-' in reg:
            # We have a range (Rn-Rk)
            r1, r2 = reg.split('-')
            # TODO do we want to allow just numbers?
            # Extract the numeric part of each endpoint via the
            # interpreter's register regex.
            n1 = re.search(self.interpreter.REGISTER_REGEX, r1).groups()[0]
            n2 = re.search(self.interpreter.REGISTER_REGEX, r2).groups()[0]
            n1 = self.interpreter.convert_to_integer(n1)
            n2 = self.interpreter.convert_to_integer(n2)
            for i in range(n1, n2+1):
                # Rebuild each register name from the range's prefix letter.
                val = self.interpreter.register[r1[0] + str(i)]
                val = self.convert_representation(val)
                message += "{}: {}\n".format(r1[0] + str(i), val)
        else:
            val = self.interpreter.register[reg]
            val = self.convert_representation(val)
            message += "{}: {}\n".format(reg, val)
    stream_content = {'name': 'stdout', 'text': message}
    self.send_response(self.iopub_socket, 'stream', stream_content)
def magic_memory(self, line):
    """
    Print out the current value of memory

    Usage:
    Pass in the byte of memory to read, separated by spaces
    A range of memory contents can be entered by separating them by a hyphen

    `%mem 4 5`
    or
    `%mem 8-12`
    """
    # TODO add support for directives
    message = ""
    # Commas are optional separators; strip them before splitting on spaces.
    for address in [i.strip() for i in line.replace(',', '').split()]:
        if '-' in address:
            # We have a range (n-k)
            m1, m2 = address.split('-')
            n1 = re.search(self.interpreter.IMMEDIATE_NUMBER, m1).groups()[0]
            n2 = re.search(self.interpreter.IMMEDIATE_NUMBER, m2).groups()[0]
            n1 = self.interpreter.convert_to_integer(n1)
            n2 = self.interpreter.convert_to_integer(n2)
            for i in range(n1, n2 + 1):
                val = self.interpreter.memory[i]
                val = self.convert_representation(val)
                message += "{}: {}\n".format(str(i), val)
        else:
            # TODO fix what is the key for memory (currently it's an int, but registers are strings, should it be the same?)
            val = self.interpreter.memory[self.interpreter.convert_to_integer(address)]
            val = self.convert_representation(val)
            message += "{}: {}\n".format(address, val)
    stream_content = {'name': 'stdout', 'text': message}
    self.send_response(self.iopub_socket, 'stream', stream_content)
def magic_run(self, line):
    """
    Run the current program

    Usage:
    Call with a number to run that many steps,
    or call with no arguments to run to the end of the current program

    `%run`
    or
    `%run 1`
    """
    # Default to "run until the program ends" unless a step count was given.
    i = float('inf')
    if line.strip():
        i = int(line)
    try:
        # Capture interpreter warnings so they can be forwarded to the
        # frontend instead of being printed on the kernel's stderr.
        with warnings.catch_warnings(record=True) as w:
            self.interpreter.run(i)
        for warning_message in w:
            # TODO should this be stdout or stderr
            stream_content = {'name': 'stdout', 'text': 'Warning: ' + str(warning_message.message) + '\n'}
            self.send_response(self.iopub_socket, 'stream', stream_content)
    except iarm.exceptions.EndOfProgram as e:
        # Report the mnemonic of the instruction the program stopped on;
        # PC has already advanced past it, hence the -1.
        f_name = self.interpreter.program[self.interpreter.register['PC'] - 1].__name__
        f_name = f_name[:f_name.find('_')]
        message = "Error in {}: ".format(f_name)
        stream_content = {'name': 'stdout', 'text': message + str(e) + '\n'}
        self.send_response(self.iopub_socket, 'stream', stream_content)
    except Exception as e:
        for err in e.args:
            stream_content = {'name': 'stderr', 'text': str(err)}
            self.send_response(self.iopub_socket, 'stream', stream_content)
        return {'status': 'error',
                'execution_count': self.execution_count,
                'ename': type(e).__name__,
                'evalue': str(e),
                'traceback': '???'}
def magic_help(self, line):
    """
    Print out the help for magics

    Usage:
    Call help with no arguments to list all magics,
    or call it with a magic or instruction to print out its help info.

    `%help`
    or
    `%help run`
    """
    line = line.strip()
    if not line:
        # No argument: list every available magic, one per message.
        for magic in self.magics:
            stream_content = {'name': 'stdout', 'text': "%{}\n".format(magic)}
            self.send_response(self.iopub_socket, 'stream', stream_content)
    elif line in self.magics:
        # it's a magic
        stream_content = {'name': 'stdout', 'text': "{}\n{}".format(line, self.magics[line].__doc__)}
        self.send_response(self.iopub_socket, 'stream', stream_content)
    elif line in self.interpreter.ops:
        # it's an instruction
        stream_content = {'name': 'stdout', 'text': "{}\n{}".format(line, self.interpreter.ops[line].__doc__)}
        self.send_response(self.iopub_socket, 'stream', stream_content)
    else:
        stream_content = {'name': 'stderr', 'text': "'{}' not a known magic or instruction".format(line)}
        self.send_response(self.iopub_socket, 'stream', stream_content)
# TODO add tab completion
# TODO add completeness (can be used to return the prompt back to the user in case of an error)
def run_magic(self, line):
    """
    Dispatch *line* to a magic handler if it starts with '%'.

    Returns the handler's return value (an error dict or None);
    returns None for lines that are not magic invocations.
    """
    # TODO allow magics at end of code block
    # TODO allow more than one magic per block
    if not line.startswith('%'):
        return None
    # "%name rest of line" -> name, params ('' when no space present).
    name, _, params = line[1:].partition(' ')
    return self.magics[name](params)
def run_code(self, code):
    """
    Evaluate a block of assembly source in the interpreter.

    Forwards any captured warnings to the frontend as stdout messages.
    Returns None on success (or empty input), or an error-status dict
    suitable as a do_execute reply when evaluation raises.
    """
    if not code:
        return
    try:
        # Capture interpreter warnings so they can be forwarded to the
        # frontend instead of being printed on the kernel's stderr.
        with warnings.catch_warnings(record=True) as w:
            self.interpreter.evaluate(code)
        for warning_message in w:
            # TODO should this be stdout or stderr
            stream_content = {'name': 'stdout', 'text': 'Warning: ' + str(warning_message.message) + '\n'}
            self.send_response(self.iopub_socket, 'stream', stream_content)
    except Exception as e:
        for err in e.args:
            stream_content = {'name': 'stderr', 'text': "{}\n{}".format(type(e).__name__, str(err))}
            self.send_response(self.iopub_socket, 'stream', stream_content)
        return {'status': 'error',
                'execution_count': self.execution_count,
                'ename': type(e).__name__,
                'evalue': str(e),
                'traceback': '???'}
def do_execute(self, code, silent, store_history=True,
               user_expressions=None, allow_stdin=False):
    """
    Jupyter kernel entry point: execute one cell.

    Lines are accumulated as assembly until a magic line ('%...') is
    seen; the accumulated code is evaluated first, then the magic runs.
    The first error reply from either path aborts the cell.
    """
    instructions = ""
    for line in code.split('\n'):
        if line.startswith('%'):
            # TODO run current code, run magic, then continue
            # Flush any assembly collected so far before the magic.
            ret = self.run_code(instructions)
            if ret:
                return ret
            instructions = ""
            ret = self.run_magic(line)
            if ret:
                return ret
        else:
            instructions += line + '\n'
    # Evaluate whatever assembly remains after the last magic.
    ret = self.run_code(instructions)
    if ret:
        return ret
    return {'status': 'ok',
            'execution_count': self.execution_count,
            'payload': [],
            'user_expressions': {}
            }
if __name__ == '__main__':
    # Launch this kernel under Jupyter's kernel application machinery.
    from ipykernel.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=ArmKernel)
|
from astropy.table import QTable
from astropy.table import Table
from astropy.io import ascii
import numpy as np
import os
def savetable_S3(filename, time, wave_1d, stdspec, stdvar, optspec, opterr):
    """
    Save Stage 3 spectral products to an ECSV table file.

    The 1-D time axis and wavelength axis are broadcast against the 2-D
    spectra (integration x wavelength) so every column can be stored
    flattened in a single flat table.

    Parameters
    ----------
    filename : str
        Path of the ECSV file to write (overwritten if it exists).
    time : ndarray
        1-D array of integration times, length = number of integrations.
    wave_1d : ndarray
        1-D wavelength array, length = number of wavelength positions.
    stdspec, stdvar, optspec, opterr : ndarray
        2-D arrays of shape (integrations, wavelength positions):
        standard spectrum and its variance, optimal spectrum and its error.

    Raises
    ------
    ValueError
        If the array shapes are mutually inconsistent; the message lists
        the original (pre-flattening) shapes to aid debugging.
    """
    dims = stdspec.shape  # tuple (integration, wavelength position)
    # Remember the incoming shapes for the error message below.
    orig_shapes = [str(time.shape), str(wave_1d.shape), str(stdspec.shape), str(stdvar.shape), str(optspec.shape), str(opterr.shape)]
    # Broadcast the axes: repeat each time for every wavelength, tile the
    # wavelength grid once per integration.
    time = np.repeat(time, dims[1])
    wave_1d = np.tile(wave_1d, dims[0])
    stdspec = stdspec.flatten()
    stdvar = stdvar.flatten()
    optspec = optspec.flatten()
    opterr = opterr.flatten()
    arr = [time, wave_1d, stdspec, stdvar, optspec, opterr]
    try:
        table = QTable(arr, names=('time', 'wave_1d', 'stdspec', 'stdvar', 'optspec', 'opterr'))
        ascii.write(table, filename, format='ecsv', overwrite=True, fast_writer=True)
    except ValueError as e:
        raise ValueError("There was a shape mismatch between your arrays which had shapes:\n"+
                         "time, wave_1d, stdspec, stdvar, optspec, opterr\n"+
                         ",".join(orig_shapes)) from e
def readtable(filename):
    """Read back an ECSV table written by ``savetable_S3``."""
    return ascii.read(filename, format='ecsv')
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云(BlueKing) available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
"""
import datetime
from django.db import models
class dbExchangeLogManager(models.Manager):
    def save_record(self, data):
        """
        Save a database switch-over record.

        ``data`` is a dict with keys main_ip, stadnby_ip (sic — matches the
        misspelled model field), exchange_direction, operator, create_time.
        Returns {'result': bool, 'message': text}; never raises.
        """
        try:
            dbExchangeLog.objects.create(
                main_ip=data.get('main_ip'),
                stadnby_ip=data.get('stadnby_ip'),
                exchange_direction=data.get('exchange_direction'),
                operator=data.get('operator'),
                create_time=data.get('create_time'),
            )
            result = {'result': True, 'message': u"保存成功"}
        # Fix: "except Exception, e" is Python-2-only syntax; the "as" form
        # is valid on Python 2.6+ and Python 3.
        except Exception as e:
            result = {'result': False, 'message': u"保存失败, %s" % e}
        return result
class dbExchangeLog(models.Model):
    """
    Database switch-over record (which standby was promoted, by whom, when).
    """
    main_ip = models.CharField(u"主库ip", max_length=15)
    # NOTE(review): field name is a typo for "standby_ip"; renaming would
    # require a schema migration, so it is kept as-is.
    stadnby_ip = models.CharField(u"备库ip", max_length=15)
    exchange_direction = models.CharField(u"切换方向", max_length=15)
    # Bug fix: pass the callable, not its result. "datetime.datetime.now()"
    # was evaluated once at import time, freezing the default timestamp for
    # every subsequently created row.
    create_time = models.DateTimeField(u"切换时间", default=datetime.datetime.now)
    operator = models.CharField(u"记录人", max_length=64)

    objects = dbExchangeLogManager()

    def __unicode__(self):
        # Bug fix: the model has no "theme" field — returning self.theme
        # raised AttributeError whenever the record was rendered.
        return self.main_ip

    class Meta:
        verbose_name = u"数据库切换记录"
        verbose_name_plural = u"数据库切换记录"
class RackManager(models.Manager):
    def save_record(self, data):
        """
        Save a rack record.

        ``data`` is a dict with keys name, height, row_num, column_num,
        machine_room, operator.
        Returns {'result': bool, 'message': text}; never raises.
        """
        try:
            Rack.objects.create(
                name=data.get('name'),
                height=data.get('height'),
                row_num=data.get('row_num'),
                column_num=data.get('column_num'),
                machine_room=data.get('machine_room'),
                operator=data.get('operator')
            )
            result = {'result': True, 'message': u"保存成功"}
        # Fix: "except Exception, e" is Python-2-only syntax; the "as" form
        # is valid on Python 2.6+ and Python 3.
        except Exception as e:
            result = {'result': False, 'message': u"保存失败, %s" % e}
        return result
class Rack(models.Model):
    """
    Machine-room rack (name, size and grid position).
    """
    name = models.CharField(u"机柜名称", max_length=15, unique=True)
    height = models.CharField(u"机柜高度", max_length=2)
    row_num = models.CharField(u"所在行", max_length=2)
    column_num = models.CharField(u"所在列", max_length=2)
    machine_room = models.CharField(u"所在机房", max_length=2)
    # Bug fix: pass the callable, not its result. "datetime.datetime.now()"
    # was evaluated once at import time, freezing the default timestamp for
    # every subsequently created row.
    create_time = models.DateTimeField(u"创建时间", default=datetime.datetime.now)
    operator = models.CharField(u"记录人", max_length=64)

    def __unicode__(self):
        return self.name

    class Meta:
        verbose_name = u"机柜"
        verbose_name_plural = u"机柜"
|
from django.test import TestCase
# Create your tests here.
from django.urls import reverse
from licornes.models import Licorne
from licornes.models import User
from licornes.models import Etape
from django.conf import settings
from bs4 import BeautifulSoup
import re
import os
class IndexViewTest(TestCase):
    """Tests for the index view: licorne list, badges, map and links."""

    @classmethod
    def setUpTestData(cls):
        # Create a few users and give each of them several licornes.
        number_of_creators = 2
        number_of_licornes = 3
        cls.total_licornes = number_of_creators * number_of_licornes
        for user_id in range(number_of_creators):
            User.objects.create(username=f"utilisateur {user_id}")
            u = User.objects.get(username=f"utilisateur {user_id}")
            for licorne_id in range(number_of_licornes):
                Licorne.objects.create(
                    nom=f'Licorne {licorne_id} de {user_id}',
                    identifiant=f'{user_id}-{licorne_id}',
                    createur=u,
                )

    def test_view_url_exists_at_desired_location(self):
        response = self.client.get('/licornes/')
        self.assertEqual(response.status_code, 200)

    def test_view_url_accessible_by_name(self):
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'licornes/index.html')

    def test_licornes_are_present(self):
        # All created licornes must appear in the view's context and body.
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)
        self.assertTrue('meslicornes' in response.context)
        self.assertTrue(len(response.context['meslicornes']) == self.total_licornes)
        self.assertTrue("Licorne 0 de 0" in str(response.content))

    def test_licornes_ont_badge(self):
        # Each licorne heading carries a span with the "badge" CSS class.
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        h2s = soup.find_all("h2")
        badges_de_licornes = 0
        for h2 in h2s:
            if h2.span and "badge" in h2.span["class"]:
                badges_de_licornes += 1
        self.assertTrue(badges_de_licornes)
        self.assertEqual(badges_de_licornes, self.total_licornes)

    def test_titres_present(self):
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)
        self.assertInHTML("Mes licornes", str(response.content))
        self.assertInHTML("Trajet", str(response.content))

    def test_bouton_ajouter_present(self):
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)
        self.assertTrue("+ Ajouter une licorne" in str(response.content))

    def test_div_map_present(self):
        # The page must contain the <div id="map"> placeholder for the map.
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        divs = soup.find_all("div")
        div_map_in_divs = False
        for d in divs:
            if d.has_attr("id") and d["id"] == "map":
                div_map_in_divs = True
        self.assertTrue(div_map_in_divs)

    def test_liens_vers_licornes_presents(self):
        # At least one link must point to a licorne detail page.
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        a = soup.find_all("a")
        lien_vers_1_dans_liens = False
        for l in a:
            if "licorne/1" in l["href"]:
                lien_vers_1_dans_liens = True
                break
        self.assertTrue(lien_vers_1_dans_liens)

    def test_aucune_licorne_nest_active(self):
        # On the index page no licorne link should be marked "active".
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        a = soup.find_all("a")
        active_in_a_class = 0
        for l in a:
            if l.has_attr("class"):
                classes = l["class"]
                if "active" in classes:
                    active_in_a_class += 1
        self.assertFalse(active_in_a_class)

    def test_pas_de_polyline(self):
        # No route is drawn on the index page (no Google Maps polyline).
        response = self.client.get(reverse('index'))
        self.assertFalse("google.maps.Polyline" in str(response.content))
class AddViewTest(TestCase):
    """Tests for the 'add a licorne' form view (GET rendering and POST validation)."""

    @classmethod
    def setUpTestData(cls):
        cls.identifiant_existant = "777"
        cls.identifiant_inexistant = "666"
        User.objects.create(username=f"kuala")
        u = User.objects.get(username=f"kuala")
        Licorne.objects.create(
            nom=f'Licorne de {u}',
            identifiant=f'{cls.identifiant_existant}',
            createur=u,
        )
        cls.u = u
        cls.l = Licorne.objects.get(identifiant=cls.identifiant_existant)

    def test_view_url_exists_at_desired_location(self):
        response = self.client.get('/licornes/add/')
        self.assertEqual(response.status_code, 200)

    def test_view_url_accessible_by_name(self):
        response = self.client.get(reverse('add'))
        self.assertEqual(response.status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('add'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'licornes/licorne_form.html')

    def test_view_titre(self):
        response = self.client.get(reverse('add'))
        self.assertEqual(response.status_code, 200)
        self.assertTrue("Ajouter une licorne" in str(response.content))

    def test_view_fields_presents(self):
        # The form exposes Nom/Identifiant/Image but not a Photo field,
        # and the add button itself is absent from the add page.
        response = self.client.get(reverse('add'))
        self.assertEqual(response.status_code, 200)
        self.assertTrue("Nom" in str(response.content))
        self.assertTrue("Identifiant" in str(response.content))
        self.assertFalse("Photo" in str(response.content))
        self.assertTrue("Image" in str(response.content))
        self.assertFalse("+ Ajouter une licorne" in str(response.content))

    def test_redirects_to_etape_on_success(self):
        # A valid POST (with an image upload) redirects to the etape form.
        with open(os.path.join("licornes/tests", "image-test.jpg"), "rb") as i:
            response = self.client.post(reverse('add'), {"nom": "Bouou", "identifiant": self.identifiant_inexistant, "createur": self.u.id, "image": i})
        self.assertRedirects(response, reverse('etape', args=[self.identifiant_inexistant]))

    def test_nom_ne_peut_pas_etre_vide(self):
        response = self.client.post(reverse('add'), {"nom": "", "identifiant": self.identifiant_inexistant, "createur": self.u.id})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'nom', 'Ce champ est obligatoire.')

    def test_identifiant_ne_peut_pas_etre_vide(self):
        response = self.client.post(reverse('add'), {"nom": "UIOU", "identifiant": "", "createur": self.u.id})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'identifiant', 'Ce champ est obligatoire.')

    def test_champ_image_peut_etre_vide(self):
        response = self.client.post(reverse('add'), {"nom": "Bouou", "identifiant": self.identifiant_inexistant, "createur": self.u.id, "image": ""})
        self.assertRedirects(response, reverse('etape', args=[self.identifiant_inexistant]))

    def test_champ_image_doit_etre_une_image(self):
        # Uploading a text file in the image field triggers a form error.
        with open(os.path.join("licornes/tests", "spam.txt"), "r") as i:
            response = self.client.post(reverse('add'), {"nom": "Bouou", "identifiant": self.identifiant_inexistant, "createur": self.u.id, "image": i})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'image', "Téléversez une image valide. Le fichier que vous avez transféré n'est pas une image ou bien est corrompu.")
class EtapeViewTest(TestCase):
    """Tests for the 'add an etape' view, with and without a licorne identifier."""

    @classmethod
    def setUpTestData(cls):
        cls.identifiant_existant = "777"
        cls.identifiant_inexistant = "666"
        User.objects.create(username=f"kuala")
        u = User.objects.get(username=f"kuala")
        Licorne.objects.create(
            nom=f'Licorne de {u}',
            identifiant=f'{cls.identifiant_existant}',
            createur=u,
        )
        cls.u = u
        cls.l = Licorne.objects.get(identifiant=cls.identifiant_existant)

    # The argument-less URL form is no longer supported.
    def test_view_url_returns_404_if_no_licorne(self):
        response = self.client.get('/licornes/etape/')
        self.assertEqual(response.status_code, 404)

    def test_view_url_by_name_404_if_no_licorne(self):
        response = self.client.get(reverse('etape'))
        self.assertEqual(response.status_code, 404)

    # URL form with a licorne identifier argument.
    def test_view_url_exists_at_desired_location(self):
        response = self.client.get('/licornes/etape/%s/' % (self.identifiant_existant))
        self.assertEqual(response.status_code, 200)

    def test_view_url_accessible_by_name(self):
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'licornes/etape_form.html')

    def test_view_titre(self):
        licorne = Licorne.objects.get(identifiant=self.identifiant_existant)
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)
        soup = BeautifulSoup(response.content, features="html.parser")
        h1 = soup.h1.string
        self.assertEqual(h1, "Ajouter une étape pour %s" % (licorne))

    def test_view_fields_presents(self):
        # Visible fields: localisation, auteur, media; "current" is hidden
        # from the form and the licorne travels as a hidden input.
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)
        soup = BeautifulSoup(response.content, features="html.parser")
        lbls = soup.find_all("label")
        labels = []
        for l in lbls:
            labels.append(l["for"])
        self.assertTrue("id_localisation" in labels)
        self.assertFalse("id_current" in labels)
        self.assertTrue("id_auteur" in labels)
        self.assertTrue("id_media" in labels)
        # Hidden input field carrying the licorne.
        inputs = soup.find_all("input")
        licorne_in_hidden_field = False
        for i in inputs:
            if i["type"] == "hidden" and i["name"] == "licorne":
                licorne_in_hidden_field = True
                break
        self.assertTrue(licorne_in_hidden_field)

    def test_view_autocomplete_present(self):
        # The page must load the location autocomplete script.
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)
        soup = BeautifulSoup(response.content, features="html.parser")
        scripts = soup.find_all("script")
        autocomplete_in_src = False
        for s in scripts:
            if s.has_attr("src"):
                src = s["src"]
                if "autocomplete.js" in src:
                    autocomplete_in_src = True
        self.assertTrue(autocomplete_in_src)

    def test_view_creer_si_inexistante(self):
        # If the supplied identifier does not match an existing licorne,
        # the view offers to create one instead.
        response = self.client.get(reverse('etape', args=[self.identifiant_inexistant]))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'licornes/creer.html')
        soup = BeautifulSoup(response.content, features="html.parser")
        t = soup.title
        self.assertTrue("J'irai où tu iras" in t)
        h1 = soup.h1.string
        self.assertTrue("Licorne inexistante" in h1)
        a = soup.find_all("a")
        add_in_href = False
        for l in a:
            if "/add" in l["href"]:
                add_in_href = True
        self.assertTrue(add_in_href)
        self.assertTrue(f"{self.identifiant_inexistant}" in str(response.content))

    def test_form_etape_valeur_initiale_licorne(self):
        # The form is pre-populated with the licorne from the URL.
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)
        licorne = Licorne.objects.get(identifiant=self.identifiant_existant)
        self.assertEqual(response.context['form'].initial['licorne'], licorne)

    def test_redirects_to_index_on_success(self):
        response = self.client.post(reverse('etape', args=[self.l.identifiant]), {"localisation": "Pau, France", "auteur": self.u.id, "media": "Tagalok", "licorne": self.l.id})
        self.assertRedirects(response, reverse('index'))

    def test_form_invalid_licorne(self):
        wrong_id = 78787897873
        response = self.client.post(reverse('etape', args=[self.l.identifiant]), {"localisation": "Pau, France", "auteur": self.u.id, "media": "Tagalok", "licorne": wrong_id})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'licorne', 'Sélectionnez un choix valide. Ce choix ne fait pas partie de ceux disponibles.')

    def test_form_invalid_localisation(self):
        response = self.client.post(reverse('etape', args=[self.l.identifiant]), {"localisation": "", "auteur": self.u.id, "media": "Tagalok", "licorne": self.l.id})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'localisation', 'Ce champ est obligatoire.')

    def test_form_invalid_auteur(self):
        wrong_id = 78787897873
        response = self.client.post(reverse('etape', args=[self.l.identifiant]), {"localisation": "Pau, France", "auteur": wrong_id, "media": "Tagalok", "licorne": self.l.id})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'auteur', 'Sélectionnez un choix valide. Ce choix ne fait pas partie de ceux disponibles.')
class LicorneViewTest(TestCase):
    """Tests for the licorne detail view (list with one active entry, map, images)."""

    @classmethod
    def setUpTestData(cls):
        # Create a few users and give each of them several licornes.
        number_of_creators = 2
        number_of_licornes = 3
        cls.total_licornes = number_of_creators * number_of_licornes
        cls.licornes_de_test = []
        for user_id in range(number_of_creators):
            User.objects.create(username=f"utilisateur {user_id}")
            u = User.objects.get(username=f"utilisateur {user_id}")
            for licorne_id in range(number_of_licornes):
                Licorne.objects.create(
                    nom=f'Licorne {licorne_id} de {user_id}',
                    identifiant=f'{user_id}-{licorne_id}',
                    createur=u,
                    image=f'{licorne_id}.png',
                )
                cls.licornes_de_test.append(Licorne.objects.latest("id"))

    def test_view_url_exists_at_desired_location(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(f'/licornes/licorne/{id_lic}/')
        self.assertEqual(response.status_code, 200)

    def test_view_url_redirected_if_no_trailing_slash(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(f'/licornes/licorne/{id_lic}')
        self.assertEqual(response.status_code, 301)

    def test_view_url_accessible_by_name(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        self.assertEqual(response.status_code, 200)

    def test_view_uses_correct_template(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'licornes/licorne.html')

    def test_licornes_are_present(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        self.assertEqual(response.status_code, 200)
        self.assertTrue('meslicornes' in response.context)
        self.assertTrue(len(response.context['meslicornes']) == self.total_licornes)
        self.assertTrue("Licorne 0 de 0" in str(response.content))

    def test_titres_present(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        self.assertEqual(response.status_code, 200)
        self.assertTrue("Mes licornes" in str(response.content))
        self.assertInHTML("Trajet", str(response.content))

    def test_bouton_ajouter_present(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        self.assertEqual(response.status_code, 200)
        self.assertTrue("+ Ajouter une licorne" in str(response.content))

    def test_div_map_present(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        soup = BeautifulSoup(response.content, features="html.parser")
        divs = soup.find_all("div")
        div_map_in_divs = False
        for d in divs:
            if d.has_attr("id") and d["id"] == "map":
                div_map_in_divs = True
        self.assertTrue(div_map_in_divs)

    def test_liens_vers_licornes_presents(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        soup = BeautifulSoup(response.content, features="html.parser")
        a = soup.find_all("a")
        lien_vers_1_dans_liens = False
        for l in a:
            if "licorne/1" in l["href"]:
                lien_vers_1_dans_liens = True
                break
        self.assertTrue(lien_vers_1_dans_liens)

    def test_une_licorne_est_active(self):
        # Exactly one licorne link is highlighted on a detail page.
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        soup = BeautifulSoup(response.content, features="html.parser")
        a = soup.find_all("a")
        active_in_a_class = 0
        for l in a:
            if l.has_attr("class"):
                classes = l["class"]
                if "active" in classes:
                    active_in_a_class += 1
        self.assertTrue(active_in_a_class)
        self.assertEqual(active_in_a_class, 1)

    def test_licornes_ont_badge(self):
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        h2s = soup.find_all("h2")
        badges_de_licornes = 0
        for h2 in h2s:
            if h2.span and "badge" in h2.span["class"]:
                badges_de_licornes += 1
        self.assertTrue(badges_de_licornes)
        self.assertEqual(badges_de_licornes, self.total_licornes)

    def test_licornes_ont_image(self):
        # Every list entry carries an <img> whose file name matches the
        # licorne's number.
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        lics = soup.find_all(attrs={"class": "list-group-item"})
        lic_img = 0
        bons_noms_dimages = 0
        for l in lics:
            # Bug fix: re.M was being passed as re.sub's positional *count*
            # argument (count=8), not as flags; pass it by keyword.
            numero = re.sub("Licorne ([0-9]+).*", "\\1", l.h2.text, flags=re.M)[0:4].strip()
            if l.img:
                lic_img += 1
                if os.path.basename(l.img["src"]) == f'{numero}.png':
                    bons_noms_dimages += 1
        self.assertTrue(lic_img)
        self.assertTrue(bons_noms_dimages)
        self.assertEqual(lic_img, len(lics))
        self.assertEqual(bons_noms_dimages, len(lics))
class MediaViewTest(TestCase):
    """Tests for the media view of an etape (reached by etape id)."""

    @classmethod
    def setUpTestData(cls):
        cls.identifiant_existant = "777"
        cls.identifiant_inexistant = "666"
        User.objects.create(username=f"kuala")
        u = User.objects.get(username=f"kuala")
        Licorne.objects.create(
            nom=f'Licorne de {u}',
            identifiant=f'{cls.identifiant_existant}',
            createur=u,
        )
        l = Licorne.objects.get(nom=f'Licorne de {u}')
        # Three etapes so the view has a real itinerary to draw on.
        e0 = Etape.objects.create(licorne=l, auteur=u, localisation="Paris, France")
        e0.save()
        e1 = Etape.objects.create(licorne=l, auteur=u, localisation="Berlin, Allemagne")
        e1.save()
        e2 = Etape.objects.create(licorne=l, auteur=u, localisation="San Francisco")
        e2.save()

    # URL form with an etape-id argument.
    def test_view_url_exists_at_desired_location(self):
        e1 = Etape.objects.get(localisation="Berlin, Allemagne")
        u = '/licornes/media/%s/' % (e1.id)
        response = self.client.get(u)
        self.assertEqual(response.status_code, 200)

    def test_view_url_accessible_by_name(self):
        e1 = Etape.objects.get(localisation="Berlin, Allemagne")
        response = self.client.get(reverse('media', args=[e1.id]))
        self.assertEqual(response.status_code, 200)

    def test_404_if_nonexistant_id(self):
        response = self.client.get(reverse('media', args=[11111111]))
        self.assertEqual(response.status_code, 404)
|
# Challenge 11: compute the paint needed for a wall (1 L covers 2 m²).
print('======= DESAFIO 11 =======')
largura = float(input('Largura da parede (m): '))
altura = float(input('Altura da parede (m): '))
area = largura * altura
tinta = area / 2
print('Para pintar uma parede de área igual a {} m², será(ão) necessário(s) {} L de tinta!'.format(area, tinta))
|
#####################################################################
##### IMPORT STANDARD MODULES
#####################################################################
from __future__ import print_function
from ..data import DataBlock
from ..preprocess import PreProcess
import pandas as pd
import numpy as np
from sklearn.datasets import load_iris
from random import sample
#####################################################################
##### TESTS FOR DATABLOCK
#####################################################################
def test_datablock(datablock):
    """The datablock fixture exposes three 150x5 iris frames."""
    assert datablock.train.shape == (150, 5)
    assert datablock.test.shape == (150, 5)
    assert datablock.predict.shape == (150, 5)
#####################################################################
##### TESTS FOR PREPROCESS
#####################################################################
def test_check_missing_no_missing(datablock):
    """check_missing reports zero missing values on the clean iris data."""
    preprocess = PreProcess(datablock)
    missing_report = preprocess.check_missing(printResult=False, returnResult=True)
    for frame_name, missing_counts in missing_report.items():
        print(frame_name, missing_counts)
        assert missing_counts.sum() == 0
def test_check_missing_missing_induced(datablock):
    """NaNs injected into column 0 of every frame show up in the report."""
    num_miss = 25
    frame = pd.DataFrame(datablock.train, copy=True)
    preprocess = PreProcess(DataBlock(frame, frame, frame, 'target'))
    # Knock out num_miss random rows in the first column of each frame.
    for data in preprocess.datablock.data_present().values():
        data.iloc[sample(range(150), num_miss), 0] = np.nan
    missing_report = preprocess.check_missing(printResult=False, returnResult=True)
    for missing_counts in missing_report.values():
        assert missing_counts.sum() == num_miss
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import datetime
from threading import Thread, current_thread
from modules.databridge.agent import *
from config import Config
from modules.util import cartridgeagentutils
from exception import DataPublisherException
import constants
class LogPublisher(Thread):
    """
    Tails a single log file and publishes every newly appended line as a
    thrift event to the monitoring (BAM/CEP) server until terminate() is called.
    """

    def __init__(self, file_path, stream_definition, tenant_id, alias, date_time, member_id):
        Thread.__init__(self)
        self.log = LogFactory().get_log(__name__)
        self.file_path = file_path
        # Publishes over the monitoring server's secure thrift port using the
        # shared DataPublisherConfiguration singleton.
        self.thrift_publisher = ThriftPublisher(
            DataPublisherConfiguration.get_instance().monitoring_server_ip,
            DataPublisherConfiguration.get_instance().monitoring_server_secure_port,
            DataPublisherConfiguration.get_instance().admin_username,
            DataPublisherConfiguration.get_instance().admin_password,
            stream_definition)
        self.tenant_id = tenant_id
        self.alias = alias
        self.date_time = date_time
        self.member_id = member_id
        self.terminated = False

    def run(self):
        """
        Tail self.file_path, publishing each new line until terminated.

        :raises DataPublisherException: if the file is missing or unreadable.
        """
        if os.path.isfile(self.file_path) and os.access(self.file_path, os.R_OK):
            self.log.info("Starting log publisher for file: " + self.file_path + ", thread: " + str(current_thread()))
            # open file and keep reading for new entries; the context manager
            # guarantees the handle is closed on every exit path (the original
            # opened the file and never closed it).
            with open(self.file_path, "r") as read_file:
                read_file.seek(os.stat(self.file_path)[6])  # go to the end of the file
                while not self.terminated:
                    where = read_file.tell()  # where the seeker is in the file
                    line = read_file.readline()  # read the current line
                    if not line:
                        # no new line entered
                        self.log.debug("No new log entries detected to publish.")
                        time.sleep(1)
                        read_file.seek(where)  # set seeker
                    else:
                        # new line detected, create event object; the append
                        # order must match LogPublisherManager.define_stream
                        self.log.debug("Log entry/entries detected. Publishing to monitoring server.")
                        event = ThriftEvent()
                        event.metaData.append(self.member_id)
                        event.payloadData.append(self.tenant_id)   # tenantID
                        event.payloadData.append(self.alias)       # serverName
                        event.payloadData.append("")               # appName
                        event.payloadData.append(self.date_time)   # logTime
                        event.payloadData.append("")               # priority
                        event.payloadData.append(line)             # message
                        event.payloadData.append("")               # logger
                        event.payloadData.append("")               # ip
                        event.payloadData.append(self.member_id)   # instance
                        event.payloadData.append("")               # stacktrace
                        self.thrift_publisher.publish(event)
                        self.log.debug("Log event published.")
            self.thrift_publisher.disconnect()  # disconnect the publisher upon being terminated
            self.log.debug("Log publisher for path \"%s\" terminated" % self.file_path)
        else:
            raise DataPublisherException("Unable to read the file at path \"%s\"" % self.file_path)

    def terminate(self):
        """
        Allows the LogPublisher thread to be terminated to stop publishing to BAM/CEP. Allow a minimum of 1 second delay
        to take effect.
        """
        self.terminated = True
class LogPublisherManager(Thread):
    """
    A log publishing thread management thread which maintains a log publisher for each log file. Also defines a stream
    definition and the BAM/CEP server information for a single publishing context.
    """

    @staticmethod
    def define_stream(tenant_id, alias, date_time):
        """
        Creates a stream definition for Log Publishing

        :return: A StreamDefinition object with the required attributes added
        :rtype : StreamDefinition
        """
        # stream definition
        stream_definition = StreamDefinition()
        # str() guards against non-string ids coming from Config
        stream_name = "logs." + str(tenant_id) + "." + str(alias) + "." + str(date_time)
        stream_version = "1.0.0"
        stream_nickname = "log entries from instance"
        stream_description = "Apache Stratos Instance Log Publisher"

        stream_definition.name = stream_name
        stream_definition.version = stream_version
        stream_definition.description = stream_description
        stream_definition.nickname = stream_nickname
        # attribute order must match the payload append order in LogPublisher.run
        stream_definition.add_metadata_attribute("memberId", StreamDefinition.STRING)
        stream_definition.add_payloaddata_attribute("tenantID", StreamDefinition.STRING)
        stream_definition.add_payloaddata_attribute("serverName", StreamDefinition.STRING)
        stream_definition.add_payloaddata_attribute("appName", StreamDefinition.STRING)
        stream_definition.add_payloaddata_attribute("logTime", StreamDefinition.STRING)
        stream_definition.add_payloaddata_attribute("priority", StreamDefinition.STRING)
        stream_definition.add_payloaddata_attribute("message", StreamDefinition.STRING)
        stream_definition.add_payloaddata_attribute("logger", StreamDefinition.STRING)
        stream_definition.add_payloaddata_attribute("ip", StreamDefinition.STRING)
        stream_definition.add_payloaddata_attribute("instance", StreamDefinition.STRING)
        stream_definition.add_payloaddata_attribute("stacktrace", StreamDefinition.STRING)
        return stream_definition

    def __init__(self, logfile_paths):
        Thread.__init__(self)
        self.log = LogFactory().get_log(__name__)
        self.logfile_paths = logfile_paths
        self.publishers = {}          # log file path -> LogPublisher
        self.ports = []
        self.ports.append(DataPublisherConfiguration.get_instance().monitoring_server_port)
        self.ports.append(DataPublisherConfiguration.get_instance().monitoring_server_secure_port)

        # Fail fast if the monitoring server cannot be reached within the
        # configured timeout.
        self.log.debug("Checking if Monitoring server is active.")
        ports_active = cartridgeagentutils.wait_until_ports_active(
            DataPublisherConfiguration.get_instance().monitoring_server_ip,
            self.ports,
            int(Config.port_check_timeout))

        if not ports_active:
            self.log.debug("Monitoring server is not active")
            raise DataPublisherException("Monitoring server not active, data publishing is aborted")

        self.log.debug("Monitoring server is up and running. Log Publisher Manager started.")

        self.tenant_id = LogPublisherManager.get_valid_tenant_id(Config.tenant_id)
        self.alias = LogPublisherManager.get_alias(Config.cluster_id)
        self.date_time = LogPublisherManager.get_current_date()

        self.stream_definition = self.define_stream(self.tenant_id, self.alias, self.date_time)

    def run(self):
        """Start one LogPublisher thread per configured log file path."""
        if self.logfile_paths is not None and len(self.logfile_paths):
            for log_path in self.logfile_paths:
                # thread for each log file
                publisher = self.get_publisher(log_path)
                publisher.start()
                self.log.debug("Log publisher for path \"%s\" started." % log_path)

    def get_publisher(self, log_path):
        """
        Retrieve the publisher for the specified log file path. Creates a new LogPublisher if one is not available

        :return: The LogPublisher object
        :rtype : LogPublisher
        """
        if log_path not in self.publishers:
            self.log.debug("Creating a Log publisher for path \"%s\"" % log_path)
            self.publishers[log_path] = LogPublisher(
                log_path,
                self.stream_definition,
                self.tenant_id,
                self.alias,
                self.date_time,
                Config.member_id)
        return self.publishers[log_path]

    def terminate_publisher(self, log_path):
        """
        Terminates the LogPublisher thread associated with the specified log file
        """
        if log_path in self.publishers:
            self.publishers[log_path].terminate()

    def terminate_all_publishers(self):
        """
        Terminates all LogPublisher threads
        """
        # Iterate the LogPublisher objects, not the dict's path keys — the
        # original iterated keys and called terminate() on strings.
        for publisher in self.publishers.values():
            publisher.terminate()

    @staticmethod
    def get_valid_tenant_id(tenant_id):
        """Map invalid/super-tenant ids to "0"; pass all others through."""
        if tenant_id == constants.INVALID_TENANT_ID or tenant_id == constants.SUPER_TENANT_ID:
            return "0"
        return tenant_id

    @staticmethod
    def get_alias(cluster_id):
        """
        Return the portion of the cluster id before the first dot.

        Falls back to the original value when it is not a string (e.g. None).
        """
        try:
            # str.split takes a literal separator, not a regex; the original
            # used "\\." (backslash + dot, a Java-regex leftover) which never
            # matched, so the alias was never actually extracted.
            alias = cluster_id.split(".")[0]
        except AttributeError:
            alias = cluster_id
        return alias

    @staticmethod
    def get_current_date():
        """
        Returns the current date formatted as yyyy-MM-dd

        :return: Formatted date string
        :rtype : str
        """
        return datetime.date.today().strftime(constants.DATE_FORMAT)
class DataPublisherConfiguration:
    """
    A singleton implementation to access configuration information for data publishing to BAM/CEP
    TODO: get rid of this
    """
    __instance = None
    log = LogFactory().get_log(__name__)

    @staticmethod
    def get_instance():
        """
        Singleton instance retriever

        :return: Instance
        :rtype : DataPublisherConfiguration
        """
        if DataPublisherConfiguration.__instance is None:
            DataPublisherConfiguration.__instance = DataPublisherConfiguration()
        return DataPublisherConfiguration.__instance

    def __init__(self):
        # Defaults before read_config() populates the real values.
        self.enabled = False
        self.monitoring_server_ip = None
        self.monitoring_server_port = None
        self.monitoring_server_secure_port = None
        self.admin_username = None
        self.admin_password = None
        self.read_config()

    @staticmethod
    def _read_required(name):
        """Read a required property; raise RuntimeError when it is absent."""
        value = Config.read_property(name, False)
        if value is None:
            raise RuntimeError("System property not found: " + name)
        return value

    def read_config(self):
        """Populate all monitoring-server settings from Config, failing fast on missing ones."""
        self.enabled = Config.read_property(constants.MONITORING_PUBLISHER_ENABLED, False)
        if not self.enabled:
            DataPublisherConfiguration.log.info("Data Publisher disabled")
            return
        DataPublisherConfiguration.log.info("Data Publisher enabled")
        self.monitoring_server_ip = self._read_required(constants.MONITORING_RECEIVER_IP)
        self.monitoring_server_port = self._read_required(constants.MONITORING_RECEIVER_PORT)
        # NOTE(review): literal key here while siblings use constants — confirm
        # whether a matching constant exists for the secure port.
        self.monitoring_server_secure_port = self._read_required("monitoring.server.secure.port")
        self.admin_username = self._read_required(constants.MONITORING_SERVER_ADMIN_USERNAME)
        self.admin_password = self._read_required(constants.MONITORING_SERVER_ADMIN_PASSWORD)
        DataPublisherConfiguration.log.info("Data Publisher configuration initialized")
|
#!/usr/bin/python3
def safe_print_list(my_list=(), x=0):
    """Print the first x elements of my_list on one line, then a newline.

    Stops early when x exceeds the number of elements (the IndexError is
    swallowed). The default is an immutable tuple instead of the original
    mutable ``[]`` — indexing behavior is identical, without the shared
    mutable-default pitfall.

    Args:
        my_list: sequence whose elements are printed.
        x: number of elements to attempt to print.

    Returns:
        The number of elements actually printed.
    """
    printed = 0
    for i in range(x):
        try:
            print("{}".format(my_list[i]), end="")
            printed += 1
        except IndexError:
            # Ran off the end: stop printing; the shared tail below emits the
            # newline and returns the count (deduplicates the original's two
            # identical print/return tails).
            break
    print("")
    return printed
|
import logging
import numpy as np
import matplotlib.pyplot as plt
from sklearn.neighbors import NearestNeighbors
import anatomist.api as anatomist
from soma import aims
import colorado as cld
"""Inspired from lightly https://docs.lightly.ai/tutorials/package/tutorial_simclr_clothing.html
"""
log = logging.getLogger(__name__)
def get_image_as_np_array(filename: str):
    """Load the image file at *filename* and return it as a numpy array."""
    # PIL was never imported at module level, so every call raised NameError;
    # import locally to keep the fix contained to this helper.
    from PIL import Image

    img = Image.open(filename)
    return np.asarray(img)
def get_input(dataset, filenames, idx):
    """Return view number *idx*; each dataset entry stores two views.

    Entry idx // 2 of the dataset yields (views, filename); the view at
    idx % 2 is returned. A mismatch against filenames[idx] is logged.
    """
    views, stored_name = dataset[idx // 2]
    if stored_name != filenames[idx]:
        log.error("filenames dont match: {} != {}".format(stored_name, filenames[idx]))
    return views[idx % 2]
def plot_knn_examples(embeddings, filenames, dataset, n_neighbors=3, num_examples=6):
    """Show num_examples random samples, each beside its nearest neighbors.

    One matplotlib figure is created per sampled embedding; each column shows
    the middle slice of a neighbor's 3D view, titled with its distance.
    """
    # Exact kNN index over the embeddings (scikit-learn).
    knn = NearestNeighbors(n_neighbors=n_neighbors).fit(embeddings)
    distances, indices = knn.kneighbors(embeddings)

    # Pick num_examples distinct rows to display.
    chosen_rows = np.random.choice(
        len(indices), size=num_examples, replace=False)

    for row in chosen_rows:
        figure = plt.figure()
        neighbor_ids = indices[row]
        for col, neighbor_idx in enumerate(neighbor_ids):
            axis = figure.add_subplot(1, len(neighbor_ids), col + 1)
            # Recover the raw input view for this neighbor.
            view = get_input(dataset, filenames, neighbor_idx)
            # Middle slice along the first spatial axis of the 3D view.
            plt.imshow(view[0, view.shape[1] // 2, :, :].numpy())
            axis.set_title(f'd={distances[row][col]:.3f}')
            plt.axis('off')
    plt.show()
def create_array_without_hull(view):
    """Return a binarised copy of view[0] that isolates the original background.

    view[0] is taken as a 3D array (via .numpy()) where the value 11 marks the
    hull label and values > 1 other than 11 mark sulci. Sulcus voxels with a
    hull voxel among their six face-neighbors are absorbed into the hull, then
    the array is binarised: voxels that were 0 become 10, everything else 0.

    NOTE(review): the neighbor lookups (k-1/k+1, i-1/i+1, j-1/j+1) have no
    bounds checks — a sulcus voxel on the last plane/row/column would raise
    IndexError, and index -1 silently wraps to the opposite side; presumably
    inputs always carry a padding margin — confirm with callers.
    NOTE(review): voxels promoted to 11 in-place can influence the neighbor
    test of later voxels in the same pass; this order dependence is why the
    loop must stay exactly as written.
    """
    im2 = view[0,:].numpy()
    for k in range(len(im2)):
        for i in range(len(im2[k])):
            for j in range(len(im2[k][i])):
                vox = im2[k][i][j]
                if vox>1 and vox != 11: # we are on a sulcus
                    # absorb into the hull if any of the 6 face-neighbors is hull (11)
                    if im2[k-1][i][j]==11 or im2[k+1][i][j]==11 or im2[k][i-1][j]==11 or im2[k][i+1][j]==11 or im2[k][i][j-1]==11 or im2[k][i][j+1]==11:
                        im2[k][i][j]=11
    # Binarise: former background (0) -> 10, all remaining labels -> 0.
    im2[im2==0] = 10
    im2[im2!=10] =0
    return im2
def create_mesh_from_array(im):
    """Wrap the numpy array in an aims Volume and convert it to a mesh."""
    volume = aims.Volume(im)
    return cld.aims_tools.volume_to_mesh(volume)
def plot_knn_meshes(embeddings, filenames, dataset, n_neighbors=3, num_examples=6):
    """Render the mesh of input 0 in an Anatomist 3D window.

    The kNN structure over the embeddings is computed exactly like in
    plot_knn_examples, but the per-sample neighbor grid is not implemented
    yet: only the mesh of the first input is displayed for now.
    """
    # Exact kNN index over the embeddings (scikit-learn).
    knn = NearestNeighbors(n_neighbors=n_neighbors).fit(embeddings)
    distances, indices = knn.kneighbors(embeddings)

    # Sampled for parity with plot_knn_examples (also keeps the global numpy
    # RNG stream identical); the display below does not use it yet.
    samples_idx = np.random.choice(
        len(indices), size=num_examples, replace=False)

    ana = anatomist.Anatomist()
    first_view = get_input(dataset, filenames, 0)
    hull_array = create_array_without_hull(first_view)
    mesh = create_mesh_from_array(hull_array)
    window = ana.createWindow('3D')
    mesh_object = ana.toAObject(mesh)
    ana.addObjects(mesh_object, window)
    # TODO: lay out each sampled input beside its n_neighbors nearest meshes
    # in an AWindowsBlock grid, titled with the neighbor distances
    # (mirroring plot_knn_examples).
if __name__ == "__main__":
    # Smoke-test entry point with random embeddings.
    n_samples = 20
    n_features = 10
    embeddings = np.random.rand(n_samples, n_features)
    # NOTE(review): plot_knn_examples also requires `filenames` and `dataset`
    # positional arguments — this call raises TypeError as written. Supply a
    # real dataset/filename list or remove this demo block.
    plot_knn_examples(embeddings)
|
HANZI_BREAKER_MAP = {
'卧': ('臣', '卜'),
'项': ('工', '页'),
'功': ('工', '力'),
'攻': ('工', '攵'),
'荆': ('茾', '刂'),
'邪': ('牙', '阝'),
'雅': ('牙', '隹'),
'期': ('其', '月'),
'欺': ('其', '欠'),
'斯': ('其', '斤'),
'鞭': ('革', '便'),
'勒': ('革', '力'),
'划': ('戈', '刂'),
'敬': ('苟', '攵'),
'鹳': ('雚', '鸟'),
'欧': ('区', '欠'),
'切': ('七', '刀'),
'鞋': ('革', '圭'),
'鄞': ('堇', '阝'),
'勤': ('堇', '力'),
'陌': ('阝', '百'),
'陈': ('阝', '东'),
'隐': ('阝', '急'),
'降': ('阝', '夅'),
'队': ('阝', '人'),
'防': ('阝', '方'),
'院': ('阝', '完'),
'阳': ('阝', '日'),
'际': ('阝', '示'),
'阴': ('阝', '月'),
'除': ('阝', '余'),
'险': ('阝', '佥'),
'隔': ('阝', '鬲'),
'障': ('阝', '章'),
'阶': ('阝', '介'),
'陀': ('阝', '它'),
'阵': ('阝', '车'),
'阿': ('阝', '可'),
'隘': ('阝', '益'),
'陵': ('阝', '夌'),
'陷': ('阝', '臽'),
'陶': ('阝', '匋'),
'陪': ('阝', '咅'),
'陕': ('阝', '夹'),
'陆': ('阝', '击'),
'阻': ('阝', '且'),
'阳': ('阝', '日'),
'孙': ('子', '小'),
'孔': ('子', '乚'),
'孩': ('子', '亥'),
'孤': ('子', '瓜'),
'职': ('耳', '只'),
'聩': ('耳', '贵'),
'聘': ('耳', '甹'),
'取': ('耳', '又'),
'聊': ('耳', '卯'),
'聪': ('耳', '总'),
'耻': ('耳', '止'),
'联': ('耳', '关'),
'聆': ('耳', '令'),
'耿': ('耳', '火'),
'耽': ('耳', '冘'),
'预': ('予', '页'),
'豫': ('予', '象'),
'双': ('又', '又'),
'对': ('又', '寸'),
'戏': ('又', '戈'),
'欢': ('又', '欠'),
'观': ('又', '见'),
'难': ('又', '隹'),
'鸡': ('又', '鸟'),
'艰': ('又', '艮'),
'驻': ('马', '主'),
'骚': ('马', '蚤'),
'驯': ('马', '川'),
'骆': ('马', '各'),
'骑': ('马', '奇'),
'驱': ('马', '区'),
'驰': ('马', '也'),
'骇': ('马', '亥'),
'驶': ('马', '史'),
'验': ('马', '佥'),
'骏': ('马', '夋'),
'骄': ('马', '乔'),
'驴': ('马', '户'),
'骤': ('马', '聚'),
'驳': ('马', '爻'),
'胡': ('古', '月'),
'故': ('古', '攵'),
'鸪': ('古', '鸟'),
'郁': ('有', '阝'),
'耐': ('而', '寸'),
'肆': ('镸', '聿'),
'雄': ('厷', '隹'),
'励': ('厉', '力'),
'耗': ('耒', '毛'),
'艳': ('丰', '色'),
'耕': ('耒', '井'),
'确': ('石', '角'),
'破': ('石', '皮'),
'础': ('石', '出'),
'碑': ('石', '卑'),
'研': ('石', '开'),
'碎': ('石', '卒'),
'碾': ('石', '展'),
'硕': ('石', '页'),
'磁': ('石', '兹'),
'碟': ('石', '枼'),
'砸': ('石', '匝'),
'碌': ('石', '录'),
'砖': ('石', '专'),
'碗': ('石', '宛'),
'砰': ('石', '平'),
'磕': ('石', '盍'),
'硬': ('石', '更'),
'砍': ('石', '欠'),
'碰': ('石', '並'),
'码': ('石', '马'),
'砌': ('石', '切'),
'彩': ('采', '彡'),
'乳': ('孚', '乚'),
'须': ('彡', '页'),
'助': ('且', '力'),
'肢': ('月', '支'),
'朦': ('月', '蒙'),
'鹏': ('月', '月', '鸟'),
'脱': ('月', '兑'),
'朋': ('月', '月'),
'胜': ('月', '生'),
'股': ('月', '殳'),
'脚': ('月', '去', '卩'),
'腊': ('月', '昔'),
'腋': ('月', '夜'),
'脉': ('月', '永'),
'胸': ('月', '匈'),
'脂': ('月', '旨'),
'肤': ('月', '夫'),
'脾': ('月', '卑'),
'脆': ('月', '危'),
'胆': ('月', '旦'),
'肚': ('月', '土'),
'脏': ('月', '庄'),
'膀': ('月', '旁'),
'脖': ('月', '孛'),
'胖': ('月', '半'),
'膛': ('月', '堂'),
'腕': ('月', '宛'),
'膊': ('月', '尃'),
'肝': ('月', '干'),
'腮': ('月', '思'),
'胀': ('月', '长'),
'腻': ('月', '贰'),
'肪': ('月', '方'),
'膝': ('月', '桼'),
'脯': ('月', '甫'),
'胱': ('月', '光'),
'腰': ('月', '要'),
'腺': ('月', '泉'),
'肋': ('月', '力'),
'肥': ('月', '巴'),
'腹': ('月', '复'),
'臊': ('月', '喿'),
'胶': ('月', '交'),
'腴': ('月', '臾'),
'肿': ('月', '中'),
'膨': ('月', '彭'),
'胳': ('月', '各'),
'脬': ('月', '孚'),
'肌': ('月', '几'),
'胴': ('月', '同'),
'脐': ('月', '齐'),
'胎': ('月', '台'),
'膜': ('月', '莫'),
'肛': ('月', '工'),
'肮': ('月', '亢'),
'献': ('南', '犬'),
'韬': ('韦', '舀'),
'懿': ('壹', '恣'),
'都': ('者', '阝'),
'鼓': ('壴', '支'),
'颠': ('真', '页'),
'趣': ('走', '取'),
'起': ('走', '已'),
'颉': ('吉', '页'),
'动': ('云', '力'),
'劫': ('去', '力'),
'顽': ('元', '页'),
'魂': ('云', '鬼'),
'协': ('十', '办'),
'赫': ('赤', '赤'),
'博': ('十', '尃'),
'却': ('去', '卩'),
'救': ('求', '攵'),
'教': ('孝', '攵'),
'刊': ('干', '刂'),
'勃': ('孛', '力'),
'规': ('夫', '见'),
'封': ('圭', '寸'),
'卦': ('圭', '卜'),
'埋': ('土', '里'),
'址': ('土', '止'),
'堪': ('土', '甚'),
'堤': ('土', '是'),
'坯': ('土', '丕'),
'坟': ('土', '文'),
'城': ('土', '成'),
'垢': ('土', '后'),
'坝': ('土', '贝'),
'坪': ('土', '平'),
'坎': ('土', '欠'),
'垮': ('土', '夸'),
'坏': ('土', '不'),
'地': ('土', '也'),
'境': ('土', '竟'),
'坛': ('土', '云'),
'坡': ('土', '皮'),
'块': ('土', '夬'),
'坦': ('土', '旦'),
'堆': ('土', '隹'),
'域': ('土', '或'),
'填': ('土', '真'),
'增': ('土', '曾'),
'塔': ('土', '荅'),
'垃': ('土', '立'),
'圾': ('土', '及'),
'圳': ('土', '川'),
'埃': ('土', '矣'),
'墙': ('土', '啬'),
'堵': ('土', '者'),
'均': ('土', '匀'),
'坂': ('土', '反'),
'壤': ('土', '襄'),
'静': ('青', '争'),
'靓': ('青', '见'),
'颊': ('夹', '页'),
'鹉': ('武', '鸟'),
'殉': ('歹', '旬'),
'歼': ('歹', '千'),
'鹂': ('丽', '鸟'),
'敕': ('束', '攵'),
'敷': ('旉', '攵'),
'到': ('至', '刂'),
'邳': ('丕', '阝'),
'融': ('鬲', '虫'),
'刺': ('朿', '刂'),
'赖': ('束', '负'),
'致': ('至', '攵'),
'政': ('正', '攵'),
'殊': ('歹', '朱'),
'殁': ('歹', '殳'),
'殃': ('歹', '央'),
'殓': ('歹', '佥'),
'殒': ('歹', '员'),
'刑': ('开', '刂'),
'副': ('畐', '刂'),
'斑': ('玟', '王'),
'璨': ('王', '粲'),
'现': ('王', '见'),
'环': ('王', '不'),
'理': ('王', '里'),
'球': ('王', '求'),
'珊': ('王', '册'),
'璀': ('王', '崔'),
'玩': ('王', '元'),
'瑄': ('王', '宣'),
'琅': ('王', '良'),
'瑞': ('王', '耑'),
'玻': ('王', '皮'),
'璃': ('王', '离'),
'琢': ('王', '豖'),
'珠': ('王', '朱'),
'玛': ('王', '马'),
'瑜': ('王', '俞'),
'此': ('止', '匕'),
'歧': ('止', '支'),
'颇': ('皮', '页'),
'雌': ('此', '隹'),
'龄': ('齿', '令'),
'战': ('占', '戈'),
'旧': ('丨', '日'),
'频': ('步', '页'),
'眼': ('目', '艮'),
'眦': ('目', '此'),
'睛': ('目', '青'),
'睐': ('目', '来'),
'瞬': ('目', '舜'),
'盼': ('目', '分'),
'眺': ('目', '兆'),
'瞑': ('目', '冥'),
'眶': ('目', '匡'),
'眩': ('目', '玄'),
'睡': ('目', '垂'),
'眨': ('目', '乏'),
'睫': ('目', '疌'),
'眠': ('目', '民'),
'瞄': ('目', '苗'),
'瞪': ('目', '登'),
'睬': ('目', '采'),
'盯': ('目', '丁'),
'睨': ('目', '兒'),
'瞰': ('目', '敢'),
'眯': ('目', '米'),
'睹': ('目', '者'),
'睁': ('目', '争'),
'睦': ('目', '坴'),
'眸': ('目', '牟'),
'瞧': ('目', '焦'),
'瞎': ('目', '害'),
'雎': ('目', '隹'),
'敞': ('尚', '攵'),
'辉': ('光', '军'),
'削': ('肖', '刂'),
'淋': ('沐', '木'),
'滩': ('汉', '隹'),
'没': ('氵', '殳'),
'消': ('氵', '肖'),
'润': ('氵', '闰'),
'清': ('氵', '青'),
'江': ('氵', '工'),
'涛': ('氵', '寿'),
'汪': ('氵', '王'),
'海': ('氵', '每'),
'洋': ('氵', '羊'),
'洁': ('氵', '吉'),
'洗': ('氵', '先'),
'波': ('氵', '皮'),
'深': ('氵', '罙'),
'法': ('氵', '去'),
'津': ('氵', '聿'),
'测': ('氵', '则'),
'泄': ('氵', '世'),
'漫': ('氵', '曼'),
'汉': ('氵', '又'),
'泛': ('氵', '乏'),
'游': ('氵', '斿'),
'汁': ('氵', '十'),
'溯': ('氵', '朔'),
'混': ('氵', '昆'),
'漆': ('氵', '桼'),
'沼': ('氵', '召'),
'汇': ('氵', '匚'),
'源': ('氵', '原'),
'泡': ('氵', '包'),
'滋': ('氵', '兹'),
'浅': ('氵', '戋'),
'溅': ('氵', '贱'),
'沙': ('氵', '少'),
'涵': ('氵', '函'),
'沟': ('氵', '勾'),
'洵': ('氵', '旬'),
'淆': ('氵', '肴'),
'浪': ('氵', '良'),
'澳': ('氵', '奥'),
'湾': ('氵', '弯'),
'港': ('氵', '巷'),
'汽': ('氵', '气'),
'漏': ('氵', '屚'),
'洞': ('氵', '同'),
'浑': ('氵', '军'),
'浏': ('氵', '刘'),
'沉': ('氵', '冗'),
'池': ('氵', '也'),
'滤': ('氵', '虑'),
'漂': ('氵', '票'),
'淡': ('氵', '炎'),
'浙': ('氵', '折'),
'淀': ('氵', '定'),
'涧': ('氵', '间'),
'泊': ('氵', '白'),
'溢': ('氵', '益'),
'滴': ('氵', '啇'),
'渺': ('氵', '目', '少'),
'温': ('氵', '昷'),
'涂': ('氵', '余'),
'灌': ('氵', '雚'),
'淇': ('氵', '其'),
'污': ('氵', '亏'),
'湿': ('氵', '显'),
'沪': ('氵', '户'),
'滥': ('氵', '监'),
'治': ('氵', '台'),
'潮': ('氵', '朝'),
'潜': ('氵', '替'),
'沈': ('氵', '冘'),
'演': ('氵', '寅'),
'汗': ('氵', '干'),
'漓': ('氵', '离'),
'浇': ('氵', '尧'),
'淮': ('氵', '隹'),
'泻': ('氵', '写'),
'漠': ('氵', '莫'),
'浓': ('氵', '农'),
'潇': ('氵', '萧'),
'洒': ('氵', '西'),
'浮': ('氵', '孚'),
'泓': ('氵', '弘'),
'涟': ('氵', '连'),
'漪': ('氵', '猗'),
'泪': ('氵', '目'),
'渴': ('氵', '曷'),
'沾': ('氵', '占'),
'渗': ('氵', '参'),
'涔': ('氵', '岑'),
'泣': ('氵', '立'),
'渔': ('氵', '鱼'),
'浃': ('氵', '夹'),
'油': ('氵', '由'),
'滑': ('氵', '骨'),
'液': ('氵', '夜'),
'沧': ('氵', '仓'),
'沌': ('氵', '屯'),
'淑': ('氵', '叔'),
'澡': ('氵', '喿'),
'渍': ('氵', '责'),
'洲': ('氵', '州'),
'溜': ('氵', '留'),
'泌': ('氵', '必'),
'沸': ('氵', '弗'),
'潦': ('氵', '尞'),
'沦': ('氵', '仑'),
'洛': ('氵', '各'),
'沛': ('氵', '巿'),
'涌': ('氵', '甬'),
'泚': ('氵', '此'),
'沫': ('氵', '末'),
'涕': ('氵', '弟'),
'涯': ('氵', '厓'),
'涎': ('氵', '延'),
'淌': ('氵', '尚'),
'汹': ('氵', '凶'),
'河': ('氵', '可'),
'滚': ('氵', '衮'),
'酒': ('氵', '酉'),
'渐': ('氵', '斩'),
'洪': ('氵', '共'),
'汜': ('氵', '巳'),
'活': ('氵', '舌'),
'渭': ('氵', '胃'),
'涨': ('氵', '张'),
'溃': ('氵', '贵'),
'浦': ('氵', '甫'),
'沃': ('氵', '夭'),
'涉': ('氵', '步'),
'淝': ('氵', '肥'),
'湖': ('氵', '胡'),
'渡': ('氵', '度'),
'沮': ('氵', '且'),
'浩': ('氵', '告'),
'淹': ('氵', '奄'),
'漉': ('氵', '鹿'),
'沐': ('氵', '木'),
'浴': ('氵', '谷'),
'淳': ('氵', '享'),
'涣': ('氵', '奂'),
'泥': ('氵', '尼'),
'涸': ('氵', '固'),
'济': ('氵', '齐'),
'滞': ('氵', '带'),
'澄': ('氵', '登'),
'颗': ('果', '页'),
'歇': ('曷', '欠'),
'昭': ('日', '召'),
'时': ('日', '寸'),
'曦': ('日', '羲'),
'明': ('日', '月'),
'晚': ('日', '免'),
'映': ('日', '央'),
'暗': ('日', '音'),
'曝': ('日', '暴'),
'晰': ('日', '析'),
'晓': ('日', '尧'),
'晦': ('日', '每'),
'昨': ('日', '乍'),
'暇': ('日', '叚'),
'晌': ('日', '向'),
'曙': ('日', '署'),
'晒': ('日', '西'),
'昧': ('日', '未'),
'旷': ('日', '广'),
'晖': ('日', '军'),
'晴': ('日', '青'),
'蛾': ('虫', '我'),
'虾': ('虫', '下'),
'蠕': ('虫', '需'),
'蝶': ('虫', '枼'),
'蜂': ('虫', '夆'),
'虹': ('虫', '工'),
'蛇': ('虫', '它'),
'蚊': ('虫', '文'),
'蜡': ('虫', '昔'),
'蛤': ('虫', '合'),
'蟆': ('虫', '莫'),
'螺': ('虫', '累'),
'蜗': ('虫', '呙'),
'蚂': ('虫', '马'),
'蚁': ('虫', '义'),
'影': ('景', '彡'),
'题': ('是', '页'),
'川': ('丿', '〢'),
'顺': ('川', '页'),
'别': ('另', '刂'),
'鄙': ('啚', '阝'),
'踉': ('', '良'),
'跄': ('', '仓'),
'蹭': ('', '曾'),
'踩': ('', '采'),
'踹': ('', '耑'),
'踏': ('', '沓'),
'躁': ('', '喿'),
'跳': ('', '兆'),
'路': ('', '各'),
'踢': ('', '易'),
'距': ('', '巨'),
'蹑': ('', '聂'),
'踊': ('', '甬'),
'跨': ('', '夸'),
'趺': ('', '夫'),
'跃': ('', '夭'),
'践': ('', '戋'),
'趾': ('', '止'),
'跺': ('', '朵'),
'踪': ('', '宗'),
'跑': ('', '包'),
'跌': ('', '失'),
'跟': ('', '艮'),
'趴': ('', '八'),
'蹁': ('', '扁'),
'蹈': ('', '舀'),
'蹬': ('', '登'),
'跪': ('', '危'),
'踱': ('', '度'),
'跷': ('', '尧'),
'蹲': ('', '尊'),
'蹂': ('', '柔'),
'躏': ('', '蔺'),
'呀': ('口', '牙'),
'呢': ('口', '尼'),
'哈': ('口', '合'),
'啊': ('口', '阿'),
'叫': ('口', '丩'),
'哪': ('口', '那'),
'唉': ('口', '矣'),
'哇': ('口', '圭'),
'听': ('口', '斤'),
'吧': ('口', '巴'),
'吗': ('口', '马'),
'哦': ('口', '我'),
'吃': ('口', '乞'),
'噪': ('口', '喿'),
'喇': ('口', '剌'),
'叭': ('口', '八'),
'呼': ('口', '乎'),
'吸': ('口', '及'),
'啃': ('口', '肯'),
'嘱': ('口', '属'),
'唬': ('口', '虎'),
'吓': ('口', '下'),
'咳': ('口', '亥'),
'卟': ('口', '卜'),
'味': ('口', '未'),
'叶': ('口', '十'),
'唱': ('口', '昌'),
'吻': ('口', '勿'),
'嗷': ('口', '敖'),
'啥': ('口', '舍'),
'叹': ('口', '又'),
'咱': ('口', '自'),
'呓': ('口', '艺'),
'嘴': ('口', '觜'),
'喷': ('口', '贲'),
'吵': ('口', '少'),
'噜': ('口', '鲁'),
'喻': ('口', '俞'),
'喀': ('口', '客'),
'咏': ('口', '永'),
'啦': ('口', '拉'),
'哋': ('口', '地'),
'唔': ('口', '吾'),
'嘿': ('口', '黑'),
'呗': ('口', '贝'),
'嘘': ('口', '虚'),
'哧': ('口', '赤'),
'吐': ('口', '土'),
'喝': ('口', '曷'),
'咬': ('口', '交'),
'哄': ('口', '共'),
'哼': ('口', '亨'),
'叽': ('口', '几'),
'嘛': ('口', '麻'),
'啤': ('口', '卑'),
'呛': ('口', '仓'),
'呻': ('口', '申'),
'吟': ('口', '今'),
'喂': ('口', '畏'),
'嘟': ('口', '都'),
'哽': ('口', '更'),
'喃': ('口', '南'),
'嗨': ('口', '海'),
'噻': ('口', '塞'),
'咖': ('口', '加'),
'啡': ('口', '非'),
'哆': ('口', '多'),
'嗦': ('口', '索'),
'咆': ('口', '包'),
'哮': ('口', '孝'),
'吼': ('口', '孔'),
'喊': ('口', '咸'),
'呲': ('口', '此'),
'哎': ('口', '艾'),
'嘈': ('口', '曹'),
'嘶': ('口', '斯'),
'哑': ('口', '亚'),
'喘': ('口', '耑'),
'咧': ('口', '列'),
'咿': ('口', '伊'),
'噎': ('口', '壹'),
'嚯': ('口', '霍'),
'咐': ('口', '付'),
'咦': ('口', '夷'),
'唧': ('口', '即'),
'哨': ('口', '肖'),
'吱': ('口', '支'),
'啼': ('口', '帝'),
'嘀': ('口', '商'),
'嗝': ('口', '鬲'),
'吮': ('口', '允'),
'呜': ('口', '乌'),
'嘤': ('口', '婴'),
'咕': ('口', '古'),
'咂': ('口', '匝'),
'咔': ('口', '卡'),
'嚓': ('口', '察'),
'嘎': ('口', '戛'),
'咯': ('口', '各'),
'嗯': ('口', '恩'),
'吹': ('口', '欠'),
'咋': ('口', '乍'),
'咀': ('口', '且'),
'嚼': ('口', '爵'),
'嗲': ('口', '爹'),
'咚': ('口', '冬'),
'嗡': ('口', '翁'),
'吭': ('口', '亢'),
'哗': ('口', '华'),
'嘻': ('口', '喜'),
'噼': ('口', '辟'),
'哩': ('口', '里'),
'啪': ('口', '拍'),
'唏': ('口', '希'),
'喧': ('口', '宣'),
'囔': ('口', '囊'),
'噢': ('口', '奥'),
'喔': ('口', '屋'),
'叨': ('口', '刀'),
'唯': ('口', '隹'),
'咽': ('口', '因'),
'喉': ('口', '侯'),
'喽': ('口', '娄'),
'嗓': ('口', '桑'),
'嘹': ('口', '尞'),
'叮': ('口', '丁'),
'喁': ('口', '禺'),
'噙': ('口', '禽'),
'呵': ('口', '可'),
'嗅': ('口', '臭'),
'吆': ('口', '幺'),
'呕': ('口', '区'),
'哐': ('口', '匡'),
'咙': ('口', '龙'),
'嚷': ('口', '襄'),
'唠': ('口', '劳'),
'鸭': ('甲', '鸟'),
'转': ('车', '专'),
'辑': ('车', '咠'),
'斩': ('车', '斤'),
'加': ('力', '口'),
'较': ('车', '交'),
'轮': ('车', '仑'),
'辆': ('车', '两'),
'辖': ('车', '害'),
'轨': ('车', '九'),
'辐': ('车', '畐'),
'轿': ('车', '乔'),
'软': ('车', '欠'),
'辎': ('车', '甾'),
'辅': ('车', '甫'),
'输': ('车', '俞'),
'辗': ('车', '展'),
'畔': ('田', '半'),
'略': ('田', '各'),
'畴': ('田', '寿'),
'毗': ('田', '比'),
'黠': ('黑', '吉'),
'默': ('黑', '犬'),
'黯': ('黑', '音'),
'剁': ('朵', '刂'),
'峰': ('山', '夆'),
'岭': ('山', '令'),
'岐': ('山', '支'),
'鹦': ('婴', '鸟'),
'购': ('贝', '勾'),
'贱': ('贝', '戋'),
'则': ('贝', '刂'),
'败': ('贝', '攵'),
'贼': ('贝', '戎'),
'赠': ('贝', '曾'),
'赋': ('贝', '武'),
'赌': ('贝', '者'),
'赐': ('贝', '易'),
'贴': ('贝', '占'),
'贩': ('贝', '反'),
'财': ('贝', '才'),
'赚': ('贝', '兼'),
'删': ('册', '刂'),
'邮': ('由', '阝'),
'刚': ('冈', '刂'),
'帆': ('巾', '凡'),
'幅': ('巾', '畐'),
'帜': ('巾', '只'),
'帖': ('巾', '占'),
'帐': ('巾', '长'),
'帽': ('巾', '冒'),
'雕': ('周', '隹'),
'收': ('丩', '攵'),
'剧': ('居', '刂'),
'羽': ('习', '习'),
'情': ('忄', '青'),
'快': ('忄', '夬'),
'性': ('忄', '生'),
'懂': ('忄', '董'),
'忆': ('忄', '乙'),
'怙': ('忄', '古'),
'悛': ('忄', '夋'),
'恢': ('忄', '灰'),
'慎': ('忄', '真'),
'悼': ('忄', '卓'),
'怪': ('忄', '圣'),
'恰': ('忄', '合'),
'恒': ('忄', '亘'),
'怀': ('忄', '不'),
'怜': ('忄', '令'),
'怡': ('忄', '台'),
'惕': ('忄', '易'),
'慨': ('忄', '既'),
'忙': ('忄', '亡'),
'慌': ('忄', '荒'),
'怔': ('忄', '正'),
'惘': ('忄', '罔'),
'憔': ('忄', '焦'),
'悴': ('忄', '卒'),
'恹': ('忄', '厌'),
'懊': ('忄', '奥'),
'悔': ('忄', '每'),
'惯': ('忄', '贯'),
'惶': ('忄', '皇'),
'恍': ('忄', '光'),
'惚': ('忄', '忽'),
'愧': ('忄', '鬼'),
'怅': ('忄', '长'),
'愉': ('忄', '俞'),
'怦': ('忄', '平'),
'惭': ('忄', '斩'),
'怯': ('忄', '去'),
'悯': ('忄', '闵'),
'憾': ('忄', '感'),
'懒': ('忄', '赖'),
'怖': ('忄', '布'),
'懵': ('忄', '瞢'),
'悻': ('忄', '幸'),
'怕': ('忄', '白'),
'惋': ('忄', '宛'),
'惜': ('忄', '昔'),
'忧': ('忄', '尤'),
'憎': ('忄', '曾'),
'惨': ('忄', '参'),
'愤': ('忄', '贲'),
'恨': ('忄', '艮'),
'憧': ('忄', '童'),
'憬': ('忄', '景'),
'恸': ('忄', '动'),
'忖': ('忄', '寸'),
'惆': ('忄', '周'),
'惊': ('忄', '京'),
'慵': ('忄', '庸'),
'慷': ('忄', '康'),
'怆': ('忄', '仓'),
'悦': ('忄', '兑'),
'邺': ('业', '阝'),
'数': ('娄', '攵'),
'糕': ('米', '羔'),
'籽': ('米', '子'),
'粗': ('米', '且'),
'精': ('米', '青'),
'粘': ('米', '占'),
'料': ('米', '斗'),
'粉': ('米', '分'),
'糨': ('米', '强'),
'粮': ('米', '良'),
'糖': ('米', '唐'),
'糟': ('米', '曹'),
'糊': ('米', '胡'),
'粒': ('米', '立'),
'烧': ('火', '尧'),
'烁': ('火', '乐'),
'燃': ('火', '然'),
'烤': ('火', '考'),
'烘': ('火', '共'),
'煤': ('火', '某'),
'灶': ('火', '土'),
'炒': ('火', '少'),
'烛': ('火', '虫'),
'炽': ('火', '只'),
'烟': ('火', '因'),
'灿': ('火', '山'),
'炮': ('火', '包'),
'煌': ('火', '皇'),
'灯': ('火', '丁'),
'炉': ('火', '户'),
'焰': ('火', '臽'),
'烽': ('火', '夆'),
'烦': ('火', '页'),
'焊': ('火', '旱'),
'炸': ('火', '乍'),
'烂': ('火', '兰'),
'烩': ('火', '会'),
'炖': ('火', '屯'),
'炫': ('火', '玄'),
'熄': ('火', '息'),
'爆': ('火', '暴'),
'鹤': ('隺', '鸟'),
'额': ('客', '页'),
'豁': ('害', '谷'),
'割': ('害', '刂'),
'鲜': ('鱼', '羊'),
'初': ('衤', '刀'),
'被': ('衤', '皮'),
'袍': ('衤', '包'),
'补': ('衤', '卜'),
'袖': ('衤', '由'),
'裸': ('衤', '果'),
'裤': ('衤', '库'),
'衬': ('衤', '寸'),
'衫': ('衤', '彡'),
'袜': ('衤', '末'),
'襟': ('衤', '禁'),
'裙': ('衤', '君'),
'褚': ('衤', '者'),
'褪': ('衤', '退'),
'裆': ('衤', '当'),
'袄': ('衤', '夭'),
'裕': ('衤', '谷'),
'袂': ('衤', '夬'),
'袱': ('衤', '伏'),
'襦': ('衤', '需'),
'禅': ('礻', '单'),
'祥': ('礻', '羊'),
'祸': ('礻', '呙'),
'祛': ('礻', '去'),
'礼': ('礻', '乚'),
'视': ('礻', '见'),
'祖': ('礻', '且'),
'祝': ('礻', '兄'),
'福': ('礻', '畐'),
'社': ('礻', '土'),
'祷': ('礻', '寿'),
'神': ('礻', '申'),
'祈': ('礻', '斤'),
'褶': ('礻', '習'),
'褂': ('礻', '卦'),
'禳': ('礻', '襄'),
'够': ('句', '多'),
'触': ('角', '虫'),
'皱': ('刍', '皮'),
'邹': ('刍', '阝'),
'雏': ('刍', '隹'),
'孵': ('卵', '孚'),
'鲍': ('鱼', '包'),
'鲇': ('鱼', '占'),
'刹': ('杀', '刂'),
'外': ('夕', '卜'),
'钱': ('钅', '戋'),
'钢': ('钅', '冈'),
'银': ('钅', '艮'),
'针': ('钅', '十'),
'销': ('钅', '肖'),
'锭': ('钅', '定'),
'锤': ('钅', '垂'),
'镜': ('钅', '竟'),
'铭': ('钅', '名'),
'铠': ('钅', '岂'),
'钮': ('钅', '丑'),
'镶': ('钅', '襄'),
'铺': ('钅', '甫'),
'铃': ('钅', '令'),
'铲': ('钅', '产'),
'锅': ('钅', '呙'),
'钥': ('钅', '月'),
'锌': ('钅', '辛'),
'锗': ('钅', '者'),
'锂': ('钅', '里'),
'钙': ('钅', '丐'),
'锢': ('钅', '固'),
'钟': ('钅', '中'),
'钦': ('钅', '欠'),
'铁': ('钅', '失'),
'链': ('钅', '连'),
'镇': ('钅', '真'),
'钻': ('钅', '占'),
'钧': ('钅', '匀'),
'锦': ('钅', '帛'),
'锋': ('钅', '夆'),
'错': ('钅', '昔'),
'铜': ('钅', '同'),
'钛': ('钅', '太'),
'钗': ('钅', '叉'),
'钎': ('钅', '千'),
'铛': ('钅', '当'),
'铸': ('钅', '寿'),
'锄': ('钅', '助'),
'狡': ('犭', '交'),
'猾': ('犭', '骨'),
'猥': ('犭', '畏'),
'猫': ('犭', '苗'),
'狸': ('犭', '里'),
'狗': ('犭', '句'),
'猎': ('犭', '昔'),
'猪': ('犭', '者'),
'狠': ('犭', '艮'),
'犹': ('犭', '尤'),
'猜': ('犭', '青'),
'猛': ('犭', '孟'),
'狐': ('犭', '瓜'),
'猴': ('犭', '侯'),
'狭': ('犭', '夹'),
'独': ('犭', '虫'),
'狂': ('犭', '王'),
'狼': ('犭', '良'),
'狱': ('犭', '讠', '犬'),
'饼': ('饣', '并'),
'饿': ('饣', '我'),
'饭': ('饣', '反'),
'馈': ('饣', '贵'),
'饱': ('饣', '包'),
'饥': ('饣', '几'),
'馒': ('饣', '曼'),
'饶': ('饣', '尧'),
'饯': ('饣', '戋'),
'饮': ('饣', '欠'),
'蚀': ('饣', '虫'),
'的': ('白', '勺'),
'翱': ('皋', '羽'),
'欣': ('斤', '欠'),
'所': ('戶', '斤'),
'缺': ('缶', '夬'),
'罐': ('缶', '雚'),
'缸': ('缶', '工'),
'掰': ('手', '分', '手'),
'按': ('扌', '安'),
'描': ('扌', '苗'),
'挟': ('扌', '夹'),
'抢': ('扌', '仓'),
'抗': ('扌', '亢'),
'擅': ('扌', '亶'),
'扰': ('扌', '尤'),
'扯': ('扌', '止'),
'撕': ('扌', '斯'),
'捎': ('扌', '肖'),
'搏': ('扌', '尃'),
'控': ('扌', '空'),
'抓': ('扌', '爪'),
'抄': ('扌', '少'),
'捕': ('扌', '甫'),
'抱': ('扌', '包'),
'授': ('扌', '受'),
'拦': ('扌', '兰'),
'找': ('扌', '戈'),
'捉': ('扌', '足'),
'探': ('扌', '罙'),
'打': ('扌', '丁'),
'扫': ('扌', '彐'),
'把': ('扌', '巴'),
'拆': ('扌', '斥'),
'折': ('扌', '斤'),
'护': ('扌', '户'),
'搞': ('扌', '高'),
'技': ('扌', '支'),
'接': ('扌', '妾'),
'拼': ('扌', '并'),
'持': ('扌', '寺'),
'排': ('扌', '非'),
'抵': ('扌', '氐'),
'换': ('扌', '奂'),
'投': ('扌', '殳'),
'扣': ('扌', '口'),
'批': ('扌', '比'),
'据': ('扌', '居'),
'提': ('扌', '是'),
'推': ('扌', '隹'),
'托': ('扌', '乇'),
'搜': ('扌', '叟'),
'拔': ('扌', '犮'),
'操': ('扌', '喿'),
'指': ('扌', '旨'),
'拯': ('扌', '丞'),
'捷': ('扌', '疌'),
'损': ('扌', '员'),
'招': ('扌', '召'),
'括': ('扌', '舌'),
'捺': ('扌', '奈'),
'抬': ('扌', '台'),
'撰': ('扌', '巽'),
'拍': ('扌', '白'),
'挪': ('扌', '那'),
'播': ('扌', '番'),
'拐': ('扌', '另'),
'摆': ('扌', '罢'),
'抽': ('扌', '由'),
'扶': ('扌', '夫'),
'拷': ('扌', '考'),
'拉': ('扌', '立'),
'摘': ('扌', '啇'),
'握': ('扌', '屋'),
'搭': ('扌', '荅'),
'撇': ('扌', '敝'),
'抛': ('扌', '九', '力'),
'摄': ('扌', '聂'),
'拟': ('扌', '以'),
'拨': ('扌', '发'),
'掀': ('扌', '欣'),
'拓': ('扌', '石'),
'揽': ('扌', '览'),
'抹': ('扌', '末'),
'插': ('扌', '臿'),
'撼': ('扌', '感'),
'挂': ('扌', '圭'),
'擦': ('扌', '察'),
'扎': ('扌', '乚'),
'扮': ('扌', '分'),
'措': ('扌', '昔'),
'担': ('扌', '旦'),
'揭': ('扌', '曷'),
'撞': ('扌', '童'),
'掉': ('扌', '卓'),
'抑': ('扌', '卬'),
'抿': ('扌', '民'),
'摊': ('扌', '难'),
'摸': ('扌', '莫'),
'振': ('扌', '辰'),
'挺': ('扌', '廷'),
'掘': ('扌', '屈'),
'扔': ('扌', '乃'),
'捧': ('扌', '奉'),
'拎': ('扌', '令'),
'撒': ('扌', '散'),
'拘': ('扌', '句'),
'抚': ('扌', '无'),
'掐': ('扌', '臽'),
'搁': ('扌', '阁'),
'搐': ('扌', '畜'),
'攥': ('扌', '纂'),
'搓': ('扌', '差'),
'揍': ('扌', '奏'),
'挤': ('扌', '齐'),
'抖': ('扌', '斗'),
'捂': ('扌', '吾'),
'披': ('扌', '皮'),
'搬': ('扌', '般'),
'捏': ('扌', '圼'),
'掏': ('扌', '匋'),
'捡': ('扌', '佥'),
'扭': ('扌', '丑'),
'拱': ('扌', '共'),
'搅': ('扌', '觉'),
'拌': ('扌', '半'),
'挫': ('扌', '坐'),
'掠': ('扌', '京'),
'挨': ('扌', '矣'),
'拭': ('扌', '式'),
'揉': ('扌', '柔'),
'扒': ('扌', '八'),
'拧': ('扌', '宁'),
'撅': ('扌', '厥'),
'捣': ('扌', '岛'),
'搂': ('扌', '娄'),
'拾': ('扌', '合'),
'捐': ('扌', '肙'),
'揣': ('扌', '耑'),
'攆': ('扌', '輦'),
'撵': ('扌', '辇'),
'拂': ('扌', '弗'),
'摁': ('扌', '恩'),
'撮': ('扌', '最'),
'撩': ('扌', '尞'),
'拢': ('扌', '龙'),
'拽': ('扌', '曳'),
'拗': ('扌', '幼'),
'挠': ('扌', '尧'),
'捅': ('扌', '甬'),
'攒': ('扌', '赞'),
'拴': ('扌', '全'),
'扑': ('扌', '卜'),
'押': ('扌', '甲'),
'携': ('扌', '隽'),
'执': ('扌', '丸'),
'扩': ('扌', '广'),
'挣': ('扌', '争'),
'拒': ('扌', '巨'),
'撑': ('扌', '掌'),
'挥': ('扌', '军'),
'掩': ('扌', '奄'),
'挡': ('扌', '当'),
'抒': ('扌', '予'),
'搔': ('扌', '蚤'),
'挑': ('扌', '兆'),
'揪': ('扌', '秋'),
'拙': ('扌', '出'),
'摒': ('扌', '屏'),
'挞': ('扌', '达'),
'掷': ('扌', '郑'),
'捶': ('扌', '垂'),
'撂': ('扌', '畧'),
'歌': ('哥', '欠'),
'飘': ('票', '风'),
'瓢': ('票', '瓜'),
'酷': ('酉', '告'),
'酸': ('酉', '夋'),
'醉': ('酉', '卒'),
'酵': ('酉', '孝'),
'酥': ('酉', '禾'),
'醋': ('酉', '昔'),
'酗': ('酉', '凶'),
'酩': ('酉', '名'),
'酊': ('酉', '丁'),
'醺': ('酉', '熏'),
'酬': ('酉', '州'),
'配': ('酉', '己'),
'醒': ('酉', '星'),
'醇': ('酉', '享'),
'顶': ('丁', '页'),
'柄': ('木', '丙'),
'林': ('木', '木'),
'柱': ('木', '主'),
'杨': ('木', '昜'),
'样': ('木', '羊'),
'标': ('木', '示'),
'樱': ('木', '婴'),
'桃': ('木', '兆'),
'构': ('木', '勾'),
'杭': ('木', '亢'),
'柿': ('木', '市'),
'机': ('木', '几'),
'析': ('木', '斤'),
'核': ('木', '亥'),
'棋': ('木', '其'),
'相': ('木', '目'),
'棍': ('木', '昆'),
'板': ('木', '反'),
'校': ('木', '交'),
'模': ('木', '莫'),
'检': ('木', '佥'),
'栈': ('木', '戋'),
'枝': ('木', '支'),
'栏': ('木', '兰'),
'框': ('木', '匡'),
'横': ('木', '黄'),
'概': ('木', '既'),
'梯': ('木', '弟'),
'楷': ('木', '皆'),
'桂': ('木', '圭'),
'棒': ('木', '奉'),
'材': ('木', '才'),
'棚': ('木', '朋'),
'椅': ('木', '奇'),
'杯': ('木', '不'),
'档': ('木', '当'),
'枫': ('木', '风'),
'杜': ('木', '土'),
'枕': ('木', '冘'),
'梭': ('木', '夋'),
'柏': ('木', '白'),
'桶': ('木', '甬'),
'朴': ('木', '卜'),
'枰': ('木', '平'),
'楸': ('木', '秋'),
'枉': ('木', '王'),
'桩': ('木', '庄'),
'械': ('木', '戒'),
'柜': ('木', '巨'),
'槽': ('木', '曹'),
'杆': ('木', '干'),
'橱': ('木', '厨'),
'株': ('木', '朱'),
'栩': ('木', '羽'),
'柳': ('木', '卯'),
'橡': ('木', '象'),
'椭': ('木', '陏'),
'棉': ('木', '帛'),
'梢': ('木', '肖'),
'村': ('木', '寸'),
'根': ('木', '艮'),
'楼': ('木', '娄'),
'树': ('木', '对'),
'桥': ('木', '乔'),
'极': ('木', '及'),
'梅': ('木', '每'),
'枪': ('木', '仓'),
'格': ('木', '各'),
'檐': ('木', '詹'),
'权': ('木', '又'),
'松': ('木', '公'),
'枯': ('木', '古'),
'槁': ('木', '高'),
'植': ('木', '直'),
'鹅': ('我', '鸟'),
'翻': ('番', '羽'),
'射': ('身', '寸'),
'躺': ('身', '尚'),
'躲': ('身', '朵'),
'躯': ('身', '区'),
'稻': ('禾', '舀'),
'利': ('禾', '刂'),
'私': ('禾', '厶'),
'种': ('禾', '中'),
'程': ('禾', '呈'),
'移': ('禾', '多'),
'租': ('禾', '且'),
'和': ('禾', '口'),
'称': ('禾', '尔'),
'科': ('禾', '斗'),
'税': ('禾', '兑'),
'秋': ('禾', '火'),
'秸': ('禾', '吉'),
'秆': ('禾', '干'),
'稿': ('禾', '高'),
'稀': ('禾', '希'),
'秒': ('禾', '少'),
'稼': ('禾', '家'),
'秽': ('禾', '岁'),
'稳': ('禾', '急'),
'秘': ('禾', '必'),
'稍': ('禾', '肖'),
'秣': ('禾', '末'),
'穰': ('禾', '襄'),
'积': ('禾', '只'),
'稚': ('禾', '隹'),
'徒': ('彳', '走'),
'彼': ('彳', '皮'),
'徘': ('彳', '非'),
'徊': ('彳', '回'),
'行': ('彳', '亍'),
'律': ('彳', '聿'),
'待': ('彳', '寺'),
'很': ('彳', '艮'),
'循': ('彳', '盾'),
'御': ('彳', '卸'),
'衍': ('彳', '氵', '亍'),
'街': ('彳', '圭', '亍'),
'衔': ('彳', '钅', '亍'),
'彻': ('彳', '切'),
'徐': ('彳', '余'),
'征': ('彳', '正'),
'往': ('彳', '主'),
'物': ('牜', '勿'),
'特': ('牜', '寺'),
'牺': ('牜', '西'),
'牲': ('牜', '生'),
'牧': ('牜', '攵'),
'犊': ('牛', '卖'),
'知': ('矢', '口'),
'矩': ('矢', '巨'),
'矮': ('矢', '委'),
'敏': ('每', '攵'),
'舰': ('舟', '见'),
'般': ('舟', '殳'),
'航': ('舟', '亢'),
'鹄': ('告', '鸟'),
'剩': ('乘', '刂'),
'敌': ('舌', '攵'),
'乱': ('舌', '乚'),
'辞': ('舌', '辛'),
'甜': ('舌', '甘'),
'鼾': ('鼻', '干'),
'牍': ('片', '卖'),
'版': ('片', '反'),
'牌': ('片', '卑'),
'牒': ('片', '枼'),
'新': ('亲', '斤'),
'瓶': ('并', '瓦'),
'剃': ('弟', '刂'),
'站': ('立', '占'),
'靖': ('立', '青'),
'竣': ('立', '夋'),
'端': ('立', '耑'),
'颜': ('彦', '页'),
'部': ('咅', '阝'),
'剖': ('咅', '刂'),
'韵': ('音', '匀'),
'郑': ('关', '阝'),
'效': ('交', '攵'),
'歉': ('兼', '欠'),
'翔': ('羊', '羽'),
'壮': ('丬', '士'),
'妆': ('丬', '女'),
'将': ('丬', '寽'),
'状': ('丬', '犬'),
'况': ('冫', '兄'),
'次': ('冫', '欠'),
'减': ('冫', '咸'),
'凝': ('冫', '疑'),
'凛': ('冫', '禀'),
'冯': ('冫', '马'),
'决': ('冫', '夬'),
'凑': ('冫', '奏'),
'凄': ('冫', '妻'),
'准': ('冫', '隹'),
'冲': ('冫', '中'),
'冰': ('冫', '水'),
'凉': ('冫', '京'),
'凌': ('冫', '夌'),
'冷': ('冫', '令'),
'净': ('冫', '争'),
'邵': ('召', '阝'),
'郡': ('君', '阝'),
'群': ('君', '羊'),
'剥': ('录', '刂'),
'鸠': ('九', '鸟'),
'骗': ('马', '扁'),
'劝': ('又', '力'),
'妖': ('女', '夭'),
'奴': ('女', '又'),
'婵': ('女', '单'),
'娜': ('女', '那'),
'好': ('女', '子'),
'姚': ('女', '兆'),
'如': ('女', '口'),
'娃': ('女', '圭'),
'她': ('女', '也'),
'姓': ('女', '生'),
'奸': ('女', '干'),
'始': ('女', '台'),
'嫌': ('女', '兼'),
'婚': ('女', '昏'),
'娱': ('女', '吴'),
'媒': ('女', '某'),
'姐': ('女', '且'),
'姗': ('女', '册'),
'妇': ('女', '彐'),
'嫩': ('女', '敕'),
'娆': ('女', '尧'),
'嫉': ('女', '疾'),
'妒': ('女', '户'),
'妈': ('女', '马'),
'嫁': ('女', '家'),
'娇': ('女', '乔'),
'嫖': ('女', '票'),
'姑': ('女', '古'),
'妩': ('女', '无'),
'媚': ('女', '眉'),
'婿': ('女', '胥'),
'妹': ('女', '未'),
'姣': ('女', '交'),
'奶': ('女', '乃'),
'姆': ('女', '母'),
'姥': ('女', '老'),
'妃': ('女', '己'),
'妞': ('女', '丑'),
'姻': ('女', '因'),
'嫂': ('女', '叟'),
'娴': ('女', '闲'),
'姨': ('女', '夷'),
'婉': ('女', '宛'),
'媳': ('女', '息'),
'嬉': ('女', '喜'),
'妙': ('女', '少'),
'娘': ('女', '良'),
'妓': ('女', '支'),
'舒': ('舍', '予'),
'领': ('令', '页'),
'颔': ('含', '页'),
'邻': ('令', '阝'),
'剑': ('佥', '刂'),
'叙': ('余', '又'),
'斜': ('余', '斗'),
'创': ('仓', '刂'),
'敛': ('佥', '攵'),
'颂': ('公', '页'),
'欲': ('谷', '欠'),
'伟': ('亻', '韦'),
'储': ('亻', '诸'),
'你': ('亻', '尔'),
'他': ('亻', '也'),
'偏': ('亻', '扁'),
'信': ('亻', '言'),
'何': ('亻', '可'),
'但': ('亻', '旦'),
'件': ('亻', '牛'),
'保': ('亻', '呆'),
'像': ('亻', '象'),
'做': ('亻', '故'),
'什': ('亻', '十'),
'位': ('亻', '立'),
'住': ('亻', '主'),
'停': ('亻', '亭'),
'供': ('亻', '共'),
'代': ('亻', '弋'),
'们': ('亻', '门'),
'优': ('亻', '尤'),
'值': ('亻', '直'),
'传': ('亻', '专'),
'作': ('亻', '乍'),
'任': ('亻', '壬'),
'使': ('亻', '吏'),
'倚': ('亻', '奇'),
'化': ('亻', '匕'),
'仅': ('亻', '又'),
'仿': ('亻', '方'),
'偃': ('亻', '匽'),
'例': ('亻', '列'),
'俗': ('亻', '谷'),
'价': ('亻', '介'),
'俄': ('亻', '我'),
'亿': ('亻', '乙'),
'伙': ('亻', '火'),
'伴': ('亻', '半'),
'佼': ('亻', '交'),
'催': ('亻', '崔'),
'健': ('亻', '建'),
'俊': ('亻', '夋'),
'傲': ('亻', '敖'),
'佬': ('亻', '老'),
'侦': ('亻', '贞'),
'佑': ('亻', '右'),
'佛': ('亻', '弗'),
'侮': ('亻', '每'),
'份': ('亻', '分'),
'仕': ('亻', '士'),
'倡': ('亻', '昌'),
'仲': ('亻', '中'),
'仔': ('亻', '子'),
'倍': ('亻', '咅'),
'仪': ('亻', '义'),
'伯': ('亻', '白'),
'伦': ('亻', '仑'),
'偷': ('亻', '俞'),
'傅': ('亻', '尃'),
'伸': ('亻', '申'),
'似': ('亻', '以'),
'付': ('亻', '寸'),
'估': ('亻', '古'),
'倜': ('亻', '周'),
'傥': ('亻', '党'),
'债': ('亻', '责'),
'侣': ('亻', '吕'),
'仙': ('亻', '山'),
'俯': ('亻', '府'),
'俩': ('亻', '两'),
'俱': ('亻', '具'),
'俺': ('亻', '奄'),
'仨': ('亻', '三'),
'僻': ('亻', '辟'),
'俨': ('亻', '严'),
'偎': ('亻', '畏'),
'伶': ('亻', '令'),
'俐': ('亻', '利'),
'侥': ('亻', '尧'),
'偌': ('亻', '若'),
'借': ('亻', '昔'),
'仰': ('亻', '卬'),
'仗': ('亻', '丈'),
'休': ('亻', '木'),
'俘': ('亻', '孚'),
'伍': ('亻', '五'),
'倒': ('亻', '到'),
'便': ('亻', '更'),
'仁': ('亻', '二'),
'依': ('亻', '衣'),
'伐': ('亻', '戈'),
'侧': ('亻', '则'),
'低': ('亻', '氐'),
'体': ('亻', '本'),
'仍': ('亻', '乃'),
'侍': ('亻', '寺'),
'促': ('亻', '足'),
'仇': ('亻', '九'),
'儒': ('亻', '需'),
'佐': ('亻', '左'),
'伪': ('亻', '为'),
'侩': ('亻', '会'),
'假': ('亻', '叚'),
'佳': ('亻', '圭'),
'伏': ('亻', '犬'),
'偶': ('亻', '禺'),
'偿': ('亻', '尝'),
'倦': ('亻', '卷'),
'张': ('弓', '长'),
'强': ('弓', '虽'),
'弹': ('弓', '单'),
'引': ('弓', '丨'),
'弛': ('弓', '也'),
'弧': ('弓', '瓜'),
'弦': ('弓', '玄'),
'弥': ('弓', '尔'),
'弘': ('弓', '厶'),
'比': ('匕', '匕'),
'幼': ('幺', '力'),
'绰': ('纟', '卓'),
'纵': ('纟', '从'),
'纷': ('纟', '分'),
'纤': ('纟', '千'),
'缚': ('纟', '尃'),
'绷': ('纟', '朋'),
'纫': ('纟', '刃'),
'绢': ('纟', '肙'),
'组': ('纟', '且'),
'给': ('纟', '合'),
'红': ('纟', '工'),
'纸': ('纟', '氏'),
'结': ('纟', '吉'),
'纪': ('纟', '己'),
'编': ('纟', '扁'),
'线': ('纟', '戋'),
'统': ('纟', '充'),
'级': ('纟', '及'),
'络': ('纟', '各'),
'续': ('纟', '卖'),
'绞': ('纟', '交'),
'终': ('纟', '冬'),
'绝': ('纟', '色'),
'绘': ('纟', '会'),
'纯': ('纟', '屯'),
'绵': ('纟', '帛'),
'综': ('纟', '宗'),
'纠': ('纟', '丩'),
'绕': ('纟', '尧'),
'缕': ('纟', '娄'),
'绪': ('纟', '者'),
'绩': ('纟', '责'),
'织': ('纟', '只'),
'纱': ('纟', '少'),
'绿': ('纟', '录'),
'绽': ('纟', '定'),
'缩': ('纟', '宿'),
'纨': ('纟', '丸'),
'绔': ('纟', '夸'),
'缀': ('纟', '叕'),
'缰': ('纟', '畺'),
'缝': ('纟', '逢'),
'绳': ('纟', '黾'),
'绊': ('纟', '半'),
'绯': ('纟', '非'),
'缄': ('纟', '咸'),
'缆': ('纟', '览'),
'缴': ('纟', '敫'),
'绅': ('纟', '申'),
'纳': ('纟', '内'),
'约': ('纟', '勺'),
'绍': ('纟', '召'),
'绶': ('纟', '受'),
'纶': ('纟', '仑'),
'细': ('纟', '田'),
'缢': ('纟', '益'),
'维': ('纟', '隹'),
'敲': ('高', '攴'),
'就': ('京', '尤'),
'敦': ('享', '攵'),
'刘': ('文', '刂'),
'斓': ('文', '阑'),
'放': ('方', '攵'),
'颤': ('亶', '页'),
'氓': ('亡', '民'),
'剂': ('齐', '刂'),
'刻': ('亥', '刂'),
'郭': ('享', '阝'),
'鹧': ('庶', '鸟'),
'麟': ('鹿', '粦'),
'郊': ('交', '阝'),
'谈': ('讠', '炎'),
'谁': ('讠', '隹'),
'说': ('讠', '兑'),
'话': ('讠', '舌'),
'许': ('讠', '午'),
'该': ('讠', '亥'),
'调': ('讠', '周'),
'试': ('讠', '式'),
'记': ('讠', '己'),
'设': ('讠', '殳'),
'语': ('讠', '吾'),
'让': ('讠', '上'),
'请': ('讠', '青'),
'认': ('讠', '人'),
'讲': ('讠', '井'),
'识': ('讠', '只'),
'谋': ('讠', '某'),
'详': ('讠', '羊'),
'诸': ('讠', '者'),
'证': ('讠', '正'),
'访': ('讠', '方'),
'读': ('讠', '卖'),
'词': ('讠', '司'),
'诗': ('讠', '寺'),
'谜': ('讠', '迷'),
'诠': ('讠', '全'),
'课': ('讠', '果'),
'订': ('讠', '丁'),
'谐': ('讠', '皆'),
'谨': ('讠', '堇'),
'讹': ('讠', '化'),
'训': ('讠', '川'),
'诀': ('讠', '夬'),
'谍': ('讠', '枼'),
'诅': ('讠', '且'),
'谎': ('讠', '荒'),
'谬': ('讠', '翏'),
'谢': ('讠', '射'),
'讼': ('讠', '公'),
'谱': ('讠', '普'),
'谊': ('讠', '宜'),
'诵': ('讠', '甬'),
'诧': ('讠', '宅'),
'谯': ('讠', '焦'),
'诞': ('讠', '延'),
'讨': ('讠', '寸'),
'诚': ('讠', '成'),
'讧': ('讠', '工'),
'评': ('讠', '平'),
'诏': ('讠', '召'),
'谓': ('讠', '胃'),
'误': ('讠', '吴'),
'讯': ('讠', '卂'),
'论': ('讠', '仑'),
'议': ('讠', '义'),
'计': ('讠', '十'),
'谦': ('讠', '兼'),
'谅': ('讠', '京'),
'询': ('讠', '旬'),
'诈': ('讠', '乍'),
'诉': ('讠', '斥'),
'诱': ('讠', '秀'),
'讶': ('讠', '牙'),
'讷': ('讠', '内'),
'讽': ('讠', '风'),
'诫': ('讠', '戒'),
'诺': ('讠', '若'),
'诋': ('讠', '氐'),
'谴': ('讠', '遣'),
'限': ('阝', '艮'),
'帕': ('巾', '白'),
'伊': ('亻', '尹'),
'掖': ('扌', '夜'),
'列': ('歹', '刂'),
'呃': ('口', '厄'),
'颁': ('分', '页'),
'纽': ('纟', '丑'),
'瑚': ('王', '胡'),
'键': ('钅', '建'),
'捆': ('扌', '困'),
'绑': ('纟', '邦'),
'剽': ('票', '刂'),
'蹦': ('足', '崩'),
'猖': ('犭', '昌'),
'獗': ('犭', '厥'),
'栋': ('木', '东'),
'悚': ('忄', '束'),
'幌': ('巾', '晃'),
'赔': ('贝', '咅'),
'吁': ('口', '于'),
'锐': ('钅', '兑'),
'哟': ('口', '约'),
'剔': ('易', '刂'),
'朽': ('木', '丂'),
'吖': ('口', '丫'),
'儆': ('亻', '敬'),
'锈': ('钅', '秀'),
'附': ('阝', '付'),
'滔': ('氵', '舀'),
'婊': ('女', '表'),
'坊': ('土', '方'),
'彰': ('章', '彡'),
'懈': ('忄', '解'),
'湛': ('氵', '甚'),
'粥': ('弓', '米', '弓'),
'妨': ('女', '方'),
'胁': ('月', '办'),
'腿': ('月', '退'),
'邓': ('又', '阝'),
'嗖': ('口', '叟')
}
def is_breakable(c):
    """Tell whether a Chinese character can be split into components.

    :arg
        c[string]: the character to test
    :return -> Boolean:
        True exactly when *c* has an entry in HANZI_BREAKER_MAP,
        i.e. a known decomposition exists for it.
    """
    breakable = c in HANZI_BREAKER_MAP
    return breakable
def get(c):
    """Get all component parts of a Chinese character.

    :arg:
        c[string]: character
    :return -> tuple | None:
        the tuple of components recorded for *c* in HANZI_BREAKER_MAP,
        or None when the character has no known decomposition.
        (The original docstring said "list"; the map stores tuples.)
    """
    # dict.get already returns None for a missing key, so the explicit
    # membership test of the original performed two lookups for nothing.
    return HANZI_BREAKER_MAP.get(c)
|
from pytorch_lightning.loggers import NeptuneLogger as _NeptuneLogger
from pytorch_lightning.loggers import CSVLogger as _CSVLogger
class Logger:
    """Abstract no-op logging interface.

    Concrete subclasses override both hooks; the base implementation
    silently ignores every call.
    """

    def log_metrics(self, metric_dict, step, save=False):
        """Record a dict of metric values at a given step (no-op here)."""
        return None

    def log_hparams(self, hparams_dict):
        """Record a dict of hyper-parameters (no-op here)."""
        return None
class CSVLogger(Logger):
    """Logger that forwards to pytorch_lightning's CSVLogger and flushes
    the accumulated rows to disk every `save_stride` calls."""

    def __init__(self, directory='./', name='logs', save_stride=1):
        # Underlying pytorch_lightning CSV logger that does the actual writing.
        self.logger = _CSVLogger(directory, name=name)
        # Number of log_metrics calls since the counter was last reset.
        self.count = 0
        # Flush to disk once every `stride` calls.
        self.stride = save_stride

    def log_metrics(self, metrics, step=None,save=False):
        """Forward `metrics` to the CSV logger, saving periodically.

        Writes to disk on every `stride`-th call, and additionally
        whenever `save` is True.
        """
        self.count += 1
        self.logger.log_metrics(metrics, step=step)
        if self.count % self.stride == 0:
            self.logger.save()
            # NOTE(review): clears the underlying logger's in-memory row
            # buffer so rows are not written twice on the next save.  This
            # reaches into a private attribute of pytorch_lightning's
            # CSVLogger — verify against the installed version.
            self.logger.metrics = []
        if self.count > self.stride * 10:
            # Keep the counter bounded; this shifts the save cadence by
            # one call each time it fires, which appears intentional
            # (best-effort periodic flushing, not exact).
            self.count = 0
        if save:
            self.logger.save()

    def log_hparams(self, hparams_dict):
        """Forward hyper-parameters to the CSV logger and save immediately."""
        self.logger.log_hyperparams(hparams_dict)
        self.logger.save()
class NeptuneLogger(Logger):
    """Logger that streams metrics and hyper-parameters to neptune.ai
    via pytorch_lightning's NeptuneLogger."""

    def __init__(self, project_name, api_key, save_folder='./'):
        # Kept for interface parity with CSVLogger; Neptune streams to the
        # server and does not write into this folder itself.
        self.directory = save_folder
        self.logger = _NeptuneLogger(api_key=api_key, project_name=project_name)

    def log_metrics(self, metrics, step=None, save=False):
        """Forward `metrics` to Neptune at the given step.

        `save` is accepted (and ignored) so the signature matches the
        `Logger` base class and `CSVLogger` — previously a caller passing
        `save=...` through the common interface raised a TypeError here.
        Neptune uploads immediately, so there is nothing extra to flush.
        """
        self.logger.log_metrics(metrics, step=step)

    def log_hparams(self, hparams_dict):
        """Forward hyper-parameters to Neptune."""
        self.logger.log_hyperparams(hparams_dict)
|
import nanome
from nanome.util import Logs
from nanome.util import Color
from nanome.api.ui import Dropdown, DropdownItem
from functools import partial
import pathlib
import os
BASE_PATH = os.path.dirname(os.path.realpath(__file__))
IMG_REFRESH_PATH = os.path.join(BASE_PATH, 'icons', 'refresh.png')
class KNIMEmenu():
    """Menu controller for the KNIME workflow Nanome plugin.

    Builds the dropdown-based UI from a JSON layout, tracks the user's
    protein / ligand / docking-grid selections, and enables the Run
    button only when a complete selection exists.
    """
    def __init__(self, knime_plugin):
        # Handle back to the owning plugin; menu updates and workflow runs
        # are delegated through it.
        self.menu = knime_plugin.menu
        self._plugin = knime_plugin
        # Current user selections: one protein complex, a list of ligand
        # dropdown items, and one grid dropdown item.
        self._selected_protein = None
        self._selected_ligands = []
        self._selected_grid = None
        self._run_button = None
        # Folder on the plugin machine holding the docking-grid files.
        self._grid_folder = self._plugin._grid_dir
        # NOTE(review): duplicate of the assignment a few lines above —
        # harmless but redundant.
        self._run_button = None
        # When True, the next populate call skips the selection reset.
        self._no_reset = False
    def _request_refresh(self):
        # Ask the plugin to re-send the current workspace state.
        self._plugin.request_refresh()
    # Call back to the KNIMEExamplePlugin.py plugin class and run the workflow
    def _run_workflow(self):
        self._plugin.run_workflow()
    ## Appropriating functions from Muzhou's docking plugin ##
    # Get complex data from Nanome workspace
    def populate_protein_ligand_dropdown(self, complex_list):
        """Fill the ligand and protein dropdowns from `complex_list`.

        Resets the current selection first, unless a workflow is running
        (or a reset was suppressed via `_no_reset`).
        """
        Logs.debug("calling reset from change_complex_list")
        if self._plugin._running:
            self._no_reset = True
        elif self._no_reset:
            self._no_reset = False
        else:
            self.reset(update_menu=False)
        ## Populate the ligand and protein dropdown lists with loaded complexes ##
        ligand_list = []
        protein_list = []
        for complex in complex_list:
            # Each complex appears in both dropdowns, as distinct items.
            dd_item1 = DropdownItem()
            dd_item2 = DropdownItem()
            dd_item1.complex = complex
            dd_item2.complex = complex
            dd_item1._name = complex.full_name
            dd_item2._name = complex.full_name
            ligand_list.append(dd_item1)
            protein_list.append(dd_item2)
        # pass the complex lists to the dropdown menus, handle selection behavior
        self._ligand_dropdown.items = ligand_list
        self._protein_dropdown.items = protein_list
        self._ligand_dropdown.register_item_clicked_callback(
            partial(self.handle_dropdown_pressed, self._selected_ligands, 'ligand'))
        self._protein_dropdown.register_item_clicked_callback(
            partial(self.handle_dropdown_pressed, self._selected_protein, 'protein'))
        self._plugin.update_menu(self._menu)
    # Get grid data from specified location in plugin machine's filesystem
    def populate_grid_dropdown(self):
        """List the files of `_grid_folder` in the grid dropdown."""
        ## Update the Docking Grid dropdown with files from grid folder ##
        grid_list = []
        for filename in os.listdir(self._grid_folder):
            grid_dd_item = DropdownItem()
            # Display name is the file name without its extension.
            grid_dd_item._name = os.path.splitext(filename)[0]
            grid_list.append(grid_dd_item)
        self._grid_dropdown.items = grid_list
        self._grid_dropdown.register_item_clicked_callback(
            partial(self.handle_dropdown_pressed, self._selected_grid, 'grid'))
        self._plugin.update_menu(self._menu)
    # Control selection behavior upon interaction with dropdowns
    def handle_dropdown_pressed(self, docking_component, component_name, dropdown, item):
        """Toggle the selection for one dropdown, then refresh the UI.

        `component_name` selects the branch ('ligand'/'protein'/'grid');
        `docking_component` is the selection captured at registration
        time and is not read inside this method.
        """
        if component_name == 'ligand':
            #cur_index = item.complex.index
            # if cur_index not in [x.complex.index for x in self._selected_ligands]:
            if not self._selected_ligands:
                self._selected_ligands.append(item)
                item.selected = True
            else:
                # for x in self._selected_ligands:
                #     if x.complex.index == cur_index:
                #         self._selected_ligands.remove(x)
                #         break
                # Clicking a different ligand replaces the selection;
                # clicking the already-selected one clears it.
                if (len(self._selected_ligands) > 1) or\
                        (len(self._selected_ligands) == 1 and self._selected_ligands[0].complex.index != item.complex.index):
                    self._selected_ligands = [item]
                    item.selected = True
                else:
                    self._selected_ligands = []
                    item.selected = False
            # if len(self._selected_ligands) > 1:
            #     self._ligand_txt._text_value = 'Multiple'
            #     self._ligand_dropdown.use_permanent_title = True
            #     self._ligand_dropdown.permanent_title = "Multiple"
            if len(self._selected_ligands) == 1:
                #self._ligand_txt._text_value = item.complex.full_name if len(item.complex.full_name) <= 4 else item.complex.full_name[:8]+'...'
                self._ligand_dropdown.use_permanent_title = False
            elif len(self._selected_ligands) == 0:
                self._ligand_dropdown.use_permanent_title = True
                self._ligand_dropdown.permanent_title = "None"
                #self._ligand_txt._text_value = "Ligand"
        elif component_name == 'protein':
            # Clicking the selected protein again deselects it.
            if self._selected_protein and self._selected_protein.index == item.complex.index:
                self._selected_protein = None
            else:
                self._selected_protein = item.complex
            if self._selected_protein:
                self._protein_dropdown.use_permanent_title = False
                #self._protein_txt._text_value = item.complex.full_name if len(item.complex.full_name) <= 4 else item.complex.full_name[:8]+'...'
            else:
                #self._protein_txt._text_value = "protein"
                self._protein_dropdown.use_permanent_title = True
                self._protein_dropdown.permanent_title = "None"
        elif component_name == 'grid':
            # Clicking the selected grid again deselects it.
            if not self._selected_grid or self._selected_grid != item:
                self._selected_grid = item
            else:
                self._selected_grid = None
                item.selected = False
            if self._selected_grid:
                Logs.debug('selected grid:', self._selected_grid)
                self._grid_dropdown.use_permanent_title = False
                self._grid_dropdown.permanent_title = item._name
            else:
                self._grid_dropdown.use_permanent_title = True
                self._grid_dropdown.permanent_title = "None"
        # self.update_icons()
        self.refresh_run_btn_unusable()
        self._plugin.update_menu(self._menu)
    # Only handles one ligand for now. For implementations of KNIME plugins with multiple ligands, the
    # self._selected_ligands variable will need to be changed to an array, and the plugin.run_workflow
    # and KNIMErunner.run_knime methods will probably also need to be adjusted.
    def get_ligands(self):
        """Return the selected ligand complex, or None when none selected."""
        # ligands = []
        # for item in self._selected_ligands:
        #     ligands.append(item.complex)
        # return ligands
        if self._selected_ligands == []:
            return None
        return self._selected_ligands[0].complex
    def get_protein(self):
        """Return the selected protein complex, or None when none selected."""
        if self._selected_protein == None:
            return None
        return self._selected_protein
    def make_plugin_usable(self, state=True):
        """Enable/disable the Run button, also honouring selection state."""
        self._run_button.unusable = (
            not state) | self.refresh_run_btn_unusable(update=False)
        self._plugin.update_content(self._run_button)
    def refresh_run_btn_unusable(self, update=True, after=False):
        """Recompute whether the Run button is unusable.

        The button is usable only when a protein, at least one ligand and
        a grid are all selected; its label shows "Running..." while a
        workflow is in progress.  Returns the resulting `unusable` flag.
        `after` is only logged here.
        """
        grid_requirement_met = self._selected_grid != None
        Logs.debug("selected protein is: ", self._selected_protein)
        Logs.debug("selected ligand is: ", self._selected_ligands)
        Logs.debug("selected grid is: ", self._selected_grid)
        Logs.debug("after is: ", after)
        if self._selected_protein != None and len(self._selected_ligands) > 0 and grid_requirement_met and self._plugin._running:
            Logs.debug("run button unusable case 1")
            self._grid_dropdown.use_permanent_title = True
            self._run_button.text.value_unusable = "Running..."
            self._run_button.unusable = False
        elif self._selected_protein != None and len(self._selected_ligands) > 0 and grid_requirement_met and not self._plugin._running:
            Logs.debug("run button unusable case 3")
            self._grid_dropdown.use_permanent_title = True
            self._run_button.text.value_unusable = "Remove Hydrogens"
            self._run_button.unusable = False
        else:
            Logs.debug('run button unusable case 2')
            self._grid_dropdown.use_permanent_title = True
            self._run_button.text.value_unusable = "Remove Hydrogens"
            self._run_button.unusable = True
        if update:
            self._plugin.update_content(self._run_button)
        return self._run_button.unusable
    def reset(self, update_menu=True):
        """Clear every selection and refresh the menu."""
        Logs.debug('reset called')
        self._selected_grid = None
        self._selected_ligands = []
        self._selected_protein = None
        self.make_plugin_usable()
        self._plugin.update_menu(self._menu)
    def clear_dropdown(self, dropdown):
        # Show "None" instead of any previously selected entry.
        dropdown.use_permanent_title = True
        dropdown.permanent_title = "None"
    def set_all_dropdowns_to_none(self):
        """Reset the visible title of every dropdown to "None"."""
        dropdown_list = [self._ligand_dropdown,
                         self._protein_dropdown, self._grid_dropdown]
        for dropdown in dropdown_list:
            self.clear_dropdown(dropdown)
    # I guess everything that happens (interactions w/menu) are handled in this function
    def build_menu(self):
        """Load the menu layout from JSON and wire up dropdowns/buttons."""
        # import the json file of the new UI
        menu = nanome.ui.Menu.io.from_json(os.path.join(
            os.path.dirname(__file__), 'KNIME_menu_POC_dropdown.json'))
        self._plugin.menu = menu
        # defining callbacks
        # what to do when the run button (in secondary menu) is pressed
        def run_button_pressed_callback(button):
            self._run_workflow()
        # Populate the empty dropdown nodes on the menu with /dropdown/ content
        # Needed because stack studio currently does not support dropdown content
        # Ligand dropdown
        self._ligand_dropdown = menu.root.find_node(
            "LigandDropdown").add_new_dropdown()
        self._ligand_dropdown.use_permanent_title = True
        self._ligand_dropdown.permanent_title = "None"
        # Protein dropdown
        self._protein_dropdown = menu.root.find_node(
            "ComplexDropdown").add_new_dropdown()
        self._protein_dropdown.use_permanent_title = True
        self._protein_dropdown.permanent_title = "None"
        # GLIDE grid dropdown
        # NOTE(review): uses `menu._root` while the other dropdowns use
        # `menu.root` — confirm both resolve to the same node tree.
        self._grid_dropdown = menu._root.find_node(
            "GridDropdown").add_new_dropdown()
        self._grid_dropdown.use_permanent_title = True
        self._grid_dropdown.permanent_title = "None"
        # update button
        self.refresh_button = menu.root.find_node("RefreshButton").get_content()
        self.refresh_button.icon.value.set_all(IMG_REFRESH_PATH)
        def refresh(button):
            self.populate_grid_dropdown()
            self._plugin.request_complex_list(self._plugin.on_complex_list_received)
        self.refresh_button.register_pressed_callback(refresh)
        # run button
        self.ln_run_button = menu.root.find_node("RunButton")
        run_button = self.ln_run_button.get_content()
        run_button.register_pressed_callback(run_button_pressed_callback)
        self._run_button = run_button
        self._run_button.enabled = False
        self.refresh_run_btn_unusable()
        # Update the menu
        self._menu = menu
        self._plugin.update_menu(menu)
|
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import xarray as xr
sns.set()
def plot_range(xlabel, ylabel, title, x, values):
    """Draw `values` against `x` as a red line on a wide, short figure.

    `x` and `values` should have the same size.
    """
    plt.plot(x, values, 'r-', linewidth=2)
    figure = plt.gcf()
    figure.set_size_inches(8, 2)
    plt.title(title)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
def plot_year_multi(*args):
    """Plot several 365-day series on a single axis.

    Each element of *args is an iterable of two elements
    (value, label), where `value` is an array of 365 daily values.
    """
    fig = plt.figure(figsize=(8, 3))
    ax = fig.add_subplot(1, 1, 1)
    days = np.arange(1, 366, 1)
    for series in args:
        ax.plot(days, series[0], linewidth=2, label=series[1])
    ax.set_ylabel(r'$values$')
    ax.set_xlabel(r'$days$')
    ax.legend(loc='best')
def extract_alk(data_train, variable='B_C_Alk', start='2011-01-01', end='2011-12-31'):
    """Extract a one-year alkalinity series at a fixed depth level.

    :param data_train: pair (path, depth) — path of a netCDF dataset and
        the `z` depth value to select (e.g. the surface layer).
    :param variable: dataset variable to extract (default keeps the
        original hard-coded 'B_C_Alk').
    :param start: first date label of the slice (inclusive).
    :param end: last date label of the slice (inclusive).
    :return: DataFrame of the sliced series with the index reset.

    The variable name and date range were hard-coded before; they are now
    keyword parameters with the same defaults, so existing callers are
    unaffected.
    """
    ds = xr.open_dataset(data_train[0])
    alk_df = ds[variable].to_dataframe()
    # select the requested depth level, then the requested date window
    alk_surface = alk_df.groupby('z').get_group(data_train[1])
    alk = alk_surface.loc[start:end]
    alk = alk.reset_index()
    return alk
def show_alk(data_train):
    """Plot each (frame, label) pair of `data_train` as an alkalinity
    time series on one shared axis, then show the figure."""
    fig = plt.figure(figsize=(10, 2))
    ax = fig.add_subplot(1, 1, 1)
    for entry in data_train:
        frame = entry[0]
        ax.plot(frame['time'],
                frame['B_C_Alk'], linewidth=2, label=entry[1])
    ax.legend(loc='best')
    ax.set_title('Alkalinity in the surface layer')
    plt.show()
if __name__ == '__main__':
    # This module is a library of plotting helpers; nothing runs directly.
    print('This is a plot functions module')
|
import numpy as np
def bbox2feat_grid(bbox, stride_H, stride_W, feat_H, feat_W):
    """Map an (x, y, w, h) box to a flat feature-grid cell index plus the
    box-corner offsets relative to that cell.

    The inverse operation is feat_grid2bbox.
    """
    left, top, width, height = bbox
    right = left + width - 1
    bottom = top + height - 1
    # express the (inclusive) corner coordinates on the feature grid
    left = left * 1. / stride_W - 0.5
    top = top * 1. / stride_H - 0.5
    right = right * 1. / stride_W - 0.5
    bottom = bottom * 1. / stride_H - 0.5
    # centre cell of the box, clamped to lie inside the grid
    xc = min(max(int(round((left + right) / 2.)), 0), feat_W - 1)
    yc = min(max(int(round((top + bottom) / 2.)), 0), feat_H - 1)
    cell = yc * feat_W + xc
    return cell, (left - xc, top - yc, right - xc, bottom - yc)
def feat_grid2bbox(ind, offset, stride_H, stride_W, feat_H, feat_W):
    """Invert bbox2feat_grid: recover (x1, y1, w, h) from a flat cell
    index and its four corner offsets.

    `feat_H` is unused but kept for signature symmetry with the forward
    mapping.
    """
    yc, xc = divmod(ind, feat_W)
    dx1, dy1, dx2, dy2 = offset
    # undo the -0.5 shift and the stride scaling of the forward mapping
    x1 = (xc + dx1 + 0.5) * stride_W
    y1 = (yc + dy1 + 0.5) * stride_H
    x2 = (xc + dx2 + 0.5) * stride_W
    y2 = (yc + dy2 + 0.5) * stride_H
    return x1, y1, x2 - x1 + 1, y2 - y1 + 1
def bbox_iou(bbox_1, bbox_2):
    """Intersection-over-union of two (x, y, w, h) boxes, treating the
    corner coordinates as inclusive pixel positions."""
    ax1, ay1, aw, ah = bbox_1
    bx1, by1, bw, bh = bbox_2
    ax2, ay2 = ax1 + aw - 1, ay1 + ah - 1
    bx2, by2 = bx1 + bw - 1, by1 + bh - 1
    # overlap extents clamp at zero when the boxes are disjoint
    inter_w = max(min(ax2, bx2) - max(ax1, bx1) + 1, 0)
    inter_h = max(min(ay2, by2) - max(ay1, by1) + 1, 0)
    inter = inter_w * inter_h
    return inter / (aw * ah + bw * bh - inter)
def batch_bbox2feat_grid(bbox, stride_H, stride_W, feat_H, feat_W):
    """Vectorised bbox2feat_grid over an (N, 4) array of (x, y, w, h)
    boxes.  Returns (indices, (dx1, dy1, dx2, dy2)) with array entries."""
    x1, y1, w, h = bbox.T
    x2 = x1 + w - 1
    y2 = y1 + h - 1
    # project the (inclusive) corners onto the feature grid
    x1 = x1 * 1. / stride_W - 0.5
    y1 = y1 * 1. / stride_H - 0.5
    x2 = x2 * 1. / stride_W - 0.5
    y2 = y2 * 1. / stride_H - 0.5
    # centre cells, clamped into the grid bounds
    xc = np.clip(np.int32(np.round((x1 + x2) / 2.)), 0, feat_W - 1)
    yc = np.clip(np.int32(np.round((y1 + y2) / 2.)), 0, feat_H - 1)
    cells = yc * feat_W + xc
    return cells, (x1 - xc, y1 - yc, x2 - xc, y2 - yc)
def batch_feat_grid2bbox(ind, offset, stride_H, stride_W, feat_H, feat_W):
    """Vectorised feat_grid2bbox: `ind` is (N,), `offset` is (N, 4).
    Returns an (N, 4) array of (x1, y1, w, h) boxes."""
    yc, xc = np.divmod(ind, feat_W)
    # undo the -0.5 shift and stride scaling of the forward mapping
    x1 = (xc + offset[:, 0] + 0.5) * stride_W
    y1 = (yc + offset[:, 1] + 0.5) * stride_H
    x2 = (xc + offset[:, 2] + 0.5) * stride_W
    y2 = (yc + offset[:, 3] + 0.5) * stride_H
    return np.stack((x1, y1, x2 - x1 + 1, y2 - y1 + 1), axis=1)
def batch_bbox_iou(bbox_1, bbox_2):
    """Element-wise IoU between two (N, 4) arrays of (x, y, w, h) boxes,
    treating corner coordinates as inclusive pixel positions."""
    ax1, ay1, aw, ah = bbox_1.T
    bx1, by1, bw, bh = bbox_2.T
    ax2, ay2 = ax1 + aw - 1, ay1 + ah - 1
    bx2, by2 = bx1 + bw - 1, by1 + bh - 1
    # overlap extents clamp at zero for disjoint pairs
    inter_w = np.maximum(np.minimum(ax2, bx2) - np.maximum(ax1, bx1) + 1, 0)
    inter_h = np.maximum(np.minimum(ay2, by2) - np.maximum(ay1, by1) + 1, 0)
    inter = inter_w * inter_h
    return inter / (aw * ah + bw * bh - inter)
|
from django import forms
from categoria.models import Categoria
class CategoriaFormulario(forms.ModelForm):
    """ModelForm for creating/editing Categoria records."""

    class Meta:
        model = Categoria
        # Categoria attribute that must not appear in the rendered form.
        # NOTE(review): used together with fields='__all__' below — Django
        # applies `exclude` on top of the expanded field set, so `user` is
        # dropped; confirm with the Django version in use.
        exclude = ('user',)
        # expose all remaining attributes of the Categoria class
        fields = '__all__'
|
import json
import os
import csv
# Rename the data files to start with "indicator_".
"""
for filename in os.listdir('data'):
if filename.endswith(".csv") and not filename.startswith('indicator_'):
path_from = os.path.join('data', filename)
path_to = os.path.join('data', 'indicator_' + filename)
os.rename(path_from, path_to)
"""
# Strip source-system-only fields from each JSON metadata file and re-emit
# the remaining key/value pairs as a two-column CSV next to it.

# Keys carried over from the upstream system that must not appear in the
# generated CSV files.
_DROPPED_KEYS = (
    'ID', 'DataID', 'indicator_number', 'goal_number', 'goal_name',
    'target_number', 'target_name', 'indicator_name', 'UnitofMeasure',
    'national_geographical_coverage', 'indicator', 'target_id',
    'sdg_goal', 'reporting_status', 'indicator_sort_order',
)

for filename in os.listdir('meta'):
    if not filename.endswith('.json'):
        continue
    with open(os.path.join('meta', filename), 'r') as stream:
        meta = json.load(stream)
    for key in _DROPPED_KEYS:
        # pop() instead of del so one file missing a key does not abort
        # the whole batch with a KeyError.
        meta.pop(key, None)
    meta['data_non_statistical'] = False
    destination = os.path.join('meta', filename.replace('.json', '.csv'))
    # newline='' is required for files passed to csv.writer; without it
    # every row is followed by a blank line on Windows.
    with open(destination, 'w', newline='') as csv_file:
        writer = csv.writer(csv_file)
        for key, value in meta.items():
            writer.writerow([key, value])
|
import random
import exrex as ex
import numpy as np
def transponiraj(a1, a2):
    """Stack two equal-length sequences as the columns of a 2-D array.

    Returns an array of shape (len(a1), 2) whose i-th row is the pair
    (a1[i], a2[i]).
    """
    return np.array([a1, a2]).T
#----MODEL---------
# capacity / weight pairs used to seed the `model` table
capList = [50, 100, 150, 200, 300]
tezaList = [10, 12, 20, 20, 23]
vnosModel = transponiraj(capList, tezaList)
def napolniTabeloModel(vnos):
    """Build the multi-row INSERT statement that seeds the `model` table.

    :param vnos: iterable of (kapaciteta, teza) pairs.
    :return: the complete INSERT command, with no trailing comma.
    """
    komanda = """INSERT INTO model(kapaciteta, teza) values"""
    # str.join builds the comma-separated value list directly, so no
    # trailing comma needs slicing off afterwards — the original
    # `komanda[:-1]` would even chop the final 's' off "values" when
    # `vnos` was empty.
    return komanda + ",".join("({}, {})".format(val[0], val[1]) for val in vnos)
#----VLAK----------------
def napolniTabeloVlak(con, cur, stVnosov):
    """Build an INSERT that fills the `vlak` table with random rows.

    Samples `stVnosov` (manufacture-year, model-id) pairs, where model
    ids are drawn from the existing `model` table via `cur`.  `con` is
    not used here; the caller executes the returned command itself.
    """
    cur.execute(""" SELECT id from model""")
    modeliNaVoljo = cur.fetchall()
    # random years between 1980 and 2020, paired with random existing ids
    letaIzdelava = [random.randint(1980, 2020) for m in range(stVnosov)]
    modeli = [modeliNaVoljo[random.randint(0, len(modeliNaVoljo)-1)][0] for m in range(stVnosov)]
    vnosVlak = transponiraj(letaIzdelava, modeli)
    komanda = """INSERT INTO vlak(leto_izdelave, model) values"""
    for val in vnosVlak:
        niz = "({}, {}),".format(val[0], val[1])
        komanda += niz
    return komanda[:-1]  # drop the trailing comma
def izbrisiCeloTabelo(tabela):
    """Build a DELETE command that empties the given table.

    NOTE(review): the table name is interpolated directly into the SQL
    string; this is only safe while `tabela` comes from trusted,
    internal code — never pass user input here.
    """
    komanda = """ DELETE FROM {}
    """.format(tabela)
    return komanda
|
from django.db import models
from django.db.models import CharField
from django_mysql.models import ListCharField #This is a custom field that requires mysql
# Create your models here.
class Excursion(models.Model):
    """A bookable shore excursion offered at a given port."""

    # Constants for the `status` field choices.
    Active = 'active'
    Inactive = 'inactive'
    Excursion_status = [
        (Active, 'active'),
        (Inactive, 'inactive'),
    ]
    # Primary key supplied by the upstream system, not auto-generated.
    id = models.BigIntegerField(primary_key=True,unique=True)
    name = models.CharField(max_length=200)
    detailPageName = models.CharField(max_length=400)
    portID = models.CharField(max_length=10)
    type = models.CharField(max_length=10)
    topology = ListCharField(base_field= CharField(max_length=10),size=6,max_length=(6*11)) # * 10 inputs plus comma. This is a custom field that requires mysql
    activityLevel = models.CharField(max_length=20)
    collectionType = models.CharField(max_length=30)
    duration = models.CharField(max_length=100)
    language = ListCharField(base_field= CharField(max_length=10),size=6,max_length=(6*11)) # * 10 inputs plus comma, This is a custom field that requires mysql
    # Nullable: not every excursion has a price level assigned.
    priceLevel = models.IntegerField(null=True)
    currency = models.CharField(max_length=10)
    mealInfo = models.CharField(max_length=100,blank=True)
    status = models.CharField(max_length=10, choices = Excursion_status,default=Active)
    shortDescription = models.CharField(max_length=200,blank=True)
    longDescription = models.TextField()
    externalContent = models.BooleanField(default=False)
    minimumAge = models.CharField(max_length=10,blank=True)
    wheelChairAccecsible = models.BooleanField(default=False)
    featured = models.BooleanField(default=True)

    def __str__(self):
        # Human-readable representation used in the admin and shell.
        return self.name

    class Meta:
        ordering = ['id']
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
# Import the compiled SWIG extension module.  On Python >= 2.6 the helper
# first looks for _SimModel_PyCallBack next to this wrapper before falling
# back to a plain import; on older Pythons a plain import is used.
from sys import version_info
if version_info >= (2, 6, 0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            # look for the compiled module alongside this file first
            fp, pathname, description = imp.find_module('_SimModel_PyCallBack', [dirname(__file__)])
        except ImportError:
            import _SimModel_PyCallBack
            return _SimModel_PyCallBack
        if fp is not None:
            try:
                _mod = imp.load_module('_SimModel_PyCallBack', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _SimModel_PyCallBack = swig_import_helper()
    del swig_import_helper
else:
    import _SimModel_PyCallBack
del version_info
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """SWIG-generated attribute setter shared by all proxy classes.

    Routes writes through the class's __swig_setmethods__ table so that
    wrapped C++ members are updated; with `static` false it falls back to
    normal Python attribute storage, otherwise unknown names are rejected.
    """
    if (name == "thisown"):
        # ownership flag lives on the underlying SwigPyObject
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Dynamic variant: unknown attributes are stored on the instance.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
    """SWIG-generated attribute getter shared by all proxy classes.

    Reads go through the class's __swig_getmethods__ table so wrapped C++
    members are fetched from the extension; with `static` false it falls
    back to normal object attribute lookup.
    """
    if (name == "thisown"):
        # ownership flag lives on the underlying SwigPyObject
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    if (not static):
        return object.__getattr__(self, name)
    else:
        raise AttributeError(name)
def _swig_getattr(self, class_type, name):
    # Dynamic variant: unknown attributes fall back to object lookup.
    return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
    """repr() for SWIG proxies, showing the wrapped C object when present."""
    try:
        strthis = "proxy of " + self.this.__repr__()
    except:
        # NOTE(review): bare except is SWIG-generated boilerplate — it
        # deliberately swallows any failure and degrades the repr.
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Feature probes for very old Pythons: _newclass records whether new-style
# classes exist, and weakref_proxy degrades to the identity function when
# the weakref module is unavailable.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0
try:
    import weakref
    weakref_proxy = weakref.proxy
except:
    weakref_proxy = lambda x: x
class SwigPyIterator(_object):
    """Abstract SWIG proxy for C++ iterators.

    Every method simply delegates to the compiled _SimModel_PyCallBack
    extension; instances are only ever created by C++ code, never
    constructed from Python (the constructor raises).
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _SimModel_PyCallBack.delete_SwigPyIterator
    __del__ = lambda self: None
    def value(self):
        return _SimModel_PyCallBack.SwigPyIterator_value(self)
    def incr(self, n=1):
        return _SimModel_PyCallBack.SwigPyIterator_incr(self, n)
    def decr(self, n=1):
        return _SimModel_PyCallBack.SwigPyIterator_decr(self, n)
    def distance(self, x):
        return _SimModel_PyCallBack.SwigPyIterator_distance(self, x)
    def equal(self, x):
        return _SimModel_PyCallBack.SwigPyIterator_equal(self, x)
    def copy(self):
        return _SimModel_PyCallBack.SwigPyIterator_copy(self)
    def next(self):
        return _SimModel_PyCallBack.SwigPyIterator_next(self)
    def __next__(self):
        return _SimModel_PyCallBack.SwigPyIterator___next__(self)
    def previous(self):
        return _SimModel_PyCallBack.SwigPyIterator_previous(self)
    def advance(self, n):
        return _SimModel_PyCallBack.SwigPyIterator_advance(self, n)
    def __eq__(self, x):
        return _SimModel_PyCallBack.SwigPyIterator___eq__(self, x)
    def __ne__(self, x):
        return _SimModel_PyCallBack.SwigPyIterator___ne__(self, x)
    def __iadd__(self, n):
        return _SimModel_PyCallBack.SwigPyIterator___iadd__(self, n)
    def __isub__(self, n):
        return _SimModel_PyCallBack.SwigPyIterator___isub__(self, n)
    def __add__(self, n):
        return _SimModel_PyCallBack.SwigPyIterator___add__(self, n)
    def __sub__(self, *args):
        return _SimModel_PyCallBack.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self
SwigPyIterator_swigregister = _SimModel_PyCallBack.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
import SimModel
import base
class SimPyCallBack(_object):
    """SWIG director proxy: C++ can invoke the load*/get* hooks on Python
    subclasses.  Generated code — subclasses override the hooks below.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimPyCallBack, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SimPyCallBack, name)
    __repr__ = _swig_repr

    def __init__(self):
        # Director pattern: pass `self` to C++ only for subclasses so their
        # Python overrides are dispatched; the bare base class passes None.
        if self.__class__ == SimPyCallBack:
            _self = None
        else:
            _self = self
        this = _SimModel_PyCallBack.new_SimPyCallBack(_self, )
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_destroy__ = _SimModel_PyCallBack.delete_SimPyCallBack
    __del__ = lambda self: None

    def loadSimClassObj(self, _dataName, _simClassList):
        """Hook: load SIM data classes (overridden in Python subclasses)."""
        return _SimModel_PyCallBack.SimPyCallBack_loadSimClassObj(self, _dataName, _simClassList)

    def loadSimGeomClassObj(self, _geomDataName, _simGeomClassList):
        """Hook: load geometry data classes."""
        return _SimModel_PyCallBack.SimPyCallBack_loadSimGeomClassObj(self, _geomDataName, _simGeomClassList)

    def loadSimSysClassObj(self, _sysDataName, _simSysClassList):
        """Hook: load system data classes."""
        return _SimModel_PyCallBack.SimPyCallBack_loadSimSysClassObj(self, _sysDataName, _simSysClassList)

    def getSimClassObj(self, _id):
        """Hook: resolve an object by its reference id."""
        return _SimModel_PyCallBack.SimPyCallBack_getSimClassObj(self, _id)

    def getReferenceId(self, _classId, _propertyName):
        """Hook: resolve a reference-valued property."""
        return _SimModel_PyCallBack.SimPyCallBack_getReferenceId(self, _classId, _propertyName)

    def getIO(self, _name):
        """Hook: IO lookup by name."""
        return _SimModel_PyCallBack.SimPyCallBack_getIO(self, _name)

    def getRefValueType(self, _classId, _propertyName):
        """Hook: report the value type of a referenced property."""
        return _SimModel_PyCallBack.SimPyCallBack_getRefValueType(self, _classId, _propertyName)

    def getRefNumberValue(self, _classId, _propertyName):
        """Hook: numeric value of a referenced property."""
        return _SimModel_PyCallBack.SimPyCallBack_getRefNumberValue(self, _classId, _propertyName)

    def getRefStringValue(self, _classId, _propertyName):
        """Hook: string value of a referenced property."""
        return _SimModel_PyCallBack.SimPyCallBack_getRefStringValue(self, _classId, _propertyName)

    def __disown__(self):
        # Transfer ownership of the wrapped C++ object to C++; keep only a
        # weak proxy on the Python side.
        self.this.disown()
        _SimModel_PyCallBack.disown_SimPyCallBack(self)
        return weakref_proxy(self)
SimPyCallBack_swigregister = _SimModel_PyCallBack.SimPyCallBack_swigregister
SimPyCallBack_swigregister(SimPyCallBack)
class CallBack(SimPyCallBack):
    """Concrete callback that lazily opens SimModel documents and resolves
    objects and property values by their reference id.

    The three load* hooks share one indexing routine; the three getRef*
    hooks share one present-value lookup.
    """

    def __init__(self):
        super(CallBack, self).__init__()
        self._sim = None       # main SIM document (opened on first load)
        self._simGeom = None   # geometry document
        self._simSys = None    # system document
        self._dict = {}        # RefId -> model object index

    def _index_objects(self, model, class_list):
        """Index every object of the listed classes by its RefId."""
        for class_idx in range(class_list.size()):
            obj_list = getattr(model, class_list[class_idx])()
            for obj_idx in range(obj_list.sizeInt()):
                obj = obj_list.at(obj_idx)
                self._dict[obj.RefId()] = obj

    def loadSimClassObj(self, _dataName, _simClassList):
        """Open the SIM document (once) and index the requested classes."""
        if self._sim is None:
            self._sim = SimModel.SimModel_(_dataName)
        self._index_objects(self._sim, _simClassList)

    def loadSimGeomClassObj(self, _geomDataName, _simGeomClassList):
        """Open the geometry document (once) and index the requested classes."""
        if self._simGeom is None:
            self._simGeom = SimModel.SimModel_(_geomDataName)
        self._index_objects(self._simGeom, _simGeomClassList)

    def loadSimSysClassObj(self, _sysDataName, _simSysClassList):
        """Open the system document (once) and index the requested classes."""
        if self._simSys is None:
            self._simSys = SimModel.SimModel_(_sysDataName)
        self._index_objects(self._simSys, _simSysClassList)

    def getSimClassObj(self, _id):
        """Return the indexed object with RefId ``_id``, or None."""
        return self._dict.get(_id)

    def _property_value(self, _classId, _propertyName):
        """Return the value of the named property when the owning object
        exists and the property is present; otherwise None."""
        obj = self.getSimClassObj(_classId)
        if obj is None:
            return None
        prop = getattr(obj, _propertyName)()
        if prop.present():
            return prop.getValue()
        return None

    def getReferenceId(self, _classId, _propertyName):
        """Return the reference id stored in the named property, if present."""
        return self._property_value(_classId, _propertyName)

    def getIO(self, _name):
        print(_name)

    def getRefValueType(self, _classId, _propertyName):
        """Classify the referenced property's value as "String" or "Number"."""
        value = self._property_value(_classId, _propertyName)
        if value is None:
            return None
        return "String" if type(value) is str else "Number"

    def getRefNumberValue(self, _classId, _propertyName):
        """Return the referenced property's numeric value, if present."""
        return self._property_value(_classId, _propertyName)

    def getRefStringValue(self, _classId, _propertyName):
        """Return the referenced property's string value, if present."""
        return self._property_value(_classId, _propertyName)
# This file is compatible with both classic and new-style classes.
|
class Mem(object):
    """A tracked block of emulator guest memory.

    :param addr: Guest base address of the block.
    :param size: Block size in bytes (page aligned by the allocator).
    :param isUsed: Whether the block is currently handed out.
    """

    def __init__(self, addr, size, isUsed):
        self.addr = addr
        self.size = size
        self.isUsed = isUsed

    def __repr__(self):
        return "Mem(addr=%#x, size=%#x, isUsed=%r)" % (
            self.addr, self.size, self.isUsed)
class Function(object):
    """Drives emulation of a candidate function and manages scratch memory.

    Subclasses define ``argc``/``testcases`` and override ``checkOne`` /
    ``checkTwo`` to validate the emulated function's behavior case by case.
    """

    # Number of arguments the target function takes (subclass override).
    argc = 0
    # (input, expected) pairs iterated by test() (subclass override).
    testcases = (
        (None, None),
    )

    def __init__(self, emu, startEa, endEa):
        self.emu = emu          # emulator wrapper (alloc/read/write/run)
        self.startEa = startEa  # entry address of the candidate function
        self.endEa = endEa      # address at which emulation stops
        self.ownMem = []        # Mem blocks allocated for arguments
        self.debug = False
        self.debug_func = None  # optional sink for debug messages

    def dlog(self, s):
        """Emit a debug message when debugging is enabled."""
        if self.debug:
            if self.debug_func is None:
                print(s)
            else:
                self.debug_func(s)

    def __del__(self):
        self.unmapAllMem()

    def cleanup(self):
        """Reset emulator state between test cases."""
        self.emu.restore()
        self.emu.preStack()

    def test(self):
        """Run every test case; return True only when all checks pass."""
        for case in self.testcases:
            self.cleanup()
            if not self.checkOne(case):
                return False
            self.clearMem()
            if not self.checkTwo():
                return False
        return True

    def start(self):
        """Run the emulator over [startEa, endEa)."""
        self.emu.startEmu(self.startEa, self.endEa)

    def getMem(self, size):
        """Return a guest buffer of at least ``size`` bytes.

        Reuses a previously allocated free block when one is large enough;
        otherwise allocates a fresh page-aligned block.
        """
        for mem in self.ownMem:
            # BUGFIX: reuse requires the free block to be at least as large
            # as the request (the old ``mem.size <= size`` handed out
            # blocks *smaller* than requested).
            if not mem.isUsed and mem.size >= size:
                mem.isUsed = True
                return mem.addr
        addr = self.emu.allocMem(size)
        self.ownMem.append(Mem(addr, self.emu.pageAlign(size), True))
        return addr

    def clearMem(self):
        """Zero every in-use block and mark it free for the next case."""
        for mem in self.ownMem:
            if mem.isUsed:
                self.emu.writeMem(mem.addr, b'\x00' * mem.size)
                mem.isUsed = False

    def unmapAllMem(self):
        """Unmap and forget every block this instance allocated."""
        while self.ownMem:
            mem = self.ownMem.pop()
            self.emu.target.mem_unmap(mem.addr, mem.size)

    def setArgWithMem(self, i, size, data=None):
        """Back argument ``i`` with guest memory, optionally pre-filled."""
        addr = self.getMem(size)
        if data:
            self.emu.writeMem(addr, data)
        self.emu.setArgv(i, addr)
        return addr

    def setArgWithImm(self, i, value):
        """Set argument ``i`` to an immediate value."""
        self.emu.setArgv(i, value)

    def checkOne(self, case):
        """Validate one (input, expected) case; subclass override."""
        pass

    def checkTwo(self):
        '''
        do more check when things get funny
        '''
        return True
|
"""
Author: Andrew Harris
Python Version: Python3.8.3
"""
import argparse
from pathlib import Path
from gevent import monkey
monkey.patch_all()
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
from gevent.pywsgi import WSGIServer
from thex.app import app, server
from thex.apps import homepage
from thex.apps.tree_viewer import tv_layout
from thex.apps.signal_tracer import signalTracer_layout
from thex.apps.docs import docs_layout
# Single-page-app shell: dcc.Location tracks the browser URL and the
# router callback swaps `page-content` to match it.
app.layout = html.Div([
    dcc.Location(id='url', refresh=False),
    html.Div(id='page-content')
])
@app.callback(Output('page-content', 'children'),
              [Input('url', 'pathname')])
def display_page(pathname):
    """Route the current URL path to the matching page layout.

    The homepage exposes a prebuilt layout attribute; the app pages expose
    layout factory functions. Unknown paths yield a 404 message.
    """
    if pathname == '/':
        return homepage.layout
    page_factories = {
        '/apps/signal_tracer': signalTracer_layout,
        '/apps/tree_viewer': tv_layout,
        '/apps/documentation': docs_layout,
    }
    factory = page_factories.get(pathname)
    if factory is not None:
        return factory()
    return '404 - Page not found'
def main():
    """Parse CLI options and launch the Tree House Explorer server.

    In development mode the Dash debug server is used; otherwise the app
    is served through gevent's WSGIServer with request logging disabled.
    """
    parser = argparse.ArgumentParser(
        description='Tree House Explorer Genome Browser')
    parser.add_argument('--host', type=str, action='store',
                        default='127.0.0.1', help="Host address")
    parser.add_argument('--port', type=int, action='store',
                        default=8050, help='Port number')
    parser.add_argument('--dev', action='store_true', default=False,
                        help='Run in development mode')
    args = parser.parse_args()
    if args.dev:
        app.run_server(debug=True, port=args.port, host=args.host)
        return
    print(f"Tree House Explorer running on http://{args.host}:{args.port}/")
    http_server = WSGIServer((args.host, args.port), application=server, log=None)
    http_server.serve_forever()
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Rules for defining OpenROAD configuration for various PDKs"""
OpenRoadPdkInfo = provider(
    "provider for openROAD configuration for a pdk",
    fields = {
        "cell_site": "LEF standard cell site name to use for floorplanning",
        "tracks_file": "Track setup script",
        "endcap_cell": "The endcap cell to use in place and route",
        "tap_cell": "The tap cell to use in the place and route.",
        "pin_horizontal_metal_layer": "Metal layer for horizontal pin placement",
        "pin_vertical_metal_layer": "Metal layer for vertical pin placement",
        "tapcell_distance": "Distance, in sites, between tap cells",
        "wire_rc_signal_metal_layer": "The metal layer to pull RC information for signal nets",
        "wire_rc_clock_metal_layer": "The metal layer to pull RC information for clock nets",
        "pdn_config": "PDN config",
        "global_placement_cell_pad": "Global placement cell padding to aide in routing",
        "do_not_use_cell_list": "Do not use cells in timing repair. This supports wild card * cell names",
        "cts_buffer_cell": "Clock Tree Buffer cell",
        "fill_cells": "Metal fill cells",
        "global_routing_layer_adjustments": "Global routing adjustment layers",
        "global_routing_clock_layers": "Clock routing layers",
        "global_routing_signal_layers": "Signal routing layers",
        "tie_low_port": "Tie low port",
        "tie_high_port": "Tie high port",
        # Fixed typo: was "Tie sepearation value".
        "tie_separation": "Tie separation value",
    },
)
def _open_road_pdk_configuration_impl(ctx):
    """Rule implementation: package the rule's attributes into an OpenRoadPdkInfo provider."""
    return [
        OpenRoadPdkInfo(
            cell_site = ctx.attr.cell_site,
            tracks_file = ctx.file.tracks_file,
            tap_cell = ctx.attr.tap_cell,
            pin_vertical_metal_layer = ctx.attr.pin_vertical_metal_layer,
            pin_horizontal_metal_layer = ctx.attr.pin_horizontal_metal_layer,
            tapcell_distance = ctx.attr.tapcell_distance,
            endcap_cell = ctx.attr.endcap_cell,
            pdn_config = ctx.file.pdn_config,
            wire_rc_signal_metal_layer = ctx.attr.wire_rc_signal_metal_layer,
            wire_rc_clock_metal_layer = ctx.attr.wire_rc_clock_metal_layer,
            global_placement_cell_pad = ctx.attr.global_placement_cell_pad,
            do_not_use_cell_list = ctx.attr.do_not_use_cell_list,
            cts_buffer_cell = ctx.attr.cts_buffer_cell,
            fill_cells = ctx.attr.fill_cells,
            global_routing_layer_adjustments = ctx.attr.global_routing_layer_adjustments,
            global_routing_clock_layers = ctx.attr.global_routing_clock_layers,
            global_routing_signal_layers = ctx.attr.global_routing_signal_layers,
            tie_low_port = ctx.attr.tie_low_port,
            tie_high_port = ctx.attr.tie_high_port,
            tie_separation = ctx.attr.tie_separation,
        ),
    ]
# Declares the per-PDK OpenROAD settings.  Every attribute is mandatory so
# that a registered configuration is always complete.
open_road_pdk_configuration = rule(
    implementation = _open_road_pdk_configuration_impl,
    attrs = {
        "cell_site": attr.string(mandatory = True, doc = "LEF standard cell site name."),
        "tracks_file": attr.label(mandatory = True, allow_single_file = True, doc = "Track setup script."),
        "pdn_config": attr.label(mandatory = True, allow_single_file = True, doc = "PDN Config."),
        "tap_cell": attr.string(mandatory = True),
        "pin_horizontal_metal_layer": attr.string(mandatory = True),
        "pin_vertical_metal_layer": attr.string(mandatory = True),
        "tapcell_distance": attr.int(mandatory = True),
        "endcap_cell": attr.string(mandatory = True),
        "wire_rc_signal_metal_layer": attr.string(mandatory = True),
        "wire_rc_clock_metal_layer": attr.string(mandatory = True),
        "global_placement_cell_pad": attr.int(mandatory = True),
        "do_not_use_cell_list": attr.string_list(mandatory = True, doc = "This value can be an empty list if all cells should be used in P&R"),
        "cts_buffer_cell": attr.string(mandatory = True, doc = "Clock Tree Buffer cell"),
        "fill_cells": attr.string_list(mandatory = True),
        "global_routing_layer_adjustments": attr.string_dict(mandatory = True),
        "global_routing_clock_layers": attr.string(mandatory = True),
        "global_routing_signal_layers": attr.string(mandatory = True),
        "tie_low_port": attr.string(mandatory = True),
        "tie_high_port": attr.string(mandatory = True),
        "tie_separation": attr.int(mandatory = True),
    },
)
def assert_has_open_road_configuration(synthesis_info):
    """Fails the build if the PDK is missing an openROAD configuration.

    Args:
      synthesis_info: SynthesisInfo provider for the synthesized design
        (not a rule context, despite the old docs).
    """
    if not get_open_road_configuration(synthesis_info):
        fail("The PDK used for synthesis does not have an OpenROAD configuration.")
def get_open_road_configuration(synthesis_info):
    """Returns the openROAD configuration for the synthesized netlist.

    Args:
      synthesis_info: SynthesisInfo provider to extract openROAD configuration from.

    Returns:
      OpenRoadPdkInfo: The openroad pdk information.
    """
    cell_info = synthesis_info.standard_cell_info
    if not cell_info:
        fail("This rule is missing the standard cell info attached to the synthesized verilog.")
    return cell_info.open_road_configuration
|
"""main entry for langlab command-line interface"""
def main():
    """Entry point for the ``langlab`` command-line interface.

    Returns the status value reported by ``run_command`` (previously both
    return values were bound and silently discarded).
    """
    from langlab import Langlab
    # run_command() reports a (ret, fwds) pair; `fwds` is unused here.
    # TODO(review): propagate `ret` as the process exit code via sys.exit.
    ret, _fwds = Langlab().run_command()
    return ret
if __name__ == "__main__":
    main()
|
# -*- coding: utf-8 -*-
"""Tests for PySnoo components."""
|
# -*- coding: utf-8 -*-
import requests
import re
import urllib
class bcolors:
    """ANSI escape sequences used to color terminal output."""
    HEADER = '\033[95m'   # bright magenta
    OKBLUE = '\033[94m'   # bright blue
    OKGREEN = '\033[92m'  # bright green
    FAIL = '\033[91m'     # bright red
    ENDC = '\033[0m'      # reset all attributes
    BOLD = '\033[1m'      # bold
def binary_word_search(string, data):
    """Return True if ``string`` occurs in the sorted sequence ``data``.

    Iterative binary search. The previous recursive version sliced the
    list on every step (an O(n) copy per level) and could, for very large
    inputs, approach the recursion limit; this runs in O(log n) with no
    copies and no recursion. ``data`` must be sorted ascending.
    """
    string = str(string)
    lo, hi = 0, len(data)
    while lo < hi:
        mid = (lo + hi) // 2
        if data[mid] == string:
            return True
        if string < data[mid]:
            hi = mid
        else:
            lo = mid + 1
    return False
def check_word(string):
    """ Checks if a given string of characters is a playable word
    in the Polish version of Scrabble. """
    string = string.strip().lower()
    if not string.isalpha():  # Incorrect format
        return "Wygląda na to, że to nie jest prawidłowe słowo!"
    path = "./sorted_words/"
    filename = path + "words_" + string[0]
    # The word lists contain Polish diacritics; read them as UTF-8
    # explicitly instead of relying on the platform default encoding.
    with open(filename, 'r', encoding='utf-8') as wfile:
        words = wfile.readlines()
    # This is where the magic happens!
    correct = binary_word_search(string + '\n', words)
    return parse_word_response(string, correct)
def parse_word_response(word, correct):
    """Format a colored verdict for ``word``; append the sjp.pl definition
    when the word was found in the dictionary."""
    word = word.capitalize()
    if not correct:
        return (bcolors.BOLD + bcolors.FAIL + word +
                bcolors.ENDC + " nie jest słowem dopuszczalnym w Scrabble!\n")
    verdict = bcolors.BOLD + bcolors.OKGREEN + \
        word + bcolors.ENDC + " to prawidłowe słowo!\n"
    return '\n'.join((verdict, request_definition(word)))
def request_definition(word):
    """ Connects to sjp.pl and tries to define a given word
    using regular expressions to find the definition.

    Follows a dictionary redirect (base form) when present; always returns
    a printable string, including on network failure.
    """
    word_url = "https://sjp.pl/" + word
    try:
        page = requests.get(word_url, timeout=5)
        if not page:  # falsy Response => HTTP error status
            return "Nie udało się połączyć z serwerem."
    except Exception:
        return ("Wystąpił błąd przy szukaniu definicji słowa. " +
                "Prawdopodobnie nie jesteś połączony z internetem.")
    # Sorry for the ugly regexes!
    regex_def = r"(?<=(<p style=\"margin: .5em 0; font: medium/1.4 sans-serif; max-width: 32em; \">)).*(?=</p>)"
    regex_redirect = r"(?<=(<p style=\"margin: .5em 0; \">\<span class=\"lc\"> →</span> <a href=\"/)).*(?=\">)"
    regex_word = r"(?<=(\"lc\" href=\"/)).*?(?=\")"
    word_match = re.search(regex_word, page.text)
    def_match = re.search(regex_def, page.text)
    if word_match and def_match:
        # Canonical word form comes back URL-encoded.
        word = urllib.parse.unquote(word_match.group()).capitalize()
        # BUGFIX: the replace target had been corrupted into a bare quote
        # sequence (broken syntax); restore the intended HTML entity.
        define = def_match.group().replace('<br />', '\n').replace("&quot;", "\"")
        output = bcolors.BOLD + bcolors.HEADER + \
            word + bcolors.ENDC + '\n'
        output += define
        return output
    # No definition on this page: sjp.pl may redirect to the base form.
    redirect_match = re.search(regex_redirect, page.text)
    if redirect_match:
        return request_definition(redirect_match.group())
    return "Niestety www.sjp.pl nie podaje definicji tego słowa :("
# Loop for convenience! Guarded so importing this module no longer blocks
# on input(); Ctrl-D / Ctrl-C now exits cleanly instead of a traceback.
if __name__ == "__main__":
    while True:
        try:
            string = input(bcolors.HEADER +
                           "\nWpisz słowo aby je sprawdzić!" + bcolors.ENDC + "\n>")
        except (EOFError, KeyboardInterrupt):
            break
        print(check_word(string))
|
#!/usr/bin/env python
# encoding: utf-8
"""Common utilities."""
import logging as lg
import os
import os.path as osp
from contextlib import contextmanager
from shutil import rmtree
from tempfile import mkstemp
from threading import Thread
from six.moves.queue import Queue
_logger = lg.getLogger(__name__)
class HdfsError(Exception):
    """Base error class.

    :param message: Error message, optionally a %-style template.
    :param args: optional Message formatting arguments.

    The originating exception, when supplied, is available via the
    ``exception`` keyword argument.
    """

    def __init__(self, message, *args, **kwargs):
        formatted = message % args if args else message
        self.message = formatted
        super(HdfsError, self).__init__(formatted)
        self.exception = kwargs.get("exception")
class AsyncWriter(object):
    """Asynchronous publisher-consumer.

    :param consumer: Function which takes a single generator as argument.

    This class can be used to transform functions which expect a generator into
    file-like writer objects. This can make it possible to combine different APIs
    together more easily. For example, to send streaming requests:

    .. code-block:: python

        import requests as rq

        with AsyncWriter(lambda data: rq.post(URL, data=data)) as writer:
            writer.write('Hello, world!')
    """

    # Expected by pandas to write csv files (https://github.com/mtth/hdfs/pull/130).
    __iter__ = None

    def __init__(self, consumer):
        self._consumer = consumer  # callable fed the generator of written chunks
        self._queue = None         # chunk queue; non-None only inside a context
        self._reader = None        # thread running the wrapped consumer
        self._err = None           # exception captured from the consumer thread
        _logger.debug('Instantiated %r.', self)

    def __repr__(self):
        return '<%s(consumer=%r)>' % (self.__class__.__name__, self._consumer)

    def __enter__(self):
        # A live queue means we are already inside a `with` block.
        if self._queue:
            raise ValueError('Cannot nest contexts.')
        self._queue = Queue()
        self._err = None

        def consumer(data):
            """Wrapped consumer that lets us get a child's exception."""
            try:
                _logger.debug('Starting consumer.')
                self._consumer(data)
            except Exception as err:  # pylint: disable=broad-except
                # Stash the error; __exit__ re-raises it in the parent thread.
                _logger.exception('Exception in child.')
                self._err = err
            finally:
                _logger.debug('Finished consumer.')

        def reader(queue):
            """Generator read by the consumer."""
            while True:
                chunk = queue.get()
                if chunk is None:
                    # None is the sentinel pushed by __exit__ to end the stream.
                    break
                yield chunk

        self._reader = Thread(target=consumer, args=(reader(self._queue), ))
        self._reader.start()
        _logger.debug('Started child thread.')
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_value:
            _logger.debug('Exception in parent.')
        if self._reader and self._reader.is_alive():
            # Signal end-of-stream and wait for the consumer to drain.
            _logger.debug('Signaling child.')
            self._queue.put(None)
            self._reader.join()
        if self._err:
            # Surface the consumer thread's failure in the caller's thread.
            raise self._err  # pylint: disable=raising-bad-type
        else:
            _logger.debug('Child terminated without errors.')
        self._queue = None

    def flush(self):
        """Pass-through implementation."""
        pass

    def seekable(self):
        """Implement file-like method expected by certain libraries.

        `fastavro` relies on it in python 3.
        """
        return False

    def tell(self):
        """No-op implementation."""
        return 0

    def write(self, chunk):
        """Stream data to the underlying consumer.

        :param chunk: Bytes to write. These will be buffered in memory until the
          consumer reads them.
        """
        if chunk:
            # We skip empty chunks, otherwise they cause request to terminate the
            # response stream. Note that these chunks can be produced by valid
            # upstream encoders (e.g. bzip2).
            self._queue.put(chunk)
@contextmanager
def temppath(dpath=None):
    """Create a temporary path.

    :param dpath: Explicit directory name where to create the temporary path. A
      system dependent default will be used otherwise (cf. `tempfile.mkstemp`).

    Usage::

        with temppath() as path:
            pass # do stuff

    Any file or directory corresponding to the path will be automatically deleted
    afterwards.
    """
    (handle, tmp) = mkstemp(dir=dpath)
    # Reserve a unique name, then free it so the caller decides what (if
    # anything) to create at the path.
    os.close(handle)
    os.remove(tmp)
    try:
        _logger.debug('Created temporary path at %s.', tmp)
        yield tmp
    finally:
        if not osp.exists(tmp):
            _logger.debug('No temporary file or directory to delete at %s.', tmp)
        elif osp.isdir(tmp):
            rmtree(tmp)
            _logger.debug('Deleted temporary directory at %s.', tmp)
        else:
            os.remove(tmp)
            _logger.debug('Deleted temporary file at %s.', tmp)
|
from floodsystem.analysis import polyfit
from floodsystem.stationdata import build_station_list, update_water_levels
from floodsystem.datafetcher import fetch_measure_levels
import datetime
import numpy
def test_polyfit():
    """polyfit() returns a numpy poly1d and a non-negative float offset."""
    stations = build_station_list()
    update_water_levels(stations)
    # Find the polynomial and shift for the first stations and assert
    # their types. Slicing (rather than range(0, 100) indexing) also
    # works when fewer than 100 stations are returned.
    for station in stations[:100]:
        dates, levels = fetch_measure_levels(
            station.measure_id, dt=datetime.timedelta(days=2))
        # Some stations (e.g. Letcombe Bassett) return no readings.
        if not dates or not levels:
            continue
        poly, d0 = polyfit(dates, levels, 4)
        assert isinstance(poly, numpy.poly1d)
        assert isinstance(d0, float)
        # Check that d0 is bigger than 0 for all values.
        assert d0 >= 0
|
# -*- coding: utf-8 -*-
"""
This source code file is licensed under the GNU General Public License Version 3.
For full details, please refer to the file "LICENSE.txt" which is provided as part of this source code package.
Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
"""
import time
import cv2
import sys
import os
import queue
__dir__ = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, __dir__)
from WrappedDeviceAPI import *
def sample():
    """Walkthrough of the IDeviceAPI surface: initialization, frame
    grabbing, touch commands (TouchCMD) and device commands (DeviceCMD).

    Original documentation strings translated from Chinese; all executable
    statements are unchanged.
    """
    deviceAPI = IDeviceAPI('Android')
    '''
    Initialize the device.
    param[0], str:  device serial number; may be None when a single device
                    is attached, required when several are attached
    param[1], bool: True for portrait, False for landscape
    param[2], int:  length of the long screen edge
    param[3], str:  log directory, default /tmp/LogDeviceAPI
    param[4], enum: log level, one of [LOG_DEBUG, LOG_INFO, LOG_WARNING,
                    LOG_ERROR, LOG_CRITICAL], default LOG_DEBUG
    param[5], bool: whether to show captured images
    param[5], dict: extra component parameters (e.g. port numbers)
    return, bool: True on success, False on failure
    '''
    if not deviceAPI.Initialize('908fedc0', False, 720, 1280, '/tmp/LogDeviceAPI', LOG_DEBUG):
        return False
    '''
    Fetch the current image frame.
    return: image as an OpenCV Mat (None on failure)
    '''
    frame = deviceAPI.GetFrame()
    if frame is None:
        return False
    '''
    ==========================================================================================================
    ============================================TouchCMD==================================================
    ==========================================================================================================
    Send a touch action to the device.
    aType: action type, one of [TOUCH_CLICK, TOUCH_DOWN, TOUCH_UP, TOUCH_SWIPE, TOUCH_MOVE]
    sx: x coordinate - press point for CLICK/DOWN, start point for SWIPE/MOVE
    sy: y coordinate - press point for CLICK/DOWN, start point for SWIPE/MOVE
    ex: end-point x coordinate for SWIPE/MOVE
    ey: end-point y coordinate for SWIPE/MOVE
    DaType: backend used, minitouch or ADB, one of [DACT_TOUCH, DACT_ADB]; default DACT_TOUCH
    contact: touch contact index, default 0
    durationMS: duration of one action; used by CLICK (default -1) and SWIPE (default 50)
    needUp: SWIPE only - whether to lift the contact afterwards, default True
    return: True or False
    '''
    # deviceAPI.TouchCMD(aType=[TOUCH_CLICK, TOUCH_DOWN, TOUCH_UP, TOUCH_SWIPE, TOUCH_MOVE],
    #                    sx=int,
    #                    sy=int,
    #                    ex=int,
    #                    ey=int,
    #                    contact=0,
    #                    durationMS=50,
    #                    needUp=True,
    #                    wait_time=0)
    '''
    Perform a click.
    sx/sy: coordinates in the coordinate system declared at Initialize time
    contact: touch contact index, default 0
    durantionMS: action duration, default -1
    wait_time: device-side wait after the action, in milliseconds, default 0
    return True or False
    '''
    if not deviceAPI.TouchCMD(aType=TOUCH_CLICK, sx=300, sy=300, contact=0, durantionMS=-1, wait_time=0):
        return False
    '''
    Press down.
    sx/sy: coordinates in the initialized coordinate system
    contact: touch contact index, default 0
    wait_time: device-side wait after the action, in milliseconds, default 0
    return True or False
    '''
    if not deviceAPI.TouchCMD(aType=TOUCH_DOWN, sx=300, sy=300, contact=0, wait_time=0):
        return False
    '''
    Lift the contact up.
    wait_time: device-side wait after the action, in seconds, default 0
    return True or False
    '''
    if not deviceAPI.TouchCMD(aType=TOUCH_UP, contact=1, wait_time=0):
        return False
    '''
    Swipe.
    sx, sy: start point; ex, ey: end point
    DaType: backend, minitouch or ADB [DACT_TOUCH, DACT_ADB], default DACT_TOUCH
    contact: touch contact index, default 0
    durantionMS: action duration, default 50
    needUp: whether to lift afterwards, default True
    wait_time: device-side wait after the action, in milliseconds, default 0
    return True or False
    '''
    if not deviceAPI.TouchCMD(aType=TOUCH_SWIPE,
                              sx=500,
                              sy=500,
                              ex=600,
                              ey=600,
                              contact=0,
                              durationMS=500,
                              needUp=False,
                              wait_time=0):
        return False
    '''
    Move. Unlike swipe it only has an end point; several moves can be
    combined into one swipe.
    sx/sy: coordinates in the initialized coordinate system
    contact: touch contact index, default 0
    wait_time: device-side wait after the action, in milliseconds, default 0
    return True or False
    '''
    if not deviceAPI.TouchCMD(aType=TOUCH_MOVE, sx=300, sy=300, contact=0, wait_time=0):
        return False
    '''
    Swipe-move. Unlike move it interpolates intermediate points.
    sx/sy: coordinates in the initialized coordinate system
    contact: touch contact index, default 0
    wait_time: device-side wait after the action, in milliseconds, default 0
    return True or False
    '''
    if not deviceAPI.TouchCMD(aType=TOUCH_SWIPEMOVE, sx=300, sy=300, durationMS=50, contact=0, wait_time=0):
        return False
    '''
    Reset the touch state.
    wait_time: device-side wait after the action, in milliseconds, default 0
    return True or False
    '''
    if not deviceAPI.TouchCMD(aType=TOUCH_RESET, wait_time=0):
        return False
    '''
    ==========================================================================================================
    ============================================DeviceCMD=================================================
    ==========================================================================================================
    Execute device-level operations.
    aType: operation type [DEVICE_INSTALL, DEVICE_START, DEVICE_EXIT, DEVICE_CURAPP, DEVICE_CLEARAPP, DEVICE_KEY,
           DEVICE_TEXT, DEVICE_SLEEP, DEVICE_WAKE, DEVICE_WMSIZE, DEVICE_BINDRO, DEVICE_SCREENSHOT,
           DEVICE_SCREENORI]
    APKPath: installer package path
    PKGName: package name
    ActivityName: the package's activity
    key:
    '''
    # deviceAPI.DeviceCMD(aType=[DEVICE_INSTALL, DEVICE_START, DEVICE_EXIT, DEVICE_CURAPP, DEVICE_CLEARAPP, DEVICE_KEY,
    #                            DEVICE_TEXT, DEVICE_SLEEP, DEVICE_WAKE, DEVICE_WMSIZE, DEVICE_BINDRO, DEVICE_SCREENSHOT,
    #                            DEVICE_SCREENORI],
    #                     APKPath=str,
    #                     PKGName=str,
    #                     ActivityName=str,
    #                     key=str,
    #                     text=str,
    #                     rotation=str,
    #                     targetPath=str)
    '''
    DEVICE_INSTALL installs an app.
    APKPath: required, path of the apk package on the PC
    return True or False
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_INSTALL, APKPath='/home/ting/kidting/game_ai_sdk/data/qqspeed/game.apk'):
        return False
    '''
    DEVICE_START launches an app.
    PKGName: package to launch
    ActivityName: activity to launch within the package
    return True or False
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_START, PKGName='com.tencent.tmgp.speedmobile',
                               ActivityName='com.tencent.tmgp.speedmobile.speedmobile'):
        return False
    '''
    DEVICE_CURAPP returns the foreground app.
    return dict: currentAPP = {'package': str(), 'activity': str()}
    '''
    currentAPP = deviceAPI.DeviceCMD(aType=DEVICE_CURAPP)
    '''
    DEVICE_PARAM returns the phone's performance stats while the app runs.
    PKGName: required, the app's package name
    return dict with CPU, memory, battery and temperature readings:
    deviceParam = {
        'cpu': float,
        'mem': float,
        'temperature': float,
        'battery': int
    }
    '''
    deviceParam = deviceAPI.DeviceCMD(aType=DEVICE_PARAM, PKGName='com.tencent.tmgp.speedmobile')
    '''
    DEVICE_CLEARAPP clears the app's data.
    PKGName: required, the app's package name
    return True or False
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_CLEARAPP, PKGName='com.tencent.tmgp.speedmobile'):
        return False
    '''
    DEVICE_EXIT quits the app.
    PKGName: required, the app's package name
    return True or False
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_EXIT, PKGName='com.tencent.tmgp.speedmobile'):
        return False
    '''
    DEVICE_KEY sends a phone key press.
    key: required, str, the key to press
    return True or False
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_KEY, key='cmd'):
        return False
    '''
    DEVICE_TEXT types a string.
    text: required, str, the text to type
    return True or False
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_TEXT, text='abc'):
        return False
    '''
    DEVICE_SLEEP locks the screen.
    return True or False
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_SLEEP):
        return False
    '''
    DEVICE_WAKE unlocks / wakes the device.
    return True or False
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_WAKE):
        return False
    '''
    DEVICE_WMSIZE returns the device resolution.
    return height, width
    '''
    height, width = deviceAPI.DeviceCMD(aType=DEVICE_WMSIZE)
    if height == -1 or width == -1:
        return False
    # '''
    # DEVICE_BINDRO locks the device orientation.
    # return height, width
    # '''
    # height, width = deviceAPI.DeviceCMD(aType=DEVICE_BINDRO)
    # if height == -1 or width == -1:
    #     return False
    '''
    DEVICE_SCREENSHOT takes a screenshot.
    targetPath: where to store the capture on the PC
    return True or False
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_SCREENSHOT, targetPath='./test.png'):
        return False
    '''
    DEVICE_SCREENORI reports whether the device is in portrait or landscape.
    return UI_SCREEN_ORI_PORTRAIT or UI_SCREEN_ORI_LANDSCAPE
    '''
    res = deviceAPI.DeviceCMD(aType=DEVICE_SCREENORI)
    if res == UI_SCREEN_ORI_PORTRAIT:
        print('竖屏')
    elif res == UI_SCREEN_ORI_LANDSCAPE:
        print('横屏')
    else:
        return False
    '''
    Get the maximum number of touch contacts.
    return int
    '''
    maxContact = deviceAPI.DeviceCMD(aType=DEVICE_MAXCONTACT)
    if maxContact < 0:
        return False
    '''
    Perform a click through an ADB command.
    return int
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_CLICK, px=300, py=300):
        return False
    '''
    Perform a swipe through an ADB command (a click must happen first to see
    the effect; the pointer jumps instantly to the target coordinates).
    return int
    '''
    if not deviceAPI.DeviceCMD(aType=DEVICE_SWIPE, sx=300, sy=300, ex=500, ey=500, durationMS=50):
        return False
    '''
    Wait for all queued commands to reach the device; call on program exit.
    '''
    deviceAPI.Finish()
    '''
    ==========================================================================================================
    ==========================================================================================================
    ==========================================================================================================
    '''
def demo1():
    """Drive two Android devices concurrently.

    Presses down on each device, issues a few SWIPEMOVE gestures
    alternating between them, then continuously polls and displays both
    device frames. Large blocks of commented-out experimentation code
    were removed; the executed statements are unchanged.
    """
    deviceAPI1 = IDeviceAPI('Android')
    deviceAPI2 = IDeviceAPI('Android')
    # Each device needs its own minitouch/minicap ports.
    deviceAPI1.Initialize(deviceSerial='4da2dea3', height=200, width=1280, logDir='./log', minitouchPort=1122, minicapPort=1133)
    deviceAPI2.Initialize(deviceSerial='9889db384258523633', height=200, width=1280, logDir='./log', minitouchPort=1144, minicapPort=1155)
    if not deviceAPI1.TouchCMD(aType=TOUCH_DOWN, sx=640, sy=100, wait_time=1000):
        print('click failed')
    if not deviceAPI2.TouchCMD(aType=TOUCH_DOWN, sx=200, sy=200, wait_time=50):
        print('click failed')
    if not deviceAPI1.TouchCMD(aType=TOUCH_SWIPEMOVE, sx=640, sy=300, durationMS=1000, contact=0, wait_time=1000):
        return False
    if not deviceAPI2.TouchCMD(aType=TOUCH_SWIPEMOVE, sx=100, sy=100, durationMS=1000, contact=0, wait_time=1000):
        return False
    if not deviceAPI1.TouchCMD(aType=TOUCH_SWIPEMOVE, sx=100, sy=100, durationMS=1000, contact=0, wait_time=1000):
        return False
    if not deviceAPI2.TouchCMD(aType=TOUCH_SWIPEMOVE, sx=200, sy=200, durationMS=1000, contact=0, wait_time=1000):
        return False
    time.sleep(5)
    # Poll frames from both devices and show them side by side.
    for i in range(100000):
        frame1 = deviceAPI1.GetFrame()
        frame2 = deviceAPI2.GetFrame()
        if frame1 is not None:
            cv2.imshow('test1', frame1)
            cv2.waitKey(1)
        if frame2 is not None:
            cv2.imshow('test2', frame2)
            cv2.waitKey(1)
def demo():
    """Drive one Android device through a short touch/swipe sequence, then
    pump captured frames to an OpenCV window forever.

    Returns False as soon as a mandatory touch command fails; the display
    loop at the end never exits on its own.
    """
    device = IDeviceAPI('Android')
    ok, err = device.Initialize(isPortrait=False, long_edge=1280,
                                logDir='./log', level=LOG_INFO,
                                showRawScreen=False)
    print(ok)
    print(err)
    # Long press at a fixed point; abort the demo if it fails.
    if not device.TouchCMD(aType=TOUCH_CLICK, sx=1130, sy=442, contact=0,
                           durationMS=5000, wait_time=1000):
        return False
    # Vertical swipe and a second long press; failures are only reported.
    if not device.DeviceCMD(aType=DEVICE_SWIPE, sx=640, sy=100, ex=640,
                            ey=300, durationMS=1000):
        print('click failed')
    if not device.TouchCMD(aType=TOUCH_CLICK, sx=1270, sy=300,
                           durationMS=5000, wait_time=1000):
        print('click failed')
    # Two chained swipe-moves, timed for ad-hoc latency inspection.
    swipe_started = time.time()
    if not device.TouchCMD(aType=TOUCH_SWIPEMOVE, sx=100, sy=300,
                           durationMS=1000, contact=0, wait_time=1000):
        return False
    if not device.TouchCMD(aType=TOUCH_SWIPEMOVE, sx=100, sy=100,
                           durationMS=1000, contact=0, wait_time=1000):
        return False
    swipe_finished = time.time()
    time.sleep(5)
    # Frame pump: fetch and display frames until the process is killed.
    frames_shown = 0
    pump_started = time.time()
    while True:
        frame, error = device.GetFrame()
        if frame is not None:
            frames_shown += 1
            cv2.imshow('test1', frame)
            cv2.waitKey(1)
    # NOTE: unreachable — the loop above never terminates.
    pump_finished = time.time()
# Entry point: run the interactive single-device demo.
if __name__ == '__main__':
    # sample()
    demo()
|
"""
Tests for the Keys endpoint.
"""
import pytest
# Fixture identifiers matching the data recorded in the VCR cassettes.
PROJECT_ID = "454087345e09f3e7e7eae3.57891254"
KEY_ID = 34089721
@pytest.mark.vcr
def test_keys(client):
    """Fetch a page of keys and verify the pagination attributes."""
    params = {
        "page": 2,
        "limit": 3,
        "disable_references": "1",
        "filter_archived": "exclude"
    }
    keys = client.keys(PROJECT_ID, params)
    assert keys.project_id == PROJECT_ID
    assert keys.items[0].key_id == KEY_ID
    assert keys.current_page == 2
    assert keys.total_count == 13
    assert keys.page_count == 5
    assert keys.limit == 3
    # Page 2 of 5: neither first nor last, so both neighbours exist.
    assert not keys.is_last_page()
    assert not keys.is_first_page()
    assert keys.has_next_page()
    assert keys.has_prev_page()
@pytest.mark.vcr
def test_create_keys(client):
    """Create two keys in a single bulk request and verify the echo."""
    payload = [
        {
            "key_name": "python_1",
            "platforms": ["ios", "android"],
            "description": "Created by Python"
        },
        {
            "key_name": "python_2",
            "platforms": ["web"],
            "translations": [
                {
                    "language_iso": "en",
                    "translation": "Hi from Python"
                }
            ]
        }
    ]
    keys = client.create_keys(PROJECT_ID, payload)
    assert keys.project_id == PROJECT_ID
    assert len(keys.items) == 2
    first, second = keys.items
    assert first.key_name['ios'] == "python_1"
    assert "ios" in first.platforms
    assert "web" not in first.platforms
    assert first.description == "Created by Python"
    assert second.key_name['web'] == "python_2"
    assert "ios" not in second.platforms
    assert "web" in second.platforms
    # NOTE(review): index 2 matches the ordering recorded in the cassette.
    assert second.translations[2]["language_iso"] == "en"
    assert second.translations[2]["translation"] == "Hi from Python"
@pytest.mark.vcr
def test_create_key(client):
    """Create a single key carrying a Russian translation."""
    attrs = {
        "key_name": "python_3",
        "platforms": ["ios", "android"],
        "translations": [
            {
                "language_iso": "ru_RU",
                "translation": "Привет от Python"
            }
        ]
    }
    keys = client.create_keys(PROJECT_ID, attrs)
    assert keys.project_id == PROJECT_ID
    key = keys.items[0]
    assert key.key_name['ios'] == "python_3"
    assert "ios" in key.platforms
    assert "web" not in key.platforms
    assert key.translations[0]["language_iso"] == "ru_RU"
    assert key.translations[0]["translation"] == "Привет от Python"
@pytest.mark.vcr
def test_key(client):
    """Tests fetching of a key

    Checks every attribute exposed on the key model against the values
    recorded in the VCR cassette.
    """
    key = client.key(PROJECT_ID, KEY_ID, {"disable_references": "1"})
    assert key.project_id == PROJECT_ID
    assert key.branch == 'master'
    assert key.key_id == KEY_ID
    assert key.created_at == "2019-12-27 12:53:16 (Etc/UTC)"
    assert key.created_at_timestamp == 1577451196
    assert key.key_name['ios'] == "manual_setup"
    assert key.filenames['android'] == ''
    assert key.description == 'Updated by Python'
    assert "web" in key.platforms
    assert "python" in key.tags
    assert key.comments == []
    assert key.screenshots[0]['screenshot_id'] == 343286
    assert key.translations[0]['translation_id'] == 220681440
    assert not key.is_plural
    assert key.plural_name == ''
    assert not key.is_hidden
    assert not key.is_archived
    assert key.context == ''
    assert key.base_words == 2
    assert key.char_limit == 0
    assert key.custom_attributes == ''
    assert key.modified_at == "2020-06-20 13:02:37 (Etc/UTC)"
    assert key.modified_at_timestamp == 1592658157
    assert key.translations_modified_at == "2020-06-20 12:42:24 (Etc/UTC)"
    assert key.translations_modified_at_timestamp == 1592656944
@pytest.mark.vcr
def test_update_key(client):
    """Update a key's description and tags, then verify the echo."""
    attrs = {
        "description": "Updated by Python",
        "tags": ["python"]
    }
    key = client.update_key(PROJECT_ID, KEY_ID, attrs)
    assert key.project_id == PROJECT_ID
    assert key.key_id == KEY_ID
    assert key.description == "Updated by Python"
    assert "python" in key.tags
    assert key.modified_at_timestamp == 1592658157
@pytest.mark.vcr
def test_update_keys(client):
    """Update two keys in one bulk request and verify both echoes."""
    payload = [
        {
            "key_id": 48855757,
            "description": "Bulk updated",
            "tags": ["bulk-python"]
        },
        {
            "key_id": 48855758,
            "translations": [
                {
                    "language_iso": "ru_RU",
                    "translation": "Обновлённый перевод Python"
                }
            ]
        }
    ]
    keys = client.update_keys(PROJECT_ID, payload)
    assert keys.project_id == PROJECT_ID
    assert len(keys.items) == 2
    first, second = keys.items
    assert first.key_id == 48855757
    assert first.description == "Bulk updated"
    assert "bulk-python" in first.tags
    assert second.key_id == 48855758
    assert second.translations[0]["language_iso"] == "ru_RU"
    assert second.translations[0]["translation"] == "Обновлённый перевод Python"
@pytest.mark.vcr
def test_delete_key(client):
    """Delete one key and check the confirmation payload."""
    response = client.delete_key(PROJECT_ID, 48855760)
    assert response['project_id'] == PROJECT_ID
    assert response['key_removed']
@pytest.mark.vcr
def test_delete_keys(client):
    """Delete several keys at once and check the confirmation payload."""
    response = client.delete_keys(PROJECT_ID, [48855757, 48855758])
    assert response['project_id'] == PROJECT_ID
    assert response['keys_removed']
|
import asyncio
import pytest
import cryptocom.exchange as cro
@pytest.mark.asyncio
async def test_account_get_balance(account: cro.Account):
    """Every known coin has an entry; the funded coins are non-trivial."""
    balances = await account.get_balance()
    assert balances[cro.Coin.CRO].available > 1
    assert balances[cro.Coin.USDT].available > 1
    for coin in cro.Coin:
        assert coin.value in balances
@pytest.mark.asyncio
async def test_no_dublicated_mass_limit_orders(
        exchange: cro.Exchange, account: cro.Account):
    """Fire many concurrent limit buys; each must appear exactly once in
    the open-order list (no duplicates, none lost)."""
    # Buy far below market so nothing actually fills.
    buy_price = round(await exchange.get_price(cro.Pair.CRO_USDT) / 2, 4)
    orders_count = 185
    order_ids = await asyncio.gather(*[
        account.buy_limit(
            cro.Pair.CRO_USDT, 0.001,
            round(buy_price / 1000 + i / 10000.0, 4)
        )
        for i in range(orders_count)
    ])
    real_orders = await asyncio.gather(*[
        account.get_order(id_)
        for id_ in order_ids
    ])
    for order in real_orders:
        assert order.is_active, order
    open_orders = await account.get_open_orders(cro.Pair.CRO_USDT)
    open_order_ids = sorted(o.id for o in open_orders if o.is_active)
    assert len(real_orders) == len(open_order_ids) == orders_count
    assert open_order_ids == sorted(order_ids)
@pytest.mark.asyncio
async def test_account_limit_orders(
        account: cro.Account, exchange: cro.Exchange):
    """Place buy/sell limit orders, cancel them all, and verify they end
    up closed and present in the order history."""
    # Price far below market so the buys rest on the book.
    buy_price = round(await exchange.get_price(cro.Pair.CRO_USDT) / 10, 4)
    order_ids = await asyncio.gather(*[
        account.buy_limit(cro.Pair.CRO_USDT, 0.001, buy_price)
        for i in range(25)
    ])
    order_ids += await asyncio.gather(*[
        account.sell_limit(cro.Pair.CRO_USDT, 0.01, round(buy_price * 2, 4))
        for i in range(25)
    ])
    # NOTE(review): this first history fetch is overwritten below without
    # being used — possibly only here to exercise the endpoint; confirm.
    all_orders = await account.get_orders_history(
        cro.Pair.CRO_USDT, page_size=50)
    await account.cancel_order(
        order_ids[0], cro.Pair.CRO_USDT, check_status=True)
    order = await account.get_order(order_ids[0])
    assert order.is_canceled
    for order_id in order_ids[1:]:
        await account.cancel_order(order_id, cro.Pair.CRO_USDT)
    open_orders = [
        order
        for order in await account.get_open_orders()
        if order.id in order_ids
    ]
    assert not open_orders
    all_orders = await account.get_orders_history(
        cro.Pair.CRO_USDT, page_size=50)
    ids = [order.id for order in all_orders]
    assert set(ids) & set(order_ids)
async def make_trades(account, exchange, order_ids):
    """Execute one market buy and one market sell, appending the filled
    order ids to the shared `order_ids` dict ('buy'/'sell' lists)."""
    price = await exchange.get_price(cro.Pair.CRO_USDT)
    order_id = await account.buy_market(cro.Pair.CRO_USDT, round(price, 4))
    order = await account.get_order(order_id)
    assert order.is_filled
    assert order_id == order.id
    order_ids['buy'].append(order.id)
    order_id = await account.sell_market(cro.Pair.CRO_USDT, 1)
    order = await account.get_order(order_id)
    assert order.is_filled
    assert order_id == order.id
    order_ids['sell'].append(order.id)
@pytest.mark.asyncio
async def test_account_market_orders(
        account: cro.Account, exchange: cro.Exchange):
    """Run ten concurrent buy/sell round-trips and verify each reported
    trade is attributed to the correct side."""
    order_ids = {'buy': [], 'sell': []}
    await asyncio.gather(*[
        make_trades(account, exchange, order_ids) for _ in range(10)
    ])
    trades = await account.get_trades(cro.Pair.CRO_USDT, page_size=20)
    for trade in trades:
        if trade.is_buy:
            assert trade.order_id in order_ids['buy']
            assert trade.order_id not in order_ids['sell']
        elif trade.is_sell:
            assert trade.order_id in order_ids['sell']
            assert trade.order_id not in order_ids['buy']
|
# Paint-coverage exercise (prompts in Portuguese): read wall width and
# height in metres, print the wall area and the litres of paint needed.
l = float(input('Entre a largura da parede em metros.: '))
a = float(input('Entre a altura da parede em metros..: '))
area = l * a
# Coverage assumption baked into the formula: 1 litre paints 2 m².
tinta = (area / 2)
print()
print('Área da parede......: {:.3f} m²'.format(area))
print('Quantidade de tinta.: {} litros'.format(tinta))
# Keep the console window open until the user presses Enter.
input()
|
""" Forms for searching """
from flask_wtf import FlaskForm
from wtforms import SubmitField, TextField, validators
from wtforms.fields import StringField
from wtforms.widgets import TextArea
class SearchForm(FlaskForm):
    """Form to search an ingredient by name.

    FIX: ``TextField`` is a deprecated alias removed in WTForms 3.x;
    ``StringField`` (already imported in this module) is the identical
    modern replacement.
    """
    # Name is required and limited to 1-200 characters.
    name = StringField("Name", [validators.Length(min=1, max=200), validators.DataRequired()])
    submit = SubmitField('Submit')
|
# Generated by Django 3.1.7 on 2021-04-09 07:13
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: create the ``User`` model linking an IP address to
    a poll question (used for per-IP vote tracking)."""
    dependencies = [
        ('polls', '0002_question_ip_address'),
    ]
    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Stored as text; 20 chars fits dotted-quad IPv4 addresses.
                ('ip_address', models.CharField(default='0.0.0.0', max_length=20)),
                # Each record belongs to one question; removed with it.
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.question')),
            ],
        ),
    ]
|
import os
import discord
import asyncio
# Log the discord.py library version at startup for easier debugging.
print("Discord Version: {} ".format(discord.__version__))
class App(discord.Client):
    """Minimal command bot that answers a handful of '!' commands."""

    async def on_ready(self):
        """Log the bot's identity once the gateway connection is up."""
        print("Logged in as {} with ID {}".format(self.user.name, self.user.id))

    async def on_message(self, message):
        """Dispatch '!' commands; ignore the bot's own messages."""
        print("{}: {}: {}: {}".format(message.channel, message.author, message.author.name, message.content))
        if message.author == self.user:
            return
        elif message.content.startswith('!meme'):
            # BUG FIX: `channel` was an undefined name (NameError at
            # runtime); the destination lives on the triggering message.
            await message.channel.send(file=discord.File('meme.jpg'))
        elif message.content.startswith('!alert'):
            await message.channel.send('ALERT')
        elif message.content.startswith('!sleepy'):
            await message.channel.send('Is sleepy...')
        elif message.content.startswith('!logout'):
            await self.close()
# Client Token — read from the 'token' environment variable; raises
# KeyError if it is not set.
token = os.environ['token']
# Assign Lowercase App Name
app = App()
# Run App with Token (blocks until the client disconnects)
app.run(token)
|
"""
Import all gentoo based modules.
All manually entered modules can be placed in the following import section.
Portage_Gen based projects will be generated automatically as soon as we
can find an index generated by portage info.
"""
import logging
import os
from benchbuild.settings import CFG
from . import (autoportage, bzip2, crafty, eix, gentoo, gzip, info, lammps,
postgresql, sevenz, x264, xz)
LOG = logging.getLogger(__name__)
def __initialize_dynamic_projects__(autotest_path):
    """Register one AutoPortage project per '<domain>/<name>' line found
    in the portage autotest index file, if that file exists."""
    from benchbuild.projects.gentoo.portage_gen import PortageFactory

    LOG.debug("Loading AutoPortage projects from %s", autotest_path)
    if not os.path.exists(autotest_path):
        return
    with open(autotest_path, 'r') as ebuilds:
        for line in ebuilds:
            parts = line.strip('\n').split('/')
            domain, name = parts[0], parts[1]
            PortageFactory("Auto{0}{1}".format(domain, name),
                           domain + "_" + name, domain)
# Build the dynamic project list at import time from the configured index.
__initialize_dynamic_projects__(str(CFG['gentoo']['autotest_loc']))
|
#!/usr/bin/env python
# stack_landsat.py
# Lawrence Dudley 4/2/2017
''' This script takes a landsat MTL.txt file as input, along with a list
of desired bands to stack (optional) and creates a geotiff image containing
these image bands.
'''
from osgeo import gdal, gdal_array
import numpy as np
import glob
import argparse
import os
def get_bands(input_mtl, bands_for_stack):
    ''' Return the paths of the requested band files (*.TIF) found in the
    same directory as the given MTL file. '''
    mtl_basename = os.path.basename(input_mtl)
    img_dir = os.path.abspath(input_mtl).replace(mtl_basename, "")
    # e.g. band "4" -> filename fragment "B4.TIF"
    wanted = ["B" + band_no + ".TIF" for band_no in bands_for_stack]
    return [path
            for path in glob.glob(img_dir + "*.TIF")
            if any(tag in path for tag in wanted)]
def gdal2array(image_name):
    ''' Read a raster image into a numpy array via GDAL, releasing the
    dataset handle before returning. '''
    dataset = gdal.Open(image_name)
    pixels = dataset.ReadAsArray()
    dataset = None  # drop the reference so GDAL closes the file
    return pixels
def stack_bands(bands, output_name):
    ''' Stack the band files into one multi-band GeoTIFF, using the first
    band as the georeferencing prototype. '''
    layers = np.array([gdal2array(band) for band in bands])
    prototype = gdal.Open(bands[0])
    gdal_array.SaveArray(layers, output_name, 'gtiff', prototype)
    prototype = None  # drop the reference so GDAL closes the file
def run():
    ''' Parse command-line arguments and write <scene>_stack.tif next to
    the input MTL file. '''
    parser = argparse.ArgumentParser()
    parser.add_argument("-i",
                        "--input",
                        type=str,
                        help="Input MTL.txt file for landsat image")
    parser.add_argument("-b",
                        "--bands",
                        type=str,
                        default=None,
                        help="Specify which bands to stack. Default is B1 - 8")
    args = parser.parse_args()
    input_mtl = args.input
    # Fall back to bands 1-8 when none were requested on the command line.
    band_list = (args.bands or "1 2 3 4 5 6 7 8").split()
    bands_for_stack = get_bands(input_mtl, band_list)
    output_name = input_mtl.replace("MTL.txt", "stack.tif")
    stack_bands(bands_for_stack, output_name)
# Script entry point.
if __name__ == "__main__":
    run()
|
'''
this code is for transforming real data obtained from unicom to experimental data
'''
import pandas as pd
import numpy as np
# Convert real provider coordinates from a CSV export into a synthetic
# facility-location instance and write it as a whitespace-separated text
# file alongside the CSV.
# NOTE(review): the '..\data\\' paths use literal backslashes — they only
# resolve on Windows, and '\d' is an invalid escape sequence that newer
# Python versions warn about; confirm before running elsewhere.
file = 'huawei3G'
df = pd.read_csv('..\data\\' + file + '.csv')
mu, sigma = 0, 1
min_num_capacity, max_num_capacity = 5, 10
x_provider = np.round(df['Lon'].values, 2)
y_provider = np.round(df['Lat'].values, 2)
# One capacity-level count shared by all providers (randint excludes max).
num_capacity = np.random.randint(min_num_capacity, max_num_capacity)
num_provider = x_provider.shape[0]
num_customer = num_provider * np.random.randint(2, 4)
# Customers are drawn from a standard normal, providers from real data.
x_customer = np.round(np.random.normal(mu, sigma, num_customer), 2)
y_customer = np.round(np.random.normal(mu, sigma, num_customer), 2)
# Min-max normalise every coordinate set onto [0, 100].
min_x, max_x = min(x_provider), max(x_provider)
x_provider = (x_provider - min_x) / (max_x - min_x) * 100
min_y, max_y = min(y_provider), max(y_provider)
y_provider = (y_provider - min_y) / (max_y - min_y) * 100
min_x, max_x = min(x_customer), max(x_customer)
x_customer = (x_customer - min_x) / (max_x - min_x) * 100
min_y, max_y = min(y_customer), max(y_customer)
y_customer = (y_customer - min_y) / (max_y - min_y) * 100
min_demand, max_demand = 1, 10
min_capacity, max_capacity = 0, 50
min_cost, max_cost = 100, 1000
# ratio of old provider to all provider
old_ratio = 0.3
with open('..\data\\' + file + '.txt', 'w') as f:
    f.write(str(num_provider) + '\n')
    # generate the demand of each customer
    demand_customer = np.random.randint(min_demand, max_demand, num_customer)
    # generate the capacity and corresponding cost of provider
    for i in range(num_provider):
        flag = np.random.random()
        if flag < old_ratio:  # a provider is old if flag is less than old_ratio
            # Old providers get one extra zero-cost entry (existing site).
            capacity = np.random.randint(min_capacity, max_capacity, num_capacity - 1)
            capacity = list(capacity) + [0]
            cost = np.random.randint(min_cost, max_cost, num_capacity - 2)
            cost = list(cost) + [0, 0]
        else:
            capacity = np.random.randint(min_capacity, max_capacity, num_capacity - 1)
            capacity = list(capacity) + [0]
            cost = np.random.randint(min_cost, max_cost, num_capacity - 1)
            cost = list(cost) + [0]
        # Costs and capacities are written in ascending order.
        cost = sorted(cost)
        capacity = sorted(capacity)
        f.write(str(x_provider[i]) + ' ' + str(y_provider[i]) + ' ' + str(num_capacity) + ' ')
        for j in range(len(capacity)):
            f.write(str(capacity[j]) + ' ')
        for j in range(len(cost) - 1):
            f.write(str(cost[j]) + ' ')
        f.write(str(cost[len(cost) - 1]) + '\n')
    f.write(str(num_customer) + '\n')
    for i in range(num_customer):
        f.write(str(x_customer[i]) + ' ' + str(y_customer[i]) + ' ' + str(demand_customer[i]) + '\n')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import gc
import glob
import pathlib
import warnings
from pathlib import Path
from random import shuffle, random
import torch
import torch.nn.functional as F
import yaml
from PIL import Image, ImageOps
from pynvml import *
from tqdm import tqdm
from colors import print_cyan, print_blue, print_green
from postfx import apply_to_pil
from prompt_translate import translate
from rudalle import get_rudalle_model, get_tokenizer, get_vae, get_realesrgan
from rudalle.image_prompts import ImagePrompts
from rudalle.pipelines import generate_images, super_resolution
from rudalle.utils import seed_everything
# Load the user configuration that drives every knob below.
with open(f'config.yaml', 'r') as f:
    cfg = yaml.load(f, Loader=yaml.FullLoader)
# ############ ARGS HERE ################
# Fallback prompt (escaped Cyrillic: "Ричард Д. Джеймс Aphex Twin").
DEFAULT_CAPTION = "\u0420\u0438\u0447\u0430\u0440\u0434 \u0414. \u0414\u0436\u0435\u0439\u043C\u0441 Aphex Twin"
CAPTION = cfg['gen_prompt'] if cfg['gen_prompt'] else DEFAULT_CAPTION
TRANSLATE = cfg['translate']
MODEL_NAME = cfg['gen_model'] if cfg['gen_model'] else 'Malevich'
# A custom model name implies a fine-tuned checkpoint on disk.
CHECKPOINT_PATH = f'checkpoints/{MODEL_NAME}.pt' if cfg['gen_model'] else None
FILE_NAME = cfg['file_name'] if cfg['file_name'] else CAPTION
OUTPUT_PATH = f'content/output/{MODEL_NAME}'
if cfg['output_dir']:
    OUTPUT_PATH = OUTPUT_PATH + '/' + cfg['output_dir']
PROMPT_PATH = f"content/Data/{MODEL_NAME}/Prompt" if cfg['use_image_prompts'] else None
# Sampling hyper-parameter lists, cycled/shuffled per generated image.
TEMPERATURE = cfg['temperature']
IMAGE_COUNT = cfg['image_count']
SUPER_RESOLUTION = cfg['super_res']
SR = cfg['upscale']
INCREMENT_FROM = 0  # increment file name number from here
TOP_K = cfg['top_k']
TOP_P = cfg['top_p']
POST_FX = cfg['post_fx']
# ######################################
warnings.filterwarnings("ignore", category=UserWarning)
os.environ['PYTORCH_CUDA_ALLOC_CONF'] = f"max_split_size_mb:2048"
dev = torch.device('cuda')
# Dummy convolution on the GPU — presumably warms up CUDA/cuDNN before
# the large model allocations below; confirm it is still needed.
torch.nn.functional.conv2d(
    torch.zeros(32, 32, 32, 32, device=dev),
    torch.zeros(32, 32, 32, 32, device=dev)
)
gc.collect()
torch.cuda.empty_cache()
# NOTE(review): the base weights are always 'Malevich'; a configured
# gen_model only changes which fine-tuned state dict is loaded on top.
model = get_rudalle_model('Malevich', pretrained=True, fp16=True, device='cuda')
if CHECKPOINT_PATH is not None and Path.exists(Path(CHECKPOINT_PATH)):
    model.load_state_dict(torch.load(CHECKPOINT_PATH))
    print_blue(f'Loaded from {CHECKPOINT_PATH}')
vae = get_vae().to('cuda')
tokenizer = get_tokenizer()
realesrgan = get_realesrgan(SR, device='cuda') if SUPER_RESOLUTION else None
output_path = Path(OUTPUT_PATH)
output_path.mkdir(parents=True, exist_ok=True)
input_text = CAPTION
# NOTE(review): `is not` compares identity, not equality — this works
# only because the untouched default is the same object; confirm.
if TRANSLATE and CAPTION is not DEFAULT_CAPTION:
    input_text = translate(CAPTION)
else:
    print_cyan(f'prompt: {CAPTION}')
# Image-prompt crop borders (rows/cols of VAE tokens to keep).
borders = {'up': 4, 'left': 0, 'right': 0, 'down': 0}
# Rotating cursors into the hyper-parameter lists.
temp_index = 0
topp_index = 0
topk_index = 0
prompt_index = 0
if cfg['seed']:
    seed_everything(cfg['gen_seed'])
    print_green(f"\nseed {cfg['gen_seed']}\n")
paths = []
prompt_imgs = []
# Collect every prompt image (common raster formats) and resize each to
# the model's 256x256 input.
if PROMPT_PATH is not None:
    types = ('*.png', '*.jpg', "*.jpeg", "*.bmp")
    for ext in types:
        paths.extend(glob.glob(os.path.join(PROMPT_PATH, ext)))
    for path in paths:
        prompt_imgs.append(Image.open(path).resize((256, 256)))
# Optionally randomise the starting order of all cycled lists.
if cfg['shuffle_start']:
    shuffle(TEMPERATURE)
    shuffle(TOP_P)
    shuffle(TOP_K)
    if len(prompt_imgs) > 1:
        shuffle(prompt_imgs)
# Map of saved file name -> generation score.
scores = {}
# Main generation loop: one image per iteration, cycling through the
# temperature / top-p / top-k lists and (optionally) the prompt images.
for i in tqdm(range(IMAGE_COUNT), colour='green'):
    temp = TEMPERATURE[temp_index]
    top_p = TOP_P[topp_index]
    top_k = TOP_K[topk_index]
    if len(prompt_imgs) > 0:
        # Wrap the prompt cursor, reshuffling if configured.
        if prompt_index > len(prompt_imgs) - 1:
            prompt_index = 0
            if cfg['shuffle_loop'] and len(prompt_imgs) > 1:
                shuffle(prompt_imgs)
        prompt_img = prompt_imgs[prompt_index]
        # Randomly flip the prompt image with probability cfg['prompt_flip'].
        if cfg['prompt_flip'] > 0.0 and random() <= cfg['prompt_flip']:
            prompt_img = ImageOps.flip(prompt_img)
        prompt_index = prompt_index + 1
        image_prompt = ImagePrompts(prompt_img, borders, vae, 'cuda', crop_first=True)
        pil_images, score = generate_images(image_prompts=image_prompt, text=input_text, tokenizer=tokenizer,
                                            dalle=model, vae=vae, images_num=1, top_k=top_k, top_p=top_p,
                                            temperature=temp)
    else:
        pil_images, score = generate_images(text=input_text, tokenizer=tokenizer, dalle=model,
                                            vae=vae, images_num=1, top_k=top_k, top_p=top_p, temperature=temp)
    # Advance and wrap each hyper-parameter cursor independently.
    temp_index = temp_index + 1
    topp_index = topp_index + 1
    topk_index = topk_index + 1
    if temp_index > len(TEMPERATURE) - 1:
        temp_index = 0
        if cfg['shuffle_loop']:
            shuffle(TEMPERATURE)
    if topp_index > len(TOP_P) - 1:
        topp_index = 0
        if cfg['shuffle_loop']:
            shuffle(TOP_P)
    if topk_index > len(TOP_K) - 1:
        topk_index = 0
        if cfg['shuffle_loop']:
            shuffle(TOP_K)
    if SUPER_RESOLUTION:
        pil_images = super_resolution(pil_images, realesrgan)
    # NOTE(review): `caption` is assigned but never used below.
    caption = CAPTION if CAPTION is not None else input_text
    # Build a unique save path, bumping the numeric suffix on collision.
    save_index = INCREMENT_FROM + i
    save_prefix = f'{FILE_NAME}_s{int(score[0])}_t{temp}_p{top_p}_k{top_k}'
    save_name = f'{save_prefix}_{save_index:03d}'
    save_path = pathlib.Path(os.path.join(output_path, save_name + '.png'))
    while pathlib.Path.exists(save_path):
        save_index = save_index + 1
        save_name = f'{save_prefix}_{save_index:03d}'
        save_path = pathlib.Path(os.path.join(output_path, save_name + '.png'))
    for n in range(len(pil_images)):
        scores[save_name] = score[n]
        if not POST_FX or cfg['save_both']:
            pil_images[n].save(save_path)
            print_cyan(f'\n{save_path}, score: {int(score[n])}')
        if POST_FX:
            apply_to_pil(pil_images[n], output_path, save_name + '_fx',
                         noise=cfg['noise'],
                         noise_strength=cfg['noise_strength'],
                         clip_limit=cfg['clip_limit'],
                         sigma_a=cfg['sigma_a'],
                         sigma_b=cfg['sigma_b']
                         )
    # Free GPU memory between iterations.
    gc.collect()
    torch.cuda.empty_cache()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import Command, setup
import batch_requests
class PyTest(Command):
    '''
    A command handler for setup.py test.

    Shells out to py.test with coverage reporting and exits with the
    test run's return code.
    '''
    user_options = []
    def initialize_options(self):
        pass
    def finalize_options(self):
        pass
    def run(self):
        import subprocess
        import sys
        errno = subprocess.call('py.test --cov-report html --cov batch_requests tests/', shell=True)
        raise SystemExit(errno)
# Package metadata passed to setup() below.
name = 'django-batch-requests'
version = batch_requests.__version__
package = 'batch_requests'
description = 'Create batch APIs for Django.'
long_description = (
    'Django batch requests allow developers to combine multiple http requests' +
    ' into a single batch request. This is essentially useful to avoid making multiple' +
    ' http requests to save on round trip network latency.'
)
url = 'https://github.com/tanwanirahul/django-batch-requests'
author = 'Rahul Tanwani'
author_email = 'tanwanirahul@gmail.com'
license = 'MIT'
install_requires = []
def read(*paths):
    '''
    Join the given path segments and return the file's text contents.
    '''
    with open(os.path.join(*paths), 'r') as handle:
        return handle.read()
def get_packages(package):
    '''
    Return the root package and every sub-package beneath it — i.e. each
    directory that contains an __init__.py.
    '''
    packages = []
    for dirpath, dirnames, filenames in os.walk(package):
        if os.path.exists(os.path.join(dirpath, '__init__.py')):
            packages.append(dirpath)
    return packages
def get_package_data(package):
    '''
    Return all files under the root package that are not inside a
    package themselves (i.e. data files), keyed by the package name.
    '''
    filepaths = []
    for dirpath, dirnames, filenames in os.walk(package):
        if os.path.exists(os.path.join(dirpath, '__init__.py')):
            continue  # a real (sub)package, not a data directory
        base = dirpath.replace(package + os.sep, '', 1)
        filepaths.extend(os.path.join(base, filename)
                         for filename in filenames)
    return {package: filepaths}
# 'python setup.py publish' shortcut: upload sdist + wheel, then remind
# the maintainer to tag the release.
if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    os.system('python setup.py bdist_wheel upload')
    print('You probably want to also tag the version now:')
    print(" git tag -a {0} -m 'version {0}'".format(version))
    print(' git push --tags')
    sys.exit()
# Standard setuptools entry point; 'setup.py test' is wired to PyTest above.
setup(
    name=name,
    version=version,
    url=url,
    license=license,
    description=description,
    long_description=long_description,
    author=author,
    author_email=author_email,
    packages=get_packages(package),
    package_data=get_package_data(package),
    install_requires=install_requires,
    cmdclass = {'test': PyTest},
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Topic :: Internet :: WWW/HTTP'
    ]
)
|
import unittest
from bseg.morphology import Morphology
from bseg.bunsetsu import Bunsetsu
class TestBunsetsu(unittest.TestCase):
    """Unit tests for Bunsetsu surface construction and predicate detection."""
    def setUp(self):
        # Bunsetsu 1: noun + case particle — not a predicate.
        morp1 = Morphology("天気 名詞,一般,*,*,*,*,天気,テンキ,テンキ")
        morp2 = Morphology("が 助詞,格助詞,一般,*,*,*,が,ガ,ガ")
        self.bnst1 = Bunsetsu([morp1, morp2])
        # Bunsetsu 2: adjective + full stop — a predicate.
        morp3 = Morphology("良い 形容詞,自立,*,*,形容詞・アウオ段,\
基本形,良い,ヨイ,ヨイ")
        morp4 = Morphology("。 記号,句点,*,*,*,*,。,。,。")
        self.bnst2 = Bunsetsu([morp3, morp4])
    def test___init__(self):
        # The surface is the concatenation of the morpheme surfaces.
        self.assertEqual(self.bnst1.surface, "天気が")
    def test_ispredicate(self):
        self.assertFalse(self.bnst1.ispredicate())
        self.assertTrue(self.bnst2.ispredicate())
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
"""
Code taken from https://raw.githubusercontent.com/hma02/thesne/master/model/tsne.py
And then modified.
"""
import os, sys
import theano.tensor as T
import theano
import numpy as np
from utils import dist2hy
import theano.sandbox.rng_mrg as RNG_MRG
import theano.tensor.shared_randomstreams as RNG_TRG
from theano.tensor.shared_randomstreams import RandomStreams
# Shared random streams: numpy for host-side draws, MRG/TRG for Theano
# graph-side sampling (fixed seeds for reproducibility).
RNG = np.random.RandomState(0)
MRG = RNG_MRG.MRG_RandomStreams(RNG.randint(2 ** 30))
TRG = RNG_TRG.RandomStreams(seed=1234)
# Numerical floor keeping probabilities away from log(0).
epsilon = 1e-6
floath = np.float32
def sqeuclidean_var(X):
    """Pairwise squared Euclidean distances of the rows of X, using the
    expansion ||a||^2 + ||b||^2 - 2 a.b (works symbolically or on arrays)."""
    N = X.shape[0]
    norms = (X ** 2).sum(axis=1)
    return norms.reshape((N, 1)) + norms.reshape((1, N)) - 2 * X.dot(X.T)
def discrete_sample(preds, num_sam, temperature=1.0):
    # function to sample an index from a probability array
    # NOTE(review): `temperature` is accepted but never used, the choice
    # support is hard-coded to arange(3), and np.argmax(..., axis=1) on a
    # 1-D draw of shape [num_sam] looks suspect — confirm against an
    # actual Theano run before relying on this helper.
    probas = TRG.choice(a=np.arange(3), size=[num_sam,], p=preds)
    return np.argmax(probas, axis=1)
def euclidean2_np(X):
    """Pairwise squared Euclidean distances (numpy), clamped at zero.

    The final mask zeroes tiny negative values produced by floating-point
    cancellation in the ||a||^2 + ||b||^2 - 2 a.b expansion.
    """
    N = X.shape[0]
    norms = np.sum(X ** 2, axis=1)
    dist = np.reshape(norms, [N, 1]) + np.reshape(norms, [1, N]) - 2 * np.dot(X, X.T)
    return dist * np.asarray(dist > 0, 'float32')
def p_Xp_given_X_np(X, sigma, metric, approxF=0):
    """Conditional probabilities p(x_j | x_i) from pairwise distances (numpy).

    Affinities are exp(-d_ij^2 / (2 sigma_i^2)) with a zeroed diagonal;
    each row is divided by its (possibly approximated) sum.

    BUG FIX: the original called ``np.sort(sorted_euc_dist, axis=1)`` and
    discarded the result — ``np.sort`` returns a sorted copy, it does not
    sort in place — so the approxF branch normalised by arbitrary
    unsorted columns.  The sorted copy is now actually used.
    """
    N = X.shape[0]
    if metric == 'euclidean':
        sqdistance = euclidean2_np(X)
    elif metric == 'precomputed':
        sqdistance = X**2
    else:
        raise Exception('Invalid metric')
    euc_dist = np.exp(-sqdistance / (np.reshape(2*(sigma**2), [N, 1])))
    np.fill_diagonal(euc_dist, 0.0)
    if approxF > 0:
        # Rows sorted ascending: index 0 is the zeroed self-affinity, so
        # columns 1..approxF hold the approxF smallest neighbour
        # affinities used to approximate the row sum.
        sorted_euc_dist = np.sort(euc_dist, axis=1)
        row_sum = np.reshape(np.sum(sorted_euc_dist[:, 1:approxF+1], axis=1), [N, 1])
    else:
        row_sum = np.reshape(np.sum(euc_dist, axis=1), [N, 1])
    return euc_dist/row_sum  # Possibly dangerous
def p_Xp_given_X_var(X, sigma, metric):
    """Symbolic (Theano) conditional probabilities p(x_j | x_i)."""
    N = X.shape[0]
    if metric == 'euclidean':
        sqdistance = sqeuclidean_var(X)
    elif metric == 'precomputed':
        sqdistance = X**2
    else:
        raise Exception('Invalid metric')
    affinities = T.exp(-sqdistance / ((2 * (sigma**2)).reshape((N, 1))))
    affinities = T.fill_diagonal(affinities, 0)
    normaliser = T.sum(affinities, axis=1).reshape((N, 1))
    return affinities / normaliser
def p_Xp_X_var(p_Xp_given_X):
    """Symmetrise conditionals: P_ij = (p_{j|i} + p_{i|j}) / 2."""
    return 0.5 * (p_Xp_given_X + p_Xp_given_X.T)
def p_Yp_Y_var(Y):
    """Symbolic Student-t (df=1) affinities of the embedding Y,
    diagonal zeroed, rows normalised to sum to one."""
    N = Y.shape[0]
    sqdistance = sqeuclidean_var(Y)
    kernel = T.fill_diagonal(1 / (sqdistance + 1), 0)
    return kernel / kernel.sum(axis=1).reshape((N, 1))
def p_Yp_Y_var_np(Y):
    """Numpy Student-t (df=1) affinities of the embedding Y, rows
    normalised to sum to one.

    NOTE(review): unlike the symbolic p_Yp_Y_var, the diagonal is NOT
    zeroed here — confirm this asymmetry is intentional.
    """
    N = Y.shape[0]
    kernel = 1. / (euclidean2_np(Y) + 1)
    return kernel / kernel.sum(axis=1).reshape((N, 1))
def kl_cost_var(X, Y, sigma, metric):
    """Symbolic KL(P || Q) between input and embedding affinities."""
    p_Xp_given_X = p_Xp_given_X_var(X, sigma, metric)
    PX = p_Xp_X_var(p_Xp_given_X)
    PY = p_Yp_Y_var(Y)
    # Clamp both distributions before the log to avoid log(0).
    PXc = T.maximum(PX, epsilon)
    PYc = T.maximum(PY, epsilon)
    return T.mean(T.sum(PX * T.log(PXc / PYc),-1))
def reverse_kl_cost_var(X, Y, sigma, metric):
    """Symbolic reverse KL, i.e. KL(Q || P), between embedding and input
    affinities."""
    p_Xp_given_X = p_Xp_given_X_var(X, sigma, metric)
    PX = p_Xp_X_var(p_Xp_given_X)
    PY = p_Yp_Y_var(Y)
    PXc = T.maximum(PX, epsilon)
    PYc = T.maximum(PY, epsilon)
    return -T.mean(T.sum(PY * T.log(PXc / PYc),-1))
def js_cost_var(X, Y, sigma, metric):
    """Symmetrised divergence: equal-weight mix of forward and reverse KL."""
    return kl_cost_var(X, Y, sigma, metric) * 0.5 + \
        reverse_kl_cost_var(X, Y, sigma, metric) * 0.5
def chi_square_cost_var(X, Y, sigma, metric):
    """Symbolic chi-square divergence between input and embedding
    affinities."""
    p_Xp_given_X = p_Xp_given_X_var(X, sigma, metric)
    PX = p_Xp_X_var(p_Xp_given_X)
    PY = p_Yp_Y_var(Y)
    PXc = T.maximum(PX, epsilon)
    PYc = T.maximum(PY, epsilon)
    return T.mean(T.sum(PY * (PXc / PYc - 1.)**2, -1))
def hellinger_cost_var(X, Y, sigma, metric):
    """Symbolic Hellinger-style divergence between input and embedding
    affinities."""
    p_Xp_given_X = p_Xp_given_X_var(X, sigma, metric)
    PX = p_Xp_X_var(p_Xp_given_X)
    PY = p_Yp_Y_var(Y)
    PXc = T.maximum(PX, epsilon)
    PYc = T.maximum(PY, epsilon)
    return T.mean(T.sum(PY * (T.sqrt(PXc / PYc) - 1.)**2,-1))
def find_sigma(X_shared, sigma_shared, N, perplexity, sigma_iters,
               metric, verbose=0):
    """Binary search on sigma for a given perplexity.

    Each point gets its own bandwidth; sigma_shared is updated in place
    over sigma_iters bisection steps so that the Shannon entropy of every
    row of the conditional distribution approaches log(perplexity).
    """
    X = T.fmatrix('X')
    sigma = T.fvector('sigma')
    target = np.log(perplexity)
    P = T.maximum(p_Xp_given_X_var(X, sigma, metric), epsilon)
    entropy = -T.sum(P*T.log(P), axis=1)
    # Setting update for binary search interval
    sigmin_shared = theano.shared(np.full(N, np.sqrt(epsilon), dtype=floath))
    sigmax_shared = theano.shared(np.full(N, np.inf, dtype=floath))
    sigmin = T.fvector('sigmin')
    sigmax = T.fvector('sigmax')
    # Rows below the target entropy raise the lower bound; rows above it
    # lower the upper bound.
    upmin = T.switch(T.lt(entropy, target), sigma, sigmin)
    upmax = T.switch(T.gt(entropy, target), sigma, sigmax)
    givens = {X: X_shared, sigma: sigma_shared, sigmin: sigmin_shared,
              sigmax: sigmax_shared}
    updates = [(sigmin_shared, upmin), (sigmax_shared, upmax)]
    update_intervals = theano.function([], entropy, givens=givens,
                                       updates=updates)
    # Setting update for sigma according to search interval
    # While the upper bound is still infinite, double sigma; else bisect.
    upsigma = T.switch(T.isinf(sigmax), sigma*2, (sigmin + sigmax)/2.)
    givens = {sigma: sigma_shared, sigmin: sigmin_shared,
              sigmax: sigmax_shared}
    updates = [(sigma_shared, upsigma)]
    update_sigma = theano.function([], sigma, givens=givens, updates=updates)
    for i in range(sigma_iters):
        e = update_intervals()
        update_sigma()
        if verbose:
            print('Iteration: {0}.'.format(i+1))
            print('Perplexities in [{0:.4f}, {1:.4f}].'.format(np.exp(e.min()),
                                                               np.exp(e.max())))
    if np.any(np.isnan(np.exp(e))):
        raise Exception('Invalid sigmas. The perplexity is probably too low.')
def find_sigma_np(X, sigma, N, perplexity, sigma_iters, metric, verbose=1, approxF=0):
    """Binary search on sigma for a given perplexity.

    Pure-numpy counterpart of ``find_sigma``: runs ``sigma_iters`` rounds
    of per-point binary search so the entropy of each point's conditional
    neighbour distribution matches ``log(perplexity)``.

    Parameters:
        X: data matrix (one row per point).
        sigma: initial per-point bandwidth vector; the argument itself is
            not mutated (the name is rebound each iteration).
        N: number of data points (length of sigma).
        perplexity: target perplexity; the search target is its log.
        sigma_iters: number of binary-search iterations to run.
        metric: distance metric forwarded to ``p_Xp_given_X_np``.
        verbose: if truthy, print the perplexity range each iteration.
        approxF: approximation flag forwarded to ``p_Xp_given_X_np``.

    Returns:
        The final per-point sigma vector.

    Raises:
        Exception: if any final entropy is NaN (perplexity likely too low).
    """
    target = np.log(perplexity)
    # Setting update for binary search interval
    # Lower bound starts tiny but non-zero; upper bound starts unbounded.
    sigmin = np.full(N, np.sqrt(epsilon), dtype='float32')
    sigmax = np.full(N, np.inf, dtype='float32')
    for i in range(sigma_iters):
        # Clamp probabilities so the entropy's log never sees a zero.
        P = np.maximum(p_Xp_given_X_np(X, sigma, metric, approxF), epsilon)
        entropy = -np.sum(P*np.log(P), axis=1)
        # Entropy below target -> sigma too small -> raise the lower bound;
        # entropy above target -> sigma too large -> lower the upper bound.
        minind = np.argwhere(entropy < target).flatten()
        maxind = np.argwhere(entropy > target).flatten()
        sigmin[minind] = sigma[minind]
        sigmax[maxind] = sigma[maxind]
        # Where the upper bound is still infinite, keep growing sigma
        # geometrically; elsewhere bisect [sigmin, sigmax].  The old
        # values are saved BEFORE sigma is rebound to the midpoints.
        infmask = np.argwhere(np.isinf(sigmax)).flatten()
        old_sigma = sigma[infmask]
        sigma = (sigmin + sigmax)/2.
        sigma[infmask] = old_sigma*2
        if verbose:
            print('Iteration: {0}.'.format(i+1))
            print('Perplexities in [{0:.4f}, {1:.4f}].'.format(np.exp(entropy.min()), np.exp(entropy.max())))
    # NaN entropies mean the search diverged for some point.
    if np.any(np.isnan(np.exp(entropy))):
        raise Exception('Invalid sigmas. The perplexity is probably too low.')
    return sigma
if __name__ == '__main__':
    # Smoke test: draw 1000 samples from a small categorical distribution.
    # The previous version dropped into the debugger here
    # (`import pdb; pdb.set_trace()`) — that was leftover debug residue
    # and has been removed; print the result instead.
    samples = discrete_sample(np.asarray([0.3, 0.2, 0.5]), 1000)
    print(samples)
# ---------------------------------------------------------------------------
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, with_statement
import phonetics
import history
import culture
import speed
import beauty
import internet
import meaning
# Registry of scorers: (score id, scoring function, relative weight).
# Disabled scorers are kept commented out for reference.
weights = (
    ("phonetics-spellability", phonetics.spellability, 40),
    ("phonetics-pronounceability", phonetics.pronounceability, 10),
    ("history-timelessness", history.timelessness, 20),
    ("history-relevancy", history.relevancy, 30),
    ("history-rarity", history.rarity, 10),
    #("internet-googlability", internet.googlability, 8),
    #("internet-availability", internet.availability, 4),
    ("meaning-secularity", meaning.secularity, 30),
    #("beauty-palindromicity", beauty.palindromicity, 20),
    #("beauty-initialization", beauty.initialization, 1),
    ("speed-shortness", speed.shortness, 20),
    ("speed-recitability", speed.recitability, 4),
    ("speed-nicklessness", speed.nicklessness, 15),
    ("speed-nickedness", speed.nickedness, 10),
    ("culture-chineseness", culture.chineseness, 4),
    ("culture-genderedness", culture.genderedness, 20),
)

# Sum of the active weights, e.g. for normalising a total score.
# (Was `sum([w for (id, s, w) in weights])`: shadowed the builtin `id`
# and built a throwaway list; a generator with throwaway names is cleaner.)
total_weight = sum(w for _score_id, _scorer, w in weights)
def judge(name):
    """
    Compute an overall score for *name* by running every active scorer
    from ``weights`` and summing the weighted sub-scores.

    Side effects: each raw sub-score is recorded in ``name.scores`` under
    its scorer id, and the weighted total is stored on ``name.score``.

    Returns the weighted total.
    """
    total = 0
    for score_id, scorer, weight in weights:
        value = scorer(name)
        name.scores[score_id] = value
        total += value * weight
    name.score = total
    return total
# ---------------------------------------------------------------------------
# (Scrape residue below — preserved as comments so the file stays valid Python)
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.