_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
904f8959b5a73682caac3af723aab56e2b116450e8202d8da53376f402aed4b9 | AlexCharlton/glls | glls-render.scm | (module glls-render (c-prefix
define-pipeline
export-pipeline)
(import chicken scheme)
(use (prefix glls glls:) glls-renderable (prefix gl-utils gl:))
(import-for-syntax (prefix glls glls:) (prefix glls-compiler glls:)
glls-renderable matchable miscmacros data-structures)
(reexport (except glls define-pipeline)
(only glls-renderable
renderable-size
unique-textures?
set-renderable-vao!
set-renderable-n-elements!
set-renderable-element-type!
set-renderable-mode!
set-renderable-offset!))
(begin-for-syntax
(require-library glls-renderable)
(define c-prefix (make-parameter '||))
(define header-included? (make-parameter #f)))
(define c-prefix (make-parameter '||)) ; Needs to be defined twice so it can be manipulated upon export (for some reason)
(define-syntax renderable-setters
(ir-macro-transformer
(lambda (exp i compare)
(match exp
((_ name uniforms)
(let ((base-name (symbol-append 'set- name '-renderable-)))
`(begin
,@(let loop ((uniforms uniforms) (i 0))
(if (null? uniforms)
'()
(cons `(define (,(symbol-append base-name (caar uniforms) '!)
renderable value)
(set-renderable-uniform-value! renderable ,i value
',(caar uniforms)))
(loop (cdr uniforms) (add1 i))))))))
(exp (syntax-error 'renderable-setters "Bad arguments" exp))))))
(define-for-syntax (get-uniforms s)
(cond
((and (list? s)
(list? (car s))
(member (caar s) glls:shader-types))
(get-keyword uniform: (cdar s) (lambda () '())))
((and (list? s) (>= (length s) 2) (member #:uniform s))
(cdr (member #:uniform s)))
(else (syntax-error 'define-pipeline "Only shaders that include uniform definitions may be used with glls-render" s))))
(define-syntax define-renderable-functions
(ir-macro-transformer
(lambda (exp i compare)
(match exp
((_ name . shaders)
(let* ((name (strip-syntax name))
(uniforms (delete-duplicates
(concatenate (map get-uniforms (strip-syntax shaders)))
(lambda (a b) (eq? (car a) (car b))))))
(let-values (((render-funs render-fun-name
render-arrays-fun-name
fast-fun-begin-name
fast-fun-name fast-fun-end-name
fast-fun-arrays-name)
(if (feature? compiling:)
(render-functions (c-prefix) name uniforms)
(values #f #f #f #f #f #f #f))))
`(begin
,(if (feature? compiling:)
`(begin
,(if (not (header-included?))
(begin
(header-included? #t)
`(begin
(import foreign)
(foreign-declare ,gllsRender.h)))
#f)
(foreign-declare ,render-funs)
(define ,(symbol-append 'render- name)
(foreign-lambda void ,render-fun-name c-pointer))
(define ,(symbol-append 'render-arrays- name)
(foreign-lambda void ,render-arrays-fun-name c-pointer))
(define (,(symbol-append name '-fast-render-functions))
(values
(foreign-lambda void ,(symbol->string fast-fun-begin-name)
c-pointer)
(foreign-lambda void ,(symbol->string fast-fun-name)
c-pointer)
(foreign-lambda void ,(symbol->string fast-fun-end-name))
(foreign-lambda void ,(symbol->string fast-fun-arrays-name)
c-pointer)
(foreign-value ,(string-append
"&" (symbol->string fast-fun-begin-name))
c-pointer)
(foreign-value ,(string-append
"&" (symbol->string fast-fun-name))
c-pointer)
(foreign-value ,(string-append
"&" (symbol->string fast-fun-end-name))
c-pointer)
(foreign-value ,(string-append
"&" (symbol->string fast-fun-arrays-name))
c-pointer))))
`(begin
(define (,(symbol-append 'render- name) renderable)
(render-renderable ',uniforms renderable #f))
(define (,(symbol-append 'render-arrays- name) renderable)
(render-renderable ',uniforms renderable #t))))
(define (,(symbol-append 'make- name '-renderable) . args)
(apply make-renderable ,name args))
(renderable-setters ,name ,uniforms)))))
(expr (syntax-error 'define-pipeline "Invalid pipeline definition" expr))))))
(define-syntax define-pipeline
(syntax-rules ()
((_ name shaders ...)
(begin (glls:define-pipeline name shaders ...)
(define-renderable-functions name shaders ...)))
((_ . expr) (syntax-error 'define-pipeline "Invalide pipeline definition" expr))))
(define-syntax export-pipeline
(ir-macro-transformer
(lambda (expr i c)
(cons 'export
(flatten
(let loop ((pipelines (cdr expr)))
(if (null? pipelines)
'()
(if (not (symbol? (car pipelines)))
(syntax-error 'export-shader "Expected a pipeline name" expr)
(cons (let* ((name (strip-syntax (car pipelines)))
(render (symbol-append 'render- name))
(make-renderable (symbol-append 'make- name
'-renderable))
(fast-funs (symbol-append name
'-fast-render-functions)))
(list name render make-renderable fast-funs))
(loop (cdr pipelines)))))))))))
) ; glls-render
| null | https://raw.githubusercontent.com/AlexCharlton/glls/31d1124f8e7a79cea196d6d1228b26e0344c3753/glls-render.scm | scheme | Needs to be defined twice so it can be manipulated upon export (for some reason)
glls-render | (module glls-render (c-prefix
define-pipeline
export-pipeline)
(import chicken scheme)
(use (prefix glls glls:) glls-renderable (prefix gl-utils gl:))
(import-for-syntax (prefix glls glls:) (prefix glls-compiler glls:)
glls-renderable matchable miscmacros data-structures)
(reexport (except glls define-pipeline)
(only glls-renderable
renderable-size
unique-textures?
set-renderable-vao!
set-renderable-n-elements!
set-renderable-element-type!
set-renderable-mode!
set-renderable-offset!))
(begin-for-syntax
(require-library glls-renderable)
(define c-prefix (make-parameter '||))
(define header-included? (make-parameter #f)))
(define-syntax renderable-setters
(ir-macro-transformer
(lambda (exp i compare)
(match exp
((_ name uniforms)
(let ((base-name (symbol-append 'set- name '-renderable-)))
`(begin
,@(let loop ((uniforms uniforms) (i 0))
(if (null? uniforms)
'()
(cons `(define (,(symbol-append base-name (caar uniforms) '!)
renderable value)
(set-renderable-uniform-value! renderable ,i value
',(caar uniforms)))
(loop (cdr uniforms) (add1 i))))))))
(exp (syntax-error 'renderable-setters "Bad arguments" exp))))))
(define-for-syntax (get-uniforms s)
(cond
((and (list? s)
(list? (car s))
(member (caar s) glls:shader-types))
(get-keyword uniform: (cdar s) (lambda () '())))
((and (list? s) (>= (length s) 2) (member #:uniform s))
(cdr (member #:uniform s)))
(else (syntax-error 'define-pipeline "Only shaders that include uniform definitions may be used with glls-render" s))))
(define-syntax define-renderable-functions
(ir-macro-transformer
(lambda (exp i compare)
(match exp
((_ name . shaders)
(let* ((name (strip-syntax name))
(uniforms (delete-duplicates
(concatenate (map get-uniforms (strip-syntax shaders)))
(lambda (a b) (eq? (car a) (car b))))))
(let-values (((render-funs render-fun-name
render-arrays-fun-name
fast-fun-begin-name
fast-fun-name fast-fun-end-name
fast-fun-arrays-name)
(if (feature? compiling:)
(render-functions (c-prefix) name uniforms)
(values #f #f #f #f #f #f #f))))
`(begin
,(if (feature? compiling:)
`(begin
,(if (not (header-included?))
(begin
(header-included? #t)
`(begin
(import foreign)
(foreign-declare ,gllsRender.h)))
#f)
(foreign-declare ,render-funs)
(define ,(symbol-append 'render- name)
(foreign-lambda void ,render-fun-name c-pointer))
(define ,(symbol-append 'render-arrays- name)
(foreign-lambda void ,render-arrays-fun-name c-pointer))
(define (,(symbol-append name '-fast-render-functions))
(values
(foreign-lambda void ,(symbol->string fast-fun-begin-name)
c-pointer)
(foreign-lambda void ,(symbol->string fast-fun-name)
c-pointer)
(foreign-lambda void ,(symbol->string fast-fun-end-name))
(foreign-lambda void ,(symbol->string fast-fun-arrays-name)
c-pointer)
(foreign-value ,(string-append
"&" (symbol->string fast-fun-begin-name))
c-pointer)
(foreign-value ,(string-append
"&" (symbol->string fast-fun-name))
c-pointer)
(foreign-value ,(string-append
"&" (symbol->string fast-fun-end-name))
c-pointer)
(foreign-value ,(string-append
"&" (symbol->string fast-fun-arrays-name))
c-pointer))))
`(begin
(define (,(symbol-append 'render- name) renderable)
(render-renderable ',uniforms renderable #f))
(define (,(symbol-append 'render-arrays- name) renderable)
(render-renderable ',uniforms renderable #t))))
(define (,(symbol-append 'make- name '-renderable) . args)
(apply make-renderable ,name args))
(renderable-setters ,name ,uniforms)))))
(expr (syntax-error 'define-pipeline "Invalid pipeline definition" expr))))))
(define-syntax define-pipeline
(syntax-rules ()
((_ name shaders ...)
(begin (glls:define-pipeline name shaders ...)
(define-renderable-functions name shaders ...)))
((_ . expr) (syntax-error 'define-pipeline "Invalide pipeline definition" expr))))
(define-syntax export-pipeline
(ir-macro-transformer
(lambda (expr i c)
(cons 'export
(flatten
(let loop ((pipelines (cdr expr)))
(if (null? pipelines)
'()
(if (not (symbol? (car pipelines)))
(syntax-error 'export-shader "Expected a pipeline name" expr)
(cons (let* ((name (strip-syntax (car pipelines)))
(render (symbol-append 'render- name))
(make-renderable (symbol-append 'make- name
'-renderable))
(fast-funs (symbol-append name
'-fast-render-functions)))
(list name render make-renderable fast-funs))
(loop (cdr pipelines)))))))))))
|
dd36a795ab374a524e8ddaa9809cee2184cfd17af94473d188f280bee04c4af1 | kmi/irs | ontoweb-ontology-rewrite.lisp | Mode : Lisp ; Package :
File created in WebOnto
(in-package "OCML")
(in-ontology ontoweb-ontology)
Automatically translated from RDF file # P"D:/users / jbd2 / code / freaky / rdf - files / ontoweb - data / ontoweb - ontology - rc1.rdfs "
at 20:27:57 , on 27/10/2003
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#GENERICONTOLOGY)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#TOOL ()
((//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_EXCEPTION_HANDLING :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOWS :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#IMPORTS :type //www.ontoweb.org/extended\#language)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_CONSTRAINT :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_PRODUTION_RULES :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#SUPPORTS :type //www.ontoweb.org/extended\#methodology)
(//WWW.ONTOWEB.ORG/EXTENDED\#PRICINGPOLICY :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_LOCKING_LEVEL :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_ATTACHED_INFERENCE_ENGINE :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_DEFAULT_VALUE :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#FUNCTIONALITY :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_COLLABORATIVE_WORKING :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_WORK_MANAGEMENT :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#MAXIMUM_ARITY_ALLOWED :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_MERGE :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_USER_CHANGE_CONTROL :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_DOCUMENTATION :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_INHERITANCE :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_CONFIGURATION_MANAGEMENT :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_VALIDATION :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#CONTAINS :type //www.ontoweb.org/extended\#ontology)
(//WWW.ONTOWEB.ORG/EXTENDED\#EXPORTSTO :type //www.ontoweb.org/extended\#language)
(//WWW.ONTOWEB.ORG/EXTENDED\#HAS_LIBRARIES_OF_ONTOLOGIES :type //www.w3.org/2001/xmlschema\#string)))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PERSON
((//WWW.ONTOWEB.ORG/EXTENDED\#PHONE :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#PHOTO :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#FAX :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#AUTHOROF :type //www.ontoweb.org/extended\#publication)
(//WWW.ONTOWEB.ORG/EXTENDED\#ADDRESS :type //www.w3.org/2001/xmlschema\#string)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWINSTANCESATTRIBUTES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWINSTANCESATTRIBUTES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowinstancesattributes")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#INPROCEEDINGS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___ORGANISATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___ORGANISATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#organisation)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#EVENT)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#COMPANYSTAFF)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#AUTHOR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#AUTHOR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#author")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#EDUCATIONALRESSOURCE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCLANGUAGE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCLANGUAGE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dclanguage")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#TRADITIONALLANGUAGE)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#RESEARCHAREA)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#LINGUISTICONTOLOGY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MISCRISKS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MISCRISKS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#miscrisks")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#RESEARCHGROUP)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#MISC)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEECOMMERCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEECOMMERCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#improveecommerce")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___PROJECT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___PROJECT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#project)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#homepage)
(Subpropertyof //www.ontoweb.org/extended\#homepage)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#MASTERTHESIS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCRIGHTS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCRIGHTS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcrights")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___PUBLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___PUBLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#title)
(Subpropertyof //www.ontoweb.org/extended\#title)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFOREDUCATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYUSED (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYUSED property
((Range //www.ontoweb.org/extended\#methodology)
(Domain //www.ontoweb.org/extended\#applicationforintelligentinformationintegration)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#methodologyused")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PROJECTMANAGEMENTBOARD)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#URL (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#URL property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#url")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#OTHERTECHNICALRISKS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#OTHERTECHNICALRISKS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#othertechnicalrisks")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFORINTELLIGENTINFORMATIONINTEGRATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGY property
((Range //www.ontoweb.org/extended\#methodology)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#methodology")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___EVENT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___EVENT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#event)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFMETACLASSES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFMETACLASSES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#numberofmetaclasses")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWSLOTDEFAULTVALUE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWSLOTDEFAULTVALUE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowslotdefaultvalue")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DURATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DURATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#duration")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION___EVENT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION___EVENT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#event)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#location)
(Subpropertyof //www.ontoweb.org/extended\#location)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#PROVIDER (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#PROVIDER property
((Range //www.ontoweb.org/extended\#organisation)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#provider")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#homepage")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BRANCHINGFACTOR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BRANCHINGFACTOR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#branchingfactor")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWCOMPLETE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWCOMPLETE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowcomplete")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#SOURCEINFORMATIONAVAILABILITY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#SOURCEINFORMATIONAVAILABILITY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#sourceinformationavailability")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFORKNOWLEDGEMANAGEMENT)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#MANUAL)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DIFFICULTYLEVEL (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DIFFICULTYLEVEL property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#difficultylevel")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DATE_START_ (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DATE_START_ property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#event)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#date(start)")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#COPYRIGHT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#COPYRIGHT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#copyright")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCFORMAT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCFORMAT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcformat")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#EDUCATIONALAIM (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#EDUCATIONALAIM property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#educationalaim")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HEAD (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HEAD property
((Range //www.ontoweb.org/extended\#person)
(Domain //www.ontoweb.org/extended\#project)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#head")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___ORGANISATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___ORGANISATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#organisation)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#WORKSHOP)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCSUBJECT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCSUBJECT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcsubject")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFORREENGINEERING)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#SUCCESSSTORIES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#SUCCESSSTORIES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#successstories")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ORGANISATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCCONTRIBUTOR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCCONTRIBUTOR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dccontributor")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFCONCEPTS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFCONCEPTS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#numberofconcepts")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#CARRIEDOUTBY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#CARRIEDOUTBY property
((Range //www.ontoweb.org/extended\#organisation)
(Domain //www.ontoweb.org/extended\#project)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#carriedoutby")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#LANGUAGE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MAXIMUMARITY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MAXIMUMARITY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#maximumarity")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION___ORGANISATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION___ORGANISATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#organisation)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#location)
(Subpropertyof //www.ontoweb.org/extended\#location)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___EVENT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___EVENT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#event)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#MISCPUBLICATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCPUBLISHER (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCPUBLISHER property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcpublisher")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#URL___EDUCATIONALRESSOURCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#URL___EDUCATIONALRESSOURCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#url)
(Subpropertyof //www.ontoweb.org/extended\#url)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#CONSORTIUM)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISWORKEDONBY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISWORKEDONBY property
((Range //www.ontoweb.org/extended\#academicstaff)
(Domain //www.ontoweb.org/extended\#researcharea)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#isworkedonby")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TITLE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TITLE property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#title")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MODELINGGUIDELINES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MODELINGGUIDELINES property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#modelingguidelines")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFORONTOLOGYLEARNING)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#STUDENT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEB2B (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEB2B property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#improveb2b")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEKM (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEKM property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#improvekm")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#PROBLEMS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#PROBLEMS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#problems")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFROMTHESCRATCH)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISSUPPORTEDBY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISSUPPORTEDBY property
((Range //www.ontoweb.org/extended\#tool)
(Domain //www.ontoweb.org/extended\#methodology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#issupportedby")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCCREATOR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCCREATOR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dccreator")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ARTICLE)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#BUSINESSAREA)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___ORGANISATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___ORGANISATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#organisation)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#homepage)
(Subpropertyof //www.ontoweb.org/extended\#homepage)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#AUTOMATIONOFMANUALTASK (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#AUTOMATIONOFMANUALTASK property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#automationofmanualtask")
))
;; Auto-generated OCML translation of the OntoWeb extended RDF schema.
;; Pattern for each property: a binary `def-relation` declares the relation,
;; and a companion `def-instance ... property` records its schema slots --
;; Range / Domain (when present in the RDF source), Subpropertyof (usually
;; the synthetic #default_root_relation root), and the original RDF Label.
;; `def-class` forms declare concepts with no local slots.
;; NOTE(review): the mixed-case identifiers (//WWW.ONTOWEB.ORG/... vs
;; //www.ontoweb.org/...) are as emitted by the translator; OCML symbols are
;; case-insensitive here, so both spellings name the same entity -- do not
;; "normalize" them by hand.
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#location")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#CATEGORY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#CATEGORY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#category")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#OTHERCOMMERCIALRISKS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#OTHERCOMMERCIALRISKS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#othercommercialrisks")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ACADEMICSTAFF)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATION)
;; Relations named NAME___X, DESCRIPTION___X, etc. are "local" refinements:
;; the Is_Local_Relation_Of slot ties them back to the generic relation they
;; specialize, and Subpropertyof points at that generic relation instead of
;; the default root.
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___RESEARCHAREA (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___RESEARCHAREA property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#researcharea)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#CONFERENCE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#INITIALCONSTRUCTIONCOSTS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#INITIALCONSTRUCTIONCOSTS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#initialconstructioncosts")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#LECTURE)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#BOOKLET)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFORINFORMATIONRETRIEVAL)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ONTOLOGY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#knowledgeaquisition")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#SHORTDESCRIPTION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#SHORTDESCRIPTION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#news)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#shortdescription")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFORMERGE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#POTENTIALCOSTCUTTING (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#POTENTIALCOSTCUTTING property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#potentialcostcutting")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ENTERPRISE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISIMPORTEDFROM (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISIMPORTEDFROM property
((Range //www.ontoweb.org/extended\#tool)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#isimportedfrom")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NOTE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NOTE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#note")
))
;;there's a relation methodology of arity 2 so
;;complains. Is this a bug in the ontoweb ontology?
;;;(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGY)
;; Continuation of the auto-generated OntoWeb schema (same def-relation /
;; def-instance / def-class pattern as above). Relations with no Range or
;; Domain slot (e.g. #description, #name, #benefits, #topic, #generates)
;; are the generic parents that the ___-suffixed local relations refine.
;; #default_root_relation is the synthetic root of the property hierarchy
;; and therefore has an empty slot list.
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DATE_END_ (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DATE_END_ property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#event)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#date(end)")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#PERFORMANCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#PERFORMANCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#performance")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#INFERENCEENGINE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#INFERENCEENGINE property
((Range //www.ontoweb.org/extended\#tool)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#inferenceengine")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DEFAULT_ROOT_RELATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DEFAULT_ROOT_RELATION property
())
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#INBOOK)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#INCLUDES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#INCLUDES property
((Range //www.ontoweb.org/extended\#ontology)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#includes")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#SPECIALINTERESTGROUP)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#UNIVERSITY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWSOUND (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWSOUND property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowsound")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DELIVERYLANGUAGE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DELIVERYLANGUAGE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#deliverylanguage")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#URL___PUBLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#URL___PUBLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#url)
(Subpropertyof //www.ontoweb.org/extended\#url)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ABSTRACT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ABSTRACT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#abstract")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#EMPLOYS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#EMPLOYS property
((Range //www.ontoweb.org/extended\#person)
(Domain //www.ontoweb.org/extended\#organisation)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#employs")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#KEYWORDS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#KEYWORDS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#keywords")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCTITLE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCTITLE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dctitle")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___EDUCATIONALRESSOURCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___EDUCATIONALRESSOURCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#title)
(Subpropertyof //www.ontoweb.org/extended\#title)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___NEWS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___NEWS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#news)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#USEDTODEVELOP (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#USEDTODEVELOP property
((Range //www.ontoweb.org/extended\#ontology)
(Domain //www.ontoweb.org/extended\#methodology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#usedtodevelop")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#USING (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#USING property
((Range //www.ontoweb.org/extended\#methodology)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#using")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#INFERENCEENGINEUSED (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#INFERENCEENGINEUSED property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#applicationforintelligentinformationintegration)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#inferenceengineused")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___PERSON (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___PERSON property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#person)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#INCOLLECTION)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#DEPARTMENT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCCOVERAGE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCCOVERAGE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dccoverage")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS___APPLICATIONFORINTELLIGENTINFORMATIONINTEGRATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS___APPLICATIONFORINTELLIGENTINFORMATIONINTEGRATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#applicationforintelligentinformationintegration)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#benefits)
(Subpropertyof //www.ontoweb.org/extended\#benefits)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#FINANCEDBY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#FINANCEDBY property
((Range //www.ontoweb.org/extended\#organisation)
(Domain //www.ontoweb.org/extended\#project)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#financedby")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWPRODUCTIONRULES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWPRODUCTIONRULES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowproductionrules")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PROJECT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISEXPORTEDTO (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISEXPORTEDTO property
((Range //www.ontoweb.org/extended\#tool)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#isexportedto")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCIDENTIFIER (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCIDENTIFIER property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcidentifier")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___NEWS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___NEWS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#news)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFRELATIONS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFRELATIONS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#numberofrelations")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFORECOMMERCE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___PROJECT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___PROJECT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#project)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BUSINESSSECTOR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BUSINESSSECTOR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#businesssector")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MANAGER (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MANAGER property
((Range //www.ontoweb.org/extended\#person)
(Domain //www.ontoweb.org/extended\#project)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#manager")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ONTOWEBPORTAL)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCDATE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCDATE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcdate")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METAONTOLOGY)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#TASKONTOLOGY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCRELATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCRELATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcrelation")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#YEAR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#YEAR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#year")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#AUDIENCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#AUDIENCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#audience")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCTYPE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCTYPE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dctype")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#RELATED (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#RELATED property
((Range //www.ontoweb.org/extended\#topic)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#related")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#NEWS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFAXIOMS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFAXIOMS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#numberofaxioms")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#PEDAGOGICROLE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#PEDAGOGICROLE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#pedagogicrole")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#MEETING)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#THESIS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___ONTOLOGY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___ONTOLOGY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PRODUCT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___EDUCATIONALRESSOURCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___EDUCATIONALRESSOURCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___BUSINESSAREA (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___BUSINESSAREA property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessarea)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#BOOK)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#LACKOFTRANSPARENTROL (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#LACKOFTRANSPARENTROL property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#lackoftransparentrol")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEINTRANETCOMMUNICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEINTRANETCOMMUNICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#improveintranetcommunication")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#description")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#REPRESENTATIONONTOLOGY)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#INSTITUTE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCDESCRIPTION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCDESCRIPTION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcdescription")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MAINTENANCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MAINTENANCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#maintenance")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ASSOCIATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#USEDTOIMPLEMENT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#USEDTOIMPLEMENT property
((Range //www.ontoweb.org/extended\#ontology)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#usedtoimplement")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#RECOMMENDEDLIFECYCLE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#RECOMMENDEDLIFECYCLE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#methodology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#recommendedlifecycle")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION___APPLICATIONFORINTELLIGENTINFORMATIONINTEGRATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION___APPLICATIONFORINTELLIGENTINFORMATIONINTEGRATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#applicationforintelligentinformationintegration)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#knowledgeaquisition)
(Subpropertyof //www.ontoweb.org/extended\#knowledgeaquisition)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#GENERATESFROM (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#GENERATESFROM property
((Range //www.ontoweb.org/extended\#tool)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#generatesfrom")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISIMPLEMENTEDIN (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISIMPLEMENTEDIN property
((Range //www.ontoweb.org/extended\#language)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#isimplementedin")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWREASONING (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWREASONING property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowreasoning")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#KRFORMALISM (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#KRFORMALISM property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#krformalism")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#STUDIESAT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#STUDIESAT property
((Range //www.ontoweb.org/extended\#university)
(Domain //www.ontoweb.org/extended\#student)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#studiesat")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#WEBBASEDLANGUAGE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#name")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFINSTANCES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFINSTANCES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#numberofinstances")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#DEFAULT_ROOT_CONCEPT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISDEALTWITHIN (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISDEALTWITHIN property
((Range //www.ontoweb.org/extended\#project)
(Domain //www.ontoweb.org/extended\#researcharea)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#isdealtwithin")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWIMPLEMENTEDINFERENCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWIMPLEMENTEDINFERENCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowimplementedinference")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC___EDUCATIONALRESSOURCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC___EDUCATIONALRESSOURCE property
((Range //www.ontoweb.org/extended\#topic)
(Domain //www.ontoweb.org/extended\#educationalressource)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#topic)
(Subpropertyof //www.ontoweb.org/extended\#topic)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#CARRIESOUT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#CARRIESOUT property
((Range //www.ontoweb.org/extended\#project)
(Domain //www.ontoweb.org/extended\#organisation)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#carriesout")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MEMBER (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MEMBER property
((Range //www.ontoweb.org/extended\#person)
(Domain //www.ontoweb.org/extended\#project)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#member")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFORCOOPERATIVECONSTRUCTION)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFOREVALUATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#benefits")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#CARRIEDOUTBY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#CARRIEDOUTBY property
((Range //www.ontoweb.org/extended\#organisation)
(Domain //www.ontoweb.org/extended\#researcharea)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#carriedoutby")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___APPLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___APPLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#homepage)
(Subpropertyof //www.ontoweb.org/extended\#homepage)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BIBLIOGRAPHYDETAILS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BIBLIOGRAPHYDETAILS property
((Range //www.ontoweb.org/extended\#publication)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#bibliographydetails")
))
;;there's a relate called topic
;;;so we don't load the class
;;;(def-class //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ADVANTAGES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ADVANTAGES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#advantages")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#RUNTIMEDEPLOYMENT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#RUNTIMEDEPLOYMENT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#runtimedeployment")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BELONGSTO (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BELONGSTO property
((Range //www.ontoweb.org/extended\#organisation)
(Domain //www.ontoweb.org/extended\#person)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#belongsto")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___BUSINESSAREA (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___BUSINESSAREA property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessarea)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#EMAIL (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#EMAIL property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#person)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#email")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DEVELOPS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DEVELOPS property
((Range //www.ontoweb.org/extended\#product)
(Domain //www.ontoweb.org/extended\#organisation)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#develops")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___APPLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___APPLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#FINANCES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#FINANCES property
((Range //www.ontoweb.org/extended\#project)
(Domain //www.ontoweb.org/extended\#organisation)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#finances")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCSOURCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCSOURCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcsource")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#DOMAINONTOLOGY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#QUALITYASSURANCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#QUALITYASSURANCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#qualityassurance")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#PUBLISHES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#PUBLISHES property
((Range //www.ontoweb.org/extended\#publication)
(Domain //www.ontoweb.org/extended\#organisation)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#publishes")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#COMPETITION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#COMPETITION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#competition")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#GENERATES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#GENERATES property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#generates")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#SEMANTICPORTALSANDWEBCOMMUNITIES)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#BUSINESSSCENARIO)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#CONTACT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#CONTACT property
((Range //www.ontoweb.org/extended\#person)
(Domain //www.ontoweb.org/extended\#event)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#contact")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PHDTHESIS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS___APPLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS___APPLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#benefits)
(Subpropertyof //www.ontoweb.org/extended\#benefits)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION___APPLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION___APPLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#knowledgeaquisition)
(Subpropertyof //www.ontoweb.org/extended\#knowledgeaquisition)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#STEPS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#STEPS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#methodology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#steps")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#DOMAINTASKONTOLOGY)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#EXHIBITION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DOMAINOFONTOLOGY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DOMAINOFONTOLOGY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#domainofontology")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#topic")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC___NEWS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC___NEWS property
((Range //www.ontoweb.org/extended\#topic)
(Domain //www.ontoweb.org/extended\#news)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#topic)
(Subpropertyof //www.ontoweb.org/extended\#topic)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#INTEROPERABILITY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#INTEROPERABILITY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#interoperability")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONONTOLOGY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___APPLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___APPLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#title)
(Subpropertyof //www.ontoweb.org/extended\#title)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFORNATURALLANGUAGEPROCESSING)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PROCEEDINGS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#LICENCEPRICE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#LICENCEPRICE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#licenceprice")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___PERSON (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___PERSON property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#person)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#homepage)
(Subpropertyof //www.ontoweb.org/extended\#homepage)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___PROJECT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___PROJECT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#project)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#TECHREPORT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___RESEARCHAREA (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___RESEARCHAREA property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#researcharea)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#WORKGROUP)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PUBLICATION)
| null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/ontologies/domains/ontoweb-ontology/ontoweb-ontology-rewrite.lisp | lisp | Package :
complains. Is this a bug in the ontoweb ontology?
there's a relate called topic
so we don't load the class
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC) |
File created in WebOnto
(in-package "OCML")
(in-ontology ontoweb-ontology)
Automatically translated from RDF file # P"D:/users / jbd2 / code / freaky / rdf - files / ontoweb - data / ontoweb - ontology - rc1.rdfs "
at 20:27:57 , on 27/10/2003
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#GENERICONTOLOGY)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#TOOL ()
((//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_EXCEPTION_HANDLING :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOWS :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#IMPORTS :type //www.ontoweb.org/extended\#language)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_CONSTRAINT :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_PRODUTION_RULES :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#SUPPORTS :type //www.ontoweb.org/extended\#methodology)
(//WWW.ONTOWEB.ORG/EXTENDED\#PRICINGPOLICY :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_LOCKING_LEVEL :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_ATTACHED_INFERENCE_ENGINE :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_DEFAULT_VALUE :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#FUNCTIONALITY :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_COLLABORATIVE_WORKING :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_WORK_MANAGEMENT :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#MAXIMUM_ARITY_ALLOWED :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_MERGE :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_USER_CHANGE_CONTROL :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_DOCUMENTATION :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_INHERITANCE :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_CONFIGURATION_MANAGEMENT :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#ALLOW_VALIDATION :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#CONTAINS :type //www.ontoweb.org/extended\#ontology)
(//WWW.ONTOWEB.ORG/EXTENDED\#EXPORTSTO :type //www.ontoweb.org/extended\#language)
(//WWW.ONTOWEB.ORG/EXTENDED\#HAS_LIBRARIES_OF_ONTOLOGIES :type //www.w3.org/2001/xmlschema\#string)))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PERSON
((//WWW.ONTOWEB.ORG/EXTENDED\#PHONE :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#PHOTO :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#FAX :type //www.w3.org/2001/xmlschema\#string)
(//WWW.ONTOWEB.ORG/EXTENDED\#AUTHOROF :type //www.ontoweb.org/extended\#publication)
(//WWW.ONTOWEB.ORG/EXTENDED\#ADDRESS :type //www.w3.org/2001/xmlschema\#string)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWINSTANCESATTRIBUTES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWINSTANCESATTRIBUTES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowinstancesattributes")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#INPROCEEDINGS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___ORGANISATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___ORGANISATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#organisation)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#EVENT)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#COMPANYSTAFF)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#AUTHOR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#AUTHOR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#author")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#EDUCATIONALRESSOURCE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCLANGUAGE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCLANGUAGE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dclanguage")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#TRADITIONALLANGUAGE)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#RESEARCHAREA)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#LINGUISTICONTOLOGY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MISCRISKS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MISCRISKS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#miscrisks")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#RESEARCHGROUP)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#MISC)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEECOMMERCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEECOMMERCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#improveecommerce")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___PROJECT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___PROJECT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#project)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#homepage)
(Subpropertyof //www.ontoweb.org/extended\#homepage)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#MASTERTHESIS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCRIGHTS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCRIGHTS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcrights")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___PUBLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___PUBLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#title)
(Subpropertyof //www.ontoweb.org/extended\#title)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFOREDUCATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYUSED (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYUSED property
((Range //www.ontoweb.org/extended\#methodology)
(Domain //www.ontoweb.org/extended\#applicationforintelligentinformationintegration)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#methodologyused")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PROJECTMANAGEMENTBOARD)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#URL (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#URL property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#url")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#OTHERTECHNICALRISKS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#OTHERTECHNICALRISKS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#othertechnicalrisks")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFORINTELLIGENTINFORMATIONINTEGRATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGY property
((Range //www.ontoweb.org/extended\#methodology)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#methodology")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___EVENT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___EVENT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#event)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFMETACLASSES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFMETACLASSES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#numberofmetaclasses")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWSLOTDEFAULTVALUE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWSLOTDEFAULTVALUE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowslotdefaultvalue")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DURATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DURATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#duration")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION___EVENT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION___EVENT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#event)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#location)
(Subpropertyof //www.ontoweb.org/extended\#location)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#PROVIDER (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#PROVIDER property
((Range //www.ontoweb.org/extended\#organisation)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#provider")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#homepage")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BRANCHINGFACTOR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BRANCHINGFACTOR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#branchingfactor")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWCOMPLETE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWCOMPLETE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowcomplete")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#SOURCEINFORMATIONAVAILABILITY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#SOURCEINFORMATIONAVAILABILITY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#sourceinformationavailability")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFORKNOWLEDGEMANAGEMENT)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#MANUAL)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DIFFICULTYLEVEL (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DIFFICULTYLEVEL property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#difficultylevel")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DATE_START_ (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DATE_START_ property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#event)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#date(start)")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#COPYRIGHT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#COPYRIGHT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#copyright")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCFORMAT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCFORMAT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcformat")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#EDUCATIONALAIM (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#EDUCATIONALAIM property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#educationalaim")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HEAD (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HEAD property
((Range //www.ontoweb.org/extended\#person)
(Domain //www.ontoweb.org/extended\#project)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#head")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___ORGANISATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___ORGANISATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#organisation)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#WORKSHOP)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCSUBJECT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCSUBJECT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcsubject")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFORREENGINEERING)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#SUCCESSSTORIES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#SUCCESSSTORIES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#successstories")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ORGANISATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCCONTRIBUTOR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCCONTRIBUTOR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dccontributor")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFCONCEPTS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFCONCEPTS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#numberofconcepts")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#CARRIEDOUTBY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#CARRIEDOUTBY property
((Range //www.ontoweb.org/extended\#organisation)
(Domain //www.ontoweb.org/extended\#project)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#carriedoutby")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#LANGUAGE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MAXIMUMARITY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MAXIMUMARITY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#maximumarity")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION___ORGANISATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION___ORGANISATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#organisation)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#location)
(Subpropertyof //www.ontoweb.org/extended\#location)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___EVENT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___EVENT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#event)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#MISCPUBLICATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCPUBLISHER (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCPUBLISHER property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcpublisher")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#URL___EDUCATIONALRESSOURCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#URL___EDUCATIONALRESSOURCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#url)
(Subpropertyof //www.ontoweb.org/extended\#url)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#CONSORTIUM)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISWORKEDONBY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISWORKEDONBY property
((Range //www.ontoweb.org/extended\#academicstaff)
(Domain //www.ontoweb.org/extended\#researcharea)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#isworkedonby")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TITLE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TITLE property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#title")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MODELINGGUIDELINES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MODELINGGUIDELINES property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#modelingguidelines")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFORONTOLOGYLEARNING)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#STUDENT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEB2B (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEB2B property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#improveb2b")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEKM (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEKM property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#improvekm")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#PROBLEMS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#PROBLEMS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#problems")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFROMTHESCRATCH)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISSUPPORTEDBY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISSUPPORTEDBY property
((Range //www.ontoweb.org/extended\#tool)
(Domain //www.ontoweb.org/extended\#methodology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#issupportedby")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCCREATOR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCCREATOR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dccreator")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ARTICLE)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#BUSINESSAREA)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___ORGANISATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___ORGANISATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#organisation)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#homepage)
(Subpropertyof //www.ontoweb.org/extended\#homepage)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#AUTOMATIONOFMANUALTASK (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#AUTOMATIONOFMANUALTASK property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#automationofmanualtask")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#LOCATION property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#location")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#CATEGORY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#CATEGORY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#category")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#OTHERCOMMERCIALRISKS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#OTHERCOMMERCIALRISKS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#othercommercialrisks")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ACADEMICSTAFF)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___RESEARCHAREA (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___RESEARCHAREA property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#researcharea)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#CONFERENCE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#INITIALCONSTRUCTIONCOSTS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#INITIALCONSTRUCTIONCOSTS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#initialconstructioncosts")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#LECTURE)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#BOOKLET)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFORINFORMATIONRETRIEVAL)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ONTOLOGY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#knowledgeaquisition")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#SHORTDESCRIPTION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#SHORTDESCRIPTION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#news)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#shortdescription")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFORMERGE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#POTENTIALCOSTCUTTING (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#POTENTIALCOSTCUTTING property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#potentialcostcutting")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ENTERPRISE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISIMPORTEDFROM (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISIMPORTEDFROM property
((Range //www.ontoweb.org/extended\#tool)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#isimportedfrom")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NOTE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NOTE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#note")
))
;;; there's a relation methodology of arity 2 so
;;; (def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGY)
;; Continuation of the auto-generated OCML property/class declarations
;; (see the header comment at the top of this file).
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DATE_END_ (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DATE_END_ property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#event)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#date(end)")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#PERFORMANCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#PERFORMANCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#performance")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#INFERENCEENGINE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#INFERENCEENGINE property
((Range //www.ontoweb.org/extended\#tool)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#inferenceengine")
))
;; Root of the property hierarchy: the other property instances in this
;; file declare (Subpropertyof ...#default_root_relation) either directly
;; or via an intermediate property.  Its own metadata list is empty.
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DEFAULT_ROOT_RELATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DEFAULT_ROOT_RELATION property
())
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#INBOOK)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#INCLUDES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#INCLUDES property
((Range //www.ontoweb.org/extended\#ontology)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#includes")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#SPECIALINTERESTGROUP)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#UNIVERSITY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWSOUND (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWSOUND property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowsound")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DELIVERYLANGUAGE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DELIVERYLANGUAGE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#deliverylanguage")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#URL___PUBLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#URL___PUBLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#url)
(Subpropertyof //www.ontoweb.org/extended\#url)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ABSTRACT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ABSTRACT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#abstract")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#EMPLOYS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#EMPLOYS property
((Range //www.ontoweb.org/extended\#person)
(Domain //www.ontoweb.org/extended\#organisation)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#employs")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#KEYWORDS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#KEYWORDS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#keywords")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCTITLE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCTITLE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dctitle")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___EDUCATIONALRESSOURCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___EDUCATIONALRESSOURCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#title)
(Subpropertyof //www.ontoweb.org/extended\#title)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___NEWS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___NEWS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#news)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#USEDTODEVELOP (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#USEDTODEVELOP property
((Range //www.ontoweb.org/extended\#ontology)
(Domain //www.ontoweb.org/extended\#methodology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#usedtodevelop")
))
;; `using' is the inverse direction of `usedtodevelop' above
;; (ontology -> methodology) — NOTE(review): no explicit inverse slot is
;; declared, so the two are independent facts here.
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#USING (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#USING property
((Range //www.ontoweb.org/extended\#methodology)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#using")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#INFERENCEENGINEUSED (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#INFERENCEENGINEUSED property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#applicationforintelligentinformationintegration)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#inferenceengineused")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___PERSON (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___PERSON property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#person)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#INCOLLECTION)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#DEPARTMENT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCCOVERAGE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCCOVERAGE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dccoverage")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS___APPLICATIONFORINTELLIGENTINFORMATIONINTEGRATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS___APPLICATIONFORINTELLIGENTINFORMATIONINTEGRATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#applicationforintelligentinformationintegration)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#benefits)
(Subpropertyof //www.ontoweb.org/extended\#benefits)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#FINANCEDBY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#FINANCEDBY property
((Range //www.ontoweb.org/extended\#organisation)
(Domain //www.ontoweb.org/extended\#project)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#financedby")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWPRODUCTIONRULES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWPRODUCTIONRULES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowproductionrules")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PROJECT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISEXPORTEDTO (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISEXPORTEDTO property
((Range //www.ontoweb.org/extended\#tool)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#isexportedto")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCIDENTIFIER (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCIDENTIFIER property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcidentifier")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___NEWS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___NEWS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#news)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFRELATIONS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFRELATIONS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#numberofrelations")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFORECOMMERCE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___PROJECT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___PROJECT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#project)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BUSINESSSECTOR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BUSINESSSECTOR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#businesssector")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MANAGER (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MANAGER property
((Range //www.ontoweb.org/extended\#person)
(Domain //www.ontoweb.org/extended\#project)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#manager")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ONTOWEBPORTAL)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCDATE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCDATE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcdate")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METAONTOLOGY)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#TASKONTOLOGY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCRELATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCRELATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcrelation")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#YEAR (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#YEAR property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#publication)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#year")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#AUDIENCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#AUDIENCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#audience")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCTYPE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCTYPE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dctype")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#RELATED (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#RELATED property
((Range //www.ontoweb.org/extended\#topic)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#related")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#NEWS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFAXIOMS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFAXIOMS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#numberofaxioms")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#PEDAGOGICROLE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#PEDAGOGICROLE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#pedagogicrole")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#MEETING)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#THESIS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___ONTOLOGY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___ONTOLOGY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PRODUCT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___EDUCATIONALRESSOURCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___EDUCATIONALRESSOURCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#educationalressource)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___BUSINESSAREA (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___BUSINESSAREA property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessarea)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#BOOK)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#LACKOFTRANSPARENTROL (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#LACKOFTRANSPARENTROL property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#lackoftransparentrol")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEINTRANETCOMMUNICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#IMPROVEINTRANETCOMMUNICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#improveintranetcommunication")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#description")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#REPRESENTATIONONTOLOGY)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#INSTITUTE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCDESCRIPTION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCDESCRIPTION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcdescription")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MAINTENANCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MAINTENANCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#maintenance")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#ASSOCIATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#USEDTOIMPLEMENT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#USEDTOIMPLEMENT property
((Range //www.ontoweb.org/extended\#ontology)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#usedtoimplement")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#RECOMMENDEDLIFECYCLE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#RECOMMENDEDLIFECYCLE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#methodology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#recommendedlifecycle")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION___APPLICATIONFORINTELLIGENTINFORMATIONINTEGRATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION___APPLICATIONFORINTELLIGENTINFORMATIONINTEGRATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#applicationforintelligentinformationintegration)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#knowledgeaquisition)
(Subpropertyof //www.ontoweb.org/extended\#knowledgeaquisition)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#GENERATESFROM (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#GENERATESFROM property
((Range //www.ontoweb.org/extended\#tool)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#generatesfrom")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISIMPLEMENTEDIN (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISIMPLEMENTEDIN property
((Range //www.ontoweb.org/extended\#language)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#isimplementedin")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWREASONING (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWREASONING property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowreasoning")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#KRFORMALISM (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#KRFORMALISM property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#krformalism")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#STUDIESAT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#STUDIESAT property
((Range //www.ontoweb.org/extended\#university)
(Domain //www.ontoweb.org/extended\#student)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#studiesat")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#WEBBASEDLANGUAGE)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#name")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFINSTANCES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NUMBEROFINSTANCES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#numberofinstances")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#DEFAULT_ROOT_CONCEPT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ISDEALTWITHIN (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ISDEALTWITHIN property
((Range //www.ontoweb.org/extended\#project)
(Domain //www.ontoweb.org/extended\#researcharea)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#isdealtwithin")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWIMPLEMENTEDINFERENCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ALLOWIMPLEMENTEDINFERENCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#language)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#allowimplementedinference")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC___EDUCATIONALRESSOURCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC___EDUCATIONALRESSOURCE property
((Range //www.ontoweb.org/extended\#topic)
(Domain //www.ontoweb.org/extended\#educationalressource)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#topic)
(Subpropertyof //www.ontoweb.org/extended\#topic)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#CARRIESOUT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#CARRIESOUT property
((Range //www.ontoweb.org/extended\#project)
(Domain //www.ontoweb.org/extended\#organisation)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#carriesout")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#MEMBER (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#MEMBER property
((Range //www.ontoweb.org/extended\#person)
(Domain //www.ontoweb.org/extended\#project)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#member")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFORCOOPERATIVECONSTRUCTION)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#METHODOLOGYFOREVALUATION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#benefits")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#CARRIEDOUTBY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#CARRIEDOUTBY property
((Range //www.ontoweb.org/extended\#organisation)
(Domain //www.ontoweb.org/extended\#researcharea)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#carriedoutby")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___APPLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___APPLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#homepage)
(Subpropertyof //www.ontoweb.org/extended\#homepage)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BIBLIOGRAPHYDETAILS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BIBLIOGRAPHYDETAILS property
((Range //www.ontoweb.org/extended\#publication)
(Domain //www.ontoweb.org/extended\#educationalressource)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#bibliographydetails")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#ADVANTAGES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#ADVANTAGES property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#advantages")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#RUNTIMEDEPLOYMENT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#RUNTIMEDEPLOYMENT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#runtimedeployment")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BELONGSTO (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BELONGSTO property
((Range //www.ontoweb.org/extended\#organisation)
(Domain //www.ontoweb.org/extended\#person)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#belongsto")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___BUSINESSAREA (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___BUSINESSAREA property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessarea)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#EMAIL (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#EMAIL property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#person)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#email")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DEVELOPS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DEVELOPS property
((Range //www.ontoweb.org/extended\#product)
(Domain //www.ontoweb.org/extended\#organisation)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#develops")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___APPLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___APPLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#FINANCES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#FINANCES property
((Range //www.ontoweb.org/extended\#project)
(Domain //www.ontoweb.org/extended\#organisation)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#finances")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DCSOURCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DCSOURCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontowebportal)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#dcsource")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#DOMAINONTOLOGY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#QUALITYASSURANCE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#QUALITYASSURANCE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#qualityassurance")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#PUBLISHES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#PUBLISHES property
((Range //www.ontoweb.org/extended\#publication)
(Domain //www.ontoweb.org/extended\#organisation)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#publishes")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#COMPETITION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#COMPETITION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#competition")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#GENERATES (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#GENERATES property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#generates")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#SEMANTICPORTALSANDWEBCOMMUNITIES)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#BUSINESSSCENARIO)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#CONTACT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#CONTACT property
((Range //www.ontoweb.org/extended\#person)
(Domain //www.ontoweb.org/extended\#event)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#contact")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PHDTHESIS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS___APPLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#BENEFITS___APPLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#benefits)
(Subpropertyof //www.ontoweb.org/extended\#benefits)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION___APPLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#KNOWLEDGEAQUISITION___APPLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#knowledgeaquisition)
(Subpropertyof //www.ontoweb.org/extended\#knowledgeaquisition)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#STEPS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#STEPS property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#methodology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#steps")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#DOMAINTASKONTOLOGY)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#EXHIBITION)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DOMAINOFONTOLOGY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DOMAINOFONTOLOGY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#domainofontology")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC property
((Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#topic")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC___NEWS (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TOPIC___NEWS property
((Range //www.ontoweb.org/extended\#topic)
(Domain //www.ontoweb.org/extended\#news)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#topic)
(Subpropertyof //www.ontoweb.org/extended\#topic)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#INTEROPERABILITY (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#INTEROPERABILITY property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#businessscenario)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#interoperability")
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONONTOLOGY)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___APPLICATION (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#TITLE___APPLICATION property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#application)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#title)
(Subpropertyof //www.ontoweb.org/extended\#title)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#APPLICATIONFORNATURALLANGUAGEPROCESSING)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PROCEEDINGS)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#LICENCEPRICE (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#LICENCEPRICE property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#ontology)
(Subpropertyof //www.ontoweb.org/extended\#default_root_relation)
(Label "#licenceprice")
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___PERSON (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#HOMEPAGE___PERSON property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#person)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#homepage)
(Subpropertyof //www.ontoweb.org/extended\#homepage)
))
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___PROJECT (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#DESCRIPTION___PROJECT property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#project)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#description)
(Subpropertyof //www.ontoweb.org/extended\#description)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#TECHREPORT)
(def-relation //WWW.ONTOWEB.ORG/EXTENDED\#NAME___RESEARCHAREA (?x ?y))
(def-instance //WWW.ONTOWEB.ORG/EXTENDED\#NAME___RESEARCHAREA property
((Range //www.w3.org/2001/xmlschema\#string)
(Domain //www.ontoweb.org/extended\#researcharea)
(//Schema.Ontoprise.Com/Oxml/Rdf/1.0\#Is_Local_Relation_Of //www.ontoweb.org/extended\#name)
(Subpropertyof //www.ontoweb.org/extended\#name)
))
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#WORKGROUP)
(def-class //WWW.ONTOWEB.ORG/EXTENDED\#PUBLICATION)
|
24112d3af6a026890b868c478a0cd07e3aaa234c018766e00bae8cc80599ae7b | WorksHub/client | interceptors.cljs | (ns wh.interceptors
(:require
[re-frame.core :refer [trim-v]]))
(def default-interceptors [trim-v])
| null | https://raw.githubusercontent.com/WorksHub/client/a51729585c2b9d7692e57b3edcd5217c228cf47c/client/env/prod/wh/interceptors.cljs | clojure | (ns wh.interceptors
(:require
[re-frame.core :refer [trim-v]]))
(def default-interceptors [trim-v])
| |
4e0202334a6598e91d891b555de8b0a2de62d0a75445cc69940676756e495a48 | slipstream/SlipStreamServer | test_utils.clj | (ns com.sixsq.slipstream.ssclj.resources.deployment.test-utils
(:require [clojure.test :refer [deftest is]]
[com.sixsq.slipstream.ssclj.resources.deployment.utils :as utils]))
;; Test fixture: a root IMAGE module with no :parentModule. It is the base of
;; every inheritance chain in this namespace (image-b, comp-a, ... all derive
;; from it) and supplies :imageIDs for clouds a/b/c that the resolution tests
;; expect to be inherited and/or overridden by children.
(def image-a {:description "descr image-a"
              :path "root/image-a"
              :name "image-a"
              :type "IMAGE"
              :logoURL ""
              :content {:loginUser "root"
                        :created "2018-07-25T10:07:43.309Z"
                        :updated "2018-07-25T10:07:43.309Z"
                        :imageIDs {:cloud-a 1
                                   :cloud-b 2
                                   :cloud-c 3}
                        :author "test"
                        :networkType "public"
                        :commit "commit image a"
                        :os "Ubuntu"}
              :id "module/image-a",
              :resourceURI ""})
;; Test fixture: an IMAGE module whose :content names image-a as :parentModule.
;; Its :imageIDs deliberately overlap the parent's (:cloud-a 10 overrides the
;; parent's 1, :cloud-b repeats, :cloud-d is new) so the "image-with-parent"
;; test can check the child-wins merge of inherited image IDs.
(def image-b {:description "descr image-b"
              :path "root/image-b"
              :name "image-b"
              :type "IMAGE"
              :logoURL ""
              :content {:parentModule image-a
                        :loginUser "ubuntu"
                        :created "2018-07-25T10:07:43.309Z"
                        :updated "2018-07-25T10:07:43.309Z"
                        :imageIDs {:cloud-a 10
                                   :cloud-b 2
                                   :cloud-d 4}
                        :author "test"
                        :networkType "public"
                        :commit "commit image b"
                        :os "Ubuntu"}
              :id "module/image-b",
              :resourceURI ""})
;; Test fixture: a COMPONENT module built directly on image-a. Declares four
;; :outputParameters (note "port" carries an explicit default :value "8080")
;; and a full set of :targets (packages, postinstall, onVmAdd/onVmRemove,
;; deployment), which child components below selectively override.
(def comp-a {:description "Apache web server appliance with custom landing page.",
             :path "examples/tutorials/service-testing/apache",
             ;:logo {:href "logolink"},
             :content {:parentModule image-a
                       :updated "2018-10-03T13:19:47.310Z",
                       :outputParameters [{:parameter "hostname", :description "hostname/ip of the image"}
                                          {:parameter "port", :description "Port", :value "8080"}
                                          {:parameter "instanceid", :description "Cloud instance id"}
                                          {:parameter "ready", :description "Server ready to recieve connections"}],
                       :created "2018-10-03T13:19:47.310Z",
                       :targets {:packages ["yum-utils" "apache"]
                                 :postinstall "postinstall comp-a",
                                 :onVmRemove "onVmRemove comp-a",
                                 :onVmAdd "onVmAdd comp-a",
                                 :deployment "deployment comp-a"
                                 },
                       :author "super",
                       :networkType "public",
                       :commit "update ifb-core-cloud flavor"},
             :updated "2018-10-03T13:19:47.347Z",
             :name "apache",
             :type "COMPONENT",
             :created "2018-07-25T10:08:49.035Z",
             :id "module/comp-a",
             :parentPath "examples/tutorials/service-testing",
             :resourceURI ""})
;; Test fixture: a COMPONENT deriving from comp-a. Its "port" output parameter
;; has a different :value ("80") and no :description, and its :targets replace
;; the parent's scripts and packages (["emacs"] instead of the parent's list),
;; exercising child-overrides-parent behavior during resolution.
(def comp-b {:description "Apache web server ++",
             :path "examples/tutorials/service-testing/apache++",
             :content {:parentModule comp-a
                       :updated "2018-10-03T13:19:47.310Z",
                       :outputParameters [{:parameter "hostname", :description "hostname/ip of the image"}
                                          {:parameter "port", :value "80"}
                                          {:parameter "instanceid", :description "Cloud instance id"}
                                          {:parameter "ready", :description "Server ready to recieve connections"}],
                       :created "2018-10-03T13:19:47.310Z",
                       :targets {:postinstall "postinstall comp-b",
                                 :onVmRemove "onVmRemove comp-b",
                                 :packages ["emacs"]
                                 :onVmAdd "onVmAdd comp-b",
                                 :deployment "deployment comp-b"
                                 },
                       :author "super",
                       :networkType "public",
                       :commit "update ifb-core-cloud flavor"},
             :updated "2018-10-03T13:19:47.347Z",
             :name "apache++",
             :type "COMPONENT",
             :created "2018-07-25T10:08:49.035Z",
             :id "module/comp-b",
             :parentPath "examples/tutorials/service-testing",
             :resourceURI ""})
;; Test fixture: a COMPONENT whose parent is comp-b, but with comp-b's
;; grandparent image swapped from image-a to image-b via assoc-in — this makes
;; the resolved chain comp-c -> comp-b -> comp-a -> image-b. Only a single
;; :deployment target is declared, so everything else must come from parents.
;; NOTE(review): :name "apache++" and :id "module/comp-b" duplicate comp-b's
;; values (description says "+++"); looks like a copy-paste slip in the
;; fixture — confirm whether any test depends on these exact values before
;; changing them.
(def comp-c {:description "Apache web server +++",
             :path "examples/tutorials/service-testing/apache+++",
             :content {:parentModule (assoc-in comp-b [:content :parentModule :content :parentModule] image-b)
                       :updated "2018-10-03T13:19:47.310Z",
                       :outputParameters [{:parameter "hostname", :description "hostname/ip of the image"}
                                          {:parameter "port", :value "80"}
                                          {:parameter "instanceid", :description "Cloud instance id"}
                                          {:parameter "ready", :description "Server ready to recieve connections"}],
                       :created "2018-10-03T13:19:47.310Z",
                       :targets {:deployment "deployment comp-c"},
                       :author "super",
                       :networkType "public",
                       :commit "update ifb-core-cloud flavor"},
             :updated "2018-10-03T13:19:47.347Z",
             :name "apache++",
             :type "COMPONENT",
             :created "2018-07-25T10:08:49.035Z",
             :id "module/comp-b",
             :parentPath "examples/tutorials/service-testing",
             :resourceURI ""})
;; Test fixture: an APPLICATION module with two :nodes — "comp-c" (the deep
;; component chain above) and "image-b". The image-b node carries
;; :parameterMappings that wire its port/ready/hostname parameters to the
;; corresponding comp-c outputs ("comp-c:port" etc., each :mapped true),
;; giving the resolver a cross-node parameter-mapping case to handle.
(def app-a {:description "Deployment",
            :path "examples/tutorials/service-testing/system",
            :content {:updated "2018-07-25T10:09:18.955Z",
                      :created "2018-07-25T10:09:18.955Z",
                      :author "super",
                      :nodes [{:node "comp-c",
                               :multiplicity 1,
                               :component comp-c}
                              {:node "image-b", :multiplicity 1,
                               :component image-b,
                               :parameterMappings [{:parameter "image-b.port",
                                                    :value "comp-c:port", :mapped true}
                                                   {:parameter "image-b.ready",
                                                    :value "comp-c:ready", :mapped true}
                                                   {:parameter "image-b.hostname",
                                                    :value "comp-c:hostname", :mapped true}]}]
                      :commit "update image ids"},
            :updated "2018-07-25T10:09:18.972Z",
            :name "system",
            :type "APPLICATION",
            :created "2018-07-25T10:09:18.583Z",
            :id "module/app",
            :parentPath "examples/tutorials/service-testing",
            :resourceURI ""})
;; Resolving a template for a bare image (no parent): the expected value shows
;; that resolution adds a default :inputParameters list (credential.id), a
;; default :outputParameters list (keypair.name, instanceid, statecustom,
;; hostname, abort, password.ssh, url.ssh, complete, url.service), an empty
;; :targets map, and drops :parentModule, while the image's own fields
;; (:imageIDs, :loginUser, :os, top-level :id/:path/:name, ...) pass through
;; unchanged. The expected map is compared with = against the full result, so
;; any extra or missing key fails the test.
(deftest test-resolve-template-from-simple-image
  (is
    (=
      (utils/resolve-deployment-template {:module image-a})
      {:module
       {:content
        {:author "test"
         :commit "commit image a"
         :created "2018-07-25T10:07:43.309Z"
         :imageIDs {:cloud-a 1
                    :cloud-b 2
                    :cloud-c 3}
         :inputParameters
         '({:description "Cloud credential ID for managing node deployment"
            :parameter "credential.id"})
         :loginUser "root"
         :networkType "public"
         :os "Ubuntu"
         :outputParameters '({:description "SSH keypair name used"
                              :parameter "keypair.name"}
                             {:description "Cloud instance id"
                              :parameter "instanceid"}
                             {:description "Custom state"
                              :parameter "statecustom"}
                             {:description "Hostname or IP address of the image"
                              :parameter "hostname"}
                             {:description "Machine abort flag, set when aborting"
                              :parameter "abort"}
                             {:description "SSH password if available"
                              :parameter "password.ssh"}
                             {:description "SSH URL to connect to virtual machine"
                              :parameter "url.ssh"}
                             {:description "'true' when current state is completed"
                              :parameter "complete"}
                             {:description "Optional service URL for virtual machine"
                              :parameter "url.service"})
         :targets {}
         :updated "2018-07-25T10:07:43.309Z"}
        :description "descr image-a"
        :logoURL ""
        :name "image-a"
        :path "root/image-a"
        :id "module/image-a"
        :resourceURI ""
        :type "IMAGE"}})))
;; Resolving a template for an image with a parent (image-b -> image-a): the
;; expected value shows parent and child :imageIDs merged with the child
;; winning on conflict ({:cloud-a 10 :cloud-b 2 :cloud-c 3 :cloud-d 4} — the
;; child's 10 overrides the parent's 1, the parent-only :cloud-c survives),
;; the child's :loginUser/:commit kept, :parentModule dropped, and the same
;; default input/output parameters and empty :targets added as in the
;; simple-image test above.
(deftest test-resolve-template-from-image-with-parent
  (is (= (utils/resolve-deployment-template {:module image-b})
         {:module
          {:content {:author "test"
                     :commit "commit image b"
                     :created "2018-07-25T10:07:43.309Z"
                     :imageIDs {:cloud-a 10
                                :cloud-b 2
                                :cloud-c 3
                                :cloud-d 4}
                     :inputParameters '({:description "Cloud credential ID for managing node deployment"
                                         :parameter "credential.id"})
                     :loginUser "ubuntu"
                     :networkType "public"
                     :os "Ubuntu"
                     :outputParameters '({:description "SSH keypair name used"
                                          :parameter "keypair.name"}
                                         {:description "Cloud instance id"
                                          :parameter "instanceid"}
                                         {:description "Custom state"
                                          :parameter "statecustom"}
                                         {:description "Hostname or IP address of the image"
                                          :parameter "hostname"}
                                         {:description "Machine abort flag, set when aborting"
                                          :parameter "abort"}
                                         {:description "SSH password if available"
                                          :parameter "password.ssh"}
                                         {:description "SSH URL to connect to virtual machine"
                                          :parameter "url.ssh"}
                                         {:description "'true' when current state is completed"
                                          :parameter "complete"}
                                         {:description "Optional service URL for virtual machine"
                                          :parameter "url.service"})
                     :targets {}
                     :updated "2018-07-25T10:07:43.309Z"}
           :description "descr image-b"
           :logoURL ""
           :name "image-b"
           :path "root/image-b"
           :resourceURI ""
           :id "module/image-b"
           :type "IMAGE"}})))
(deftest test-resolve-template-from-component
(is (= (utils/resolve-deployment-template {:module comp-a})
{:module
{:content {:author "super"
:commit "update ifb-core-cloud flavor"
:created "2018-10-03T13:19:47.310Z"
:imageIDs {:cloud-a 1
:cloud-b 2
:cloud-c 3}
:inputParameters '({:description "Cloud credential ID for managing node deployment"
:parameter "credential.id"})
:loginUser "root"
:networkType "public"
:os "Ubuntu"
:outputParameters '({:description "Server ready to recieve connections"
:parameter "ready"}
{:description "SSH keypair name used"
:parameter "keypair.name"}
{:description "Cloud instance id"
:parameter "instanceid"}
{:description "Custom state"
:parameter "statecustom"}
{:description "hostname/ip of the image"
:parameter "hostname"}
{:description "Port"
:parameter "port"
:value "8080"}
{:description "Machine abort flag, set when aborting"
:parameter "abort"}
{:description "SSH password if available"
:parameter "password.ssh"}
{:description "SSH URL to connect to virtual machine"
:parameter "url.ssh"}
{:description "'true' when current state is completed"
:parameter "complete"}
{:description "Optional service URL for virtual machine"
:parameter "url.service"})
:targets {:deployment '("deployment comp-a")
:onVmAdd '("onVmAdd comp-a")
:onVmRemove '("onVmRemove comp-a")
:packages '("yum-utils"
"apache")
:postinstall '("postinstall comp-a")}
:updated "2018-10-03T13:19:47.310Z"}
:created "2018-07-25T10:08:49.035Z"
:description "Apache web server appliance with custom landing page."
:id "module/comp-a"
:name "apache"
:parentPath "examples/tutorials/service-testing"
:path "examples/tutorials/service-testing/apache"
:resourceURI ""
:type "COMPONENT"
:updated "2018-10-03T13:19:47.347Z"}})))
(deftest test-resolve-template-from-component-with-heritage
(is (= (utils/resolve-deployment-template {:module comp-b})
{:module
{:content {:author "super"
:commit "update ifb-core-cloud flavor"
:created "2018-10-03T13:19:47.310Z"
:imageIDs {:cloud-a 1
:cloud-b 2
:cloud-c 3}
:inputParameters '({:description "Cloud credential ID for managing node deployment"
:parameter "credential.id"})
:loginUser "root"
:networkType "public"
:os "Ubuntu"
:outputParameters '({:description "Server ready to recieve connections"
:parameter "ready"}
{:description "SSH keypair name used"
:parameter "keypair.name"}
{:description "Cloud instance id"
:parameter "instanceid"}
{:description "Custom state"
:parameter "statecustom"}
{:description "hostname/ip of the image"
:parameter "hostname"}
{:description "Port"
:parameter "port"
:value "80"}
{:description "Machine abort flag, set when aborting"
:parameter "abort"}
{:description "SSH password if available"
:parameter "password.ssh"}
{:description "SSH URL to connect to virtual machine"
:parameter "url.ssh"}
{:description "'true' when current state is completed"
:parameter "complete"}
{:description "Optional service URL for virtual machine"
:parameter "url.service"})
:targets {:deployment '("deployment comp-a"
"deployment comp-b")
:onVmAdd '("onVmAdd comp-a"
"onVmAdd comp-b")
:onVmRemove '("onVmRemove comp-a"
"onVmRemove comp-b")
:packages '("yum-utils"
"apache"
"emacs")
:postinstall '("postinstall comp-a"
"postinstall comp-b")}
:updated "2018-10-03T13:19:47.310Z"}
:created "2018-07-25T10:08:49.035Z"
:description "Apache web server ++"
:id "module/comp-b"
:name "apache++"
:parentPath "examples/tutorials/service-testing"
:path "examples/tutorials/service-testing/apache++"
:resourceURI ""
:type "COMPONENT"
:updated "2018-10-03T13:19:47.347Z"}})))
(deftest test-resolve-template-from-component-with-heritage-and-image-with-parent
(is (= (utils/resolve-deployment-template {:module comp-c})
{:module
{:content {:author "super"
:commit "update ifb-core-cloud flavor"
:created "2018-10-03T13:19:47.310Z"
:imageIDs {:cloud-a 10
:cloud-b 2
:cloud-c 3
:cloud-d 4}
:inputParameters '({:description "Cloud credential ID for managing node deployment"
:parameter "credential.id"})
:loginUser "ubuntu"
:networkType "public"
:os "Ubuntu"
:outputParameters
'({:description "Server ready to recieve connections"
:parameter "ready"}
{:description "SSH keypair name used"
:parameter "keypair.name"}
{:description "Cloud instance id"
:parameter "instanceid"}
{:description "Custom state"
:parameter "statecustom"}
{:description "hostname/ip of the image"
:parameter "hostname"}
{:description "Port"
:parameter "port"
:value "80"}
{:description "Machine abort flag, set when aborting"
:parameter "abort"}
{:description "SSH password if available"
:parameter "password.ssh"}
{:description "SSH URL to connect to virtual machine"
:parameter "url.ssh"}
{:description "'true' when current state is completed"
:parameter "complete"}
{:description "Optional service URL for virtual machine"
:parameter "url.service"})
:targets {:deployment '("deployment comp-a"
"deployment comp-b"
"deployment comp-c")
:onVmAdd '("onVmAdd comp-a"
"onVmAdd comp-b")
:onVmRemove '("onVmRemove comp-a"
"onVmRemove comp-b")
:packages '("yum-utils"
"apache"
"emacs")
:postinstall '("postinstall comp-a"
"postinstall comp-b")}
:updated "2018-10-03T13:19:47.310Z"}
:created "2018-07-25T10:08:49.035Z"
:description "Apache web server +++"
:id "module/comp-b"
:name "apache++"
:parentPath "examples/tutorials/service-testing"
:path "examples/tutorials/service-testing/apache+++"
:resourceURI ""
:type "COMPONENT"
:updated "2018-10-03T13:19:47.347Z"}})))
| null | https://raw.githubusercontent.com/slipstream/SlipStreamServer/3ee5c516877699746c61c48fc72779fe3d4e4652/cimi-resources/test/com/sixsq/slipstream/ssclj/resources/deployment/test_utils.clj | clojure | :logo {:href "logolink"}, | (ns com.sixsq.slipstream.ssclj.resources.deployment.test-utils
(:require [clojure.test :refer [deftest is]]
[com.sixsq.slipstream.ssclj.resources.deployment.utils :as utils]))
(def image-a {:description "descr image-a"
:path "root/image-a"
:name "image-a"
:type "IMAGE"
:logoURL ""
:content {:loginUser "root"
:created "2018-07-25T10:07:43.309Z"
:updated "2018-07-25T10:07:43.309Z"
:imageIDs {:cloud-a 1
:cloud-b 2
:cloud-c 3}
:author "test"
:networkType "public"
:commit "commit image a"
:os "Ubuntu"}
:id "module/image-a",
:resourceURI ""})
(def image-b {:description "descr image-b"
:path "root/image-b"
:name "image-b"
:type "IMAGE"
:logoURL ""
:content {:parentModule image-a
:loginUser "ubuntu"
:created "2018-07-25T10:07:43.309Z"
:updated "2018-07-25T10:07:43.309Z"
:imageIDs {:cloud-a 10
:cloud-b 2
:cloud-d 4}
:author "test"
:networkType "public"
:commit "commit image b"
:os "Ubuntu"}
:id "module/image-b",
:resourceURI ""})
(def comp-a {:description "Apache web server appliance with custom landing page.",
:path "examples/tutorials/service-testing/apache",
:content {:parentModule image-a
:updated "2018-10-03T13:19:47.310Z",
:outputParameters [{:parameter "hostname", :description "hostname/ip of the image"}
{:parameter "port", :description "Port", :value "8080"}
{:parameter "instanceid", :description "Cloud instance id"}
{:parameter "ready", :description "Server ready to recieve connections"}],
:created "2018-10-03T13:19:47.310Z",
:targets {:packages ["yum-utils" "apache"]
:postinstall "postinstall comp-a",
:onVmRemove "onVmRemove comp-a",
:onVmAdd "onVmAdd comp-a",
:deployment "deployment comp-a"
},
:author "super",
:networkType "public",
:commit "update ifb-core-cloud flavor"},
:updated "2018-10-03T13:19:47.347Z",
:name "apache",
:type "COMPONENT",
:created "2018-07-25T10:08:49.035Z",
:id "module/comp-a",
:parentPath "examples/tutorials/service-testing",
:resourceURI ""})
(def comp-b {:description "Apache web server ++",
:path "examples/tutorials/service-testing/apache++",
:content {:parentModule comp-a
:updated "2018-10-03T13:19:47.310Z",
:outputParameters [{:parameter "hostname", :description "hostname/ip of the image"}
{:parameter "port", :value "80"}
{:parameter "instanceid", :description "Cloud instance id"}
{:parameter "ready", :description "Server ready to recieve connections"}],
:created "2018-10-03T13:19:47.310Z",
:targets {:postinstall "postinstall comp-b",
:onVmRemove "onVmRemove comp-b",
:packages ["emacs"]
:onVmAdd "onVmAdd comp-b",
:deployment "deployment comp-b"
},
:author "super",
:networkType "public",
:commit "update ifb-core-cloud flavor"},
:updated "2018-10-03T13:19:47.347Z",
:name "apache++",
:type "COMPONENT",
:created "2018-07-25T10:08:49.035Z",
:id "module/comp-b",
:parentPath "examples/tutorials/service-testing",
:resourceURI ""})
(def comp-c {:description "Apache web server +++",
:path "examples/tutorials/service-testing/apache+++",
:content {:parentModule (assoc-in comp-b [:content :parentModule :content :parentModule] image-b)
:updated "2018-10-03T13:19:47.310Z",
:outputParameters [{:parameter "hostname", :description "hostname/ip of the image"}
{:parameter "port", :value "80"}
{:parameter "instanceid", :description "Cloud instance id"}
{:parameter "ready", :description "Server ready to recieve connections"}],
:created "2018-10-03T13:19:47.310Z",
:targets {:deployment "deployment comp-c"},
:author "super",
:networkType "public",
:commit "update ifb-core-cloud flavor"},
:updated "2018-10-03T13:19:47.347Z",
:name "apache++",
:type "COMPONENT",
:created "2018-07-25T10:08:49.035Z",
:id "module/comp-b",
:parentPath "examples/tutorials/service-testing",
:resourceURI ""})
(def app-a {:description "Deployment",
:path "examples/tutorials/service-testing/system",
:content {:updated "2018-07-25T10:09:18.955Z",
:created "2018-07-25T10:09:18.955Z",
:author "super",
:nodes [{:node "comp-c",
:multiplicity 1,
:component comp-c}
{:node "image-b", :multiplicity 1,
:component image-b,
:parameterMappings [{:parameter "image-b.port",
:value "comp-c:port", :mapped true}
{:parameter "image-b.ready",
:value "comp-c:ready", :mapped true}
{:parameter "image-b.hostname",
:value "comp-c:hostname", :mapped true}]}]
:commit "update image ids"},
:updated "2018-07-25T10:09:18.972Z",
:name "system",
:type "APPLICATION",
:created "2018-07-25T10:09:18.583Z",
:id "module/app",
:parentPath "examples/tutorials/service-testing",
:resourceURI ""})
(deftest test-resolve-template-from-simple-image
(is
(=
(utils/resolve-deployment-template {:module image-a})
{:module
{:content
{:author "test"
:commit "commit image a"
:created "2018-07-25T10:07:43.309Z"
:imageIDs {:cloud-a 1
:cloud-b 2
:cloud-c 3}
:inputParameters
'({:description "Cloud credential ID for managing node deployment"
:parameter "credential.id"})
:loginUser "root"
:networkType "public"
:os "Ubuntu"
:outputParameters '({:description "SSH keypair name used"
:parameter "keypair.name"}
{:description "Cloud instance id"
:parameter "instanceid"}
{:description "Custom state"
:parameter "statecustom"}
{:description "Hostname or IP address of the image"
:parameter "hostname"}
{:description "Machine abort flag, set when aborting"
:parameter "abort"}
{:description "SSH password if available"
:parameter "password.ssh"}
{:description "SSH URL to connect to virtual machine"
:parameter "url.ssh"}
{:description "'true' when current state is completed"
:parameter "complete"}
{:description "Optional service URL for virtual machine"
:parameter "url.service"})
:targets {}
:updated "2018-07-25T10:07:43.309Z"}
:description "descr image-a"
:logoURL ""
:name "image-a"
:path "root/image-a"
:id "module/image-a"
:resourceURI ""
:type "IMAGE"}})))
(deftest test-resolve-template-from-image-with-parent
(is (= (utils/resolve-deployment-template {:module image-b})
{:module
{:content {:author "test"
:commit "commit image b"
:created "2018-07-25T10:07:43.309Z"
:imageIDs {:cloud-a 10
:cloud-b 2
:cloud-c 3
:cloud-d 4}
:inputParameters '({:description "Cloud credential ID for managing node deployment"
:parameter "credential.id"})
:loginUser "ubuntu"
:networkType "public"
:os "Ubuntu"
:outputParameters '({:description "SSH keypair name used"
:parameter "keypair.name"}
{:description "Cloud instance id"
:parameter "instanceid"}
{:description "Custom state"
:parameter "statecustom"}
{:description "Hostname or IP address of the image"
:parameter "hostname"}
{:description "Machine abort flag, set when aborting"
:parameter "abort"}
{:description "SSH password if available"
:parameter "password.ssh"}
{:description "SSH URL to connect to virtual machine"
:parameter "url.ssh"}
{:description "'true' when current state is completed"
:parameter "complete"}
{:description "Optional service URL for virtual machine"
:parameter "url.service"})
:targets {}
:updated "2018-07-25T10:07:43.309Z"}
:description "descr image-b"
:logoURL ""
:name "image-b"
:path "root/image-b"
:resourceURI ""
:id "module/image-b"
:type "IMAGE"}})))
(deftest test-resolve-template-from-component
(is (= (utils/resolve-deployment-template {:module comp-a})
{:module
{:content {:author "super"
:commit "update ifb-core-cloud flavor"
:created "2018-10-03T13:19:47.310Z"
:imageIDs {:cloud-a 1
:cloud-b 2
:cloud-c 3}
:inputParameters '({:description "Cloud credential ID for managing node deployment"
:parameter "credential.id"})
:loginUser "root"
:networkType "public"
:os "Ubuntu"
:outputParameters '({:description "Server ready to recieve connections"
:parameter "ready"}
{:description "SSH keypair name used"
:parameter "keypair.name"}
{:description "Cloud instance id"
:parameter "instanceid"}
{:description "Custom state"
:parameter "statecustom"}
{:description "hostname/ip of the image"
:parameter "hostname"}
{:description "Port"
:parameter "port"
:value "8080"}
{:description "Machine abort flag, set when aborting"
:parameter "abort"}
{:description "SSH password if available"
:parameter "password.ssh"}
{:description "SSH URL to connect to virtual machine"
:parameter "url.ssh"}
{:description "'true' when current state is completed"
:parameter "complete"}
{:description "Optional service URL for virtual machine"
:parameter "url.service"})
:targets {:deployment '("deployment comp-a")
:onVmAdd '("onVmAdd comp-a")
:onVmRemove '("onVmRemove comp-a")
:packages '("yum-utils"
"apache")
:postinstall '("postinstall comp-a")}
:updated "2018-10-03T13:19:47.310Z"}
:created "2018-07-25T10:08:49.035Z"
:description "Apache web server appliance with custom landing page."
:id "module/comp-a"
:name "apache"
:parentPath "examples/tutorials/service-testing"
:path "examples/tutorials/service-testing/apache"
:resourceURI ""
:type "COMPONENT"
:updated "2018-10-03T13:19:47.347Z"}})))
(deftest test-resolve-template-from-component-with-heritage
(is (= (utils/resolve-deployment-template {:module comp-b})
{:module
{:content {:author "super"
:commit "update ifb-core-cloud flavor"
:created "2018-10-03T13:19:47.310Z"
:imageIDs {:cloud-a 1
:cloud-b 2
:cloud-c 3}
:inputParameters '({:description "Cloud credential ID for managing node deployment"
:parameter "credential.id"})
:loginUser "root"
:networkType "public"
:os "Ubuntu"
:outputParameters '({:description "Server ready to recieve connections"
:parameter "ready"}
{:description "SSH keypair name used"
:parameter "keypair.name"}
{:description "Cloud instance id"
:parameter "instanceid"}
{:description "Custom state"
:parameter "statecustom"}
{:description "hostname/ip of the image"
:parameter "hostname"}
{:description "Port"
:parameter "port"
:value "80"}
{:description "Machine abort flag, set when aborting"
:parameter "abort"}
{:description "SSH password if available"
:parameter "password.ssh"}
{:description "SSH URL to connect to virtual machine"
:parameter "url.ssh"}
{:description "'true' when current state is completed"
:parameter "complete"}
{:description "Optional service URL for virtual machine"
:parameter "url.service"})
:targets {:deployment '("deployment comp-a"
"deployment comp-b")
:onVmAdd '("onVmAdd comp-a"
"onVmAdd comp-b")
:onVmRemove '("onVmRemove comp-a"
"onVmRemove comp-b")
:packages '("yum-utils"
"apache"
"emacs")
:postinstall '("postinstall comp-a"
"postinstall comp-b")}
:updated "2018-10-03T13:19:47.310Z"}
:created "2018-07-25T10:08:49.035Z"
:description "Apache web server ++"
:id "module/comp-b"
:name "apache++"
:parentPath "examples/tutorials/service-testing"
:path "examples/tutorials/service-testing/apache++"
:resourceURI ""
:type "COMPONENT"
:updated "2018-10-03T13:19:47.347Z"}})))
(deftest test-resolve-template-from-component-with-heritage-and-image-with-parent
(is (= (utils/resolve-deployment-template {:module comp-c})
{:module
{:content {:author "super"
:commit "update ifb-core-cloud flavor"
:created "2018-10-03T13:19:47.310Z"
:imageIDs {:cloud-a 10
:cloud-b 2
:cloud-c 3
:cloud-d 4}
:inputParameters '({:description "Cloud credential ID for managing node deployment"
:parameter "credential.id"})
:loginUser "ubuntu"
:networkType "public"
:os "Ubuntu"
:outputParameters
'({:description "Server ready to recieve connections"
:parameter "ready"}
{:description "SSH keypair name used"
:parameter "keypair.name"}
{:description "Cloud instance id"
:parameter "instanceid"}
{:description "Custom state"
:parameter "statecustom"}
{:description "hostname/ip of the image"
:parameter "hostname"}
{:description "Port"
:parameter "port"
:value "80"}
{:description "Machine abort flag, set when aborting"
:parameter "abort"}
{:description "SSH password if available"
:parameter "password.ssh"}
{:description "SSH URL to connect to virtual machine"
:parameter "url.ssh"}
{:description "'true' when current state is completed"
:parameter "complete"}
{:description "Optional service URL for virtual machine"
:parameter "url.service"})
:targets {:deployment '("deployment comp-a"
"deployment comp-b"
"deployment comp-c")
:onVmAdd '("onVmAdd comp-a"
"onVmAdd comp-b")
:onVmRemove '("onVmRemove comp-a"
"onVmRemove comp-b")
:packages '("yum-utils"
"apache"
"emacs")
:postinstall '("postinstall comp-a"
"postinstall comp-b")}
:updated "2018-10-03T13:19:47.310Z"}
:created "2018-07-25T10:08:49.035Z"
:description "Apache web server +++"
:id "module/comp-b"
:name "apache++"
:parentPath "examples/tutorials/service-testing"
:path "examples/tutorials/service-testing/apache+++"
:resourceURI ""
:type "COMPONENT"
:updated "2018-10-03T13:19:47.347Z"}})))
|
312aadaf93db477c08a2e47e7eca6590570afc8bbed868ded3c4d6b91f3d035f | artyom-poptsov/guile-dsv | rfc4180.scm | rfc4180.scm -- DSV parser for RFC 4180 format .
Copyright ( C ) 2015 , 2016 , 2020 < >
;;
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; The program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with the program. If not, see </>.
;;; Commentary:
A parser of RFC 4180 < > data format
;; (Comma-Separated Values, CSV).
;;; Code:
(define-module (dsv rfc4180)
#:use-module (ice-9 regex)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-26)
#:use-module ((string transform)
#:select (escape-special-chars))
#:use-module (ice-9 rdelim)
#:use-module (scheme documentation)
#:use-module (dsv common)
#:use-module (dsv parser)
#:use-module (dsv builder)
#:export (make-parser
make-string-parser
make-builder
scm->dsv
scm->dsv-string
dsv->scm
dsv-string->scm
guess-delimiter
;; Variables
%default-delimiter))
;;; Global variables
(define-with-docs %default-line-break
"Default line break style as described in the RFC."
"\r\n")
(define-with-docs %default-delimiter
"Default field delimiter."
#\,)
;;; Helper procedures
(define-syntax case-pred
(syntax-rules (else)
((_ pred key ((datum ...) exp) ...)
(cond
((or (pred key datum) ...) exp) ...))
((_ pred key ((datum ...) exp) ... (else else-exp ...))
(cond
((or (pred key datum) ...) exp) ...
(else else-exp ...)))))
;;; Writing
(define (make-builder scm port delimiter line-break)
(%make-builder scm
port
'rfc4180
(value-or-default delimiter %default-delimiter)
(value-or-default line-break %default-line-break)))
(define (escape-double-quotes field)
"Escape each double-quote in a FIELD with additional double-quote."
(escape-special-chars field #\" #\"))
(define (quote-field field)
"Quote a FIELD with double-quotes."
(string-append (string #\") field (string #\")))
(define* (scm->dsv builder)
"Create a DSV document from a data using a BUILDER."
(define (should-be-enclosed? field)
"Check if a FIELD should be enclosed in double-quotes."
(or (string-index field (char-set (builder-delimiter builder)
#\" #\newline))
(string-contains field (builder-line-break builder))))
(builder-build builder
(lambda (field)
(let ((escaped-field (escape-double-quotes field)))
(if (should-be-enclosed? escaped-field)
(quote-field escaped-field)
field)))))
(define (scm->dsv-string scm delimiter line-break)
(call-with-output-string
(lambda (port)
(scm->dsv (make-builder scm port delimiter line-break)))))
;;; Reading
(define (make-parser port delimiter known-delimiters comment-prefix)
(%make-parser port
'rfc4180
(value-or-default delimiter %default-delimiter)
(value-or-default known-delimiters %known-delimiters)
comment-prefix))
(define (make-string-parser str delimiter known-delimiters comment-prefix)
(call-with-input-string str (cut make-parser <> delimiter known-delimiters
comment-prefix)))
;; (define (test)
( display ( frame - procedure - name ( stack - ref ( make - stack # t ) 1 ) ) ) )
;;;; The parser itself.
XXX : The procedure does not handle comments . Although the RFC 4180 says
;; nothing about comments inside CSV data, it might be useful to handle
;; comments in some way if it is explicitly requested by the user.
(define (dsv->scm parser)
(define (fsm-read-quote-crlf table row buffer)
(define %current-state 'read-quote-crlf)
(let ((char (parser-read-char parser)))
(cond
((linefeed? char)
(fsm-read (append table (list (append row (list (buffer->string buffer)))))
'() ; row
'())) ; buffer
(else
(dsv-error %current-state
"Missing line feed after carriage return"
`((state . ,%current-state)
(table . ,table)
(row . ,row)
(buffer . ,buffer)
(char . ,char)))))))
(define (fsm-read-quote table row buffer)
(define %current-state 'read-quote)
(let ((char (parser-read-char parser)))
(cond
((double-quote? char)
(debug-fsm-transition %current-state 'read-quoted-field)
(fsm-read-quoted-field table row (cons char buffer)))
((delimiter? parser char)
(debug-fsm-transition %current-state 'read)
(fsm-read table
(append row (list (buffer->string buffer)))
'()))
((carriage-return? char)
(fsm-read-quote-crlf table row buffer))
((linefeed? char)
(debug-fsm-transition %current-state 'read)
(fsm-read (append table (list (append row (if (null? buffer)
(list "")
(list (buffer->string buffer))))))
'() ; row
'())) ; buffer
((eof-object? char)
(debug-fsm-transition %current-state 'end 'final)
(append table (list (append row (if (null? buffer)
(list "")
(list (buffer->string buffer)))))))
(else
(dsv-error %current-state
"A field contains unescaped double-quotes"
`((state . ,%current-state)
(table . ,table)
(row . ,row)
(buffer . ,buffer)
(char . ,char)))))))
(define (fsm-read-quoted-field table row buffer)
(define %current-state 'read-quoted-field)
(let ((char (parser-read-char parser)))
(cond
((eof-object? char)
(dsv-error 'fsm-read-quoted-field
"Missing quote at the end of a quoted field"
`((state . ,%current-state)
(table . ,table)
(row . ,row)
(buffer . ,buffer)
(char . ,char))))
((double-quote? char)
(debug-fsm-transition %current-state 'read-quote)
(fsm-read-quote table row buffer))
(else
(fsm-read-quoted-field table row (cons char buffer))))))
(define (fsm-read-field-crlf table row buffer)
(define %current-state 'read-field-crlf)
(let ((char (parser-read-char parser)))
(cond
((linefeed? char)
(fsm-read (append table (list (append row (list (buffer->string buffer)))))
'() ; row
'())) ; buffer
(else
(dsv-error %current-state
"Missing line feed after carriage return"
`((state . ,%current-state)
(table . ,table)
(row . ,row)
(buffer . ,buffer)
(char . ,char)))))))
(define (fsm-read-field table row buffer)
(define %current-state 'read-field)
(let ((char (parser-read-char parser)))
(cond
((or (eof-object? char) (delimiter? parser char))
(debug-fsm-transition %current-state 'read)
(fsm-read table
(append row (list (buffer->string buffer))) ; row
'())) ; buffer
((carriage-return? char)
(fsm-read-field-crlf table row buffer))
((linefeed? char)
(debug-fsm-transition %current-state 'read)
(fsm-read (append table (list (append row (list (buffer->string buffer)))))
'() ; row
'())) ; buffer
((double-quote? char)
(dsv-error %current-state "A double quote inside an unquoted field"
`((table . ,table)
(row . ,row)
(buffer . ,buffer)
(char . ,char))))
(else
(fsm-read-field table row (cons char buffer))))))
(define (fsm-read table row buffer)
(define %current-state 'read)
(let ((char (parser-read-char parser)))
(cond
((eof-object? char)
(debug-fsm-transition %current-state 'end 'final)
(if (null? row)
table
(append table (list row))))
((carriage-return? char)
(fsm-read table row buffer))
((double-quote? char)
(debug-fsm-transition %current-state 'read-quoted-field)
(fsm-read-quoted-field table row buffer))
((delimiter? parser char)
(fsm-read table (append row (list "")) '()))
((linefeed? char)
(fsm-read (append table (list row))
'() ; row
'())) ; buffer
(else
(debug-fsm-transition %current-state 'read-field)
(fsm-read-field table row (cons char buffer))))))
(fsm-read '() '() '()))
(define guess-delimiter (make-delimiter-guesser dsv->scm))
;;; rfc4180.scm ends here
| null | https://raw.githubusercontent.com/artyom-poptsov/guile-dsv/ece5701906a900a42ab184b03c8c9e9110b7caa3/modules/dsv/rfc4180.scm | scheme |
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
The program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with the program. If not, see </>.
Commentary:
(Comma-Separated Values, CSV).
Code:
Variables
Global variables
Helper procedures
Writing
Reading
(define (test)
The parser itself.
nothing about comments inside CSV data, it might be useful to handle
comments in some way if it is explicitly requested by the user.
row
buffer
row
buffer
row
buffer
row
buffer
row
buffer
row
buffer
rfc4180.scm ends here | rfc4180.scm -- DSV parser for RFC 4180 format .
Copyright ( C ) 2015 , 2016 , 2020 < >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
A parser of RFC 4180 < > data format
(define-module (dsv rfc4180)
#:use-module (ice-9 regex)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-26)
#:use-module ((string transform)
#:select (escape-special-chars))
#:use-module (ice-9 rdelim)
#:use-module (scheme documentation)
#:use-module (dsv common)
#:use-module (dsv parser)
#:use-module (dsv builder)
#:export (make-parser
make-string-parser
make-builder
scm->dsv
scm->dsv-string
dsv->scm
dsv-string->scm
guess-delimiter
%default-delimiter))
(define-with-docs %default-line-break
"Default line break style as described in the RFC."
"\r\n")
(define-with-docs %default-delimiter
"Default field delimiter."
#\,)
(define-syntax case-pred
(syntax-rules (else)
((_ pred key ((datum ...) exp) ...)
(cond
((or (pred key datum) ...) exp) ...))
((_ pred key ((datum ...) exp) ... (else else-exp ...))
(cond
((or (pred key datum) ...) exp) ...
(else else-exp ...)))))
(define (make-builder scm port delimiter line-break)
(%make-builder scm
port
'rfc4180
(value-or-default delimiter %default-delimiter)
(value-or-default line-break %default-line-break)))
(define (escape-double-quotes field)
"Escape each double-quote in a FIELD with additional double-quote."
(escape-special-chars field #\" #\"))
(define (quote-field field)
"Quote a FIELD with double-quotes."
(string-append (string #\") field (string #\")))
(define* (scm->dsv builder)
"Create a DSV document from a data using a BUILDER."
(define (should-be-enclosed? field)
"Check if a FIELD should be enclosed in double-quotes."
(or (string-index field (char-set (builder-delimiter builder)
#\" #\newline))
(string-contains field (builder-line-break builder))))
(builder-build builder
(lambda (field)
(let ((escaped-field (escape-double-quotes field)))
(if (should-be-enclosed? escaped-field)
(quote-field escaped-field)
field)))))
(define (scm->dsv-string scm delimiter line-break)
(call-with-output-string
(lambda (port)
(scm->dsv (make-builder scm port delimiter line-break)))))
(define (make-parser port delimiter known-delimiters comment-prefix)
(%make-parser port
'rfc4180
(value-or-default delimiter %default-delimiter)
(value-or-default known-delimiters %known-delimiters)
comment-prefix))
(define (make-string-parser str delimiter known-delimiters comment-prefix)
(call-with-input-string str (cut make-parser <> delimiter known-delimiters
comment-prefix)))
( display ( frame - procedure - name ( stack - ref ( make - stack # t ) 1 ) ) ) )
XXX : The procedure does not handle comments . Although the RFC 4180 says
(define (dsv->scm parser)
(define (fsm-read-quote-crlf table row buffer)
(define %current-state 'read-quote-crlf)
(let ((char (parser-read-char parser)))
(cond
((linefeed? char)
(fsm-read (append table (list (append row (list (buffer->string buffer)))))
(else
(dsv-error %current-state
"Missing line feed after carriage return"
`((state . ,%current-state)
(table . ,table)
(row . ,row)
(buffer . ,buffer)
(char . ,char)))))))
(define (fsm-read-quote table row buffer)
(define %current-state 'read-quote)
(let ((char (parser-read-char parser)))
(cond
((double-quote? char)
(debug-fsm-transition %current-state 'read-quoted-field)
(fsm-read-quoted-field table row (cons char buffer)))
((delimiter? parser char)
(debug-fsm-transition %current-state 'read)
(fsm-read table
(append row (list (buffer->string buffer)))
'()))
((carriage-return? char)
(fsm-read-quote-crlf table row buffer))
((linefeed? char)
(debug-fsm-transition %current-state 'read)
(fsm-read (append table (list (append row (if (null? buffer)
(list "")
(list (buffer->string buffer))))))
((eof-object? char)
(debug-fsm-transition %current-state 'end 'final)
(append table (list (append row (if (null? buffer)
(list "")
(list (buffer->string buffer)))))))
(else
(dsv-error %current-state
"A field contains unescaped double-quotes"
`((state . ,%current-state)
(table . ,table)
(row . ,row)
(buffer . ,buffer)
(char . ,char)))))))
(define (fsm-read-quoted-field table row buffer)
(define %current-state 'read-quoted-field)
(let ((char (parser-read-char parser)))
(cond
((eof-object? char)
(dsv-error 'fsm-read-quoted-field
"Missing quote at the end of a quoted field"
`((state . ,%current-state)
(table . ,table)
(row . ,row)
(buffer . ,buffer)
(char . ,char))))
((double-quote? char)
(debug-fsm-transition %current-state 'read-quote)
(fsm-read-quote table row buffer))
(else
(fsm-read-quoted-field table row (cons char buffer))))))
(define (fsm-read-field-crlf table row buffer)
(define %current-state 'read-field-crlf)
(let ((char (parser-read-char parser)))
(cond
((linefeed? char)
(fsm-read (append table (list (append row (list (buffer->string buffer)))))
(else
(dsv-error %current-state
"Missing line feed after carriage return"
`((state . ,%current-state)
(table . ,table)
(row . ,row)
(buffer . ,buffer)
(char . ,char)))))))
(define (fsm-read-field table row buffer)
(define %current-state 'read-field)
(let ((char (parser-read-char parser)))
(cond
((or (eof-object? char) (delimiter? parser char))
(debug-fsm-transition %current-state 'read)
(fsm-read table
((carriage-return? char)
(fsm-read-field-crlf table row buffer))
((linefeed? char)
(debug-fsm-transition %current-state 'read)
(fsm-read (append table (list (append row (list (buffer->string buffer)))))
((double-quote? char)
(dsv-error %current-state "A double quote inside an unquoted field"
`((table . ,table)
(row . ,row)
(buffer . ,buffer)
(char . ,char))))
(else
(fsm-read-field table row (cons char buffer))))))
(define (fsm-read table row buffer)
(define %current-state 'read)
(let ((char (parser-read-char parser)))
(cond
((eof-object? char)
(debug-fsm-transition %current-state 'end 'final)
(if (null? row)
table
(append table (list row))))
((carriage-return? char)
(fsm-read table row buffer))
((double-quote? char)
(debug-fsm-transition %current-state 'read-quoted-field)
(fsm-read-quoted-field table row buffer))
((delimiter? parser char)
(fsm-read table (append row (list "")) '()))
((linefeed? char)
(fsm-read (append table (list row))
(else
(debug-fsm-transition %current-state 'read-field)
(fsm-read-field table row (cons char buffer))))))
(fsm-read '() '() '()))
(define guess-delimiter (make-delimiter-guesser dsv->scm))
|
25295760d563ad02a96790bedde728866ac24532113c8c4c8bc8a800b54a12b7 | Perry961002/SICP | exe2.47-frame.scm | 框架的一个可能构造函数
(define (make-frame origin edge1 edge2)
(list origin edge1 edge2))
(define (origin-frame f)
(car f))
(define (edge1-frame f)
(cadr f))
(define (edge2-frame f)
(caddr f))
框架的另一个构造函数
(define (make-frame origin edge1 edge2)
(cons origin (cons edge1 edge2)))
相应的构造函数
(define (origin-frame f)
(car f))
(define (edge1-frame f)
(cadr f))
(define (edge2-frame f)
(cddr f)) | null | https://raw.githubusercontent.com/Perry961002/SICP/89d539e600a73bec42d350592f0ac626e041bf16/Chap2/exercise/exe2.47-frame.scm | scheme | 框架的一个可能构造函数
(define (make-frame origin edge1 edge2)
(list origin edge1 edge2))
(define (origin-frame f)
(car f))
(define (edge1-frame f)
(cadr f))
(define (edge2-frame f)
(caddr f))
框架的另一个构造函数
(define (make-frame origin edge1 edge2)
(cons origin (cons edge1 edge2)))
相应的构造函数
(define (origin-frame f)
(car f))
(define (edge1-frame f)
(cadr f))
(define (edge2-frame f)
(cddr f)) | |
f4faab2aec6e14200903b39f3b8dd4bfca17a8b3f45d31ee63c7b9436b88c834 | sionescu/iolib | ex3-server.lisp | (in-package :iolib.examples)
This file was originally written by ( )
and this code is released under the same license as IOLib .
;;;; This is a more common-lisp-like style of ex2-server, and to be
;;;; used for the rest of the examples as appropriate. We introduce
;;;; with-open-socket, which does a lot of cleanup on the created
;;;; socket and ensures it is closed. This is usually the recommended
;;;; idiom for simple clients.
;;;; Also in this example we start to handle some of the more common
conditions which can be signaled by IOLib .
;; ex-0b
(defun run-ex3-server-helper (port)
(with-open-socket
(server :connect :passive
:address-family :internet
:type :stream
:ipv6 nil
:external-format '(:utf-8 :eol-style :crlf))
(format t "Created socket: ~A[fd=~A]~%" server (socket-os-fd server))
;; Bind the socket to all interfaces with specified port.
(bind-address server +ipv4-unspecified+ :port port :reuse-addr t)
(format t "Bound socket: ~A~%" server)
;; start listening on the server socket
(listen-on server :backlog 5)
(format t "Listening on socket bound to: ~A:~A~%"
(local-host server)
(local-port server))
;; ex-0e
;; ex-1b
;; keep accepting connections forever.
(loop
(format t "Waiting to accept a connection...~%")
;; Here we see with-accept-connection which simplifies closing
;; the client socket when are done with it.
(with-accept-connection (client server :wait t)
;; When we get a new connection, show who it
;; is from.
(multiple-value-bind (who rport)
(remote-name client)
(format t "Got a connnection from ~A:~A!~%" who rport))
;; Since we're using an internet TCP stream, we can use format
;; with it. However, we should be sure to finish-output in
;; order that all the data is sent.
(multiple-value-bind (s m h d mon y)
(get-decoded-time)
(format t "Sending the time...")
;; Catch the condition of the client closing the connection.
;; Since we exist inside a with-accept-connection, the
;; socket will be automatically closed.
(handler-case
(progn
(format client "~A/~A/~A ~A:~A:~A~%" mon d y h m s)
(finish-output client))
(socket-connection-reset-error ()
(format t "Client reset connection!~%"))
(hangup ()
(format t "Client closed conection!~%")))
(format t "Sent!~%"))))
;; ex-1e
;; ex-2b
t))
;; ex-2e
;; ex-3b
This is the main entry point into the example 3 server .
(defun run-ex3-server (&key (port *port*))
(handler-case
(run-ex3-server-helper port)
(socket-address-in-use-error ()
;; Here we catch a condition which represents trying to bind to
the same port before the first one has been released by the
;; kernel. Generally this means you forgot to put ':reuse-addr
;; t' as an argument to bind address.
(format t "Bind: Address already in use, forget :reuse-addr t?")))
(finish-output))
;; ex-3e | null | https://raw.githubusercontent.com/sionescu/iolib/dac715c81db55704db623d8b2cfc399ebcf6175f/examples/ex3-server.lisp | lisp | This is a more common-lisp-like style of ex2-server, and to be
used for the rest of the examples as appropriate. We introduce
with-open-socket, which does a lot of cleanup on the created
socket and ensures it is closed. This is usually the recommended
idiom for simple clients.
Also in this example we start to handle some of the more common
ex-0b
Bind the socket to all interfaces with specified port.
start listening on the server socket
ex-0e
ex-1b
keep accepting connections forever.
Here we see with-accept-connection which simplifies closing
the client socket when are done with it.
When we get a new connection, show who it
is from.
Since we're using an internet TCP stream, we can use format
with it. However, we should be sure to finish-output in
order that all the data is sent.
Catch the condition of the client closing the connection.
Since we exist inside a with-accept-connection, the
socket will be automatically closed.
ex-1e
ex-2b
ex-2e
ex-3b
Here we catch a condition which represents trying to bind to
kernel. Generally this means you forgot to put ':reuse-addr
t' as an argument to bind address.
ex-3e | (in-package :iolib.examples)
This file was originally written by ( )
and this code is released under the same license as IOLib .
conditions which can be signaled by IOLib .
(defun run-ex3-server-helper (port)
(with-open-socket
(server :connect :passive
:address-family :internet
:type :stream
:ipv6 nil
:external-format '(:utf-8 :eol-style :crlf))
(format t "Created socket: ~A[fd=~A]~%" server (socket-os-fd server))
(bind-address server +ipv4-unspecified+ :port port :reuse-addr t)
(format t "Bound socket: ~A~%" server)
(listen-on server :backlog 5)
(format t "Listening on socket bound to: ~A:~A~%"
(local-host server)
(local-port server))
(loop
(format t "Waiting to accept a connection...~%")
(with-accept-connection (client server :wait t)
(multiple-value-bind (who rport)
(remote-name client)
(format t "Got a connnection from ~A:~A!~%" who rport))
(multiple-value-bind (s m h d mon y)
(get-decoded-time)
(format t "Sending the time...")
(handler-case
(progn
(format client "~A/~A/~A ~A:~A:~A~%" mon d y h m s)
(finish-output client))
(socket-connection-reset-error ()
(format t "Client reset connection!~%"))
(hangup ()
(format t "Client closed conection!~%")))
(format t "Sent!~%"))))
t))
This is the main entry point into the example 3 server .
(defun run-ex3-server (&key (port *port*))
(handler-case
(run-ex3-server-helper port)
(socket-address-in-use-error ()
the same port before the first one has been released by the
(format t "Bind: Address already in use, forget :reuse-addr t?")))
(finish-output)) |
d4df2190668e013716b75c2b6f271d99120054e68031c12f624f441f00181469 | unclebob/AdventOfCode2020 | core_spec.clj | (ns day1.core-spec
(:require [speclj.core :refer :all]
[day1.core :refer :all]))
(describe "reading input"
(it "can read a file of numbers"
(spit "t-input" "1\n2\n3\n")
(should= [1 2 3] (read-numbers "t-input"))
))
(describe "finding pairs that add to"
(it "can find a pair"
(should= [0 0] (find-pair 0 [0 0]))
(should= [0 1] (find-pair 1 [0 1 2]))
)
)
(describe "solution"
(it "is"
(should= nil (solve2))))
| null | https://raw.githubusercontent.com/unclebob/AdventOfCode2020/fc4ba9ad042cbcc48dfa5947373ab46b750d89e5/day1/spec/day1/core_spec.clj | clojure | (ns day1.core-spec
(:require [speclj.core :refer :all]
[day1.core :refer :all]))
(describe "reading input"
(it "can read a file of numbers"
(spit "t-input" "1\n2\n3\n")
(should= [1 2 3] (read-numbers "t-input"))
))
(describe "finding pairs that add to"
(it "can find a pair"
(should= [0 0] (find-pair 0 [0 0]))
(should= [0 1] (find-pair 1 [0 1 2]))
)
)
(describe "solution"
(it "is"
(should= nil (solve2))))
| |
7ac715c4bd26d8305651da29349afee66aeb5aae0269a4c5377969222de298aa | teamwalnut/graphql-ppx | result_decoder.mli | type query_config = {
schema : string option;
records : bool option;
objects : bool option;
inline : bool option;
template_tag : string option;
template_tag_location : string option;
template_tag_import : string option;
template_tag_return_type : string option;
tagged_template : bool option;
template_tag_is_function : bool option;
future_added_value : bool option;
extend : string option;
fragment_in_query : Ppx_config.fragment_in_query option;
apollo_mode : bool option;
}
val generate_config :
json_read_fn:(string -> Read_schema.Json.t) ->
map_loc:
(Source_pos.source_position * Source_pos.source_position ->
Source_pos.ast_location) ->
delimiter:string option ->
initial_query_config:query_config ->
Graphql_ast.definition list ->
(Graphql_ast.definition * Generator_utils.output_config) list
val unify_document_schema :
(Graphql_ast.definition * Generator_utils.output_config) list ->
(Result_structure.definition * Generator_utils.output_config) list
(**
Takes a list of the AST of the GraphQL definitions (operations or \
fragments) and transforms that into a list of result structures. These result \
structures form the basis to generate the reason code for:
- `query` variable
- `parse` function
- `serialize` function
- `makeVariables` function
*)
| null | https://raw.githubusercontent.com/teamwalnut/graphql-ppx/8276452ebe8d89a748b6b267afc94161650ab620/src/graphql_compiler/result_decoder.mli | ocaml | *
Takes a list of the AST of the GraphQL definitions (operations or \
fragments) and transforms that into a list of result structures. These result \
structures form the basis to generate the reason code for:
- `query` variable
- `parse` function
- `serialize` function
- `makeVariables` function
| type query_config = {
schema : string option;
records : bool option;
objects : bool option;
inline : bool option;
template_tag : string option;
template_tag_location : string option;
template_tag_import : string option;
template_tag_return_type : string option;
tagged_template : bool option;
template_tag_is_function : bool option;
future_added_value : bool option;
extend : string option;
fragment_in_query : Ppx_config.fragment_in_query option;
apollo_mode : bool option;
}
val generate_config :
json_read_fn:(string -> Read_schema.Json.t) ->
map_loc:
(Source_pos.source_position * Source_pos.source_position ->
Source_pos.ast_location) ->
delimiter:string option ->
initial_query_config:query_config ->
Graphql_ast.definition list ->
(Graphql_ast.definition * Generator_utils.output_config) list
val unify_document_schema :
(Graphql_ast.definition * Generator_utils.output_config) list ->
(Result_structure.definition * Generator_utils.output_config) list
|
bba087e0c2517fa61013f98994eb74d3245b4c5adc404d6d86c894c8676058a3 | yurug/ocaml4.04.0-copatterns | testing.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2006 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Testing auxilliaries. *)
open Scanf;;
let all_tests_ok = ref true;;
let finish () =
match !all_tests_ok with
| true ->
print_endline "\nAll tests succeeded."
| _ ->
print_endline "\n\n********* Test suite failed. ***********\n";;
at_exit finish;;
let test_num = ref (-1);;
let print_test_number () =
print_string " "; print_int !test_num; flush stdout;;
let next_test () =
incr test_num;
print_test_number ();;
let print_test_fail () =
all_tests_ok := false;
print_string
(Printf.sprintf "\n********* Test number %i failed ***********\n"
!test_num);;
let print_failure_test_fail () =
all_tests_ok := false;
print_string
(Printf.sprintf
"\n********* Failure Test number %i incorrectly failed ***********\n"
!test_num);;
let print_failure_test_succeed () =
all_tests_ok := false;
print_string
(Printf.sprintf
"\n********* Failure Test number %i failed to fail ***********\n"
!test_num);;
let test b =
next_test ();
if not b then print_test_fail ();;
(* Applies f to x and checks that the evaluation indeed
raises an exception that verifies the predicate [pred]. *)
let test_raises_exc_p pred f x =
next_test ();
try
ignore (f x);
print_failure_test_succeed ();
false
with
| x ->
pred x || (print_failure_test_fail (); false);;
(* Applies f to x and checks that the evaluation indeed
raises some exception. *)
let test_raises_some_exc f = test_raises_exc_p (fun _ -> true) f;;
let test_raises_this_exc exc = test_raises_exc_p (fun x -> x = exc);;
(* Applies f to x and checks that the evaluation indeed
raises exception Failure s. *)
let test_raises_this_failure s f x =
test_raises_exc_p (fun x -> x = Failure s) f x;;
(* Applies f to x and checks that the evaluation indeed
raises the exception Failure. *)
let test_raises_some_failure f x =
test_raises_exc_p (function Failure _ -> true | _ -> false) f x;;
let failure_test f x s = test_raises_this_failure s f x;;
let any_failure_test = test_raises_some_failure;;
let scan_failure_test f x =
test_raises_exc_p (function Scan_failure _ -> true | _ -> false) f x;;
| null | https://raw.githubusercontent.com/yurug/ocaml4.04.0-copatterns/b3ec6a3cc203bd2cde3b618546d29e10f1102323/testsuite/lib/testing.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Testing auxilliaries.
Applies f to x and checks that the evaluation indeed
raises an exception that verifies the predicate [pred].
Applies f to x and checks that the evaluation indeed
raises some exception.
Applies f to x and checks that the evaluation indeed
raises exception Failure s.
Applies f to x and checks that the evaluation indeed
raises the exception Failure. | , projet Cristal , INRIA Rocquencourt
Copyright 2006 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Scanf;;
let all_tests_ok = ref true;;
let finish () =
match !all_tests_ok with
| true ->
print_endline "\nAll tests succeeded."
| _ ->
print_endline "\n\n********* Test suite failed. ***********\n";;
at_exit finish;;
let test_num = ref (-1);;
let print_test_number () =
print_string " "; print_int !test_num; flush stdout;;
let next_test () =
incr test_num;
print_test_number ();;
let print_test_fail () =
all_tests_ok := false;
print_string
(Printf.sprintf "\n********* Test number %i failed ***********\n"
!test_num);;
let print_failure_test_fail () =
all_tests_ok := false;
print_string
(Printf.sprintf
"\n********* Failure Test number %i incorrectly failed ***********\n"
!test_num);;
let print_failure_test_succeed () =
all_tests_ok := false;
print_string
(Printf.sprintf
"\n********* Failure Test number %i failed to fail ***********\n"
!test_num);;
let test b =
next_test ();
if not b then print_test_fail ();;
let test_raises_exc_p pred f x =
next_test ();
try
ignore (f x);
print_failure_test_succeed ();
false
with
| x ->
pred x || (print_failure_test_fail (); false);;
let test_raises_some_exc f = test_raises_exc_p (fun _ -> true) f;;
let test_raises_this_exc exc = test_raises_exc_p (fun x -> x = exc);;
let test_raises_this_failure s f x =
test_raises_exc_p (fun x -> x = Failure s) f x;;
let test_raises_some_failure f x =
test_raises_exc_p (function Failure _ -> true | _ -> false) f x;;
let failure_test f x s = test_raises_this_failure s f x;;
let any_failure_test = test_raises_some_failure;;
let scan_failure_test f x =
test_raises_exc_p (function Scan_failure _ -> true | _ -> false) f x;;
|
a6b1ce15729c171528f4b7ead65123411416c0a2e74293d95c09c275d3a50108 | kennknowles/aspcc | AstRun.mli | (** ASP/VbScript runtimes and customization/module loader *)
open VbTypes
type runtime = (AspAst.statement, AspAst.rvalue) Runtime.t
val create_runtime : unit -> runtime
* { 6 Execution entry points }
(** Runs a whole page, or list of statements *)
val page : runtime -> AspAst.page -> unit
(** Runs a single statement *)
val statement : runtime -> AspAst.statement -> unit
(** Evaluates a single expression. If [return_object] is [true], then it
will return a raw object, otherwise it will evaluate default properties
until a non-object is obtained. *)
val expression : runtime -> ?return_object:bool -> AspAst.rvalue -> value_t ref
| null | https://raw.githubusercontent.com/kennknowles/aspcc/951a91cc21e291b1d3c750bbbca7fa79209edd08/runtime/AstRun.mli | ocaml | * ASP/VbScript runtimes and customization/module loader
* Runs a whole page, or list of statements
* Runs a single statement
* Evaluates a single expression. If [return_object] is [true], then it
will return a raw object, otherwise it will evaluate default properties
until a non-object is obtained. |
open VbTypes
type runtime = (AspAst.statement, AspAst.rvalue) Runtime.t
val create_runtime : unit -> runtime
* { 6 Execution entry points }
val page : runtime -> AspAst.page -> unit
val statement : runtime -> AspAst.statement -> unit
val expression : runtime -> ?return_object:bool -> AspAst.rvalue -> value_t ref
|
88d6ba4df2e1429741fe6934d3f3c25f477f8a2404f2c2a97ad112cefafb5f72 | ajhc/ajhc | T2572.hs | # LANGUAGE RankNTypes , ScopedTypeVariables #
Trac # 2572
module Foo where
type GTypeFun = forall a . a -> ()
gmapType :: Int -> GTypeFun
gmapType _ (_ :: a) = undefined
| null | https://raw.githubusercontent.com/ajhc/ajhc/8ef784a6a3b5998cfcd95d0142d627da9576f264/regress/tests/1_typecheck/2_pass/ghc/T2572.hs | haskell | # LANGUAGE RankNTypes , ScopedTypeVariables #
Trac # 2572
module Foo where
type GTypeFun = forall a . a -> ()
gmapType :: Int -> GTypeFun
gmapType _ (_ :: a) = undefined
| |
4caf57ff7c6ec0f8e0bcaa490fe778216767cbaed845355d645d43ab178d46da | clojure-interop/google-cloud-clients | Storage.clj | (ns com.google.cloud.storage.Storage
"An interface for Google Cloud Storage."
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.storage Storage]))
(defn update-acl
"Updates an ACL entry on the specified bucket.
Example of updating a new ACL entry on a bucket.
String bucketName = \"my_unique_bucket\";
Acl acl = storage.updateAcl(bucketName, Acl.of(User.ofAllAuthenticatedUsers(), Role.OWNER));
Example of updating a new ACL entry on a requester_pays bucket with a user_project option.
String bucketName = \"my_unique_bucket\";
Acl acl = storage.updateAcl(bucketName, Acl.of(User.ofAllAuthenticatedUsers(), Role.OWNER),
BucketSourceOption.userProject(\"myProject\"));
bucket - name of the bucket where the updateAcl operation takes place - `java.lang.String`
acl - ACL to update - `com.google.cloud.storage.Acl`
options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: `com.google.cloud.storage.Acl`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Acl [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl acl ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.updateAcl bucket acl options)))
(^com.google.cloud.storage.Acl [^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Acl acl]
(-> this (.updateAcl blob acl))))
(defn update-default-acl
"Updates a default blob ACL entry on the specified bucket.
Default ACLs are applied to a new blob within the bucket when no ACL was provided for that
blob.
Example of updating a new default ACL entry on a bucket.
String bucketName = \"my_unique_bucket\";
Acl acl =
storage.updateDefaultAcl(bucketName, Acl.of(User.ofAllAuthenticatedUsers(), Role.OWNER));
bucket - `java.lang.String`
acl - `com.google.cloud.storage.Acl`
returns: `com.google.cloud.storage.Acl`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Acl [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl acl]
(-> this (.updateDefaultAcl bucket acl))))
(defn list
"Lists the bucket's blobs. If the Storage.BlobListOption.currentDirectory() option is provided,
results are returned in a directory-like mode.
Example of listing blobs in a provided directory.
String bucketName = \"my_unique_bucket\";
String directory = \"my_directory/\";
Page<Blob> blobs = storage.list(bucketName, BlobListOption.currentDirectory(),
BlobListOption.prefix(directory));
Iterator<Blob> blobIterator = blobs.iterateAll();
while (blobIterator.hasNext()) {
Blob blob = blobIterator.next();
// do something with the blob
}
bucket - `java.lang.String`
options - `com.google.cloud.storage.Storage$BlobListOption`
returns: `com.google.api.gax.paging.Page<com.google.cloud.storage.Blob>`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.api.gax.paging.Page [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Storage$BlobListOption options]
(-> this (.list bucket options)))
(^com.google.api.gax.paging.Page [^Storage this ^com.google.cloud.storage.Storage$BucketListOption options]
(-> this (.list options))))
(defn get-default-acl
"Returns the default object ACL entry for the specified entity on the specified bucket or null if not found.
Default ACLs are applied to a new blob within the bucket when no ACL was provided for that
blob.
Example of getting the default ACL entry for an entity on a bucket.
String bucketName = \"my_unique_bucket\";
Acl acl = storage.getDefaultAcl(bucketName, User.ofAllAuthenticatedUsers());
bucket - `java.lang.String`
entity - `com.google.cloud.storage.Acl$Entity`
returns: `com.google.cloud.storage.Acl`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Acl [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl$Entity entity]
(-> this (.getDefaultAcl bucket entity))))
(defn set-iam-policy
"Updates the IAM policy on the specified bucket.
Example of updating the IAM policy on a bucket.
// We want to make all objects in our bucket publicly readable.
String bucketName = \"my_unique_bucket\";
Policy currentPolicy = storage.getIamPolicy(bucketName);
Policy updatedPolicy =
storage.setIamPolicy(
bucketName,
currentPolicy.toBuilder()
.addIdentity(StorageRoles.objectViewer(), Identity.allUsers())
.build());
bucket - name of the bucket where the setIamPolicy operation takes place - `java.lang.String`
policy - policy to be set on the specified bucket - `com.google.cloud.Policy`
options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: `com.google.cloud.Policy`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.Policy [^Storage this ^java.lang.String bucket ^com.google.cloud.Policy policy ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.setIamPolicy bucket policy options))))
(defn list-acls
"Lists the ACL entries for the provided bucket.
Example of listing the ACL entries for a blob.
String bucketName = \"my_unique_bucket\";
List<Acl> acls = storage.listAcls(bucketName);
for (Acl acl : acls) {
// do something with ACL entry
}
Example of listing the ACL entries for a blob in a requester_pays bucket with a user_project
option.
String bucketName = \"my_unique_bucket\";
List<Acl> acls = storage.listAcls(bucketName, BucketSourceOption.userProject(\"myProject\"));
for (Acl acl : acls) {
// do something with ACL entry
}
bucket - the name of the bucket to list ACLs for - `java.lang.String`
options - any number of BucketSourceOptions to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: `java.util.List<com.google.cloud.storage.Acl>`
throws: com.google.cloud.storage.StorageException - upon failure"
(^java.util.List [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.listAcls bucket options)))
(^java.util.List [^Storage this ^com.google.cloud.storage.BlobId blob]
(-> this (.listAcls blob))))
(defn copy
"Sends a copy request. This method copies both blob's data and information. To override source
blob's information supply a BlobInfo to the CopyRequest using either Storage.CopyRequest.Builder.setTarget(BlobInfo, Storage.BlobTargetOption...) or Storage.CopyRequest.Builder.setTarget(BlobInfo, Iterable).
This method returns a CopyWriter object for the provided CopyRequest. If
source and destination objects share the same location and storage class the source blob is
copied with one request and CopyWriter.getResult() immediately returns, regardless of
the Storage.CopyRequest.megabytesCopiedPerChunk parameter. If source and destination have
different location or storage class CopyWriter.getResult() might issue multiple RPC
calls depending on blob's size.
Example of copying a blob.
String bucketName = \"my_unique_bucket\";
String blobName = \"my_blob_name\";
String copyBlobName = \"copy_blob_name\";
CopyRequest request = CopyRequest.newBuilder()
.setSource(BlobId.of(bucketName, blobName))
.setTarget(BlobId.of(bucketName, copyBlobName))
.build();
Blob blob = storage.copy(request).getResult();
Example of copying a blob in chunks.
String bucketName = \"my_unique_bucket\";
String blobName = \"my_blob_name\";
String copyBlobName = \"copy_blob_name\";
CopyRequest request = CopyRequest.newBuilder()
.setSource(BlobId.of(bucketName, blobName))
.setTarget(BlobId.of(bucketName, copyBlobName))
.build();
CopyWriter copyWriter = storage.copy(request);
while (!copyWriter.isDone()) {
copyWriter.copyChunk();
}
Blob blob = copyWriter.getResult();
Example of rotating the encryption key of a blob.
String bucketName = \"my_unique_bucket\";
String blobName = \"my_blob_name\";
String oldEncryptionKey = \"old_encryption_key\";
String newEncryptionKey = \"new_encryption_key\";
BlobId blobId = BlobId.of(bucketName, blobName);
CopyRequest request = CopyRequest.newBuilder()
.setSource(blobId)
.setSourceOptions(BlobSourceOption.decryptionKey(oldEncryptionKey))
.setTarget(blobId, BlobTargetOption.encryptionKey(newEncryptionKey))
.build();
Blob blob = storage.copy(request).getResult();
copy-request - `com.google.cloud.storage.Storage$CopyRequest`
returns: a CopyWriter object that can be used to get information on the newly created
blob or to complete the copy if more than one RPC request is needed - `com.google.cloud.storage.CopyWriter`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.CopyWriter [^Storage this ^com.google.cloud.storage.Storage$CopyRequest copy-request]
(-> this (.copy copy-request))))
(defn reader
"Returns a channel for reading the blob's content. The blob's latest generation is read. If the
blob changes while reading (i.e. BlobInfo.getEtag() changes), subsequent calls to
blobReadChannel.read(ByteBuffer) may throw StorageException.
Example of reading a blob's content through a reader.
String bucketName = \"my_unique_bucket\";
String blobName = \"my_blob_name\";
try (ReadChannel reader = storage.reader(bucketName, blobName)) {
ByteBuffer bytes = ByteBuffer.allocate(64 * 1024);
while (reader.read(bytes) > 0) {
bytes.flip();
// do something with bytes
bytes.clear();
}
}
bucket - `java.lang.String`
blob - `java.lang.String`
options - `com.google.cloud.storage.Storage$BlobSourceOption`
returns: `com.google.cloud.ReadChannel`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.ReadChannel [^Storage this ^java.lang.String bucket ^java.lang.String blob ^com.google.cloud.storage.Storage$BlobSourceOption options]
(-> this (.reader bucket blob options)))
(^com.google.cloud.ReadChannel [^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Storage$BlobSourceOption options]
(-> this (.reader blob options))))
(defn delete
  "Deletes the requested blob (or, in the two-argument bucket arity, the
  requested bucket).

  bucket - `java.lang.String`
  blob - `java.lang.String`, or a `com.google.cloud.storage.BlobId` in the
         single-argument arity
  options - `com.google.cloud.storage.Storage$BlobSourceOption`
            (`Storage$BucketSourceOption` when deleting a bucket)

  returns: true if the target was deleted, false if it was not found - `boolean`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^Boolean
   [^Storage this ^java.lang.String bucket ^java.lang.String blob ^com.google.cloud.storage.Storage$BlobSourceOption options]
   (.delete this bucket blob options))
  (^Boolean
   [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Storage$BucketSourceOption options]
   (.delete this bucket options))
  (^Boolean
   [^Storage this ^com.google.cloud.storage.BlobId blob]
   (.delete this blob)))
(defn sign-url
  "Generates a signed URL granting time-limited access to a blob without
  requiring the caller to authenticate — useful when blobs should be neither
  public nor behind an explicit login.

  Signing requires a service-account signer, resolved in this order:
    1. the signer passed via `Storage.SignUrlOption.signWith(ServiceAccountSigner)`
    2. the credentials passed to `StorageOptions`
    3. the default credentials (e.g. GOOGLE_APPLICATION_CREDENTIALS, or the
       App Engine runtime identity), if none were passed to `StorageOptions`

  If the resolved credentials do not implement `ServiceAccountSigner` (as is
  the case, for instance, for Google Cloud SDK credentials) an
  IllegalStateException is thrown unless a signer is supplied through the
  `signWith` option. Options such as `Storage.SignUrlOption.withV4Signature()`
  select the signing scheme; `SignUrlOption.withHostName()` substitutes a
  custom host name.

  blob-info - the blob associated with the signed URL - `com.google.cloud.storage.BlobInfo`
  duration - time until the signed URL expires, expressed in unit. The finest
             supported granularity is 1 second; finer values are truncated - `long`
  unit - time unit of the duration parameter - `java.util.concurrent.TimeUnit`
  options - optional URL signing options - `com.google.cloud.storage.Storage$SignUrlOption`

  returns: `java.net.URL`

  throws: java.lang.IllegalStateException - if no usable ServiceAccountSigner
          implementation could be resolved"
  (^java.net.URL
   [^Storage this ^com.google.cloud.storage.BlobInfo blob-info ^Long duration ^java.util.concurrent.TimeUnit unit ^com.google.cloud.storage.Storage$SignUrlOption options]
   (.signUrl this blob-info duration unit options)))
(defn lock-retention-policy
  "Locks the bucket's retention policy. The request requires a local
  metageneration value (e.g. via `BucketTargetOption.metagenerationMatch()`);
  a mismatch raises a StorageException. An optional `userProject`
  `Storage.BucketTargetOption` assigns operational costs to a project.

  Warning: once locked, a retention policy cannot be unlocked, removed, or
  shortened.

  bucket - `com.google.cloud.storage.BucketInfo`
  options - `com.google.cloud.storage.Storage$BucketTargetOption`

  returns: a Bucket object of the locked bucket - `com.google.cloud.storage.Bucket`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.storage.Bucket
   [^Storage this ^com.google.cloud.storage.BucketInfo bucket ^com.google.cloud.storage.Storage$BucketTargetOption options]
   (.lockRetentionPolicy this bucket options)))
(defn get-service-account
  "Returns the Cloud Storage service account associated with the given project.

  project-id - the ID of the project for which the service account should be
               fetched - `java.lang.String`

  returns: the service account associated with this project - `com.google.cloud.storage.ServiceAccount`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.storage.ServiceAccount
   [^Storage this ^java.lang.String project-id]
   (.getServiceAccount this project-id)))
(defn batch
  "Creates a new empty batch for grouping multiple service calls into a
  single underlying RPC request. Calls are registered on the returned
  `StorageBatch` (e.g. `delete`, `update`, `get`, optionally with
  `BatchResult.Callback` notifications) and executed together via
  `StorageBatch.submit()`.

  returns: `com.google.cloud.storage.StorageBatch`"
  (^com.google.cloud.storage.StorageBatch
   [^Storage this]
   (.batch this)))
(defn get-acl
  "Returns the ACL entry for the specified entity on the specified bucket
  (or blob), or null if not found. For requester-pays buckets, pass a
  `BucketSourceOption.userProject(...)` option to bill the given project.

  bucket - name of the bucket where the getAcl operation takes place - `java.lang.String`
  blob - `com.google.cloud.storage.BlobId` in the blob arity
  entity - ACL entity to fetch - `com.google.cloud.storage.Acl$Entity`
  options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`

  returns: `com.google.cloud.storage.Acl`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.storage.Acl
   [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl$Entity entity ^com.google.cloud.storage.Storage$BucketSourceOption options]
   (.getAcl this bucket entity options))
  (^com.google.cloud.storage.Acl
   [^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Acl$Entity entity]
   (.getAcl this blob entity)))
(defn update
  "Updates bucket information (or, in the blob arity, blob information).
  An optional `userProject` `Storage.BucketTargetOption` assigns operational
  costs to a project.

  bucket-info - `com.google.cloud.storage.BucketInfo`
  blob-info - `com.google.cloud.storage.BlobInfo` in the blob arity
  options - `com.google.cloud.storage.Storage$BucketTargetOption`

  returns: the updated bucket - `com.google.cloud.storage.Bucket`
           (the updated blob - `com.google.cloud.storage.Blob` - in the blob arity)

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.storage.Bucket
   [^Storage this ^com.google.cloud.storage.BucketInfo bucket-info ^com.google.cloud.storage.Storage$BucketTargetOption options]
   (.update this bucket-info options))
  (^com.google.cloud.storage.Blob
   [^Storage this ^com.google.cloud.storage.BlobInfo blob-info]
   (.update this blob-info)))
(defn create-default-acl
  "Creates a new default blob ACL entry on the specified bucket. Default
  ACLs are applied to new blobs created in the bucket when no explicit ACL
  is provided for the blob.

  bucket - `java.lang.String`
  acl - `com.google.cloud.storage.Acl`

  returns: `com.google.cloud.storage.Acl`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.storage.Acl
   [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl acl]
   (.createDefaultAcl this bucket acl)))
(defn delete-acl
  "Deletes the ACL entry for the specified entity on the specified bucket
  (or blob). For requester-pays buckets, pass a
  `BucketSourceOption.userProject(...)` option to bill the given project.

  bucket - name of the bucket to delete an ACL from - `java.lang.String`
  blob - `com.google.cloud.storage.BlobId` in the blob arity
  entity - ACL entity to delete - `com.google.cloud.storage.Acl$Entity`
  options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`

  returns: true if the ACL was deleted, false if it was not found - `boolean`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^Boolean
   [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl$Entity entity ^com.google.cloud.storage.Storage$BucketSourceOption options]
   (.deleteAcl this bucket entity options))
  (^Boolean
   [^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Acl$Entity entity]
   (.deleteAcl this blob entity)))
(defn read-all-bytes
  "Reads all the bytes from a blob into memory. Options such as
  `BlobSourceOption.generationMatch(...)` make the read conditional on the
  blob's generation; a mismatch raises a StorageException.

  bucket - `java.lang.String`
  blob - `java.lang.String`, or a `com.google.cloud.storage.BlobId` in the
         two-argument arity
  options - `com.google.cloud.storage.Storage$BlobSourceOption`

  returns: the blob's content - `byte[]`

  throws: com.google.cloud.storage.StorageException - upon failure"
  ([^Storage this ^java.lang.String bucket ^java.lang.String blob ^com.google.cloud.storage.Storage$BlobSourceOption options]
   (.readAllBytes this bucket blob options))
  ([^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Storage$BlobSourceOption options]
   (.readAllBytes this blob options)))
(defn writer
  "Creates a blob and returns a channel for writing its content. Any md5 and
  crc32c values in the given blob-info are ignored by default, unless
  requested via the `BlobWriteOption.md5Match` and
  `BlobWriteOption.crc32cMatch` options. The signed-URL arity writes to a
  previously signed upload URL.

  blob-info - `com.google.cloud.storage.BlobInfo`
  signed-url - `java.net.URL` in the single-argument arity
  options - `com.google.cloud.storage.Storage$BlobWriteOption`

  returns: `com.google.cloud.WriteChannel`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.WriteChannel
   [^Storage this ^com.google.cloud.storage.BlobInfo blob-info ^com.google.cloud.storage.Storage$BlobWriteOption options]
   (.writer this blob-info options))
  (^com.google.cloud.WriteChannel
   [^Storage this ^java.net.URL signed-url]
   (.writer this signed-url)))
(defn compose
  "Sends a compose request, concatenating the request's source blobs into its
  target blob. An optional `userProject` `Storage.BlobTargetOption` on the
  request assigns operational costs to a project.

  compose-request - `com.google.cloud.storage.Storage$ComposeRequest`

  returns: the composed blob - `com.google.cloud.storage.Blob`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.storage.Blob
   [^Storage this ^com.google.cloud.storage.Storage$ComposeRequest compose-request]
   (.compose this compose-request)))
(defn get-iam-policy
  "Gets the IAM policy for the provided bucket.

  bucket - name of the bucket where the getIamPolicy operation takes place - `java.lang.String`
  options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`

  returns: `com.google.cloud.Policy`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.Policy
   [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Storage$BucketSourceOption options]
   (.getIamPolicy this bucket options)))
(defn delete-default-acl
  "Deletes the default object ACL entry for the specified entity on the
  specified bucket. Default ACLs are applied to new blobs created in the
  bucket when no explicit ACL is provided for the blob.

  bucket - `java.lang.String`
  entity - `com.google.cloud.storage.Acl$Entity`

  returns: true if the ACL was deleted, false if it was not found - `boolean`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^Boolean
   [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl$Entity entity]
   (.deleteDefaultAcl this bucket entity)))
(defn create
  "Creates a new blob from (a sub-array of) the given byte array, or a new
  bucket from bucket-info. Content is sent by direct upload; for large
  content, `writer` is recommended instead since it uses resumable upload.
  MD5 and CRC32C hashes of the content are computed and used to validate the
  transferred data. A `userProject` option assigns operational costs to a
  project.

  blob-info - `com.google.cloud.storage.BlobInfo`
  bucket-info - `com.google.cloud.storage.BucketInfo` in the bucket arity
  content - `byte[]`
  offset - `int`
  length - `int`
  options - `com.google.cloud.storage.Storage$BlobTargetOption`
            (`Storage$BucketTargetOption` in the bucket arity)

  returns: a `Blob` with complete information - `com.google.cloud.storage.Blob`
           (a `com.google.cloud.storage.Bucket` in the bucket arity)

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.storage.Blob
   [^Storage this ^com.google.cloud.storage.BlobInfo blob-info content ^Integer offset ^Integer length ^com.google.cloud.storage.Storage$BlobTargetOption options]
   (.create this blob-info content offset length options))
  (^com.google.cloud.storage.Blob
   [^Storage this ^com.google.cloud.storage.BlobInfo blob-info content ^com.google.cloud.storage.Storage$BlobTargetOption options]
   (.create this blob-info content options))
  (^com.google.cloud.storage.Bucket
   [^Storage this ^com.google.cloud.storage.BucketInfo bucket-info ^com.google.cloud.storage.Storage$BucketTargetOption options]
   (.create this bucket-info options)))
(defn list-default-acls
  "Lists the default blob ACL entries for the provided bucket. Default ACLs
  are applied to new blobs created in the bucket when no explicit ACL is
  provided for the blob.

  bucket - `java.lang.String`

  returns: `java.util.List<com.google.cloud.storage.Acl>`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^java.util.List
   [^Storage this ^java.lang.String bucket]
   (.listDefaultAcls this bucket)))
(defn get
  "Returns the requested blob (or, in the bucket arity, the requested
  bucket), or null if not found. Options such as
  `BlobGetOption.metagenerationMatch(...)` make the get conditional; a
  mismatch raises a StorageException. An optional `userProject`
  `Storage.BlobGetOption` assigns operational costs to a project.

  bucket - `java.lang.String`
  blob - `java.lang.String`, or a `com.google.cloud.storage.BlobId` in the
         single-argument arity
  options - `com.google.cloud.storage.Storage$BlobGetOption`
            (`Storage$BucketGetOption` in the bucket arity)

  returns: `com.google.cloud.storage.Blob`
           (`com.google.cloud.storage.Bucket` in the bucket arity)

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.storage.Blob
   [^Storage this ^java.lang.String bucket ^java.lang.String blob ^com.google.cloud.storage.Storage$BlobGetOption options]
   (.get this bucket blob options))
  (^com.google.cloud.storage.Bucket
   [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Storage$BucketGetOption options]
   (.get this bucket options))
  (^com.google.cloud.storage.Blob
   [^Storage this ^com.google.cloud.storage.BlobId blob]
   (.get this blob)))
(defn create-acl
  "Creates a new ACL entry on the specified bucket (or blob). For
  requester-pays buckets, pass a `BucketSourceOption.userProject(...)`
  option to bill the given project.

  bucket - name of the bucket for which an ACL should be created - `java.lang.String`
  blob - `com.google.cloud.storage.BlobId` in the blob arity
  acl - ACL to create - `com.google.cloud.storage.Acl`
  options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`

  returns: `com.google.cloud.storage.Acl`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^com.google.cloud.storage.Acl
   [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl acl ^com.google.cloud.storage.Storage$BucketSourceOption options]
   (.createAcl this bucket acl options))
  (^com.google.cloud.storage.Acl
   [^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Acl acl]
   (.createAcl this blob acl)))
(defn test-iam-permissions
  "Tests whether the caller holds the given permissions on the specified
  bucket. The returned booleans are in the same position and order as the
  permissions that were supplied.

  bucket - name of the bucket where the testIamPermissions operation takes place - `java.lang.String`
  permissions - list of permissions to test on the bucket - `java.util.List`
  options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`

  returns: `java.util.List<java.lang.Boolean>`

  throws: com.google.cloud.storage.StorageException - upon failure"
  (^java.util.List
   [^Storage this ^java.lang.String bucket ^java.util.List permissions ^com.google.cloud.storage.Storage$BucketSourceOption options]
   (.testIamPermissions this bucket permissions options)))
| null | https://raw.githubusercontent.com/clojure-interop/google-cloud-clients/80852d0496057c22f9cdc86d6f9ffc0fa3cd7904/com.google.cloud.storage/src/com/google/cloud/storage/Storage.clj | clojure |
// returns get result or throws StorageException
| (ns com.google.cloud.storage.Storage
"An interface for Google Cloud Storage."
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.storage Storage]))
(defn update-acl
"Updates an ACL entry on the specified bucket.
Example of updating a new ACL entry on a bucket.
Example of updating a new ACL entry on a requester_pays bucket with a user_project option.
Acl acl = storage.updateAcl(bucketName, Acl.of(User.ofAllAuthenticatedUsers(), Role.OWNER),
bucket - name of the bucket where the updateAcl operation takes place - `java.lang.String`
acl - ACL to update - `com.google.cloud.storage.Acl`
options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: `com.google.cloud.storage.Acl`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Acl [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl acl ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.updateAcl bucket acl options)))
(^com.google.cloud.storage.Acl [^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Acl acl]
(-> this (.updateAcl blob acl))))
(defn update-default-acl
"Updates a default blob ACL entry on the specified bucket.
Default ACLs are applied to a new blob within the bucket when no ACL was provided for that
blob.
Example of updating a new default ACL entry on a bucket.
Acl acl =
bucket - `java.lang.String`
acl - `com.google.cloud.storage.Acl`
returns: `com.google.cloud.storage.Acl`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Acl [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl acl]
(-> this (.updateDefaultAcl bucket acl))))
(defn list
"Lists the bucket's blobs. If the Storage.BlobListOption.currentDirectory() option is provided,
results are returned in a directory-like mode.
Example of listing blobs in a provided directory.
Page<Blob> blobs = storage.list(bucketName, BlobListOption.currentDirectory(),
while (blobIterator.hasNext()) {
// do something with the blob
}
bucket - `java.lang.String`
options - `com.google.cloud.storage.Storage$BlobListOption`
returns: `com.google.api.gax.paging.Page<com.google.cloud.storage.Blob>`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.api.gax.paging.Page [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Storage$BlobListOption options]
(-> this (.list bucket options)))
(^com.google.api.gax.paging.Page [^Storage this ^com.google.cloud.storage.Storage$BucketListOption options]
(-> this (.list options))))
(defn get-default-acl
"Returns the default object ACL entry for the specified entity on the specified bucket or null if not found.
Default ACLs are applied to a new blob within the bucket when no ACL was provided for that
blob.
Example of getting the default ACL entry for an entity on a bucket.
bucket - `java.lang.String`
entity - `com.google.cloud.storage.Acl$Entity`
returns: `com.google.cloud.storage.Acl`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Acl [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl$Entity entity]
(-> this (.getDefaultAcl bucket entity))))
(defn set-iam-policy
"Updates the IAM policy on the specified bucket.
Example of updating the IAM policy on a bucket.
// We want to make all objects in our bucket publicly readable.
Policy updatedPolicy =
storage.setIamPolicy(
bucketName,
currentPolicy.toBuilder()
.addIdentity(StorageRoles.objectViewer(), Identity.allUsers())
bucket - name of the bucket where the setIamPolicy operation takes place - `java.lang.String`
policy - policy to be set on the specified bucket - `com.google.cloud.Policy`
options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: `com.google.cloud.Policy`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.Policy [^Storage this ^java.lang.String bucket ^com.google.cloud.Policy policy ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.setIamPolicy bucket policy options))))
(defn list-acls
"Lists the ACL entries for the provided bucket.
Example of listing the ACL entries for a blob.
for (Acl acl : acls) {
// do something with ACL entry
}
Example of listing the ACL entries for a blob in a requester_pays bucket with a user_project
option.
for (Acl acl : acls) {
// do something with ACL entry
}
bucket - the name of the bucket to list ACLs for - `java.lang.String`
options - any number of BucketSourceOptions to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: `java.util.List<com.google.cloud.storage.Acl>`
throws: com.google.cloud.storage.StorageException - upon failure"
(^java.util.List [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.listAcls bucket options)))
(^java.util.List [^Storage this ^com.google.cloud.storage.BlobId blob]
(-> this (.listAcls blob))))
(defn copy
"Sends a copy request. This method copies both blob's data and information. To override source
blob's information supply a BlobInfo to the CopyRequest using either Storage.CopyRequest.Builder.setTarget(BlobInfo, Storage.BlobTargetOption...) or Storage.CopyRequest.Builder.setTarget(BlobInfo, Iterable).
This method returns a CopyWriter object for the provided CopyRequest. If
source and destination objects share the same location and storage class the source blob is
copied with one request and CopyWriter.getResult() immediately returns, regardless of
the Storage.CopyRequest.megabytesCopiedPerChunk parameter. If source and destination have
different location or storage class CopyWriter.getResult() might issue multiple RPC
calls depending on blob's size.
Example of copying a blob.
CopyRequest request = CopyRequest.newBuilder()
.setSource(BlobId.of(bucketName, blobName))
.setTarget(BlobId.of(bucketName, copyBlobName))
Example of copying a blob in chunks.
CopyRequest request = CopyRequest.newBuilder()
.setSource(BlobId.of(bucketName, blobName))
.setTarget(BlobId.of(bucketName, copyBlobName))
while (!copyWriter.isDone()) {
}
Example of rotating the encryption key of a blob.
CopyRequest request = CopyRequest.newBuilder()
.setSource(blobId)
.setSourceOptions(BlobSourceOption.decryptionKey(oldEncryptionKey))
.setTarget(blobId, BlobTargetOption.encryptionKey(newEncryptionKey))
copy-request - `com.google.cloud.storage.Storage$CopyRequest`
returns: a CopyWriter object that can be used to get information on the newly created
blob or to complete the copy if more than one RPC request is needed - `com.google.cloud.storage.CopyWriter`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.CopyWriter [^Storage this ^com.google.cloud.storage.Storage$CopyRequest copy-request]
(-> this (.copy copy-request))))
(defn reader
"Returns a channel for reading the blob's content. The blob's latest generation is read. If the
blob changes while reading (i.e. BlobInfo.getEtag() changes), subsequent calls to
blobReadChannel.read(ByteBuffer) may throw StorageException.
Example of reading a blob's content through a reader.
try (ReadChannel reader = storage.reader(bucketName, blobName)) {
while (reader.read(bytes) > 0) {
// do something with bytes
}
}
bucket - `java.lang.String`
blob - `java.lang.String`
options - `com.google.cloud.storage.Storage$BlobSourceOption`
returns: `com.google.cloud.ReadChannel`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.ReadChannel [^Storage this ^java.lang.String bucket ^java.lang.String blob ^com.google.cloud.storage.Storage$BlobSourceOption options]
(-> this (.reader bucket blob options)))
(^com.google.cloud.ReadChannel [^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Storage$BlobSourceOption options]
(-> this (.reader blob options))))
(defn delete
"Deletes the requested blob.
Example of deleting a blob, only if its generation matches a value, otherwise a StorageException is thrown.
boolean deleted = storage.delete(bucketName, blobName,
if (deleted) {
// the blob was deleted
} else {
// the blob was not found
}
bucket - `java.lang.String`
blob - `java.lang.String`
options - `com.google.cloud.storage.Storage$BlobSourceOption`
returns: true if blob was deleted, false if it was not found - `boolean`
throws: com.google.cloud.storage.StorageException - upon failure"
(^Boolean [^Storage this ^java.lang.String bucket ^java.lang.String blob ^com.google.cloud.storage.Storage$BlobSourceOption options]
(-> this (.delete bucket blob options)))
(^Boolean [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.delete bucket options)))
(^Boolean [^Storage this ^com.google.cloud.storage.BlobId blob]
(-> this (.delete blob))))
(defn sign-url
"Generates a signed URL for a blob. If you have a blob that you want to allow access to for a
fixed amount of time, you can use this method to generate a URL that is only valid within a
certain time period. This is particularly useful if you don't want publicly accessible blobs,
but also don't want to require users to explicitly log in. Signing a URL requires a service
account signer. If an instance of ServiceAccountSigner was passed to
StorageOptions' builder via setCredentials(Credentials) or the default
credentials are being used and the environment variable GOOGLE_APPLICATION_CREDENTIALS
is set or your application is running in App Engine, then signUrl will use that
credentials to sign the URL. If the credentials passed to StorageOptions do not
implement ServiceAccountSigner (this is the case, for instance, for Google Cloud SDK
credentials) then signUrl will throw an IllegalStateException unless an
implementation of ServiceAccountSigner is passed using the Storage.SignUrlOption.signWith(ServiceAccountSigner) option.
A service account signer is looked for in the following order:
The signer passed with the option Storage.SignUrlOption.signWith(ServiceAccountSigner)
The credentials passed to StorageOptions
The default credentials, if no credentials were passed to StorageOptions
Example of creating a signed URL that is valid for 2 weeks, using the default credentials
for signing the URL.
URL signedUrl = storage.signUrl(BlobInfo.newBuilder(bucketName, blobName).build(), 14,
Example of creating a signed URL passing the Storage.SignUrlOption.withV4Signature() option,
which enables V4 signing.
URL signedUrl = storage.signUrl(BlobInfo.newBuilder(bucketName, blobName).build(),
Example of creating a signed URL passing the Storage.SignUrlOption.signWith(ServiceAccountSigner) option, that will be used for signing the URL.
URL signedUrl = storage.signUrl(BlobInfo.newBuilder(bucketName, blobName).build(),
14, TimeUnit.DAYS, SignUrlOption.signWith(
Note that the ServiceAccountSigner may require additional configuration to enable
URL signing. See the documentation for the implementation for more details.
blob-info - the blob associated with the signed URL - `com.google.cloud.storage.BlobInfo`
duration - time until the signed URL expires, expressed in unit. The finest granularity supported is 1 second, finer granularities will be truncated - `long`
unit - time unit of the duration parameter - `java.util.concurrent.TimeUnit`
options - optional URL signing options SignUrlOption.withHostName() option to set a custom host name instead of using . - `com.google.cloud.storage.Storage$SignUrlOption`
returns: `java.net.URL`
throws: java.lang.IllegalStateException - if Storage.SignUrlOption.signWith(ServiceAccountSigner) was not used and no implementation of ServiceAccountSigner was provided to StorageOptions"
(^java.net.URL [^Storage this ^com.google.cloud.storage.BlobInfo blob-info ^Long duration ^java.util.concurrent.TimeUnit unit ^com.google.cloud.storage.Storage$SignUrlOption options]
(-> this (.signUrl blob-info duration unit options))))
(defn lock-retention-policy
"Locks bucket retention policy. Requires a local metageneration value in the request. Review
example below.
Accepts an optional userProject Storage.BucketTargetOption option which defines the project
id to assign operational costs.
Warning: Once a retention policy is locked, it can't be unlocked, removed, or shortened.
Example of locking a retention policy on a bucket, only if its local metageneration value
matches the bucket's service metageneration otherwise a StorageException is thrown.
bucket - `com.google.cloud.storage.BucketInfo`
options - `com.google.cloud.storage.Storage$BucketTargetOption`
returns: a Bucket object of the locked bucket - `com.google.cloud.storage.Bucket`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Bucket [^Storage this ^com.google.cloud.storage.BucketInfo bucket ^com.google.cloud.storage.Storage$BucketTargetOption options]
(-> this (.lockRetentionPolicy bucket options))))
(defn get-service-account
"Returns the service account associated with the given project.
Example of getting a service account.
project-id - the ID of the project for which the service account should be fetched. - `java.lang.String`
returns: the service account associated with this project - `com.google.cloud.storage.ServiceAccount`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.ServiceAccount [^Storage this ^java.lang.String project-id]
(-> this (.getServiceAccount project-id))))
(defn batch
"Creates a new empty batch for grouping multiple service calls in one underlying RPC call.
Example of using a batch request to delete, update and get a blob.
batch.delete(firstBlob).notify(new BatchResult.Callback<Boolean, StorageException>() {
public void success(Boolean result) {
// deleted successfully
}
public void error(StorageException exception) {
// delete failed
}
returns: `com.google.cloud.storage.StorageBatch`"
(^com.google.cloud.storage.StorageBatch [^Storage this]
(-> this (.batch))))
(defn get-acl
"Returns the ACL entry for the specified entity on the specified bucket or null if not
found.
Example of getting the ACL entry for an entity on a bucket.
Example of getting the ACL entry for a specific user on a requester_pays bucket with a
user_project option.
bucket - name of the bucket where the getAcl operation takes place - `java.lang.String`
entity - ACL entity to fetch - `com.google.cloud.storage.Acl$Entity`
options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: `com.google.cloud.storage.Acl`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Acl [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl$Entity entity ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.getAcl bucket entity options)))
(^com.google.cloud.storage.Acl [^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Acl$Entity entity]
(-> this (.getAcl blob entity))))
(defn update
"Updates bucket information.
Accepts an optional userProject Storage.BucketTargetOption option which defines the project
id to assign operational costs.
Example of updating bucket information.
bucket-info - `com.google.cloud.storage.BucketInfo`
options - `com.google.cloud.storage.Storage$BucketTargetOption`
returns: the updated bucket - `com.google.cloud.storage.Bucket`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Bucket [^Storage this ^com.google.cloud.storage.BucketInfo bucket-info ^com.google.cloud.storage.Storage$BucketTargetOption options]
(-> this (.update bucket-info options)))
(^com.google.cloud.storage.Blob [^Storage this ^com.google.cloud.storage.BlobInfo blob-info]
(-> this (.update blob-info))))
(defn create-default-acl
"Creates a new default blob ACL entry on the specified bucket.
Default ACLs are applied to a new blob within the bucket when no ACL was provided for that
blob.
Example of creating a new default ACL entry on a bucket.
Acl acl =
bucket - `java.lang.String`
acl - `com.google.cloud.storage.Acl`
returns: `com.google.cloud.storage.Acl`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Acl [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl acl]
(-> this (.createDefaultAcl bucket acl))))
(defn delete-acl
"Deletes the ACL entry for the specified entity on the specified bucket.
Example of deleting the ACL entry for an entity on a bucket.
if (deleted) {
// the acl entry was deleted
} else {
// the acl entry was not found
}
Example of deleting the ACL entry for a specific user on a requester_pays bucket with a
user_project option.
bucket - name of the bucket to delete an ACL from - `java.lang.String`
entity - ACL entity to delete - `com.google.cloud.storage.Acl$Entity`
options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: true if the ACL was deleted, false if it was not found - `boolean`
throws: com.google.cloud.storage.StorageException - upon failure"
(^Boolean [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl$Entity entity ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.deleteAcl bucket entity options)))
(^Boolean [^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Acl$Entity entity]
(-> this (.deleteAcl blob entity))))
(defn read-all-bytes
"Reads all the bytes from a blob.
Example of reading all bytes of a blob, if generation matches a value, otherwise a StorageException is thrown.
byte[] content = storage.readAllBytes(bucketName, blobName,
bucket - `java.lang.String`
blob - `java.lang.String`
options - `com.google.cloud.storage.Storage$BlobSourceOption`
returns: the blob's content - `byte[]`
throws: com.google.cloud.storage.StorageException - upon failure"
([^Storage this ^java.lang.String bucket ^java.lang.String blob ^com.google.cloud.storage.Storage$BlobSourceOption options]
(-> this (.readAllBytes bucket blob options)))
([^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Storage$BlobSourceOption options]
(-> this (.readAllBytes blob options))))
(defn writer
"Creates a blob and return a channel for writing its content. By default any md5 and crc32c
values in the given blobInfo are ignored unless requested via the BlobWriteOption.md5Match and BlobWriteOption.crc32cMatch options.
Example of writing a blob's content through a writer.
try (WriteChannel writer = storage.writer(blobInfo)) {
try {
} catch (Exception ex) {
// handle exception
}
}
blob-info - `com.google.cloud.storage.BlobInfo`
options - `com.google.cloud.storage.Storage$BlobWriteOption`
returns: `com.google.cloud.WriteChannel`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.WriteChannel [^Storage this ^com.google.cloud.storage.BlobInfo blob-info ^com.google.cloud.storage.Storage$BlobWriteOption options]
(-> this (.writer blob-info options)))
(^com.google.cloud.WriteChannel [^Storage this ^java.net.URL signed-url]
(-> this (.writer signed-url))))
(defn compose
"Sends a compose request.
Accepts an optional userProject Storage.BlobTargetOption option which defines the project id
to assign operational costs.
Example of composing two blobs.
ComposeRequest request = ComposeRequest.newBuilder()
.setTarget(blobInfo)
.addSource(sourceBlob1)
.addSource(sourceBlob2)
compose-request - `com.google.cloud.storage.Storage$ComposeRequest`
returns: the composed blob - `com.google.cloud.storage.Blob`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Blob [^Storage this ^com.google.cloud.storage.Storage$ComposeRequest compose-request]
(-> this (.compose compose-request))))
(defn get-iam-policy
"Gets the IAM policy for the provided bucket.
Example of getting the IAM policy for a bucket.
bucket - name of the bucket where the getIamPolicy operation takes place - `java.lang.String`
options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: `com.google.cloud.Policy`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.Policy [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.getIamPolicy bucket options))))
(defn delete-default-acl
"Deletes the default object ACL entry for the specified entity on the specified bucket.
Default ACLs are applied to a new blob within the bucket when no ACL was provided for that
blob.
Example of deleting the default ACL entry for an entity on a bucket.
if (deleted) {
// the acl entry was deleted
} else {
// the acl entry was not found
}
bucket - `java.lang.String`
entity - `com.google.cloud.storage.Acl$Entity`
returns: true if the ACL was deleted, false if it was not found - `boolean`
throws: com.google.cloud.storage.StorageException - upon failure"
(^Boolean [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl$Entity entity]
(-> this (.deleteDefaultAcl bucket entity))))
(defn create
"Creates a new blob with the sub array of the given byte array. Direct upload is used to upload
content. For large content, writer(com.google.cloud.storage.BlobInfo, com.google.cloud.storage.Storage.BlobWriteOption...) is recommended as it uses resumable upload.
MD5 and CRC32C hashes of content are computed and used for validating transferred data.
Accepts a userProject Storage.BlobGetOption option, which defines the project id to assign
operational costs.
Example of creating a blob from a byte array.
blob-info - `com.google.cloud.storage.BlobInfo`
content - `byte[]`
offset - `int`
length - `int`
options - `com.google.cloud.storage.Storage$BlobTargetOption`
returns: a [@code Blob} with complete information - `com.google.cloud.storage.Blob`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Blob [^Storage this ^com.google.cloud.storage.BlobInfo blob-info content ^Integer offset ^Integer length ^com.google.cloud.storage.Storage$BlobTargetOption options]
(-> this (.create blob-info content offset length options)))
(^com.google.cloud.storage.Blob [^Storage this ^com.google.cloud.storage.BlobInfo blob-info content ^com.google.cloud.storage.Storage$BlobTargetOption options]
(-> this (.create blob-info content options)))
(^com.google.cloud.storage.Bucket [^Storage this ^com.google.cloud.storage.BucketInfo bucket-info ^com.google.cloud.storage.Storage$BucketTargetOption options]
(-> this (.create bucket-info options))))
(defn list-default-acls
"Lists the default blob ACL entries for the provided bucket.
Default ACLs are applied to a new blob within the bucket when no ACL was provided for that
blob.
Example of listing the default ACL entries for a blob.
for (Acl acl : acls) {
// do something with ACL entry
}
bucket - `java.lang.String`
returns: `java.util.List<com.google.cloud.storage.Acl>`
throws: com.google.cloud.storage.StorageException - upon failure"
(^java.util.List [^Storage this ^java.lang.String bucket]
(-> this (.listDefaultAcls bucket))))
(defn get
"Returns the requested blob or null if not found.
Accepts an optional userProject Storage.BlobGetOption option which defines the project id to
assign operational costs.
Example of getting information on a blob, only if its metageneration matches a value,
otherwise a StorageException is thrown.
Blob blob = storage.get(bucketName, blobName,
bucket - `java.lang.String`
blob - `java.lang.String`
options - `com.google.cloud.storage.Storage$BlobGetOption`
returns: `com.google.cloud.storage.Blob`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Blob [^Storage this ^java.lang.String bucket ^java.lang.String blob ^com.google.cloud.storage.Storage$BlobGetOption options]
(-> this (.get bucket blob options)))
(^com.google.cloud.storage.Bucket [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Storage$BucketGetOption options]
(-> this (.get bucket options)))
(^com.google.cloud.storage.Blob [^Storage this ^com.google.cloud.storage.BlobId blob]
(-> this (.get blob))))
(defn create-acl
"Creates a new ACL entry on the specified bucket.
Example of creating a new ACL entry on a bucket.
Example of creating a new ACL entry on a requester_pays bucket with a user_project option.
Acl acl = storage.createAcl(bucketName, Acl.of(User.ofAllAuthenticatedUsers(), Role.READER),
bucket - name of the bucket for which an ACL should be created - `java.lang.String`
acl - ACL to create - `com.google.cloud.storage.Acl`
options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: `com.google.cloud.storage.Acl`
throws: com.google.cloud.storage.StorageException - upon failure"
(^com.google.cloud.storage.Acl [^Storage this ^java.lang.String bucket ^com.google.cloud.storage.Acl acl ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.createAcl bucket acl options)))
(^com.google.cloud.storage.Acl [^Storage this ^com.google.cloud.storage.BlobId blob ^com.google.cloud.storage.Acl acl]
(-> this (.createAcl blob acl))))
(defn test-iam-permissions
"Tests whether the caller holds the permissions on the specified bucket. Returns a list of
booleans in the same placement and order in which the permissions were specified.
Example of testing permissions on a bucket.
List<Boolean> response =
storage.testIamPermissions(
bucket,
for (boolean hasPermission : response) {
// Do something with permission test response
}
bucket - name of the bucket where the testIamPermissions operation takes place - `java.lang.String`
permissions - list of permissions to test on the bucket - `java.util.List`
options - extra parameters to apply to this operation - `com.google.cloud.storage.Storage$BucketSourceOption`
returns: `java.util.List<java.lang.Boolean>`
throws: com.google.cloud.storage.StorageException - upon failure"
(^java.util.List [^Storage this ^java.lang.String bucket ^java.util.List permissions ^com.google.cloud.storage.Storage$BucketSourceOption options]
(-> this (.testIamPermissions bucket permissions options))))
|
6388bf7115321ea7778535ffda285102b5cd85f44ae027eb95596dd8dfff4688 | DKurilo/hackerrank | solution.hs | # LANGUAGE OverloadedStrings , UnicodeSyntax #
module Main where
import Prelude.Unicode
import Control.Monad
import qualified Data.ByteString.Char8 as BSC
import qualified Data.Map as DM
import Debug.Trace
import System.IO
data Result = WIN | LOSE
deriving (Show, Eq, Ord)
data Player = A | B
deriving (Show, Eq, Ord)
data State = GS [Int] Player
deriving (Show, Eq, Ord)
type Cache = DM.Map State Result
nextPlayer ∷ Player → Player
nextPlayer A = B
nextPlayer B = A
canDo ∷ [Int] → [[Int]]
canDo [] = []
canDo rs = go 1 rs
where go ∷ Int → [Int] → [[Int]]
go _ [] = []
go from (r':rs') = [x:lim x rs' | x ← [from..(r' - 1)]] ⧺
(map (r':) $ go 0 rs')
lim ∷ Int → [Int] → [Int]
lim m xs = map (\x → min m x) xs
winOrLose ∷ Cache → State → (Result, Cache)
winOrLose c st = case DM.lookup st c of
Just r → (r, c)
_ | head rs ≡ 1 ∧ filter (>0) rs ≡ [1] → (LOSE, DM.insert st LOSE c)
| otherwise → (res, DM.insert st res c')
where (GS rs p) = st
(res, c') = foldl (\(r, c) rs' →
let (r', c') = winOrLose c (GS rs' $ nextPlayer p) in
if r' ≡ LOSE then (WIN,c') else (r,c')) (LOSE, c) $ canDo rs
main ∷ IO()
main = do
let getInt bx = case BSC.readInt bx of
Just (x,_) → x
_ → 0
let getInts = map getInt <$> BSC.split ' '
n ← getInt <$> BSC.getLine
forM_ [1..n] $ \_ → do
rs ← getInts <$> BSC.getLine
BSC.putStrLn ∘ BSC.pack ∘ show ∘ fst ∘ winOrLose DM.empty $ GS rs A
| null | https://raw.githubusercontent.com/DKurilo/hackerrank/37063170567b397b25a2b7123bc9c1299d34814a/bitter-chocolate/solution.hs | haskell | # LANGUAGE OverloadedStrings , UnicodeSyntax #
module Main where
import Prelude.Unicode
import Control.Monad
import qualified Data.ByteString.Char8 as BSC
import qualified Data.Map as DM
import Debug.Trace
import System.IO
data Result = WIN | LOSE
deriving (Show, Eq, Ord)
data Player = A | B
deriving (Show, Eq, Ord)
data State = GS [Int] Player
deriving (Show, Eq, Ord)
type Cache = DM.Map State Result
nextPlayer ∷ Player → Player
nextPlayer A = B
nextPlayer B = A
canDo ∷ [Int] → [[Int]]
canDo [] = []
canDo rs = go 1 rs
where go ∷ Int → [Int] → [[Int]]
go _ [] = []
go from (r':rs') = [x:lim x rs' | x ← [from..(r' - 1)]] ⧺
(map (r':) $ go 0 rs')
lim ∷ Int → [Int] → [Int]
lim m xs = map (\x → min m x) xs
winOrLose ∷ Cache → State → (Result, Cache)
winOrLose c st = case DM.lookup st c of
Just r → (r, c)
_ | head rs ≡ 1 ∧ filter (>0) rs ≡ [1] → (LOSE, DM.insert st LOSE c)
| otherwise → (res, DM.insert st res c')
where (GS rs p) = st
(res, c') = foldl (\(r, c) rs' →
let (r', c') = winOrLose c (GS rs' $ nextPlayer p) in
if r' ≡ LOSE then (WIN,c') else (r,c')) (LOSE, c) $ canDo rs
main ∷ IO()
main = do
let getInt bx = case BSC.readInt bx of
Just (x,_) → x
_ → 0
let getInts = map getInt <$> BSC.split ' '
n ← getInt <$> BSC.getLine
forM_ [1..n] $ \_ → do
rs ← getInts <$> BSC.getLine
BSC.putStrLn ∘ BSC.pack ∘ show ∘ fst ∘ winOrLose DM.empty $ GS rs A
| |
24bebcddc2f251d8e90692d1c881fb42ba8c86eeaf6b03f707b9f0633f17cb29 | sboehler/beans | Main.hs | module Main where
import qualified Beans.Command.Balance as Balance
import qualified Beans.Command.Fetch as Fetch
import qualified Beans.Command.Import as Import
import qualified Beans.Command.Infer as Infer
import qualified Beans.Command.Transcode as Transcode
import Beans.Commodity (Commodity)
import Beans.Date (Date, Interval (..))
import Beans.Filter (AccountFilter (AccountFilter), CommodityFilter (CommodityFilter), Filter (..))
import qualified Beans.Megaparsec as M
import Beans.Parser (ParserException)
import Beans.Process (ProcessException)
import Control.Monad.Catch (MonadThrow, catch)
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Reader (runReaderT)
import Data.Bool (bool)
import Data.Either.Combinators (rightToMaybe)
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Void (Void)
import Options.Applicative
import qualified Text.Megaparsec as M
import qualified Text.Megaparsec.Char as M
import qualified Text.Megaparsec.Char.Lexer as L
run :: Command -> IO ()
run c =
run' c
`catch` (\(e :: ProcessException) -> print e)
`catch` (\(e :: ParserException) -> print e)
run' :: (MonadIO m, MonadThrow m) => Command -> m ()
run' (Balance options) = runReaderT Balance.run options
run' (Fetch options) = runReaderT Fetch.run options
run' (Import options) = runReaderT Import.run options
run' (Infer options) = runReaderT Infer.run options
run' (Transcode options) = runReaderT Transcode.run options
data Command
= Balance Balance.Options
| Fetch Fetch.Options
| Import Import.Config
| Infer Infer.Options
| Transcode Transcode.Options
deriving (Show)
toReadM :: M.Parsec Void Text a -> ReadM a
toReadM p = maybeReader $ rightToMaybe . M.parse p "" . Text.pack
journalParser :: Parser FilePath
journalParser = strOption options
where
options = short 'j' <> long "journal" <> metavar "JOURNAL" <> help "The journal file to parse"
showCommoditiesParser :: Parser Bool
showCommoditiesParser = switch options
where
options = long "show-commodities" <> short 'c' <> help "Show commodities"
percentParser :: Parser (Maybe AccountFilter)
percentParser = optional $ AccountFilter <$> strOption options
where
options = long "percent" <> metavar "REGEX"
diffingParser :: Parser Balance.Diffing
diffingParser = bool Balance.NoDiffing Balance.Diffing <$> switch options
where
options = long "diff" <> short 'd' <> help "Diff balances"
balanceFormatParser :: Parser Balance.Format
balanceFormatParser = g <$> strOption options
where
options = long "format" <> short 'f' <> help "The format of th report" <> value "hierarchical"
g :: String -> Balance.Format
g "flat" = Balance.Flat
g _ = Balance.Hierarchical
valuationParser :: Parser [Commodity]
valuationParser = option parse options <|> pure []
where
parse = toReadM (M.parseCommodity `M.sepBy` M.char ',')
options = long "val" <> metavar "COMMODITY" <> short 'v' <> help "Valuation at market prices"
filterParser :: Parser Filter
filterParser = Filter <$> af <*> cf
where
af = AccountFilter <$> strOption (long "account-filter" <> value "" <> metavar "REGEX")
cf = CommodityFilter <$> strOption (long "commodity-filter" <> value "" <> metavar "REGEX")
dateparser :: String -> String -> Parser (Maybe Date)
dateparser optionStr helpStr = optional $ option parse options
where
parse = toReadM M.parseISODate
options = long optionStr <> help helpStr <> metavar "YYYY-MM-DD"
collapseParser :: Parser Balance.Collapse
collapseParser = many $ option parse options
where
options = short 'p' <> long "collapse" <> metavar "REGEX,DEPTH"
parse = toReadM $ do
s <- Text.unpack <$> M.takeWhileP Nothing (/= ',')
_ <- M.char ','
d <- L.decimal
pure (AccountFilter s, d)
fromParser, toParser :: Parser (Maybe Date)
fromParser = dateparser "from" "Consider only transactions at or after this date"
toParser = dateparser "to" "Consider only transactions before this date"
balanceOptions :: Parser Balance.Options
balanceOptions =
Balance.Options
<$> journalParser
<*> valuationParser
<*> filterParser
<*> diffingParser
<*> showCommoditiesParser
<*> balanceFormatParser
<*> fromParser
<*> toParser
<*> intervalParser
<*> percentParser
<*> collapseParser
intervalParser :: Parser (Maybe Interval)
intervalParser = optional $ option parse (metavar "INTERVAL" <> short 'i' <> long "interval")
where
parse :: ReadM Interval
parse = eitherReader $ \case
"daily" -> pure Daily
"weekly" -> pure Weekly
"monthly" -> pure Monthly
"quarterly" -> pure Quarterly
"yearly" -> pure Yearly
c -> Left $ "Unrecognized option: " <> c
commoditiesParser :: Parser (Maybe [Commodity])
commoditiesParser = optional $ option parse options
where
options = long "commodities" <> metavar "COMMODITY" <> short 'c' <> help "The commodity to fetch"
parse = toReadM $ M.parseCommodity `M.sepBy` M.char ','
configFileParser :: Parser FilePath
configFileParser = argument str options
where
options = metavar "CONFIG_FILE" <> help "The dhall config file to parse"
fetchOptions :: Parser Fetch.Options
fetchOptions = Fetch.Options <$> commoditiesParser <*> configFileParser
importOptions :: Parser Import.Config
importOptions =
Import.Config <$> importer <*> inputFile <*> account
where
importer = strOption (metavar "IMPORTER" <> short 'i')
account = option (toReadM M.parseAccount) (metavar "ACCOUNT" <> long "account" <> short 'a')
inputFile = argument str (metavar "INPUT_FILE" <> help "The data file to parse")
inferOptions :: Parser Infer.Options
inferOptions =
Infer.Options <$> trainingFile <*> targetFile
where
trainingFile =
strOption
( metavar "TRAINING_FILE" <> help "The file containing the training data"
<> short 't'
<> long "training-file"
)
targetFile = argument str (metavar "TARGET_FILE")
transcodeOptions :: Parser Transcode.Options
transcodeOptions =
Transcode.Options
<$> option
(toReadM M.parseCommodity)
( metavar "COMMODITY"
<> help "The valuation commodity"
<> long "commodity"
<> short 'c'
)
<*> strOption
( metavar "SOURCE_FILE" <> help "The source file"
<> short 's'
<> long "source-file"
)
<*> argument str (metavar "TARGET_FILE")
cmd :: Parser Command
cmd =
hsubparser $
command
"balance"
(info (Balance <$> balanceOptions) (progDesc "Print a generic balance"))
<> command
"fetch"
(info (Fetch <$> fetchOptions) (progDesc "Fetch latest prices"))
<> command
"import"
(info (Import <$> importOptions) (progDesc "Import transactions"))
<> command
"infer"
(info (Infer <$> inferOptions) (progDesc "Infer accounts"))
<> command
"transcode"
(info (Transcode <$> transcodeOptions) (progDesc "Transcode to beancount"))
parserConfig :: ParserInfo Command
parserConfig =
info
(helper <*> cmd)
(fullDesc <> progDesc "A plain text accounting tool" <> header "beans")
main :: IO ()
main = execParser parserConfig >>= run
| null | https://raw.githubusercontent.com/sboehler/beans/897fc30a602f49906eb952c4fd5c8c0bf05a6beb/app/Main.hs | haskell | module Main where
import qualified Beans.Command.Balance as Balance
import qualified Beans.Command.Fetch as Fetch
import qualified Beans.Command.Import as Import
import qualified Beans.Command.Infer as Infer
import qualified Beans.Command.Transcode as Transcode
import Beans.Commodity (Commodity)
import Beans.Date (Date, Interval (..))
import Beans.Filter (AccountFilter (AccountFilter), CommodityFilter (CommodityFilter), Filter (..))
import qualified Beans.Megaparsec as M
import Beans.Parser (ParserException)
import Beans.Process (ProcessException)
import Control.Monad.Catch (MonadThrow, catch)
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Reader (runReaderT)
import Data.Bool (bool)
import Data.Either.Combinators (rightToMaybe)
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Void (Void)
import Options.Applicative
import qualified Text.Megaparsec as M
import qualified Text.Megaparsec.Char as M
import qualified Text.Megaparsec.Char.Lexer as L
run :: Command -> IO ()
run c =
run' c
`catch` (\(e :: ProcessException) -> print e)
`catch` (\(e :: ParserException) -> print e)
run' :: (MonadIO m, MonadThrow m) => Command -> m ()
run' (Balance options) = runReaderT Balance.run options
run' (Fetch options) = runReaderT Fetch.run options
run' (Import options) = runReaderT Import.run options
run' (Infer options) = runReaderT Infer.run options
run' (Transcode options) = runReaderT Transcode.run options
data Command
= Balance Balance.Options
| Fetch Fetch.Options
| Import Import.Config
| Infer Infer.Options
| Transcode Transcode.Options
deriving (Show)
toReadM :: M.Parsec Void Text a -> ReadM a
toReadM p = maybeReader $ rightToMaybe . M.parse p "" . Text.pack
journalParser :: Parser FilePath
journalParser = strOption options
where
options = short 'j' <> long "journal" <> metavar "JOURNAL" <> help "The journal file to parse"
showCommoditiesParser :: Parser Bool
showCommoditiesParser = switch options
where
options = long "show-commodities" <> short 'c' <> help "Show commodities"
percentParser :: Parser (Maybe AccountFilter)
percentParser = optional $ AccountFilter <$> strOption options
where
options = long "percent" <> metavar "REGEX"
diffingParser :: Parser Balance.Diffing
diffingParser = bool Balance.NoDiffing Balance.Diffing <$> switch options
where
options = long "diff" <> short 'd' <> help "Diff balances"
balanceFormatParser :: Parser Balance.Format
balanceFormatParser = g <$> strOption options
where
options = long "format" <> short 'f' <> help "The format of th report" <> value "hierarchical"
g :: String -> Balance.Format
g "flat" = Balance.Flat
g _ = Balance.Hierarchical
valuationParser :: Parser [Commodity]
valuationParser = option parse options <|> pure []
where
parse = toReadM (M.parseCommodity `M.sepBy` M.char ',')
options = long "val" <> metavar "COMMODITY" <> short 'v' <> help "Valuation at market prices"
filterParser :: Parser Filter
filterParser = Filter <$> af <*> cf
where
af = AccountFilter <$> strOption (long "account-filter" <> value "" <> metavar "REGEX")
cf = CommodityFilter <$> strOption (long "commodity-filter" <> value "" <> metavar "REGEX")
dateparser :: String -> String -> Parser (Maybe Date)
dateparser optionStr helpStr = optional $ option parse options
where
parse = toReadM M.parseISODate
options = long optionStr <> help helpStr <> metavar "YYYY-MM-DD"
collapseParser :: Parser Balance.Collapse
collapseParser = many $ option parse options
where
options = short 'p' <> long "collapse" <> metavar "REGEX,DEPTH"
parse = toReadM $ do
s <- Text.unpack <$> M.takeWhileP Nothing (/= ',')
_ <- M.char ','
d <- L.decimal
pure (AccountFilter s, d)
fromParser, toParser :: Parser (Maybe Date)
fromParser = dateparser "from" "Consider only transactions at or after this date"
toParser = dateparser "to" "Consider only transactions before this date"
balanceOptions :: Parser Balance.Options
balanceOptions =
Balance.Options
<$> journalParser
<*> valuationParser
<*> filterParser
<*> diffingParser
<*> showCommoditiesParser
<*> balanceFormatParser
<*> fromParser
<*> toParser
<*> intervalParser
<*> percentParser
<*> collapseParser
intervalParser :: Parser (Maybe Interval)
intervalParser = optional $ option parse (metavar "INTERVAL" <> short 'i' <> long "interval")
where
parse :: ReadM Interval
parse = eitherReader $ \case
"daily" -> pure Daily
"weekly" -> pure Weekly
"monthly" -> pure Monthly
"quarterly" -> pure Quarterly
"yearly" -> pure Yearly
c -> Left $ "Unrecognized option: " <> c
commoditiesParser :: Parser (Maybe [Commodity])
commoditiesParser = optional $ option parse options
where
options = long "commodities" <> metavar "COMMODITY" <> short 'c' <> help "The commodity to fetch"
parse = toReadM $ M.parseCommodity `M.sepBy` M.char ','
configFileParser :: Parser FilePath
configFileParser = argument str options
where
options = metavar "CONFIG_FILE" <> help "The dhall config file to parse"
fetchOptions :: Parser Fetch.Options
fetchOptions = Fetch.Options <$> commoditiesParser <*> configFileParser
importOptions :: Parser Import.Config
importOptions =
Import.Config <$> importer <*> inputFile <*> account
where
importer = strOption (metavar "IMPORTER" <> short 'i')
account = option (toReadM M.parseAccount) (metavar "ACCOUNT" <> long "account" <> short 'a')
inputFile = argument str (metavar "INPUT_FILE" <> help "The data file to parse")
inferOptions :: Parser Infer.Options
inferOptions =
Infer.Options <$> trainingFile <*> targetFile
where
trainingFile =
strOption
( metavar "TRAINING_FILE" <> help "The file containing the training data"
<> short 't'
<> long "training-file"
)
targetFile = argument str (metavar "TARGET_FILE")
transcodeOptions :: Parser Transcode.Options
transcodeOptions =
Transcode.Options
<$> option
(toReadM M.parseCommodity)
( metavar "COMMODITY"
<> help "The valuation commodity"
<> long "commodity"
<> short 'c'
)
<*> strOption
( metavar "SOURCE_FILE" <> help "The source file"
<> short 's'
<> long "source-file"
)
<*> argument str (metavar "TARGET_FILE")
cmd :: Parser Command
cmd =
hsubparser $
command
"balance"
(info (Balance <$> balanceOptions) (progDesc "Print a generic balance"))
<> command
"fetch"
(info (Fetch <$> fetchOptions) (progDesc "Fetch latest prices"))
<> command
"import"
(info (Import <$> importOptions) (progDesc "Import transactions"))
<> command
"infer"
(info (Infer <$> inferOptions) (progDesc "Infer accounts"))
<> command
"transcode"
(info (Transcode <$> transcodeOptions) (progDesc "Transcode to beancount"))
parserConfig :: ParserInfo Command
parserConfig =
info
(helper <*> cmd)
(fullDesc <> progDesc "A plain text accounting tool" <> header "beans")
main :: IO ()
main = execParser parserConfig >>= run
| |
2238f988d4a08230ce6ca19360f8a936443bb76af2b0f7f4504b3b63ff54ef7c | jeapostrophe/exp | letwreck.rkt | #lang racket/base
(require (except-in rackunit fail)
racket/list)
(define f #f)
(define-syntax-rule (function=? a b)
(begin (set! f a)
(equal? (quote a) (quote b))))
(check-false
(function=? (λ (x) (+ x 1))
(λ (x) (add1 x))))
(check-equal? (f 1) 2)
(define q (quote (λ (x) (+ x 1))))
(check-true (list? q))
(check-equal? (first q) 'λ)
(check-equal? (second q) '(x))
(check-equal? (third q) '(+ x 1))
(define g (λ (y) (+ y 4)))
(define-syntax-rule (m e)
(if (list? 'e)
e
(error 'm "I own you")))
(check-equal? (m ((λ (x) (+ 1 x)) 5)) 6)
(check-exn exn:fail? (λ () (m 6)))
(define-syntax sexp=?
(syntax-rules ()
((sexp=? (a1 . b1) (a2 . b2) yes no)
(sexp=? a1 a2 (sexp=? b1 b2 yes no) no))
((sexp=? (a1 . b1) e2 yes no)
no)
((sexp=? e1 (a2 . b2) yes no)
no)
((sexp=? #(e1 ...) #(e2 ...) yes no)
(sexp=? (e1 ...) (e2 ...) yes no))
((sexp=? #(e1 ...) e2 yes no)
no)
((sexp=? e1 #(e2 ...) yes no)
no)
((sexp=? e1 e2 yes no)
(ident? e1
(ident? e2 (ident=? e1 e2 yes no) no)
(ident? e2 no (const=? e1 e2 yes no))))))
(define-syntax ident?
(syntax-rules ()
((ident? a yes no)
(let-syntax ((test (syntax-rules ()
((test a y n) y)
((test _ y n) n))))
(test *anident* yes no)))))
(check-true (ident? x #t #f))
(check-true (ident? y #t #f))
(check-true (let-syntax ((test (syntax-rules ()
((test x y n) y)
((test _ y n) n))))
(test *anident* #t #f)))
(check-false (ident? 5 #t #f))
(check-false (let-syntax ((test (syntax-rules ()
((test 5 y n) y)
((test _ y n) n))))
(test *anident* #t #f)))
(check-false (ident? (x y) #t #f))
(define-syntax ident=?
(syntax-rules ()
((ident=? a b yes no)
(let-syntax ((test (syntax-rules (a)
((test a y n) y)
((test x y n) n))))
(test b yes no)))))
(define-syntax const=?
(syntax-rules ()
((const=? a b yes no)
(let-syntax ((test (syntax-rules ()
((test a y n) y)
((test _ y n) n))))
(test b yes no)))))
(check-false
(sexp=? (λ (x) (+ x 1))
(λ (x) (add1 x))
#t
#f))
(check-true
(sexp=? (λ (x) (+ x 1))
(λ (x) (+ x 1))
#t
#f))
(check-false
(sexp=? (λ (x) (+ x 1))
(λ (x) (add1 x))
unbound-identifier
#f))
(define-syntax-rule (mylet ([y ye]) be)
((λ (y) be) ye))
(check-equal? (mylet ([x 5]) (+ x 5)) 10)
(define-syntax-rule (weird-macro1 (operator x operand))
(let ([x 5]) (operator x operand)))
(check-equal? (weird-macro1 (+ x 5))
10)
(define-syntax find
(syntax-rules ()
((find ident (a . b) sk fk)
(find ident a sk (find ident b sk fk)))
((find ident #(a ...) sk fk)
(find ident (a ...) sk fk))
((find ident a (sk-op . sk-args) fk)
(ident? a
(ident=? ident a (sk-op a . sk-args) fk)
fk))))
(define-syntax loop
(syntax-rules ()
((loop e)
(let-syntax ((k (syntax-rules ()
((_ ident e*)
(call/cc (lambda (ident)
(let f ()
e*
(f))))))))
(find break e (k e) (k dummy e))))
((loop es ...)
(loop (begin es ...)))))
(module+ test
(let ()
(define x 0)
(define-syntax-rule (lambda (x) e)
42)
(loop (set! x (+ x 1))
(display x)
(when (>= x 100)
(break #f)))))
;; a diversion on success & failure continuations
(define next-choice #f)
(define (pick opts)
(cond
[(empty? opts)
(fail)]
[else
(let/cc the-rest-of-the-program
(define last-choice next-choice)
(set! next-choice
(λ ()
(set! next-choice last-choice)
(the-rest-of-the-program
(pick (rest opts)))))
(the-rest-of-the-program (first opts)))]))
(define (fail)
(if next-choice
(next-choice)
(error 'fail)))
(let ()
(let* ([x (pick '(1 2 3 4 5 6 7 8 9))]
[y (pick '(3 4 5 6 7 8 9))])
(printf "Before X is ~a, Y is ~a\n" x y)
(unless (= x (* 2 y))
(fail))
(printf "After X is ~a, Y is ~a\n" x y)))
(define (epick opts call-me-on-success call-me-on-failure)
(cond
[(empty? opts)
(call-me-on-failure)]
[else
(call-me-on-success
(first opts)
(λ ()
(epick (rest opts)
call-me-on-success
call-me-on-failure)))]))
(epick '(1 2 3 4 5 6 7 8 9)
(λ (x xs-fail)
(epick '(3 4 5 6 7 8 9)
(λ (y ys-fail)
(printf "eBefore X is ~a, Y is ~a\n" x y)
(if (= x (* 2 y))
(printf "eAfter X is ~a, Y is ~a\n" x y)
(ys-fail)))
(λ ()
(xs-fail))))
(λ ()
(error 'fail)))
;; alpha renaming
(let ()
(define (f x)
(+ x 1))
(f 1))
(let ()
(define (f y)
(+ y 1))
(f 1))
(let ()
(define (f a-long-descriptive-name-for-this-variable)
(+ a-long-descriptive-name-for-this-variable 1))
(f 1))
(let ()
(define (this-f a-long-descriptive-name-for-this-variable)
(+ a-long-descriptive-name-for-this-variable 1))
(this-f 1))
;; binding specifications
(define-syntax-rule
;; this is bound here
;; | |
;; v v
(jlet ([x xe] ...) be)
((λ (x ...) be) xe ...))
(define-syntax jlet*
(syntax-rules ()
[(_ () be)
be]
[(_ ([x0 ;; <-- this
xe0]
;; is bound here
vvvvvvvvvvvvvvvvvvvv
[xN xeN]
...) be)
(jlet ([x0 xe0])
(jlet* ([xN xeN] ...)
be))]))
(define-syntax jletrec
(syntax-rules ()
[(_ ([xN ;; <- these are bound everywhere inside the jletrec
xeN] ...) be)
(jlet ([xN #f]
...)
(begin (set! xN xeN)
...
be))]))
(require (for-syntax racket/base
syntax/parse
racket/syntax))
(define-syntax (jletwreck stx)
(syntax-parse stx
[(_ ([binding:id (other-binding:id ...) bound-body:expr]
...)
body:expr)
(with-syntax* ([(new-binding ...)
(generate-temporaries #'(binding ...))]
[((new-other-binding ...) ...)
(for/list ([obs (in-list (syntax->list #'((other-binding ...) ...)))])
(for/list ([ob (in-list (syntax->list obs))])
(for/or ([old (in-list (syntax->list #'(binding ...)))]
[new (in-list (syntax->list #'(new-binding ...)))])
(and (bound-identifier=? old ob)
new))))])
(syntax/loc stx
(jletrec ([new-binding
(let-syntax ([other-binding (make-rename-transformer #'new-other-binding)]
...)
bound-body)] ...)
(let-syntax ([binding (make-rename-transformer #'new-binding)]
...)
body))))]))
(jlet ([x 'x] [y 'y] [z 'z] [h 'h] [i 'i])
(jletwreck
([x (i) (list x y z h i)]
[y (x) (list x y z h i)]
[z (y) (list x y z h i)]
[h (x z) (list x y z h i)]
[i () (list x y z h i)])
(list x y z h i)))
(define-syntax-rule (t e) (λ () e))
(define-syntax-rule (tlist e ...) (t (list (e) ...)))
(jlet ([x (t 'x)] [y (t 'y)] [z (t 'z)] [h (t 'h)] [i (t 'i)])
(jletwreck
([x (i) (tlist x y z h i)]
[y (x) (tlist x y z h i)]
[z (y) (tlist x y z h i)]
[h (x z) (tlist x y z h i)]
[i () (tlist x y z h i)])
((tlist x y z h i))))
| null | https://raw.githubusercontent.com/jeapostrophe/exp/43615110fd0439d2ef940c42629fcdc054c370f9/letwreck.rkt | racket | a diversion on success & failure continuations
alpha renaming
binding specifications
this is bound here
| |
v v
<-- this
is bound here
<- these are bound everywhere inside the jletrec | #lang racket/base
(require (except-in rackunit fail)
racket/list)
(define f #f)
(define-syntax-rule (function=? a b)
(begin (set! f a)
(equal? (quote a) (quote b))))
(check-false
(function=? (λ (x) (+ x 1))
(λ (x) (add1 x))))
(check-equal? (f 1) 2)
(define q (quote (λ (x) (+ x 1))))
(check-true (list? q))
(check-equal? (first q) 'λ)
(check-equal? (second q) '(x))
(check-equal? (third q) '(+ x 1))
(define g (λ (y) (+ y 4)))
(define-syntax-rule (m e)
(if (list? 'e)
e
(error 'm "I own you")))
(check-equal? (m ((λ (x) (+ 1 x)) 5)) 6)
(check-exn exn:fail? (λ () (m 6)))
(define-syntax sexp=?
(syntax-rules ()
((sexp=? (a1 . b1) (a2 . b2) yes no)
(sexp=? a1 a2 (sexp=? b1 b2 yes no) no))
((sexp=? (a1 . b1) e2 yes no)
no)
((sexp=? e1 (a2 . b2) yes no)
no)
((sexp=? #(e1 ...) #(e2 ...) yes no)
(sexp=? (e1 ...) (e2 ...) yes no))
((sexp=? #(e1 ...) e2 yes no)
no)
((sexp=? e1 #(e2 ...) yes no)
no)
((sexp=? e1 e2 yes no)
(ident? e1
(ident? e2 (ident=? e1 e2 yes no) no)
(ident? e2 no (const=? e1 e2 yes no))))))
(define-syntax ident?
(syntax-rules ()
((ident? a yes no)
(let-syntax ((test (syntax-rules ()
((test a y n) y)
((test _ y n) n))))
(test *anident* yes no)))))
(check-true (ident? x #t #f))
(check-true (ident? y #t #f))
(check-true (let-syntax ((test (syntax-rules ()
((test x y n) y)
((test _ y n) n))))
(test *anident* #t #f)))
(check-false (ident? 5 #t #f))
(check-false (let-syntax ((test (syntax-rules ()
((test 5 y n) y)
((test _ y n) n))))
(test *anident* #t #f)))
(check-false (ident? (x y) #t #f))
(define-syntax ident=?
(syntax-rules ()
((ident=? a b yes no)
(let-syntax ((test (syntax-rules (a)
((test a y n) y)
((test x y n) n))))
(test b yes no)))))
(define-syntax const=?
(syntax-rules ()
((const=? a b yes no)
(let-syntax ((test (syntax-rules ()
((test a y n) y)
((test _ y n) n))))
(test b yes no)))))
(check-false
(sexp=? (λ (x) (+ x 1))
(λ (x) (add1 x))
#t
#f))
(check-true
(sexp=? (λ (x) (+ x 1))
(λ (x) (+ x 1))
#t
#f))
(check-false
(sexp=? (λ (x) (+ x 1))
(λ (x) (add1 x))
unbound-identifier
#f))
(define-syntax-rule (mylet ([y ye]) be)
((λ (y) be) ye))
(check-equal? (mylet ([x 5]) (+ x 5)) 10)
(define-syntax-rule (weird-macro1 (operator x operand))
(let ([x 5]) (operator x operand)))
(check-equal? (weird-macro1 (+ x 5))
10)
(define-syntax find
(syntax-rules ()
((find ident (a . b) sk fk)
(find ident a sk (find ident b sk fk)))
((find ident #(a ...) sk fk)
(find ident (a ...) sk fk))
((find ident a (sk-op . sk-args) fk)
(ident? a
(ident=? ident a (sk-op a . sk-args) fk)
fk))))
(define-syntax loop
(syntax-rules ()
((loop e)
(let-syntax ((k (syntax-rules ()
((_ ident e*)
(call/cc (lambda (ident)
(let f ()
e*
(f))))))))
(find break e (k e) (k dummy e))))
((loop es ...)
(loop (begin es ...)))))
(module+ test
(let ()
(define x 0)
(define-syntax-rule (lambda (x) e)
42)
(loop (set! x (+ x 1))
(display x)
(when (>= x 100)
(break #f)))))
(define next-choice #f)
(define (pick opts)
(cond
[(empty? opts)
(fail)]
[else
(let/cc the-rest-of-the-program
(define last-choice next-choice)
(set! next-choice
(λ ()
(set! next-choice last-choice)
(the-rest-of-the-program
(pick (rest opts)))))
(the-rest-of-the-program (first opts)))]))
(define (fail)
(if next-choice
(next-choice)
(error 'fail)))
(let ()
(let* ([x (pick '(1 2 3 4 5 6 7 8 9))]
[y (pick '(3 4 5 6 7 8 9))])
(printf "Before X is ~a, Y is ~a\n" x y)
(unless (= x (* 2 y))
(fail))
(printf "After X is ~a, Y is ~a\n" x y)))
(define (epick opts call-me-on-success call-me-on-failure)
(cond
[(empty? opts)
(call-me-on-failure)]
[else
(call-me-on-success
(first opts)
(λ ()
(epick (rest opts)
call-me-on-success
call-me-on-failure)))]))
(epick '(1 2 3 4 5 6 7 8 9)
(λ (x xs-fail)
(epick '(3 4 5 6 7 8 9)
(λ (y ys-fail)
(printf "eBefore X is ~a, Y is ~a\n" x y)
(if (= x (* 2 y))
(printf "eAfter X is ~a, Y is ~a\n" x y)
(ys-fail)))
(λ ()
(xs-fail))))
(λ ()
(error 'fail)))
(let ()
(define (f x)
(+ x 1))
(f 1))
(let ()
(define (f y)
(+ y 1))
(f 1))
(let ()
(define (f a-long-descriptive-name-for-this-variable)
(+ a-long-descriptive-name-for-this-variable 1))
(f 1))
(let ()
(define (this-f a-long-descriptive-name-for-this-variable)
(+ a-long-descriptive-name-for-this-variable 1))
(this-f 1))
(define-syntax-rule
(jlet ([x xe] ...) be)
((λ (x ...) be) xe ...))
(define-syntax jlet*
(syntax-rules ()
[(_ () be)
be]
xe0]
vvvvvvvvvvvvvvvvvvvv
[xN xeN]
...) be)
(jlet ([x0 xe0])
(jlet* ([xN xeN] ...)
be))]))
(define-syntax jletrec
(syntax-rules ()
xeN] ...) be)
(jlet ([xN #f]
...)
(begin (set! xN xeN)
...
be))]))
(require (for-syntax racket/base
syntax/parse
racket/syntax))
(define-syntax (jletwreck stx)
(syntax-parse stx
[(_ ([binding:id (other-binding:id ...) bound-body:expr]
...)
body:expr)
(with-syntax* ([(new-binding ...)
(generate-temporaries #'(binding ...))]
[((new-other-binding ...) ...)
(for/list ([obs (in-list (syntax->list #'((other-binding ...) ...)))])
(for/list ([ob (in-list (syntax->list obs))])
(for/or ([old (in-list (syntax->list #'(binding ...)))]
[new (in-list (syntax->list #'(new-binding ...)))])
(and (bound-identifier=? old ob)
new))))])
(syntax/loc stx
(jletrec ([new-binding
(let-syntax ([other-binding (make-rename-transformer #'new-other-binding)]
...)
bound-body)] ...)
(let-syntax ([binding (make-rename-transformer #'new-binding)]
...)
body))))]))
(jlet ([x 'x] [y 'y] [z 'z] [h 'h] [i 'i])
(jletwreck
([x (i) (list x y z h i)]
[y (x) (list x y z h i)]
[z (y) (list x y z h i)]
[h (x z) (list x y z h i)]
[i () (list x y z h i)])
(list x y z h i)))
(define-syntax-rule (t e) (λ () e))
(define-syntax-rule (tlist e ...) (t (list (e) ...)))
(jlet ([x (t 'x)] [y (t 'y)] [z (t 'z)] [h (t 'h)] [i (t 'i)])
(jletwreck
([x (i) (tlist x y z h i)]
[y (x) (tlist x y z h i)]
[z (y) (tlist x y z h i)]
[h (x z) (tlist x y z h i)]
[i () (tlist x y z h i)])
((tlist x y z h i))))
|
6f8966885e1cb82fc730c7a9eadcdcbb9443d99c7689178f3d06f922af12358d | Verites/verigraph | Derivation.hs | module Abstract.Rewriting.DPO.Derivation
( Derivation(..)
, generateDerivation
, getDObjects
, getAllBottomObjects
, getLeftBottomMorphisms
, getRightBottomMorphisms
)
where
import Abstract.Category
import Abstract.Rewriting.DPO
data Derivation morph = Derivation
{ production :: Production morph
, match :: morph
, comatch :: morph
, gluing :: morph
, dToG :: morph
, dToH :: morph
} deriving (Eq, Show, Read)
generateDerivationUnsafe :: (DPO morph) => morph -> Production morph -> Derivation morph
generateDerivationUnsafe morph p = Derivation p morph n k f g
where
(k,n,f,g) = calculateDPO morph p
| Given a match @m@ and a production @p@ , it returns @Just d@ , where @d@ is the corresponding Derivation if @m@ satisfies the rewriting conditions , or @Nothing@.
generateDerivation :: (DPO morph) => MorphismsConfig morph -> morph -> Production morph -> Maybe (Derivation morph)
generateDerivation conf morph p =
if satisfiesRewritingConditions conf p morph then
Just (generateDerivationUnsafe morph p)
else Nothing
getDObjects :: (DPO morph) => [Derivation morph] -> [Obj morph]
getDObjects = fmap (domain . dToG)
getLeftBottomMorphisms :: [Derivation morph] -> [morph]
getLeftBottomMorphisms = fmap dToG
getRightBottomMorphisms :: [Derivation morph] -> [morph]
getRightBottomMorphisms = fmap dToH
getBottomObjects :: (DPO morph) => Derivation morph -> (Obj morph,Obj morph,Obj morph)
getBottomObjects d =
let l = codomain . dToG
k = domain . dToG
r = codomain . dToH
in (l d, k d, r d)
getAllBottomObjects :: (DPO morph) => [Derivation morph] -> [Obj morph]
getAllBottomObjects [] = error "can not return objects of an empty derivation"
getAllBottomObjects [d] = (\(a,b,c) -> [a,b,c]) $ getBottomObjects d
getAllBottomObjects (d:ds) = (\(a,b,_) -> [a,b]) (getBottomObjects d) ++ getAllBottomObjects ds
| null | https://raw.githubusercontent.com/Verites/verigraph/754ec08bf4a55ea7402d8cd0705e58b1d2c9cd67/src/library/Abstract/Rewriting/DPO/Derivation.hs | haskell | module Abstract.Rewriting.DPO.Derivation
( Derivation(..)
, generateDerivation
, getDObjects
, getAllBottomObjects
, getLeftBottomMorphisms
, getRightBottomMorphisms
)
where
import Abstract.Category
import Abstract.Rewriting.DPO
data Derivation morph = Derivation
{ production :: Production morph
, match :: morph
, comatch :: morph
, gluing :: morph
, dToG :: morph
, dToH :: morph
} deriving (Eq, Show, Read)
generateDerivationUnsafe :: (DPO morph) => morph -> Production morph -> Derivation morph
generateDerivationUnsafe morph p = Derivation p morph n k f g
where
(k,n,f,g) = calculateDPO morph p
| Given a match @m@ and a production @p@ , it returns @Just d@ , where @d@ is the corresponding Derivation if @m@ satisfies the rewriting conditions , or @Nothing@.
generateDerivation :: (DPO morph) => MorphismsConfig morph -> morph -> Production morph -> Maybe (Derivation morph)
generateDerivation conf morph p =
if satisfiesRewritingConditions conf p morph then
Just (generateDerivationUnsafe morph p)
else Nothing
getDObjects :: (DPO morph) => [Derivation morph] -> [Obj morph]
getDObjects = fmap (domain . dToG)
getLeftBottomMorphisms :: [Derivation morph] -> [morph]
getLeftBottomMorphisms = fmap dToG
getRightBottomMorphisms :: [Derivation morph] -> [morph]
getRightBottomMorphisms = fmap dToH
getBottomObjects :: (DPO morph) => Derivation morph -> (Obj morph,Obj morph,Obj morph)
getBottomObjects d =
let l = codomain . dToG
k = domain . dToG
r = codomain . dToH
in (l d, k d, r d)
getAllBottomObjects :: (DPO morph) => [Derivation morph] -> [Obj morph]
getAllBottomObjects [] = error "can not return objects of an empty derivation"
getAllBottomObjects [d] = (\(a,b,c) -> [a,b,c]) $ getBottomObjects d
getAllBottomObjects (d:ds) = (\(a,b,_) -> [a,b]) (getBottomObjects d) ++ getAllBottomObjects ds
| |
bf2bc514ec4f25edf2cef5790abc81f25133982b43b1fc8f12aa80ac98bf8ee7 | ocaml-flambda/ocaml-jst | env.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Environment handling *)
open Cmi_format
open Misc
open Asttypes
open Longident
open Path
open Types
open Local_store
module String = Misc.Stdlib.String
let add_delayed_check_forward = ref (fun _ -> assert false)
type 'a usage_tbl = ('a -> unit) Types.Uid.Tbl.t
* This table is used to track usage of value declarations .
A declaration is identified by its uid .
The callback attached to a declaration is called whenever the value ( or
type , or ... ) is used explicitly ( lookup_value , ... ) or implicitly
( inclusion test between signatures , cf , ... ) .
A declaration is identified by its uid.
The callback attached to a declaration is called whenever the value (or
type, or ...) is used explicitly (lookup_value, ...) or implicitly
(inclusion test between signatures, cf Includemod.value_descriptions, ...).
*)
let value_declarations : unit usage_tbl ref = s_table Types.Uid.Tbl.create 16
let type_declarations : unit usage_tbl ref = s_table Types.Uid.Tbl.create 16
let module_declarations : unit usage_tbl ref = s_table Types.Uid.Tbl.create 16
let uid_to_loc : Location.t Types.Uid.Tbl.t ref =
s_table Types.Uid.Tbl.create 16
let register_uid uid loc = Types.Uid.Tbl.add !uid_to_loc uid loc
let get_uid_to_loc_tbl () = !uid_to_loc
type constructor_usage = Positive | Pattern | Exported_private | Exported
type constructor_usages =
{
mutable cu_positive: bool;
mutable cu_pattern: bool;
mutable cu_exported_private: bool;
}
let add_constructor_usage cu usage =
match usage with
| Positive -> cu.cu_positive <- true
| Pattern -> cu.cu_pattern <- true
| Exported_private -> cu.cu_exported_private <- true
| Exported ->
cu.cu_positive <- true;
cu.cu_pattern <- true;
cu.cu_exported_private <- true
let constructor_usages () =
{cu_positive = false; cu_pattern = false; cu_exported_private = false}
let constructor_usage_complaint ~rebind priv cu
: Warnings.constructor_usage_warning option =
match priv, rebind with
| Asttypes.Private, _ | _, true ->
if cu.cu_positive || cu.cu_pattern || cu.cu_exported_private then None
else Some Unused
| Asttypes.Public, false -> begin
match cu.cu_positive, cu.cu_pattern, cu.cu_exported_private with
| true, _, _ -> None
| false, false, false -> Some Unused
| false, true, _ -> Some Not_constructed
| false, false, true -> Some Only_exported_private
end
let used_constructors : constructor_usage usage_tbl ref =
s_table Types.Uid.Tbl.create 16
type label_usage =
Projection | Mutation | Construct | Exported_private | Exported
type label_usages =
{
mutable lu_projection: bool;
mutable lu_mutation: bool;
mutable lu_construct: bool;
}
let add_label_usage lu usage =
match usage with
| Projection -> lu.lu_projection <- true;
| Mutation -> lu.lu_mutation <- true
| Construct -> lu.lu_construct <- true
| Exported_private ->
lu.lu_projection <- true
| Exported ->
lu.lu_projection <- true;
lu.lu_mutation <- true;
lu.lu_construct <- true
let is_mutating_label_usage = function
| Mutation -> true
| (Projection | Construct | Exported_private | Exported) -> false
let label_usages () =
{lu_projection = false; lu_mutation = false; lu_construct = false}
let label_usage_complaint priv mut lu
: Warnings.field_usage_warning option =
match priv, mut with
| Asttypes.Private, _ ->
if lu.lu_projection then None
else Some Unused
| Asttypes.Public, Asttypes.Immutable -> begin
match lu.lu_projection, lu.lu_construct with
| true, _ -> None
| false, false -> Some Unused
| false, true -> Some Not_read
end
| Asttypes.Public, Asttypes.Mutable -> begin
match lu.lu_projection, lu.lu_mutation, lu.lu_construct with
| true, true, _ -> None
| false, false, false -> Some Unused
| false, _, _ -> Some Not_read
| true, false, _ -> Some Not_mutated
end
let used_labels : label_usage usage_tbl ref =
s_table Types.Uid.Tbl.create 16
(** Map indexed by the name of module components. *)
module NameMap = String.Map
type value_unbound_reason =
| Val_unbound_instance_variable
| Val_unbound_self
| Val_unbound_ancestor
| Val_unbound_ghost_recursive of Location.t
type module_unbound_reason =
| Mod_unbound_illegal_recursion
type summary =
Env_empty
| Env_value of summary * Ident.t * value_description
| Env_type of summary * Ident.t * type_declaration
| Env_extension of summary * Ident.t * extension_constructor
| Env_module of summary * Ident.t * module_presence * module_declaration
| Env_modtype of summary * Ident.t * modtype_declaration
| Env_class of summary * Ident.t * class_declaration
| Env_cltype of summary * Ident.t * class_type_declaration
| Env_open of summary * Path.t
| Env_functor_arg of summary * Ident.t
| Env_constraints of summary * type_declaration Path.Map.t
| Env_copy_types of summary
| Env_persistent of summary * Ident.t
| Env_value_unbound of summary * string * value_unbound_reason
| Env_module_unbound of summary * string * module_unbound_reason
let map_summary f = function
Env_empty -> Env_empty
| Env_value (s, id, d) -> Env_value (f s, id, d)
| Env_type (s, id, d) -> Env_type (f s, id, d)
| Env_extension (s, id, d) -> Env_extension (f s, id, d)
| Env_module (s, id, p, d) -> Env_module (f s, id, p, d)
| Env_modtype (s, id, d) -> Env_modtype (f s, id, d)
| Env_class (s, id, d) -> Env_class (f s, id, d)
| Env_cltype (s, id, d) -> Env_cltype (f s, id, d)
| Env_open (s, p) -> Env_open (f s, p)
| Env_functor_arg (s, id) -> Env_functor_arg (f s, id)
| Env_constraints (s, m) -> Env_constraints (f s, m)
| Env_copy_types s -> Env_copy_types (f s)
| Env_persistent (s, id) -> Env_persistent (f s, id)
| Env_value_unbound (s, u, r) -> Env_value_unbound (f s, u, r)
| Env_module_unbound (s, u, r) -> Env_module_unbound (f s, u, r)
type address =
| Aunit of Compilation_unit.t
| Alocal of Ident.t
| Adot of address * int
module TycompTbl =
struct
(** This module is used to store components of types (i.e. labels
and constructors). We keep a representation of each nested
"open" and the set of local bindings between each of them. *)
type 'a t = {
current: 'a Ident.tbl;
(** Local bindings since the last open. *)
opened: 'a opened option;
(** Symbolic representation of the last (innermost) open, if any. *)
}
and 'a opened = {
components: ('a list) NameMap.t;
* Components from the opened module . We keep a list of
bindings for each name , as in comp_labels and
comp_constrs .
bindings for each name, as in comp_labels and
comp_constrs. *)
root: Path.t;
(** Only used to check removal of open *)
using: (string -> ('a * 'a) option -> unit) option;
(** A callback to be applied when a component is used from this
"open". This is used to detect unused "opens". The
arguments are used to detect shadowing. *)
next: 'a t;
(** The table before opening the module. *)
}
let empty = { current = Ident.empty; opened = None }
let add id x tbl =
{tbl with current = Ident.add id x tbl.current}
let add_open slot wrap root components next =
let using =
match slot with
| None -> None
| Some f -> Some (fun s x -> f s (wrap x))
in
{
current = Ident.empty;
opened = Some {using; components; root; next};
}
let remove_last_open rt tbl =
match tbl.opened with
| Some {root; next; _} when Path.same rt root ->
{ next with current =
Ident.fold_all Ident.add tbl.current next.current }
| _ ->
assert false
let rec find_same id tbl =
try Ident.find_same id tbl.current
with Not_found as exn ->
begin match tbl.opened with
| Some {next; _} -> find_same id next
| None -> raise exn
end
let nothing = fun () -> ()
let mk_callback rest name desc using =
match using with
| None -> nothing
| Some f ->
(fun () ->
match rest with
| [] -> f name None
| (hidden, _) :: _ -> f name (Some (desc, hidden)))
let rec find_all ~mark name tbl =
List.map (fun (_id, desc) -> desc, nothing)
(Ident.find_all name tbl.current) @
match tbl.opened with
| None -> []
| Some {using; next; components; root = _} ->
let rest = find_all ~mark name next in
let using = if mark then using else None in
match NameMap.find name components with
| exception Not_found -> rest
| opened ->
List.map
(fun desc -> desc, mk_callback rest name desc using)
opened
@ rest
let rec fold_name f tbl acc =
let acc = Ident.fold_name (fun _id d -> f d) tbl.current acc in
match tbl.opened with
| Some {using = _; next; components; root = _} ->
acc
|> NameMap.fold
(fun _name -> List.fold_right f)
components
|> fold_name f next
| None ->
acc
let rec local_keys tbl acc =
let acc = Ident.fold_all (fun k _ accu -> k::accu) tbl.current acc in
match tbl.opened with
| Some o -> local_keys o.next acc
| None -> acc
let diff_keys is_local tbl1 tbl2 =
let keys2 = local_keys tbl2 [] in
List.filter
(fun id ->
is_local (find_same id tbl2) &&
try ignore (find_same id tbl1); false
with Not_found -> true)
keys2
end
type empty = |
type escaping_context =
| Return
| Tailcall_argument
| Tailcall_function
| Partial_application
type value_lock =
| Lock of { mode : Alloc_mode.t; escaping_context : escaping_context option }
| Region_lock
module IdTbl =
  struct
    (** This module is used to store all kinds of components except
        (labels and constructors) in environments.  We keep a
        representation of each nested "open" and the set of local
        bindings between each of them. *)

    type ('lock, 'a, 'b) t = {
      current: 'a Ident.tbl;
      (** Local bindings since the last open or lock *)

      layer: ('lock, 'a, 'b) layer;
      (** Symbolic representation of the last (innermost) open, if any. *)
    }

    and ('lock, 'a, 'b) layer =
      | Open of {
          root: Path.t;
          (** The path of the opened module, to be prefixed in front of
              its local names to produce a valid path in the current
              environment. *)

          components: 'b NameMap.t;
          (** Components from the opened module. *)

          using: (string -> ('a * 'a) option -> unit) option;
          (** A callback to be applied when a component is used from this
              "open".  This is used to detect unused "opens".  The
              arguments are used to detect shadowing. *)

          next: ('lock, 'a, 'b) t;
          (** The table before opening the module. *)
        }

      | Map of {
          f: ('a -> 'a);
          next: ('lock, 'a, 'b) t;
        }

      | Lock of {
          mode: 'lock;
          next: ('lock, 'a, 'b) t;
        }

      | Nothing

    let empty = { current = Ident.empty; layer = Nothing }

    let add id x tbl =
      {tbl with current = Ident.add id x tbl.current}

    let remove id tbl =
      {tbl with current = Ident.remove id tbl.current}

    (* Push a new [Open] layer for the module at path [root].  [wrap]
       injects components into the table's element type for the
       shadowing callback [slot]. *)
    let add_open slot wrap root components next =
      let using =
        match slot with
        | None -> None
        | Some f -> Some (fun s x -> f s (wrap x))
      in
      {
        current = Ident.empty;
        layer = Open {using; root; components; next};
      }

    (* Undo the innermost open, which must be of [rt]: fold the bindings
       made since the open back into the previous table. *)
    let remove_last_open rt tbl =
      match tbl.layer with
      | Open {root; next; _} when Path.same rt root ->
          { next with current =
              Ident.fold_all Ident.add tbl.current next.current }
      | _ ->
          assert false

    let add_lock mode next =
      { current = Ident.empty; layer = Lock {mode; next} }

    let map f next =
      {
        current = Ident.empty;
        layer = Map {f; next}
      }

    (* Lookup by identity.  [Open] components are only reachable by
       name, so they are skipped; [Map] functions are applied on the way
       out; [Lock]s are transparent here. *)
    let rec find_same id tbl =
      try Ident.find_same id tbl.current
      with Not_found as exn ->
        begin match tbl.layer with
        | Open {next; _} -> find_same id next
        | Map {f; next} -> f (find_same id next)
        | Lock {mode=_; next} -> find_same id next
        | Nothing -> raise exn
        end

    (* Lookup by name.  [macc] accumulates the locks crossed between the
       use site and the binding.  When [mark] is set, the [using]
       callback of a traversed open fires, with the shadowed description
       (if any) found by a non-marking lookup below the open. *)
    let rec find_name_and_locks wrap ~mark name tbl macc =
      try
        let (id, desc) = Ident.find_name name tbl.current in
        Pident id, macc, desc
      with Not_found as exn ->
        begin match tbl.layer with
        | Open {using; root; next; components} ->
            begin try
              let descr = wrap (NameMap.find name components) in
              let res = Pdot (root, name), macc, descr in
              if mark then begin match using with
              | None -> ()
              | Some f -> begin
                  match find_name_and_locks wrap ~mark:false name next macc with
                  | exception Not_found -> f name None
                  | _, _, descr' -> f name (Some (descr', descr))
                end
              end;
              res
            with Not_found ->
              find_name_and_locks wrap ~mark name next macc
            end
        | Map {f; next} ->
            let (p, macc, desc) =
              find_name_and_locks wrap ~mark name next macc in
            p, macc, f desc
        | Lock {mode; next} ->
            find_name_and_locks wrap ~mark name next (mode :: macc)
        | Nothing ->
            raise exn
        end

    let find_name_and_modes wrap ~mark name tbl =
      find_name_and_locks wrap ~mark name tbl []

    (* Lookup in a table whose lock type is [empty]: the lock list is
       statically known to be []. *)
    let find_name wrap ~mark name tbl =
      let (id, ([] : empty list), desc) =
        find_name_and_modes wrap ~mark name tbl in
      id, desc

    (* All bindings of [name], innermost first, with their paths. *)
    let rec find_all wrap name tbl =
      List.map
        (fun (id, desc) -> Pident id, desc)
        (Ident.find_all name tbl.current) @
      match tbl.layer with
      | Nothing -> []
      | Open {root; using = _; next; components} ->
          begin try
            let desc = wrap (NameMap.find name components) in
            (Pdot (root, name), desc) :: find_all wrap name next
          with Not_found ->
            find_all wrap name next
          end
      | Map {f; next} ->
          List.map (fun (p, desc) -> (p, f desc))
            (find_all wrap name next)
      | Lock {mode=_;next} ->
          find_all wrap name next

    (* Fold [f] over every binding (local and opened), with its path. *)
    let rec fold_name wrap f tbl acc =
      let acc =
        Ident.fold_name
          (fun id d -> f (Ident.name id) (Pident id, d))
          tbl.current acc
      in
      match tbl.layer with
      | Open {root; using = _; next; components} ->
          acc
          |> NameMap.fold
               (fun name desc -> f name (Pdot (root, name), wrap desc))
               components
          |> fold_name wrap f next
      | Nothing ->
          acc
      | Map {f=g; next} ->
          acc
          |> fold_name wrap
               (fun name (path, desc) -> f name (path, g desc))
               next
      | Lock {mode=_; next} ->
          fold_name wrap f next acc

    (* All identifiers bound directly (not through an open's
       components), consed onto [acc]. *)
    let rec local_keys tbl acc =
      let acc = Ident.fold_all (fun k _ accu -> k::accu) tbl.current acc in
      match tbl.layer with
      | Open {next; _ } | Map {next; _} | Lock {next; _} -> local_keys next acc
      | Nothing -> acc

    let rec iter wrap f tbl =
      Ident.iter (fun id desc -> f id (Pident id, desc)) tbl.current;
      match tbl.layer with
      | Open {root; using = _; next; components} ->
          NameMap.iter
            (fun s x ->
               let root_scope = Path.scope root in
               f (Ident.create_scoped ~scope:root_scope s)
                 (Pdot (root, s), wrap x))
            components;
          iter wrap f next
      | Map {f=g; next} ->
          iter wrap (fun id (path, desc) -> f id (path, g desc)) next
      | Lock {mode=_; next} ->
          iter wrap f next
      | Nothing -> ()

    (* Keys bound in [tbl2] but not in [tbl1]. *)
    let diff_keys tbl1 tbl2 =
      let keys2 = local_keys tbl2 [] in
      List.filter
        (fun id ->
           try ignore (find_same id tbl1); false
           with Not_found -> true)
        keys2
  end
(* Descriptions of a datatype's labels or constructors, indexed by the
   kind of the type declaration. *)
type type_descr_kind =
  (label_description, constructor_description) type_kind

type type_descriptions = type_descr_kind

(* Bit set in [t.flags] while inside a signature. *)
let in_signature_flag = 0x01

(* The type of environments. *)
type t = {
  values: (value_lock, value_entry, value_data) IdTbl.t;
  constrs: constructor_data TycompTbl.t;
  labels: label_data TycompTbl.t;
  types: (empty, type_data, type_data) IdTbl.t;
  modules: (empty, module_entry, module_data) IdTbl.t;
  modtypes: (empty, modtype_data, modtype_data) IdTbl.t;
  classes: (empty, class_data, class_data) IdTbl.t;
  cltypes: (empty, cltype_data, cltype_data) IdTbl.t;
  functor_args: unit Ident.tbl;
  summary: summary;
  local_constraints: type_declaration Path.Map.t;
  flags: int;
}

and module_components =
  {
    alerts: alerts;
    uid: Uid.t;
    comps:
      (components_maker,
       (module_components_repr, module_components_failure) result)
        Lazy_backtrack.t;
  }

(* Everything needed to (lazily) compute a module's components. *)
and components_maker = {
  cm_env: t;
  cm_prefixing_subst: Subst.t;
  cm_path: Path.t;
  cm_addr: address_lazy;
  cm_mty: Subst.Lazy.modtype;
  cm_shape: Shape.t;
}

and module_components_repr =
    Structure_comps of structure_components
  | Functor_comps of functor_components

and module_components_failure =
  | No_components_abstract
  | No_components_alias of Path.t

and structure_components = {
  mutable comp_values: value_data NameMap.t;
  mutable comp_constrs: constructor_data list NameMap.t;
  mutable comp_labels: label_data list NameMap.t;
  mutable comp_types: type_data NameMap.t;
  mutable comp_modules: module_data NameMap.t;
  mutable comp_modtypes: modtype_data NameMap.t;
  mutable comp_classes: class_data NameMap.t;
  mutable comp_cltypes: cltype_data NameMap.t;
}

and functor_components = {
  fcomp_arg: functor_parameter;
  (* Formal parameter and argument signature *)
  fcomp_res: module_type;               (* Result signature *)
  fcomp_shape: Shape.t;
  fcomp_cache: (Path.t, module_components) Hashtbl.t;  (* For memoization *)
  fcomp_subst_cache: (Path.t, module_type) Hashtbl.t
}

(* Addresses are computed on demand: either a projection out of a
   parent's block, or the address of an aliased module. *)
and address_unforced =
  | Projection of { parent : address_lazy; pos : int; }
  | ModAlias of { env : t; path : Path.t; }

and address_lazy = (address_unforced, address) Lazy_backtrack.t

and value_data =
  { vda_description : value_description;
    vda_address : address_lazy;
    vda_mode : Value_mode.t;
    vda_shape : Shape.t }

and value_entry =
  | Val_bound of value_data
  | Val_unbound of value_unbound_reason

and constructor_data =
  { cda_description : constructor_description;
    cda_address : address_lazy option;
    cda_shape: Shape.t; }

and label_data = label_description

and type_data =
  { tda_declaration : type_declaration;
    tda_descriptions : type_descriptions;
    tda_shape : Shape.t; }

and module_data =
  { mda_declaration : Subst.Lazy.module_decl;
    mda_components : module_components;
    mda_address : address_lazy;
    mda_shape: Shape.t; }

and module_entry =
  | Mod_local of module_data
  | Mod_persistent
  | Mod_unbound of module_unbound_reason

and modtype_data =
  { mtda_declaration : Subst.Lazy.modtype_declaration;
    mtda_shape : Shape.t; }

and class_data =
  { clda_declaration : class_declaration;
    clda_address : address_lazy;
    clda_shape : Shape.t }

and cltype_data =
  { cltda_declaration : class_type_declaration;
    cltda_shape : Shape.t }

(* Used by [get_components] as the result when a module's components
   cannot be computed. *)
let empty_structure =
  Structure_comps {
    comp_values = NameMap.empty;
    comp_constrs = NameMap.empty;
    comp_labels = NameMap.empty;
    comp_types = NameMap.empty;
    comp_modules = NameMap.empty; comp_modtypes = NameMap.empty;
    comp_classes = NameMap.empty;
    comp_cltypes = NameMap.empty }
type unbound_value_hint =
  | No_hint
  | Missing_rec of Location.t

(* Errors raised by by-name lookups; reported together with the
   environment in which the lookup failed. *)
type lookup_error =
  | Unbound_value of Longident.t * unbound_value_hint
  | Unbound_type of Longident.t
  | Unbound_constructor of Longident.t
  | Unbound_label of Longident.t
  | Unbound_module of Longident.t
  | Unbound_class of Longident.t
  | Unbound_modtype of Longident.t
  | Unbound_cltype of Longident.t
  | Unbound_instance_variable of string
  | Not_an_instance_variable of string
  | Masked_instance_variable of Longident.t
  | Masked_self_variable of Longident.t
  | Masked_ancestor_variable of Longident.t
  | Structure_used_as_functor of Longident.t
  | Abstract_used_as_functor of Longident.t
  | Functor_used_as_structure of Longident.t
  | Abstract_used_as_structure of Longident.t
  | Generative_used_as_applicative of Longident.t
  | Illegal_reference_to_recursive_module
  | Cannot_scrape_alias of Longident.t * Path.t
  | Local_value_used_in_closure of Longident.t * escaping_context option

type error =
  | Missing_module of Location.t * Path.t * Path.t
  | Illegal_value_name of Location.t * string
  | Lookup_error of Location.t * t * lookup_error

exception Error of error

let error err = raise (Error err)

let lookup_error loc env err =
  error (Lookup_error(loc, env, err))

(* Forward references (initially [assert false]), filled in elsewhere to
   break module dependency cycles. *)
let same_constr = ref (fun _ _ _ -> assert false)

let check_well_formed_module = ref (fun _ -> assert false)
(* Helper to decide whether to report an identifier shadowing
   by some 'open'. For labels and constructors, we do not report
   if the two elements are from the same re-exported declaration.

   Later, one could also interpret some attributes on value and
   type declarations to silence the shadowing warnings. *)
(* Returns [Some kind] (the kind of item, for the warning text) when the
   shadowing should be reported, [None] otherwise. *)
let check_shadowing env = function
  | `Constructor (Some (cda1, cda2))
    when not (!same_constr env
                cda1.cda_description.cstr_res
                cda2.cda_description.cstr_res) ->
      Some "constructor"
  | `Label (Some (l1, l2))
    when not (!same_constr env l1.lbl_res l2.lbl_res) ->
      Some "label"
  | `Value (Some _) -> Some "value"
  | `Type (Some _) -> Some "type"
  | `Module (Some _) | `Component (Some _) -> Some "module"
  | `Module_type (Some _) -> Some "module type"
  | `Class (Some _) -> Some "class"
  | `Class_type (Some _) -> Some "class type"
  | `Constructor _ | `Label _
  | `Value None | `Type None | `Module None | `Module_type None
  | `Class None | `Class_type None | `Component None ->
      None
(* The empty environment. *)
let empty = {
  values = IdTbl.empty; constrs = TycompTbl.empty;
  labels = TycompTbl.empty; types = IdTbl.empty;
  modules = IdTbl.empty; modtypes = IdTbl.empty;
  classes = IdTbl.empty; cltypes = IdTbl.empty;
  summary = Env_empty; local_constraints = Path.Map.empty;
  flags = 0;
  functor_args = Ident.empty;
}

(* Set ([b = true]) or clear the "in signature" flag bit. *)
let in_signature b env =
  let flags =
    if b then env.flags lor in_signature_flag
    else env.flags land (lnot in_signature_flag)
  in
  {env with flags}

let is_in_signature env = env.flags land in_signature_flag <> 0

let has_local_constraints env =
  not (Path.Map.is_empty env.local_constraints)

let is_ident = function
    Pident _ -> true
  | Pdot _ | Papply _ -> false

(* Is this constructor an extension constructor? *)
let is_ext cda =
  match cda.cda_description with
  | {cstr_tag = Cstr_extension _} -> true
  | _ -> false

(* Is this an extension constructor whose path is a plain identifier
   (i.e. declared locally)? *)
let is_local_ext cda =
  match cda.cda_description with
  | {cstr_tag = Cstr_extension(p, _)} -> is_ident p
  | _ -> false

(* Identifiers bound in [env2] but not in [env1], across values,
   local extension constructors, modules and classes. *)
let diff env1 env2 =
  IdTbl.diff_keys env1.values env2.values @
  TycompTbl.diff_keys is_local_ext env1.constrs env2.constrs @
  IdTbl.diff_keys env1.modules env2.modules @
  IdTbl.diff_keys env1.classes env2.classes
(* Functions for use in "wrap" parameters in IdTbl *)
let wrap_identity x = x
let wrap_value vda = Val_bound vda
let wrap_module mda = Mod_local mda

(* Forward declarations *)

let components_of_module_maker' =
  ref ((fun _ -> assert false) :
         components_maker ->
       (module_components_repr, module_components_failure) result)

let components_of_functor_appl' =
  ref ((fun ~loc:_ ~f_path:_ ~f_comp:_ ~arg:_ _env -> assert false) :
         loc:Location.t -> f_path:Path.t -> f_comp:functor_components ->
       arg:Path.t -> t -> module_components)
(* Forward declaration checking that a functor application is well
   typed. *)
let check_functor_application =
  (* to be filled by Includemod.check_functor_application_in_path *)
  ref ((fun ~errors:_ ~loc:_
            ~lid_whole_app:_ ~f0_path:_ ~args:_
            ~arg_path:_ ~arg_mty:_ ~param_mty:_
            _env
         -> assert false) :
         errors:bool -> loc:Location.t ->
       lid_whole_app:Longident.t ->
       f0_path:Path.t -> args:(Path.t * Types.module_type) list ->
       arg_path:Path.t -> arg_mty:module_type -> param_mty:module_type ->
       t -> unit)
(* Forward declaration expanding module aliases in a module type. *)
let scrape_alias =
  (* to be filled with Mtype.scrape_alias *)
  ref ((fun _env _mty -> assert false) :
         t -> Subst.Lazy.modtype -> Subst.Lazy.modtype)
(* Build a module declaration with no attributes, a dummy location and
   a non-unique uid. *)
let md md_type =
  {md_type; md_attributes=[]; md_loc=Location.none
  ;md_uid = Uid.internal_not_actually_unique}

(* Print addresses *)

let rec print_address ppf = function
  | Aunit cu -> Format.fprintf ppf "%s" (Compilation_unit.full_path_as_string cu)
  | Alocal id -> Format.fprintf ppf "%s" (Ident.name id)
  | Adot(a, pos) -> Format.fprintf ppf "%a.[%i]" print_address a pos

(* The root of an address: either a compilation unit or a local
   identifier. *)
type address_head =
  | AHunit of Compilation_unit.t
  | AHlocal of Ident.t

let rec address_head = function
  | Aunit cu -> AHunit cu
  | Alocal id -> AHlocal id
  | Adot (a, _) -> address_head a
(* The name of the compilation unit currently compiled. *)
module Current_unit_name : sig
  val get : unit -> Compilation_unit.t option
  val set : Compilation_unit.t option -> unit
  val is : string -> bool
  val is_ident : Ident.t -> bool
  val is_path : Path.t -> bool
end = struct
  let get () =
    Compilation_unit.get_current ()
  let set comp_unit =
    Compilation_unit.set_current comp_unit
  let get_name () =
    Option.map Compilation_unit.name (get ())
  (* Does [name] name the unit currently being compiled? *)
  let is name =
    let current_name_string =
      Option.map Compilation_unit.Name.to_string (get_name ())
    in
    Option.equal String.equal current_name_string (Some name)
  (* Is [id] the global identifier of the current unit? *)
  let is_ident id =
    Ident.is_global id && is (Ident.name id)
  let is_path = function
    | Pident id -> is_ident id
    | Pdot _ | Papply _ -> false
end

let set_unit_name = Current_unit_name.set
let get_unit_name = Current_unit_name.get
(* Lookup a module by identity.  An unbound global identifier other than
   the current unit is assumed to be a persistent module. *)
let find_same_module id tbl =
  match IdTbl.find_same id tbl with
  | x -> x
  | exception Not_found
    when Ident.is_global id && not (Current_unit_name.is_ident id) ->
      Mod_persistent

(* Lookup a module by name.  An unbound name other than the current
   unit's is assumed to be a persistent module. *)
let find_name_module ~mark name tbl =
  match IdTbl.find_name wrap_module ~mark name tbl with
  | x -> x
  | exception Not_found when not (Current_unit_name.is name) ->
      let path = Pident(Ident.create_persistent name) in
      path, Mod_persistent
(* Record in [env] that the global (persistent) module [id] may be
   referenced.  [id] must be a global identifier. *)
let add_persistent_structure id env =
  if not (Ident.is_global id) then invalid_arg "Env.add_persistent_structure";
  if Current_unit_name.is_ident id then env
  else begin
    let material =
      (* This addition only observably changes the environment if it shadows a
         non-persistent module already in the environment.
         (See PR#9345) *)
      match
        IdTbl.find_name wrap_module ~mark:false (Ident.name id) env.modules
      with
      | exception Not_found | _, Mod_persistent -> false
      | _ -> true
    in
    let summary =
      if material then Env_persistent (env.summary, id)
      else env.summary
    in
    let modules =
      (* With [-no-alias-deps], non-material additions should not
         affect the environment at all. We should only observe the
         existence of a cmi when accessing components of the module.
         (See #9991). *)
      if material || not !Clflags.transparent_modules then
        IdTbl.add id Mod_persistent env.modules
      else
        env.modules
    in
    { env with modules; summary }
  end
(* Build a [module_components] whose contents will be computed lazily
   from the module type [mty] by [components_of_module_maker']. *)
let components_of_module ~alerts ~uid env ps path addr mty shape =
  {
    alerts;
    uid;
    comps = Lazy_backtrack.create {
      cm_env = env;
      cm_prefixing_subst = ps;
      cm_path = path;
      cm_addr = addr;
      cm_mty = mty;
      cm_shape = shape;
    }
  }
(* Turn a loaded cmi into the [module_data] of the corresponding
   persistent module.  When [freshen] is set, the signature is rescoped
   (a fresh substitution) before computing components. *)
let sign_of_cmi ~freshen { Persistent_env.Persistent_signature.cmi; _ } =
  let name = cmi.cmi_name in
  let sign = cmi.cmi_sign in
  let flags = cmi.cmi_flags in
  let id = Ident.create_persistent (Compilation_unit.name_as_string name) in
  let path = Pident id in
  (* Collect the unit's alert flags, last one wins. *)
  let alerts =
    List.fold_left (fun acc -> function Alerts s -> s | _ -> acc)
      Misc.Stdlib.String.Map.empty
      flags
  in
  let md =
    { md_type = Mty_signature sign;
      md_loc = Location.none;
      md_attributes = [];
      md_uid = Uid.of_compilation_unit_id name;
    }
  in
  let mda_address = Lazy_backtrack.create_forced (Aunit name) in
  let mda_declaration =
    Subst.(Lazy.module_decl Make_local identity (Lazy.of_module_decl md))
  in
  let mda_shape =
    Shape.for_persistent_unit (name |> Compilation_unit.full_path_as_string)
  in
  let mda_components =
    let mty = Subst.Lazy.of_modtype (Mty_signature sign) in
    let mty =
      if freshen then
        Subst.Lazy.modtype (Subst.Rescope (Path.scope path))
          Subst.identity mty
      else mty
    in
    components_of_module ~alerts ~uid:md.md_uid
      empty Subst.identity
      path mda_address mty mda_shape
  in
  {
    mda_declaration;
    mda_components;
    mda_address;
    mda_shape;
  }

let read_sign_of_cmi = sign_of_cmi ~freshen:true

let save_sign_of_cmi = sign_of_cmi ~freshen:false
(* The global table of loaded persistent modules, and thin wrappers over
   the [Persistent_env] operations on it. *)
let persistent_env : module_data Persistent_env.t ref =
  s_table Persistent_env.empty ()

let without_cmis f x =
  Persistent_env.without_cmis !persistent_env f x

let imports () = Persistent_env.imports !persistent_env

let import_crcs ~source crcs =
  Persistent_env.import_crcs !persistent_env ~source crcs

let read_pers_mod modname filename =
  Persistent_env.read !persistent_env read_sign_of_cmi modname filename

let find_pers_mod name =
  Persistent_env.find !persistent_env read_sign_of_cmi name

let check_pers_mod ~loc name =
  Persistent_env.check !persistent_env read_sign_of_cmi ~loc name

let crc_of_unit name =
  Persistent_env.crc_of_unit !persistent_env read_sign_of_cmi name

let is_imported_opaque modname =
  Persistent_env.is_imported_opaque !persistent_env modname

let register_import_as_opaque modname =
  Persistent_env.register_import_as_opaque !persistent_env modname
(* Clear the per-compilation tables tracking declarations and usage. *)
let reset_declaration_caches () =
  Types.Uid.Tbl.clear !value_declarations;
  Types.Uid.Tbl.clear !type_declarations;
  Types.Uid.Tbl.clear !module_declarations;
  Types.Uid.Tbl.clear !used_constructors;
  Types.Uid.Tbl.clear !used_labels;
  Types.Uid.Tbl.clear !uid_to_loc;
  ()

(* Full reset: forget the current unit, optionally the persistent
   environment, and the declaration caches. *)
let reset_cache ~preserve_persistent_env =
  Compilation_unit.set_current None;
  if not preserve_persistent_env then
    Persistent_env.clear !persistent_env;
  reset_declaration_caches ();
  ()

(* Partial reset used by the toplevel: only forget missing cmis and the
   declaration caches. *)
let reset_cache_toplevel () =
  Persistent_env.clear_missing !persistent_env;
  reset_declaration_caches ();
  ()
(* get_components *)

(* Force a module's lazy components, logging the forcing when cmis may
   not be loaded so it can be replayed later. *)
let get_components_res c =
  match Persistent_env.can_load_cmis !persistent_env with
  | Persistent_env.Can_load_cmis ->
      Lazy_backtrack.force !components_of_module_maker' c.comps
  | Persistent_env.Cannot_load_cmis log ->
      Lazy_backtrack.force_logged log !components_of_module_maker' c.comps

(* Like [get_components_res] but degrade failures to an empty
   structure. *)
let get_components c =
  match get_components_res c with
  | Error _ -> empty_structure
  | Ok c -> c
(* Module type of functor application *)

(* Module type of [p1(p2)]: substitute the formal parameter by [p2] in
   the functor's result type, memoized in [fcomp_subst_cache]. *)
let modtype_of_functor_appl fcomp p1 p2 =
  match fcomp.fcomp_res with
  | Mty_alias _ as mty -> mty
  | mty ->
      try
        Hashtbl.find fcomp.fcomp_subst_cache p2
      with Not_found ->
        let scope = Path.scope (Papply(p1, p2)) in
        let mty =
          let subst =
            match fcomp.fcomp_arg with
            | Unit
            | Named (None, _) -> Subst.identity
            | Named (Some param, _) -> Subst.add_module param p2 Subst.identity
          in
          Subst.modtype (Rescope scope) subst mty
        in
        Hashtbl.add fcomp.fcomp_subst_cache p2 mty;
        mty

(* Check a functor application, unless this argument has already been
   seen (present in the functor's component cache). *)
let check_functor_appl
      ~errors ~loc ~lid_whole_app ~f0_path ~args
      ~f_comp
      ~arg_path ~arg_mty ~param_mty
      env =
  if not (Hashtbl.mem f_comp.fcomp_cache arg_path) then
    !check_functor_application
      ~errors ~loc ~lid_whole_app ~f0_path ~args
      ~arg_path ~arg_mty ~param_mty
      env

let modname_of_ident id = Ident.name id |> Compilation_unit.Name.of_string
(* Lookup by identifier *)

let find_ident_module id env =
  match find_same_module id env.modules with
  | Mod_local data -> data
  | Mod_unbound _ -> raise Not_found
  | Mod_persistent -> find_pers_mod (id |> modname_of_ident)

let rec find_module_components path env =
  match path with
  | Pident id -> (find_ident_module id env).mda_components
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      (NameMap.find s sc.comp_modules).mda_components
  | Papply(f_path, arg) ->
      let f_comp = find_functor_components f_path env in
      let loc = Location.(in_file !input_name) in
      !components_of_functor_appl' ~loc ~f_path ~f_comp ~arg env

and find_structure_components path env =
  match get_components (find_module_components path env) with
  | Structure_comps c -> c
  | Functor_comps _ -> raise Not_found

and find_functor_components path env =
  match get_components (find_module_components path env) with
  | Functor_comps f -> f
  | Structure_comps _ -> raise Not_found

let find_module path env =
  match path with
  | Pident id ->
      let data = find_ident_module id env in
      Subst.Lazy.force_module_decl data.mda_declaration
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      let data = NameMap.find s sc.comp_modules in
      Subst.Lazy.force_module_decl data.mda_declaration
  | Papply(p1, p2) ->
      let fc = find_functor_components p1 env in
      md (modtype_of_functor_appl fc p1 p2)

(* [alias] selects the functor's raw result type instead of the
   substituted type of the application. *)
let find_module_lazy ~alias path env =
  match path with
  | Pident id ->
      let data = find_ident_module id env in
      data.mda_declaration
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      let data = NameMap.find s sc.comp_modules in
      data.mda_declaration
  | Papply(p1, p2) ->
      let fc = find_functor_components p1 env in
      let md =
        if alias then md (fc.fcomp_res)
        else md (modtype_of_functor_appl fc p1 p2)
      in
      Subst.Lazy.of_module_decl md
(* Lookup of descriptions by path; all of these raise [Not_found] on
   failure. *)

let find_value_full path env =
  match path with
  | Pident id -> begin
      match IdTbl.find_same id env.values with
      | Val_bound data -> data
      | Val_unbound _ -> raise Not_found
    end
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      NameMap.find s sc.comp_values
  | Papply _ -> raise Not_found

let find_type_full path env =
  match path with
  | Pident id -> IdTbl.find_same id env.types
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      NameMap.find s sc.comp_types
  | Papply _ -> raise Not_found

let find_modtype_lazy path env =
  match path with
  | Pident id -> (IdTbl.find_same id env.modtypes).mtda_declaration
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      (NameMap.find s sc.comp_modtypes).mtda_declaration
  | Papply _ -> raise Not_found

let find_modtype path env =
  Subst.Lazy.force_modtype_decl (find_modtype_lazy path env)

let find_class_full path env =
  match path with
  | Pident id -> IdTbl.find_same id env.classes
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      NameMap.find s sc.comp_classes
  | Papply _ -> raise Not_found

let find_cltype path env =
  match path with
  | Pident id -> (IdTbl.find_same id env.cltypes).cltda_declaration
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      (NameMap.find s sc.comp_cltypes).cltda_declaration
  | Papply _ -> raise Not_found

let find_value path env =
  (find_value_full path env).vda_description

let find_class path env =
  (find_class_full path env).clda_declaration

let find_ident_constructor id env =
  (TycompTbl.find_same id env.constrs).cda_description

let find_ident_label id env =
  TycompTbl.find_same id env.labels
(* [type_data] of the inlined record type carried by the constructor
   at [path]; fails if the constructor has no inlined record. *)
let type_of_cstr path = function
  | {cstr_inlined = Some decl; _} ->
      let labels =
        List.map snd (Datarepr.labels_of_type path decl)
      in
      begin match decl.type_kind with
      | Type_record (_, repr) ->
          {
            tda_declaration = decl;
            tda_descriptions = Type_record (labels, repr);
            tda_shape = Shape.leaf decl.type_uid;
          }
      | _ -> assert false
      end
  | _ -> assert false

(* [type_data] at [path], taking local constraints and constructor
   paths (inlined records) into account. *)
let find_type_data path env =
  match Path.constructor_typath path with
  | Regular p -> begin
      match Path.Map.find p env.local_constraints with
      | decl ->
          {
            tda_declaration = decl;
            tda_descriptions = Type_abstract;
            tda_shape = Shape.leaf decl.type_uid;
          }
      | exception Not_found -> find_type_full p env
    end
  | Cstr (ty_path, s) ->
      (* This case corresponds to an inlined record *)
      let tda =
        try find_type_full ty_path env
        with Not_found -> assert false
      in
      let cstr =
        begin match tda.tda_descriptions with
        | Type_variant (cstrs, _) -> begin
            try
              List.find (fun cstr -> cstr.cstr_name = s) cstrs
            with Not_found -> assert false
          end
        | Type_record _ | Type_abstract | Type_open -> assert false
        end
      in
      type_of_cstr path cstr
  | LocalExt id ->
      let cstr =
        try (TycompTbl.find_same id env.constrs).cda_description
        with Not_found -> assert false
      in
      type_of_cstr path cstr
  | Ext (mod_path, s) ->
      let comps =
        try find_structure_components mod_path env
        with Not_found -> assert false
      in
      let cstrs =
        try NameMap.find s comps.comp_constrs
        with Not_found -> assert false
      in
      (* Exactly one extension constructor must match. *)
      let exts = List.filter is_ext cstrs in
      match exts with
      | [cda] -> type_of_cstr path cda.cda_description
      | _ -> assert false

let find_type p env =
  (find_type_data p env).tda_declaration

let find_type_descrs p env =
  (find_type_data p env).tda_descriptions
let rec find_module_address path env =
  match path with
  | Pident id -> find_ident_module_address id env
  | Pdot(p, s) ->
      let c = find_structure_components p env in
      get_address (NameMap.find s c.comp_modules).mda_address
  | Papply _ -> raise Not_found

and find_ident_module_address id env =
  get_address (find_ident_module id env).mda_address

(* Force a lazy address: either a projection out of the parent's
   address, or the address of an aliased module looked up on demand. *)
and force_address = function
  | Projection { parent; pos } -> Adot(get_address parent, pos)
  | ModAlias { env; path } -> find_module_address path env

and get_address a =
  Lazy_backtrack.force force_address a

let find_value_address path env =
  get_address (find_value_full path env).vda_address

let find_class_address path env =
  get_address (find_class_full path env).clda_address

(* Address of the first constructor in the list that has one. *)
let rec get_constrs_address = function
  | [] -> raise Not_found
  | cda :: rest ->
      match cda.cda_address with
      | None -> get_constrs_address rest
      | Some a -> get_address a

let find_constructor_address path env =
  match path with
  | Pident id -> begin
      let cda = TycompTbl.find_same id env.constrs in
      match cda.cda_address with
      | None -> raise Not_found
      | Some addr -> get_address addr
    end
  | Pdot(p, s) ->
      let c = find_structure_components p env in
      get_constrs_address (NameMap.find s c.comp_constrs)
  | Papply _ ->
      raise Not_found

(* Look up the type whose name is "#" ^ the last component of [path]. *)
let find_hash_type path env =
  match path with
  | Pident id ->
      let name = "#" ^ Ident.name id in
      let _, tda =
        IdTbl.find_name wrap_identity ~mark:false name env.types
      in
      tda.tda_declaration
  | Pdot(p, s) ->
      let c = find_structure_components p env in
      let name = "#" ^ s in
      let tda = NameMap.find name c.comp_types in
      tda.tda_declaration
  | Papply _ ->
      raise Not_found

(* The set of probe names registered so far. *)
let probes = ref String.Set.empty
let reset_probes () = probes := String.Set.empty
let add_probe name = probes := String.Set.add name !probes
let has_probe name = String.Set.mem name !probes
(* The shape of the item of kind [ns] bound to [id] in [env]. *)
let find_shape env (ns : Shape.Sig_component_kind.t) id =
  match ns with
  | Type ->
      (IdTbl.find_same id env.types).tda_shape
  | Extension_constructor ->
      (TycompTbl.find_same id env.constrs).cda_shape
  | Value ->
      begin match IdTbl.find_same id env.values with
      | Val_bound x -> x.vda_shape
      | Val_unbound _ -> raise Not_found
      end
  | Module ->
      begin match IdTbl.find_same id env.modules with
      | Mod_local { mda_shape; _ } -> mda_shape
      | Mod_persistent -> Shape.for_persistent_unit (Ident.name id)
      | Mod_unbound _ ->
          (* Only present temporarily while approximating the environment for
             recursive modules.
             [find_shape] is only ever called after the environment gets
             properly populated. *)
          assert false
      | exception Not_found
        when Ident.is_global id && not (Current_unit_name.is_ident id) ->
          Shape.for_persistent_unit (Ident.name id)
      end
  | Module_type ->
      (IdTbl.find_same id env.modtypes).mtda_shape
  | Class ->
      (IdTbl.find_same id env.classes).clda_shape
  | Class_type ->
      (IdTbl.find_same id env.cltypes).cltda_shape
let shape_of_path ~namespace env =
Shape.of_path ~namespace ~find_shape:(find_shape env)
let shape_or_leaf uid = function
| None -> Shape.leaf uid
| Some shape -> shape
(* Compilation units recorded as required while compiling the current
   unit. *)
let required_globals = s_ref []
let reset_required_globals () = required_globals := []
let get_required_globals () = !required_globals
(* Add [cu] unless already recorded. *)
let add_required_unit cu =
  if not (List.exists (Compilation_unit.equal cu) !required_globals)
  then required_globals := cu :: !required_globals
(* Record the unit at the root of global [id]'s address, unless
   [-no-alias-deps] is in effect. *)
let add_required_ident id env =
  if not !Clflags.transparent_modules && Ident.is_global id then
    let address = find_ident_module_address id env in
    match address_head address with
    | AHlocal _ -> ()
    | AHunit cu -> add_required_unit cu
let add_required_global path env =
  add_required_ident (Path.head path) env
(* Normalize a module path by expanding module aliases.  In [lax] mode,
   lookup failures are tolerated and global identifiers are not
   expanded. *)
let rec normalize_module_path lax env = function
  | Pident id as path when lax && Ident.is_global id ->
      path (* fast path (avoids lookup) *)
  | Pdot (p, s) as path ->
      let p' = normalize_module_path lax env p in
      if p == p' then expand_module_path lax env path
      else expand_module_path lax env (Pdot(p', s))
  | Papply (p1, p2) as path ->
      let p1' = normalize_module_path lax env p1 in
      let p2' = normalize_module_path true env p2 in
      if p1 == p1' && p2 == p2' then expand_module_path lax env path
      else expand_module_path lax env (Papply(p1', p2'))
  | Pident _ as path ->
      expand_module_path lax env path

and expand_module_path lax env path =
  try match find_module_lazy ~alias:true path env with
    {mdl_type=MtyL_alias path1} ->
      let path' = normalize_module_path lax env path1 in
      (* Expanding an alias may hide the original global root; record it
         as required when not in lax/transparent mode. *)
      if not (lax || !Clflags.transparent_modules) then begin
        let id = Path.head path in
        if Ident.is_global_or_predef id && not (Ident.same id (Path.head path'))
        then add_required_global (Pident id) env
      end;
      path'
  | _ -> path
  with Not_found when lax
  || (match path with Pident id -> not (Ident.is_global id) | _ -> true) ->
    path

(* [oloc = None] selects lax normalization; otherwise a missing module
   is reported as an error at that location. *)
let normalize_module_path oloc env path =
  try normalize_module_path (oloc = None) env path
  with Not_found ->
    match oloc with None -> assert false
    | Some loc ->
        error (Missing_module(loc, path,
                              normalize_module_path true env path))

(* Normalize only the module prefix [p] of a path [p.s]. *)
let normalize_path_prefix oloc env path =
  match path with
    Pdot(p, s) ->
      let p2 = normalize_module_path oloc env p in
      if p == p2 then path else Pdot(p2, s)
  | Pident _ ->
      path
  | Papply _ ->
      assert false
(* Normalize the prefix of a type path.  (The extraction had corrupted
   the comment below into duplicated, undelimited text; restored.) *)
let normalize_type_path oloc env path =
  (* Inlined version of Path.is_constructor_typath:
     constructor type paths (i.e. paths pointing to an inline
     record argument of a constructor) are built as a regular
     type path followed by a capitalized constructor name. *)
  match path with
  | Pident _ ->
      path
  | Pdot(p, s) ->
      let p2 =
        if Path.is_uident s && not (Path.is_uident (Path.last p)) then
          (* Cstr M.t.C *)
          normalize_path_prefix oloc env p
        else
          (* Regular M.t, Ext M.C *)
          normalize_module_path oloc env p
      in
      if p == p2 then path else Pdot (p2, s)
  | Papply _ ->
      assert false
let rec normalize_modtype_path env path =
let path = normalize_path_prefix None env path in
expand_modtype_path env path
and expand_modtype_path env path =
match (find_modtype_lazy path env).mtdl_type with
| Some (MtyL_ident path) -> normalize_modtype_path env path
| _ | exception Not_found -> path
let find_module_lazy path env =
find_module_lazy ~alias:false path env
(* Find the manifest type associated to a type when appropriate:
- the type should be public or should have a private row,
- the type should have an associated manifest type. *)
let find_type_expansion path env =
let decl = find_type path env in
match decl.type_manifest with
| Some body when decl.type_private = Public
|| decl.type_kind <> Type_abstract
|| Btype.has_constr_row body ->
(decl.type_params, body, decl.type_expansion_scope)
(* The manifest type of Private abstract data types without
private row are still considered unknown to the type system.
Hence, this case is caught by the following clause that also handles
purely abstract data types without manifest type definition. *)
| _ -> raise Not_found
(* Find the manifest type information associated to a type, i.e.
the necessary information for the compiler's type-based optimisations.
In particular, the manifest type associated to a private abstract type
is revealed for the sake of compiler's type-based optimisations. *)
let find_type_expansion_opt path env =
let decl = find_type path env in
match decl.type_manifest with
(* The manifest type of Private abstract data types can still get
an approximation using their manifest type. *)
| Some body ->
(decl.type_params, body, decl.type_expansion_scope)
| _ -> raise Not_found
(* Expansion of a named module type; fails if it is abstract. *)
let find_modtype_expansion_lazy path env =
  match (find_modtype_lazy path env).mtdl_type with
  | Some mty -> mty
  | None -> raise Not_found
(* Forced (non-lazy) variant of [find_modtype_expansion_lazy]. *)
let find_modtype_expansion path env =
  Subst.Lazy.force_modtype (find_modtype_expansion_lazy path env)
(* Whether [path] denotes, or lives inside, a functor argument. *)
let rec is_functor_arg path env =
  match path with
  | Pident id -> begin
      match Ident.find_same id env.functor_args with
      | _ -> true
      | exception Not_found -> false
    end
  | Pdot (p, _s) -> is_functor_arg p env
  | Papply _ -> true
(* Copying types associated with values *)
let make_copy_of_types env0 =
  (* Memoize per-type copies so sharing within [env0] is preserved. *)
  let memo = Hashtbl.create 16 in
  let copy t =
    try
      Hashtbl.find memo (get_id t)
    with Not_found ->
      let t2 = Subst.type_expr Subst.identity t in
      Hashtbl.add memo (get_id t) t2;
      t2
  in
  let f = function
    | Val_unbound _ as entry -> entry
    | Val_bound vda ->
        let desc = vda.vda_description in
        let desc = { desc with val_type = copy desc.val_type } in
        Val_bound { vda with vda_description = desc }
  in
  let values =
    IdTbl.map f env0.values
  in
  (fun env ->
     (* The returned closure may only be applied to an environment whose
        value table is physically the one the copy was made from. *)
     if env.values != env0.values then fatal_error "Env.make_copy_of_types";
     {env with values; summary = Env_copy_types env.summary}
  )
(* Iter on an environment (ignoring the body of functors and
   not yet evaluated structures) *)
(* A pending continuation produced by [iter_env]; running it visits the
   components of one module. *)
type iter_cont = unit -> unit
(* Continuations queued during a traversal, tagged with the module path
   they correspond to; drained by [run_iter_cont]. *)
let iter_env_cont = ref []
(* Decide whether a traversal should descend into a module of type
   [mty]: aliases to global modules that were never looked up are
   skipped, so iteration does not force loading of unused units. *)
let rec scrape_alias_for_visit env mty =
  let open Subst.Lazy in
  match mty with
  | MtyL_alias path -> begin
      match path with
      | Pident id
        when Ident.is_global id
          && not (Persistent_env.looked_up !persistent_env
                    (id |> modname_of_ident)) ->
          false
      | path -> (* PR#6600: find_module may raise Not_found *)
          begin try
            scrape_alias_for_visit env (find_module_lazy path env).mdl_type
          with Not_found -> false
          end
    end
  | _ -> true
(* Apply [f] to every (path, component) pair from the environment's own
   table ([proj1]) and from structure components ([proj2]), recursing
   into submodules.  Recursion is not performed eagerly: a continuation
   per submodule is pushed onto [iter_env_cont]. *)
let iter_env wrap proj1 proj2 f env () =
  IdTbl.iter wrap (fun id x -> f (Pident id) x) (proj1 env);
  let rec iter_components path path' mcomps =
    let cont () =
      let visit =
        (* Do not force unevaluated components of modules we would skip. *)
        match Lazy_backtrack.get_arg mcomps.comps with
        | None -> true
        | Some { cm_mty; _ } ->
            scrape_alias_for_visit env cm_mty
      in
      if not visit then () else
      match get_components mcomps with
        Structure_comps comps ->
          NameMap.iter
            (fun s d -> f (Pdot (path, s)) (Pdot (path', s), d))
            (proj2 comps);
          NameMap.iter
            (fun s mda ->
               iter_components
                 (Pdot (path, s)) (Pdot (path', s)) mda.mda_components)
            comps.comp_modules
      | Functor_comps _ -> ()
    in iter_env_cont := (path, cont) :: !iter_env_cont
  in
  IdTbl.iter wrap_module
    (fun id (path, entry) ->
       match entry with
       | Mod_unbound _ -> ()
       | Mod_local data ->
           iter_components (Pident id) path data.mda_components
       | Mod_persistent ->
           (* Only visit persistent modules already in the cache. *)
           let modname = modname_of_ident id in
           match Persistent_env.find_in_cache !persistent_env modname with
           | None -> ()
           | Some data ->
               iter_components (Pident id) path data.mda_components)
    env.modules
(* Run the queued continuations from a previous traversal step and
   return, in queueing order, the continuations they queued in turn. *)
let run_iter_cont l =
  iter_env_cont := [];
  List.iter (fun c -> c ()) l;
  let queued = !iter_env_cont in
  iter_env_cont := [];
  List.rev queued
(* Iterate [f] over every type declaration reachable from an environment. *)
let iter_types f =
  iter_env wrap_identity (fun env -> env.types) (fun sc -> sc.comp_types)
    (fun p1 (p2, tda) -> f p1 (p2, tda.tda_declaration))
(* Cheap physical-equality check used to detect environment changes. *)
let same_types env1 env2 =
  env1.types == env2.types && env1.modules == env2.modules
(* Set of persistent compilation-unit names consulted so far. *)
let used_persistent () =
  Persistent_env.fold !persistent_env
    (fun s _m r -> Compilation_unit.Name.Set.add s r)
    Compilation_unit.Name.Set.empty
(* Look up component [s] through projection [proj] inside the structure
   components of [(p, mda)]; yields zero or one result. *)
let find_all_comps wrap proj s (p, mda) =
  match get_components mda.mda_components with
  | Functor_comps _ -> []
  | Structure_comps comps ->
      match NameMap.find s (proj comps) with
      | c -> [Pdot (p, s), wrap c]
      | exception Not_found -> []
(* All module components shadowed by [path]: every binding of the same
   name currently in scope, most recent first. *)
let rec find_shadowed_comps path env =
  match path with
  | Pident id ->
      IdTbl.find_all wrap_module (Ident.name id) env.modules
      |> List.filter_map
           (fun (p, data) ->
              match data with
              | Mod_local x -> Some (p, x)
              | Mod_unbound _ | Mod_persistent -> None)
  | Pdot (p, s) ->
      find_shadowed_comps p env
      |> List.concat_map
           (find_all_comps wrap_identity (fun comps -> comps.comp_modules) s)
  | Papply _ -> []
(* Generic version of [find_shadowed_comps] for an arbitrary namespace,
   selected by the projections [proj1] (environment) and [proj2]
   (structure components). *)
let find_shadowed wrap proj1 proj2 path env =
  match path with
  | Pident id ->
      IdTbl.find_all wrap (Ident.name id) (proj1 env)
  | Pdot (p, s) ->
      find_shadowed_comps p env
      |> List.concat_map (find_all_comps wrap proj2 s)
  | Papply _ -> []
(* Paths of all type declarations shadowed by [path]. *)
let find_shadowed_types path env =
  find_shadowed wrap_identity
    (fun env -> env.types) (fun comps -> comps.comp_types) path env
  |> List.map fst
(* Given a signature and a root path, prefix all idents in the signature
   by the root path and build the corresponding substitution. *)
let prefix_idents root prefixing_sub sg =
  let open Subst.Lazy in
  (* Walk the items in order, pairing each with its prefixed path and
     extending the substitution for the namespaces that later items may
     refer to (types, modules, module types). *)
  let rec prefix_idents root items_and_paths prefixing_sub =
    function
    | [] -> (List.rev items_and_paths, prefixing_sub)
    | SigL_value(id, _, _) as item :: rem ->
      let p = Pdot(root, Ident.name id) in
      prefix_idents root
        ((item, p) :: items_and_paths) prefixing_sub rem
    | SigL_type(id, td, rs, vis) :: rem ->
      let p = Pdot(root, Ident.name id) in
      prefix_idents root
        ((SigL_type(id, td, rs, vis), p) :: items_and_paths)
        (Subst.add_type id p prefixing_sub)
        rem
    | SigL_typext(id, ec, es, vis) :: rem ->
      let p = Pdot(root, Ident.name id) in
      (* we extend the substitution in case of an inlined record *)
      prefix_idents root
        ((SigL_typext(id, ec, es, vis), p) :: items_and_paths)
        (Subst.add_type id p prefixing_sub)
        rem
    | SigL_module(id, pres, md, rs, vis) :: rem ->
      let p = Pdot(root, Ident.name id) in
      prefix_idents root
        ((SigL_module(id, pres, md, rs, vis), p) :: items_and_paths)
        (Subst.add_module id p prefixing_sub)
        rem
    | SigL_modtype(id, mtd, vis) :: rem ->
      let p = Pdot(root, Ident.name id) in
      prefix_idents root
        ((SigL_modtype(id, mtd, vis), p) :: items_and_paths)
        (Subst.add_modtype id (Mty_ident p) prefixing_sub)
        rem
    | SigL_class(id, cd, rs, vis) :: rem ->
      (* pretend this is a type, cf. PR#6650 *)
      let p = Pdot(root, Ident.name id) in
      prefix_idents root
        ((SigL_class(id, cd, rs, vis), p) :: items_and_paths)
        (Subst.add_type id p prefixing_sub)
        rem
    | SigL_class_type(id, ctd, rs, vis) :: rem ->
      let p = Pdot(root, Ident.name id) in
      prefix_idents root
        ((SigL_class_type(id, ctd, rs, vis), p) :: items_and_paths)
        (Subst.add_type id p prefixing_sub)
        rem
  in
  let sg = Subst.Lazy.force_signature_once sg in
  prefix_idents root [] prefixing_sub sg
(* Compute structure descriptions *)
(* Cons [decl] onto the bucket for [id] in [tbl] (most recent first). *)
let add_to_tbl id decl tbl =
  let bucket =
    match NameMap.find_opt id tbl with
    | Some decls -> decls
    | None -> []
  in
  NameMap.add id (decl :: bucket) tbl
(* Stored as the failure of lazily-computed addresses of primitives. *)
let primitive_address_error =
  Invalid_argument "Primitives don't have addresses"
(* Address of a locally bound value; primitives have none. *)
let value_declaration_address (_ : t) id decl =
  match decl.val_kind with
  | Val_prim _ -> Lazy_backtrack.create_failed primitive_address_error
  | _ -> Lazy_backtrack.create_forced (Alocal id)
(* Extension constructors and classes are always locally addressed. *)
let extension_declaration_address (_ : t) id (_ : extension_constructor) =
  Lazy_backtrack.create_forced (Alocal id)
let class_declaration_address (_ : t) id (_ : class_declaration) =
  Lazy_backtrack.create_forced (Alocal id)
(* Address of a module binding.  An absent module must be an alias; its
   address is recovered later from the aliased path. *)
let module_declaration_address env id presence md =
  match presence with
  | Mp_absent -> begin
      let open Subst.Lazy in
      match md.mdl_type with
      | MtyL_alias path -> Lazy_backtrack.create (ModAlias {env; path})
      | _ -> assert false
    end
  | Mp_present ->
      Lazy_backtrack.create_forced (Alocal id)
(* Latin-1 identifier characters.  This should be kept in sync with the
   [identchar_latin1] character class in [lexer.mll]. *)
let is_identchar = function
  | 'a'..'z' | 'A'..'Z' | '0'..'9' | '_' | '\''
  | '\192'..'\214' | '\216'..'\246' | '\248'..'\255' ->
      true
  | _ ->
      false
(* Build the component tables of a module from its (scraped) module
   type: a structure yields per-namespace name maps; a functor yields
   eagerly prefixed argument/result types. *)
let rec components_of_module_maker
      {cm_env; cm_prefixing_subst;
       cm_path; cm_addr; cm_mty; cm_shape} : _ result =
  match !scrape_alias cm_env cm_mty with
    MtyL_signature sg ->
      let c =
        { comp_values = NameMap.empty;
          comp_constrs = NameMap.empty;
          comp_labels = NameMap.empty; comp_types = NameMap.empty;
          comp_modules = NameMap.empty; comp_modtypes = NameMap.empty;
          comp_classes = NameMap.empty; comp_cltypes = NameMap.empty }
      in
      let items_and_paths, sub =
        prefix_idents cm_path cm_prefixing_subst sg
      in
      let env = ref cm_env in
      (* Each addressable item gets the next projection slot out of the
         parent module's address. *)
      let pos = ref 0 in
      let next_address () =
        let addr : address_unforced =
          Projection { parent = cm_addr; pos = !pos }
        in
        incr pos;
        Lazy_backtrack.create addr
      in
      List.iter (fun ((item : Subst.Lazy.signature_item), path) ->
        match item with
          SigL_value(id, decl, _) ->
            let decl' = Subst.value_description sub decl in
            let addr =
              match decl.val_kind with
              | Val_prim _ -> Lazy_backtrack.create_failed primitive_address_error
              | _ -> next_address ()
            in
            let vda_shape = Shape.proj cm_shape (Shape.Item.value id) in
            let vda =
              { vda_description = decl'; vda_address = addr;
                vda_mode = Value_mode.global; vda_shape }
            in
            c.comp_values <- NameMap.add (Ident.name id) vda c.comp_values;
        | SigL_type(id, decl, _, _) ->
            let final_decl = Subst.type_declaration sub decl in
            Btype.set_static_row_name final_decl
              (Subst.type_path sub (Path.Pident id));
            (* Also register each constructor/label of the type under its
               own name, so component lookup can resolve them directly. *)
            let descrs =
              match decl.type_kind with
              | Type_variant (_,repr) ->
                  let cstrs = List.map snd
                    (Datarepr.constructors_of_type path final_decl
                       ~current_unit:(get_unit_name ()))
                  in
                  List.iter
                    (fun descr ->
                      let cda_shape = Shape.leaf descr.cstr_uid in
                      let cda = {
                        cda_description = descr;
                        cda_address = None;
                        cda_shape }
                      in
                      c.comp_constrs <-
                        add_to_tbl descr.cstr_name cda c.comp_constrs
                    ) cstrs;
                  Type_variant (cstrs, repr)
              | Type_record (_, repr) ->
                  let lbls = List.map snd
                    (Datarepr.labels_of_type path final_decl)
                  in
                  List.iter
                    (fun descr ->
                      c.comp_labels <-
                        add_to_tbl descr.lbl_name descr c.comp_labels)
                    lbls;
                  Type_record (lbls, repr)
              | Type_abstract -> Type_abstract
              | Type_open -> Type_open
            in
            let shape = Shape.proj cm_shape (Shape.Item.type_ id) in
            let tda =
              { tda_declaration = final_decl;
                tda_descriptions = descrs;
                tda_shape = shape; }
            in
            c.comp_types <- NameMap.add (Ident.name id) tda c.comp_types;
            env := store_type_infos ~tda_shape:shape id decl !env
        | SigL_typext(id, ext, _, _) ->
            let ext' = Subst.extension_constructor sub ext in
            let descr =
              Datarepr.extension_descr ~current_unit:(get_unit_name ()) path
                ext'
            in
            let addr = next_address () in
            let cda_shape =
              Shape.proj cm_shape (Shape.Item.extension_constructor id)
            in
            let cda =
              { cda_description = descr; cda_address = Some addr; cda_shape }
            in
            c.comp_constrs <- add_to_tbl (Ident.name id) cda c.comp_constrs
        | SigL_module(id, pres, md, _, _) ->
            let md' =
              (* The prefixed items get the same scope as [cm_path], which is
                 the prefix. *)
              Subst.Lazy.module_decl
                (Subst.Rescope (Path.scope cm_path)) sub md
            in
            let addr =
              match pres with
              | Mp_absent -> begin
                  match md.mdl_type with
                  | MtyL_alias path ->
                      Lazy_backtrack.create (ModAlias {env = !env; path})
                  | _ -> assert false
                end
              | Mp_present -> next_address ()
            in
            let alerts =
              Builtin_attributes.alerts_of_attrs md.mdl_attributes
            in
            let shape = Shape.proj cm_shape (Shape.Item.module_ id) in
            let comps =
              components_of_module ~alerts ~uid:md.mdl_uid !env
                sub path addr md.mdl_type shape
            in
            let mda =
              { mda_declaration = md';
                mda_components = comps;
                mda_address = addr;
                mda_shape = shape; }
            in
            c.comp_modules <-
              NameMap.add (Ident.name id) mda c.comp_modules;
            env :=
              store_module ~update_summary:false ~check:None
                id addr pres md shape !env
        | SigL_modtype(id, decl, _) ->
            let final_decl =
              (* The prefixed items get the same scope as [cm_path], which is
                 the prefix. *)
              Subst.Lazy.modtype_decl (Rescope (Path.scope cm_path))
                sub decl
            in
            let shape = Shape.proj cm_shape (Shape.Item.module_type id) in
            let mtda =
              { mtda_declaration = final_decl;
                mtda_shape = shape; }
            in
            c.comp_modtypes <-
              NameMap.add (Ident.name id) mtda c.comp_modtypes;
            env := store_modtype ~update_summary:false id decl shape !env
        | SigL_class(id, decl, _, _) ->
            let decl' = Subst.class_declaration sub decl in
            let addr = next_address () in
            let shape = Shape.proj cm_shape (Shape.Item.class_ id) in
            let clda =
              { clda_declaration = decl';
                clda_address = addr;
                clda_shape = shape; }
            in
            c.comp_classes <- NameMap.add (Ident.name id) clda c.comp_classes
        | SigL_class_type(id, decl, _, _) ->
            let decl' = Subst.cltype_declaration sub decl in
            let shape = Shape.proj cm_shape (Shape.Item.class_type id) in
            let cltda = { cltda_declaration = decl'; cltda_shape = shape } in
            c.comp_cltypes <-
              NameMap.add (Ident.name id) cltda c.comp_cltypes)
        items_and_paths;
      Ok (Structure_comps c)
  | MtyL_functor(arg, ty_res) ->
      let sub = cm_prefixing_subst in
      let scoping = Subst.Rescope (Path.scope cm_path) in
      let open Subst.Lazy in
      Ok (Functor_comps {
          (* fcomp_arg and fcomp_res must be prefixed eagerly, because
             they are interpreted in the outer environment *)
          fcomp_arg =
            (match arg with
             | Unit -> Unit
             | Named (param, ty_arg) ->
                 Named (param, force_modtype (modtype scoping sub ty_arg)));
          fcomp_res = force_modtype (modtype scoping sub ty_res);
          fcomp_shape = cm_shape;
          fcomp_cache = Hashtbl.create 17;
          fcomp_subst_cache = Hashtbl.create 17 })
  | MtyL_ident _ -> Error No_components_abstract
  | MtyL_alias p -> Error (No_components_alias p)
(* Insertion of bindings by identifier + path *)
(* Register a delayed "unused" check for [id], keyed on [uid] in [tbl],
   unless one is already registered or the warning is disabled. *)
and check_usage loc id uid warn tbl =
  if not loc.Location.loc_ghost &&
     Uid.for_actual_declaration uid &&
     Warnings.is_active (warn "")
  then begin
    let name = Ident.name id in
    if Types.Uid.Tbl.mem tbl uid then ()
    else let used = ref false in
      Types.Uid.Tbl.add tbl uid (fun () -> used := true);
      (* Names starting with '_' or '#' opt out of the warning. *)
      if not (name = "" || name.[0] = '_' || name.[0] = '#')
      then
        !add_delayed_check_forward
          (fun () -> if not !used then Location.prerr_warning loc (warn name))
  end;
and check_value_name name loc =
  (* Note: we could also check here general validity of the
     identifier, to protect against bad identifiers forged by -pp or
     -ppx preprocessors.  For now only reject a '#' appearing after the
     first character of a non-identifier-led name. *)
  if String.length name > 0 && not (is_identchar name.[0]) then
    match String.index_from_opt name 1 '#' with
    | Some _ -> error (Illegal_value_name (loc, name))
    | None -> ()
(* Record a value binding, after validating its name and registering the
   optional unused-value check. *)
and store_value ?check mode id addr decl shape env =
  check_value_name (Ident.name id) decl.val_loc;
  Builtin_attributes.mark_alerts_used decl.val_attributes;
  Option.iter
    (fun f -> check_usage decl.val_loc id decl.val_uid f !value_declarations)
    check;
  let vda =
    { vda_description = decl;
      vda_address = addr;
      vda_mode = mode;
      vda_shape = shape }
  in
  { env with
    values = IdTbl.add id (Val_bound vda) env.values;
    summary = Env_value(env.summary, id, decl) }
(* Record a constructor of [type_decl]; sets up a delayed
   unused-constructor warning keyed on the constructor's uid. *)
and store_constructor ~check type_decl type_id cstr_id cstr env =
  if check && not type_decl.type_loc.Location.loc_ghost
     && Warnings.is_active (Warnings.Unused_constructor ("", Unused))
  then begin
    let ty_name = Ident.name type_id in
    let name = cstr.cstr_name in
    let loc = cstr.cstr_loc in
    let k = cstr.cstr_uid in
    let priv = type_decl.type_private in
    if not (Types.Uid.Tbl.mem !used_constructors k) then begin
      let used = constructor_usages () in
      Types.Uid.Tbl.add !used_constructors k
        (add_constructor_usage used);
      (* A type whose name starts with '_' opts out of the warning. *)
      if not (ty_name = "" || ty_name.[0] = '_')
      then
        !add_delayed_check_forward
          (fun () ->
            Option.iter
              (fun complaint ->
                if not (is_in_signature env) then
                  Location.prerr_warning loc
                    (Warnings.Unused_constructor(name, complaint)))
              (constructor_usage_complaint ~rebind:false priv used));
    end;
  end;
  Builtin_attributes.mark_alerts_used cstr.cstr_attributes;
  Builtin_attributes.mark_warn_on_literal_pattern_used
    cstr.cstr_attributes;
  let cda_shape = Shape.leaf cstr.cstr_uid in
  { env with
    constrs =
      TycompTbl.add cstr_id
        { cda_description = cstr; cda_address = None; cda_shape } env.constrs;
  }
(* Record a record label of [type_decl]; sets up a delayed unused-field
   warning keyed on the label's uid. *)
and store_label ~check type_decl type_id lbl_id lbl env =
  if check && not type_decl.type_loc.Location.loc_ghost
     && Warnings.is_active (Warnings.Unused_field ("", Unused))
  then begin
    let ty_name = Ident.name type_id in
    let priv = type_decl.type_private in
    let name = lbl.lbl_name in
    let loc = lbl.lbl_loc in
    let mut = lbl.lbl_mut in
    let k = lbl.lbl_uid in
    if not (Types.Uid.Tbl.mem !used_labels k) then
      let used = label_usages () in
      Types.Uid.Tbl.add !used_labels k
        (add_label_usage used);
      (* Leading '_' on the type or label name opts out of the warning. *)
      if not (ty_name = "" || ty_name.[0] = '_' || name.[0] = '_')
      then !add_delayed_check_forward
          (fun () ->
            Option.iter
              (fun complaint ->
                if not (is_in_signature env) then
                  Location.prerr_warning
                    loc (Warnings.Unused_field(name, complaint)))
              (label_usage_complaint priv mut used))
  end;
  Builtin_attributes.mark_alerts_used lbl.lbl_attributes;
  { env with
    labels = TycompTbl.add lbl_id lbl env.labels;
  }
(* Record a type declaration together with all of its constructors or
   labels, threading the environment through each [store_*] call. *)
and store_type ~check id info shape env =
  let loc = info.type_loc in
  if check then
    check_usage loc id info.type_uid
      (fun s -> Warnings.Unused_type_declaration s)
      !type_declarations;
  let descrs, env =
    let path = Pident id in
    match info.type_kind with
    | Type_variant (_,repr) ->
        let constructors = Datarepr.constructors_of_type path info
            ~current_unit:(get_unit_name ())
        in
        Type_variant (List.map snd constructors, repr),
        List.fold_left
          (fun env (cstr_id, cstr) ->
            store_constructor ~check info id cstr_id cstr env)
          env constructors
    | Type_record (_, repr) ->
        let labels = Datarepr.labels_of_type path info in
        Type_record (List.map snd labels, repr),
        List.fold_left
          (fun env (lbl_id, lbl) ->
            store_label ~check info id lbl_id lbl env)
          env labels
    | Type_abstract -> Type_abstract, env
    | Type_open -> Type_open, env
  in
  let tda =
    { tda_declaration = info;
      tda_descriptions = descrs;
      tda_shape = shape }
  in
  Builtin_attributes.mark_alerts_used info.type_attributes;
  { env with
    types = IdTbl.add id tda env.types;
    summary = Env_type(env.summary, id, info) }
and store_type_infos ~tda_shape id info env =
  (* Simplified version of store_type that doesn't compute and store
     constructor and label infos, but simply record the arity and
     manifest-ness of the type.  Used in components_of_module to
     keep track of type abbreviations (e.g. type t = float) in the
     computation of label representations. *)
  let tda =
    {
      tda_declaration = info;
      (* Placeholder: descriptions are never consulted for these entries. *)
      tda_descriptions = Type_abstract;
      tda_shape
    }
  in
  { env with
    types = IdTbl.add id tda env.types;
    summary = Env_type(env.summary, id, info) }
(* Record an extension constructor; sets up a delayed unused-extension
   warning keyed on the constructor's uid. *)
and store_extension ~check ~rebind id addr ext shape env =
  let loc = ext.ext_loc in
  let cstr =
    Datarepr.extension_descr ~current_unit:(get_unit_name ()) (Pident id) ext
  in
  let cda =
    { cda_description = cstr;
      cda_address = Some addr;
      cda_shape = shape }
  in
  Builtin_attributes.mark_alerts_used ext.ext_attributes;
  Builtin_attributes.mark_alerts_used cstr.cstr_attributes;
  Builtin_attributes.mark_warn_on_literal_pattern_used cstr.cstr_attributes;
  if check && not loc.Location.loc_ghost &&
     Warnings.is_active (Warnings.Unused_extension ("", false, Unused))
  then begin
    let priv = ext.ext_private in
    (* Exceptions get a dedicated wording in the warning. *)
    let is_exception = Path.same ext.ext_type_path Predef.path_exn in
    let name = cstr.cstr_name in
    let k = cstr.cstr_uid in
    if not (Types.Uid.Tbl.mem !used_constructors k) then begin
      let used = constructor_usages () in
      Types.Uid.Tbl.add !used_constructors k
        (add_constructor_usage used);
      !add_delayed_check_forward
        (fun () ->
          Option.iter
            (fun complaint ->
              if not (is_in_signature env) then
                Location.prerr_warning loc
                  (Warnings.Unused_extension
                     (name, is_exception, complaint)))
            (constructor_usage_complaint ~rebind priv used))
    end;
  end;
  { env with
    constrs = TycompTbl.add id cda env.constrs;
    summary = Env_extension(env.summary, id, ext) }
(* Record a module binding, computing its components lazily; the summary
   update can be suppressed for internal (prefixing) insertions. *)
and store_module ?(update_summary=true) ~check
    id addr presence md shape env =
  let open Subst.Lazy in
  let loc = md.mdl_loc in
  Option.iter
    (fun f -> check_usage loc id md.mdl_uid f !module_declarations) check;
  let alerts = Builtin_attributes.alerts_of_attrs md.mdl_attributes in
  let comps =
    components_of_module ~alerts ~uid:md.mdl_uid
      env Subst.identity (Pident id) addr md.mdl_type shape
  in
  let mda =
    { mda_declaration = md;
      mda_components = comps;
      mda_address = addr;
      mda_shape = shape }
  in
  let summary =
    if not update_summary then env.summary
    else Env_module (env.summary, id, presence, force_module_decl md) in
  { env with
    modules = IdTbl.add id (Mod_local mda) env.modules;
    summary }
(* Record a module type binding (summary update optionally suppressed). *)
and store_modtype ?(update_summary=true) id info shape env =
  Builtin_attributes.mark_alerts_used info.Subst.Lazy.mtdl_attributes;
  let mtda = { mtda_declaration = info; mtda_shape = shape } in
  let summary =
    if not update_summary then env.summary
    else Env_modtype (env.summary, id, Subst.Lazy.force_modtype_decl info) in
  { env with
    modtypes = IdTbl.add id mtda env.modtypes;
    summary }
(* Record a class binding. *)
and store_class id addr desc shape env =
  Builtin_attributes.mark_alerts_used desc.cty_attributes;
  let clda =
    { clda_declaration = desc;
      clda_address = addr;
      clda_shape = shape; }
  in
  { env with
    classes = IdTbl.add id clda env.classes;
    summary = Env_class(env.summary, id, desc) }
(* Record a class type binding. *)
and store_cltype id desc shape env =
  Builtin_attributes.mark_alerts_used desc.clty_attributes;
  let cltda = { cltda_declaration = desc; cltda_shape = shape } in
  { env with
    cltypes = IdTbl.add id cltda env.cltypes;
    summary = Env_cltype(env.summary, id, desc) }
(* Compute the components of a functor application in a path. *)
let components_of_functor_appl ~loc ~f_path ~f_comp ~arg env =
  try
    (* Results are memoized per argument path in [fcomp_cache]. *)
    let c = Hashtbl.find f_comp.fcomp_cache arg in
    c
  with Not_found ->
    let p = Papply(f_path, arg) in
    let sub =
      match f_comp.fcomp_arg with
      | Unit
      | Named (None, _) -> Subst.identity
      | Named (Some param, _) -> Subst.add_module param arg Subst.identity
    in
    (* we have to apply eagerly instead of passing sub to [components_of_module]
       because of the call to [check_well_formed_module]. *)
    let mty = Subst.modtype (Rescope (Path.scope p)) sub f_comp.fcomp_res in
    let addr = Lazy_backtrack.create_failed Not_found in
    !check_well_formed_module env loc
      ("the signature of " ^ Path.name p) mty;
    let shape_arg =
      shape_of_path ~namespace:Shape.Sig_component_kind.Module env arg
    in
    let shape = Shape.app f_comp.fcomp_shape ~arg:shape_arg in
    let comps =
      components_of_module ~alerts:Misc.Stdlib.String.Map.empty
        ~uid:Uid.internal_not_actually_unique
        (*???*)
        env Subst.identity p addr (Subst.Lazy.of_modtype mty) shape
    in
    Hashtbl.add f_comp.fcomp_cache arg comps;
    comps
(* Define forward functions *)
(* Tie the recursive knot with the forward references declared earlier
   in this file. *)
let _ =
  components_of_functor_appl' := components_of_functor_appl;
  components_of_module_maker' := components_of_module_maker
(* Insertion of bindings by identifier *)
(* Mark [id] as a functor argument in scope (see [is_functor_arg]). *)
let add_functor_arg id env =
  {env with
   functor_args = Ident.add id () env.functor_args;
   summary = Env_functor_arg (env.summary, id)}
(* Bind a value; mode defaults to global and shape to a leaf of its uid. *)
let add_value ?check ?shape ?(mode = Value_mode.global) id desc env =
  let addr = value_declaration_address env id desc in
  let shape = shape_or_leaf desc.val_uid shape in
  store_value ?check mode id addr desc shape env
(* Insertion front-ends: each computes the address/shape for a binding
   and delegates to the corresponding [store_*] function. *)
let add_type ~check ?shape id info env =
  let shape = shape_or_leaf info.type_uid shape in
  store_type ~check id info shape env
and add_extension ~check ?shape ~rebind id ext env =
  let addr = extension_declaration_address env id ext in
  let shape = shape_or_leaf ext.ext_uid shape in
  store_extension ~check ~rebind id addr ext shape env
and add_module_declaration ?(arg=false) ?shape ~check id presence md env =
  (* Functor parameters inside signatures get a dedicated warning. *)
  let check =
    if not check then
      None
    else if arg && is_in_signature env then
      Some (fun s -> Warnings.Unused_functor_parameter s)
    else
      Some (fun s -> Warnings.Unused_module s)
  in
  let md = Subst.Lazy.of_module_decl md in
  let addr = module_declaration_address env id presence md in
  let shape = shape_or_leaf md.mdl_uid shape in
  let env = store_module ~check id addr presence md shape env in
  if arg then add_functor_arg id env else env
and add_module_declaration_lazy ~update_summary id presence md env =
  let addr = module_declaration_address env id presence md in
  let shape = Shape.leaf md.Subst.Lazy.mdl_uid in
  let env =
    store_module ~update_summary ~check:None id addr presence md shape env
  in
  env
and add_modtype ?shape id info env =
  let shape = shape_or_leaf info.mtd_uid shape in
  store_modtype id (Subst.Lazy.of_modtype_decl info) shape env
and add_modtype_lazy ~update_summary id info env =
  let shape = Shape.leaf info.Subst.Lazy.mtdl_uid in
  store_modtype ~update_summary id info shape env
and add_class ?shape id ty env =
  let addr = class_declaration_address env id ty in
  let shape = shape_or_leaf ty.cty_uid shape in
  store_class id addr ty shape env
and add_cltype ?shape id ty env =
  let shape = shape_or_leaf ty.clty_uid shape in
  store_cltype id ty shape env
(* Bind a module from a bare (lazy) module type, with no attributes,
   location, or meaningful uid. *)
let add_module_lazy ~update_summary id presence mty env =
  let md = Subst.Lazy.{mdl_type = mty;
                       mdl_attributes = [];
                       mdl_loc = Location.none;
                       mdl_uid = Uid.internal_not_actually_unique}
  in
  add_module_declaration_lazy ~update_summary id presence md env
(* Bind a module given only its module type (no usage check). *)
let add_module ?arg ?shape id presence mty env =
  add_module_declaration ~check:false ?arg ?shape id presence (md mty) env
(* Record a local (GADT-refinement style) constraint on [path]. *)
let add_local_type path info env =
  { env with
    local_constraints = Path.Map.add path info env.local_constraints }
(* Insertion of bindings by name *)
(* Create a fresh local ident for [name] and bind it as a value. *)
let enter_value ?check name desc env =
  let id = Ident.create_local name in
  let addr = value_declaration_address env id desc in
  let env = store_value ?check Value_mode.global id addr desc (Shape.leaf desc.val_uid) env in
  (id, env)
(* [enter_*]: create a fresh scoped ident for [name], bind it, and
   return both the ident and the extended environment. *)
let enter_type ~scope name info env =
  let id = Ident.create_scoped ~scope name in
  let env = store_type ~check:true id info (Shape.leaf info.type_uid) env in
  (id, env)
let enter_extension ~scope ~rebind name ext env =
  let id = Ident.create_scoped ~scope name in
  let addr = extension_declaration_address env id ext in
  let shape = Shape.leaf ext.ext_uid in
  let env = store_extension ~check:true ~rebind id addr ext shape env in
  (id, env)
let enter_module_declaration ~scope ?arg ?shape s presence md env =
  let id = Ident.create_scoped ~scope s in
  (id, add_module_declaration ?arg ?shape ~check:true id presence md env)
let enter_modtype ~scope name mtd env =
  let id = Ident.create_scoped ~scope name in
  let shape = Shape.leaf mtd.mtd_uid in
  let env = store_modtype id (Subst.Lazy.of_modtype_decl mtd) shape env in
  (id, env)
let enter_class ~scope name desc env =
  let id = Ident.create_scoped ~scope name in
  let addr = class_declaration_address env id desc in
  let env = store_class id addr desc (Shape.leaf desc.cty_uid) env in
  (id, env)
let enter_cltype ~scope name desc env =
  let id = Ident.create_scoped ~scope name in
  let env = store_cltype id desc (Shape.leaf desc.clty_uid) env in
  (id, env)
(* Convenience wrapper: enter a module from a bare module type. *)
let enter_module ~scope ?arg s presence mty env =
  enter_module_declaration ~scope ?arg s presence (md mty) env
(* Push a mode lock (resp. region lock) onto the value lookup table;
   lookups that cross it are checked against [mode]. *)
let add_lock ?escaping_context mode env =
  let lock = Lock { mode; escaping_context } in
  { env with values = IdTbl.add_lock lock env.values }
let add_region_lock env =
  { env with values = IdTbl.add_lock Region_lock env.values }
(* Insertion of all components of a signature *)
(* Add one signature item to [env], projecting its shape out of
   [mod_shape] (when given) and recording the projection in [map]. *)
let add_item (map, mod_shape) comp env =
  let proj_shape item =
    match mod_shape with
    | None -> map, None
    | Some mod_shape ->
        let shape = Shape.proj mod_shape item in
        Shape.Map.add map item shape, Some shape
  in
  match comp with
  | Sig_value(id, decl, _) ->
      let map, shape = proj_shape (Shape.Item.value id) in
      map, add_value ?shape id decl env
  | Sig_type(id, decl, _, _) ->
      let map, shape = proj_shape (Shape.Item.type_ id) in
      map, add_type ~check:false ?shape id decl env
  | Sig_typext(id, ext, _, _) ->
      let map, shape = proj_shape (Shape.Item.extension_constructor id) in
      map, add_extension ~check:false ?shape ~rebind:false id ext env
  | Sig_module(id, presence, md, _, _) ->
      let map, shape = proj_shape (Shape.Item.module_ id) in
      map, add_module_declaration ~check:false ?shape id presence md env
  | Sig_modtype(id, decl, _) ->
      let map, shape = proj_shape (Shape.Item.module_type id) in
      map, add_modtype ?shape id decl env
  | Sig_class(id, decl, _, _) ->
      let map, shape = proj_shape (Shape.Item.class_ id) in
      map, add_class ?shape id decl env
  | Sig_class_type(id, decl, _, _) ->
      let map, shape = proj_shape (Shape.Item.class_type id) in
      map, add_cltype ?shape id decl env
(* Fold [add_item] over a whole signature, accumulating the shape map. *)
let rec add_signature (map, mod_shape) sg env =
  match sg with
    [] -> map, env
  | comp :: rem ->
      let map, env = add_item (map, mod_shape) comp env in
      add_signature (map, mod_shape) rem env
(* Rescope [sg] to [scope] and add all of its items; returns the
   rescoped signature, the accumulated shape map, and the environment. *)
let enter_signature_and_shape ~scope ~parent_shape mod_shape sg env =
  let sg = Subst.signature (Rescope scope) Subst.identity sg in
  let shape, env = add_signature (parent_shape, mod_shape) sg env in
  sg, shape, env
let enter_signature ?mod_shape ~scope sg env =
  let sg, _, env =
    enter_signature_and_shape ~scope ~parent_shape:Shape.Map.empty
      mod_shape sg env
  in
  sg, env
(* Shadow the above so external callers must supply a module shape. *)
let enter_signature_and_shape ~scope ~parent_shape mod_shape sg env =
  enter_signature_and_shape ~scope ~parent_shape (Some mod_shape) sg env
(* Seal off the optional [?shape] argument: from here on these entry
   points always derive a leaf shape from the declaration's uid. *)
let add_value = add_value ?shape:None
let add_type = add_type ?shape:None
let add_extension = add_extension ?shape:None
let add_class = add_class ?shape:None
let add_cltype = add_cltype ?shape:None
let add_modtype = add_modtype ?shape:None
(* Shadow [add_signature] with a version that discards the shape map. *)
let add_signature sg env =
  let _, env = add_signature (Shape.Map.empty, None) sg env in
  env
(* Add "unbound" bindings *)
(* Bind [name] as explicitly unbound, recording [reason] so later
   lookups can produce a precise error. *)
let enter_unbound_value name reason env =
  let id = Ident.create_local name in
  { env with
    values = IdTbl.add id (Val_unbound reason) env.values;
    summary = Env_value_unbound(env.summary, name, reason) }
let enter_unbound_module name reason env =
  let id = Ident.create_local name in
  { env with
    modules = IdTbl.add id (Mod_unbound reason) env.modules;
    summary = Env_module_unbound(env.summary, name, reason) }
(* Open a signature path *)
(* Layer the structure components [comps], opened at [root], on top of
   every namespace table of [env0].  [slot] (if any) is notified when a
   lookup goes through the opened components. *)
let add_components slot root env0 comps =
  let open_tycomps tag cs tbl = TycompTbl.add_open slot tag root cs tbl in
  let open_ids tag cs tbl = IdTbl.add_open slot tag root cs tbl in
  { env0 with
    summary = Env_open (env0.summary, root);
    constrs =
      open_tycomps (fun x -> `Constructor x) comps.comp_constrs env0.constrs;
    labels =
      open_tycomps (fun x -> `Label x) comps.comp_labels env0.labels;
    values =
      open_ids (fun x -> `Value x) comps.comp_values env0.values;
    types =
      open_ids (fun x -> `Type x) comps.comp_types env0.types;
    modtypes =
      open_ids (fun x -> `Module_type x) comps.comp_modtypes env0.modtypes;
    classes =
      open_ids (fun x -> `Class x) comps.comp_classes env0.classes;
    cltypes =
      open_ids (fun x -> `Class_type x) comps.comp_cltypes env0.cltypes;
    modules =
      open_ids (fun x -> `Module x) comps.comp_modules env0.modules;
  }
(* Open the structure at [root], adding all its components to [env0];
   fails on unknown paths and on functors. *)
let open_signature slot root env0 : (_,_) result =
  match get_components_res (find_module_components root env0) with
  | Error _ -> Error `Not_found
  | exception Not_found -> Error `Not_found
  | Ok (Functor_comps _) -> Error `Functor
  | Ok (Structure_comps comps) ->
      Ok (add_components slot root env0 comps)
(* Undo the most recent [open] of [root], if it is still the newest
   entry of its kind in the summary; returns [None] otherwise. *)
let remove_last_open root env0 =
  let rec filter_summary summary =
    match summary with
      Env_empty -> raise Exit
    | Env_open (s, p) ->
        if Path.same p root then s else raise Exit
    | Env_value _
    | Env_type _
    | Env_extension _
    | Env_module _
    | Env_modtype _
    | Env_class _
    | Env_cltype _
    | Env_functor_arg _
    | Env_constraints _
    | Env_persistent _
    | Env_copy_types _
    | Env_value_unbound _
    | Env_module_unbound _ ->
        map_summary filter_summary summary
  in
  match filter_summary env0.summary with
  | summary ->
      (* Drop the open's layer from every namespace table. *)
      let rem_l tbl = TycompTbl.remove_last_open root tbl
      and rem tbl = IdTbl.remove_last_open root tbl in
      Some { env0 with
             summary;
             constrs = rem_l env0.constrs;
             labels = rem_l env0.labels;
             values = rem env0.values;
             types = rem env0.types;
             modtypes = rem env0.modtypes;
             classes = rem env0.classes;
             cltypes = rem env0.cltypes;
             modules = rem env0.modules; }
  | exception Exit ->
      None
(* Open a signature from a file *)
(* Open the persistent module called [name]. *)
let open_pers_signature name env =
  match open_signature None (Pident(Ident.create_persistent name)) env with
  | (Ok _ | Error `Not_found as res) -> res
  | Error `Functor -> assert false
  (* a compilation unit cannot refer to a functor *)
(* User-facing [open]: wraps the raw [open_signature] with delayed
   warnings for unused opens and for shadowed identifiers, labels and
   constructors. *)
let open_signature
    ?(used_slot = ref false)
    ?(loc = Location.none) ?(toplevel = false)
    ovf root env =
  let unused =
    match ovf with
    | Asttypes.Fresh -> Warnings.Unused_open (Path.name root)
    | Asttypes.Override -> Warnings.Unused_open_bang (Path.name root)
  in
  let warn_unused =
    Warnings.is_active unused
  and warn_shadow_id =
    Warnings.is_active (Warnings.Open_shadow_identifier ("", ""))
  and warn_shadow_lc =
    Warnings.is_active (Warnings.Open_shadow_label_constructor ("",""))
  in
  if not toplevel && not loc.Location.loc_ghost
     && (warn_unused || warn_shadow_id || warn_shadow_lc)
  then begin
    let used = used_slot in
    if warn_unused then
      !add_delayed_check_forward
        (fun () ->
           if not !used then begin
             used := true;
             Location.prerr_warning loc unused
           end
        );
    let shadowed = ref [] in
    (* [slot] fires on each lookup through the open: it marks the open
       used and reports each shadowing at most once per (kind, name). *)
    let slot s b =
      begin match check_shadowing env b with
      | Some kind when
          ovf = Asttypes.Fresh && not (List.mem (kind, s) !shadowed) ->
          shadowed := (kind, s) :: !shadowed;
          let w =
            match kind with
            | "label" | "constructor" ->
                Warnings.Open_shadow_label_constructor (kind, s)
            | _ -> Warnings.Open_shadow_identifier (kind, s)
          in
          Location.prerr_warning loc w
      | _ -> ()
      end;
      used := true
    in
    open_signature (Some slot) root env
  end
  else open_signature None root env
(* Read a signature from a file *)
(* Read the interface of [modname] from [filename] and return its
   signature.  A persistent interface is always a plain signature; the
   other module-type forms cannot occur here. *)
let read_signature modname filename =
  let mda = read_pers_mod (Compilation_unit.name modname) filename in
  let md = Subst.Lazy.force_module_decl mda.mda_declaration in
  match md.md_type with
  | Mty_signature sg -> sg
  | Mty_ident _ | Mty_functor _ | Mty_alias _ -> assert false
(* True iff [c] may occur in an identifier under the Latin-1 convention:
   ASCII letters, digits, '_', '\'', and the accented Latin-1 letter
   ranges (192-255 excluding 215 and 247). *)
let is_identchar_latin1 c =
  match c with
  | 'a'..'z' | 'A'..'Z' -> true
  | '0'..'9' | '_' | '\'' -> true
  | '\192'..'\214' | '\216'..'\246' | '\248'..'\255' -> true
  | _ -> false
(* If [fn] is a ".cmi" file whose capitalized basename is a valid
   Latin-1 identifier, return that unit name; otherwise [None]. *)
let unit_name_of_filename fn =
  if not (String.equal (Filename.extension fn) ".cmi") then None
  else
    let unit = String.capitalize_ascii (Filename.remove_extension fn) in
    if String.for_all is_identchar_latin1 unit then Some unit
    else None
(* The set of compilation-unit names available as .cmi files in [dir]. *)
let persistent_structures_of_dir dir =
  let units =
    List.filter_map unit_name_of_filename (Load_path.Dir.files dir)
  in
  String.Set.of_list units
(* Save a signature to a file *)
(* Save signature [sg] of unit [modname] into [filename], after applying
   [cmi_transform] to the constructed cmi.  Returns the cmi that was
   saved.  Note the order of side effects: abbreviations are cleaned and
   the saving substitution reset before the signature is substituted. *)
let save_signature_with_transform cmi_transform ~alerts sg modname filename =
  Btype.cleanup_abbrev ();
  Subst.reset_for_saving ();
  let sg = Subst.signature Make_local (Subst.for_saving Subst.identity) sg in
  let cmi =
    Persistent_env.make_cmi !persistent_env modname sg alerts
    |> cmi_transform in
  let pm = save_sign_of_cmi
      { Persistent_env.Persistent_signature.cmi; filename } in
  Persistent_env.save_cmi !persistent_env
    { Persistent_env.Persistent_signature.filename; cmi } pm;
  cmi
(* Save a signature unchanged. *)
let save_signature ~alerts sg modname filename =
  save_signature_with_transform (fun cmi -> cmi)
    ~alerts sg modname filename

(* Save a signature, overriding the recorded imports (CRCs) with
   [imports]. *)
let save_signature_with_imports ~alerts sg modname filename imports =
  let with_imports cmi = { cmi with cmi_crcs = imports } in
  save_signature_with_transform with_imports
    ~alerts sg modname filename
(* Make the initial environment *)
(* The two initial environments (safe-string / unsafe-string variants),
   populated with the predefined types and extension constructors. *)
let (initial_safe_string, initial_unsafe_string) =
  Predef.build_initial_env
    (add_type ~check:false)
    (add_extension ~check:false ~rebind:false)
    empty
(* Tracking usage *)
(* Usage tracking: each [mark_*_used] function looks up the callback
   registered for a uid in the corresponding table and invokes it.
   A missing entry means nothing is tracking that declaration. *)

let mark_module_used uid =
  match Types.Uid.Tbl.find !module_declarations uid with
  | mark -> mark ()
  | exception Not_found -> ()

(* Module types are currently not tracked. *)
let mark_modtype_used _uid = ()

let mark_value_used uid =
  match Types.Uid.Tbl.find !value_declarations uid with
  | mark -> mark ()
  | exception Not_found -> ()

let mark_type_used uid =
  match Types.Uid.Tbl.find !type_declarations uid with
  | mark -> mark ()
  | exception Not_found -> ()

(* Mark the type at [path] used, if it is still bound in [env]. *)
let mark_type_path_used env path =
  match find_type path env with
  | decl -> mark_type_used decl.type_uid
  | exception Not_found -> ()

(* Constructors, extensions and labels additionally record how they were
   used ([usage]). *)
let mark_constructor_used usage cd =
  match Types.Uid.Tbl.find !used_constructors cd.cd_uid with
  | mark -> mark usage
  | exception Not_found -> ()

let mark_extension_used usage ext =
  match Types.Uid.Tbl.find !used_constructors ext.ext_uid with
  | mark -> mark usage
  | exception Not_found -> ()

let mark_label_used usage ld =
  match Types.Uid.Tbl.find !used_labels ld.ld_uid with
  | mark -> mark usage
  | exception Not_found -> ()
(* Marking a constructor description also marks its defining type path. *)
let mark_constructor_description_used usage env cstr =
  let ty_path = Btype.cstr_type_path cstr in
  mark_type_path_used env ty_path;
  match Types.Uid.Tbl.find !used_constructors cstr.cstr_uid with
  | mark -> mark usage
  | exception Not_found -> ()

(* Marking a label description also marks its record type path; the
   label's result type is necessarily a [Tconstr]. *)
let mark_label_description_used usage env lbl =
  let ty_path =
    match get_desc lbl.lbl_res with
    | Tconstr(path, _, _) -> path
    | _ -> assert false
  in
  mark_type_path_used env ty_path;
  match Types.Uid.Tbl.find !used_labels lbl.lbl_uid with
  | mark -> mark usage
  | exception Not_found -> ()

(* Classes and class types share the type-declaration tracking table. *)
let mark_class_used uid =
  match Types.Uid.Tbl.find !type_declarations uid with
  | mark -> mark ()
  | exception Not_found -> ()

let mark_cltype_used uid =
  match Types.Uid.Tbl.find !type_declarations uid with
  | mark -> mark ()
  | exception Not_found -> ()

(* Register the callback invoked when the value [vd] is first used. *)
let set_value_used_callback vd callback =
  Types.Uid.Tbl.add !value_declarations vd.val_uid callback

(* Register a used-callback for type [td], chaining it with any
   previously registered one (only for uids of actual declarations). *)
let set_type_used_callback td callback =
  if Uid.for_actual_declaration td.type_uid then
    let old =
      try Types.Uid.Tbl.find !type_declarations td.type_uid
      with Not_found -> ignore
    in
    Types.Uid.Tbl.replace !type_declarations td.type_uid
      (fun () -> callback old)
(* Lookup by name *)
(* Either raise a located lookup error (when the caller asked for error
   reporting) or fall back to the bare [Not_found] protocol. *)
let may_lookup_error report_errors loc env err =
  if report_errors then lookup_error loc env err
  else raise Not_found
(* Report the lookup error for a module bound as unbound.  The only
   reason currently is an illegal forward reference inside a recursive
   module definition. *)
let report_module_unbound ~errors ~loc env reason =
  match reason with
  | Mod_unbound_illegal_recursion ->
      (* see #5965 *)
      may_lookup_error errors loc env Illegal_reference_to_recursive_module
(* Report the lookup error corresponding to a value that is present in
   the table but flagged unbound (masked by a class construct, or a
   ghost recursive binding). *)
let report_value_unbound ~errors ~loc env reason lid =
  match reason with
  | Val_unbound_instance_variable ->
      may_lookup_error errors loc env (Masked_instance_variable lid)
  | Val_unbound_self ->
      may_lookup_error errors loc env (Masked_self_variable lid)
  | Val_unbound_ancestor ->
      may_lookup_error errors loc env (Masked_ancestor_variable lid)
  | Val_unbound_ghost_recursive rloc ->
      let show_hint =
        (* Only display the "missing rec" hint for non-ghost code *)
        not loc.Location.loc_ghost
        && not rloc.Location.loc_ghost
      in
      let hint =
        if show_hint then Missing_rec rloc else No_hint
      in
      may_lookup_error errors loc env (Unbound_value(lid, hint))
(* The [use_*] functions run when a successfully looked-up item is
   actually used ([~use:true]): they mark it used and emit any alert
   attached to its declaration. *)

let use_module ~use ~loc path mda =
  if use then begin
    let comps = mda.mda_components in
    mark_module_used comps.uid;
    (* Emit each alert attached to the module, prefixing the module
       path. *)
    Misc.Stdlib.String.Map.iter
      (fun kind message ->
         let message = if message = "" then "" else "\n" ^ message in
         Location.alert ~kind loc
           (Printf.sprintf "module %s%s" (Path.name path) message)
      )
      comps.alerts
  end

let use_value ~use ~loc path vda =
  if use then begin
    let desc = vda.vda_description in
    mark_value_used desc.val_uid;
    Builtin_attributes.check_alerts loc desc.val_attributes
      (Path.name path)
  end

let use_type ~use ~loc path tda =
  if use then begin
    let decl = tda.tda_declaration in
    mark_type_used decl.type_uid;
    Builtin_attributes.check_alerts loc decl.type_attributes
      (Path.name path)
  end

let use_modtype ~use ~loc path desc =
  let open Subst.Lazy in
  if use then begin
    mark_modtype_used desc.mtdl_uid;
    Builtin_attributes.check_alerts loc desc.mtdl_attributes
      (Path.name path)
  end

let use_class ~use ~loc path clda =
  if use then begin
    let desc = clda.clda_declaration in
    mark_class_used desc.cty_uid;
    Builtin_attributes.check_alerts loc desc.cty_attributes
      (Path.name path)
  end

let use_cltype ~use ~loc path desc =
  if use then begin
    mark_cltype_used desc.clty_uid;
    Builtin_attributes.check_alerts loc desc.clty_attributes
      (Path.name path)
  end

(* Labels additionally check the deprecated-mutable alert when the usage
   mutates the field. *)
let use_label ~use ~loc usage env lbl =
  if use then begin
    mark_label_description_used usage env lbl;
    Builtin_attributes.check_alerts loc lbl.lbl_attributes lbl.lbl_name;
    if is_mutating_label_usage usage then
      Builtin_attributes.check_deprecated_mutable loc lbl.lbl_attributes
        lbl.lbl_name
  end

let use_constructor_desc ~use ~loc usage env cstr =
  if use then begin
    mark_constructor_description_used usage env cstr;
    Builtin_attributes.check_alerts loc cstr.cstr_attributes cstr.cstr_name
  end

let use_constructor ~use ~loc usage env cda =
  use_constructor_desc ~use ~loc usage env cda.cda_description
(* GADT selecting whether a persistent module must really be loaded
   ([Load] yields the module data) or only checked to exist
   ([Don't_load] yields unit). *)
type _ load =
  | Load : module_data load
  | Don't_load : unit load

(* Look up an unqualified module name [s].  Persistent modules are
   handled according to [load]; unbound entries are reported. *)
let lookup_ident_module (type a) (load : a load) ~errors ~use ~loc s env =
  let path, data =
    match find_name_module ~mark:use s env.modules with
    | res -> res
    | exception Not_found ->
        may_lookup_error errors loc env (Unbound_module (Lident s))
  in
  match data with
  | Mod_local mda -> begin
      use_module ~use ~loc path mda;
      match load with
      | Load -> path, (mda : a)
      | Don't_load -> path, (() : a)
    end
  | Mod_unbound reason ->
      report_module_unbound ~errors ~loc env reason
  | Mod_persistent -> begin
      let name = s |> Compilation_unit.Name.of_string in
      match load with
      | Don't_load ->
          (* Existence check only; the cmi is not read. *)
          check_pers_mod ~loc name;
          path, (() : a)
      | Load -> begin
          match find_pers_mod name with
          | mda ->
              use_module ~use ~loc path mda;
              path, (mda : a)
          | exception Not_found ->
              may_lookup_error errors loc env (Unbound_module (Lident s))
        end
    end
(* Thread [vmode] through the mode [locks] crossed between a use site and
   the binding of [id]: a region lock weakens local to regional, and a
   closure lock requires [vmode] to submode into the closure's mode,
   otherwise the value would escape its region. *)
let lock_mode ~errors ~loc env id vmode locks =
  List.fold_left
    (fun vmode lock ->
       match lock with
       | Region_lock -> Value_mode.local_to_regional vmode
       | Lock {mode; escaping_context} ->
           match Value_mode.submode vmode (Value_mode.of_alloc mode) with
           | Ok () -> vmode
           | Error _ ->
               may_lookup_error errors loc env
                 (Local_value_used_in_closure (id, escaping_context)))
    vmode locks
(* Look up an unqualified value name; also computes the value's mode at
   the use site from the locks crossed. *)
let lookup_ident_value ~errors ~use ~loc name env =
  match IdTbl.find_name_and_modes wrap_value ~mark:use name env.values with
  | (path, locks, Val_bound vda) ->
      let mode = lock_mode ~errors ~loc env (Lident name) vda.vda_mode locks in
      use_value ~use ~loc path vda;
      path, vda.vda_description, mode
  | (_, _, Val_unbound reason) ->
      report_value_unbound ~errors ~loc env reason (Lident name)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_value (Lident name, No_hint))
(* Unqualified lookups for the remaining namespaces: each finds the name
   in the corresponding table, records the use, and reports an
   Unbound_* error on failure. *)

let lookup_ident_type ~errors ~use ~loc s env =
  match IdTbl.find_name wrap_identity ~mark:use s env.types with
  | (path, data) as res ->
      use_type ~use ~loc path data;
      res
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_type (Lident s))

let lookup_ident_modtype ~errors ~use ~loc s env =
  match IdTbl.find_name wrap_identity ~mark:use s env.modtypes with
  | (path, data) ->
      use_modtype ~use ~loc path data.mtda_declaration;
      (path, data.mtda_declaration)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_modtype (Lident s))

let lookup_ident_class ~errors ~use ~loc s env =
  match IdTbl.find_name wrap_identity ~mark:use s env.classes with
  | (path, clda) ->
      use_class ~use ~loc path clda;
      path, clda.clda_declaration
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_class (Lident s))

let lookup_ident_cltype ~errors ~use ~loc s env =
  match IdTbl.find_name wrap_identity ~mark:use s env.cltypes with
  | path, cltda ->
      use_cltype ~use ~loc path cltda.cltda_declaration;
      path, cltda.cltda_declaration
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_cltype (Lident s))

(* Labels and constructors may be ambiguous: all candidates are returned,
   each paired with a closure that records the use when the checker
   commits to that candidate. *)
let lookup_all_ident_labels ~errors ~use ~loc usage s env =
  match TycompTbl.find_all ~mark:use s env.labels with
  | [] -> may_lookup_error errors loc env (Unbound_label (Lident s))
  | lbls -> begin
      List.map
        (fun (lbl, use_fn) ->
           let use_fn () =
             use_label ~use ~loc usage env lbl;
             use_fn ()
           in
           (lbl, use_fn))
        lbls
    end

let lookup_all_ident_constructors ~errors ~use ~loc usage s env =
  match TycompTbl.find_all ~mark:use s env.constrs with
  | [] -> may_lookup_error errors loc env (Unbound_constructor (Lident s))
  | cstrs ->
      List.map
        (fun (cda, use_fn) ->
           let use_fn () =
             use_constructor ~use ~loc usage env cda;
             use_fn ()
           in
           (cda.cda_description, use_fn))
        cstrs
(* Mutually recursive core of module lookup: resolving a longident to a
   module path and its components, including functor applications. *)

(* Resolve [lid] to a path and its (possibly functor) components. *)
let rec lookup_module_components ~errors ~use ~loc lid env =
  match lid with
  | Lident s ->
      let path, data = lookup_ident_module Load ~errors ~use ~loc s env in
      path, data.mda_components
  | Ldot(l, s) ->
      let path, data = lookup_dot_module ~errors ~use ~loc l s env in
      path, data.mda_components
  | Lapply _ as lid ->
      let f_path, f_comp, arg = lookup_apply ~errors ~use ~loc lid env in
      let comps =
        !components_of_functor_appl' ~loc ~f_path ~f_comp ~arg env in
      Papply (f_path, arg), comps

(* As above, but require structure components (error on functors,
   abstract module types, and unscrapable aliases). *)
and lookup_structure_components ~errors ~use ~loc lid env =
  let path, comps = lookup_module_components ~errors ~use ~loc lid env in
  match get_components_res comps with
  | Ok (Structure_comps comps) -> path, comps
  | Ok (Functor_comps _) ->
      may_lookup_error errors loc env (Functor_used_as_structure lid)
  | Error No_components_abstract ->
      may_lookup_error errors loc env (Abstract_used_as_structure lid)
  | Error (No_components_alias p) ->
      may_lookup_error errors loc env (Cannot_scrape_alias(lid, p))

(* Require applicative functor components, returning them with the
   parameter's module type. *)
and get_functor_components ~errors ~loc lid env comps =
  match get_components_res comps with
  | Ok (Functor_comps fcomps) -> begin
      match fcomps.fcomp_arg with
      | Unit -> (* PR#7611 *)
          may_lookup_error errors loc env (Generative_used_as_applicative lid)
      | Named (_, arg) -> fcomps, arg
    end
  | Ok (Structure_comps _) ->
      may_lookup_error errors loc env (Structure_used_as_functor lid)
  | Error No_components_abstract ->
      may_lookup_error errors loc env (Abstract_used_as_functor lid)
  | Error (No_components_alias p) ->
      may_lookup_error errors loc env (Cannot_scrape_alias(lid, p))

(* Peel a (possibly nested) application F(A1)(A2)... into the functor
   head and the list of arguments, innermost application first in the
   returned list. *)
and lookup_all_args ~errors ~use ~loc lid0 env =
  let rec loop_lid_arg args = function
    | Lident _ | Ldot _ as f_lid ->
        (f_lid, args)
    | Lapply (f_lid, arg_lid) ->
        let arg_path, arg_md = lookup_module ~errors ~use ~loc arg_lid env in
        loop_lid_arg ((f_lid,arg_path,arg_md.md_type)::args) f_lid
  in
  loop_lid_arg [] lid0

(* Resolve a functor application, checking each argument against the
   corresponding parameter type.  Returns the functor path, the final
   functor components, and the last argument path. *)
and lookup_apply ~errors ~use ~loc lid0 env =
  let f0_lid, args0 = lookup_all_args ~errors ~use ~loc lid0 env in
  let args_for_errors = List.map (fun (_,p,mty) -> (p,mty)) args0 in
  let f0_path, f0_comp =
    lookup_module_components ~errors ~use ~loc f0_lid env
  in
  let check_one_apply ~errors ~loc ~f_lid ~f_comp ~arg_path ~arg_mty env =
    let f_comp, param_mty =
      get_functor_components ~errors ~loc f_lid env f_comp
    in
    check_functor_appl
      ~errors ~loc ~lid_whole_app:lid0
      ~f0_path ~args:args_for_errors ~f_comp
      ~arg_path ~arg_mty ~param_mty
      env;
    arg_path, f_comp
  in
  let rec check_apply ~path:f_path ~comp:f_comp = function
    | [] -> invalid_arg "Env.lookup_apply: empty argument list"
    | [ f_lid, arg_path, arg_mty ] ->
        let arg_path, comps =
          check_one_apply ~errors ~loc ~f_lid ~f_comp
            ~arg_path ~arg_mty env
        in
        f_path, comps, arg_path
    | (f_lid, arg_path, arg_mty) :: args ->
        let arg_path, f_comp =
          check_one_apply ~errors ~loc ~f_lid ~f_comp
            ~arg_path ~arg_mty env
        in
        let comp =
          !components_of_functor_appl' ~loc ~f_path ~f_comp ~arg:arg_path env
        in
        let path = Papply (f_path, arg_path) in
        check_apply ~path ~comp args
  in
  check_apply ~path:f0_path ~comp:f0_comp args0

(* Resolve [lid] to a path and (forced) module declaration. *)
and lookup_module ~errors ~use ~loc lid env =
  match lid with
  | Lident s ->
      let path, data = lookup_ident_module Load ~errors ~use ~loc s env in
      let md = Subst.Lazy.force_module_decl data.mda_declaration in
      path, md
  | Ldot(l, s) ->
      let path, data = lookup_dot_module ~errors ~use ~loc l s env in
      let md = Subst.Lazy.force_module_decl data.mda_declaration in
      path, md
  | Lapply _ as lid ->
      let path_f, comp_f, path_arg = lookup_apply ~errors ~use ~loc lid env in
      let md = md (modtype_of_functor_appl comp_f path_f path_arg) in
      Papply(path_f, path_arg), md

(* Look up submodule [s] of module [l]. *)
and lookup_dot_module ~errors ~use ~loc l s env =
  let p, comps = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_modules with
  | mda ->
      let path = Pdot(p, s) in
      use_module ~use ~loc path mda;
      (path, mda)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_module (Ldot(l, s)))
(* Qualified (dot) lookups: resolve the prefix [l] to structure
   components, then find [s] in the relevant component map. *)

let lookup_dot_value ~errors ~use ~loc l s env =
  let (path, comps) =
    lookup_structure_components ~errors ~use ~loc l env
  in
  match NameMap.find s comps.comp_values with
  | vda ->
      let path = Pdot(path, s) in
      use_value ~use ~loc path vda;
      (path, vda.vda_description)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_value (Ldot(l, s), No_hint))

let lookup_dot_type ~errors ~use ~loc l s env =
  let (p, comps) = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_types with
  | tda ->
      let path = Pdot(p, s) in
      use_type ~use ~loc path tda;
      (path, tda)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_type (Ldot(l, s)))

let lookup_dot_modtype ~errors ~use ~loc l s env =
  let (p, comps) = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_modtypes with
  | mta ->
      let path = Pdot(p, s) in
      use_modtype ~use ~loc path mta.mtda_declaration;
      (path, mta.mtda_declaration)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_modtype (Ldot(l, s)))

let lookup_dot_class ~errors ~use ~loc l s env =
  let (p, comps) = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_classes with
  | clda ->
      let path = Pdot(p, s) in
      use_class ~use ~loc path clda;
      (path, clda.clda_declaration)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_class (Ldot(l, s)))

let lookup_dot_cltype ~errors ~use ~loc l s env =
  let (p, comps) = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_cltypes with
  | cltda ->
      let path = Pdot(p, s) in
      use_cltype ~use ~loc path cltda.cltda_declaration;
      (path, cltda.cltda_declaration)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_cltype (Ldot(l, s)))

(* All candidates for an ambiguous qualified label, each paired with a
   deferred use callback. *)
let lookup_all_dot_labels ~errors ~use ~loc usage l s env =
  let (_, comps) = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_labels with
  | [] | exception Not_found ->
      may_lookup_error errors loc env (Unbound_label (Ldot(l, s)))
  | lbls ->
      List.map
        (fun lbl ->
           let use_fun () = use_label ~use ~loc usage env lbl in
           (lbl, use_fun))
        lbls

let lookup_all_dot_constructors ~errors ~use ~loc usage l s env =
  match l with
  | Longident.Lident "*predef*" ->
      (* Hack to support compilation of default arguments *)
      lookup_all_ident_constructors
        ~errors ~use ~loc usage s initial_safe_string
  | _ ->
      let (_, comps) = lookup_structure_components ~errors ~use ~loc l env in
      match NameMap.find s comps.comp_constrs with
      | [] | exception Not_found ->
          may_lookup_error errors loc env (Unbound_constructor (Ldot(l, s)))
      | cstrs ->
          List.map
            (fun cda ->
               let use_fun () = use_constructor ~use ~loc usage env cda in
               (cda.cda_description, use_fun))
            cstrs
(* General forms of the lookup functions *)
(* General forms of the lookup functions: dispatch on the shape of the
   longident to the ident/dot/apply cases above. *)

(* Path-only module lookup.  With -transparent-modules and [~load:false],
   a persistent unit is existence-checked without reading its cmi. *)
let lookup_module_path ~errors ~use ~loc ~load lid env : Path.t =
  match lid with
  | Lident s ->
      if !Clflags.transparent_modules && not load then
        fst (lookup_ident_module Don't_load ~errors ~use ~loc s env)
      else
        fst (lookup_ident_module Load ~errors ~use ~loc s env)
  | Ldot(l, s) -> fst (lookup_dot_module ~errors ~use ~loc l s env)
  | Lapply _ as lid ->
      let path_f, _comp_f, path_arg = lookup_apply ~errors ~use ~loc lid env in
      Papply(path_f, path_arg)

let lookup_value ~errors ~use ~loc lid env =
  match lid with
  | Lident s -> lookup_ident_value ~errors ~use ~loc s env
  | Ldot(l, s) ->
      let path, desc = lookup_dot_value ~errors ~use ~loc l s env in
      (* Values reached through a module path are global. *)
      let mode = Value_mode.global in
      path, desc, mode
  | Lapply _ -> assert false

let lookup_type_full ~errors ~use ~loc lid env =
  match lid with
  | Lident s -> lookup_ident_type ~errors ~use ~loc s env
  | Ldot(l, s) -> lookup_dot_type ~errors ~use ~loc l s env
  | Lapply _ -> assert false

let lookup_type ~errors ~use ~loc lid env =
  let (path, tda) = lookup_type_full ~errors ~use ~loc lid env in
  path, tda.tda_declaration

let lookup_modtype_lazy ~errors ~use ~loc lid env =
  match lid with
  | Lident s -> lookup_ident_modtype ~errors ~use ~loc s env
  | Ldot(l, s) -> lookup_dot_modtype ~errors ~use ~loc l s env
  | Lapply _ -> assert false

let lookup_modtype ~errors ~use ~loc lid env =
  let (path, mt) = lookup_modtype_lazy ~errors ~use ~loc lid env in
  path, Subst.Lazy.force_modtype_decl mt

let lookup_class ~errors ~use ~loc lid env =
  match lid with
  | Lident s -> lookup_ident_class ~errors ~use ~loc s env
  | Ldot(l, s) -> lookup_dot_class ~errors ~use ~loc l s env
  | Lapply _ -> assert false

let lookup_cltype ~errors ~use ~loc lid env =
  match lid with
  | Lident s -> lookup_ident_cltype ~errors ~use ~loc s env
  | Ldot(l, s) -> lookup_dot_cltype ~errors ~use ~loc l s env
  | Lapply _ -> assert false

let lookup_all_labels ~errors ~use ~loc usage lid env =
  match lid with
  | Lident s -> lookup_all_ident_labels ~errors ~use ~loc usage s env
  | Ldot(l, s) -> lookup_all_dot_labels ~errors ~use ~loc usage l s env
  | Lapply _ -> assert false

(* First candidate wins; its deferred use callback is run immediately. *)
let lookup_label ~errors ~use ~loc usage lid env =
  match lookup_all_labels ~errors ~use ~loc usage lid env with
  | [] -> assert false
  | (desc, use) :: _ -> use (); desc

(* All labels of the record type at [ty_path]; empty for non-records or
   unknown paths. *)
let lookup_all_labels_from_type ~use ~loc usage ty_path env =
  match find_type_descrs ty_path env with
  | exception Not_found -> []
  | Type_variant _ | Type_abstract | Type_open -> []
  | Type_record (lbls, _) ->
      List.map
        (fun lbl ->
           let use_fun () = use_label ~use ~loc usage env lbl in
           (lbl, use_fun))
        lbls

let lookup_all_constructors ~errors ~use ~loc usage lid env =
  match lid with
  | Lident s -> lookup_all_ident_constructors ~errors ~use ~loc usage s env
  | Ldot(l, s) -> lookup_all_dot_constructors ~errors ~use ~loc usage l s env
  | Lapply _ -> assert false

let lookup_constructor ~errors ~use ~loc usage lid env =
  match lookup_all_constructors ~errors ~use ~loc usage lid env with
  | [] -> assert false
  | (desc, use) :: _ -> use (); desc

(* All constructors of the variant type at [ty_path]; empty otherwise. *)
let lookup_all_constructors_from_type ~use ~loc usage ty_path env =
  match find_type_descrs ty_path env with
  | exception Not_found -> []
  | Type_record _ | Type_abstract | Type_open -> []
  | Type_variant (cstrs, _) ->
      List.map
        (fun cstr ->
           let use_fun () =
             use_constructor_desc ~use ~loc usage env cstr
           in
           (cstr, use_fun))
        cstrs
(* Lookup functions that do not mark the item as used or
warn if it has alerts, and raise [Not_found] rather
than report errors *)
(* Silent variants of the lookups: no usage marking, no alert warnings,
   [Not_found] instead of reported errors.  The location is only used as
   a placeholder. *)

let find_module_by_name lid env =
  let loc = Location.(in_file !input_name) in
  lookup_module ~errors:false ~use:false ~loc lid env

let find_value_by_name lid env =
  let loc = Location.(in_file !input_name) in
  let path, desc, _ = lookup_value ~errors:false ~use:false ~loc lid env in
  path, desc

let find_type_by_name lid env =
  let loc = Location.(in_file !input_name) in
  lookup_type ~errors:false ~use:false ~loc lid env

let find_modtype_by_name lid env =
  let loc = Location.(in_file !input_name) in
  lookup_modtype ~errors:false ~use:false ~loc lid env

let find_class_by_name lid env =
  let loc = Location.(in_file !input_name) in
  lookup_class ~errors:false ~use:false ~loc lid env

let find_cltype_by_name lid env =
  let loc = Location.(in_file !input_name) in
  lookup_cltype ~errors:false ~use:false ~loc lid env

let find_constructor_by_name lid env =
  let loc = Location.(in_file !input_name) in
  lookup_constructor ~errors:false ~use:false ~loc Positive lid env

let find_label_by_name lid env =
  let loc = Location.(in_file !input_name) in
  lookup_label ~errors:false ~use:false ~loc Projection lid env
(* Ordinary lookup functions *)
(* Public entry points: error-reporting lookups, with [~use] defaulting
   to true.  These shadow the internal definitions above. *)

let lookup_module_path ?(use=true) ~loc ~load lid env =
  lookup_module_path ~errors:true ~use ~loc ~load lid env

let lookup_module ?(use=true) ~loc lid env =
  lookup_module ~errors:true ~use ~loc lid env

let lookup_value ?(use=true) ~loc lid env =
  (* Rejects reserved value names before the actual lookup. *)
  check_value_name (Longident.last lid) loc;
  lookup_value ~errors:true ~use ~loc lid env

let lookup_type ?(use=true) ~loc lid env =
  lookup_type ~errors:true ~use ~loc lid env

let lookup_modtype ?(use=true) ~loc lid env =
  lookup_modtype ~errors:true ~use ~loc lid env

let lookup_modtype_path ?(use=true) ~loc lid env =
  fst (lookup_modtype_lazy ~errors:true ~use ~loc lid env)

let lookup_class ?(use=true) ~loc lid env =
  lookup_class ~errors:true ~use ~loc lid env

let lookup_cltype ?(use=true) ~loc lid env =
  lookup_cltype ~errors:true ~use ~loc lid env

(* The "all" variants convert lookup errors into a [result] so callers
   can produce better ambiguity diagnostics. *)
let lookup_all_constructors ?(use=true) ~loc usage lid env =
  match lookup_all_constructors ~errors:true ~use ~loc usage lid env with
  | exception Error(Lookup_error(loc', env', err)) ->
      (Error(loc', env', err) : _ result)
  | cstrs -> Ok cstrs

let lookup_constructor ?(use=true) ~loc lid env =
  lookup_constructor ~errors:true ~use ~loc lid env

let lookup_all_constructors_from_type ?(use=true) ~loc usage ty_path env =
  lookup_all_constructors_from_type ~use ~loc usage ty_path env

let lookup_all_labels ?(use=true) ~loc usage lid env =
  match lookup_all_labels ~errors:true ~use ~loc usage lid env with
  | exception Error(Lookup_error(loc', env', err)) ->
      (Error(loc', env', err) : _ result)
  | lbls -> Ok lbls

let lookup_label ?(use=true) ~loc lid env =
  lookup_label ~errors:true ~use ~loc lid env

let lookup_all_labels_from_type ?(use=true) ~loc usage ty_path env =
  lookup_all_labels_from_type ~use ~loc usage ty_path env
(* Look up [name] as an instance variable inside a class body.  Returns
   its path, mutability, class number and type; any other kind of value
   (or an unbound/masked entry) is a lookup error. *)
let lookup_instance_variable ?(use=true) ~loc name env =
  match IdTbl.find_name_and_modes wrap_value ~mark:use name env.values with
  | (path, _, Val_bound vda) -> begin
      let desc = vda.vda_description in
      match desc.val_kind with
      | Val_ivar(mut, cl_num) ->
          use_value ~use ~loc path vda;
          path, mut, cl_num, desc.val_type
      | _ ->
          lookup_error loc env (Not_an_instance_variable name)
    end
  | (_, _, Val_unbound Val_unbound_instance_variable) ->
      lookup_error loc env (Masked_instance_variable (Lident name))
  | (_, _, Val_unbound Val_unbound_self) ->
      lookup_error loc env (Not_an_instance_variable name)
  | (_, _, Val_unbound Val_unbound_ancestor) ->
      lookup_error loc env (Not_an_instance_variable name)
  (* Note: the constructor argument must be parenthesized here. *)
  | (_, _, Val_unbound (Val_unbound_ghost_recursive _)) ->
      lookup_error loc env (Unbound_instance_variable name)
  | exception Not_found ->
      lookup_error loc env (Unbound_instance_variable name)
(* Checking if a name is bound *)

(* A module name is bound if it is in the table, or (when it is not the
   current unit) a persistent interface for it can be found. *)
let bound_module name env =
  match IdTbl.find_name wrap_module ~mark:false name env.modules with
  | _ -> true
  | exception Not_found ->
      if Current_unit_name.is name then false
      else begin
        match find_pers_mod (name |> Compilation_unit.Name.of_string) with
        | _ -> true
        | exception Not_found -> false
      end

(* Generic membership test over one of the IdTbl-based namespaces. *)
let bound wrap proj name env =
  match IdTbl.find_name_and_modes wrap ~mark:false name (proj env) with
  | _ -> true
  | exception Not_found -> false

let bound_value name env =
  bound wrap_value (fun env -> env.values) name env

let bound_type name env =
  bound wrap_identity (fun env -> env.types) name env

let bound_modtype name env =
  bound wrap_identity (fun env -> env.modtypes) name env

let bound_class name env =
  bound wrap_identity (fun env -> env.classes) name env

let bound_cltype name env =
  bound wrap_identity (fun env -> env.cltypes) name env
(* Folding on environments *)
(* Fold [f] over a namespace: over the whole environment when [lid] is
   [None], otherwise over the structure components of the module [l].
   [proj1] projects the environment table, [proj2] the component map. *)
let find_all wrap proj1 proj2 f lid env acc =
  match lid with
  | None ->
      IdTbl.fold_name wrap
        (fun name (p, data) acc -> f name p data acc)
        (proj1 env) acc
  | Some l ->
      let p, desc =
        lookup_module_components
          ~errors:false ~use:false ~loc:Location.none l env
      in
      begin match get_components desc with
      | Structure_comps c ->
          NameMap.fold
            (fun s data acc -> f s (Pdot (p, s)) (wrap data) acc)
            (proj2 c) acc
      | Functor_comps _ ->
          (* Functors have no named components to fold over. *)
          acc
      end

(* Same as [find_all] for the list-valued tables (labels/constructors);
   only the first entry of each ambiguity list is visited. *)
let find_all_simple_list proj1 proj2 f lid env acc =
  match lid with
  | None ->
      TycompTbl.fold_name
        (fun data acc -> f data acc)
        (proj1 env) acc
  | Some l ->
      let (_p, desc) =
        lookup_module_components
          ~errors:false ~use:false ~loc:Location.none l env
      in
      begin match get_components desc with
      | Structure_comps c ->
          NameMap.fold
            (fun _s comps acc ->
               match comps with
               | [] -> acc
               | data :: _ -> f data acc)
            (proj2 c) acc
      | Functor_comps _ ->
          acc
      end
(* Fold [f] over all modules, forcing each lazy declaration.  Persistent
   modules are visited only if already present in the cache (folding
   never triggers reading a cmi). *)
let fold_modules f lid env acc =
  match lid with
  | None ->
      IdTbl.fold_name wrap_module
        (fun name (p, entry) acc ->
           match entry with
           | Mod_unbound _ -> acc
           | Mod_local mda ->
               let md =
                 Subst.Lazy.force_module_decl mda.mda_declaration
               in
               f name p md acc
           | Mod_persistent ->
               let modname = name |> Compilation_unit.Name.of_string in
               match Persistent_env.find_in_cache !persistent_env modname with
               | None -> acc
               | Some mda ->
                   let md =
                     Subst.Lazy.force_module_decl mda.mda_declaration
                   in
                   f name p md acc)
        env.modules
        acc
  | Some l ->
      let p, desc =
        lookup_module_components
          ~errors:false ~use:false ~loc:Location.none l env
      in
      begin match get_components desc with
      | Structure_comps c ->
          NameMap.fold
            (fun s mda acc ->
               let md =
                 Subst.Lazy.force_module_decl mda.mda_declaration
               in
               f s (Pdot (p, s)) md acc)
            c.comp_modules
            acc
      | Functor_comps _ ->
          acc
      end
(* Per-namespace folds built on [find_all] / [find_all_simple_list].
   Unbound values are skipped; lazy module-type declarations are forced
   before being passed to [f]. *)
let fold_values f =
  find_all wrap_value (fun env -> env.values) (fun sc -> sc.comp_values)
    (fun k p ve acc ->
       match ve with
       | Val_unbound _ -> acc
       | Val_bound vda -> f k p vda.vda_description acc)
and fold_constructors f =
  find_all_simple_list (fun env -> env.constrs) (fun sc -> sc.comp_constrs)
    (fun cda acc -> f cda.cda_description acc)
and fold_labels f =
  find_all_simple_list (fun env -> env.labels) (fun sc -> sc.comp_labels) f
and fold_types f =
  find_all wrap_identity
    (fun env -> env.types) (fun sc -> sc.comp_types)
    (fun k p tda acc -> f k p tda.tda_declaration acc)
and fold_modtypes f =
  let f l path data acc = f l path (Subst.Lazy.force_modtype_decl data) acc in
  find_all wrap_identity
    (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes)
    (fun k p mta acc -> f k p mta.mtda_declaration acc)
and fold_classes f =
  find_all wrap_identity (fun env -> env.classes) (fun sc -> sc.comp_classes)
    (fun k p clda acc -> f k p clda.clda_declaration acc)
and fold_cltypes f =
  find_all wrap_identity
    (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes)
    (fun k p cltda acc -> f k p cltda.cltda_declaration acc)
(* Remove from [env] every persistent module that has not been loaded and
   for which [f] returns false, dropping the matching [Env_persistent]
   entries from the summary as well. *)
let filter_non_loaded_persistent f env =
  (* Collect the names of unloaded persistent modules rejected by [f]. *)
  let to_remove =
    IdTbl.fold_name wrap_module
      (fun name (_, entry) acc ->
         match entry with
         | Mod_local _ -> acc
         | Mod_unbound _ -> acc
         | Mod_persistent ->
             let modname = name |> Compilation_unit.Name.of_string in
             match Persistent_env.find_in_cache !persistent_env modname with
             | Some _ -> acc
             | None ->
                 if f (Ident.create_persistent name) then
                   acc
                 else
                   String.Set.add name acc)
      env.modules
      String.Set.empty
  in
  let remove_ids tbl ids =
    String.Set.fold
      (fun name tbl -> IdTbl.remove (Ident.create_persistent name) tbl)
      ids
      tbl
  in
  (* Drop [Env_persistent] summary items for the removed names; stop
     early once all names have been seen. *)
  let rec filter_summary summary ids =
    if String.Set.is_empty ids then
      summary
    else
      match summary with
        Env_persistent (s, id) when String.Set.mem (Ident.name id) ids ->
          filter_summary s (String.Set.remove (Ident.name id) ids)
      | Env_empty
      | Env_value _
      | Env_type _
      | Env_extension _
      | Env_module _
      | Env_modtype _
      | Env_class _
      | Env_cltype _
      | Env_open _
      | Env_functor_arg _
      | Env_constraints _
      | Env_copy_types _
      | Env_persistent _
      | Env_value_unbound _
      | Env_module_unbound _ ->
          map_summary (fun s -> filter_summary s ids) summary
  in
  { env with
    modules = remove_ids env.modules to_remove;
    summary = filter_summary env.summary to_remove;
  }
(* Return the environment summary *)
(* The summary of [env], with local constraints folded in if any. *)
let summary env =
  if Path.Map.is_empty env.local_constraints then env.summary
  else Env_constraints (env.summary, env.local_constraints)

(* One-entry cache for [keep_only_summary]. *)
let last_env = s_ref empty
let last_reduced_env = s_ref empty

(* Strip [env] down to its summary, constraints and flags (used to keep
   debugging/marshalling data small).  Cached on physical equality. *)
let keep_only_summary env =
  if !last_env == env then !last_reduced_env
  else begin
    let new_env =
      {
        empty with
        summary = env.summary;
        local_constraints = env.local_constraints;
        flags = env.flags;
      }
    in
    last_env := env;
    last_reduced_env := new_env;
    new_env
  end

(* Rebuild a full environment from a summary-only one, preserving its
   constraints and flags. *)
let env_of_only_summary env_from_summary env =
  let new_env = env_from_summary env.summary Subst.identity in
  { new_env with
    local_constraints = env.local_constraints;
    flags = env.flags;
  }
(* Error report *)
open Format

(* Forward declarations *)
(* Printers installed later by the pretty-printing module to break a
   dependency cycle — NOTE(review): the installing module is not visible
   here; presumably Printtyp, confirm. *)
let print_longident =
  ref ((fun _ _ -> assert false) : formatter -> Longident.t -> unit)
let print_path =
  ref ((fun _ _ -> assert false) : formatter -> Path.t -> unit)
(* Print a "did you mean ...?" hint for [lid], using [extract] to gather
   candidate names from the relevant (possibly qualified) namespace.
   Functor applications get no suggestion. *)
let spellcheck ppf extract env lid =
  let choices ~path name = Misc.spellcheck (extract path env) name in
  match lid with
  | Longident.Lapply _ -> ()
  | Longident.Lident s ->
      Misc.did_you_mean ppf (fun () -> choices ~path:None s)
  | Longident.Ldot (r, s) ->
      Misc.did_you_mean ppf (fun () -> choices ~path:(Some r) s)

(* Same, for plain (unqualified) names such as instance variables. *)
let spellcheck_name ppf extract env name =
  Misc.did_you_mean ppf
    (fun () -> Misc.spellcheck (extract env) name)
(* Candidate-name extractors feeding [spellcheck], one per namespace. *)

let extract_values path env =
  fold_values (fun name _ _ acc -> name :: acc) path env []

let extract_types path env =
  fold_types (fun name _ _ acc -> name :: acc) path env []

let extract_modules path env =
  fold_modules (fun name _ _ acc -> name :: acc) path env []

let extract_constructors path env =
  fold_constructors (fun desc acc -> desc.cstr_name :: acc) path env []

let extract_labels path env =
  fold_labels (fun desc acc -> desc.lbl_name :: acc) path env []

let extract_classes path env =
  fold_classes (fun name _ _ acc -> name :: acc) path env []

let extract_modtypes path env =
  fold_modtypes (fun name _ _ acc -> name :: acc) path env []

let extract_cltypes path env =
  fold_cltypes (fun name _ _ acc -> name :: acc) path env []

(* Instance-variable names only, for unqualified spellchecking. *)
let extract_instance_variables env =
  fold_values
    (fun name _ descr acc ->
       match descr.val_kind with
       | Val_ivar _ -> name :: acc
       | _ -> acc) None env []
(* Print a [lookup_error] on [ppf].  Each arm prints the main message; where
   a candidate list is cheap to compute, it also runs the spellchecker to
   suggest a near-miss, and a few arms add a hint distinguishing confusable
   namespaces (modules vs. module types, classes vs. class types). *)
let report_lookup_error _loc env ppf = function
  | Unbound_value(lid, hint) -> begin
      fprintf ppf "Unbound value %a" !print_longident lid;
      spellcheck ppf extract_values env lid;
      match hint with
      | No_hint -> ()
      | Missing_rec def_loc ->
          (* The name is bound by a non-recursive definition that contains
             this use: point at the line where adding [rec] would fix it. *)
          let (_, line, _) =
            Location.get_pos_info def_loc.Location.loc_start
          in
          fprintf ppf
            "@.@[%s@ %s %i@]"
            "Hint: If this is a recursive definition,"
            "you should add the 'rec' keyword on line"
            line
    end
  | Unbound_type lid ->
      fprintf ppf "Unbound type constructor %a" !print_longident lid;
      spellcheck ppf extract_types env lid;
  | Unbound_module lid -> begin
      fprintf ppf "Unbound module %a" !print_longident lid;
      (* A module type of the same name often signals a namespace mix-up;
         prefer that hint over a spelling suggestion. *)
      match find_modtype_by_name lid env with
      | exception Not_found -> spellcheck ppf extract_modules env lid;
      | _ ->
          fprintf ppf
            "@.@[%s %a, %s@]"
            "Hint: There is a module type named"
            !print_longident lid
            "but module types are not modules"
      end
  | Unbound_constructor lid ->
      fprintf ppf "Unbound constructor %a" !print_longident lid;
      spellcheck ppf extract_constructors env lid;
  | Unbound_label lid ->
      fprintf ppf "Unbound record field %a" !print_longident lid;
      spellcheck ppf extract_labels env lid;
  | Unbound_class lid -> begin
      fprintf ppf "Unbound class %a" !print_longident lid;
      (* Same namespace mix-up hint as for modules, but with class types. *)
      match find_cltype_by_name lid env with
      | exception Not_found -> spellcheck ppf extract_classes env lid;
      | _ ->
          fprintf ppf
            "@.@[%s %a, %s@]"
            "Hint: There is a class type named"
            !print_longident lid
            "but classes are not class types"
      end
  | Unbound_modtype lid -> begin
      fprintf ppf "Unbound module type %a" !print_longident lid;
      match find_module_by_name lid env with
      | exception Not_found -> spellcheck ppf extract_modtypes env lid;
      | _ ->
          fprintf ppf
            "@.@[%s %a, %s@]"
            "Hint: There is a module named"
            !print_longident lid
            "but modules are not module types"
      end
  | Unbound_cltype lid ->
      fprintf ppf "Unbound class type %a" !print_longident lid;
      spellcheck ppf extract_cltypes env lid;
  | Unbound_instance_variable s ->
      fprintf ppf "Unbound instance variable %s" s;
      spellcheck_name ppf extract_instance_variables env s;
  | Not_an_instance_variable s ->
      fprintf ppf "The value %s is not an instance variable" s;
      spellcheck_name ppf extract_instance_variables env s;
  | Masked_instance_variable lid ->
      fprintf ppf
        "The instance variable %a@ \
         cannot be accessed from the definition of another instance variable"
        !print_longident lid
  | Masked_self_variable lid ->
      fprintf ppf
        "The self variable %a@ \
         cannot be accessed from the definition of an instance variable"
        !print_longident lid
  | Masked_ancestor_variable lid ->
      fprintf ppf
        "The ancestor variable %a@ \
         cannot be accessed from the definition of an instance variable"
        !print_longident lid
  | Illegal_reference_to_recursive_module ->
      fprintf ppf "Illegal recursive module reference"
  | Structure_used_as_functor lid ->
      fprintf ppf "@[The module %a is a structure, it cannot be applied@]"
        !print_longident lid
  | Abstract_used_as_functor lid ->
      fprintf ppf "@[The module %a is abstract, it cannot be applied@]"
        !print_longident lid
  | Functor_used_as_structure lid ->
      fprintf ppf "@[The module %a is a functor, \
                   it cannot have any components@]" !print_longident lid
  | Abstract_used_as_structure lid ->
      fprintf ppf "@[The module %a is abstract, \
                   it cannot have any components@]" !print_longident lid
  | Generative_used_as_applicative lid ->
      fprintf ppf "@[The functor %a is generative,@ it@ cannot@ be@ \
                   applied@ in@ type@ expressions@]" !print_longident lid
  | Cannot_scrape_alias(lid, p) ->
      (* Distinguish an alias to the unit being compiled (a cycle) from an
         alias whose target cmi is simply absent. *)
      let cause =
        if Current_unit_name.is_path p then "is the current compilation unit"
        else "is missing"
      in
      fprintf ppf
        "The module %a is an alias for module %a, which %s"
        !print_longident lid !print_path p cause
  | Local_value_used_in_closure (lid, context) ->
      fprintf ppf
        "@[The value %a is local, so cannot be used \
         inside a closure that might escape@]"
        !print_longident lid;
      begin match context with
      | Some Tailcall_argument ->
          fprintf ppf "@.@[Hint: The closure might escape because it \
                       is an argument to a tail call@]"
      | _ -> ()
      end
(* Print a top-level [error] on [ppf].  Lookup errors are delegated to
   [report_lookup_error]; the remaining cases concern internal paths whose
   compiled interface is missing, and invalid value names. *)
let report_error ppf = function
  | Missing_module(_, path1, path2) ->
      fprintf ppf "@[@[<hov>";
      (* [path1] may expand (through aliases) to [path2]; only mention the
         expansion when the two differ. *)
      if Path.same path1 path2 then
        fprintf ppf "Internal path@ %s@ is dangling." (Path.name path1)
      else
        fprintf ppf "Internal path@ %s@ expands to@ %s@ which is dangling."
          (Path.name path1) (Path.name path2);
      fprintf ppf "@]@ @[%s@ %s@ %s.@]@]"
        "The compiled interface for module" (Ident.name (Path.head path2))
        "was not found"
  | Illegal_value_name(_loc, name) ->
      fprintf ppf "'%s' is not a valid value identifier."
        name
  | Lookup_error(loc, t, err) -> report_lookup_error loc t ppf err
(* Hook [Error] into the compiler's generic error-reporting machinery so
   that uncaught [Error] exceptions are rendered through [report_error],
   attached to the location carried by the error when one is available. *)
let () =
  let handle = function
    | Error err ->
        let loc =
          match err with
          | Missing_module (loc, _, _)
          | Illegal_value_name (loc, _)
          | Lookup_error (loc, _, _) -> loc
        in
        let printer =
          if loc = Location.none then Location.error_of_printer_file
          else Location.error_of_printer ~loc ?sub:None
        in
        Some (printer report_error err)
    | _ -> None
  in
  Location.register_error_of_exn handle
| null | https://raw.githubusercontent.com/ocaml-flambda/ocaml-jst/7e5a626e4b4e12f1e9106564e1baba4d0ef6309a/typing/env.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Environment handling
* Map indexed by the name of module components.
* This module is used to store components of types (i.e. labels
and constructors). We keep a representation of each nested
"open" and the set of local bindings between each of them.
* Local bindings since the last open.
* Symbolic representation of the last (innermost) open, if any.
* Only used to check removal of open
* A callback to be applied when a component is used from this
"open". This is used to detect unused "opens". The
arguments are used to detect shadowing.
* The table before opening the module.
* This module is used to store all kinds of components except
(labels and constructors) in environments. We keep a
representation of each nested "open" and the set of local
bindings between each of them.
* Local bindings since the last open or lock
* Symbolic representation of the last (innermost) open, if any.
* The path of the opened module, to be prefixed in front of
its local names to produce a valid path in the current
environment.
* Components from the opened module.
* A callback to be applied when a component is used from this
"open". This is used to detect unused "opens". The
arguments are used to detect shadowing.
* The table before opening the module.
Formal parameter and argument signature
Result signature
For memoization
Forward declarations
Print addresses
The name of the compilation unit currently compiled.
This addition only observably changes the environment if it shadows a
non-persistent module already in the environment.
(See PR#9345)
With [-no-alias-deps], non-material additions should not
affect the environment at all. We should only observe the
existence of a cmi when accessing components of the module.
(See #9991).
get_components
Module type of functor application
Lookup by identifier
This case corresponds to an inlined record
Only present temporarily while approximating the environment for
recursive modules.
[find_shape] is only ever called after the environment gets
properly populated.
fast path (avoids lookup)
Cstr M.t.C
Regular M.t, Ext M.C
Find the manifest type associated to a type when appropriate:
- the type should be public or should have a private row,
- the type should have an associated manifest type.
The manifest type of Private abstract data types without
private row are still considered unknown to the type system.
Hence, this case is caught by the following clause that also handles
purely abstract data types without manifest type definition.
Find the manifest type information associated to a type, i.e.
the necessary information for the compiler's type-based optimisations.
In particular, the manifest type associated to a private abstract type
is revealed for the sake of compiler's type-based optimisations.
The manifest type of Private abstract data types can still get
an approximation using their manifest type.
Copying types associated with values
Iter on an environment (ignoring the body of functors and
not yet evaluated structures)
Given a signature and a root path, prefix all idents in the signature
by the root path and build the corresponding substitution.
we extend the substitution in case of an inlined record
pretend this is a type, cf. PR#6650
Compute structure descriptions
This should be kept in sync with the [identchar_latin1] character class
in [lexer.mll]
The prefixed items get the same scope as [cm_path], which is
the prefix.
The prefixed items get the same scope as [cm_path], which is
the prefix.
fcomp_arg and fcomp_res must be prefixed eagerly, because
they are interpreted in the outer environment
Insertion of bindings by identifier + path
Note: we could also check here general validity of the
identifier, to protect against bad identifiers forged by -pp or
-ppx preprocessors.
Simplified version of store_type that doesn't compute and store
constructor and label infos, but simply record the arity and
manifest-ness of the type. Used in components_of_module to
keep track of type abbreviations (e.g. type t = float) in the
computation of label representations.
Compute the components of a functor application in a path.
we have to apply eagerly instead of passing sub to [components_of_module]
because of the call to [check_well_formed_module].
???
Define forward functions
Insertion of bindings by identifier
Insertion of bindings by name
Insertion of all components of a signature
Add "unbound" bindings
Open a signature path
Open a signature from a file
a compilation unit cannot refer to a functor
Read a signature from a file
Save a signature to a file
Make the initial environment
Tracking usage
Lookup by name
Only display the "missing rec" hint for non-ghost code
PR#7611
Hack to support compilation of default arguments
General forms of the lookup functions
Lookup functions that do not mark the item as used or
warn if it has alerts, and raise [Not_found] rather
than report errors
Ordinary lookup functions
Checking if a name is bound
Folding on environments
Return the environment summary
Error report
Forward declarations | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Cmi_format
open Misc
open Asttypes
open Longident
open Path
open Types
open Local_store
module String = Misc.Stdlib.String
let add_delayed_check_forward = ref (fun _ -> assert false)
type 'a usage_tbl = ('a -> unit) Types.Uid.Tbl.t
* This table is used to track usage of value declarations .
A declaration is identified by its uid .
The callback attached to a declaration is called whenever the value ( or
type , or ... ) is used explicitly ( lookup_value , ... ) or implicitly
( inclusion test between signatures , cf , ... ) .
A declaration is identified by its uid.
The callback attached to a declaration is called whenever the value (or
type, or ...) is used explicitly (lookup_value, ...) or implicitly
(inclusion test between signatures, cf Includemod.value_descriptions, ...).
*)
let value_declarations : unit usage_tbl ref = s_table Types.Uid.Tbl.create 16
let type_declarations : unit usage_tbl ref = s_table Types.Uid.Tbl.create 16
let module_declarations : unit usage_tbl ref = s_table Types.Uid.Tbl.create 16
let uid_to_loc : Location.t Types.Uid.Tbl.t ref =
s_table Types.Uid.Tbl.create 16
let register_uid uid loc = Types.Uid.Tbl.add !uid_to_loc uid loc
let get_uid_to_loc_tbl () = !uid_to_loc
type constructor_usage = Positive | Pattern | Exported_private | Exported
type constructor_usages =
{
mutable cu_positive: bool;
mutable cu_pattern: bool;
mutable cu_exported_private: bool;
}
let add_constructor_usage cu usage =
match usage with
| Positive -> cu.cu_positive <- true
| Pattern -> cu.cu_pattern <- true
| Exported_private -> cu.cu_exported_private <- true
| Exported ->
cu.cu_positive <- true;
cu.cu_pattern <- true;
cu.cu_exported_private <- true
let constructor_usages () =
{cu_positive = false; cu_pattern = false; cu_exported_private = false}
let constructor_usage_complaint ~rebind priv cu
: Warnings.constructor_usage_warning option =
match priv, rebind with
| Asttypes.Private, _ | _, true ->
if cu.cu_positive || cu.cu_pattern || cu.cu_exported_private then None
else Some Unused
| Asttypes.Public, false -> begin
match cu.cu_positive, cu.cu_pattern, cu.cu_exported_private with
| true, _, _ -> None
| false, false, false -> Some Unused
| false, true, _ -> Some Not_constructed
| false, false, true -> Some Only_exported_private
end
let used_constructors : constructor_usage usage_tbl ref =
s_table Types.Uid.Tbl.create 16
type label_usage =
Projection | Mutation | Construct | Exported_private | Exported
type label_usages =
{
mutable lu_projection: bool;
mutable lu_mutation: bool;
mutable lu_construct: bool;
}
let add_label_usage lu usage =
match usage with
| Projection -> lu.lu_projection <- true;
| Mutation -> lu.lu_mutation <- true
| Construct -> lu.lu_construct <- true
| Exported_private ->
lu.lu_projection <- true
| Exported ->
lu.lu_projection <- true;
lu.lu_mutation <- true;
lu.lu_construct <- true
let is_mutating_label_usage = function
| Mutation -> true
| (Projection | Construct | Exported_private | Exported) -> false
let label_usages () =
{lu_projection = false; lu_mutation = false; lu_construct = false}
let label_usage_complaint priv mut lu
: Warnings.field_usage_warning option =
match priv, mut with
| Asttypes.Private, _ ->
if lu.lu_projection then None
else Some Unused
| Asttypes.Public, Asttypes.Immutable -> begin
match lu.lu_projection, lu.lu_construct with
| true, _ -> None
| false, false -> Some Unused
| false, true -> Some Not_read
end
| Asttypes.Public, Asttypes.Mutable -> begin
match lu.lu_projection, lu.lu_mutation, lu.lu_construct with
| true, true, _ -> None
| false, false, false -> Some Unused
| false, _, _ -> Some Not_read
| true, false, _ -> Some Not_mutated
end
let used_labels : label_usage usage_tbl ref =
s_table Types.Uid.Tbl.create 16
module NameMap = String.Map
type value_unbound_reason =
| Val_unbound_instance_variable
| Val_unbound_self
| Val_unbound_ancestor
| Val_unbound_ghost_recursive of Location.t
type module_unbound_reason =
| Mod_unbound_illegal_recursion
type summary =
Env_empty
| Env_value of summary * Ident.t * value_description
| Env_type of summary * Ident.t * type_declaration
| Env_extension of summary * Ident.t * extension_constructor
| Env_module of summary * Ident.t * module_presence * module_declaration
| Env_modtype of summary * Ident.t * modtype_declaration
| Env_class of summary * Ident.t * class_declaration
| Env_cltype of summary * Ident.t * class_type_declaration
| Env_open of summary * Path.t
| Env_functor_arg of summary * Ident.t
| Env_constraints of summary * type_declaration Path.Map.t
| Env_copy_types of summary
| Env_persistent of summary * Ident.t
| Env_value_unbound of summary * string * value_unbound_reason
| Env_module_unbound of summary * string * module_unbound_reason
let map_summary f = function
Env_empty -> Env_empty
| Env_value (s, id, d) -> Env_value (f s, id, d)
| Env_type (s, id, d) -> Env_type (f s, id, d)
| Env_extension (s, id, d) -> Env_extension (f s, id, d)
| Env_module (s, id, p, d) -> Env_module (f s, id, p, d)
| Env_modtype (s, id, d) -> Env_modtype (f s, id, d)
| Env_class (s, id, d) -> Env_class (f s, id, d)
| Env_cltype (s, id, d) -> Env_cltype (f s, id, d)
| Env_open (s, p) -> Env_open (f s, p)
| Env_functor_arg (s, id) -> Env_functor_arg (f s, id)
| Env_constraints (s, m) -> Env_constraints (f s, m)
| Env_copy_types s -> Env_copy_types (f s)
| Env_persistent (s, id) -> Env_persistent (f s, id)
| Env_value_unbound (s, u, r) -> Env_value_unbound (f s, u, r)
| Env_module_unbound (s, u, r) -> Env_module_unbound (f s, u, r)
type address =
| Aunit of Compilation_unit.t
| Alocal of Ident.t
| Adot of address * int
module TycompTbl =
struct
type 'a t = {
current: 'a Ident.tbl;
opened: 'a opened option;
}
and 'a opened = {
components: ('a list) NameMap.t;
* Components from the opened module . We keep a list of
bindings for each name , as in comp_labels and
comp_constrs .
bindings for each name, as in comp_labels and
comp_constrs. *)
root: Path.t;
using: (string -> ('a * 'a) option -> unit) option;
next: 'a t;
}
let empty = { current = Ident.empty; opened = None }
let add id x tbl =
{tbl with current = Ident.add id x tbl.current}
let add_open slot wrap root components next =
let using =
match slot with
| None -> None
| Some f -> Some (fun s x -> f s (wrap x))
in
{
current = Ident.empty;
opened = Some {using; components; root; next};
}
let remove_last_open rt tbl =
match tbl.opened with
| Some {root; next; _} when Path.same rt root ->
{ next with current =
Ident.fold_all Ident.add tbl.current next.current }
| _ ->
assert false
let rec find_same id tbl =
try Ident.find_same id tbl.current
with Not_found as exn ->
begin match tbl.opened with
| Some {next; _} -> find_same id next
| None -> raise exn
end
let nothing = fun () -> ()
let mk_callback rest name desc using =
match using with
| None -> nothing
| Some f ->
(fun () ->
match rest with
| [] -> f name None
| (hidden, _) :: _ -> f name (Some (desc, hidden)))
let rec find_all ~mark name tbl =
List.map (fun (_id, desc) -> desc, nothing)
(Ident.find_all name tbl.current) @
match tbl.opened with
| None -> []
| Some {using; next; components; root = _} ->
let rest = find_all ~mark name next in
let using = if mark then using else None in
match NameMap.find name components with
| exception Not_found -> rest
| opened ->
List.map
(fun desc -> desc, mk_callback rest name desc using)
opened
@ rest
let rec fold_name f tbl acc =
let acc = Ident.fold_name (fun _id d -> f d) tbl.current acc in
match tbl.opened with
| Some {using = _; next; components; root = _} ->
acc
|> NameMap.fold
(fun _name -> List.fold_right f)
components
|> fold_name f next
| None ->
acc
let rec local_keys tbl acc =
let acc = Ident.fold_all (fun k _ accu -> k::accu) tbl.current acc in
match tbl.opened with
| Some o -> local_keys o.next acc
| None -> acc
let diff_keys is_local tbl1 tbl2 =
let keys2 = local_keys tbl2 [] in
List.filter
(fun id ->
is_local (find_same id tbl2) &&
try ignore (find_same id tbl1); false
with Not_found -> true)
keys2
end
type empty = |
type escaping_context =
| Return
| Tailcall_argument
| Tailcall_function
| Partial_application
type value_lock =
| Lock of { mode : Alloc_mode.t; escaping_context : escaping_context option }
| Region_lock
module IdTbl =
struct
type ('lock, 'a, 'b) t = {
current: 'a Ident.tbl;
layer: ('lock, 'a, 'b) layer;
}
and ('lock, 'a, 'b) layer =
| Open of {
root: Path.t;
components: 'b NameMap.t;
using: (string -> ('a * 'a) option -> unit) option;
next: ('lock, 'a, 'b) t;
}
| Map of {
f: ('a -> 'a);
next: ('lock, 'a, 'b) t;
}
| Lock of {
mode: 'lock;
next: ('lock, 'a, 'b) t;
}
| Nothing
let empty = { current = Ident.empty; layer = Nothing }
let add id x tbl =
{tbl with current = Ident.add id x tbl.current}
let remove id tbl =
{tbl with current = Ident.remove id tbl.current}
let add_open slot wrap root components next =
let using =
match slot with
| None -> None
| Some f -> Some (fun s x -> f s (wrap x))
in
{
current = Ident.empty;
layer = Open {using; root; components; next};
}
let remove_last_open rt tbl =
match tbl.layer with
| Open {root; next; _} when Path.same rt root ->
{ next with current =
Ident.fold_all Ident.add tbl.current next.current }
| _ ->
assert false
let add_lock mode next =
{ current = Ident.empty; layer = Lock {mode; next} }
let map f next =
{
current = Ident.empty;
layer = Map {f; next}
}
let rec find_same id tbl =
try Ident.find_same id tbl.current
with Not_found as exn ->
begin match tbl.layer with
| Open {next; _} -> find_same id next
| Map {f; next} -> f (find_same id next)
| Lock {mode=_; next} -> find_same id next
| Nothing -> raise exn
end
let rec find_name_and_locks wrap ~mark name tbl macc =
try
let (id, desc) = Ident.find_name name tbl.current in
Pident id, macc, desc
with Not_found as exn ->
begin match tbl.layer with
| Open {using; root; next; components} ->
begin try
let descr = wrap (NameMap.find name components) in
let res = Pdot (root, name), macc, descr in
if mark then begin match using with
| None -> ()
| Some f -> begin
match find_name_and_locks wrap ~mark:false name next macc with
| exception Not_found -> f name None
| _, _, descr' -> f name (Some (descr', descr))
end
end;
res
with Not_found ->
find_name_and_locks wrap ~mark name next macc
end
| Map {f; next} ->
let (p, macc, desc) =
find_name_and_locks wrap ~mark name next macc in
p, macc, f desc
| Lock {mode; next} ->
find_name_and_locks wrap ~mark name next (mode :: macc)
| Nothing ->
raise exn
end
let find_name_and_modes wrap ~mark name tbl =
find_name_and_locks wrap ~mark name tbl []
let find_name wrap ~mark name tbl =
let (id, ([] : empty list), desc) =
find_name_and_modes wrap ~mark name tbl in
id, desc
let rec find_all wrap name tbl =
List.map
(fun (id, desc) -> Pident id, desc)
(Ident.find_all name tbl.current) @
match tbl.layer with
| Nothing -> []
| Open {root; using = _; next; components} ->
begin try
let desc = wrap (NameMap.find name components) in
(Pdot (root, name), desc) :: find_all wrap name next
with Not_found ->
find_all wrap name next
end
| Map {f; next} ->
List.map (fun (p, desc) -> (p, f desc))
(find_all wrap name next)
| Lock {mode=_;next} ->
find_all wrap name next
let rec fold_name wrap f tbl acc =
let acc =
Ident.fold_name
(fun id d -> f (Ident.name id) (Pident id, d))
tbl.current acc
in
match tbl.layer with
| Open {root; using = _; next; components} ->
acc
|> NameMap.fold
(fun name desc -> f name (Pdot (root, name), wrap desc))
components
|> fold_name wrap f next
| Nothing ->
acc
| Map {f=g; next} ->
acc
|> fold_name wrap
(fun name (path, desc) -> f name (path, g desc))
next
| Lock {mode=_; next} ->
fold_name wrap f next acc
let rec local_keys tbl acc =
let acc = Ident.fold_all (fun k _ accu -> k::accu) tbl.current acc in
match tbl.layer with
| Open {next; _ } | Map {next; _} | Lock {next; _} -> local_keys next acc
| Nothing -> acc
let rec iter wrap f tbl =
Ident.iter (fun id desc -> f id (Pident id, desc)) tbl.current;
match tbl.layer with
| Open {root; using = _; next; components} ->
NameMap.iter
(fun s x ->
let root_scope = Path.scope root in
f (Ident.create_scoped ~scope:root_scope s)
(Pdot (root, s), wrap x))
components;
iter wrap f next
| Map {f=g; next} ->
iter wrap (fun id (path, desc) -> f id (path, g desc)) next
| Lock {mode=_; next} ->
iter wrap f next
| Nothing -> ()
let diff_keys tbl1 tbl2 =
let keys2 = local_keys tbl2 [] in
List.filter
(fun id ->
try ignore (find_same id tbl1); false
with Not_found -> true)
keys2
end
type type_descr_kind =
(label_description, constructor_description) type_kind
type type_descriptions = type_descr_kind
let in_signature_flag = 0x01
type t = {
values: (value_lock, value_entry, value_data) IdTbl.t;
constrs: constructor_data TycompTbl.t;
labels: label_data TycompTbl.t;
types: (empty, type_data, type_data) IdTbl.t;
modules: (empty, module_entry, module_data) IdTbl.t;
modtypes: (empty, modtype_data, modtype_data) IdTbl.t;
classes: (empty, class_data, class_data) IdTbl.t;
cltypes: (empty, cltype_data, cltype_data) IdTbl.t;
functor_args: unit Ident.tbl;
summary: summary;
local_constraints: type_declaration Path.Map.t;
flags: int;
}
and module_components =
{
alerts: alerts;
uid: Uid.t;
comps:
(components_maker,
(module_components_repr, module_components_failure) result)
Lazy_backtrack.t;
}
and components_maker = {
cm_env: t;
cm_prefixing_subst: Subst.t;
cm_path: Path.t;
cm_addr: address_lazy;
cm_mty: Subst.Lazy.modtype;
cm_shape: Shape.t;
}
and module_components_repr =
Structure_comps of structure_components
| Functor_comps of functor_components
and module_components_failure =
| No_components_abstract
| No_components_alias of Path.t
and structure_components = {
mutable comp_values: value_data NameMap.t;
mutable comp_constrs: constructor_data list NameMap.t;
mutable comp_labels: label_data list NameMap.t;
mutable comp_types: type_data NameMap.t;
mutable comp_modules: module_data NameMap.t;
mutable comp_modtypes: modtype_data NameMap.t;
mutable comp_classes: class_data NameMap.t;
mutable comp_cltypes: cltype_data NameMap.t;
}
and functor_components = {
fcomp_arg: functor_parameter;
fcomp_shape: Shape.t;
fcomp_subst_cache: (Path.t, module_type) Hashtbl.t
}
and address_unforced =
| Projection of { parent : address_lazy; pos : int; }
| ModAlias of { env : t; path : Path.t; }
and address_lazy = (address_unforced, address) Lazy_backtrack.t
and value_data =
{ vda_description : value_description;
vda_address : address_lazy;
vda_mode : Value_mode.t;
vda_shape : Shape.t }
and value_entry =
| Val_bound of value_data
| Val_unbound of value_unbound_reason
and constructor_data =
{ cda_description : constructor_description;
cda_address : address_lazy option;
cda_shape: Shape.t; }
and label_data = label_description
and type_data =
{ tda_declaration : type_declaration;
tda_descriptions : type_descriptions;
tda_shape : Shape.t; }
and module_data =
{ mda_declaration : Subst.Lazy.module_decl;
mda_components : module_components;
mda_address : address_lazy;
mda_shape: Shape.t; }
and module_entry =
| Mod_local of module_data
| Mod_persistent
| Mod_unbound of module_unbound_reason
and modtype_data =
{ mtda_declaration : Subst.Lazy.modtype_declaration;
mtda_shape : Shape.t; }
and class_data =
{ clda_declaration : class_declaration;
clda_address : address_lazy;
clda_shape : Shape.t }
and cltype_data =
{ cltda_declaration : class_type_declaration;
cltda_shape : Shape.t }
let empty_structure =
Structure_comps {
comp_values = NameMap.empty;
comp_constrs = NameMap.empty;
comp_labels = NameMap.empty;
comp_types = NameMap.empty;
comp_modules = NameMap.empty; comp_modtypes = NameMap.empty;
comp_classes = NameMap.empty;
comp_cltypes = NameMap.empty }
type unbound_value_hint =
| No_hint
| Missing_rec of Location.t
type lookup_error =
| Unbound_value of Longident.t * unbound_value_hint
| Unbound_type of Longident.t
| Unbound_constructor of Longident.t
| Unbound_label of Longident.t
| Unbound_module of Longident.t
| Unbound_class of Longident.t
| Unbound_modtype of Longident.t
| Unbound_cltype of Longident.t
| Unbound_instance_variable of string
| Not_an_instance_variable of string
| Masked_instance_variable of Longident.t
| Masked_self_variable of Longident.t
| Masked_ancestor_variable of Longident.t
| Structure_used_as_functor of Longident.t
| Abstract_used_as_functor of Longident.t
| Functor_used_as_structure of Longident.t
| Abstract_used_as_structure of Longident.t
| Generative_used_as_applicative of Longident.t
| Illegal_reference_to_recursive_module
| Cannot_scrape_alias of Longident.t * Path.t
| Local_value_used_in_closure of Longident.t * escaping_context option
type error =
| Missing_module of Location.t * Path.t * Path.t
| Illegal_value_name of Location.t * string
| Lookup_error of Location.t * t * lookup_error
exception Error of error
let error err = raise (Error err)
let lookup_error loc env err =
error (Lookup_error(loc, env, err))
let same_constr = ref (fun _ _ _ -> assert false)
let check_well_formed_module = ref (fun _ -> assert false)
Helper to decide whether to report an identifier shadowing
by some ' open ' . For labels and constructors , we do not report
if the two elements are from the same re - exported declaration .
Later , one could also interpret some attributes on value and
type declarations to silence the shadowing warnings .
by some 'open'. For labels and constructors, we do not report
if the two elements are from the same re-exported declaration.
Later, one could also interpret some attributes on value and
type declarations to silence the shadowing warnings. *)
let check_shadowing env = function
| `Constructor (Some (cda1, cda2))
when not (!same_constr env
cda1.cda_description.cstr_res
cda2.cda_description.cstr_res) ->
Some "constructor"
| `Label (Some (l1, l2))
when not (!same_constr env l1.lbl_res l2.lbl_res) ->
Some "label"
| `Value (Some _) -> Some "value"
| `Type (Some _) -> Some "type"
| `Module (Some _) | `Component (Some _) -> Some "module"
| `Module_type (Some _) -> Some "module type"
| `Class (Some _) -> Some "class"
| `Class_type (Some _) -> Some "class type"
| `Constructor _ | `Label _
| `Value None | `Type None | `Module None | `Module_type None
| `Class None | `Class_type None | `Component None ->
None
let empty = {
values = IdTbl.empty; constrs = TycompTbl.empty;
labels = TycompTbl.empty; types = IdTbl.empty;
modules = IdTbl.empty; modtypes = IdTbl.empty;
classes = IdTbl.empty; cltypes = IdTbl.empty;
summary = Env_empty; local_constraints = Path.Map.empty;
flags = 0;
functor_args = Ident.empty;
}
let in_signature b env =
let flags =
if b then env.flags lor in_signature_flag
else env.flags land (lnot in_signature_flag)
in
{env with flags}
let is_in_signature env = env.flags land in_signature_flag <> 0
let has_local_constraints env =
not (Path.Map.is_empty env.local_constraints)
let is_ident = function
Pident _ -> true
| Pdot _ | Papply _ -> false
let is_ext cda =
match cda.cda_description with
| {cstr_tag = Cstr_extension _} -> true
| _ -> false
let is_local_ext cda =
match cda.cda_description with
| {cstr_tag = Cstr_extension(p, _)} -> is_ident p
| _ -> false
let diff env1 env2 =
IdTbl.diff_keys env1.values env2.values @
TycompTbl.diff_keys is_local_ext env1.constrs env2.constrs @
IdTbl.diff_keys env1.modules env2.modules @
IdTbl.diff_keys env1.classes env2.classes
Functions for use in " wrap " parameters in IdTbl
let wrap_identity x = x
let wrap_value vda = Val_bound vda
let wrap_module mda = Mod_local mda
let components_of_module_maker' =
ref ((fun _ -> assert false) :
components_maker ->
(module_components_repr, module_components_failure) result)
let components_of_functor_appl' =
ref ((fun ~loc:_ ~f_path:_ ~f_comp:_ ~arg:_ _env -> assert false) :
loc:Location.t -> f_path:Path.t -> f_comp:functor_components ->
arg:Path.t -> t -> module_components)
let check_functor_application =
to be filled by
ref ((fun ~errors:_ ~loc:_
~lid_whole_app:_ ~f0_path:_ ~args:_
~arg_path:_ ~arg_mty:_ ~param_mty:_
_env
-> assert false) :
errors:bool -> loc:Location.t ->
lid_whole_app:Longident.t ->
f0_path:Path.t -> args:(Path.t * Types.module_type) list ->
arg_path:Path.t -> arg_mty:module_type -> param_mty:module_type ->
t -> unit)
let scrape_alias =
to be filled with Mtype.scrape_alias
ref ((fun _env _mty -> assert false) :
t -> Subst.Lazy.modtype -> Subst.Lazy.modtype)
let md md_type =
{md_type; md_attributes=[]; md_loc=Location.none
;md_uid = Uid.internal_not_actually_unique}
let rec print_address ppf = function
| Aunit cu -> Format.fprintf ppf "%s" (Compilation_unit.full_path_as_string cu)
| Alocal id -> Format.fprintf ppf "%s" (Ident.name id)
| Adot(a, pos) -> Format.fprintf ppf "%a.[%i]" print_address a pos
type address_head =
| AHunit of Compilation_unit.t
| AHlocal of Ident.t
let rec address_head = function
| Aunit cu -> AHunit cu
| Alocal id -> AHlocal id
| Adot (a, _) -> address_head a
module Current_unit_name : sig
val get : unit -> Compilation_unit.t option
val set : Compilation_unit.t option -> unit
val is : string -> bool
val is_ident : Ident.t -> bool
val is_path : Path.t -> bool
end = struct
let get () =
Compilation_unit.get_current ()
let set comp_unit =
Compilation_unit.set_current comp_unit
let get_name () =
Option.map Compilation_unit.name (get ())
let is name =
let current_name_string =
Option.map Compilation_unit.Name.to_string (get_name ())
in
Option.equal String.equal current_name_string (Some name)
let is_ident id =
Ident.is_global id && is (Ident.name id)
let is_path = function
| Pident id -> is_ident id
| Pdot _ | Papply _ -> false
end
let set_unit_name = Current_unit_name.set
let get_unit_name = Current_unit_name.get
(* Look up a module by identity.  A global ident from another compilation
   unit that is absent from the table is treated as a persistent module. *)
let find_same_module id tbl =
  match IdTbl.find_same id tbl with
  | x -> x
  | exception Not_found
    when Ident.is_global id && not (Current_unit_name.is_ident id) ->
      Mod_persistent

(* Look up a module by name.  An unknown name (other than the current
   unit's own name) resolves to a fresh persistent-module path. *)
let find_name_module ~mark name tbl =
  match IdTbl.find_name wrap_module ~mark name tbl with
  | x -> x
  | exception Not_found when not (Current_unit_name.is name) ->
      let path = Pident(Ident.create_persistent name) in
      path, Mod_persistent

(* Record persistent structure [id] in [env].  The addition is only
   "material" (reflected in the summary) when the name does not already
   resolve to a persistent module. *)
let add_persistent_structure id env =
  if not (Ident.is_global id) then invalid_arg "Env.add_persistent_structure";
  if Current_unit_name.is_ident id then env
  else begin
    (* [material] is true when adding the binding changes name resolution. *)
    let material =
      match
        IdTbl.find_name wrap_module ~mark:false (Ident.name id) env.modules
      with
      | exception Not_found | _, Mod_persistent -> false
      | _ -> true
    in
    let summary =
      if material then Env_persistent (env.summary, id)
      else env.summary
    in
    (* With -no-transparent-modules even immaterial additions are kept so
       the dependency is recorded. *)
    let modules =
      if material || not !Clflags.transparent_modules then
        IdTbl.add id Mod_persistent env.modules
      else
        env.modules
    in
    { env with modules; summary }
  end
(* Package the data needed to compute a module's components lazily; the
   actual work is done later by [components_of_module_maker]. *)
let components_of_module ~alerts ~uid env ps path addr mty shape =
  {
    alerts;
    uid;
    comps = Lazy_backtrack.create {
      cm_env = env;
      cm_prefixing_subst = ps;
      cm_path = path;
      cm_addr = addr;
      cm_mty = mty;
      cm_shape = shape;
    }
  }

(* Build the cached [module_data] for a compilation unit from its cmi.
   [freshen] controls whether the signature's type is rescoped to the
   unit path's scope. *)
let sign_of_cmi ~freshen { Persistent_env.Persistent_signature.cmi; _ } =
  let name = cmi.cmi_name in
  let sign = cmi.cmi_sign in
  let flags = cmi.cmi_flags in
  let id = Ident.create_persistent (Compilation_unit.name_as_string name) in
  let path = Pident id in
  (* Keep the last [Alerts] flag found, if any. *)
  let alerts =
    List.fold_left (fun acc -> function Alerts s -> s | _ -> acc)
      Misc.Stdlib.String.Map.empty
      flags
  in
  let md =
    { md_type = Mty_signature sign;
      md_loc = Location.none;
      md_attributes = [];
      md_uid = Uid.of_compilation_unit_id name;
    }
  in
  let mda_address = Lazy_backtrack.create_forced (Aunit name) in
  let mda_declaration =
    Subst.(Lazy.module_decl Make_local identity (Lazy.of_module_decl md))
  in
  let mda_shape =
    Shape.for_persistent_unit (name |> Compilation_unit.full_path_as_string)
  in
  let mda_components =
    let mty = Subst.Lazy.of_modtype (Mty_signature sign) in
    let mty =
      if freshen then
        Subst.Lazy.modtype (Subst.Rescope (Path.scope path))
          Subst.identity mty
      else mty
    in
    components_of_module ~alerts ~uid:md.md_uid
      empty Subst.identity
      path mda_address mty mda_shape
  in
  {
    mda_declaration;
    mda_components;
    mda_address;
    mda_shape;
  }

(* Reading a cmi freshens (rescopes) the signature; saving does not. *)
let read_sign_of_cmi = sign_of_cmi ~freshen:true
let save_sign_of_cmi = sign_of_cmi ~freshen:false
(* Global table caching the [module_data] of loaded compilation units. *)
let persistent_env : module_data Persistent_env.t ref =
  s_table Persistent_env.empty ()

(* Run [f x] with cmi loading disabled. *)
let without_cmis f x =
  Persistent_env.without_cmis !persistent_env f x

let imports () = Persistent_env.imports !persistent_env

let import_crcs ~source crcs =
  Persistent_env.import_crcs !persistent_env ~source crcs

let read_pers_mod modname filename =
  Persistent_env.read !persistent_env read_sign_of_cmi modname filename

let find_pers_mod name =
  Persistent_env.find !persistent_env read_sign_of_cmi name

let check_pers_mod ~loc name =
  Persistent_env.check !persistent_env read_sign_of_cmi ~loc name

let crc_of_unit name =
  Persistent_env.crc_of_unit !persistent_env read_sign_of_cmi name

let is_imported_opaque modname =
  Persistent_env.is_imported_opaque !persistent_env modname

let register_import_as_opaque modname =
  Persistent_env.register_import_as_opaque !persistent_env modname

(* Clear the per-declaration usage-tracking tables. *)
let reset_declaration_caches () =
  Types.Uid.Tbl.clear !value_declarations;
  Types.Uid.Tbl.clear !type_declarations;
  Types.Uid.Tbl.clear !module_declarations;
  Types.Uid.Tbl.clear !used_constructors;
  Types.Uid.Tbl.clear !used_labels;
  Types.Uid.Tbl.clear !uid_to_loc;
  ()

(* Full reset between compilations; optionally keeps loaded cmis. *)
let reset_cache ~preserve_persistent_env =
  Compilation_unit.set_current None;
  if not preserve_persistent_env then
    Persistent_env.clear !persistent_env;
  reset_declaration_caches ();
  ()

(* Lighter reset used by the toplevel: only forget failed cmi lookups. *)
let reset_cache_toplevel () =
  Persistent_env.clear_missing !persistent_env;
  reset_declaration_caches ();
  ()
(* Force the lazily-computed components; when cmi loading is disabled the
   forcing is logged instead of silently allowed. *)
let get_components_res c =
  match Persistent_env.can_load_cmis !persistent_env with
  | Persistent_env.Can_load_cmis ->
      Lazy_backtrack.force !components_of_module_maker' c.comps
  | Persistent_env.Cannot_load_cmis log ->
      Lazy_backtrack.force_logged log !components_of_module_maker' c.comps

(* As [get_components_res], degrading failures to an empty structure. *)
let get_components c =
  match get_components_res c with
  | Error _ -> empty_structure
  | Ok c -> c

(* Module type of the application [p1(p2)]: the functor's result type with
   the parameter (if named) substituted by [p2].  Results are cached per
   argument path in [fcomp_subst_cache]. *)
let modtype_of_functor_appl fcomp p1 p2 =
  match fcomp.fcomp_res with
  | Mty_alias _ as mty -> mty
  | mty ->
      try
        Hashtbl.find fcomp.fcomp_subst_cache p2
      with Not_found ->
        let scope = Path.scope (Papply(p1, p2)) in
        let mty =
          let subst =
            match fcomp.fcomp_arg with
            | Unit
            | Named (None, _) -> Subst.identity
            | Named (Some param, _) -> Subst.add_module param p2 Subst.identity
          in
          Subst.modtype (Rescope scope) subst mty
        in
        Hashtbl.add fcomp.fcomp_subst_cache p2 mty;
        mty

(* Run the functor-application check, skipping argument paths already
   recorded in [fcomp_cache]. *)
let check_functor_appl
    ~errors ~loc ~lid_whole_app ~f0_path ~args
    ~f_comp
    ~arg_path ~arg_mty ~param_mty
    env =
  if not (Hashtbl.mem f_comp.fcomp_cache arg_path) then
    !check_functor_application
      ~errors ~loc ~lid_whole_app ~f0_path ~args
      ~arg_path ~arg_mty ~param_mty
      env
(* The compilation-unit name carried by a global ident. *)
let modname_of_ident id = Ident.name id |> Compilation_unit.Name.of_string

(* Module data for ident [id]; persistent modules are fetched from the
   persistent-environment table. *)
let find_ident_module id env =
  match find_same_module id env.modules with
  | Mod_local data -> data
  | Mod_unbound _ -> raise Not_found
  | Mod_persistent -> find_pers_mod (id |> modname_of_ident)

(* Components of the module denoted by [path]; functor applications are
   delegated to the [components_of_functor_appl'] hook. *)
let rec find_module_components path env =
  match path with
  | Pident id -> (find_ident_module id env).mda_components
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      (NameMap.find s sc.comp_modules).mda_components
  | Papply(f_path, arg) ->
      let f_comp = find_functor_components f_path env in
      let loc = Location.(in_file !input_name) in
      !components_of_functor_appl' ~loc ~f_path ~f_comp ~arg env

(* As above but requiring a structure; [Not_found] on functors. *)
and find_structure_components path env =
  match get_components (find_module_components path env) with
  | Structure_comps c -> c
  | Functor_comps _ -> raise Not_found

(* As above but requiring a functor; [Not_found] on structures. *)
and find_functor_components path env =
  match get_components (find_module_components path env) with
  | Functor_comps f -> f
  | Structure_comps _ -> raise Not_found
(* Module declaration of [path]; raises [Not_found] when unbound. *)
let find_module path env =
  match path with
  | Pident id ->
      let data = find_ident_module id env in
      Subst.Lazy.force_module_decl data.mda_declaration
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      let data = NameMap.find s sc.comp_modules in
      Subst.Lazy.force_module_decl data.mda_declaration
  | Papply(p1, p2) ->
      let fc = find_functor_components p1 env in
      md (modtype_of_functor_appl fc p1 p2)

(* Lazy variant.  For an application, [alias] selects the raw functor
   result instead of performing the argument substitution. *)
let find_module_lazy ~alias path env =
  match path with
  | Pident id ->
      let data = find_ident_module id env in
      data.mda_declaration
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      let data = NameMap.find s sc.comp_modules in
      data.mda_declaration
  | Papply(p1, p2) ->
      let fc = find_functor_components p1 env in
      let md =
        if alias then md (fc.fcomp_res)
        else md (modtype_of_functor_appl fc p1 p2)
      in
      Subst.Lazy.of_module_decl md
(* Lookups by path for the remaining namespaces.  All raise [Not_found]
   when the path does not denote a bound item of that namespace; none of
   these namespaces can be the result of a functor application. *)
let find_value_full path env =
  match path with
  | Pident id -> begin
      match IdTbl.find_same id env.values with
      | Val_bound data -> data
      | Val_unbound _ -> raise Not_found
    end
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      NameMap.find s sc.comp_values
  | Papply _ -> raise Not_found

let find_type_full path env =
  match path with
  | Pident id -> IdTbl.find_same id env.types
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      NameMap.find s sc.comp_types
  | Papply _ -> raise Not_found

let find_modtype_lazy path env =
  match path with
  | Pident id -> (IdTbl.find_same id env.modtypes).mtda_declaration
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      (NameMap.find s sc.comp_modtypes).mtda_declaration
  | Papply _ -> raise Not_found

let find_modtype path env =
  Subst.Lazy.force_modtype_decl (find_modtype_lazy path env)

let find_class_full path env =
  match path with
  | Pident id -> IdTbl.find_same id env.classes
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      NameMap.find s sc.comp_classes
  | Papply _ -> raise Not_found

let find_cltype path env =
  match path with
  | Pident id -> (IdTbl.find_same id env.cltypes).cltda_declaration
  | Pdot(p, s) ->
      let sc = find_structure_components p env in
      (NameMap.find s sc.comp_cltypes).cltda_declaration
  | Papply _ -> raise Not_found

(* Convenience projections over the lookups above. *)
let find_value path env =
  (find_value_full path env).vda_description

let find_class path env =
  (find_class_full path env).clda_declaration

let find_ident_constructor id env =
  (TycompTbl.find_same id env.constrs).cda_description

let find_ident_label id env =
  TycompTbl.find_same id env.labels
(* Type data for the inlined-record type of a constructor; only valid for
   constructors that carry an inlined record declaration. *)
let type_of_cstr path = function
  | {cstr_inlined = Some decl; _} ->
      let labels =
        List.map snd (Datarepr.labels_of_type path decl)
      in
      begin match decl.type_kind with
      | Type_record (_, repr) ->
          {
            tda_declaration = decl;
            tda_descriptions = Type_record (labels, repr);
            tda_shape = Shape.leaf decl.type_uid;
          }
      | _ -> assert false
      end
  | _ -> assert false

(* Resolve [path] to its type data, handling each flavour of constructor
   type path returned by [Path.constructor_typath]. *)
let find_type_data path env =
  match Path.constructor_typath path with
  | Regular p -> begin
      (* Local constraints take precedence over the environment proper. *)
      match Path.Map.find p env.local_constraints with
      | decl ->
          {
            tda_declaration = decl;
            tda_descriptions = Type_abstract;
            tda_shape = Shape.leaf decl.type_uid;
          }
      | exception Not_found -> find_type_full p env
    end
  | Cstr (ty_path, s) ->
      let tda =
        try find_type_full ty_path env
        with Not_found -> assert false
      in
      let cstr =
        begin match tda.tda_descriptions with
        | Type_variant (cstrs, _) -> begin
            try
              List.find (fun cstr -> cstr.cstr_name = s) cstrs
            with Not_found -> assert false
          end
        | Type_record _ | Type_abstract | Type_open -> assert false
        end
      in
      type_of_cstr path cstr
  | LocalExt id ->
      let cstr =
        try (TycompTbl.find_same id env.constrs).cda_description
        with Not_found -> assert false
      in
      type_of_cstr path cstr
  | Ext (mod_path, s) ->
      let comps =
        try find_structure_components mod_path env
        with Not_found -> assert false
      in
      let cstrs =
        try NameMap.find s comps.comp_constrs
        with Not_found -> assert false
      in
      (* Of the same-named constructors, exactly one extension constructor
         is expected here. *)
      let exts = List.filter is_ext cstrs in
      match exts with
      | [cda] -> type_of_cstr path cda.cda_description
      | _ -> assert false

let find_type p env =
  (find_type_data p env).tda_declaration
let find_type_descrs p env =
  (find_type_data p env).tda_descriptions
(* Resolve runtime addresses.  Addresses are stored lazily; forcing turns
   projections into [Adot] chains and follows module aliases. *)
let rec find_module_address path env =
  match path with
  | Pident id -> find_ident_module_address id env
  | Pdot(p, s) ->
      let c = find_structure_components p env in
      get_address (NameMap.find s c.comp_modules).mda_address
  | Papply _ -> raise Not_found

and find_ident_module_address id env =
  get_address (find_ident_module id env).mda_address

and force_address = function
  | Projection { parent; pos } -> Adot(get_address parent, pos)
  | ModAlias { env; path } -> find_module_address path env

and get_address a =
  Lazy_backtrack.force force_address a

let find_value_address path env =
  get_address (find_value_full path env).vda_address

let find_class_address path env =
  get_address (find_class_full path env).clda_address

(* First constructor in the list that carries an address (only extension
   constructors are given addresses; see [components_of_module_maker]). *)
let rec get_constrs_address = function
  | [] -> raise Not_found
  | cda :: rest ->
      match cda.cda_address with
      | None -> get_constrs_address rest
      | Some a -> get_address a

let find_constructor_address path env =
  match path with
  | Pident id -> begin
      let cda = TycompTbl.find_same id env.constrs in
      match cda.cda_address with
      | None -> raise Not_found
      | Some addr -> get_address addr
    end
  | Pdot(p, s) ->
      let c = find_structure_components p env in
      get_constrs_address (NameMap.find s c.comp_constrs)
  | Papply _ ->
      raise Not_found

(* Declaration of the "#"-prefixed companion type of [path]. *)
let find_hash_type path env =
  match path with
  | Pident id ->
      let name = "#" ^ Ident.name id in
      let _, tda =
        IdTbl.find_name wrap_identity ~mark:false name env.types
      in
      tda.tda_declaration
  | Pdot(p, s) ->
      let c = find_structure_components p env in
      let name = "#" ^ s in
      let tda = NameMap.find name c.comp_types in
      tda.tda_declaration
  | Papply _ ->
      raise Not_found
(* Registry of the probe names encountered during this compilation. *)
let probes = ref String.Set.empty

(* Forget every registered probe name. *)
let reset_probes () = probes := String.Set.empty

(* Record that a probe named [p] exists. *)
let add_probe p = probes := String.Set.add p !probes

(* Has a probe named [p] been recorded? *)
let has_probe p = String.Set.mem p !probes
(* Shape of the item [id] in namespace [ns].  Global module idents not
   bound locally resolve to a persistent-unit shape. *)
let find_shape env (ns : Shape.Sig_component_kind.t) id =
  match ns with
  | Type ->
      (IdTbl.find_same id env.types).tda_shape
  | Extension_constructor ->
      (TycompTbl.find_same id env.constrs).cda_shape
  | Value ->
      begin match IdTbl.find_same id env.values with
      | Val_bound x -> x.vda_shape
      | Val_unbound _ -> raise Not_found
      end
  | Module ->
      begin match IdTbl.find_same id env.modules with
      | Mod_local { mda_shape; _ } -> mda_shape
      | Mod_persistent -> Shape.for_persistent_unit (Ident.name id)
      | Mod_unbound _ ->
          assert false
      | exception Not_found
        when Ident.is_global id && not (Current_unit_name.is_ident id) ->
          Shape.for_persistent_unit (Ident.name id)
      end
  | Module_type ->
      (IdTbl.find_same id env.modtypes).mtda_shape
  | Class ->
      (IdTbl.find_same id env.classes).clda_shape
  | Class_type ->
      (IdTbl.find_same id env.cltypes).cltda_shape

let shape_of_path ~namespace env =
  Shape.of_path ~namespace ~find_shape:(find_shape env)

(* Use the provided shape, or default to a leaf with uid [uid]. *)
let shape_or_leaf uid = function
  | None -> Shape.leaf uid
  | Some shape -> shape

(* Compilation units the current unit needs at link time. *)
let required_globals = s_ref []
let reset_required_globals () = required_globals := []
let get_required_globals () = !required_globals
(* Add [cu] once, preserving physical-list deduplication. *)
let add_required_unit cu =
  if not (List.exists (Compilation_unit.equal cu) !required_globals)
  then required_globals := cu :: !required_globals
(* Only global idents whose address is rooted in another unit are
   recorded; transparent-modules mode skips the recording. *)
let add_required_ident id env =
  if not !Clflags.transparent_modules && Ident.is_global id then
    let address = find_ident_module_address id env in
    match address_head address with
    | AHlocal _ -> ()
    | AHunit cu -> add_required_unit cu
let add_required_global path env =
  add_required_ident (Path.head path) env
(* Normalize a module path: normalize sub-paths, then expand aliases via
   [expand_module_path].  In [lax] mode lookup failures are tolerated and
   global idents are returned unexpanded. *)
let rec normalize_module_path lax env = function
  | Pident id as path when lax && Ident.is_global id ->
      path (* fast path (avoids lookup) *)
  | Pdot (p, s) as path ->
      let p' = normalize_module_path lax env p in
      if p == p' then expand_module_path lax env path
      else expand_module_path lax env (Pdot(p', s))
  | Papply (p1, p2) as path ->
      (* Arguments are always normalized laxly. *)
      let p1' = normalize_module_path lax env p1 in
      let p2' = normalize_module_path true env p2 in
      if p1 == p1' && p2 == p2' then expand_module_path lax env path
      else expand_module_path lax env (Papply(p1', p2'))
  | Pident _ as path ->
      expand_module_path lax env path

(* Expand one level of module alias, recording the original global as a
   required unit when the expansion escapes it. *)
and expand_module_path lax env path =
  try match find_module_lazy ~alias:true path env with
    {mdl_type=MtyL_alias path1} ->
      let path' = normalize_module_path lax env path1 in
      if not (lax || !Clflags.transparent_modules) then begin
        let id = Path.head path in
        if Ident.is_global_or_predef id && not (Ident.same id (Path.head path'))
        then add_required_global (Pident id) env
      end;
      path'
  | _ -> path
  with Not_found when lax
  || (match path with Pident id -> not (Ident.is_global id) | _ -> true) ->
    path
(* Public entry point: [oloc = None] selects lax mode; otherwise failures
   are reported as [Missing_module] at the given location. *)
let normalize_module_path oloc env path =
  try normalize_module_path (oloc = None) env path
  with Not_found ->
    match oloc with None -> assert false
    | Some loc ->
        error (Missing_module(loc, path,
                              normalize_module_path true env path))

(* Normalize only the module prefix of a dotted path. *)
let normalize_path_prefix oloc env path =
  match path with
    Pdot(p, s) ->
      let p2 = normalize_module_path oloc env p in
      if p == p2 then path else Pdot(p2, s)
  | Pident _ ->
      path
  | Papply _ ->
      assert false
let normalize_type_path oloc env path =
  (* Inlined version of Path.is_constructor_typath:
     constructor type paths (i.e. paths pointing to an inline
     record argument of a constructor) are built as a regular
     type path followed by a capitalized constructor name. *)
  match path with
  | Pident _ ->
      path
  | Pdot(p, s) ->
      let p2 =
        if Path.is_uident s && not (Path.is_uident (Path.last p)) then
          (* Constructor type path: only normalize the prefix's prefix. *)
          normalize_path_prefix oloc env p
        else
          normalize_module_path oloc env p
      in
      if p == p2 then path else Pdot (p2, s)
  | Papply _ ->
      assert false
(* Normalize a module type path: normalize its prefix, then repeatedly
   expand definitions of the form [module type S = T]. *)
let rec normalize_modtype_path env path =
  let path = normalize_path_prefix None env path in
  expand_modtype_path env path

and expand_modtype_path env path =
  match (find_modtype_lazy path env).mtdl_type with
  | Some (MtyL_ident path) -> normalize_modtype_path env path
  | _ | exception Not_found -> path

(* From here on [find_module_lazy] always substitutes functor results. *)
let find_module_lazy path env =
  find_module_lazy ~alias:false path env

(* Manifest of [path], only when expansion is legitimate: public types,
   non-abstract types, or manifests with a constrained row. *)
let find_type_expansion path env =
  let decl = find_type path env in
  match decl.type_manifest with
  | Some body when decl.type_private = Public
               || decl.type_kind <> Type_abstract
               || Btype.has_constr_row body ->
      (decl.type_params, body, decl.type_expansion_scope)
  | _ -> raise Not_found

(* As [find_type_expansion] but without the privacy/kind restriction. *)
let find_type_expansion_opt path env =
  let decl = find_type path env in
  match decl.type_manifest with
  | Some body ->
      (decl.type_params, body, decl.type_expansion_scope)
  | _ -> raise Not_found

let find_modtype_expansion_lazy path env =
  match (find_modtype_lazy path env).mtdl_type with
  | None -> raise Not_found
  | Some mty -> mty

let find_modtype_expansion path env =
  Subst.Lazy.force_modtype (find_modtype_expansion_lazy path env)
(* Is [path] rooted at a functor argument of [env]?  Applied paths are
   always treated as functor arguments. *)
let rec is_functor_arg path env =
  match path with
  | Pident id -> begin
      match Ident.find_same id env.functor_args with
      | _ -> true
      | exception Not_found -> false
    end
  | Pdot (prefix, _) -> is_functor_arg prefix env
  | Papply _ -> true
(* Return a function that rebuilds an environment with the type of every
   bound value replaced by a memoized copy.  The returned closure may only
   be applied to an environment sharing [env0]'s value table. *)
let make_copy_of_types env0 =
  let memo = Hashtbl.create 16 in
  (* Copy a type expression at most once, keyed by its id. *)
  let copy t =
    try
      Hashtbl.find memo (get_id t)
    with Not_found ->
      let t2 = Subst.type_expr Subst.identity t in
      Hashtbl.add memo (get_id t) t2;
      t2
  in
  let f = function
    | Val_unbound _ as entry -> entry
    | Val_bound vda ->
        let desc = vda.vda_description in
        let desc = { desc with val_type = copy desc.val_type } in
        Val_bound { vda with vda_description = desc }
  in
  let values =
    IdTbl.map f env0.values
  in
  (fun env ->
     (* The copied table is only valid for the environment it came from. *)
     if env.values != env0.values then fatal_error "Env.make_copy_of_types";
     {env with values; summary = Env_copy_types env.summary}
  )
(* A pending traversal step queued by [iter_env]. *)
type iter_cont = unit -> unit
let iter_env_cont = ref []

(* Decide whether [iter_env] should descend into [mty]: aliases to
   persistent modules that were never looked up are skipped so that
   iteration does not force the loading of new cmis. *)
let rec scrape_alias_for_visit env mty =
  let open Subst.Lazy in
  match mty with
  | MtyL_alias path -> begin
      match path with
      | Pident id
        when Ident.is_global id
          && not (Persistent_env.looked_up !persistent_env
                    (id |> modname_of_ident)) ->
          false
      | path -> (* PR#6600: find_module may raise Not_found *)
          try
            scrape_alias_for_visit env (find_module_lazy path env).mdl_type
          with Not_found -> false
    end
  | _ -> true
(* Apply [f] to the components (selected by [proj1]/[proj2]) reachable
   from [env], including those inside submodules.  Descent into a
   submodule is not performed immediately: a continuation is queued in
   [iter_env_cont] and run later via [run_iter_cont]. *)
let iter_env wrap proj1 proj2 f env () =
  IdTbl.iter wrap (fun id x -> f (Pident id) x) (proj1 env);
  let rec iter_components path path' mcomps =
    let cont () =
      (* Skip modules whose (unforced) type is an alias we must not
         visit; see [scrape_alias_for_visit]. *)
      let visit =
        match Lazy_backtrack.get_arg mcomps.comps with
        | None -> true
        | Some { cm_mty; _ } ->
            scrape_alias_for_visit env cm_mty
      in
      if not visit then () else
        match get_components mcomps with
          Structure_comps comps ->
            NameMap.iter
              (fun s d -> f (Pdot (path, s)) (Pdot (path', s), d))
              (proj2 comps);
            NameMap.iter
              (fun s mda ->
                 iter_components
                   (Pdot (path, s)) (Pdot (path', s)) mda.mda_components)
              comps.comp_modules
        | Functor_comps _ -> ()
    in iter_env_cont := (path, cont) :: !iter_env_cont
  in
  IdTbl.iter wrap_module
    (fun id (path, entry) ->
       match entry with
       | Mod_unbound _ -> ()
       | Mod_local data ->
           iter_components (Pident id) path data.mda_components
       | Mod_persistent ->
           (* Only persistent modules already in the cache are visited. *)
           let modname = modname_of_ident id in
           match Persistent_env.find_in_cache !persistent_env modname with
           | None -> ()
           | Some data ->
               iter_components (Pident id) path data.mda_components)
    env.modules

(* Run the queued continuations in [l] and return the continuations they
   queued in turn. *)
let run_iter_cont l =
  iter_env_cont := [];
  List.iter (fun c -> c ()) l;
  let cont = List.rev !iter_env_cont in
  iter_env_cont := [];
  cont

let iter_types f =
  iter_env wrap_identity (fun env -> env.types) (fun sc -> sc.comp_types)
    (fun p1 (p2, tda) -> f p1 (p2, tda.tda_declaration))

(* Fast, conservative physical-equality test on the type/module tables. *)
let same_types env1 env2 =
  env1.types == env2.types && env1.modules == env2.modules

(* Names of all persistent modules consulted so far. *)
let used_persistent () =
  Persistent_env.fold !persistent_env
    (fun s _m r -> Compilation_unit.Name.Set.add s r)
    Compilation_unit.Name.Set.empty
(* In the components of module data [mda] at path [p], find the component
   named [s] via projection [proj]; yields zero or one [(path, item)]. *)
let find_all_comps wrap proj s (p, mda) =
  match get_components mda.mda_components with
    Functor_comps _ -> []
  | Structure_comps comps ->
      try
        let c = NameMap.find s (proj comps) in
        [Pdot(p,s), wrap c]
      with Not_found -> []

(* All locally-bound module data reachable under the name spelled by
   [path], i.e. the bindings the path's name shadows. *)
let rec find_shadowed_comps path env =
  match path with
  | Pident id ->
      List.filter_map
        (fun (p, data) ->
           match data with
           | Mod_local x -> Some (p, x)
           | Mod_unbound _ | Mod_persistent -> None)
        (IdTbl.find_all wrap_module (Ident.name id) env.modules)
  | Pdot (p, s) ->
      let l = find_shadowed_comps p env in
      let l' =
        List.map
          (find_all_comps wrap_identity
             (fun comps -> comps.comp_modules) s) l
      in
      List.flatten l'
  | Papply _ -> []

(* Same idea for an arbitrary namespace selected by [proj1]/[proj2]. *)
let find_shadowed wrap proj1 proj2 path env =
  match path with
    Pident id ->
      IdTbl.find_all wrap (Ident.name id) (proj1 env)
  | Pdot (p, s) ->
      let l = find_shadowed_comps p env in
      let l' = List.map (find_all_comps wrap proj2 s) l in
      List.flatten l'
  | Papply _ -> []

let find_shadowed_types path env =
  List.map fst
    (find_shadowed wrap_identity
       (fun env -> env.types) (fun comps -> comps.comp_types) path env)
(* Pair each item of signature [sg] with its path under [root], extending
   [prefixing_sub] as we go so later items refer to earlier ones through
   their prefixed paths. *)
let prefix_idents root prefixing_sub sg =
  let open Subst.Lazy in
  let rec prefix_idents root items_and_paths prefixing_sub =
    function
    | [] -> (List.rev items_and_paths, prefixing_sub)
    (* Values never appear in types, so the substitution is unchanged. *)
    | SigL_value(id, _, _) as item :: rem ->
        let p = Pdot(root, Ident.name id) in
        prefix_idents root
          ((item, p) :: items_and_paths) prefixing_sub rem
    | SigL_type(id, td, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        prefix_idents root
          ((SigL_type(id, td, rs, vis), p) :: items_and_paths)
          (Subst.add_type id p prefixing_sub)
          rem
    (* Extensions extend the type substitution too (inlined records). *)
    | SigL_typext(id, ec, es, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        prefix_idents root
          ((SigL_typext(id, ec, es, vis), p) :: items_and_paths)
          (Subst.add_type id p prefixing_sub)
          rem
    | SigL_module(id, pres, md, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        prefix_idents root
          ((SigL_module(id, pres, md, rs, vis), p) :: items_and_paths)
          (Subst.add_module id p prefixing_sub)
          rem
    | SigL_modtype(id, mtd, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        prefix_idents root
          ((SigL_modtype(id, mtd, vis), p) :: items_and_paths)
          (Subst.add_modtype id (Mty_ident p) prefixing_sub)
          rem
    (* Classes and class types contribute to the type substitution. *)
    | SigL_class(id, cd, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        prefix_idents root
          ((SigL_class(id, cd, rs, vis), p) :: items_and_paths)
          (Subst.add_type id p prefixing_sub)
          rem
    | SigL_class_type(id, ctd, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        prefix_idents root
          ((SigL_class_type(id, ctd, rs, vis), p) :: items_and_paths)
          (Subst.add_type id p prefixing_sub)
          rem
  in
  let sg = Subst.Lazy.force_signature_once sg in
  prefix_idents root [] prefixing_sub sg
(* Cons [decl] onto the bucket bound to [id] in [tbl], creating the
   bucket when the name is not yet present. *)
let add_to_tbl id decl tbl =
  let bucket = Option.value (NameMap.find_opt id tbl) ~default:[] in
  NameMap.add id (decl :: bucket) tbl
(* Primitives have no runtime address; forcing one raises this. *)
let primitive_address_error =
  Invalid_argument "Primitives don't have addresses"

(* Address builders used when storing fresh declarations in the env. *)
let value_declaration_address (_ : t) id decl =
  match decl.val_kind with
  | Val_prim _ -> Lazy_backtrack.create_failed primitive_address_error
  | _ -> Lazy_backtrack.create_forced (Alocal id)

let extension_declaration_address (_ : t) id (_ : extension_constructor) =
  Lazy_backtrack.create_forced (Alocal id)

let class_declaration_address (_ : t) id (_ : class_declaration) =
  Lazy_backtrack.create_forced (Alocal id)

(* An absent module must be an alias; its address is the alias target's,
   resolved lazily in [env]. *)
let module_declaration_address env id presence md =
  match presence with
  | Mp_absent -> begin
      let open Subst.Lazy in
      match md.mdl_type with
      | MtyL_alias path -> Lazy_backtrack.create (ModAlias {env; path})
      | _ -> assert false
    end
  | Mp_present ->
      Lazy_backtrack.create_forced (Alocal id)
(* Characters permitted inside an OCaml identifier: ASCII letters,
   digits, underscore, apostrophe, and the Latin-1 letter ranges
   (excluding the multiplication and division signs 215 and 247). *)
let is_identchar c =
  match c with
  | 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '\'' -> true
  | '\192'..'\214' | '\216'..'\246' | '\248'..'\255' -> true
  | _ -> false
(* Compute the components of a module from the data captured by
   [components_of_module]:
   - a signature yields [Structure_comps], one table per namespace, each
     item rewritten by the prefixing substitution and given a projection
     address and a projected shape;
   - a functor yields [Functor_comps] with empty application caches;
   - abstract module types and aliases have no components (Error). *)
let rec components_of_module_maker
    {cm_env; cm_prefixing_subst;
     cm_path; cm_addr; cm_mty; cm_shape} : _ result =
  match !scrape_alias cm_env cm_mty with
    MtyL_signature sg ->
      let c =
        { comp_values = NameMap.empty;
          comp_constrs = NameMap.empty;
          comp_labels = NameMap.empty; comp_types = NameMap.empty;
          comp_modules = NameMap.empty; comp_modtypes = NameMap.empty;
          comp_classes = NameMap.empty; comp_cltypes = NameMap.empty }
      in
      let items_and_paths, sub =
        prefix_idents cm_path cm_prefixing_subst sg
      in
      let env = ref cm_env in
      (* Runtime field positions are assigned in signature order. *)
      let pos = ref 0 in
      let next_address () =
        let addr : address_unforced =
          Projection { parent = cm_addr; pos = !pos }
        in
        incr pos;
        Lazy_backtrack.create addr
      in
      List.iter (fun ((item : Subst.Lazy.signature_item), path) ->
        match item with
          SigL_value(id, decl, _) ->
            let decl' = Subst.value_description sub decl in
            (* Primitives consume no runtime slot. *)
            let addr =
              match decl.val_kind with
              | Val_prim _ -> Lazy_backtrack.create_failed primitive_address_error
              | _ -> next_address ()
            in
            let vda_shape = Shape.proj cm_shape (Shape.Item.value id) in
            let vda =
              { vda_description = decl'; vda_address = addr;
                vda_mode = Value_mode.global; vda_shape }
            in
            c.comp_values <- NameMap.add (Ident.name id) vda c.comp_values;
        | SigL_type(id, decl, _, _) ->
            let final_decl = Subst.type_declaration sub decl in
            Btype.set_static_row_name final_decl
              (Subst.type_path sub (Path.Pident id));
            (* Build the constructor / label descriptions alongside. *)
            let descrs =
              match decl.type_kind with
              | Type_variant (_,repr) ->
                  let cstrs = List.map snd
                    (Datarepr.constructors_of_type path final_decl
                       ~current_unit:(get_unit_name ()))
                  in
                  List.iter
                    (fun descr ->
                      let cda_shape = Shape.leaf descr.cstr_uid in
                      let cda = {
                        cda_description = descr;
                        cda_address = None;
                        cda_shape }
                      in
                      c.comp_constrs <-
                        add_to_tbl descr.cstr_name cda c.comp_constrs
                    ) cstrs;
                  Type_variant (cstrs, repr)
              | Type_record (_, repr) ->
                  let lbls = List.map snd
                    (Datarepr.labels_of_type path final_decl)
                  in
                  List.iter
                    (fun descr ->
                      c.comp_labels <-
                        add_to_tbl descr.lbl_name descr c.comp_labels)
                    lbls;
                  Type_record (lbls, repr)
              | Type_abstract -> Type_abstract
              | Type_open -> Type_open
            in
            let shape = Shape.proj cm_shape (Shape.Item.type_ id) in
            let tda =
              { tda_declaration = final_decl;
                tda_descriptions = descrs;
                tda_shape = shape; }
            in
            c.comp_types <- NameMap.add (Ident.name id) tda c.comp_types;
            env := store_type_infos ~tda_shape:shape id decl !env
        | SigL_typext(id, ext, _, _) ->
            let ext' = Subst.extension_constructor sub ext in
            let descr =
              Datarepr.extension_descr ~current_unit:(get_unit_name ()) path
                ext'
            in
            (* Unlike variant constructors, extensions take a slot. *)
            let addr = next_address () in
            let cda_shape =
              Shape.proj cm_shape (Shape.Item.extension_constructor id)
            in
            let cda =
              { cda_description = descr; cda_address = Some addr; cda_shape }
            in
            c.comp_constrs <- add_to_tbl (Ident.name id) cda c.comp_constrs
        | SigL_module(id, pres, md, _, _) ->
            let md' =
              Subst.Lazy.module_decl
                (Subst.Rescope (Path.scope cm_path)) sub md
            in
            let addr =
              match pres with
              | Mp_absent -> begin
                  match md.mdl_type with
                  | MtyL_alias path ->
                      Lazy_backtrack.create (ModAlias {env = !env; path})
                  | _ -> assert false
                end
              | Mp_present -> next_address ()
            in
            let alerts =
              Builtin_attributes.alerts_of_attrs md.mdl_attributes
            in
            let shape = Shape.proj cm_shape (Shape.Item.module_ id) in
            let comps =
              components_of_module ~alerts ~uid:md.mdl_uid !env
                sub path addr md.mdl_type shape
            in
            let mda =
              { mda_declaration = md';
                mda_components = comps;
                mda_address = addr;
                mda_shape = shape; }
            in
            c.comp_modules <-
              NameMap.add (Ident.name id) mda c.comp_modules;
            env :=
              store_module ~update_summary:false ~check:None
                id addr pres md shape !env
        | SigL_modtype(id, decl, _) ->
            let final_decl =
              Subst.Lazy.modtype_decl (Rescope (Path.scope cm_path))
                sub decl
            in
            let shape = Shape.proj cm_shape (Shape.Item.module_type id) in
            let mtda =
              { mtda_declaration = final_decl;
                mtda_shape = shape; }
            in
            c.comp_modtypes <-
              NameMap.add (Ident.name id) mtda c.comp_modtypes;
            env := store_modtype ~update_summary:false id decl shape !env
        | SigL_class(id, decl, _, _) ->
            let decl' = Subst.class_declaration sub decl in
            let addr = next_address () in
            let shape = Shape.proj cm_shape (Shape.Item.class_ id) in
            let clda =
              { clda_declaration = decl';
                clda_address = addr;
                clda_shape = shape; }
            in
            c.comp_classes <- NameMap.add (Ident.name id) clda c.comp_classes
        | SigL_class_type(id, decl, _, _) ->
            let decl' = Subst.cltype_declaration sub decl in
            let shape = Shape.proj cm_shape (Shape.Item.class_type id) in
            let cltda = { cltda_declaration = decl'; cltda_shape = shape } in
            c.comp_cltypes <-
              NameMap.add (Ident.name id) cltda c.comp_cltypes)
        items_and_paths;
      Ok (Structure_comps c)
  | MtyL_functor(arg, ty_res) ->
      let sub = cm_prefixing_subst in
      let scoping = Subst.Rescope (Path.scope cm_path) in
      let open Subst.Lazy in
      Ok (Functor_comps {
        fcomp_arg =
          (match arg with
           | Unit -> Unit
           | Named (param, ty_arg) ->
               Named (param, force_modtype (modtype scoping sub ty_arg)));
        fcomp_res = force_modtype (modtype scoping sub ty_res);
        fcomp_shape = cm_shape;
        fcomp_cache = Hashtbl.create 17;
        fcomp_subst_cache = Hashtbl.create 17 })
  | MtyL_ident _ -> Error No_components_abstract
  | MtyL_alias p -> Error (No_components_alias p)
(* Arrange for the warning [warn name] to be emitted at the end of the
   compilation if the declaration [uid] of [id] is never marked as used
   (via the callback registered in [tbl]). *)
and check_usage loc id uid warn tbl =
  if not loc.Location.loc_ghost &&
     Uid.for_actual_declaration uid &&
     Warnings.is_active (warn "")
  then begin
    let name = Ident.name id in
    if Types.Uid.Tbl.mem tbl uid then ()
    else let used = ref false in
      Types.Uid.Tbl.add tbl uid (fun () -> used := true);
      (* Names starting with '_' or '#' are exempt from the warning. *)
      if not (name = "" || name.[0] = '_' || name.[0] = '#')
      then
        !add_delayed_check_forward
          (fun () -> if not !used then Location.prerr_warning loc (warn name))
  end;

(* Operator-like value names (whose first character is not an identifier
   character) must not contain '#'. *)
and check_value_name name loc =
  if String.length name > 0 && not (is_identchar name.[0]) then
    for i = 1 to String.length name - 1 do
      if name.[i] = '#' then
        error (Illegal_value_name(loc, name))
    done
(* Bind value declaration [decl] to [id] at address [addr] with mode
   [mode]; [check] optionally installs an unused-declaration warning. *)
and store_value ?check mode id addr decl shape env =
  check_value_name (Ident.name id) decl.val_loc;
  Builtin_attributes.mark_alerts_used decl.val_attributes;
  Option.iter
    (fun f -> check_usage decl.val_loc id decl.val_uid f !value_declarations)
    check;
  let vda =
    { vda_description = decl;
      vda_address = addr;
      vda_mode = mode;
      vda_shape = shape }
  in
  { env with
    values = IdTbl.add id (Val_bound vda) env.values;
    summary = Env_value(env.summary, id, decl) }
(* Bind constructor [cstr] of type [type_id], installing the
   Unused_constructor warning when [check] is requested. *)
and store_constructor ~check type_decl type_id cstr_id cstr env =
  if check && not type_decl.type_loc.Location.loc_ghost
     && Warnings.is_active (Warnings.Unused_constructor ("", Unused))
  then begin
    let ty_name = Ident.name type_id in
    let name = cstr.cstr_name in
    let loc = cstr.cstr_loc in
    let k = cstr.cstr_uid in
    let priv = type_decl.type_private in
    if not (Types.Uid.Tbl.mem !used_constructors k) then begin
      let used = constructor_usages () in
      Types.Uid.Tbl.add !used_constructors k
        (add_constructor_usage used);
      (* Types whose name starts with '_' opt out of the warning. *)
      if not (ty_name = "" || ty_name.[0] = '_')
      then
        !add_delayed_check_forward
          (fun () ->
            Option.iter
              (fun complaint ->
                if not (is_in_signature env) then
                  Location.prerr_warning loc
                    (Warnings.Unused_constructor(name, complaint)))
              (constructor_usage_complaint ~rebind:false priv used));
    end;
  end;
  Builtin_attributes.mark_alerts_used cstr.cstr_attributes;
  Builtin_attributes.mark_warn_on_literal_pattern_used
    cstr.cstr_attributes;
  let cda_shape = Shape.leaf cstr.cstr_uid in
  { env with
    constrs =
      TycompTbl.add cstr_id
        { cda_description = cstr; cda_address = None; cda_shape } env.constrs;
  }

(* Bind record label [lbl]; analogous to [store_constructor], driving the
   Unused_field warning instead. *)
and store_label ~check type_decl type_id lbl_id lbl env =
  if check && not type_decl.type_loc.Location.loc_ghost
     && Warnings.is_active (Warnings.Unused_field ("", Unused))
  then begin
    let ty_name = Ident.name type_id in
    let priv = type_decl.type_private in
    let name = lbl.lbl_name in
    let loc = lbl.lbl_loc in
    let mut = lbl.lbl_mut in
    let k = lbl.lbl_uid in
    if not (Types.Uid.Tbl.mem !used_labels k) then
      let used = label_usages () in
      Types.Uid.Tbl.add !used_labels k
        (add_label_usage used);
      (* '_'-prefixed type or label names opt out of the warning. *)
      if not (ty_name = "" || ty_name.[0] = '_' || name.[0] = '_')
      then !add_delayed_check_forward
          (fun () ->
            Option.iter
              (fun complaint ->
                if not (is_in_signature env) then
                  Location.prerr_warning
                    loc (Warnings.Unused_field(name, complaint)))
              (label_usage_complaint priv mut used))
  end;
  Builtin_attributes.mark_alerts_used lbl.lbl_attributes;
  { env with
    labels = TycompTbl.add lbl_id lbl env.labels;
  }
(* Bind type declaration [info] to [id], together with its constructors
   or labels, installing unused-declaration checks when requested. *)
and store_type ~check id info shape env =
  let loc = info.type_loc in
  if check then
    check_usage loc id info.type_uid
      (fun s -> Warnings.Unused_type_declaration s)
      !type_declarations;
  (* Compute the descriptions and thread the constructors/labels through
     the environment in one pass. *)
  let descrs, env =
    let path = Pident id in
    match info.type_kind with
    | Type_variant (_,repr) ->
        let constructors = Datarepr.constructors_of_type path info
                             ~current_unit:(get_unit_name ())
        in
        Type_variant (List.map snd constructors, repr),
        List.fold_left
          (fun env (cstr_id, cstr) ->
            store_constructor ~check info id cstr_id cstr env)
          env constructors
    | Type_record (_, repr) ->
        let labels = Datarepr.labels_of_type path info in
        Type_record (List.map snd labels, repr),
        List.fold_left
          (fun env (lbl_id, lbl) ->
            store_label ~check info id lbl_id lbl env)
          env labels
    | Type_abstract -> Type_abstract, env
    | Type_open -> Type_open, env
  in
  let tda =
    { tda_declaration = info;
      tda_descriptions = descrs;
      tda_shape = shape }
  in
  Builtin_attributes.mark_alerts_used info.type_attributes;
  { env with
    types = IdTbl.add id tda env.types;
    summary = Env_type(env.summary, id, info) }

(* Simplified variant of [store_type]: records the declaration with
   abstract descriptions and no constructor/label entries. *)
and store_type_infos ~tda_shape id info env =
  let tda =
    {
      tda_declaration = info;
      tda_descriptions = Type_abstract;
      tda_shape
    }
  in
  { env with
    types = IdTbl.add id tda env.types;
    summary = Env_type(env.summary, id, info) }
(* Bind extension constructor [ext] under [id] at address [addr].
   When [check] holds and the Unused_extension warning is active, register a
   delayed check keyed by the constructor's uid; [~rebind] is forwarded to
   [constructor_usage_complaint] to adjust the complaint for rebindings. *)
and store_extension ~check ~rebind id addr ext shape env =
  let loc = ext.ext_loc in
  let cstr =
    Datarepr.extension_descr ~current_unit:(get_unit_name ()) (Pident id) ext
  in
  let cda =
    { cda_description = cstr;
      cda_address = Some addr;
      cda_shape = shape }
  in
  Builtin_attributes.mark_alerts_used ext.ext_attributes;
  Builtin_attributes.mark_alerts_used cstr.cstr_attributes;
  Builtin_attributes.mark_warn_on_literal_pattern_used cstr.cstr_attributes;
  if check && not loc.Location.loc_ghost &&
     Warnings.is_active (Warnings.Unused_extension ("", false, Unused))
  then begin
    let priv = ext.ext_private in
    (* Distinguish exceptions from other extensions in the warning text. *)
    let is_exception = Path.same ext.ext_type_path Predef.path_exn in
    let name = cstr.cstr_name in
    let k = cstr.cstr_uid in
    if not (Types.Uid.Tbl.mem !used_constructors k) then begin
      let used = constructor_usages () in
      Types.Uid.Tbl.add !used_constructors k
        (add_constructor_usage used);
      !add_delayed_check_forward
        (fun () ->
           Option.iter
             (fun complaint ->
                if not (is_in_signature env) then
                  Location.prerr_warning loc
                    (Warnings.Unused_extension
                       (name, is_exception, complaint)))
             (constructor_usage_complaint ~rebind priv used))
    end;
  end;
  { env with
    constrs = TycompTbl.add id cda env.constrs;
    summary = Env_extension(env.summary, id, ext) }
(* Bind module declaration [md] (lazy form) under [id].  [check], when
   [Some f], registers an unused-module usage check built from [f].
   The module's components are computed eagerly enough to be cached in the
   [mda] entry.  [update_summary] controls whether the environment summary
   records this binding (forced declaration). *)
and store_module ?(update_summary=true) ~check
    id addr presence md shape env =
  let open Subst.Lazy in
  let loc = md.mdl_loc in
  Option.iter
    (fun f -> check_usage loc id md.mdl_uid f !module_declarations) check;
  let alerts = Builtin_attributes.alerts_of_attrs md.mdl_attributes in
  let comps =
    components_of_module ~alerts ~uid:md.mdl_uid
      env Subst.identity (Pident id) addr md.mdl_type shape
  in
  let mda =
    { mda_declaration = md;
      mda_components = comps;
      mda_address = addr;
      mda_shape = shape }
  in
  let summary =
    if not update_summary then env.summary
    else Env_module (env.summary, id, presence, force_module_decl md) in
  { env with
    modules = IdTbl.add id (Mod_local mda) env.modules;
    summary }
(* Bind module type declaration [info] (lazy form) under [id].
   [update_summary] controls whether the summary records the (forced)
   declaration. *)
and store_modtype ?(update_summary=true) id info shape env =
  Builtin_attributes.mark_alerts_used info.Subst.Lazy.mtdl_attributes;
  let mtda = { mtda_declaration = info; mtda_shape = shape } in
  let summary =
    if not update_summary then env.summary
    else Env_modtype (env.summary, id, Subst.Lazy.force_modtype_decl info) in
  { env with
    modtypes = IdTbl.add id mtda env.modtypes;
    summary }
(* Bind class declaration [desc] under [id] at address [addr]. *)
and store_class id addr desc shape env =
  Builtin_attributes.mark_alerts_used desc.cty_attributes;
  let clda =
    { clda_declaration = desc;
      clda_address = addr;
      clda_shape = shape; }
  in
  { env with
    classes = IdTbl.add id clda env.classes;
    summary = Env_class(env.summary, id, desc) }

(* Bind class type declaration [desc] under [id]. *)
and store_cltype id desc shape env =
  Builtin_attributes.mark_alerts_used desc.clty_attributes;
  let cltda = { cltda_declaration = desc; cltda_shape = shape } in
  { env with
    cltypes = IdTbl.add id cltda env.cltypes;
    summary = Env_cltype(env.summary, id, desc) }
(* Compute (and memoize in [fcomp_cache]) the components of the functor
   application [f_path(arg)].  The functor's result type is substituted with
   the argument path, rescoped to the application path, and checked for
   well-formedness before its components are built. *)
let components_of_functor_appl ~loc ~f_path ~f_comp ~arg env =
  try
    let c = Hashtbl.find f_comp.fcomp_cache arg in
    c
  with Not_found ->
    let p = Papply(f_path, arg) in
    let sub =
      match f_comp.fcomp_arg with
      | Unit
      | Named (None, _) -> Subst.identity
      | Named (Some param, _) -> Subst.add_module param arg Subst.identity
    in
    let mty = Subst.modtype (Rescope (Path.scope p)) sub f_comp.fcomp_res in
    (* The application has no stable address; force a failure if queried. *)
    let addr = Lazy_backtrack.create_failed Not_found in
    !check_well_formed_module env loc
      ("the signature of " ^ Path.name p) mty;
    let shape_arg =
      shape_of_path ~namespace:Shape.Sig_component_kind.Module env arg
    in
    let shape = Shape.app f_comp.fcomp_shape ~arg:shape_arg in
    let comps =
      components_of_module ~alerts:Misc.Stdlib.String.Map.empty
        ~uid:Uid.internal_not_actually_unique
        env Subst.identity p addr (Subst.Lazy.of_modtype mty) shape
    in
    Hashtbl.add f_comp.fcomp_cache arg comps;
    comps

(* Tie the recursive knot: expose the two functions above through the
   forward references declared earlier in the file. *)
let _ =
  components_of_functor_appl' := components_of_functor_appl;
  components_of_module_maker' := components_of_module_maker
(* Record [id] as a functor argument (used for applicativity checks). *)
let add_functor_arg id env =
  {env with
   functor_args = Ident.add id () env.functor_args;
   summary = Env_functor_arg (env.summary, id)}

(* Add a value binding; the shape defaults to a leaf of the value's uid,
   and the mode defaults to global. *)
let add_value ?check ?shape ?(mode = Value_mode.global) id desc env =
  let addr = value_declaration_address env id desc in
  let shape = shape_or_leaf desc.val_uid shape in
  store_value ?check mode id addr desc shape env
(* Convenience wrappers over the [store_*] primitives: each computes the
   declaration's address where needed and defaults the shape to a leaf of
   the declaration's uid. *)
let add_type ~check ?shape id info env =
  let shape = shape_or_leaf info.type_uid shape in
  store_type ~check id info shape env

and add_extension ~check ?shape ~rebind id ext env =
  let addr = extension_declaration_address env id ext in
  let shape = shape_or_leaf ext.ext_uid shape in
  store_extension ~check ~rebind id addr ext shape env

(* When [arg] is set the module is also registered as a functor argument;
   inside a signature an unused functor parameter gets its own warning. *)
and add_module_declaration ?(arg=false) ?shape ~check id presence md env =
  let check =
    if not check then
      None
    else if arg && is_in_signature env then
      Some (fun s -> Warnings.Unused_functor_parameter s)
    else
      Some (fun s -> Warnings.Unused_module s)
  in
  let md = Subst.Lazy.of_module_decl md in
  let addr = module_declaration_address env id presence md in
  let shape = shape_or_leaf md.mdl_uid shape in
  let env = store_module ~check id addr presence md shape env in
  if arg then add_functor_arg id env else env

(* Lazy variant: no unused-module check, shape is always a leaf. *)
and add_module_declaration_lazy ~update_summary id presence md env =
  let addr = module_declaration_address env id presence md in
  let shape = Shape.leaf md.Subst.Lazy.mdl_uid in
  let env =
    store_module ~update_summary ~check:None id addr presence md shape env
  in
  env

and add_modtype ?shape id info env =
  let shape = shape_or_leaf info.mtd_uid shape in
  store_modtype id (Subst.Lazy.of_modtype_decl info) shape env

and add_modtype_lazy ~update_summary id info env =
  let shape = Shape.leaf info.Subst.Lazy.mtdl_uid in
  store_modtype ~update_summary id info shape env

and add_class ?shape id ty env =
  let addr = class_declaration_address env id ty in
  let shape = shape_or_leaf ty.cty_uid shape in
  store_class id addr ty shape env

and add_cltype ?shape id ty env =
  let shape = shape_or_leaf ty.clty_uid shape in
  store_cltype id ty shape env
(* Wrap a bare (lazy) module type into a synthetic declaration — no
   attributes, no location, non-unique uid — and add it lazily. *)
let add_module_lazy ~update_summary id presence mty env =
  let md = Subst.Lazy.{mdl_type = mty;
                       mdl_attributes = [];
                       mdl_loc = Location.none;
                       mdl_uid = Uid.internal_not_actually_unique}
  in
  add_module_declaration_lazy ~update_summary id presence md env

(* Add a module from a bare module type, with no unused-module check. *)
let add_module ?arg ?shape id presence mty env =
  add_module_declaration ~check:false ?arg ?shape id presence (md mty) env

(* Record a local (scoped) type constraint for [path]. *)
let add_local_type path info env =
  { env with
    local_constraints = Path.Map.add path info env.local_constraints }
(* The [enter_*] family: create a fresh identifier for [name] (local or
   scoped), store the declaration with a leaf shape, and return the new
   identifier together with the extended environment. *)
let enter_value ?check name desc env =
  let id = Ident.create_local name in
  let addr = value_declaration_address env id desc in
  let env = store_value ?check Value_mode.global id addr desc (Shape.leaf desc.val_uid) env in
  (id, env)

let enter_type ~scope name info env =
  let id = Ident.create_scoped ~scope name in
  let env = store_type ~check:true id info (Shape.leaf info.type_uid) env in
  (id, env)

let enter_extension ~scope ~rebind name ext env =
  let id = Ident.create_scoped ~scope name in
  let addr = extension_declaration_address env id ext in
  let shape = Shape.leaf ext.ext_uid in
  let env = store_extension ~check:true ~rebind id addr ext shape env in
  (id, env)

let enter_module_declaration ~scope ?arg ?shape s presence md env =
  let id = Ident.create_scoped ~scope s in
  (id, add_module_declaration ?arg ?shape ~check:true id presence md env)

let enter_modtype ~scope name mtd env =
  let id = Ident.create_scoped ~scope name in
  let shape = Shape.leaf mtd.mtd_uid in
  let env = store_modtype id (Subst.Lazy.of_modtype_decl mtd) shape env in
  (id, env)

let enter_class ~scope name desc env =
  let id = Ident.create_scoped ~scope name in
  let addr = class_declaration_address env id desc in
  let env = store_class id addr desc (Shape.leaf desc.cty_uid) env in
  (id, env)

let enter_cltype ~scope name desc env =
  let id = Ident.create_scoped ~scope name in
  let env = store_cltype id desc (Shape.leaf desc.clty_uid) env in
  (id, env)

let enter_module ~scope ?arg s presence mty env =
  enter_module_declaration ~scope ?arg s presence (md mty) env
(* Insert a mode lock into the value table; values looked up through it
   must satisfy [mode] (see [lock_mode] below). *)
let add_lock ?escaping_context mode env =
  let lock = Lock { mode; escaping_context } in
  { env with values = IdTbl.add_lock lock env.values }

(* Insert a region lock (crossing it adjusts local values to regional). *)
let add_region_lock env =
  { env with values = IdTbl.add_lock Region_lock env.values }
(* Add one signature item [comp] to [env].  When a parent module shape is
   available, project this item's shape out of it and accumulate the
   projection in [map]; otherwise shapes default per [add_*] wrappers. *)
let add_item (map, mod_shape) comp env =
  let proj_shape item =
    match mod_shape with
    | None -> map, None
    | Some mod_shape ->
        let shape = Shape.proj mod_shape item in
        Shape.Map.add map item shape, Some shape
  in
  match comp with
  | Sig_value(id, decl, _) ->
      let map, shape = proj_shape (Shape.Item.value id) in
      map, add_value ?shape id decl env
  | Sig_type(id, decl, _, _) ->
      let map, shape = proj_shape (Shape.Item.type_ id) in
      map, add_type ~check:false ?shape id decl env
  | Sig_typext(id, ext, _, _) ->
      let map, shape = proj_shape (Shape.Item.extension_constructor id) in
      map, add_extension ~check:false ?shape ~rebind:false id ext env
  | Sig_module(id, presence, md, _, _) ->
      let map, shape = proj_shape (Shape.Item.module_ id) in
      map, add_module_declaration ~check:false ?shape id presence md env
  | Sig_modtype(id, decl, _) ->
      let map, shape = proj_shape (Shape.Item.module_type id) in
      map, add_modtype ?shape id decl env
  | Sig_class(id, decl, _, _) ->
      let map, shape = proj_shape (Shape.Item.class_ id) in
      map, add_class ?shape id decl env
  | Sig_class_type(id, decl, _, _) ->
      let map, shape = proj_shape (Shape.Item.class_type id) in
      map, add_cltype ?shape id decl env
(* Fold [add_item] over a whole signature, threading the shape map. *)
let rec add_signature (map, mod_shape) sg env =
  match sg with
    [] -> map, env
  | comp :: rem ->
      let map, env = add_item (map, mod_shape) comp env in
      add_signature (map, mod_shape) rem env

(* Rescope [sg] to [scope] and add it; returns the rescoped signature,
   the accumulated shape map, and the extended environment. *)
let enter_signature_and_shape ~scope ~parent_shape mod_shape sg env =
  let sg = Subst.signature (Rescope scope) Subst.identity sg in
  let shape, env = add_signature (parent_shape, mod_shape) sg env in
  sg, shape, env

let enter_signature ?mod_shape ~scope sg env =
  let sg, _, env =
    enter_signature_and_shape ~scope ~parent_shape:Shape.Map.empty
      mod_shape sg env
  in
  sg, env

(* Public variant: the module shape is mandatory. *)
let enter_signature_and_shape ~scope ~parent_shape mod_shape sg env =
  enter_signature_and_shape ~scope ~parent_shape (Some mod_shape) sg env

(* Shadow the shape-aware versions with shape-less ones for external use. *)
let add_value = add_value ?shape:None
let add_type = add_type ?shape:None
let add_extension = add_extension ?shape:None
let add_class = add_class ?shape:None
let add_cltype = add_cltype ?shape:None
let add_modtype = add_modtype ?shape:None

let add_signature sg env =
  let _, env = add_signature (Shape.Map.empty, None) sg env in
  env
(* Record [name] as deliberately unbound (with [reason]); later lookups of
   it report a dedicated error instead of a plain "unbound" one. *)
let enter_unbound_value name reason env =
  let id = Ident.create_local name in
  { env with
    values = IdTbl.add id (Val_unbound reason) env.values;
    summary = Env_value_unbound(env.summary, name, reason) }

let enter_unbound_module name reason env =
  let id = Ident.create_local name in
  { env with
    modules = IdTbl.add id (Mod_unbound reason) env.modules;
    summary = Env_module_unbound(env.summary, name, reason) }
(* Open the structure components [comps] of module [root] into [env0],
   table by table.  [slot] is the optional shadowing/usage callback used by
   the warning machinery of [open_signature]. *)
let add_components slot root env0 comps =
  let add_l w comps env0 =
    TycompTbl.add_open slot w root comps env0
  in
  let add w comps env0 = IdTbl.add_open slot w root comps env0 in
  let constrs =
    add_l (fun x -> `Constructor x) comps.comp_constrs env0.constrs
  in
  let labels =
    add_l (fun x -> `Label x) comps.comp_labels env0.labels
  in
  let values =
    add (fun x -> `Value x) comps.comp_values env0.values
  in
  let types =
    add (fun x -> `Type x) comps.comp_types env0.types
  in
  let modtypes =
    add (fun x -> `Module_type x) comps.comp_modtypes env0.modtypes
  in
  let classes =
    add (fun x -> `Class x) comps.comp_classes env0.classes
  in
  let cltypes =
    add (fun x -> `Class_type x) comps.comp_cltypes env0.cltypes
  in
  let modules =
    add (fun x -> `Module x) comps.comp_modules env0.modules
  in
  { env0 with
    summary = Env_open(env0.summary, root);
    constrs;
    labels;
    values;
    types;
    modtypes;
    classes;
    cltypes;
    modules;
  }
(* Low-level open: resolve [root]'s components and splice them into [env0].
   Fails with [`Not_found] if the module cannot be resolved and [`Functor]
   if it is a functor rather than a structure. *)
let open_signature slot root env0 : (_,_) result =
  match get_components_res (find_module_components root env0) with
  | Error _ -> Error `Not_found
  | exception Not_found -> Error `Not_found
  | Ok (Functor_comps _) -> Error `Functor
  | Ok (Structure_comps comps) ->
      Ok (add_components slot root env0 comps)
(* Undo the most recent [open] of [root], if it is the last open recorded
   in the summary.  Returns [None] when the summary's most recent open is
   not [root] (signalled internally via [Exit]). *)
let remove_last_open root env0 =
  let rec filter_summary summary =
    match summary with
      Env_empty -> raise Exit
    | Env_open (s, p) ->
        if Path.same p root then s else raise Exit
    | Env_value _
    | Env_type _
    | Env_extension _
    | Env_module _
    | Env_modtype _
    | Env_class _
    | Env_cltype _
    | Env_functor_arg _
    | Env_constraints _
    | Env_persistent _
    | Env_copy_types _
    | Env_value_unbound _
    | Env_module_unbound _ ->
        map_summary filter_summary summary
  in
  match filter_summary env0.summary with
  | summary ->
      (* The open was found: strip it from every lookup table as well. *)
      let rem_l tbl = TycompTbl.remove_last_open root tbl
      and rem tbl = IdTbl.remove_last_open root tbl in
      Some { env0 with
             summary;
             constrs = rem_l env0.constrs;
             labels = rem_l env0.labels;
             values = rem env0.values;
             types = rem env0.types;
             modtypes = rem env0.modtypes;
             classes = rem env0.classes;
             cltypes = rem env0.cltypes;
             modules = rem env0.modules; }
  | exception Exit ->
      None
(* Open a persistent (compilation-unit) module by name.  A persistent
   module can never be a functor, hence the assertion. *)
let open_pers_signature name env =
  match open_signature None (Pident(Ident.create_persistent name)) env with
  | (Ok _ | Error `Not_found as res) -> res
  | Error `Functor -> assert false
(* User-facing open: wraps the raw [open_signature] with the unused-open
   and shadowing warnings.  [used_slot] lets the caller observe whether the
   open was ever used; toplevel and ghost-location opens are exempt. *)
let open_signature
    ?(used_slot = ref false)
    ?(loc = Location.none) ?(toplevel = false)
    ovf root env =
  let unused =
    match ovf with
    | Asttypes.Fresh -> Warnings.Unused_open (Path.name root)
    | Asttypes.Override -> Warnings.Unused_open_bang (Path.name root)
  in
  let warn_unused =
    Warnings.is_active unused
  and warn_shadow_id =
    Warnings.is_active (Warnings.Open_shadow_identifier ("", ""))
  and warn_shadow_lc =
    Warnings.is_active (Warnings.Open_shadow_label_constructor ("",""))
  in
  if not toplevel && not loc.Location.loc_ghost
     && (warn_unused || warn_shadow_id || warn_shadow_lc)
  then begin
    let used = used_slot in
    if warn_unused then
      !add_delayed_check_forward
        (fun () ->
           if not !used then begin
             used := true;
             Location.prerr_warning loc unused
           end
        );
    let shadowed = ref [] in
    (* Called on each lookup through the open; warns once per shadowed
       (kind, name) pair and marks the open as used. *)
    let slot s b =
      begin match check_shadowing env b with
      | Some kind when
          ovf = Asttypes.Fresh && not (List.mem (kind, s) !shadowed) ->
          shadowed := (kind, s) :: !shadowed;
          let w =
            match kind with
            | "label" | "constructor" ->
                Warnings.Open_shadow_label_constructor (kind, s)
            | _ -> Warnings.Open_shadow_identifier (kind, s)
          in
          Location.prerr_warning loc w
      | _ -> ()
      end;
      used := true
    in
    open_signature (Some slot) root env
  end
  else open_signature None root env
(* Read a compiled interface from [filename] and return its signature.
   A persistent module's type is always a signature, hence the assertion. *)
let read_signature modname filename =
  let mda = read_pers_mod (Compilation_unit.name modname) filename in
  let md = Subst.Lazy.force_module_decl mda.mda_declaration in
  match md.md_type with
  | Mty_signature sg -> sg
  | Mty_ident _ | Mty_functor _ | Mty_alias _ -> assert false
(* True iff [c] may appear in an identifier, treating the byte as Latin-1:
   ASCII letters, digits, '_', '\'' and the accented Latin-1 letters
   (0xC0-0xD6, 0xD8-0xF6, 0xF8-0xFF; the two gaps are the multiplication
   and division signs). *)
let is_identchar_latin1 c =
  match c with
  | 'a'..'z' | 'A'..'Z' | '0'..'9' | '_' | '\'' -> true
  | '\192'..'\214' | '\216'..'\246' | '\248'..'\255' -> true
  | _ -> false
(* If [fn] is a ".cmi" file whose capitalized basename is a valid
   (Latin-1) module name, return [Some name]; otherwise [None]. *)
let unit_name_of_filename fn =
  match Filename.extension fn with
  | ".cmi" -> begin
      let unit =
        String.capitalize_ascii (Filename.remove_extension fn)
      in
      if String.for_all is_identchar_latin1 unit then
        Some unit
      else
        None
    end
  | _ -> None

(* The set of module names provided by the ".cmi" files of [dir]. *)
let persistent_structures_of_dir dir =
  Load_path.Dir.files dir
  |> List.to_seq
  |> Seq.filter_map unit_name_of_filename
  |> String.Set.of_seq
(* Save [sg] as the interface of [modname] into [filename].  The signature
   is first localized and run through the for-saving substitution; the
   resulting cmi is passed to [cmi_transform] before being written. *)
let save_signature_with_transform cmi_transform ~alerts sg modname filename =
  Btype.cleanup_abbrev ();
  Subst.reset_for_saving ();
  let sg = Subst.signature Make_local (Subst.for_saving Subst.identity) sg in
  let cmi =
    Persistent_env.make_cmi !persistent_env modname sg alerts
    |> cmi_transform in
  let pm = save_sign_of_cmi
      { Persistent_env.Persistent_signature.cmi; filename } in
  Persistent_env.save_cmi !persistent_env
    { Persistent_env.Persistent_signature.filename; cmi } pm;
  cmi

let save_signature ~alerts sg modname filename =
  save_signature_with_transform (fun cmi -> cmi)
    ~alerts sg modname filename

(* Variant that overrides the recorded import crcs with [imports]. *)
let save_signature_with_imports ~alerts sg modname filename imports =
  let with_imports cmi = { cmi with cmi_crcs = imports } in
  save_signature_with_transform with_imports
    ~alerts sg modname filename

(* The two initial environments, populated with the predefined types and
   exceptions (no usage checks on predefined items). *)
let (initial_safe_string, initial_unsafe_string) =
  Predef.build_initial_env
    (add_type ~check:false)
    (add_extension ~check:false ~rebind:false)
    empty
(* Usage-marking helpers: each looks up the callback registered for a uid
   in the corresponding table and invokes it; absent entries are ignored
   (nothing was tracked for that declaration). *)
let mark_module_used uid =
  match Types.Uid.Tbl.find !module_declarations uid with
  | mark -> mark ()
  | exception Not_found -> ()

(* Module types carry no unused-declaration tracking. *)
let mark_modtype_used _uid = ()

let mark_value_used uid =
  match Types.Uid.Tbl.find !value_declarations uid with
  | mark -> mark ()
  | exception Not_found -> ()

let mark_type_used uid =
  match Types.Uid.Tbl.find !type_declarations uid with
  | mark -> mark ()
  | exception Not_found -> ()

let mark_type_path_used env path =
  match find_type path env with
  | decl -> mark_type_used decl.type_uid
  | exception Not_found -> ()

let mark_constructor_used usage cd =
  match Types.Uid.Tbl.find !used_constructors cd.cd_uid with
  | mark -> mark usage
  | exception Not_found -> ()

let mark_extension_used usage ext =
  match Types.Uid.Tbl.find !used_constructors ext.ext_uid with
  | mark -> mark usage
  | exception Not_found -> ()

let mark_label_used usage ld =
  match Types.Uid.Tbl.find !used_labels ld.ld_uid with
  | mark -> mark usage
  | exception Not_found -> ()

(* Description variants also mark the owning type as used. *)
let mark_constructor_description_used usage env cstr =
  let ty_path = Btype.cstr_type_path cstr in
  mark_type_path_used env ty_path;
  match Types.Uid.Tbl.find !used_constructors cstr.cstr_uid with
  | mark -> mark usage
  | exception Not_found -> ()

let mark_label_description_used usage env lbl =
  let ty_path =
    match get_desc lbl.lbl_res with
    | Tconstr(path, _, _) -> path
    | _ -> assert false
  in
  mark_type_path_used env ty_path;
  match Types.Uid.Tbl.find !used_labels lbl.lbl_uid with
  | mark -> mark usage
  | exception Not_found -> ()

(* Classes and class types share the type-declarations table. *)
let mark_class_used uid =
  match Types.Uid.Tbl.find !type_declarations uid with
  | mark -> mark ()
  | exception Not_found -> ()

let mark_cltype_used uid =
  match Types.Uid.Tbl.find !type_declarations uid with
  | mark -> mark ()
  | exception Not_found -> ()
(* Register [callback] to be run when the value [vd] is marked as used. *)
let set_value_used_callback vd callback =
  Types.Uid.Tbl.add !value_declarations vd.val_uid callback

(* Chain [callback] in front of any existing used-callback for type [td];
   only applies to uids of actual declarations. *)
let set_type_used_callback td callback =
  if Uid.for_actual_declaration td.type_uid then
    let old =
      try Types.Uid.Tbl.find !type_declarations td.type_uid
      with Not_found -> ignore
    in
    Types.Uid.Tbl.replace !type_declarations td.type_uid
      (fun () -> callback old)
(* Raise a located lookup error when error reporting is on, otherwise a
   silent [Not_found] (used by the non-reporting lookup entry points). *)
let may_lookup_error report_errors loc env err =
  if report_errors then lookup_error loc env err
  else raise Not_found
(* Report a lookup of a module that was recorded as unbound.
   The only reason currently is an illegal recursive-module reference. *)
let report_module_unbound ~errors ~loc env reason =
  match reason with
  | Mod_unbound_illegal_recursion ->
      (* see #5965 *)
      may_lookup_error errors loc env Illegal_reference_to_recursive_module
(* Report a lookup of a value recorded as unbound, choosing the error
   matching the recorded reason.  For ghost-recursive bindings, a
   Missing_rec hint is attached only when both locations are real. *)
let report_value_unbound ~errors ~loc env reason lid =
  match reason with
  | Val_unbound_instance_variable ->
      may_lookup_error errors loc env (Masked_instance_variable lid)
  | Val_unbound_self ->
      may_lookup_error errors loc env (Masked_self_variable lid)
  | Val_unbound_ancestor ->
      may_lookup_error errors loc env (Masked_ancestor_variable lid)
  | Val_unbound_ghost_recursive rloc ->
      let show_hint =
        not loc.Location.loc_ghost
        && not rloc.Location.loc_ghost
      in
      let hint =
        if show_hint then Missing_rec rloc else No_hint
      in
      may_lookup_error errors loc env (Unbound_value(lid, hint))
(* The [use_*] family: when [use] is set, mark the looked-up entity as used
   and fire any alert/deprecation checks attached to its attributes. *)
let use_module ~use ~loc path mda =
  if use then begin
    let comps = mda.mda_components in
    mark_module_used comps.uid;
    Misc.Stdlib.String.Map.iter
      (fun kind message ->
         let message = if message = "" then "" else "\n" ^ message in
         Location.alert ~kind loc
           (Printf.sprintf "module %s%s" (Path.name path) message)
      )
      comps.alerts
  end

let use_value ~use ~loc path vda =
  if use then begin
    let desc = vda.vda_description in
    mark_value_used desc.val_uid;
    Builtin_attributes.check_alerts loc desc.val_attributes
      (Path.name path)
  end

let use_type ~use ~loc path tda =
  if use then begin
    let decl = tda.tda_declaration in
    mark_type_used decl.type_uid;
    Builtin_attributes.check_alerts loc decl.type_attributes
      (Path.name path)
  end

let use_modtype ~use ~loc path desc =
  let open Subst.Lazy in
  if use then begin
    mark_modtype_used desc.mtdl_uid;
    Builtin_attributes.check_alerts loc desc.mtdl_attributes
      (Path.name path)
  end

let use_class ~use ~loc path clda =
  if use then begin
    let desc = clda.clda_declaration in
    mark_class_used desc.cty_uid;
    Builtin_attributes.check_alerts loc desc.cty_attributes
      (Path.name path)
  end

let use_cltype ~use ~loc path desc =
  if use then begin
    mark_cltype_used desc.clty_uid;
    Builtin_attributes.check_alerts loc desc.clty_attributes
      (Path.name path)
  end

(* Labels additionally check deprecated_mutable on mutating usages. *)
let use_label ~use ~loc usage env lbl =
  if use then begin
    mark_label_description_used usage env lbl;
    Builtin_attributes.check_alerts loc lbl.lbl_attributes lbl.lbl_name;
    if is_mutating_label_usage usage then
      Builtin_attributes.check_deprecated_mutable loc lbl.lbl_attributes
        lbl.lbl_name
  end

let use_constructor_desc ~use ~loc usage env cstr =
  if use then begin
    mark_constructor_description_used usage env cstr;
    Builtin_attributes.check_alerts loc cstr.cstr_attributes cstr.cstr_name
  end

let use_constructor ~use ~loc usage env cda =
  use_constructor_desc ~use ~loc usage env cda.cda_description
(* GADT selecting whether [lookup_ident_module] must load the module's
   data ([Load] returns [module_data]) or only check it ([Don't_load]). *)
type _ load =
  | Load : module_data load
  | Don't_load : unit load
(* Resolve the unqualified module name [s].  Local modules are returned
   directly; persistent ones are either fully loaded ([Load]) or merely
   checked for existence ([Don't_load]), per the [load] GADT witness. *)
let lookup_ident_module (type a) (load : a load) ~errors ~use ~loc s env =
  let path, data =
    match find_name_module ~mark:use s env.modules with
    | res -> res
    | exception Not_found ->
        may_lookup_error errors loc env (Unbound_module (Lident s))
  in
  match data with
  | Mod_local mda -> begin
      use_module ~use ~loc path mda;
      match load with
      | Load -> path, (mda : a)
      | Don't_load -> path, (() : a)
    end
  | Mod_unbound reason ->
      report_module_unbound ~errors ~loc env reason
  | Mod_persistent -> begin
      let name = s |> Compilation_unit.Name.of_string in
      match load with
      | Don't_load ->
          check_pers_mod ~loc name;
          path, (() : a)
      | Load -> begin
          match find_pers_mod name with
          | mda ->
              use_module ~use ~loc path mda;
              path, (mda : a)
          | exception Not_found ->
              may_lookup_error errors loc env (Unbound_module (Lident s))
        end
    end
(* Apply the locks crossed by a value lookup to its mode, left to right.
   A [Region_lock] converts local to regional; a mode [Lock] requires the
   value's mode to submode into it, otherwise the lookup is an error
   (local value used in a closure that escapes). *)
let lock_mode ~errors ~loc env id vmode locks =
  List.fold_left
    (fun vmode lock ->
       match lock with
       | Region_lock -> Value_mode.local_to_regional vmode
       | Lock {mode; escaping_context} ->
           match Value_mode.submode vmode (Value_mode.of_alloc mode) with
           | Ok () -> vmode
           | Error _ ->
               may_lookup_error errors loc env
                 (Local_value_used_in_closure (id, escaping_context)))
    vmode locks
(* Unqualified lookups in each namespace: resolve the name, mark usage via
   the matching [use_*] helper, and report an Unbound error on failure.
   Values additionally thread their mode through the crossed locks. *)
let lookup_ident_value ~errors ~use ~loc name env =
  match IdTbl.find_name_and_modes wrap_value ~mark:use name env.values with
  | (path, locks, Val_bound vda) ->
      let mode = lock_mode ~errors ~loc env (Lident name) vda.vda_mode locks in
      use_value ~use ~loc path vda;
      path, vda.vda_description, mode
  | (_, _, Val_unbound reason) ->
      report_value_unbound ~errors ~loc env reason (Lident name)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_value (Lident name, No_hint))

let lookup_ident_type ~errors ~use ~loc s env =
  match IdTbl.find_name wrap_identity ~mark:use s env.types with
  | (path, data) as res ->
      use_type ~use ~loc path data;
      res
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_type (Lident s))

let lookup_ident_modtype ~errors ~use ~loc s env =
  match IdTbl.find_name wrap_identity ~mark:use s env.modtypes with
  | (path, data) ->
      use_modtype ~use ~loc path data.mtda_declaration;
      (path, data.mtda_declaration)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_modtype (Lident s))

let lookup_ident_class ~errors ~use ~loc s env =
  match IdTbl.find_name wrap_identity ~mark:use s env.classes with
  | (path, clda) ->
      use_class ~use ~loc path clda;
      path, clda.clda_declaration
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_class (Lident s))

let lookup_ident_cltype ~errors ~use ~loc s env =
  match IdTbl.find_name wrap_identity ~mark:use s env.cltypes with
  | path, cltda ->
      use_cltype ~use ~loc path cltda.cltda_declaration;
      path, cltda.cltda_declaration
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_cltype (Lident s))
(* Return every label (resp. constructor) named [s] in scope, most recent
   first, each paired with a thunk that records its usage when the caller
   commits to that candidate. *)
let lookup_all_ident_labels ~errors ~use ~loc usage s env =
  match TycompTbl.find_all ~mark:use s env.labels with
  | [] -> may_lookup_error errors loc env (Unbound_label (Lident s))
  | lbls -> begin
      List.map
        (fun (lbl, use_fn) ->
           let use_fn () =
             use_label ~use ~loc usage env lbl;
             use_fn ()
           in
           (lbl, use_fn))
        lbls
    end

let lookup_all_ident_constructors ~errors ~use ~loc usage s env =
  match TycompTbl.find_all ~mark:use s env.constrs with
  | [] -> may_lookup_error errors loc env (Unbound_constructor (Lident s))
  | cstrs ->
      List.map
        (fun (cda, use_fn) ->
           let use_fn () =
             use_constructor ~use ~loc usage env cda;
             use_fn ()
           in
           (cda.cda_description, use_fn))
        cstrs
(* Resolve a (possibly qualified or applied) module longident to its path
   and components.  Functor applications are resolved via [lookup_apply]
   and the memoizing [components_of_functor_appl']. *)
let rec lookup_module_components ~errors ~use ~loc lid env =
  match lid with
  | Lident s ->
      let path, data = lookup_ident_module Load ~errors ~use ~loc s env in
      path, data.mda_components
  | Ldot(l, s) ->
      let path, data = lookup_dot_module ~errors ~use ~loc l s env in
      path, data.mda_components
  | Lapply _ as lid ->
      let f_path, f_comp, arg = lookup_apply ~errors ~use ~loc lid env in
      let comps =
        !components_of_functor_appl' ~loc ~f_path ~f_comp ~arg env in
      Papply (f_path, arg), comps

(* As above, but insist the result is a structure; report a dedicated
   error for functors, abstract module types and unscrapable aliases. *)
and lookup_structure_components ~errors ~use ~loc lid env =
  let path, comps = lookup_module_components ~errors ~use ~loc lid env in
  match get_components_res comps with
  | Ok (Structure_comps comps) -> path, comps
  | Ok (Functor_comps _) ->
      may_lookup_error errors loc env (Functor_used_as_structure lid)
  | Error No_components_abstract ->
      may_lookup_error errors loc env (Abstract_used_as_structure lid)
  | Error (No_components_alias p) ->
      may_lookup_error errors loc env (Cannot_scrape_alias(lid, p))
(* Insist that [comps] are functor components and return them with the
   functor's parameter module type.  A generative functor ([Unit] argument)
   cannot be applied, and structures/abstract/alias cases each get a
   dedicated error. *)
and get_functor_components ~errors ~loc lid env comps =
  match get_components_res comps with
  | Ok (Functor_comps fcomps) -> begin
      match fcomps.fcomp_arg with
      | Unit ->  (* generative functor: cannot be applied to an argument *)
          may_lookup_error errors loc env (Generative_used_as_applicative lid)
      | Named (_, arg) -> fcomps, arg
    end
  | Ok (Structure_comps _) ->
      may_lookup_error errors loc env (Structure_used_as_functor lid)
  | Error No_components_abstract ->
      may_lookup_error errors loc env (Abstract_used_as_functor lid)
  | Error (No_components_alias p) ->
      may_lookup_error errors loc env (Cannot_scrape_alias(lid, p))
(* Peel a nested [Lapply] longident into its head functor lid and the
   list of (functor lid, argument path, argument type) triples,
   innermost application first. *)
and lookup_all_args ~errors ~use ~loc lid0 env =
  let rec loop_lid_arg args = function
    | Lident _ | Ldot _ as f_lid ->
        (f_lid, args)
    | Lapply (f_lid, arg_lid) ->
        let arg_path, arg_md = lookup_module ~errors ~use ~loc arg_lid env in
        loop_lid_arg ((f_lid,arg_path,arg_md.md_type)::args) f_lid
  in
  loop_lid_arg [] lid0

(* Resolve a functor application chain: look up the head functor, then
   check each application in turn (argument against parameter type),
   building intermediate component sets as needed.  Returns the head
   functor path, the final functor components and the last argument. *)
and lookup_apply ~errors ~use ~loc lid0 env =
  let f0_lid, args0 = lookup_all_args ~errors ~use ~loc lid0 env in
  let args_for_errors = List.map (fun (_,p,mty) -> (p,mty)) args0 in
  let f0_path, f0_comp =
    lookup_module_components ~errors ~use ~loc f0_lid env
  in
  let check_one_apply ~errors ~loc ~f_lid ~f_comp ~arg_path ~arg_mty env =
    let f_comp, param_mty =
      get_functor_components ~errors ~loc f_lid env f_comp
    in
    check_functor_appl
      ~errors ~loc ~lid_whole_app:lid0
      ~f0_path ~args:args_for_errors ~f_comp
      ~arg_path ~arg_mty ~param_mty
      env;
    arg_path, f_comp
  in
  let rec check_apply ~path:f_path ~comp:f_comp = function
    | [] -> invalid_arg "Env.lookup_apply: empty argument list"
    | [ f_lid, arg_path, arg_mty ] ->
        let arg_path, comps =
          check_one_apply ~errors ~loc ~f_lid ~f_comp
            ~arg_path ~arg_mty env
        in
        f_path, comps, arg_path
    | (f_lid, arg_path, arg_mty) :: args ->
        let arg_path, f_comp =
          check_one_apply ~errors ~loc ~f_lid ~f_comp
            ~arg_path ~arg_mty env
        in
        let comp =
          !components_of_functor_appl' ~loc ~f_path ~f_comp ~arg:arg_path env
        in
        let path = Papply (f_path, arg_path) in
        check_apply ~path ~comp args
  in
  check_apply ~path:f0_path ~comp:f0_comp args0
(* Resolve a module longident to its path and (forced) declaration.
   For applications, the declaration is synthesized from the functor's
   result type. *)
and lookup_module ~errors ~use ~loc lid env =
  match lid with
  | Lident s ->
      let path, data = lookup_ident_module Load ~errors ~use ~loc s env in
      let md = Subst.Lazy.force_module_decl data.mda_declaration in
      path, md
  | Ldot(l, s) ->
      let path, data = lookup_dot_module ~errors ~use ~loc l s env in
      let md = Subst.Lazy.force_module_decl data.mda_declaration in
      path, md
  | Lapply _ as lid ->
      let path_f, comp_f, path_arg = lookup_apply ~errors ~use ~loc lid env in
      let md = md (modtype_of_functor_appl comp_f path_f path_arg) in
      Papply(path_f, path_arg), md

(* Resolve [l.s]: find [l]'s structure components, then [s] among its
   sub-modules. *)
and lookup_dot_module ~errors ~use ~loc l s env =
  let p, comps = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_modules with
  | mda ->
      let path = Pdot(p, s) in
      use_module ~use ~loc path mda;
      (path, mda)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_module (Ldot(l, s)))
(* Qualified ([l.s]) lookups per namespace: resolve [l]'s structure
   components, find [s] in the matching component table, mark usage, and
   report the namespace's Unbound error on failure. *)
let lookup_dot_value ~errors ~use ~loc l s env =
  let (path, comps) =
    lookup_structure_components ~errors ~use ~loc l env
  in
  match NameMap.find s comps.comp_values with
  | vda ->
      let path = Pdot(path, s) in
      use_value ~use ~loc path vda;
      (path, vda.vda_description)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_value (Ldot(l, s), No_hint))

let lookup_dot_type ~errors ~use ~loc l s env =
  let (p, comps) = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_types with
  | tda ->
      let path = Pdot(p, s) in
      use_type ~use ~loc path tda;
      (path, tda)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_type (Ldot(l, s)))

let lookup_dot_modtype ~errors ~use ~loc l s env =
  let (p, comps) = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_modtypes with
  | mta ->
      let path = Pdot(p, s) in
      use_modtype ~use ~loc path mta.mtda_declaration;
      (path, mta.mtda_declaration)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_modtype (Ldot(l, s)))

let lookup_dot_class ~errors ~use ~loc l s env =
  let (p, comps) = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_classes with
  | clda ->
      let path = Pdot(p, s) in
      use_class ~use ~loc path clda;
      (path, clda.clda_declaration)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_class (Ldot(l, s)))

let lookup_dot_cltype ~errors ~use ~loc l s env =
  let (p, comps) = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_cltypes with
  | cltda ->
      let path = Pdot(p, s) in
      use_cltype ~use ~loc path cltda.cltda_declaration;
      (path, cltda.cltda_declaration)
  | exception Not_found ->
      may_lookup_error errors loc env (Unbound_cltype (Ldot(l, s)))
(* Qualified label/constructor candidates: every entry named [s] in [l]'s
   components, each paired with a usage-recording thunk. *)
let lookup_all_dot_labels ~errors ~use ~loc usage l s env =
  let (_, comps) = lookup_structure_components ~errors ~use ~loc l env in
  match NameMap.find s comps.comp_labels with
  | [] | exception Not_found ->
      may_lookup_error errors loc env (Unbound_label (Ldot(l, s)))
  | lbls ->
      List.map
        (fun lbl ->
           let use_fun () = use_label ~use ~loc usage env lbl in
           (lbl, use_fun))
        lbls

(* "*predef*" is a pseudo-module giving access to the predefined
   constructors; other prefixes go through the component tables. *)
let lookup_all_dot_constructors ~errors ~use ~loc usage l s env =
  match l with
  | Longident.Lident "*predef*" ->
      lookup_all_ident_constructors
        ~errors ~use ~loc usage s initial_safe_string
  | _ ->
      let (_, comps) = lookup_structure_components ~errors ~use ~loc l env in
      match NameMap.find s comps.comp_constrs with
      | [] | exception Not_found ->
          may_lookup_error errors loc env (Unbound_constructor (Ldot(l, s)))
      | cstrs ->
          List.map
            (fun cda ->
               let use_fun () = use_constructor ~use ~loc usage env cda in
               (cda.cda_description, use_fun))
            cstrs
(* Resolve a module longident to a path only.  With -transparent-modules
   and [load] false, persistent modules are checked but not loaded. *)
let lookup_module_path ~errors ~use ~loc ~load lid env : Path.t =
  match lid with
  | Lident s ->
      if !Clflags.transparent_modules && not load then
        fst (lookup_ident_module Don't_load ~errors ~use ~loc s env)
      else
        fst (lookup_ident_module Load ~errors ~use ~loc s env)
  | Ldot(l, s) -> fst (lookup_dot_module ~errors ~use ~loc l s env)
  | Lapply _ as lid ->
      let path_f, _comp_f, path_arg = lookup_apply ~errors ~use ~loc lid env in
      Papply(path_f, path_arg)

(* Dispatchers over the longident shape; [Lapply] is impossible for
   non-module namespaces.  Dot-lookups of values are always global. *)
let lookup_value ~errors ~use ~loc lid env =
  match lid with
  | Lident s -> lookup_ident_value ~errors ~use ~loc s env
  | Ldot(l, s) ->
      let path, desc = lookup_dot_value ~errors ~use ~loc l s env in
      let mode = Value_mode.global in
      path, desc, mode
  | Lapply _ -> assert false

let lookup_type_full ~errors ~use ~loc lid env =
  match lid with
  | Lident s -> lookup_ident_type ~errors ~use ~loc s env
  | Ldot(l, s) -> lookup_dot_type ~errors ~use ~loc l s env
  | Lapply _ -> assert false

let lookup_type ~errors ~use ~loc lid env =
  let (path, tda) = lookup_type_full ~errors ~use ~loc lid env in
  path, tda.tda_declaration
let lookup_modtype_lazy ~errors ~use ~loc lid env =
match lid with
| Lident s -> lookup_ident_modtype ~errors ~use ~loc s env
| Ldot(l, s) -> lookup_dot_modtype ~errors ~use ~loc l s env
| Lapply _ -> assert false
let lookup_modtype ~errors ~use ~loc lid env =
let (path, mt) = lookup_modtype_lazy ~errors ~use ~loc lid env in
path, Subst.Lazy.force_modtype_decl mt
let lookup_class ~errors ~use ~loc lid env =
match lid with
| Lident s -> lookup_ident_class ~errors ~use ~loc s env
| Ldot(l, s) -> lookup_dot_class ~errors ~use ~loc l s env
| Lapply _ -> assert false
let lookup_cltype ~errors ~use ~loc lid env =
match lid with
| Lident s -> lookup_ident_cltype ~errors ~use ~loc s env
| Ldot(l, s) -> lookup_dot_cltype ~errors ~use ~loc l s env
| Lapply _ -> assert false
let lookup_all_labels ~errors ~use ~loc usage lid env =
match lid with
| Lident s -> lookup_all_ident_labels ~errors ~use ~loc usage s env
| Ldot(l, s) -> lookup_all_dot_labels ~errors ~use ~loc usage l s env
| Lapply _ -> assert false
let lookup_label ~errors ~use ~loc usage lid env =
match lookup_all_labels ~errors ~use ~loc usage lid env with
| [] -> assert false
| (desc, use) :: _ -> use (); desc
let lookup_all_labels_from_type ~use ~loc usage ty_path env =
match find_type_descrs ty_path env with
| exception Not_found -> []
| Type_variant _ | Type_abstract | Type_open -> []
| Type_record (lbls, _) ->
List.map
(fun lbl ->
let use_fun () = use_label ~use ~loc usage env lbl in
(lbl, use_fun))
lbls
let lookup_all_constructors ~errors ~use ~loc usage lid env =
match lid with
| Lident s -> lookup_all_ident_constructors ~errors ~use ~loc usage s env
| Ldot(l, s) -> lookup_all_dot_constructors ~errors ~use ~loc usage l s env
| Lapply _ -> assert false
let lookup_constructor ~errors ~use ~loc usage lid env =
match lookup_all_constructors ~errors ~use ~loc usage lid env with
| [] -> assert false
| (desc, use) :: _ -> use (); desc
let lookup_all_constructors_from_type ~use ~loc usage ty_path env =
match find_type_descrs ty_path env with
| exception Not_found -> []
| Type_record _ | Type_abstract | Type_open -> []
| Type_variant (cstrs, _) ->
List.map
(fun cstr ->
let use_fun () =
use_constructor_desc ~use ~loc usage env cstr
in
(cstr, use_fun))
cstrs
let find_module_by_name lid env =
let loc = Location.(in_file !input_name) in
lookup_module ~errors:false ~use:false ~loc lid env
let find_value_by_name lid env =
let loc = Location.(in_file !input_name) in
let path, desc, _ = lookup_value ~errors:false ~use:false ~loc lid env in
path, desc
let find_type_by_name lid env =
let loc = Location.(in_file !input_name) in
lookup_type ~errors:false ~use:false ~loc lid env
let find_modtype_by_name lid env =
let loc = Location.(in_file !input_name) in
lookup_modtype ~errors:false ~use:false ~loc lid env
let find_class_by_name lid env =
let loc = Location.(in_file !input_name) in
lookup_class ~errors:false ~use:false ~loc lid env
let find_cltype_by_name lid env =
let loc = Location.(in_file !input_name) in
lookup_cltype ~errors:false ~use:false ~loc lid env
let find_constructor_by_name lid env =
let loc = Location.(in_file !input_name) in
lookup_constructor ~errors:false ~use:false ~loc Positive lid env
let find_label_by_name lid env =
let loc = Location.(in_file !input_name) in
lookup_label ~errors:false ~use:false ~loc Projection lid env
let lookup_module_path ?(use=true) ~loc ~load lid env =
lookup_module_path ~errors:true ~use ~loc ~load lid env
let lookup_module ?(use=true) ~loc lid env =
lookup_module ~errors:true ~use ~loc lid env
let lookup_value ?(use=true) ~loc lid env =
check_value_name (Longident.last lid) loc;
lookup_value ~errors:true ~use ~loc lid env
let lookup_type ?(use=true) ~loc lid env =
lookup_type ~errors:true ~use ~loc lid env
let lookup_modtype ?(use=true) ~loc lid env =
lookup_modtype ~errors:true ~use ~loc lid env
let lookup_modtype_path ?(use=true) ~loc lid env =
fst (lookup_modtype_lazy ~errors:true ~use ~loc lid env)
let lookup_class ?(use=true) ~loc lid env =
lookup_class ~errors:true ~use ~loc lid env
let lookup_cltype ?(use=true) ~loc lid env =
lookup_cltype ~errors:true ~use ~loc lid env
let lookup_all_constructors ?(use=true) ~loc usage lid env =
match lookup_all_constructors ~errors:true ~use ~loc usage lid env with
| exception Error(Lookup_error(loc', env', err)) ->
(Error(loc', env', err) : _ result)
| cstrs -> Ok cstrs
let lookup_constructor ?(use=true) ~loc lid env =
lookup_constructor ~errors:true ~use ~loc lid env
let lookup_all_constructors_from_type ?(use=true) ~loc usage ty_path env =
lookup_all_constructors_from_type ~use ~loc usage ty_path env
let lookup_all_labels ?(use=true) ~loc usage lid env =
match lookup_all_labels ~errors:true ~use ~loc usage lid env with
| exception Error(Lookup_error(loc', env', err)) ->
(Error(loc', env', err) : _ result)
| lbls -> Ok lbls
let lookup_label ?(use=true) ~loc lid env =
lookup_label ~errors:true ~use ~loc lid env
let lookup_all_labels_from_type ?(use=true) ~loc usage ty_path env =
lookup_all_labels_from_type ~use ~loc usage ty_path env
let lookup_instance_variable ?(use=true) ~loc name env =
match IdTbl.find_name_and_modes wrap_value ~mark:use name env.values with
| (path, _, Val_bound vda) -> begin
let desc = vda.vda_description in
match desc.val_kind with
| Val_ivar(mut, cl_num) ->
use_value ~use ~loc path vda;
path, mut, cl_num, desc.val_type
| _ ->
lookup_error loc env (Not_an_instance_variable name)
end
| (_, _, Val_unbound Val_unbound_instance_variable) ->
lookup_error loc env (Masked_instance_variable (Lident name))
| (_, _, Val_unbound Val_unbound_self) ->
lookup_error loc env (Not_an_instance_variable name)
| (_, _, Val_unbound Val_unbound_ancestor) ->
lookup_error loc env (Not_an_instance_variable name)
| (_, _, Val_unbound Val_unbound_ghost_recursive _) ->
lookup_error loc env (Unbound_instance_variable name)
| exception Not_found ->
lookup_error loc env (Unbound_instance_variable name)
let bound_module name env =
match IdTbl.find_name wrap_module ~mark:false name env.modules with
| _ -> true
| exception Not_found ->
if Current_unit_name.is name then false
else begin
match find_pers_mod (name |> Compilation_unit.Name.of_string) with
| _ -> true
| exception Not_found -> false
end
let bound wrap proj name env =
match IdTbl.find_name_and_modes wrap ~mark:false name (proj env) with
| _ -> true
| exception Not_found -> false
let bound_value name env =
bound wrap_value (fun env -> env.values) name env
let bound_type name env =
bound wrap_identity (fun env -> env.types) name env
let bound_modtype name env =
bound wrap_identity (fun env -> env.modtypes) name env
let bound_class name env =
bound wrap_identity (fun env -> env.classes) name env
let bound_cltype name env =
bound wrap_identity (fun env -> env.cltypes) name env
let find_all wrap proj1 proj2 f lid env acc =
match lid with
| None ->
IdTbl.fold_name wrap
(fun name (p, data) acc -> f name p data acc)
(proj1 env) acc
| Some l ->
let p, desc =
lookup_module_components
~errors:false ~use:false ~loc:Location.none l env
in
begin match get_components desc with
| Structure_comps c ->
NameMap.fold
(fun s data acc -> f s (Pdot (p, s)) (wrap data) acc)
(proj2 c) acc
| Functor_comps _ ->
acc
end
let find_all_simple_list proj1 proj2 f lid env acc =
match lid with
| None ->
TycompTbl.fold_name
(fun data acc -> f data acc)
(proj1 env) acc
| Some l ->
let (_p, desc) =
lookup_module_components
~errors:false ~use:false ~loc:Location.none l env
in
begin match get_components desc with
| Structure_comps c ->
NameMap.fold
(fun _s comps acc ->
match comps with
| [] -> acc
| data :: _ -> f data acc)
(proj2 c) acc
| Functor_comps _ ->
acc
end
let fold_modules f lid env acc =
match lid with
| None ->
IdTbl.fold_name wrap_module
(fun name (p, entry) acc ->
match entry with
| Mod_unbound _ -> acc
| Mod_local mda ->
let md =
Subst.Lazy.force_module_decl mda.mda_declaration
in
f name p md acc
| Mod_persistent ->
let modname = name |> Compilation_unit.Name.of_string in
match Persistent_env.find_in_cache !persistent_env modname with
| None -> acc
| Some mda ->
let md =
Subst.Lazy.force_module_decl mda.mda_declaration
in
f name p md acc)
env.modules
acc
| Some l ->
let p, desc =
lookup_module_components
~errors:false ~use:false ~loc:Location.none l env
in
begin match get_components desc with
| Structure_comps c ->
NameMap.fold
(fun s mda acc ->
let md =
Subst.Lazy.force_module_decl mda.mda_declaration
in
f s (Pdot (p, s)) md acc)
c.comp_modules
acc
| Functor_comps _ ->
acc
end
let fold_values f =
find_all wrap_value (fun env -> env.values) (fun sc -> sc.comp_values)
(fun k p ve acc ->
match ve with
| Val_unbound _ -> acc
| Val_bound vda -> f k p vda.vda_description acc)
and fold_constructors f =
find_all_simple_list (fun env -> env.constrs) (fun sc -> sc.comp_constrs)
(fun cda acc -> f cda.cda_description acc)
and fold_labels f =
find_all_simple_list (fun env -> env.labels) (fun sc -> sc.comp_labels) f
and fold_types f =
find_all wrap_identity
(fun env -> env.types) (fun sc -> sc.comp_types)
(fun k p tda acc -> f k p tda.tda_declaration acc)
and fold_modtypes f =
let f l path data acc = f l path (Subst.Lazy.force_modtype_decl data) acc in
find_all wrap_identity
(fun env -> env.modtypes) (fun sc -> sc.comp_modtypes)
(fun k p mta acc -> f k p mta.mtda_declaration acc)
and fold_classes f =
find_all wrap_identity (fun env -> env.classes) (fun sc -> sc.comp_classes)
(fun k p clda acc -> f k p clda.clda_declaration acc)
and fold_cltypes f =
find_all wrap_identity
(fun env -> env.cltypes) (fun sc -> sc.comp_cltypes)
(fun k p cltda acc -> f k p cltda.cltda_declaration acc)
let filter_non_loaded_persistent f env =
let to_remove =
IdTbl.fold_name wrap_module
(fun name (_, entry) acc ->
match entry with
| Mod_local _ -> acc
| Mod_unbound _ -> acc
| Mod_persistent ->
let modname = name |> Compilation_unit.Name.of_string in
match Persistent_env.find_in_cache !persistent_env modname with
| Some _ -> acc
| None ->
if f (Ident.create_persistent name) then
acc
else
String.Set.add name acc)
env.modules
String.Set.empty
in
let remove_ids tbl ids =
String.Set.fold
(fun name tbl -> IdTbl.remove (Ident.create_persistent name) tbl)
ids
tbl
in
let rec filter_summary summary ids =
if String.Set.is_empty ids then
summary
else
match summary with
Env_persistent (s, id) when String.Set.mem (Ident.name id) ids ->
filter_summary s (String.Set.remove (Ident.name id) ids)
| Env_empty
| Env_value _
| Env_type _
| Env_extension _
| Env_module _
| Env_modtype _
| Env_class _
| Env_cltype _
| Env_open _
| Env_functor_arg _
| Env_constraints _
| Env_copy_types _
| Env_persistent _
| Env_value_unbound _
| Env_module_unbound _ ->
map_summary (fun s -> filter_summary s ids) summary
in
{ env with
modules = remove_ids env.modules to_remove;
summary = filter_summary env.summary to_remove;
}
let summary env =
if Path.Map.is_empty env.local_constraints then env.summary
else Env_constraints (env.summary, env.local_constraints)
let last_env = s_ref empty
let last_reduced_env = s_ref empty
let keep_only_summary env =
if !last_env == env then !last_reduced_env
else begin
let new_env =
{
empty with
summary = env.summary;
local_constraints = env.local_constraints;
flags = env.flags;
}
in
last_env := env;
last_reduced_env := new_env;
new_env
end
let env_of_only_summary env_from_summary env =
let new_env = env_from_summary env.summary Subst.identity in
{ new_env with
local_constraints = env.local_constraints;
flags = env.flags;
}
open Format
let print_longident =
ref ((fun _ _ -> assert false) : formatter -> Longident.t -> unit)
let print_path =
ref ((fun _ _ -> assert false) : formatter -> Path.t -> unit)
let spellcheck ppf extract env lid =
let choices ~path name = Misc.spellcheck (extract path env) name in
match lid with
| Longident.Lapply _ -> ()
| Longident.Lident s ->
Misc.did_you_mean ppf (fun () -> choices ~path:None s)
| Longident.Ldot (r, s) ->
Misc.did_you_mean ppf (fun () -> choices ~path:(Some r) s)
let spellcheck_name ppf extract env name =
Misc.did_you_mean ppf
(fun () -> Misc.spellcheck (extract env) name)
let extract_values path env =
fold_values (fun name _ _ acc -> name :: acc) path env []
let extract_types path env =
fold_types (fun name _ _ acc -> name :: acc) path env []
let extract_modules path env =
fold_modules (fun name _ _ acc -> name :: acc) path env []
let extract_constructors path env =
fold_constructors (fun desc acc -> desc.cstr_name :: acc) path env []
let extract_labels path env =
fold_labels (fun desc acc -> desc.lbl_name :: acc) path env []
let extract_classes path env =
fold_classes (fun name _ _ acc -> name :: acc) path env []
let extract_modtypes path env =
fold_modtypes (fun name _ _ acc -> name :: acc) path env []
let extract_cltypes path env =
fold_cltypes (fun name _ _ acc -> name :: acc) path env []
let extract_instance_variables env =
fold_values
(fun name _ descr acc ->
match descr.val_kind with
| Val_ivar _ -> name :: acc
| _ -> acc) None env []
let report_lookup_error _loc env ppf = function
| Unbound_value(lid, hint) -> begin
fprintf ppf "Unbound value %a" !print_longident lid;
spellcheck ppf extract_values env lid;
match hint with
| No_hint -> ()
| Missing_rec def_loc ->
let (_, line, _) =
Location.get_pos_info def_loc.Location.loc_start
in
fprintf ppf
"@.@[%s@ %s %i@]"
"Hint: If this is a recursive definition,"
"you should add the 'rec' keyword on line"
line
end
| Unbound_type lid ->
fprintf ppf "Unbound type constructor %a" !print_longident lid;
spellcheck ppf extract_types env lid;
| Unbound_module lid -> begin
fprintf ppf "Unbound module %a" !print_longident lid;
match find_modtype_by_name lid env with
| exception Not_found -> spellcheck ppf extract_modules env lid;
| _ ->
fprintf ppf
"@.@[%s %a, %s@]"
"Hint: There is a module type named"
!print_longident lid
"but module types are not modules"
end
| Unbound_constructor lid ->
fprintf ppf "Unbound constructor %a" !print_longident lid;
spellcheck ppf extract_constructors env lid;
| Unbound_label lid ->
fprintf ppf "Unbound record field %a" !print_longident lid;
spellcheck ppf extract_labels env lid;
| Unbound_class lid -> begin
fprintf ppf "Unbound class %a" !print_longident lid;
match find_cltype_by_name lid env with
| exception Not_found -> spellcheck ppf extract_classes env lid;
| _ ->
fprintf ppf
"@.@[%s %a, %s@]"
"Hint: There is a class type named"
!print_longident lid
"but classes are not class types"
end
| Unbound_modtype lid -> begin
fprintf ppf "Unbound module type %a" !print_longident lid;
match find_module_by_name lid env with
| exception Not_found -> spellcheck ppf extract_modtypes env lid;
| _ ->
fprintf ppf
"@.@[%s %a, %s@]"
"Hint: There is a module named"
!print_longident lid
"but modules are not module types"
end
| Unbound_cltype lid ->
fprintf ppf "Unbound class type %a" !print_longident lid;
spellcheck ppf extract_cltypes env lid;
| Unbound_instance_variable s ->
fprintf ppf "Unbound instance variable %s" s;
spellcheck_name ppf extract_instance_variables env s;
| Not_an_instance_variable s ->
fprintf ppf "The value %s is not an instance variable" s;
spellcheck_name ppf extract_instance_variables env s;
| Masked_instance_variable lid ->
fprintf ppf
"The instance variable %a@ \
cannot be accessed from the definition of another instance variable"
!print_longident lid
| Masked_self_variable lid ->
fprintf ppf
"The self variable %a@ \
cannot be accessed from the definition of an instance variable"
!print_longident lid
| Masked_ancestor_variable lid ->
fprintf ppf
"The ancestor variable %a@ \
cannot be accessed from the definition of an instance variable"
!print_longident lid
| Illegal_reference_to_recursive_module ->
fprintf ppf "Illegal recursive module reference"
| Structure_used_as_functor lid ->
fprintf ppf "@[The module %a is a structure, it cannot be applied@]"
!print_longident lid
| Abstract_used_as_functor lid ->
fprintf ppf "@[The module %a is abstract, it cannot be applied@]"
!print_longident lid
| Functor_used_as_structure lid ->
fprintf ppf "@[The module %a is a functor, \
it cannot have any components@]" !print_longident lid
| Abstract_used_as_structure lid ->
fprintf ppf "@[The module %a is abstract, \
it cannot have any components@]" !print_longident lid
| Generative_used_as_applicative lid ->
fprintf ppf "@[The functor %a is generative,@ it@ cannot@ be@ \
applied@ in@ type@ expressions@]" !print_longident lid
| Cannot_scrape_alias(lid, p) ->
let cause =
if Current_unit_name.is_path p then "is the current compilation unit"
else "is missing"
in
fprintf ppf
"The module %a is an alias for module %a, which %s"
!print_longident lid !print_path p cause
| Local_value_used_in_closure (lid, context) ->
fprintf ppf
"@[The value %a is local, so cannot be used \
inside a closure that might escape@]"
!print_longident lid;
begin match context with
| Some Tailcall_argument ->
fprintf ppf "@.@[Hint: The closure might escape because it \
is an argument to a tail call@]"
| _ -> ()
end
let report_error ppf = function
| Missing_module(_, path1, path2) ->
fprintf ppf "@[@[<hov>";
if Path.same path1 path2 then
fprintf ppf "Internal path@ %s@ is dangling." (Path.name path1)
else
fprintf ppf "Internal path@ %s@ expands to@ %s@ which is dangling."
(Path.name path1) (Path.name path2);
fprintf ppf "@]@ @[%s@ %s@ %s.@]@]"
"The compiled interface for module" (Ident.name (Path.head path2))
"was not found"
| Illegal_value_name(_loc, name) ->
fprintf ppf "'%s' is not a valid value identifier."
name
| Lookup_error(loc, t, err) -> report_lookup_error loc t ppf err
let () =
Location.register_error_of_exn
(function
| Error err ->
let loc =
match err with
| Missing_module (loc, _, _)
| Illegal_value_name (loc, _)
| Lookup_error(loc, _, _) -> loc
in
let error_of_printer =
if loc = Location.none
then Location.error_of_printer_file
else Location.error_of_printer ~loc ?sub:None
in
Some (error_of_printer report_error err)
| _ ->
None
)
|
7008e9c6c45b3adf5357007b07b8b38775d2b42309f719945b60e45c72348b5a | smudgelang/smudge | NoTargetAnyStates.hs | Copyright 2019 and .
-- This software is released under the 3-Clause BSD License.
-- The license can be viewed at
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE TypeFamilies #
module Language.Smudge.Passes.NoTargetAnyStates (
NoTargetAnyStates
) where
import Language.Smudge.Grammar (
StateMachine(StateMachine),
WholeState,
State(State),
)
import Language.Smudge.Parsers.Id (at)
import Language.Smudge.Semantics.Model (TaggedName, disqualifyTag)
import Language.Smudge.Passes.Passes (Passable(..), Severity(..), Fault(..))
import Data.Semigroup (Semigroup)
newtype NoTargetAnyStates = NoTargetAnyStates [TaggedName]
deriving (Semigroup, Monoid)
instance Passable NoTargetAnyStates where
type Representation NoTargetAnyStates = [WholeState TaggedName]
accumulate (_, _, _, hs, _) = mappend $ NoTargetAnyStates [st | (_, _, State st) <- hs, "_" == disqualifyTag st]
test _ (NoTargetAnyStates []) = []
test (StateMachine sm_name, _) (NoTargetAnyStates ss) =
[Fault ERROR (at st) $ disqualifyTag sm_name ++ ": Any-state forbidden in state transition" | st <- ss]
| null | https://raw.githubusercontent.com/smudgelang/smudge/c4f4fc3c7e6344fb0dd39d4679752120c7ff83da/src/Language/Smudge/Passes/NoTargetAnyStates.hs | haskell | This software is released under the 3-Clause BSD License.
The license can be viewed at | Copyright 2019 and .
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE TypeFamilies #
module Language.Smudge.Passes.NoTargetAnyStates (
NoTargetAnyStates
) where
import Language.Smudge.Grammar (
StateMachine(StateMachine),
WholeState,
State(State),
)
import Language.Smudge.Parsers.Id (at)
import Language.Smudge.Semantics.Model (TaggedName, disqualifyTag)
import Language.Smudge.Passes.Passes (Passable(..), Severity(..), Fault(..))
import Data.Semigroup (Semigroup)
newtype NoTargetAnyStates = NoTargetAnyStates [TaggedName]
deriving (Semigroup, Monoid)
instance Passable NoTargetAnyStates where
type Representation NoTargetAnyStates = [WholeState TaggedName]
accumulate (_, _, _, hs, _) = mappend $ NoTargetAnyStates [st | (_, _, State st) <- hs, "_" == disqualifyTag st]
test _ (NoTargetAnyStates []) = []
test (StateMachine sm_name, _) (NoTargetAnyStates ss) =
[Fault ERROR (at st) $ disqualifyTag sm_name ++ ": Any-state forbidden in state transition" | st <- ss]
|
1fd305c92de3a53e5b6c43d00370b4054549c27546dc96f72fafbd9bb49d8f46 | 2600hz/kazoo | kazoo_config_app.erl | %%%-----------------------------------------------------------------------------
( C ) 2010 - 2020 , 2600Hz
%%% @doc
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(kazoo_config_app).
-behaviour(application).
-include_lib("kazoo_stdlib/include/kz_types.hrl").
-export([start/2, stop/1]).
%%==============================================================================
%% Application callbacks
%%==============================================================================
%%------------------------------------------------------------------------------
%% @doc Implement the application start behaviour.
%% @end
%%------------------------------------------------------------------------------
-spec start(application:start_type(), any()) -> kz_types:startapp_ret().
start(_StartType, _StartArgs) ->
kazoo_config_sup:start_link().
%%------------------------------------------------------------------------------
%% @doc Implement the application stop behaviour.
%% @end
%%------------------------------------------------------------------------------
-spec stop(any()) -> any().
stop(_State) ->
'ok'.
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/core/kazoo_config/src/kazoo_config_app.erl | erlang | -----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
==============================================================================
Application callbacks
==============================================================================
------------------------------------------------------------------------------
@doc Implement the application start behaviour.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Implement the application stop behaviour.
@end
------------------------------------------------------------------------------ | ( C ) 2010 - 2020 , 2600Hz
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(kazoo_config_app).
-behaviour(application).
-include_lib("kazoo_stdlib/include/kz_types.hrl").
-export([start/2, stop/1]).
-spec start(application:start_type(), any()) -> kz_types:startapp_ret().
start(_StartType, _StartArgs) ->
kazoo_config_sup:start_link().
-spec stop(any()) -> any().
stop(_State) ->
'ok'.
|
efebf7b52f2ab08c0a8afad4a43c11d9cbff0674d5ad874f93bd2581ce27ba22 | nuprl/gradual-typing-performance | main.rkt | #lang typed/racket
stress testing run - t on 100 look ups , plus 5 [ dis|en]ables
;; ===================================================================================================
(require require-typed-check)
(require (only-in racket/string string-join))
(require/typed "run-t.rkt"
[EOM String]
[run-t (-> String String)])
(: dat->station-names (-> Path-String (Listof String)))
(define (dat->station-names fname)
(for/list ([line (in-list (file->lines fname))]
#:when (and (< 0 (string-length line))
(not (eq? #\- (string-ref line 0)))))
(string-trim line)))
(define BLUE-STATIONS (dat->station-names "../base/blue.dat"))
(define GREEN-STATIONS (dat->station-names "../base/green.dat"))
(: path (-> String String String))
(define (path from to)
(format "from ~a to ~a" from to))
(: enable (-> String String))
(define (enable s)
(format "enable ~a" s))
(: disable (-> String String))
(define (disable s)
(format "disable ~a" s))
(: assert (-> String Natural Void))
(define (assert result expected-length)
(define num-result (length (string-split result "\n")))
(unless (= num-result expected-length)
(error (format "Expected ~a results, got ~a\nFull list:~a"
expected-length
num-result
result))))
(: main (-> Void))
;; run the stress test n times
(define (main)
(: run-query (-> String String))
(define (run-query str)
(define r (run-t str))
(if r
r
(error 'main (format "run-t failed to respond to query ~e\n" str))))
(assert (run-query (path "Airport" "Northeastern")) 14)
(assert (run-query (disable "Government")) 1)
(assert (run-query (path "Airport" "Northeastern")) 16)
(assert (run-query (enable "Government")) 1)
(assert (run-query (path "Airport" "Harvard Square")) 12)
(assert (run-query (disable "Park Street")) 1)
(assert (run-query (path "Northeastern" "Harvard Square")) 1) ;;impossible path
(assert (run-query (enable "Park Street")) 1)
(assert (run-query (path "Northeastern" "Harvard Square")) 12)
;; --
(for* ([s1 (in-list GREEN-STATIONS)] [s2 (in-list BLUE-STATIONS)])
(run-query (path s1 s2))))
;(require/typed contract-profile [contract-profile-thunk (-> (-> Void) Void)])
;(contract-profile-thunk main)
(time (main))
| null | https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/paper/jfp-2016/src/worst-configurations-6.4/mbta/1110/main.rkt | racket | ===================================================================================================
run the stress test n times
impossible path
--
(require/typed contract-profile [contract-profile-thunk (-> (-> Void) Void)])
(contract-profile-thunk main) | #lang typed/racket
stress testing run - t on 100 look ups , plus 5 [ dis|en]ables
(require require-typed-check)
(require (only-in racket/string string-join))
(require/typed "run-t.rkt"
[EOM String]
[run-t (-> String String)])
(: dat->station-names (-> Path-String (Listof String)))
(define (dat->station-names fname)
(for/list ([line (in-list (file->lines fname))]
#:when (and (< 0 (string-length line))
(not (eq? #\- (string-ref line 0)))))
(string-trim line)))
(define BLUE-STATIONS (dat->station-names "../base/blue.dat"))
(define GREEN-STATIONS (dat->station-names "../base/green.dat"))
(: path (-> String String String))
(define (path from to)
(format "from ~a to ~a" from to))
(: enable (-> String String))
(define (enable s)
(format "enable ~a" s))
(: disable (-> String String))
(define (disable s)
(format "disable ~a" s))
(: assert (-> String Natural Void))
(define (assert result expected-length)
(define num-result (length (string-split result "\n")))
(unless (= num-result expected-length)
(error (format "Expected ~a results, got ~a\nFull list:~a"
expected-length
num-result
result))))
(: main (-> Void))
(define (main)
(: run-query (-> String String))
(define (run-query str)
(define r (run-t str))
(if r
r
(error 'main (format "run-t failed to respond to query ~e\n" str))))
(assert (run-query (path "Airport" "Northeastern")) 14)
(assert (run-query (disable "Government")) 1)
(assert (run-query (path "Airport" "Northeastern")) 16)
(assert (run-query (enable "Government")) 1)
(assert (run-query (path "Airport" "Harvard Square")) 12)
(assert (run-query (disable "Park Street")) 1)
(assert (run-query (enable "Park Street")) 1)
(assert (run-query (path "Northeastern" "Harvard Square")) 12)
(for* ([s1 (in-list GREEN-STATIONS)] [s2 (in-list BLUE-STATIONS)])
(run-query (path s1 s2))))
(time (main))
|
697355682a0afdf6c79fcf8f3d9f1b66e455356de7f452ce4736e04f085807dd | RichiH/git-annex | GlobalSetter.hs | git - annex global options
-
- Copyright 2015 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2015 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module CmdLine.GlobalSetter where
import Types.DeferredParse
import Common
import Annex
import Options.Applicative
globalFlag :: Annex () -> Mod FlagFields GlobalSetter -> GlobalOption
globalFlag setter = flag' (DeferredParse setter)
globalSetter :: (v -> Annex ()) -> Parser v -> GlobalOption
globalSetter setter parser = DeferredParse . setter <$> parser
combineGlobalOptions :: [GlobalOption] -> Parser GlobalSetter
combineGlobalOptions l = DeferredParse . mapM_ getParsed
<$> many (foldl1 (<|>) l)
| null | https://raw.githubusercontent.com/RichiH/git-annex/bbcad2b0af8cd9264d0cb86e6ca126ae626171f3/CmdLine/GlobalSetter.hs | haskell | git - annex global options
-
- Copyright 2015 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2015 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module CmdLine.GlobalSetter where
import Types.DeferredParse
import Common
import Annex
import Options.Applicative
globalFlag :: Annex () -> Mod FlagFields GlobalSetter -> GlobalOption
globalFlag setter = flag' (DeferredParse setter)
globalSetter :: (v -> Annex ()) -> Parser v -> GlobalOption
globalSetter setter parser = DeferredParse . setter <$> parser
combineGlobalOptions :: [GlobalOption] -> Parser GlobalSetter
combineGlobalOptions l = DeferredParse . mapM_ getParsed
<$> many (foldl1 (<|>) l)
| |
347f5ca8eec59f8ed39c31371f8ba46a7dabac0cd875bb58326321a5f8230c68 | timothypratley/leaderboardx | db_firebase.cljs | (ns algopop.leaderboardx.app.db-firebase
(:require
[cljs.pprint :as pprint]
[algopop.leaderboardx.app.firebase :as firebase]
[reagent.core :as reagent]
[algopop.leaderboardx.app.firebase-serialization :as s]))
(defn unlisten
"Stops listening to a query tree."
[a t]
(.off (:ref @t))
(when-let [children (:children @t)]
(doseq [[k child] children]
(swap! a dissoc k)
(unlisten a child))))
(defn listen
"The input atom a will be modified to contain entities found by applying queries.
Successive queries are applied to the results of the previous query,
creating a tree of firebase references.
Queries are functions that return a reference .
All entities are unique and live in firebase under the entities path.
The reference tree is returned.
To stop listening to updates, call unlisten on the reference tree."
[a parent-k parent-v q & qs]
(let [r (q (firebase/user-entities) parent-k parent-v)
query-node (atom {:ref r
:children {}})]
(doto r
(.on "child_added"
(fn child-added [snapshot]
(let [k (s/firebase->clj (.-key snapshot))
v (s/firebase->clj (.val snapshot))]
(when (seq qs)
(swap! query-node assoc-in [:children k]
(apply listen a k v qs)))
(swap! a assoc k v))))
(.on "child_changed"
(fn child-changed [snapshot]
(swap! a update (s/firebase->clj (.-key snapshot))
merge (s/firebase->clj (.val snapshot)))))
(.on "child_removed"
(fn child-removed [snapshot]
(let [k (s/firebase->clj (.-key snapshot))
children (:children @query-node)
child (get children k)]
(swap! a dissoc k)
(when child
(unlisten a child))))))
query-node))
(defn watch-entities [parent-k a]
(reagent/with-let
[reference-tree
(listen
a
parent-k
nil
(fn get-edges-to-the-root [r k v]
(-> r
(.orderByChild "to")
(.equalTo k)))
(fn get-all-edges-from-nodes-connected-to-the-root [r k v]
(-> r
(.orderByChild "from")
(.equalTo (get v "from"))))
;; TODO: not quite right (pollutes top level because child returned without key)
;; Need it for information about the nodes
#_(fn get-the-from-nodes [r k v]
(-> r
(.orderByKey
(.equalTo (get v "from"))))))]
(finally
(unlisten a reference-tree))))
(defn watch-graph2 []
(firebase/db-ref []))
(defn membership [obj graph-name from to edge-name]
(doto obj
(aset (s/clj->firebase (str edge-name "-member-of-" graph-name))
#js {:from edge-name
:to graph-name
:edge-type "member-of"})
(aset (s/clj->firebase (str from "-member-of-" graph-name))
#js {:from from
:to graph-name
:edge-type "member-of"})
(aset (s/clj->firebase (str to "-member-of-" graph-name))
#js {:from to
:to graph-name
:edge-type "member-of"})))
;; TODO: created vs modified
(defn with-edge [obj graph-name from to node-type edge-type]
(let [edge-name (str from "-" edge-type "-" to)]
(doto obj
(aset (s/clj->firebase from)
#js {:created firebase/timestamp
:node-type node-type})
(aset (s/clj->firebase to)
#js {:created firebase/timestamp
:node-type node-type})
(aset (s/clj->firebase edge-name)
#js {:from from
:to to
:edge-type edge-type})
(membership graph-name from to edge-name))))
(defn build-update [obj graph-name entity-name node-type edge-type [out & more-outs :as outs] [in & more-ins :as ins]]
(cond
in (recur
(with-edge obj graph-name in entity-name node-type edge-type)
graph-name
entity-name
node-type
edge-type
outs
more-ins)
out (recur
(with-edge obj graph-name entity-name out node-type edge-type)
graph-name
entity-name
node-type
edge-type
more-outs
ins)
:else obj))
(defn replace-edges [graph-name entity-name node-type edge-type outs ins]
;; TODO: delete old edges!
(when (seq entity-name)
(let [entity-name (s/clj->firebase entity-name)]
(firebase/ref-update
[(firebase/user-entities)]
(build-update
(clj->js {(s/clj->firebase entity-name) {:created firebase/timestamp
:node-type node-type}})
graph-name
entity-name
node-type
edge-type
outs
ins)))))
| null | https://raw.githubusercontent.com/timothypratley/leaderboardx/ad1719b3bb49fb7ab495ed833f1a451ebb3aec4d/src/algopop/leaderboardx/app/db_firebase.cljs | clojure | TODO: not quite right (pollutes top level because child returned without key)
Need it for information about the nodes
TODO: created vs modified
TODO: delete old edges! | (ns algopop.leaderboardx.app.db-firebase
(:require
[cljs.pprint :as pprint]
[algopop.leaderboardx.app.firebase :as firebase]
[reagent.core :as reagent]
[algopop.leaderboardx.app.firebase-serialization :as s]))
(defn unlisten
"Stops listening to a query tree."
[a t]
(.off (:ref @t))
(when-let [children (:children @t)]
(doseq [[k child] children]
(swap! a dissoc k)
(unlisten a child))))
(defn listen
"The input atom a will be modified to contain entities found by applying queries.
Successive queries are applied to the results of the previous query,
creating a tree of firebase references.
Queries are functions that return a reference .
All entities are unique and live in firebase under the entities path.
The reference tree is returned.
To stop listening to updates, call unlisten on the reference tree."
[a parent-k parent-v q & qs]
(let [r (q (firebase/user-entities) parent-k parent-v)
query-node (atom {:ref r
:children {}})]
(doto r
(.on "child_added"
(fn child-added [snapshot]
(let [k (s/firebase->clj (.-key snapshot))
v (s/firebase->clj (.val snapshot))]
(when (seq qs)
(swap! query-node assoc-in [:children k]
(apply listen a k v qs)))
(swap! a assoc k v))))
(.on "child_changed"
(fn child-changed [snapshot]
(swap! a update (s/firebase->clj (.-key snapshot))
merge (s/firebase->clj (.val snapshot)))))
(.on "child_removed"
(fn child-removed [snapshot]
(let [k (s/firebase->clj (.-key snapshot))
children (:children @query-node)
child (get children k)]
(swap! a dissoc k)
(when child
(unlisten a child))))))
query-node))
(defn watch-entities [parent-k a]
(reagent/with-let
[reference-tree
(listen
a
parent-k
nil
(fn get-edges-to-the-root [r k v]
(-> r
(.orderByChild "to")
(.equalTo k)))
(fn get-all-edges-from-nodes-connected-to-the-root [r k v]
(-> r
(.orderByChild "from")
(.equalTo (get v "from"))))
#_(fn get-the-from-nodes [r k v]
(-> r
(.orderByKey
(.equalTo (get v "from"))))))]
(finally
(unlisten a reference-tree))))
(defn watch-graph2 []
(firebase/db-ref []))
(defn membership [obj graph-name from to edge-name]
(doto obj
(aset (s/clj->firebase (str edge-name "-member-of-" graph-name))
#js {:from edge-name
:to graph-name
:edge-type "member-of"})
(aset (s/clj->firebase (str from "-member-of-" graph-name))
#js {:from from
:to graph-name
:edge-type "member-of"})
(aset (s/clj->firebase (str to "-member-of-" graph-name))
#js {:from to
:to graph-name
:edge-type "member-of"})))
(defn with-edge [obj graph-name from to node-type edge-type]
(let [edge-name (str from "-" edge-type "-" to)]
(doto obj
(aset (s/clj->firebase from)
#js {:created firebase/timestamp
:node-type node-type})
(aset (s/clj->firebase to)
#js {:created firebase/timestamp
:node-type node-type})
(aset (s/clj->firebase edge-name)
#js {:from from
:to to
:edge-type edge-type})
(membership graph-name from to edge-name))))
(defn build-update [obj graph-name entity-name node-type edge-type [out & more-outs :as outs] [in & more-ins :as ins]]
(cond
in (recur
(with-edge obj graph-name in entity-name node-type edge-type)
graph-name
entity-name
node-type
edge-type
outs
more-ins)
out (recur
(with-edge obj graph-name entity-name out node-type edge-type)
graph-name
entity-name
node-type
edge-type
more-outs
ins)
:else obj))
(defn replace-edges [graph-name entity-name node-type edge-type outs ins]
(when (seq entity-name)
(let [entity-name (s/clj->firebase entity-name)]
(firebase/ref-update
[(firebase/user-entities)]
(build-update
(clj->js {(s/clj->firebase entity-name) {:created firebase/timestamp
:node-type node-type}})
graph-name
entity-name
node-type
edge-type
outs
ins)))))
|
03e5c6038ce12f5e3ee091d3a72013bf9b79be5aa899add3f54b78568d78c622 | mtravers/goddinpotty | roam_images.clj | (ns goddinpotty.import.roam-images
(:require [org.parkerici.multitool.core :as u]
[org.parkerici.multitool.cljcore :as ju]
[me.raynes.fs :as fs]
[goddinpotty.batadase :as bd]
[goddinpotty.rendering :as render]
[goddinpotty.utils :as utils]
[clojure.string :as s]
[clojure.tools.logging :as log]
))
;;; Was just images, now will do arbitrary assets like pdfs regardless of where they appear in
;;; markdown.
Regex that matches assets , returns filetype as second elt of match
(def roam-asset-regex #"https\:\/\/firebasestorage\.googleapis\.com/.*\.(\w+)\?.*")
(defn- roam-asset-url?
"Returns the extension if this is in fact a roam asset, nil otherwise"
[url]
(second (re-matches roam-asset-regex url)))
(defn- image-block?
[b]
(= :image (first (second (:parsed b)))))
Now in ju/
(defn local-file
([url]
(local-file url (ju/temp-file)))
([url local-file]
(let [url (java.net.URL. url)
local-file (if (instance? java.io.File local-file)
local-file
(java.io.File. local-file)
)]
(clojure.java.io/copy (.openStream url) local-file)
(str local-file))))
(defn block-links
[block]
(u/walk-collect #(and (string? %)
(s/starts-with? % "http")
%)
(:parsed block)))
(defn- maybe-download-url
[bm directory block url collect]
(when-let [ext (roam-asset-url? url)]
(let [base-filename (str (utils/clean-page-title (:content (bd/block-page bm block))) "-" (:id block) "." ext)
local-relative (str "assets/" base-filename )
local-full (str directory "/" local-relative)
new-url (str "../assets/" base-filename)
]
(if (fs/exists? local-full)
(log/info :already-downloaded base-filename url)
(do
(log/info :download base-filename url)
(local-file url local-full)))
(collect {url new-url}))))
(defn download-images
"Returns a map of original URLs to local files (relative path)"
[bm directory]
(u/collecting-merge
(fn [collect]
(doseq [block (vals bm)]
(when (image-block? block)
(let [[image-source _] (render/parse-image-block block)]
(maybe-download-url bm directory block image-source collect)))
(doseq [link (block-links block)] ;TODO if this was block-links-unparsed we could skip parse entirely
(maybe-download-url bm directory block link collect))))))
(defn- subst-image-source
[str substs]
(let [[image-source alt-text] (render/parse-image-string str)
replacement (get substs image-source)]
(if replacement
(s/replace str image-source replacement)
str)))
This totally does n't work unless its done before : hack
(defn subst-images
"Toplevel call"
[bm substs]
(u/map-values
(fn [b]
;; Now does every content. This might be slow as shit.
(if true ; (roam-image-block? b)
Note : there are two representations in a block : content and : parsed , but for now tweaking : content will suffice
(assoc b
:content
(subst-image-source (:content b) substs))
(dissoc b :dchildren))) ;avoid confusion
bm))
;;; More general and stupider version
(defn block-links-unparsed
[s]
(map first (re-seq roam-asset-regex s)))
(defn subst-string
[substs s]
(reduce (fn [ss link]
(if-let [replacement (get substs link)]
(s/replace ss link replacement)
(do (log/warn "No subst for" link s) ;shouldn't happen
ss)))
s
(block-links-unparsed s)))
(defn subst-images
"Toplevel call"
[bm substs]
(u/map-values
(fn [b]
;; Now does every content. This might be slow as shit.
(update b
:content
(partial subst-string substs)))
bm))
| null | https://raw.githubusercontent.com/mtravers/goddinpotty/b0b7f4fe5781a56b226d151d11a25130224bf093/src/goddinpotty/import/roam_images.clj | clojure | Was just images, now will do arbitrary assets like pdfs regardless of where they appear in
markdown.
TODO if this was block-links-unparsed we could skip parse entirely
Now does every content. This might be slow as shit.
(roam-image-block? b)
avoid confusion
More general and stupider version
shouldn't happen
Now does every content. This might be slow as shit. | (ns goddinpotty.import.roam-images
(:require [org.parkerici.multitool.core :as u]
[org.parkerici.multitool.cljcore :as ju]
[me.raynes.fs :as fs]
[goddinpotty.batadase :as bd]
[goddinpotty.rendering :as render]
[goddinpotty.utils :as utils]
[clojure.string :as s]
[clojure.tools.logging :as log]
))
Regex that matches assets , returns filetype as second elt of match
(def roam-asset-regex #"https\:\/\/firebasestorage\.googleapis\.com/.*\.(\w+)\?.*")
(defn- roam-asset-url?
"Returns the extension if this is in fact a roam asset, nil otherwise"
[url]
(second (re-matches roam-asset-regex url)))
(defn- image-block?
[b]
(= :image (first (second (:parsed b)))))
Now in ju/
(defn local-file
([url]
(local-file url (ju/temp-file)))
([url local-file]
(let [url (java.net.URL. url)
local-file (if (instance? java.io.File local-file)
local-file
(java.io.File. local-file)
)]
(clojure.java.io/copy (.openStream url) local-file)
(str local-file))))
(defn block-links
[block]
(u/walk-collect #(and (string? %)
(s/starts-with? % "http")
%)
(:parsed block)))
(defn- maybe-download-url
[bm directory block url collect]
(when-let [ext (roam-asset-url? url)]
(let [base-filename (str (utils/clean-page-title (:content (bd/block-page bm block))) "-" (:id block) "." ext)
local-relative (str "assets/" base-filename )
local-full (str directory "/" local-relative)
new-url (str "../assets/" base-filename)
]
(if (fs/exists? local-full)
(log/info :already-downloaded base-filename url)
(do
(log/info :download base-filename url)
(local-file url local-full)))
(collect {url new-url}))))
(defn download-images
"Returns a map of original URLs to local files (relative path)"
[bm directory]
(u/collecting-merge
(fn [collect]
(doseq [block (vals bm)]
(when (image-block? block)
(let [[image-source _] (render/parse-image-block block)]
(maybe-download-url bm directory block image-source collect)))
(maybe-download-url bm directory block link collect))))))
(defn- subst-image-source
[str substs]
(let [[image-source alt-text] (render/parse-image-string str)
replacement (get substs image-source)]
(if replacement
(s/replace str image-source replacement)
str)))
This totally does n't work unless its done before : hack
(defn subst-images
"Toplevel call"
[bm substs]
(u/map-values
(fn [b]
Note : there are two representations in a block : content and : parsed , but for now tweaking : content will suffice
(assoc b
:content
(subst-image-source (:content b) substs))
bm))
(defn block-links-unparsed
[s]
(map first (re-seq roam-asset-regex s)))
(defn subst-string
[substs s]
(reduce (fn [ss link]
(if-let [replacement (get substs link)]
(s/replace ss link replacement)
ss)))
s
(block-links-unparsed s)))
(defn subst-images
"Toplevel call"
[bm substs]
(u/map-values
(fn [b]
(update b
:content
(partial subst-string substs)))
bm))
|
df6b49a37f47f250c54d2b9ecfcd9e23cfabb6938ed85852aa0d4a0c6802d867 | input-output-hk/project-icarus-importer | Util.hs | {-# LANGUAGE RankNTypes #-}
# LANGUAGE TupleSections #
module Util where
import Universum
import Cardano.Wallet.Client.Http
import Control.Lens hiding ((^..), (^?))
import System.IO.Unsafe (unsafePerformIO)
import Test.Hspec
import Test.QuickCheck (arbitrary, generate)
type WalletRef = MVar Wallet
randomWallet :: WalletOperation -> IO NewWallet
randomWallet walletOp =
generate $
NewWallet
<$> arbitrary
<*> pure Nothing
<*> arbitrary
<*> pure "Wallet"
<*> pure walletOp
randomCreateWallet :: IO NewWallet
randomCreateWallet = randomWallet CreateWallet
randomRestoreWallet :: IO NewWallet
randomRestoreWallet = randomWallet RestoreWallet
createWalletCheck :: WalletClient IO -> NewWallet -> IO Wallet
createWalletCheck wc newWallet = do
result <- fmap wrData <$> postWallet wc newWallet
result `shouldPrism` _Right
firstAccountAndId :: WalletClient IO -> Wallet -> IO (Account, WalletAddress)
firstAccountAndId wc wallet = do
etoAccts <- getAccounts wc (walId wallet)
toAccts <- fmap wrData etoAccts `shouldPrism` _Right
toAccts `shouldSatisfy` (not . null)
let (toAcct : _) = toAccts
accAddresses toAcct `shouldSatisfy` (not . null)
let (toAddr : _) = accAddresses toAcct
pure (toAcct, toAddr)
newWalletRef :: IO WalletRef
newWalletRef = newEmptyMVar
sampleWallet :: WalletRef -> WalletClient IO -> IO Wallet
sampleWallet wRef wc = do
mwallet <- tryTakeMVar wRef
case mwallet of
Just wallet -> do
putMVar wRef wallet
pure wallet
Nothing -> do
w <- randomWallet CreateWallet
w' <- createWalletCheck wc w
didWrite <- tryPutMVar wRef w'
if didWrite
then pure w'
else readMVar wRef
genesisWallet :: WalletClient IO -> IO Wallet
genesisWallet wc = do
mwallet <- tryTakeMVar genesisRef
case mwallet of
Just wallet -> do
putMVar genesisRef wallet
pure wallet
Nothing -> do
Right allWallets <- fmap wrData <$> getWallets wc
wallet <- maybe
(fail "Genesis wallet is missing; did you import it prior to executing the test-suite?")
return
(find (("Genesis wallet" ==) . walName) allWallets)
didWrite <- tryPutMVar genesisRef wallet
if didWrite
then pure wallet
else readMVar genesisRef
genesisRef :: WalletRef
genesisRef = unsafePerformIO newEmptyMVar
# NOINLINE genesisRef #
shouldPrism :: Show s => s -> Prism' s a -> IO a
shouldPrism a b = do
a `shouldSatisfy` has b
let Just x = a ^? b
pure x
infixr 8 `shouldPrism`
shouldPrism_ :: Show s => s -> Prism' s a -> IO ()
shouldPrism_ a b =
a `shouldSatisfy` has b
infixr 8 `shouldPrism_`
| null | https://raw.githubusercontent.com/input-output-hk/project-icarus-importer/36342f277bcb7f1902e677a02d1ce93e4cf224f0/wallet-new/integration/Util.hs | haskell | # LANGUAGE RankNTypes # | # LANGUAGE TupleSections #
module Util where
import Universum
import Cardano.Wallet.Client.Http
import Control.Lens hiding ((^..), (^?))
import System.IO.Unsafe (unsafePerformIO)
import Test.Hspec
import Test.QuickCheck (arbitrary, generate)
type WalletRef = MVar Wallet
randomWallet :: WalletOperation -> IO NewWallet
randomWallet walletOp =
generate $
NewWallet
<$> arbitrary
<*> pure Nothing
<*> arbitrary
<*> pure "Wallet"
<*> pure walletOp
randomCreateWallet :: IO NewWallet
randomCreateWallet = randomWallet CreateWallet
randomRestoreWallet :: IO NewWallet
randomRestoreWallet = randomWallet RestoreWallet
createWalletCheck :: WalletClient IO -> NewWallet -> IO Wallet
createWalletCheck wc newWallet = do
result <- fmap wrData <$> postWallet wc newWallet
result `shouldPrism` _Right
firstAccountAndId :: WalletClient IO -> Wallet -> IO (Account, WalletAddress)
firstAccountAndId wc wallet = do
etoAccts <- getAccounts wc (walId wallet)
toAccts <- fmap wrData etoAccts `shouldPrism` _Right
toAccts `shouldSatisfy` (not . null)
let (toAcct : _) = toAccts
accAddresses toAcct `shouldSatisfy` (not . null)
let (toAddr : _) = accAddresses toAcct
pure (toAcct, toAddr)
newWalletRef :: IO WalletRef
newWalletRef = newEmptyMVar
sampleWallet :: WalletRef -> WalletClient IO -> IO Wallet
sampleWallet wRef wc = do
mwallet <- tryTakeMVar wRef
case mwallet of
Just wallet -> do
putMVar wRef wallet
pure wallet
Nothing -> do
w <- randomWallet CreateWallet
w' <- createWalletCheck wc w
didWrite <- tryPutMVar wRef w'
if didWrite
then pure w'
else readMVar wRef
genesisWallet :: WalletClient IO -> IO Wallet
genesisWallet wc = do
mwallet <- tryTakeMVar genesisRef
case mwallet of
Just wallet -> do
putMVar genesisRef wallet
pure wallet
Nothing -> do
Right allWallets <- fmap wrData <$> getWallets wc
wallet <- maybe
(fail "Genesis wallet is missing; did you import it prior to executing the test-suite?")
return
(find (("Genesis wallet" ==) . walName) allWallets)
didWrite <- tryPutMVar genesisRef wallet
if didWrite
then pure wallet
else readMVar genesisRef
genesisRef :: WalletRef
genesisRef = unsafePerformIO newEmptyMVar
# NOINLINE genesisRef #
shouldPrism :: Show s => s -> Prism' s a -> IO a
shouldPrism a b = do
a `shouldSatisfy` has b
let Just x = a ^? b
pure x
infixr 8 `shouldPrism`
shouldPrism_ :: Show s => s -> Prism' s a -> IO ()
shouldPrism_ a b =
a `shouldSatisfy` has b
infixr 8 `shouldPrism_`
|
50b7d9087a59d35ecd51d5ca73a096ae26ddffb37da3d0c35dabd61eb05a4971 | drlivingston/kabob | drugbank-skos.clj | ;; drugbank id to other chemical id mappings
;; it would be nice if this rule just worked - unfortunately there are errors
;; in the drugbank mappings - they are NOT UNIQUE
;; `{:name "drugbank-drug-exact-mapping-assertion"
;; :head ((?/dbice skos/exactMatch ?/otherice))
;; :body
;; (~@(kabob/rtv _/record
;; iaodrugbank/DrugBankDrugRecord_drugBankIdDataField1 ?/dbice
;; iaodrugbank/DrugBankDrugRecord_externalIdentifiersDataField1 ?/otherice))}
;; CORRECTING FOR NON-UNIQUENESS
`{:name "drugbank-drug-exact-mapping-assertion"
:head ((?/dbice skos/exactMatch ?/otherice))
:body
((?/fv0 kiao/hasTemplate iaodrugbank/DrugBankDrugRecord_drugBankIdDataField1)
(?/fv0 obo/IAO_0000219 ?/dbice)
(?/record obo/has_part ?/fv0)
(?/record obo/has_part ?/externalfv)
(?/externalfv kiao/hasTemplate iaodrugbank/DrugBankDrugRecord_externalIdentifiersDataField1)
(?/externalfv obo/IAO_0000219 ?/otherice)
;;check to see if that fv is in another record
(:optional ((?/record2 obo/has_part ?/externalfv)
(:not (= ?/record2 ?/record))))
(:not (:bound ?/record2))
)
:options {:magic-prefixes [["franzOption_clauseReorderer" "franz:identity"]]}
}
| null | https://raw.githubusercontent.com/drlivingston/kabob/7038076849744c959da9c8507e8a8ab7215410aa/kabob-build/src/main/resources/edu/ucdenver/ccp/kabob/build/rules/temp/drugbank-skos/drugbank-skos.clj | clojure | drugbank id to other chemical id mappings
it would be nice if this rule just worked - unfortunately there are errors
in the drugbank mappings - they are NOT UNIQUE
`{:name "drugbank-drug-exact-mapping-assertion"
:head ((?/dbice skos/exactMatch ?/otherice))
:body
(~@(kabob/rtv _/record
iaodrugbank/DrugBankDrugRecord_drugBankIdDataField1 ?/dbice
iaodrugbank/DrugBankDrugRecord_externalIdentifiersDataField1 ?/otherice))}
CORRECTING FOR NON-UNIQUENESS
check to see if that fv is in another record |
`{:name "drugbank-drug-exact-mapping-assertion"
:head ((?/dbice skos/exactMatch ?/otherice))
:body
((?/fv0 kiao/hasTemplate iaodrugbank/DrugBankDrugRecord_drugBankIdDataField1)
(?/fv0 obo/IAO_0000219 ?/dbice)
(?/record obo/has_part ?/fv0)
(?/record obo/has_part ?/externalfv)
(?/externalfv kiao/hasTemplate iaodrugbank/DrugBankDrugRecord_externalIdentifiersDataField1)
(?/externalfv obo/IAO_0000219 ?/otherice)
(:optional ((?/record2 obo/has_part ?/externalfv)
(:not (= ?/record2 ?/record))))
(:not (:bound ?/record2))
)
:options {:magic-prefixes [["franzOption_clauseReorderer" "franz:identity"]]}
}
|
290ffaa2bf10e0e772d488de624ffd48f0bdf39b43e8e32d5e8777c09920336c | foreverbell/project-euler-solutions | 6.hs | main = print diff where
diff = sum1 - sum2
sum1 = (sum [1 .. 100]) ^ 2
sum2 = sum (map (^2) [1 .. 100])
| null | https://raw.githubusercontent.com/foreverbell/project-euler-solutions/c0bf2746aafce9be510892814e2d03e20738bf2b/src/6.hs | haskell | main = print diff where
diff = sum1 - sum2
sum1 = (sum [1 .. 100]) ^ 2
sum2 = sum (map (^2) [1 .. 100])
| |
d2adedc8a72427790d8842bc71bf952ea57f66c488d9d8a136b3b6ed5555625d | ocaml-flambda/flambda-backend | reload.ml | # 2 "backend/amd64/reload.ml"
(**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2000 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
[@@@ocaml.warning "+4"]
open Cmm
open Reg
open Mach
(* Reloading for the AMD64 *)
Summary of instruction set constraints :
" S " means either stack or register , " R " means register only .
Operation Res Arg1 Arg2
Imove R S
or S R
Iconst_int S if 32 - bit signed , R otherwise
Iconst_float R
Iconst_symbol ( not PIC ) S
Iconst_symbol ( PIC ) R
Icall_ind R
Itailcall_ind R
Iload R R R
Istore R R
Iintop(Icomp ) R R S
or R S R
Iintop(Imul|Idiv|Imod ) R R S
Iintop(Imulh ) R R S
Iintop(shift ) S S R
Iintop(others ) R R S
or S S R
Iintop_imm(Iadd , n)/lea R R
Iintop_imm(Imul , n ) R R
Iintop_imm(Icomp _ ) R S
Iintop_imm(others ) S S
Inegf ... Idivf R R S
Ifloatofint R S
Iintoffloat R S
Ispecific(Ilea ) R R R
Ispecific(Ifloatarithmem ) R R R
Ispecific(Icrc32q ) R R S ( and Res = Arg1 )
Ispecific(Irdtsc ) R
Ispecific(Irdpmc ) R R ( Arg1 = rcx )
Ispecific(Ifloat_iround ) R S
Ispecific(Ifloat_round _ ) R S
Ispecific(Ifloat_min ) R R S ( and Res = Arg1 )
Ispecific(Ifloat_max ) R R S ( and Res = Arg1 )
Conditional branches :
Iinttest S R
or R S
Ifloattest R S ( or S R if swapped test )
other tests S
"S" means either stack or register, "R" means register only.
Operation Res Arg1 Arg2
Imove R S
or S R
Iconst_int S if 32-bit signed, R otherwise
Iconst_float R
Iconst_symbol (not PIC) S
Iconst_symbol (PIC) R
Icall_ind R
Itailcall_ind R
Iload R R R
Istore R R
Iintop(Icomp) R R S
or R S R
Iintop(Imul|Idiv|Imod) R R S
Iintop(Imulh) R R S
Iintop(shift) S S R
Iintop(others) R R S
or S S R
Iintop_imm(Iadd, n)/lea R R
Iintop_imm(Imul, n) R R
Iintop_imm(Icomp _) R S
Iintop_imm(others) S S
Inegf...Idivf R R S
Ifloatofint R S
Iintoffloat R S
Ispecific(Ilea) R R R
Ispecific(Ifloatarithmem) R R R
Ispecific(Icrc32q) R R S (and Res = Arg1)
Ispecific(Irdtsc) R
Ispecific(Irdpmc) R R (Arg1 = rcx)
Ispecific(Ifloat_iround) R S
Ispecific(Ifloat_round _) R S
Ispecific(Ifloat_min) R R S (and Res = Arg1)
Ispecific(Ifloat_max) R R S (and Res = Arg1)
Conditional branches:
Iinttest S R
or R S
Ifloattest R S (or S R if swapped test)
other tests S
*)
let stackp r =
match r.loc with
Stack _ -> true
| Reg _ | Unknown -> false
class reload = object (self)
inherit Reloadgen.reload_generic as super
method! reload_operation op arg res =
match op with
| Iintop(Iadd|Isub|Iand|Ior|Ixor|Icheckbound) ->
One of the two arguments can reside in the stack , but not both
if stackp arg.(0) && stackp arg.(1)
then ([|arg.(0); self#makereg arg.(1)|], res)
else (arg, res)
| Iintop (Icomp _) ->
One of the two arguments can reside in the stack , but not both .
The result must be in a register .
The result must be in a register. *)
let res =
if stackp res.(0) then [| self#makereg res.(0) |] else res
in
if stackp arg.(0) && stackp arg.(1)
then ([|arg.(0); self#makereg arg.(1)|], res)
else (arg, res)
| Iintop_imm(Iadd, _) when arg.(0).loc <> res.(0).loc ->
(* This add will be turned into a lea; args and results must be
in registers *)
super#reload_operation op arg res
| Iintop_imm (Imul, _) ->
The result (= the argument ) must be a register ( # 10626 )
if stackp arg.(0)
then let r = self#makereg arg.(0) in ([|r|],[|r|])
else (arg, res)
| Ispecific Ifloat_iround
| Ispecific (Ifloat_round _)
| Iintop_imm (Icomp _, _) ->
(* The argument(s) can be either in register or on stack.
The result must be in a register. *)
let res =
if stackp res.(0) then [| self#makereg res.(0) |] else res
in
arg, res
| Iintop(Imulh _ | Idiv | Imod | Ilsl | Ilsr | Iasr)
| Iintop_imm((Iadd | Isub | Iand | Ior | Ixor | Ilsl | Ilsr | Iasr
| Imulh _ | Idiv | Imod | Icheckbound), _) ->
(* The argument(s) and results can be either in register or on stack *)
Note : Imulh , Idiv , Imod : arg(0 ) and res(0 ) already forced in regs
, , : ) already forced in regs
Ilsl, Ilsr, Iasr: arg(1) already forced in regs *)
(arg, res)
| Iintop_imm ((Ipopcnt | Iclz _ | Ictz _), _) -> assert false
| Iintop(Imul) | Iaddf | Isubf | Imulf | Idivf ->
First argument (= result ) must be in register , second arg
can reside in the stack
can reside in the stack *)
if stackp arg.(0)
then (let r = self#makereg arg.(0) in ([|r; arg.(1)|], [|r|]))
else (arg, res)
| Ispecific (Irdtsc | Irdpmc) ->
: result must be in register .
Irdpmc : result must be in register , arg.(0 ) already forced in reg .
Irdpmc: result must be in register, arg.(0) already forced in reg. *)
if stackp res.(0)
then (let r = self#makereg res.(0) in (arg, [|r|]))
else (arg, res)
| Ispecific(Ifloat_min | Ifloat_max)
| Ispecific Icrc32q ->
First argument and result must be in the same register .
Second argument can be either in a register or on stack .
Second argument can be either in a register or on stack. *)
if stackp arg.(0)
then (let r = self#makereg arg.(0) in ([|r; arg.(1)|], [|r|]))
else (arg, res)
| Ifloatofint | Iintoffloat ->
(* Result must be in register, but argument can be on stack *)
(arg, (if stackp res.(0) then [| self#makereg res.(0) |] else res))
| Iconst_int n ->
if n <= 0x7FFFFFFFn && n >= -0x80000000n
then (arg, res)
else super#reload_operation op arg res
| Iconst_symbol _ ->
if !Clflags.pic_code || !Clflags.dlcode || Arch.win64
then super#reload_operation op arg res
else (arg, res)
| Icsel tst ->
Last argument and result must be in the same register .
Result must be in register . The last two arguments are used
for emitting cmov , the remaining for [ Mach.test ] .
Result must be in register. The last two arguments are used
for emitting cmov, the remaining for [Mach.test]. *)
CR gyorsh : we already use Array.sub here ,
so no reason for this convoluted arrangement ,
using the first two args for cmov would simplify most of the
code as it wo n't need to have [ len ] , it will be able to have indexes
directly , but then in Emit we will have to do again
to call emit_test ( unless emit_test takes an index , which is also
weird ) .
so no reason for this convoluted arrangement,
using the first two args for cmov would simplify most of the
code as it won't need to have [len], it will be able to have indexes
directly, but then in Emit we will have to do Array.sub again
to call emit_test (unless emit_test takes an index, which is also
weird). *)
CR - soon gyorsh : [ reload_test ] may lose some sharing
between the arguments of the test and the last two arguments
and the result of the move .
between the arguments of the test and the last two arguments
and the result of the move. *)
let r = if stackp res.(0) then self#makereg res.(0) else res.(0) in
let len = Array.length arg in
let arg' = Array.copy arg in
let test_arg = self#reload_test tst (Array.sub arg 0 (len - 2)) in
for i = 0 to len - 2 - 1 do
arg'.(i) <- test_arg.(i)
done;
arg'.(len - 1) <- r;
(arg', [|r|])
| Iintop (Ipopcnt | Iclz _| Ictz _)
| Iintop_atomic _
| Ispecific (Isqrtf | Isextend32 | Izextend32 | Ilea _
| Istore_int (_, _, _)
| Ioffset_loc (_, _) | Ifloatarithmem (_, _)
| Ipause
| Ilfence | Isfence | Imfence
| Iprefetch _
| Ibswap _| Ifloatsqrtf _)
| Imove|Ispill|Ireload|Inegf|Iabsf|Iconst_float _|Icall_ind|Icall_imm _
| Icompf _
| Itailcall_ind|Itailcall_imm _|Iextcall _|Istackoffset _|Iload (_, _, _)
| Istore (_, _, _)|Ialloc _|Iname_for_debugger _|Iprobe _|Iprobe_is_enabled _
| Ivalueofint | Iintofvalue | Iopaque
| Ibeginregion | Iendregion | Ipoll _
-> (* Other operations: all args and results in registers,
except moves and probes. *)
super#reload_operation op arg res
method! reload_test tst arg =
match tst with
Iinttest _ ->
One of the two arguments can reside on stack
if stackp arg.(0) && stackp arg.(1)
then [| self#makereg arg.(0); arg.(1) |]
else arg
| Ifloattest (CFlt | CFnlt | CFle | CFnle) ->
(* Cf. emit.mlp: we swap arguments in this case *)
First argument can be on stack , second must be in register
if stackp arg.(1)
then [| arg.(0); self#makereg arg.(1) |]
else arg
| Ifloattest (CFeq | CFneq | CFgt | CFngt | CFge | CFnge) ->
Second argument can be on stack , first must be in register
if stackp arg.(0)
then [| self#makereg arg.(0); arg.(1) |]
else arg
| Iinttest_imm (_, _)
| Itruetest
| Ifalsetest
| Ioddtest
| Ieventest ->
(* The argument(s) can be either in register or on stack *)
arg
end
let fundecl f num_stack_slots =
(new reload)#fundecl f num_stack_slots
| null | https://raw.githubusercontent.com/ocaml-flambda/flambda-backend/83732c05071ed4217d280881a75010f8c99105ef/backend/amd64/reload.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Reloading for the AMD64
This add will be turned into a lea; args and results must be
in registers
The argument(s) can be either in register or on stack.
The result must be in a register.
The argument(s) and results can be either in register or on stack
Result must be in register, but argument can be on stack
Other operations: all args and results in registers,
except moves and probes.
Cf. emit.mlp: we swap arguments in this case
The argument(s) can be either in register or on stack | # 2 "backend/amd64/reload.ml"
, projet Cristal , INRIA Rocquencourt
Copyright 2000 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
[@@@ocaml.warning "+4"]
open Cmm
open Reg
open Mach
Summary of instruction set constraints :
" S " means either stack or register , " R " means register only .
Operation Res Arg1 Arg2
Imove R S
or S R
Iconst_int S if 32 - bit signed , R otherwise
Iconst_float R
Iconst_symbol ( not PIC ) S
Iconst_symbol ( PIC ) R
Icall_ind R
Itailcall_ind R
Iload R R R
Istore R R
Iintop(Icomp ) R R S
or R S R
Iintop(Imul|Idiv|Imod ) R R S
Iintop(Imulh ) R R S
Iintop(shift ) S S R
Iintop(others ) R R S
or S S R
Iintop_imm(Iadd , n)/lea R R
Iintop_imm(Imul , n ) R R
Iintop_imm(Icomp _ ) R S
Iintop_imm(others ) S S
Inegf ... Idivf R R S
Ifloatofint R S
Iintoffloat R S
Ispecific(Ilea ) R R R
Ispecific(Ifloatarithmem ) R R R
Ispecific(Icrc32q ) R R S ( and Res = Arg1 )
Ispecific(Irdtsc ) R
Ispecific(Irdpmc ) R R ( Arg1 = rcx )
Ispecific(Ifloat_iround ) R S
Ispecific(Ifloat_round _ ) R S
Ispecific(Ifloat_min ) R R S ( and Res = Arg1 )
Ispecific(Ifloat_max ) R R S ( and Res = Arg1 )
Conditional branches :
Iinttest S R
or R S
Ifloattest R S ( or S R if swapped test )
other tests S
"S" means either stack or register, "R" means register only.
Operation Res Arg1 Arg2
Imove R S
or S R
Iconst_int S if 32-bit signed, R otherwise
Iconst_float R
Iconst_symbol (not PIC) S
Iconst_symbol (PIC) R
Icall_ind R
Itailcall_ind R
Iload R R R
Istore R R
Iintop(Icomp) R R S
or R S R
Iintop(Imul|Idiv|Imod) R R S
Iintop(Imulh) R R S
Iintop(shift) S S R
Iintop(others) R R S
or S S R
Iintop_imm(Iadd, n)/lea R R
Iintop_imm(Imul, n) R R
Iintop_imm(Icomp _) R S
Iintop_imm(others) S S
Inegf...Idivf R R S
Ifloatofint R S
Iintoffloat R S
Ispecific(Ilea) R R R
Ispecific(Ifloatarithmem) R R R
Ispecific(Icrc32q) R R S (and Res = Arg1)
Ispecific(Irdtsc) R
Ispecific(Irdpmc) R R (Arg1 = rcx)
Ispecific(Ifloat_iround) R S
Ispecific(Ifloat_round _) R S
Ispecific(Ifloat_min) R R S (and Res = Arg1)
Ispecific(Ifloat_max) R R S (and Res = Arg1)
Conditional branches:
Iinttest S R
or R S
Ifloattest R S (or S R if swapped test)
other tests S
*)
let stackp r =
match r.loc with
Stack _ -> true
| Reg _ | Unknown -> false
class reload = object (self)
inherit Reloadgen.reload_generic as super
method! reload_operation op arg res =
match op with
| Iintop(Iadd|Isub|Iand|Ior|Ixor|Icheckbound) ->
One of the two arguments can reside in the stack , but not both
if stackp arg.(0) && stackp arg.(1)
then ([|arg.(0); self#makereg arg.(1)|], res)
else (arg, res)
| Iintop (Icomp _) ->
One of the two arguments can reside in the stack , but not both .
The result must be in a register .
The result must be in a register. *)
let res =
if stackp res.(0) then [| self#makereg res.(0) |] else res
in
if stackp arg.(0) && stackp arg.(1)
then ([|arg.(0); self#makereg arg.(1)|], res)
else (arg, res)
| Iintop_imm(Iadd, _) when arg.(0).loc <> res.(0).loc ->
super#reload_operation op arg res
| Iintop_imm (Imul, _) ->
The result (= the argument ) must be a register ( # 10626 )
if stackp arg.(0)
then let r = self#makereg arg.(0) in ([|r|],[|r|])
else (arg, res)
| Ispecific Ifloat_iround
| Ispecific (Ifloat_round _)
| Iintop_imm (Icomp _, _) ->
let res =
if stackp res.(0) then [| self#makereg res.(0) |] else res
in
arg, res
| Iintop(Imulh _ | Idiv | Imod | Ilsl | Ilsr | Iasr)
| Iintop_imm((Iadd | Isub | Iand | Ior | Ixor | Ilsl | Ilsr | Iasr
| Imulh _ | Idiv | Imod | Icheckbound), _) ->
Note : Imulh , Idiv , Imod : arg(0 ) and res(0 ) already forced in regs
, , : ) already forced in regs
Ilsl, Ilsr, Iasr: arg(1) already forced in regs *)
(arg, res)
| Iintop_imm ((Ipopcnt | Iclz _ | Ictz _), _) -> assert false
| Iintop(Imul) | Iaddf | Isubf | Imulf | Idivf ->
First argument (= result ) must be in register , second arg
can reside in the stack
can reside in the stack *)
if stackp arg.(0)
then (let r = self#makereg arg.(0) in ([|r; arg.(1)|], [|r|]))
else (arg, res)
| Ispecific (Irdtsc | Irdpmc) ->
: result must be in register .
Irdpmc : result must be in register , arg.(0 ) already forced in reg .
Irdpmc: result must be in register, arg.(0) already forced in reg. *)
if stackp res.(0)
then (let r = self#makereg res.(0) in (arg, [|r|]))
else (arg, res)
| Ispecific(Ifloat_min | Ifloat_max)
| Ispecific Icrc32q ->
First argument and result must be in the same register .
Second argument can be either in a register or on stack .
Second argument can be either in a register or on stack. *)
if stackp arg.(0)
then (let r = self#makereg arg.(0) in ([|r; arg.(1)|], [|r|]))
else (arg, res)
| Ifloatofint | Iintoffloat ->
(arg, (if stackp res.(0) then [| self#makereg res.(0) |] else res))
| Iconst_int n ->
if n <= 0x7FFFFFFFn && n >= -0x80000000n
then (arg, res)
else super#reload_operation op arg res
| Iconst_symbol _ ->
if !Clflags.pic_code || !Clflags.dlcode || Arch.win64
then super#reload_operation op arg res
else (arg, res)
| Icsel tst ->
Last argument and result must be in the same register .
Result must be in register . The last two arguments are used
for emitting cmov , the remaining for [ Mach.test ] .
Result must be in register. The last two arguments are used
for emitting cmov, the remaining for [Mach.test]. *)
CR gyorsh : we already use Array.sub here ,
so no reason for this convoluted arrangement ,
using the first two args for cmov would simplify most of the
code as it wo n't need to have [ len ] , it will be able to have indexes
directly , but then in Emit we will have to do again
to call emit_test ( unless emit_test takes an index , which is also
weird ) .
so no reason for this convoluted arrangement,
using the first two args for cmov would simplify most of the
code as it won't need to have [len], it will be able to have indexes
directly, but then in Emit we will have to do Array.sub again
to call emit_test (unless emit_test takes an index, which is also
weird). *)
CR - soon gyorsh : [ reload_test ] may lose some sharing
between the arguments of the test and the last two arguments
and the result of the move .
between the arguments of the test and the last two arguments
and the result of the move. *)
let r = if stackp res.(0) then self#makereg res.(0) else res.(0) in
let len = Array.length arg in
let arg' = Array.copy arg in
let test_arg = self#reload_test tst (Array.sub arg 0 (len - 2)) in
for i = 0 to len - 2 - 1 do
arg'.(i) <- test_arg.(i)
done;
arg'.(len - 1) <- r;
(arg', [|r|])
| Iintop (Ipopcnt | Iclz _| Ictz _)
| Iintop_atomic _
| Ispecific (Isqrtf | Isextend32 | Izextend32 | Ilea _
| Istore_int (_, _, _)
| Ioffset_loc (_, _) | Ifloatarithmem (_, _)
| Ipause
| Ilfence | Isfence | Imfence
| Iprefetch _
| Ibswap _| Ifloatsqrtf _)
| Imove|Ispill|Ireload|Inegf|Iabsf|Iconst_float _|Icall_ind|Icall_imm _
| Icompf _
| Itailcall_ind|Itailcall_imm _|Iextcall _|Istackoffset _|Iload (_, _, _)
| Istore (_, _, _)|Ialloc _|Iname_for_debugger _|Iprobe _|Iprobe_is_enabled _
| Ivalueofint | Iintofvalue | Iopaque
| Ibeginregion | Iendregion | Ipoll _
super#reload_operation op arg res
method! reload_test tst arg =
match tst with
Iinttest _ ->
One of the two arguments can reside on stack
if stackp arg.(0) && stackp arg.(1)
then [| self#makereg arg.(0); arg.(1) |]
else arg
| Ifloattest (CFlt | CFnlt | CFle | CFnle) ->
First argument can be on stack , second must be in register
if stackp arg.(1)
then [| arg.(0); self#makereg arg.(1) |]
else arg
| Ifloattest (CFeq | CFneq | CFgt | CFngt | CFge | CFnge) ->
Second argument can be on stack , first must be in register
if stackp arg.(0)
then [| self#makereg arg.(0); arg.(1) |]
else arg
| Iinttest_imm (_, _)
| Itruetest
| Ifalsetest
| Ioddtest
| Ieventest ->
arg
end
let fundecl f num_stack_slots =
(new reload)#fundecl f num_stack_slots
|
8f2eb949c796c80aadd5ff0db2d8b1cd7e77bf522b5e02965af050722410bc69 | aryx/xix | common.mli |
type byte = char
type bytes = string
type filename = string
type dirname = string
type ('a, 'b) either = Left of 'a | Right of 'b
type compare = Equal | Inf | Sup
exception Todo
exception Impossible of string
val spf : ('a, unit, string) format -> 'a
val pr : string -> unit
val pr2 : string -> unit
val with_file_out : (out_channel -> 'a) -> filename -> 'a
val with_file_in : (in_channel -> 'a) -> filename -> 'a
val rnd : int -> int -> int
val if_some : ('a -> unit) -> 'a option -> unit
val filter_some : 'a option list -> 'a list
val map_filter : ('a -> 'b option) -> 'a list -> 'b list
val optionize: (unit -> 'a) -> 'a option
val (<=>): 'a -> 'a -> compare
val sort_by_val_highfirst : ('a * 'b) list -> ('a * 'b) list
val sort_by_val_lowfirst : ('a * 'b) list -> ('a * 'b) list
val sort_by_key_highfirst : ('a * 'b) list -> ('a * 'b) list
val sort_by_key_lowfirst : ('a * 'b) list -> ('a * 'b) list
val group_by : ('a -> 'b) -> 'a list -> ('b * 'a list) list
val memoized :
?use_cache:bool -> ('a, 'b) Hashtbl.t -> 'a -> (unit -> 'b) -> 'b
val cat : string -> string list
module Regexp_ :
sig
val matched : int -> string -> string
val matched1 : string -> string
val matched2 : string -> string * string
val matched3 : string -> string * string * string
val matched4 : string -> string * string * string * string
val matched5 : string -> string * string * string * string * string
val matched6 :
string -> string * string * string * string * string * string
val matched7 :
string -> string * string * string * string * string * string * string
val _memo_compiled_regexp : (string, Str.regexp) Hashtbl.t
val candidate_match_func : string -> string -> bool
val split : string -> string -> string list
end
val ( =~ ) : string -> string -> bool
module List_ :
sig
val exclude : ('a -> bool) -> 'a list -> 'a list
val take : int -> 'a list -> 'a list
val take_safe : int -> 'a list -> 'a list
end
val push : 'a -> 'a list ref -> unit
module Stack_ :
sig
val top_opt: 'a Stack.t -> 'a option
val nth: int -> 'a Stack.t -> 'a
end
module Hashtbl_ :
sig
val of_list : ('a * 'b) list -> ('a, 'b) Hashtbl.t
val to_list : ('a, 'b) Hashtbl.t -> ('a * 'b) list
end
module Obj_ :
sig
val dump2 : Obj.t -> string
end
val dump : 'a -> string
val pr2_gen : 'a -> unit
| null | https://raw.githubusercontent.com/aryx/xix/60ce1bd9a3f923e0e8bb2192f8938a9aa49c739c/lib_core/commons/common.mli | ocaml |
type byte = char
type bytes = string
type filename = string
type dirname = string
type ('a, 'b) either = Left of 'a | Right of 'b
type compare = Equal | Inf | Sup
exception Todo
exception Impossible of string
val spf : ('a, unit, string) format -> 'a
val pr : string -> unit
val pr2 : string -> unit
val with_file_out : (out_channel -> 'a) -> filename -> 'a
val with_file_in : (in_channel -> 'a) -> filename -> 'a
val rnd : int -> int -> int
val if_some : ('a -> unit) -> 'a option -> unit
val filter_some : 'a option list -> 'a list
val map_filter : ('a -> 'b option) -> 'a list -> 'b list
val optionize: (unit -> 'a) -> 'a option
val (<=>): 'a -> 'a -> compare
val sort_by_val_highfirst : ('a * 'b) list -> ('a * 'b) list
val sort_by_val_lowfirst : ('a * 'b) list -> ('a * 'b) list
val sort_by_key_highfirst : ('a * 'b) list -> ('a * 'b) list
val sort_by_key_lowfirst : ('a * 'b) list -> ('a * 'b) list
val group_by : ('a -> 'b) -> 'a list -> ('b * 'a list) list
val memoized :
?use_cache:bool -> ('a, 'b) Hashtbl.t -> 'a -> (unit -> 'b) -> 'b
val cat : string -> string list
module Regexp_ :
sig
val matched : int -> string -> string
val matched1 : string -> string
val matched2 : string -> string * string
val matched3 : string -> string * string * string
val matched4 : string -> string * string * string * string
val matched5 : string -> string * string * string * string * string
val matched6 :
string -> string * string * string * string * string * string
val matched7 :
string -> string * string * string * string * string * string * string
val _memo_compiled_regexp : (string, Str.regexp) Hashtbl.t
val candidate_match_func : string -> string -> bool
val split : string -> string -> string list
end
val ( =~ ) : string -> string -> bool
module List_ :
sig
val exclude : ('a -> bool) -> 'a list -> 'a list
val take : int -> 'a list -> 'a list
val take_safe : int -> 'a list -> 'a list
end
val push : 'a -> 'a list ref -> unit
module Stack_ :
sig
val top_opt: 'a Stack.t -> 'a option
val nth: int -> 'a Stack.t -> 'a
end
module Hashtbl_ :
sig
val of_list : ('a * 'b) list -> ('a, 'b) Hashtbl.t
val to_list : ('a, 'b) Hashtbl.t -> ('a * 'b) list
end
module Obj_ :
sig
val dump2 : Obj.t -> string
end
val dump : 'a -> string
val pr2_gen : 'a -> unit
| |
da69b25bd2067da2eff14dc9266edf98fc822c4052c60936ca57e83d7c0095c9 | belambert/cl-asr | phoneme-recognition.lisp | Copyright 2010 - 2018
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;; -2.0
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(in-package :cl-asr)
;;;; Some hacks that make it possible to recognize PHONES as opposed to WORDS, although quite awkwardly
(defun build-flat-phoneme-fsm (&key ci-only)
"Build a flat bigram FSM for recognition by first constructing a pseudo vocab
and using that as the list of words that can be recognized."
(let ((pseudo-vocab (phonemes->pseudo-vocab :ci-only ci-only)))
(build-flat-bigram-fsm pseudo-vocab)))
(defun phonemes->pseudo-vocab (&key ci-only)
"Convert the phones in the currently loaded acoustic model into a list of pseudo-vocab."
(let* ((phonemes (remove-if-not 'listp (hash-table-keys (acoustic-model-phone-hmm-table *acoustic-model*))))
(pseudo-vocab '()))
(when ci-only
(setf phonemes (remove-if 'second phonemes)))
(setf pseudo-vocab (loop for phoneme in phonemes
for pseudo-word = (phone-specifier->string phoneme)
collecting pseudo-word))
(push "<sil>" pseudo-vocab)))
(defun phone-specifier->string (phone-spec)
"Given a phone specifier, a list of the phone possibly followed by before and after contexts,
convert it to a canonical string to be used as a 'word' during recognition."
(cond ((listp phone-spec)
(format nil "~A_~{~S~^_~}" *phone-prefix* phone-spec))
((stringp phone-spec)
phone-spec)
(t (error "Invalid phone specifier"))))
(defun phone-spec->pretty-string (phone-spec)
"Given a phone specifier, a list of the phone possibly followed by before and after contexts,
convert it to an easily human readable string."
(cond ((listp phone-spec)
(if (second phone-spec)
(format nil "~A (~a, ~a), ~A" (first phone-spec) (second phone-spec) (third phone-spec) (fourth phone-spec))
(first phone-spec)))
((stringp phone-spec)
phone-spec)
(t (error "Invalid phone specifier"))))
(defun string->phone-spec (string)
"Given a string representing the canonical 'word' version of a (possibly context dependent) phone,
return a 'phone specifier' list."
(mapcar 'read-from-string (subseq (split-sequence:split-sequence #\_ string) 1)))
(defun phone-string-p (string)
"Check if the given string represents a phone (i.e. has the *phone-prefix* prefix)."
(alexandria:starts-with-subseq *phone-prefix* string))
(defun cleanup-phone-string (string)
"Given a string, *if* it represents a phone, then return the easily human readable version of it."
(if (phone-string-p string)
(phone-spec->pretty-string (string->phone-spec string))
string))
| null | https://raw.githubusercontent.com/belambert/cl-asr/a734ddb396f18bf4a504e9ecb3c91e507764358c/src/extensions/phoneme-recognition.lisp | lisp |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Some hacks that make it possible to recognize PHONES as opposed to WORDS, although quite awkwardly | Copyright 2010 - 2018
distributed under the License is distributed on an " AS IS " BASIS ,
(in-package :cl-asr)
(defun build-flat-phoneme-fsm (&key ci-only)
"Build a flat bigram FSM for recognition by first constructing a pseudo vocab
and using that as the list of words that can be recognized."
(let ((pseudo-vocab (phonemes->pseudo-vocab :ci-only ci-only)))
(build-flat-bigram-fsm pseudo-vocab)))
(defun phonemes->pseudo-vocab (&key ci-only)
"Convert the phones in the currently loaded acoustic model into a list of pseudo-vocab."
(let* ((phonemes (remove-if-not 'listp (hash-table-keys (acoustic-model-phone-hmm-table *acoustic-model*))))
(pseudo-vocab '()))
(when ci-only
(setf phonemes (remove-if 'second phonemes)))
(setf pseudo-vocab (loop for phoneme in phonemes
for pseudo-word = (phone-specifier->string phoneme)
collecting pseudo-word))
(push "<sil>" pseudo-vocab)))
(defun phone-specifier->string (phone-spec)
"Given a phone specifier, a list of the phone possibly followed by before and after contexts,
convert it to a canonical string to be used as a 'word' during recognition."
(cond ((listp phone-spec)
(format nil "~A_~{~S~^_~}" *phone-prefix* phone-spec))
((stringp phone-spec)
phone-spec)
(t (error "Invalid phone specifier"))))
(defun phone-spec->pretty-string (phone-spec)
"Given a phone specifier, a list of the phone possibly followed by before and after contexts,
convert it to an easily human readable string."
(cond ((listp phone-spec)
(if (second phone-spec)
(format nil "~A (~a, ~a), ~A" (first phone-spec) (second phone-spec) (third phone-spec) (fourth phone-spec))
(first phone-spec)))
((stringp phone-spec)
phone-spec)
(t (error "Invalid phone specifier"))))
(defun string->phone-spec (string)
"Given a string representing the canonical 'word' version of a (possibly context dependent) phone,
return a 'phone specifier' list."
(mapcar 'read-from-string (subseq (split-sequence:split-sequence #\_ string) 1)))
(defun phone-string-p (string)
"Check if the given string represents a phone (i.e. has the *phone-prefix* prefix)."
(alexandria:starts-with-subseq *phone-prefix* string))
(defun cleanup-phone-string (string)
"Given a string, *if* it represents a phone, then return the easily human readable version of it."
(if (phone-string-p string)
(phone-spec->pretty-string (string->phone-spec string))
string))
|
36ab74a2c0567666f3d031c3a1fdca4b8c3c0d732d13e42e5ab504b2671e0e92 | shayne-fletcher/zen | dates.ml |
# load " unix.cma " ; ;
# load " str.cma " ; ;
# load " calendarLib.cma " ; ;
#load "unix.cma";;
#load "str.cma";;
#load "calendarLib.cma";;
*)
type t = CalendarLib.Date.t ;;
let parse_date : Genlex.token Stream.t -> t =
let int = parser [<'Genlex.Int i>]->i in
parser [< y = int; m = int; d = int >] -> CalendarLib.Date.make y m d
;;
let string_of_date : t -> string =
fun d ->
CalendarLib.Printer.Date.sprint "%Y %m %d" d
;;
let date_of_string : string -> t =
fun s ->
parse_date ((Genlex.make_lexer []) (Stream.of_string s))
;;
(*In this version, only weekends are holidays.*)
let is_business_day : t -> string -> bool =
fun t (loc:string) ->
let f = function
| CalendarLib.Date.Sat -> false
| CalendarLib.Date.Sun -> false
| _ -> true
in f (CalendarLib.Date.day_of_week t)
;;
type shift_convention =
| NoShift
| Following
| ModifiedFollowing
| Preceding
| ModifiedPreceding
;;
let string_of_shift_convention : shift_convention -> string =
function
| NoShift -> "NO_SHIFT"
| Following -> "FOLLOWING"
| ModifiedFollowing -> "MODIFIED_FOLLOWING"
| Preceding -> "PRECEDING"
| ModifiedPreceding -> "MODIFIED_PRECEDING"
;;
let shift_convention_of_string : string -> shift_convention =
function
| "NO_SHIFT" -> NoShift
| "FOLLOWING" -> Following
| "MODIFIED_FOLLOWING" -> ModifiedFollowing
| "PRECEDING" -> Preceding
| "MODIFIED_PRECEDING" -> ModifiedPreceding
| s -> failwith ("Couldn't convert \""^s^"\" to a shift convention")
;;
let parse_shift_convention = parser
| [< 'Genlex.Ident s >] -> shift_convention_of_string s
;;
let rec shift_following : t -> string -> t =
fun t loc ->
if is_business_day t loc then
t
else
shift_following (CalendarLib.Date.add t (CalendarLib.Date.Period.day 1)) loc
;;
let rec shift_preceding : t -> string -> t =
fun t loc ->
if is_business_day t loc then
t
else
shift_preceding (CalendarLib.Date.add t (CalendarLib.Date.Period.day (-1))) loc
;;
let shift_modified_following : t -> string -> t =
fun t loc ->
let s = shift_following t loc
in
let m = CalendarLib.Date.month t
and n = CalendarLib.Date.month s
in
if m = n
then
s
else
shift_preceding t loc
;;
let shift_modified_preceding : t -> string -> t =
fun t loc ->
let s = shift_preceding t loc
in
let m = CalendarLib.Date.month t
and n = CalendarLib.Date.month s
in
if m = n
then
s
else
shift_following t loc
;;
let shift : t->shift_convention->string->t =
fun t s loc ->
match s with
| NoShift -> t
| Following -> shift_following t loc
| Preceding -> shift_preceding t loc
| ModifiedFollowing -> shift_modified_following t loc
| ModifiedPreceding -> shift_preceding t loc
;;
type day_count =
| DC_30_360
| DC_ACT_360
| DC_ACT_365
| DC_ACT_ACT
;;
let string_of_day_count : day_count -> string =
function
| DC_30_360 -> "DC_30_360"
| DC_ACT_360 -> "DC_ACT_360"
| DC_ACT_365 -> "DC_ACT_365"
| DC_ACT_ACT -> "DC_ACT_ACT"
;;
let day_count_of_string : string -> day_count =
function
| "DC_30_360" -> DC_30_360
| "DC_ACT_360" -> DC_ACT_360
| "DC_ACT_365" -> DC_ACT_365
| "DC_ACT_ACT" -> DC_ACT_ACT
| s -> failwith ("Couldn't convert \""^s^"\" to a day count convention")
;;
let parse_day_count = parser
| [< 'Genlex.Ident s >] -> day_count_of_string s
;;
let year_fraction_act_360 : (t * t) -> float =
fun (s, u) ->
(float_of_int (CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub u s)))/. 360.0
;;
let year_fraction_act_365 : (t * t) -> float =
fun (s, u) ->
(float_of_int (CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub u s)))/. 365.0
;;
let year_fraction_30_360 : (t * t) -> float =
fun (s, u) ->
let sy = CalendarLib.Date.year s
and sm = CalendarLib.Date.int_of_month (CalendarLib.Date.month s)
and sd = CalendarLib.Date.day_of_month s
and uy = CalendarLib.Date.year u
and um = CalendarLib.Date.int_of_month (CalendarLib.Date.month u)
and ud = CalendarLib.Date.day_of_month u
in
let d1 = if sd != 31 then sd else 30
and d2 = if ud != 31 then ud else 30
in
let a : float = float_of_int (d2 - d1)
and b : float = (float_of_int (um - sm))*.30.0
and c : float = (float_of_int (uy - sy))*.360.0
in (a +. b +. c) /. 360.0
;;
let year_fraction_act_act : (t*t) -> float =
fun (s, u) ->
let sy = CalendarLib.Date.year s
and uy = CalendarLib.Date.year u
in
if sy != uy then
let uy_s = CalendarLib.Date.make uy 1 1
and sy_end = CalendarLib.Date.make sy 12 31
and sy_days = if CalendarLib.Date.is_leap_year sy then 366 else 365
and uy_days = if CalendarLib.Date.is_leap_year uy then 366 else 365
in
let n1 = CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub sy_end s)
and n2 = CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub u uy_s)
in
float_of_int (n1) /. (float_of_int sy_days) +. float_of_int (uy - sy - 1) +. float_of_int (n2)/.(float_of_int uy_days)
else
let days = if CalendarLib.Date.is_leap_year sy then 366 else 365
in float_of_int (CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub u s)) /. (float_of_int days)
;;
let year_fraction : (t * t) -> day_count -> float =
fun dt code ->
match code with
| DC_30_360 -> year_fraction_30_360 dt
| DC_ACT_360 -> year_fraction_act_360 dt
| DC_ACT_365 -> year_fraction_act_365 dt
| DC_ACT_ACT -> year_fraction_act_act dt
;;
let day_diff : t -> t -> int =
fun to_ from ->
CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub to_ from)
;;
let year_diff : t -> t -> float =
fun to_ from ->
(float_of_int (day_diff to_ from))/.365.0
;;
| null | https://raw.githubusercontent.com/shayne-fletcher/zen/10a1d0b9bf261bb133918dd62fb1593c3d4d21cb/ocaml/curve/dates.ml | ocaml | In this version, only weekends are holidays. |
# load " unix.cma " ; ;
# load " str.cma " ; ;
# load " calendarLib.cma " ; ;
#load "unix.cma";;
#load "str.cma";;
#load "calendarLib.cma";;
*)
type t = CalendarLib.Date.t ;;
let parse_date : Genlex.token Stream.t -> t =
let int = parser [<'Genlex.Int i>]->i in
parser [< y = int; m = int; d = int >] -> CalendarLib.Date.make y m d
;;
let string_of_date : t -> string =
fun d ->
CalendarLib.Printer.Date.sprint "%Y %m %d" d
;;
let date_of_string : string -> t =
fun s ->
parse_date ((Genlex.make_lexer []) (Stream.of_string s))
;;
let is_business_day : t -> string -> bool =
fun t (loc:string) ->
let f = function
| CalendarLib.Date.Sat -> false
| CalendarLib.Date.Sun -> false
| _ -> true
in f (CalendarLib.Date.day_of_week t)
;;
type shift_convention =
| NoShift
| Following
| ModifiedFollowing
| Preceding
| ModifiedPreceding
;;
let string_of_shift_convention : shift_convention -> string =
function
| NoShift -> "NO_SHIFT"
| Following -> "FOLLOWING"
| ModifiedFollowing -> "MODIFIED_FOLLOWING"
| Preceding -> "PRECEDING"
| ModifiedPreceding -> "MODIFIED_PRECEDING"
;;
let shift_convention_of_string : string -> shift_convention =
function
| "NO_SHIFT" -> NoShift
| "FOLLOWING" -> Following
| "MODIFIED_FOLLOWING" -> ModifiedFollowing
| "PRECEDING" -> Preceding
| "MODIFIED_PRECEDING" -> ModifiedPreceding
| s -> failwith ("Couldn't convert \""^s^"\" to a shift convention")
;;
let parse_shift_convention = parser
| [< 'Genlex.Ident s >] -> shift_convention_of_string s
;;
let rec shift_following : t -> string -> t =
fun t loc ->
if is_business_day t loc then
t
else
shift_following (CalendarLib.Date.add t (CalendarLib.Date.Period.day 1)) loc
;;
let rec shift_preceding : t -> string -> t =
fun t loc ->
if is_business_day t loc then
t
else
shift_preceding (CalendarLib.Date.add t (CalendarLib.Date.Period.day (-1))) loc
;;
let shift_modified_following : t -> string -> t =
fun t loc ->
let s = shift_following t loc
in
let m = CalendarLib.Date.month t
and n = CalendarLib.Date.month s
in
if m = n
then
s
else
shift_preceding t loc
;;
let shift_modified_preceding : t -> string -> t =
fun t loc ->
let s = shift_preceding t loc
in
let m = CalendarLib.Date.month t
and n = CalendarLib.Date.month s
in
if m = n
then
s
else
shift_following t loc
;;
let shift : t->shift_convention->string->t =
fun t s loc ->
match s with
| NoShift -> t
| Following -> shift_following t loc
| Preceding -> shift_preceding t loc
| ModifiedFollowing -> shift_modified_following t loc
| ModifiedPreceding -> shift_preceding t loc
;;
type day_count =
| DC_30_360
| DC_ACT_360
| DC_ACT_365
| DC_ACT_ACT
;;
let string_of_day_count : day_count -> string =
function
| DC_30_360 -> "DC_30_360"
| DC_ACT_360 -> "DC_ACT_360"
| DC_ACT_365 -> "DC_ACT_365"
| DC_ACT_ACT -> "DC_ACT_ACT"
;;
let day_count_of_string : string -> day_count =
function
| "DC_30_360" -> DC_30_360
| "DC_ACT_360" -> DC_ACT_360
| "DC_ACT_365" -> DC_ACT_365
| "DC_ACT_ACT" -> DC_ACT_ACT
| s -> failwith ("Couldn't convert \""^s^"\" to a day count convention")
;;
let parse_day_count = parser
| [< 'Genlex.Ident s >] -> day_count_of_string s
;;
let year_fraction_act_360 : (t * t) -> float =
fun (s, u) ->
(float_of_int (CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub u s)))/. 360.0
;;
let year_fraction_act_365 : (t * t) -> float =
fun (s, u) ->
(float_of_int (CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub u s)))/. 365.0
;;
let year_fraction_30_360 : (t * t) -> float =
fun (s, u) ->
let sy = CalendarLib.Date.year s
and sm = CalendarLib.Date.int_of_month (CalendarLib.Date.month s)
and sd = CalendarLib.Date.day_of_month s
and uy = CalendarLib.Date.year u
and um = CalendarLib.Date.int_of_month (CalendarLib.Date.month u)
and ud = CalendarLib.Date.day_of_month u
in
let d1 = if sd != 31 then sd else 30
and d2 = if ud != 31 then ud else 30
in
let a : float = float_of_int (d2 - d1)
and b : float = (float_of_int (um - sm))*.30.0
and c : float = (float_of_int (uy - sy))*.360.0
in (a +. b +. c) /. 360.0
;;
let year_fraction_act_act : (t*t) -> float =
fun (s, u) ->
let sy = CalendarLib.Date.year s
and uy = CalendarLib.Date.year u
in
if sy != uy then
let uy_s = CalendarLib.Date.make uy 1 1
and sy_end = CalendarLib.Date.make sy 12 31
and sy_days = if CalendarLib.Date.is_leap_year sy then 366 else 365
and uy_days = if CalendarLib.Date.is_leap_year uy then 366 else 365
in
let n1 = CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub sy_end s)
and n2 = CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub u uy_s)
in
float_of_int (n1) /. (float_of_int sy_days) +. float_of_int (uy - sy - 1) +. float_of_int (n2)/.(float_of_int uy_days)
else
let days = if CalendarLib.Date.is_leap_year sy then 366 else 365
in float_of_int (CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub u s)) /. (float_of_int days)
;;
let year_fraction : (t * t) -> day_count -> float =
fun dt code ->
match code with
| DC_30_360 -> year_fraction_30_360 dt
| DC_ACT_360 -> year_fraction_act_360 dt
| DC_ACT_365 -> year_fraction_act_365 dt
| DC_ACT_ACT -> year_fraction_act_act dt
;;
let day_diff : t -> t -> int =
fun to_ from ->
CalendarLib.Date.Period.safe_nb_days (CalendarLib.Date.sub to_ from)
;;
let year_diff : t -> t -> float =
fun to_ from ->
(float_of_int (day_diff to_ from))/.365.0
;;
|
a8cbc40bf45fee857cd1bdc5136cd2e2c361cb4fda9f375ed56ca1f14e31c8c2 | Silvast/valihuuto | migrations.clj | (ns valihuuto.db.migrations
(:require [valihuuto.config :refer [config]])
(:import org.flywaydb.core.Flyway))
(def flyway
(when-not *compile-files*
(-> (Flyway/configure)
(.dataSource
(:database-url config)
nil
nil)
(.load))))
(defn migrate! []
(.migrate flyway))
(defn clean! []
(.clean flyway))
| null | https://raw.githubusercontent.com/Silvast/valihuuto/ad7d2cf98eebe4290fdf25479c2ff2865c1770d5/src/valihuuto/db/migrations.clj | clojure | (ns valihuuto.db.migrations
(:require [valihuuto.config :refer [config]])
(:import org.flywaydb.core.Flyway))
(def flyway
(when-not *compile-files*
(-> (Flyway/configure)
(.dataSource
(:database-url config)
nil
nil)
(.load))))
(defn migrate! []
(.migrate flyway))
(defn clean! []
(.clean flyway))
| |
ead8a4a57a727bf4d1a4bb99e0ec93054af416aced6419ac1d42625173020025 | GaloisInc/daedalus | VM.hs | {-# Language OverloadedStrings #-}
module Daedalus.VM
( module Daedalus.VM
, Src.Pattern(..)
, Src.FName
) where
import Data.Set(Set)
import qualified Data.Set as Set
import Data.Map(Map)
import qualified Data.Map as Map
import Data.Text(Text)
import Data.ByteString(ByteString)
import Daedalus.Panic(panic)
import Daedalus.PP
import Daedalus.Rec
import qualified Daedalus.Core as Src
-- | A program
newtype Program = Program { pModules :: [Module] }
-- | A module
data Module = Module
{ mName :: Src.MName
, mImports :: [Src.MName]
, mTypes :: [Rec Src.TDecl]
, mFuns :: [VMFun]
}
-- | A function
data VMFun = VMFun
{ vmfName :: Src.FName
, vmfCaptures :: Captures
, vmfPure :: Bool -- ^ True if this is not a parser
, vmfLoop :: Bool -- XXX we need to know the other loop members
-- for inlining
, vmfDef :: VMFDef -- ^ Definition for the function, if any
, vmfIsEntry :: Bool
}
data VMFDef = VMExtern [BA] -- ^ Primitive with these arguments
| VMDef VMFBody -- ^ Definition
data VMFBody = VMFBody
{ vmfEntry :: Label
, vmfBlocks :: Map Label Block
}
-- | A basic block
data Block = Block
{ blockName :: Label
, blockType :: BlockType
, blockArgs :: [BA]
, blockLocalNum :: Int -- ^ How many locals we define
, blockInstrs :: [Instr]
, blockTerm :: CInstr
}
data BlockType =
NormalBlock
| ReturnBlock ReturnHow
{- ^ This block is used for returning from a function -}
| ThreadBlock
{- ^ This block is an entry point to a thread. -}
deriving (Eq,Show)
data ReturnHow =
RetPure -- ^ pure function
| RetYes Captures -- ^ parser, success, is it a capturing parser
| RetNo Captures -- ^ parser, failure, is it a capturing parser
deriving (Eq,Show)
data DebugCall = DebugCall | DebugTailCall
-- | Instructions
data Instr =
Say String
| Output E
| Notify E -- Let this thread know other alternative failed
| CallPrim BV PrimName [E]
| Spawn BV Closure
| Let BV E
| Free (Set VMVar) -- ^ variable cannot be used for the rest of the block
| NoteFail Src.ErrorSource String E E -- ^ input, message
| PushDebug DebugCall Text
| PopDebug
-- | Instructions that jump
data CInstr =
Jump JumpPoint
| JumpIf E JumpChoice
| Yield
| ReturnNo
| ReturnYes E E -- ^ Result, input
| ReturnPure E -- ^ Return from a pure function (no fail cont.)
| CallPure Src.FName Closure [E]
-- ^ The jump point contains information on where to continue after
-- return and what we need to preserve acrross the call
| Call Src.FName Captures Closure Closure [E]
The JumpPoints contain information about the return addresses .
-- The closures are: no, yes
| TailCall Src.FName Captures [E]
-- ^ Used for both grammars and exprs.
-- This is basically the same as `Jump` just with the extra
-- info that we are calling a function (e.g., for stack trace)
-- | A flag to indicate if a function may capture the continuation.
-- If yes, then the function could return multiple times, and we need to
-- explicitly store the continuation closures.
-- It is always safe, but less efficient, to use 'Capture'
data Captures = Capture | NoCapture | Unknown
deriving (Eq,Show)
-- | Target of a jump
data JumpPoint = JumpPoint { jLabel :: Label, jArgs :: [E] }
type Closure = JumpPoint
-- | Before jumping to these targets we should deallocate the given
-- variables. We could achieve the same with just normal jumps and
-- additional basic blocks, but this seems more straight forward
data JumpWithFree = JumpWithFree
{ freeFirst :: Set VMVar -- ^ Free these before jumping
, jumpTarget :: JumpPoint
}
jumpNoFree :: JumpPoint -> JumpWithFree
jumpNoFree tgt = JumpWithFree { freeFirst = Set.empty, jumpTarget = tgt }
| Two joint points , but we 'll use exactly one of the two .
-- This matters for memory management.
-- NOTE: String literal patterns should have been already compiled away.
data JumpChoice = JumpCase (Map Src.Pattern JumpWithFree)
-- | Constants, and acces to the VM state that does not change in a block.
data E =
EUnit
| ENum Integer Src.Type -- ^ Only unboxed
| EBool Bool
| EFloat Double Src.Type
| EMapEmpty Src.Type Src.Type
| ENothing Src.Type
| EBlockArg BA
| EVar BV
data VMVar = ArgVar BA | LocalVar BV
deriving (Eq,Ord)
-- | Types of values in the VM
data VMT =
TSem Src.Type
| TThreadId
deriving (Eq,Ord)
--------------------------------------------------------------------------------
eIsVar :: E -> Maybe VMVar
eIsVar e =
case e of
EVar x -> Just (LocalVar x)
EBlockArg x -> Just (ArgVar x)
_ -> Nothing
eVar :: VMVar -> E
eVar var =
case var of
LocalVar x -> EVar x
ArgVar x -> EBlockArg x
iArgs :: Instr -> [E]
iArgs i =
case i of
Say {} -> []
Output e -> [e]
Notify e -> [e]
CallPrim _ _ es -> es
Spawn _ j -> jArgs j
NoteFail _ _ ei em -> [ei,em]
Let _ e -> [e]
Free _ -> [] -- XXX: these could be just owned args
PushDebug{} -> []
PopDebug -> []
pAllBlocks :: Program -> [Block]
pAllBlocks p =
[ b | m <- pModules p, f <- mFuns m, VMDef d <- [vmfDef f]
, b <- Map.elems (vmfBlocks d) ]
extraArgs :: BlockType -> Int
extraArgs b =
case b of
NormalBlock -> 0
ThreadBlock -> 1 -- notified?
ReturnBlock h ->
case h of
RetPure -> 1 -- value
RetNo _ -> 0
RetYes _ -> 2 -- value, input
--------------------------------------------------------------------------------
-- Names
data Effect = MayFail | DoesNotFail
deriving (Eq,Ord,Show)
data Label = Label Text Int deriving (Eq,Ord,Show)
data BV = BV Int VMT deriving (Eq,Ord)
data BA = BA Int VMT Ownership deriving (Eq,Ord)
data Ownership = Owned | Borrowed | Unmanaged deriving (Eq,Ord,Show)
class GetOwnership t where
getOwnership :: t -> Ownership
instance GetOwnership BA where
getOwnership (BA _ _ o) = o
instance GetOwnership BV where
getOwnership (BV {}) = Owned -- XXX: in the future maybe we can consider
-- borrowed locals too?
instance GetOwnership VMVar where
getOwnership v =
case v of
LocalVar x -> getOwnership x
ArgVar x -> getOwnership x
class HasType t where
getType :: t -> VMT
getSemType :: HasType t => t -> Src.Type
getSemType t =
case getType t of
TSem a -> a
TThreadId -> panic "getSemType" [ "TThreadId" ]
instance HasType BV where getType (BV _ t) = t
instance HasType BA where getType (BA _ t _) = t
instance HasType VMVar where
getType x =
case x of
LocalVar y -> getType y
ArgVar y -> getType y
instance HasType E where
getType e =
case e of
EUnit -> TSem Src.TUnit
ENum _ t -> TSem t
EBool {} -> TSem Src.TBool
EFloat _ t -> TSem t
EMapEmpty t1 t2 -> TSem (Src.TMap t1 t2)
ENothing t -> TSem (Src.TMaybe t)
EBlockArg x -> getType x
EVar a -> getType a
data PrimName =
StructCon Src.UserType
| NewBuilder Src.Type
| Integer Integer
| ByteArray ByteString
| Op1 Src.Op1
| Op2 Src.Op2
| Op3 Src.Op3
| OpN Src.OpN -- Without `CallF`
--------------------------------------------------------------------------------
ppFun :: Doc -> [Doc] -> Doc
ppFun f ds = f <.> parens (hsep (punctuate comma ds))
instance PP Ownership where
pp m = case m of
Owned -> "Owned"
Borrowed -> "Borrowed"
Unmanaged -> "Unmanaged"
instance PP Label where
pp (Label f i) = "L_" <.> int i <.> "_" <.> pp f
instance PP Instr where
pp instr =
case instr of
CallPrim x f vs -> pp x <+> "=" <+> ppFun (pp f) (map pp vs)
Spawn x c -> pp x <+> "=" <+> ppFun "spawn" [pp c]
Say x -> ppFun "say" [text (show x)]
Output v -> ppFun "output" [ pp v ]
Notify v -> ppFun "notify" [ pp v ]
NoteFail src loc v m ->
ppFun "noteFail" [pp src, text (show loc), pp v, pp m]
Free x -> "free" <+> commaSep (map pp (Set.toList x))
Let x v -> ppBinder x <+> "=" <+> "copy" <+> pp v
PopDebug -> "popDebug"
PushDebug how txt -> ppFun "pushDebug" [ pp how, text (show txt) ]
instance PP DebugCall where
pp x =
case x of
DebugTailCall -> ".tailCall"
DebugCall -> ".call"
instance PP CInstr where
pp cintsr =
case cintsr of
Jump v -> "jump" <+> pp v
JumpIf b (JumpCase ps) -> "case" <+> pp b <+> "of"
$$ nest 2 (vcat (map ppAlt (Map.toList ps)))
where ppAlt (p,g) = pp p <+> "->" <+> pp g
Yield -> "yield"
ReturnNo -> ppFun "return_fail" []
ReturnYes e i -> ppFun "return" [pp e, pp i]
ReturnPure e -> ppFun "return" [pp e]
CallPure f l es -> ppFun (pp f) (map pp es) $$ nest 2 ("jump" <+> pp l)
Call f c no yes es ->
vcat [ ppFun (pp f) (map pp es)
, nest 2 $ vcat [ pp c
, "ok:" <+> pp yes
, "fail:" <+> pp no
]
]
TailCall f c xs -> ppFun (pp f) (map pp xs) <+> ".tail" <+> pp c
instance PP JumpWithFree where
pp jf = ppF <+> pp (Jump (jumpTarget jf))
where ppF = if Set.null (freeFirst jf)
then empty
else pp (Free (freeFirst jf)) <.> semi
instance PP Program where
pp p = vcat' (map pp (pModules p))
instance PP Module where
pp m =
vcat' [ "module" <+> pp (mName m) <+> "where"
, vcat [ "import" <+> pp i | i <- mImports m ]
, vcat' [ pp t | t <- mTypes m ]
, vcat' [ pp f | f <- mFuns m ]
]
instance PP VMFun where
pp f =
(".function" <+> pp (vmfName f)) $$
nest 2 (pp (vmfCaptures f) <+> (if vmfLoop f then ".loop" else empty)
<+> (if vmfIsEntry f then ".root" else empty)
$$ case vmfDef f of
VMExtern as -> ".extern" <+>
hsep [ parens (pp a <+> ":" <+> pp (getType a)) | a <- as ]
VMDef d -> ".entry" <+> pp (vmfEntry d) $$ blocks (vmfBlocks d))
where
blocks = vcat' . map pp . Map.elems
instance PP Captures where
pp c = case c of
Capture -> ".spawns"
NoCapture -> empty
Unknown -> ".capture-unknown"
instance PP VMT where
pp ty =
case ty of
TSem t -> pp t
TThreadId -> "thread_t"
instance PP E where
pp val =
case val of
EVar v -> pp v
EBlockArg i -> pp i
EUnit -> "unit"
ENum i t -> integer i <+> "@" <.> ppPrec 1 t
EBool b -> text (show b)
EFloat f _ -> double f
EMapEmpty k t -> "emptyMap" <+> "@" <.> ppPrec 1 k <+> "@" <.> ppPrec 1 t
ENothing t -> "nothing" <+> "@" <.> ppPrec 1 t
instance PP VMVar where
pp v =
case v of
LocalVar x -> pp x
ArgVar x -> pp x
instance PP BV where
pp (BV x _) = "r" <.> int x
instance PP BA where
pp (BA x _ o) = "ra" <.> int x <.> own
where own = case o of
Owned -> "o"
Borrowed -> "b"
Unmanaged -> "u"
instance PP BlockType where
pp b =
case b of
NormalBlock -> "/* normal block */"
ThreadBlock -> "/* thread */"
ReturnBlock r -> pp r
instance PP ReturnHow where
pp r =
case r of
RetPure -> "/* return pure */"
RetYes c -> "/* return yes" <+> pp c <+> "*/"
RetNo c -> "/* return no" <+> pp c <+> "*/"
instance PP Block where
pp b = l <.> colon <+> ty $$ nest 2
(vcat (map pp (blockInstrs b)) $$ pp (blockTerm b))
where
ty = pp (blockType b)
l = case blockArgs b of
[] -> pp (blockName b)
xs -> ppFun (pp (blockName b)) (map ppArg xs)
ppArg a = pp a <+> ":" <+> pp (getType a)
instance PP JumpPoint where
pp (JumpPoint l es) =
case es of
[] -> lab
_ -> ppFun lab (map pp es)
where
lab = pp l
ppBinder :: (PP a, HasType a) => a -> Doc
ppBinder a = pp a <+> ":" <+> pp (getType a)
instance PP PrimName where
pp pn =
case pn of
StructCon t -> "newStruct" <+> "@" <.> ppPrec 1 t
NewBuilder t -> "newBuilder" <+> "@" <.> ppPrec 1 t
ByteArray bs -> text (show bs)
Integer n -> ppFun "Integer" [ pp n ]
Op1 op -> pp op
Op2 op -> pp op
Op3 op -> pp op
OpN op -> pp op
| null | https://raw.githubusercontent.com/GaloisInc/daedalus/c7f2a6702157a699f88f2a374541dc3b64e106e8/daedalus-vm/src/Daedalus/VM.hs | haskell | # Language OverloadedStrings #
| A program
| A module
| A function
^ True if this is not a parser
XXX we need to know the other loop members
for inlining
^ Definition for the function, if any
^ Primitive with these arguments
^ Definition
| A basic block
^ How many locals we define
^ This block is used for returning from a function
^ This block is an entry point to a thread.
^ pure function
^ parser, success, is it a capturing parser
^ parser, failure, is it a capturing parser
| Instructions
Let this thread know other alternative failed
^ variable cannot be used for the rest of the block
^ input, message
| Instructions that jump
^ Result, input
^ Return from a pure function (no fail cont.)
^ The jump point contains information on where to continue after
return and what we need to preserve acrross the call
The closures are: no, yes
^ Used for both grammars and exprs.
This is basically the same as `Jump` just with the extra
info that we are calling a function (e.g., for stack trace)
| A flag to indicate if a function may capture the continuation.
If yes, then the function could return multiple times, and we need to
explicitly store the continuation closures.
It is always safe, but less efficient, to use 'Capture'
| Target of a jump
| Before jumping to these targets we should deallocate the given
variables. We could achieve the same with just normal jumps and
additional basic blocks, but this seems more straight forward
^ Free these before jumping
This matters for memory management.
NOTE: String literal patterns should have been already compiled away.
| Constants, and acces to the VM state that does not change in a block.
^ Only unboxed
| Types of values in the VM
------------------------------------------------------------------------------
XXX: these could be just owned args
notified?
value
value, input
------------------------------------------------------------------------------
Names
XXX: in the future maybe we can consider
borrowed locals too?
Without `CallF`
------------------------------------------------------------------------------ | module Daedalus.VM
( module Daedalus.VM
, Src.Pattern(..)
, Src.FName
) where
import Data.Set(Set)
import qualified Data.Set as Set
import Data.Map(Map)
import qualified Data.Map as Map
import Data.Text(Text)
import Data.ByteString(ByteString)
import Daedalus.Panic(panic)
import Daedalus.PP
import Daedalus.Rec
import qualified Daedalus.Core as Src
newtype Program = Program { pModules :: [Module] }
data Module = Module
{ mName :: Src.MName
, mImports :: [Src.MName]
, mTypes :: [Rec Src.TDecl]
, mFuns :: [VMFun]
}
data VMFun = VMFun
{ vmfName :: Src.FName
, vmfCaptures :: Captures
, vmfIsEntry :: Bool
}
data VMFBody = VMFBody
{ vmfEntry :: Label
, vmfBlocks :: Map Label Block
}
data Block = Block
{ blockName :: Label
, blockType :: BlockType
, blockArgs :: [BA]
, blockInstrs :: [Instr]
, blockTerm :: CInstr
}
data BlockType =
NormalBlock
| ReturnBlock ReturnHow
| ThreadBlock
deriving (Eq,Show)
data ReturnHow =
deriving (Eq,Show)
data DebugCall = DebugCall | DebugTailCall
data Instr =
Say String
| Output E
| CallPrim BV PrimName [E]
| Spawn BV Closure
| Let BV E
| PushDebug DebugCall Text
| PopDebug
data CInstr =
Jump JumpPoint
| JumpIf E JumpChoice
| Yield
| ReturnNo
| CallPure Src.FName Closure [E]
| Call Src.FName Captures Closure Closure [E]
The JumpPoints contain information about the return addresses .
| TailCall Src.FName Captures [E]
data Captures = Capture | NoCapture | Unknown
deriving (Eq,Show)
data JumpPoint = JumpPoint { jLabel :: Label, jArgs :: [E] }
type Closure = JumpPoint
data JumpWithFree = JumpWithFree
, jumpTarget :: JumpPoint
}
jumpNoFree :: JumpPoint -> JumpWithFree
jumpNoFree tgt = JumpWithFree { freeFirst = Set.empty, jumpTarget = tgt }
| Two joint points , but we 'll use exactly one of the two .
data JumpChoice = JumpCase (Map Src.Pattern JumpWithFree)
data E =
EUnit
| EBool Bool
| EFloat Double Src.Type
| EMapEmpty Src.Type Src.Type
| ENothing Src.Type
| EBlockArg BA
| EVar BV
data VMVar = ArgVar BA | LocalVar BV
deriving (Eq,Ord)
data VMT =
TSem Src.Type
| TThreadId
deriving (Eq,Ord)
eIsVar :: E -> Maybe VMVar
eIsVar e =
case e of
EVar x -> Just (LocalVar x)
EBlockArg x -> Just (ArgVar x)
_ -> Nothing
eVar :: VMVar -> E
eVar var =
case var of
LocalVar x -> EVar x
ArgVar x -> EBlockArg x
iArgs :: Instr -> [E]
iArgs i =
case i of
Say {} -> []
Output e -> [e]
Notify e -> [e]
CallPrim _ _ es -> es
Spawn _ j -> jArgs j
NoteFail _ _ ei em -> [ei,em]
Let _ e -> [e]
PushDebug{} -> []
PopDebug -> []
pAllBlocks :: Program -> [Block]
pAllBlocks p =
[ b | m <- pModules p, f <- mFuns m, VMDef d <- [vmfDef f]
, b <- Map.elems (vmfBlocks d) ]
extraArgs :: BlockType -> Int
extraArgs b =
case b of
NormalBlock -> 0
ReturnBlock h ->
case h of
RetNo _ -> 0
data Effect = MayFail | DoesNotFail
deriving (Eq,Ord,Show)
data Label = Label Text Int deriving (Eq,Ord,Show)
data BV = BV Int VMT deriving (Eq,Ord)
data BA = BA Int VMT Ownership deriving (Eq,Ord)
data Ownership = Owned | Borrowed | Unmanaged deriving (Eq,Ord,Show)
class GetOwnership t where
getOwnership :: t -> Ownership
instance GetOwnership BA where
getOwnership (BA _ _ o) = o
instance GetOwnership BV where
instance GetOwnership VMVar where
getOwnership v =
case v of
LocalVar x -> getOwnership x
ArgVar x -> getOwnership x
class HasType t where
getType :: t -> VMT
getSemType :: HasType t => t -> Src.Type
getSemType t =
case getType t of
TSem a -> a
TThreadId -> panic "getSemType" [ "TThreadId" ]
instance HasType BV where getType (BV _ t) = t
instance HasType BA where getType (BA _ t _) = t
instance HasType VMVar where
getType x =
case x of
LocalVar y -> getType y
ArgVar y -> getType y
instance HasType E where
getType e =
case e of
EUnit -> TSem Src.TUnit
ENum _ t -> TSem t
EBool {} -> TSem Src.TBool
EFloat _ t -> TSem t
EMapEmpty t1 t2 -> TSem (Src.TMap t1 t2)
ENothing t -> TSem (Src.TMaybe t)
EBlockArg x -> getType x
EVar a -> getType a
data PrimName =
StructCon Src.UserType
| NewBuilder Src.Type
| Integer Integer
| ByteArray ByteString
| Op1 Src.Op1
| Op2 Src.Op2
| Op3 Src.Op3
ppFun :: Doc -> [Doc] -> Doc
ppFun f ds = f <.> parens (hsep (punctuate comma ds))
instance PP Ownership where
pp m = case m of
Owned -> "Owned"
Borrowed -> "Borrowed"
Unmanaged -> "Unmanaged"
instance PP Label where
pp (Label f i) = "L_" <.> int i <.> "_" <.> pp f
instance PP Instr where
pp instr =
case instr of
CallPrim x f vs -> pp x <+> "=" <+> ppFun (pp f) (map pp vs)
Spawn x c -> pp x <+> "=" <+> ppFun "spawn" [pp c]
Say x -> ppFun "say" [text (show x)]
Output v -> ppFun "output" [ pp v ]
Notify v -> ppFun "notify" [ pp v ]
NoteFail src loc v m ->
ppFun "noteFail" [pp src, text (show loc), pp v, pp m]
Free x -> "free" <+> commaSep (map pp (Set.toList x))
Let x v -> ppBinder x <+> "=" <+> "copy" <+> pp v
PopDebug -> "popDebug"
PushDebug how txt -> ppFun "pushDebug" [ pp how, text (show txt) ]
instance PP DebugCall where
pp x =
case x of
DebugTailCall -> ".tailCall"
DebugCall -> ".call"
instance PP CInstr where
pp cintsr =
case cintsr of
Jump v -> "jump" <+> pp v
JumpIf b (JumpCase ps) -> "case" <+> pp b <+> "of"
$$ nest 2 (vcat (map ppAlt (Map.toList ps)))
where ppAlt (p,g) = pp p <+> "->" <+> pp g
Yield -> "yield"
ReturnNo -> ppFun "return_fail" []
ReturnYes e i -> ppFun "return" [pp e, pp i]
ReturnPure e -> ppFun "return" [pp e]
CallPure f l es -> ppFun (pp f) (map pp es) $$ nest 2 ("jump" <+> pp l)
Call f c no yes es ->
vcat [ ppFun (pp f) (map pp es)
, nest 2 $ vcat [ pp c
, "ok:" <+> pp yes
, "fail:" <+> pp no
]
]
TailCall f c xs -> ppFun (pp f) (map pp xs) <+> ".tail" <+> pp c
instance PP JumpWithFree where
pp jf = ppF <+> pp (Jump (jumpTarget jf))
where ppF = if Set.null (freeFirst jf)
then empty
else pp (Free (freeFirst jf)) <.> semi
instance PP Program where
pp p = vcat' (map pp (pModules p))
instance PP Module where
pp m =
vcat' [ "module" <+> pp (mName m) <+> "where"
, vcat [ "import" <+> pp i | i <- mImports m ]
, vcat' [ pp t | t <- mTypes m ]
, vcat' [ pp f | f <- mFuns m ]
]
instance PP VMFun where
pp f =
(".function" <+> pp (vmfName f)) $$
nest 2 (pp (vmfCaptures f) <+> (if vmfLoop f then ".loop" else empty)
<+> (if vmfIsEntry f then ".root" else empty)
$$ case vmfDef f of
VMExtern as -> ".extern" <+>
hsep [ parens (pp a <+> ":" <+> pp (getType a)) | a <- as ]
VMDef d -> ".entry" <+> pp (vmfEntry d) $$ blocks (vmfBlocks d))
where
blocks = vcat' . map pp . Map.elems
instance PP Captures where
pp c = case c of
Capture -> ".spawns"
NoCapture -> empty
Unknown -> ".capture-unknown"
instance PP VMT where
pp ty =
case ty of
TSem t -> pp t
TThreadId -> "thread_t"
instance PP E where
pp val =
case val of
EVar v -> pp v
EBlockArg i -> pp i
EUnit -> "unit"
ENum i t -> integer i <+> "@" <.> ppPrec 1 t
EBool b -> text (show b)
EFloat f _ -> double f
EMapEmpty k t -> "emptyMap" <+> "@" <.> ppPrec 1 k <+> "@" <.> ppPrec 1 t
ENothing t -> "nothing" <+> "@" <.> ppPrec 1 t
instance PP VMVar where
pp v =
case v of
LocalVar x -> pp x
ArgVar x -> pp x
instance PP BV where
pp (BV x _) = "r" <.> int x
instance PP BA where
pp (BA x _ o) = "ra" <.> int x <.> own
where own = case o of
Owned -> "o"
Borrowed -> "b"
Unmanaged -> "u"
instance PP BlockType where
pp b =
case b of
NormalBlock -> "/* normal block */"
ThreadBlock -> "/* thread */"
ReturnBlock r -> pp r
instance PP ReturnHow where
pp r =
case r of
RetPure -> "/* return pure */"
RetYes c -> "/* return yes" <+> pp c <+> "*/"
RetNo c -> "/* return no" <+> pp c <+> "*/"
instance PP Block where
pp b = l <.> colon <+> ty $$ nest 2
(vcat (map pp (blockInstrs b)) $$ pp (blockTerm b))
where
ty = pp (blockType b)
l = case blockArgs b of
[] -> pp (blockName b)
xs -> ppFun (pp (blockName b)) (map ppArg xs)
ppArg a = pp a <+> ":" <+> pp (getType a)
instance PP JumpPoint where
pp (JumpPoint l es) =
case es of
[] -> lab
_ -> ppFun lab (map pp es)
where
lab = pp l
ppBinder :: (PP a, HasType a) => a -> Doc
ppBinder a = pp a <+> ":" <+> pp (getType a)
instance PP PrimName where
pp pn =
case pn of
StructCon t -> "newStruct" <+> "@" <.> ppPrec 1 t
NewBuilder t -> "newBuilder" <+> "@" <.> ppPrec 1 t
ByteArray bs -> text (show bs)
Integer n -> ppFun "Integer" [ pp n ]
Op1 op -> pp op
Op2 op -> pp op
Op3 op -> pp op
OpN op -> pp op
|
3c55bc82ce4a01f97f76ee5bcd1a1e06c11a3940181387bc04c0ab44d4a39115 | ocaml-sf/learn-ocaml-corpus | bmove_variant.ml | open Seq
(* -------------------------------------------------------------------------- *)
(* The size of a tree. *)
let rec size (t : tree) : int =
match t with
| TLeaf _ ->
1
| TNonLeaf offspring ->
1 + size_offspring offspring
and size_offspring (offspring : offspring) : int =
match offspring() with
| Nil ->
0
| Cons ((_move, t), offspring) ->
size t + size_offspring offspring
(* -------------------------------------------------------------------------- *)
(* The height of a tree. *)
let rec height (t : tree) : int =
match t with
| TLeaf _ ->
0
| TNonLeaf offspring ->
1 + height_offspring offspring
and height_offspring (offspring : offspring) : int =
match offspring() with
| Nil ->
0
| Cons ((_move, t), offspring) ->
max (height t) (height_offspring offspring)
(* -------------------------------------------------------------------------- *)
(* Evaluating a tree, with a sense parameter: Minimax. *)
let rec eval (sense : sense) (t : tree) : value =
match t with
| TLeaf v ->
interpret sense v
| TNonLeaf offspring ->
eval_offspring sense offspring
and eval_offspring (sense : sense) (offspring : offspring) : value =
match offspring() with
| Nil ->
unit sense
| Cons ((_move, t), offspring) ->
join sense
(eval (opposite sense) t)
(eval_offspring sense offspring)
(* -------------------------------------------------------------------------- *)
(* Evaluating a tree, without a sense parameter: Negamax. *)
let rec nval (t : tree) : value =
match t with
| TLeaf v ->
v
| TNonLeaf offspring ->
nval_offspring offspring
and nval_offspring (offspring : offspring) =
match offspring() with
| Nil ->
bottom
| Cons ((_move, t), offspring) ->
max
(- nval t)
(nval_offspring offspring)
(* -------------------------------------------------------------------------- *)
Evaluating a tree , in Negamax style , and looping over children in
a tail - recursive manner .
a tail-recursive manner. *)
let rec ntval (t : tree) : value =
match t with
| TLeaf v ->
v
| TNonLeaf offspring ->
ntval_offspring bottom offspring
and ntval_offspring (running_max : value) (offspring : offspring) : value =
match offspring() with
| Nil ->
running_max
| Cons ((_move, t), offspring) ->
let v = - ntval t in
let running_max = max running_max v in
ntval_offspring running_max offspring
(* -------------------------------------------------------------------------- *)
Evaluating a tree , using the Alpha - Beta algorithm .
let rec bval (alpha : value) (beta : value) (t : tree) : value =
assert (alpha < beta);
match t with
| TLeaf v ->
(* We could project [v] onto the closed interval [alpha, beta],
but this does not make any difference; [v] is equivalent to
its projection. *)
v
| TNonLeaf offspring ->
bval_offspring alpha beta offspring
and bval_offspring (alpha : value) (beta : value) (offspring : offspring) : value =
assert (alpha < beta);
match offspring() with
| Nil ->
(* We could return the maximum of the children that we have examined,
but it would be less than or equal to [alpha], so it is equivalent
to [alpha]. *)
alpha
| Cons ((_move, t), offspring) ->
let v = - (bval (-beta) (-alpha) t) in
if beta <= v then
(* Returning [beta] or [v] makes no difference; they are equivalent. *)
v
else
let alpha = max alpha v in
(* Because v < beta holds, we still have alpha < beta. *)
assert (alpha < beta);
bval_offspring alpha beta offspring
(* -------------------------------------------------------------------------- *)
In a game tree where every leaf carries the value -1 ( loss ) , 0 ( draw ) ,
or +1 ( win ) , determining whether the first player is assured to win .
or +1 (win), determining whether the first player is assured to win. *)
let assured_win (t : tree) : bool =
let win = +1 in
bval (win-1) win t >= win
(* -------------------------------------------------------------------------- *)
Evaluating a tree using Alpha - Beta and returning the best move .
let rec bmove_offspring alpha beta (candidate : move option) offspring : move option =
assert (alpha < beta);
match offspring() with
| Nil ->
assert (candidate <> None);
candidate
| Cons ((move, t), offspring) ->
let v = - (bval (-beta) (-alpha) t) in
if beta <= v then
Some move
else
let alpha, candidate =
if candidate = None then
(* There are no previous moves, so keep this move as a default.
This ensures that we do not return [None] in the end. *)
max alpha v, Some move
else if alpha < v then
(* This move improves on the previous moves: keep it. *)
v, Some move
else
(* This move does not improve on the previous candidate move
Discard it. *)
alpha, candidate
in
(* Because v < beta holds, we still have alpha < beta. *)
assert (alpha < beta);
bmove_offspring alpha beta candidate offspring
let bmove alpha beta t : move option =
assert (alpha < beta);
match t with
| TLeaf v ->
None
| TNonLeaf offspring ->
bmove_offspring alpha beta None offspring
| null | https://raw.githubusercontent.com/ocaml-sf/learn-ocaml-corpus/7dcf4d72b49863a3e37e41b3c3097aa4c6101a69/exercises/fpottier/alpha_beta/right/bmove_variant.ml | ocaml | --------------------------------------------------------------------------
The size of a tree.
--------------------------------------------------------------------------
The height of a tree.
--------------------------------------------------------------------------
Evaluating a tree, with a sense parameter: Minimax.
--------------------------------------------------------------------------
Evaluating a tree, without a sense parameter: Negamax.
--------------------------------------------------------------------------
--------------------------------------------------------------------------
We could project [v] onto the closed interval [alpha, beta],
but this does not make any difference; [v] is equivalent to
its projection.
We could return the maximum of the children that we have examined,
but it would be less than or equal to [alpha], so it is equivalent
to [alpha].
Returning [beta] or [v] makes no difference; they are equivalent.
Because v < beta holds, we still have alpha < beta.
--------------------------------------------------------------------------
--------------------------------------------------------------------------
There are no previous moves, so keep this move as a default.
This ensures that we do not return [None] in the end.
This move improves on the previous moves: keep it.
This move does not improve on the previous candidate move
Discard it.
Because v < beta holds, we still have alpha < beta. | open Seq
let rec size (t : tree) : int =
match t with
| TLeaf _ ->
1
| TNonLeaf offspring ->
1 + size_offspring offspring
and size_offspring (offspring : offspring) : int =
match offspring() with
| Nil ->
0
| Cons ((_move, t), offspring) ->
size t + size_offspring offspring
let rec height (t : tree) : int =
match t with
| TLeaf _ ->
0
| TNonLeaf offspring ->
1 + height_offspring offspring
and height_offspring (offspring : offspring) : int =
match offspring() with
| Nil ->
0
| Cons ((_move, t), offspring) ->
max (height t) (height_offspring offspring)
let rec eval (sense : sense) (t : tree) : value =
match t with
| TLeaf v ->
interpret sense v
| TNonLeaf offspring ->
eval_offspring sense offspring
and eval_offspring (sense : sense) (offspring : offspring) : value =
match offspring() with
| Nil ->
unit sense
| Cons ((_move, t), offspring) ->
join sense
(eval (opposite sense) t)
(eval_offspring sense offspring)
let rec nval (t : tree) : value =
match t with
| TLeaf v ->
v
| TNonLeaf offspring ->
nval_offspring offspring
and nval_offspring (offspring : offspring) =
match offspring() with
| Nil ->
bottom
| Cons ((_move, t), offspring) ->
max
(- nval t)
(nval_offspring offspring)
Evaluating a tree , in Negamax style , and looping over children in
a tail - recursive manner .
a tail-recursive manner. *)
let rec ntval (t : tree) : value =
match t with
| TLeaf v ->
v
| TNonLeaf offspring ->
ntval_offspring bottom offspring
and ntval_offspring (running_max : value) (offspring : offspring) : value =
match offspring() with
| Nil ->
running_max
| Cons ((_move, t), offspring) ->
let v = - ntval t in
let running_max = max running_max v in
ntval_offspring running_max offspring
Evaluating a tree , using the Alpha - Beta algorithm .
let rec bval (alpha : value) (beta : value) (t : tree) : value =
assert (alpha < beta);
match t with
| TLeaf v ->
v
| TNonLeaf offspring ->
bval_offspring alpha beta offspring
and bval_offspring (alpha : value) (beta : value) (offspring : offspring) : value =
assert (alpha < beta);
match offspring() with
| Nil ->
alpha
| Cons ((_move, t), offspring) ->
let v = - (bval (-beta) (-alpha) t) in
if beta <= v then
v
else
let alpha = max alpha v in
assert (alpha < beta);
bval_offspring alpha beta offspring
In a game tree where every leaf carries the value -1 ( loss ) , 0 ( draw ) ,
or +1 ( win ) , determining whether the first player is assured to win .
or +1 (win), determining whether the first player is assured to win. *)
let assured_win (t : tree) : bool =
let win = +1 in
bval (win-1) win t >= win
Evaluating a tree using Alpha - Beta and returning the best move .
let rec bmove_offspring alpha beta (candidate : move option) offspring : move option =
assert (alpha < beta);
match offspring() with
| Nil ->
assert (candidate <> None);
candidate
| Cons ((move, t), offspring) ->
let v = - (bval (-beta) (-alpha) t) in
if beta <= v then
Some move
else
let alpha, candidate =
if candidate = None then
max alpha v, Some move
else if alpha < v then
v, Some move
else
alpha, candidate
in
assert (alpha < beta);
bmove_offspring alpha beta candidate offspring
let bmove alpha beta t : move option =
assert (alpha < beta);
match t with
| TLeaf v ->
None
| TNonLeaf offspring ->
bmove_offspring alpha beta None offspring
|
19570a2052db263e515b9be180130c4cb0b08c5e8830175cf8df84ccac336fb4 | CryptoKami/cryptokami-core | Mode.hs | {-# LANGUAGE RankNTypes #-}
# LANGUAGE TemplateHaskell #
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
-- | Execution modes for block logic tests.
module Test.Pos.Block.Logic.Mode
( TestParams (..)
, HasTestParams (..)
, TestInitModeContext (..)
, BlockTestContextTag
, PureDBSnapshotsVar(..)
, BlockTestContext(..)
, BlockTestMode
, runBlockTestMode
, initBlockTestContext
, BlockProperty
, blockPropertyToProperty
, blockPropertyTestable
Lens
, btcGStateL
, btcSystemStartL
, btcLoggerNameL
, btcSSlottingVarL
, btcUpdateContextL
, btcSscStateL
, btcTxpMemL
, btcTxpGlobalSettingsL
, btcSlotIdL
, btcParamsL
, btcReportingContextL
, btcDelegationL
, btcPureDBSnapshotsL
, btcAllSecretsL
-- MonadSlots
, getCurrentSlotTestDefault
, getCurrentSlotBlockingTestDefault
, getCurrentSlotInaccurateTestDefault
, currentTimeSlottingTestDefault
) where
import Universum
import Control.Lens (lens, makeClassy, makeLensesWith)
import Data.Default (def)
import qualified Data.Map as Map
import qualified Data.Text.Buildable
import Data.Time.Units (TimeUnit (..))
import Formatting (bprint, build, formatToString, shown, (%))
import Mockable (Production, currentTime, runProduction)
import qualified Prelude
import System.Wlog (HasLoggerName (..), LoggerName)
import Test.QuickCheck (Arbitrary (..), Gen, Property, forAll, ioProperty)
import Test.QuickCheck.Monadic (PropertyM, monadic)
import Pos.AllSecrets (AllSecrets (..), HasAllSecrets (..), mkAllSecretsSimple)
import Pos.Block.BListener (MonadBListener (..), onApplyBlocksStub, onRollbackBlocksStub)
import Pos.Block.Slog (HasSlogGState (..), mkSlogGState)
import Pos.Communication.Limits (HasAdoptedBlockVersionData (..))
import Pos.Configuration (HasNodeConfiguration)
import Pos.Core (BlockVersionData, CoreConfiguration (..), GenesisConfiguration (..),
GenesisInitializer (..), GenesisSpec (..), HasConfiguration, SlotId,
Timestamp (..), genesisSecretKeys, withGenesisSpec)
import Pos.Core.Configuration (HasGenesisBlockVersionData, withGenesisBlockVersionData)
import Pos.DB (DBPure, MonadDB (..), MonadDBRead (..), MonadGState (..))
import qualified Pos.DB as DB
import qualified Pos.DB.Block as DB
import Pos.DB.DB (gsAdoptedBVDataDefault, initNodeDBs)
import Pos.DB.Pure (DBPureVar, newDBPureVar)
import Pos.Delegation (DelegationVar, HasDlgConfiguration, mkDelegationVar)
import Pos.Generator.Block (BlockGenMode)
import Pos.Generator.BlockEvent (SnapshotId)
import qualified Pos.GState as GS
import Pos.KnownPeers (MonadFormatPeers (..))
import Pos.Launcher.Configuration (Configuration (..), HasConfigurations)
import Pos.Lrc (LrcContext (..), mkLrcSyncData)
import Pos.Network.Types (HasNodeType (..), NodeType (..))
import Pos.Reporting (HasReportingContext (..), ReportingContext, emptyReportingContext)
import Pos.Slotting (HasSlottingVar (..), MonadSlots (..), SimpleSlottingMode,
SimpleSlottingVar, currentTimeSlottingSimple,
getCurrentSlotBlockingSimple, getCurrentSlotInaccurateSimple,
getCurrentSlotSimple, mkSimpleSlottingVar)
import Pos.Slotting.MemState (MonadSlotsData)
import Pos.Slotting.Types (SlottingData)
import Pos.Ssc (HasSscConfiguration, SscMemTag, SscState, mkSscState)
import Pos.Txp (GenericTxpLocalData, MempoolExt, MonadTxpLocal (..), TxpGlobalSettings,
TxpHolderTag, mkTxpLocalData, txNormalize, txProcessTransactionNoLock,
txpGlobalSettings)
import Pos.Update.Context (UpdateContext, mkUpdateContext)
import Pos.Util (newInitFuture, postfixLFields, postfixLFields2)
import Pos.Util.CompileInfo (withCompileInfo)
import Pos.Util.LoggerName (HasLoggerName' (..), askLoggerNameDefault,
modifyLoggerNameDefault)
import Pos.Util.Util (HasLens (..))
import Pos.WorkMode (EmptyMempoolExt)
import Test.Pos.Block.Logic.Emulation (Emulation (..), runEmulation, sudoLiftIO)
import Test.Pos.Configuration (defaultTestBlockVersionData, defaultTestConf,
defaultTestGenesisSpec)
----------------------------------------------------------------------------
-- Parameters
----------------------------------------------------------------------------
-- | This data type contains all parameters which should be generated
-- before testing starts.
data TestParams = TestParams
    { _tpStartTime          :: !Timestamp
    -- ^ System start time.
    , _tpBlockVersionData   :: !BlockVersionData
    -- ^ Genesis 'BlockVersionData' in tests.
    , _tpGenesisInitializer :: !GenesisInitializer
    -- ^ 'GenesisInitializer' in 'TestParams' allows one to use custom
    -- genesis data.
    }

-- Derives the 'HasTestParams' class and lenses for the fields above.
makeClassy ''TestParams
-- | Human-readable rendering; only the start time and the genesis
-- initializer are shown (block version data is omitted).
instance Buildable TestParams where
    build TestParams {..} =
        bprint ("TestParams {\n"%
                " start time: "%shown%"\n"%
                " initializer: "%build%"\n"%
                "}\n")
            _tpStartTime
            _tpGenesisInitializer

-- | 'Show' delegates to the 'Buildable' instance above.
instance Show TestParams where
    show = formatToString build
-- | Only the genesis initializer is random: the start time is pinned
-- to microsecond 0 and the block version data to the test default.
instance Arbitrary TestParams where
    arbitrary = do
        let _tpStartTime = Timestamp (fromMicroseconds 0)
        let _tpBlockVersionData = defaultTestBlockVersionData
        -- 'genGenesisInitializer' needs 'HasGenesisBlockVersionData',
        -- satisfied from the value chosen just above.
        _tpGenesisInitializer <-
            withGenesisBlockVersionData
                _tpBlockVersionData
                genGenesisInitializer
        return TestParams {..}
-- | Generate a 'GenesisInitializer' with every field drawn from its
-- 'Arbitrary' instance (record-wildcard construction).
genGenesisInitializer :: HasGenesisBlockVersionData => Gen GenesisInitializer
genGenesisInitializer = do
    giTestBalance <- arbitrary
    giFakeAvvmBalance <- arbitrary
    giAvvmBalanceFactor <- arbitrary
    giUseHeavyDlg <- arbitrary
    giSeed <- arbitrary
    return GenesisInitializer {..}
-- This function creates 'CoreConfiguration' from 'TestParams' and
-- uses it to satisfy 'HasConfiguration'.
withTestParams :: TestParams -> (HasConfiguration => r) -> r
withTestParams TestParams {..} = withGenesisSpec _tpStartTime coreConfiguration
  where
    -- Core section of the default test configuration, used as the base.
    defaultCoreConf :: CoreConfiguration
    defaultCoreConf = ccCore defaultTestConf
    -- Same as the base, but with the genesis part replaced by a spec
    -- built from this test's parameters.
    coreConfiguration :: CoreConfiguration
    coreConfiguration = defaultCoreConf {ccGenesis = GCSpec genesisSpec}
    genesisSpec =
        defaultTestGenesisSpec
        { gsInitializer = _tpGenesisInitializer
        , gsBlockVersionData = _tpBlockVersionData
        }
----------------------------------------------------------------------------
-- Init mode with instances
----------------------------------------------------------------------------
-- The fields are lazy on purpose: this allows using them with
-- futures.
data TestInitModeContext = TestInitModeContext
    { timcDBPureVar   :: DBPureVar
    , timcSlottingVar :: TVar SlottingData
    , timcSystemStart :: !Timestamp
    , timcLrcContext  :: LrcContext
    }

-- Lenses with a "_L" suffix ('timcDBPureVar_L', ...), used by the
-- boilerplate 'HasLens' / 'HasSlottingVar' instances below.
makeLensesWith postfixLFields ''TestInitModeContext

type TestInitMode = ReaderT TestInitModeContext Production

-- | Run an initialization-mode action against the given context.
runTestInitMode :: TestInitModeContext -> TestInitMode a -> IO a
runTestInitMode ctx = runProduction . flip runReaderT ctx
----------------------------------------------------------------------------
-- Main context
----------------------------------------------------------------------------
-- | Named snapshots of the pure DB, keyed by 'SnapshotId'.
newtype PureDBSnapshotsVar = PureDBSnapshotsVar
    { getPureDBSnapshotsVar :: IORef (Map SnapshotId DBPure)
    }

-- | Everything a block test needs at runtime; built by
-- 'initBlockTestContext'.
data BlockTestContext = BlockTestContext
    { btcGState            :: !GS.GStateContext
    , btcSystemStart       :: !Timestamp
    , btcLoggerName        :: !LoggerName
    , btcSSlottingVar      :: !SimpleSlottingVar
    , btcUpdateContext     :: !UpdateContext
    , btcSscState          :: !SscState
    , btcTxpMem            :: !(GenericTxpLocalData EmptyMempoolExt)
    , btcTxpGlobalSettings :: !TxpGlobalSettings
    , btcSlotId            :: !(Maybe SlotId)
    -- ^ If this value is 'Just' we will return it as the current
    -- slot. Otherwise simple slotting is used.
    , btcParams            :: !TestParams
    , btcReportingContext  :: !ReportingContext
    , btcDelegation        :: !DelegationVar
    , btcPureDBSnapshots   :: !PureDBSnapshotsVar
    , btcAllSecrets        :: !AllSecrets
    }

-- Lenses with an "L" suffix ('btcGStateL', ...).
makeLensesWith postfixLFields2 ''BlockTestContext

instance HasTestParams BlockTestContext where
    testParams = btcParamsL

instance HasAllSecrets BlockTestContext where
    allSecrets = btcAllSecretsL
----------------------------------------------------------------------------
-- Initialization
----------------------------------------------------------------------------
-- | Build a 'BlockTestContext' from 'TestParams' and pass it to the
-- callback. Initialization itself runs in 'TestInitMode'; the callback
-- runs in 'Emulation'.
initBlockTestContext ::
       ( HasConfiguration
       , HasSscConfiguration
       , HasDlgConfiguration
       , HasNodeConfiguration
       )
    => TestParams
    -> (BlockTestContext -> Emulation a)
    -> Emulation a
initBlockTestContext tp@TestParams {..} callback = do
    clockVar <- Emulation ask
    dbPureVar <- newDBPureVar
    -- The slotting var and LRC context are needed to *construct* the
    -- init context, but are only produced during initialization, so
    -- they start out as futures and are filled in below.
    (futureLrcCtx, putLrcCtx) <- newInitFuture "lrcCtx"
    (futureSlottingVar, putSlottingVar) <- newInitFuture "slottingVar"
    systemStart <- Timestamp <$> currentTime
    let initCtx =
            TestInitModeContext
                dbPureVar
                futureSlottingVar
                systemStart
                futureLrcCtx
        initBlockTestContextDo = do
            initNodeDBs
            _gscSlottingVar <- newTVarIO =<< GS.getSlottingData
            putSlottingVar _gscSlottingVar
            btcSSlottingVar <- mkSimpleSlottingVar
            let btcLoggerName = "testing"
            lcLrcSync <- mkLrcSyncData >>= newTVarIO
            let _gscLrcContext = LrcContext {..}
            putLrcCtx _gscLrcContext
            btcUpdateContext <- mkUpdateContext
            btcSscState <- mkSscState
            _gscSlogGState <- mkSlogGState
            btcTxpMem <- mkTxpLocalData
            let btcTxpGlobalSettings = txpGlobalSettings
            let btcReportingContext = emptyReportingContext
            -- 'Nothing' means "use simple slotting"; a test can pin the
            -- current slot via 'btcSlotId'.
            let btcSlotId = Nothing
            let btcParams = tp
            let btcGState = GS.GStateContext {_gscDB = DB.PureDB dbPureVar, ..}
            btcDelegation <- mkDelegationVar
            btcPureDBSnapshots <- PureDBSnapshotsVar <$> newIORef Map.empty
            let secretKeys =
                    case genesisSecretKeys of
                        Nothing ->
                            error "initBlockTestContext: no genesisSecretKeys"
                        Just ks -> ks
            let btcAllSecrets = mkAllSecretsSimple secretKeys
            let btCtx = BlockTestContext {btcSystemStart = systemStart, ..}
            -- Hand control back to the caller inside 'Emulation',
            -- reusing the clock var captured at the top.
            liftIO $ flip runReaderT clockVar $ unEmulation $ callback btCtx
    sudoLiftIO $ runTestInitMode initCtx $ initBlockTestContextDo
----------------------------------------------------------------------------
-- ExecMode
----------------------------------------------------------------------------
-- Tag used to fetch the whole 'BlockTestContext' via 'HasLens'.
data BlockTestContextTag

instance HasLens BlockTestContextTag BlockTestContext BlockTestContext where
    lensOf = identity

type BlockTestMode = ReaderT BlockTestContext Emulation

-- | Run a block-test action: initialize a context from the parameters
-- and execute in 'Emulation' started at the test's start time.
runBlockTestMode ::
       ( HasNodeConfiguration
       , HasSscConfiguration
       , HasDlgConfiguration
       , HasConfiguration
       )
    => TestParams
    -> BlockTestMode a
    -> IO a
runBlockTestMode tp action =
    runEmulation (getTimestamp $ tp ^. tpStartTime) $
    initBlockTestContext tp (runReaderT action)
----------------------------------------------------------------------------
-- Property
----------------------------------------------------------------------------
type BlockProperty = PropertyM BlockTestMode
-- | Convert 'BlockProperty' to 'Property' using given generator of
-- 'TestParams'.
blockPropertyToProperty ::
       (HasNodeConfiguration, HasDlgConfiguration, HasSscConfiguration)
    => Gen TestParams
    -> (HasConfiguration =>
            BlockProperty a)
    -> Property
blockPropertyToProperty tpGen blockProperty =
    forAll tpGen $ \tp ->
        -- The generated parameters provide 'HasConfiguration' for the
        -- property, which then runs in IO via 'runBlockTestMode'.
        withTestParams tp $
        monadic (ioProperty . runBlockTestMode tp) blockProperty
-- | Simplified version of 'blockPropertyToProperty' which uses
-- 'Arbitrary' instance to generate 'TestParams'.
--
-- You can treat it as 'Testable' instance for 'HasConfiguration =>
-- BlockProperty a', but unfortunately it's impossible to write such
-- instance.
--
-- The following code doesn't compile:
--
-- instance (HasNodeConfiguration, HasSscConfiguration)
--          => Testable (HasConfiguration => BlockProperty a) where
--     property = blockPropertyToProperty arbitrary
blockPropertyTestable ::
       (HasNodeConfiguration, HasDlgConfiguration, HasSscConfiguration)
    => (HasConfiguration => BlockProperty a)
    -> Property
-- Delegates to 'blockPropertyToProperty' with arbitrary 'TestParams'.
blockPropertyTestable = blockPropertyToProperty arbitrary
----------------------------------------------------------------------------
-- Boilerplate TestInitContext instances
----------------------------------------------------------------------------
instance HasLens DBPureVar TestInitModeContext DBPureVar where
    lensOf = timcDBPureVar_L

instance HasLens LrcContext TestInitModeContext LrcContext where
    lensOf = timcLrcContext_L

instance HasSlottingVar TestInitModeContext where
    slottingTimestamp = timcSystemStart_L
    slottingVar = timcSlottingVar_L

-- DB access in init mode goes to the pure (in-memory) DB.
instance HasConfiguration => MonadDBRead TestInitMode where
    dbGet = DB.dbGetPureDefault
    dbIterSource = DB.dbIterSourcePureDefault
    dbGetSerBlock = DB.dbGetSerBlockPureDefault
    dbGetSerUndo = DB.dbGetSerUndoPureDefault

instance HasConfiguration => MonadDB TestInitMode where
    dbPut = DB.dbPutPureDefault
    dbWriteBatch = DB.dbWriteBatchPureDefault
    dbDelete = DB.dbDeletePureDefault
    dbPutSerBlund = DB.dbPutSerBlundPureDefault

-- Slotting in init mode always uses simple slotting; note a fresh
-- slotting var is created on every query.
instance (HasConfiguration, MonadSlotsData ctx TestInitMode)
      => MonadSlots ctx TestInitMode
  where
    getCurrentSlot = getCurrentSlotSimple =<< mkSimpleSlottingVar
    getCurrentSlotBlocking = getCurrentSlotBlockingSimple =<< mkSimpleSlottingVar
    getCurrentSlotInaccurate = getCurrentSlotInaccurateSimple =<< mkSimpleSlottingVar
    currentTimeSlotting = currentTimeSlottingSimple
----------------------------------------------------------------------------
-- Boilerplate BlockTestContext instances
----------------------------------------------------------------------------
instance GS.HasGStateContext BlockTestContext where
    gStateContext = btcGStateL

-- Tests must only ever see the pure DB; touching a real DB errors out.
instance HasLens DBPureVar BlockTestContext DBPureVar where
    lensOf = GS.gStateContext . GS.gscDB . pureDBLens
      where
        -- pva701: sorry for newbie code
        getter = \case
            DB.RealDB _ -> realDBInTestsError
            DB.PureDB pdb -> pdb
        setter _ pdb = DB.PureDB pdb
        pureDBLens = lens getter setter
        realDBInTestsError = error "You are using real db in tests"

instance HasLens PureDBSnapshotsVar BlockTestContext PureDBSnapshotsVar where
    lensOf = btcPureDBSnapshotsL

instance HasLens LoggerName BlockTestContext LoggerName where
    lensOf = btcLoggerNameL

instance HasLens LrcContext BlockTestContext LrcContext where
    lensOf = GS.gStateContext . GS.gscLrcContext

instance HasLens UpdateContext BlockTestContext UpdateContext where
    lensOf = btcUpdateContextL

instance HasLens SscMemTag BlockTestContext SscState where
    lensOf = btcSscStateL

instance HasLens TxpGlobalSettings BlockTestContext TxpGlobalSettings where
    lensOf = btcTxpGlobalSettingsL

instance HasLens TestParams BlockTestContext TestParams where
    lensOf = btcParamsL

instance HasLens SimpleSlottingVar BlockTestContext SimpleSlottingVar where
    lensOf = btcSSlottingVarL

instance HasReportingContext BlockTestContext where
    reportingContext = btcReportingContextL

instance HasSlottingVar BlockTestContext where
    slottingTimestamp = btcSystemStartL
    slottingVar = GS.gStateContext . GS.gscSlottingVar

instance HasSlogGState BlockTestContext where
    slogGState = GS.gStateContext . GS.gscSlogGState

instance HasLens DelegationVar BlockTestContext DelegationVar where
    lensOf = btcDelegationL

instance HasLens TxpHolderTag BlockTestContext (GenericTxpLocalData EmptyMempoolExt) where
    lensOf = btcTxpMemL

instance HasLoggerName' BlockTestContext where
    loggerName = lensOf @LoggerName

instance HasNodeType BlockTestContext where
    getNodeType _ = NodeCore -- doesn't really matter, it's for reporting

instance {-# OVERLAPPING #-} HasLoggerName BlockTestMode where
    askLoggerName = askLoggerNameDefault
    modifyLoggerName = modifyLoggerNameDefault
type TestSlottingContext ctx m =
    ( SimpleSlottingMode ctx m
    , HasLens BlockTestContextTag ctx BlockTestContext
    )

-- | Dispatch a slotting query: when the context pins a slot
-- ('btcSlotId' is 'Just'), answer from it; otherwise ask simple
-- slotting through the context's slotting var.
testSlottingHelper
    :: TestSlottingContext ctx m
    => (SimpleSlottingVar -> m a)
    -> (SlotId -> a)
    -> m a
testSlottingHelper targetF alternative = do
    btc <- view (lensOf @BlockTestContextTag)
    maybe (targetF (btcSSlottingVar btc)) (pure . alternative) (btcSlotId btc)
-- | Current slot, or the pinned 'btcSlotId' if one is set.
getCurrentSlotTestDefault :: TestSlottingContext ctx m => m (Maybe SlotId)
getCurrentSlotTestDefault = testSlottingHelper getCurrentSlotSimple Just

-- | Blocking variant; a pinned slot is returned immediately.
getCurrentSlotBlockingTestDefault :: TestSlottingContext ctx m => m SlotId
getCurrentSlotBlockingTestDefault = testSlottingHelper getCurrentSlotBlockingSimple identity

-- | Inaccurate variant; a pinned slot is returned as-is.
getCurrentSlotInaccurateTestDefault :: TestSlottingContext ctx m => m SlotId
getCurrentSlotInaccurateTestDefault = testSlottingHelper getCurrentSlotInaccurateSimple identity

-- | Time queries always go through simple slotting (no pinning).
currentTimeSlottingTestDefault :: SimpleSlottingMode ctx m => m Timestamp
currentTimeSlottingTestDefault = currentTimeSlottingSimple
-- Slotting in test mode honors the pinned slot (see helpers above).
instance (HasConfiguration, MonadSlotsData ctx BlockTestMode)
      => MonadSlots ctx BlockTestMode where
    getCurrentSlot = getCurrentSlotTestDefault
    getCurrentSlotBlocking = getCurrentSlotBlockingTestDefault
    getCurrentSlotInaccurate = getCurrentSlotInaccurateTestDefault
    currentTimeSlotting = currentTimeSlottingTestDefault

-- All DB access in test mode is backed by the pure in-memory DB.
instance HasConfiguration => MonadDBRead BlockTestMode where
    dbGet = DB.dbGetPureDefault
    dbIterSource = DB.dbIterSourcePureDefault
    dbGetSerBlock = DB.dbGetSerBlockPureDefault
    dbGetSerUndo = DB.dbGetSerUndoPureDefault

instance HasConfiguration => MonadDB BlockTestMode where
    dbPut = DB.dbPutPureDefault
    dbWriteBatch = DB.dbWriteBatchPureDefault
    dbDelete = DB.dbDeletePureDefault
    dbPutSerBlund = DB.dbPutSerBlundPureDefault

instance HasConfiguration => MonadGState BlockTestMode where
    gsAdoptedBVData = gsAdoptedBVDataDefault

instance HasConfiguration => HasAdoptedBlockVersionData BlockTestMode where
    adoptedBVData = gsAdoptedBVData

-- Block listener hooks are stubs in tests.
instance MonadBListener BlockTestMode where
    onApplyBlocks = onApplyBlocksStub
    onRollbackBlocks = onRollbackBlocksStub

instance MonadFormatPeers BlockTestMode where
    formatKnownPeers _ = pure Nothing

type instance MempoolExt BlockTestMode = EmptyMempoolExt

instance HasConfigurations => MonadTxpLocal (BlockGenMode EmptyMempoolExt BlockTestMode) where
    txpNormalize = withCompileInfo def $ txNormalize
    txpProcessTx = withCompileInfo def $ txProcessTransactionNoLock

instance HasConfigurations => MonadTxpLocal BlockTestMode where
    txpNormalize = withCompileInfo def $ txNormalize
    txpProcessTx = withCompileInfo def $ txProcessTransactionNoLock
| null | https://raw.githubusercontent.com/CryptoKami/cryptokami-core/12ca60a9ad167b6327397b3b2f928c19436ae114/generator/src/Test/Pos/Block/Logic/Mode.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
| Execution modes for block logic tests.
MonadSlots
--------------------------------------------------------------------------
Parameters
--------------------------------------------------------------------------
| This data type contains all parameters which should be generated
before testing starts.
^ System start time.
genesis data.
--------------------------------------------------------------------------
--------------------------------------------------------------------------
The fields are lazy on purpose: this allows using them with
futures.
--------------------------------------------------------------------------
Main context
--------------------------------------------------------------------------
^ If this value is 'Just' we will return it as the current
slot. Otherwise simple slotting is used.
--------------------------------------------------------------------------
Initialization
--------------------------------------------------------------------------
--------------------------------------------------------------------------
ExecMode
--------------------------------------------------------------------------
--------------------------------------------------------------------------
Property
--------------------------------------------------------------------------
| Convert 'BlockProperty' to 'Property' using given generator of
BlockProperty a', but unfortunately it's impossible to write such
instance.
The following code doesn't compile:
--------------------------------------------------------------------------
--------------------------------------------------------------------------
--------------------------------------------------------------------------
Boilerplate BlockTestContext instances
--------------------------------------------------------------------------
pva701: sorry for newbie code
doesn't really matter, it's for reporting
# OVERLAPPING # | # LANGUAGE TemplateHaskell #
module Test.Pos.Block.Logic.Mode
( TestParams (..)
, HasTestParams (..)
, TestInitModeContext (..)
, BlockTestContextTag
, PureDBSnapshotsVar(..)
, BlockTestContext(..)
, BlockTestMode
, runBlockTestMode
, initBlockTestContext
, BlockProperty
, blockPropertyToProperty
, blockPropertyTestable
Lens
, btcGStateL
, btcSystemStartL
, btcLoggerNameL
, btcSSlottingVarL
, btcUpdateContextL
, btcSscStateL
, btcTxpMemL
, btcTxpGlobalSettingsL
, btcSlotIdL
, btcParamsL
, btcReportingContextL
, btcDelegationL
, btcPureDBSnapshotsL
, btcAllSecretsL
, getCurrentSlotTestDefault
, getCurrentSlotBlockingTestDefault
, getCurrentSlotInaccurateTestDefault
, currentTimeSlottingTestDefault
) where
import Universum
import Control.Lens (lens, makeClassy, makeLensesWith)
import Data.Default (def)
import qualified Data.Map as Map
import qualified Data.Text.Buildable
import Data.Time.Units (TimeUnit (..))
import Formatting (bprint, build, formatToString, shown, (%))
import Mockable (Production, currentTime, runProduction)
import qualified Prelude
import System.Wlog (HasLoggerName (..), LoggerName)
import Test.QuickCheck (Arbitrary (..), Gen, Property, forAll, ioProperty)
import Test.QuickCheck.Monadic (PropertyM, monadic)
import Pos.AllSecrets (AllSecrets (..), HasAllSecrets (..), mkAllSecretsSimple)
import Pos.Block.BListener (MonadBListener (..), onApplyBlocksStub, onRollbackBlocksStub)
import Pos.Block.Slog (HasSlogGState (..), mkSlogGState)
import Pos.Communication.Limits (HasAdoptedBlockVersionData (..))
import Pos.Configuration (HasNodeConfiguration)
import Pos.Core (BlockVersionData, CoreConfiguration (..), GenesisConfiguration (..),
GenesisInitializer (..), GenesisSpec (..), HasConfiguration, SlotId,
Timestamp (..), genesisSecretKeys, withGenesisSpec)
import Pos.Core.Configuration (HasGenesisBlockVersionData, withGenesisBlockVersionData)
import Pos.DB (DBPure, MonadDB (..), MonadDBRead (..), MonadGState (..))
import qualified Pos.DB as DB
import qualified Pos.DB.Block as DB
import Pos.DB.DB (gsAdoptedBVDataDefault, initNodeDBs)
import Pos.DB.Pure (DBPureVar, newDBPureVar)
import Pos.Delegation (DelegationVar, HasDlgConfiguration, mkDelegationVar)
import Pos.Generator.Block (BlockGenMode)
import Pos.Generator.BlockEvent (SnapshotId)
import qualified Pos.GState as GS
import Pos.KnownPeers (MonadFormatPeers (..))
import Pos.Launcher.Configuration (Configuration (..), HasConfigurations)
import Pos.Lrc (LrcContext (..), mkLrcSyncData)
import Pos.Network.Types (HasNodeType (..), NodeType (..))
import Pos.Reporting (HasReportingContext (..), ReportingContext, emptyReportingContext)
import Pos.Slotting (HasSlottingVar (..), MonadSlots (..), SimpleSlottingMode,
SimpleSlottingVar, currentTimeSlottingSimple,
getCurrentSlotBlockingSimple, getCurrentSlotInaccurateSimple,
getCurrentSlotSimple, mkSimpleSlottingVar)
import Pos.Slotting.MemState (MonadSlotsData)
import Pos.Slotting.Types (SlottingData)
import Pos.Ssc (HasSscConfiguration, SscMemTag, SscState, mkSscState)
import Pos.Txp (GenericTxpLocalData, MempoolExt, MonadTxpLocal (..), TxpGlobalSettings,
TxpHolderTag, mkTxpLocalData, txNormalize, txProcessTransactionNoLock,
txpGlobalSettings)
import Pos.Update.Context (UpdateContext, mkUpdateContext)
import Pos.Util (newInitFuture, postfixLFields, postfixLFields2)
import Pos.Util.CompileInfo (withCompileInfo)
import Pos.Util.LoggerName (HasLoggerName' (..), askLoggerNameDefault,
modifyLoggerNameDefault)
import Pos.Util.Util (HasLens (..))
import Pos.WorkMode (EmptyMempoolExt)
import Test.Pos.Block.Logic.Emulation (Emulation (..), runEmulation, sudoLiftIO)
import Test.Pos.Configuration (defaultTestBlockVersionData, defaultTestConf,
defaultTestGenesisSpec)
data TestParams = TestParams
{ _tpStartTime :: !Timestamp
, _tpBlockVersionData :: !BlockVersionData
^ Genesis ' BlockVersionData ' in tests .
, _tpGenesisInitializer :: !GenesisInitializer
^ ' GenesisInitializer ' in ' TestParams ' allows one to use custom
}
makeClassy ''TestParams
instance Buildable TestParams where
build TestParams {..} =
bprint ("TestParams {\n"%
" start time: "%shown%"\n"%
" initializer: "%build%"\n"%
"}\n")
_tpStartTime
_tpGenesisInitializer
instance Show TestParams where
show = formatToString build
instance Arbitrary TestParams where
arbitrary = do
let _tpStartTime = Timestamp (fromMicroseconds 0)
let _tpBlockVersionData = defaultTestBlockVersionData
_tpGenesisInitializer <-
withGenesisBlockVersionData
_tpBlockVersionData
genGenesisInitializer
return TestParams {..}
genGenesisInitializer :: HasGenesisBlockVersionData => Gen GenesisInitializer
genGenesisInitializer = do
giTestBalance <- arbitrary
giFakeAvvmBalance <- arbitrary
giAvvmBalanceFactor <- arbitrary
giUseHeavyDlg <- arbitrary
giSeed <- arbitrary
return GenesisInitializer {..}
This function creates ' CoreConfiguration ' from ' TestParams ' and
uses it to satisfy ' HasConfiguration ' .
withTestParams :: TestParams -> (HasConfiguration => r) -> r
withTestParams TestParams {..} = withGenesisSpec _tpStartTime coreConfiguration
where
defaultCoreConf :: CoreConfiguration
defaultCoreConf = ccCore defaultTestConf
coreConfiguration :: CoreConfiguration
coreConfiguration = defaultCoreConf {ccGenesis = GCSpec genesisSpec}
genesisSpec =
defaultTestGenesisSpec
{ gsInitializer = _tpGenesisInitializer
, gsBlockVersionData = _tpBlockVersionData
}
Init mode with instances
data TestInitModeContext = TestInitModeContext
{ timcDBPureVar :: DBPureVar
, timcSlottingVar :: TVar SlottingData
, timcSystemStart :: !Timestamp
, timcLrcContext :: LrcContext
}
makeLensesWith postfixLFields ''TestInitModeContext
type TestInitMode = ReaderT TestInitModeContext Production
runTestInitMode :: TestInitModeContext -> TestInitMode a -> IO a
runTestInitMode ctx = runProduction . flip runReaderT ctx
newtype PureDBSnapshotsVar = PureDBSnapshotsVar
{ getPureDBSnapshotsVar :: IORef (Map SnapshotId DBPure)
}
data BlockTestContext = BlockTestContext
{ btcGState :: !GS.GStateContext
, btcSystemStart :: !Timestamp
, btcLoggerName :: !LoggerName
, btcSSlottingVar :: !SimpleSlottingVar
, btcUpdateContext :: !UpdateContext
, btcSscState :: !SscState
, btcTxpMem :: !(GenericTxpLocalData EmptyMempoolExt)
, btcTxpGlobalSettings :: !TxpGlobalSettings
, btcSlotId :: !(Maybe SlotId)
, btcParams :: !TestParams
, btcReportingContext :: !ReportingContext
, btcDelegation :: !DelegationVar
, btcPureDBSnapshots :: !PureDBSnapshotsVar
, btcAllSecrets :: !AllSecrets
}
makeLensesWith postfixLFields2 ''BlockTestContext
instance HasTestParams BlockTestContext where
testParams = btcParamsL
instance HasAllSecrets BlockTestContext where
allSecrets = btcAllSecretsL
initBlockTestContext ::
( HasConfiguration
, HasSscConfiguration
, HasDlgConfiguration
, HasNodeConfiguration
)
=> TestParams
-> (BlockTestContext -> Emulation a)
-> Emulation a
initBlockTestContext tp@TestParams {..} callback = do
clockVar <- Emulation ask
dbPureVar <- newDBPureVar
(futureLrcCtx, putLrcCtx) <- newInitFuture "lrcCtx"
(futureSlottingVar, putSlottingVar) <- newInitFuture "slottingVar"
systemStart <- Timestamp <$> currentTime
let initCtx =
TestInitModeContext
dbPureVar
futureSlottingVar
systemStart
futureLrcCtx
initBlockTestContextDo = do
initNodeDBs
_gscSlottingVar <- newTVarIO =<< GS.getSlottingData
putSlottingVar _gscSlottingVar
btcSSlottingVar <- mkSimpleSlottingVar
let btcLoggerName = "testing"
lcLrcSync <- mkLrcSyncData >>= newTVarIO
let _gscLrcContext = LrcContext {..}
putLrcCtx _gscLrcContext
btcUpdateContext <- mkUpdateContext
btcSscState <- mkSscState
_gscSlogGState <- mkSlogGState
btcTxpMem <- mkTxpLocalData
let btcTxpGlobalSettings = txpGlobalSettings
let btcReportingContext = emptyReportingContext
let btcSlotId = Nothing
let btcParams = tp
let btcGState = GS.GStateContext {_gscDB = DB.PureDB dbPureVar, ..}
btcDelegation <- mkDelegationVar
btcPureDBSnapshots <- PureDBSnapshotsVar <$> newIORef Map.empty
let secretKeys =
case genesisSecretKeys of
Nothing ->
error "initBlockTestContext: no genesisSecretKeys"
Just ks -> ks
let btcAllSecrets = mkAllSecretsSimple secretKeys
let btCtx = BlockTestContext {btcSystemStart = systemStart, ..}
liftIO $ flip runReaderT clockVar $ unEmulation $ callback btCtx
sudoLiftIO $ runTestInitMode initCtx $ initBlockTestContextDo
data BlockTestContextTag
instance HasLens BlockTestContextTag BlockTestContext BlockTestContext where
lensOf = identity
type BlockTestMode = ReaderT BlockTestContext Emulation
runBlockTestMode ::
( HasNodeConfiguration
, HasSscConfiguration
, HasDlgConfiguration
, HasConfiguration
)
=> TestParams
-> BlockTestMode a
-> IO a
runBlockTestMode tp action =
runEmulation (getTimestamp $ tp ^. tpStartTime) $
initBlockTestContext tp (runReaderT action)
type BlockProperty = PropertyM BlockTestMode
' TestParams ' .
blockPropertyToProperty ::
(HasNodeConfiguration, HasDlgConfiguration, HasSscConfiguration)
=> Gen TestParams
-> (HasConfiguration =>
BlockProperty a)
-> Property
blockPropertyToProperty tpGen blockProperty =
forAll tpGen $ \tp ->
withTestParams tp $
monadic (ioProperty . runBlockTestMode tp) blockProperty
| Simplified version of ' blockPropertyToProperty ' which uses
' Arbitrary ' instance to generate ' TestParams ' .
You can treat it as ' Testable ' instance for ' HasConfiguration = >
instance ( HasNodeConfiguration , HasSscConfiguration )
= > Testable ( HasConfiguration = > BlockProperty a ) where
property = blockPropertyToProperty arbitrary
blockPropertyTestable ::
(HasNodeConfiguration, HasDlgConfiguration, HasSscConfiguration)
=> (HasConfiguration => BlockProperty a)
-> Property
blockPropertyTestable = blockPropertyToProperty arbitrary
Boilerplate TestInitContext instances
instance HasLens DBPureVar TestInitModeContext DBPureVar where
lensOf = timcDBPureVar_L
instance HasLens LrcContext TestInitModeContext LrcContext where
lensOf = timcLrcContext_L
instance HasSlottingVar TestInitModeContext where
slottingTimestamp = timcSystemStart_L
slottingVar = timcSlottingVar_L
instance HasConfiguration => MonadDBRead TestInitMode where
dbGet = DB.dbGetPureDefault
dbIterSource = DB.dbIterSourcePureDefault
dbGetSerBlock = DB.dbGetSerBlockPureDefault
dbGetSerUndo = DB.dbGetSerUndoPureDefault
instance HasConfiguration => MonadDB TestInitMode where
dbPut = DB.dbPutPureDefault
dbWriteBatch = DB.dbWriteBatchPureDefault
dbDelete = DB.dbDeletePureDefault
dbPutSerBlund = DB.dbPutSerBlundPureDefault
instance (HasConfiguration, MonadSlotsData ctx TestInitMode)
=> MonadSlots ctx TestInitMode
where
getCurrentSlot = getCurrentSlotSimple =<< mkSimpleSlottingVar
getCurrentSlotBlocking = getCurrentSlotBlockingSimple =<< mkSimpleSlottingVar
getCurrentSlotInaccurate = getCurrentSlotInaccurateSimple =<< mkSimpleSlottingVar
currentTimeSlotting = currentTimeSlottingSimple
instance GS.HasGStateContext BlockTestContext where
gStateContext = btcGStateL
instance HasLens DBPureVar BlockTestContext DBPureVar where
lensOf = GS.gStateContext . GS.gscDB . pureDBLens
where
getter = \case
DB.RealDB _ -> realDBInTestsError
DB.PureDB pdb -> pdb
setter _ pdb = DB.PureDB pdb
pureDBLens = lens getter setter
realDBInTestsError = error "You are using real db in tests"
instance HasLens PureDBSnapshotsVar BlockTestContext PureDBSnapshotsVar where
lensOf = btcPureDBSnapshotsL
instance HasLens LoggerName BlockTestContext LoggerName where
lensOf = btcLoggerNameL
instance HasLens LrcContext BlockTestContext LrcContext where
lensOf = GS.gStateContext . GS.gscLrcContext
instance HasLens UpdateContext BlockTestContext UpdateContext where
lensOf = btcUpdateContextL
instance HasLens SscMemTag BlockTestContext SscState where
lensOf = btcSscStateL
instance HasLens TxpGlobalSettings BlockTestContext TxpGlobalSettings where
lensOf = btcTxpGlobalSettingsL
instance HasLens TestParams BlockTestContext TestParams where
lensOf = btcParamsL
instance HasLens SimpleSlottingVar BlockTestContext SimpleSlottingVar where
lensOf = btcSSlottingVarL
instance HasReportingContext BlockTestContext where
reportingContext = btcReportingContextL
instance HasSlottingVar BlockTestContext where
slottingTimestamp = btcSystemStartL
slottingVar = GS.gStateContext . GS.gscSlottingVar
instance HasSlogGState BlockTestContext where
slogGState = GS.gStateContext . GS.gscSlogGState
instance HasLens DelegationVar BlockTestContext DelegationVar where
lensOf = btcDelegationL
instance HasLens TxpHolderTag BlockTestContext (GenericTxpLocalData EmptyMempoolExt) where
lensOf = btcTxpMemL
instance HasLoggerName' BlockTestContext where
loggerName = lensOf @LoggerName
instance HasNodeType BlockTestContext where
askLoggerName = askLoggerNameDefault
modifyLoggerName = modifyLoggerNameDefault
type TestSlottingContext ctx m =
( SimpleSlottingMode ctx m
, HasLens BlockTestContextTag ctx BlockTestContext
)
testSlottingHelper
:: TestSlottingContext ctx m
=> (SimpleSlottingVar -> m a)
-> (SlotId -> a)
-> m a
testSlottingHelper targetF alternative = do
BlockTestContext{..} <- view (lensOf @BlockTestContextTag)
case btcSlotId of
Nothing -> targetF btcSSlottingVar
Just slot -> pure $ alternative slot
getCurrentSlotTestDefault :: TestSlottingContext ctx m => m (Maybe SlotId)
getCurrentSlotTestDefault = testSlottingHelper getCurrentSlotSimple Just
getCurrentSlotBlockingTestDefault :: TestSlottingContext ctx m => m SlotId
getCurrentSlotBlockingTestDefault = testSlottingHelper getCurrentSlotBlockingSimple identity
getCurrentSlotInaccurateTestDefault :: TestSlottingContext ctx m => m SlotId
getCurrentSlotInaccurateTestDefault = testSlottingHelper getCurrentSlotInaccurateSimple identity
currentTimeSlottingTestDefault :: SimpleSlottingMode ctx m => m Timestamp
currentTimeSlottingTestDefault = currentTimeSlottingSimple
instance (HasConfiguration, MonadSlotsData ctx BlockTestMode)
=> MonadSlots ctx BlockTestMode where
getCurrentSlot = getCurrentSlotTestDefault
getCurrentSlotBlocking = getCurrentSlotBlockingTestDefault
getCurrentSlotInaccurate = getCurrentSlotInaccurateTestDefault
currentTimeSlotting = currentTimeSlottingTestDefault
instance HasConfiguration => MonadDBRead BlockTestMode where
dbGet = DB.dbGetPureDefault
dbIterSource = DB.dbIterSourcePureDefault
dbGetSerBlock = DB.dbGetSerBlockPureDefault
dbGetSerUndo = DB.dbGetSerUndoPureDefault
instance HasConfiguration => MonadDB BlockTestMode where
dbPut = DB.dbPutPureDefault
dbWriteBatch = DB.dbWriteBatchPureDefault
dbDelete = DB.dbDeletePureDefault
dbPutSerBlund = DB.dbPutSerBlundPureDefault
-- | GState data is read through the default (DB-backed) implementation.
instance HasConfiguration => MonadGState BlockTestMode where
    gsAdoptedBVData = gsAdoptedBVDataDefault

-- | Adopted block version data simply re-exposes 'gsAdoptedBVData'.
instance HasConfiguration => HasAdoptedBlockVersionData BlockTestMode where
    adoptedBVData = gsAdoptedBVData

-- | Block listener callbacks are stubs: the test mode does not react to
-- block application/rollback events.
instance MonadBListener BlockTestMode where
    onApplyBlocks = onApplyBlocksStub
    onRollbackBlocks = onRollbackBlocksStub

-- | No known peers are reported in the test mode.
instance MonadFormatPeers BlockTestMode where
    formatKnownPeers _ = pure Nothing

-- | The test mode carries no mempool extension.
type instance MempoolExt BlockTestMode = EmptyMempoolExt
-- | Local transaction processing inside the block-generation wrapper:
-- uses the real normalization, and 'txProcessTransactionNoLock' -- the
-- variant that skips taking the tx-processing lock.
instance HasConfigurations => MonadTxpLocal (BlockGenMode EmptyMempoolExt BlockTestMode) where
    txpNormalize = withCompileInfo def $ txNormalize
    txpProcessTx = withCompileInfo def $ txProcessTransactionNoLock

-- | Identical wiring directly in 'BlockTestMode'.
instance HasConfigurations => MonadTxpLocal BlockTestMode where
    txpNormalize = withCompileInfo def $ txNormalize
    txpProcessTx = withCompileInfo def $ txProcessTransactionNoLock
|
3da0bf64a0c026858bb4b4bb0ce3cc6c52d2f54d0be7b075e7f863b66ee3a60d | RyanMcG/Cadence | routes.clj | (ns cadence.routes
(:require [noir.response :as response]
(cemerick [friend :as friend]
[drawbridge :as drawbridge])
(compojure [route :as route] [core :refer :all])
[cadence.security :refer [wrap-anonymous-only]]
(cadence.views [response-codes :as response-codes]
[landing :as views-landing]
[training :as views-training]
[admin :as views-admin]
[user :as views-user])))
;; Admin-only routes; authorization is applied where these are mounted
;; (see `app-routes`).  The Drawbridge nREPL handler is created once,
;; at route-definition time, and reused for every "/repl" request.
(defroutes admin-routes
  (let [nrepl-handler (drawbridge/ring-handler)]
    (ANY "/repl" [] nrepl-handler))
  (GET "/migrations" [] views-admin/migrations)
  (POST "/migrations" [] views-admin/post-migrations))
;; Routes for authenticated users (role check applied in `app-routes`).
;; The "/auth/as/:crypt-user-id" pair constrains the id to a 10-40
;; character hex string via an inline route regex; the plain "/auth"
;; pair handles the non-parameterized case.
(defroutes user-routes
  (GET ["/auth/as/:crypt-user-id" :crypt-user-id #"^[\da-fA-F]{10,40}$"]
       [] views-training/auth)
  (POST ["/auth/as/:crypt-user-id" :crypt-user-id #"^[\da-fA-F]{10,40}$"]
        [] views-training/auth-check)
  (GET "/auth" [] views-training/auth)
  (POST "/auth" [] views-training/auth-check)
  (GET "/training" [] views-training/training)
  (POST "/training" [] views-training/training-post)
  (GET "/profile/:username" [] views-user/profile)
  (GET "/profile" [] views-user/profile-base)
  (ANY "/logout" [] views-user/logout))
;; Top-level handler: public pages, the role-guarded user/admin
;; sub-routes, static resources, and a catch-all 404.
(defroutes app-routes
  (GET "/" [] views-landing/root)
  ;; friend/wrap-authorize restricts each context to the given role set.
  (context "/user" [] (friend/wrap-authorize user-routes
                                             #{:cadence.security/user}))
  (context "/admin" [] (friend/wrap-authorize admin-routes
                                              #{:cadence.security/admin}))
  ;; Login/signup are only reachable when *not* logged in; the extra
  ;; arguments form the flash message shown to already-authenticated users.
  (GET "/login" [] (wrap-anonymous-only views-user/login
                                        "You must "
                                        [:a {:href "/user/logout"} "logout"]
                                        " before you can log in again."))
  (GET "/signup" [] (wrap-anonymous-only views-user/signup
                                         "You must "
                                         [:a {:href "/user/logout"}
                                          "logout"]
                                         " before creating an account."))
  (POST "/signup" [] (wrap-anonymous-only views-user/signup-check
                                          "You must "
                                          [:a {:href "/user/logout"}
                                           "logout"]
                                          " before creating an account."))
  (route/resources "/")
  ;; Redirect "/doc..." to the docs index -- except "/docs/index.html"
  ;; itself, excluded by the negative lookahead in the regex.
  (ANY ["/doc:anything" :anything #"^(?!s/index.html).*$"] []
       (response/redirect "/docs/index.html"))
  (route/not-found response-codes/not-found))
| null | https://raw.githubusercontent.com/RyanMcG/Cadence/c7364cba7e2de48c8a0b90f0f4d16a8248c097d4/src/cadence/routes.clj | clojure | (ns cadence.routes
(:require [noir.response :as response]
(cemerick [friend :as friend]
[drawbridge :as drawbridge])
(compojure [route :as route] [core :refer :all])
[cadence.security :refer [wrap-anonymous-only]]
(cadence.views [response-codes :as response-codes]
[landing :as views-landing]
[training :as views-training]
[admin :as views-admin]
[user :as views-user])))
(defroutes admin-routes
(let [nrepl-handler (drawbridge/ring-handler)]
(ANY "/repl" [] nrepl-handler))
(GET "/migrations" [] views-admin/migrations)
(POST "/migrations" [] views-admin/post-migrations))
(defroutes user-routes
(GET ["/auth/as/:crypt-user-id" :crypt-user-id #"^[\da-fA-F]{10,40}$"]
[] views-training/auth)
(POST ["/auth/as/:crypt-user-id" :crypt-user-id #"^[\da-fA-F]{10,40}$"]
[] views-training/auth-check)
(GET "/auth" [] views-training/auth)
(POST "/auth" [] views-training/auth-check)
(GET "/training" [] views-training/training)
(POST "/training" [] views-training/training-post)
(GET "/profile/:username" [] views-user/profile)
(GET "/profile" [] views-user/profile-base)
(ANY "/logout" [] views-user/logout))
(defroutes app-routes
(GET "/" [] views-landing/root)
(context "/user" [] (friend/wrap-authorize user-routes
#{:cadence.security/user}))
(context "/admin" [] (friend/wrap-authorize admin-routes
#{:cadence.security/admin}))
(GET "/login" [] (wrap-anonymous-only views-user/login
"You must "
[:a {:href "/user/logout"} "logout"]
" before you can log in again."))
(GET "/signup" [] (wrap-anonymous-only views-user/signup
"You must "
[:a {:href "/user/logout"}
"logout"]
" before creating an account."))
(POST "/signup" [] (wrap-anonymous-only views-user/signup-check
"You must "
[:a {:href "/user/logout"}
"logout"]
" before creating an account."))
(route/resources "/")
(ANY ["/doc:anything" :anything #"^(?!s/index.html).*$"] []
(response/redirect "/docs/index.html"))
(route/not-found response-codes/not-found))
| |
8e03444d036c992869394a6ace1a76ea8bec39fc8911a8719586ab68df7c5b2f | Chris00/ANSITerminal | ANSITerminal_unix.ml | File : ANSITerminal_unix.ml
Allow colors , cursor movements , erasing , ... under Unix shells .
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Copyright 2004 by Troestler .
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public License
version 3 as published by the Free Software Foundation , with the
special exception on linking described in file LICENSE .
This library is distributed in the hope that it will be useful , but
WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the file
LICENSE for more details .
Allow colors, cursor movements, erasing,... under Unix shells.
*********************************************************************
Copyright 2004 by Troestler Christophe
Christophe.Troestler(at)umons.ac.be
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public License
version 3 as published by the Free Software Foundation, with the
special exception on linking described in file LICENSE.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file
LICENSE for more details.
*)
* See the file(s ) ctlseqs . * ( unix ; in Debian package xspecs )
CSI = " \027 [ " ( ESC [ )
man
CSI = "\027[" (ESC [)
man console_codes
*)
(* man tty(4) *)
open Printf
open Scanf
include ANSITerminal_common
let isatty = ref Unix.isatty
let is_out_channel_atty ch = !isatty(Unix.descr_of_out_channel ch)
(* Cursor movement. *)
(* [set_cursor x y] moves the cursor to column [x], line [y] (1-based).
   A non-positive coordinate means "keep the current one":
   - only [y] given: CSI d (VPA, vertical position absolute);
   - only [x] given: CSI G (CHA, cursor horizontal absolute);
   - both given:     CSI H (CUP, cursor position).
   Does nothing when stdout is not a terminal. *)
let set_cursor x y =
  if is_out_channel_atty stdout then (
    if x <= 0 then (if y > 0 then printf "\027[%id%!" y)
    else (* x > 0 *) if y <= 0 then printf "\027[%iG%!" x
    else printf "\027[%i;%iH%!" y x
  )
(* [move_cursor x y] moves the cursor relative to its current position:
   positive [x] moves right (CSI C), negative moves left (CSI D);
   positive [y] moves down (CSI B), negative moves up (CSI A).
   Does nothing when stdout is not a terminal. *)
let move_cursor x y =
  if is_out_channel_atty stdout then (
    if x > 0 then printf "\027[%iC%!" x
    else if x < 0 then printf "\027[%iD%!" (-x);
    if y > 0 then printf "\027[%iB%!" y
    else if y < 0 then printf "\027[%iA%!" (-y)
  )
(* Save the current cursor position (CSI s); see [restore_cursor]. *)
let save_cursor () =
  if is_out_channel_atty stdout then printf "\027[s%!"

(* Restore the position saved by [save_cursor] (CSI u). *)
let restore_cursor () =
  if is_out_channel_atty stdout then printf "\027[u%!"

(* Move to the beginning of the current line with a plain carriage
   return -- no escape sequence, so this also works on non-terminals. *)
let move_bol () = print_string "\r"; flush stdout
(* Inspired by the tty driver source listing (tty_8c-source.html). *)
(* [send_and_read_response fdin query fmt f] writes the escape sequence
   [query] to the terminal [fdin], reads the terminal's reply one byte
   at a time, and parses it with [sscanf fmt f].
   The terminal is temporarily put in a raw, non-echoing mode and a
   1-second SIGALRM is armed as a timeout; the original terminal
   attributes and SIGALRM handler are restored on every exit path.
   Raises [Failure "ANSITerminal.input_answer"] on timeout or when the
   reply does not fit the 127-byte buffer. *)
let send_and_read_response fdin query fmt f =
  let alarm = ref false in
  let set_alarm (_:int) = alarm := true in
  let old_alarm = Sys.signal Sys.sigalrm (Sys.Signal_handle set_alarm) in
  let tty = Unix.tcgetattr fdin in
  (* Raw mode: no input translation, no flow control, no echo, no
     signal generation; [c_vmin = 1] makes [read] return per byte. *)
  Unix.tcsetattr fdin Unix.TCSANOW { tty with
    Unix.c_ignbrk = false; c_brkint = false; c_parmrk = false;
    c_istrip = false; c_inlcr = false; c_igncr = false; c_icrnl = false;
    c_ixon = false; c_opost = true;
    c_csize = 8; c_parenb = false; c_icanon = false; c_isig = false;
    c_echo = false; c_echonl = false;
    c_vmin = 1; c_vtime = 0 };
  let restore() =
    ignore(Unix.alarm 0);                  (* cancel any pending alarm *)
    Unix.tcsetattr fdin Unix.TCSANOW tty;  (* original terminal state  *)
    Sys.set_signal Sys.sigalrm old_alarm in
  let buf = Bytes.make 127 '\000' in
  (* FIXME: make it more robust so that it ignores previous key pressed. *)
  let rec get_answer pos =
    let l = Unix.read fdin buf pos 1 in
    let buf = Bytes.unsafe_to_string buf in (* local use only *)
    try sscanf buf fmt f (* bail out as soon as enough info is present *)
    with Scan_failure _ ->
      if !alarm || pos = 126 then failwith "ANSITerminal.input_answer"
      else if buf.[pos] = '\000' then get_answer pos
      else get_answer (pos + l) in
  try
    ignore(Unix.write fdin query 0 (Bytes.length query));
    ignore(Unix.alarm 1);  (* 1-second timeout for the terminal reply *)
    let r = get_answer 0 in
    restore();
    r
  with e ->
    restore();
    raise e
(* Query Cursor Position <ESC>[6n *)
(* Report Cursor Position <ESC>[{ROW};{COLUMN}R *)
let pos_cursor_query = Bytes.of_string "\027[6n"

(* [pos_cursor ()] asks the terminal for the cursor position and
   returns it as [(column, row)], both 1-based (note the swap: the
   terminal reports row first).  Raises [Failure] when stdout is not a
   terminal or the terminal does not answer in time. *)
let pos_cursor () =
  if is_out_channel_atty stdout then (
    try
      send_and_read_response Unix.stdin pos_cursor_query
        "\027[%d;%dR" (fun y x -> (x,y))
    with _ -> failwith "ANSITerminal.pos_cursor"
  )
  else failwith "ANSITerminal.pos_cursor: not a TTY"
(* See also the output of 'resize -s x y' (e.g. in an Emacs shell). *)
(* [resize width height] asks the terminal emulator to resize itself to
   [width] columns by [height] lines (xterm window manipulation,
   CSI 8 ; height ; width t).  Raises [Invalid_argument] on
   non-positive dimensions; no-op when stdout is not a terminal. *)
let resize width height =
  if is_out_channel_atty stdout then (
    if width <= 0 then invalid_arg "ANSITerminal.resize: width <= 0";
    if height <= 0 then invalid_arg "ANSITerminal.resize: height <= 0";
    printf "\027[8;%i;%it%!" height width
  )
FIXME : what about the following recipe :
If you run
echo -e " \e[18 t "
then xterm will respond with a line of the form
ESC [ 8 ; height ; width t
It generates this line as if it were typed input , so it can then be
read by your program on stdin .
If you run
echo -e "\e[18t"
then xterm will respond with a line of the form
ESC [ 8 ; height ; width t
It generates this line as if it were typed input, so it can then be
read by your program on stdin. *)
(* C stub returning the terminal dimensions as an integer pair --
   presumably (width, height) via an ioctl; TODO confirm against the
   accompanying C file. *)
external size_ : Unix.file_descr -> int * int = "ANSITerminal_term_size"

(* [size ()] returns the dimensions of the terminal attached to stdin.
   Raises [Failure] when stdin is not a terminal. *)
let size () =
  if !isatty Unix.stdin then (
    size_ Unix.stdin
  )
  else failwith "ANSITerminal.size: not a TTY"
(* Erasing *)
(* [erase loc] clears part of the display relative to the cursor:
   [Eol] to the end of the line, [Above]/[Below] the cursor, or the
   whole [Screen].  No-op when stdout is not a terminal. *)
let erase loc =
  if is_out_channel_atty stdout then begin
    let sequence =
      match loc with
      | Eol -> "\027[K"
      | Above -> "\027[1J"
      | Below -> "\027[0J"
      | Screen -> "\027[2J"
    in
    print_string sequence;
    flush stdout
  end
(* Scrolling *)
(* [scroll lines] scrolls the display: a positive count scrolls the
   text up (CSI S), a negative count scrolls it down (CSI T); zero is
   a no-op, as is a non-terminal stdout. *)
let scroll lines =
  if is_out_channel_atty stdout then (
    if lines > 0 then printf "\027[%iS%!" lines
    else if lines < 0 then printf "\027[%iT%!" (- lines)
  )
(* Map a style to its numeric SGR (Select Graphic Rendition) parameter,
   as a string.  Foreground colors occupy 30..39 and background colors
   40..49, with identical per-color offsets, so both arms share one
   helper. *)
let style_to_string =
  let color_code base = function
    | Black -> base
    | Red -> base + 1
    | Green -> base + 2
    | Yellow -> base + 3
    | Blue -> base + 4
    | Magenta -> base + 5
    | Cyan -> base + 6
    | White -> base + 7
    | Default -> base + 9
  in
  function
  | Reset -> "0"
  | Bold -> "1"
  | Underlined -> "4"
  | Blink -> "5"
  | Inverse -> "7"
  | Hidden -> "8"
  | Foreground color -> string_of_int (color_code 30 color)
  | Background color -> string_of_int (color_code 40 color)
(* [print_with pr ~tty style txt] prints [txt] through the printing
   function [pr], preceded by the SGR escape sequence for [style] and,
   when the [autoreset] flag is set, followed by the reset sequence.
   Escapes are only emitted when [tty] is true, so redirected output
   stays free of control sequences. *)
let print_with pr ~tty style txt =
  if tty then (
    pr "\027[";
    pr (String.concat ";" (List.map style_to_string style));
    pr "m";
  );
  pr txt;
  if tty && !autoreset then pr "\027[0m"
(* Print [txt] on stdout with [style]; escapes only when it is a tty.
   (The [print_string] passed to [print_with] is the stdlib one: this
   definition is not [rec], so shadowing starts only after it.) *)
let print_string style txt =
  print_with print_string style txt ~tty:(is_out_channel_atty stdout)

(* Same as [print_string], on stderr. *)
let prerr_string style txt =
  print_with prerr_string style txt ~tty:(is_out_channel_atty stderr)

(* printf-style formatting followed by styled output on stdout/stderr. *)
let printf style = ksprintf (print_string style)
let eprintf style = ksprintf (prerr_string style)
(* [to_string style txt] returns [txt] wrapped in the SGR escape
   sequence for [style] (plus the reset sequence when [autoreset] is
   set) instead of printing it. *)
let to_string style txt =
  let codes = String.concat ";" (List.map style_to_string style) in
  let styled = "\027[" ^ codes ^ "m" ^ txt in
  if !autoreset then styled ^ "\027[0m" else styled

(* printf-style formatting that returns the styled string. *)
let sprintf style = ksprintf (to_string style)
| null | https://raw.githubusercontent.com/Chris00/ANSITerminal/21c434122c376382cc8c0a3cb7c678984b96e1c6/src/ANSITerminal_unix.ml | ocaml | man tty(4)
x > 0
FIXME: make it more robust so that it ignores previous key pressed.
local use only
bail out as soon as enough info is present
Query Cursor Position <ESC>[6n
Report Cursor Position <ESC>[{ROW};{COLUMN}R
Erasing
Scrolling | File : ANSITerminal_unix.ml
Allow colors , cursor movements , erasing , ... under Unix shells .
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Copyright 2004 by Troestler .
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public License
version 3 as published by the Free Software Foundation , with the
special exception on linking described in file LICENSE .
This library is distributed in the hope that it will be useful , but
WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the file
LICENSE for more details .
Allow colors, cursor movements, erasing,... under Unix shells.
*********************************************************************
Copyright 2004 by Troestler Christophe
Christophe.Troestler(at)umons.ac.be
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public License
version 3 as published by the Free Software Foundation, with the
special exception on linking described in file LICENSE.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file
LICENSE for more details.
*)
* See the file(s ) ctlseqs . * ( unix ; in Debian package xspecs )
CSI = " \027 [ " ( ESC [ )
man
CSI = "\027[" (ESC [)
man console_codes
*)
open Printf
open Scanf
include ANSITerminal_common
let isatty = ref Unix.isatty
let is_out_channel_atty ch = !isatty(Unix.descr_of_out_channel ch)
Cursor
let set_cursor x y =
if is_out_channel_atty stdout then (
if x <= 0 then (if y > 0 then printf "\027[%id%!" y)
else printf "\027[%i;%iH%!" y x
)
let move_cursor x y =
if is_out_channel_atty stdout then (
if x > 0 then printf "\027[%iC%!" x
else if x < 0 then printf "\027[%iD%!" (-x);
if y > 0 then printf "\027[%iB%!" y
else if y < 0 then printf "\027[%iA%!" (-y)
)
let save_cursor () =
if is_out_channel_atty stdout then printf "\027[s%!"
let restore_cursor () =
if is_out_channel_atty stdout then printf "\027[u%!"
let move_bol () = print_string "\r"; flush stdout
Inpired by and
-5/tty_8c-source.html
-5/tty_8c-source.html *)
let send_and_read_response fdin query fmt f =
let alarm = ref false in
let set_alarm (_:int) = alarm := true in
let old_alarm = Sys.signal Sys.sigalrm (Sys.Signal_handle set_alarm) in
let tty = Unix.tcgetattr fdin in
Unix.tcsetattr fdin Unix.TCSANOW { tty with
Unix.c_ignbrk = false; c_brkint = false; c_parmrk = false;
c_istrip = false; c_inlcr = false; c_igncr = false; c_icrnl = false;
c_ixon = false; c_opost = true;
c_csize = 8; c_parenb = false; c_icanon = false; c_isig = false;
c_echo = false; c_echonl = false;
c_vmin = 1; c_vtime = 0 };
let restore() =
ignore(Unix.alarm 0);
Unix.tcsetattr fdin Unix.TCSANOW tty;
Sys.set_signal Sys.sigalrm old_alarm in
let buf = Bytes.make 127 '\000' in
let rec get_answer pos =
let l = Unix.read fdin buf pos 1 in
with Scan_failure _ ->
if !alarm || pos = 126 then failwith "ANSITerminal.input_answer"
else if buf.[pos] = '\000' then get_answer pos
else get_answer (pos + l) in
try
ignore(Unix.write fdin query 0 (Bytes.length query));
ignore(Unix.alarm 1);
let r = get_answer 0 in
restore();
r
with e ->
restore();
raise e
let pos_cursor_query = Bytes.of_string "\027[6n"
let pos_cursor () =
if is_out_channel_atty stdout then (
try
send_and_read_response Unix.stdin pos_cursor_query
"\027[%d;%dR" (fun y x -> (x,y))
with _ -> failwith "ANSITerminal.pos_cursor"
)
else failwith "ANSITerminal.pos_cursor: not a TTY"
See also the output of ' resize -s x y ' ( e.g. in an Emacs shell ) .
let resize width height =
if is_out_channel_atty stdout then (
if width <= 0 then invalid_arg "ANSITerminal.resize: width <= 0";
if height <= 0 then invalid_arg "ANSITerminal.resize: height <= 0";
printf "\027[8;%i;%it%!" height width
)
FIXME : what about the following recipe :
If you run
echo -e " \e[18 t "
then xterm will respond with a line of the form
ESC [ 8 ; height ; width t
It generates this line as if it were typed input , so it can then be
read by your program on stdin .
If you run
echo -e "\e[18t"
then xterm will respond with a line of the form
ESC [ 8 ; height ; width t
It generates this line as if it were typed input, so it can then be
read by your program on stdin. *)
external size_ : Unix.file_descr -> int * int = "ANSITerminal_term_size"
let size () =
if !isatty Unix.stdin then (
size_ Unix.stdin
)
else failwith "ANSITerminal.size: not a TTY"
let erase loc =
if is_out_channel_atty stdout then (
print_string (match loc with
| Eol -> "\027[K"
| Above -> "\027[1J"
| Below ->"\027[0J"
| Screen -> "\027[2J");
flush stdout
)
let scroll lines =
if is_out_channel_atty stdout then (
if lines > 0 then printf "\027[%iS%!" lines
else if lines < 0 then printf "\027[%iT%!" (- lines)
)
let style_to_string = function
| Reset -> "0"
| Bold -> "1"
| Underlined -> "4"
| Blink -> "5"
| Inverse -> "7"
| Hidden -> "8"
| Foreground Black -> "30"
| Foreground Red -> "31"
| Foreground Green -> "32"
| Foreground Yellow -> "33"
| Foreground Blue -> "34"
| Foreground Magenta -> "35"
| Foreground Cyan -> "36"
| Foreground White -> "37"
| Foreground Default -> "39"
| Background Black -> "40"
| Background Red -> "41"
| Background Green -> "42"
| Background Yellow -> "43"
| Background Blue -> "44"
| Background Magenta -> "45"
| Background Cyan -> "46"
| Background White -> "47"
| Background Default -> "49"
let print_with pr ~tty style txt =
if tty then (
pr "\027[";
pr (String.concat ";" (List.map style_to_string style));
pr "m";
);
pr txt;
if tty && !autoreset then pr "\027[0m"
let print_string style txt =
print_with print_string style txt ~tty:(is_out_channel_atty stdout)
let prerr_string style txt =
print_with prerr_string style txt ~tty:(is_out_channel_atty stderr)
let printf style = ksprintf (print_string style)
let eprintf style = ksprintf (prerr_string style)
let to_string style txt =
let s = "\027["
^ String.concat ";" (List.map style_to_string style)
^ "m"
^ txt in
if !autoreset then s ^ "\027[0m" else s
let sprintf style = ksprintf (to_string style)
|
ac5bca48eea54df9d2ec9b32974d8717b3f783c930bf3723fdcea7ca3feaf322 | pascal-knodel/haskell-craft | E'12'44.hs | --
--
--
------------------
Exercise 12.44 .
------------------
--
--
--
module E'12'44 where
| null | https://raw.githubusercontent.com/pascal-knodel/haskell-craft/c03d6eb857abd8b4785b6de075b094ec3653c968/_/links/E'12'44.hs | haskell |
----------------
----------------
| Exercise 12.44 .
module E'12'44 where
|
a82b0858261cbe33680c91576f9d2d23d1371b501b12b53f0bb2f3b9f25419ca | ssadler/zeno | Shuffle.hs |
module Zeno.Notariser.Shuffle where
import Control.Monad
import Control.Monad.Reader
import Data.Bits
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Word
import Data.ByteString.Short (unpack)
import Data.FixedBytes
import Data.Serialize (encode)
import Network.Bitcoin (sha256b)
import UnliftIO
import Zeno.Consensus
-- | Shuffle a list using the round seed.  The seed is the SHA-256 hash
-- of the current 'StepId' (round id, step number, retry counter), so
-- the permutation is deterministic for a given step.  Lists of 0x10000
-- elements or more are rejected with 'error', because indexes are
-- drawn from 16-bit words; 'take' bounds the length check so it also
-- terminates on infinite lists.
roundShuffle :: MonadIO m => [a] -> Consensus m [a]
roundShuffle items = do
  when (length (take 0x10000 items) == 0x10000) do
    error "distribute: items too long"
  RoundData{..} <- ask
  s <- StepId roundId <$> readIORef mutStepNum <*> readIORef mutStepRetry
  pure $ shuffleWithWords items . infWord16 . infBytes $ sha256b $ encode s
-- | List shuffle that takes a random series of 16 bit words.  In order
-- to select a random element from the list, it skips random inputs
-- that do not produce indexes within range when truncated.  This means
-- that we throw out up to half of the random inputs, but the selection
-- is unbiased.
shuffleWithWords :: [a] -> [Word16] -> [a]
shuffleWithWords [] _ = []
shuffleWithWords _ [] = error "shuffleWithWords has no more words"
shuffleWithWords items (word:words) =
  let limit = length items - 1            -- valid indexes are 0..n-1
      mask = 2 ^ hibit limit - 1          -- mask for the necessary bits
      idx = fromIntegral $ word .&. mask  -- take necessary bits from word
  in if idx > limit
       then shuffleWithWords items words  -- skip if not within range
       else
         let (a, chosen:b) = splitAt idx items
         in chosen : shuffleWithWords (a ++ b) words
-- | Get high bit from a number: the 1-based position of the highest
-- set bit, i.e. how many bits are needed to represent it (0 for 0).
-- Only meaningful for non-negative arguments.
hibit :: (Bits n, Num n) => n -> Int
hibit = go 0
  where
    go acc 0 = acc
    go acc i = go (acc + 1) (shiftR i 1)
-- | Generate an infinite series of bytes from a hash: the bytes of the
-- hash, then the bytes of its re-hash, and so on.
infBytes :: Bytes32 -> [Word8]
infBytes = concatMap (unpack . unFixed) . f
  where f s = s : f (sha256b $ fromFixed s)
-- | Convert a series of bytes to big-endian 16-bit words, two bytes
-- per word.  Intended for the infinite stream from 'infBytes', but now
-- also total on finite input: a leftover odd byte is dropped instead
-- of crashing on the previously non-exhaustive pattern match.
infWord16 :: [Word8] -> [Word16]
infWord16 = go . map fromIntegral
  where
    go (hi : lo : rest) = shift hi 8 + lo : go rest
    go _                = []
-- For demonstration purposes -------------------------------------------------------

-- | Run the shuffle @n@ times from a fixed seed (the 'minBound' hash)
-- and count how often each element of @items@ comes out first -- a
-- quick empirical check of the selection's uniformity.
demoShuffle :: Ord a => [a] -> Int -> Map.Map a Int
demoShuffle items n =
  Map.fromListWith (+) $
    inner items n (drop 10 $ infWord16 $ infBytes minBound)
  where
    inner _ 0 _ = mempty
    inner items n words =
      let r = shuffleWithWords items words
          -- Advance past the words one shuffle may have consumed --
          -- two per element; TODO confirm this bound is sufficient.
          nextWords = drop (length items * 2) words
      in (head r, 1) : inner items (n-1) nextWords
| null | https://raw.githubusercontent.com/ssadler/zeno/9f715d7104a7b7b00dee9fe35275fb217532fdb6/src/Zeno/Notariser/Shuffle.hs | haskell | | Shuffle a list using the round seed
a random element from the list, it skips random inputs that do not produce
of the random inputs, but the selection is unbiased.
valid indexes are 0..n-1
mask for neccesary bits
take neccesary bits from word
skip if not within range
| Get high bit from a number
| Generate an infinite series of bytes from hash
| Convert infinite series of bytes to words
For demonstration purposes ------------------------------------------------------- |
module Zeno.Notariser.Shuffle where
import Control.Monad
import Control.Monad.Reader
import Data.Bits
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Word
import Data.ByteString.Short (unpack)
import Data.FixedBytes
import Data.Serialize (encode)
import Network.Bitcoin (sha256b)
import UnliftIO
import Zeno.Consensus
roundShuffle :: MonadIO m => [a] -> Consensus m [a]
roundShuffle items = do
when (length (take 0x10000 items) == 0x10000) do
error "distribute: items too long"
RoundData{..} <- ask
s <- StepId roundId <$> readIORef mutStepNum <*> readIORef mutStepRetry
pure $ shuffleWithWords items . infWord16 . infBytes $ sha256b $ encode s
List shuffle that takes a random series of 16 bit words . In order to select
indexes within range when truncated . This means that we throw out up to half
shuffleWithWords :: [a] -> [Word16] -> [a]
shuffleWithWords [] _ = []
shuffleWithWords _ [] = error "shuffleWithWords has no more words"
shuffleWithWords items (word:words) =
in if idx > limit
else
let (a, chosen:b) = splitAt idx items
in chosen : shuffleWithWords (a ++ b) words
hibit :: (Bits n, Num n) => n -> Int
hibit 0 = 0
hibit i = 1 + hibit (shiftR i 1)
infBytes :: Bytes32 -> [Word8]
infBytes = concatMap (unpack . unFixed) . f
where f s = s : f (sha256b $ fromFixed s)
infWord16 :: [Word8] -> [Word16]
infWord16 = f . map fromIntegral
where f (a:b:xs) = shift a 8 + b : f xs
demoShuffle :: Ord a => [a] -> Int -> Map.Map a Int
demoShuffle items n =
Map.fromListWith (+) $
inner items n (drop 10 $ infWord16 $ infBytes minBound)
where
inner _ 0 _ = mempty
inner items n words =
let r = shuffleWithWords items words
nextWords = drop (length items * 2) words
in (head r, 1) : inner items (n-1) nextWords
|
2c8a7fd8e7d4e9ca257470d3061eb357e507848e7779def3e851365443717006 | evrim/core-server | defclass+.lisp | (in-package :core-server)
;; +-------------------------------------------------------------------------
;; | Defining Classes with defclass+
;; +-------------------------------------------------------------------------
;; A class with one server-side slot (:host local) and one client-side
;; slot (:host remote).  Only the remote slot appears in the generated
;; JavaScript object, as the commented dump below shows.
(defclass+ a ()
  ((slot-in-the-server :host local :initform "I can win")
   (slot-in-the-client :host remote :initform (jobject :foo "bar"
                                                       :moo "tar")))
  (:ctor make-a))
;; {
;; "coreClass": {
;; "slotInTheClient": {
;; "name": 'slotInTheClient',
;; "type": 'primitive',
;; "label": 'Slot In The Client' }
;; }
;; ,
;; "slotInTheClient": {
;; "foo": 'bar',
;; "moo": 'tar' }
;; }
(let ((a (make-a :slot-in-the-server "win can I")))
(with-js (a) (make-indented-stream *core-output*)
a))
;; +-------------------------------------------------------------------------
;; | Defining Components with defcomponent
;; +-------------------------------------------------------------------------
;; A component extending A.  GET-TIME runs on the server
;; (defmethod/local); INIT runs in the browser (defmethod/remote) and
;; reaches GET-TIME through the generated RPC stub, as the commented
;; JavaScript below (funkall/getTime) shows.
(defcomponent b (a)
  ()
  (:ctor make-b))

(defmethod/local get-time ((self b))
  (get-universal-time))

(defmethod/remote init ((self b))
  (alert (+ "Hello, i'm B, time is :"
            (date-to-string
             (list-date-to-javascript (get-time self))))))
(let ((b (make-b :slot-in-the-server "win can I")))
(with-js (b) (make-indented-stream *core-output*)
b))
;;function (toExtend, k11) {
var k11 = k11 || window.k ;
;; var g2 = toExtend || new Object();
;; extend({
: makeMethod(function ( k1108 ) {
;; var self = self || this;
self._destroy = function ( k1114 ) {
;; return k1114(this);
;; };
;; return self.destroy(k1108);
;; }),
;; _destroy: makeMethod(function (k1100) {
;; var self = self || this;
;; addToGc(function (k1104) {
return + ' ? s : ' + ' unbound - session - id ' , ' b - YiGBSxYe ' ) ) ;
;; });
;; return k1100(true);
;; }),
;; funkall: makeMethod(function (action, args, k191) {
;; var k191 = k191 || window.k;
;; var self = self || this;
;; return funcallCc(self.url + action + '$', args, function (g93) {
if ( ' function ' = = ( ) ) {
return g93(self , k191 ) ;
;; } else {
;; return k191(g93);
;; };
;; });
;; }),
: function ( k187 ) {
;; var self = self || this;
return self.funkall('?s : ' + ' unbound - session - id ' + ' $ k : ' + ' b - YiGBSxYe ' + ' $ method : ' + ' GET - TIME ' , { } , ;
;; }
} , ) ;
;; mapobject(function (k, v, k158) {
var k158 = k158 || window.k ;
if ( ( ! ( ( ' undefined ' = = ( v ) ) || ( null = = = v ) ) & & ( ( '' = = = g2[k ] ) || ( ( ' undefined ' = = ( typeof g2[k ] ) ) || ( null = = = g2[k ] ) ) || ( ' undefined ' = = = g2[k ] ) ) ) || ( ' undefined ' = = = ( typeof g2[k ] ) ) ) {
;; return k158(g2[k] = v);
;; } else {
;; return k158(null);
;; };
;; }, {
;; slotInTheClient: {
;; "foo": 'bar',
;; "moo": 'tar' }
;; ,
;; url: null
;; });
;; g2.ctor = arguments.callee;
;; return apply(makeMethod(function (k147) {
;; var self = self || this;
return self.getTime(function ( g52 ) {
;; return k147(alert('Hello, i\'m B, time is :' + dateToString(listDateToJavascript(g52))));
;; });
} ) , , null , function ( value9 ) {
g2.destroy = composeProg1Cc(makeMethod(function ( k128 ) {
;; var self = self || this;
var g34 = self._destroy ;
if ( ! ( ( ' undefined ' = = ( g34 ) ) || ( null = = = g34 ) ) ) {
return self._destroy(function ( value37 ) {
;; removeSlots(self, new Array('_destroy'));
removeSlots(self , new Array('destroy ' ) ) ;
;; return k128(self);
;; });
;; } else {
removeSlots(self , new Array('destroy ' ) ) ;
return k128(self ) ;
;; };
;; }), g2.destroy);
g2._destroy = makeMethod(function ( k115 ) {
;; var self = self || this;
;; addToGc(function (k119) {
return k119(new Array('TEST - COMPONENT - DESTROY.core ' + ' ? s : ' + ' unbound - session - id ' , ' b - YiGBSxYe ' ) ) ;
;; });
;; return k115(true);
;; });
;; return k11(g2);
;; });
;; }
(let ((b (make-b :slot-in-the-server "win can I")))
(with-js (b) (make-indented-stream *core-output*)
b))
;; +-------------------------------------------------------------------------
;; | Inheritance
;; +-------------------------------------------------------------------------
(defcomponent c (b)
()
(:ctor make-c))
(defmethod/remote init ((self c))
(call-next-method self)
(alert (+ "I am C.")))
;; function (toExtend, k11) {
var k11 = k11 || window.k ;
;; var g2 = toExtend || new Object();
;; extend({
: makeMethod(function ( k1113 ) {
;; var self = self || this;
self._destroy = function ( k1119 ) {
;; return k1119(this);
;; };
;; return self.destroy(k1113);
;; }),
;; _destroy: makeMethod(function (k1105) {
;; var self = self || this;
;; addToGc(function (k1109) {
return + ' ? s : ' + ' unbound - session - id ' , ' c - NHVQvjPb ' ) ) ;
;; });
;; return k1105(true);
;; }),
funkall : makeMethod(function ( action , args , ) {
var k196 = k196 || window.k ;
;; var self = self || this;
;; return funcallCc(self.url + action + '$', args, function (g98) {
if ( ' function ' = = ( ) ) {
return g98(self , ) ;
;; } else {
;; return k196(g98);
;; };
;; });
;; }),
: function ( k192 ) {
;; var self = self || this;
;; return self.funkall('?s:' + 'unbound-session-id' + '$k:' + 'c-NHVQvjPb' + '$method:' + 'GET-TIME', {}, k192);
;; }
} , ) ;
mapobject(function ( k , v , k163 ) {
var k163 = k163 || window.k ;
if ( ( ! ( ( ' undefined ' = = ( v ) ) || ( null = = = v ) ) & & ( ( '' = = = g2[k ] ) || ( ( ' undefined ' = = ( typeof g2[k ] ) ) || ( null = = = g2[k ] ) ) || ( ' undefined ' = = = g2[k ] ) ) ) || ( ' undefined ' = = = ( typeof g2[k ] ) ) ) {
;; return k163(g2[k] = v);
;; } else {
;; return k163(null);
;; };
;; }, {
;; slotInTheClient: {
;; "foo": 'bar',
;; "moo": 'tar' }
;; ,
;; url: null
;; });
;; g2.ctor = arguments.callee;
;; return apply(makeMethod(function (k147) {
;; var self = self || this;
return self.getTime(function ( g57 ) {
alert('Hello , i\'m B , time is : ' + dateToString(listDateToJavascript(g57 ) ) ) ;
;; return k147(alert('I am C.'));
;; });
} ) , , null , function ( value9 ) {
g2.destroy = composeProg1Cc(makeMethod(function ( k128 ) {
;; var self = self || this;
var g34 = self._destroy ;
if ( ! ( ( ' undefined ' = = ( g34 ) ) || ( null = = = g34 ) ) ) {
return self._destroy(function ( value37 ) {
;; removeSlots(self, new Array('_destroy'));
removeSlots(self , new Array('destroy ' ) ) ;
;; return k128(self);
;; });
;; } else {
removeSlots(self , new Array('destroy ' ) ) ;
return k128(self ) ;
;; };
;; }), g2.destroy);
g2._destroy = makeMethod(function ( k115 ) {
;; var self = self || this;
;; addToGc(function (k119) {
return k119(new Array('TEST - COMPONENT - DESTROY.core ' + ' ? s : ' + ' unbound - session - id ' , ' c - NHVQvjPb ' ) ) ;
;; });
;; return k115(true);
;; });
;; return k11(g2);
;; });
;; }
(let ((c (make-c :slot-in-the-server "win can I")))
(with-js (c) (make-indented-stream *core-output*)
c))
(let ((b (make-b :slot-in-the-server "win can I")))
(with-js (b) (make-indented-stream *core-output*)
(with-call/cc
(call/cc b
(extend (jobject :slot-in-the-client "remote!")
(<:input :type "text" :name "field1")))))) | null | https://raw.githubusercontent.com/evrim/core-server/200ea8151d2f8d81b593d605b183a9cddae1e82d/examples/defclass%2B.lisp | lisp | +-------------------------------------------------------------------------
| Defining Classes with defclass+
+-------------------------------------------------------------------------
{
"coreClass": {
"slotInTheClient": {
"name": 'slotInTheClient',
"type": 'primitive',
"label": 'Slot In The Client' }
}
,
"slotInTheClient": {
"foo": 'bar',
"moo": 'tar' }
}
+-------------------------------------------------------------------------
| Defining Components with defcomponent
+-------------------------------------------------------------------------
function (toExtend, k11) {
var g2 = toExtend || new Object();
extend({
var self = self || this;
return k1114(this);
};
return self.destroy(k1108);
}),
_destroy: makeMethod(function (k1100) {
var self = self || this;
addToGc(function (k1104) {
});
return k1100(true);
}),
funkall: makeMethod(function (action, args, k191) {
var k191 = k191 || window.k;
var self = self || this;
return funcallCc(self.url + action + '$', args, function (g93) {
} else {
return k191(g93);
};
});
}),
var self = self || this;
}
mapobject(function (k, v, k158) {
return k158(g2[k] = v);
} else {
return k158(null);
};
}, {
slotInTheClient: {
"foo": 'bar',
"moo": 'tar' }
,
url: null
});
g2.ctor = arguments.callee;
return apply(makeMethod(function (k147) {
var self = self || this;
return k147(alert('Hello, i\'m B, time is :' + dateToString(listDateToJavascript(g52))));
});
var self = self || this;
removeSlots(self, new Array('_destroy'));
return k128(self);
});
} else {
};
}), g2.destroy);
var self = self || this;
addToGc(function (k119) {
});
return k115(true);
});
return k11(g2);
});
}
+-------------------------------------------------------------------------
| Inheritance
+-------------------------------------------------------------------------
function (toExtend, k11) {
var g2 = toExtend || new Object();
extend({
var self = self || this;
return k1119(this);
};
return self.destroy(k1113);
}),
_destroy: makeMethod(function (k1105) {
var self = self || this;
addToGc(function (k1109) {
});
return k1105(true);
}),
var self = self || this;
return funcallCc(self.url + action + '$', args, function (g98) {
} else {
return k196(g98);
};
});
}),
var self = self || this;
return self.funkall('?s:' + 'unbound-session-id' + '$k:' + 'c-NHVQvjPb' + '$method:' + 'GET-TIME', {}, k192);
}
return k163(g2[k] = v);
} else {
return k163(null);
};
}, {
slotInTheClient: {
"foo": 'bar',
"moo": 'tar' }
,
url: null
});
g2.ctor = arguments.callee;
return apply(makeMethod(function (k147) {
var self = self || this;
return k147(alert('I am C.'));
});
var self = self || this;
removeSlots(self, new Array('_destroy'));
return k128(self);
});
} else {
};
}), g2.destroy);
var self = self || this;
addToGc(function (k119) {
});
return k115(true);
});
return k11(g2);
});
} | (in-package :core-server)
(defclass+ a ()
((slot-in-the-server :host local :initform "I can win")
(slot-in-the-client :host remote :initform (jobject :foo "bar"
:moo "tar")))
(:ctor make-a))
(let ((a (make-a :slot-in-the-server "win can I")))
(with-js (a) (make-indented-stream *core-output*)
a))
(defcomponent b (a)
()
(:ctor make-b))
(defmethod/local get-time ((self b))
(get-universal-time))
(defmethod/remote init ((self b))
(alert (+ "Hello, i'm B, time is :"
(date-to-string
(list-date-to-javascript (get-time self))))))
(let ((b (make-b :slot-in-the-server "win can I")))
(with-js (b) (make-indented-stream *core-output*)
b))
: makeMethod(function ( k1108 ) {
self._destroy = function ( k1114 ) {
if ( ' function ' = = ( ) ) {
: function ( k187 ) {
if ( ( ! ( ( ' undefined ' = = ( v ) ) || ( null = = = v ) ) & & ( ( '' = = = g2[k ] ) || ( ( ' undefined ' = = ( typeof g2[k ] ) ) || ( null = = = g2[k ] ) ) || ( ' undefined ' = = = g2[k ] ) ) ) || ( ' undefined ' = = = ( typeof g2[k ] ) ) ) {
return self.getTime(function ( g52 ) {
} ) , , null , function ( value9 ) {
g2.destroy = composeProg1Cc(makeMethod(function ( k128 ) {
if ( ! ( ( ' undefined ' = = ( g34 ) ) || ( null = = = g34 ) ) ) {
return self._destroy(function ( value37 ) {
g2._destroy = makeMethod(function ( k115 ) {
(let ((b (make-b :slot-in-the-server "win can I")))
(with-js (b) (make-indented-stream *core-output*)
b))
(defcomponent c (b)
()
(:ctor make-c))
(defmethod/remote init ((self c))
(call-next-method self)
(alert (+ "I am C.")))
: makeMethod(function ( k1113 ) {
self._destroy = function ( k1119 ) {
funkall : makeMethod(function ( action , args , ) {
if ( ' function ' = = ( ) ) {
: function ( k192 ) {
mapobject(function ( k , v , k163 ) {
if ( ( ! ( ( ' undefined ' = = ( v ) ) || ( null = = = v ) ) & & ( ( '' = = = g2[k ] ) || ( ( ' undefined ' = = ( typeof g2[k ] ) ) || ( null = = = g2[k ] ) ) || ( ' undefined ' = = = g2[k ] ) ) ) || ( ' undefined ' = = = ( typeof g2[k ] ) ) ) {
return self.getTime(function ( g57 ) {
} ) , , null , function ( value9 ) {
g2.destroy = composeProg1Cc(makeMethod(function ( k128 ) {
if ( ! ( ( ' undefined ' = = ( g34 ) ) || ( null = = = g34 ) ) ) {
return self._destroy(function ( value37 ) {
g2._destroy = makeMethod(function ( k115 ) {
(let ((c (make-c :slot-in-the-server "win can I")))
(with-js (c) (make-indented-stream *core-output*)
c))
(let ((b (make-b :slot-in-the-server "win can I")))
(with-js (b) (make-indented-stream *core-output*)
(with-call/cc
(call/cc b
(extend (jobject :slot-in-the-client "remote!")
(<:input :type "text" :name "field1")))))) |
81735426ee1db38ea799f9c9c2e3aca3d4c1a0a4df4eac0f8901557fc007c2ed | footprintanalytics/footprint-web | load_and_dump_test.clj | (ns metabase.cmd.load-and-dump-test
(:require [clojure.java.io :as io]
[clojure.test :refer :all]
[metabase.cmd.compare-h2-dbs :as compare-h2-dbs]
[metabase.cmd.copy.h2 :as copy.h2]
[metabase.cmd.dump-to-h2 :as dump-to-h2]
[metabase.cmd.load-from-h2 :as load-from-h2]
[metabase.cmd.test-util :as cmd.test-util]
[metabase.db.connection :as mdb.connection]
[metabase.db.spec :as mdb.spec]
[metabase.db.test-util :as mdb.test-util]
[metabase.driver :as driver]
[metabase.models.setting :as setting]
[metabase.test :as mt]
[metabase.test.data.interface :as tx]
[metabase.util.i18n.impl :as i18n.impl]))
(defn- abs-path
[path]
(.getAbsolutePath (io/file path)))
(deftest load-and-dump-test
(testing "Loading of data from h2 to DB and migrating back to H2"
(let [h2-fixture-db-file @cmd.test-util/fixture-db-file-path
h2-file (abs-path "/tmp/out.db")
db-name "dump-test"]
(mt/test-drivers #{:mysql :postgres :h2}
(copy.h2/delete-existing-h2-database-files! h2-file)
(let [data-source (mdb.test-util/->ClojureJDBCSpecDataSource
(if (= driver/*driver* :h2)
{:subprotocol "h2"
:subname (format "mem:%s;DB_CLOSE_DELAY=10" (mt/random-name))
:classname "org.h2.Driver"}
(let [details (tx/dbdef->connection-details driver/*driver* :db {:database-name db-name})]
(mdb.spec/spec driver/*driver* details))))]
(binding [setting/*disable-cache* true
mdb.connection/*application-db* (mdb.connection/application-db driver/*driver* data-source)]
(with-redefs [i18n.impl/site-locale-from-setting-fn (atom (constantly false))]
(when-not (= driver/*driver* :h2)
(tx/create-db! driver/*driver* {:database-name db-name}))
(load-from-h2/load-from-h2! h2-fixture-db-file)
(dump-to-h2/dump-to-h2! h2-file)
(is (not (compare-h2-dbs/different-contents?
h2-file
h2-fixture-db-file))))))))))
| null | https://raw.githubusercontent.com/footprintanalytics/footprint-web/d3090d943dd9fcea493c236f79e7ef8a36ae17fc/test/metabase/cmd/load_and_dump_test.clj | clojure | (ns metabase.cmd.load-and-dump-test
(:require [clojure.java.io :as io]
[clojure.test :refer :all]
[metabase.cmd.compare-h2-dbs :as compare-h2-dbs]
[metabase.cmd.copy.h2 :as copy.h2]
[metabase.cmd.dump-to-h2 :as dump-to-h2]
[metabase.cmd.load-from-h2 :as load-from-h2]
[metabase.cmd.test-util :as cmd.test-util]
[metabase.db.connection :as mdb.connection]
[metabase.db.spec :as mdb.spec]
[metabase.db.test-util :as mdb.test-util]
[metabase.driver :as driver]
[metabase.models.setting :as setting]
[metabase.test :as mt]
[metabase.test.data.interface :as tx]
[metabase.util.i18n.impl :as i18n.impl]))
(defn- abs-path
[path]
(.getAbsolutePath (io/file path)))
(deftest load-and-dump-test
(testing "Loading of data from h2 to DB and migrating back to H2"
(let [h2-fixture-db-file @cmd.test-util/fixture-db-file-path
h2-file (abs-path "/tmp/out.db")
db-name "dump-test"]
(mt/test-drivers #{:mysql :postgres :h2}
(copy.h2/delete-existing-h2-database-files! h2-file)
(let [data-source (mdb.test-util/->ClojureJDBCSpecDataSource
(if (= driver/*driver* :h2)
{:subprotocol "h2"
:subname (format "mem:%s;DB_CLOSE_DELAY=10" (mt/random-name))
:classname "org.h2.Driver"}
(let [details (tx/dbdef->connection-details driver/*driver* :db {:database-name db-name})]
(mdb.spec/spec driver/*driver* details))))]
(binding [setting/*disable-cache* true
mdb.connection/*application-db* (mdb.connection/application-db driver/*driver* data-source)]
(with-redefs [i18n.impl/site-locale-from-setting-fn (atom (constantly false))]
(when-not (= driver/*driver* :h2)
(tx/create-db! driver/*driver* {:database-name db-name}))
(load-from-h2/load-from-h2! h2-fixture-db-file)
(dump-to-h2/dump-to-h2! h2-file)
(is (not (compare-h2-dbs/different-contents?
h2-file
h2-fixture-db-file))))))))))
| |
4b10339cfd91a3f669ddbaedcfe24d690da89a7d82c648c0dfc74efea95010c1 | fpco/ide-backend | BuildPaths.hs | -----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.BuildPaths
Copyright : 2003 - 2004 ,
2008
--
-- Maintainer :
-- Portability : portable
--
A bunch of dirs , paths and file names used for intermediate build steps .
--
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are
met :
* Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
* Redistributions in binary form must reproduce the above
copyright notice , this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution .
* Neither the name of nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.BuildPaths (
defaultDistPref, srcPref,
hscolourPref, haddockPref,
autogenModulesDir,
autogenModuleName,
cppHeaderName,
haddockName,
mkLibName,
mkProfLibName,
mkSharedLibName,
exeExtension,
objExtension,
dllExtension,
) where
import System.FilePath ((</>), (<.>))
import Distribution.Package
( PackageIdentifier, packageName )
import Distribution.ModuleName (ModuleName)
import qualified Distribution.ModuleName as ModuleName
import Distribution.Compiler
( CompilerId(..) )
import Distribution.PackageDescription (PackageDescription)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(buildDir))
import Distribution.Simple.Setup (defaultDistPref)
import Distribution.Text
( display )
import Distribution.System (OS(..), buildOS)
-- ---------------------------------------------------------------------------
-- Build directories and files
srcPref :: FilePath -> FilePath
srcPref distPref = distPref </> "src"
hscolourPref :: FilePath -> PackageDescription -> FilePath
hscolourPref = haddockPref
haddockPref :: FilePath -> PackageDescription -> FilePath
haddockPref distPref pkg_descr
= distPref </> "doc" </> "html" </> display (packageName pkg_descr)
-- |The directory in which we put auto-generated modules
autogenModulesDir :: LocalBuildInfo -> String
autogenModulesDir lbi = buildDir lbi </> "autogen"
cppHeaderName :: String
cppHeaderName = "cabal_macros.h"
-- |The name of the auto-generated module associated with a package
autogenModuleName :: PackageDescription -> ModuleName
autogenModuleName pkg_descr =
ModuleName.fromString $
"Paths_" ++ map fixchar (display (packageName pkg_descr))
where fixchar '-' = '_'
fixchar c = c
haddockName :: PackageDescription -> FilePath
haddockName pkg_descr = display (packageName pkg_descr) <.> "haddock"
-- ---------------------------------------------------------------------------
Library file names
mkLibName :: PackageIdentifier -> String
mkLibName lib = "libHS" ++ display lib <.> "a"
mkProfLibName :: PackageIdentifier -> String
mkProfLibName lib = "libHS" ++ display lib ++ "_p" <.> "a"
-- Implement proper name mangling for dynamical shared objects
-- libHS<packagename>-<compilerFlavour><compilerVersion>
-- e.g. libHSbase-2.1-ghc6.6.1.so
mkSharedLibName :: PackageIdentifier -> CompilerId -> String
mkSharedLibName lib (CompilerId compilerFlavor compilerVersion)
= "libHS" ++ display lib ++ "-" ++ comp <.> dllExtension
where comp = display compilerFlavor ++ display compilerVersion
-- ------------------------------------------------------------
-- * Platform file extensions
-- ------------------------------------------------------------
ToDo : This should be determined via autoconf ( AC_EXEEXT )
-- | Extension for executable files
( typically @\"\"@ on Unix and @\"exe\"@ on Windows or OS\/2 )
exeExtension :: String
exeExtension = case buildOS of
Windows -> "exe"
_ -> ""
ToDo : This should be determined via autoconf ( AC_OBJEXT )
| Extension for object files . For GHC and NHC the extension is @\"o\"@.
-- Hugs uses either @\"o\"@ or @\"obj\"@ depending on the used C compiler.
objExtension :: String
objExtension = "o"
-- | Extension for dynamically linked (or shared) libraries
( typically @\"so\"@ on Unix and @\"dll\"@ on Windows )
dllExtension :: String
dllExtension = case buildOS of
Windows -> "dll"
OSX -> "dylib"
_ -> "so"
| null | https://raw.githubusercontent.com/fpco/ide-backend/860636f2d0e872e9481569236bce690637e0016e/ide-backend/TestSuite/inputs/Cabal-1.14.0/Distribution/Simple/BuildPaths.hs | haskell | ---------------------------------------------------------------------------
|
Module : Distribution.Simple.BuildPaths
Maintainer :
Portability : portable
---------------------------------------------------------------------------
Build directories and files
|The directory in which we put auto-generated modules
|The name of the auto-generated module associated with a package
---------------------------------------------------------------------------
Implement proper name mangling for dynamical shared objects
libHS<packagename>-<compilerFlavour><compilerVersion>
e.g. libHSbase-2.1-ghc6.6.1.so
------------------------------------------------------------
* Platform file extensions
------------------------------------------------------------
| Extension for executable files
Hugs uses either @\"o\"@ or @\"obj\"@ depending on the used C compiler.
| Extension for dynamically linked (or shared) libraries | Copyright : 2003 - 2004 ,
2008
A bunch of dirs , paths and file names used for intermediate build steps .
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are
met :
* Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
* Redistributions in binary form must reproduce the above
copyright notice , this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution .
* Neither the name of nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.BuildPaths (
defaultDistPref, srcPref,
hscolourPref, haddockPref,
autogenModulesDir,
autogenModuleName,
cppHeaderName,
haddockName,
mkLibName,
mkProfLibName,
mkSharedLibName,
exeExtension,
objExtension,
dllExtension,
) where
import System.FilePath ((</>), (<.>))
import Distribution.Package
( PackageIdentifier, packageName )
import Distribution.ModuleName (ModuleName)
import qualified Distribution.ModuleName as ModuleName
import Distribution.Compiler
( CompilerId(..) )
import Distribution.PackageDescription (PackageDescription)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(buildDir))
import Distribution.Simple.Setup (defaultDistPref)
import Distribution.Text
( display )
import Distribution.System (OS(..), buildOS)
srcPref :: FilePath -> FilePath
srcPref distPref = distPref </> "src"
hscolourPref :: FilePath -> PackageDescription -> FilePath
hscolourPref = haddockPref
haddockPref :: FilePath -> PackageDescription -> FilePath
haddockPref distPref pkg_descr
= distPref </> "doc" </> "html" </> display (packageName pkg_descr)
autogenModulesDir :: LocalBuildInfo -> String
autogenModulesDir lbi = buildDir lbi </> "autogen"
cppHeaderName :: String
cppHeaderName = "cabal_macros.h"
autogenModuleName :: PackageDescription -> ModuleName
autogenModuleName pkg_descr =
ModuleName.fromString $
"Paths_" ++ map fixchar (display (packageName pkg_descr))
where fixchar '-' = '_'
fixchar c = c
haddockName :: PackageDescription -> FilePath
haddockName pkg_descr = display (packageName pkg_descr) <.> "haddock"
Library file names
mkLibName :: PackageIdentifier -> String
mkLibName lib = "libHS" ++ display lib <.> "a"
mkProfLibName :: PackageIdentifier -> String
mkProfLibName lib = "libHS" ++ display lib ++ "_p" <.> "a"
mkSharedLibName :: PackageIdentifier -> CompilerId -> String
mkSharedLibName lib (CompilerId compilerFlavor compilerVersion)
= "libHS" ++ display lib ++ "-" ++ comp <.> dllExtension
where comp = display compilerFlavor ++ display compilerVersion
ToDo : This should be determined via autoconf ( AC_EXEEXT )
( typically @\"\"@ on Unix and @\"exe\"@ on Windows or OS\/2 )
exeExtension :: String
exeExtension = case buildOS of
Windows -> "exe"
_ -> ""
ToDo : This should be determined via autoconf ( AC_OBJEXT )
| Extension for object files . For GHC and NHC the extension is @\"o\"@.
objExtension :: String
objExtension = "o"
( typically @\"so\"@ on Unix and @\"dll\"@ on Windows )
dllExtension :: String
dllExtension = case buildOS of
Windows -> "dll"
OSX -> "dylib"
_ -> "so"
|
e5125673f06d24d9eec92bdbac74d5cff5205151bb583fa5387656cb53127b85 | xtdb/xtdb | project.clj | (defproject com.xtdb.labs/xtdb-http-health-check "<inherited>"
:description "XTDB health check server"
:plugins [[lein-parent "0.3.8"]]
:parent-project {:path "../../project.clj"
:inherit [:version :repositories :deploy-repositories
:managed-dependencies
:pedantic? :global-vars
:license :url :pom-addition]}
:scm {:dir "../.."}
:dependencies [[org.clojure/clojure]
[com.xtdb/xtdb-core]
[pro.juxt.clojars-mirrors.xtdb/xtdb-http-server-deps "0.0.2"]]
:profiles {:dev {:dependencies [[ch.qos.logback/logback-classic]]}}
:jvm-opts ["-Dlogback.configurationFile=../../resources/logback-test.xml"
"-Dclojure.spec.compile-asserts=true"
"-Dclojure.spec.check-asserts=true"])
| null | https://raw.githubusercontent.com/xtdb/xtdb/2d7da72e3c3f6023760bc9ebce08fda0362c06c0/labs/http-health-check/project.clj | clojure | (defproject com.xtdb.labs/xtdb-http-health-check "<inherited>"
:description "XTDB health check server"
:plugins [[lein-parent "0.3.8"]]
:parent-project {:path "../../project.clj"
:inherit [:version :repositories :deploy-repositories
:managed-dependencies
:pedantic? :global-vars
:license :url :pom-addition]}
:scm {:dir "../.."}
:dependencies [[org.clojure/clojure]
[com.xtdb/xtdb-core]
[pro.juxt.clojars-mirrors.xtdb/xtdb-http-server-deps "0.0.2"]]
:profiles {:dev {:dependencies [[ch.qos.logback/logback-classic]]}}
:jvm-opts ["-Dlogback.configurationFile=../../resources/logback-test.xml"
"-Dclojure.spec.compile-asserts=true"
"-Dclojure.spec.check-asserts=true"])
| |
036a3c3890d8b80b6f7b6bbb63d77ddc70b85ef4b57912eb968cdb4ca8c7dd73 | rabbitmq/rabbitmq-management | rabbit_mgmt_wm_queue_purge.erl | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
%%
-module(rabbit_mgmt_wm_queue_purge).
-export([init/2, resource_exists/2, is_authorized/2, allowed_methods/2,
delete_resource/2]).
-export([variances/2]).
-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
%%--------------------------------------------------------------------
init(Req, _State) ->
{cowboy_rest, rabbit_mgmt_headers:set_common_permission_headers(Req, ?MODULE), #context{}}.
variances(Req, Context) ->
{[<<"accept-encoding">>, <<"origin">>], Req, Context}.
allowed_methods(ReqData, Context) ->
{[<<"DELETE">>, <<"OPTIONS">>], ReqData, Context}.
resource_exists(ReqData, Context) ->
{case rabbit_mgmt_wm_queue:queue(ReqData) of
not_found -> false;
_ -> true
end, ReqData, Context}.
delete_resource(ReqData, Context) ->
Name = rabbit_mgmt_util:id(queue, ReqData),
rabbit_mgmt_util:direct_request(
'queue.purge',
fun rabbit_mgmt_format:format_accept_content/1,
[{queue, Name}], "Error purging queue: ~s", ReqData, Context).
is_authorized(ReqData, Context) ->
rabbit_mgmt_util:is_authorized_vhost(ReqData, Context).
| null | https://raw.githubusercontent.com/rabbitmq/rabbitmq-management/543906f01ccd0344aff648f21bb6b5156b2a2ca2/src/rabbit_mgmt_wm_queue_purge.erl | erlang |
-------------------------------------------------------------------- | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
-module(rabbit_mgmt_wm_queue_purge).
-export([init/2, resource_exists/2, is_authorized/2, allowed_methods/2,
delete_resource/2]).
-export([variances/2]).
-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
init(Req, _State) ->
{cowboy_rest, rabbit_mgmt_headers:set_common_permission_headers(Req, ?MODULE), #context{}}.
variances(Req, Context) ->
{[<<"accept-encoding">>, <<"origin">>], Req, Context}.
allowed_methods(ReqData, Context) ->
{[<<"DELETE">>, <<"OPTIONS">>], ReqData, Context}.
resource_exists(ReqData, Context) ->
{case rabbit_mgmt_wm_queue:queue(ReqData) of
not_found -> false;
_ -> true
end, ReqData, Context}.
delete_resource(ReqData, Context) ->
Name = rabbit_mgmt_util:id(queue, ReqData),
rabbit_mgmt_util:direct_request(
'queue.purge',
fun rabbit_mgmt_format:format_accept_content/1,
[{queue, Name}], "Error purging queue: ~s", ReqData, Context).
is_authorized(ReqData, Context) ->
rabbit_mgmt_util:is_authorized_vhost(ReqData, Context).
|
711b9af35a85de86d40fa87562dcee56b269cb6699de2bfafb5998f19ed16a40 | byorgey/haskell-course | InClass.hs |
-- f :: a -> a -> a
-- f x y = x + y
g : : a - > a - > a
g x y = x & & y
g x y =
case ( ) of
Int - > x + y
Bool - > x & & y
_ - > x
g :: a -> a -> a
g x y = x && y
g x y =
case (typeOf x) of
Int -> x + y
Bool -> x && y
_ -> x
-}
Parametric polymorphism
f :: a -> a -> a
f x y = y
f' :: a -> a -> a
f' x y = x
f'' :: a -> a -> a
f'' x y = -- error "don't call me"
f'' x y
f1 :: a -> a
f1 x = x
f2 :: a -> b
f2 x = f2 x
f3 :: a -> b -> a
f3 x _ = x
f4 :: [a] -> [a]
f4 x = x
f4' = reverse
f4'' = take 10
f4''' xs = xs ++ xs
f5 :: (b -> c) -> (a -> b) -> (a -> c)
f5 = (.)
f6 :: (a -> a) -> a -> a
f6 f x = f x
f6' _ x = x
f6'' f x = f (f x)
f6''' f = f
f6'''' f = f . f
-- Type classes
class Eq a where
(= =) : : a - > a - > = = y = not ( x /= y )
( /= ) : : a - > a - > /= y = not ( x = = y )
class Eq a where
(==) :: a -> a -> Bool
x == y = not (x /= y)
(/=) :: a -> a -> Bool
x /= y = not (x == y)
-}
data Foo = F Int | G (Char -> Int)
deriving (Eq)
instance where
(= =) ( F x ) ( F y ) = x = = y
( G x ) = = ( G y ) = x = = y
_ = = _ = False
instance Eq Foo where
(==) (F x) (F y) = x == y
(G x) == (G y) = x == y
_ == _ = False
-}
instance Eq a => Eq [a] where
(x:xs) == (y:ys) = x == y && xs == ys
[] == [] = True
_ == _ = False
data Maybe a =
Nothing
| Just a
class Foo a b where
bar :: a -> b -> Bool
baz :: a -> a -> a
read :: String -> a | null | https://raw.githubusercontent.com/byorgey/haskell-course/f928cc8031aec2147f037949826134a484414ed6/weeks/05-type-classes/InClass.hs | haskell | f :: a -> a -> a
f x y = x + y
error "don't call me"
Type classes |
g : : a - > a - > a
g x y = x & & y
g x y =
case ( ) of
Int - > x + y
Bool - > x & & y
_ - > x
g :: a -> a -> a
g x y = x && y
g x y =
case (typeOf x) of
Int -> x + y
Bool -> x && y
_ -> x
-}
Parametric polymorphism
f :: a -> a -> a
f x y = y
f' :: a -> a -> a
f' x y = x
f'' :: a -> a -> a
f'' x y
f1 :: a -> a
f1 x = x
f2 :: a -> b
f2 x = f2 x
f3 :: a -> b -> a
f3 x _ = x
f4 :: [a] -> [a]
f4 x = x
f4' = reverse
f4'' = take 10
f4''' xs = xs ++ xs
f5 :: (b -> c) -> (a -> b) -> (a -> c)
f5 = (.)
f6 :: (a -> a) -> a -> a
f6 f x = f x
f6' _ x = x
f6'' f x = f (f x)
f6''' f = f
f6'''' f = f . f
class Eq a where
(= =) : : a - > a - > = = y = not ( x /= y )
( /= ) : : a - > a - > /= y = not ( x = = y )
class Eq a where
(==) :: a -> a -> Bool
x == y = not (x /= y)
(/=) :: a -> a -> Bool
x /= y = not (x == y)
-}
data Foo = F Int | G (Char -> Int)
deriving (Eq)
instance where
(= =) ( F x ) ( F y ) = x = = y
( G x ) = = ( G y ) = x = = y
_ = = _ = False
instance Eq Foo where
(==) (F x) (F y) = x == y
(G x) == (G y) = x == y
_ == _ = False
-}
instance Eq a => Eq [a] where
(x:xs) == (y:ys) = x == y && xs == ys
[] == [] = True
_ == _ = False
data Maybe a =
Nothing
| Just a
class Foo a b where
bar :: a -> b -> Bool
baz :: a -> a -> a
read :: String -> a |
a6315f85e90af5e4a04e062071fff27f579185ad29e67584414d6daa8ded8578 | lambe-lang/compiler | type_check_test.ml | open Lambe_ast
open Lambe_checker
let lambe_type = Alcotest.testable Lambe_render.Type.pp ( = )
let should_synthetize_int_type () =
let expected = Result.Ok (Type.Variable "int")
and computed = Type_check.synthetize () Term.(Literal (Integer 1)) in
Alcotest.(check (result lambe_type string))
"should_synthetize_int_type" expected computed
let should_synthetize_float_type () =
let expected = Result.Ok (Type.Variable "float")
and computed = Type_check.synthetize () Term.(Literal (Float 1.)) in
Alcotest.(check (result lambe_type string))
"should_synthetize_float_type" expected computed
let should_synthetize_string_type () =
let expected = Result.Ok (Type.Variable "string")
and computed = Type_check.synthetize () Term.(Literal (String "1")) in
Alcotest.(check (result lambe_type string))
"should_synthetize_string_type" expected computed
let should_synthetize_char_type () =
let expected = Result.Ok (Type.Variable "char")
and computed = Type_check.synthetize () Term.(Literal (Char '1')) in
Alcotest.(check (result lambe_type string))
"should_synthetize_char_type" expected computed
let test_cases =
let open Alcotest in
( "Type_check"
, [
test_case "Should synthesize int type" `Quick should_synthetize_int_type
; test_case "Should synthesize float type" `Quick
should_synthetize_float_type
; test_case "Should synthesize string type" `Quick
should_synthetize_string_type
; test_case "Should synthesize char type" `Quick should_synthetize_char_type
] )
| null | https://raw.githubusercontent.com/lambe-lang/compiler/79d7937c06ca30e231855ec4ce99012ca0395cd5/attic/test/checker/type_check_test.ml | ocaml | open Lambe_ast
open Lambe_checker
let lambe_type = Alcotest.testable Lambe_render.Type.pp ( = )
let should_synthetize_int_type () =
let expected = Result.Ok (Type.Variable "int")
and computed = Type_check.synthetize () Term.(Literal (Integer 1)) in
Alcotest.(check (result lambe_type string))
"should_synthetize_int_type" expected computed
let should_synthetize_float_type () =
let expected = Result.Ok (Type.Variable "float")
and computed = Type_check.synthetize () Term.(Literal (Float 1.)) in
Alcotest.(check (result lambe_type string))
"should_synthetize_float_type" expected computed
let should_synthetize_string_type () =
let expected = Result.Ok (Type.Variable "string")
and computed = Type_check.synthetize () Term.(Literal (String "1")) in
Alcotest.(check (result lambe_type string))
"should_synthetize_string_type" expected computed
let should_synthetize_char_type () =
let expected = Result.Ok (Type.Variable "char")
and computed = Type_check.synthetize () Term.(Literal (Char '1')) in
Alcotest.(check (result lambe_type string))
"should_synthetize_char_type" expected computed
let test_cases =
let open Alcotest in
( "Type_check"
, [
test_case "Should synthesize int type" `Quick should_synthetize_int_type
; test_case "Should synthesize float type" `Quick
should_synthetize_float_type
; test_case "Should synthesize string type" `Quick
should_synthetize_string_type
; test_case "Should synthesize char type" `Quick should_synthetize_char_type
] )
| |
dc4ed318a0b2ff993a26e8a2e1bdf4c4df743974b237af1325c54e7321f7aba6 | nasa/Common-Metadata-Repository | service.clj | (ns cmr.indexer.data.concepts.service
"Contains functions to parse and convert service and service association concepts."
(:require
[clojure.string :as string]
[cmr.common.mime-types :as mt]
[cmr.common.util :as util]
[cmr.indexer.data.concept-parser :as concept-parser]
[cmr.indexer.data.concepts.service-keyword-util :as service-keyword-util]
[cmr.indexer.data.concepts.association-util :as assoc-util]
[cmr.indexer.data.elasticsearch :as es]
[cmr.transmit.metadata-db :as mdb]))
(defmethod es/parsed-concept->elastic-doc :service
[_context concept parsed-concept]
(let [{:keys [concept-id revision-id deleted provider-id native-id user-id
revision-date format extra-fields service-associations generic-associations]} concept
{:keys [service-name]} extra-fields
long-name (:LongName parsed-concept)
service-type (:Type parsed-concept)
schema-keys [:LongName
:Name
:Type
:Version
:AncillaryKeywords
:ContactGroups
:ContactPersons
:URL
:ServiceKeywords
:ServiceOrganizations]
keyword-values (service-keyword-util/concept-keys->keyword-text
parsed-concept schema-keys)
all-assocs (concat service-associations generic-associations)]
(if deleted
{:concept-id concept-id
:revision-id revision-id
:deleted deleted
:service-name service-name
:service-name-lowercase (string/lower-case service-name)
:provider-id provider-id
:provider-id-lowercase (string/lower-case provider-id)
:native-id native-id
:native-id-lowercase (string/lower-case native-id)
:keyword keyword-values
:user-id user-id
:revision-date revision-date}
{:concept-id concept-id
:revision-id revision-id
:deleted deleted
:service-name service-name
:service-name-lowercase (string/lower-case service-name)
:service-type-lowercase (string/lower-case service-type)
:long-name long-name
:long-name-lowercase (string/lower-case long-name)
:provider-id provider-id
:provider-id-lowercase (string/lower-case provider-id)
:native-id native-id
:native-id-lowercase (string/lower-case native-id)
:keyword keyword-values
:user-id user-id
:revision-date revision-date
:metadata-format (name (mt/format-key format))
:associations-gzip-b64 (assoc-util/associations->gzip-base64-str all-assocs concept-id)})))
(defn- service-associations->service-concepts
"Returns the service concepts for the given service associations."
[context service-associations]
(let [service-concept-ids (map :service-concept-id service-associations)
service-concepts (mdb/get-latest-concepts context service-concept-ids true)]
(filter #(not (:deleted %)) service-concepts)))
(defn- has-formats?
"Returns true if the given service has more than one supported formats value."
[service]
(let [format-pairs (get-in service [:ServiceOptions :SupportedReformattings])
input-formats (distinct (map :SupportedInputFormat format-pairs))
output-formats (distinct (mapcat :SupportedOutputFormats format-pairs))
distinct-input-output (distinct (concat input-formats output-formats))]
(and (not (zero? (count output-formats)))
(> (count distinct-input-output) 1))))
(defn- has-subset-type?
"Returns true if the given service has a defined Subset with one of its
values matches the given subset type."
[service subset-type]
(let [{{subset-types :Subset} :ServiceOptions} service]
(and (seq subset-types)
(-> subset-types
(subset-type)
(some?)))))
(defn- has-spatial-subsetting?
"Returns true if the given service has a defined SubsetType with one of its
values being 'Spatial'."
[service]
(has-subset-type? service :SpatialSubset))
(defn- has-temporal-subsetting?
"Returns true if the given service has a defined SubsetType with one of its
values being 'Temporal'."
[service]
(has-subset-type? service :TemporalSubset))
(defn- has-variables?
"Returns true if the given service has a defined SubsetType with one of its
values being 'Variable'."
[service]
(has-subset-type? service :VariableSubset))
(defn- has-transforms?
"Returns true if the given service has a defined SubsetTypes or InterpolationTypes,
or multiple supported projections values."
[service]
(let [{service-options :ServiceOptions} service
{interpolation-types :InterpolationTypes
input-projections :SupportedInputProjections
output-projections :SupportedOutputProjections} service-options
supported-projections (distinct (concat
(map :ProjectionName input-projections)
(map :ProjectionName output-projections)))]
(or (seq interpolation-types)
(> (count supported-projections) 1))))
(defn- get-has-features
"Returns the has features for the given services"
[services]
{:has-formats (boolean (some has-formats? services))
:has-transforms (boolean (some has-transforms? services))
:has-variables (boolean (some has-variables? services))
:has-spatial-subsetting (boolean (some has-spatial-subsetting? services))
:has-temporal-subsetting (boolean (some has-temporal-subsetting? services))})
(defn- get-trimmed-has-features
"Returns the has features for the given services with false features trimmed off"
[services]
(->> services
get-has-features
(util/remove-map-keys false?)))
(defn- get-service-features
"Returns the service features for the list of services"
[services]
(let [opendap-services (filter #(= "OPeNDAP" (:Type %)) services)
esi-services (filter #(= "ESI" (:Type %)) services)
harmony-services (filter #(= "Harmony" (:Type %)) services)]
(util/remove-map-keys
empty?
{:opendap (get-trimmed-has-features opendap-services )
:esi (get-trimmed-has-features esi-services)
:harmony (get-trimmed-has-features harmony-services)})))
(defn service-associations->elastic-doc
"Converts the service association into the portion going in the collection elastic document."
[context service-associations]
(let [service-concepts (service-associations->service-concepts context service-associations)
service-names (map #(get-in % [:extra-fields :service-name]) service-concepts)
service-concept-ids (map :concept-id service-concepts)
parsed-services (map #(concept-parser/parse-concept context %) service-concepts)
service-types (map :Type parsed-services)
service-features (get-service-features parsed-services)]
(merge
{:service-names service-names
:service-names-lowercase (map string/lower-case service-names)
:service-concept-ids service-concept-ids
:service-types-lowercase (map string/lower-case service-types)
:service-features-gzip-b64 (when (seq service-features)
(-> service-features
pr-str
util/string->gzip-base64))}
(get-has-features parsed-services))))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/431c8650185deb23583d4c471b772203fc868fe6/indexer-app/src/cmr/indexer/data/concepts/service.clj | clojure | (ns cmr.indexer.data.concepts.service
"Contains functions to parse and convert service and service association concepts."
(:require
[clojure.string :as string]
[cmr.common.mime-types :as mt]
[cmr.common.util :as util]
[cmr.indexer.data.concept-parser :as concept-parser]
[cmr.indexer.data.concepts.service-keyword-util :as service-keyword-util]
[cmr.indexer.data.concepts.association-util :as assoc-util]
[cmr.indexer.data.elasticsearch :as es]
[cmr.transmit.metadata-db :as mdb]))
(defmethod es/parsed-concept->elastic-doc :service
[_context concept parsed-concept]
(let [{:keys [concept-id revision-id deleted provider-id native-id user-id
revision-date format extra-fields service-associations generic-associations]} concept
{:keys [service-name]} extra-fields
long-name (:LongName parsed-concept)
service-type (:Type parsed-concept)
schema-keys [:LongName
:Name
:Type
:Version
:AncillaryKeywords
:ContactGroups
:ContactPersons
:URL
:ServiceKeywords
:ServiceOrganizations]
keyword-values (service-keyword-util/concept-keys->keyword-text
parsed-concept schema-keys)
all-assocs (concat service-associations generic-associations)]
(if deleted
{:concept-id concept-id
:revision-id revision-id
:deleted deleted
:service-name service-name
:service-name-lowercase (string/lower-case service-name)
:provider-id provider-id
:provider-id-lowercase (string/lower-case provider-id)
:native-id native-id
:native-id-lowercase (string/lower-case native-id)
:keyword keyword-values
:user-id user-id
:revision-date revision-date}
{:concept-id concept-id
:revision-id revision-id
:deleted deleted
:service-name service-name
:service-name-lowercase (string/lower-case service-name)
:service-type-lowercase (string/lower-case service-type)
:long-name long-name
:long-name-lowercase (string/lower-case long-name)
:provider-id provider-id
:provider-id-lowercase (string/lower-case provider-id)
:native-id native-id
:native-id-lowercase (string/lower-case native-id)
:keyword keyword-values
:user-id user-id
:revision-date revision-date
:metadata-format (name (mt/format-key format))
:associations-gzip-b64 (assoc-util/associations->gzip-base64-str all-assocs concept-id)})))
(defn- service-associations->service-concepts
"Returns the service concepts for the given service associations."
[context service-associations]
(let [service-concept-ids (map :service-concept-id service-associations)
service-concepts (mdb/get-latest-concepts context service-concept-ids true)]
(filter #(not (:deleted %)) service-concepts)))
(defn- has-formats?
"Returns true if the given service has more than one supported formats value."
[service]
(let [format-pairs (get-in service [:ServiceOptions :SupportedReformattings])
input-formats (distinct (map :SupportedInputFormat format-pairs))
output-formats (distinct (mapcat :SupportedOutputFormats format-pairs))
distinct-input-output (distinct (concat input-formats output-formats))]
(and (not (zero? (count output-formats)))
(> (count distinct-input-output) 1))))
(defn- has-subset-type?
"Returns true if the given service has a defined Subset with one of its
values matches the given subset type."
[service subset-type]
(let [{{subset-types :Subset} :ServiceOptions} service]
(and (seq subset-types)
(-> subset-types
(subset-type)
(some?)))))
(defn- has-spatial-subsetting?
"Returns true if the given service has a defined SubsetType with one of its
values being 'Spatial'."
[service]
(has-subset-type? service :SpatialSubset))
(defn- has-temporal-subsetting?
"Returns true if the given service has a defined SubsetType with one of its
values being 'Temporal'."
[service]
(has-subset-type? service :TemporalSubset))
(defn- has-variables?
"Returns true if the given service has a defined SubsetType with one of its
values being 'Variable'."
[service]
(has-subset-type? service :VariableSubset))
(defn- has-transforms?
"Returns true if the given service has a defined SubsetTypes or InterpolationTypes,
or multiple supported projections values."
[service]
(let [{service-options :ServiceOptions} service
{interpolation-types :InterpolationTypes
input-projections :SupportedInputProjections
output-projections :SupportedOutputProjections} service-options
supported-projections (distinct (concat
(map :ProjectionName input-projections)
(map :ProjectionName output-projections)))]
(or (seq interpolation-types)
(> (count supported-projections) 1))))
(defn- get-has-features
"Returns the has features for the given services"
[services]
{:has-formats (boolean (some has-formats? services))
:has-transforms (boolean (some has-transforms? services))
:has-variables (boolean (some has-variables? services))
:has-spatial-subsetting (boolean (some has-spatial-subsetting? services))
:has-temporal-subsetting (boolean (some has-temporal-subsetting? services))})
(defn- get-trimmed-has-features
"Returns the has features for the given services with false features trimmed off"
[services]
(->> services
get-has-features
(util/remove-map-keys false?)))
(defn- get-service-features
"Returns the service features for the list of services"
[services]
(let [opendap-services (filter #(= "OPeNDAP" (:Type %)) services)
esi-services (filter #(= "ESI" (:Type %)) services)
harmony-services (filter #(= "Harmony" (:Type %)) services)]
(util/remove-map-keys
empty?
{:opendap (get-trimmed-has-features opendap-services )
:esi (get-trimmed-has-features esi-services)
:harmony (get-trimmed-has-features harmony-services)})))
(defn service-associations->elastic-doc
"Converts the service association into the portion going in the collection elastic document."
[context service-associations]
(let [service-concepts (service-associations->service-concepts context service-associations)
service-names (map #(get-in % [:extra-fields :service-name]) service-concepts)
service-concept-ids (map :concept-id service-concepts)
parsed-services (map #(concept-parser/parse-concept context %) service-concepts)
service-types (map :Type parsed-services)
service-features (get-service-features parsed-services)]
(merge
{:service-names service-names
:service-names-lowercase (map string/lower-case service-names)
:service-concept-ids service-concept-ids
:service-types-lowercase (map string/lower-case service-types)
:service-features-gzip-b64 (when (seq service-features)
(-> service-features
pr-str
util/string->gzip-base64))}
(get-has-features parsed-services))))
| |
ad24cd75b8921de564c0c3c64f7ac1c1db704f23896d1073521e2412e687f9ca | johnwhitington/ocamli | c.ml | let _ = ()
| null | https://raw.githubusercontent.com/johnwhitington/ocamli/28da5d87478a51583a6cb792bf3a8ee44b990e9f/ppx_interpret/example/c.ml | ocaml | let _ = ()
| |
a94780f2000d7f203e4bbad10f92f48809e08c28fef4113ce11dd4d9e9670f57 | formal-land/coq-of-ocaml | modules.ml | module List2 = struct
type 'a t =
| Nil
| Cons of 'a * 'a t
let rec sum (l : int t) : int =
match l with
| Nil -> 0
| Cons (x, xs) -> x + sum xs
let rec of_list = function
| [] -> Nil
| x :: xs -> Cons (x, of_list xs)
module Inside = struct
let x = 12
end
end
let n _ = List2.sum (List2.of_list [5; 7; 6; List2.Inside.x])
module Syn = List2.Inside
let xx = Syn.x
| null | https://raw.githubusercontent.com/formal-land/coq-of-ocaml/c9c86b08eb19d7fd023f48029cc5f9bf53f6a11c/tests/modules.ml | ocaml | module List2 = struct
type 'a t =
| Nil
| Cons of 'a * 'a t
let rec sum (l : int t) : int =
match l with
| Nil -> 0
| Cons (x, xs) -> x + sum xs
let rec of_list = function
| [] -> Nil
| x :: xs -> Cons (x, of_list xs)
module Inside = struct
let x = 12
end
end
let n _ = List2.sum (List2.of_list [5; 7; 6; List2.Inside.x])
module Syn = List2.Inside
let xx = Syn.x
| |
bbf074e9a643f9385c95226e92cb23361b21f52b5e0ee14813159603d03de1fc | lexi-lambda/litpub | jsexpr.rkt | #lang racket/base
(require json
web-server/http)
(provide response/jsexpr)
(define APPLICATION/JSON-CONTENT-TYPE #"application/json; charset=utf-8")
(define (response/jsexpr jsexpr
#:code [code 200]
#:message [message #"OK"]
#:seconds [seconds (current-seconds)]
#:mime-type [mime-type APPLICATION/JSON-CONTENT-TYPE]
#:headers [headers '()])
(response/full code message seconds mime-type headers
(list (jsexpr->bytes jsexpr))))
| null | https://raw.githubusercontent.com/lexi-lambda/litpub/2f326c1c0e0ee8cad0b8b3f7f7b4a49a02ac62b5/util/jsexpr.rkt | racket | #lang racket/base
(require json
web-server/http)
(provide response/jsexpr)
(define APPLICATION/JSON-CONTENT-TYPE #"application/json; charset=utf-8")
(define (response/jsexpr jsexpr
#:code [code 200]
#:message [message #"OK"]
#:seconds [seconds (current-seconds)]
#:mime-type [mime-type APPLICATION/JSON-CONTENT-TYPE]
#:headers [headers '()])
(response/full code message seconds mime-type headers
(list (jsexpr->bytes jsexpr))))
| |
3632ce8694cdb09a45e6a29edc09955e5c1df46a8c9cf5f59f0be59b28b83770 | zkry/tessellation | tessellation.rkt | #lang racket
(require metapict)
(require (for-syntax racket/list
racket/format))
(define node-size 0.3)
(define (set-scale scale)
(match scale
['small
(set-curve-pict-size 500 500)
(set! node-size 0.03)]
['medium
(set-curve-pict-size 800 800)
(set! node-size 0.03)]
['large
(set-curve-pict-size 1200 1200)
(set! node-size 0.015)]
['x-large
(set-curve-pict-size 2400 2400)
(set! node-size 0.01)]))
(set-scale 'large)
(struct base-grid (points shapes) #:prefab)
(struct filled-curve (curve) #:prefab)
;; How do I define the same thing (constant) for multiple levels?
(define-for-syntax pt-ids
(let ((base-ids (map (lambda (offset) (string (integer->char (+ (char->integer #\a) offset)))) (range 26))))
(append base-ids (for*/list ((i base-ids) (j base-ids)) (~a i j)))))
(define pt-ids
(let ((base-ids (map (lambda (offset) (string (integer->char (+ (char->integer #\a) offset)))) (range 26))))
(append base-ids (for*/list ((i base-ids) (j base-ids)) (~a i j)))))
(define-for-syntax (pt-id stx)
(and (identifier? stx)
(index-of pt-ids (symbol->string (syntax->datum stx)))))
(define (pt-id stx)
(and (identifier? stx)
(index-of pt-ids (symbol->string (syntax->datum stx)))))
(define (pt-equal? a b)
(and (< (abs (- (pt-x a) (pt-x b))) 1e-5)
(< (abs (- (pt-y a) (pt-y b))) 1e-5)))
;; points-deduplicate returns the set of points in a
;; not in b.
(define (pt-deduplicate a b)
(filter (lambda (pa)
(not (for/or ([pb b]) (pt-equal? pa pb))))
a))
(define (map-bez bezs f)
(map
(lambda (b)
(bez (f (bez-p0 b))
(f (bez-p1 b))
(f (bez-p2 b))
(f (bez-p3 b))))
bezs))
(define square-frame
(list
(curve (pt -1 -1) .. (pt -1 1))
(curve (pt -1 1) .. (pt 1 1))
(curve (pt 1 1) .. (pt 1 -1))
(curve (pt -1 -1) .. (pt 1 -1))))
(define (rotate90 c)
(define (rotate90-curve c)
(list c ((rotatedd (- 90)) c)))
(flatten (map rotate90-curve (flatten c))))
(define (rotate45 c)
(define (rotate45-curve c)
(list c ((rotatedd (- 45)) c)))
(flatten (map rotate45-curve (flatten c))))
(define (rotate-curve-lambda angle n)
(lambda (c)
(map (lambda (n) ((rotatedd (* n angle)) c)) (range n))))
(define (rotate/4 c)
(flatten (map (rotate-curve-lambda 90.0 4) (flatten c))))
(define (rotate/8 c)
(flatten (map (rotate-curve-lambda 45.0 8) (flatten c))))
(define (rotate/16 c)
(flatten (map (rotate-curve-lambda 22.5 16) (flatten c))))
(define (hmirror c)
(define (pt-hflip p)
(pt (- (pt-x p)) (pt-y p)))
(define (hmirror-curve c)
(defm (curve closed? bezs) c)
(list c (curve: closed? (map-bez bezs pt-hflip))))
(flatten (map hmirror-curve (flatten c))))
(define (vmirror c)
(define (pt-vflip p)
(pt (pt-x p) (- (pt-y p))))
(define (vmirror-curve c)
(defm (curve closed? bezs) c)
(list c (curve: closed? (map-bez bezs pt-vflip))))
(flatten (map vmirror-curve (flatten c))))
;; Return lambda that translates curve by x, y. Used for tessellation.
(define (translate x y)
(define (translate-pt x y)
(lambda (p)
(pt (+ x (pt-x p)) (+ y (pt-y p)))))
(lambda (c)
(match c
[(? filled-curve?)
(let ((c (filled-curve-curve c)))
(defm (curve closed? bezs) c)
(fill (curve: closed? (map-bez bezs (translate-pt x y)))))]
[_
(defm (curve closed? bezs) c)
(curve: closed? (map-bez bezs (translate-pt x y)))])))
(define (fill-wrap x)
(map filled-curve (flatten x)))
(define-syntax (process-curve stx)
(syntax-case stx ()
[(_ points (f x ...))
(if (equal? (syntax->datum #'f) 'fill)
#'(process-curve points (fill-wrap x ...)) ; We want process-curve to remove all instances of fill, so replace fill with identity.
#'(f (process-curve points x) ...))]
[(_ points id)
(if (pt-id #'id)
#'(list-ref points (pt-id #'id))
#'id)]))
(define-syntax (generate-grid stx)
(syntax-case stx ()
[(_ curve ...)
#'(let ((points '())
(shapes '()))
(let* ((processed-curves (flatten (process-curve points curve)))
(new-points (for*/fold ([pt-acc '()])
;; Compare every shape with every other shape
([i (append shapes processed-curves)]
[j (flatten (list processed-curves))])
(if (equal? i j) ; If the shape is being compared with itself,
pt-acc ; skip it.
;; Calculate the intersections of shapes i and j, then deduplicate the points
;; and add it to the accumulated list.
(let ((next-pts (pt-deduplicate (intersection-points i j) pt-acc)))
(append pt-acc next-pts))))))
(set! points (append points (pt-deduplicate new-points points)))
(set! shapes (append shapes (flatten processed-curves)))) ...
(base-grid points shapes))]))
(def (node p id)
(def circ (circle p node-size))
(def filled (color "white" (fill circ)))
(def label (label-cnt (~a id) p))
(draw filled circ label))
(define (display-grid grid)
(draw (for/draw ([s (base-grid-shapes grid)])
s)
(for/draw ([pt (base-grid-points grid)]
[id pt-ids])
(node pt id))))
;; TODO: DRY this macro up.
(define-syntax (tessellate stx)
(syntax-case stx ()
[(_ g (width-start width-end) (height-start height-end) curves ...)
#'(draw (for/draw ([xy (for*/list ([x (range (* 2 width-start) (* 2 (add1 width-end)) 2)]
[y (range (* 2 height-start) (* 2 (add1 height-end)) 2)])
(cons x y))])
(for/draw ([s (map (translate (car xy) (cdr xy))
(flatten (list (process-curve (base-grid-points g) curves) ...)))])
s)))]
[(_ g width height curves ...)
#'(draw (for/draw ([xy (for*/list ([x (range 0 (* 2 width) 2)]
[y (range 0 (* 2 height) 2)])
(cons x y))])
(for/draw ([s (map (translate (car xy) (cdr xy))
(flatten (list (process-curve (base-grid-points g) curves) ...)))])
s)))]))
(define-syntax (with-grid stx)
(syntax-case stx ()
[(_ grid body ...)
#'(let ((points (base-grid-points grid)))
(process-curve points body) ...)]))
(provide set-scale
with-grid
tessellate
display-grid
generate-grid
vmirror
hmirror
rotate/4
rotate/8
rotate/16
rotate90
rotate45
square-frame
(all-from-out metapict))
| null | https://raw.githubusercontent.com/zkry/tessellation/6f881912eb35592f96539485e7bdd62bdc329528/tessellation.rkt | racket | How do I define the same thing (constant) for multiple levels?
points-deduplicate returns the set of points in a
not in b.
Return lambda that translates curve by x, y. Used for tessellation.
We want process-curve to remove all instances of fill, so replace fill with identity.
Compare every shape with every other shape
If the shape is being compared with itself,
skip it.
Calculate the intersections of shapes i and j, then deduplicate the points
and add it to the accumulated list.
TODO: DRY this macro up. | #lang racket
(require metapict)
(require (for-syntax racket/list
racket/format))
(define node-size 0.3)
(define (set-scale scale)
(match scale
['small
(set-curve-pict-size 500 500)
(set! node-size 0.03)]
['medium
(set-curve-pict-size 800 800)
(set! node-size 0.03)]
['large
(set-curve-pict-size 1200 1200)
(set! node-size 0.015)]
['x-large
(set-curve-pict-size 2400 2400)
(set! node-size 0.01)]))
(set-scale 'large)
(struct base-grid (points shapes) #:prefab)
(struct filled-curve (curve) #:prefab)
(define-for-syntax pt-ids
(let ((base-ids (map (lambda (offset) (string (integer->char (+ (char->integer #\a) offset)))) (range 26))))
(append base-ids (for*/list ((i base-ids) (j base-ids)) (~a i j)))))
(define pt-ids
(let ((base-ids (map (lambda (offset) (string (integer->char (+ (char->integer #\a) offset)))) (range 26))))
(append base-ids (for*/list ((i base-ids) (j base-ids)) (~a i j)))))
(define-for-syntax (pt-id stx)
(and (identifier? stx)
(index-of pt-ids (symbol->string (syntax->datum stx)))))
(define (pt-id stx)
(and (identifier? stx)
(index-of pt-ids (symbol->string (syntax->datum stx)))))
(define (pt-equal? a b)
(and (< (abs (- (pt-x a) (pt-x b))) 1e-5)
(< (abs (- (pt-y a) (pt-y b))) 1e-5)))
(define (pt-deduplicate a b)
(filter (lambda (pa)
(not (for/or ([pb b]) (pt-equal? pa pb))))
a))
(define (map-bez bezs f)
(map
(lambda (b)
(bez (f (bez-p0 b))
(f (bez-p1 b))
(f (bez-p2 b))
(f (bez-p3 b))))
bezs))
(define square-frame
(list
(curve (pt -1 -1) .. (pt -1 1))
(curve (pt -1 1) .. (pt 1 1))
(curve (pt 1 1) .. (pt 1 -1))
(curve (pt -1 -1) .. (pt 1 -1))))
(define (rotate90 c)
(define (rotate90-curve c)
(list c ((rotatedd (- 90)) c)))
(flatten (map rotate90-curve (flatten c))))
(define (rotate45 c)
(define (rotate45-curve c)
(list c ((rotatedd (- 45)) c)))
(flatten (map rotate45-curve (flatten c))))
(define (rotate-curve-lambda angle n)
(lambda (c)
(map (lambda (n) ((rotatedd (* n angle)) c)) (range n))))
(define (rotate/4 c)
(flatten (map (rotate-curve-lambda 90.0 4) (flatten c))))
(define (rotate/8 c)
(flatten (map (rotate-curve-lambda 45.0 8) (flatten c))))
(define (rotate/16 c)
(flatten (map (rotate-curve-lambda 22.5 16) (flatten c))))
(define (hmirror c)
(define (pt-hflip p)
(pt (- (pt-x p)) (pt-y p)))
(define (hmirror-curve c)
(defm (curve closed? bezs) c)
(list c (curve: closed? (map-bez bezs pt-hflip))))
(flatten (map hmirror-curve (flatten c))))
(define (vmirror c)
(define (pt-vflip p)
(pt (pt-x p) (- (pt-y p))))
(define (vmirror-curve c)
(defm (curve closed? bezs) c)
(list c (curve: closed? (map-bez bezs pt-vflip))))
(flatten (map vmirror-curve (flatten c))))
(define (translate x y)
(define (translate-pt x y)
(lambda (p)
(pt (+ x (pt-x p)) (+ y (pt-y p)))))
(lambda (c)
(match c
[(? filled-curve?)
(let ((c (filled-curve-curve c)))
(defm (curve closed? bezs) c)
(fill (curve: closed? (map-bez bezs (translate-pt x y)))))]
[_
(defm (curve closed? bezs) c)
(curve: closed? (map-bez bezs (translate-pt x y)))])))
(define (fill-wrap x)
(map filled-curve (flatten x)))
(define-syntax (process-curve stx)
(syntax-case stx ()
[(_ points (f x ...))
(if (equal? (syntax->datum #'f) 'fill)
#'(f (process-curve points x) ...))]
[(_ points id)
(if (pt-id #'id)
#'(list-ref points (pt-id #'id))
#'id)]))
(define-syntax (generate-grid stx)
(syntax-case stx ()
[(_ curve ...)
#'(let ((points '())
(shapes '()))
(let* ((processed-curves (flatten (process-curve points curve)))
(new-points (for*/fold ([pt-acc '()])
([i (append shapes processed-curves)]
[j (flatten (list processed-curves))])
(let ((next-pts (pt-deduplicate (intersection-points i j) pt-acc)))
(append pt-acc next-pts))))))
(set! points (append points (pt-deduplicate new-points points)))
(set! shapes (append shapes (flatten processed-curves)))) ...
(base-grid points shapes))]))
(def (node p id)
(def circ (circle p node-size))
(def filled (color "white" (fill circ)))
(def label (label-cnt (~a id) p))
(draw filled circ label))
(define (display-grid grid)
(draw (for/draw ([s (base-grid-shapes grid)])
s)
(for/draw ([pt (base-grid-points grid)]
[id pt-ids])
(node pt id))))
(define-syntax (tessellate stx)
(syntax-case stx ()
[(_ g (width-start width-end) (height-start height-end) curves ...)
#'(draw (for/draw ([xy (for*/list ([x (range (* 2 width-start) (* 2 (add1 width-end)) 2)]
[y (range (* 2 height-start) (* 2 (add1 height-end)) 2)])
(cons x y))])
(for/draw ([s (map (translate (car xy) (cdr xy))
(flatten (list (process-curve (base-grid-points g) curves) ...)))])
s)))]
[(_ g width height curves ...)
#'(draw (for/draw ([xy (for*/list ([x (range 0 (* 2 width) 2)]
[y (range 0 (* 2 height) 2)])
(cons x y))])
(for/draw ([s (map (translate (car xy) (cdr xy))
(flatten (list (process-curve (base-grid-points g) curves) ...)))])
s)))]))
(define-syntax (with-grid stx)
(syntax-case stx ()
[(_ grid body ...)
#'(let ((points (base-grid-points grid)))
(process-curve points body) ...)]))
(provide set-scale
with-grid
tessellate
display-grid
generate-grid
vmirror
hmirror
rotate/4
rotate/8
rotate/16
rotate90
rotate45
square-frame
(all-from-out metapict))
|
0deed34f37532ee2edeec860d6f30668a2d1e397006815a5985e119412063a50 | gmr/huesos-de-vaquero | huesos.erl | %% ------------------------------------------------------------------
%% Module used for running the development version of the app only
%% ------------------------------------------------------------------
-module(huesos).
-export([start/0]).
-define(APPS, [crypto, ranch, cowlib, cowboy, compiler, syntax_tools, erlydtl, huesos]).
start() ->
start_apps(?APPS).
start_apps([]) -> ok;
start_apps([App | Apps]) ->
case application:start(App) of
ok -> start_apps(Apps);
{error, {already_started, App}} -> start_apps(Apps)
end.
| null | https://raw.githubusercontent.com/gmr/huesos-de-vaquero/de12e7e0c474fbac4bbf3dcdcb403a4cb206a1b6/src/huesos.erl | erlang | ------------------------------------------------------------------
Module used for running the development version of the app only
------------------------------------------------------------------ |
-module(huesos).
-export([start/0]).
-define(APPS, [crypto, ranch, cowlib, cowboy, compiler, syntax_tools, erlydtl, huesos]).
start() ->
start_apps(?APPS).
start_apps([]) -> ok;
start_apps([App | Apps]) ->
case application:start(App) of
ok -> start_apps(Apps);
{error, {already_started, App}} -> start_apps(Apps)
end.
|
45292d242638209ed6cdd389e5b2a9f3586cde206cb5ea35276482170d7ee5a2 | tweag/asterius | rts.hs | import System.Environment
import System.Process
main :: IO ()
main = do
args <- getArgs
callProcess "ahc-link" $ ["--input-hs", "test/rts/MVar.hs", "--run"] <> args
callProcess "ahc-link" $ ["--input-hs", "test/rts/FFI.hs", "--run"] <> args
callProcess "ahc-link" $
["--input-hs", "test/rts/ThreadDelay.hs", "--run"]
<> args
callProcess "ahc-link" $ ["--input-hs", "test/rts/ForkIO.hs", "--run"] <> args
| null | https://raw.githubusercontent.com/tweag/asterius/e7b823c87499656860f87b9b468eb0567add1de8/asterius/test/rts.hs | haskell | import System.Environment
import System.Process
main :: IO ()
main = do
args <- getArgs
callProcess "ahc-link" $ ["--input-hs", "test/rts/MVar.hs", "--run"] <> args
callProcess "ahc-link" $ ["--input-hs", "test/rts/FFI.hs", "--run"] <> args
callProcess "ahc-link" $
["--input-hs", "test/rts/ThreadDelay.hs", "--run"]
<> args
callProcess "ahc-link" $ ["--input-hs", "test/rts/ForkIO.hs", "--run"] <> args
| |
df1fb36721f9739e5596fc234b8b0926993f674b3cfa2b814ea3f47b5e80a908 | philnguyen/soft-contract | match-no-check.rkt | #lang racket
(provide/contract
[f ((or/c (cons/c real? string?) string?) . -> . real?)])
(define (f x)
(match x
[(cons r s) #:when (<= r 1) (string-length s)]
[(cons r s) (/ (string-length s) r)]
[_ (string-length x)]))
| null | https://raw.githubusercontent.com/philnguyen/soft-contract/5e07dc2d622ee80b961f4e8aebd04ce950720239/soft-contract/test/programs/paper/match-no-check.rkt | racket | #lang racket
(provide/contract
[f ((or/c (cons/c real? string?) string?) . -> . real?)])
(define (f x)
(match x
[(cons r s) #:when (<= r 1) (string-length s)]
[(cons r s) (/ (string-length s) r)]
[_ (string-length x)]))
| |
8f85c9160aee9c3d4cc082d433067ac7b68bba9424914536b3522c60213d49ea | OCamlPro/ocp-index | indexPredefined.mli | (**************************************************************************)
(* *)
(* Copyright 2013 OCamlPro *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the Lesser GNU Public License version 3.0 .
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* Lesser GNU General Public License for more details. *)
(* *)
(**************************************************************************)
(** This module contains definitions for the predefined OCaml elements which are
not in Pervasives like base types ([int], [char]...) and exceptions
([Match_failure]...) *)
val types: IndexTypes.info list
val variants: IndexTypes.info list
val exceptions: IndexTypes.info list
val keywords: IndexTypes.info list
val all: IndexTypes.info list
| null | https://raw.githubusercontent.com/OCamlPro/ocp-index/15bcffa66584f7cc786d026757287c98780cfdbc/libs/indexPredefined.mli | ocaml | ************************************************************************
Copyright 2013 OCamlPro
All rights reserved. This file is distributed under the terms of
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Lesser GNU General Public License for more details.
************************************************************************
* This module contains definitions for the predefined OCaml elements which are
not in Pervasives like base types ([int], [char]...) and exceptions
([Match_failure]...) | the Lesser GNU Public License version 3.0 .
val types: IndexTypes.info list
val variants: IndexTypes.info list
val exceptions: IndexTypes.info list
val keywords: IndexTypes.info list
val all: IndexTypes.info list
|
d6d50b974bb50f1b664a56d90ac58486a773745815e25e61615b2778fbe0e748 | juji-io/datalevin | index.cljc | (ns datalevin.test.index
(:require
[datalevin.test.core :as tdc :refer [db-fixture]]
[clojure.test :refer [deftest testing is use-fixtures]]
[datalevin.util :as u]
[datalevin.core :as d]))
(use-fixtures :each db-fixture)
(deftest test-datoms
(let [dir (u/tmp-dir (str "reset-test-" (random-uuid)))
dvec #(vector (:e %) (:a %) (:v %))
db (-> (d/empty-db dir {:name {:db/valueType :db.type/string}
:age {:db/valueType :db.type/long}})
(d/db-with [ [:db/add 1 :name "Petr"]
[:db/add 1 :age 44]
[:db/add 2 :name "Ivan"]
[:db/add 2 :age 25]
[:db/add 3 :name "Sergey"]
[:db/add 3 :age 11] ]))]
(testing "Main indexes, sort order"
(is (= [[1 :name "Petr"]
[1 :age 44]
[2 :name "Ivan"]
[2 :age 25]
[3 :name "Sergey"]
[3 :age 11]]
(map dvec (d/datoms db :eavt))))
(is (= [[2 :name "Ivan"]
[1 :name "Petr"]
[3 :name "Sergey"]
[3 :age 11]
[2 :age 25]
[1 :age 44] ]
(map dvec (d/datoms db :avet)))))
(testing "Components filtration"
(is (= [[1 :name "Petr"]
[1 :age 44]]
(map dvec (d/datoms db :eavt 1))))
(is (= [ [1 :age 44] ]
(map dvec (d/datoms db :eavt 1 :age))))
(is (= [ [3 :age 11]
[2 :age 25]
[1 :age 44] ]
(map dvec (d/datoms db :avet :age)))))
(d/close-db db)
(u/delete-files dir)))
;; should not expect attribute in lexicographic order
;; attributes are in order of creation
(deftest test-seek-datoms
(let [dir (u/tmp-dir (str "seek-test-" (random-uuid)))
dvec #(vector (:e %) (:a %) (:v %))
db (-> (d/empty-db dir {:name {:db/valueType :db.type/string}
:age {:db/valueType :db.type/long}})
(d/db-with [[:db/add 1 :name "Petr"]
[:db/add 1 :age 44]
[:db/add 2 :name "Ivan"]
[:db/add 2 :age 25]
[:db/add 3 :name "Sergey"]
[:db/add 3 :age 11]]))]
(testing "Non-termination"
(is (= (map dvec (d/seek-datoms db :avet :age 10))
[ [3 :age 11]
[2 :age 25]
[1 :age 44]])))
(testing "Closest value lookup"
(is (= (map dvec (d/seek-datoms db :avet :name "P"))
[
[1 :name "Petr"]
[3 :name "Sergey"]
[3 :age 11]
[2 :age 25]
[1 :age 44]])))
(testing "Exact value lookup"
(is (= (map dvec (d/seek-datoms db :avet :name "Petr"))
[ [1 :name "Petr"]
[3 :name "Sergey"]
[3 :age 11]
[2 :age 25]
[1 :age 44]])))
(d/close-db db)
(u/delete-files dir)))
;; should not expect attributes in lexicographic order
(deftest test-rseek-datoms
(let [dir (u/tmp-dir (str "rseek-test-" (random-uuid)))
dvec #(vector (:e %) (:a %) (:v %))
db (-> (d/empty-db dir {:name {:db/valueType :db.type/string}
:age {:db/valueType :db.type/long}})
(d/db-with [[:db/add 1 :name "Petr"]
[:db/add 1 :age 44]
[:db/add 2 :name "Ivan"]
[:db/add 2 :age 25]
[:db/add 3 :name "Sergey"]
[:db/add 3 :age 11]]))]
(testing "Non-termination"
(is (= (map dvec (d/rseek-datoms db :avet :name "Petr"))
[ [1 :name "Petr"]
[2 :name "Ivan"]])))
(testing "Closest value lookup"
(is (= (map dvec (d/rseek-datoms db :avet :age 26))
[[2 :age 25]
[3 :age 11]
[3 :name "Sergey"]
[1 :name "Petr"]
[2 :name "Ivan"]])))
(testing "Exact value lookup"
(is (= (map dvec (d/rseek-datoms db :avet :age 25))
[[2 :age 25]
[3 :age 11]
[3 :name "Sergey"]
[1 :name "Petr"]
[2 :name "Ivan"]])))
(d/close-db db)
(u/delete-files dir)))
(deftest test-index-range
(let [dir (u/tmp-dir (str "range-test-" (random-uuid)))
dvec #(vector (:e %) (:a %) (:v %))
db (d/db-with
(d/empty-db dir {:name {:db/valueType :db.type/string}
:age {:db/valueType :db.type/long}})
[ { :db/id 1 :name "Ivan" :age 15 }
{ :db/id 2 :name "Oleg" :age 20 }
{ :db/id 3 :name "Sergey" :age 7 }
{ :db/id 4 :name "Pavel" :age 45 }
{ :db/id 5 :name "Petr" :age 20 } ])]
(is (= (map dvec (d/index-range db :name "Pe" "S"))
[ [5 :name "Petr"] ]))
(is (= (map dvec (d/index-range db :name "O" "Sergey"))
[ [2 :name "Oleg"]
[4 :name "Pavel"]
[5 :name "Petr"]
[3 :name "Sergey"] ]))
(is (= (map dvec (d/index-range db :name nil "P"))
[ [1 :name "Ivan"]
[2 :name "Oleg"] ]))
(is (= (map dvec (d/index-range db :name "R" nil))
[ [3 :name "Sergey"] ]))
(is (= (map dvec (d/index-range db :name nil nil))
[ [1 :name "Ivan"]
[2 :name "Oleg"]
[4 :name "Pavel"]
[5 :name "Petr"]
[3 :name "Sergey"] ]))
(is (= (map dvec (d/index-range db :age 15 20))
[ [1 :age 15]
[2 :age 20]
[5 :age 20]]))
(is (= (map dvec (d/index-range db :age 7 45))
[ [3 :age 7]
[1 :age 15]
[2 :age 20]
[5 :age 20]
[4 :age 45] ]))
(is (= (map dvec (d/index-range db :age 0 100))
[ [3 :age 7]
[1 :age 15]
[2 :age 20]
[5 :age 20]
[4 :age 45] ]))
(d/close-db db)
(u/delete-files dir)))
| null | https://raw.githubusercontent.com/juji-io/datalevin/3a1fccc3cb40531901d51719216fdce3b1aa3483/test/datalevin/test/index.cljc | clojure | should not expect attribute in lexicographic order
attributes are in order of creation
should not expect attributes in lexicographic order | (ns datalevin.test.index
(:require
[datalevin.test.core :as tdc :refer [db-fixture]]
[clojure.test :refer [deftest testing is use-fixtures]]
[datalevin.util :as u]
[datalevin.core :as d]))
(use-fixtures :each db-fixture)
(deftest test-datoms
(let [dir (u/tmp-dir (str "reset-test-" (random-uuid)))
dvec #(vector (:e %) (:a %) (:v %))
db (-> (d/empty-db dir {:name {:db/valueType :db.type/string}
:age {:db/valueType :db.type/long}})
(d/db-with [ [:db/add 1 :name "Petr"]
[:db/add 1 :age 44]
[:db/add 2 :name "Ivan"]
[:db/add 2 :age 25]
[:db/add 3 :name "Sergey"]
[:db/add 3 :age 11] ]))]
(testing "Main indexes, sort order"
(is (= [[1 :name "Petr"]
[1 :age 44]
[2 :name "Ivan"]
[2 :age 25]
[3 :name "Sergey"]
[3 :age 11]]
(map dvec (d/datoms db :eavt))))
(is (= [[2 :name "Ivan"]
[1 :name "Petr"]
[3 :name "Sergey"]
[3 :age 11]
[2 :age 25]
[1 :age 44] ]
(map dvec (d/datoms db :avet)))))
(testing "Components filtration"
(is (= [[1 :name "Petr"]
[1 :age 44]]
(map dvec (d/datoms db :eavt 1))))
(is (= [ [1 :age 44] ]
(map dvec (d/datoms db :eavt 1 :age))))
(is (= [ [3 :age 11]
[2 :age 25]
[1 :age 44] ]
(map dvec (d/datoms db :avet :age)))))
(d/close-db db)
(u/delete-files dir)))
(deftest test-seek-datoms
(let [dir (u/tmp-dir (str "seek-test-" (random-uuid)))
dvec #(vector (:e %) (:a %) (:v %))
db (-> (d/empty-db dir {:name {:db/valueType :db.type/string}
:age {:db/valueType :db.type/long}})
(d/db-with [[:db/add 1 :name "Petr"]
[:db/add 1 :age 44]
[:db/add 2 :name "Ivan"]
[:db/add 2 :age 25]
[:db/add 3 :name "Sergey"]
[:db/add 3 :age 11]]))]
(testing "Non-termination"
(is (= (map dvec (d/seek-datoms db :avet :age 10))
[ [3 :age 11]
[2 :age 25]
[1 :age 44]])))
(testing "Closest value lookup"
(is (= (map dvec (d/seek-datoms db :avet :name "P"))
[
[1 :name "Petr"]
[3 :name "Sergey"]
[3 :age 11]
[2 :age 25]
[1 :age 44]])))
(testing "Exact value lookup"
(is (= (map dvec (d/seek-datoms db :avet :name "Petr"))
[ [1 :name "Petr"]
[3 :name "Sergey"]
[3 :age 11]
[2 :age 25]
[1 :age 44]])))
(d/close-db db)
(u/delete-files dir)))
(deftest test-rseek-datoms
(let [dir (u/tmp-dir (str "rseek-test-" (random-uuid)))
dvec #(vector (:e %) (:a %) (:v %))
db (-> (d/empty-db dir {:name {:db/valueType :db.type/string}
:age {:db/valueType :db.type/long}})
(d/db-with [[:db/add 1 :name "Petr"]
[:db/add 1 :age 44]
[:db/add 2 :name "Ivan"]
[:db/add 2 :age 25]
[:db/add 3 :name "Sergey"]
[:db/add 3 :age 11]]))]
(testing "Non-termination"
(is (= (map dvec (d/rseek-datoms db :avet :name "Petr"))
[ [1 :name "Petr"]
[2 :name "Ivan"]])))
(testing "Closest value lookup"
(is (= (map dvec (d/rseek-datoms db :avet :age 26))
[[2 :age 25]
[3 :age 11]
[3 :name "Sergey"]
[1 :name "Petr"]
[2 :name "Ivan"]])))
(testing "Exact value lookup"
(is (= (map dvec (d/rseek-datoms db :avet :age 25))
[[2 :age 25]
[3 :age 11]
[3 :name "Sergey"]
[1 :name "Petr"]
[2 :name "Ivan"]])))
(d/close-db db)
(u/delete-files dir)))
(deftest test-index-range
(let [dir (u/tmp-dir (str "range-test-" (random-uuid)))
dvec #(vector (:e %) (:a %) (:v %))
db (d/db-with
(d/empty-db dir {:name {:db/valueType :db.type/string}
:age {:db/valueType :db.type/long}})
[ { :db/id 1 :name "Ivan" :age 15 }
{ :db/id 2 :name "Oleg" :age 20 }
{ :db/id 3 :name "Sergey" :age 7 }
{ :db/id 4 :name "Pavel" :age 45 }
{ :db/id 5 :name "Petr" :age 20 } ])]
(is (= (map dvec (d/index-range db :name "Pe" "S"))
[ [5 :name "Petr"] ]))
(is (= (map dvec (d/index-range db :name "O" "Sergey"))
[ [2 :name "Oleg"]
[4 :name "Pavel"]
[5 :name "Petr"]
[3 :name "Sergey"] ]))
(is (= (map dvec (d/index-range db :name nil "P"))
[ [1 :name "Ivan"]
[2 :name "Oleg"] ]))
(is (= (map dvec (d/index-range db :name "R" nil))
[ [3 :name "Sergey"] ]))
(is (= (map dvec (d/index-range db :name nil nil))
[ [1 :name "Ivan"]
[2 :name "Oleg"]
[4 :name "Pavel"]
[5 :name "Petr"]
[3 :name "Sergey"] ]))
(is (= (map dvec (d/index-range db :age 15 20))
[ [1 :age 15]
[2 :age 20]
[5 :age 20]]))
(is (= (map dvec (d/index-range db :age 7 45))
[ [3 :age 7]
[1 :age 15]
[2 :age 20]
[5 :age 20]
[4 :age 45] ]))
(is (= (map dvec (d/index-range db :age 0 100))
[ [3 :age 7]
[1 :age 15]
[2 :age 20]
[5 :age 20]
[4 :age 45] ]))
(d/close-db db)
(u/delete-files dir)))
|
7f6cd5727e12129644f7648fec77e1c3e9d723e6e6e03149efd601a86bf3d3e2 | freizl/dive-into-haskell | Error1.hs | # LANGUAGE ExistentialQuantification #
module Error1 where
data Expr a
= I Int
| B Bool
| Add (Expr a) (Expr a)
| forall b. LessThan (Expr b) (Expr b)
| forall c. Cond (Expr c) (Expr a) (Expr a)
int :: Int -> Expr Int
int = I
bool :: Bool -> Expr Bool
bool = B
add :: Expr Int -> Expr Int -> Expr Int
add = Add
lessThan :: Expr Int -> Expr Int -> Expr Bool
lessThan = LessThan
cond :: Expr Bool -> Expr a -> Expr a -> Expr a
cond = Cond
eval :: Expr a -> a
eval (I i) = i
eval (B b) = b
eval (Add x y) = eval x + eval y
eval (LessThan x y) = eval x < eval y
eval (Cond c t f)
| eval c = eval t
| otherwise = eval f
| null | https://raw.githubusercontent.com/freizl/dive-into-haskell/b18a6bfe212db6c3a5d707b4a640170b8bcf9330/codes/GADT/Error1.hs | haskell | # LANGUAGE ExistentialQuantification #
module Error1 where
data Expr a
= I Int
| B Bool
| Add (Expr a) (Expr a)
| forall b. LessThan (Expr b) (Expr b)
| forall c. Cond (Expr c) (Expr a) (Expr a)
int :: Int -> Expr Int
int = I
bool :: Bool -> Expr Bool
bool = B
add :: Expr Int -> Expr Int -> Expr Int
add = Add
lessThan :: Expr Int -> Expr Int -> Expr Bool
lessThan = LessThan
cond :: Expr Bool -> Expr a -> Expr a -> Expr a
cond = Cond
eval :: Expr a -> a
eval (I i) = i
eval (B b) = b
eval (Add x y) = eval x + eval y
eval (LessThan x y) = eval x < eval y
eval (Cond c t f)
| eval c = eval t
| otherwise = eval f
| |
c3d273d0d16ac821294a41b2daf577e0ba9c6959672090039dcc9c5f161c129c | agentm/project-m36 | Day.hs | module ProjectM36.DataTypes.Day where
import ProjectM36.Base
import ProjectM36.AtomFunctionBody
import ProjectM36.AtomFunctionError
import qualified Data.HashSet as HS
import Data.Time.Calendar
dayAtomFunctions :: AtomFunctions
dayAtomFunctions = HS.fromList [
Function { funcName = "fromGregorian",
funcType = [IntegerAtomType, IntegerAtomType, IntegerAtomType, DayAtomType],
funcBody = compiledAtomFunctionBody $
\case
IntegerAtom year:IntegerAtom month:IntegerAtom day:_ -> pure $ DayAtom (fromGregorian (fromIntegral year) (fromIntegral month) (fromIntegral day))
_ -> Left AtomFunctionTypeMismatchError
},
Function { funcName = "dayEarlierThan",
funcType = [DayAtomType, DayAtomType, BoolAtomType],
funcBody = compiledAtomFunctionBody $
\case
ConstructedAtom _ _ (IntAtom dayA:_):ConstructedAtom _ _ (IntAtom dayB:_):_ -> pure (BoolAtom (dayA < dayB))
_ -> Left AtomFunctionTypeMismatchError
}
]
| null | https://raw.githubusercontent.com/agentm/project-m36/f5b32001db4be8b4525d4e759e831d35a2c200f0/src/lib/ProjectM36/DataTypes/Day.hs | haskell | module ProjectM36.DataTypes.Day where
import ProjectM36.Base
import ProjectM36.AtomFunctionBody
import ProjectM36.AtomFunctionError
import qualified Data.HashSet as HS
import Data.Time.Calendar
dayAtomFunctions :: AtomFunctions
dayAtomFunctions = HS.fromList [
Function { funcName = "fromGregorian",
funcType = [IntegerAtomType, IntegerAtomType, IntegerAtomType, DayAtomType],
funcBody = compiledAtomFunctionBody $
\case
IntegerAtom year:IntegerAtom month:IntegerAtom day:_ -> pure $ DayAtom (fromGregorian (fromIntegral year) (fromIntegral month) (fromIntegral day))
_ -> Left AtomFunctionTypeMismatchError
},
Function { funcName = "dayEarlierThan",
funcType = [DayAtomType, DayAtomType, BoolAtomType],
funcBody = compiledAtomFunctionBody $
\case
ConstructedAtom _ _ (IntAtom dayA:_):ConstructedAtom _ _ (IntAtom dayB:_):_ -> pure (BoolAtom (dayA < dayB))
_ -> Left AtomFunctionTypeMismatchError
}
]
| |
16e5c0dbea854c3732d94ac6e723865c1c0c2d2eb82c4d71d91543ad7c6883a3 | roburio/utcp | params.ml | ( c ) 2019 , all rights reserved
let mclbytes = 2048
and msize = 256
and sb_max = 256 * 1024
(* params:450 *)
let so_sndbuf = 32 * 1024
and so_rcvbuf = 65535
and so_sndlowat = 2048
and so_rcvlowat = 1
and so_min_sndbuf = 1
and so_min_rcvbuf = 1
and so_min_sndlowat =1
and so_min_rcvlowat = 1
and so_max_sndbuf = sb_max * mclbytes / (mclbytes + msize)
and so_max_rcvbuf = sb_max * mclbytes / (mclbytes + msize)
and so_max_sndlowat = sb_max * mclbytes / (mclbytes + msize)
and so_max_rcvlowat = sb_max * mclbytes / (mclbytes + msize)
and dtsinval = Duration.of_day 24
and tcp_maxwin = 65535
and tcp_maxwinscale = 14
and mssdflt = 536
and ss_fltsz = 1
and scale = 6
(* updated from FreeBSD 13 *)
and tcptv_msl = Duration.of_sec 30
and tcptv_srttbase = 0L
and tcptv_rtobase = Duration.of_sec 3
and tcptv_persmin = Duration.of_sec 5
and tcptv_persmax = Duration.of_sec 60
and tcptv_keep_init = Duration.of_sec 75
and tcptv_keep_idle = Duration.of_hour 2
and tcptv_keepintvl = Duration.of_sec 75
and tcptv_keepvnt = 8
and tcptv_finwait2_timeout = Duration.of_sec 60
let tcptv_maxidle = Int64.shift_left tcptv_keepintvl 3
and tcptv_min = Duration.of_ms 30
and tcptv_cpu_var = Duration.of_ms 200
and tcptv_rexmtmax = Duration.of_sec 64
and tcptv_twtrunc = 8
and tcp_lingertime = Duration.of_min 2
and tcp_maxrxtshift = 12
and tcp_synackmaxrxtshift = 3
and tcptv_delack = Duration.of_ms 100
and tcptv_rttvarbase = 0L
let tcp_rtt_invalidate = tcp_maxrxtshift / 4
and tcp_syn_backoff =
Array.map Duration.of_sec
[| 1 ; 1 ; 1 ; 1 ; 1 ; 2 ; 4 ; 8 ; 16 ; 32 ; 64 ; 64 ; 64 |]
and tcp_backoff =
Array.map Duration.of_sec
[| 1 ; 2 ; 4 ; 8 ; 16 ; 32 ; 64 ; 128 ; 256 ; 512 ; 512 ; 512 ; 512 |]
| null | https://raw.githubusercontent.com/roburio/utcp/38cd6c7fa7c1a89aa0b490a5daa815ac02c3d11f/src/params.ml | ocaml | params:450
updated from FreeBSD 13 | ( c ) 2019 , all rights reserved
let mclbytes = 2048
and msize = 256
and sb_max = 256 * 1024
let so_sndbuf = 32 * 1024
and so_rcvbuf = 65535
and so_sndlowat = 2048
and so_rcvlowat = 1
and so_min_sndbuf = 1
and so_min_rcvbuf = 1
and so_min_sndlowat =1
and so_min_rcvlowat = 1
and so_max_sndbuf = sb_max * mclbytes / (mclbytes + msize)
and so_max_rcvbuf = sb_max * mclbytes / (mclbytes + msize)
and so_max_sndlowat = sb_max * mclbytes / (mclbytes + msize)
and so_max_rcvlowat = sb_max * mclbytes / (mclbytes + msize)
and dtsinval = Duration.of_day 24
and tcp_maxwin = 65535
and tcp_maxwinscale = 14
and mssdflt = 536
and ss_fltsz = 1
and scale = 6
and tcptv_msl = Duration.of_sec 30
and tcptv_srttbase = 0L
and tcptv_rtobase = Duration.of_sec 3
and tcptv_persmin = Duration.of_sec 5
and tcptv_persmax = Duration.of_sec 60
and tcptv_keep_init = Duration.of_sec 75
and tcptv_keep_idle = Duration.of_hour 2
and tcptv_keepintvl = Duration.of_sec 75
and tcptv_keepvnt = 8
and tcptv_finwait2_timeout = Duration.of_sec 60
let tcptv_maxidle = Int64.shift_left tcptv_keepintvl 3
and tcptv_min = Duration.of_ms 30
and tcptv_cpu_var = Duration.of_ms 200
and tcptv_rexmtmax = Duration.of_sec 64
and tcptv_twtrunc = 8
and tcp_lingertime = Duration.of_min 2
and tcp_maxrxtshift = 12
and tcp_synackmaxrxtshift = 3
and tcptv_delack = Duration.of_ms 100
and tcptv_rttvarbase = 0L
let tcp_rtt_invalidate = tcp_maxrxtshift / 4
and tcp_syn_backoff =
Array.map Duration.of_sec
[| 1 ; 1 ; 1 ; 1 ; 1 ; 2 ; 4 ; 8 ; 16 ; 32 ; 64 ; 64 ; 64 |]
and tcp_backoff =
Array.map Duration.of_sec
[| 1 ; 2 ; 4 ; 8 ; 16 ; 32 ; 64 ; 128 ; 256 ; 512 ; 512 ; 512 ; 512 |]
|
c6c0c3eaa9606db40093c12c3135d937039d907f13a2b930e16b61c415b927df | techascent/tech.datatype | typed_buffer.clj | (ns tech.v2.datatype.typed-buffer
(:require [tech.v2.datatype.protocols :as dtype-proto]
[tech.v2.datatype.casting :as casting]
[tech.v2.datatype.base :as base]
[tech.jna :as jna]
[tech.v2.datatype.reader :as reader]
[tech.v2.datatype.writer :as writer]
[tech.v2.datatype.mutable :as mutable]
[tech.v2.datatype.pprint :as dtype-pprint])
(:import [com.sun.jna Pointer]
[java.io Writer]
[tech.v2.datatype.protocols PDatatype]
[tech.v2.datatype ObjectReader ObjectWriter]
[clojure.lang Counted Indexed]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(deftype TypedBuffer [datatype backing-store]
dtype-proto/PDatatype
(get-datatype [item] datatype)
dtype-proto/PCopyRawData
(copy-raw->item! [raw-data ary-target target-offset options]
(base/raw-dtype-copy! raw-data ary-target target-offset options))
dtype-proto/PPrototype
(from-prototype [item datatype shape]
(TypedBuffer. datatype
(dtype-proto/from-prototype backing-store
(casting/datatype->host-type datatype)
shape)))
dtype-proto/PToBackingStore
(->backing-store-seq [item]
(dtype-proto/->backing-store-seq backing-store))
dtype-proto/PToNioBuffer
(convertible-to-nio-buffer? [item]
(dtype-proto/nio-convertible? backing-store))
(->buffer-backing-store [item]
(dtype-proto/as-nio-buffer backing-store))
dtype-proto/PToList
(convertible-to-fastutil-list? [item]
(dtype-proto/list-convertible? backing-store))
(->list-backing-store [item]
(when (satisfies? dtype-proto/PToList backing-store)
(dtype-proto/->list-backing-store backing-store)))
dtype-proto/PSetConstant
(set-constant! [item offset value n-elems]
(let [value (-> value
(casting/cast datatype)
(casting/unchecked-cast (dtype-proto/get-datatype
backing-store)))]
(dtype-proto/set-constant! backing-store offset value n-elems)))
dtype-proto/PBuffer
(sub-buffer [buffer offset length]
(TypedBuffer. datatype (dtype-proto/sub-buffer backing-store offset length)))
dtype-proto/PToArray
(->sub-array [item]
(when (= datatype (dtype-proto/get-datatype backing-store))
(dtype-proto/->sub-array backing-store)))
(->array-copy [item]
(if (= datatype (dtype-proto/get-datatype backing-store))
(dtype-proto/->array-copy backing-store)
(let [data-buf (dtype-proto/make-container
:java-array (casting/datatype->safe-host-type datatype)
(base/ecount backing-store)
{})]
(base/copy! item 0 data-buf 0 (base/ecount item)))))
dtype-proto/PToWriter
(convertible-to-writer? [item] (dtype-proto/convertible-to-writer? backing-store))
;;No marshalling/casting on the writer side.
(->writer [item options]
(let [{writer-datatype :datatype
unchecked? :unchecked?} options
writer-datatype (or writer-datatype datatype)
writer-matches? (= writer-datatype datatype)
src-writer-unchecked? (if writer-matches?
unchecked?
false)
direct-writer (cond
(dtype-proto/as-nio-buffer backing-store)
(writer/make-buffer-writer item
(casting/safe-flatten datatype)
datatype
src-writer-unchecked?)
(dtype-proto/as-list backing-store)
(writer/make-list-writer item
(casting/safe-flatten datatype)
datatype
src-writer-unchecked?)
:else
(dtype-proto/->writer backing-store {:datatype datatype}))]
(cond-> direct-writer
(not writer-matches?)
(dtype-proto/->writer {:datatype writer-datatype :unchecked? unchecked?}))))
dtype-proto/PToReader
(convertible-to-reader? [item] (dtype-proto/convertible-to-reader? backing-store))
(->reader [item options]
(let [{reader-datatype :datatype
unchecked? :unchecked?} options
reader-datatype (or reader-datatype datatype)
src-unchecked? true
There is an unchecked fastpath that does not attempt to do elementwise
;;conversions of the data in the buffer.
[intermediate-datatype src-datatype]
(if (and unchecked?
(= reader-datatype (base/get-datatype backing-store)))
[reader-datatype reader-datatype]
[datatype (casting/safe-flatten datatype)])
direct-reader (cond
(dtype-proto/as-nio-buffer backing-store)
(reader/make-buffer-reader item
src-datatype
intermediate-datatype
src-unchecked?)
(dtype-proto/as-list backing-store)
(reader/make-list-reader item
src-datatype
intermediate-datatype
src-unchecked?)
:else
(dtype-proto/->reader backing-store
{:datatype datatype
:unchecked? unchecked?}))
result-datatype (dtype-proto/get-datatype direct-reader)]
(if (not= reader-datatype result-datatype)
(dtype-proto/->reader direct-reader {:datatype reader-datatype
:unchecked? unchecked?})
direct-reader)))
Counted
(count [item] (base/ecount item))
Indexed
(nth [item idx]
((base/->reader item :object) idx))
(nth [item idx def-val]
(if (< idx (base/ecount item))
(nth item idx)
def-val))
dtype-proto/PToIterable
(convertible-to-iterable? [item] true)
(->iterable [item options] (dtype-proto/->reader item options))
dtype-proto/PToMutable
(convertible-to-mutable? [item]
(dtype-proto/convertible-to-mutable? backing-store))
(->mutable [item options]
(let [{mutable-datatype :datatype
unchecked? :unchecked?} options
mutable-datatype (or mutable-datatype datatype)
src-unchecked? (if (= mutable-datatype datatype)
unchecked?
false)
direct-mutable (cond
(dtype-proto/convertible-to-fastutil-list? backing-store)
(mutable/make-list-mutable item
(casting/safe-flatten datatype)
datatype
src-unchecked?)
:else
(dtype-proto/->mutable backing-store
{:datatype datatype
:unchecked? src-unchecked?}))]
(cond-> direct-mutable
(not= mutable-datatype datatype)
(dtype-proto/->mutable {:datatype mutable-datatype
:unchecked? unchecked?}))))
dtype-proto/PRemoveRange
(remove-range! [item idx count]
(dtype-proto/remove-range! backing-store idx count))
dtype-proto/PInsertBlock
(insert-block! [item idx values options]
(dtype-proto/insert-block! backing-store
idx
(if (:unchecked? options)
values
(dtype-proto/->reader values {:datatype datatype}))
options))
dtype-proto/PToJNAPointer
(convertible-to-data-ptr? [item]
(dtype-proto/convertible-to-data-ptr? backing-store))
(->jna-ptr [item] (dtype-proto/->jna-ptr backing-store))
dtype-proto/PToBufferDesc
(convertible-to-buffer-desc? [item]
(when (and (casting/numeric-type? datatype)
(= (casting/numeric-byte-width datatype)
(casting/numeric-byte-width (dtype-proto/get-datatype
backing-store))))
(dtype-proto/convertible-to-buffer-desc? backing-store)))
(->buffer-descriptor [item]
(when (and (casting/numeric-type? datatype)
(= (casting/numeric-byte-width datatype)
(casting/numeric-byte-width (dtype-proto/get-datatype
backing-store))))
(-> (dtype-proto/->buffer-descriptor backing-store)
(assoc :datatype datatype))))
dtype-proto/PCountable
(ecount [item] (dtype-proto/ecount backing-store))
Object
(toString [this]
(let [n-items (base/ecount this)
format-str (if (> n-items 20)
"#tech.v2.datatype.typed-buffer<%s,%s>%s\n[%s...]"
"#tech.v2.datatype.typed-buffer<%s,%s>%s\n[%s]"
)]
(format format-str
(.getName ^Class (type backing-store))
(name datatype)
[n-items]
(-> (dtype-proto/sub-buffer this 0 (min 20 (base/ecount this)))
(dtype-pprint/print-reader-data)))))
(hashCode [this]
(.hashCode {:datatype datatype
:backing-store backing-store}))
(equals [this other]
(.equals other {:datatype datatype
:backing-store backing-store})))
(defmethod print-method TypedBuffer
[buf w]
(.write ^Writer w (.toString ^Object buf)))
(defn typed-buffer?
[item]
(every? #(satisfies? % item)
[dtype-proto/PDatatype
dtype-proto/PCopyRawData
dtype-proto/PPrototype
dtype-proto/PBuffer
dtype-proto/PToWriter dtype-proto/PToReader]))
(defn convertible-to-typed-buffer?
[item]
(or (instance? TypedBuffer item)
(or
(dtype-proto/base-type-convertible? item))))
(defn convert-to-typed-buffer
[item]
(cond
(instance? TypedBuffer item)
item
(dtype-proto/base-type-convertible? item)
(TypedBuffer. (dtype-proto/get-datatype item)
(or (dtype-proto/as-nio-buffer item)
(dtype-proto/as-list item)))
:else
(throw (ex-info "Item is not convertible to typed buffer"
{:item-type (type item)}))))
(defn ->typed-buffer
[item]
(cond
(typed-buffer? item)
item
:else
(convert-to-typed-buffer item)))
(defn make-typed-buffer
([datatype elem-count-or-seq options]
(let [host-dtype (casting/datatype->host-datatype datatype)]
(if (or (:unchecked? options)
(= host-dtype datatype))
(TypedBuffer. datatype
(dtype-proto/make-container
:java-array host-dtype elem-count-or-seq options))
(let [n-elems (if (number? elem-count-or-seq)
elem-count-or-seq
(base/ecount elem-count-or-seq))
container (dtype-proto/make-container :java-array host-dtype
n-elems {})
typed-buf (TypedBuffer. datatype container)]
(when-not (number? elem-count-or-seq)
(dtype-proto/copy-raw->item! elem-count-or-seq
typed-buf 0 options))
typed-buf))))
([datatype elem-count-or-seq]
(make-typed-buffer datatype elem-count-or-seq {})))
(defn set-datatype
"Use this one with care."
[item dtype]
(if (= dtype (dtype-proto/get-datatype item))
item
(let [^TypedBuffer item (convert-to-typed-buffer item)]
(TypedBuffer. dtype (.backing-store item)))))
(defmethod dtype-proto/make-container :typed-buffer
[_container-type datatype elem-count-or-seq options]
(make-typed-buffer datatype elem-count-or-seq options))
| null | https://raw.githubusercontent.com/techascent/tech.datatype/8cc83d771d9621d580fd5d4d0625005bd7ab0e0c/src/tech/v2/datatype/typed_buffer.clj | clojure | No marshalling/casting on the writer side.
conversions of the data in the buffer. | (ns tech.v2.datatype.typed-buffer
(:require [tech.v2.datatype.protocols :as dtype-proto]
[tech.v2.datatype.casting :as casting]
[tech.v2.datatype.base :as base]
[tech.jna :as jna]
[tech.v2.datatype.reader :as reader]
[tech.v2.datatype.writer :as writer]
[tech.v2.datatype.mutable :as mutable]
[tech.v2.datatype.pprint :as dtype-pprint])
(:import [com.sun.jna Pointer]
[java.io Writer]
[tech.v2.datatype.protocols PDatatype]
[tech.v2.datatype ObjectReader ObjectWriter]
[clojure.lang Counted Indexed]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(deftype TypedBuffer [datatype backing-store]
dtype-proto/PDatatype
(get-datatype [item] datatype)
dtype-proto/PCopyRawData
(copy-raw->item! [raw-data ary-target target-offset options]
(base/raw-dtype-copy! raw-data ary-target target-offset options))
dtype-proto/PPrototype
(from-prototype [item datatype shape]
(TypedBuffer. datatype
(dtype-proto/from-prototype backing-store
(casting/datatype->host-type datatype)
shape)))
dtype-proto/PToBackingStore
(->backing-store-seq [item]
(dtype-proto/->backing-store-seq backing-store))
dtype-proto/PToNioBuffer
(convertible-to-nio-buffer? [item]
(dtype-proto/nio-convertible? backing-store))
(->buffer-backing-store [item]
(dtype-proto/as-nio-buffer backing-store))
dtype-proto/PToList
(convertible-to-fastutil-list? [item]
(dtype-proto/list-convertible? backing-store))
(->list-backing-store [item]
(when (satisfies? dtype-proto/PToList backing-store)
(dtype-proto/->list-backing-store backing-store)))
dtype-proto/PSetConstant
(set-constant! [item offset value n-elems]
(let [value (-> value
(casting/cast datatype)
(casting/unchecked-cast (dtype-proto/get-datatype
backing-store)))]
(dtype-proto/set-constant! backing-store offset value n-elems)))
dtype-proto/PBuffer
(sub-buffer [buffer offset length]
(TypedBuffer. datatype (dtype-proto/sub-buffer backing-store offset length)))
dtype-proto/PToArray
(->sub-array [item]
(when (= datatype (dtype-proto/get-datatype backing-store))
(dtype-proto/->sub-array backing-store)))
(->array-copy [item]
(if (= datatype (dtype-proto/get-datatype backing-store))
(dtype-proto/->array-copy backing-store)
(let [data-buf (dtype-proto/make-container
:java-array (casting/datatype->safe-host-type datatype)
(base/ecount backing-store)
{})]
(base/copy! item 0 data-buf 0 (base/ecount item)))))
dtype-proto/PToWriter
(convertible-to-writer? [item] (dtype-proto/convertible-to-writer? backing-store))
(->writer [item options]
(let [{writer-datatype :datatype
unchecked? :unchecked?} options
writer-datatype (or writer-datatype datatype)
writer-matches? (= writer-datatype datatype)
src-writer-unchecked? (if writer-matches?
unchecked?
false)
direct-writer (cond
(dtype-proto/as-nio-buffer backing-store)
(writer/make-buffer-writer item
(casting/safe-flatten datatype)
datatype
src-writer-unchecked?)
(dtype-proto/as-list backing-store)
(writer/make-list-writer item
(casting/safe-flatten datatype)
datatype
src-writer-unchecked?)
:else
(dtype-proto/->writer backing-store {:datatype datatype}))]
(cond-> direct-writer
(not writer-matches?)
(dtype-proto/->writer {:datatype writer-datatype :unchecked? unchecked?}))))
dtype-proto/PToReader
(convertible-to-reader? [item] (dtype-proto/convertible-to-reader? backing-store))
(->reader [item options]
(let [{reader-datatype :datatype
unchecked? :unchecked?} options
reader-datatype (or reader-datatype datatype)
src-unchecked? true
There is an unchecked fastpath that does not attempt to do elementwise
[intermediate-datatype src-datatype]
(if (and unchecked?
(= reader-datatype (base/get-datatype backing-store)))
[reader-datatype reader-datatype]
[datatype (casting/safe-flatten datatype)])
direct-reader (cond
(dtype-proto/as-nio-buffer backing-store)
(reader/make-buffer-reader item
src-datatype
intermediate-datatype
src-unchecked?)
(dtype-proto/as-list backing-store)
(reader/make-list-reader item
src-datatype
intermediate-datatype
src-unchecked?)
:else
(dtype-proto/->reader backing-store
{:datatype datatype
:unchecked? unchecked?}))
result-datatype (dtype-proto/get-datatype direct-reader)]
(if (not= reader-datatype result-datatype)
(dtype-proto/->reader direct-reader {:datatype reader-datatype
:unchecked? unchecked?})
direct-reader)))
Counted
(count [item] (base/ecount item))
Indexed
(nth [item idx]
((base/->reader item :object) idx))
(nth [item idx def-val]
(if (< idx (base/ecount item))
(nth item idx)
def-val))
dtype-proto/PToIterable
(convertible-to-iterable? [item] true)
(->iterable [item options] (dtype-proto/->reader item options))
dtype-proto/PToMutable
(convertible-to-mutable? [item]
(dtype-proto/convertible-to-mutable? backing-store))
(->mutable [item options]
(let [{mutable-datatype :datatype
unchecked? :unchecked?} options
mutable-datatype (or mutable-datatype datatype)
src-unchecked? (if (= mutable-datatype datatype)
unchecked?
false)
direct-mutable (cond
(dtype-proto/convertible-to-fastutil-list? backing-store)
(mutable/make-list-mutable item
(casting/safe-flatten datatype)
datatype
src-unchecked?)
:else
(dtype-proto/->mutable backing-store
{:datatype datatype
:unchecked? src-unchecked?}))]
(cond-> direct-mutable
(not= mutable-datatype datatype)
(dtype-proto/->mutable {:datatype mutable-datatype
:unchecked? unchecked?}))))
dtype-proto/PRemoveRange
(remove-range! [item idx count]
(dtype-proto/remove-range! backing-store idx count))
dtype-proto/PInsertBlock
(insert-block! [item idx values options]
(dtype-proto/insert-block! backing-store
idx
(if (:unchecked? options)
values
(dtype-proto/->reader values {:datatype datatype}))
options))
dtype-proto/PToJNAPointer
(convertible-to-data-ptr? [item]
(dtype-proto/convertible-to-data-ptr? backing-store))
(->jna-ptr [item] (dtype-proto/->jna-ptr backing-store))
dtype-proto/PToBufferDesc
(convertible-to-buffer-desc? [item]
(when (and (casting/numeric-type? datatype)
(= (casting/numeric-byte-width datatype)
(casting/numeric-byte-width (dtype-proto/get-datatype
backing-store))))
(dtype-proto/convertible-to-buffer-desc? backing-store)))
(->buffer-descriptor [item]
(when (and (casting/numeric-type? datatype)
(= (casting/numeric-byte-width datatype)
(casting/numeric-byte-width (dtype-proto/get-datatype
backing-store))))
(-> (dtype-proto/->buffer-descriptor backing-store)
(assoc :datatype datatype))))
dtype-proto/PCountable
(ecount [item] (dtype-proto/ecount backing-store))
Object
(toString [this]
(let [n-items (base/ecount this)
format-str (if (> n-items 20)
"#tech.v2.datatype.typed-buffer<%s,%s>%s\n[%s...]"
"#tech.v2.datatype.typed-buffer<%s,%s>%s\n[%s]"
)]
(format format-str
(.getName ^Class (type backing-store))
(name datatype)
[n-items]
(-> (dtype-proto/sub-buffer this 0 (min 20 (base/ecount this)))
(dtype-pprint/print-reader-data)))))
(hashCode [this]
(.hashCode {:datatype datatype
:backing-store backing-store}))
(equals [this other]
(.equals other {:datatype datatype
:backing-store backing-store})))
(defmethod print-method TypedBuffer
[buf w]
(.write ^Writer w (.toString ^Object buf)))
(defn typed-buffer?
[item]
(every? #(satisfies? % item)
[dtype-proto/PDatatype
dtype-proto/PCopyRawData
dtype-proto/PPrototype
dtype-proto/PBuffer
dtype-proto/PToWriter dtype-proto/PToReader]))
(defn convertible-to-typed-buffer?
[item]
(or (instance? TypedBuffer item)
(or
(dtype-proto/base-type-convertible? item))))
(defn convert-to-typed-buffer
[item]
(cond
(instance? TypedBuffer item)
item
(dtype-proto/base-type-convertible? item)
(TypedBuffer. (dtype-proto/get-datatype item)
(or (dtype-proto/as-nio-buffer item)
(dtype-proto/as-list item)))
:else
(throw (ex-info "Item is not convertible to typed buffer"
{:item-type (type item)}))))
(defn ->typed-buffer
[item]
(cond
(typed-buffer? item)
item
:else
(convert-to-typed-buffer item)))
(defn make-typed-buffer
([datatype elem-count-or-seq options]
(let [host-dtype (casting/datatype->host-datatype datatype)]
(if (or (:unchecked? options)
(= host-dtype datatype))
(TypedBuffer. datatype
(dtype-proto/make-container
:java-array host-dtype elem-count-or-seq options))
(let [n-elems (if (number? elem-count-or-seq)
elem-count-or-seq
(base/ecount elem-count-or-seq))
container (dtype-proto/make-container :java-array host-dtype
n-elems {})
typed-buf (TypedBuffer. datatype container)]
(when-not (number? elem-count-or-seq)
(dtype-proto/copy-raw->item! elem-count-or-seq
typed-buf 0 options))
typed-buf))))
([datatype elem-count-or-seq]
(make-typed-buffer datatype elem-count-or-seq {})))
(defn set-datatype
"Use this one with care."
[item dtype]
(if (= dtype (dtype-proto/get-datatype item))
item
(let [^TypedBuffer item (convert-to-typed-buffer item)]
(TypedBuffer. dtype (.backing-store item)))))
(defmethod dtype-proto/make-container :typed-buffer
[_container-type datatype elem-count-or-seq options]
(make-typed-buffer datatype elem-count-or-seq options))
|
57a676d9ae1888aa36cfe7ce9d1a8198437bfe98824068bbce3eb30b5fce6760 | tsloughter/kuberl | kuberl_extensions_v1beta1_http_ingress_rule_value.erl | -module(kuberl_extensions_v1beta1_http_ingress_rule_value).
-export([encode/1]).
-export_type([kuberl_extensions_v1beta1_http_ingress_rule_value/0]).
-type kuberl_extensions_v1beta1_http_ingress_rule_value() ::
#{ 'paths' := list()
}.
encode(#{ 'paths' := Paths
}) ->
#{ 'paths' => Paths
}.
| null | https://raw.githubusercontent.com/tsloughter/kuberl/f02ae6680d6ea5db6e8b6c7acbee8c4f9df482e2/gen/kuberl_extensions_v1beta1_http_ingress_rule_value.erl | erlang | -module(kuberl_extensions_v1beta1_http_ingress_rule_value).
-export([encode/1]).
-export_type([kuberl_extensions_v1beta1_http_ingress_rule_value/0]).
-type kuberl_extensions_v1beta1_http_ingress_rule_value() ::
#{ 'paths' := list()
}.
encode(#{ 'paths' := Paths
}) ->
#{ 'paths' => Paths
}.
| |
6c6d7604b28ddda97c902cfe14709719311d692a9d29b1df8162bd2c4be80026 | mikera/ironclad | lib.clj | (ns lib)
| null | https://raw.githubusercontent.com/mikera/ironclad/ef647bcd097eeaf45f058d43e9e5f53ce910b4b2/src/main/clojure/ic/lib.clj | clojure | (ns lib)
| |
d5f47e3d35ae44aec7f4773c45f92604a1f317fadbe1ed286790b0259c5d041a | charlieg/Sparser | resource.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:(SPARSER LISP) -*-
Copyright ( c ) 2009 BBNT Solutions LLC . All Rights Reserved
$ Id:$
;;;
;;; File: "resource"
;;; Module: "objects;model:psi:"
version : August 2009
initiated 6/19/09 on the model of the file in individuals . Exercised
and tweeked 7/22 . Working on through 8/24
(in-package :sparser)
;;;---------
;;; globals
;;;---------
(defparameter *next-psi* :not-initialized
"Points to the first available individual object in their resource
list.") ;; these are allocated (or deallocated) but yet to be deployed
(defparameter *active-psi* nil)
;; these are the one's that have been allocated but not yet deallocated
(unless (boundp '*number-of-psi-in-initial-allocation*)
(defparameter *number-of-psi-in-initial-allocation* 100))
(unless (boundp '*number-of-psi-per-increment*)
(defparameter *number-of-psi-per-increment* 50))
(defvar *psi-count* 0
"Used for both allocated (resource-based) and permanent individuals
so that all numbers are uniquely assigned. Number always goes up.")
;;;----------
;;; allocate
;;;----------
(defun allocate-psi ()
(when (eq *next-psi* :not-initialized)
(initialize-psi-resource))
(unless *next-psi*
(allocate-a-rasher-of-psi))
(let ((psi (kpop *next-psi*)))
(initialize-fields/psi psi)
(setf (indiv-type psi) :freshly-allocated)
(tr :allocating-a-psi-object psi)
( break " Allocating ~a~%Record the backtrace " psi )
(kpush psi *active-psi*)
psi ))
;;;------------
;;; deallocate
;;;------------
;;//// needs to hook into per-article-initializations via some sort
;; of reclaiming operation and a notion of permanence.
(defun deallocate-psi (psi)
;; added it to the available list
(unless (deallocated-psi? psi)
(setq *next-psi*
(kcons psi *next-individual*))
;; remove it from the active list
(if (eq psi (first *active-psi*))
(setq *active-psi* (cdr *active-psi*))
(let* ((prior-cell *active-psi*)
(next-cell (cdr *active-psi*))
(next-psi (car next-cell)))
(loop
(when (null next-psi)
(break "Couldn't find ~A amoung the active psi" psi)
(return-from deallocate-psi nil))
(when (eq next-psi psi)
;; splice it out of kcons list
(rplacd prior-cell
(cdr next-cell))
(deallocate-kons next-cell)
(return))
(setq prior-cell next-cell
next-cell (cdr next-cell)
next-individual (car next-cell)))))
do n't zero its fields until it 's allocated again
(setf (unit-plist psi) `(:deallocated t ,@(unit-plist psi)))
indiv ))
;;;-------------
;;; subroutines
;;;-------------
(defun initialize-fields/psi (i)
(initialize-fields/individual i)
(setf (psi-lp i) nil)
(setf (psi-v+v i) nil)
(setf (psi-downlinks i) nil)
(setf (psi-source i) nil)
(setf (psi-path i) nil)
i)
(defun deallocated-psi? (i)
(member :deallocated (unit-plist i)))
;;;------------
;;; initialize
;;;------------
(defun initialize-psi-resource ()
;; would be called from setup-session-globals/parser
;; in drivers/inits/sessions/setup, but not until
;; we fully switch over.
(setq *next-psi* nil)
(allocate-a-rasher-of-psi
*number-of-psi-in-initial-allocation*))
(defun allocate-a-rasher-of-psi
( &optional (max *number-of-psi-per-increment*))
(let ((ptr *next-psi*))
(dotimes (i max)
(setq ptr
(kcons (make-psi
:type :never-used
:plist `(:uid ,(incf *psi-count*)))
ptr)))
(setq *next-psi* ptr)))
;;;--------
;;; lookup
;;;--------
(defun psi-object# (n)
;; modeled on individual-object#
(find n *active-psi*
:key #'(lambda (i)
(cadr (member :uid (unit-plist i))))))
| null | https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/objects/model/psi/resource.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:(SPARSER LISP) -*-
File: "resource"
Module: "objects;model:psi:"
---------
globals
---------
these are allocated (or deallocated) but yet to be deployed
these are the one's that have been allocated but not yet deallocated
----------
allocate
----------
------------
deallocate
------------
//// needs to hook into per-article-initializations via some sort
of reclaiming operation and a notion of permanence.
added it to the available list
remove it from the active list
splice it out of kcons list
-------------
subroutines
-------------
------------
initialize
------------
would be called from setup-session-globals/parser
in drivers/inits/sessions/setup, but not until
we fully switch over.
--------
lookup
--------
modeled on individual-object# | Copyright ( c ) 2009 BBNT Solutions LLC . All Rights Reserved
$ Id:$
version : August 2009
initiated 6/19/09 on the model of the file in individuals . Exercised
and tweeked 7/22 . Working on through 8/24
(in-package :sparser)
(defparameter *next-psi* :not-initialized
"Points to the first available individual object in their resource
(defparameter *active-psi* nil)
(unless (boundp '*number-of-psi-in-initial-allocation*)
(defparameter *number-of-psi-in-initial-allocation* 100))
(unless (boundp '*number-of-psi-per-increment*)
(defparameter *number-of-psi-per-increment* 50))
(defvar *psi-count* 0
"Used for both allocated (resource-based) and permanent individuals
so that all numbers are uniquely assigned. Number always goes up.")
(defun allocate-psi ()
(when (eq *next-psi* :not-initialized)
(initialize-psi-resource))
(unless *next-psi*
(allocate-a-rasher-of-psi))
(let ((psi (kpop *next-psi*)))
(initialize-fields/psi psi)
(setf (indiv-type psi) :freshly-allocated)
(tr :allocating-a-psi-object psi)
( break " Allocating ~a~%Record the backtrace " psi )
(kpush psi *active-psi*)
psi ))
(defun deallocate-psi (psi)
(unless (deallocated-psi? psi)
(setq *next-psi*
(kcons psi *next-individual*))
(if (eq psi (first *active-psi*))
(setq *active-psi* (cdr *active-psi*))
(let* ((prior-cell *active-psi*)
(next-cell (cdr *active-psi*))
(next-psi (car next-cell)))
(loop
(when (null next-psi)
(break "Couldn't find ~A amoung the active psi" psi)
(return-from deallocate-psi nil))
(when (eq next-psi psi)
(rplacd prior-cell
(cdr next-cell))
(deallocate-kons next-cell)
(return))
(setq prior-cell next-cell
next-cell (cdr next-cell)
next-individual (car next-cell)))))
do n't zero its fields until it 's allocated again
(setf (unit-plist psi) `(:deallocated t ,@(unit-plist psi)))
indiv ))
(defun initialize-fields/psi (i)
(initialize-fields/individual i)
(setf (psi-lp i) nil)
(setf (psi-v+v i) nil)
(setf (psi-downlinks i) nil)
(setf (psi-source i) nil)
(setf (psi-path i) nil)
i)
(defun deallocated-psi? (i)
(member :deallocated (unit-plist i)))
(defun initialize-psi-resource ()
(setq *next-psi* nil)
(allocate-a-rasher-of-psi
*number-of-psi-in-initial-allocation*))
(defun allocate-a-rasher-of-psi
( &optional (max *number-of-psi-per-increment*))
(let ((ptr *next-psi*))
(dotimes (i max)
(setq ptr
(kcons (make-psi
:type :never-used
:plist `(:uid ,(incf *psi-count*)))
ptr)))
(setq *next-psi* ptr)))
(defun psi-object# (n)
(find n *active-psi*
:key #'(lambda (i)
(cadr (member :uid (unit-plist i))))))
|
a61ed5a7aebecbe0b1e0c659ba470d1466f978103efcf1c8c1bab50c30d6ee21 | williamleferrand/aws | dynamo.ml | module M = Dynamo_factory.Make (Http_client10)
include M
| null | https://raw.githubusercontent.com/williamleferrand/aws/d591ef0a2b89082caac6ddd6850b2d8b7824e577/src/cohttp/dynamo.ml | ocaml | module M = Dynamo_factory.Make (Http_client10)
include M
| |
3ebe0dbe755ecdb79357b13c5819beba53860f9c3005c51c0057fffcdd2d3da7 | chetmurthy/typpx | forge.mli | * This module provides convenient functions to build .
This does not cover all the construction ... yet .
This does not cover all the construction... yet.
*)
open Asttypes
open Typedtree
open Types
val default_loc : Location.t ref
val with_loc : Location.t -> (unit -> 'a) -> 'a
(** Set [default_loc] and run a function *)
val loc : 'a -> 'a Location.loc
module Dummy : sig
(** Dummy builder. The data is set to some default and you need to override
some of the fields. *)
val type_expr : type_expr
val env : Env.t
val value_description : unit -> value_description
val exp_desc : expression_desc
val exp : unit -> expression
val mod_type : module_type
val structure_item : unit -> structure_item
end
val lidentloc_of_path : Path.t -> Longident.t Location.loc
module Path : sig
type t = Path.t
val of_lident : Longident.t -> t
end
module Typ : sig
open Types
val arrow : ?label:arg_label -> type_expr -> type_expr -> type_expr
end
module Exp : sig
val untyped : Parsetree.expression -> expression
* [ untyped ] is to embed an untyped AST in an typed AST . The embeded untyped AST
will be kept as is when the typed one is untyped .
will be kept as is when the typed one is untyped.
*)
val ident : Path.t -> expression
val let_ :
?recursive:bool ->
value_binding list ->
expression -> expression
val letmodule :
Ident.t option ->
module_presence ->
module_expr -> expression -> expression
val app :
expression ->
(arg_label * expression) list -> expression
val ignore : expression -> expression
* [ ignore e ] creates [ Pervasives.ignore < e > ] .
No check of [ Pervasives ] is the really [ Pervasives ] .
No check of [Pervasives] is the really OCaml stdlib's [Pervasives].
*)
val fun_ : ?label:arg_label -> pattern -> expression -> expression
val tuple : expression list -> expression
val with_env : Env.t -> expression -> expression
(** Override expression's type environment field *)
val none : ?ty: type_expr -> Env.t -> expression
(** Build [None] of the given content type. If [ty] is omitted
the container type is [Dummy.type_expr].
Raises [Assert_failure] when [None] is not accessible in the environment.
*)
val some : Env.t -> expression -> expression
(** Build [Some e] of the given expression.
Raises [Assert_failure] when [Some] is not accessible in the environment.
*)
val list : Env.t -> expression list -> expression
* Build the list of given expressions . The container type is
[ t list ] where [ t ] is the type of the first expression .
If no type is given , [ Dummy.type_expr ] is used .
Raises [ Assert_failure ] when either [ (: :) ] and [ [ ] ] is
not accessible in the environment .
[t list] where [t] is the type of the first expression.
If no type is given, [Dummy.type_expr] is used.
Raises [Assert_failure] when either [(::)] and [[]] is
not accessible in the environment.
*)
val mark : string -> expression -> expression
(** Add [@<string>] to the expression *)
val partition_marks : expression -> (string -> bool) -> string list * expression
(** Filter out matching [@<string>] attributes from the given expression. *)
end
module Pat : sig
val desc : pattern_desc -> pattern
val var : Ident.t -> pattern
end
module MB : sig
val module_binding :
Ident.t option -> module_presence -> module_expr -> module_binding
end
module Mod : sig
val of_module_expr_desc :
module_expr_desc -> module_expr
val ident : Path.t -> module_expr
val unpack : expression -> module_expr
end
| null | https://raw.githubusercontent.com/chetmurthy/typpx/a740750b75739e686da49b46ded7db7d6874e108/src/forge.mli | ocaml | * Set [default_loc] and run a function
* Dummy builder. The data is set to some default and you need to override
some of the fields.
* Override expression's type environment field
* Build [None] of the given content type. If [ty] is omitted
the container type is [Dummy.type_expr].
Raises [Assert_failure] when [None] is not accessible in the environment.
* Build [Some e] of the given expression.
Raises [Assert_failure] when [Some] is not accessible in the environment.
* Add [@<string>] to the expression
* Filter out matching [@<string>] attributes from the given expression. | * This module provides convenient functions to build .
This does not cover all the construction ... yet .
This does not cover all the construction... yet.
*)
open Asttypes
open Typedtree
open Types
val default_loc : Location.t ref
val with_loc : Location.t -> (unit -> 'a) -> 'a
val loc : 'a -> 'a Location.loc
module Dummy : sig
val type_expr : type_expr
val env : Env.t
val value_description : unit -> value_description
val exp_desc : expression_desc
val exp : unit -> expression
val mod_type : module_type
val structure_item : unit -> structure_item
end
val lidentloc_of_path : Path.t -> Longident.t Location.loc
module Path : sig
type t = Path.t
val of_lident : Longident.t -> t
end
module Typ : sig
open Types
val arrow : ?label:arg_label -> type_expr -> type_expr -> type_expr
end
module Exp : sig
val untyped : Parsetree.expression -> expression
* [ untyped ] is to embed an untyped AST in an typed AST . The embeded untyped AST
will be kept as is when the typed one is untyped .
will be kept as is when the typed one is untyped.
*)
val ident : Path.t -> expression
val let_ :
?recursive:bool ->
value_binding list ->
expression -> expression
val letmodule :
Ident.t option ->
module_presence ->
module_expr -> expression -> expression
val app :
expression ->
(arg_label * expression) list -> expression
val ignore : expression -> expression
* [ ignore e ] creates [ Pervasives.ignore < e > ] .
No check of [ Pervasives ] is the really [ Pervasives ] .
No check of [Pervasives] is the really OCaml stdlib's [Pervasives].
*)
val fun_ : ?label:arg_label -> pattern -> expression -> expression
val tuple : expression list -> expression
val with_env : Env.t -> expression -> expression
val none : ?ty: type_expr -> Env.t -> expression
val some : Env.t -> expression -> expression
val list : Env.t -> expression list -> expression
* Build the list of given expressions . The container type is
[ t list ] where [ t ] is the type of the first expression .
If no type is given , [ Dummy.type_expr ] is used .
Raises [ Assert_failure ] when either [ (: :) ] and [ [ ] ] is
not accessible in the environment .
[t list] where [t] is the type of the first expression.
If no type is given, [Dummy.type_expr] is used.
Raises [Assert_failure] when either [(::)] and [[]] is
not accessible in the environment.
*)
val mark : string -> expression -> expression
val partition_marks : expression -> (string -> bool) -> string list * expression
end
module Pat : sig
val desc : pattern_desc -> pattern
val var : Ident.t -> pattern
end
module MB : sig
val module_binding :
Ident.t option -> module_presence -> module_expr -> module_binding
end
module Mod : sig
val of_module_expr_desc :
module_expr_desc -> module_expr
val ident : Path.t -> module_expr
val unpack : expression -> module_expr
end
|
44e37b1bb4a7468a99ca8f0f4768d2d5cfb012ee86722e63e3091cb174768815 | smeruelo/mooc-ocaml | w6_3.1_type_abstraction.ml | module Exp : sig
type e
val int : int -> e
val mul : e -> e -> e
val add : e -> e -> e
val to_string : e -> string
end = struct
type e = EInt of int | EMul of e * e | EAdd of e * e
let int x = EInt x
let mul a b =
match a, b with
| EInt 0, _ | _, EInt 0 -> EInt 0
| EInt 1, e | e, EInt 1 -> e
| a, b -> EMul (a, b)
let add a b =
match a, b with
| EInt 0, e | e, EInt 0 -> e
| a, b -> EAdd (a, b)
let rec to_string = function
| EInt i -> string_of_int i
| EMul (l, r) -> "(" ^ to_string l ^ " * " ^ to_string r ^ ")"
| EAdd (l, r) -> "(" ^ to_string l ^ " + " ^ to_string r ^ ")"
end
| null | https://raw.githubusercontent.com/smeruelo/mooc-ocaml/8e2efb1632ec9dd381489a08465d5341a6c727c9/week6/w6_3.1_type_abstraction.ml | ocaml | module Exp : sig
type e
val int : int -> e
val mul : e -> e -> e
val add : e -> e -> e
val to_string : e -> string
end = struct
type e = EInt of int | EMul of e * e | EAdd of e * e
let int x = EInt x
let mul a b =
match a, b with
| EInt 0, _ | _, EInt 0 -> EInt 0
| EInt 1, e | e, EInt 1 -> e
| a, b -> EMul (a, b)
let add a b =
match a, b with
| EInt 0, e | e, EInt 0 -> e
| a, b -> EAdd (a, b)
let rec to_string = function
| EInt i -> string_of_int i
| EMul (l, r) -> "(" ^ to_string l ^ " * " ^ to_string r ^ ")"
| EAdd (l, r) -> "(" ^ to_string l ^ " + " ^ to_string r ^ ")"
end
| |
6e6e8e197cd8e2d3330f3f16a881ef5ceab7b3e4bbbdcb4ee04acd1a9b08dc11 | kowainik/tomland | Edsl.hs | |
Module : Toml . Type . : ( c ) 2018 - 2022 Kowainik
SPDX - License - Identifier : MPL-2.0
Maintainer : < >
Stability : Stable
Portability : Portable
This module introduces EDSL for manually specifying ' TOML ' data types .
Consider the following raw TOML :
@
key1 = 1
key2 = true
[ meme - quotes ]
= [ \"Oh\ " , , \"Mark\ " ]
[ [ arrayName ] ]
elem1 = " yes "
[ [ arrayName ] ]
[ arrayName.elem2 ]
deep = 7
[ [ arrayName ] ]
@
using functions from this module you can specify the above TOML in safer way :
@
exampleToml : : ' TOML '
exampleToml = ' mkToml ' $ _ _ do _ _
\"key1\ " ' = : ' 1
\"key2\ " ' = : ' Bool True
' table ' \"meme - quotes\ " $
\"quote1\ " ' = : ' Array [ \"Oh\ " , , \"Mark\ " ]
' tableArray ' \"arrayName\ " $
\"elem1\ " ' = : ' \"yes\ " :|
[ ' table ' \"elem2\ " $ \"deep\ " ' = : ' Integer 7
, ' empty '
]
@
@since 0.3
Module : Toml.Type.Edsl
Copyright : (c) 2018-2022 Kowainik
SPDX-License-Identifier : MPL-2.0
Maintainer : Kowainik <>
Stability : Stable
Portability : Portable
This module introduces EDSL for manually specifying 'TOML' data types.
Consider the following raw TOML:
@
key1 = 1
key2 = true
[meme-quotes]
quote1 = [ \"Oh\", \"Hi\", \"Mark\" ]
[[arrayName]]
elem1 = "yes"
[[arrayName]]
[arrayName.elem2]
deep = 7
[[arrayName]]
@
using functions from this module you can specify the above TOML in safer way:
@
exampleToml :: 'TOML'
exampleToml = 'mkToml' $ __do__
\"key1\" '=:' 1
\"key2\" '=:' Bool True
'table' \"meme-quotes\" $
\"quote1\" '=:' Array [\"Oh\", \"Hi\", \"Mark\"]
'tableArray' \"arrayName\" $
\"elem1\" '=:' \"yes\" :|
[ 'table' \"elem2\" $ \"deep\" '=:' Integer 7
, 'empty'
]
@
@since 0.3
-}
module Toml.Type.Edsl
( TDSL
, mkToml
, empty
, (=:)
, table
, tableArray
) where
import Control.Monad.State (State, execState, modify, put)
import Data.List.NonEmpty (NonEmpty)
import Toml.Type.Key (Key)
import Toml.Type.TOML (TOML (..), insertKeyVal, insertTable, insertTableArrays)
import Toml.Type.Value (Value)
| Monad for creating TOML .
@since 0.3
@since 0.3
-}
type TDSL = State TOML ()
| Creates ' TOML ' from the ' TDSL ' .
@since 0.3
@since 0.3
-}
mkToml :: TDSL -> TOML
mkToml env = execState env mempty
# INLINE mkToml #
| Creates an empty ' TDSL ' .
@since 0.3
@since 0.3
-}
empty :: TDSL
empty = put mempty
{-# INLINE empty #-}
| Adds key - value pair to the ' TDSL ' .
@since 0.3
@since 0.3
-}
(=:) :: Key -> Value a -> TDSL
(=:) k v = modify $ insertKeyVal k v
# INLINE (= :) #
| Adds table to the ' TDSL ' .
@since 0.3
@since 0.3
-}
table :: Key -> TDSL -> TDSL
table k = modify . insertTable k . mkToml
# INLINE table #
{- | Adds array of tables to the 'TDSL'.
@since 1.0.0
-}
tableArray :: Key -> NonEmpty TDSL -> TDSL
tableArray k = modify . insertTableArrays k . fmap mkToml
# INLINE tableArray #
| null | https://raw.githubusercontent.com/kowainik/tomland/561aefdbcf177498c06e6c6fcee2b3fe299b3af6/src/Toml/Type/Edsl.hs | haskell | # INLINE empty #
| Adds array of tables to the 'TDSL'.
@since 1.0.0
| |
Module : Toml . Type . : ( c ) 2018 - 2022 Kowainik
SPDX - License - Identifier : MPL-2.0
Maintainer : < >
Stability : Stable
Portability : Portable
This module introduces EDSL for manually specifying ' TOML ' data types .
Consider the following raw TOML :
@
key1 = 1
key2 = true
[ meme - quotes ]
= [ \"Oh\ " , , \"Mark\ " ]
[ [ arrayName ] ]
elem1 = " yes "
[ [ arrayName ] ]
[ arrayName.elem2 ]
deep = 7
[ [ arrayName ] ]
@
using functions from this module you can specify the above TOML in safer way :
@
exampleToml : : ' TOML '
exampleToml = ' mkToml ' $ _ _ do _ _
\"key1\ " ' = : ' 1
\"key2\ " ' = : ' Bool True
' table ' \"meme - quotes\ " $
\"quote1\ " ' = : ' Array [ \"Oh\ " , , \"Mark\ " ]
' tableArray ' \"arrayName\ " $
\"elem1\ " ' = : ' \"yes\ " :|
[ ' table ' \"elem2\ " $ \"deep\ " ' = : ' Integer 7
, ' empty '
]
@
@since 0.3
Module : Toml.Type.Edsl
Copyright : (c) 2018-2022 Kowainik
SPDX-License-Identifier : MPL-2.0
Maintainer : Kowainik <>
Stability : Stable
Portability : Portable
This module introduces EDSL for manually specifying 'TOML' data types.
Consider the following raw TOML:
@
key1 = 1
key2 = true
[meme-quotes]
quote1 = [ \"Oh\", \"Hi\", \"Mark\" ]
[[arrayName]]
elem1 = "yes"
[[arrayName]]
[arrayName.elem2]
deep = 7
[[arrayName]]
@
using functions from this module you can specify the above TOML in safer way:
@
exampleToml :: 'TOML'
exampleToml = 'mkToml' $ __do__
\"key1\" '=:' 1
\"key2\" '=:' Bool True
'table' \"meme-quotes\" $
\"quote1\" '=:' Array [\"Oh\", \"Hi\", \"Mark\"]
'tableArray' \"arrayName\" $
\"elem1\" '=:' \"yes\" :|
[ 'table' \"elem2\" $ \"deep\" '=:' Integer 7
, 'empty'
]
@
@since 0.3
-}
module Toml.Type.Edsl
( TDSL
, mkToml
, empty
, (=:)
, table
, tableArray
) where
import Control.Monad.State (State, execState, modify, put)
import Data.List.NonEmpty (NonEmpty)
import Toml.Type.Key (Key)
import Toml.Type.TOML (TOML (..), insertKeyVal, insertTable, insertTableArrays)
import Toml.Type.Value (Value)
| Monad for creating TOML .
@since 0.3
@since 0.3
-}
type TDSL = State TOML ()
| Creates ' TOML ' from the ' TDSL ' .
@since 0.3
@since 0.3
-}
mkToml :: TDSL -> TOML
mkToml env = execState env mempty
# INLINE mkToml #
| Creates an empty ' TDSL ' .
@since 0.3
@since 0.3
-}
empty :: TDSL
empty = put mempty
| Adds key - value pair to the ' TDSL ' .
@since 0.3
@since 0.3
-}
(=:) :: Key -> Value a -> TDSL
(=:) k v = modify $ insertKeyVal k v
# INLINE (= :) #
| Adds table to the ' TDSL ' .
@since 0.3
@since 0.3
-}
table :: Key -> TDSL -> TDSL
table k = modify . insertTable k . mkToml
# INLINE table #
tableArray :: Key -> NonEmpty TDSL -> TDSL
tableArray k = modify . insertTableArrays k . fmap mkToml
# INLINE tableArray #
|
5651cd097a4063e3f4bf707e1e91e6eea4dd4f10618c2499fd1012cc0bcf79db | elli-lib/elli | elli_example_callback_handover.erl | -module(elli_example_callback_handover).
-export([init/2, handle/2, handle_event/3]).
-include("elli_util.hrl").
-behaviour(elli_handler).
%% @doc Return `{ok, handover}' if `Req''s path is `/hello/world',
%% otherwise `ignore'.
init(Req, _Args) ->
case elli_request:path(Req) of
[<<"hello">>, <<"world">>] ->
{ok, handover};
_ ->
ignore
end.
TODO : write docstring
-spec handle(Req, Args) -> Result when
Req :: elli:req(),
Args :: elli_handler:callback_args(),
Result :: elli_handler:result().
handle(Req, Args) ->
handle(elli_request:method(Req), elli_request:path(Req), Req, Args).
handle('GET', [<<"hello">>, <<"world">>], Req, _Args) ->
Body = <<"Hello World!">>,
Size = integer_to_binary(size(Body)),
Headers = [{"Connection", "close"}, {"Content-Length", Size}],
elli_http:send_response(Req, 200, Headers, Body),
{close, <<>>};
handle('GET', [<<"hello">>], Req, _Args) ->
%% Fetch a GET argument from the URL.
Name = elli_request:get_arg(<<"name">>, Req, <<"undefined">>),
{ok, [], <<"Hello ", Name/binary>>}.
%% @hidden
handle_event(_, _, _) ->
ok.
| null | https://raw.githubusercontent.com/elli-lib/elli/2f2fafb77c67244ba6237ca6b3c7238ff886c478/src/elli_example_callback_handover.erl | erlang | @doc Return `{ok, handover}' if `Req''s path is `/hello/world',
otherwise `ignore'.
Fetch a GET argument from the URL.
@hidden | -module(elli_example_callback_handover).
-export([init/2, handle/2, handle_event/3]).
-include("elli_util.hrl").
-behaviour(elli_handler).
init(Req, _Args) ->
case elli_request:path(Req) of
[<<"hello">>, <<"world">>] ->
{ok, handover};
_ ->
ignore
end.
TODO : write docstring
-spec handle(Req, Args) -> Result when
Req :: elli:req(),
Args :: elli_handler:callback_args(),
Result :: elli_handler:result().
handle(Req, Args) ->
handle(elli_request:method(Req), elli_request:path(Req), Req, Args).
handle('GET', [<<"hello">>, <<"world">>], Req, _Args) ->
Body = <<"Hello World!">>,
Size = integer_to_binary(size(Body)),
Headers = [{"Connection", "close"}, {"Content-Length", Size}],
elli_http:send_response(Req, 200, Headers, Body),
{close, <<>>};
handle('GET', [<<"hello">>], Req, _Args) ->
Name = elli_request:get_arg(<<"name">>, Req, <<"undefined">>),
{ok, [], <<"Hello ", Name/binary>>}.
handle_event(_, _, _) ->
ok.
|
573d1f5ede09154820d153761058ccdb99b1fbd5cafeb1ff5bfd1cb7c8405744 | ibawt/tabby | cluster_test.clj | (ns tabby.cluster-test
(:require [clojure.test :refer :all]
[clojure.tools.logging :refer :all]
[clojure.pprint :refer [pprint]]
[tabby.cluster :refer :all]
[tabby.utils :as utils]
[tabby.server :as server]))
(defn- fields-by-id [cluster field]
(map field (vals (sort (:servers cluster)))))
(defn- print-fields [c & fields]
(pprint (ps c))
c)
(defn- c-trace [msg c]
(println msg)
c)
(defn- s-at [i]
(str i ".localnet:" i))
(defn test-cluster [n]
(-> (create 8090 n)
(assoc-in [:servers "0.localnet:0" :election-timeout] 0)
(assoc-in [:servers (s-at 1) :election-timeout-fn] (constantly 150))
(assoc-in [:servers (s-at 2) :election-timeout-fn] (constantly 300))))
(defn create-and-elect []
(until-empty (step 20 (until-empty (test-cluster 3)))))
(deftest simple-things
(testing "everyone's type"
(let [s (create 80 3)]
(is (= '(:follower :follower :follower) (fields-by-id s :type)))))
(testing "first election"
(let [s (step 0 (test-cluster 3))]
(is (= :candidate (get-in s [:servers (s-at 0) :type])))
(is (= :follower (get-in s [:servers (s-at 1) :type])))
(is (= :follower (get-in s [:servers (s-at 2) :type])))
(is (> (:election-timeout (get-in s [:servers (s-at 0)])) 0))))
(testing "1 - 2 vote"
(let [s (until-empty (step 50 (create-and-elect)))]
(is (= '(:leader :follower :follower) (fields-by-id s :type)))
(is (= '(2 2 2) (fields-by-id s :commit-index))))))
(defn sort-queue [q]
(sort-by :dst q))
(deftest full-write-in-detail
(testing "initial state"
(let [s (test-cluster 3)]
(is (= '(0 0 0) (fields-by-id s :commit-index)))
(is (= '(0 0 0) (fields-by-id s :current-term)))
(is (= '(0 0 0) (fields-by-id s :last-applied)))))
(testing "step 1 - become candidate & send request-vote"
step 1 become candidate & send request - vote
(is (= '(:candidate :follower :follower) (fields-by-id s1 :type)))
(is (= '(1 0 0) (fields-by-id s1 :current-term)))
(is (= '({:dst "1.localnet:1" :src "0.localnet:0" :type :request-vote
:body {:term 1 :candidate-id "0.localnet:0", :prev-log-index 1
:prev-log-term 0}}
{:dst "2.localnet:2" :src "0.localnet:0" :type :request-vote
:body {:term 1 :candidate-id "0.localnet:0", :prev-log-index 1
:prev-log-term 0}}) (sort-queue (:tx-queue (get (:servers s1) (s-at 0))))))))
(testing "step 2 - peers respond to request vote"
(let [s2 (->> (test-cluster 3)
(step 0)
(step 0))]
;; others respond to request-vote
(is (= '(1 1 1) (fields-by-id s2 :current-term)))
(is (= '("0.localnet:0" "0.localnet:0" "0.localnet:0") (fields-by-id s2 :voted-for)))
(is (= '({:dst "0.localnet:0" :src "1.localnet:1" :type :request-vote-reply
:body {:term 1 :vote-granted? true}}) (:tx-queue (get (:servers s2) (s-at 1))) ))
(is (= '({:dst "0.localnet:0" :src "2.localnet:2" :type :request-vote-reply
:body {:term 1 :vote-granted? true}}) (:tx-queue (get (:servers s2) (s-at 2)))))))
(testing "step 3 - become leader, send heart beat"
(let [s (->> (test-cluster 3)
(step 0)
(step 0)
(step 0))]
(is (= '(:leader :follower :follower) (fields-by-id s :type)))
(is (= (list {:dst (s-at 1) :type :append-entries :src (s-at 0)
:body {:term 1 :leader-id (s-at 0)
:prev-log-index 1 :prev-log-term 0
:entries [{:term 1, :cmd {:op :noop}}] :leader-commit 0}}
{:dst (s-at 2) :type :append-entries :src (s-at 0)
:body {:term 1 :leader-id (s-at 0)
:prev-log-index 1 :prev-log-term 0
:entries [{:term 1, :cmd {:op :noop}}] :leader-commit 0}})
(sort-by :dst (:tx-queue (srv s (s-at 0))))))
(is (= {(s-at 2) 0 (s-at 1) 0} (:match-index (srv s (s-at 0)))))
(is (= {(s-at 2) 2 (s-at 1) 2} (:next-index (srv s (s-at 0)))))))
(testing "step 4 - process heart beat responses"
(let [s (->> (test-cluster 3)
(step-times 0 4))]
(is (= (list {:dst (s-at 0) :src (s-at 1) :type :append-entries-response
:body {:term 1 :success true :count 1}}) (:tx-queue (srv s (s-at 1)))))
(is (= (list {:dst (s-at 0) :src (s-at 2) :type :append-entries-response
:body {:term 1 :success true :count 1}}) (:tx-queue (srv s (s-at 2)))))))
(testing "step 5 - heart beat response"
(let [s (->> (test-cluster 3)
(step-times 0 5))]
(is (= {(s-at 2) 2 (s-at 1) 2} (:match-index (srv s (s-at 0)))))))
(testing "step 7 wait for commit index"
(let [s (->> (create-and-elect)
(write {:a "a"})
(until-empty)
(step 75)
(until-empty))]
(is (= '(3 3 3) (fields-by-id s :commit-index))))))
(deftest test-election-responses
(testing "election with one server not responding"
(let [s (->> (test-cluster 3)
(add-packet-loss (s-at 0) (s-at 1))
(step-times 0 3))]
(is (= '(:leader :follower :follower) (fields-by-id s :type)))))
(testing "election with two servers not responding, (election should fail)"
(let [s (->> (test-cluster 3)
(add-packet-loss (s-at 1) (s-at 0))
(add-packet-loss (s-at 2) (s-at 0))
(step-times 0 3))]
(is (= '(:candidate :follower :follower) (fields-by-id s :type)))))
(testing "election requests from out of date candidates"
;; we should detect that the client term is greater than ours
;; convert to follower and increment current-term
(let [s (-> (test-cluster 3)
(assoc-in [:servers (s-at 1) :current-term] 2)
(assoc-in [:servers (s-at 2) :current-term] 2)
((partial step-times 0 3)))]
(is (= '(:follower :follower :follower) (fields-by-id s :type)))
(is (= 2 (get-in s [:servers (s-at 0) :current-term]))))))
(defn- packets-from [server from-id]
(filter (fn [p]
(= (:src p) from-id)) (:rx-queue server)))
(deftest packet-filtering-test
(testing "packet filtering"
(let [s (-> (create 8090 3)
(assoc-in [:servers (s-at 0) :tx-queue]
`({:dst ~(s-at 1) :src ~(s-at 0)}
{:dst ~(s-at 2) :src ~(s-at 0)}))
(assoc-in [:servers (s-at 1) :tx-queue]
`({:dst ~(s-at 0) :src ~(s-at 1) :foo :bar}))
(kill-server (s-at 0))
(pump-transmit-queues))]
(is (= 0 (count (get-in s [:servers (s-at 0) :rx-queue]))))
(is (= 0 (count (packets-from (get-in s [:servers (s-at 1)]) (s-at 0)))))
(is (= 0 (count (packets-from (get-in s [:servers (s-at 2)]) (s-at 0))))))))
(defn testy []
(->> (create-and-elect)
(write {:a "a"})
(step 50)
(until-empty)
(step 50)
(until-empty)))
(defn server-types
"returns a set of the server types"
[s]
(into #{} (map (comp :type second) (filter #(not= (s-at 0) (first %)) (:servers s)))))
(deftest leadership-change
(testing "a new leader should be chosen"
;; FIXME: we should rebind the random-election-timeout
;; to make this not so hand-wavy
;; (pprint (ps (testy)))
(let [s (-> (testy)
(kill-server (s-at 0))
(assoc-in [:servers (s-at 1) :election-timeout] 0)
(step-until-empty 0))]
(is (= #{:leader :follower} (server-types s))))))
(deftest test-log-catch-up
(testing "log is missing 1"
(let [s (-> (create-and-elect)
(kill-server (s-at 1))
(#(write {:a "a"} %))
(until-empty)
(step-until-empty 50))]
(is (= '(3 2 3) (map count (fields-by-id s :log))))
(is (= '(3 0 3) (fields-by-id s :commit-index)))
(is (= '({:a "a"} {} {:a "a"}) (fields-by-id s :db)))
(let [s1 (-> (clear-packet-loss s)
(step-until-empty 50) ;; under election timeout
(step-until-empty 50))]
(is (= '(3 3 3) (fields-by-id s1 :last-applied))) ;; TODO: revisit this assertion
(is (= '(3 3 3) (fields-by-id s1 :commit-index)))
(is (= '({:a "a"} {:a "a"} {:a "a"}) (fields-by-id s1 :db)))))))
(deftest test-bigger-cluster
(testing "election"
(let [s (-> (test-cluster 5) (until-empty))]
(is (= '(:leader :follower :follower :follower :follower) (fields-by-id s :type)))))
(testing "write"
(let [s (->> (test-cluster 5)
(until-empty)
(write {:a "a"})
(until-empty)
(step 75)
(until-empty))]
(is (= (take 5 (repeat {:a "a"})) (fields-by-id s :db)))))
(testing "missing two"
(let [s (->> (test-cluster 5)
(until-empty)
(add-packet-loss (s-at 0) (s-at 1))
(add-packet-loss (s-at 0) (s-at 2))
(write {:a "a"})
(until-empty)
(step 75)
(until-empty)
(step 75)
(until-empty))]
(is (= '({:a "a"} {} {} {:a "a"} {:a "a"}) (fields-by-id s :db)))))
(testing "missing 3 - no quorum"
(let [s (->> (test-cluster 5)
(until-empty)
(add-packet-loss (s-at 0) (s-at 1))
(add-packet-loss (s-at 0) (s-at 2))
(add-packet-loss (s-at 0) (s-at 3))
(write {:a "a"})
(until-empty)
(step 10)
(until-empty)
(step 10)
(until-empty))]
(is (= '(3 2 2 2 3) (map count (fields-by-id s :log))))
(is (= '(2 0 0 0 2) (fields-by-id s :last-applied)))
(is (= '({} {} {} {} {}) (fields-by-id s :db))))))
(deftest test-write-no-response
(testing "shouldn't write the same log entry over if the same one is sent"
(let [s (->> (test-cluster 3)
(until-empty)
(add-packet-loss (s-at 0) (s-at 1))
(write {:a "a"})
(until-empty)
(step 50)
(until-empty)
(step 50)
(until-empty))]
(is (= '(3 2 3) (map count (fields-by-id s :log))))
(is (= '({:a "a"} {} {:a "a"}) (fields-by-id s :db))))))
| null | https://raw.githubusercontent.com/ibawt/tabby/19601e406ee097663b5d45d92c2ae53318baa9c3/test/tabby/cluster_test.clj | clojure | others respond to request-vote
we should detect that the client term is greater than ours
convert to follower and increment current-term
FIXME: we should rebind the random-election-timeout
to make this not so hand-wavy
(pprint (ps (testy)))
under election timeout
TODO: revisit this assertion | (ns tabby.cluster-test
(:require [clojure.test :refer :all]
[clojure.tools.logging :refer :all]
[clojure.pprint :refer [pprint]]
[tabby.cluster :refer :all]
[tabby.utils :as utils]
[tabby.server :as server]))
(defn- fields-by-id [cluster field]
(map field (vals (sort (:servers cluster)))))
(defn- print-fields [c & fields]
(pprint (ps c))
c)
(defn- c-trace [msg c]
(println msg)
c)
(defn- s-at [i]
(str i ".localnet:" i))
(defn test-cluster [n]
(-> (create 8090 n)
(assoc-in [:servers "0.localnet:0" :election-timeout] 0)
(assoc-in [:servers (s-at 1) :election-timeout-fn] (constantly 150))
(assoc-in [:servers (s-at 2) :election-timeout-fn] (constantly 300))))
(defn create-and-elect []
(until-empty (step 20 (until-empty (test-cluster 3)))))
(deftest simple-things
(testing "everyone's type"
(let [s (create 80 3)]
(is (= '(:follower :follower :follower) (fields-by-id s :type)))))
(testing "first election"
(let [s (step 0 (test-cluster 3))]
(is (= :candidate (get-in s [:servers (s-at 0) :type])))
(is (= :follower (get-in s [:servers (s-at 1) :type])))
(is (= :follower (get-in s [:servers (s-at 2) :type])))
(is (> (:election-timeout (get-in s [:servers (s-at 0)])) 0))))
(testing "1 - 2 vote"
(let [s (until-empty (step 50 (create-and-elect)))]
(is (= '(:leader :follower :follower) (fields-by-id s :type)))
(is (= '(2 2 2) (fields-by-id s :commit-index))))))
(defn sort-queue [q]
(sort-by :dst q))
(deftest full-write-in-detail
(testing "initial state"
(let [s (test-cluster 3)]
(is (= '(0 0 0) (fields-by-id s :commit-index)))
(is (= '(0 0 0) (fields-by-id s :current-term)))
(is (= '(0 0 0) (fields-by-id s :last-applied)))))
(testing "step 1 - become candidate & send request-vote"
step 1 become candidate & send request - vote
(is (= '(:candidate :follower :follower) (fields-by-id s1 :type)))
(is (= '(1 0 0) (fields-by-id s1 :current-term)))
(is (= '({:dst "1.localnet:1" :src "0.localnet:0" :type :request-vote
:body {:term 1 :candidate-id "0.localnet:0", :prev-log-index 1
:prev-log-term 0}}
{:dst "2.localnet:2" :src "0.localnet:0" :type :request-vote
:body {:term 1 :candidate-id "0.localnet:0", :prev-log-index 1
:prev-log-term 0}}) (sort-queue (:tx-queue (get (:servers s1) (s-at 0))))))))
(testing "step 2 - peers respond to request vote"
(let [s2 (->> (test-cluster 3)
(step 0)
(step 0))]
(is (= '(1 1 1) (fields-by-id s2 :current-term)))
(is (= '("0.localnet:0" "0.localnet:0" "0.localnet:0") (fields-by-id s2 :voted-for)))
(is (= '({:dst "0.localnet:0" :src "1.localnet:1" :type :request-vote-reply
:body {:term 1 :vote-granted? true}}) (:tx-queue (get (:servers s2) (s-at 1))) ))
(is (= '({:dst "0.localnet:0" :src "2.localnet:2" :type :request-vote-reply
:body {:term 1 :vote-granted? true}}) (:tx-queue (get (:servers s2) (s-at 2)))))))
(testing "step 3 - become leader, send heart beat"
(let [s (->> (test-cluster 3)
(step 0)
(step 0)
(step 0))]
(is (= '(:leader :follower :follower) (fields-by-id s :type)))
(is (= (list {:dst (s-at 1) :type :append-entries :src (s-at 0)
:body {:term 1 :leader-id (s-at 0)
:prev-log-index 1 :prev-log-term 0
:entries [{:term 1, :cmd {:op :noop}}] :leader-commit 0}}
{:dst (s-at 2) :type :append-entries :src (s-at 0)
:body {:term 1 :leader-id (s-at 0)
:prev-log-index 1 :prev-log-term 0
:entries [{:term 1, :cmd {:op :noop}}] :leader-commit 0}})
(sort-by :dst (:tx-queue (srv s (s-at 0))))))
(is (= {(s-at 2) 0 (s-at 1) 0} (:match-index (srv s (s-at 0)))))
(is (= {(s-at 2) 2 (s-at 1) 2} (:next-index (srv s (s-at 0)))))))
(testing "step 4 - process heart beat responses"
(let [s (->> (test-cluster 3)
(step-times 0 4))]
(is (= (list {:dst (s-at 0) :src (s-at 1) :type :append-entries-response
:body {:term 1 :success true :count 1}}) (:tx-queue (srv s (s-at 1)))))
(is (= (list {:dst (s-at 0) :src (s-at 2) :type :append-entries-response
:body {:term 1 :success true :count 1}}) (:tx-queue (srv s (s-at 2)))))))
(testing "step 5 - heart beat response"
(let [s (->> (test-cluster 3)
(step-times 0 5))]
(is (= {(s-at 2) 2 (s-at 1) 2} (:match-index (srv s (s-at 0)))))))
(testing "step 7 wait for commit index"
(let [s (->> (create-and-elect)
(write {:a "a"})
(until-empty)
(step 75)
(until-empty))]
(is (= '(3 3 3) (fields-by-id s :commit-index))))))
(deftest test-election-responses
(testing "election with one server not responding"
(let [s (->> (test-cluster 3)
(add-packet-loss (s-at 0) (s-at 1))
(step-times 0 3))]
(is (= '(:leader :follower :follower) (fields-by-id s :type)))))
(testing "election with two servers not responding, (election should fail)"
(let [s (->> (test-cluster 3)
(add-packet-loss (s-at 1) (s-at 0))
(add-packet-loss (s-at 2) (s-at 0))
(step-times 0 3))]
(is (= '(:candidate :follower :follower) (fields-by-id s :type)))))
(testing "election requests from out of date candidates"
(let [s (-> (test-cluster 3)
(assoc-in [:servers (s-at 1) :current-term] 2)
(assoc-in [:servers (s-at 2) :current-term] 2)
((partial step-times 0 3)))]
(is (= '(:follower :follower :follower) (fields-by-id s :type)))
(is (= 2 (get-in s [:servers (s-at 0) :current-term]))))))
(defn- packets-from [server from-id]
(filter (fn [p]
(= (:src p) from-id)) (:rx-queue server)))
(deftest packet-filtering-test
(testing "packet filtering"
(let [s (-> (create 8090 3)
(assoc-in [:servers (s-at 0) :tx-queue]
`({:dst ~(s-at 1) :src ~(s-at 0)}
{:dst ~(s-at 2) :src ~(s-at 0)}))
(assoc-in [:servers (s-at 1) :tx-queue]
`({:dst ~(s-at 0) :src ~(s-at 1) :foo :bar}))
(kill-server (s-at 0))
(pump-transmit-queues))]
(is (= 0 (count (get-in s [:servers (s-at 0) :rx-queue]))))
(is (= 0 (count (packets-from (get-in s [:servers (s-at 1)]) (s-at 0)))))
(is (= 0 (count (packets-from (get-in s [:servers (s-at 2)]) (s-at 0))))))))
(defn testy []
(->> (create-and-elect)
(write {:a "a"})
(step 50)
(until-empty)
(step 50)
(until-empty)))
(defn server-types
"returns a set of the server types"
[s]
(into #{} (map (comp :type second) (filter #(not= (s-at 0) (first %)) (:servers s)))))
(deftest leadership-change
(testing "a new leader should be chosen"
(let [s (-> (testy)
(kill-server (s-at 0))
(assoc-in [:servers (s-at 1) :election-timeout] 0)
(step-until-empty 0))]
(is (= #{:leader :follower} (server-types s))))))
(deftest test-log-catch-up
(testing "log is missing 1"
(let [s (-> (create-and-elect)
(kill-server (s-at 1))
(#(write {:a "a"} %))
(until-empty)
(step-until-empty 50))]
(is (= '(3 2 3) (map count (fields-by-id s :log))))
(is (= '(3 0 3) (fields-by-id s :commit-index)))
(is (= '({:a "a"} {} {:a "a"}) (fields-by-id s :db)))
(let [s1 (-> (clear-packet-loss s)
(step-until-empty 50))]
(is (= '(3 3 3) (fields-by-id s1 :commit-index)))
(is (= '({:a "a"} {:a "a"} {:a "a"}) (fields-by-id s1 :db)))))))
(deftest test-bigger-cluster
(testing "election"
(let [s (-> (test-cluster 5) (until-empty))]
(is (= '(:leader :follower :follower :follower :follower) (fields-by-id s :type)))))
(testing "write"
(let [s (->> (test-cluster 5)
(until-empty)
(write {:a "a"})
(until-empty)
(step 75)
(until-empty))]
(is (= (take 5 (repeat {:a "a"})) (fields-by-id s :db)))))
(testing "missing two"
(let [s (->> (test-cluster 5)
(until-empty)
(add-packet-loss (s-at 0) (s-at 1))
(add-packet-loss (s-at 0) (s-at 2))
(write {:a "a"})
(until-empty)
(step 75)
(until-empty)
(step 75)
(until-empty))]
(is (= '({:a "a"} {} {} {:a "a"} {:a "a"}) (fields-by-id s :db)))))
(testing "missing 3 - no quorum"
(let [s (->> (test-cluster 5)
(until-empty)
(add-packet-loss (s-at 0) (s-at 1))
(add-packet-loss (s-at 0) (s-at 2))
(add-packet-loss (s-at 0) (s-at 3))
(write {:a "a"})
(until-empty)
(step 10)
(until-empty)
(step 10)
(until-empty))]
(is (= '(3 2 2 2 3) (map count (fields-by-id s :log))))
(is (= '(2 0 0 0 2) (fields-by-id s :last-applied)))
(is (= '({} {} {} {} {}) (fields-by-id s :db))))))
(deftest test-write-no-response
(testing "shouldn't write the same log entry over if the same one is sent"
(let [s (->> (test-cluster 3)
(until-empty)
(add-packet-loss (s-at 0) (s-at 1))
(write {:a "a"})
(until-empty)
(step 50)
(until-empty)
(step 50)
(until-empty))]
(is (= '(3 2 3) (map count (fields-by-id s :log))))
(is (= '({:a "a"} {} {:a "a"}) (fields-by-id s :db))))))
|
515468a5302feb57740e6d6ddd2d7e2603370607eea2218e0beda16647b9269b | haskell-servant/servant-snap | SnapTestUtils.hs | {-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Servant.Utils.SnapTestUtils where
import Control.Lens (makeLenses)
import Control.Monad (forM_, unless,
void, when)
import Control.Monad.IO.Class (liftIO)
import Data.Aeson
import qualified Data.Aeson as A
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy as BL
import Data.CaseInsensitive (mk)
import Data.List (foldl')
import Data.Maybe (fromMaybe)
import Data.Proxy
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Network.HTTP.Types (hAccept,
hContentType)
import qualified Network.HTTP.Types
import Servant.API ((:<|>) (..), (:>),
BasicAuth,
Capture,
CaptureAll,
Header (..),
Headers,
IsSecure (..),
JSON,
NoContent (..),
NoFraming,
OctetStream,
PlainText,
QueryFlag,
QueryParam,
QueryParams, Raw,
RemoteHost,
ReqBody, SourceIO,
Stream, addHeader)
import Servant.API.Verbs (Delete, Get,
Patch, Post, Put,
Verb)
import Servant.Server hiding (route)
import Servant.Server.Internal (HasServer)
import Snap
import qualified Snap.Core as SC
import Snap.Snaplet
import Snap.Snaplet.Auth
import Snap.Snaplet.Auth.Backends.JsonFile
import Snap.Snaplet.Session
import Snap.Snaplet.Session.Backends.CookieSession
import qualified Snap.Test as ST
import qualified Snap.Util.CORS as CORS
import Test.Hspec
import qualified Test.HUnit as HU
data App = App { _auth :: Snaplet (AuthManager App)
, _sess :: Snaplet SessionManager}
makeLenses 'App
type AppHandler = Handler App App
app :: SnapletInit App App
app = app' []
app' :: [(B8.ByteString, AppHandler ())] -> SnapletInit App App
app' rs = makeSnaplet "servantsnap" "A test app for servant-snap" Nothing $ do
s <- nestSnaplet "sess" sess $
initCookieSessionManager "site_key.txt" "sess" Nothing (Just 3600)
a <- nestSnaplet "auth" auth $ initJsonFileAuthManager defAuthSettings sess "users.json"
addRoutes rs
wrapSite (\h -> createTestUserIfMissing >> CORS.applyCORS CORS.defaultOptions h)
return (App a s)
createTestUserIfMissing :: Handler App App ()
createTestUserIfMissing =
with auth $ usernameExists testLogin >>= \case
True -> return ()
False -> void $ createUser testLogin testPassword
testLogin = "greg"
testPassword = "p@ssword"
------------------------------------------------------------------------------
-- * Assorted Snap helpers
------------------------------------------------------------------------------
mkInitAndServer :: (HasServer api context m, m ~ AppHandler)
=> Proxy (api :: *)
-> Context context
-> Server api context (AppHandler)
-> (SnapletInit App App, AppHandler ())
mkInitAndServer api ctx serv =
let sRoute = serveSnapWithContext api ctx serv
in (app' [("", sRoute)], sRoute)
mkRequest :: Method
-> B8.ByteString
-> B8.ByteString
-> [Network.HTTP.Types.Header]
-> B8.ByteString
-> ST.RequestBuilder IO ()
mkRequest mth pth qs hds bdy = do
let ct = fromMaybe "" (Prelude.lookup hContentType hds)
ST.postRaw pth ct bdy
ST.setQueryStringRaw qs
unless (mth == SC.POST) $ ST.setRequestType (ST.RequestWithRawBody mth bdy)
forM_ hds (\(k, v) -> unless (k == hContentType) $ ST.addHeader k v)
-- req <- State.get -- Useful for debugging
liftIO $ print req
runReqOnApi :: (HasServer api context m, m ~ AppHandler)
=> Proxy (api :: *)
-> Context context
-> Server api context AppHandler
-> Method
-> B8.ByteString
-> B8.ByteString
-> [Network.HTTP.Types.Header]
-> B8.ByteString
-> IO (Either T.Text Response)
runReqOnApi api ctx serv method route qs hds bod =
let (sInit, serv') = mkInitAndServer api ctx serv
in SST.runHandler Nothing ( mkRequest method route qs hds bod ) serv ' sInit
in testSnaplet sInit (mkRequest method route qs hds bod)
routes :: (HasServer api context m, m ~ AppHandler)
=> Proxy (api :: *)
-> Context context
-> Server api context (AppHandler)
-> [(B8.ByteString, AppHandler ())]
routes p ctx s = [("", serveSnapWithContext p ctx s)]
testSnaplet :: SnapletInit b b -> ST.RequestBuilder IO () -> IO (Either T.Text Response)
testSnaplet snapletInit req = do
(_, snapm, _) <- runSnaplet Nothing snapletInit
fmap Right $ ST.runHandler req snapm
------------------------------------------------------------------------------
* hspec helpers
------------------------------------------------------------------------------
shouldHaveBody :: Either T.Text Response -> T.Text -> IO ()
shouldHaveBody (Left e) _ = HU.assertFailure $
"Failed to respond: " ++ T.unpack e
shouldHaveBody (Right r) a = do
bod <- ST.getResponseBody r
bod `shouldBe` T.encodeUtf8 a
shouldHaveStatus :: Either T.Text Response -> Int -> IO ()
shouldHaveStatus (Left e) _ = HU.assertFailure $
"Failed to respond: " ++ T.unpack e
shouldHaveStatus (Right r) a = do
SC.rspStatus r `shouldBe` a
shouldDecodeTo :: (FromJSON a, Eq a, Show a)
=> Either T.Text Response
-> a
-> IO ()
shouldDecodeTo (Left e) _ = HU.assertFailure $
"Failed to respond: " ++ T.unpack e
shouldDecodeTo (Right resp) a = do
bod <- ST.getResponseBody resp
case A.decode' $ BL.fromStrict bod of
Just x | x == a -> return ()
Just _ -> HU.assertFailure $
"Failed to decode response to " ++ show a ++
" from body: " ++ B8.unpack bod
Nothing -> HU.assertFailure $ "Failed to decode respone from body: " ++
B8.unpack bod ++ "\nResponse: " ++ show resp
shouldHaveHeaders :: Either T.Text Response
-> [(B8.ByteString, B8.ByteString)]
-> Expectation
shouldHaveHeaders (Left e) _ = expectationFailure $ T.unpack e
shouldHaveHeaders (Right resp) hs = do
let respHs = Set.fromList $ SC.listHeaders resp
hs' = Set.fromList $ (\(k,v) -> (mk k,v)) <$> hs
missing = Set.toList $ Set.difference hs' respHs
case missing of
[] -> return ()
_ -> expectationFailure $
"These expected headers and values were missing: " ++ show missing ++
" from the response's: " ++ show (Set.toList respHs)
| null | https://raw.githubusercontent.com/haskell-servant/servant-snap/b54c5da86f2f2ed994e9dfbb0694c72301b5a220/test/Servant/Utils/SnapTestUtils.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE KindSignatures #
# LANGUAGE OverloadedStrings #
----------------------------------------------------------------------------
* Assorted Snap helpers
----------------------------------------------------------------------------
req <- State.get -- Useful for debugging
----------------------------------------------------------------------------
---------------------------------------------------------------------------- | # LANGUAGE LambdaCase #
# LANGUAGE TemplateHaskell #
module Servant.Utils.SnapTestUtils where
import Control.Lens (makeLenses)
import Control.Monad (forM_, unless,
void, when)
import Control.Monad.IO.Class (liftIO)
import Data.Aeson
import qualified Data.Aeson as A
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy as BL
import Data.CaseInsensitive (mk)
import Data.List (foldl')
import Data.Maybe (fromMaybe)
import Data.Proxy
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Network.HTTP.Types (hAccept,
hContentType)
import qualified Network.HTTP.Types
import Servant.API ((:<|>) (..), (:>),
BasicAuth,
Capture,
CaptureAll,
Header (..),
Headers,
IsSecure (..),
JSON,
NoContent (..),
NoFraming,
OctetStream,
PlainText,
QueryFlag,
QueryParam,
QueryParams, Raw,
RemoteHost,
ReqBody, SourceIO,
Stream, addHeader)
import Servant.API.Verbs (Delete, Get,
Patch, Post, Put,
Verb)
import Servant.Server hiding (route)
import Servant.Server.Internal (HasServer)
import Snap
import qualified Snap.Core as SC
import Snap.Snaplet
import Snap.Snaplet.Auth
import Snap.Snaplet.Auth.Backends.JsonFile
import Snap.Snaplet.Session
import Snap.Snaplet.Session.Backends.CookieSession
import qualified Snap.Test as ST
import qualified Snap.Util.CORS as CORS
import Test.Hspec
import qualified Test.HUnit as HU
data App = App { _auth :: Snaplet (AuthManager App)
, _sess :: Snaplet SessionManager}
makeLenses 'App
type AppHandler = Handler App App
app :: SnapletInit App App
app = app' []
app' :: [(B8.ByteString, AppHandler ())] -> SnapletInit App App
app' rs = makeSnaplet "servantsnap" "A test app for servant-snap" Nothing $ do
s <- nestSnaplet "sess" sess $
initCookieSessionManager "site_key.txt" "sess" Nothing (Just 3600)
a <- nestSnaplet "auth" auth $ initJsonFileAuthManager defAuthSettings sess "users.json"
addRoutes rs
wrapSite (\h -> createTestUserIfMissing >> CORS.applyCORS CORS.defaultOptions h)
return (App a s)
createTestUserIfMissing :: Handler App App ()
createTestUserIfMissing =
with auth $ usernameExists testLogin >>= \case
True -> return ()
False -> void $ createUser testLogin testPassword
testLogin = "greg"
testPassword = "p@ssword"
mkInitAndServer :: (HasServer api context m, m ~ AppHandler)
=> Proxy (api :: *)
-> Context context
-> Server api context (AppHandler)
-> (SnapletInit App App, AppHandler ())
mkInitAndServer api ctx serv =
let sRoute = serveSnapWithContext api ctx serv
in (app' [("", sRoute)], sRoute)
mkRequest :: Method
-> B8.ByteString
-> B8.ByteString
-> [Network.HTTP.Types.Header]
-> B8.ByteString
-> ST.RequestBuilder IO ()
mkRequest mth pth qs hds bdy = do
let ct = fromMaybe "" (Prelude.lookup hContentType hds)
ST.postRaw pth ct bdy
ST.setQueryStringRaw qs
unless (mth == SC.POST) $ ST.setRequestType (ST.RequestWithRawBody mth bdy)
forM_ hds (\(k, v) -> unless (k == hContentType) $ ST.addHeader k v)
liftIO $ print req
runReqOnApi :: (HasServer api context m, m ~ AppHandler)
=> Proxy (api :: *)
-> Context context
-> Server api context AppHandler
-> Method
-> B8.ByteString
-> B8.ByteString
-> [Network.HTTP.Types.Header]
-> B8.ByteString
-> IO (Either T.Text Response)
runReqOnApi api ctx serv method route qs hds bod =
let (sInit, serv') = mkInitAndServer api ctx serv
in SST.runHandler Nothing ( mkRequest method route qs hds bod ) serv ' sInit
in testSnaplet sInit (mkRequest method route qs hds bod)
routes :: (HasServer api context m, m ~ AppHandler)
=> Proxy (api :: *)
-> Context context
-> Server api context (AppHandler)
-> [(B8.ByteString, AppHandler ())]
routes p ctx s = [("", serveSnapWithContext p ctx s)]
testSnaplet :: SnapletInit b b -> ST.RequestBuilder IO () -> IO (Either T.Text Response)
testSnaplet snapletInit req = do
(_, snapm, _) <- runSnaplet Nothing snapletInit
fmap Right $ ST.runHandler req snapm
* hspec helpers
shouldHaveBody :: Either T.Text Response -> T.Text -> IO ()
shouldHaveBody (Left e) _ = HU.assertFailure $
"Failed to respond: " ++ T.unpack e
shouldHaveBody (Right r) a = do
bod <- ST.getResponseBody r
bod `shouldBe` T.encodeUtf8 a
shouldHaveStatus :: Either T.Text Response -> Int -> IO ()
shouldHaveStatus (Left e) _ = HU.assertFailure $
"Failed to respond: " ++ T.unpack e
shouldHaveStatus (Right r) a = do
SC.rspStatus r `shouldBe` a
shouldDecodeTo :: (FromJSON a, Eq a, Show a)
=> Either T.Text Response
-> a
-> IO ()
shouldDecodeTo (Left e) _ = HU.assertFailure $
"Failed to respond: " ++ T.unpack e
shouldDecodeTo (Right resp) a = do
bod <- ST.getResponseBody resp
case A.decode' $ BL.fromStrict bod of
Just x | x == a -> return ()
Just _ -> HU.assertFailure $
"Failed to decode response to " ++ show a ++
" from body: " ++ B8.unpack bod
Nothing -> HU.assertFailure $ "Failed to decode respone from body: " ++
B8.unpack bod ++ "\nResponse: " ++ show resp
shouldHaveHeaders :: Either T.Text Response
-> [(B8.ByteString, B8.ByteString)]
-> Expectation
shouldHaveHeaders (Left e) _ = expectationFailure $ T.unpack e
shouldHaveHeaders (Right resp) hs = do
let respHs = Set.fromList $ SC.listHeaders resp
hs' = Set.fromList $ (\(k,v) -> (mk k,v)) <$> hs
missing = Set.toList $ Set.difference hs' respHs
case missing of
[] -> return ()
_ -> expectationFailure $
"These expected headers and values were missing: " ++ show missing ++
" from the response's: " ++ show (Set.toList respHs)
|
40c5056a51a4f7d1b73cd5c86c509c9df75af77ec0ad538503454a0a9322eed8 | RichiH/git-annex | Handle.hs | Persistent sqlite database handles .
-
- Copyright 2015 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2015 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE BangPatterns #-}
module Database.Handle (
DbHandle,
DbConcurrency(..),
openDb,
TableName,
queryDb,
closeDb,
commitDb,
commitDb',
) where
import Utility.Exception
import Utility.FileSystemEncoding
import Database.Persist.Sqlite
import qualified Database.Sqlite as Sqlite
import Control.Monad
import Control.Monad.IO.Class (liftIO)
import Control.Concurrent
import Control.Concurrent.Async
import Control.Exception (throwIO, BlockedIndefinitelyOnMVar(..))
import qualified Data.Text as T
import Control.Monad.Trans.Resource (runResourceT)
import Control.Monad.Logger (runNoLoggingT)
import Data.List
import System.IO
A DbHandle is a reference to a worker thread that communicates with
- the database . It has a MVar which Jobs are submitted to .
- the database. It has a MVar which Jobs are submitted to. -}
data DbHandle = DbHandle DbConcurrency (Async ()) (MVar Job)
{- Name of a table that should exist once the database is initialized. -}
type TableName = String
Sqlite only allows a single write to a database at a time ; a concurrent
- write will crash .
-
- While a DbHandle serializes concurrent writes from
- multiple threads . But , when a database can be written to by
- multiple processes concurrently , use to make writes
- to the database be done robustly .
-
- The downside of using MultiWriter is that after writing a change to the
- database , the a query using the same DbHandle will not immediately see
- the change ! This is because the change is actually written using a
- separate database connection , and caching can prevent seeing the change .
- Also , consider that if multiple processes are writing to a database ,
- you ca n't rely on seeing values you 've just written anyway , as another
- process may change them .
-
- When a database can only be written to by a single process , use
- SingleWriter . Changes written to the database will always be immediately
- visible then .
- write will crash.
-
- While a DbHandle serializes concurrent writes from
- multiple threads. But, when a database can be written to by
- multiple processes concurrently, use MultiWriter to make writes
- to the database be done robustly.
-
- The downside of using MultiWriter is that after writing a change to the
- database, the a query using the same DbHandle will not immediately see
- the change! This is because the change is actually written using a
- separate database connection, and caching can prevent seeing the change.
- Also, consider that if multiple processes are writing to a database,
- you can't rely on seeing values you've just written anyway, as another
- process may change them.
-
- When a database can only be written to by a single process, use
- SingleWriter. Changes written to the database will always be immediately
- visible then.
-}
data DbConcurrency = SingleWriter | MultiWriter
Opens the database , but does not perform any migrations . Only use
- once the database is known to exist and have the right tables .
- once the database is known to exist and have the right tables. -}
openDb :: DbConcurrency -> FilePath -> TableName -> IO DbHandle
openDb dbconcurrency db tablename = do
jobs <- newEmptyMVar
worker <- async (workerThread (T.pack db) tablename jobs)
work around
liftIO $ fileEncoding stderr
return $ DbHandle dbconcurrency worker jobs
This is optional ; when the DbHandle gets garbage collected it will
- auto - close .
- auto-close. -}
closeDb :: DbHandle -> IO ()
closeDb (DbHandle _ worker jobs) = do
putMVar jobs CloseJob
wait worker
Makes a query using the DbHandle . This should not be used to make
- changes to the database !
-
- Note that the action is not run by the calling thread , but by a
- worker thread . Exceptions are propigated to the calling thread .
-
- Only one action can be run at a time against a given DbHandle .
- If called concurrently in the same process , this will block until
- it is able to run .
-
- Note that when the DbHandle was opened in MultiWriter mode , recent
- writes may not be seen by queryDb .
- changes to the database!
-
- Note that the action is not run by the calling thread, but by a
- worker thread. Exceptions are propigated to the calling thread.
-
- Only one action can be run at a time against a given DbHandle.
- If called concurrently in the same process, this will block until
- it is able to run.
-
- Note that when the DbHandle was opened in MultiWriter mode, recent
- writes may not be seen by queryDb.
-}
queryDb :: DbHandle -> SqlPersistM a -> IO a
queryDb (DbHandle _ _ jobs) a = do
res <- newEmptyMVar
putMVar jobs $ QueryJob $
liftIO . putMVar res =<< tryNonAsync a
(either throwIO return =<< takeMVar res)
`catchNonAsync` (const $ error "sqlite query crashed")
Writes a change to the database .
-
- In MultiWriter mode , catches failure to write to the database ,
- and retries repeatedly for up to 10 seconds , which should avoid
- all but the most exceptional problems .
-
- In MultiWriter mode, catches failure to write to the database,
- and retries repeatedly for up to 10 seconds, which should avoid
- all but the most exceptional problems.
-}
commitDb :: DbHandle -> SqlPersistM () -> IO ()
commitDb h wa = robustly Nothing 100 (commitDb' h wa)
where
robustly :: Maybe SomeException -> Int -> IO (Either SomeException ()) -> IO ()
robustly e 0 _ = error $ "failed to commit changes to sqlite database: " ++ show e
robustly _ n a = do
r <- a
case r of
Right _ -> return ()
Left e -> do
1/10th second
robustly (Just e) (n-1) a
commitDb' :: DbHandle -> SqlPersistM () -> IO (Either SomeException ())
commitDb' (DbHandle MultiWriter _ jobs) a = do
res <- newEmptyMVar
putMVar jobs $ RobustChangeJob $ \runner ->
liftIO $ putMVar res =<< tryNonAsync (runner a)
takeMVar res
commitDb' (DbHandle SingleWriter _ jobs) a = do
res <- newEmptyMVar
putMVar jobs $ ChangeJob $
liftIO . putMVar res =<< tryNonAsync a
takeMVar res
`catchNonAsync` (const $ error "sqlite commit crashed")
data Job
= QueryJob (SqlPersistM ())
| ChangeJob (SqlPersistM ())
| RobustChangeJob ((SqlPersistM () -> IO ()) -> IO ())
| CloseJob
workerThread :: T.Text -> TableName -> MVar Job -> IO ()
workerThread db tablename jobs = go
where
go = do
v <- tryNonAsync (runSqliteRobustly tablename db loop)
case v of
Left e -> hPutStrLn stderr $
"sqlite worker thread crashed: " ++ show e
Right True -> go
Right False -> return ()
getjob :: IO (Either BlockedIndefinitelyOnMVar Job)
getjob = try $ takeMVar jobs
loop = do
job <- liftIO getjob
case job of
Exception is thrown when the MVar is garbage
-- collected, which means the whole DbHandle
-- is not used any longer. Shutdown cleanly.
Left BlockedIndefinitelyOnMVar -> return False
Right CloseJob -> return False
Right (QueryJob a) -> a >> loop
Right (ChangeJob a) -> do
a
-- Exit this sqlite transaction so the
-- database gets updated on disk.
return True
-- Change is run in a separate database connection
-- since sqlite only supports a single writer at a
-- time, and it may crash the database connection
-- that the write is made to.
Right (RobustChangeJob a) -> do
liftIO (a (runSqliteRobustly tablename db))
loop
like runSqlite , but calls settle on the raw sql Connection .
runSqliteRobustly :: TableName -> T.Text -> (SqlPersistM a) -> IO a
runSqliteRobustly tablename db a = do
conn <- Sqlite.open db
settle conn
runResourceT $ runNoLoggingT $
withSqlConn (wrapConnection conn) $
runSqlConn a
where
-- Work around a bug in sqlite: New database connections can
-- sometimes take a while to become usable; select statements will
fail with ErrorBusy for some time . So , loop until a select
-- succeeds; once one succeeds the connection will stay usable.
-- <>
settle conn = do
r <- tryNonAsync $ do
stmt <- Sqlite.prepare conn nullselect
void $ Sqlite.step stmt
void $ Sqlite.finalize stmt
case r of
Right _ -> return ()
Left e -> do
if "ErrorBusy" `isInfixOf` show e
then do
1/1000th second
settle conn
else throwIO e
-- This should succeed for any table.
nullselect = T.pack $ "SELECT null from " ++ tablename ++ " limit 1"
| null | https://raw.githubusercontent.com/RichiH/git-annex/bbcad2b0af8cd9264d0cb86e6ca126ae626171f3/Database/Handle.hs | haskell | # LANGUAGE BangPatterns #
Name of a table that should exist once the database is initialized.
collected, which means the whole DbHandle
is not used any longer. Shutdown cleanly.
Exit this sqlite transaction so the
database gets updated on disk.
Change is run in a separate database connection
since sqlite only supports a single writer at a
time, and it may crash the database connection
that the write is made to.
Work around a bug in sqlite: New database connections can
sometimes take a while to become usable; select statements will
succeeds; once one succeeds the connection will stay usable.
<>
This should succeed for any table. | Persistent sqlite database handles .
-
- Copyright 2015 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2015 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Database.Handle (
DbHandle,
DbConcurrency(..),
openDb,
TableName,
queryDb,
closeDb,
commitDb,
commitDb',
) where
import Utility.Exception
import Utility.FileSystemEncoding
import Database.Persist.Sqlite
import qualified Database.Sqlite as Sqlite
import Control.Monad
import Control.Monad.IO.Class (liftIO)
import Control.Concurrent
import Control.Concurrent.Async
import Control.Exception (throwIO, BlockedIndefinitelyOnMVar(..))
import qualified Data.Text as T
import Control.Monad.Trans.Resource (runResourceT)
import Control.Monad.Logger (runNoLoggingT)
import Data.List
import System.IO
A DbHandle is a reference to a worker thread that communicates with
- the database . It has a MVar which Jobs are submitted to .
- the database. It has a MVar which Jobs are submitted to. -}
data DbHandle = DbHandle DbConcurrency (Async ()) (MVar Job)
type TableName = String
Sqlite only allows a single write to a database at a time ; a concurrent
- write will crash .
-
- While a DbHandle serializes concurrent writes from
- multiple threads . But , when a database can be written to by
- multiple processes concurrently , use to make writes
- to the database be done robustly .
-
- The downside of using MultiWriter is that after writing a change to the
- database , the a query using the same DbHandle will not immediately see
- the change ! This is because the change is actually written using a
- separate database connection , and caching can prevent seeing the change .
- Also , consider that if multiple processes are writing to a database ,
- you ca n't rely on seeing values you 've just written anyway , as another
- process may change them .
-
- When a database can only be written to by a single process , use
- SingleWriter . Changes written to the database will always be immediately
- visible then .
- write will crash.
-
- While a DbHandle serializes concurrent writes from
- multiple threads. But, when a database can be written to by
- multiple processes concurrently, use MultiWriter to make writes
- to the database be done robustly.
-
- The downside of using MultiWriter is that after writing a change to the
- database, the a query using the same DbHandle will not immediately see
- the change! This is because the change is actually written using a
- separate database connection, and caching can prevent seeing the change.
- Also, consider that if multiple processes are writing to a database,
- you can't rely on seeing values you've just written anyway, as another
- process may change them.
-
- When a database can only be written to by a single process, use
- SingleWriter. Changes written to the database will always be immediately
- visible then.
-}
data DbConcurrency = SingleWriter | MultiWriter
Opens the database , but does not perform any migrations . Only use
- once the database is known to exist and have the right tables .
- once the database is known to exist and have the right tables. -}
openDb :: DbConcurrency -> FilePath -> TableName -> IO DbHandle
openDb dbconcurrency db tablename = do
jobs <- newEmptyMVar
worker <- async (workerThread (T.pack db) tablename jobs)
work around
liftIO $ fileEncoding stderr
return $ DbHandle dbconcurrency worker jobs
This is optional ; when the DbHandle gets garbage collected it will
- auto - close .
- auto-close. -}
closeDb :: DbHandle -> IO ()
closeDb (DbHandle _ worker jobs) = do
putMVar jobs CloseJob
wait worker
Makes a query using the DbHandle . This should not be used to make
- changes to the database !
-
- Note that the action is not run by the calling thread , but by a
- worker thread . Exceptions are propigated to the calling thread .
-
- Only one action can be run at a time against a given DbHandle .
- If called concurrently in the same process , this will block until
- it is able to run .
-
- Note that when the DbHandle was opened in MultiWriter mode , recent
- writes may not be seen by queryDb .
- changes to the database!
-
- Note that the action is not run by the calling thread, but by a
- worker thread. Exceptions are propigated to the calling thread.
-
- Only one action can be run at a time against a given DbHandle.
- If called concurrently in the same process, this will block until
- it is able to run.
-
- Note that when the DbHandle was opened in MultiWriter mode, recent
- writes may not be seen by queryDb.
-}
queryDb :: DbHandle -> SqlPersistM a -> IO a
queryDb (DbHandle _ _ jobs) a = do
res <- newEmptyMVar
putMVar jobs $ QueryJob $
liftIO . putMVar res =<< tryNonAsync a
(either throwIO return =<< takeMVar res)
`catchNonAsync` (const $ error "sqlite query crashed")
Writes a change to the database .
-
- In MultiWriter mode , catches failure to write to the database ,
- and retries repeatedly for up to 10 seconds , which should avoid
- all but the most exceptional problems .
-
- In MultiWriter mode, catches failure to write to the database,
- and retries repeatedly for up to 10 seconds, which should avoid
- all but the most exceptional problems.
-}
commitDb :: DbHandle -> SqlPersistM () -> IO ()
commitDb h wa = robustly Nothing 100 (commitDb' h wa)
where
robustly :: Maybe SomeException -> Int -> IO (Either SomeException ()) -> IO ()
robustly e 0 _ = error $ "failed to commit changes to sqlite database: " ++ show e
robustly _ n a = do
r <- a
case r of
Right _ -> return ()
Left e -> do
1/10th second
robustly (Just e) (n-1) a
commitDb' :: DbHandle -> SqlPersistM () -> IO (Either SomeException ())
commitDb' (DbHandle MultiWriter _ jobs) a = do
res <- newEmptyMVar
putMVar jobs $ RobustChangeJob $ \runner ->
liftIO $ putMVar res =<< tryNonAsync (runner a)
takeMVar res
commitDb' (DbHandle SingleWriter _ jobs) a = do
res <- newEmptyMVar
putMVar jobs $ ChangeJob $
liftIO . putMVar res =<< tryNonAsync a
takeMVar res
`catchNonAsync` (const $ error "sqlite commit crashed")
data Job
= QueryJob (SqlPersistM ())
| ChangeJob (SqlPersistM ())
| RobustChangeJob ((SqlPersistM () -> IO ()) -> IO ())
| CloseJob
workerThread :: T.Text -> TableName -> MVar Job -> IO ()
workerThread db tablename jobs = go
where
go = do
v <- tryNonAsync (runSqliteRobustly tablename db loop)
case v of
Left e -> hPutStrLn stderr $
"sqlite worker thread crashed: " ++ show e
Right True -> go
Right False -> return ()
getjob :: IO (Either BlockedIndefinitelyOnMVar Job)
getjob = try $ takeMVar jobs
loop = do
job <- liftIO getjob
case job of
Exception is thrown when the MVar is garbage
Left BlockedIndefinitelyOnMVar -> return False
Right CloseJob -> return False
Right (QueryJob a) -> a >> loop
Right (ChangeJob a) -> do
a
return True
Right (RobustChangeJob a) -> do
liftIO (a (runSqliteRobustly tablename db))
loop
like runSqlite , but calls settle on the raw sql Connection .
runSqliteRobustly :: TableName -> T.Text -> (SqlPersistM a) -> IO a
runSqliteRobustly tablename db a = do
conn <- Sqlite.open db
settle conn
runResourceT $ runNoLoggingT $
withSqlConn (wrapConnection conn) $
runSqlConn a
where
fail with ErrorBusy for some time . So , loop until a select
settle conn = do
r <- tryNonAsync $ do
stmt <- Sqlite.prepare conn nullselect
void $ Sqlite.step stmt
void $ Sqlite.finalize stmt
case r of
Right _ -> return ()
Left e -> do
if "ErrorBusy" `isInfixOf` show e
then do
1/1000th second
settle conn
else throwIO e
nullselect = T.pack $ "SELECT null from " ++ tablename ++ " limit 1"
|
cfa713faea332661f463e7b7ec56f0ae49df9c76159c6a35a8e0d5c76469b4e1 | grin-compiler/ghc-wpc-sample-programs | Citation.hs | # LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
|
Module : Tests . Readers . Org . Inline . Citation
Copyright : © 2014 - 2020 License : GNU GPL , version 2 or above
Maintainer : >
Stability : alpha
Portability : portable
Test parsing of citations in org input .
Module : Tests.Readers.Org.Inline.Citation
Copyright : © 2014-2020 Albert Krewinkel
License : GNU GPL, version 2 or above
Maintainer : Albert Krewinkel <>
Stability : alpha
Portability : portable
Test parsing of citations in org input.
-}
module Tests.Readers.Org.Inline.Citation (tests) where
import Prelude
import Test.Tasty (TestTree, testGroup)
import Tests.Helpers ((=?>))
import Tests.Readers.Org.Shared ((=:))
import Text.Pandoc.Builder
tests :: [TestTree]
tests =
[ testGroup "Markdown-style citations"
[ "Citation" =:
"[@nonexistent]" =?>
let citation = Citation
{ citationId = "nonexistent"
, citationPrefix = []
, citationSuffix = []
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0}
in (para $ cite [citation] "[@nonexistent]")
, "Citation containing text" =:
"[see @item1 p. 34-35]" =?>
let citation = Citation
{ citationId = "item1"
, citationPrefix = [Str "see"]
, citationSuffix = [Space ,Str "p.",Space,Str "34-35"]
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0}
in (para $ cite [citation] "[see @item1 p. 34-35]")
]
, testGroup "org-ref citations"
[ "simple citation" =:
"cite:pandoc" =?>
let citation = Citation
{ citationId = "pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "cite:pandoc")
, "simple citation with underscores" =:
"cite:pandoc_org_ref" =?>
let citation = Citation
{ citationId = "pandoc_org_ref"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "cite:pandoc_org_ref")
, "simple citation succeeded by comma" =:
"cite:pandoc," =?>
let citation = Citation
{ citationId = "pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "cite:pandoc" <> str ",")
, "simple citation succeeded by dot" =:
"cite:pandoc." =?>
let citation = Citation
{ citationId = "pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "cite:pandoc" <> str ".")
, "simple citation succeeded by colon" =:
"cite:pandoc:" =?>
let citation = Citation
{ citationId = "pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "cite:pandoc" <> str ":")
, "simple citep citation" =:
"citep:pandoc" =?>
let citation = Citation
{ citationId = "pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "citep:pandoc")
, "extended citation" =:
"[[citep:Dominik201408][See page 20::, for example]]" =?>
let citation = Citation
{ citationId = "Dominik201408"
, citationPrefix = toList "See page 20"
, citationSuffix = toList ", for example"
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "[[citep:Dominik201408][See page 20::, for example]]")
]
, testGroup "Berkeley-style citations" $
let pandocCite = Citation
{ citationId = "Pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0
}
pandocInText = pandocCite { citationMode = AuthorInText }
dominikCite = Citation
{ citationId = "Dominik201408"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0
}
dominikInText = dominikCite { citationMode = AuthorInText }
in
[ "Berkeley-style in-text citation" =:
"See @Dominik201408." =?>
para ("See "
<> cite [dominikInText] "@Dominik201408"
<> ".")
, "Berkeley-style parenthetical citation list" =:
"[(cite): see; @Dominik201408;also @Pandoc; and others]" =?>
let pandocCite' = pandocCite {
citationPrefix = toList "also"
, citationSuffix = toList "and others"
}
dominikCite' = dominikCite {
citationPrefix = toList "see"
}
in (para $ cite [dominikCite', pandocCite'] "")
, "Berkeley-style plain citation list" =:
"[cite: See; @Dominik201408; and @Pandoc; and others]" =?>
let pandocCite' = pandocInText { citationPrefix = toList "and" }
in (para $ "See "
<> cite [dominikInText] ""
<> "," <> space
<> cite [pandocCite'] ""
<> "," <> space <> "and others")
]
, "LaTeX citation" =:
"\\cite{Coffee}" =?>
let citation = Citation
{ citationId = "Coffee"
, citationPrefix = []
, citationSuffix = []
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0}
in (para . cite [citation] $ rawInline "latex" "\\cite{Coffee}")
]
| null | https://raw.githubusercontent.com/grin-compiler/ghc-wpc-sample-programs/0e3a9b8b7cc3fa0da7c77fb7588dd4830fb087f7/pandoc-11df2a3c0f2b1b8e351ad8caaa7cdf583e1b3b2e/test/Tests/Readers/Org/Inline/Citation.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE NoImplicitPrelude #
|
Module : Tests . Readers . Org . Inline . Citation
Copyright : © 2014 - 2020 License : GNU GPL , version 2 or above
Maintainer : >
Stability : alpha
Portability : portable
Test parsing of citations in org input .
Module : Tests.Readers.Org.Inline.Citation
Copyright : © 2014-2020 Albert Krewinkel
License : GNU GPL, version 2 or above
Maintainer : Albert Krewinkel <>
Stability : alpha
Portability : portable
Test parsing of citations in org input.
-}
module Tests.Readers.Org.Inline.Citation (tests) where
import Prelude
import Test.Tasty (TestTree, testGroup)
import Tests.Helpers ((=?>))
import Tests.Readers.Org.Shared ((=:))
import Text.Pandoc.Builder
tests :: [TestTree]
tests =
[ testGroup "Markdown-style citations"
[ "Citation" =:
"[@nonexistent]" =?>
let citation = Citation
{ citationId = "nonexistent"
, citationPrefix = []
, citationSuffix = []
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0}
in (para $ cite [citation] "[@nonexistent]")
, "Citation containing text" =:
"[see @item1 p. 34-35]" =?>
let citation = Citation
{ citationId = "item1"
, citationPrefix = [Str "see"]
, citationSuffix = [Space ,Str "p.",Space,Str "34-35"]
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0}
in (para $ cite [citation] "[see @item1 p. 34-35]")
]
, testGroup "org-ref citations"
[ "simple citation" =:
"cite:pandoc" =?>
let citation = Citation
{ citationId = "pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "cite:pandoc")
, "simple citation with underscores" =:
"cite:pandoc_org_ref" =?>
let citation = Citation
{ citationId = "pandoc_org_ref"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "cite:pandoc_org_ref")
, "simple citation succeeded by comma" =:
"cite:pandoc," =?>
let citation = Citation
{ citationId = "pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "cite:pandoc" <> str ",")
, "simple citation succeeded by dot" =:
"cite:pandoc." =?>
let citation = Citation
{ citationId = "pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "cite:pandoc" <> str ".")
, "simple citation succeeded by colon" =:
"cite:pandoc:" =?>
let citation = Citation
{ citationId = "pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "cite:pandoc" <> str ":")
, "simple citep citation" =:
"citep:pandoc" =?>
let citation = Citation
{ citationId = "pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "citep:pandoc")
, "extended citation" =:
"[[citep:Dominik201408][See page 20::, for example]]" =?>
let citation = Citation
{ citationId = "Dominik201408"
, citationPrefix = toList "See page 20"
, citationSuffix = toList ", for example"
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0
}
in (para $ cite [citation] "[[citep:Dominik201408][See page 20::, for example]]")
]
, testGroup "Berkeley-style citations" $
let pandocCite = Citation
{ citationId = "Pandoc"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0
}
pandocInText = pandocCite { citationMode = AuthorInText }
dominikCite = Citation
{ citationId = "Dominik201408"
, citationPrefix = mempty
, citationSuffix = mempty
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0
}
dominikInText = dominikCite { citationMode = AuthorInText }
in
[ "Berkeley-style in-text citation" =:
"See @Dominik201408." =?>
para ("See "
<> cite [dominikInText] "@Dominik201408"
<> ".")
, "Berkeley-style parenthetical citation list" =:
"[(cite): see; @Dominik201408;also @Pandoc; and others]" =?>
let pandocCite' = pandocCite {
citationPrefix = toList "also"
, citationSuffix = toList "and others"
}
dominikCite' = dominikCite {
citationPrefix = toList "see"
}
in (para $ cite [dominikCite', pandocCite'] "")
, "Berkeley-style plain citation list" =:
"[cite: See; @Dominik201408; and @Pandoc; and others]" =?>
let pandocCite' = pandocInText { citationPrefix = toList "and" }
in (para $ "See "
<> cite [dominikInText] ""
<> "," <> space
<> cite [pandocCite'] ""
<> "," <> space <> "and others")
]
, "LaTeX citation" =:
"\\cite{Coffee}" =?>
let citation = Citation
{ citationId = "Coffee"
, citationPrefix = []
, citationSuffix = []
, citationMode = NormalCitation
, citationNoteNum = 0
, citationHash = 0}
in (para . cite [citation] $ rawInline "latex" "\\cite{Coffee}")
]
|
d5010e3656622691628cca97da38d83911355b6e51a0d0a76c1a261c6250d552 | tomhanika/conexp-clj | implications.clj | ;; Copyright ⓒ the conexp-clj developers; all rights reserved.
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file LICENSE at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns conexp.fca.implications
"Implications for Formal Concept Analysis."
(:require [clojure.core.reducers :as r]
[conexp.base :refer :all]
[conexp.math.algebra :refer :all]
[conexp.fca.contexts :refer :all]))
;;;
(deftype Implication [premise conclusion]
Object
(equals [this other]
(generic-equals [this other] Implication [premise conclusion]))
(hashCode [this]
(hash-combine-hash Implication premise conclusion))
(toString [this]
(str "(" premise " ⟶ " conclusion ")")))
(defmulti premise
"Returns premise of given object."
{:arglists '([thing])}
type)
(defmethod premise Implication [^Implication impl]
(.premise impl))
(defmulti conclusion
"Returns conclusion of given object."
{:arglists '([thing])}
type)
(defmethod conclusion Implication [^Implication impl]
(.conclusion impl))
(defmethod print-method Implication
[impl out]
(.write ^java.io.Writer out
^String (str impl)))
(defn implication?
"Returns true iff thing is an implication."
[thing]
(instance? Implication thing))
;;;
(defn make-implication
"Creates an implication (premise => conclusion \\ premise)."
[premise conclusion]
(let [premise (set premise)
conclusion (set conclusion)]
(Implication. premise (difference conclusion premise))))
(defmacro impl
"Convenience interface for creating implications. Write implications just as
user=> (impl 1 2 3 ==> 4 5 6)
(#{1 2 3} ==> #{4 5 6})"
[& elements]
(let [[premise conclusion] (split-with (fn [x]
(not= x '==>))
elements)]
(when (empty? conclusion)
(warn "«impl» does not contain ==>"))
`(make-implication (list ~@premise) (list ~@(rest conclusion)))))
;;;
(defn respects?
"Returns true iff set respects given implication impl."
[set impl]
(or (not (subset? (premise impl) set))
(subset? (conclusion impl) set)))
(defn holds?
"Returns true iff impl holds in given context ctx."
[impl ctx]
(subset? (conclusion impl) (adprime ctx (premise impl))))
(defn tautology?
"Returns true iff impl has empty conclusion."
[impl]
(empty? (conclusion impl)))
(defn- implication-graph
"Compute setup for Downing-Gallier"
[implications]
(let [implications (vec implications),
where-in-premise (persistent!
(reduce (fn [map i]
(reduce (fn [map m]
(assoc! map m (conj (map m) i)))
map
(premise (implications i))))
(transient {})
(range (count implications))))
numargs (loop [numargs []
impls implications]
(if (empty? impls)
numargs
(recur (conj numargs (count (premise (first impls))))
(rest impls))))]
[implications where-in-premise numargs]))
(defn- close-with-downing-gallier
"Downing-Gallier"
[[implications in-premise numargs] input-set]
(let [numargs (reduce (fn [numargs i]
(assoc! numargs i (dec (numargs i))))
(transient numargs)
(mapcat in-premise input-set))]
(loop [queue (reduce (fn [queue i]
(if (zero? (numargs i))
(conj queue i)
queue))
(clojure.lang.PersistentQueue/EMPTY)
(range (count numargs))),
numargs numargs,
result input-set]
(if (empty? queue)
result
(let [idx (first queue),
new (difference (conclusion (implications idx)) result)
[numargs queue] (reduce (fn [[numargs queue] i]
(let [numargs (assoc! numargs i (dec (numargs i)))]
[numargs (if (pos? (numargs i))
queue
(conj queue i))]))
[numargs (pop queue)]
(mapcat in-premise new))]
(recur queue numargs (into result new)))))))
(defn clop-by-implications
"Returns closure operator given by implications."
[implications]
(let [predata (implication-graph implications)]
(fn [input-set]
(close-with-downing-gallier predata input-set))))
(defn close-under-implications
"Computes smallest superset of set being closed under given implications."
[implications input-set]
((clop-by-implications implications) input-set))
(defn- add-immediate-elements
"Iterating through the sequence of implications, tries to apply as many
implications as possible. Uses subset-test to determine whether a given
implication can be used to extend a given set, i.e. an implication impl can be
used to extend a set s if and only if
(subset-test (premise impl) s)
is true. Note that if (conclusion impl) is already a subset of s, then s is
effectively not extended."
[implications initial-set subset-test]
(loop [conclusions (transient initial-set),
impls implications,
unused-impls (transient [])]
(if-let [impl (first impls)]
(if (subset-test (premise impl) initial-set)
(recur (reduce conj! conclusions (conclusion impl))
(rest impls)
unused-impls)
(recur conclusions
(rest impls)
(conj! unused-impls impl)))
[(persistent! conclusions)
(persistent! unused-impls)])))
(defn pseudo-close-under-implications
"Computes smallest superset of set being pseudo-closed under given
implications."
[implications set]
(assert (set? set))
(loop [set set,
impls implications]
(let [[new impls] (add-immediate-elements impls set proper-subset?)]
(if (= new set)
new
(recur new impls)))))
(defn pseudo-clop-by-implications
"Returns for a given set of implications the corresponding closure
operator whose closures are all closed and pseudo-closed sets."
[implications]
(partial pseudo-close-under-implications implications))
(defn follows-semantically?
"Returns true iff implication follows semantically from given
implications."
[implication implications]
(subset? (conclusion implication)
(close-under-implications implications (premise implication))))
(defalias follows? follows-semantically?)
(defn equivalent-implications?
"Returns true iff the two seqs of implications are equivalent."
[impls-1 impls-2]
(and (forall [impl impls-1] (follows-semantically? impl impls-2))
(forall [impl impls-2] (follows-semantically? impl impls-1))))
(defn minimal-implication-set?
"Checks whether given set of implications is minimal, i.e. no
implication in this set follows from the others."
[impl-set]
(let [impl-set (set impl-set)]
(forall [impl impl-set]
(not (follows-semantically? impl (disj impl-set impl))))))
(defn sound-implication-set?
  "Checks whether given set of implications is sound, i.e. every
  implication holds in the given context."
  [ctx impl-set]
  (every? (fn [impl] (holds? impl ctx)) impl-set))
(defn complete-implication-set?
  "Checks whether given set of implications is complete in context ctx. This is a
  very costly computation."
  [ctx impl-set]
  (and (forall [impl impl-set]
         (and (subset? (premise impl) (attributes ctx))
              (subset? (conclusion impl) (attributes ctx))))
       ;; completeness: for every attribute set A, the closure of A under
       ;; impl-set must contain A'' — exponential in |attributes ctx|
       (forall [A (subsets (attributes ctx))]
         (subset? (adprime ctx A)
                  (close-under-implications impl-set A)))))
(defn irredundant-subset
"Given a set impls of implications, returns an irredundant subset of impls.
Note that this set does not need to be of minimal cardinality."
[impls]
(reduce (fn [impls impl]
(if (follows-semantically? impl impls)
impls
(loop [impls impls, ; implications to check
new-impls (conj impls impl)] ; all implications
(if-not (seq impls)
new-impls
(let [next-impl (first impls)]
(if (follows-semantically? next-impl (disj new-impls next-impl))
(recur (rest impls) (disj new-impls next-impl)) ; first implication entailed by others
(recur (rest impls) new-impls))))))) ; not
#{}
impls))
;;; Bases for closure operators
(defn canonical-base-from-clop
"Given a closure operator «clop» on the set «base», computes its canonical base,
optionally using the set «background-knowledge» of implications on «base-set»
as background knowledge. The result will be a lazy sequence. If «predicate»
is given as third argument, computes only those implications whose premise
satisfy this predicate. Note that «predicate» has to satisfy the same
conditions as the one of «next-closed-set-in-family»."
([clop base]
(canonical-base-from-clop clop base #{} (constantly true)))
([clop base background-knowledge]
(canonical-base-from-clop clop base background-knowledge (constantly true)))
([clop base background-knowledge predicate]
(assert (fn? clop)
"Given closure operator must be a function")
(assert (coll? base)
"Base must be a collection")
(assert (fn? predicate)
"Predicate must be a function")
(assert (and (set? background-knowledge)
(forall [x background-knowledge]
(implication? x)))
"Background knowledge must be a set of implications")
(let [next-closure (fn [implications last]
(next-closed-set-in-family predicate
base
(clop-by-implications implications)
last)),
runner (fn runner [implications candidate]
(when candidate
(let [conclusions (clop candidate)]
(if (not= candidate conclusions)
(let [impl (make-implication candidate conclusions),
impls (conj implications impl)]
(cons impl
(lazy-seq (runner impls (next-closure impls candidate)))))
(recur implications (next-closure implications candidate))))))]
(lazy-seq (runner background-knowledge
(close-under-implications background-knowledge #{}))))))
(defn intersect-implicational-theories
"Given a set «base-set» and collections «implication-sets» of implications,
returns the canonical base of the intersection of the corresponding closure
theories."
[base-set & implication-sets]
(let [implication-clops (vec (map clop-by-implications implication-sets)),
clop (fn [A]
(r/fold (r/monoid intersection (constantly base-set))
(r/map #(% A) implication-clops)))]
(canonical-base-from-clop clop base-set)))
(defn canonical-base
"Returns the canonical base of given context, as a lazy sequence. Uses
«background-knowledge» as starting set of implications, which will not appear
in the result. If «predicate» is given (a function), computes only those
implications from the canonical base whose premise satisfy this predicate,
i.e. «predicate» returns true on these premises. Note that «predicate» has to
satisfy the same conditions as the predicate to «next-closed-set-in-family»."
([ctx]
(canonical-base ctx #{} (constantly true)))
([ctx background-knowledge]
(canonical-base ctx background-knowledge (constantly true)))
([ctx background-knowledge predicate]
(assert (context? ctx)
"First argument must be a formal context")
(canonical-base-from-clop #(context-attribute-closure ctx %)
(attributes ctx)
background-knowledge
predicate)))
(defalias stem-base canonical-base)
(defn pseudo-intents
"Returns the pseudo intents of the given context ctx."
[ctx]
(map premise (stem-base ctx)))
(defn parallel-canonical-base-from-clop
"Computes the canonical base of the given closure operator in parallel.
Accepts the same parameters as «canonical-base-from-clop», except for the
predicate."
([clop base]
(parallel-canonical-base-from-clop clop base #{}))
([clop base background-knowledge]
(let [implications (atom (set background-knowledge))
current (atom #{#{}})]
(loop [n 0]
(if (< (count base) n)
(difference @implications (set background-knowledge))
(do
(dopar [C (filter #(= n (count %)) @current)]
(swap! current #(disj % C))
(let [impl-C (close-under-implications @implications C)]
(if (= C impl-C)
(let [clop-C (clop C)]
(when (not= C clop-C)
(swap! implications
#(conj % (make-implication C clop-C))))
(doseq [m base :when (not (contains? clop-C m))]
(swap! current #(conj % (conj clop-C m)))))
(swap! current #(conj % impl-C)))))
(recur (inc n))))))))
(defn parallel-canonical-base
"Computes the canonical base of the given formal context.
Background knowledge can be provided as a set of implications on the attribute
set of the given context. Computation is eager and is done in parallel."
([ctx]
(parallel-canonical-base ctx #{}))
([ctx background-knowledge]
(parallel-canonical-base-from-clop (partial adprime ctx)
(attributes ctx)
background-knowledge)))
;;; Proper Premises
(defn proper-conclusion
"Returns all elements which are implied in context ctx by A but are neither
contained in A or follow from a strict subsets of A."
[ctx A]
(difference (context-attribute-closure ctx A)
(reduce into
A
(map #(context-attribute-closure ctx (disj A %))
A))))
(defn proper-premise?
"Returns true iff set A is a subset of the attributes of context ctx
and is a proper premise in ctx."
[ctx A]
(and (subset? A (attributes ctx))
(not (empty? (proper-conclusion ctx A)))))
(defn- proper-premises-by-hypertrans
"Returns all proper premises for the attribute «m» in the formal context
«ctx». The set «objs» should contain all objects from ctx which are in
down-arrow relation to m."
[ctx m objs]
(minimal-hypergraph-transversals
(disj (attributes ctx) m)
(set-of (difference (attributes ctx) (oprime ctx #{g})) | g objs)))
(defn proper-premises-for-attribute
"Returns all proper premises for the attribute «m» in the formal context «ctx»."
[ctx m]
(proper-premises-by-hypertrans ctx m (set-of g | [g n] (down-arrows ctx) :when (= n m))))
(defn proper-premises
"Returns the proper premises of the given context ctx as a lazy sequence."
[ctx]
(let [down-arrow-map (loop [arrows (down-arrows ctx),
arrow-map (map-by-fn (constantly #{}) (attributes ctx))]
(if-let [[g m] (first arrows)]
(recur (rest arrows)
(update-in arrow-map [m] conj g))
arrow-map))]
(distinct
(reduce concat
(pmap #(apply proper-premises-by-hypertrans ctx %)
down-arrow-map)))))
(defn proper-premise-implications
"Returns all implications based on the proper premises of the
context ctx."
[ctx]
(set-of (make-implication A (context-attribute-closure ctx A))
[A (proper-premises ctx)]))
;;; Ryssel's Algorithm
(defn- cover [base-set candidates A]
(let [object-covers (minimum-set-covers
(difference base-set A)
(set-of (difference base-set N) | N candidates))]
(map (fn [cover]
(map #(difference base-set %) cover))
object-covers)))
(defn ryssel-base
"Returns the implications computed by Ryssels Algorithm, as a lazy sequence."
[ctx]
(let [gens (reduce! (fn [map x] ;generating elements of attribute extents
(let [extent (aprime ctx #{x})]
(assoc! map extent
(conj (get map extent #{}) x))))
{}
(attributes ctx)),
all-extents (set (keys gens)), ;all attribute extents
irr-extents (set-of (aprime ctx #{m}) ;attribute extents of irreducible attributes
| m (attributes (reduce-attributes ctx))),
empty-prime (adprime ctx #{})]
(->> (reduce into
(for [m (attributes ctx)
:when (not= (adprime ctx #{m})
(conj empty-prime m))]
#{m})
(pmap (fn [A]
(let [candidates (set-of U | U (disj irr-extents A),
:let [U-cap-A (intersection U A)]
:when (not (exists [V all-extents]
(and (proper-subset? V A)
(subset? U-cap-A V))))),
covers (cover (objects ctx) candidates A)]
(for [X covers]
(set-of m | Y X, m (gens Y)))))
all-extents))
distinct
(map #(make-implication % (adprime ctx %))))))
;;; Convert arbitrary bases to the Canonical Base
(defn stem-base-from-base
"For a given set of implications returns its stem-base, see:
Rudolph 2007
-3-540-70901-5_10"
[implications]
(let [implications (pmap (fn [impl]
(make-implication
(premise impl)
(close-under-implications implications
(union (premise impl)
(conclusion impl)))))
implications)]
(loop [stem-base #{},
implications implications,
all (set implications)]
(if (empty? implications)
stem-base
(let [A->B (first implications),
implications (rest implications),
all (disj all A->B)
A* (close-under-implications all (premise A->B)),
A*->B (make-implication A* (conclusion A->B))]
(if (not-empty (conclusion A*->B))
(recur (conj stem-base A*->B)
implications
(conj all A*->B))
(recur stem-base
implications
all)))))))
(defalias canonical-base-from-base stem-base-from-base)
;;; Ganter Base
(defn ganter-base
  "Given an implication base transforms it into the Ganter Base, a second
  argument may be given as a function by which the representative element
  will be chosen.
  The default takes whichever element comes first.
  Defined in:
  “Properties of Finite Lattices” by S. Reeg and W. Weiß, Revisited
  In Memoriam Peter Burmeister (1941–2019), Bernhard Ganter 2019
  https://doi.org/10.1007/978-3-030-21462-3_8"
  ([base]
   (ganter-base base first))
  ([base choose]
   ;; NOTE(review): restores the `(let [` opener and the step comments that
   ;; were mangled in this copy of the file; bindings below are unchanged.
   (let [;; first step: choose one representative per class of attributes
         ;; with equal closure
         atts     (reduce union (map #(union (premise %) (conclusion %)) base))
         equiv    (closure-equivalence atts
                                       #(close-under-implications base #{%}))
         reps     (reduce merge (for [[k v] equiv] (hash-map (choose v) v)))
         ;; second / third step: replace attributes by their representative
         ;; and remove those already implied by the empty set
         zero     (close-under-implications base #{})
         impl2    (for [i base]
                    (let [prem (difference (premise i) zero)
                          concl (difference (conclusion i) zero)]
                      (make-implication
                       (set (for [[k v] reps :when (some (set v) prem)] k))
                       (set (for [[k v] reps :when (some (set v) concl)] k)))))
         ;; fourth step: remove proper consequences from premises/conclusions
         closures (apply merge
                         (for [[k1 v1] equiv [k2 v2] reps :when (= v1 v2)]
                           (hash-map k2 (difference k1 #{k2}))))
         impl4    (for [i impl2]
                    (let [prem (premise i)
                          concl (conclusion i)]
                      (make-implication
                       (if (< 1 (count prem))
                         (difference
                          prem
                          (reduce union (map #(get closures %) prem)))
                         prem)
                       (if (< 1 (count concl))
                         (difference
                          concl
                          (reduce union (map #(get closures %) concl)))
                         concl))))
         ;; fifth / sixth step: add cyclic implications linking the members
         ;; of each non-trivial equivalence class
         cycles   (flatten
                   (for [[k v] reps :when (< 1 (count v))]
                     (map
                      #(make-implication #{%1} #{%2})
                      v
                      (conj (drop-last 1 v) (last v)))))
         cycles+  (if (< 0 (count zero))
                    (conj cycles (make-implication #{} zero))
                    cycles)]
     ;; seventh / eighth step: merge implications that share a premise
     (set (for [[k v] (group-by premise (concat impl4 cycles+))]
            (make-implication k (reduce union (map conclusion v))))))))
;;; Association Rules
(defn support
  "Computes the support of the set of attributes B in context ctx. If an
  implication is given, returns the support of this implication in the given
  context."
  [thing ctx]
  (cond
   (set? thing)
   (if (empty? (objects ctx))
     1
     ;; fraction of objects possessing every attribute in thing
     (/ (count (attribute-derivation ctx thing))
        (count (objects ctx)))),
   (implication? thing)
   ;; the support of an implication is the support of its premise
   (recur (premise thing) ctx),
   :else
   (illegal-argument "Cannot determine support of " (print-str thing))))
(defn confidence
  "Computes the confidence of the given implication in the given context."
  [implication context]
  (let [premise-count (count (attribute-derivation context (premise implication)))]
    (if (zero? premise-count)
      ;; by convention the confidence is 1 when no object satisfies the premise
      1
      ;; conf(A → B) = supp(A ∪ B) / supp(A)
      (/ (count (attribute-derivation context
                                      (union (premise implication) (conclusion implication))))
         premise-count))))
;;
(defn- frequent-itemsets
  "Returns all frequent itemsets of context, given minsupp as minimal support."
  ;; UNTESTED! (the bare tokens here previously broke the defn form)
  [context minsupp]
  (let [mincount (* minsupp (count (objects context)))]
    ;; an itemset is frequent iff at least mincount objects possess all of it
    (all-closed-sets-in-family (fn [intent]
                                 (>= (count (attribute-derivation context intent))
                                     mincount))
                               (attributes context)
                               identity)))
(defn- association-rules
  "Returns all association rules of context with the parameters minsupp as
  minimal support and minconf as minimal confidence. The result returned is a
  lazy sequence."
  ;; UNTESTED! (the bare tokens here previously broke the defn form)
  [context minsupp minconf]
  (let [fitemsets (frequent-itemsets context minsupp)]
    ;; pair frequent itemsets and keep rules meeting minconf; the support of
    ;; a rule is the support of its premise, hence at least minsupp
    (for [A fitemsets,
          B fitemsets,
          :let [impl (make-implication A B)]
          :when (>= (confidence impl context) minconf)]
      impl)))
;;
(defn frequent-closed-itemsets
"Computes for context a lazy sequence of all frequent and closed itemsets,
given minsupp as minimal support."
[context minsupp]
(let [mincount (* minsupp (count (objects context)))]
(intents context
(fn [intent]
(>= (count (attribute-derivation context intent))
mincount)))))
(defn luxenburger-basis
"Computes the luxenburger-base of a given context «context», returning the
result as a lazy sequence. Uses «minconf» as minimal confidence. If
«minsupp-or-predicate» is a number, uses that as a minimal support threshold.
In this case, «minsupp» ∈ [0,1] must hold. If «minsupp-or-predicate» is a
function, uses this as a predicate to filter all candidate itemsets. In this
case, the predicate should be valid predicate value for «intents»."
[context minsupp-or-predicate minconf]
(let [pred (cond (and (number? minsupp-or-predicate)
(<= 0 minsupp-or-predicate 1))
(let [mincount (* minsupp-or-predicate (count (objects context)))]
#(>= (count (aprime context %)) mincount))
;;
(fn? minsupp-or-predicate)
minsupp-or-predicate
;;
true
(illegal-argument "Value for parameter «minsupp-or-predicate» is invalid:"
(str minsupp-or-predicate))),
fqis (vec (doall (intents context pred)))]
(r/fold concat
(fn [impls B_2]
(let [proper-subsets (filter #(proper-subset? % B_2)
(take-while #(not= % B_2) fqis)) ; fqis in lectic order
lowers (filter (fn [B_1]
(not (exists [B_3 proper-subsets]
(proper-subset? B_1 B_3))))
proper-subsets)]
(concat impls
(doall ; do actual computation here, to allow for parallelism
(filter (fn [impl]
(<= minconf (confidence impl context)))
(map (fn [B_1] (make-implication B_1 B_2)) lowers))))))
fqis)))
(defalias luxenburger-base luxenburger-basis)
;;; Learn Implicational Theories by Query Learning
(defn- horn1-reduce-implication
  "Reduce implication by counterexample as needed by the HORN1 algorithm."
  ;; the docstring previously appeared after the argument vector, where it
  ;; was a discarded body expression instead of documentation
  [implication counterexample]
  (make-implication (premise implication)
                    (intersection (conclusion implication)
                                  counterexample)))
(defn- horn1-refine-implication
  "Refine implication by counterexample as needed by the HORN1 algorithm."
  ;; the docstring previously appeared after the argument vector, where it
  ;; was a discarded body expression instead of documentation
  [implication counterexample]
  (make-implication counterexample
                    (union (conclusion implication)
                           (difference (premise implication)
                                       counterexample))))
(defn learn-implications-by-queries
"Learn an implicational theory on base-set with access to membership oracle
`member?' and equivalence oracle `equivalent?'.
The membership oracle has to decide for a given set S whether S is a model of
the background theory to be learned. The equivalence oracle has to decide
whether a given set of implications is equivalent to the background theory.
For this it needs to return true if the theories are equivalent, and a
counterexample otherwise, i.e., a subset of base-set that is a model of the
current hypothesis and not a model of the background theory, or vice versa.
This function implements the HORN1 algorithm of Angluin, Frazier, and Pitt:
“Learning Conjunctions of Horn Clauses”, 1992."
[base-set member? equivalent?]
(loop [hypothesis []]
(let [equivalence-result (equivalent? hypothesis)]
(if (= true equivalence-result) ; we need to check this explicitly
hypothesis
(let [counterexample equivalence-result] ; rename for better readability
(if (some #(not (respects? counterexample %)) hypothesis)
(recur (mapv (fn [implication]
(if (respects? counterexample implication)
implication
(horn1-reduce-implication implication counterexample)))
hypothesis))
(let [minimal-index (first-position-if
(fn [implication]
(let [reduced-premise (intersection counterexample
(premise implication))]
(and (proper-subset? reduced-premise
(premise implication))
(not (member? reduced-premise)))))
hypothesis)]
(if minimal-index
(let [implication (get hypothesis minimal-index)]
(recur (assoc hypothesis
minimal-index
(horn1-refine-implication implication
(intersection counterexample
(premise implication))))))
(recur (conj hypothesis
(make-implication counterexample base-set)))))))))))
(defn equivalence-oracle-by-implications
  "Return a function that can serve as an equivalence oracle for query learning.
  The returned oracle will return true if a given set S of implications is
  equivalent to background-implications. Otherwise, it will return a
  counterexample, i.e., model of S that is not a model of
  background-implications or vice versa."
  [background-implications]
  (fn [hypothesis]
    (let [model-non-model (fn [impl-set-1 impl-set-2]
                            ;; Return a model of impl-set-1 that is not a model
                            ;; of impl-set-2: for any implication of impl-set-2
                            ;; not entailed by impl-set-1, the closure of its
                            ;; premise under impl-set-1 is such a model
                            (keep (fn [implication]
                                    (when-not (follows-semantically? implication impl-set-1)
                                      (close-under-implications impl-set-1
                                                                (premise implication))))
                                  impl-set-2))]
      (or (first (model-non-model hypothesis background-implications)) ; positive counterexamples
          (first (model-non-model background-implications hypothesis)) ; negative counterexamples
          true))))
(defn membership-oracle-by-implications
  "Return a function that can serve as a membership oracle for query learning.
  The returned oracle will return true if a given set S of elements is a model
  of implications, and false otherwise."
  [implications]
  (fn [candidate]
    (every? (partial respects? candidate) implications)))
;;; Approximate Computation of the Canonical Base
(defn approx-canonical-base
"Compute a set L of implications that is an approximation to the canonical
base of the formal context `ctx'. More precisely, if H is the canonical base
of ctx, then
|Mod(L) Δ Mod(H)|/2^{|M|} ≤ ε
with probability at least 1-δ. The computation is done in polynomial time
with respect to |M|, |L|, 1/ε, and 1/δ. "
[ctx ε δ]
(assert (context? ctx))
(assert (and (number? ε)
(< 0 ε 1)))
(assert (and (number? δ)
(< 0 δ 1)))
(let [random-subset #(set (random-sample 0.5 (attributes ctx)))
intent? #(= % (adprime ctx %))
respects-all? (fn [set impls]
(every? (fn [impl] (respects? set impl)) impls))
iter-counter (atom 0)]
(learn-implications-by-queries (attributes ctx)
intent?
(fn [implications]
(let [nr-iter (ceil (* (/ ε) (+ (swap! iter-counter inc)
(/ (Math/log (/ δ))
(Math/log 2)))))]
(or (some (fn [test-set]
(when-not (<=> (intent? test-set)
(respects-all? test-set
implications))
test-set))
(repeatedly nr-iter random-subset))
true))))))
;;; Extension
(defn unitary?
  "Returns true iff implication is unitary (premise of length one)."
  [impl]
  (let [p (premise impl)]
    (= (count p) 1)))
(defn unitary-subset
  "Returns the subset of unitary implications (premise of length one)."
  [impls]
  (into #{} (filter unitary?) impls))
(defn non-unitary-subset
  "Returns the subset of non-unitary implications (premise of length other
  than one)."
  [impls]
  (set (remove unitary? impls)))
(defn ideal-closed?
  "Given a base tests if it is ideal-closed.
  A base is ideal-closed iff for any A → B the closure of A under all
  non-unitary implications is closed under all unitary implications."
  [impls]
  (let [clop-u (clop-by-implications (unitary-subset impls))
        clop-nu (clop-by-implications (non-unitary-subset impls))]
    ;; for every premise: closing under non-unitary implications must already
    ;; be a fixed point of the unitary closure operator
    (every? identity
            (for [impl impls]
              (let [nu-closure (clop-nu (premise impl))]
                (= nu-closure (clop-u nu-closure)))))))
(defn largest-extension-by-implications
  "Given a closure system and implications returns the
  largest extension of the clop by use of the implications. Algorithm from:
  'Representations for the largest Extension of a closure system'
  Karima Ennaoui, Khaled Maafa, Lhouari Nourine 2020
  "
  [closure impls]
  (let [unitary   (unitary-subset (set impls))
        extension (atom (set closure))
        rem-impls (atom (set impls))]
    ;; process each unitary implication once, extending the closure system
    ;; and then discarding the implication; doseq replaces the original
    ;; (doall (for ...)) idiom, which abused a lazy sequence for side effects
    (doseq [impl unitary]
      (let [clop (clop-by-implications @rem-impls)]
        (swap! extension
               union
               (extension-set @extension clop (first (premise impl))))
        (swap! rem-impls difference #{impl})))
    @extension))
;;; The End
true
| null | https://raw.githubusercontent.com/tomhanika/conexp-clj/5e4c15697f06446f925f53d1d143528155d7dd3a/src/main/clojure/conexp/fca/implications.clj | clojure | Copyright ⓒ the conexp-clj developers; all rights reserved.
The use and distribution terms for this software are covered by the
which can be found in the file LICENSE at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
implications to check
all implications
first implication entailed by others
not
Bases for closure operators
generating elements of attribute extents
all attribute extents
attribute extents of irreducible attributes
Convert arbitrary bases to the Canonical Base
Ganter Base
representatives
replace / remove elements
remove proper consequences
add cyclic implications
merge by conclusion
Association Rules
fqis in lectic order
do actual computation here, to allow for parallelism
we need to check this explicitly
rename for better readability
Return a model of impl-set-1 that is not a model
of impl-set-2
positive counterexamples
negative counterexamples
Approximate Computation of the Canonical Base
Extension
The End | Eclipse Public License 1.0 ( -1.0.php )
(ns conexp.fca.implications
"Implications for Formal Concept Analysis."
(:require [clojure.core.reducers :as r]
[conexp.base :refer :all]
[conexp.math.algebra :refer :all]
[conexp.fca.contexts :refer :all]))
(deftype Implication [premise conclusion]
Object
(equals [this other]
(generic-equals [this other] Implication [premise conclusion]))
(hashCode [this]
(hash-combine-hash Implication premise conclusion))
(toString [this]
(str "(" premise " ⟶ " conclusion ")")))
(defmulti premise
"Returns premise of given object."
{:arglists '([thing])}
type)
(defmethod premise Implication [^Implication impl]
(.premise impl))
(defmulti conclusion
"Returns conclusion of given object."
{:arglists '([thing])}
type)
(defmethod conclusion Implication [^Implication impl]
(.conclusion impl))
(defmethod print-method Implication
[impl out]
(.write ^java.io.Writer out
^String (str impl)))
(defn implication?
"Returns true iff thing is an implication."
[thing]
(instance? Implication thing))
(defn make-implication
  "Creates an implication (premise => conclusion \\ premise)."
  [premise conclusion]
  (let [premise (set premise)
        conclusion (set conclusion)]
    ;; the premise is removed from the conclusion, so logically equivalent
    ;; implications are represented by equal Implication objects
    (Implication. premise (difference conclusion premise))))
(defmacro impl
  "Convenience interface for creating implications. Write implications just as
  user=> (impl 1 2 3 ==> 4 5 6)
  (#{1 2 3} ==> #{4 5 6})"
  [& elements]
  ;; everything before the literal ==> is the premise, everything after it
  ;; the conclusion
  (let [[premise conclusion] (split-with (fn [x]
                                           (not= x '==>))
                                         elements)]
    (when (empty? conclusion)
      (warn "«impl» does not contain ==>"))
    ;; (rest conclusion) drops the ==> marker itself
    `(make-implication (list ~@premise) (list ~@(rest conclusion)))))
(defn respects?
  "Returns true iff set respects given implication impl."
  [set impl]
  ;; vacuously true when the premise is not contained in set
  (if (subset? (premise impl) set)
    (subset? (conclusion impl) set)
    true))
(defn holds?
"Returns true iff impl holds in given context ctx."
[impl ctx]
(subset? (conclusion impl) (adprime ctx (premise impl))))
(defn tautology?
  "Returns true iff impl has empty conclusion."
  [impl]
  (not (seq (conclusion impl))))
(defn- implication-graph
  "Compute setup for Downing-Gallier"
  [implications]
  (let [implications (vec implications),
        ;; maps each attribute m to the indices of all implications whose
        ;; premise contains m
        where-in-premise (persistent!
                          (reduce (fn [map i]
                                    (reduce (fn [map m]
                                              (assoc! map m (conj (map m) i)))
                                            map
                                            (premise (implications i))))
                                  (transient {})
                                  (range (count implications))))
        ;; numargs[i] is the premise size of implication i, i.e. how many
        ;; attributes are still missing before implication i can fire
        numargs (loop [numargs []
                       impls implications]
                  (if (empty? impls)
                    numargs
                    (recur (conj numargs (count (premise (first impls))))
                           (rest impls))))]
    [implications where-in-premise numargs]))
(defn- close-with-downing-gallier
  "Downing-Gallier"
  [[implications in-premise numargs] input-set]
  ;; decrement the missing-attribute counter of every implication whose
  ;; premise meets input-set
  (let [numargs (reduce (fn [numargs i]
                          (assoc! numargs i (dec (numargs i))))
                        (transient numargs)
                        (mapcat in-premise input-set))]
    ;; seed the work queue with all implications whose premise is already
    ;; fully contained in input-set (counter reached zero)
    (loop [queue (reduce (fn [queue i]
                           (if (zero? (numargs i))
                             (conj queue i)
                             queue))
                         (clojure.lang.PersistentQueue/EMPTY)
                         (range (count numargs))),
           numargs numargs,
           result input-set]
      (if (empty? queue)
        result
        ;; fire implication idx: add its yet-missing conclusion attributes,
        ;; decrement counters of implications mentioning them, and enqueue
        ;; those whose counter drops to zero
        (let [idx (first queue),
              new (difference (conclusion (implications idx)) result)
              [numargs queue] (reduce (fn [[numargs queue] i]
                                        (let [numargs (assoc! numargs i (dec (numargs i)))]
                                          [numargs (if (pos? (numargs i))
                                                     queue
                                                     (conj queue i))]))
                                      [numargs (pop queue)]
                                      (mapcat in-premise new))]
          (recur queue numargs (into result new)))))))
(defn clop-by-implications
"Returns closure operator given by implications."
[implications]
(let [predata (implication-graph implications)]
(fn [input-set]
(close-with-downing-gallier predata input-set))))
(defn close-under-implications
"Computes smallest superset of set being closed under given implications."
[implications input-set]
((clop-by-implications implications) input-set))
(defn- add-immediate-elements
"Iterating through the sequence of implications, tries to apply as many
implications as possible. Uses subset-test to determine whether a given
implication can be used to extend a given set, i.e. an implication impl can be
used to extend a set s if and only if
(subset-test (premise impl) s)
is true. Note that if (conclusion impl) is already a subset of s, then s is
effectively not extended."
[implications initial-set subset-test]
(loop [conclusions (transient initial-set),
impls implications,
unused-impls (transient [])]
(if-let [impl (first impls)]
(if (subset-test (premise impl) initial-set)
(recur (reduce conj! conclusions (conclusion impl))
(rest impls)
unused-impls)
(recur conclusions
(rest impls)
(conj! unused-impls impl)))
[(persistent! conclusions)
(persistent! unused-impls)])))
(defn pseudo-close-under-implications
"Computes smallest superset of set being pseudo-closed under given
implications."
[implications set]
(assert (set? set))
(loop [set set,
impls implications]
(let [[new impls] (add-immediate-elements impls set proper-subset?)]
(if (= new set)
new
(recur new impls)))))
(defn pseudo-clop-by-implications
"Returns for a given set of implications the corresponding closure
operator whose closures are all closed and pseudo-closed sets."
[implications]
(partial pseudo-close-under-implications implications))
(defn follows-semantically?
"Returns true iff implication follows semantically from given
implications."
[implication implications]
(subset? (conclusion implication)
(close-under-implications implications (premise implication))))
(defalias follows? follows-semantically?)
(defn equivalent-implications?
"Returns true iff the two seqs of implications are equivalent."
[impls-1 impls-2]
(and (forall [impl impls-1] (follows-semantically? impl impls-2))
(forall [impl impls-2] (follows-semantically? impl impls-1))))
(defn minimal-implication-set?
"Checks whether given set of implications is minimal, i.e. no
implication in this set follows from the others."
[impl-set]
(let [impl-set (set impl-set)]
(forall [impl impl-set]
(not (follows-semantically? impl (disj impl-set impl))))))
(defn sound-implication-set?
"Checks whether given set of implications is sound, i.e. every
implication holds in the given context."
[ctx impl-set]
(forall [impl impl-set]
(holds? impl ctx)))
(defn complete-implication-set?
"Checks wheter given set of implications is complete in context ctx. This is a
very costly computation."
[ctx impl-set]
(and (forall [impl impl-set]
(and (subset? (premise impl) (attributes ctx))
(subset? (conclusion impl) (attributes ctx))))
(forall [A (subsets (attributes ctx))]
(subset? (adprime ctx A)
(close-under-implications impl-set A)))))
(defn irredundant-subset
  "Given a set impls of implications, returns an irredundant subset of impls.
  Note that this set does not need to be of minimal cardinality."
  ;; NOTE(review): this copy of the definition had lost its loop binding and
  ;; recur forms (leaving new-impls unbound); restored from the intact
  ;; version of the same function earlier in the file.
  [impls]
  (reduce (fn [impls impl]
            (if (follows-semantically? impl impls)
              impls
              (loop [impls impls,                  ; implications to check
                     new-impls (conj impls impl)]  ; all implications
                (if-not (seq impls)
                  new-impls
                  (let [next-impl (first impls)]
                    (if (follows-semantically? next-impl (disj new-impls next-impl))
                      ;; first implication is entailed by the others: drop it
                      (recur (rest impls) (disj new-impls next-impl))
                      (recur (rest impls) new-impls)))))))
          #{}
          impls))
(defn canonical-base-from-clop
"Given a closure operator «clop» on the set «base», computes its canonical base,
optionally using the set «background-knowledge» of implications on «base-set»
as background knowledge. The result will be a lazy sequence. If «predicate»
is given as third argument, computes only those implications whose premise
satisfy this predicate. Note that «predicate» has to satisfy the same
conditions as the one of «next-closed-set-in-family»."
([clop base]
(canonical-base-from-clop clop base #{} (constantly true)))
([clop base background-knowledge]
(canonical-base-from-clop clop base background-knowledge (constantly true)))
([clop base background-knowledge predicate]
(assert (fn? clop)
"Given closure operator must be a function")
(assert (coll? base)
"Base must be a collection")
(assert (fn? predicate)
"Predicate must be a function")
(assert (and (set? background-knowledge)
(forall [x background-knowledge]
(implication? x)))
"Background knowledge must be a set of implications")
(let [next-closure (fn [implications last]
(next-closed-set-in-family predicate
base
(clop-by-implications implications)
last)),
runner (fn runner [implications candidate]
(when candidate
(let [conclusions (clop candidate)]
(if (not= candidate conclusions)
(let [impl (make-implication candidate conclusions),
impls (conj implications impl)]
(cons impl
(lazy-seq (runner impls (next-closure impls candidate)))))
(recur implications (next-closure implications candidate))))))]
(lazy-seq (runner background-knowledge
(close-under-implications background-knowledge #{}))))))
(defn intersect-implicational-theories
"Given a set «base-set» and collections «implication-sets» of implications,
returns the canonical base of the intersection of the corresponding closure
theories."
[base-set & implication-sets]
(let [implication-clops (vec (map clop-by-implications implication-sets)),
clop (fn [A]
(r/fold (r/monoid intersection (constantly base-set))
(r/map #(% A) implication-clops)))]
(canonical-base-from-clop clop base-set)))
(defn canonical-base
"Returns the canonical base of given context, as a lazy sequence. Uses
«background-knowledge» as starting set of implications, which will not appear
in the result. If «predicate» is given (a function), computes only those
implications from the canonical base whose premise satisfy this predicate,
i.e. «predicate» returns true on these premises. Note that «predicate» has to
satisfy the same conditions as the predicate to «next-closed-set-in-family»."
([ctx]
(canonical-base ctx #{} (constantly true)))
([ctx background-knowledge]
(canonical-base ctx background-knowledge (constantly true)))
([ctx background-knowledge predicate]
(assert (context? ctx)
"First argument must be a formal context")
(canonical-base-from-clop #(context-attribute-closure ctx %)
(attributes ctx)
background-knowledge
predicate)))
(defalias stem-base canonical-base)
(defn pseudo-intents
"Returns the pseudo intents of the given context ctx."
[ctx]
(map premise (stem-base ctx)))
(defn parallel-canonical-base-from-clop
"Computes the canonical base of the given closure operator in parallel.
Accepts the same parameters as «canonical-base-from-clop», except for the
predicate."
([clop base]
(parallel-canonical-base-from-clop clop base #{}))
([clop base background-knowledge]
(let [implications (atom (set background-knowledge))
current (atom #{#{}})]
(loop [n 0]
(if (< (count base) n)
(difference @implications (set background-knowledge))
(do
(dopar [C (filter #(= n (count %)) @current)]
(swap! current #(disj % C))
(let [impl-C (close-under-implications @implications C)]
(if (= C impl-C)
(let [clop-C (clop C)]
(when (not= C clop-C)
(swap! implications
#(conj % (make-implication C clop-C))))
(doseq [m base :when (not (contains? clop-C m))]
(swap! current #(conj % (conj clop-C m)))))
(swap! current #(conj % impl-C)))))
(recur (inc n))))))))
(defn parallel-canonical-base
  "Computes the canonical base of the given formal context.
  Background knowledge can be provided as a set of implications on the attribute
  set of the given context. Computation is eager and is done in parallel."
  ([ctx]
   (parallel-canonical-base ctx #{}))
  ([ctx background-knowledge]
   ;; use the attribute double-prime operator of ctx as the closure operator
   (parallel-canonical-base-from-clop #(adprime ctx %)
                                      (attributes ctx)
                                      background-knowledge)))
;;; Proper Premises
(defn proper-conclusion
  "Returns all elements which are implied in context ctx by A but are neither
  contained in A nor follow from a strict subset of A."
  [ctx A]
  (let [closure-without    (fn [x] (context-attribute-closure ctx (disj A x)))
        implied-by-subsets (reduce into A (map closure-without A))]
    (difference (context-attribute-closure ctx A)
                implied-by-subsets)))
(defn proper-premise?
  "Returns true iff set A is a subset of the attributes of context ctx
  and is a proper premise in ctx, i.e. has a non-empty proper conclusion."
  [ctx A]
  (boolean (and (subset? A (attributes ctx))
                (seq (proper-conclusion ctx A)))))
(defn- proper-premises-by-hypertrans
  "Returns all proper premises for the attribute «m» in the formal context
  «ctx». The set «objs» should contain all objects from ctx which are in
  down-arrow relation to m."
  [ctx m objs]
  ;; the proper premises for m are the minimal transversals (over the
  ;; attributes other than m) of the complements of the object intents of objs
  (minimal-hypergraph-transversals
   (disj (attributes ctx) m)
   (set-of (difference (attributes ctx) (oprime ctx #{g})) | g objs)))
(defn proper-premises-for-attribute
  "Returns all proper premises for the attribute «m» in the formal context «ctx»."
  [ctx m]
  ;; collect the objects in down-arrow relation to m, then delegate
  (proper-premises-by-hypertrans ctx m (set-of g | [g n] (down-arrows ctx) :when (= n m))))
(defn proper-premises
  "Returns the proper premises of the given context ctx as a lazy sequence."
  [ctx]
  ;; build a map from every attribute m to the set of objects standing in
  ;; down-arrow relation to m
  (let [down-arrow-map (loop [arrows (down-arrows ctx),
                              arrow-map (map-by-fn (constantly #{}) (attributes ctx))]
                         (if-let [[g m] (first arrows)]
                           (recur (rest arrows)
                                  (update-in arrow-map [m] conj g))
                           arrow-map))]
    ;; compute the proper premises attribute-wise in parallel and merge,
    ;; removing duplicates that arise for different attributes
    (distinct
     (reduce concat
             (pmap #(apply proper-premises-by-hypertrans ctx %)
                   down-arrow-map)))))
(defn proper-premise-implications
  "Returns all implications based on the proper premises of the
  context ctx."
  [ctx]
  ;; each proper premise A yields the implication A → A''
  (set-of (make-implication A (context-attribute-closure ctx A))
          [A (proper-premises ctx)]))
;;; Ryssel's Algorithm
;; Helper for ryssel-base: enumerate the minimum covers of the complement of
;; A by complements of candidate sets, returned as sequences of complements.
(defn- cover [base-set candidates A]
  (let [complement-of #(difference base-set %)
        object-covers (minimum-set-covers
                       (complement-of A)
                       (set-of (complement-of N) | N candidates))]
    (for [cover object-covers]
      (map complement-of cover))))
(defn ryssel-base
  "Returns the implications computed by Ryssels Algorithm, as a lazy sequence."
  [ctx]
  ;; NOTE(review): the head of this let was truncated in this copy of the file
  ;; (the bindings for gens/all-extents/irr-extents were missing); it has been
  ;; restored to match the visible remainder of the algorithm — verify against
  ;; upstream.
  (let [gens        (reduce! (fn [map x]  ;map attribute extents to their generating attributes
                               (let [extent (aprime ctx #{x})]
                                 (assoc! map extent
                                         (conj (get map extent #{}) x))))
                             {}
                             (attributes ctx)),
        all-extents (set (keys gens)),    ;all attribute extents
        irr-extents (set-of (aprime ctx #{m}) ;extents of irreducible attributes
                            | m (attributes (reduce-attributes ctx))),
        empty-prime (adprime ctx #{})]
    (->> (reduce into
                 ;; singleton premises {m} whose closure is not just m added
                 ;; to the closure of the empty set
                 (for [m (attributes ctx)
                       :when (not= (adprime ctx #{m})
                                   (conj empty-prime m))]
                   #{m})
                 ;; per attribute extent, premises obtained from set covers
                 (pmap (fn [A]
                         (let [candidates (set-of U | U (disj irr-extents A),
                                                  :let [U-cap-A (intersection U A)]
                                                  :when (not (exists [V all-extents]
                                                               (and (proper-subset? V A)
                                                                    (subset? U-cap-A V))))),
                               covers (cover (objects ctx) candidates A)]
                           (for [X covers]
                             (set-of m | Y X, m (gens Y)))))
                       all-extents))
         distinct
         (map #(make-implication % (adprime ctx %))))))
(defn stem-base-from-base
  "For a given set of implications returns its stem-base, see:
  Rudolph 2007
  -3-540-70901-5_10"
  [implications]
  ;; first replace every conclusion by the closure of premise ∪ conclusion
  ;; under the full implication set
  (let [implications (pmap (fn [impl]
                             (make-implication
                              (premise impl)
                              (close-under-implications implications
                                                        (union (premise impl)
                                                               (conclusion impl)))))
                           implications)]
    ;; then saturate each premise against all other implications, dropping
    ;; implications whose conclusion becomes empty
    (loop [stem-base #{},
           implications implications,
           all (set implications)]
      (if (empty? implications)
        stem-base
        (let [A->B (first implications),
              implications (rest implications),
              all (disj all A->B)
              A* (close-under-implications all (premise A->B)),
              A*->B (make-implication A* (conclusion A->B))]
          (if (not-empty (conclusion A*->B))
            (recur (conj stem-base A*->B)
                   implications
                   (conj all A*->B))
            (recur stem-base
                   implications
                   all)))))))
;; alternative name, matching the canonical-base naming scheme
(defalias canonical-base-from-base stem-base-from-base)
(defn ganter-base
  "Given an implication base transforms it into the Ganter Base, a second
  argument may be given as a function by which the representative element
  will be chosen.
  The default takes whichever element comes first.
  Defined in:
  “Properties of Finite Lattices” by S. Reeg and W. Weiß, Revisited
  In Memoriam Peter Burmeister (1941–2019), Bernhard Ganter 2019
  -3-030-21462-3_8"
  ([base]
   (ganter-base base first))
  ([base choose]
   ;; Fixed: the opening `(let [` of the binding block was missing, leaving
   ;; `atts`, `equiv`, … as bare forms; the bindings below are otherwise
   ;; unchanged.
   (let [;; all attributes occurring in the base
         atts (reduce union (map #(union (premise %) (conclusion %)) base))
         ;; equivalence classes of attributes with equal closures
         equiv (closure-equivalence atts
                                    #(close-under-implications base #{%}))
         ;; chosen representative of each class, mapped to the class
         reps (reduce merge (for [[k v] equiv] (hash-map (choose v) v)))
         ;; closure of the empty set
         zero (close-under-implications base #{})
         ;; rewrite implications over representatives, dropping zero
         impl2 (for [i base]
                 (let [prem (difference (premise i) zero)
                       concl (difference (conclusion i) zero)]
                   (make-implication
                    (set (for [[k v] reps :when (some (set v) prem)] k))
                    (set (for [[k v] reps :when (some (set v) concl)] k)))))
         closures (apply merge
                         (for [[k1 v1] equiv [k2 v2] reps :when (= v1 v2)]
                           (hash-map k2 (difference k1 #{k2}))))
         ;; remove attributes already implied by other premise/conclusion members
         impl4 (for [i impl2]
                 (let [prem (premise i)
                       concl (conclusion i)]
                   (make-implication
                    (if (< 1 (count prem))
                      (difference
                       prem
                       (reduce union (map #(get closures %) prem)))
                      prem)
                    (if (< 1 (count concl))
                      (difference
                       concl
                       (reduce union (map #(get closures %) concl)))
                      concl))))
         ;; cyclic implications identifying the members of each class
         cycles (flatten
                 (for [[k v] reps :when (< 1 (count v))]
                   (map
                    #(make-implication #{%1} #{%2})
                    v
                    (conj (drop-last 1 v) (last v)))))
         cycles+ (if (< 0 (count zero))
                   (conj cycles (make-implication #{} zero))
                   cycles)]
     ;; merge implications sharing a premise
     (set (for [[k v] (group-by premise (concat impl4 cycles+))]
            (make-implication k (reduce union (map conclusion v))))))))
(defn support
  "Computes the support of the set of attributes B in context ctx. If an
  implications is given, returns the support of this implication in the given
  context."
  [thing ctx]
  (cond
    (set? thing)
    ;; support of an attribute set: relative size of its extent
    (let [objs (objects ctx)]
      (if (empty? objs)
        1
        (/ (count (attribute-derivation ctx thing))
           (count objs))))
    (implication? thing)
    ;; support of an implication is the support of its premise
    (recur (premise thing) ctx)
    :else
    (illegal-argument "Cannot determine support of " (print-str thing))))
(defn confidence
  "Computes the confidence of the given implication in the given context."
  [implication context]
  (let [prem          (premise implication)
        premise-count (count (attribute-derivation context prem))]
    (if (zero? premise-count)
      ;; empty extent of the premise: confidence is 1 by convention
      1
      (/ (count (attribute-derivation context
                                      (union prem (conclusion implication))))
         premise-count))))
(defn- frequent-itemsets
  "Returns all frequent itemsets of context, given minsupp as minimal support."
  ;; UNTESTED!  (Fixed: this marker had lost its comment prefix and appeared
  ;; as bare symbols between the docstring and the parameter vector, which
  ;; breaks the defn form.)
  [context minsupp]
  (let [mincount (* minsupp (count (objects context)))]
    (all-closed-sets-in-family (fn [intent]
                                 (>= (count (attribute-derivation context intent))
                                     mincount))
                               (attributes context)
                               identity)))
(defn- association-rules
  "Returns all association rules of context with the parameters minsupp as
  minimal support and minconf as minimal confidence. The result returned is a
  lazy sequence."
  ;; UNTESTED!  (Fixed: this marker had lost its comment prefix and appeared
  ;; as bare symbols between the docstring and the parameter vector, which
  ;; breaks the defn form.)
  [context minsupp minconf]
  (let [fitemsets (frequent-itemsets context minsupp)]
    (for [A fitemsets,
          B fitemsets,
          :let [impl (make-implication A B)]
          :when (>= (confidence impl context) minconf)]
      impl)))
(defn frequent-closed-itemsets
  "Computes for context a lazy sequence of all frequent and closed itemsets,
  given minsupp as minimal support."
  [context minsupp]
  (let [mincount  (* minsupp (count (objects context)))
        frequent? (fn [intent]
                    (<= mincount
                        (count (attribute-derivation context intent))))]
    (intents context frequent?)))
(defn luxenburger-basis
  "Computes the luxenburger-base of a given context «context», returning the
  result as a lazy sequence. Uses «minconf» as minimal confidence. If
  «minsupp-or-predicate» is a number, uses that as a minimal support threshold.
  In this case, «minsupp» ∈ [0,1] must hold. If «minsupp-or-predicate» is a
  function, uses this as a predicate to filter all candidate itemsets. In this
  case, the predicate should be valid predicate value for «intents»."
  [context minsupp-or-predicate minconf]
  (let [pred (cond (and (number? minsupp-or-predicate)
                        (<= 0 minsupp-or-predicate 1))
                   (let [mincount (* minsupp-or-predicate (count (objects context)))]
                     #(>= (count (aprime context %)) mincount))
                   (fn? minsupp-or-predicate)
                   minsupp-or-predicate
                   true
                   (illegal-argument "Value for parameter «minsupp-or-predicate» is invalid:"
                                     (str minsupp-or-predicate))),
        fqis (vec (doall (intents context pred)))]
    (r/fold concat
            (fn [impls B_2]
              ;; Fixed: the collection argument `fqis` of this filter was
              ;; missing, leaving the transducer-arity call and breaking the
              ;; subsequent filtering.
              (let [proper-subsets (filter #(proper-subset? % B_2) fqis)
                    ;; maximal proper sub-intents of B_2
                    lowers (filter (fn [B_1]
                                     (not (exists [B_3 proper-subsets]
                                            (proper-subset? B_1 B_3))))
                                   proper-subsets)]
                (concat impls
                        (filter (fn [impl]
                                  (<= minconf (confidence impl context)))
                                (map (fn [B_1] (make-implication B_1 B_2)) lowers)))))
            fqis)))
;; alternative spelling
(defalias luxenburger-base luxenburger-basis)
;;; Learn Implicational Theories by Query Learning
(defn- horn1-reduce-implication
  "Reduce implication by counterexample as needed by the HORN1 algorithm:
  the conclusion is intersected with the counterexample."
  ;; Fixed: the docstring was placed after the parameter vector, where it is
  ;; a no-op string in body position rather than a docstring.
  [implication counterexample]
  (make-implication (premise implication)
                    (intersection (conclusion implication)
                                  counterexample)))
(defn- horn1-refine-implication
  "Refine implication by counterexample as needed by the HORN1 algorithm:
  the premise is replaced by the counterexample, and the attributes of the
  old premise missing from it move into the conclusion."
  ;; Fixed: the docstring was placed after the parameter vector, where it is
  ;; a no-op string in body position rather than a docstring.
  [implication counterexample]
  (make-implication counterexample
                    (union (conclusion implication)
                           (difference (premise implication)
                                       counterexample))))
(defn learn-implications-by-queries
  "Learn an implicational theory on base-set with access to membership oracle
  `member?' and equivalence oracle `equivalent?'.
  The membership oracle has to decide for a given set S whether S is a model of
  the background theory to be learned. The equivalence oracle has to decide
  whether a given set of implications is equivalent to the background theory.
  For this it needs to return true if the theories are equivalent, and a
  counterexample otherwise, i.e., a subset of base-set that is a model of the
  current hypothesis and not a model of the background theory, or vice versa.
  This function implements the HORN1 algorithm of Angluin, Frazier, and Pitt:
  “Learning Conjunctions of Horn Clauses”, 1992."
  [base-set member? equivalent?]
  (loop [hypothesis []]
    (let [equivalence-result (equivalent? hypothesis)]
      ;; Fixed: the dispatch on the oracle's answer was missing in this copy
      ;; (hypothesis and counterexample were dangling forms); restored: return
      ;; the hypothesis when equivalent, otherwise the answer is a
      ;; counterexample to process.
      (if (= equivalence-result true)
        hypothesis
        (let [counterexample equivalence-result]
          (if (some #(not (respects? counterexample %)) hypothesis)
            ;; negative counterexample: shrink violated conclusions
            (recur (mapv (fn [implication]
                           (if (respects? counterexample implication)
                             implication
                             (horn1-reduce-implication implication counterexample)))
                         hypothesis))
            ;; positive counterexample: refine the first refinable implication
            (let [minimal-index (first-position-if
                                 (fn [implication]
                                   (let [reduced-premise (intersection counterexample
                                                                       (premise implication))]
                                     (and (proper-subset? reduced-premise
                                                          (premise implication))
                                          (not (member? reduced-premise)))))
                                 hypothesis)]
              (if minimal-index
                (let [implication (get hypothesis minimal-index)]
                  (recur (assoc hypothesis
                                minimal-index
                                (horn1-refine-implication implication
                                                          (intersection counterexample
                                                                        (premise implication))))))
                ;; no implication refinable: add a new one
                (recur (conj hypothesis
                             (make-implication counterexample base-set)))))))))))
(defn equivalence-oracle-by-implications
  "Return a function that can serve as an equivalence oracle for query learning.
  The returned oracle will return true if a given set S of implications is
  equivalent to background-implications. Otherwise, it will return a
  counterexample, i.e., model of S that is not a model of
  background-implications or vice versa."
  [background-implications]
  (fn [hypothesis]
    ;; model-non-model yields, for every implication of impl-set-2 that does
    ;; not follow from impl-set-1, a model of impl-set-1 violating it
    (let [model-non-model (fn [impl-set-1 impl-set-2]
                            (keep (fn [implication]
                                    (when-not (follows-semantically? implication impl-set-1)
                                      (close-under-implications impl-set-1
                                                                (premise implication))))
                                  impl-set-2))]
      ;; Fixed: the oracle unconditionally returned true in this copy; the
      ;; counterexample search in both directions has been restored.
      (or (first (model-non-model hypothesis background-implications))
          (first (model-non-model background-implications hypothesis))
          true))))
(defn membership-oracle-by-implications
  "Return a function that can serve as a membership oracle for query learning.
  The returned oracle will return true if a given set S of elements is a model
  of implications, and false otherwise."
  [implications]
  (fn [element-set]
    (every? (partial respects? element-set) implications)))
(defn approx-canonical-base
  "Compute a set L of implications that is an approximation to the canonical
  base of the formal context `ctx'. More precisely, if H is the canonical base
  of ctx, then
  |Mod(L) Δ Mod(H)|/2^{|M|} ≤ ε
  with probability at least 1-δ. The computation is done in polynomial time
  with respect to |M|, |L|, 1/ε, and 1/δ. "
  [ctx ε δ]
  (assert (context? ctx))
  (assert (and (number? ε)
               (< 0 ε 1)))
  (assert (and (number? δ)
               (< 0 δ 1)))
  (let [random-subset #(set (random-sample 0.5 (attributes ctx)))
        intent? #(= % (adprime ctx %))
        respects-all? (fn [set impls]
                        (every? (fn [impl] (respects? set impl)) impls))
        iter-counter (atom 0)]
    ;; query learning: intents serve as the membership oracle; equivalence is
    ;; tested approximately by random sampling, with a sample count that grows
    ;; with every oracle call to maintain the probabilistic guarantee
    (learn-implications-by-queries (attributes ctx)
                                   intent?
                                   (fn [implications]
                                     (let [nr-iter (ceil (* (/ ε) (+ (swap! iter-counter inc)
                                                                     (/ (Math/log (/ δ))
                                                                        (Math/log 2)))))]
                                       ;; a sampled set that is an intent but
                                       ;; violates the hypothesis (or vice
                                       ;; versa) is a counterexample
                                       (or (some (fn [test-set]
                                                   (when-not (<=> (intent? test-set)
                                                                  (respects-all? test-set
                                                                                 implications))
                                                     test-set))
                                                 (repeatedly nr-iter random-subset))
                                           true))))))
(defn unitary?
  "Returns true iff implication is unitary (premise of length one)."
  [impl]
  (-> impl premise count (= 1)))
(defn unitary-subset
  "Returns the subset of unitary implications (premise of length one)."
  [impls]
  (into #{} (filter unitary?) impls))
(defn non-unitary-subset
  "Returns the subset of non-unitary implications (premise of length other
  than one)."
  [impls]
  (set (remove unitary? impls)))
(defn ideal-closed?
  "Given a base tests if it is ideal-closed.
  A base is ideal-closed iff for any A → B the closure of A under all
  non-unitary implications is closed under all unitary implications."
  [impls]
  (let [clop-u  (clop-by-implications (unitary-subset impls))
        clop-nu (clop-by-implications (non-unitary-subset impls))]
    (every? (fn [impl]
              (let [nu-closure (clop-nu (premise impl))]
                (= nu-closure (clop-u nu-closure))))
            impls)))
(defn largest-extension-by-implications
  "Given a closure system and implications returns the
  largest extension of the clop by use of the implications. Algorithm from:
  'Representations for the largest Extension of a closure system'
  Karima Ennaoui, Khaled Maafa, Lhouari Nourine 2020
  "
  [closure impls]
  (let [unitary (unitary-subset (set impls))
        extension (atom (set closure))
        rem-impls (atom (set impls))]
    ;; process each unitary implication once: extend the current closure
    ;; system via extension-set, then drop the processed implication
    ;; (doall forces the otherwise lazy for, so the swap!s actually run)
    (doall (for [impl unitary]
             (let [clop (clop-by-implications @rem-impls)]
               (swap! extension
                      union
                      (extension-set @extension clop (first (premise impl))))
               (swap! rem-impls difference #{impl}))))
    @extension))
true
|
cb7231f6e123ea5fb94d2ecf03cce5db5b1a8f98fbbae5eb26cbc8e148a68178 | NorfairKing/the-notes | Order.hs | module Functions.Order where
import Notes
import Data.List (intercalate, isSubsequenceOf,
subsequences)
import qualified Data.Text as T
import qualified Prelude as P
import Logic.FirstOrderLogic.Macro
import Logic.PropositionalLogic.Macro
import NumberTheory.Macro
import Relations.Basics.Terms
import Relations.Orders.Hasse
import Relations.Orders.Macro
import Relations.Orders.Terms
import Relations.Preorders.Terms
import Sets.Basics.Terms
import Sets.Powerset.Terms
import Functions.Application.Macro
import Functions.Basics.Macro
import Functions.Basics.Terms
import Functions.Composition.Macro hiding (comp)
import Functions.Composition.Terms
import Functions.Jections.Terms
import Functions.Order.Diagrams
import Functions.Order.Macro
import Functions.Order.Terms
-- | Top-level section on functions and orders, assembling all subsections in
-- rendering order.
order :: Note
order = section "Functions and orders" $ do
    conjunctiveOrderDefinition
    subsection "Monotonic functions" $ do
        monotonicDefinition
        monotonicFunctionsClosedUnderComposition
        scottContinuousDefinition
        scottContinuousImpliesMonotonicTheorem
    subsection "Fixed points" $ do
        fixedPointDefinition
        leastFixedPointDefinition
        greatestFixedPointDefinition
        fixedPointExamples
        regions
        tarskiFixedPointTheorem
        kleeneChainDefinition
        kleenesFixedPointTheorem
    latticesOverFunctions
    completelyMeetPreservingDefinition
    completelyJoinPreservingDefinition
    preservingExamples
    galoisConnectionS
    approximationS
-- | Subsection on fixed-point, ascending and descending regions.
regions :: Note
regions = subsection "Regions" $ do
    fixedPointRegionDefinition
    ascendingRegionDefinition
    descendingRegionDefinition
    ascendingRegionIsClosedUnderApplication
    descendingRegionIsClosedUnderApplication
    topInDescendingRegion
    botInAscendingRegion
    fixedPointRegionIsIntersectionOfAscAndDesc
-- | Subsection on Galois connections.
galoisConnectionS :: Note
galoisConnectionS = subsection "Galois connections" $ do
    galoisConnectionDefinition
    galoisConnectionEquivalentDefinition
    galoisConnectionExamples
    galoisConnectionsCompose
    galoisConnectionsPreserves
    preservesNoGaloisConnection
    galoisConnectionDetermination
    galoisConnectionExistenceAlpha
    galoisConnectionExistenceGamma
    galoisInsertionDefinition
    galoisInsertionOtherJections
-- | Subsection on approximations.
approximationS :: Note
approximationS = subsection "Approximations" $ do
    approximationDefinition
    approximationEquivalentDefinition
    approximationExamples
    monotoneEquivalences
    approximationExists
    mostPreciseApproximationDefinition
    leastFixedPointApproximationTheorem
    leastFixedPointApproximationTheoremWithoutGalois
-- | Definition of the conjunctive (pointwise) order on functions into a poset.
conjunctiveOrderDefinition :: Note
conjunctiveOrderDefinition = de $ do
    lab conjunctiveOrderDefinitionLabel
    let a = "A"
        b = "B"
        po = partord_
    s ["Let", m po, "be a", partialOrder, "on a", set, m b, and, "let", m a, "be a", set, "as well"]
    let f_ = "f"
        f = fn f_
        co = cordsign partord_
    s [the, conjunctiveOrder', m co, "on the", set, "of", functions, m $ setcmpr f_ (fun f_ a b), "is defined as follows"]
    let g_ = "g"
        g = fn g_
        (<.) = inposet po
        (<<) = cord partord_
        x = "x"
    -- f ≤ g iff f x ≤ g x for every x in the domain
    ma $ (f_ << g_) === (fa (x ∈ a) (f x <. g x))
-- | Definition of monotonic (monotone, isotone, order-preserving) functions
-- between posets.
monotonicDefinition :: Note
monotonicDefinition = de $ do
    lab monotonicDefinitionLabel
    lab monotoneDefinitionLabel
    lab isotoneDefinitionLabel
    lab orderPreservingDefinitionLabel
    s ["Let ", m $ relposet x rx, and, m $ relposet y ry, " each be a ", poset_, and, m $ fun f x y, " a function"]
    s [m $ fun f x y, " is said to be ", monotonic' <> "," , monotone' <> ",", isotone', or, orderPreserving', " if it has the following property"]
    ma $ fa (cs [x1, x2] ∈ x) $ inposet rx x1 x2 ⇒ inposet ry (f_ x1) (f_ x2)
  where
    x1 = x !: 1
    x2 = x !: 2
    f = fun_
    f_ = fn f
    x = "X"
    rx = partord_ !: x
    y = "Y"
    ry = partord_ !: y
-- | Theorem: the composition of two monotonic functions is monotonic.
monotonicFunctionsClosedUnderComposition :: Note
monotonicFunctionsClosedUnderComposition = thm $ do
    lab monotonicFunctionsClosedUnderCompositionTheoremLabel
    s [the, composition, "of two", monotonic, functions, "is", monotonic]
    s ["Let ", m f1, and, m f2, "be", monotonic, functions]
    s [m $ f2 ● f1, "is a", monotonic, function]
    proof $ do
        let a = "A"
            b = "B"
            c = "C"
            ra = partord_ !: a
            rb = partord_ !: b
        s ["Let ", m $ fun f1 a b, and, m $ fun f2 b c, "be", monotonic, functions, "on the", posets, m $ relposet a ra, and, m $ relposet b rb]
        let x = "x"
            y = "y"
            oa = binop $ raw "\\ " <> partord_ !: "a" <> raw "\\ "
        -- NOTE(review): the sentence says "elements of A and B respectively",
        -- but x ⊑a y requires both x and y to lie in A — confirm the intended
        -- wording.
        s ["Let ", m x, and, m y, "be elements of", m a, and, m b, "respectively, such that the following holds"]
        ma $ x `oa` y
        let ob = binop $ raw "\\ " <> partord_ !: "b" <> raw "\\ "
        s ["Because ", m f1, "is", monotonic, "the following must hold as well"]
        ma $ fn f1 x `ob` fn f1 y
        s ["Because ", m f2, "is", monotonic, "the following must hold as well"]
        ma $ fn f2 (fn f1 x) `ob` fn f2 (fn f1 y)
        ma $ fn (pars $ f2 ● f1) x `ob` fn (pars $ f2 ● f1) y
        s ["This means that", m $ f2 ● f1, "is monotonic"]
  where
    f1 = fun_ !: 1
    f2 = fun_ !: 2
-- | Definition of Scott-continuous functions between lattices.
scottContinuousDefinition :: Note
scottContinuousDefinition = de $ do
    lab scottContinuousDefinitionLabel
    s ["Let ", m $ lat x rx, and, m $ lat y ry, " each be a ", lattice_, and, m $ fun f x y, " a function"]
    s [m $ fun fun_ x y, " is called ", scottContinuous', " if it has the following property"]
    -- f preserves suprema of arbitrary subsets
    ma $ fa (ss ⊆ x) $ f_ (sup ss) =: sup (f □ ss)
  where
    ss = "S"
    f = fun_
    f_ = fn f
    x = "X"
    rx = partord_ !: x
    y = "Y"
    ry = partord_ !: y
-- | Theorem: every Scott-continuous function is monotonic.
scottContinuousImpliesMonotonicTheorem :: Note
scottContinuousImpliesMonotonicTheorem = thm $ do
    let f = fun_
        f_ = fn f
        x = "X"
        rx = partord_ !: x
        y = "Y"
        ry = partord_ !: y
    s ["Let ", m $ lat x rx, and, m $ lat y ry, " each be a ", lattice_, and, m $ fun f x y, "a", function]
    s ["If", m f, "is", scottContinuous <> ",", "then", m f, "is", monotonic]
    proof $ do
        s ["Let", m f, "be a", scottContinuous, function]
        let a = "a"
            b = "b"
        let (<<) = inposet rx
            (<.) = inposet ry
        s ["Let", m a, and, m b, "be elements of", m x, "such that", m $ a << b, "holds"]
        s ["According to the definition of a", scottContinuous, function, "we observe the following"]
        ma $ f_ (sup $ setofs [a, b]) =: sup (setofs [f_ a, f_ b])
        -- since a ≤ b, the supremum of {a, b} is b
        s [the, supremum, "of", m $ setofs [a, b], "is", m b]
        ma $ f_ b =: sup (setofs [f_ a, f_ b])
        s ["By the definition of a", supremum <> ", this means that", m $ f_ a <. f_ b, "must hold"]
-- | Definition of a fixed point of a function.
-- NOTE(review): the phrase "leaves a unchanged" reads awkwardly in the
-- rendered text ("leaves it unchanged"?) — confirm before changing, as it is
-- a runtime string.
fixedPointDefinition :: Note
fixedPointDefinition = de $ do
    lab fixedPointDefinitionLabel
    s ["Let ", m x, and, m y, " be ", set, "s ", m $ fun f x y, " be a function"]
    s ["An element ", m a, " of ", m x, " is called a ", fixedPoint', " of ", m f, " if ", m f, " leaves a unchanged"]
    ma $ fn f a =: a
  where
    f = fun_
    a = "a"
    x = "X"
    y = "Y"
-- | Definition of the least fixed point of a function on a poset.
leastFixedPointDefinition :: Note
leastFixedPointDefinition = de $ do
    lab leastFixedPointDefinitionLabel
    s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", function]
    s [the, leastFixedPoint', m $ lfp f, "of", m f, "is a", fixedPoint, "such that the following holds"]
    let a = "a"
    -- lfp f is below every fixed point a
    ma $ fa (a ∈ x) $ (a =: (fn f a)) ⇒ lfp f ⊆: a
  where
    f = fun_
    x = posetset_
-- | Definition of the greatest fixed point of a function on a poset.
greatestFixedPointDefinition :: Note
greatestFixedPointDefinition = de $ do
    lab greatestFixedPointDefinitionLabel
    s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", function]
    -- Fixed: the rendered symbol was `lfp f` although this definition
    -- introduces the greatest fixed point (the formula below uses `gfp f`).
    s [the, greatestFixedPoint', m $ gfp f, "of", m f, "is a", fixedPoint, "such that the following holds"]
    let a = "a"
    -- gfp f is above every fixed point a
    ma $ fa (a ∈ x) $ (a =: (fn f a)) ⇒ a ⊆: gfp f
  where
    f = fun_
    x = posetset_
-- | Worked examples of functions on posets and their fixed points, each
-- rendered as an order diagram with the function drawn over it.
fixedPointExamples :: Note
fixedPointExamples = do
    ex $ do
        -- NOTE(review): with hd1 = {a<b, a<c} and f = {a→b, b→c, c→b}, the
        -- claim "monotone" looks wrong: a ≤ b but f(a)=b and f(b)=c are
        -- incomparable — verify.
        s ["The following", function, is, monotone, "but has no", fixedPoints]
        mempty
        let c1 = "blue"
        let (a, b, c) = ("a", "b", "c")
            hd1 = hasseDiagram [a, b, c] [(a, b), (a, c)]
            fun1 = [(a, b), (b, c), (c, b)]
        orderFunctionFig 4 normalConfig $ OrderFunctionFig
            [("A", hd1)]
            [(c1, fun1)]
    ex $ do
        s ["The following", function, is, "not", monotone, "has two", fixedPoints, "but no", leastFixedPoint]
        mempty
        let c1 = "blue"
        let (a, b, c, d) = ("a", "b", "c", "d")
            hd1 = hasseDiagram [a, b, c, d] [(a, b), (a, c), (b, d), (c, d)]
            fun1 = [(a, c), (b, b), (c, c), (d, b)]
        orderFunctionFig 4 normalConfig $ OrderFunctionFig
            [("A", hd1)]
            [(c1, fun1)]
    ex $ do
        s ["The following", function, is, monotone, "has one", fixedPoint, "which is subsequently the", leastFixedPoint]
        mempty
        let c1 = "blue"
        let (a, b, c, d) = ("a", "b", "c", "d")
            hd1 = hasseDiagram [a, b, c, d] [(a, b), (a, c), (b, d), (c, d)]
            fun1 = [(a, c), (b, c), (c, c), (d, c)]
        orderFunctionFig 4 normalConfig $ OrderFunctionFig
            [("A", hd1)]
            [(c1, fun1)]
    ex $ do
        s ["The following", function, is, monotone, "has two", fixedPoints, "but no", leastFixedPoint]
        mempty
        let c1 = "blue"
        let (a, b) = ("a", "b")
            hd1 = hasseDiagram [a, b] [(a, a), (b, b)]
            fun1 = [(a, a), (b, b)]
        orderFunctionFig 2 normalConfig $ OrderFunctionFig
            [("A", hd1)]
            [(c1, fun1)]
    ex $ do
        -- NOTE(review): the function here is the identity on a lattice with
        -- bottom "a"; every element is fixed and "a" is a least fixed point,
        -- so "but no leastFixedPoint" looks wrong — verify.
        s ["The following", function, is, monotone, "has four", fixedPoints, "but no", leastFixedPoint]
        mempty
        let c1 = "blue"
        let (a, b, c, d) = ("a", "b", "c", "d")
            hd1 = hasseDiagram [a, b, c, d] [(a, b), (a, c), (b, d), (c, d)]
            fun1 = [(a, a), (b, b), (c, c), (d, d)]
        orderFunctionFig 4 normalConfig $ OrderFunctionFig
            [("A", hd1)]
            [(c1, fun1)]
-- | Definition of the fixed-point region of a function on a poset.
fixedPointRegionDefinition :: Note
fixedPointRegionDefinition = de $ do
    lab fixedPointRegionDefinitionLabel
    s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", function]
    s ["The ", fixedPointRegion', " ", m $ fix f, " is the ", set, " of ", fixedPoints, " of ", m latset_]
    ma $ fix f === setcmpr (a ∈ latset_) (a =: f_ a)
  where
    f = fun_
    f_ = fn f
    a = "x"
    x = posetset_
-- | Definition of the ascending region (the set of pre-fixedpoints).
ascendingRegionDefinition :: Note
ascendingRegionDefinition = de $ do
    lab ascendingRegionDefinitionLabel
    lab preFixedpointDefinitionLabel
    s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", function]
    s [the , ascendingRegion', " ", m $ asc f, " is the following ", set]
    ma $ asc f === setcmpr (a ∈ latset_) (a ⊆: f_ a)
    s [elements, "of the", ascendingRegion, "are sometimes called", preFixedpoints']
  where
    f = fun_
    f_ = fn f
    a = "x"
    x = posetset_
-- | Definition of the descending region (the set of post-fixedpoints).
descendingRegionDefinition :: Note
descendingRegionDefinition = de $ do
    lab descendingRegionDefinitionLabel
    lab postFixedpointDefinitionLabel
    s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", function]
    s [the, descendingRegion', " ", m $ desc f, " is the following ", set]
    ma $ desc f === setcmpr (a ∈ latset_) (f_ a ⊆: a)
    s [elements, "of the", descendingRegion, "are sometimes called", postFixedpoints']
  where
    f = fun_
    f_ = fn f
    a = "x"
    x = posetset_
-- | Theorem: the ascending region of a monotonic function is closed under
-- application of the function.
ascendingRegionIsClosedUnderApplication :: Note
ascendingRegionIsClosedUnderApplication = thm $ do
    lab ascendingRegionIsClosedUnderApplicationTheoremLabel
    s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", monotonic, " ", function]
    -- Fixed: the formula quantified the element variable `a` but then used the
    -- carrier set `x` in the body, rendering as "∀x∈X: X ∈ asc f ⇒ …".
    ma $ fa (a ∈ x) $ a ∈ asc f ⇒ f_ a ∈ asc f
    proof $ do
        s ["Let ", m a, " be an element of ", m $ asc f]
        s ["Because ", m $ a ⊆: f_ a, " holds, and because ", m f, " is monotonic, ", m $ f_ a ⊆: f_ (f_ a), " must also hold"]
        s ["This means that ", m $ f_ a, " is in the ascending region"]
  where
    f = fun_
    f_ = fn f
    a = "x"
    x = posetset_
-- | Theorem: the descending region of a monotonic function is closed under
-- application of the function.
descendingRegionIsClosedUnderApplication :: Note
descendingRegionIsClosedUnderApplication = thm $ do
    lab descendingRegionIsClosedUnderApplicationTheoremLabel
    s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", monotonic, " ", function]
    -- Fixed: the formula quantified the element variable `a` but then used the
    -- carrier set `x` in the body, rendering as "∀x∈X: X ∈ desc f ⇒ …".
    ma $ fa (a ∈ x) $ a ∈ desc f ⇒ f_ a ∈ desc f
    proof $ do
        s ["Let ", m a, " be an element of ", m $ desc f]
        s ["Because ", m $ f_ a ⊆: a, " holds, and because ", m f, " is monotonic, ", m $ f_ (f_ a) ⊆: f_ a, " must also hold"]
        s ["This means that ", m $ f_ a, " is in the descending region"]
  where
    f = fun_
    f_ = fn f
    a = "x"
    x = posetset_
-- | Theorem: the top element of a bounded lattice lies in the descending
-- region of any monotonic function on it.
topInDescendingRegion :: Note
topInDescendingRegion = thm $ do
    lab topElementIsInDescendingRegionTheoremLabel
    s ["Let ", m lat_, " be a ", boundedLattice_, " and let ", m $ fun f x x, " a ", monotonic, " ", function]
    -- Fixed: statement and proof were swapped with botInAscendingRegion; this
    -- theorem (per its name and label) is about top and the descending region.
    ma $ top ∈ desc f
    proof $ do
        s [m $ f_ top, " is an element of ", m x, " and must therefore have the property ", m $ f_ top ⊆: top]
        s ["This means that ", m top, " is an element of the descending region"]
  where
    f_ = fn f
    f = fun_
    x = latset_
-- | Theorem: the bottom element of a bounded lattice lies in the ascending
-- region of any monotonic function on it.
botInAscendingRegion :: Note
botInAscendingRegion = thm $ do
    lab bottomElementIsInAscendingRegionTheoremLabel
    s ["Let ", m lat_, " be a ", boundedLattice_, " and let ", m $ fun f x x, " a ", monotonic, " ", function]
    -- Fixed: statement and proof were swapped with topInDescendingRegion; this
    -- theorem (per its name and label) is about bottom and the ascending region.
    ma $ bot ∈ asc f
    proof $ do
        s [m $ f_ bot, " is an element of ", m x, " and must therefore have the property ", m $ bot ⊆: f_ bot]
        s ["This means that ", m bot, " is an element of the ascending region"]
  where
    f_ = fn f
    f = fun_
    x = latset_
-- | Theorem: the fixed-point region equals the intersection of the ascending
-- and descending regions.
fixedPointRegionIsIntersectionOfAscAndDesc :: Note
fixedPointRegionIsIntersectionOfAscAndDesc = thm $ do
    lab fixedPointRegionIsIntersectionOfAscendingRegionAndDescendingRegionTheoremLabel
    s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", monotonic, " ", function]
    ma $ fix f =: asc f ∩ desc f
    proof $ do
        noindent
        itemize $ do
            -- subset direction: fix f ⊆ asc f ∩ desc f
            item $ do
                bsub
                newline
                s ["Let ", m a, " be an element of ", m $ fix f]
                s ["By definition of ", m $ fix f, ", ", m $ f_ a, " is equal to ", m a]
                s ["Because ", m partord_, is, reflexive_, ref partialOrderDefinitionLabel, ref preorderDefinitionLabel, ", ", m $ a ⊆: a, " must hold"]
                s ["This means that ", m a, " is both an element of ", m $ asc f, " and of ", m $ desc f, " and therefore in their intersection"]
            -- superset direction: asc f ∩ desc f ⊆ fix f
            item $ do
                bsup
                newline
                s ["Let ", m a, " be an element of both ", m $ asc f, and, m $ desc f]
                s ["This means that both ", m $ a ⊆: f_ a, and, m $ f_ a ⊆: a, " hold"]
                s ["Because ", m partord_, is, antisymmetric_, ", that means that ", m a, " equals ", m $ f_ a, " which entails that ", m a, " is a fixed point of ", m f]
  where
    f = fun_
    f_ = fn f
    a = "a"
    x = posetset_
-- | Statement of Tarski's fixed point theorem (proof still to be written).
tarskiFixedPointTheorem :: Note
tarskiFixedPointTheorem = thm $ do
    defineTerm "Tarski's fixed point theorem"
    newline
    s ["Let", m lat_, "be a", completeLattice_, "and let", m $ fun f x x, "be a", monotone, function]
    s [the, fixedPointRegion, m $ fix f, "of", m f, "is a", completeLattice]
    s ["Consequently, ", m f, "has a", greatestFixedPoint_, "and a", leastFixedPoint_]
    toprove
  where
    f = fun_
    x = latset_
-- | Definition of the Kleene chain (set of function iterates) of a point.
kleeneChainDefinition :: Note
kleeneChainDefinition = de $ do
    lab kleeneChainDefinitionLabel
    s ["Let ", m lat_, " be a ", lattice_, and, m $ fun f x x, " a ", scottContinuous, " function"]
    s [the , kleeneChain', " starting at a point ", m $ a ∈ x, " is the set ", m $ kleeneCh a]
    -- Fixed: the iterates must be applied to the starting point `a` (rendered
    -- "x"), not to the carrier set `x`, which would render as f^i(X).
    ma $ kleeneCh a === setcmpr (i ∈ naturals) (f ^: i `fn` a)
    s [the, kleeneChain, "is sometimes also called the", set, "of", functionIterates]
  where
    i = "i"
    f = fun_
    a = "x"
    x = latset_
-- | Statement of Kleene's fixed point theorem plus a note on its algorithmic
-- use (proof still to be written).
kleenesFixedPointTheorem :: Note
kleenesFixedPointTheorem = do
    thm $ do
        defineTerm "Kleene's fixed point theorem"
        newline
        s ["Let ", m lat_, " be a ", completeLattice_, and, m $ fun f x x, " a ", scottContinuous, " function"]
        ma $ lfp f =: sup (kleeneCh bot)
        toprove
    nte $ do
        s ["This gives us an algorithm to compute the least fixed point."]
        s ["Repeatedly applying ", m f, " to bot until we find a fixed point is enough to find ", m $ lfp f]
  where
    f = fun_
    x = latset_
-- | Theorem: function spaces into a lattice form a lattice under the
-- pointwise order (proof still to be written).
latticesOverFunctions :: Note
latticesOverFunctions = thm $ do
    lab latticesOverFunctionsTheoremLabel
    s ["Let ", m $ lat y partord_, " be a ", lattice, and, m x, " a set"]
    s [m $ lat (funt x y) po, " is a ", lattice, " where ", m po, " is defined as follows"]
    ma $ f << g ⇔ fa (a ∈ dom f) (f -: a ⊆: g -: a)
    s ["This also implies the following"]
    -- joins and meets are computed pointwise
    ma $ (pars $ f ⊔ g) -: a =: (f -: a ⊔ g -: a)
    ma $ (pars $ f ⊓ g) -: a =: (f -: a ⊓ g -: a)
    toprove
  where
    f = "f"
    g = "g"
    a = "a"
    x = latset_
    y = "Y"
    po = partord_ !: (x <> rightarrow <> y)
    (<<) = inposet po
-- | Definition of completely meet-preserving functions between posets.
completelyMeetPreservingDefinition :: Note
completelyMeetPreservingDefinition = de $ do
    let f_ = fun_
        f = fn f_
        x = "X"
        a = "A"
        rx = partord_ !: x
        infx n = (infsign !: x) <> n
        y = "Y"
        ry = partord_ !: y
        infy n = (infsign !: y) <> n
    s ["Let", m $ relposet x rx, and, m $ relposet y ry, "be", posets]
    s ["A", function, m $ fun f_ x y, "is called", completelyMeetPreserving', "if the following holds"]
    -- the image of an infimum is the infimum of the image
    ma $ fa (a ⊆ x) $ f (infx a) =: infy (f_ □ a)
-- | Definition of completely join-preserving functions between posets.
completelyJoinPreservingDefinition :: Note
completelyJoinPreservingDefinition = de $ do
    let f_ = fun_
        f = fn f_
        x = "X"
        a = "A"
        rx = partord_ !: x
        supx n = (supsign !: x) <> n
        y = "Y"
        ry = partord_ !: y
        supy n = (supsign !: y) <> n
    s ["Let", m $ relposet x rx, and, m $ relposet y ry, "be", posets]
    s ["A", function, m $ fun f_ x y, "is called", completelyJoinPreserving', "if the following holds"]
    -- the image of a supremum is the supremum of the image
    ma $ fa (a ⊆ x) $ f (supx a) =: supy (f_ □ a)
-- | Examples of (non-)meet/join-preserving functions, rendered as order
-- diagrams with the function drawn over them.
preservingExamples :: Note
preservingExamples = do
    ex $ do
        let c1 = "darkgreen"
        let (a, b, c, x, y, z) = ("a", "b", "c", "x", "y", "z")
            hd1 = hasseDiagram [a, b, c] [(a, c), (b, c)]
            hd2 = hasseDiagram [x, y, z] [(x, y), (y, z)]
            fun1 = [(a, x), (b, y), (c, z)]
        orderFunctionFig 7 normalConfig $ OrderFunctionFig
            [("A", hd1),("B", hd2)]
            [(c1, fun1)]
        s ["In this case, the", function, "is", monotone, "but not", completelyJoinPreserving]
        -- NOTE(review): `"b"` below is not wrapped in `m` unlike the other
        -- symbols in this sentence — likely meant to be `m "b"`; verify the
        -- rendered output.
        s ["The image of the join of", m "a", and, m "b", is, m "z" <> ", but the join of the images of", m "a", and, "b", "is", m "y"]
    ex $ do
        let c1 = "darkgreen"
        let (a, b, c, x, y) = ("a", "b", "c", "x", "y")
            hd1 = hasseDiagram [a, b, c] [(a, c), (b, c)]
            hd2 = hasseDiagram [x, y] [(x, y)]
            fun1 = [(a, x), (b, x), (c, x)]
        orderFunctionFig 5 dotsConfig $ OrderFunctionFig
            [("A", hd1),("B", hd2)]
            [(c1, fun1)]
        s ["In this case, the", function, "is both", monotone, "and", completelyJoinPreserving]
    ex $ do
        let c = "darkgreen"
        let full = [1, 2, 3]
            tshow :: [P.Int] -> Text
            tshow ls = T.pack $ "{" P.++ intercalate ", " (P.map show ls) P.++ "}"
            nodes = [ tshow ls | ls <- subsequences full ]
            edges = [ (tshow l1, tshow l2) | l1 <- subsequences full, l2 <- subsequences full, l1 `isSubsequenceOf` l2]
            hd = hasseDiagram nodes edges
            fun = P.map (\(l1, l2) -> (tshow l1, tshow l2)) [([],[]), ([1],[1]), ([2], [1,2]), ([3],[3]), ([1, 2], [1, 2]), ([2,3], [1,2,3]), ([1,3], [1,2,3]), ([1,2,3],[1,2,3])]
        orderFunctionFig 7 normalConfig $ OrderFunctionFig
            [(tshow full, hd)]
            [(c, fun)]
        s ["In this case, the", function, "is both", monotone, "and", completelyJoinPreserving, "but not", completelyMeetPreserving]
-- | Definition of a Galois connection (α, γ) between two complete
-- lattices: α ∘ γ is reductive and γ ∘ α is extensive.
galoisConnectionDefinition :: Note
galoisConnectionDefinition = de $ do
    lab galoisConnectionDefinitionLabel
    lab reductiveDefinitionLabel
    lab extensiveDefinitionLabel
    s ["Let", m $ lat setX ordX, and, m $ lat setY ordY, "be", completeLattices]
    s ["Let", m $ fun al setX setY, and, m $ fun ga setY setX, "be", monotone, functions]
    s [m al, and, m ga, "form a", galoisConnection', "if the following hold"]
    itemize $ do
        item $ s [m $ al ● ga, "is", reductive' <> ":", m $ fa (elY ∈ setY) $ inposet ordY (fn al (fn ga elY)) elY]
        item $ s [m $ ga ● al, "is", extensive' <> ":", m $ fa (elX ∈ setX) $ inposet ordX elX (fn ga (fn al elX))]
    s ["This is denoted as follows"]
    ma $ gcon al ga (lat setX ordX) (lat setY ordY)
  where
    al = alpha               -- the abstraction α : X → Y
    ga = gamma               -- the concretization γ : Y → X
    setX = "X"
    elX = "x"                -- element variable of X
    ordX = partord_ !: setX
    setY = "Y"
    elY = "y"                -- element variable of Y
    ordY = partord_ !: setY
-- | Equivalent single-biconditional characterisation of a Galois
-- connection: ∀x∀y. α(x) ≤_Y y ⇔ x ≤_X γ(y).
galoisConnectionEquivalentDefinition :: Note
galoisConnectionEquivalentDefinition = thm $ do
    s ["The following is an equivalent definition of a", galoisConnection]
    newline
    s ["Let", m $ lat setX ordX, and, m $ lat setY ordY, "be", completeLattices]
    s ["Let", m $ fun al setX setY, and, m $ fun ga setY setX, "be", monotone, functions]
    s [m al, and, m ga, "form a", galoisConnection', "if the following hold"]
    ma $ fa (elX ∈ setX) $ fa (elY ∈ setY) $ inposet ordY (fn al elX) elY ⇔ inposet ordX elX (fn ga elY)
    toprove
  where
    al = alpha               -- the abstraction α : X → Y
    ga = gamma               -- the concretization γ : Y → X
    setX = "X"
    elX = "x"                -- element variable of X
    ordX = partord_ !: setX
    setY = "Y"
    elY = "y"                -- element variable of Y
    ordY = partord_ !: setY
-- | Three worked examples of Galois connections, drawn as paired Hasse
-- diagrams; red arrows render α, blue arrows render γ.
galoisConnectionExamples :: Note
galoisConnectionExamples = do
    -- Arrow colors shared by all three examples.
    let c1 = "red"
        c2 = "blue"
    s ["In the following examples, the", raw c1, "arrows correspond to", m alpha, "and the", raw c2, "arrows correspond to", m gamma]
    -- Example 1: a two-element chain collapsed onto a single point.
    ex $ do
        s ["The following diagram shows a simple non-trivial", galoisConnection]
        let (a, b, c) = ("a", "b", "c")
            hd1 = hasseDiagram [a, c] [(a, c)]
            hd2 = hasseDiagram [b] []
            fun1 = [(a, b), (c, b)]
            fun2 = [(b, c)]
        orderFunctionFig 3 dotsConfig $ OrderFunctionFig
            [("A", hd1), ("B", hd2)]
            [(c1, fun1), (c2, fun2)]
    -- Example 2: two two-element chains.
    ex $ do
        s ["The following diagram shows another simple non-trivial", galoisConnection]
        let (a, b, c, d) = ("a", "b", "c", "d")
            hd1 = hasseDiagram [a, c] [(a, c)]
            hd2 = hasseDiagram [b, d] [(b, d)]
            fun1 = [(a, b), (c, b)]
            fun2 = [(b, c), (d, c)]
        orderFunctionFig 4 dotsConfig $ OrderFunctionFig
            [("A", hd1), ("B", hd2)]
            [(c1, fun1), (c2, fun2)]
    -- Example 3: the classic sign-analysis abstraction of sets of
    -- integers (abstract domain {bot, -, +, top}).
    ex $ do
        s ["The following diagram shows a", galoisConnection, "between two", posets]
        s ["One", poset, "is a", subset, "of the", powerset, "of", m ints]
        s ["The other is the set of information we can have about the sign of an integer"]
        s ["top means it could be anything, bot means it's impossible for this situation to occur, + means that the sign is positive and - means that the sign is negative"]
        -- Bindings below are mutually recursive within the let; node
        -- labels are defined after the diagrams that use them.
        let hd1 = hasseDiagram [all1, pos1, neg1, zp1, zm1, zero1, none] [(none, zero1), (zero1, zm1), (zero1, zp1), (zp1, pos1), (zm1, neg1), (zero1, neg1), (zero1, pos1), (neg1, all1), (pos1, all1)]
            hd2 = hasseDiagram [all2, pos2, neg2, zero2] [(zero2, neg2), (zero2, pos2), (neg2, all2), (pos2, all2)]
            fun1 = [(none, zero2), (zero1, pos2), (zp1, pos2), (zm1, neg2), (neg1, neg2), (pos1, pos2), (all1, all2)]
            fun2 = [(zero2, none), (neg2, neg1), (pos2, pos1), (all2, all1)]
            -- Concrete-domain node labels.
            all1 = "{..., -1, 0, 1, ...}"
            pos1 = "{0, 1, ...}"
            neg1 = "{... -1, 0}"
            zm1 = "{-1, 0}"
            zp1 = "{0, 1}"
            zero1 = "{0}"
            none = "{}"
            -- Abstract-domain node labels.
            all2 = "top"
            pos2 = "+"
            neg2 = "-"
            zero2 = "bot"
        orderFunctionFig 8 normalConfig $ OrderFunctionFig
            [("Concrete", hd1), ("Abstract", hd2)]
            [(c1, fun1), (c2, fun2)]
-- | Definition: a Galois connection is a /Galois insertion/ when the
-- concretization γ is injective.
galoisInsertionDefinition :: Note
galoisInsertionDefinition = de $ do
    lab galoisInsertionDefinitionLabel
    s ["Let", m al, and, m ga, "form a", galoisConnection]
    s ["This", galoisConnection, "is called a", galoisInsertion', "if", m ga, "is", injective]
    s ["This is denoted as follows"]
    ma $ gins al ga (lat setX ordX) (lat setY ordY)
  where
    al = alpha               -- the abstraction α
    ga = gamma               -- the concretization γ
    setX = "X"
    ordX = partord_ !: setX
    setY = "Y"
    ordY = partord_ !: setY
-- | Theorem: in a Galois insertion, α is surjective and α ∘ γ is the
-- identity.  (Proof left open.)
galoisInsertionOtherJections :: Note
galoisInsertionOtherJections = thm $ do
    s ["Let", m al, and, m ga, "form a", galoisInsertion]
    s [m al, "is", surjective, and, m $ al ● ga, "is the identity", function]
    toprove
  where
    al = alpha
    ga = gamma
-- | Theorem: Galois connections compose — (α₂ ∘ α₁, γ₁ ∘ γ₂) is again a
-- Galois connection.  (Proof left open.)
galoisConnectionsCompose :: Note
galoisConnectionsCompose = thm $ do
    s ["Let", m al1, and, m ga1 <> ", as well as", m al2, and, m ga2, "form", galoisConnections]
    ma $ gcon al1 ga1 (lat setX ordX) (lat setY ordY)
    ma $ gcon al2 ga2 (lat setY ordY) (lat setZ ordZ)
    s [m (al2 ● al1), and, m (ga1 ● ga2), "then form a", galoisConnection]
    ma $ gcon (al2 ● al1) (ga1 ● ga2) (lat setX ordX) (lat setZ ordZ)
    toprove
  where
    -- Subscripted α/γ pairs for the two connections being composed.
    al1 = alpha !: 1
    al2 = alpha !: 2
    ga1 = gamma !: 1
    ga2 = gamma !: 2
    setX = "X"
    ordX = partord_ !: setX
    setY = "Y"
    ordY = partord_ !: setY
    setZ = "Z"
    ordZ = partord_ !: setZ
-- | Theorem: in any Galois connection, α is completely join preserving
-- and γ is completely meet preserving.  (Proof left open.)
galoisConnectionsPreserves :: Note
galoisConnectionsPreserves = thm $ do
    s ["Let", m al, and, m ga, "form a", galoisConnection]
    ma $ gcon al ga (lat setX ordX) (lat setY ordY)
    s [m al, "is", completelyJoinPreserving]
    s [m ga, "is", completelyMeetPreserving]
    toprove
  where
    al = alpha
    ga = gamma
    setX = "X"
    ordX = partord_ !: setX
    setY = "Y"
    ordY = partord_ !: setY
-- | Counterexample: join/meet preservation of α and γ alone does not
-- make (α, γ) a Galois connection.
preservesNoGaloisConnection :: Note
preservesNoGaloisConnection = cex $ do
    let a = alpha
        g = gamma
    s ["Let", m a, and, m g, "be", functions, "such that the following hold"]
    itemize $ do
        item $ s [m a, "is", completelyJoinPreserving]
        item $ s [m g, "is", completelyMeetPreserving]
    s [m a, and, m g, "do not necessarily form a", galoisConnection]
    proof $ do
        let c1 = "red"
            c2 = "blue"
        s ["The following is a diagram of a counter example"]
        -- NOTE: these node names shadow the outer a (alpha) and
        -- g (gamma); intentional, since the rest of the proof refers to
        -- alpha/gamma directly.
        let (a, b, c, d) = ("a", "b", "c", "d")
            (e, f, g, h) = ("e", "f", "g", "h")
            hd1 = hasseDiagram [a, b, c, d] [(a, b), (a, c), (b, d), (c, d)]
            hd2 = hasseDiagram [e, f, g, h] [(e, f), (e, g), (f, h), (g, h)]
            -- fun1 renders alpha, fun2 renders gamma; gamma swaps the
            -- middle elements (f↦c, g↦b), which breaks the connection.
            fun1 = [(a, e), (b, f), (c, g), (d, h)]
            fun2 = [(e, a), (f, c), (g, b), (h, d)]
        orderFunctionFig 7 normalConfig $ OrderFunctionFig
            [("A", hd1), ("B", hd2)]
            [(c1, fun1), (c2, fun2)]
        s ["In this situation", m alpha, "is", completelyJoinPreserving, and, m gamma, "is", completelyMeetPreserving, "but they don't form a", galoisConnection]
        -- Math-mode renderings of the witnesses c and b.
        let ct = raw c
            bt = raw b
        s ["Take for example the", element, m ct]
        s [m $ fn gamma $ fn alpha ct, "is", m bt, "but", m $ inposet (partord_ !: "A") ct bt, "does not hold"]
-- | Theorem: in a Galois connection each adjoint completely determines
-- the other:  γ(q) = ⊔_X { p ∈ X : α(p) ≤_Y q }  and
--             α(p) = ⊓_Y { q ∈ Y : p ≤_X γ(q) }.
--
-- Fix: both displayed formulas previously had α(p) on the left-hand
-- side (so the formula for γ was missing entirely), and the second
-- formula compared elements of Y using the order on X.  The standard
-- determination formulas are rendered now, each with the order of the
-- lattice its comparison lives in.
galoisConnectionDetermination :: Note
galoisConnectionDetermination = thm $ do
    let a = alpha
        g = gamma
        x = "X"
        rx = partord_ !: x
        y = "Y"
        ry = partord_ !: y
    s ["Let", m a, and, m g, "form a", galoisConnection]
    ma $ gcon a g (lat x rx) (lat y ry)
    let p = "p"
        q = "q"
    s [m a, "completely determines", m g, "as follows"]
    ma $ fn g q =: supofm x (setcmpr (p ∈ x) (inposet ry (fn a p) q))
    s [m g, "completely determines", m a, "as follows"]
    ma $ fn a p =: infofm y (setcmpr (q ∈ y) (inposet rx p (fn g q)))
    toprove
-- | Theorem: every completely join preserving α admits a γ making
-- (α, γ) a Galois connection.  (Proof left open.)
galoisConnectionExistenceAlpha :: Note
galoisConnectionExistenceAlpha = thm $ do
    s ["If", m $ fun al setX setY, "is a", completelyJoinPreserving, function <> ", then there exists a function", m $ fun ga setY setX, "such that", m al, and, m ga, "form a", galoisConnection]
    ma $ gcon al ga (lat setX ordX) (lat setY ordY)
    toprove
  where
    al = alpha
    ga = gamma
    setX = "X"
    ordX = partord_ !: setX
    setY = "Y"
    ordY = partord_ !: setY
-- | Theorem: every completely meet preserving γ admits an α making
-- (α, γ) a Galois connection.  Dual of 'galoisConnectionExistenceAlpha'.
-- (Proof left open.)
galoisConnectionExistenceGamma :: Note
galoisConnectionExistenceGamma = thm $ do
    s ["If", m $ fun ga setY setX, "is a", completelyMeetPreserving, function <> ", then there exists a function", m $ fun al setX setY, "such that", m al, and, m ga, "form a", galoisConnection]
    ma $ gcon al ga (lat setX ordX) (lat setY ordY)
    toprove
  where
    al = alpha
    ga = gamma
    setX = "X"
    ordX = partord_ !: setX
    setY = "Y"
    ordY = partord_ !: setY
-- | Definition: h : Y → Y approximates f : X → X (relative to a Galois
-- connection (α, γ)) when α(x) ≤_Y y implies α(f(x)) ≤_Y h(y).
approximationDefinition :: Note
approximationDefinition = de $ do
    lab approximatesDefinitionLabel
    lab approximationDefinitionLabel
    s ["Let", m $ fun al setX setY, and, m $ fun ga setY setX, "form a", galoisConnection]
    ma $ gcon al ga (lat setX ordX) (lat setY ordY)
    s ["Let", m $ fun ff setX setX, and, m $ fun hh setY setY, "be", functions]
    s ["We say that", m hh, approximates', m ff, "if the following holds"]
    ma $ fa (elX ∈ setX) $ fa (elY ∈ setY) $ inposet ordY (fn al elX) elY ⇒ inposet ordY (fn al (fn ff elX)) (fn hh elY)
  where
    al = alpha               -- the abstraction α
    ga = gamma               -- the concretization γ
    setX = "X"
    ordX = partord_ !: setX
    setY = "Y"
    ordY = partord_ !: setY
    ff = "f"                 -- concrete function on X
    hh = "h"                 -- abstract function on Y
    elX = "x"
    elY = "y"
-- | Equivalent definition of approximation stated via γ only:
-- F♯ approximates F iff ∀z ∈ Y. F(γ(z)) ≤_X γ(F♯(z)).
approximationEquivalentDefinition :: Note
approximationEquivalentDefinition = thm $ do
    s ["An", approximation, "of a", function, "can equivalently be defined as follows"]
    newline
    let g = gamma
        x = "X"
        rx = partord_ !: x
        y = "Y"
        ry = partord_ !: y
    s ["Let", m $ fun g y x, "be a monotone", function, "on the posets", m $ relposet x rx, and, m $ relposet y ry]
    -- F is the concrete function, F♯ ("F sharp") the abstract one.
    let f_ = "F"
        f = fn f_
        f'_ = "F" <> comm0 "sharp"
        f' = fn f'_
    s ["Let", m $ fun f_ x x, and, m $ fun f'_ y y, "be", monotone, functions]
    s ["We say that", m f'_, approximates, m f_, "if the following holds"]
    -- (<<) renders the order on X, where both sides of the formula live.
    let go = fn g
        (<<) = inposet rx
        z = "z"
    ma $ fa (z ∈ y) $ f (go z) << go (f' z)
    toprove_ "prove that these definitions are in fact equivalent"
-- | Diagram example of one function approximating another across a
-- Galois connection.
approximationExamples :: Note
approximationExamples = do
    ex $ do
        s ["In the following diagram, the function represented by the blue arrows in the", set, m "A", approximates, "the blue arrow in the", set, m "B"]
        s ["The green arrows represent", m alpha]
        let c1 = "blue"          -- color for f and g arrows
            c2 = "darkgreen"     -- color for the abstraction alpha
            (x, fx) = ("x", "f(x)")
            (ax, afx, z, gz) = ("a(x)", "a(f(x))", "z", "g(z)")
            hd1 = hasseDiagram [x, fx] [(x, fx)]
            hd2 = hasseDiagram [ax, afx, z, gz] [(ax, z), (afx, gz)]
            funf = [(x, fx)]                 -- the concrete function f
            fung = [(z, gz)]                 -- the abstract function g
            funa = [(x, ax), (fx, afx)]      -- alpha
        orderFunctionFig 7 normalConfig $ OrderFunctionFig
            [("A", hd1), ("B", hd2)]
            [(c1, funf), (c1, fung), (c2, funa)]
-- | Theorem: four equivalent characterisations of "h approximates f"
-- for monotone f : X → X and h : Y → Y across a Galois connection.
--
-- Fix: statement 4 previously quantified with "y ⊆ Y" although y is an
-- element variable; it now uses ∈, consistent with statements 1-3 and
-- with 'approximationEquivalentDefinition'.
monotoneEquivalences :: Note
monotoneEquivalences = thm $ do
    let a = alpha
        g = gamma
        x = "X"
        rx = partord_ !: x
        y = "Y"
        ry = partord_ !: y
    s ["Let", m $ fun a x y, and, m $ fun g y x, "form a", galoisConnection]
    ma $ gcon a g (lat x rx) (lat y ry)
    let f = "f"
        h = "h"
    s ["Let", m $ fun f x x, and, m $ fun h y y, "be", monotone, functions]
    s ["The following statements are equivalent"]
    -- (<<) is the order on Y, (<.) the order on X.
    let x_ = "x"
        y_ = "y"
        (<<) = inposet ry
        (<.) = inposet rx
        a_ = fn a
        g_ = fn g
        f_ = fn f
        h_ = fn h
    enumerate $ do
        item $ ma $ fa (x_ ∈ x) $ fa (y_ ∈ y) $ (a_ x_) << y_ ⇒ (a_ (f_ x_)) << (h_ y_)
        item $ ma $ fa (y_ ∈ y) $ (a_ (f_ (g_ y_))) << (h_ y_)
        item $ ma $ fa (x_ ∈ x) $ (a_ (f_ x_)) << (h_ (a_ x_))
        item $ ma $ fa (y_ ∈ y) $ (f_ (g_ y_)) <. (g_ (h_ y_))
    toprove
-- | Theorem: every monotone f : X → X has an approximation — the map
-- h sending every element of Y to the top of Y (the /least precise/
-- approximation).
--
-- Fixes to the rendered proof: the supremum argument concerned Y (the
-- codomain of h), not X; the hypothesis compared α(p) with the element
-- q rather than the set Y; and the conclusion applied f and h to the
-- element variables p and q instead of the sets X and Y.
approximationExists :: Note
approximationExists = thm $ do
    let a = alpha
        g = gamma
        x = "X"
        rx = partord_ !: x
        y = "Y"
        ry = partord_ !: y
    s ["Let", m $ fun a x y, and, m $ fun g y x, "form a", galoisConnection]
    ma $ gcon a g (lat x rx) (lat y ry)
    let f_ = "f"
        f = fn f_
    s ["Let", m $ fun f_ x x, "be a", monotone, function]
    s ["There always exists an", approximation, "of", m f_]
    proof $ do
        s ["Because", m y, "is a", completeLattice, "it must contain its", supremum]
        let h_ = "h"
            h = fn h_
            y_ = "y"
        s [the, function, m $ func h_ y y y_ (supof y), "therefore", approximates, m f_]
        let p = "p"
            q = "q"
        let ao = fn a
            (<<) = inposet ry
        s ["Indeed, let", m p, and, m q, "be arbitrary", elements, "of", m x, and, m y, "respectively such that", m $ ao p << q]
        s [m $ h q, "is", m $ supof y, "by definition, so ", m $ ao (f p) << h q, "holds by construction"]
        s [m h_, "is called the", leastPreciseApproximation', "of", m f_]
-- | Definition of the /most precise/ approximation of a monotone
-- f : X → X, namely h = α ∘ f ∘ γ.
mostPreciseApproximationDefinition :: Note
mostPreciseApproximationDefinition = de $ do
    lab mostPreciseApproximationDefinitionLabel
    s ["Let", m $ fun al setX setY, and, m $ fun ga setY setX, "form a", galoisConnection]
    ma $ gcon al ga (lat setX ordX) (lat setY ordY)
    s ["Let", m $ fun ff setX setX, "be a", monotone, function]
    s [m $ func hh setY setY elZ $ appH elZ =: fn al (appF (fn ga elZ)), "is called the", mostPreciseApproximation', "of", m ff]
  where
    al = alpha               -- the abstraction α
    ga = gamma               -- the concretization γ
    setX = "X"
    ordX = partord_ !: setX
    setY = "Y"
    ordY = partord_ !: setY
    ff = "f"                 -- the function being approximated
    appF = fn ff
    hh = "h"                 -- its most precise approximation
    appH = fn hh
    elZ = "z"
-- | Theorem: if F♯ approximates F across a Galois connection, then
-- α(lfp F) ≤_Y lfp F♯.
leastFixedPointApproximationTheorem :: Note
leastFixedPointApproximationTheorem = thm $ do
    defineTerm "Least fixed point approximation"
    newline
    let a = alpha
        g = gamma
        x = "X"
        rx = partord_ !: x
        y = "Y"
        ry = partord_ !: y
    s ["Let", m $ fun a x y, and, m $ fun g y x, "form a", galoisConnection]
    ma $ gcon a g (lat x rx) (lat y ry)
    -- F is the concrete function, F♯ ("F sharp") the abstract one.
    let f_ = "F"
        f = fn f_
        f'_ = "F" <> comm0 "sharp"
        f' = fn f'_
    s ["Let", m $ fun f_ x x, and, m $ fun f'_ y y, "be", monotone, functions, "such that", m f'_, approximates, m f_]
    let ao = fn a
        (<<) = inposet ry
    -- NOTE(review): this renders with the *sets* X and Y as arguments,
    -- i.e. α(F(X)) ⊑ F♯(Y); presumably element variables were intended
    -- here — confirm against the approximation definition above.
    ma $ ao (f x) << f' y
    s ["The following then holds about the", leastFixedPoints, "of", m f_, and, m f'_]
    ma $ ao (lfp f_) << lfp f'_
    toprove
-- | Theorem: the least-fixed-point approximation result stated with
-- only a monotone γ (no full Galois connection):
-- lfp F ≤_X γ(lfp F♯).
leastFixedPointApproximationTheoremWithoutGalois :: Note
leastFixedPointApproximationTheoremWithoutGalois = thm $ do
    defineTerm "Least fixed point approximation without a Galois connection"
    newline
    let g = gamma
        x = "X"
        rx = partord_ !: x
        y = "Y"
        ry = partord_ !: y
    s ["Let", m $ fun g y x, "be a monotone", function, "on the posets", m $ relposet x rx, and, m $ relposet y ry]
    -- F is the concrete function, F♯ ("F sharp") the abstract one.
    let f_ = "F"
        f = fn f_
        f'_ = "F" <> comm0 "sharp"
        f' = fn f'_
    s ["Let", m $ fun f_ x x, and, m $ fun f'_ y y, "be", monotone, functions, "such that", m f'_, approximates, m f_]
    -- (<<) is the order on X, where both sides of the formulas live.
    let go = fn g
        (<<) = inposet rx
        z = "z"
    ma $ fa (z ∈ y) $ f (go z) << go (f' z)
    s ["The following then holds about the", leastFixedPoints, "of", m f_, and, m f'_]
    ma $ lfp f_ << go (lfp (f'_))
    toprove
| null | https://raw.githubusercontent.com/NorfairKing/the-notes/ff9551b05ec3432d21dd56d43536251bf337be04/src/Functions/Order.hs | haskell | module Functions.Order where
import Notes
import Data.List (intercalate, isSubsequenceOf,
subsequences)
import qualified Data.Text as T
import qualified Prelude as P
import Logic.FirstOrderLogic.Macro
import Logic.PropositionalLogic.Macro
import NumberTheory.Macro
import Relations.Basics.Terms
import Relations.Orders.Hasse
import Relations.Orders.Macro
import Relations.Orders.Terms
import Relations.Preorders.Terms
import Sets.Basics.Terms
import Sets.Powerset.Terms
import Functions.Application.Macro
import Functions.Basics.Macro
import Functions.Basics.Terms
import Functions.Composition.Macro hiding (comp)
import Functions.Composition.Terms
import Functions.Jections.Terms
import Functions.Order.Diagrams
import Functions.Order.Macro
import Functions.Order.Terms
order :: Note
order = section "Functions and orders" $ do
conjunctiveOrderDefinition
subsection "Monotonic functions" $ do
monotonicDefinition
monotonicFunctionsClosedUnderComposition
scottContinuousDefinition
scottContinuousImpliesMonotonicTheorem
subsection "Fixed points" $ do
fixedPointDefinition
leastFixedPointDefinition
greatestFixedPointDefinition
fixedPointExamples
regions
tarskiFixedPointTheorem
kleeneChainDefinition
kleenesFixedPointTheorem
latticesOverFunctions
completelyMeetPreservingDefinition
completelyJoinPreservingDefinition
preservingExamples
galoisConnectionS
approximationS
regions :: Note
regions = subsection "Regions" $ do
fixedPointRegionDefinition
ascendingRegionDefinition
descendingRegionDefinition
ascendingRegionIsClosedUnderApplication
descendingRegionIsClosedUnderApplication
topInDescendingRegion
botInAscendingRegion
fixedPointRegionIsIntersectionOfAscAndDesc
galoisConnectionS :: Note
galoisConnectionS = subsection "Galois connections" $ do
galoisConnectionDefinition
galoisConnectionEquivalentDefinition
galoisConnectionExamples
galoisConnectionsCompose
galoisConnectionsPreserves
preservesNoGaloisConnection
galoisConnectionDetermination
galoisConnectionExistenceAlpha
galoisConnectionExistenceGamma
galoisInsertionDefinition
galoisInsertionOtherJections
approximationS :: Note
approximationS = subsection "Approximations" $ do
approximationDefinition
approximationEquivalentDefinition
approximationExamples
monotoneEquivalences
approximationExists
mostPreciseApproximationDefinition
leastFixedPointApproximationTheorem
leastFixedPointApproximationTheoremWithoutGalois
conjunctiveOrderDefinition :: Note
conjunctiveOrderDefinition = de $ do
lab conjunctiveOrderDefinitionLabel
let a = "A"
b = "B"
po = partord_
s ["Let", m po, "be a", partialOrder, "on a", set, m b, and, "let", m a, "be a", set, "as well"]
let f_ = "f"
f = fn f_
co = cordsign partord_
s [the, conjunctiveOrder', m co, "on the", set, "of", functions, m $ setcmpr f_ (fun f_ a b), "is defined as follows"]
let g_ = "g"
g = fn g_
(<.) = inposet po
(<<) = cord partord_
x = "x"
ma $ (f_ << g_) === (fa (x ∈ a) (f x <. g x))
monotonicDefinition :: Note
monotonicDefinition = de $ do
lab monotonicDefinitionLabel
lab monotoneDefinitionLabel
lab isotoneDefinitionLabel
lab orderPreservingDefinitionLabel
s ["Let ", m $ relposet x rx, and, m $ relposet y ry, " each be a ", poset_, and, m $ fun f x y, " a function"]
s [m $ fun f x y, " is said to be ", monotonic' <> "," , monotone' <> ",", isotone', or, orderPreserving', " if it has the following property"]
ma $ fa (cs [x1, x2] ∈ x) $ inposet rx x1 x2 ⇒ inposet ry (f_ x1) (f_ x2)
where
x1 = x !: 1
x2 = x !: 2
f = fun_
f_ = fn f
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
monotonicFunctionsClosedUnderComposition :: Note
monotonicFunctionsClosedUnderComposition = thm $ do
lab monotonicFunctionsClosedUnderCompositionTheoremLabel
s [the, composition, "of two", monotonic, functions, "is", monotonic]
s ["Let ", m f1, and, m f2, "be", monotonic, functions]
s [m $ f2 ● f1, "is a", monotonic, function]
proof $ do
let a = "A"
b = "B"
c = "C"
ra = partord_ !: a
rb = partord_ !: b
s ["Let ", m $ fun f1 a b, and, m $ fun f2 b c, "be", monotonic, functions, "on the", posets, m $ relposet a ra, and, m $ relposet b rb]
let x = "x"
y = "y"
oa = binop $ raw "\\ " <> partord_ !: "a" <> raw "\\ "
s ["Let ", m x, and, m y, "be elements of", m a, and, m b, "respectively, such that the following holds"]
ma $ x `oa` y
let ob = binop $ raw "\\ " <> partord_ !: "b" <> raw "\\ "
s ["Because ", m f1, "is", monotonic, "the following must hold as well"]
ma $ fn f1 x `ob` fn f1 y
s ["Because ", m f2, "is", monotonic, "the following must hold as well"]
ma $ fn f2 (fn f1 x) `ob` fn f2 (fn f1 y)
ma $ fn (pars $ f2 ● f1) x `ob` fn (pars $ f2 ● f1) y
s ["This means that", m $ f2 ● f1, "is monotonic"]
where
f1 = fun_ !: 1
f2 = fun_ !: 2
scottContinuousDefinition :: Note
scottContinuousDefinition = de $ do
lab scottContinuousDefinitionLabel
s ["Let ", m $ lat x rx, and, m $ lat y ry, " each be a ", lattice_, and, m $ fun f x y, " a function"]
s [m $ fun fun_ x y, " is called ", scottContinuous', " if it has the following property"]
ma $ fa (ss ⊆ x) $ f_ (sup ss) =: sup (f □ ss)
where
ss = "S"
f = fun_
f_ = fn f
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
scottContinuousImpliesMonotonicTheorem :: Note
scottContinuousImpliesMonotonicTheorem = thm $ do
let f = fun_
f_ = fn f
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["Let ", m $ lat x rx, and, m $ lat y ry, " each be a ", lattice_, and, m $ fun f x y, "a", function]
s ["If", m f, "is", scottContinuous <> ",", "then", m f, "is", monotonic]
proof $ do
s ["Let", m f, "be a", scottContinuous, function]
let a = "a"
b = "b"
let (<<) = inposet rx
(<.) = inposet ry
s ["Let", m a, and, m b, "be elements of", m x, "such that", m $ a << b, "holds"]
s ["According to the definition of a", scottContinuous, function, "we observe the following"]
ma $ f_ (sup $ setofs [a, b]) =: sup (setofs [f_ a, f_ b])
s [the, supremum, "of", m $ setofs [a, b], "is", m b]
ma $ f_ b =: sup (setofs [f_ a, f_ b])
s ["By the definition of a", supremum <> ", this means that", m $ f_ a <. f_ b, "must hold"]
fixedPointDefinition :: Note
fixedPointDefinition = de $ do
lab fixedPointDefinitionLabel
s ["Let ", m x, and, m y, " be ", set, "s ", m $ fun f x y, " be a function"]
s ["An element ", m a, " of ", m x, " is called a ", fixedPoint', " of ", m f, " if ", m f, " leaves a unchanged"]
ma $ fn f a =: a
where
f = fun_
a = "a"
x = "X"
y = "Y"
leastFixedPointDefinition :: Note
leastFixedPointDefinition = de $ do
lab leastFixedPointDefinitionLabel
s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", function]
s [the, leastFixedPoint', m $ lfp f, "of", m f, "is a", fixedPoint, "such that the following holds"]
let a = "a"
ma $ fa (a ∈ x) $ (a =: (fn f a)) ⇒ lfp f ⊆: a
where
f = fun_
x = posetset_
greatestFixedPointDefinition :: Note
greatestFixedPointDefinition = de $ do
lab greatestFixedPointDefinitionLabel
s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", function]
s [the, greatestFixedPoint', m $ lfp f, "of", m f, "is a", fixedPoint, "such that the following holds"]
let a = "a"
ma $ fa (a ∈ x) $ (a =: (fn f a)) ⇒ a ⊆: gfp f
where
f = fun_
x = posetset_
fixedPointExamples :: Note
fixedPointExamples = do
ex $ do
s ["The following", function, is, monotone, "but has no", fixedPoints]
mempty
let c1 = "blue"
let (a, b, c) = ("a", "b", "c")
hd1 = hasseDiagram [a, b, c] [(a, b), (a, c)]
fun1 = [(a, b), (b, c), (c, b)]
orderFunctionFig 4 normalConfig $ OrderFunctionFig
[("A", hd1)]
[(c1, fun1)]
ex $ do
s ["The following", function, is, "not", monotone, "has two", fixedPoints, "but no", leastFixedPoint]
mempty
let c1 = "blue"
let (a, b, c, d) = ("a", "b", "c", "d")
hd1 = hasseDiagram [a, b, c, d] [(a, b), (a, c), (b, d), (c, d)]
fun1 = [(a, c), (b, b), (c, c), (d, b)]
orderFunctionFig 4 normalConfig $ OrderFunctionFig
[("A", hd1)]
[(c1, fun1)]
ex $ do
s ["The following", function, is, monotone, "has one", fixedPoint, "which is subsequently the", leastFixedPoint]
mempty
let c1 = "blue"
let (a, b, c, d) = ("a", "b", "c", "d")
hd1 = hasseDiagram [a, b, c, d] [(a, b), (a, c), (b, d), (c, d)]
fun1 = [(a, c), (b, c), (c, c), (d, c)]
orderFunctionFig 4 normalConfig $ OrderFunctionFig
[("A", hd1)]
[(c1, fun1)]
ex $ do
s ["The following", function, is, monotone, "has two", fixedPoints, "but no", leastFixedPoint]
mempty
let c1 = "blue"
let (a, b) = ("a", "b")
hd1 = hasseDiagram [a, b] [(a, a), (b, b)]
fun1 = [(a, a), (b, b)]
orderFunctionFig 2 normalConfig $ OrderFunctionFig
[("A", hd1)]
[(c1, fun1)]
ex $ do
s ["The following", function, is, monotone, "has four", fixedPoints, "but no", leastFixedPoint]
mempty
let c1 = "blue"
let (a, b, c, d) = ("a", "b", "c", "d")
hd1 = hasseDiagram [a, b, c, d] [(a, b), (a, c), (b, d), (c, d)]
fun1 = [(a, a), (b, b), (c, c), (d, d)]
orderFunctionFig 4 normalConfig $ OrderFunctionFig
[("A", hd1)]
[(c1, fun1)]
fixedPointRegionDefinition :: Note
fixedPointRegionDefinition = de $ do
lab fixedPointRegionDefinitionLabel
s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", function]
s ["The ", fixedPointRegion', " ", m $ fix f, " is the ", set, " of ", fixedPoints, " of ", m latset_]
ma $ fix f === setcmpr (a ∈ latset_) (a =: f_ a)
where
f = fun_
f_ = fn f
a = "x"
x = posetset_
ascendingRegionDefinition :: Note
ascendingRegionDefinition = de $ do
lab ascendingRegionDefinitionLabel
lab preFixedpointDefinitionLabel
s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", function]
s [the , ascendingRegion', " ", m $ asc f, " is the following ", set]
ma $ asc f === setcmpr (a ∈ latset_) (a ⊆: f_ a)
s [elements, "of the", ascendingRegion, "are sometimes called", preFixedpoints']
where
f = fun_
f_ = fn f
a = "x"
x = posetset_
descendingRegionDefinition :: Note
descendingRegionDefinition = de $ do
lab descendingRegionDefinitionLabel
lab postFixedpointDefinitionLabel
s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", function]
s [the, descendingRegion', " ", m $ desc f, " is the following ", set]
ma $ desc f === setcmpr (a ∈ latset_) (f_ a ⊆: a)
s [elements, "of the", descendingRegion, "are sometimes called", postFixedpoints']
where
f = fun_
f_ = fn f
a = "x"
x = posetset_
ascendingRegionIsClosedUnderApplication :: Note
ascendingRegionIsClosedUnderApplication = thm $ do
lab ascendingRegionIsClosedUnderApplicationTheoremLabel
s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", monotonic, " ", function]
ma $ fa (a ∈ x) $ x ∈ asc f ⇒ f_ x ∈ asc f
proof $ do
s ["Let ", m a, " be an element of ", m $ asc f]
s ["Because ", m $ a ⊆: f_ a, " holds, and because ", m f, " is monotonic, ", m $ f_ a ⊆: f_ (f_ a), " must also hold"]
s ["This means that ", m $ f_ a, " is in the ascending region"]
where
f = fun_
f_ = fn f
a = "x"
x = posetset_
descendingRegionIsClosedUnderApplication :: Note
descendingRegionIsClosedUnderApplication = thm $ do
lab descendingRegionIsClosedUnderApplicationTheoremLabel
s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", monotonic, " ", function]
ma $ fa (a ∈ x) $ x ∈ desc f ⇒ f_ x ∈ desc f
proof $ do
s ["Let ", m a, " be an element of ", m $ desc f]
s ["Because ", m $ f_ a ⊆: a, " holds, and because ", m f, " is monotonic, ", m $ f_ (f_ a) ⊆: f_ a, " must also hold"]
s ["This means that ", m $ f_ a, " is in the descending region"]
where
f = fun_
f_ = fn f
a = "x"
x = posetset_
topInDescendingRegion :: Note
topInDescendingRegion = thm $ do
lab topElementIsInDescendingRegionTheoremLabel
s ["Let ", m lat_, " be a ", boundedLattice_, " and let ", m $ fun f x x, " a ", monotonic, " ", function]
ma $ bot ∈ asc f
proof $ do
s [m $ f_ bot, " is an element of ", m x, " and must therefore have the property ", m $ bot ⊆: f_ bot]
s ["This means that ", m bot, " is an element of the ascending region"]
where
f_ = fn f
f = fun_
x = latset_
botInAscendingRegion :: Note
botInAscendingRegion = thm $ do
lab bottomElementIsInAscendingRegionTheoremLabel
s ["Let ", m lat_, " be a ", boundedLattice_, " and let ", m $ fun f x x, " a ", monotonic, " ", function]
ma $ top ∈ desc f
proof $ do
s [m $ f_ top, " is an element of ", m x, " and must therefore have the property ", m $ f_ top ⊆: top]
s ["This means that ", m top, " is an element of the descending region"]
where
f_ = fn f
f = fun_
x = latset_
fixedPointRegionIsIntersectionOfAscAndDesc :: Note
fixedPointRegionIsIntersectionOfAscAndDesc = thm $ do
lab fixedPointRegionIsIntersectionOfAscendingRegionAndDescendingRegionTheoremLabel
s ["Let ", m relposet_, " be a ", poset_, and, m $ fun f x x, " a ", monotonic, " ", function]
ma $ fix f =: asc f ∩ desc f
proof $ do
noindent
itemize $ do
item $ do
bsub
newline
s ["Let ", m a, " be an element of ", m $ fix f]
s ["By definition of ", m $ fix f, ", ", m $ f_ a, " is equal to ", m a]
s ["Because ", m partord_, is, reflexive_, ref partialOrderDefinitionLabel, ref preorderDefinitionLabel, ", ", m $ a ⊆: a, " must hold"]
s ["This means that ", m a, " is both an element of ", m $ asc f, " and of ", m $ desc f, " and therefore in their intersection"]
item $ do
bsup
newline
s ["Let ", m a, " be an element of both ", m $ asc f, and, m $ desc f]
s ["This means that both ", m $ a ⊆: f_ a, and, m $ f_ a ⊆: a, " hold"]
s ["Because ", m partord_, is, antisymmetric_, ", that means that ", m a, " equals ", m $ f_ a, " which entails that ", m a, " is a fixed point of ", m f]
where
f = fun_
f_ = fn f
a = "a"
x = posetset_
tarskiFixedPointTheorem :: Note
tarskiFixedPointTheorem = thm $ do
defineTerm "Tarski's fixed point theorem"
newline
s ["Let", m lat_, "be a", completeLattice_, "and let", m $ fun f x x, "be a", monotone, function]
s [the, fixedPointRegion, m $ fix f, "of", m f, "is a", completeLattice]
s ["Consequently, ", m f, "has a", greatestFixedPoint_, "and a", leastFixedPoint_]
toprove
where
f = fun_
x = latset_
kleeneChainDefinition :: Note
kleeneChainDefinition = de $ do
lab kleeneChainDefinitionLabel
s ["Let ", m lat_, " be a ", lattice_, and, m $ fun f x x, " a ", scottContinuous, " function"]
s [the , kleeneChain', " starting at a point ", m $ a ∈ x, " is the set ", m $ kleeneCh a]
ma $ kleeneCh a === setcmpr (i ∈ naturals) (f ^: i `fn` x)
s [the, kleeneChain, "is sometimes also called the", set, "of", functionIterates]
where
i = "i"
f = fun_
a = "x"
x = latset_
kleenesFixedPointTheorem :: Note
kleenesFixedPointTheorem = do
thm $ do
defineTerm "Kleene's fixed point theorem"
newline
s ["Let ", m lat_, " be a ", completeLattice_, and, m $ fun f x x, " a ", scottContinuous, " function"]
ma $ lfp f =: sup (kleeneCh bot)
toprove
nte $ do
s ["This gives us an algorithm to compute the least fixed point."]
s ["Repeatedly applying ", m f, " to bot until we find a fixed point is enough to find ", m $ lfp f]
where
f = fun_
x = latset_
latticesOverFunctions :: Note
latticesOverFunctions = thm $ do
lab latticesOverFunctionsTheoremLabel
s ["Let ", m $ lat y partord_, " be a ", lattice, and, m x, " a set"]
s [m $ lat (funt x y) po, " is a ", lattice, " where ", m po, " is defined as follows"]
ma $ f << g ⇔ fa (a ∈ dom f) (f -: a ⊆: g -: a)
s ["This also implies the following"]
ma $ (pars $ f ⊔ g) -: a =: (f -: a ⊔ g -: a)
ma $ (pars $ f ⊓ g) -: a =: (f -: a ⊓ g -: a)
toprove
where
f = "f"
g = "g"
a = "a"
x = latset_
y = "Y"
po = partord_ !: (x <> rightarrow <> y)
(<<) = inposet po
completelyMeetPreservingDefinition :: Note
completelyMeetPreservingDefinition = de $ do
let f_ = fun_
f = fn f_
x = "X"
a = "A"
rx = partord_ !: x
infx n = (infsign !: x) <> n
y = "Y"
ry = partord_ !: y
infy n = (infsign !: y) <> n
s ["Let", m $ relposet x rx, and, m $ relposet y ry, "be", posets]
s ["A", function, m $ fun f_ x y, "is called", completelyMeetPreserving', "if the following holds"]
ma $ fa (a ⊆ x) $ f (infx a) =: infy (f_ □ a)
completelyJoinPreservingDefinition :: Note
completelyJoinPreservingDefinition = de $ do
let f_ = fun_
f = fn f_
x = "X"
a = "A"
rx = partord_ !: x
supx n = (supsign !: x) <> n
y = "Y"
ry = partord_ !: y
supy n = (supsign !: y) <> n
s ["Let", m $ relposet x rx, and, m $ relposet y ry, "be", posets]
s ["A", function, m $ fun f_ x y, "is called", completelyJoinPreserving', "if the following holds"]
ma $ fa (a ⊆ x) $ f (supx a) =: supy (f_ □ a)
preservingExamples :: Note
preservingExamples = do
ex $ do
let c1 = "darkgreen"
let (a, b, c, x, y, z) = ("a", "b", "c", "x", "y", "z")
hd1 = hasseDiagram [a, b, c] [(a, c), (b, c)]
hd2 = hasseDiagram [x, y, z] [(x, y), (y, z)]
fun1 = [(a, x), (b, y), (c, z)]
orderFunctionFig 7 normalConfig $ OrderFunctionFig
[("A", hd1),("B", hd2)]
[(c1, fun1)]
s ["In this case, the", function, "is", monotone, "but not", completelyJoinPreserving]
s ["The image of the join of", m "a", and, m "b", is, m "z" <> ", but the join of the images of", m "a", and, "b", "is", m "y"]
ex $ do
let c1 = "darkgreen"
let (a, b, c, x, y) = ("a", "b", "c", "x", "y")
hd1 = hasseDiagram [a, b, c] [(a, c), (b, c)]
hd2 = hasseDiagram [x, y] [(x, y)]
fun1 = [(a, x), (b, x), (c, x)]
orderFunctionFig 5 dotsConfig $ OrderFunctionFig
[("A", hd1),("B", hd2)]
[(c1, fun1)]
s ["In this case, the", function, "is both", monotone, "and", completelyJoinPreserving]
ex $ do
let c = "darkgreen"
let full = [1, 2, 3]
tshow :: [P.Int] -> Text
tshow ls = T.pack $ "{" P.++ intercalate ", " (P.map show ls) P.++ "}"
nodes = [ tshow ls | ls <- subsequences full ]
edges = [ (tshow l1, tshow l2) | l1 <- subsequences full, l2 <- subsequences full, l1 `isSubsequenceOf` l2]
hd = hasseDiagram nodes edges
fun = P.map (\(l1, l2) -> (tshow l1, tshow l2)) [([],[]), ([1],[1]), ([2], [1,2]), ([3],[3]), ([1, 2], [1, 2]), ([2,3], [1,2,3]), ([1,3], [1,2,3]), ([1,2,3],[1,2,3])]
orderFunctionFig 7 normalConfig $ OrderFunctionFig
[(tshow full, hd)]
[(c, fun)]
s ["In this case, the", function, "is both", monotone, "and", completelyJoinPreserving, "but not", completelyMeetPreserving]
galoisConnectionDefinition :: Note
galoisConnectionDefinition = de $ do
lab galoisConnectionDefinitionLabel
lab reductiveDefinitionLabel
lab extensiveDefinitionLabel
s ["Let", m $ lat x rx, and, m $ lat y ry, "be", completeLattices]
s ["Let", m $ fun a x y, and, m $ fun g y x, "be", monotone, functions]
s [m a, and, m g, "form a", galoisConnection', "if the following hold"]
itemize $ do
item $ s [m $ a ● g, "is", reductive' <> ":", m $ fa (y_ ∈ y) $ inposet ry (fn a (fn g y_)) y_]
item $ s [m $ g ● a, "is", extensive' <> ":", m $ fa (x_ ∈ x) $ inposet rx x_ (fn g (fn a x_))]
s ["This is denoted as follows"]
ma $ gcon a g (lat x rx) (lat y ry)
where
a = alpha
g = gamma
x = "X"
x_ = "x"
rx = partord_ !: x
y = "Y"
y_ = "y"
ry = partord_ !: y
galoisConnectionEquivalentDefinition :: Note
galoisConnectionEquivalentDefinition = thm $ do
s ["The following is an equivalent definition of a", galoisConnection]
newline
s ["Let", m $ lat x rx, and, m $ lat y ry, "be", completeLattices]
s ["Let", m $ fun a x y, and, m $ fun g y x, "be", monotone, functions]
s [m a, and, m g, "form a", galoisConnection', "if the following hold"]
ma $ fa (x_ ∈ x) $ fa (y_ ∈ y) $ inposet ry (fn a x_) y_ ⇔ inposet rx x_ (fn g y_)
toprove
where
a = alpha
g = gamma
x = "X"
x_ = "x"
rx = partord_ !: x
y = "Y"
y_ = "y"
ry = partord_ !: y
galoisConnectionExamples :: Note
galoisConnectionExamples = do
let c1 = "red"
c2 = "blue"
s ["In the following examples, the", raw c1, "arrows correspond to", m alpha, "and the", raw c2, "arrows correspond to", m gamma]
ex $ do
s ["The following diagram shows a simple non-trivial", galoisConnection]
let (a, b, c) = ("a", "b", "c")
hd1 = hasseDiagram [a, c] [(a, c)]
hd2 = hasseDiagram [b] []
fun1 = [(a, b), (c, b)]
fun2 = [(b, c)]
orderFunctionFig 3 dotsConfig $ OrderFunctionFig
[("A", hd1), ("B", hd2)]
[(c1, fun1), (c2, fun2)]
ex $ do
s ["The following diagram shows another simple non-trivial", galoisConnection]
let (a, b, c, d) = ("a", "b", "c", "d")
hd1 = hasseDiagram [a, c] [(a, c)]
hd2 = hasseDiagram [b, d] [(b, d)]
fun1 = [(a, b), (c, b)]
fun2 = [(b, c), (d, c)]
orderFunctionFig 4 dotsConfig $ OrderFunctionFig
[("A", hd1), ("B", hd2)]
[(c1, fun1), (c2, fun2)]
ex $ do
s ["The following diagram shows a", galoisConnection, "between two", posets]
s ["One", poset, "is a", subset, "of the", powerset, "of", m ints]
s ["The other is the set of information we can have about the sign of an integer"]
s ["top means it could be anything, bot means it's impossible for this situation to occur, + means that the sign is positive and - means that the sign is negative"]
let hd1 = hasseDiagram [all1, pos1, neg1, zp1, zm1, zero1, none] [(none, zero1), (zero1, zm1), (zero1, zp1), (zp1, pos1), (zm1, neg1), (zero1, neg1), (zero1, pos1), (neg1, all1), (pos1, all1)]
hd2 = hasseDiagram [all2, pos2, neg2, zero2] [(zero2, neg2), (zero2, pos2), (neg2, all2), (pos2, all2)]
fun1 = [(none, zero2), (zero1, pos2), (zp1, pos2), (zm1, neg2), (neg1, neg2), (pos1, pos2), (all1, all2)]
fun2 = [(zero2, none), (neg2, neg1), (pos2, pos1), (all2, all1)]
all1 = "{..., -1, 0, 1, ...}"
pos1 = "{0, 1, ...}"
neg1 = "{... -1, 0}"
zm1 = "{-1, 0}"
zp1 = "{0, 1}"
zero1 = "{0}"
none = "{}"
all2 = "top"
pos2 = "+"
neg2 = "-"
zero2 = "bot"
orderFunctionFig 8 normalConfig $ OrderFunctionFig
[("Concrete", hd1), ("Abstract", hd2)]
[(c1, fun1), (c2, fun2)]
galoisInsertionDefinition :: Note
galoisInsertionDefinition = de $ do
lab galoisInsertionDefinitionLabel
s ["Let", m a, and, m g, "form a", galoisConnection]
s ["This", galoisConnection, "is called a", galoisInsertion', "if", m g, "is", injective]
s ["This is denoted as follows"]
ma $ gins a g (lat x rx) (lat y ry)
where
a = alpha
g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
galoisInsertionOtherJections :: Note
galoisInsertionOtherJections = thm $ do
s ["Let", m a, and, m g, "form a", galoisInsertion]
s [m a, "is", surjective, and, m $ a ● g, "is the identity", function]
toprove
where
a = alpha
g = gamma
galoisConnectionsCompose :: Note
galoisConnectionsCompose = thm $ do
s ["Let", m a1, and, m g1 <> ", as well as", m a2, and, m g2, "form", galoisConnections]
ma $ gcon a1 g1 (lat x rx) (lat y ry)
ma $ gcon a2 g2 (lat y ry) (lat z rz)
s [m (a2 ● a1), and, m (g1 ● g2), "then form a", galoisConnection]
ma $ gcon (a2 ● a1) (g1 ● g2) (lat x rx) (lat z rz)
toprove
where
a = alpha
a1 = a !: 1
a2 = a !: 2
g = gamma
g1 = g !: 1
g2 = g !: 2
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
z = "Z"
rz = partord_ !: z
galoisConnectionsPreserves :: Note
galoisConnectionsPreserves = thm $ do
let a = alpha
g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["Let", m a, and, m g, "form a", galoisConnection]
ma $ gcon a g (lat x rx) (lat y ry)
s [m a, "is", completelyJoinPreserving]
s [m g, "is", completelyMeetPreserving]
toprove
preservesNoGaloisConnection :: Note
preservesNoGaloisConnection = cex $ do
let a = alpha
g = gamma
s ["Let", m a, and, m g, "be", functions, "such that the following hold"]
itemize $ do
item $ s [m a, "is", completelyJoinPreserving]
item $ s [m g, "is", completelyMeetPreserving]
s [m a, and, m g, "do not necessarily form a", galoisConnection]
proof $ do
let c1 = "red"
c2 = "blue"
s ["The following is a diagram of a counter example"]
let (a, b, c, d) = ("a", "b", "c", "d")
(e, f, g, h) = ("e", "f", "g", "h")
hd1 = hasseDiagram [a, b, c, d] [(a, b), (a, c), (b, d), (c, d)]
hd2 = hasseDiagram [e, f, g, h] [(e, f), (e, g), (f, h), (g, h)]
fun1 = [(a, e), (b, f), (c, g), (d, h)]
fun2 = [(e, a), (f, c), (g, b), (h, d)]
orderFunctionFig 7 normalConfig $ OrderFunctionFig
[("A", hd1), ("B", hd2)]
[(c1, fun1), (c2, fun2)]
s ["In this situation", m alpha, "is", completelyJoinPreserving, and, m gamma, "is", completelyMeetPreserving, "but they don't form a", galoisConnection]
let ct = raw c
bt = raw b
s ["Take for example the", element, m ct]
s [m $ fn gamma $ fn alpha ct, "is", m bt, "but", m $ inposet (partord_ !: "A") ct bt, "does not hold"]
galoisConnectionDetermination :: Note
galoisConnectionDetermination = thm $ do
let a = alpha
g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["Let", m a, and, m g, "form a", galoisConnection]
ma $ gcon a g (lat x rx) (lat y ry)
let (<<) = inposet rx
let p = "p"
q = "q"
s [m a, "completely determines", m g, "as follows"]
ma $ fn a p =: infofm y (setcmpr (q ∈ y) (p << fn g q))
s [m g, "completely determines", m a, "as follows"]
ma $ fn a p =: supofm x (setcmpr (p ∈ x) (fn a p << q))
toprove
galoisConnectionExistenceAlpha :: Note
galoisConnectionExistenceAlpha = thm $ do
let a = alpha
g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["If", m $ fun a x y, "is a", completelyJoinPreserving, function <> ", then there exists a function", m $ fun g y x, "such that", m a, and, m g, "form a", galoisConnection]
ma $ gcon a g (lat x rx) (lat y ry)
toprove
galoisConnectionExistenceGamma :: Note
galoisConnectionExistenceGamma = thm $ do
let a = alpha
g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["If", m $ fun g y x, "is a", completelyMeetPreserving, function <> ", then there exists a function", m $ fun a x y, "such that", m a, and, m g, "form a", galoisConnection]
ma $ gcon a g (lat x rx) (lat y ry)
toprove
approximationDefinition :: Note
approximationDefinition = de $ do
lab approximatesDefinitionLabel
lab approximationDefinitionLabel
let a = alpha
g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["Let", m $ fun a x y, and, m $ fun g y x, "form a", galoisConnection]
ma $ gcon a g (lat x rx) (lat y ry)
let f = "f"
h = "h"
s ["Let", m $ fun f x x, and, m $ fun h y y, "be", functions]
s ["We say that", m h, approximates', m f, "if the following holds"]
let x_ = "x"
y_ = "y"
ma $ fa (x_ ∈ x) $ fa (y_ ∈ y) $ inposet ry (fn a x_) y_ ⇒ inposet ry (fn a (fn f x_)) (fn h y_)
approximationEquivalentDefinition :: Note
approximationEquivalentDefinition = thm $ do
s ["An", approximation, "of a", function, "can equivalently be defined as follows"]
newline
let g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["Let", m $ fun g y x, "be a monotone", function, "on the posets", m $ relposet x rx, and, m $ relposet y ry]
let f_ = "F"
f = fn f_
f'_ = "F" <> comm0 "sharp"
f' = fn f'_
s ["Let", m $ fun f_ x x, and, m $ fun f'_ y y, "be", monotone, functions]
s ["We say that", m f'_, approximates, m f_, "if the following holds"]
let go = fn g
(<<) = inposet rx
z = "z"
ma $ fa (z ∈ y) $ f (go z) << go (f' z)
toprove_ "prove that these definitions are in fact equivalent"
approximationExamples :: Note
approximationExamples = do
ex $ do
s ["In the following diagram, the function represented by the blue arrows in the", set, m "A", approximates, "the blue arrow in the", set, m "B"]
s ["The green arrows represent", m alpha]
let c1 = "blue"
c2 = "darkgreen"
(x, fx) = ("x", "f(x)")
(ax, afx, z, gz) = ("a(x)", "a(f(x))", "z", "g(z)")
hd1 = hasseDiagram [x, fx] [(x, fx)]
hd2 = hasseDiagram [ax, afx, z, gz] [(ax, z), (afx, gz)]
funf = [(x, fx)]
fung = [(z, gz)]
funa = [(x, ax), (fx, afx)]
orderFunctionFig 7 normalConfig $ OrderFunctionFig
[("A", hd1), ("B", hd2)]
[(c1, funf), (c1, fung), (c2, funa)]
monotoneEquivalences :: Note
monotoneEquivalences = thm $ do
let a = alpha
g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["Let", m $ fun a x y, and, m $ fun g y x, "form a", galoisConnection]
ma $ gcon a g (lat x rx) (lat y ry)
let f = "f"
h = "h"
s ["Let", m $ fun f x x, and, m $ fun h y y, "be", monotone, functions]
s ["The following statements are equivalent"]
let x_ = "x"
y_ = "y"
(<<) = inposet ry
(<.) = inposet rx
a_ = fn a
g_ = fn g
f_ = fn f
h_ = fn h
enumerate $ do
item $ ma $ fa (x_ ∈ x) $ fa (y_ ∈ y) $ (a_ x_) << y_ ⇒ (a_ (f_ x_)) << (h_ y_)
item $ ma $ fa (y_ ∈ y) $ (a_ (f_ (g_ y_))) << (h_ y_)
item $ ma $ fa (x_ ∈ x) $ (a_ (f_ x_)) << (h_ (a_ x_))
item $ ma $ fa (y_ ⊆ y) $ (f_ (g_ y_)) <. (g_ (h_ y_))
toprove
approximationExists :: Note
approximationExists = thm $ do
let a = alpha
g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["Let", m $ fun a x y, and, m $ fun g y x, "form a", galoisConnection]
ma $ gcon a g (lat x rx) (lat y ry)
let f_ = "f"
f = fn f_
s ["Let", m $ fun f_ x x, "be a", monotone, function]
s ["There always exists an", approximation, "of", m f_]
proof $ do
s ["Because", m x, "is a", completeLattice, "it must contain its", supremum]
let h_ = "h"
h = fn h_
y_ = "y"
s [the, function, m $ func h_ y y y_ (supof x), "therefore", approximates, m f_]
let p = "p"
q = "q"
let ao = fn a
(<<) = inposet ry
s ["Indeed, let", m p, and, m q, "be arbitrary", elements, "of", m x, and, m y, "respectively such that", m $ ao p << y]
s [m $ h y, "is", m $ supof x, "by definition, so ", m $ ao (f x) << h y, "holds by construction"]
s [m h_, "is called the", leastPreciseApproximation', "of", m f_]
mostPreciseApproximationDefinition :: Note
mostPreciseApproximationDefinition = de $ do
lab mostPreciseApproximationDefinitionLabel
let a = alpha
g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["Let", m $ fun a x y, and, m $ fun g y x, "form a", galoisConnection]
ma $ gcon a g (lat x rx) (lat y ry)
let f_ = "f"
f = fn f_
s ["Let", m $ fun f_ x x, "be a", monotone, function]
let h_ = "h"
h = fn h_
z = "z"
s [m $ func h_ y y z $ h z =: fn a (f (fn g z)), "is called the", mostPreciseApproximation', "of", m f_]
leastFixedPointApproximationTheorem :: Note
leastFixedPointApproximationTheorem = thm $ do
defineTerm "Least fixed point approximation"
newline
let a = alpha
g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["Let", m $ fun a x y, and, m $ fun g y x, "form a", galoisConnection]
ma $ gcon a g (lat x rx) (lat y ry)
let f_ = "F"
f = fn f_
f'_ = "F" <> comm0 "sharp"
f' = fn f'_
s ["Let", m $ fun f_ x x, and, m $ fun f'_ y y, "be", monotone, functions, "such that", m f'_, approximates, m f_]
let ao = fn a
(<<) = inposet ry
ma $ ao (f x) << f' y
s ["The following then holds about the", leastFixedPoints, "of", m f_, and, m f'_]
ma $ ao (lfp f_) << lfp f'_
toprove
leastFixedPointApproximationTheoremWithoutGalois :: Note
leastFixedPointApproximationTheoremWithoutGalois = thm $ do
defineTerm "Least fixed point approximation without a Galois connection"
newline
let g = gamma
x = "X"
rx = partord_ !: x
y = "Y"
ry = partord_ !: y
s ["Let", m $ fun g y x, "be a monotone", function, "on the posets", m $ relposet x rx, and, m $ relposet y ry]
let f_ = "F"
f = fn f_
f'_ = "F" <> comm0 "sharp"
f' = fn f'_
s ["Let", m $ fun f_ x x, and, m $ fun f'_ y y, "be", monotone, functions, "such that", m f'_, approximates, m f_]
let go = fn g
(<<) = inposet rx
z = "z"
ma $ fa (z ∈ y) $ f (go z) << go (f' z)
s ["The following then holds about the", leastFixedPoints, "of", m f_, and, m f'_]
ma $ lfp f_ << go (lfp (f'_))
toprove
| |
9885cc8793dec3179ac129e24fd4438f31307305f2e98bd6552b8c967f94e2c1 | LexiFi/gen_js_api | binding_manual.mli | The gen_js_api is released under the terms of an MIT - like license .
(* See the attached LICENSE file. *)
Copyright 2015 by LexiFi .
module M : sig
type t = private Ojs.t
val t_to_js: t -> Ojs.t
val t_of_js: Ojs.t -> t
val prop_get_arg: t -> int [@@js.get "propGetArg"]
val prop_get: unit -> int [@@js.get "propGet"]
val set_prop: t -> int -> unit [@@js.set "prop"]
val set_global: int -> unit [@@js.set "global"]
val new_thing_unit: unit -> t [@@js.new "ThingUnit"]
val new_thing_args: int -> t [@@js.new "ThingArgs"]
val method_call_global: t -> unit [@@js.call "methodCallGlobal"]
val method_call_unit: t -> unit -> int [@@js.call "methodCallUnit"]
val method_call_args: t -> int -> int[@@js.call "methodCallArgs"]
val method_call_unit_unit: t -> unit -> unit[@@js.call "methodCallUnitUnit"]
val method_call_args_unit: t -> int -> unit[@@js.call "methodCallArgsUnit"]
val global: t[@@js.global "global"]
[@@@warning "-32"]
val get: t -> int -> string option [@@js.index_get]
val set: t -> int -> string -> unit [@@js.index_set]
val get: t -> string -> string option [@@js.index_get]
val set: t -> string -> string -> unit [@@js.index_set]
[@@@warning "+32"]
val get: t -> Ojs.t -> string option [@@js.index_get]
val set: t -> Ojs.t -> string -> unit [@@js.index_set]
end | null | https://raw.githubusercontent.com/LexiFi/gen_js_api/bee3b595898fdaf7db0366a9b1a009db9a6c6026/ppx-test/binding_manual.mli | ocaml | See the attached LICENSE file. | The gen_js_api is released under the terms of an MIT - like license .
Copyright 2015 by LexiFi .
module M : sig
type t = private Ojs.t
val t_to_js: t -> Ojs.t
val t_of_js: Ojs.t -> t
val prop_get_arg: t -> int [@@js.get "propGetArg"]
val prop_get: unit -> int [@@js.get "propGet"]
val set_prop: t -> int -> unit [@@js.set "prop"]
val set_global: int -> unit [@@js.set "global"]
val new_thing_unit: unit -> t [@@js.new "ThingUnit"]
val new_thing_args: int -> t [@@js.new "ThingArgs"]
val method_call_global: t -> unit [@@js.call "methodCallGlobal"]
val method_call_unit: t -> unit -> int [@@js.call "methodCallUnit"]
val method_call_args: t -> int -> int[@@js.call "methodCallArgs"]
val method_call_unit_unit: t -> unit -> unit[@@js.call "methodCallUnitUnit"]
val method_call_args_unit: t -> int -> unit[@@js.call "methodCallArgsUnit"]
val global: t[@@js.global "global"]
[@@@warning "-32"]
val get: t -> int -> string option [@@js.index_get]
val set: t -> int -> string -> unit [@@js.index_set]
val get: t -> string -> string option [@@js.index_get]
val set: t -> string -> string -> unit [@@js.index_set]
[@@@warning "+32"]
val get: t -> Ojs.t -> string option [@@js.index_get]
val set: t -> Ojs.t -> string -> unit [@@js.index_set]
end |
3a95098cfc3b57d38b98814a66f9d4da712eeedace92b023247c6b30a08456e8 | hadolint/hadolint | DL3021.hs | module Hadolint.Rule.DL3021 (rule) where
import qualified Data.Text as Text
import Hadolint.Rule
import Language.Docker.Syntax
rule :: Rule args
rule = simpleRule code severity message check
where
code = "DL3021"
severity = DLErrorC
message = "COPY with more than 2 arguments requires the last argument to end with /"
check (Copy (CopyArgs sources t) _)
| length sources > 1 = endsWithSlash t
| otherwise = True
check _ = True
# INLINEABLE rule #
endsWithSlash :: TargetPath -> Bool
endsWithSlash (TargetPath t) =
not (Text.null t) && (Text.last . dropQuotes) t == '/'
| null | https://raw.githubusercontent.com/hadolint/hadolint/321ffc1e00b5e97ec6b516775dae32b616fccc33/src/Hadolint/Rule/DL3021.hs | haskell | module Hadolint.Rule.DL3021 (rule) where
import qualified Data.Text as Text
import Hadolint.Rule
import Language.Docker.Syntax
rule :: Rule args
rule = simpleRule code severity message check
where
code = "DL3021"
severity = DLErrorC
message = "COPY with more than 2 arguments requires the last argument to end with /"
check (Copy (CopyArgs sources t) _)
| length sources > 1 = endsWithSlash t
| otherwise = True
check _ = True
# INLINEABLE rule #
endsWithSlash :: TargetPath -> Bool
endsWithSlash (TargetPath t) =
not (Text.null t) && (Text.last . dropQuotes) t == '/'
| |
78945f469254bafd04485524b0516c9a55e9467536301e0fb9b30f4c8feea6ed | dbousque/lymp | reference.ml |
(* example usage of an object through a reference (here a dict object) *)
open Lymp
let py = init "."
let builtin = builtins py
let () =
(* create a dict *)
let dict = get_ref builtin "dict" [] in
dict["field1 " ] = " value1 "
call dict "__setitem__" [Pystr "field1" ; Pystr "value1"] ;
call dict "__setitem__" [Pystr "field2" ; Pyint 2] ;
call dict "__setitem__" [Pystr "field3" ; Pyfloat 3.3] ;
getting fields , for example ' val1 ' is the string " value1 "
let val1 = get_string dict "get" [Pystr "field1"] in
let val2 = get_int dict "get" [Pystr "field2"] in
let val3 = get_float dict "get" [Pystr "field3"] in
' values ' will be : [ " value1 " ; Pyint 2 ; Pyfloat 3.3 ]
let values_ref = get dict "values" [] in
my_dict.values ( ) returns a ' dict_values ' and not a ' list ' in python 3 ,
so we make a conversion with list(values_ref )
so we make a conversion with list(values_ref) *)
let values = get_list builtin "list" [values_ref] in
print_endline val1 ;
print_endline (string_of_int val2) ;
print_endline (string_of_float val3) ;
print_endline (string_of_int (List.length values)) ;
ouput will be :
2
3.3
3
value1
2
3.3
3
*)
close py | null | https://raw.githubusercontent.com/dbousque/lymp/863d134b33499a7234ae6a5e43d40e3357867031/examples/reference.ml | ocaml | example usage of an object through a reference (here a dict object)
create a dict |
open Lymp
let py = init "."
let builtin = builtins py
let () =
let dict = get_ref builtin "dict" [] in
dict["field1 " ] = " value1 "
call dict "__setitem__" [Pystr "field1" ; Pystr "value1"] ;
call dict "__setitem__" [Pystr "field2" ; Pyint 2] ;
call dict "__setitem__" [Pystr "field3" ; Pyfloat 3.3] ;
getting fields , for example ' val1 ' is the string " value1 "
let val1 = get_string dict "get" [Pystr "field1"] in
let val2 = get_int dict "get" [Pystr "field2"] in
let val3 = get_float dict "get" [Pystr "field3"] in
' values ' will be : [ " value1 " ; Pyint 2 ; Pyfloat 3.3 ]
let values_ref = get dict "values" [] in
my_dict.values ( ) returns a ' dict_values ' and not a ' list ' in python 3 ,
so we make a conversion with list(values_ref )
so we make a conversion with list(values_ref) *)
let values = get_list builtin "list" [values_ref] in
print_endline val1 ;
print_endline (string_of_int val2) ;
print_endline (string_of_float val3) ;
print_endline (string_of_int (List.length values)) ;
ouput will be :
2
3.3
3
value1
2
3.3
3
*)
close py |
e1317bf9e89e776a58271a8572fe4fc7ab94794cd29208b19a1a7f1c708b59e5 | tensorflow/haskell | Gradient.hs | Copyright 2016 TensorFlow authors .
--
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- -2.0
--
-- Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
# LANGUAGE TypeApplications #
module TensorFlow.Gradient
( GradientCompatible
, gradients
) where
import Control.Monad (forM, zipWithM)
import Control.Monad.State.Strict (State, evalState, gets, modify)
import Data.ByteString (ByteString)
import Data.Complex (Complex)
import Data.ProtoLens.Default(def)
import Data.Int (Int32, Int64)
import Data.Foldable (foldlM)
import Data.List (foldl', sortBy)
import Data.Map.Strict (Map)
import qualified Data.IntSet as IntSet
import Data.Maybe (fromMaybe, maybeToList, mapMaybe)
import Data.Ord (comparing)
import Data.ProtoLens.TextFormat (showMessage)
import Data.Set (Set)
import Data.Text (Text)
import Data.Tuple (swap)
import Lens.Family2 (Lens', view, (&), (^.), (.~), (%~), under)
import Lens.Family2.State.Strict (uses)
import Lens.Family2.Stock (at, intAt)
import Lens.Family2.Unchecked (lens, adapter)
import Prelude hiding (sum, tanh)
import Text.Printf (printf)
import qualified Data.Graph.Inductive.Basic as FGL
import qualified Data.Graph.Inductive.Graph as FGL
import qualified Data.Graph.Inductive.PatriciaTree as FGL
import qualified Data.Graph.Inductive.Query.DFS as FGL
import qualified Data.IntMap.Strict as IntMap
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified TensorFlow.GenOps.Core as CoreOps
import TensorFlow.Build
( MonadBuild
, Build
, build
, renderedNodeDefs
, opDef
, opAttr
, opInputs
)
import TensorFlow.BuildOp
import TensorFlow.Ops
( addN
, broadcastGradientArgs
, expandDims
, fill
, matMul
, matMul'
, reducedShape
, reluGrad
, tanh
, tanhGrad
, reshape
, scalar
, shape
, softmaxCrossEntropyWithLogits
, sum
, sigmoid
, sigmoidGrad
, scalarize
, vector
, zerosLike
)
import TensorFlow.Output
( NodeName(..)
, Output(..)
, OutputIx(..)
, outputIndex
)
import TensorFlow.Tensor
( Tensor(..)
, Value
, render
, expr
, Rendered
, tensorNodeName
, renderedOutput
, renderValue
, ToTensor(..)
)
import TensorFlow.Types (Attribute, OneOf, TensorType, attrLens)
import Proto.Tensorflow.Core.Framework.NodeDef (NodeDef)
import Proto.Tensorflow.Core.Framework.NodeDef_Fields
( attr, input, op, name)
type GradientCompatible a =
-- TODO(fmayle): MaxPoolGrad doesn't support Double for some reason.
(Num a, OneOf '[ Float, Complex Float, Complex Double ] a)
-- TODO(fmayle): Support control flow.
-- TODO(fmayle): Support gate_gradients-like option to avoid race conditions.
-- TODO(fmayle): Do we need to consider control inputs? See _PendingCount in
-- tensorflow/python/ops/gradients.py.
TODO(fmayle ): Maybe store the gradient functions and numOutputs on the OpDef .
| Gradient of @y@ w.r.t . each element of @xs@.
gradients :: forall a v1 t m . ( MonadBuild m
, Rendered t
, ToTensor t
, GradientCompatible a
)
=> Tensor v1 a -- ^ The output of the graph.
-> [t a] -- ^ Tensors for which gradients are computed.
-> m [Tensor Value a]
gradients y xs = build $ do
-- The gradients are computed using "reverse accumulation", similarly to
-- what is described here:
#The_chain_rule.2C_forward_and_reverse_accumulation
--
-- The code is summarised as follows:
--
1 . Create an fgl graph of the relevant nodes ( ops ) and edges ( tensors ) .
2 . Initialize the gradient of y to 1 ( ∂y/∂y = 1 ) and the rest of tensor 's
-- gradients to nothing.
3 . Process the nodes in reverse topological order ( i.e. each node comes
-- after all of its outputs so that the output gradients for a node have
-- been completely calculated before it is processed):
a. Record the gradient for each of the node 's output tensors ( ∂y/∂w
-- for each output tensor w).
-- b. Calculate the gradient of y w.r.t. each of the node's input
-- tensors using the gradients of the node's output tensors.
--
-- Written differently, for each output tensor w and input tensor v:
∂y/∂w = ... ( calculated in previous steps )
-- ∂w/∂v = ... (op specific)
-- ∂y/∂v = ∂y/∂w * ∂w/∂v (technically, if tensor v is an input
-- to multiple nodes, then this is only
-- part of ∂y/∂v)
--
4 . Lookup the recorded gradient for each x in xs .
y' <- renderValue y
let yName = tensorNodeName y'
yOne <- render $ fill (shape y') (scalar 1)
TODO(fmayle ): Move this into Build.hs and call it unsafeNodeDefFromName ?
nodeDefLookup :: (NodeName -> NodeDef) <- uses renderedNodeDefs $
(\f x -> fromMaybe (error $ "no NodeDef found for " ++ show x) (f x))
. flip Map.lookup
let (gr, nodeMap) = createGraph yName nodeDefLookup
xnodes = mapMaybe (\x -> nodeMap ^. (at $ outputNodeName $ renderedOutput x)) xs
make a set of the nodes reachable from the xnodes
-- The xnodes are not part of this set (unless reachable from another xnode)
reachableSet = computeReachableSet xnodes gr
-- Set gradient of y to one.
-- TODO: nicer
let initPending :: Map.Map FGL.Node (PendingGradients a)
= Map.empty & (at (nodeMap Map.! yName)
. nonEmpty
. outputIxAt (outputIndex $ renderedOutput y')
. nonEmpty
.~ [yOne]
)
-- Calculate the gradients of y w.r.t. each node in the graph.
gradientMap <- graphGrads gr reachableSet initPending
-- Lookup the gradients for each x.
forM xs $ \x ->
let Output i xName = renderedOutput x
in maybe (render $ zerosLike $ toTensor x) return $ do
n <- nodeMap ^. at xName
gradientMap ^. at n . nonEmpty . outputIxAt i
-- | Compute a set of nodes reachable from the start nodes
--
-- the start nodes are excluded, unless reachable from another start node
computeReachableSet :: [FGL.Node] -> Graph -> IntSet.IntSet
computeReachableSet vs g =
IntSet.fromList $ concatMap (drop 1 . FGL.preorder) (FGL.dff vs g)
outputIxAt :: OutputIx -> Lens' (IntMap.IntMap v) (Maybe v)
-- NOTE: point-free notation leads to unification problems here
outputIxAt x = intAt (unOutputIx x)
-- | Incomplete gradients of a node's outputs.
--
The lists represent partial sums . The key is an OutputIx sans newtype .
type PendingGradients a = IntMap.IntMap [Tensor Value a]
| Gradients of a node 's outputs . The key is an OutputIx sans newtype .
-- TODO: precache the rendering?
type Gradients a = IntMap.IntMap (Tensor Value a)
| Graph of TensorFlow operations .
type Graph = FGL.Gr NodeDef EdgeLabel
-- | Data associated with an edge.
--
-- Pair of
-- 1. Output index of a tensor from the source node.
2 . Input index that the tensor connects to on the destination node .
type EdgeLabel = (OutputIx, OutputIx)
-- | State used for calculating gradients.
data GradientsState a = GradientsState
{ _gradientsPending :: !(Map FGL.Node (PendingGradients a))
, _gradientsResult :: !(Map FGL.Node (Gradients a))
}
gradientsPending :: Lens' (GradientsState a) (Map FGL.Node (PendingGradients a))
gradientsPending = lens _gradientsPending (\x y -> x { _gradientsPending = y })
gradientsResult :: Lens' (GradientsState a) (Map FGL.Node (Gradients a))
gradientsResult = lens _gradientsResult (\x y -> x { _gradientsResult = y })
-- TODO(fmayle): Use something like Data.List.Safe.
-- | Safe version of (!!).
safeIndex :: [a] -> Int -> Maybe a
_ `safeIndex` n | n < 0 = Nothing
[] `safeIndex` _ = Nothing
(x:_) `safeIndex` 0 = Just x
(_:xs) `safeIndex` n = xs `safeIndex` (n-1)
-- Copy of -3.9.0.2/docs/Control-Lens-Iso.html#v%3anon
anon :: a -> (a -> Bool) -> Lens' (Maybe a) a
anon a p = under (adapter (fromMaybe a) go) where
go b | p b = Nothing
| otherwise = Just b
non :: Eq a => a -> Lens' (Maybe a) a
non a = anon a (a==)
| Lens that defaults Nothing to .
nonEmpty :: (Monoid (t v), Foldable t) => Lens' (Maybe (t v)) (t v)
nonEmpty = anon mempty null
-- TODO: strictness (e.g., foldlM')
-- | Calculate the gradients for every node in a graph.
graphGrads :: forall a. GradientCompatible a
=> Graph
-> IntSet.IntSet
-> Map FGL.Node (PendingGradients a)
^ Initial gradients ( usually just 1 for the node of interest ) .
-> Build (Map FGL.Node (Gradients a))
graphGrads gr reachableSet initPending = view gradientsResult <$> foldlM go initState nodeOrder
where
initState = GradientsState initPending Map.empty
-- Reverse topological sort.
nodeOrder = FGL.topsort . FGL.grev $ gr
go :: GradientsState a -> Int -> Build (GradientsState a)
go state node = do
-- Aggregate the accumulated gradients for this node.
outputGrads <-
sumPendingGradient (state ^. gradientsPending . at node . nonEmpty)
if null outputGrads
then pure state
else do
let nextState = state & gradientsResult %~ Map.insert node outputGrads
-- Only consider nodes that are reachable from the inputs to
-- avoid calculating gradients that won't be used.
if node `IntSet.member` reachableSet
then do
let ctx = FGL.context gr node
inputGrads <- calculateInputGrads ctx outputGrads gr
-- Calculate the gradients for each of the node's inputs.
pure $ updatePendingGradients ctx inputGrads nextState
else
pure nextState
| Reduce accumulated gradients for each output to one Tensor .
sumPendingGradient :: GradientCompatible a
=> PendingGradients a -> Build (Gradients a)
sumPendingGradient = sequence . IntMap.mapMaybe f
where
f [] = Nothing
f [x] = Just (pure x)
f xs = Just (render $ addN xs)
-- | Calculate the gradients of a node's input tensors.
--
-- This is mostly just a wrapper around opGrad.
calculateInputGrads :: forall a. GradientCompatible a
=> FGL.Context NodeDef EdgeLabel
-> Gradients a -- ^ Output gradients of the node.
-> Graph
-> Build [Maybe (Tensor Value a)]
calculateInputGrads (inputEdges, _, nodeDef, _) outputGrads gr = do
fullOutGrads <- fullOutputGrads (numOutputs nodeDef) (nodeDefName nodeDef)
outputGrads
traverse (traverse render) $ opGrad (nodeDef ^. op) nodeDef inputTensors fullOutGrads
where
-- Create a tensor from an edge (technically an Output, but it seems less
-- confusing to refer to it as a tensor here).
edgeToTensor :: (EdgeLabel, FGL.Node) -> Output
edgeToTensor ((i, _), n) =
case FGL.lab gr n of
Just edgeNodeDef -> Output i (NodeName $ edgeNodeDef ^. name)
Nothing -> error $ "calculateInputGrads: missing input node for "
++ Text.unpack (nodeDef ^. name)
-- Input tensors, sorted by input index.
inputTensors = map edgeToTensor $ sortBy (comparing (snd . fst)) inputEdges
| Convert a Map of gradients to a list , with zeros for missing outputs .
fullOutputGrads :: (TensorType a, Num a)
=> OutputIx -- ^ Number of outputs.
-> NodeName
-> Gradients a
-> Build [Tensor Value a]
fullOutputGrads n o gs =
mapM (\i -> maybe (render $ zero i) return (gs ^. outputIxAt i)) [0..n-1]
where
A tensor of zeros with the same shape as the i'th output .
zero i = zerosLike $ toT (Output i o)
-- | Update the pending gradients of a node's inputs.
updatePendingGradients :: forall a. (TensorType a, Num a)
=> FGL.Context NodeDef EdgeLabel
-> [Maybe (Tensor Value a)]
-- ^ Gradient of each input tensor.
-> GradientsState a
-> GradientsState a
updatePendingGradients (inputEdges, _, nodeDef, _) inputGrads initState =
foldl' go initState inputEdges
where
go :: GradientsState a -> (EdgeLabel, FGL.Node) -> GradientsState a
go state ((outIndex, OutputIx inIndex), node) =
case maybeGradient of
Nothing -> state
Just g ->
-- Add to the list of pending gradients for this tensor.
state & gradientsPending
. at node
. nonEmpty
. outputIxAt outIndex
. nonEmpty
%~ (g:)
where
badSizeErr = error $ printf "updatePendingGradients: bad input index \
\%d for inputGrads of length %d in %s"
inIndex (length inputGrads)
(show (nodeDef ^. name))
maybeGradient = fromMaybe badSizeErr (safeIndex inputGrads inIndex)
-- | Create a graph that includes a node and its transitive dependencies.
--
-- Returns the fgl graph (nodes labelled with their NodeDefs, edges labelled
-- with (output index, input index)) and a map from node name to fgl node id.
createGraph :: NodeName -> (NodeName -> NodeDef)
            -> (Graph, Map NodeName FGL.Node)
createGraph nodeName nodeDefLookup = (FGL.nmap nodeDefLookup graph, nodeMap)
  where
    -- Parse a tensor name of the form "node:output_index" (or "node").
    parseTensorName :: Text -> Maybe (NodeName, OutputIx)
    parseTensorName n
        | Text.null n        = error "parseTensorName: empty name"
        | Text.head n == '^' = Nothing  -- Control edge
        | otherwise          =
            let (nm, indexStr) = Text.breakOn ":" n
                index | Text.null indexStr = 0
                      | otherwise = read $ Text.unpack $ Text.tail indexStr
            in Just (NodeName nm, OutputIx index)

    -- Build a map from node name to outward edges.
    --
    -- The state is the set of visited nodes.
    collect :: Maybe (NodeName, OutputIx, OutputIx)
            -> NodeName
            -> State (Set NodeName)
                     (Map NodeName [(NodeName, OutputIx, OutputIx)])
    collect outgoingEdge nm = do
        let nextLookup = Map.singleton nm (maybeToList outgoingEdge)
        seen <- gets (Set.member nm)
        modify (Set.insert nm)
        if seen
            then pure nextLookup
            else do
                let inputs = nodeDefLookup nm ^. input
                    recurse inIndex (parentName, outIndex) =
                        collect (Just (nm, outIndex, inIndex)) parentName
                subEdgeLookups <-
                    zipWithM recurse [0..] $ mapMaybe parseTensorName inputs
                pure $ Map.unionsWith (++) (nextLookup:subEdgeLookups)

    edgeLookup = evalState (collect Nothing nodeName) Set.empty
    -- Associate an ID with each node name.
    nodeMap = Map.fromList $ zip (Map.keys edgeLookup) [0..]
    -- Create the graph.
    graph = FGL.mkGraph (swap <$> Map.toList nodeMap)
                        [ (nodeMap Map.! n, nodeMap Map.! m, (i, j))
                        | (n, edges) <- Map.toList edgeLookup
                        , (m, i, j) <- edges
                        ]
-- | Function to compute the gradient of y w.r.t. each input.
--
-- Let y be an arbitrary tensor
-- and [w_0, ..., w_n] be the output tensors of a node
-- and [v_0, ..., v_n] be the input tensors of the same node.
--
-- Given [∂y/∂w_0, ..., ∂y/∂w_n] and [v_0, ..., v_n], a GradientFunc computes
-- [∂y/∂v_0, ..., ∂y/∂v_n] for a particular op type.
--
-- A Nothing gradient is equivalent to zero (but allows for short circuiting
-- computation when all the gradients for something are Nothing).
type GradientFunc a = NodeDef
                    -> [Output]
                    -- ^ Input tensors.
                    -> [Tensor Value a]
                    -- ^ Gradient of y w.r.t. each output tensor.
                    -> [Maybe (Tensor Build a)]
                    -- ^ Gradient of y w.r.t. each input tensor.

-- TODO(fmayle): Assert the type is correct.
-- | Create a Tensor from an Output.
toT :: Output -> Tensor Build a
toT = Tensor . pure
-- | Wrapper around `TensorFlow.GenOps.Core.slice` that builds vectors from scalars for
-- simple slicing operations.
flatSlice :: forall v1 t . TensorType t
          => Tensor v1 t    -- ^ __input__
          -> Int32          -- ^ __begin__: specifies the offset into the first dimension of
                            -- 'input' to slice from.
          -> Int32          -- ^ __size__: specifies the number of elements of the first dimension
                            -- of 'input' to slice. If size is -1, all remaining elements in the dimension
                            -- are included in the slice (i.e. this is equivalent to setting
                            -- size = input.dim_size(0) - begin).
          -> Tensor Build t -- ^ __output__
flatSlice t begin size = CoreOps.slice t (vector [begin]) (vector [size])
-- | The name of the node described by a NodeDef.
nodeDefName :: NodeDef -> NodeName
nodeDefName = NodeName . view name
-- | Gradient helper for binary component wise operations.
--
-- Given each input paired with its incoming gradient, reduce each gradient
-- over the broadcast dimensions and reshape it back to the input's shape.
-- See math_grad.py #L329 in the Python sources.
gradForBinaryCwise :: ( OneOf '[ Int32, Int64, Float, Double, Complex Float, Complex Double ] t
                      )
                   => (Tensor v1 t, Tensor v1 t)
                   -> (Tensor v1 t, Tensor v1 t)
                   -> [ Maybe (Tensor Build t) ]
gradForBinaryCwise (x, gx) (y, gy) =
    [ Just dx
    , Just dy ]
  where
    dx = reshape (sum gx rx) sx
    dy = reshape (sum gy ry) sy
    sx = shape x
    sy = shape y
    (rx, ry) = broadcastGradientArgs sx sy
-- | The gradient function for an op type.
--
-- These implementations should match their python counterparts in:
-- third_party/tensorflow/python/ops/*_grad.py
opGrad :: forall a . GradientCompatible a => Text -> GradientFunc a

opGrad "Abs" _ [toT -> x] [dz] = [Just $ expr dz * signum x]
opGrad "Neg" _ [_] [dz] = [Just $ negate $ expr dz]
opGrad "Relu" _ [toT -> x] [dz] = [Just $ reluGrad dz x]
opGrad "ReluGrad" _ [_, toT -> x ] [dz] = [Just $ reluGrad dz x, Just $ CoreOps.zerosLike x]
opGrad "Tanh" _ [toT -> x] [dz] = [Just $ tanhGrad (tanh x) dz]
opGrad "Sigmoid" _ [toT -> x] [dz] = [Just $ sigmoidGrad (sigmoid x) dz]
opGrad "Concat" _ _ix [dy]
    -- Concat concatenates input tensors
    --   x1 of shape s1 = [k1, ..., ki_1, ..., kn]
    --   x2 of shape s2 = [k1, ..., ki_2, ..., kn]
    --    .          .         .
    --    .          .         .
    --    .          .         .
    --   xm of shape sm = [k1, ..., ki_m, ..., kn]
    -- along dimension i to an output tensor
    --   y of shape sy = [k1, ..., k, ..., kn]
    -- where k = sum ki = sum [ki_1, ..., ki_m]
    --
    -- The incoming gradient dy from backpropagation is
    -- simply forwarded split across input tensors yielding dx.
    -- Forwarded gradients have shapes s = [s1, ..., sm].
    | m == 1    = Nothing : [Just $ expr dy]
    | otherwise = Nothing : map Just (dx `reshapeZip` s)
  where
    reshapeZip = zipWith reshape
    dx = CoreOps.splitV (fromIntegral m) dy ki _i
    s :: [Tensor Build Int32]
    s = map shape x
    x :: [Tensor Build a]
    x = map toT $ tail _ix
    -- i: concat dimension. Adjusted modulo n to handle negative indices.
    _i = toT (head _ix) `CoreOps.floorMod` n
    i = reshape _i $ vector [1 :: Int32]
    -- sizes along concatenated dimension
    ki :: Tensor Build Int32
    ki = CoreOps.concat 0 $ map (\t -> CoreOps.slice t i $ vector [1 :: Int32]) s
    m = length x
    n = CoreOps.rank (head x)
opGrad "Square" _ [toT -> x] [dz] =
    -- TODO(fmayle): Handle complex numbers.
    -- TODO(fmayle): The python code makes dz a control dependency of the 2*x
    -- (for performance reasons?). Will need to put these functions in the Build
    -- monad to replicate that.
    [Just $ dz `CoreOps.mul` (2 * x)]

opGrad "Gather" _ [toT -> x, toT -> indices] [dz] =
    -- TODO(fmayle): The python version uses a better performance implementation
    -- when the shape is known without having to run the graph.
    -- TODO(fmayle): We shouldn't convert the result to a dense tensor. Sparse
    -- tensor support will require some thinking.
    [ Just $ CoreOps.unsortedSegmentSum values indices' numRows
    , Nothing
    ]
  where
    -- TODO(gnezdo): Use colocateWith but it requires Build monad.
    denseShape = shape (x :: Tensor Build a)
    numRows = scalarize $ flatSlice denseShape 0 1
    valuesShape = CoreOps.concat 0 [ allDimensions
                                   , flatSlice denseShape 1 (-1)
                                   ]
    values = reshape dz valuesShape
    -- TODO(fmayle): This could be either Int32 or Int64.
    indices' = reshape indices allDimensions :: Tensor Build Int32

opGrad "Max" _ [toT -> x, toT -> indices] [dz] =
    [Just $ indicators `CoreOps.div` numSelected * dz', Nothing]
  where
    sx = shape (x :: Tensor Build a)
    outputShapeKeptDims = reducedShape sx (indices :: Tensor Build Int32)
    y = CoreOps.max x indices
    y' = reshape y outputShapeKeptDims
    dz' = reshape dz outputShapeKeptDims
    indicators = CoreOps.cast $ CoreOps.equal y' x
    numSelected = reshape (sum indicators indices) outputShapeKeptDims

-- Min and Max have identical gradient implementations.
opGrad "Min" u v w = opGrad "Max" u v w

-- Element wise maximum gradient
-- See math_grad.py #L473 in the Python sources.
opGrad "Maximum" _ [toT -> x, toT -> y] [dz] =
    gradForBinaryCwise (x, gx) (y, gy)
  where
    xmask = CoreOps.greaterEqual x y
    gx = CoreOps.select xmask dz (CoreOps.zerosLike dz)
    gy = CoreOps.select (CoreOps.logicalNot xmask) dz (CoreOps.zerosLike dz)
opGrad "Sum" _ [toT -> x, toT -> indices] [dz] =
    [ Just $ CoreOps.tile grad tileScaling, Nothing ]
  where
    -- TODO(gnezdo): Implement the fast-path from math_grad._SumGrad.
    sx = shape (x :: Tensor Build a)
    outputShapeKeptDims = reducedShape sx (indices :: Tensor Build Int32)
    tileScaling = safeShapeDiv sx outputShapeKeptDims
    grad = reshape dz outputShapeKeptDims

opGrad "Mean" u v@[toT -> x, _] w =
    [Just $ dz `CoreOps.div` (CoreOps.stopGradient $ CoreOps.cast $ factor), Nothing]
  where
    [Just dz, Nothing] = opGrad "Sum" u v w
    inputShape = shape (x :: Tensor Build a)
    outputShape = shape (dz :: Tensor Build a)
    -- TODO(fmayle): Add fast path when shape is known.
    inputSize = CoreOps.prod inputShape $ rangeOfRank inputShape
    outputSize = CoreOps.prod outputShape $ rangeOfRank outputShape
    factor = safeShapeDiv inputSize outputSize

opGrad "Add" _ [toT -> x, toT -> y] [dz] =
    [ Just $ reshape (sum dz rx) sx
    , Just $ reshape (sum dz ry) sy ]
  where
    sx = shape (x :: Tensor Build a)
    sy = shape (y :: Tensor Build a)
    (rx, ry) = broadcastGradientArgs sx sy

-- Copies the gradients to all inputs
-- Not broadcasting
opGrad "AddN" _ inputs [dz] =
    map ((const . Just . expr) dz) inputs

opGrad "Sub" u v w =
    [Just x, Just (-y)]
  where
    [Just x, Just y] = opGrad "Add" u v w

opGrad "SoftmaxCrossEntropyWithLogits" _ [toT -> x, toT -> y] [dz, _] =
    [ Just $ expandDims dz (-1) * snd (softmaxCrossEntropyWithLogits x y)
    , Nothing ]

opGrad "Mul" _ [toT -> x, toT -> y] [dz] =
    -- TODO(fmayle): Handle complex numbers.
    [ Just $ reshape (sum (dz `CoreOps.mul` y) rx) sx
    , Just $ reshape (sum (x `CoreOps.mul` dz) ry) sy ]
  where
    sx = shape (x :: Tensor Build a)
    sy = shape (y :: Tensor Build a)
    (rx, ry) = broadcastGradientArgs sx sy

opGrad "Div" _ [toT -> x, toT -> y] [dz] =
    -- TODO(fmayle): Handle complex numbers.
    -- TODO(gnezdo): Provide Fractional instance and use '/' instead of div.
    [ Just $ reshape (sum (dz `CoreOps.div` y) rx) sx
    , Just $ reshape (sum (dz `CoreOps.mul` (negate x `CoreOps.div` (y * y)))
                     ry)
             sy
    ]
  where
    sx = shape (x :: Tensor Build a)
    sy = shape (y :: Tensor Build a)
    (rx, ry) = broadcastGradientArgs sx sy
opGrad "MatMul" nodeDef [toT -> x, toT -> y] [dz] =
    let transposeA = lookupAttr nodeDef "transpose_a"
        transposeB = lookupAttr nodeDef "transpose_b"
        transAttrs a b =
            (opAttr "transpose_a" .~ a) . (opAttr "transpose_b" .~ b)
    in case (transposeA, transposeB) of
       (False, False) ->
           [ Just $ matMul' (transAttrs False True) dz y
           , Just $ matMul' (transAttrs True False) x dz]
       (False, True) ->
           [ Just $ matMul dz y
           , Just $ matMul' (transAttrs True False) dz x]
       (True, False) ->
           [ Just $ matMul' (transAttrs False True) y dz
           , Just $ matMul x dz]
       (True, True) ->
           [ Just $ matMul' (transAttrs True True) y dz
           , Just $ matMul' (transAttrs True True) dz x]

opGrad "BatchMatMul" nodeDef [toT -> x, toT -> y] [dz] =
    let adjX = lookupAttr nodeDef "adj_x"
        adjY = lookupAttr nodeDef "adj_y"
        adjAttrs a b =
            (opAttr "adj_x" .~ a) . (opAttr "adj_y" .~ b)
    in case (adjX, adjY) of
       (False, False) ->
           [ Just $ CoreOps.batchMatMul' (adjAttrs False True) dz y
           , Just $ CoreOps.batchMatMul' (adjAttrs True False) x dz]
       (False, True) ->
           [ Just $ CoreOps.batchMatMul dz y
           , Just $ CoreOps.batchMatMul' (adjAttrs True False) dz x]
       (True, False) ->
           [ Just $ CoreOps.batchMatMul' (adjAttrs False True) y dz
           , Just $ CoreOps.batchMatMul x dz]
       (True, True) ->
           [ Just $ CoreOps.batchMatMul' (adjAttrs True True) y dz
           , Just $ CoreOps.batchMatMul' (adjAttrs True True) dz x]

opGrad "Transpose" _ [_, toT -> p] [dz] =
    [ Just $ CoreOps.transpose dz
             (CoreOps.invertPermutation p :: Tensor Build Int32)
    , Nothing
    ]
opGrad "Conv2D" nodeDef [toT -> x, toT -> y] [dz] =
    [ Just $ CoreOps.conv2DBackpropInput'
             ((opAttr "strides" .~ strides)
              . (opAttr "use_cudnn_on_gpu" .~ useCudnnOnGpu)
              . (opAttr "data_format" .~ dataFormat))
             padding (shape x) y dz
    , Just $ CoreOps.conv2DBackpropFilter'
             ((opAttr "strides" .~ strides)
              . (opAttr "use_cudnn_on_gpu" .~ useCudnnOnGpu)
              . (opAttr "data_format" .~ dataFormat))
             padding x (shape y) dz
    ]
  where
    strides = lookupAttr nodeDef "strides" :: [Int64]
    padding = lookupAttr nodeDef "padding" :: ByteString
    useCudnnOnGpu = lookupAttr nodeDef "use_cudnn_on_gpu" :: Bool
    dataFormat = lookupAttr nodeDef "data_format" :: ByteString

opGrad "Conv2DBackpropInput" nodeDef [_, toT -> x, toT -> y] [dz] =
    [ Nothing
    , Just $ CoreOps.conv2DBackpropFilter'
             ((opAttr "strides" .~ strides)
              . (opAttr "use_cudnn_on_gpu" .~ useCudnnOnGpu)
              . (opAttr "data_format" .~ dataFormat))
             padding dz (shape x) y
    , Just $ CoreOps.conv2D'
             ((opAttr "strides" .~ strides)
              . (opAttr "use_cudnn_on_gpu" .~ useCudnnOnGpu)
              . (opAttr "data_format" .~ dataFormat))
             padding dz x
    ]
  where
    strides = lookupAttr nodeDef "strides" :: [Int64]
    padding = lookupAttr nodeDef "padding" :: ByteString
    useCudnnOnGpu = lookupAttr nodeDef "use_cudnn_on_gpu" :: Bool
    dataFormat = lookupAttr nodeDef "data_format" :: ByteString

opGrad "DepthwiseConv2dNative" nodeDef [toT -> x, toT -> y] [dz] =
    [ Just $ CoreOps.depthwiseConv2dNativeBackpropInput'
             ((opAttr "strides" .~ strides)
              . (opAttr "data_format" .~ dataFormat))
             padding (shape x) y dz
    , Just $ CoreOps.depthwiseConv2dNativeBackpropFilter'
             ((opAttr "strides" .~ strides)
              . (opAttr "data_format" .~ dataFormat))
             padding x (shape y) dz
    ]
  where
    strides = lookupAttr nodeDef "strides" :: [Int64]
    padding = lookupAttr nodeDef "padding" :: ByteString
    dataFormat = lookupAttr nodeDef "data_format" :: ByteString

opGrad "DepthwiseConv2dNativeBackpropInput" nodeDef [_, toT -> x, toT -> y] [dz] =
    [ Nothing
    , Just $ CoreOps.depthwiseConv2dNativeBackpropFilter'
             ((opAttr "strides" .~ strides)
              . (opAttr "data_format" .~ dataFormat))
             padding dz (shape x) y
    , Just $ CoreOps.depthwiseConv2dNative'
             ((opAttr "strides" .~ strides)
              . (opAttr "data_format" .~ dataFormat))
             padding dz x
    ]
  where
    strides = lookupAttr nodeDef "strides" :: [Int64]
    padding = lookupAttr nodeDef "padding" :: ByteString
    dataFormat = lookupAttr nodeDef "data_format" :: ByteString
opGrad "MaxPool" nodeDef [toT -> x] [dz] =
    [ Just $ CoreOps.maxPoolGrad'
             ((opAttr "ksize" .~ ksize)
              . (opAttr "strides" .~ strides)
              . (opAttr "data_format" .~ dataFormat))
             padding x output dz
    ]
  where
    output :: Tensor Build a
    output = toT $ Output 0 (nodeDefName nodeDef)
    ksize = lookupAttr nodeDef "ksize" :: [Int64]
    strides = lookupAttr nodeDef "strides" :: [Int64]
    padding = lookupAttr nodeDef "padding" :: ByteString
    dataFormat = lookupAttr nodeDef "data_format" :: ByteString

opGrad "Reshape" _ [toT -> x, _] [dz] =
    [Just $ reshape dz $ shape (x :: Tensor Build a), Nothing]

opGrad "ExpandDims" n xs@[toT -> _, _] dzs@[_] = opGrad "Reshape" n xs dzs

opGrad "Squeeze" _ [toT -> x] [dz] = [Just $ reshape dz $ shape (x :: Tensor Build a)]

opGrad "Pad" _ [toT -> x, toT -> padPattern] [dz] =
    [Just $ CoreOps.slice dz gradientSliceBegin gradientSliceSize, Nothing]
  where
    v1 = vector [1]
    -- For some reason rankx' has an empty shape
    rankx' = CoreOps.rank (x :: Tensor Build Float)
    rankx = CoreOps.reshape rankx' v1
    -- Size of column that is sliced from pad pattern
    padPatternSliceSize = CoreOps.concat 0 [rankx, v1]
    padPatternSliceBegin = vector [0, 0]
    padPatternSliced :: Tensor Build Int32 = CoreOps.slice padPattern padPatternSliceBegin padPatternSliceSize
    -- The slice of the pad pattern has the same rank as the pad pattern itself
    gradientSliceBegin = CoreOps.reshape padPatternSliced rankx
    gradientSliceSize = shape (x :: Tensor Build Float)
-- Gradient for Slice
--
-- Create an Nx2 padding where N is the rank of (grad of) Slice and the first
-- column represents how many zeros are to be prepended for each dimension, and the second
-- column indicates how many zeros are appended.
--
-- The number of zeros to prepend is the shape of the beginvec.
-- The number of zeros to append is the shape of the inputvec
-- elementwise-subtracted by both the beginvec and the gradient's shape.
--
-- Some more reshaping is needed to assemble this tensor with the
-- right dimensions.
opGrad "Slice" _ [toT -> inputvec, toT -> beginvec, _] [dz] =
    [Just $ CoreOps.pad dz paddings, Nothing, Nothing]
  where
    v1 = vector [1 :: Int32]
    inputRank' = CoreOps.rank (inputvec :: Tensor Build Float)
    -- For some reason inputRank' has an empty shape
    inputRank = CoreOps.reshape inputRank' v1
    padShape = CoreOps.concat 0 [inputRank, v1]
    beforePad = CoreOps.reshape beginvec padShape
    afterPad = CoreOps.reshape (shape inputvec - shape dz - beginvec) padShape
    paddings = CoreOps.concat 1 [beforePad, afterPad]

-- TODO: This could be either Int32 or Int64.
opGrad "BatchToSpaceND" _ [_, toT @Int32 -> blockShape, toT @Int32 -> crops] [dz] =
    [Just $ CoreOps.spaceToBatchND dz blockShape crops, Nothing, Nothing]
-- TODO: This could be either Int32 or Int64.
opGrad "SpaceToBatchND" _ [_, toT @Int32 -> blockShape, toT @Int32 -> paddings] [dz] =
    [Just $ CoreOps.batchToSpaceND dz blockShape paddings, Nothing, Nothing]

opGrad "OneHot" _ _ _ = [Nothing, Nothing, Nothing, Nothing]
opGrad "TruncatedNormal" _ _ _ = [Nothing]

opGrad "RefIdentity" _ _ [dz] = [Just $ expr dz]

opGrad "Cast" nodeDef _ [dz] = [Just reverseCast]
  where
    -- TODO(gnezdo): too permissive, python only allows float types as src_type.
    reverseCast =
        pureOp [] $ pure (opDef "Cast"
                          & opAttr "DstT" .~ (lookupAttr nodeDef "SrcT" :: ByteString)
                          & opAttr "SrcT" .~ (lookupAttr nodeDef "DstT" :: ByteString)
                          & opInputs .~ [renderedOutput dz])

opGrad "DynamicStitch" nodeDef inputs [dz] =
    replicate halfLen Nothing ++ valuesGrads
  where
    halfLen =
        let len = length inputs
            half = len `div` 2
        in if 2 * half == len
           then half
           else error ("Uneven input size " ++ show (len, showMessage nodeDef))
    valuesGrads = [ Just $ CoreOps.gather dz (toT idx :: Tensor Build Int32)
                  | idx <- take halfLen inputs
                  ]

opGrad "DynamicPartition" nodeDef [toT -> xs, toT -> indices] dz =
    [ Just reconstructed, Nothing ]
  where
    reconstructed = CoreOps.reshape stitched
                    (CoreOps.shape (xs :: Tensor Build a) :: Tensor Build Int32)
    stitched = CoreOps.dynamicStitch partitionedIndices dz
    partitionedIndices = CoreOps.dynamicPartition np originalIndices indices
    np = lookupAttr nodeDef "num_partitions" :: Int64
    originalIndices =
        CoreOps.reshape (CoreOps.range 0 (CoreOps.size indices) 1) prefixShape
    prefixShape = shapeInt32 indices
    shapeInt32 t = CoreOps.shape t :: Tensor Build Int32

opGrad "Select" _ [toT -> c, toT -> x, _] [dz] =
    [ Nothing
    , Just $ CoreOps.select c dz zeros
    , Just $ CoreOps.select c zeros dz
    ]
  where zeros = CoreOps.zerosLike x

-- TODO(gnezdo): Unlike Python, no control dependency on dz.
opGrad "Log" _ [toT -> x] [dz] = [ Just $ dz `CoreOps.mul` CoreOps.inv x ]
-- TODO(gnezdo): Reuse the output instead of doing another exp,
-- though, it is probably CSE'd away anyway.
opGrad "Exp" _ [toT -> x] [dz] = [ Just $ dz `CoreOps.mul` CoreOps.exp x ]
opGrad "SparseSegmentSum" _ [toT -> x, toT -> y, toT -> t] [dz] =
    [ Just $ CoreOps.unsortedSegmentSum
             (CoreOps.gather dz (t :: Tensor Build Int32))
             (y :: Tensor Build Int32) inputRows
    , Nothing
    , Nothing
    ]
  where inputRows = flatSlice (shape (x :: Tensor Build a)) 0 1

opGrad "LabelClasses" _ _ _ = [Nothing, Nothing]
opGrad "LabelWeights" _ _ _ = [Nothing]
opGrad "Size" _ _ _ = [Nothing]

-- TODO: Python implementation uses set_shape for
-- static shape inference, which is unsupported.
-- TODO: implement support for static shape inference
opGrad "Tile" _ [toT -> x, toT -> multiples] [dz] =
    [Just inputGrad, Nothing]
  where
    inputGrad = sum reshapedDz axes
    inputShape = shape (x :: Tensor Build a)
    packed = CoreOps.pack [multiples, inputShape]
    perm = vector [1, 0 :: Int32]
    splitShape = CoreOps.reshape (CoreOps.transpose packed perm) allDimensions
    axes = CoreOps.range 0 (CoreOps.size splitShape) (2 :: Tensor Build Int32)
    reshapedDz = CoreOps.reshape dz splitShape

opGrad "ResizeBilinear" nodeDef [toT -> x, _] [dz] =
    [ Just $ CoreOps.resizeBilinearGrad'
             (opAttr "align_corners" .~ align)
             (CoreOps.cast dz)
             x
    , Nothing
    ]
  where
    align = lookupAttr nodeDef "align_corners" :: Bool

opGrad "ZerosLike" _ _ _ = [Nothing]
opGrad "Fill" _ _ [dz] = [Nothing, Just $ sum dz rx]
  where
    rx = rangeOfRank dz

-- Treat read ops as an identity function on the variable. This allows us to
-- take gradients w.r.t. to the variable handle instead of the result of a read
-- op. If a variable is read multiple times, the gradients will propagate back
-- through each read.
opGrad "ReadVariableOp" _ _ [dz] = [Just $ expr dz]

opGrad "Const" _ _ _ = [Nothing, Nothing]
opGrad "StopGradient" _ _ _ = [Nothing]
opGrad "VarHandleOp" _ _ _ = []

opGrad "Sqrt" _ [toT -> x] [dz] = [Just $ sq' `CoreOps.mul` dz]
  where
    sq' = scalar 1 `CoreOps.div` (scalar 2 `CoreOps.mul` CoreOps.sqrt x)

-- Fallthrough: fail loudly for ops with no registered gradient.
opGrad n nodeDef ins grads =
    error $ "no gradient implemented for " ++
            show (n, length ins, length grads, showMessage nodeDef, ins)
-- | The number of outputs for an op type.
numOutputs :: NodeDef -> OutputIx
numOutputs o =
    case o ^. op of
        "Abs" -> 1
        "Add" -> 1
        "AddN" -> 1
        "BatchToSpaceND" -> 1
        "BatchMatMul" -> 1
        "Cast" -> 1
        "Const" -> 1
        "Concat" -> 1
        "Conv2D" -> 1
        "Conv2DBackpropInput" -> 1
        "DepthwiseConv2dNative" -> 1
        "DepthwiseConv2dNativeBackpropInput" -> 1
        "Div" -> 1
        "DynamicStitch" -> 1
        "DynamicPartition" ->
            fromIntegral (lookupAttr o "num_partitions" :: Int64)
        "Exp" -> 1
        "ExpandDims" -> 1
        "Gather" -> 1
        "LabelClasses" -> 1
        "LabelWeights" -> 1
        "Log" -> 1
        "MatMul" -> 1
        "Max" -> 1
        "Maximum" -> 1
        "MaxPool" -> 1
        "Mean" -> 1
        "Min" -> 1
        "Mul" -> 1
        "Neg" -> 1
        "Pad" -> 1
        "Placeholder" -> 1
        "StopGradient" -> 1
        "OneHot" -> 1
        "ReadVariableOp" -> 1
        "RefIdentity" -> 1
        "Relu" -> 1
        "ReluGrad" -> 1
        "Reshape" -> 1
        "Select" -> 1
        "Sigmoid" -> 1
        "Size" -> 1
        "Slice" -> 1
        "SoftmaxCrossEntropyWithLogits" -> 2
        "SpaceToBatchND" -> 1
        "SparseSegmentSum" -> 1
        "Square" -> 1
        "Squeeze" -> 1
        "Sqrt" -> 1
        "Sub" -> 1
        "Sum" -> 1
        "Tanh" -> 1
        "Tile" -> 1
        "ResizeBilinear" -> 1
        "Transpose" -> 1
        "TruncatedNormal" -> 1
        "VarHandleOp" -> 1
        "Variable" -> 1
        "ZerosLike" -> 1
        "Fill" -> 1
        _ -> error $ "numOutputs not implemented for " ++ show (o ^. op)
-- Divides `x / y` assuming `x, y >= 0`, treating `0 / 0 = 0`
safeShapeDiv :: Tensor v1 Int32 -> Tensor v2 Int32 -> Tensor Build Int32
safeShapeDiv x y = x `CoreOps.div` (CoreOps.maximum y 1)
-- | Shape vector [-1]: flattens a tensor when used as a reshape argument.
allDimensions :: Tensor Build Int32
allDimensions = vector [-1 :: Int32]
-- | The range [0 .. rank x), as a rank-1 Int32 tensor.
rangeOfRank :: forall v1 t. TensorType t => Tensor v1 t -> Tensor Build Int32
rangeOfRank x = CoreOps.range 0 (CoreOps.rank x) 1
-- | Look up an attribute on a NodeDef, falling back to the proto default
-- value when the attribute is absent.
lookupAttr :: Attribute a1 => NodeDef -> Text -> a1
lookupAttr nodeDef attrName = nodeDef ^. attr . at attrName . non def . attrLens
| null | https://raw.githubusercontent.com/tensorflow/haskell/bfd8de5582891137a50f23e160c1e65eb7d4b931/tensorflow-ops/src/TensorFlow/Gradient.hs | haskell |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
# LANGUAGE ConstraintKinds #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
TODO(fmayle): MaxPoolGrad doesn't support Double for some reason.
TODO(fmayle): Support control flow.
TODO(fmayle): Support gate_gradients-like option to avoid race conditions.
TODO(fmayle): Do we need to consider control inputs? See _PendingCount in
tensorflow/python/ops/gradients.py.
^ The output of the graph.
^ Tensors for which gradients are computed.
The gradients are computed using "reverse accumulation", similarly to
what is described here:
The code is summarised as follows:
gradients to nothing.
after all of its outputs so that the output gradients for a node have
been completely calculated before it is processed):
for each output tensor w).
b. Calculate the gradient of y w.r.t. each of the node's input
tensors using the gradients of the node's output tensors.
Written differently, for each output tensor w and input tensor v:
∂w/∂v = ... (op specific)
∂y/∂v = ∂y/∂w * ∂w/∂v (technically, if tensor v is an input
to multiple nodes, then this is only
part of ∂y/∂v)
The xnodes are not part of this set (unless reachable from another xnode)
Set gradient of y to one.
TODO: nicer
Calculate the gradients of y w.r.t. each node in the graph.
Lookup the gradients for each x.
| Compute a set of nodes reachable from the start nodes
the start nodes are excluded, unless reachable from another start node
NOTE: point-free notation leads to unification problems here
| Incomplete gradients of a node's outputs.
TODO: precache the rendering?
| Data associated with an edge.
Pair of
1. Output index of a tensor from the source node.
| State used for calculating gradients.
TODO(fmayle): Use something like Data.List.Safe.
| Safe version of (!!).
Copy of -3.9.0.2/docs/Control-Lens-Iso.html#v%3anon
TODO: strictness (e.g., foldlM')
| Calculate the gradients for every node in a graph.
Reverse topological sort.
Aggregate the accumulated gradients for this node.
Only consider nodes that are reachable from the inputs to
avoid calculating gradients that won't be used.
Calculate the gradients for each of the node's inputs.
| Calculate the gradients of a node's input tensors.
This is mostly just a wrapper around opGrad.
^ Output gradients of the node.
Create a tensor from an edge (technically an Output, but it seems less
confusing to refer to it as a tensor here).
Input tensors, sorted by input index.
^ Number of outputs.
| Update the pending gradients of a node's inputs.
^ Gradient of each input tensor.
Add to the list of pending gradients for this tensor.
| Create a graph that includes a node and its transitive dependencies.
Control edge
Build a map from node name to outward edges.
The state is the set of visited nodes.
Associate an ID with each node name.
Create the graph.
| Function to compute the gradient of y w.r.t. each input.
Let y be an arbitrary tensor
and [v_0, ..., v_n] be the input tensors of the same node.
[∂y/∂v_0, ..., ∂y/∂v_n] for a particular op type.
computation when all the gradients for something are Nothing).
^ Input tensors.
^ Gradient of y w.r.t. each output tensor.
^ Gradient of y w.r.t. each input tensor.
TODO(fmayle): Assert the type is correct.
| Create a Tensor from an Output.
| Wrapper around `TensorFlow.GenOps.Core.slice` that builds vectors from scalars for
simple slicing operations.
^ __input__
'input' to slice from.
of 'input' to slice. If size is -1, all remaining elements in the dimension
are included in the slice (i.e. this is equivalent to setting
size = input.dim_size(0) - begin).
^ __output__
| Gradient helper for binary component wise operations
See #L329
| The gradient function for an op type.
These implementations should match their python counterparts in:
third_party/tensorflow/python/ops/*_grad.py
x1 of shape s1 = [k1, ..., ki_1, ..., kn]
. . . . .
. . . . .
. . . . .
xm of shape sm = [k1, ..., ki_m, ..., kn]
along dimension i to an output tensor
y of shape sy = [k1, ..., k, ..., kn]
The incoming gradient dy from backpropagation is
simply forwarded split across input tensors yielding dx.
Forwarded gradients have shapes s = [s1, ..., sm].
i: concat dimension. Adjusted modulo n to handle negative indices.
sizes along concatenated dimension
TODO(fmayle): Handle complex numbers.
(for performance reasons?). Will need to put these functions in the Build
monad to replicate that.
TODO(fmayle): The python version uses a better performance implementation
when the shape is known without having to run the graph.
TODO(fmayle): We shouldn't convert the result to a dense tensor. Sparse
tensor support will require some thinking.
TODO(gnezdo): Use colocateWith but it requires Build monad.
Element wise maximum gradient
See #L473
TODO(fmayle): Add fast path when shape is known.
Copies the gradients to all inputs
Not broadcasting
TODO(fmayle): Handle complex numbers.
TODO(fmayle): Handle complex numbers.
TODO(gnezdo): Provide Fractional instance and use '/' instead of div.
For some reason rankx' has an empty shape
Size of column that is sliced from pad pattern
The slice of the pad pattern has the same rank as the pad pattern itself
Gradient for Slice
Some more reshaping is needed to assemble this tensor with the
right dimensions.
For some reason inputRank' has an empty shape
TODO(gnezdo): too permissive, python only allows float types as src_type.
static shape inference, which is unsupported.
TODO: implement support for static shape inference
Treat read ops as an identity function on the variable. This allows us to
take gradients w.r.t. to the variable handle instead of the result of a read
op. If a variable is read multiple times, the gradients will propagate back
through each read.
| The number of outputs for an op type.
Divides `x / y` assuming `x, y >= 0`, treating `0 / 0 = 0` | Copyright 2016 TensorFlow authors .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeApplications #-}
module TensorFlow.Gradient
( GradientCompatible
, gradients
) where
import Control.Monad (forM, zipWithM)
import Control.Monad.State.Strict (State, evalState, gets, modify)
import Data.ByteString (ByteString)
import Data.Complex (Complex)
import Data.ProtoLens.Default(def)
import Data.Int (Int32, Int64)
import Data.Foldable (foldlM)
import Data.List (foldl', sortBy)
import Data.Map.Strict (Map)
import qualified Data.IntSet as IntSet
import Data.Maybe (fromMaybe, maybeToList, mapMaybe)
import Data.Ord (comparing)
import Data.ProtoLens.TextFormat (showMessage)
import Data.Set (Set)
import Data.Text (Text)
import Data.Tuple (swap)
import Lens.Family2 (Lens', view, (&), (^.), (.~), (%~), under)
import Lens.Family2.State.Strict (uses)
import Lens.Family2.Stock (at, intAt)
import Lens.Family2.Unchecked (lens, adapter)
import Prelude hiding (sum, tanh)
import Text.Printf (printf)
import qualified Data.Graph.Inductive.Basic as FGL
import qualified Data.Graph.Inductive.Graph as FGL
import qualified Data.Graph.Inductive.PatriciaTree as FGL
import qualified Data.Graph.Inductive.Query.DFS as FGL
import qualified Data.IntMap.Strict as IntMap
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified TensorFlow.GenOps.Core as CoreOps
import TensorFlow.Build
( MonadBuild
, Build
, build
, renderedNodeDefs
, opDef
, opAttr
, opInputs
)
import TensorFlow.BuildOp
import TensorFlow.Ops
( addN
, broadcastGradientArgs
, expandDims
, fill
, matMul
, matMul'
, reducedShape
, reluGrad
, tanh
, tanhGrad
, reshape
, scalar
, shape
, softmaxCrossEntropyWithLogits
, sum
, sigmoid
, sigmoidGrad
, scalarize
, vector
, zerosLike
)
import TensorFlow.Output
( NodeName(..)
, Output(..)
, OutputIx(..)
, outputIndex
)
import TensorFlow.Tensor
( Tensor(..)
, Value
, render
, expr
, Rendered
, tensorNodeName
, renderedOutput
, renderValue
, ToTensor(..)
)
import TensorFlow.Types (Attribute, OneOf, TensorType, attrLens)
import Proto.Tensorflow.Core.Framework.NodeDef (NodeDef)
import Proto.Tensorflow.Core.Framework.NodeDef_Fields
( attr, input, op, name)
-- | Element types for which gradients can be computed.
type GradientCompatible a =
    (Num a, OneOf '[ Float, Complex Float, Complex Double ] a)
-- TODO(fmayle): Maybe store the gradient functions and numOutputs on the OpDef.

-- | Gradient of @y@ w.r.t. each element of @xs@.
gradients :: forall a v1 t m . ( MonadBuild m
                               , Rendered t
                               , ToTensor t
                               , GradientCompatible a
                               )
          => Tensor v1 a  -- ^ The output of the graph.
          -> [t a]        -- ^ Tensors for which gradients are computed.
          -> m [Tensor Value a]
gradients y xs = build $ do
    -- The gradients are computed using "reverse accumulation", similarly to
    -- what is described here:
    -- https://en.wikipedia.org/wiki/Automatic_differentiation#The_chain_rule.2C_forward_and_reverse_accumulation
    --
    -- The code is summarised as follows:
    --
    -- 1. Create an fgl graph of the relevant nodes (ops) and edges (tensors).
    -- 2. Initialize the gradient of y to 1 (∂y/∂y = 1) and the rest of tensor's
    --    gradients to nothing.
    -- 3. Process the nodes in reverse topological order (i.e. each node comes
    --    after all of its outputs so that the output gradients for a node have
    --    been completely calculated before it is processed):
    --      a. Record the gradient for each of the node's output tensors (∂y/∂w
    --         for each output tensor w).
    --      b. Calculate the gradient of y w.r.t. each of the node's input
    --         tensors using the gradients of the node's output tensors.
    --
    --         Written differently, for each output tensor w and input tensor v:
    --           ∂y/∂w = ... (calculated in previous steps)
    --           ∂w/∂v = ... (op specific)
    --           ∂y/∂v = ∂y/∂w * ∂w/∂v (technically, if tensor v is an input
    --                                  to multiple nodes, then this is only
    --                                  part of ∂y/∂v)
    --
    -- 4. Lookup the recorded gradient for each x in xs.
    y' <- renderValue y
    let yName = tensorNodeName y'
    yOne <- render $ fill (shape y') (scalar 1)
    -- TODO(fmayle): Move this into Build.hs and call it unsafeNodeDefFromName?
    nodeDefLookup :: (NodeName -> NodeDef) <- uses renderedNodeDefs $
        (\f x -> fromMaybe (error $ "no NodeDef found for " ++ show x) (f x))
        . flip Map.lookup
    let (gr, nodeMap) = createGraph yName nodeDefLookup
        xnodes = mapMaybe (\x -> nodeMap ^. (at $ outputNodeName $ renderedOutput x)) xs
        -- make a set of the nodes reachable from the xnodes
        reachableSet = computeReachableSet xnodes gr

    -- Set gradient of y to one.
    let initPending :: Map.Map FGL.Node (PendingGradients a)
        initPending = Map.empty & (at (nodeMap Map.! yName)
                                      . nonEmpty
                                      . outputIxAt (outputIndex $ renderedOutput y')
                                      . nonEmpty
                                      .~ [yOne]
                                  )
    -- Calculate the gradients of y w.r.t. each node in the graph.
    gradientMap <- graphGrads gr reachableSet initPending
    -- Lookup the gradients for each x.
    forM xs $ \x ->
        let Output i xName = renderedOutput x
        in maybe (render $ zerosLike $ toTensor x) return $ do
            n <- nodeMap ^. at xName
            gradientMap ^. at n . nonEmpty . outputIxAt i
-- | Compute a set of nodes reachable from the start nodes.
--
-- The start nodes themselves are excluded, unless reachable from another
-- start node.
computeReachableSet :: [FGL.Node] -> Graph -> IntSet.IntSet
computeReachableSet vs g =
    -- NOTE: point-free notation leads to unification problems here.
    IntSet.fromList $ concatMap (drop 1 . FGL.preorder) (FGL.dff vs g)
-- | Lens into an IntMap keyed by an OutputIx (sans newtype).
outputIxAt :: OutputIx -> Lens' (IntMap.IntMap v) (Maybe v)
outputIxAt x = intAt (unOutputIx x)
-- | Incomplete gradients of a node's outputs.
--
-- The lists represent partial sums. The key is an OutputIx sans newtype.
type PendingGradients a = IntMap.IntMap [Tensor Value a]

-- | Gradients of a node's outputs. The key is an OutputIx sans newtype.
-- TODO: precache the rendering?
type Gradients a = IntMap.IntMap (Tensor Value a)

-- | Graph of TensorFlow operations.
type Graph = FGL.Gr NodeDef EdgeLabel

-- | Data associated with an edge.
--
-- Pair of
--   1. Output index of a tensor from the source node.
--   2. Input index that the tensor connects to on the destination node.
type EdgeLabel = (OutputIx, OutputIx)

-- | State used for calculating gradients.
data GradientsState a = GradientsState
    { _gradientsPending :: !(Map FGL.Node (PendingGradients a))
    , _gradientsResult  :: !(Map FGL.Node (Gradients a))
    }

gradientsPending :: Lens' (GradientsState a) (Map FGL.Node (PendingGradients a))
gradientsPending = lens _gradientsPending (\x y -> x { _gradientsPending = y })

gradientsResult :: Lens' (GradientsState a) (Map FGL.Node (Gradients a))
gradientsResult = lens _gradientsResult (\x y -> x { _gradientsResult = y })
safeIndex :: [a] -> Int -> Maybe a
_ `safeIndex` n | n < 0 = Nothing
[] `safeIndex` _ = Nothing
(x:_) `safeIndex` 0 = Just x
(_:xs) `safeIndex` n = xs `safeIndex` (n-1)
anon :: a -> (a -> Bool) -> Lens' (Maybe a) a
anon a p = under (adapter (fromMaybe a) go) where
go b | p b = Nothing
| otherwise = Just b
non :: Eq a => a -> Lens' (Maybe a) a
non a = anon a (a==)
| Lens that defaults Nothing to .
nonEmpty :: (Monoid (t v), Foldable t) => Lens' (Maybe (t v)) (t v)
nonEmpty = anon mempty null
graphGrads :: forall a. GradientCompatible a
=> Graph
-> IntSet.IntSet
-> Map FGL.Node (PendingGradients a)
^ Initial gradients ( usually just 1 for the node of interest ) .
-> Build (Map FGL.Node (Gradients a))
graphGrads gr reachableSet initPending = view gradientsResult <$> foldlM go initState nodeOrder
where
initState = GradientsState initPending Map.empty
nodeOrder = FGL.topsort . FGL.grev $ gr
go :: GradientsState a -> Int -> Build (GradientsState a)
go state node = do
outputGrads <-
sumPendingGradient (state ^. gradientsPending . at node . nonEmpty)
if null outputGrads
then pure state
else do
let nextState = state & gradientsResult %~ Map.insert node outputGrads
if node `IntSet.member` reachableSet
then do
let ctx = FGL.context gr node
inputGrads <- calculateInputGrads ctx outputGrads gr
pure $ updatePendingGradients ctx inputGrads nextState
else
pure nextState
| Reduce accumulated gradients for each output to one Tensor .
sumPendingGradient :: GradientCompatible a
=> PendingGradients a -> Build (Gradients a)
sumPendingGradient = sequence . IntMap.mapMaybe f
where
f [] = Nothing
f [x] = Just (pure x)
f xs = Just (render $ addN xs)
calculateInputGrads :: forall a. GradientCompatible a
=> FGL.Context NodeDef EdgeLabel
-> Graph
-> Build [Maybe (Tensor Value a)]
calculateInputGrads (inputEdges, _, nodeDef, _) outputGrads gr = do
fullOutGrads <- fullOutputGrads (numOutputs nodeDef) (nodeDefName nodeDef)
outputGrads
traverse (traverse render) $ opGrad (nodeDef ^. op) nodeDef inputTensors fullOutGrads
where
edgeToTensor :: (EdgeLabel, FGL.Node) -> Output
edgeToTensor ((i, _), n) =
case FGL.lab gr n of
Just edgeNodeDef -> Output i (NodeName $ edgeNodeDef ^. name)
Nothing -> error $ "calculateInputGrads: missing input node for "
++ Text.unpack (nodeDef ^. name)
inputTensors = map edgeToTensor $ sortBy (comparing (snd . fst)) inputEdges
| Convert a Map of gradients to a list , with zeros for missing outputs .
fullOutputGrads :: (TensorType a, Num a)
-> NodeName
-> Gradients a
-> Build [Tensor Value a]
fullOutputGrads n o gs =
mapM (\i -> maybe (render $ zero i) return (gs ^. outputIxAt i)) [0..n-1]
where
A tensor of zeros with the same shape as the i'th output .
zero i = zerosLike $ toT (Output i o)
updatePendingGradients :: forall a. (TensorType a, Num a)
=> FGL.Context NodeDef EdgeLabel
-> [Maybe (Tensor Value a)]
-> GradientsState a
-> GradientsState a
updatePendingGradients (inputEdges, _, nodeDef, _) inputGrads initState =
foldl' go initState inputEdges
where
go :: GradientsState a -> (EdgeLabel, FGL.Node) -> GradientsState a
go state ((outIndex, OutputIx inIndex), node) =
case maybeGradient of
Nothing -> state
Just g ->
state & gradientsPending
. at node
. nonEmpty
. outputIxAt outIndex
. nonEmpty
%~ (g:)
where
badSizeErr = error $ printf "updatePendingGradients: bad input index \
\%d for inputGrads of length %d in %s"
inIndex (length inputGrads)
(show (nodeDef ^. name))
maybeGradient = fromMaybe badSizeErr (safeIndex inputGrads inIndex)
createGraph :: NodeName -> (NodeName -> NodeDef)
-> (Graph, Map NodeName FGL.Node)
createGraph nodeName nodeDefLookup = (FGL.nmap nodeDefLookup graph, nodeMap)
where
Parse a tensor name .
parseTensorName :: Text -> Maybe (NodeName, OutputIx)
parseTensorName n
| Text.null n = error "parseTensorName: empty name"
| otherwise =
let (nm, indexStr) = Text.breakOn ":" n
index | Text.null indexStr = 0
| otherwise = read $ Text.unpack $ Text.tail indexStr
in Just (NodeName nm, OutputIx index)
collect :: Maybe (NodeName, OutputIx, OutputIx)
-> NodeName
-> State (Set NodeName)
(Map NodeName [(NodeName, OutputIx, OutputIx)])
collect outgoingEdge nm = do
let nextLookup = Map.singleton nm (maybeToList outgoingEdge)
seen <- gets (Set.member nm)
modify (Set.insert nm)
if seen
then pure nextLookup
else do
let inputs = nodeDefLookup nm ^. input
recurse inIndex (parentName, outIndex) =
collect (Just (nm, outIndex, inIndex)) parentName
subEdgeLookups <-
zipWithM recurse [0..] $ mapMaybe parseTensorName inputs
pure $ Map.unionsWith (++) (nextLookup:subEdgeLookups)
edgeLookup = evalState (collect Nothing nodeName) Set.empty
nodeMap = Map.fromList $ zip (Map.keys edgeLookup) [0..]
graph = FGL.mkGraph (swap <$> Map.toList nodeMap)
[ (nodeMap Map.! n, nodeMap Map.! m, (i, j))
| (n, edges) <- Map.toList edgeLookup
, (m, i, j) <- edges
]
and [ , ... , w_n ] be the output tensors of a node
Given [ ∂y/∂w_0 , ... , ∂y/∂w_n ] and [ v_0 , ... , v_n ] , a GradientFunc computes
A Nothing gradient is equivalent to zero ( but allows for short circuiting
type GradientFunc a = NodeDef
-> [Output]
-> [Tensor Value a]
-> [Maybe (Tensor Build a)]
toT :: Output -> Tensor Build a
toT = Tensor . pure
flatSlice :: forall v1 t . TensorType t
^ _ _ begin _ _ : specifies the offset into the first dimension of
^ _ _ size _ _ : specifies the number of elements of the first dimension
flatSlice t begin size = CoreOps.slice t (vector [begin]) (vector [size])
nodeDefName :: NodeDef -> NodeName
nodeDefName = NodeName . view name
gradForBinaryCwise :: ( OneOf '[ Int32, Int64, Float, Double, Complex Float, Complex Double ] t
)
=> (Tensor v1 t, Tensor v1 t)
-> (Tensor v1 t, Tensor v1 t)
-> [ Maybe (Tensor Build t) ]
gradForBinaryCwise (x, gx) (y, gy) =
[ Just dx
, Just dy ]
where
dx = reshape (sum gx rx) sx
dy = reshape (sum gy ry) sy
sx = shape x
sy = shape y
(rx, ry) = broadcastGradientArgs sx sy
opGrad :: forall a . GradientCompatible a => Text -> GradientFunc a
opGrad "Abs" _ [toT -> x] [dz] = [Just $ expr dz * signum x]
opGrad "Neg" _ [_] [dz] = [Just $ negate $ expr dz]
opGrad "Relu" _ [toT -> x] [dz] = [Just $ reluGrad dz x]
opGrad "ReluGrad" _ [_, toT -> x ] [dz] = [Just $ reluGrad dz x, Just $ CoreOps.zerosLike x]
opGrad "Tanh" _ [toT -> x] [dz] = [Just $ tanhGrad (tanh x) dz]
opGrad "Sigmoid" _ [toT -> x] [dz] = [Just $ sigmoidGrad (sigmoid x) dz]
opGrad "Concat" _ _ix [dy]
Concat concatenates input tensors
x2 of shape s2 = [ k1 , ... , ki_2 , ... , kn ]
where k = sum ki = sum [ ... ,ki_m ]
| m == 1 = Nothing : [Just $ expr dy]
| otherwise = Nothing : map Just (dx `reshapeZip` s)
where
reshapeZip = zipWith reshape
dx = CoreOps.splitV (fromIntegral m) dy ki _i
s :: [Tensor Build Int32]
s = map shape x
x :: [Tensor Build a]
x = map toT $ tail _ix
_i = toT (head _ix) `CoreOps.floorMod` n
i = reshape _i $ vector [1 :: Int32]
ki :: Tensor Build Int32
ki = CoreOps.concat 0 $ map (\t -> CoreOps.slice t i $ vector [1 :: Int32]) s
m = length x
n = CoreOps.rank (head x)
opGrad "Square" _ [toT -> x] [dz] =
TODO(fmayle ): The python code makes dz a control dependency of the 2*x
[Just $ dz `CoreOps.mul` (2 * x)]
opGrad "Gather" _ [toT -> x, toT -> indices] [dz] =
[ Just $ CoreOps.unsortedSegmentSum values indices' numRows
, Nothing
]
where
denseShape = shape (x :: Tensor Build a)
numRows = scalarize $ flatSlice denseShape 0 1
valuesShape = CoreOps.concat 0 [ allDimensions
, flatSlice denseShape 1 (-1)
]
values = reshape dz valuesShape
TODO(fmayle ): This could be either Int32 or Int64 .
indices' = reshape indices allDimensions :: Tensor Build Int32
opGrad "Max" _ [toT -> x, toT -> indices] [dz] =
[Just $ indicators `CoreOps.div` numSelected * dz', Nothing]
where
sx = shape (x :: Tensor Build a)
outputShapeKeptDims = reducedShape sx (indices :: Tensor Build Int32)
y = CoreOps.max x indices
y' = reshape y outputShapeKeptDims
dz' = reshape dz outputShapeKeptDims
indicators = CoreOps.cast $ CoreOps.equal y' x
numSelected = reshape (sum indicators indices) outputShapeKeptDims
Min and have identical gradient implementations .
opGrad "Min" u v w = opGrad "Max" u v w
opGrad "Maximum" _ [toT -> x, toT -> y] [dz] =
gradForBinaryCwise (x, gx) (y, gy)
where
xmask = CoreOps.greaterEqual x y
gx = CoreOps.select xmask dz (CoreOps.zerosLike dz)
gy = CoreOps.select (CoreOps.logicalNot xmask) dz (CoreOps.zerosLike dz)
opGrad "Sum" _ [toT -> x, toT -> indices] [dz] =
[ Just $ CoreOps.tile grad tileScaling, Nothing ]
where
TODO(gnezdo ): Implement the fast - path from math_grad._SumGrad .
sx = shape (x :: Tensor Build a)
outputShapeKeptDims = reducedShape sx (indices :: Tensor Build Int32)
tileScaling = safeShapeDiv sx outputShapeKeptDims
grad = reshape dz outputShapeKeptDims
opGrad "Mean" u v@[toT -> x, _] w =
[Just $ dz `CoreOps.div` (CoreOps.stopGradient $ CoreOps.cast $ factor), Nothing]
where
[Just dz, Nothing] = opGrad "Sum" u v w
inputShape = shape (x :: Tensor Build a)
outputShape = shape (dz :: Tensor Build a)
inputSize = CoreOps.prod inputShape $ rangeOfRank inputShape
outputSize = CoreOps.prod outputShape $ rangeOfRank outputShape
factor = safeShapeDiv inputSize outputSize
opGrad "Add" _ [toT -> x, toT -> y] [dz] =
[ Just $ reshape (sum dz rx) sx
, Just $ reshape (sum dz ry) sy ]
where
sx = shape (x :: Tensor Build a)
sy = shape (y :: Tensor Build a)
(rx, ry) = broadcastGradientArgs sx sy
opGrad "AddN" _ inputs [dz] =
map ((const . Just . expr) dz) inputs
opGrad "Sub" u v w =
[Just x, Just (-y)]
where
[Just x, Just y] = opGrad "Add" u v w
opGrad "SoftmaxCrossEntropyWithLogits" _ [toT -> x, toT -> y] [dz, _] =
[ Just $ expandDims dz (-1) * snd (softmaxCrossEntropyWithLogits x y)
, Nothing ]
opGrad "Mul" _ [toT -> x, toT -> y] [dz] =
[ Just $ reshape (sum (dz `CoreOps.mul` y) rx) sx
, Just $ reshape (sum (x `CoreOps.mul` dz) ry) sy ]
where
sx = shape (x :: Tensor Build a)
sy = shape (y :: Tensor Build a)
(rx, ry) = broadcastGradientArgs sx sy
opGrad "Div" _ [toT -> x, toT -> y] [dz] =
[ Just $ reshape (sum (dz `CoreOps.div` y) rx) sx
, Just $ reshape (sum (dz `CoreOps.mul` (negate x `CoreOps.div` (y * y)))
ry)
sy
]
where
sx = shape (x :: Tensor Build a)
sy = shape (y :: Tensor Build a)
(rx, ry) = broadcastGradientArgs sx sy
opGrad "MatMul" nodeDef [toT -> x, toT -> y] [dz] =
let transposeA = lookupAttr nodeDef "transpose_a"
transposeB = lookupAttr nodeDef "transpose_b"
transAttrs a b =
(opAttr "transpose_a" .~ a) . (opAttr "transpose_b" .~ b)
in case (transposeA, transposeB) of
(False, False) ->
[ Just $ matMul' (transAttrs False True) dz y
, Just $ matMul' (transAttrs True False) x dz]
(False, True) ->
[ Just $ matMul dz y
, Just $ matMul' (transAttrs True False) dz x]
(True, False) ->
[ Just $ matMul' (transAttrs False True) y dz
, Just $ matMul x dz]
(True, True) ->
[ Just $ matMul' (transAttrs True True) y dz
, Just $ matMul' (transAttrs True True) dz x]
opGrad "BatchMatMul" nodeDef [toT -> x, toT -> y] [dz] =
let adjX = lookupAttr nodeDef "adj_x"
adjY = lookupAttr nodeDef "adj_y"
adjAttrs a b =
(opAttr "adj_x" .~ a) . (opAttr "adj_y" .~ b)
in case (adjX, adjY) of
(False, False) ->
[ Just $ CoreOps.batchMatMul' (adjAttrs False True) dz y
, Just $ CoreOps.batchMatMul' (adjAttrs True False) x dz]
(False, True) ->
[ Just $ CoreOps.batchMatMul dz y
, Just $ CoreOps.batchMatMul' (adjAttrs True False) dz x]
(True, False) ->
[ Just $ CoreOps.batchMatMul' (adjAttrs False True) y dz
, Just $ CoreOps.batchMatMul x dz]
(True, True) ->
[ Just $ CoreOps.batchMatMul' (adjAttrs True True) y dz
, Just $ CoreOps.batchMatMul' (adjAttrs True True) dz x]
opGrad "Transpose" _ [_, toT -> p] [dz] =
[ Just $ CoreOps.transpose dz
(CoreOps.invertPermutation p :: Tensor Build Int32)
, Nothing
]
opGrad "Conv2D" nodeDef [toT -> x, toT -> y] [dz] =
[ Just $ CoreOps.conv2DBackpropInput'
((opAttr "strides" .~ strides)
. (opAttr "use_cudnn_on_gpu" .~ useCudnnOnGpu)
. (opAttr "data_format" .~ dataFormat))
padding (shape x) y dz
, Just $ CoreOps.conv2DBackpropFilter'
((opAttr "strides" .~ strides)
. (opAttr "use_cudnn_on_gpu" .~ useCudnnOnGpu)
. (opAttr "data_format" .~ dataFormat))
padding x (shape y) dz
]
where
strides = lookupAttr nodeDef "strides" :: [Int64]
padding = lookupAttr nodeDef "padding" :: ByteString
useCudnnOnGpu = lookupAttr nodeDef "use_cudnn_on_gpu" :: Bool
dataFormat = lookupAttr nodeDef "data_format" :: ByteString
opGrad "Conv2DBackpropInput" nodeDef [_, toT -> x, toT -> y] [dz] =
[ Nothing
, Just $ CoreOps.conv2DBackpropFilter'
((opAttr "strides" .~ strides)
. (opAttr "use_cudnn_on_gpu" .~ useCudnnOnGpu)
. (opAttr "data_format" .~ dataFormat))
padding dz (shape x) y
, Just $ CoreOps.conv2D'
((opAttr "strides" .~ strides)
. (opAttr "use_cudnn_on_gpu" .~ useCudnnOnGpu)
. (opAttr "data_format" .~ dataFormat))
padding dz x
]
where
strides = lookupAttr nodeDef "strides" :: [Int64]
padding = lookupAttr nodeDef "padding" :: ByteString
useCudnnOnGpu = lookupAttr nodeDef "use_cudnn_on_gpu" :: Bool
dataFormat = lookupAttr nodeDef "data_format" :: ByteString
opGrad "DepthwiseConv2dNative" nodeDef [toT -> x, toT -> y] [dz] =
[ Just $ CoreOps.depthwiseConv2dNativeBackpropInput'
((opAttr "strides" .~ strides)
. (opAttr "data_format" .~ dataFormat))
padding (shape x) y dz
, Just $ CoreOps.depthwiseConv2dNativeBackpropFilter'
((opAttr "strides" .~ strides)
. (opAttr "data_format" .~ dataFormat))
padding x (shape y) dz
]
where
strides = lookupAttr nodeDef "strides" :: [Int64]
padding = lookupAttr nodeDef "padding" :: ByteString
dataFormat = lookupAttr nodeDef "data_format" :: ByteString
opGrad "DepthwiseConv2dNativeBackpropInput" nodeDef [_, toT -> x, toT -> y] [dz] =
[ Nothing
, Just $ CoreOps.depthwiseConv2dNativeBackpropFilter'
((opAttr "strides" .~ strides)
. (opAttr "data_format" .~ dataFormat))
padding dz (shape x) y
, Just $ CoreOps.depthwiseConv2dNative'
((opAttr "strides" .~ strides)
. (opAttr "data_format" .~ dataFormat))
padding dz x
]
where
strides = lookupAttr nodeDef "strides" :: [Int64]
padding = lookupAttr nodeDef "padding" :: ByteString
dataFormat = lookupAttr nodeDef "data_format" :: ByteString
opGrad "MaxPool" nodeDef [toT -> x] [dz] =
[ Just $ CoreOps.maxPoolGrad'
((opAttr "ksize" .~ ksize)
. (opAttr "strides" .~ strides)
. (opAttr "data_format" .~ dataFormat))
padding x output dz
]
where
output :: Tensor Build a
output = toT $ Output 0 (nodeDefName nodeDef)
ksize = lookupAttr nodeDef "ksize" :: [Int64]
strides = lookupAttr nodeDef "strides" :: [Int64]
padding = lookupAttr nodeDef "padding" :: ByteString
dataFormat = lookupAttr nodeDef "data_format" :: ByteString
opGrad "Reshape" _ [toT -> x, _] [dz] = [Just $ reshape dz $ shape (x :: Tensor Build a), Nothing]
opGrad "ExpandDims" n xs@[toT -> _, _] dzs@[_] = opGrad "Reshape" n xs dzs
opGrad "Squeeze" _ [toT -> x] [dz] = [Just $ reshape dz $ shape (x :: Tensor Build a)]
opGrad "Pad" _ [toT -> x, toT -> padPattern] [dz] =
[Just $ CoreOps.slice dz gradientSliceBegin gradientSliceSize, Nothing]
where
v1 = vector [1]
rankx' = CoreOps.rank (x :: Tensor Build Float)
rankx = CoreOps.reshape rankx' v1
padPatternSliceSize = CoreOps.concat 0 [rankx, v1]
padPatternSliceBegin = vector [0, 0]
padPatternSliced :: Tensor Build Int32 = CoreOps.slice padPattern padPatternSliceBegin padPatternSliceSize
gradientSliceBegin = CoreOps.reshape padPatternSliced rankx
gradientSliceSize = shape (x :: Tensor Build Float)
Create an Nx2 padding where N is the rank of ( grad of ) Slice and the first
column represents how many zeros are to be prepended for each dimension , and the second
column indicates how many zeros are appended .
The number of zeros to prepend is the shape of the beginvec .
The number of zeros to append is the shape of the inputvec
elementwise - subtracted by both the beginvec and .
opGrad "Slice" _ [toT -> inputvec, toT -> beginvec, _] [dz] =
[Just $ CoreOps.pad dz paddings, Nothing, Nothing]
where
v1 = vector [1 :: Int32]
inputRank' = CoreOps.rank (inputvec :: Tensor Build Float)
inputRank = CoreOps.reshape inputRank' v1
padShape = CoreOps.concat 0 [inputRank, v1]
beforePad = CoreOps.reshape beginvec padShape
afterPad = CoreOps.reshape (shape inputvec - shape dz - beginvec) padShape
paddings = CoreOps.concat 1 [beforePad, afterPad]
TODO : This could be either Int32 or Int64 .
opGrad "BatchToSpaceND" _ [_, toT @Int32 -> blockShape, toT @Int32 -> crops] [dz] =
[Just $ CoreOps.spaceToBatchND dz blockShape crops, Nothing, Nothing]
TODO : This could be either Int32 or Int64 .
opGrad "SpaceToBatchND" _ [_, toT @Int32 -> blockShape, toT @Int32 -> paddings] [dz] =
[Just $ CoreOps.batchToSpaceND dz blockShape paddings, Nothing, Nothing]
opGrad "OneHot" _ _ _ = [Nothing, Nothing, Nothing, Nothing]
opGrad "TruncatedNormal" _ _ _ = [Nothing]
opGrad "RefIdentity" _ _ [dz] = [Just $ expr dz]
opGrad "Cast" nodeDef _ [dz] = [Just reverseCast]
where
reverseCast =
pureOp [] $ pure (opDef "Cast"
& opAttr "DstT" .~ (lookupAttr nodeDef "SrcT" :: ByteString)
& opAttr "SrcT" .~ (lookupAttr nodeDef "DstT" :: ByteString)
& opInputs .~ [renderedOutput dz])
opGrad "DynamicStitch" nodeDef inputs [dz] =
replicate halfLen Nothing ++ valuesGrads
where
halfLen =
let len = length inputs
half = len `div` 2
in if 2 * half == len
then half
else error ("Uneven input size " ++ show (len, showMessage nodeDef))
valuesGrads = [ Just $ CoreOps.gather dz (toT idx :: Tensor Build Int32)
| idx <- take halfLen inputs
]
opGrad "DynamicPartition" nodeDef [toT -> xs, toT -> indices] dz =
[ Just reconstructed, Nothing ]
where
reconstructed = CoreOps.reshape stitched
(CoreOps.shape (xs :: Tensor Build a) :: Tensor Build Int32)
stitched = CoreOps.dynamicStitch partitionedIndices dz
partitionedIndices = CoreOps.dynamicPartition np originalIndices indices
np = lookupAttr nodeDef "num_partitions" :: Int64
originalIndices =
CoreOps.reshape (CoreOps.range 0 (CoreOps.size indices) 1) prefixShape
prefixShape = shapeInt32 indices
shapeInt32 t = CoreOps.shape t :: Tensor Build Int32
opGrad "Select" _ [toT -> c, toT -> x, _] [dz] =
[ Nothing
, Just $ CoreOps.select c dz zeros
, Just $ CoreOps.select c zeros dz
]
where zeros = CoreOps.zerosLike x
TODO(gnezdo ): Unlike Python , no control dependency on dz .
opGrad "Log" _ [toT -> x] [dz] = [ Just $ dz `CoreOps.mul` CoreOps.inv x ]
TODO(gnezdo ): Reuse the output instead of doing another exp ,
though , it is probably CSE'd away anyway .
opGrad "Exp" _ [toT -> x] [dz] = [ Just $ dz `CoreOps.mul` CoreOps.exp x ]
opGrad "SparseSegmentSum" _ [toT -> x, toT -> y, toT -> t] [dz] =
[ Just $ CoreOps.unsortedSegmentSum
(CoreOps.gather dz (t :: Tensor Build Int32))
(y :: Tensor Build Int32) inputRows
, Nothing
, Nothing
]
where inputRows = flatSlice (shape (x :: Tensor Build a)) 0 1
opGrad "LabelClasses" _ _ _ = [Nothing, Nothing]
opGrad "LabelWeights" _ _ _ = [Nothing]
opGrad "Size" _ _ _ = [Nothing]
TODO ( ): Python implementation uses set_shape for
opGrad "Tile" _ [toT -> x, toT -> multiples] [dz] =
[Just inputGrad, Nothing]
where
inputGrad = sum reshapedDz axes
inputShape = shape (x :: Tensor Build a)
packed = CoreOps.pack [multiples, inputShape]
perm = vector [1, 0 :: Int32]
splitShape = CoreOps.reshape (CoreOps.transpose packed perm) allDimensions
axes = CoreOps.range 0 (CoreOps.size splitShape) (2 :: Tensor Build Int32)
reshapedDz = CoreOps.reshape dz splitShape
opGrad "ResizeBilinear" nodeDef [toT -> x, _] [dz] =
[ Just $ CoreOps.resizeBilinearGrad'
(opAttr "align_corners" .~ align)
(CoreOps.cast dz)
x
, Nothing
]
where
align = lookupAttr nodeDef "align_corners" :: Bool
opGrad "ZerosLike" _ _ _ = [Nothing]
opGrad "Fill" _ _ [dz] = [Nothing, Just $ sum dz rx]
where
rx = rangeOfRank dz
opGrad "ReadVariableOp" _ _ [dz] = [Just $ expr dz]
opGrad "Const" _ _ _ = [Nothing, Nothing]
opGrad "StopGradient" _ _ _ = [Nothing]
opGrad "VarHandleOp" _ _ _ = []
opGrad "Sqrt" _ [toT -> x] [dz] = [Just $ sq' `CoreOps.mul` dz]
where
sq' = scalar 1 `CoreOps.div` (scalar 2 `CoreOps.mul` CoreOps.sqrt x)
opGrad n nodeDef ins grads =
error $ "no gradient implemented for " ++
show (n, length ins, length grads, showMessage nodeDef, ins)
numOutputs :: NodeDef -> OutputIx
numOutputs o =
case o ^. op of
"Abs" -> 1
"Add" -> 1
"AddN" -> 1
"BatchToSpaceND" -> 1
"BatchMatMul" -> 1
"Cast" -> 1
"Const" -> 1
"Concat" -> 1
"Conv2D" -> 1
"Conv2DBackpropInput" -> 1
"DepthwiseConv2dNative" -> 1
"DepthwiseConv2dNativeBackpropInput" -> 1
"Div" -> 1
"DynamicStitch" -> 1
"DynamicPartition" ->
fromIntegral (lookupAttr o "num_partitions" :: Int64)
"Exp" -> 1
"ExpandDims" -> 1
"Gather" -> 1
"LabelClasses" -> 1
"LabelWeights" -> 1
"Log" -> 1
"MatMul" -> 1
"Max" -> 1
"Maximum" -> 1
"MaxPool" -> 1
"Mean" -> 1
"Min" -> 1
"Mul" -> 1
"Neg" -> 1
"Pad" -> 1
"Placeholder" -> 1
"StopGradient" -> 1
"OneHot" -> 1
"ReadVariableOp" -> 1
"RefIdentity" -> 1
"Relu" -> 1
"ReluGrad" -> 1
"Reshape" -> 1
"Select" -> 1
"Sigmoid" -> 1
"Size" -> 1
"Slice" -> 1
"SoftmaxCrossEntropyWithLogits" -> 2
"SpaceToBatchND" -> 1
"SparseSegmentSum" -> 1
"Square" -> 1
"Squeeze" -> 1
"Sqrt" -> 1
"Sub" -> 1
"Sum" -> 1
"Tanh" -> 1
"Tile" -> 1
"ResizeBilinear" -> 1
"Transpose" -> 1
"TruncatedNormal" -> 1
"VarHandleOp" -> 1
"Variable" -> 1
"ZerosLike" -> 1
"Fill" -> 1
_ -> error $ "numOutputs not implemented for " ++ show (o ^. op)
safeShapeDiv :: Tensor v1 Int32 -> Tensor v2 Int32 -> Tensor Build Int32
safeShapeDiv x y = x `CoreOps.div` (CoreOps.maximum y 1)
allDimensions :: Tensor Build Int32
allDimensions = vector [-1 :: Int32]
rangeOfRank :: forall v1 t. TensorType t => Tensor v1 t -> Tensor Build Int32
rangeOfRank x = CoreOps.range 0 (CoreOps.rank x) 1
lookupAttr :: Attribute a1 => NodeDef -> Text -> a1
lookupAttr nodeDef attrName = nodeDef ^. attr . at attrName . non def . attrLens
|
5d74d4287640b9d8b339f15b19aeecc797a3ce7250bf19b65e3d1b072f925dd3 | bittide/bittide-hardware | StabilityChecker.hs | SPDX - FileCopyrightText : 2022 Google LLC
--
SPDX - License - Identifier : Apache-2.0
{-# LANGUAGE GADTs #-}
module Bittide.ClockControl.StabilityChecker where
import Clash.Prelude
import Bittide.ClockControl (targetDataCount)
import Clash.Sized.Extra
-- | Checks whether the @Signal@ of buffer occupancies from an elastic buffer is stable.
-- The @Signal@ is considered stable if it stays within a @margin@ of the target buffer
-- occupancy for @cyclesStable@ number of cycles. The next target is set to the current
-- buffer occupancy when the current buffer occupancy is not within margin of
-- the target.
stabilityChecker ::
forall dom margin cyclesStable n .
(HiddenClockResetEnable dom, 1 <= cyclesStable, KnownNat n) =>
-- | Maximum number of elements the incoming buffer occupancy is allowed to deviate
-- from the current @target@ for it to be considered "stable".
SNat margin ->
-- | Minimum number of clock cycles the incoming buffer occupancy must remain within the
-- @margin@ for it to be considered "stable".
SNat cyclesStable ->
-- | Incoming buffer occupancy.
Signal dom (Unsigned n) ->
-- | Stability indicator.
Signal dom Bool
stabilityChecker SNat SNat = mealy go (0, targetDataCount)
where
go (cnt, target) input = (newState, isStable)
where
withinMargin =
abs (unsignedToSigned target `sub` unsignedToSigned input) <= (natToNum @margin)
newState :: (Index (cyclesStable + 1), Unsigned n)
newState
| withinMargin = (satSucc SatBound cnt, target)
| otherwise = (0, input)
isStable = withinMargin && cnt == maxBound
| null | https://raw.githubusercontent.com/bittide/bittide-hardware/b44dac8ee0fb14b0c6a94fcbe830fdd8d140bec4/bittide/src/Bittide/ClockControl/StabilityChecker.hs | haskell |
# LANGUAGE GADTs #
| Checks whether the @Signal@ of buffer occupancies from an elastic buffer is stable.
The @Signal@ is considered stable if it stays within a @margin@ of the target buffer
occupancy for @cyclesStable@ number of cycles. The next target is set to the current
buffer occupancy when the current buffer occupancy is not within margin of
the target.
| Maximum number of elements the incoming buffer occupancy is allowed to deviate
from the current @target@ for it to be considered "stable".
| Minimum number of clock cycles the incoming buffer occupancy must remain within the
@margin@ for it to be considered "stable".
| Incoming buffer occupancy.
| Stability indicator. | SPDX - FileCopyrightText : 2022 Google LLC
SPDX - License - Identifier : Apache-2.0
module Bittide.ClockControl.StabilityChecker where
import Clash.Prelude
import Bittide.ClockControl (targetDataCount)
import Clash.Sized.Extra
stabilityChecker ::
forall dom margin cyclesStable n .
(HiddenClockResetEnable dom, 1 <= cyclesStable, KnownNat n) =>
SNat margin ->
SNat cyclesStable ->
Signal dom (Unsigned n) ->
Signal dom Bool
stabilityChecker SNat SNat = mealy go (0, targetDataCount)
where
go (cnt, target) input = (newState, isStable)
where
withinMargin =
abs (unsignedToSigned target `sub` unsignedToSigned input) <= (natToNum @margin)
newState :: (Index (cyclesStable + 1), Unsigned n)
newState
| withinMargin = (satSucc SatBound cnt, target)
| otherwise = (0, input)
isStable = withinMargin && cnt == maxBound
|
d37836af4774202fcc7e78761eeeb3960a0d88f4413cda95875594b497d30182 | masashi-y/abduction_kbc | message_pb.ml | [@@@ocaml.warning "-27-30-39"]
type predicate_mutable = {
mutable str : string;
mutable nargs : int;
}
let default_predicate_mutable () : predicate_mutable = {
str = "";
nargs = 0;
}
type candidate_mutable = {
mutable pred1 : Message_types.predicate option;
mutable pred2 : Message_types.predicate option;
mutable rel : string;
mutable score : float;
}
let default_candidate_mutable () : candidate_mutable = {
pred1 = None;
pred2 = None;
rel = "";
score = 0.;
}
type rank_mutable = {
mutable list : Message_types.candidate list;
}
let default_rank_mutable () : rank_mutable = {
list = [];
}
type echo_mutable = {
mutable msg : string;
mutable rank : Message_types.rank option;
}
let default_echo_mutable () : echo_mutable = {
msg = "";
rank = None;
}
let rec decode_predicate d =
let v = default_predicate_mutable () in
let continue__= ref true in
while !continue__ do
match Pbrt.Decoder.key d with
| None -> (
); continue__ := false
| Some (1, Pbrt.Bytes) -> begin
v.str <- Pbrt.Decoder.string d;
end
| Some (1, pk) ->
Pbrt.Decoder.unexpected_payload "Message(predicate), field(1)" pk
| Some (2, Pbrt.Varint) -> begin
v.nargs <- Pbrt.Decoder.int_as_varint d;
end
| Some (2, pk) ->
Pbrt.Decoder.unexpected_payload "Message(predicate), field(2)" pk
| Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind
done;
({
Message_types.str = v.str;
Message_types.nargs = v.nargs;
} : Message_types.predicate)
let rec decode_candidate d =
let v = default_candidate_mutable () in
let continue__= ref true in
while !continue__ do
match Pbrt.Decoder.key d with
| None -> (
); continue__ := false
| Some (1, Pbrt.Bytes) -> begin
v.pred1 <- Some (decode_predicate (Pbrt.Decoder.nested d));
end
| Some (1, pk) ->
Pbrt.Decoder.unexpected_payload "Message(candidate), field(1)" pk
| Some (2, Pbrt.Bytes) -> begin
v.pred2 <- Some (decode_predicate (Pbrt.Decoder.nested d));
end
| Some (2, pk) ->
Pbrt.Decoder.unexpected_payload "Message(candidate), field(2)" pk
| Some (3, Pbrt.Bytes) -> begin
v.rel <- Pbrt.Decoder.string d;
end
| Some (3, pk) ->
Pbrt.Decoder.unexpected_payload "Message(candidate), field(3)" pk
| Some (4, Pbrt.Bits64) -> begin
v.score <- Pbrt.Decoder.float_as_bits64 d;
end
| Some (4, pk) ->
Pbrt.Decoder.unexpected_payload "Message(candidate), field(4)" pk
| Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind
done;
({
Message_types.pred1 = v.pred1;
Message_types.pred2 = v.pred2;
Message_types.rel = v.rel;
Message_types.score = v.score;
} : Message_types.candidate)
let rec decode_rank d =
let v = default_rank_mutable () in
let continue__= ref true in
while !continue__ do
match Pbrt.Decoder.key d with
| None -> (
v.list <- List.rev v.list;
); continue__ := false
| Some (1, Pbrt.Bytes) -> begin
v.list <- (decode_candidate (Pbrt.Decoder.nested d)) :: v.list;
end
| Some (1, pk) ->
Pbrt.Decoder.unexpected_payload "Message(rank), field(1)" pk
| Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind
done;
({
Message_types.list = v.list;
} : Message_types.rank)
let rec decode_echo d =
let v = default_echo_mutable () in
let continue__= ref true in
while !continue__ do
match Pbrt.Decoder.key d with
| None -> (
); continue__ := false
| Some (1, Pbrt.Bytes) -> begin
v.msg <- Pbrt.Decoder.string d;
end
| Some (1, pk) ->
Pbrt.Decoder.unexpected_payload "Message(echo), field(1)" pk
| Some (2, Pbrt.Bytes) -> begin
v.rank <- Some (decode_rank (Pbrt.Decoder.nested d));
end
| Some (2, pk) ->
Pbrt.Decoder.unexpected_payload "Message(echo), field(2)" pk
| Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind
done;
({
Message_types.msg = v.msg;
Message_types.rank = v.rank;
} : Message_types.echo)
let rec encode_predicate (v:Message_types.predicate) encoder =
Pbrt.Encoder.key (1, Pbrt.Bytes) encoder;
Pbrt.Encoder.string v.Message_types.str encoder;
Pbrt.Encoder.key (2, Pbrt.Varint) encoder;
Pbrt.Encoder.int_as_varint v.Message_types.nargs encoder;
()
let rec encode_candidate (v:Message_types.candidate) encoder =
begin match v.Message_types.pred1 with
| Some x ->
Pbrt.Encoder.key (1, Pbrt.Bytes) encoder;
Pbrt.Encoder.nested (encode_predicate x) encoder;
| None -> ();
end;
begin match v.Message_types.pred2 with
| Some x ->
Pbrt.Encoder.key (2, Pbrt.Bytes) encoder;
Pbrt.Encoder.nested (encode_predicate x) encoder;
| None -> ();
end;
Pbrt.Encoder.key (3, Pbrt.Bytes) encoder;
Pbrt.Encoder.string v.Message_types.rel encoder;
Pbrt.Encoder.key (4, Pbrt.Bits64) encoder;
Pbrt.Encoder.float_as_bits64 v.Message_types.score encoder;
()
let rec encode_rank (v:Message_types.rank) encoder =
List.iter (fun x ->
Pbrt.Encoder.key (1, Pbrt.Bytes) encoder;
Pbrt.Encoder.nested (encode_candidate x) encoder;
) v.Message_types.list;
()
let rec encode_echo (v:Message_types.echo) encoder =
Pbrt.Encoder.key (1, Pbrt.Bytes) encoder;
Pbrt.Encoder.string v.Message_types.msg encoder;
begin match v.Message_types.rank with
| Some x ->
Pbrt.Encoder.key (2, Pbrt.Bytes) encoder;
Pbrt.Encoder.nested (encode_rank x) encoder;
| None -> ();
end;
()
| null | https://raw.githubusercontent.com/masashi-y/abduction_kbc/cee8c6fbd7cd246397243f879b9c984f6762e5d7/src/message_pb.ml | ocaml | [@@@ocaml.warning "-27-30-39"]
type predicate_mutable = {
mutable str : string;
mutable nargs : int;
}
let default_predicate_mutable () : predicate_mutable = {
str = "";
nargs = 0;
}
type candidate_mutable = {
mutable pred1 : Message_types.predicate option;
mutable pred2 : Message_types.predicate option;
mutable rel : string;
mutable score : float;
}
let default_candidate_mutable () : candidate_mutable = {
pred1 = None;
pred2 = None;
rel = "";
score = 0.;
}
type rank_mutable = {
mutable list : Message_types.candidate list;
}
let default_rank_mutable () : rank_mutable = {
list = [];
}
type echo_mutable = {
mutable msg : string;
mutable rank : Message_types.rank option;
}
let default_echo_mutable () : echo_mutable = {
msg = "";
rank = None;
}
let rec decode_predicate d =
let v = default_predicate_mutable () in
let continue__= ref true in
while !continue__ do
match Pbrt.Decoder.key d with
| None -> (
); continue__ := false
| Some (1, Pbrt.Bytes) -> begin
v.str <- Pbrt.Decoder.string d;
end
| Some (1, pk) ->
Pbrt.Decoder.unexpected_payload "Message(predicate), field(1)" pk
| Some (2, Pbrt.Varint) -> begin
v.nargs <- Pbrt.Decoder.int_as_varint d;
end
| Some (2, pk) ->
Pbrt.Decoder.unexpected_payload "Message(predicate), field(2)" pk
| Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind
done;
({
Message_types.str = v.str;
Message_types.nargs = v.nargs;
} : Message_types.predicate)
let rec decode_candidate d =
let v = default_candidate_mutable () in
let continue__= ref true in
while !continue__ do
match Pbrt.Decoder.key d with
| None -> (
); continue__ := false
| Some (1, Pbrt.Bytes) -> begin
v.pred1 <- Some (decode_predicate (Pbrt.Decoder.nested d));
end
| Some (1, pk) ->
Pbrt.Decoder.unexpected_payload "Message(candidate), field(1)" pk
| Some (2, Pbrt.Bytes) -> begin
v.pred2 <- Some (decode_predicate (Pbrt.Decoder.nested d));
end
| Some (2, pk) ->
Pbrt.Decoder.unexpected_payload "Message(candidate), field(2)" pk
| Some (3, Pbrt.Bytes) -> begin
v.rel <- Pbrt.Decoder.string d;
end
| Some (3, pk) ->
Pbrt.Decoder.unexpected_payload "Message(candidate), field(3)" pk
| Some (4, Pbrt.Bits64) -> begin
v.score <- Pbrt.Decoder.float_as_bits64 d;
end
| Some (4, pk) ->
Pbrt.Decoder.unexpected_payload "Message(candidate), field(4)" pk
| Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind
done;
({
Message_types.pred1 = v.pred1;
Message_types.pred2 = v.pred2;
Message_types.rel = v.rel;
Message_types.score = v.score;
} : Message_types.candidate)
let rec decode_rank d =
let v = default_rank_mutable () in
let continue__= ref true in
while !continue__ do
match Pbrt.Decoder.key d with
| None -> (
v.list <- List.rev v.list;
); continue__ := false
| Some (1, Pbrt.Bytes) -> begin
v.list <- (decode_candidate (Pbrt.Decoder.nested d)) :: v.list;
end
| Some (1, pk) ->
Pbrt.Decoder.unexpected_payload "Message(rank), field(1)" pk
| Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind
done;
({
Message_types.list = v.list;
} : Message_types.rank)
let rec decode_echo d =
let v = default_echo_mutable () in
let continue__= ref true in
while !continue__ do
match Pbrt.Decoder.key d with
| None -> (
); continue__ := false
| Some (1, Pbrt.Bytes) -> begin
v.msg <- Pbrt.Decoder.string d;
end
| Some (1, pk) ->
Pbrt.Decoder.unexpected_payload "Message(echo), field(1)" pk
| Some (2, Pbrt.Bytes) -> begin
v.rank <- Some (decode_rank (Pbrt.Decoder.nested d));
end
| Some (2, pk) ->
Pbrt.Decoder.unexpected_payload "Message(echo), field(2)" pk
| Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind
done;
({
Message_types.msg = v.msg;
Message_types.rank = v.rank;
} : Message_types.echo)
let rec encode_predicate (v:Message_types.predicate) encoder =
Pbrt.Encoder.key (1, Pbrt.Bytes) encoder;
Pbrt.Encoder.string v.Message_types.str encoder;
Pbrt.Encoder.key (2, Pbrt.Varint) encoder;
Pbrt.Encoder.int_as_varint v.Message_types.nargs encoder;
()
let rec encode_candidate (v:Message_types.candidate) encoder =
begin match v.Message_types.pred1 with
| Some x ->
Pbrt.Encoder.key (1, Pbrt.Bytes) encoder;
Pbrt.Encoder.nested (encode_predicate x) encoder;
| None -> ();
end;
begin match v.Message_types.pred2 with
| Some x ->
Pbrt.Encoder.key (2, Pbrt.Bytes) encoder;
Pbrt.Encoder.nested (encode_predicate x) encoder;
| None -> ();
end;
Pbrt.Encoder.key (3, Pbrt.Bytes) encoder;
Pbrt.Encoder.string v.Message_types.rel encoder;
Pbrt.Encoder.key (4, Pbrt.Bits64) encoder;
Pbrt.Encoder.float_as_bits64 v.Message_types.score encoder;
()
let rec encode_rank (v:Message_types.rank) encoder =
List.iter (fun x ->
Pbrt.Encoder.key (1, Pbrt.Bytes) encoder;
Pbrt.Encoder.nested (encode_candidate x) encoder;
) v.Message_types.list;
()
let rec encode_echo (v:Message_types.echo) encoder =
Pbrt.Encoder.key (1, Pbrt.Bytes) encoder;
Pbrt.Encoder.string v.Message_types.msg encoder;
begin match v.Message_types.rank with
| Some x ->
Pbrt.Encoder.key (2, Pbrt.Bytes) encoder;
Pbrt.Encoder.nested (encode_rank x) encoder;
| None -> ();
end;
()
| |
34decc4a876a34e202fb016d1fa95778cfc621fd5025176e52d127ee5866bc4b | basho/riak_kv | riak_kv_index_fsm_sup.erl | %% -------------------------------------------------------------------
%%
%% riak_kv_index_fsm_sup: supervise the riak_kv index state machines.
%%
Copyright ( c ) 2007 - 2011 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc supervise the riak_kv index state machines used to
%% process secondary index queries.
-module(riak_kv_index_fsm_sup).
-behaviour(supervisor).
-export([start_index_fsm/2]).
-export([start_link/0]).
-export([init/1]).
start_index_fsm(Node, Args) ->
case supervisor:start_child({?MODULE, Node}, Args) of
{ok, Pid} ->
ok = riak_kv_stat:update({index_create, Pid}),
{ok, Pid};
Error ->
ok = riak_kv_stat:update(index_create_error),
Error
end.
( ) - > ServerRet
%% @doc API for starting the supervisor.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
@spec init ( [ ] ) - > SupervisorTree
%% @doc supervisor callback.
init([]) ->
IndexFsmSpec = {undefined,
{riak_core_coverage_fsm, start_link, [riak_kv_index_fsm]},
temporary, 5000, worker, [riak_kv_index_fsm]},
{ok, {{simple_one_for_one, 10, 10}, [IndexFsmSpec]}}.
| null | https://raw.githubusercontent.com/basho/riak_kv/aeef1591704d32230b773d952a2f1543cbfa1889/src/riak_kv_index_fsm_sup.erl | erlang | -------------------------------------------------------------------
riak_kv_index_fsm_sup: supervise the riak_kv index state machines.
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
@doc supervise the riak_kv index state machines used to
process secondary index queries.
@doc API for starting the supervisor.
@doc supervisor callback. | Copyright ( c ) 2007 - 2011 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_kv_index_fsm_sup).
-behaviour(supervisor).
-export([start_index_fsm/2]).
-export([start_link/0]).
-export([init/1]).
start_index_fsm(Node, Args) ->
case supervisor:start_child({?MODULE, Node}, Args) of
{ok, Pid} ->
ok = riak_kv_stat:update({index_create, Pid}),
{ok, Pid};
Error ->
ok = riak_kv_stat:update(index_create_error),
Error
end.
( ) - > ServerRet
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
@spec init ( [ ] ) - > SupervisorTree
init([]) ->
IndexFsmSpec = {undefined,
{riak_core_coverage_fsm, start_link, [riak_kv_index_fsm]},
temporary, 5000, worker, [riak_kv_index_fsm]},
{ok, {{simple_one_for_one, 10, 10}, [IndexFsmSpec]}}.
|
420be27af9d10244548ecbb0683d279a76921213fd48a435c889af3e90540753 | aantron/luv | pid.mli | This file is part of Luv , released under the MIT license . See LICENSE.md for
details , or visit .
details, or visit . *)
val getpid : unit -> int
* Evaluates to the pid of the current process .
Binds { { : #c.uv_os_getpid }
[ uv_os_getpid ] } . See { { : -pages/man3/getpid.3p.html }
[ getpid(3p ) ] } .
Requires libuv 1.18.0 .
{ { ! Luv . Require } Feature check } : [ Luv . Require.(has os_getpid ) ]
Binds {{:#c.uv_os_getpid}
[uv_os_getpid]}. See {{:-pages/man3/getpid.3p.html}
[getpid(3p)]}.
Requires libuv 1.18.0.
{{!Luv.Require} Feature check}: [Luv.Require.(has os_getpid)] *)
val getppid : unit -> int
* Evaluates to the pid of the parent process .
Binds { { : #c.uv_os_getppid }
[ uv_os_getppid ] } . See
{ { : } [ getppid(3p ) ] } .
Requires libuv 1.16.0 .
{ { ! Luv . Require } Feature check } : [ Luv . Require.(has os_getppid ) ]
Binds {{:#c.uv_os_getppid}
[uv_os_getppid]}. See
{{:-pages/man3/getppid.3p.html} [getppid(3p)]}.
Requires libuv 1.16.0.
{{!Luv.Require} Feature check}: [Luv.Require.(has os_getppid)] *)
| null | https://raw.githubusercontent.com/aantron/luv/4b49d3edad2179c76d685500edf1b44f61ec4be8/src/pid.mli | ocaml | This file is part of Luv , released under the MIT license . See LICENSE.md for
details , or visit .
details, or visit . *)
val getpid : unit -> int
* Evaluates to the pid of the current process .
Binds { { : #c.uv_os_getpid }
[ uv_os_getpid ] } . See { { : -pages/man3/getpid.3p.html }
[ getpid(3p ) ] } .
Requires libuv 1.18.0 .
{ { ! Luv . Require } Feature check } : [ Luv . Require.(has os_getpid ) ]
Binds {{:#c.uv_os_getpid}
[uv_os_getpid]}. See {{:-pages/man3/getpid.3p.html}
[getpid(3p)]}.
Requires libuv 1.18.0.
{{!Luv.Require} Feature check}: [Luv.Require.(has os_getpid)] *)
val getppid : unit -> int
* Evaluates to the pid of the parent process .
Binds { { : #c.uv_os_getppid }
[ uv_os_getppid ] } . See
{ { : } [ getppid(3p ) ] } .
Requires libuv 1.16.0 .
{ { ! Luv . Require } Feature check } : [ Luv . Require.(has os_getppid ) ]
Binds {{:#c.uv_os_getppid}
[uv_os_getppid]}. See
{{:-pages/man3/getppid.3p.html} [getppid(3p)]}.
Requires libuv 1.16.0.
{{!Luv.Require} Feature check}: [Luv.Require.(has os_getppid)] *)
| |
2d8304e52b61c1be3a66ab5a8d2288e0afa6e976d484c636eeaddcf39c792ff7 | compufox/trivial-gamekit-ui | helpers.lisp | (in-package :gamekit.ui)
(defun ui-mouse-handler (x y)
(setf *mouse-position* (vec2 x y)))
(defun ui-click-handler ()
(setf *mouse-clicked* t))
(defun ui-release-handler ()
(setf *mouse-clicked* nil))
;; gamekit class/state stuff
(defclass with-ui ()
((ui :initform (make-hash-table)))
(:documentation "represents trivial gamekit game/state that has a UI"))
(defmethod ui-element ((this with-ui) elt)
"get a widget from THIS identified by ELT"
(gethash elt (slot-value this 'ui)))
(defmethod (setf ui-element) (value (this with-ui) elt)
(setf (gethash elt (slot-value this 'ui)) value))
(defmethod draw-ui ((this with-ui))
"iterate over all ui-elements of THIS and draw them"
(loop :for w :being :the :hash-value :of (slot-value this 'ui)
:do (draw-widget w)))
(defmacro initialize-ui (state &rest forms)
"macro that allows you to set multiple widgets in STATE-OR-GAME at once
each form in FORMS should be (IDENTIFIER WIDGET)"
`(progn
,@(loop :for (k . v) :in forms
:collect `(setf (ui-element ,state ,k) ,@v))))
| null | https://raw.githubusercontent.com/compufox/trivial-gamekit-ui/e35aff114538b6e0f780b9a14b34dc59d28ecab5/helpers.lisp | lisp | gamekit class/state stuff
| (in-package :gamekit.ui)
(defun ui-mouse-handler (x y)
(setf *mouse-position* (vec2 x y)))
(defun ui-click-handler ()
(setf *mouse-clicked* t))
(defun ui-release-handler ()
(setf *mouse-clicked* nil))
(defclass with-ui ()
((ui :initform (make-hash-table)))
(:documentation "represents trivial gamekit game/state that has a UI"))
(defmethod ui-element ((this with-ui) elt)
"get a widget from THIS identified by ELT"
(gethash elt (slot-value this 'ui)))
(defmethod (setf ui-element) (value (this with-ui) elt)
(setf (gethash elt (slot-value this 'ui)) value))
(defmethod draw-ui ((this with-ui))
"iterate over all ui-elements of THIS and draw them"
(loop :for w :being :the :hash-value :of (slot-value this 'ui)
:do (draw-widget w)))
(defmacro initialize-ui (state &rest forms)
"macro that allows you to set multiple widgets in STATE-OR-GAME at once
each form in FORMS should be (IDENTIFIER WIDGET)"
`(progn
,@(loop :for (k . v) :in forms
:collect `(setf (ui-element ,state ,k) ,@v))))
|
5e754c762cdf1290429aa934c2979ad448fb4523ecc70aaef6531ccad7f5afad | city41/reagent-scroll-demo | handler.clj | (ns scroll-demo.handler
(:require [compojure.core :refer [GET defroutes]]
[compojure.route :refer [not-found resources]]
[ring.middleware.defaults :refer [site-defaults wrap-defaults]]
[selmer.parser :refer [render-file]]
[prone.middleware :refer [wrap-exceptions]]
[environ.core :refer [env]]))
(defroutes routes
(GET "/" [] (render-file "templates/index.html" {:dev (env :dev?)}))
(resources "/")
(not-found "Not Found"))
(def app
(let [handler (wrap-defaults routes site-defaults)]
(if (env :dev?) (wrap-exceptions handler) handler)))
| null | https://raw.githubusercontent.com/city41/reagent-scroll-demo/98af21a9f8d403f59bffe4c8a630ab32eed0e33f/src/clj/scroll_demo/handler.clj | clojure | (ns scroll-demo.handler
(:require [compojure.core :refer [GET defroutes]]
[compojure.route :refer [not-found resources]]
[ring.middleware.defaults :refer [site-defaults wrap-defaults]]
[selmer.parser :refer [render-file]]
[prone.middleware :refer [wrap-exceptions]]
[environ.core :refer [env]]))
(defroutes routes
(GET "/" [] (render-file "templates/index.html" {:dev (env :dev?)}))
(resources "/")
(not-found "Not Found"))
(def app
(let [handler (wrap-defaults routes site-defaults)]
(if (env :dev?) (wrap-exceptions handler) handler)))
| |
cc02d5a95bb303b70ebff2ec926147a8f29cd16158f5b9c890ba49fc5f679e5b | janestreet/bonsai | automator.mli | open! Core
open! Bonsai_web
(** Given values and effects for manipulating the other components in
the app, [Automator.component] will run through testing scenarios,
pausing after every frame to see if the results can be validated. *)
val component
: is_running:bool Value.t
-> reset_all:unit Effect.t Value.t
-> step:unit Effect.t Value.t
-> is_done:bool Value.t
-> unit Computation.t
| null | https://raw.githubusercontent.com/janestreet/bonsai/4baeedc75bf73a0915e04dc02d8a49b78779e9b0/examples/node_with_map_children/automator.mli | ocaml | * Given values and effects for manipulating the other components in
the app, [Automator.component] will run through testing scenarios,
pausing after every frame to see if the results can be validated. | open! Core
open! Bonsai_web
val component
: is_running:bool Value.t
-> reset_all:unit Effect.t Value.t
-> step:unit Effect.t Value.t
-> is_done:bool Value.t
-> unit Computation.t
|
4b4c160a74967b16a353faae4571cd9aef8b77d64907b2be35f9f84be83f9b2a | gergoerdi/clash-compucolor2 | TMS5501.hs | # LANGUAGE ViewPatterns , LambdaCase , RecordWildCards #
# LANGUAGE NumericUnderscores #
module Hardware.Compucolor2.TMS5501
( Ctl.Port
, Input(..)
, Output(..)
, tms5501
, UART.SlowRate
, UART.FastRate
) where
import Clash.Prelude
import RetroClash.Utils
import RetroClash.Port
import RetroClash.Barbies
import RetroClash.Clock
import Hardware.Intel8080 (Value)
import qualified Hardware.Compucolor2.TMS5501.UART as UART
import qualified Hardware.Compucolor2.TMS5501.Controller as Ctl
import Barbies.TH
import Data.Tuple.Curry
declareBareB [d|
data Input = MkInput
{ parallelIn :: BitVector 8
, sensor :: Bit
, serialIn :: Bit
, ack :: Bool
, turbo :: Bool
} |]
declareBareB [d|
data Output = MkOutput
{ parallelOut :: BitVector 8
, serialOut :: Bit
, interruptRequest :: Bool
, rst :: Maybe Value
} |]
tms5501
:: forall dom. (HiddenClockResetEnable dom, KnownNat (DomainPeriod dom), 1 <= DomainPeriod dom)
=> Signals dom Input
-> Signal dom (Maybe (PortCommand Ctl.Port Value))
-> ( Signal dom (Maybe Value)
, Signals dom Output
)
tms5501 MkInput{..} cmd = (dataOut, out)
where
out = MkOutput{..}
(dataOut, unbundle -> Ctl.MkOutput{..}) =
mealyStateB (uncurryN Ctl.controller) Ctl.initS (bbundle Ctl.MkInput{..}, cmd)
interruptRequest = delay False irq
rst = delay Nothing int
fastTick = risePeriod (SNat @(Microseconds 8))
slowTick = riseEveryWhen (SNat @8) fastTick
tick = mux (delay False fast) fastTick slowTick
sensorTrigger = isRising low sensor
inputTrigger = isRising low $ msb <$> parallelIn
(rxResult, rxFlags) =
mealyStateB (uncurryN $ UART.uartRx (SNat @(DomainPeriod dom))) UART.initRxS (fast, turbo, serialIn, delay False rxReset)
(serialOut, txReady) =
mealyStateB (uncurryN $ UART.uartTx (SNat @(DomainPeriod dom))) UART.initTxS (fast, turbo, txNew, delay False txBreak)
| null | https://raw.githubusercontent.com/gergoerdi/clash-compucolor2/e5d6835918d25d7fcf9f0a9d7d381a1220331452/src/Hardware/Compucolor2/TMS5501.hs | haskell | # LANGUAGE ViewPatterns , LambdaCase , RecordWildCards #
# LANGUAGE NumericUnderscores #
module Hardware.Compucolor2.TMS5501
( Ctl.Port
, Input(..)
, Output(..)
, tms5501
, UART.SlowRate
, UART.FastRate
) where
import Clash.Prelude
import RetroClash.Utils
import RetroClash.Port
import RetroClash.Barbies
import RetroClash.Clock
import Hardware.Intel8080 (Value)
import qualified Hardware.Compucolor2.TMS5501.UART as UART
import qualified Hardware.Compucolor2.TMS5501.Controller as Ctl
import Barbies.TH
import Data.Tuple.Curry
declareBareB [d|
data Input = MkInput
{ parallelIn :: BitVector 8
, sensor :: Bit
, serialIn :: Bit
, ack :: Bool
, turbo :: Bool
} |]
declareBareB [d|
data Output = MkOutput
{ parallelOut :: BitVector 8
, serialOut :: Bit
, interruptRequest :: Bool
, rst :: Maybe Value
} |]
tms5501
:: forall dom. (HiddenClockResetEnable dom, KnownNat (DomainPeriod dom), 1 <= DomainPeriod dom)
=> Signals dom Input
-> Signal dom (Maybe (PortCommand Ctl.Port Value))
-> ( Signal dom (Maybe Value)
, Signals dom Output
)
tms5501 MkInput{..} cmd = (dataOut, out)
where
out = MkOutput{..}
(dataOut, unbundle -> Ctl.MkOutput{..}) =
mealyStateB (uncurryN Ctl.controller) Ctl.initS (bbundle Ctl.MkInput{..}, cmd)
interruptRequest = delay False irq
rst = delay Nothing int
fastTick = risePeriod (SNat @(Microseconds 8))
slowTick = riseEveryWhen (SNat @8) fastTick
tick = mux (delay False fast) fastTick slowTick
sensorTrigger = isRising low sensor
inputTrigger = isRising low $ msb <$> parallelIn
(rxResult, rxFlags) =
mealyStateB (uncurryN $ UART.uartRx (SNat @(DomainPeriod dom))) UART.initRxS (fast, turbo, serialIn, delay False rxReset)
(serialOut, txReady) =
mealyStateB (uncurryN $ UART.uartTx (SNat @(DomainPeriod dom))) UART.initTxS (fast, turbo, txNew, delay False txBreak)
| |
82feee8c4ebe7934fe098285116cf797813cbba29323fa99e6b259f7b583032b | Dexterminator/clj-templates | test_feed.clj | {:group-id "yamlparser", :artifact-id "yamlparser", :description "FIXME: write description", :homepage "", :url "", :versions ["0.1.0-SNAPSHOT"]}
{:group-id "org.jmatt", :artifact-id "clojure.algo.generic", :description "clojure.algo.generic for Clojure 1.3.0. Example: (use 'clojure.algo.generic.math-functions)", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "79d1c5cf28463ce2f5ea0b400762e73d6d59952f", :url ""}, :homepage "", :url "", :versions ["0.1.0-SNAPSHOT"]}
{:group-id "cljsjs", :artifact-id "responsive-nav", :description "Responsive navigation plugin without library dependencies and with fast touch screen support.", :scm {:tag "4e9adcf8cd247e66cb804d38a13f7ffb61b9046d", :url ""}, :homepage "-nav.com", :url "-nav.com", :versions ["1.0.39-0"]}
{:group-id "error.handler", :artifact-id "error.handler", :description "Error handling for clojure that goes beyond try/catch", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "9f294876cac7e9e363e37140302507429013a29e", :url ""}, :versions ["1.1.0-SNAPSHOT" "1.0.0-SNAPSHOT"]}
{:group-id "org.clojars.siscia", :artifact-id "clj-stripe", :description "Clojure binding for Stripe", :scm {:connection "scm:git:git-stripe.git", :developer-connection "scm:git:ssh-stripe.git", :tag "7d342a9beac4f1dd896d192974bdb2f13603a8bc", :url "-stripe"}, :versions ["1.0.3" "1.0.2"]}
{:group-id "session", :artifact-id "session", :description "FIXME: write this!", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "da0aea69d6aa0905105c90be5d39645b7469eae4", :url ""}, :homepage "", :url "", :versions ["0.1.2-SNAPSHOT" "0.1.1-SNAPSHOT" "0.1.0-SNAPSHOT"]}
{:group-id "rill-event-sourcing", :artifact-id "rill.uuid", :description "An Event Sourcing Toolkit", :homepage "-event-sourcing/rill", :url "-event-sourcing/rill", :versions ["0.2.3-SNAPSHOT" "0.2.3-RC2" "0.2.3-RC1" "0.2.2" "0.2.1" "0.2.0"]}
{:group-id "com.onekingslane.danger", :artifact-id "jsonschema", :description "Library to help you derive schemas from arbirary collections of JSON", :scm {:connection "scm:git:git-jsonschema.git", :developer-connection "scm:git:ssh-jsonschema.git", :tag "41a5761b5182a0921df53eaaa741936e046a5d3b", :url "-jsonschema"}, :versions ["1.2.2" "1.2.1" "1.1.0" "1.1.0-timestamptz" "1.0.1" "1.0.0"]}
{:group-id "im.chit", :artifact-id "hara.common.state", :description "patterns and utilities", :homepage "", :url "", :versions ["2.5.2" "2.5.1" "2.5.0" "2.4.8" "2.4.7" "2.4.6" "2.4.5" "2.4.4" "2.4.2" "2.4.0" "2.3.7" "2.3.6" "2.3.4" "2.3.3" "2.3.2" "2.3.1" "2.2.17" "2.2.16" "2.2.16-SNAPSHOT" "2.2.15" "2.2.14" "2.2.13" "2.2.12" "2.2.11" "2.2.10" "2.2.9" "2.2.7" "2.2.6" "2.2.5" "2.2.4" "2.2.3" "2.2.2" "2.2.0-SNAPSHOT" "2.1.12" "2.1.11" "2.1.10" "2.1.9" "2.1.8" "2.1.7" "2.1.6" "2.1.5" "2.1.4" "2.1.3" "2.1.2" "2.1.1"]}
{:group-id "ajom", :artifact-id "lein-template", :description "atom plugins in clojurescript", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["0.3.2" "0.3.1" "0.3.0" "0.2.0" "0.1.1" "0.1.0"]}
{:group-id "clj-jtwig", :artifact-id "clj-jtwig", :description "Clojure wrapper for JTwig", :scm {:connection "scm:git:git-jtwig.git", :developer-connection "scm:git:ssh-jtwig.git", :tag "eb1c5a31a904161716c0d271e0164970d27b83c1", :url "-jtwig"}, :homepage "-jtwig", :url "-jtwig", :versions ["0.5.1" "0.5" "0.4.1" "0.4" "0.3.2" "0.3.1" "0.2.2" "0.2.1"]}
{:group-id "xyz.a4j", :artifact-id "metrics-clojure-influxdb", :description "Clojure wrapper around InfluxDB metrics library", :scm {:connection "scm:git:git-clojure-influxdb.git", :developer-connection "scm:git:ssh-clojure-influxdb.git", :tag "557763f0fc6dea162dd5aaae34c580730b28e5e9", :url "-clojure-influxdb"}, :homepage "-clojure-influxdb", :url "-clojure-influxdb", :versions ["0.1.0" "0.1.0-SNAPSHOT"]}
{:group-id "clojurewerkz", :artifact-id "persephone", :description "Clojure DSL that generates [Neo4J] Cypher queries", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "aafe951bd0fe22a06010937b06e765672469bf38", :url ""}, :homepage "", :url "", :versions ["0.1.1"]}
{:group-id "org.clojars.jasonjckn", :artifact-id "scribe-thrift7-bindings", :description "FIXME: write description", :versions ["1.0.1" "1.0.0"]}
{:group-id "juxt.modular", :artifact-id "bootstrap", :description "A modular extension that HTML rendering of components with the Twitter Bootstrap library.", :homepage "", :url "", :versions ["0.2.0" "0.1.0"]}
{:group-id "org.clogars.mw10013", :artifact-id "remix", :description "Mix and match machinery for web and sql.", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "0d0b54d84ac8815dbe5a83864e188b736da9e9b1", :url ""}, :versions ["0.0.4"]}
{:group-id "org.clojars.ollez", :artifact-id "swingrepl", :description "A Swing Clojure REPL using BeanShell's JConsole", :scm {:connection "scm:git:git-swingrepl.git", :developer-connection "scm:git:ssh-swingrepl.git", :tag "7710eb019095507043e7356a40a3b12e35699ec5", :url "-swingrepl"}, :versions ["1.4.1-SNAPSHOT"]}
{:group-id "capstan", :artifact-id "lein-template", :description "generate a Capstan clojure project skeleton", :scm {:connection "scm:git:git-lein-plugin.git", :developer-connection "scm:git:ssh-lein-plugin.git", :tag "4bbeea5fcbe8bd7a1a71996f98a5d94a2d3a3ec7", :url "-lein-plugin"}, :homepage "-lein-plugin", :url "-lein-plugin", :versions ["0.1.0"]}
{:group-id "nrepl-figwheel-node", :artifact-id "lein-template", :description "DEPRECATED: Leiningen template for Figwheel on nREPL and Node.js", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["0.1.6" "0.1.5" "0.1.4" "0.1.3" "0.1.2" "0.1.1-SNAPSHOT" "0.1.0"]}
{:group-id "analytics-clj", :artifact-id "analytics-clj", :description "Idiomatic Clojure wrapper for the segment.io Java client", :scm {:connection "scm:git:git-clj.git", :developer-connection "scm:git:ssh-clj.git", :tag "4bf86696d81729d543cba8ad1b0208b367421988", :url "-clj"}, :homepage "", :url "", :versions ["0.3.0" "0.2.2" "0.2.1" "0.2.0" "0.1.2" "0.1.1"]}
{:group-id "ryane", :artifact-id "hiroba", :description "A minimalistic Clojure library for the Foursquare REST API - forked from ", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "33012ab85c5095c03e735d851aed0fb78e142b06", :url ""}, :homepage "", :url "", :versions ["0.1.0-alpha2-SNAPSHOT"]}
{:group-id "org.clojars.mpenet", :artifact-id "clj-redis", :scm {:connection "scm:git:git-redis.git", :developer-connection "scm:git:ssh-redis.git", :tag "57eb3b57db954c66379bd2be1f3ee8b9269e933b", :url "-redis"}, :versions ["0.0.8" "0.0.8-SNAPSHOT"]}
{:group-id "containium.systems", :artifact-id "elasticsearch", :description "ElasticSearch Containium System", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "e3e50e3002847df9c169a9475a635469561df981", :url "//"}, :homepage "", :url "", :versions ["0.1.1" "0.1.0" "0.1.0-SNAPSHOT"]}
{:group-id "clj-bucket", :artifact-id "clj-bucket", :description "A low-level implementation of the token bucket algorithm", :scm {:tag "HEAD", :url "-bucket"}, :homepage "-bucket", :url "-bucket", :versions ["0.2.0" "0.1.5" "0.1.4" "0.1.3" "0.1.2"]}
{:group-id "pleasetrythisathome", :artifact-id "tao", :description "Two way data binding for browser history", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "cedb3f7b5d51f6578294f1cf11b26a773d906ae5", :url ""}, :homepage "", :url "", :versions ["0.1.5" "0.1.4" "0.1.3" "0.1.2" "0.1.1" "0.1.1-SNAPSHOT" "0.1.0-SNAPSHOT"]}
{:group-id "net.thegeez", :artifact-id "google-closure-library", :description "The Google Closure Library is a collection of JavaScript code\n designed for use with the Google Closure JavaScript Compiler.\n \n Non-official distribution for usage with the ClojureScript.", :scm {:connection "scm:svn:-library.googlecode.com/svn/trunk", :developer-connection "scm:svn:-library.googlecode.com/svn/trunk", :tag "HEAD", :url "-library/source/browse/#svn/trunk"}, :homepage "-library/", :url "-library/", :versions ["0.0-1698"]}
{:group-id "aviary", :artifact-id "figwheel", :description "ClojureScript Autobuilder/Server which pushes changed files to the browser.", :scm {:tag "HEAD", :url "-figwheel"}, :homepage "-figwheel", :url "-figwheel", :versions ["0.2.5"]}
{:group-id "shale", :artifact-id "shale", :description "A Clojure-backed Selenium hub replacement", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["0.3.3" "0.3.3-SNAPSHOT" "0.3.1" "0.3.0" "0.2.2" "0.2.1" "0.2.0" "0.1.1" "0.1.0"]}
{:group-id "org.pingles", :artifact-id "rotary", :description "Amazon DynamoDB API", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "8ca0525f2f559c4ff1a1050a272bff32bb17cfb9", :url ""}, :versions ["0.4.1-SNAPSHOT"]}
{:group-id "org.clojars.turbopape", :artifact-id "milestones", :description "Milestones : the Automagic Project Planner", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["1.0.1" "1.0.0" "0.3.0" "0.2.0"]}
{:group-id "keorn", :artifact-id "loom-gorilla", :description "A Gorilla REPL renderer for loom graphs.", :scm {:connection "scm:git:git-gorilla.git", :developer-connection "scm:git:ssh-gorilla.git", :tag "dbdd77076274c4c5672dd1891b68d7952eccc530", :url "-gorilla"}, :homepage "-gorilla", :url "-gorilla", :versions ["0.1.1-SNAPSHOT"]}
{:group-id "suvash", :artifact-id "irclj", :description "A simple IRC library/bot framework.", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "4c3ef716d27e00b6e057792ff9ffd12f026f8114", :url ""}, :homepage "", :url "", :versions ["0.5.0-alpha4" "0.5.0-alpha3"]}
{:group-id "selfsame", :artifact-id "pdf", :description "Predicate dispatch for Clojure(Script).", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "01e0d677a251ecb6b6c55e3afd015fd5d5b2305f", :url ""}, :homepage "", :url "", :versions ["0.0.9.5-SNAPSHOT" "0.0.9-SNAPSHOT"]}
{:group-id "argo", :artifact-id "argo", :description "JSON API implementation for Clojure", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["0.1.2" "0.1.1" "0.1.0" "0.1.0-SNAPSHOT"]}
{:group-id "com.report.engine", :artifact-id "clients", :versions ["0.0.3-SNAPSHOT" "0.0.2-SNAPSHOT"]}
{:group-id "fw1", :artifact-id "boot-template", :description "FW/1 template for Boot new", :scm {:tag "c8449a35cde2b162e5c8d47fb4369b2db8482dd5", :url "-one/fw1-template/"}, :homepage "-one/fw1-template/", :url "-one/fw1-template/", :versions ["0.8.0" "0.5.2" "0.5.1" "0.5.0"]}
{:group-id "chrisbetz", :artifact-id "sparkling", :description "A Clojure Library for Apache Spark", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "2779039522606d2df323e624064236c8c0ebb569", :url "//"}, :homepage "", :url "", :versions ["1.0.0-SNAPSHOT"]}
{:group-id "org.clojars.guv", :artifact-id "jmxremote_optional", :versions ["1.0.1_04"]}
{:group-id "parser", :artifact-id "parser", :scm {:connection "scm:git:git-parser.git", :developer-connection "scm:git:ssh-parser.git", :tag "ac9c60d84b65697ccdb7c3852b84e315be2d7324", :url "-parser"}, :versions ["1.0.0"]}
{:group-id "ragtime-c.j.j-0.3.3", :artifact-id "ragtime-c.j.j-0.3.3", :description "A database-independent migration library", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "5332bfa2ca98b6e6c78ecae5e9f129c7a527f375", :url ""}, :versions ["0.3.4-2" "0.3.4-1" "0.3.4"]}
{:group-id "marshallbrekka", :artifact-id "clj-aws-s3", :description "Clojure Amazon S3 library.", :scm {:connection "scm:git:git-aws-s3.git", :developer-connection "scm:git:ssh-aws-s3.git", :tag "528beb3e9c7b2b6a924d92ecef3fd456292cb54a", :url "-aws-s3"}, :versions ["0.3.5" "0.3.5a"]}
{:group-id "hoptoad", :artifact-id "hoptoad-java-notifier", :versions ["1.9"]}
{:group-id "cljsjs", :artifact-id "blend4web", :description "Blend4Web -- Javascript WebGL Framework by Triump LLC", :scm {:tag "ea893d7be7b7ca32328e8c484d954cb8e2681b1b", :url ""}, :homepage "/", :url "/", :versions ["16.11-1"]}
{:group-id "provisdom-clj", :artifact-id "boot-template", :description "The provisdom boot-new template", :scm {:tag "c7cf590021ebff82e63b4f721ff1d9ebd29b5be5", :url "-boot-template"}, :homepage "-boot-template", :url "-boot-template", :versions ["0.2.4" "0.2.3" "0.2.2" "0.2.1" "0.2.0" "0.2.0-SNAPSHOT" "0.1.1" "0.1.1-SNAPSHOT" "0.1.0"]}
{:group-id "org.ozias.cljlibs", :artifact-id "semver", :description "Semantic version-ing library for Clojure", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["0.1.5-SNAPSHOT" "0.1.4" "0.1.4-SNAPSHOT" "0.1.3" "0.1.3-SNAPSHOT" "0.1.2" "0.1.2-SNAPSHOT" "0.1.1" "0.1.1-SNAPSHOT" "0.1.1-alpha.0" "0.1.0-SNAPSHOT" "0.1.0-alpha.1" "0.1.0-alpha.0"]}
{:group-id "quile", :artifact-id "dependency-cljs", :description "A data structure for representing dependency graphs", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "010b241c7667d6a685e0b55551edcab1e3caa085", :url ""}, :homepage "-cljs", :url "-cljs", :versions ["0.1.4"]}
{:group-id "tcp-server", :artifact-id "tcp-server", :description "Threaded TCP server library", :scm {:connection "scm:git:git-server.git", :developer-connection "scm:git:ssh-server.git", :tag "553607c1af3337ca5ac85f90d30683a021db100d", :url "-server"}, :versions ["0.1.0"]}
{:group-id "org.clojars.dlepage", :artifact-id "clj-cassandra", :description "Clojure client for Apache Cassandra", :versions ["0.1.3"]}
{:group-id "org.clojars.danielbraun", :artifact-id "expresso", :description "a general Algebraic Expression manipulation library in clojure", :scm {:connection "scm:git:git-numerics/expresso.git", :developer-connection "scm:git:ssh-numerics/expresso.git", :tag "71707402fcdde3bc2c17cd4c89e1220c978a1d5e", :url "-numerics/expresso"}, :homepage "-numerics/expresso", :url "-numerics/expresso", :versions ["0.2.2-SNAPSHOT"]} | null | https://raw.githubusercontent.com/Dexterminator/clj-templates/705e652fece2455257e5008c12c712bb9f7802d1/dev/resources/test_feed.clj | clojure | {:group-id "yamlparser", :artifact-id "yamlparser", :description "FIXME: write description", :homepage "", :url "", :versions ["0.1.0-SNAPSHOT"]}
{:group-id "org.jmatt", :artifact-id "clojure.algo.generic", :description "clojure.algo.generic for Clojure 1.3.0. Example: (use 'clojure.algo.generic.math-functions)", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "79d1c5cf28463ce2f5ea0b400762e73d6d59952f", :url ""}, :homepage "", :url "", :versions ["0.1.0-SNAPSHOT"]}
{:group-id "cljsjs", :artifact-id "responsive-nav", :description "Responsive navigation plugin without library dependencies and with fast touch screen support.", :scm {:tag "4e9adcf8cd247e66cb804d38a13f7ffb61b9046d", :url ""}, :homepage "-nav.com", :url "-nav.com", :versions ["1.0.39-0"]}
{:group-id "error.handler", :artifact-id "error.handler", :description "Error handling for clojure that goes beyond try/catch", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "9f294876cac7e9e363e37140302507429013a29e", :url ""}, :versions ["1.1.0-SNAPSHOT" "1.0.0-SNAPSHOT"]}
{:group-id "org.clojars.siscia", :artifact-id "clj-stripe", :description "Clojure binding for Stripe", :scm {:connection "scm:git:git-stripe.git", :developer-connection "scm:git:ssh-stripe.git", :tag "7d342a9beac4f1dd896d192974bdb2f13603a8bc", :url "-stripe"}, :versions ["1.0.3" "1.0.2"]}
{:group-id "session", :artifact-id "session", :description "FIXME: write this!", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "da0aea69d6aa0905105c90be5d39645b7469eae4", :url ""}, :homepage "", :url "", :versions ["0.1.2-SNAPSHOT" "0.1.1-SNAPSHOT" "0.1.0-SNAPSHOT"]}
{:group-id "rill-event-sourcing", :artifact-id "rill.uuid", :description "An Event Sourcing Toolkit", :homepage "-event-sourcing/rill", :url "-event-sourcing/rill", :versions ["0.2.3-SNAPSHOT" "0.2.3-RC2" "0.2.3-RC1" "0.2.2" "0.2.1" "0.2.0"]}
{:group-id "com.onekingslane.danger", :artifact-id "jsonschema", :description "Library to help you derive schemas from arbirary collections of JSON", :scm {:connection "scm:git:git-jsonschema.git", :developer-connection "scm:git:ssh-jsonschema.git", :tag "41a5761b5182a0921df53eaaa741936e046a5d3b", :url "-jsonschema"}, :versions ["1.2.2" "1.2.1" "1.1.0" "1.1.0-timestamptz" "1.0.1" "1.0.0"]}
{:group-id "im.chit", :artifact-id "hara.common.state", :description "patterns and utilities", :homepage "", :url "", :versions ["2.5.2" "2.5.1" "2.5.0" "2.4.8" "2.4.7" "2.4.6" "2.4.5" "2.4.4" "2.4.2" "2.4.0" "2.3.7" "2.3.6" "2.3.4" "2.3.3" "2.3.2" "2.3.1" "2.2.17" "2.2.16" "2.2.16-SNAPSHOT" "2.2.15" "2.2.14" "2.2.13" "2.2.12" "2.2.11" "2.2.10" "2.2.9" "2.2.7" "2.2.6" "2.2.5" "2.2.4" "2.2.3" "2.2.2" "2.2.0-SNAPSHOT" "2.1.12" "2.1.11" "2.1.10" "2.1.9" "2.1.8" "2.1.7" "2.1.6" "2.1.5" "2.1.4" "2.1.3" "2.1.2" "2.1.1"]}
{:group-id "ajom", :artifact-id "lein-template", :description "atom plugins in clojurescript", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["0.3.2" "0.3.1" "0.3.0" "0.2.0" "0.1.1" "0.1.0"]}
{:group-id "clj-jtwig", :artifact-id "clj-jtwig", :description "Clojure wrapper for JTwig", :scm {:connection "scm:git:git-jtwig.git", :developer-connection "scm:git:ssh-jtwig.git", :tag "eb1c5a31a904161716c0d271e0164970d27b83c1", :url "-jtwig"}, :homepage "-jtwig", :url "-jtwig", :versions ["0.5.1" "0.5" "0.4.1" "0.4" "0.3.2" "0.3.1" "0.2.2" "0.2.1"]}
{:group-id "xyz.a4j", :artifact-id "metrics-clojure-influxdb", :description "Clojure wrapper around InfluxDB metrics library", :scm {:connection "scm:git:git-clojure-influxdb.git", :developer-connection "scm:git:ssh-clojure-influxdb.git", :tag "557763f0fc6dea162dd5aaae34c580730b28e5e9", :url "-clojure-influxdb"}, :homepage "-clojure-influxdb", :url "-clojure-influxdb", :versions ["0.1.0" "0.1.0-SNAPSHOT"]}
{:group-id "clojurewerkz", :artifact-id "persephone", :description "Clojure DSL that generates [Neo4J] Cypher queries", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "aafe951bd0fe22a06010937b06e765672469bf38", :url ""}, :homepage "", :url "", :versions ["0.1.1"]}
{:group-id "org.clojars.jasonjckn", :artifact-id "scribe-thrift7-bindings", :description "FIXME: write description", :versions ["1.0.1" "1.0.0"]}
{:group-id "juxt.modular", :artifact-id "bootstrap", :description "A modular extension that HTML rendering of components with the Twitter Bootstrap library.", :homepage "", :url "", :versions ["0.2.0" "0.1.0"]}
{:group-id "org.clogars.mw10013", :artifact-id "remix", :description "Mix and match machinery for web and sql.", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "0d0b54d84ac8815dbe5a83864e188b736da9e9b1", :url ""}, :versions ["0.0.4"]}
{:group-id "org.clojars.ollez", :artifact-id "swingrepl", :description "A Swing Clojure REPL using BeanShell's JConsole", :scm {:connection "scm:git:git-swingrepl.git", :developer-connection "scm:git:ssh-swingrepl.git", :tag "7710eb019095507043e7356a40a3b12e35699ec5", :url "-swingrepl"}, :versions ["1.4.1-SNAPSHOT"]}
{:group-id "capstan", :artifact-id "lein-template", :description "generate a Capstan clojure project skeleton", :scm {:connection "scm:git:git-lein-plugin.git", :developer-connection "scm:git:ssh-lein-plugin.git", :tag "4bbeea5fcbe8bd7a1a71996f98a5d94a2d3a3ec7", :url "-lein-plugin"}, :homepage "-lein-plugin", :url "-lein-plugin", :versions ["0.1.0"]}
{:group-id "nrepl-figwheel-node", :artifact-id "lein-template", :description "DEPRECATED: Leiningen template for Figwheel on nREPL and Node.js", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["0.1.6" "0.1.5" "0.1.4" "0.1.3" "0.1.2" "0.1.1-SNAPSHOT" "0.1.0"]}
{:group-id "analytics-clj", :artifact-id "analytics-clj", :description "Idiomatic Clojure wrapper for the segment.io Java client", :scm {:connection "scm:git:git-clj.git", :developer-connection "scm:git:ssh-clj.git", :tag "4bf86696d81729d543cba8ad1b0208b367421988", :url "-clj"}, :homepage "", :url "", :versions ["0.3.0" "0.2.2" "0.2.1" "0.2.0" "0.1.2" "0.1.1"]}
{:group-id "ryane", :artifact-id "hiroba", :description "A minimalistic Clojure library for the Foursquare REST API - forked from ", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "33012ab85c5095c03e735d851aed0fb78e142b06", :url ""}, :homepage "", :url "", :versions ["0.1.0-alpha2-SNAPSHOT"]}
{:group-id "org.clojars.mpenet", :artifact-id "clj-redis", :scm {:connection "scm:git:git-redis.git", :developer-connection "scm:git:ssh-redis.git", :tag "57eb3b57db954c66379bd2be1f3ee8b9269e933b", :url "-redis"}, :versions ["0.0.8" "0.0.8-SNAPSHOT"]}
{:group-id "containium.systems", :artifact-id "elasticsearch", :description "ElasticSearch Containium System", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "e3e50e3002847df9c169a9475a635469561df981", :url "//"}, :homepage "", :url "", :versions ["0.1.1" "0.1.0" "0.1.0-SNAPSHOT"]}
{:group-id "clj-bucket", :artifact-id "clj-bucket", :description "A low-level implementation of the token bucket algorithm", :scm {:tag "HEAD", :url "-bucket"}, :homepage "-bucket", :url "-bucket", :versions ["0.2.0" "0.1.5" "0.1.4" "0.1.3" "0.1.2"]}
{:group-id "pleasetrythisathome", :artifact-id "tao", :description "Two way data binding for browser history", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "cedb3f7b5d51f6578294f1cf11b26a773d906ae5", :url ""}, :homepage "", :url "", :versions ["0.1.5" "0.1.4" "0.1.3" "0.1.2" "0.1.1" "0.1.1-SNAPSHOT" "0.1.0-SNAPSHOT"]}
{:group-id "net.thegeez", :artifact-id "google-closure-library", :description "The Google Closure Library is a collection of JavaScript code\n designed for use with the Google Closure JavaScript Compiler.\n \n Non-official distribution for usage with the ClojureScript.", :scm {:connection "scm:svn:-library.googlecode.com/svn/trunk", :developer-connection "scm:svn:-library.googlecode.com/svn/trunk", :tag "HEAD", :url "-library/source/browse/#svn/trunk"}, :homepage "-library/", :url "-library/", :versions ["0.0-1698"]}
{:group-id "aviary", :artifact-id "figwheel", :description "ClojureScript Autobuilder/Server which pushes changed files to the browser.", :scm {:tag "HEAD", :url "-figwheel"}, :homepage "-figwheel", :url "-figwheel", :versions ["0.2.5"]}
{:group-id "shale", :artifact-id "shale", :description "A Clojure-backed Selenium hub replacement", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["0.3.3" "0.3.3-SNAPSHOT" "0.3.1" "0.3.0" "0.2.2" "0.2.1" "0.2.0" "0.1.1" "0.1.0"]}
{:group-id "org.pingles", :artifact-id "rotary", :description "Amazon DynamoDB API", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "8ca0525f2f559c4ff1a1050a272bff32bb17cfb9", :url ""}, :versions ["0.4.1-SNAPSHOT"]}
{:group-id "org.clojars.turbopape", :artifact-id "milestones", :description "Milestones : the Automagic Project Planner", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["1.0.1" "1.0.0" "0.3.0" "0.2.0"]}
{:group-id "keorn", :artifact-id "loom-gorilla", :description "A Gorilla REPL renderer for loom graphs.", :scm {:connection "scm:git:git-gorilla.git", :developer-connection "scm:git:ssh-gorilla.git", :tag "dbdd77076274c4c5672dd1891b68d7952eccc530", :url "-gorilla"}, :homepage "-gorilla", :url "-gorilla", :versions ["0.1.1-SNAPSHOT"]}
{:group-id "suvash", :artifact-id "irclj", :description "A simple IRC library/bot framework.", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "4c3ef716d27e00b6e057792ff9ffd12f026f8114", :url ""}, :homepage "", :url "", :versions ["0.5.0-alpha4" "0.5.0-alpha3"]}
{:group-id "selfsame", :artifact-id "pdf", :description "Predicate dispatch for Clojure(Script).", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "01e0d677a251ecb6b6c55e3afd015fd5d5b2305f", :url ""}, :homepage "", :url "", :versions ["0.0.9.5-SNAPSHOT" "0.0.9-SNAPSHOT"]}
{:group-id "argo", :artifact-id "argo", :description "JSON API implementation for Clojure", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["0.1.2" "0.1.1" "0.1.0" "0.1.0-SNAPSHOT"]}
{:group-id "com.report.engine", :artifact-id "clients", :versions ["0.0.3-SNAPSHOT" "0.0.2-SNAPSHOT"]}
{:group-id "fw1", :artifact-id "boot-template", :description "FW/1 template for Boot new", :scm {:tag "c8449a35cde2b162e5c8d47fb4369b2db8482dd5", :url "-one/fw1-template/"}, :homepage "-one/fw1-template/", :url "-one/fw1-template/", :versions ["0.8.0" "0.5.2" "0.5.1" "0.5.0"]}
{:group-id "chrisbetz", :artifact-id "sparkling", :description "A Clojure Library for Apache Spark", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "2779039522606d2df323e624064236c8c0ebb569", :url "//"}, :homepage "", :url "", :versions ["1.0.0-SNAPSHOT"]}
{:group-id "org.clojars.guv", :artifact-id "jmxremote_optional", :versions ["1.0.1_04"]}
{:group-id "parser", :artifact-id "parser", :scm {:connection "scm:git:git-parser.git", :developer-connection "scm:git:ssh-parser.git", :tag "ac9c60d84b65697ccdb7c3852b84e315be2d7324", :url "-parser"}, :versions ["1.0.0"]}
{:group-id "ragtime-c.j.j-0.3.3", :artifact-id "ragtime-c.j.j-0.3.3", :description "A database-independent migration library", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "5332bfa2ca98b6e6c78ecae5e9f129c7a527f375", :url ""}, :versions ["0.3.4-2" "0.3.4-1" "0.3.4"]}
{:group-id "marshallbrekka", :artifact-id "clj-aws-s3", :description "Clojure Amazon S3 library.", :scm {:connection "scm:git:git-aws-s3.git", :developer-connection "scm:git:ssh-aws-s3.git", :tag "528beb3e9c7b2b6a924d92ecef3fd456292cb54a", :url "-aws-s3"}, :versions ["0.3.5" "0.3.5a"]}
{:group-id "hoptoad", :artifact-id "hoptoad-java-notifier", :versions ["1.9"]}
{:group-id "cljsjs", :artifact-id "blend4web", :description "Blend4Web -- Javascript WebGL Framework by Triump LLC", :scm {:tag "ea893d7be7b7ca32328e8c484d954cb8e2681b1b", :url ""}, :homepage "/", :url "/", :versions ["16.11-1"]}
{:group-id "provisdom-clj", :artifact-id "boot-template", :description "The provisdom boot-new template", :scm {:tag "c7cf590021ebff82e63b4f721ff1d9ebd29b5be5", :url "-boot-template"}, :homepage "-boot-template", :url "-boot-template", :versions ["0.2.4" "0.2.3" "0.2.2" "0.2.1" "0.2.0" "0.2.0-SNAPSHOT" "0.1.1" "0.1.1-SNAPSHOT" "0.1.0"]}
{:group-id "org.ozias.cljlibs", :artifact-id "semver", :description "Semantic version-ing library for Clojure", :scm {:tag "HEAD", :url ""}, :homepage "", :url "", :versions ["0.1.5-SNAPSHOT" "0.1.4" "0.1.4-SNAPSHOT" "0.1.3" "0.1.3-SNAPSHOT" "0.1.2" "0.1.2-SNAPSHOT" "0.1.1" "0.1.1-SNAPSHOT" "0.1.1-alpha.0" "0.1.0-SNAPSHOT" "0.1.0-alpha.1" "0.1.0-alpha.0"]}
{:group-id "quile", :artifact-id "dependency-cljs", :description "A data structure for representing dependency graphs", :scm {:connection "scm:git:git", :developer-connection "scm:git:ssh", :tag "010b241c7667d6a685e0b55551edcab1e3caa085", :url ""}, :homepage "-cljs", :url "-cljs", :versions ["0.1.4"]}
{:group-id "tcp-server", :artifact-id "tcp-server", :description "Threaded TCP server library", :scm {:connection "scm:git:git-server.git", :developer-connection "scm:git:ssh-server.git", :tag "553607c1af3337ca5ac85f90d30683a021db100d", :url "-server"}, :versions ["0.1.0"]}
{:group-id "org.clojars.dlepage", :artifact-id "clj-cassandra", :description "Clojure client for Apache Cassandra", :versions ["0.1.3"]}
{:group-id "org.clojars.danielbraun", :artifact-id "expresso", :description "a general Algebraic Expression manipulation library in clojure", :scm {:connection "scm:git:git-numerics/expresso.git", :developer-connection "scm:git:ssh-numerics/expresso.git", :tag "71707402fcdde3bc2c17cd4c89e1220c978a1d5e", :url "-numerics/expresso"}, :homepage "-numerics/expresso", :url "-numerics/expresso", :versions ["0.2.2-SNAPSHOT"]} | |
58c405e418324e46c42ba4ebb9534a45f5b93c99d90a3c3a4382746ea8f267ac | semmons99/clojure-euler | prob-040.clj | problem 040 ; ; ; ; ; ; ; ; ; ;
(defn prob-040 []
(let [s (apply str (range 1 1000000))]
(* (Integer. (subs s 0 1))
(Integer. (subs s 99 100))
(Integer. (subs s 999 1000))
(Integer. (subs s 9999 10000))
(Integer. (subs s 99999 100000))
(Integer. (subs s 999999 1000000))))) | null | https://raw.githubusercontent.com/semmons99/clojure-euler/3480bc313b9df7f282dadf6e0b48d96230f1bfc1/prob-040.clj | clojure | ; ; ; ; ; ; ; ; ; | (defn prob-040 []
(let [s (apply str (range 1 1000000))]
(* (Integer. (subs s 0 1))
(Integer. (subs s 99 100))
(Integer. (subs s 999 1000))
(Integer. (subs s 9999 10000))
(Integer. (subs s 99999 100000))
(Integer. (subs s 999999 1000000))))) |
f99ade00b8d1be3c8c8bd80f9d6f7775e77df7c638f249238ab29021bff899c5 | ijvcms/chuanqi_dev | hook_pp.erl | %%%-------------------------------------------------------------------
@author zhengsiying
( C ) 2015 , < COMPANY >
%%% @doc
%%%
%%% @end
Created : 26 . 八月 2015 上午10:55
%%%-------------------------------------------------------------------
-module(hook_pp).
-include("common.hrl").
-include("record.hrl").
-include("proto.hrl").
-include("cache.hrl").
-include("config.hrl").
-include("language_config.hrl").
-include("button_tips_config.hrl").
%% API
-export([
handle/3
]).
%% ====================================================================
%% API functions
%% ====================================================================
%% 直接切换挂机场景(不等待回合结束)
handle(13001, PlayerState, Data) ->
?INFO("13001 ~p", [Data]),
HookSceneId = Data#req_change_hook_scene.scene_id,
case hook_lib:check_scene_id(PlayerState, HookSceneId) of
true ->
Update = #player_state{
db_player_base = #db_player_base{hook_scene_id = HookSceneId}
},
case player_lib:update_player_state(PlayerState, Update) of
{ok, PlayerState1} ->
HookState = player_lib:get_hook_state(),
case hook_lib:update_drive(PlayerState1, HookState, ?HOOK_DRIVE_CLIENT) of
{NewPlayerState, HookState1} ->
scene_mgr_lib:leave_scene(PlayerState, ?LEAVE_SCENE_TYPE_INITIATIVE),
NewHookState = hook_lib:heartbeat(HookState1),
player_lib:put_hook_state(NewHookState),
net_send:send_to_client(NewPlayerState#player_state.socket, 13001, #rep_change_hook_scene{scene_id = HookSceneId}),
{ok, NewPlayerState#player_state{scene_id = null, scene_pid = null}};
_ ->
NewHookState = hook_lib:heartbeat(HookState),
player_lib:put_hook_state(NewHookState),
net_send:send_to_client(PlayerState1#player_state.socket, 13001, #rep_change_hook_scene{scene_id = HookSceneId}),
{ok, PlayerState1#player_state{scene_id = null, scene_pid = null}}
end;
_ ->
skip
end;
_ ->
skip
end;
获取场景刷怪信息
handle(13002, PlayerState, _Data) ->
?INFO("13002 ~p", [_Data]),
HookState = player_lib:get_hook_state(),
case HookState#hook_state.drive of
?HOOK_DRIVE_CLIENT ->
HookState1 = hook_lib:heartbeat(HookState),
case hook_lib:new_round(PlayerState, HookState1) of
{ok, NewHookState} ->
player_lib:put_hook_state(NewHookState),
MonsterList = hook_lib:get_monster_data(NewHookState),
MonsterType =
case NewHookState#hook_state.boss_round of
true ->
2;
_ ->
1
end,
Data = #rep_get_hook_monster{monster_list = MonsterList, monster_type = MonsterType},
net_send:send_to_client(PlayerState#player_state.socket, 13002, Data);
_ ->
NextTime = max(0, HookState#hook_state.next_round_time - util_date:unixtime()),
Data = #rep_round_result{
status = 2,
next_time = NextTime
},
net_send:send_to_client(PlayerState#player_state.socket, 13004, Data)
end;
_ ->
skip
end;
%% 挂机释放技能
handle(13003, PlayerState, Data) ->
?INFO("13003 ~p", [Data]),
HookState = player_lib:get_hook_state(),
case HookState#hook_state.drive of
?HOOK_DRIVE_CLIENT ->
HookState1 = hook_lib:heartbeat(HookState),
#req_hook_use_skill{
caster_flag = CasterFlag,
target_point = TargetPoint,
skill_id = SkillId,
target_list = TargetList
} = Data,
CasterType = CasterFlag#proto_obj_flag.type,
CasterId = CasterFlag#proto_obj_flag.id,
TargetFlagList = [{Type, Id} || #proto_obj_flag{type = Type, id = Id} <- TargetList, Type /= ?OBJ_TYPE_PET],
#proto_point{x = X, y = Y} = TargetPoint,
case hook_lib:obj_use_skill(PlayerState, HookState1, {CasterType, CasterId}, SkillId, TargetFlagList, {X, Y}) of
{NewPlayerState, NewHookState} ->
player_lib:put_hook_state(NewHookState),
{ok, NewPlayerState};
_ ->
skip
end;
_ ->
skip
end;
获取boss可用挑战次数
handle(13006, PlayerState, _Data) ->
?INFO("13006 ~p", [_Data]),
case hook_lib:get_challenge_info(PlayerState) of
{NewPlayerState, ChallengeNum, NeedJade} ->
Data = #rep_challenge_num{challenge_num = ChallengeNum, need_jade = NeedJade},
net_send:send_to_client(NewPlayerState#player_state.socket, 13006, Data),
{ok, NewPlayerState};
_ ->
skip
end;
挑战boss
handle(13007, PlayerState, Data) ->
?INFO("13007 ~p", [Data]),
HookState = player_lib:get_hook_state(),
case HookState#hook_state.drive of
?HOOK_DRIVE_CLIENT ->
HookState1 = hook_lib:heartbeat(HookState),
HookSceneId = Data#req_challenge_boos.scene_id,
挑战boss
case hook_lib:challenge_boos(PlayerState, HookState1, HookSceneId) of
{NewPlayerState, NewHookState} ->
player_lib:put_hook_state(NewHookState),
net_send:send_to_client(NewPlayerState#player_state.socket, 13007, #rep_challenge_boos{scene_id = HookSceneId}),
{ok, NewPlayerState};
_ ->
skip
end;
_ ->
skip
end;
%% 切换挂机场景(等待回合结束)
handle(13008, PlayerState, Data) ->
?INFO("13008 ~p", [Data]),
HookState = player_lib:get_hook_state(),
case HookState#hook_state.drive of
?HOOK_DRIVE_CLIENT ->
HookSceneId = Data#req_change_hook_scene1.scene_id,
case hook_lib:check_scene_id(PlayerState, HookSceneId) of
true ->
HookState1 = hook_lib:heartbeat(HookState),
Update = #player_state{
db_player_base = #db_player_base{hook_scene_id = HookSceneId}
},
case player_lib:update_player_state(PlayerState, Update) of
{ok, NewPlayerState} ->
player_lib:put_hook_state(HookState1),
net_send:send_to_client(PlayerState#player_state.socket, 13008, #rep_change_hook_scene1{scene_id = HookSceneId}),
{ok, NewPlayerState};
_ ->
skip
end;
_ ->
skip
end;
_ ->
skip
end;
%% 获取离线报告
handle(13009, PlayerState, _Data) ->
?INFO("13009 ~p", [_Data]),
HookReport = hook_lib:get_hook_report(PlayerState, null),
case HookReport#proto_hook_report.offline_time >= 300 of
true ->
net_send:send_to_client(PlayerState#player_state.socket, 13009, #rep_offline_report{hook_report = HookReport});
_ ->
skip
end;
%% 快速挂机 扫荡
handle(13010, PlayerState, Data) ->
?INFO("13010 ~p", [Data]),
Times = Data#req_quick_hook.times,
Base = PlayerState#player_state.db_player_base,
%% 购买的次数
BuyHookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_BUY_NUM),
%% 免费的次数上限
LimitNum = counter_lib:get_limit(?COUNTER_HOOK_NUM),
%% 已用的次数
HookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_NUM),
VipNum = vip_lib:get_vip_hook_num(Base#db_player_base.career, Base#db_player_base.vip),
NewTimes = case Times > LimitNum + VipNum + BuyHookNum - HookNum of
true ->
LimitNum + VipNum + BuyHookNum - HookNum;
_ ->
Times
end,
case NewTimes > 0 of
true ->
PerTimeCount = 7200 * NewTimes, %% 每次快速挂机
GoodsHook = hook_lib:compute_hook_gain(PlayerState, PerTimeCount),%% 计算挂机奖励
%% 添加次数
counter_lib:update_value_limit(PlayerState#player_state.player_id, ?COUNTER_HOOK_NUM, NewTimes),
HookReport = hook_lib:get_hook_report(PlayerState, GoodsHook),
挂机报告
SendData = #rep_cur_power{
need_jade = hook_lib:get_buy_power_need(BuyHookNum),
remain_times = LimitNum + VipNum + BuyHookNum - (HookNum + NewTimes),
buy_num = BuyHookNum,
all_buy_num = vip_lib:get_vip_buy_hook_num(Base#db_player_base.career, Base#db_player_base.vip),
hook_info = HookReport
},
net_send:send_to_client(PlayerState#player_state.socket, 13013, SendData),
%% 领取挂机奖励
NewPlayerState1 = hook_lib:receive_hook_draw(PlayerState, GoodsHook),
%% 刷新挂机红点信息
button_tips_lib:ref_button_tips(NewPlayerState1, ?BTN_HOOK_RAIDS),
task_comply:update_player_task_info(NewPlayerState1, ?TASKSORT_BATTLE, NewTimes);
_ ->
net_send:send_to_client(PlayerState#player_state.socket, 13010, #rep_quick_hook{result = ?ERR_HOOK_POWER_NOT_ENOUGH})
end;
%% 领取挂机奖励
handle(13025, PlayerState, _Data) ->
?INFO("13011 ~p", [_Data]),
NewPlayerState = hook_lib:receive_hook_draw(PlayerState, null),
{ok, NewPlayerState};
购买boss挑战次数
handle(13011, PlayerState, _Data) ->
?INFO("13011 ~p", [_Data]),
case hook_lib:buy_challenge_num(PlayerState) of
{NewPlayerState, ChallengeNum, NeedJade} ->
SendData = #rep_buy_challenge{challenge_num = ChallengeNum, need_jade = NeedJade},
net_send:send_to_client(PlayerState#player_state.socket, 13011, SendData),
{ok, NewPlayerState};
_ ->
skip
end;
%% 获取挂机统计
handle(13012, PlayerState, _Data) ->
?INFO("13012 ~p", [_Data]),
HookState = player_lib:get_hook_state(),
HookSceneId = HookState#hook_state.scene_id,
HookSceneConf = hook_scene_config:get(HookSceneId),
{SumKill, Exp, Coin} = hook_lib:get_hook_statistics(PlayerState, 3600),
PerDrop = HookSceneConf#hook_scene_conf.per_drop,
SendData = #rep_hook_statistics{
hour_kill_num = SumKill,
hour_coin_gain = Coin,
hour_exp_gain = Exp,
drop_rate = PerDrop
},
net_send:send_to_client(PlayerState#player_state.socket, 13012, SendData);
%% 获取当前挂机信息
handle(13013, PlayerState, _Data) ->
?INFO("130013 ~p", [_Data]),
Base = PlayerState#player_state.db_player_base,
%% 购买的次数
BuyHookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_BUY_NUM),
%% 免费的次数上限
LimitNum = counter_lib:get_limit(?COUNTER_HOOK_NUM),
%% 已用的次数
HookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_NUM),
VipNum = vip_lib:get_vip_hook_num(Base#db_player_base.career, Base#db_player_base.vip),
SendData = #rep_cur_power{
need_jade = hook_lib:get_buy_power_need(BuyHookNum),
remain_times = LimitNum + BuyHookNum + VipNum - HookNum,
buy_num = BuyHookNum,
all_buy_num = vip_lib:get_vip_buy_hook_num(Base#db_player_base.career, Base#db_player_base.vip),
hook_info = #proto_hook_report{}
},
net_send:send_to_client(PlayerState#player_state.socket, 13013, SendData);
购买挂机扫荡次数 20160408 aidan 改版挂机系统
handle(13014, PlayerState, _Data) ->
?ERR("13014 ~p", [111]),
case hook_lib:buy_power(PlayerState) of
{ok, NewPlayerState} ->
更新购买次数信息
net_send:send_to_client(PlayerState#player_state.socket, 13014, #rep_buy_power{result = 0}),
%% 刷新红点
button_tips_lib:ref_button_tips(PlayerState, ?BTN_HOOK_RAIDS),
{ok, NewPlayerState};
{fail, Err} ->
net_send:send_to_client(PlayerState#player_state.socket, 13014, #rep_buy_power{result = Err})
end;
%% 获取挂机星级列表
handle(13015, PlayerState, _Data) ->
?INFO("13015 ~p", [_Data]),
HookStarList = player_hook_star_lib:get_hook_star_list(),
HookStarRewardList = hook_star_reward_lib:get_hook_star_reward_list(),
Data = #rep_hook_star_list{
hook_star_list = HookStarList,
hook_star_reward_list = HookStarRewardList
},
net_send:send_to_client(PlayerState#player_state.socket, 13015, Data);
%% 火墙攻击
handle(13020, PlayerState, Data) ->
HookState = player_lib:get_hook_state(),
case HookState#hook_state.drive of
?HOOK_DRIVE_CLIENT ->
HookState1 = hook_lib:heartbeat(HookState),
case hook_lib:fire_wall_attack(PlayerState, HookState1, Data) of
{NewPlayerState, NewHookState} ->
player_lib:put_hook_state(NewHookState),
{ok, NewPlayerState};
_ ->
skip
end;
_ ->
skip
end;
%% 领取挂机星级奖励
handle(13022, PlayerState, Data) ->
#req_draw_star_reward{
chapter = Chapter,
step = Step
} = Data,
hook_star_reward_lib:draw_reward(PlayerState, Chapter, Step);
%% 领取首次通关奖励
handle(13024, PlayerState, Data) ->
#req_draw_first_reward{scene_id = SceneId} = Data,
player_hook_star_lib:draw_first_prize(PlayerState, SceneId);
handle(Cmd, PlayerState, Data) ->
?ERR("not define ~p cmd:~nstate: ~p~ndata: ~p", [Cmd, PlayerState, Data]),
{ok, PlayerState}.
%% ====================================================================
Internal functions
%% ====================================================================
| null | https://raw.githubusercontent.com/ijvcms/chuanqi_dev/7742184bded15f25be761c4f2d78834249d78097/server/trunk/server/src/business/hook/hook_pp.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
API
====================================================================
API functions
====================================================================
直接切换挂机场景(不等待回合结束)
挂机释放技能
切换挂机场景(等待回合结束)
获取离线报告
快速挂机 扫荡
购买的次数
免费的次数上限
已用的次数
每次快速挂机
计算挂机奖励
添加次数
领取挂机奖励
刷新挂机红点信息
领取挂机奖励
获取挂机统计
获取当前挂机信息
购买的次数
免费的次数上限
已用的次数
刷新红点
获取挂机星级列表
火墙攻击
领取挂机星级奖励
领取首次通关奖励
====================================================================
==================================================================== | @author zhengsiying
( C ) 2015 , < COMPANY >
Created : 26 . 八月 2015 上午10:55
-module(hook_pp).
-include("common.hrl").
-include("record.hrl").
-include("proto.hrl").
-include("cache.hrl").
-include("config.hrl").
-include("language_config.hrl").
-include("button_tips_config.hrl").
-export([
handle/3
]).
handle(13001, PlayerState, Data) ->
?INFO("13001 ~p", [Data]),
HookSceneId = Data#req_change_hook_scene.scene_id,
case hook_lib:check_scene_id(PlayerState, HookSceneId) of
true ->
Update = #player_state{
db_player_base = #db_player_base{hook_scene_id = HookSceneId}
},
case player_lib:update_player_state(PlayerState, Update) of
{ok, PlayerState1} ->
HookState = player_lib:get_hook_state(),
case hook_lib:update_drive(PlayerState1, HookState, ?HOOK_DRIVE_CLIENT) of
{NewPlayerState, HookState1} ->
scene_mgr_lib:leave_scene(PlayerState, ?LEAVE_SCENE_TYPE_INITIATIVE),
NewHookState = hook_lib:heartbeat(HookState1),
player_lib:put_hook_state(NewHookState),
net_send:send_to_client(NewPlayerState#player_state.socket, 13001, #rep_change_hook_scene{scene_id = HookSceneId}),
{ok, NewPlayerState#player_state{scene_id = null, scene_pid = null}};
_ ->
NewHookState = hook_lib:heartbeat(HookState),
player_lib:put_hook_state(NewHookState),
net_send:send_to_client(PlayerState1#player_state.socket, 13001, #rep_change_hook_scene{scene_id = HookSceneId}),
{ok, PlayerState1#player_state{scene_id = null, scene_pid = null}}
end;
_ ->
skip
end;
_ ->
skip
end;
获取场景刷怪信息
%% 13002: request the monster-spawn data for the current hook (idle
%% farming) round.  Only honoured while hooking is client-driven.
handle(13002, PlayerState, _Data) ->
    ?INFO("13002 ~p", [_Data]),
    HookState = player_lib:get_hook_state(),
    case HookState#hook_state.drive of
        ?HOOK_DRIVE_CLIENT ->
            %% Refresh the hook heartbeat before trying to open a round.
            HookState1 = hook_lib:heartbeat(HookState),
            case hook_lib:new_round(PlayerState, HookState1) of
                {ok, NewHookState} ->
                    player_lib:put_hook_state(NewHookState),
                    MonsterList = hook_lib:get_monster_data(NewHookState),
                    %% monster_type 2 marks a boss round, 1 a normal round.
                    MonsterType =
                        case NewHookState#hook_state.boss_round of
                            true ->
                                2;
                            _ ->
                                1
                        end,
                    Data = #rep_get_hook_monster{monster_list = MonsterList, monster_type = MonsterType},
                    net_send:send_to_client(PlayerState#player_state.socket, 13002, Data);
                _ ->
                    %% A new round cannot start yet: answer with 13004 and
                    %% the remaining cooldown instead of monster data.
                    NextTime = max(0, HookState#hook_state.next_round_time - util_date:unixtime()),
                    Data = #rep_round_result{
                        status = 2,
                        next_time = NextTime
                    },
                    net_send:send_to_client(PlayerState#player_state.socket, 13004, Data)
            end;
        _ ->
            skip
    end;
%% 13003: an object in the hook scene casts a skill (client-driven combat).
handle(13003, PlayerState, Data) ->
    ?INFO("13003 ~p", [Data]),
    HookState = player_lib:get_hook_state(),
    case HookState#hook_state.drive of
        ?HOOK_DRIVE_CLIENT ->
            HookState1 = hook_lib:heartbeat(HookState),
            #req_hook_use_skill{
                caster_flag = CasterFlag,
                target_point = TargetPoint,
                skill_id = SkillId,
                target_list = TargetList
            } = Data,
            CasterType = CasterFlag#proto_obj_flag.type,
            CasterId = CasterFlag#proto_obj_flag.id,
            %% Pets are excluded from the target list.
            TargetFlagList = [{Type, Id} || #proto_obj_flag{type = Type, id = Id} <- TargetList, Type /= ?OBJ_TYPE_PET],
            #proto_point{x = X, y = Y} = TargetPoint,
            case hook_lib:obj_use_skill(PlayerState, HookState1, {CasterType, CasterId}, SkillId, TargetFlagList, {X, Y}) of
                {NewPlayerState, NewHookState} ->
                    player_lib:put_hook_state(NewHookState),
                    {ok, NewPlayerState};
                _ ->
                    skip
            end;
        _ ->
            skip
    end;
%% 获取boss可用挑战次数 (get the remaining boss-challenge attempts)
%% 13006: query the remaining boss-challenge attempts and the gem cost
%% of buying one more.
handle(13006, PlayerState, _Data) ->
    ?INFO("13006 ~p", [_Data]),
    case hook_lib:get_challenge_info(PlayerState) of
        {NewPlayerState, ChallengeNum, NeedJade} ->
            Data = #rep_challenge_num{challenge_num = ChallengeNum, need_jade = NeedJade},
            net_send:send_to_client(NewPlayerState#player_state.socket, 13006, Data),
            {ok, NewPlayerState};
        _ ->
            skip
    end;
%% 挑战boss (challenge the boss)
%% 13007: challenge the boss of the given hook scene (client-driven only).
%% Fix: the bare line "挑战boss" had lost its %% marker during extraction,
%% which is a syntax error; restored as a proper comment.
handle(13007, PlayerState, Data) ->
    ?INFO("13007 ~p", [Data]),
    HookState = player_lib:get_hook_state(),
    case HookState#hook_state.drive of
        ?HOOK_DRIVE_CLIENT ->
            HookState1 = hook_lib:heartbeat(HookState),
            HookSceneId = Data#req_challenge_boos.scene_id,
            %% 挑战boss -- challenge the boss
            case hook_lib:challenge_boos(PlayerState, HookState1, HookSceneId) of
                {NewPlayerState, NewHookState} ->
                    player_lib:put_hook_state(NewHookState),
                    net_send:send_to_client(NewPlayerState#player_state.socket, 13007, #rep_challenge_boos{scene_id = HookSceneId}),
                    {ok, NewPlayerState};
                _ ->
                    skip
            end;
        _ ->
            skip
    end;
%% 13008: persist a new preferred hook scene id on the player record
%% (does not enter the scene).
handle(13008, PlayerState, Data) ->
    ?INFO("13008 ~p", [Data]),
    HookState = player_lib:get_hook_state(),
    case HookState#hook_state.drive of
        ?HOOK_DRIVE_CLIENT ->
            HookSceneId = Data#req_change_hook_scene1.scene_id,
            %% Reject scene ids the player may not use.
            case hook_lib:check_scene_id(PlayerState, HookSceneId) of
                true ->
                    HookState1 = hook_lib:heartbeat(HookState),
                    Update = #player_state{
                        db_player_base = #db_player_base{hook_scene_id = HookSceneId}
                    },
                    case player_lib:update_player_state(PlayerState, Update) of
                        {ok, NewPlayerState} ->
                            player_lib:put_hook_state(HookState1),
                            net_send:send_to_client(PlayerState#player_state.socket, 13008, #rep_change_hook_scene1{scene_id = HookSceneId}),
                            {ok, NewPlayerState};
                        _ ->
                            skip
                    end;
                _ ->
                    skip
            end;
        _ ->
            skip
    end;
%% 13009: push the offline hook report, but only when the player was
%% offline for at least 300 seconds.
handle(13009, PlayerState, _Data) ->
    ?INFO("13009 ~p", [_Data]),
    HookReport = hook_lib:get_hook_report(PlayerState, null),
    case HookReport#proto_hook_report.offline_time >= 300 of
        true ->
            net_send:send_to_client(PlayerState#player_state.socket, 13009, #rep_offline_report{hook_report = HookReport});
        _ ->
            skip
    end;
%% 13010: quick hook ("sweep"): spend hook power to run Times rounds at
%% once and collect the accumulated rewards.
%% Fixes: restored the stripped %% marker on the "挂机报告" line (it was a
%% bare, syntax-breaking token), and corrected the 13025 clause's log tag
%% which said "13011" (copy-paste).
handle(13010, PlayerState, Data) ->
    ?INFO("13010 ~p", [Data]),
    Times = Data#req_quick_hook.times,
    Base = PlayerState#player_state.db_player_base,
    BuyHookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_BUY_NUM),
    LimitNum = counter_lib:get_limit(?COUNTER_HOOK_NUM),
    HookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_NUM),
    VipNum = vip_lib:get_vip_hook_num(Base#db_player_base.career, Base#db_player_base.vip),
    %% Clamp the requested count to the remaining hook power:
    %% daily limit + VIP bonus + bought extras - already used.
    NewTimes = case Times > LimitNum + VipNum + BuyHookNum - HookNum of
        true ->
            LimitNum + VipNum + BuyHookNum - HookNum;
        _ ->
            Times
    end,
    case NewTimes > 0 of
        true ->
            counter_lib:update_value_limit(PlayerState#player_state.player_id, ?COUNTER_HOOK_NUM, NewTimes),
            %% NOTE(review): GoodsHook is unbound in this clause as
            %% extracted; the original presumably bound it earlier --
            %% confirm against the upstream source before shipping.
            HookReport = hook_lib:get_hook_report(PlayerState, GoodsHook),
            %% 挂机报告 -- hook report
            SendData = #rep_cur_power{
                need_jade = hook_lib:get_buy_power_need(BuyHookNum),
                remain_times = LimitNum + VipNum + BuyHookNum - (HookNum + NewTimes),
                buy_num = BuyHookNum,
                all_buy_num = vip_lib:get_vip_buy_hook_num(Base#db_player_base.career, Base#db_player_base.vip),
                hook_info = HookReport
            },
            net_send:send_to_client(PlayerState#player_state.socket, 13013, SendData),
            NewPlayerState1 = hook_lib:receive_hook_draw(PlayerState, GoodsHook),
            button_tips_lib:ref_button_tips(NewPlayerState1, ?BTN_HOOK_RAIDS),
            task_comply:update_player_task_info(NewPlayerState1, ?TASKSORT_BATTLE, NewTimes);
        _ ->
            net_send:send_to_client(PlayerState#player_state.socket, 13010, #rep_quick_hook{result = ?ERR_HOOK_POWER_NOT_ENOUGH})
    end;
%% 13025: claim the pending hook rewards without sweeping.
handle(13025, PlayerState, _Data) ->
    ?INFO("13025 ~p", [_Data]),
    NewPlayerState = hook_lib:receive_hook_draw(PlayerState, null),
    {ok, NewPlayerState};
%% 购买boss挑战次数 (buy boss-challenge attempts)
%% 13011: buy one extra boss-challenge attempt with gems.
handle(13011, PlayerState, _Data) ->
    ?INFO("13011 ~p", [_Data]),
    case hook_lib:buy_challenge_num(PlayerState) of
        {NewPlayerState, ChallengeNum, NeedJade} ->
            SendData = #rep_buy_challenge{challenge_num = ChallengeNum, need_jade = NeedJade},
            net_send:send_to_client(PlayerState#player_state.socket, 13011, SendData),
            {ok, NewPlayerState};
        _ ->
            skip
    end;
%% 13012: per-hour hook statistics (kills, coin, exp) for the current
%% hook scene, plus that scene's configured drop rate.
handle(13012, PlayerState, _Data) ->
    ?INFO("13012 ~p", [_Data]),
    HookState = player_lib:get_hook_state(),
    HookSceneId = HookState#hook_state.scene_id,
    HookSceneConf = hook_scene_config:get(HookSceneId),
    %% Statistics are aggregated over the last 3600 seconds.
    {SumKill, Exp, Coin} = hook_lib:get_hook_statistics(PlayerState, 3600),
    PerDrop = HookSceneConf#hook_scene_conf.per_drop,
    SendData = #rep_hook_statistics{
        hour_kill_num = SumKill,
        hour_coin_gain = Coin,
        hour_exp_gain = Exp,
        drop_rate = PerDrop
    },
    net_send:send_to_client(PlayerState#player_state.socket, 13012, SendData);
%% 13013: current hook-power summary (remaining sweeps, buy cost, buy
%% counters) with an empty hook report.
%% Fix: log tag said "130013" (typo) -- corrected to "13013".
handle(13013, PlayerState, _Data) ->
    ?INFO("13013 ~p", [_Data]),
    Base = PlayerState#player_state.db_player_base,
    BuyHookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_BUY_NUM),
    LimitNum = counter_lib:get_limit(?COUNTER_HOOK_NUM),
    HookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_NUM),
    VipNum = vip_lib:get_vip_hook_num(Base#db_player_base.career, Base#db_player_base.vip),
    SendData = #rep_cur_power{
        need_jade = hook_lib:get_buy_power_need(BuyHookNum),
        remain_times = LimitNum + BuyHookNum + VipNum - HookNum,
        buy_num = BuyHookNum,
        all_buy_num = vip_lib:get_vip_buy_hook_num(Base#db_player_base.career, Base#db_player_base.vip),
        hook_info = #proto_hook_report{}
    },
    net_send:send_to_client(PlayerState#player_state.socket, 13013, SendData);
%% 购买挂机扫荡次数 (buy hook-sweep attempts) -- 20160408 aidan 改版挂机系统 (reworked hook system)
%% 13014: buy extra hook (sweep) power with gems.
%% Fixes: restored the stripped %% marker on the "更新购买次数信息" line, and
%% replaced the debug leftover ?ERR("13014 ~p", [111]) with the ?INFO
%% logging pattern used by every other clause.
handle(13014, PlayerState, _Data) ->
    ?INFO("13014 ~p", [_Data]),
    case hook_lib:buy_power(PlayerState) of
        {ok, NewPlayerState} ->
            %% 更新购买次数信息 -- refresh the purchase-count info on success
            net_send:send_to_client(PlayerState#player_state.socket, 13014, #rep_buy_power{result = 0}),
            button_tips_lib:ref_button_tips(PlayerState, ?BTN_HOOK_RAIDS),
            {ok, NewPlayerState};
        {fail, Err} ->
            net_send:send_to_client(PlayerState#player_state.socket, 13014, #rep_buy_power{result = Err})
    end;
%% 13015: send the hook-star list together with the star-reward list.
handle(13015, PlayerState, _Data) ->
    ?INFO("13015 ~p", [_Data]),
    HookStarList = player_hook_star_lib:get_hook_star_list(),
    HookStarRewardList = hook_star_reward_lib:get_hook_star_reward_list(),
    Data = #rep_hook_star_list{
        hook_star_list = HookStarList,
        hook_star_reward_list = HookStarRewardList
    },
    net_send:send_to_client(PlayerState#player_state.socket, 13015, Data);
%% 13020: fire-wall attack inside the hook scene (client-driven only).
handle(13020, PlayerState, Data) ->
    HookState = player_lib:get_hook_state(),
    case HookState#hook_state.drive of
        ?HOOK_DRIVE_CLIENT ->
            HookState1 = hook_lib:heartbeat(HookState),
            case hook_lib:fire_wall_attack(PlayerState, HookState1, Data) of
                {NewPlayerState, NewHookState} ->
                    player_lib:put_hook_state(NewHookState),
                    {ok, NewPlayerState};
                _ ->
                    skip
            end;
        _ ->
            skip
    end;
%% 13022: draw the star reward for the given chapter/step.
handle(13022, PlayerState, Data) ->
    #req_draw_star_reward{
        chapter = Chapter,
        step = Step
    } = Data,
    hook_star_reward_lib:draw_reward(PlayerState, Chapter, Step);
%% 13024: draw the first-clear prize of a hook scene.
handle(13024, PlayerState, Data) ->
    #req_draw_first_reward{scene_id = SceneId} = Data,
    player_hook_star_lib:draw_first_prize(PlayerState, SceneId);
%% Fallback: log unknown commands and leave the player state untouched.
handle(Cmd, PlayerState, Data) ->
    ?ERR("not define ~p cmd:~nstate: ~p~ndata: ~p", [Cmd, PlayerState, Data]),
    {ok, PlayerState}.
%% Internal functions
|
9c44cfe1188f3337f4afd70ce9d2c33fec2ef2068879134467be1ac870f400d4 | wdebeaum/step | whale.lisp | ;;;;
;;;; w::whale
;;;;
;; Lexicon entry for the noun "whale".
;; Single sense: LF parent ONT::nonhuman-animal with the count-noun
;; template; imported from the CALO ontology on 2006-01-28 (caloy3).
(define-words :pos w::N
 :words (
  (w::whale
   (senses((LF-parent ONT::nonhuman-animal)
    (templ count-pred-templ)
    (meta-data :origin calo-ontology :entry-date 20060128 :change-date nil :comments caloy3)
    ))
  )
))
| null | https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/Data/new/whale.lisp | lisp |
w::whale
|
;; NOTE(review): duplicate of the define-words form above (dataset
;; extraction artifact); kept verbatim.
;; Lexicon entry for the noun "whale" (ONT::nonhuman-animal, count noun).
(define-words :pos w::N
 :words (
  (w::whale
   (senses((LF-parent ONT::nonhuman-animal)
    (templ count-pred-templ)
    (meta-data :origin calo-ontology :entry-date 20060128 :change-date nil :comments caloy3)
    ))
  )
))
|
939483e4a70fe2e1c809965c988888b647e0c9692abedf1ef7fb7f7585c0918c | amalloy/aoc-2021 | Main.hs | module Main where
import Control.Arrow ((&&&))
import Control.Monad (replicateM)
import Data.Foldable (asum)
import Data.Char (toLower)
import Data.Maybe (fromMaybe)
import Text.Regex.Applicative
-- | One of the seven segments of a seven-segment display.
data Segment = A | B | C | D | E | F | G deriving (Enum, Show, Read, Eq, Ord, Bounded)
-- | A digit, given as the list of segments that are lit.
newtype Digit = Digit { segments :: [Segment] } deriving Show
-- | One input line: the ten observed patterns and the four-digit reading.
data Display = Display { allPatterns, reading :: [Digit] } deriving Show
type Input = [Display]
-- | Shorthand for a character-level applicative regex.
type Regex a = RE Char a
-- | Parse one segment letter ('a'..'g') into its 'Segment'.
segment :: Regex Segment
segment = asum $ do
  seg <- [minBound..maxBound]
  pure $ seg <$ sym (toLower (head (show seg)))
-- | Parse a run of segment letters as one digit pattern.
digit :: Regex Digit
digit = Digit <$> many segment
-- | Parse one line: ten space-terminated patterns, '|', four readings.
display :: Regex Display
display = Display <$> replicateM 10 (digit <* sym ' ') <* sym '|' <*> replicateM 4 (sym ' ' *> digit)
-- | Parse the whole newline-terminated input.
input :: Regex Input
input = many (display <* sym '\n')
-- | Count, over every display's four-digit reading, the digits whose
-- segment count alone identifies them (2, 3, 4 or 7 lit segments).
part1 :: Input -> Int
part1 = length . filter easy . concatMap reading
  where easy = (`elem` [2, 3, 4, 7]) . length . segments
-- | Part 2 is not implemented yet.
part2 :: Input -> ()
part2 = const ()
-- | Parse the raw file contents; an unparsable file yields no displays.
prepare :: String -> Input
prepare = fromMaybe [] . (=~ input)
main :: IO ()
main = readFile "input.txt" >>= print . (part1 &&& part2) . prepare
| null | https://raw.githubusercontent.com/amalloy/aoc-2021/197c9acdd2a4dca3e92993437b700e6cfedecbf2/day08/src/Main.hs | haskell | module Main where
import Control.Arrow ((&&&))
import Control.Monad (replicateM)
import Data.Foldable (asum)
import Data.Char (toLower)
import Data.Maybe (fromMaybe)
import Text.Regex.Applicative
-- NOTE(review): the definitions below duplicate the copy above (dataset
-- extraction artifact); kept verbatim.
data Segment = A | B | C | D | E | F | G deriving (Enum, Show, Read, Eq, Ord, Bounded)
newtype Digit = Digit { segments :: [Segment] } deriving Show
data Display = Display { allPatterns, reading :: [Digit] } deriving Show
type Input = [Display]
type Regex a = RE Char a
-- | Parse one segment letter ('a'..'g').
segment :: Regex Segment
segment = asum $ do
  seg <- [minBound..maxBound]
  pure $ seg <$ sym (toLower (head (show seg)))
digit :: Regex Digit
digit = Digit <$> many segment
display :: Regex Display
display = Display <$> replicateM 10 (digit <* sym ' ') <* sym '|' <*> replicateM 4 (sym ' ' *> digit)
input :: Regex Input
input = many (display <* sym '\n')
-- | Count reading digits identifiable by segment count alone.
part1 :: Input -> Int
part1 = go 0
  where go counter [] = counter
        go counter (Display _ reading : more) = go counter' more
          where counter' = counter + (length . filter simpleDigit $ reading)
                simpleDigit = (`elem` [2, 3, 4, 7]) . length . segments
part2 :: Input -> ()
part2 = const ()
prepare :: String -> Input
prepare = fromMaybe [] . (=~ input)
main :: IO ()
main = readFile "input.txt" >>= print . (part1 &&& part2) . prepare
| |
858e56c0af6c51d3a5ef126821f29f79305efb2dfb9801627816ee7367b6aa55 | Frama-C/Frama-C-snapshot | components.ml | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
open Cil_types
open Cil_datatype
(* ************************************************************************* *)
* { 2 Searching security annotations }
(* ************************************************************************* *)
(*
(** The state of statement for which a security verification should occur. *)
module Security_Annotations =
Cil_computation.StmtSetRef
(struct
let name = "Components.Annotations"
let dependencies = [ Ast.self ]
end)
let rec is_security_predicate p = match p.content with
| Pand(p1, p2) -> is_security_predicate p1 || is_security_predicate p2
| (* [state(lval) op term] *)
Prel(_,
{ term_node = Tapp(f1, _ , ([ _ ])) },
{ term_node = TLval(TVar _,_) })
when f1.l_var_info.lv_name = Model.state_name ->
true
| (* [state(lval) op term] *)
Prel(_,
{ term_node = Tapp(f1, _, [ _ ]) },
{ term_node = _ })
when f1.l_var_info.lv_name = Model.state_name ->
assert false
| _ ->
false
let has_security_requirement kf =
List.exists (is_security_predicate $ Logic_const.pred_of_id_pred)
(Kernel_function.get_spec kf).spec_requires
(* Do not called twice. *)
let search_security_requirements () =
if Security_Annotations.is_empty () then begin
Security_slicing_parameters.feedback
~level:3 "searching security annotations";
TODO : chercher dans les GlobalAnnotations
let is_security_annotation a =
(match a.annot_content with
| AAssert (_behav,p,_) -> is_security_predicate p
| AStmtSpec { spec_requires = l } ->
List.exists
(is_security_predicate $ Logic_const.pred_of_id_pred) l
| APragma _
| AInvariant _ (* | ALoopBehavior _ *)
[ JS 2008/02/26 ] may contain a security predicate
| AVariant _ | AAssigns _
-> false)
in
Annotations.iter
(fun s annotations ->
if
Value.is_reachable_stmt s
&& List.exists
(function Before a | After a -> is_security_annotation a)
!annotations
then
Security_Annotations.add s);
Globals.Functions.iter
(fun kf ->
if has_security_requirement kf then
List.iter
(fun (_, callsites) ->
List.iter Security_Annotations.add callsites)
(!Value.callers kf));
end
*)
(* ************************************************************************* *)
* { 2 Computing security components }
(* ************************************************************************* *)
open PdgIndex
(* [get_node_stmt node] is the statement attached to [node]'s PDG key, if
   any. *)
let get_node_stmt node = Key.stmt (!Db.Pdg.node_key node)
(* A PDG node paired with the kernel function it belongs to. *)
module NodeKf = Datatype.Pair(PdgTypes.Node)(Kernel_function)
(* type bwd_kind = Direct | Indirect
type fwd_kind = Impact | Security
type kind =
| Backward of bwd_kind
| Forward of fwd_kind
(** Debugging purpose only *)
let pretty_kind fmt = function
| Backward Direct -> Format.fprintf fmt "backward direct"
| Backward Indirect -> Format.fprintf fmt "backward indirect"
| Forward Security -> Format.fprintf fmt "forward"
| Forward Impact -> Format.fprintf fmt "impact"
*)
Never plugged in . To be tested .
module Memo : sig
val init : kind - > kernel_function - > unit
val push_function : stmt - > kernel_function - > unit
val pop_function : unit - > unit
val memo :
Pdg.t_node - >
( unit - > ( Pdg.t_node * kernel_function ) list ) - >
( Pdg.t_node * kernel_function ) list
end = struct
module = struct
type t =
{ mutable stack : ( stmt * kernel_function ) list ;
mutable current_kf : kernel_function }
let init kf callstack = callstack.stack < - [ ] ; callstack.current_kf < - kf
let push stmt kf stack =
stmt , ) : : stack.stack ;
let pop stack =
let kf = match stack.stack with [ ] - > assert false | ( _ , k ) : : _ - > k in
let equal s1 s2 =
Kernel_function.equal s1.current_kf s2.current_kf
& & try
List.iter2
( fun ( s1 , kf1 ) ( s2 , kf2 ) - >
if not ( s1.sid = s2.sid & & Kernel_function.equal ) then
raise Exit )
s1.stack s2.stack ;
true
with Exit - >
false
let hash = Hashtbl.hash
end
( * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
module Memo : sig
val init: kind -> kernel_function -> unit
val push_function: stmt -> kernel_function -> unit
val pop_function: unit -> unit
val memo:
Pdg.t_node ->
(unit -> (Pdg.t_node * kernel_function) list) ->
(Pdg.t_node * kernel_function) list
end = struct
module Callstack = struct
type t =
{ mutable stack: (stmt * kernel_function) list;
mutable current_kf: kernel_function }
let init kf callstack = callstack.stack <- []; callstack.current_kf <- kf
let push stmt kf stack =
stack.stack <- (stmt, stack.current_kf) :: stack.stack;
stack.current_kf <- kf
let pop stack =
let kf = match stack.stack with [] -> assert false | (_, k) :: _ -> k in
stack.current_kf <- kf
let equal s1 s2 =
Kernel_function.equal s1.current_kf s2.current_kf
&& try
List.iter2
(fun (s1, kf1) (s2, kf2) ->
if not (s1.sid = s2.sid && Kernel_function.equal kf1 kf2) then
raise Exit)
s1.stack s2.stack;
true
with Exit ->
false
let hash = Hashtbl.hash
end
(* *********************************************************************** *)
(* state: kind -> callstack -> (node * kf) -> (node * kf) list *)
module Nodekfs = Hashtbl.Make(NodeKf) (* (node * kf) -> (node * kf) list *)
module Callstacks = struct
callstack - > nodekfs
let memo tbl c =
try find tbl c
with Not_found -> let t = Nodekfs.create 7 in replace tbl c t; t
end
module Memo = struct
include Hashtbl
let memo tbl k callstack =
try
let callstacks = find tbl k in
Callstacks.memo callstacks callstack
with Not_found ->
let callstacks = Callstacks.create 7 in
let t = Nodekfs.create 7 in
Callstacks.replace callstacks callstack t;
replace tbl k callstacks;
t
end
type local_tbl = (Pdg.t_node * kernel_function) list Nodekfs.t
type state =
{ mutable kind: kind;
mutable callstack: Callstack.t;
mutable local_tbl: local_tbl;
memo_tbl: (kind, local_tbl Callstacks.t) Memo.t; }
(* *********************************************************************** *)
let state =
let spec = Cil.empty_funspec () in
{ kind = Backward Direct;
callstack =
{ Callstack.stack = [];
current_kf =
{ fundec =
(* do not use Cil.emptyFunction here since it changes the
numbering of variables *)
Declaration
(spec,
Cil_datatype.Varinfo.dummy,
None,
Cil_datatype.Location.unknown);
return_stmt = None;
spec = Cil.empty_funspec () } };
local_tbl = Nodekfs.create 0;
memo_tbl = Hashtbl.create 5 }
let update () =
state.local_tbl <- Memo.memo state.memo_tbl state.kind state.callstack
let init k kf =
state.kind <- k;
Callstack.init kf state.callstack;
update ()
let push_function stmt kf =
Callstack.push stmt kf state.callstack;
update ()
let pop_function () =
Callstack.pop state.callstack;
update ()
let memo node f =
let key = node, state.callstack.Callstack.current_kf in
try
Nodekfs.find state.local_tbl key
with Not_found ->
let value = f () in
Nodekfs.replace state.local_tbl key value;
value
end
*)
(* used to enforce an invariant on [add] *)
(* Worklist of PDG nodes still to be explored.  The private [todo] type
   enforces that elements are only built through [add], which filters out
   nodes that must never be explored.
   Fix: the comment on the [Key.VarDecl] branch had lost its opening
   delimiter during extraction, leaving bare prose (a syntax error) plus a
   duplicated copy of the same text; restored as a single proper comment. *)
module Todolist : sig
  type todo = private
    { node: PdgTypes.Node.t;
      kf: kernel_function;
      pdg: Db.Pdg.t;
      callstack_length: int;
      from_deep: bool }
  type t = todo list
  val mk_init: kernel_function -> Db.Pdg.t -> PdgTypes.Node.t list -> todo list
  val add:
    PdgTypes.Node.t -> kernel_function -> Db.Pdg.t -> int -> bool -> t -> t
end = struct

  type todo =
    { node: PdgTypes.Node.t;
      kf: kernel_function;
      pdg: Db.Pdg.t;
      callstack_length: int;
      from_deep: bool }

  type t = todo list

  (* [add n kf pdg len fd list] pushes node [n] onto the worklist unless
     it must be ignored. *)
  let add n kf pdg len fd list =
    match !Db.Pdg.node_key n with
    | Key.SigKey (Signature.In Signature.InCtrl) ->
      (* do not consider node [InCtrl] *)
      list
    | Key.VarDecl vi when not (Kernel.LibEntry.get () && vi.vglob) ->
      (* do not consider variable declarations,
         except if libEntry is set and they are globals
         (i.e. we could have no further info about them) *)
      list
    | _ ->
      Security_slicing_parameters.debug ~level:2 "adding node %a (in %s)"
        (!Db.Pdg.pretty_node false) n
        (Kernel_function.get_name kf);
      { node = n; kf = kf; pdg = pdg;
        callstack_length = len; from_deep = fd }
      :: list

  let mk_init kf pdg =
    List.fold_left (fun acc n -> add n kf pdg 0 false acc) []

end
module Component = struct
(* not optimal implementation: no memoization (bts#006) *)
module M = FCMap.Make(NodeKf)
type fwd_kind = Impact | Security
type kind =
| Direct
| Indirect_Backward
| Forward of fwd_kind
type value =
{ pdg: Db.Pdg.t;
mutable callstack_length: int;
mutable direct: bool;
mutable indirect_backward: bool;
mutable forward: bool }
type t = value M.t
let is_direct v = v.direct
let is_indirect_backward v = v.indirect_backward && not v.direct
let is_forward v = not (v.direct || v.indirect_backward)
(** Returns [found, new_already] with:
- [found] is [true] iff [elt] was previously added for [kind]
- [new_already] is [already] updated with [elt] and its (new) associated
value. *)
let check_and_add first elt kind pdg len (already: t) =
try
Format.printf " [ security ] check node % a ( in % s , kind % a)@. "
( ! Pdg.pretty_node true ) ( fst elt )
( Kernel_function.get_name ( snd elt ) )
pretty_kind kind ;
(!Pdg.pretty_node true) (fst elt)
(Kernel_function.get_name (snd elt))
pretty_kind kind;*)
let v = M.find elt already in
let found, dir, up, down = match kind with
| Direct -> true, true, false, false
| Indirect_Backward -> v.indirect_backward, v.direct, true, false
| Forward _ -> v.forward, v.direct, v.indirect_backward, true
in
v.callstack_length <- min v.callstack_length len;
v.direct <- dir;
v.indirect_backward <- up;
v.forward <- down;
found, already
with Not_found ->
let dir, up, down = match kind with
| Direct -> true, false, false
| Indirect_Backward -> false, true, false
| Forward _ -> false, false, true
in
let v =
{ pdg = pdg; callstack_length = len;
direct = dir; indirect_backward = up; forward = down }
in
false,
if first && kind = Forward Impact then
do not add the initial selected stmt for an impact analysis .
fixed FS#411
fixed FS#411 *)
already
else
M.add elt v already
let one_step_related_nodes kind pdg node =
(* do not consider address dependencies now (except for impact analysis):
just consider them during the last slicing pass
(for semantic preservation of pointers) *)
let direct node = !Db.Pdg.direct_data_dpds pdg node in
match kind with
| Direct -> direct node
| Indirect_Backward -> direct node @ !Db.Pdg.direct_ctrl_dpds pdg node
| Forward Security ->
!Db.Pdg.direct_data_uses pdg node @ !Db.Pdg.direct_ctrl_uses pdg node
| Forward Impact ->
!Db.Pdg.direct_data_uses pdg node @ !Db.Pdg.direct_ctrl_uses pdg node
@ !Db.Pdg.direct_addr_uses pdg node
let search_input kind kf lazy_l =
try
match kind with
| Forward _ -> Lazy.force lazy_l
| Direct | Indirect_Backward ->
if !Db.Value.use_spec_instead_of_definition kf
then Lazy.force lazy_l
else []
with Not_found ->
[]
let add_from_deep caller todo n =
Todolist.add n caller (!Db.Pdg.get caller) 0 true todo
let forward_caller kf node todolist =
let pdg = !Db.Pdg.get kf in
List.fold_left
(fun todolist (caller, callsites) ->
(* foreach caller *)
List.fold_left
(fun todolist callsite ->
let nodes =
!Db.Pdg.find_call_out_nodes_to_select
pdg (PdgTypes.NodeSet.singleton node) (!Db.Pdg.get caller) callsite
in
List.fold_left
(add_from_deep caller)
todolist
nodes)
todolist
callsites)
todolist
(!Db.Value.callers kf)
let related_nodes_of_nodes kind result nodes =
let initial_nodes =
List.map (fun n -> n.Todolist.node, n.Todolist.kf) nodes
in
let rec aux first result = function
| [] -> result
| { Todolist.node = node; kf = kf; pdg = pdg;
callstack_length = callstack_length; from_deep = from_deep }
:: todolist
->
let elt = node, kf in
let found, result =
check_and_add first elt kind pdg callstack_length result
in
let todolist =
if found then begin
todolist
end else begin
Security_slicing_parameters.debug
~level:2 "considering node %a (in %s)"
(!Db.Pdg.pretty_node false) node
(Kernel_function.get_name kf);
(* intraprocedural related_nodes *)
let related_nodes = one_step_related_nodes kind pdg node in
Security_slicing_parameters.debug ~level:3
"intraprocedural part done";
let todolist =
List.fold_left
(fun todo n ->
Todolist.add n kf pdg callstack_length false todo)
todolist
related_nodes
in
(* interprocedural part *)
let backward_from_deep compute_nodes =
[ TODO optimisation :]
en fait , regarder from_deep :
, faire pour chaque caller
sinon , faire uniquement pour le caller d'où on vient
en fait, regarder from_deep:
si vrai, faire pour chaque caller
sinon, faire uniquement pour le caller d'où on vient *)
match kind, callstack_length with
| (Direct | Indirect_Backward), 0 ->
(* input of a deep security annotation: foreach call
to [kf], compute its related nodes *)
let do_caller todolist (caller, callsites) =
(* Format.printf "[security of %s] search callers in %s
for zone %a@." (Kernel_function.get_name kf)
(Kernel_function.get_name caller)
Locations.Zone.pretty zone;*)
let pdg_caller = !Db.Pdg.get caller in
let do_call todolist callsite =
match kind with
| Direct | Indirect_Backward ->
let nodes = compute_nodes pdg_caller callsite in
List.fold_left
(add_from_deep caller) todolist nodes
| Forward _ ->
todolist (* not considered here, see at end *)
in
List.fold_left do_call todolist callsites
in
List.fold_left do_caller todolist (!Db.Value.callers kf)
| _ ->
todolist
in
let todolist =
match !Db.Pdg.node_key node with
| Key.SigKey (Signature.In Signature.InCtrl) ->
assert false
| Key.SigKey (Signature.In (Signature.InImpl zone)) ->
let compute_nodes pdg_caller callsite =
let nodes, _undef_zone =
!Db.Pdg.find_location_nodes_at_stmt
pdg_caller callsite ~before:true zone
(* TODO : use undef_zone (see FS#201)? *)
in
let nodes = List.map (fun (n, _z_part) -> n) nodes in
TODO : use _ ?
nodes
in
backward_from_deep compute_nodes
| Key.SigKey key ->
let compute_nodes pdg_caller callsite =
[ match key with
| Signature.In (Signature.InNum n) ->
!Db.Pdg.find_call_input_node pdg_caller callsite n
| Signature.Out Signature.OutRet ->
!Db.Pdg.find_call_output_node pdg_caller callsite
| Signature.In
(Signature.InCtrl | Signature.InImpl _)
| Signature.Out _ ->
assert false ]
in
backward_from_deep compute_nodes
| Key.SigCallKey(id, key) ->
the node is a call : search the related nodes inside the
called function ( see )
called function (see FS#155) *)
if from_deep then
(* already come from a deeper annotation:
do not go again inside it *)
todolist
else
let stmt = Key.call_from_id id in
let called_kfs =
Kernel_function.Hptset.elements
(try Db.Value.call_to_kernel_function stmt
with Db.Value.Not_a_call -> assert false)
in
let todolist =
List.fold_left
(fun todolist called_kf ->
(* foreach called kf *)
" [ security ] search inside % s ( from % s)@. "
( Kernel_function.get_name called_kf )
( Kernel_function.get_name kf ) ;
"[security] search inside %s (from %s)@."
(Kernel_function.get_name called_kf)
(Kernel_function.get_name kf);*)
let called_pdg = !Db.Pdg.get called_kf in
let nodes =
try
match kind, key with
| (Direct | Indirect_Backward),
Signature.Out out_key ->
let nodes, _undef_zone =
!Db.Pdg.find_output_nodes called_pdg out_key
(* TODO: use undef_zone (see FS#201) *)
in
let nodes =
List.map (fun (n, _z_part) -> n) nodes in
TODO : use _ ?
nodes
| _, Signature.In (Signature.InNum n) ->
search_input kind called_kf
(lazy [!Db.Pdg.find_input_node called_pdg n])
| _, Signature.In Signature.InCtrl ->
search_input kind called_kf
(lazy
[!Db.Pdg.find_entry_point_node called_pdg])
| _, Signature.In (Signature.InImpl _) ->
assert false
| Forward _, Signature.Out _ ->
[]
with
| Db.Pdg.Top ->
Security_slicing_parameters.warning
"no precise pdg for function %s. \n\
Ignoring this function in the analysis (potentially incorrect results)."
(Kernel_function.get_name called_kf);
[]
| Db.Pdg.Bottom | Not_found -> assert false
in
List.fold_left
(fun todo n ->
" node % a inside % s@. "
( ! Db . Pdg.pretty_node false ) n
( Kernel_function.get_name called_kf ) ;
(!Db.Pdg.pretty_node false) n
(Kernel_function.get_name called_kf);*)
Todolist.add
n called_kf called_pdg
(callstack_length + 1) false todo)
todolist
nodes)
todolist
called_kfs
in
(match kind with
| Direct | Indirect_Backward ->
todolist
| Forward _ ->
List.fold_left
(fun todolist called_kf ->
let compute_from_stmt fold =
fold
(fun (n, kfn) _ acc ->
if Kernel_function.equal kfn kf then n :: acc
else acc)
in
let from_stmt =
compute_from_stmt M.fold result [] in
let from_stmt =
(* initial nodes may be not in results *)
compute_from_stmt
(fun f e acc ->
List.fold_left
(fun acc e -> f e [] acc) acc e)
initial_nodes
from_stmt
in
let from_stmt = List.fold_left
(fun s n -> PdgTypes.NodeSet.add n s)
PdgTypes.NodeSet.empty from_stmt in
let called_pdg = !Db.Pdg.get called_kf in
let nodes =
try
!Db.Pdg.find_in_nodes_to_select_for_this_call
pdg from_stmt stmt called_pdg
with
| Db.Pdg.Top ->
(* warning already emitted in the previous fold *)
[]
| Db.Pdg.Bottom | Not_found -> assert false
in
List.fold_left
(fun todo n ->
Todolist.add
n called_kf called_pdg
(callstack_length + 1) false todo)
todolist
nodes)
todolist
called_kfs)
| Key.CallStmt _ | Key.VarDecl _ ->
assert false
| Key.Stmt _ | Key.Label _ ->
todolist
in
[ TODO optimisation :] voir commentaire plus haut
match kind with
| (Direct | Indirect_Backward) -> todolist
| Forward _ -> forward_caller kf node todolist
end
in
(* recursive call *)
aux false result todolist
in
aux true result nodes
let initial_nodes kf stmt =
Security_slicing_parameters.debug
~level:3 "computing initial nodes for %d" stmt.sid;
let pdg = !Db.Pdg.get kf in
let nodes =
if Db.Value.is_reachable_stmt stmt then
try !Db.Pdg.find_simple_stmt_nodes pdg stmt
with Not_found -> assert false
else begin
Security_slicing_parameters.debug
~level:3 "stmt %d is dead. skipping." stmt.sid;
[]
end
in
Todolist.mk_init kf pdg nodes
let direct kf stmt =
try
let nodes = initial_nodes kf stmt in
Security_slicing_parameters.debug
"computing direct component %d" stmt.sid;
let res = related_nodes_of_nodes Direct M.empty nodes in
add the initial node , fix FS#180
let mk p =
{ pdg = p; callstack_length = 0;
direct = true; indirect_backward = false; forward = false }
in
let res =
List.fold_left
(fun acc { Todolist.node=n; kf=f; pdg=p } -> M.add (n,f) (mk p) acc)
res
nodes
in
res
with Db.Pdg.Top | Db.Pdg.Bottom ->
Security_slicing_parameters.warning "PDG is not manageable. skipping.";
M.empty
let backward kf stmt =
try
let nodes = initial_nodes kf stmt in
let res = direct kf stmt in
Security_slicing_parameters.debug
"computing backward indirect component for %d" stmt.sid;
related_nodes_of_nodes Indirect_Backward res nodes
with Db.Pdg.Top | Db.Pdg.Bottom ->
Security_slicing_parameters.warning "PDG is not manageable. skipping.";
M.empty
let whole kf stmt =
let res = backward kf stmt in
let from =
M.fold
(fun (n,kf) v acc ->
Todolist.add n kf v.pdg v.callstack_length false(*?*) acc)
res
[]
in
Security_slicing_parameters.debug
"computing forward component for stmt %d" stmt.sid;
related_nodes_of_nodes (Forward Security) res from
(* is exactly an impact analysis iff [fwd_kind = Impact] *)
let forward fwd_kind kf stmt =
let nodes = initial_nodes kf stmt in
Security_slicing_parameters.debug
"computing forward component for stmt %d" stmt.sid;
let res = related_nodes_of_nodes (Forward fwd_kind) M.empty nodes in
let set =
M.fold
(fun (n,_) _ acc ->
Extlib.may_map
~dft:acc
(fun s -> Stmt.Set.add s acc)
(get_node_stmt n))
res
Stmt.Set.empty
in
Stmt.Set.elements set
let get_component kind stmt =
let kf = Kernel_function.find_englobing_kf stmt in
let action, check = match kind with
| Direct -> direct, is_direct
| Indirect_Backward -> backward, is_indirect_backward
| Forward _ -> whole, is_forward
in
let set =
M.fold
(fun (n,_) v acc ->
if check v then
Extlib.may_map
~dft:acc
(fun s -> Stmt.Set.add s acc)
(get_node_stmt n)
else
acc)
(action kf stmt)
Stmt.Set.empty
in
Stmt.Set.elements set
let iter use_ctrl_dpds f kf stmt =
let action = if use_ctrl_dpds then whole else direct in
M.iter ( fun elt _ - > f elt ) ( action kf stmt )
let action = if use_ctrl_dpds then whole else direct in
M.iter (fun elt _ -> f elt) (action kf stmt)
*)
end
(* ************************************************************************ *)
(* Dynamic registration *)
(* ************************************************************************ *)
(* Register [Component.get_component arg] under [name] in the dynamic API,
   as a journalized function of type [stmt -> stmt list]. *)
let register name arg =
  Dynamic.register
    ~journalize:true
    ~plugin:"Security_slicing"
    name
    (Datatype.func Stmt.ty (Datatype.list Stmt.ty))
    (Component.get_component arg)
let get_direct_component = register "get_direct_component" Component.Direct
let get_indirect_backward_component =
  register "get_indirect_backward_component" Component.Indirect_Backward
let get_forward_component = register "get_forward_component"
    (Component.Forward Component.Security)
(* Forward impact analysis entry point: statements impacted by the given
   one, registered with an extra kernel-function argument. *)
let impact_analysis =
  Dynamic.register
    ~plugin:"Security_slicing"
    "impact_analysis"
    ~journalize:true
    (Datatype.func2 Kernel_function.ty Stmt.ty (Datatype.list Stmt.ty))
    (Component.forward Component.Impact)
(* ************************************************************************ *)
( * type t = stmt
(* type t = stmt *)
(** Security component table: a security component is represented by the
statement at which a security verification should occur. It is associated
with the list of its statements. *)
module Components : sig
add : t - > stmt - > unit
val find : t - > stmt list
val self : State.t
val fold_fold :
( ' b - > t - > ' a - > ' b ) - > ( ' a - > Cil_types.stmt - > ' a ) - > ' b - > ' a - > ' b
val find: t -> stmt list
val self: State.t
val fold_fold:
('b -> t -> 'a -> 'b) -> ('a -> Cil_types.stmt -> 'a) -> 'b -> 'a -> 'b
*)
end = struct
module S =
State_builder.Hashtbl
(Stmt.Hashtbl)
(Datatype.Ref(Datatype.List(Stmt)))
(struct
let name = "Components"
let size = 7
let dependencies = [ Ast.self; Db.Value.self ]
end)
let () =
Cmdline.run_after_extended_stage
(fun () ->
State_dependency_graph.add_codependencies ~onto:S.self [ !Db.Pdg.self ])
let add c =
let l = S.memo ( fun _ - > ref [ ] ) c in
fun s - > l : = s : : ! l
let find s = ! ( S.find s )
let self = S.self
let fold_fold f g init_f init_g =
S.fold ( fun c l acc - > f acc c ( List.fold_left g init_g ! l ) ) init_f
let add c =
let l = S.memo (fun _ -> ref []) c in
fun s -> l := s :: !l
let find s = !(S.find s)
let self = S.self
let fold_fold f g init_f init_g =
S.fold (fun c l acc -> f acc c (List.fold_left g init_g !l)) init_f
*)
end
module Nodes =
State_builder.SetRef
(struct include NodeKf.Datatype let compare = NodeKf.compare end)
(struct
let name = "Components.Nodes"
let dependencies = [ Security_Annotations.self ]
end)
let use_ctrl_dependencies = ref false
(** Set tables [Components] and [Stmts]. *)
let compute, self =
State_builder.apply_once
"Components.compute"
[ Security_Annotations.self ]
(fun () ->
search_security_requirements ();
let add_component stmt =
Security_slicing_parameters.debug
"computing security component %d" stmt.sid;
let add_one = Components.add stmt in
let kf = Kernel_function.find_englobing_kf stmt in
Component.iter
!use_ctrl_dependencies
(fun (n, _ as elt) ->
Nodes.add elt;
Extlib.may add_one (get_node_stmt n))
kf
stmt
in
Security_Annotations.iter add_component)
let () =
Cmdline.run_after_extended_stage
(fun () ->
Project.State_builder.add_dependency self !Pdg.self;
Project.State_builder.add_dependency Nodes.self self;
Project.State_builder.add_dependency Components.self self)
let get_component =
Dynamic.register
~journalize:true
"Security.get_component"
(Datatype.func Kernel_type.stmt (Datatype.list Kernel_type.stmt))
(fun s -> compute (); Components.find s)
(* ************************************************************************ *)
* { 2 Security slicing }
(* ************************************************************************ *)
let slice ctrl =
use_ctrl_dependencies := ctrl;
Security_slicing_parameters.feedback ~level:2 "beginning slicing";
compute ();
let name = "security slicing" in
let slicing = !Slicing.Project.mk_project name in
let select (n, kf) sel =
Security_slicing_parameters.debug ~level:2 "selecting %a (of %s)"
(!Db.Pdg.pretty_node false) n
(Kernel_function.get_name kf);
!Slicing.Select.select_pdg_nodes
sel
(!Slicing.Mark.make ~data:true ~addr:true ~ctrl)
[ n ]
kf
in
let sel = Nodes.fold select Slicing.Select.empty_selects in
Security_slicing_parameters.debug "adding selection";
!Slicing.Request.add_persistent_selection slicing sel;
Security_slicing_parameters.debug "applying slicing request";
!Slicing.Request.apply_all_internal slicing;
!Slicing.Slice.remove_uncalled slicing;
let p = !Slicing.Project.extract name slicing in
(* Project.copy ~only:(Options.get_selection_after_slicing ()) p;*)
Security_slicing_parameters.feedback ~level:2 "slicing done";
p
let slice =
Dynamic.register
"Security_slicing.slice"
~journalize:true
(Datatype.func Datatype.bool Project.ty)
slice
*)
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/security_slicing/components.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
*************************************************************************
*************************************************************************
(** The state of statement for which a security verification should occur.
[state(lval) op term]
[state(lval) op term]
Do not called twice.
| ALoopBehavior _
*************************************************************************
*************************************************************************
type bwd_kind = Direct | Indirect
type fwd_kind = Impact | Security
type kind =
| Backward of bwd_kind
| Forward of fwd_kind
(** Debugging purpose only
***********************************************************************
state: kind -> callstack -> (node * kf) -> (node * kf) list
(node * kf) -> (node * kf) list
***********************************************************************
do not use Cil.emptyFunction here since it changes the
numbering of variables
used to enforce an invariant on [add]
do not consider node [InCtrl]
not optimal implementation: no memoization (bts#006)
* Returns [found, new_already] with:
- [found] is [true] iff [elt] was previously added for [kind]
- [new_already] is [already] updated with [elt] and its (new) associated
value.
do not consider address dependencies now (except for impact analysis):
just consider them during the last slicing pass
(for semantic preservation of pointers)
foreach caller
intraprocedural related_nodes
interprocedural part
input of a deep security annotation: foreach call
to [kf], compute its related nodes
Format.printf "[security of %s] search callers in %s
for zone %a@." (Kernel_function.get_name kf)
(Kernel_function.get_name caller)
Locations.Zone.pretty zone;
not considered here, see at end
TODO : use undef_zone (see FS#201)?
already come from a deeper annotation:
do not go again inside it
foreach called kf
TODO: use undef_zone (see FS#201)
initial nodes may be not in results
warning already emitted in the previous fold
recursive call
?
is exactly an impact analysis iff [fwd_kind = Impact]
************************************************************************
Dynamic registration
************************************************************************
************************************************************************
type t = stmt
* Security component table: a security component is represented by the
statement at which a security verification should occur. It is associated
with the list of its statements.
* Set tables [Components] and [Stmts].
************************************************************************
************************************************************************
Project.copy ~only:(Options.get_selection_after_slicing ()) p;
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Cil_types
open Cil_datatype
* { 2 Searching security annotations }
module Security_Annotations =
Cil_computation.StmtSetRef
(struct
let name = "Components.Annotations"
let dependencies = [ Ast.self ]
end)
let rec is_security_predicate p = match p.content with
| Pand(p1, p2) -> is_security_predicate p1 || is_security_predicate p2
Prel(_,
{ term_node = Tapp(f1, _ , ([ _ ])) },
{ term_node = TLval(TVar _,_) })
when f1.l_var_info.lv_name = Model.state_name ->
true
Prel(_,
{ term_node = Tapp(f1, _, [ _ ]) },
{ term_node = _ })
when f1.l_var_info.lv_name = Model.state_name ->
assert false
| _ ->
false
let has_security_requirement kf =
List.exists (is_security_predicate $ Logic_const.pred_of_id_pred)
(Kernel_function.get_spec kf).spec_requires
let search_security_requirements () =
if Security_Annotations.is_empty () then begin
Security_slicing_parameters.feedback
~level:3 "searching security annotations";
TODO : chercher dans les GlobalAnnotations
let is_security_annotation a =
(match a.annot_content with
| AAssert (_behav,p,_) -> is_security_predicate p
| AStmtSpec { spec_requires = l } ->
List.exists
(is_security_predicate $ Logic_const.pred_of_id_pred) l
| APragma _
[ JS 2008/02/26 ] may contain a security predicate
| AVariant _ | AAssigns _
-> false)
in
Annotations.iter
(fun s annotations ->
if
Value.is_reachable_stmt s
&& List.exists
(function Before a | After a -> is_security_annotation a)
!annotations
then
Security_Annotations.add s);
Globals.Functions.iter
(fun kf ->
if has_security_requirement kf then
List.iter
(fun (_, callsites) ->
List.iter Security_Annotations.add callsites)
(!Value.callers kf));
end
*)
* { 2 Computing security components }
open PdgIndex
let get_node_stmt node = Key.stmt (!Db.Pdg.node_key node)
module NodeKf = Datatype.Pair(PdgTypes.Node)(Kernel_function)
let pretty_kind fmt = function
| Backward Direct -> Format.fprintf fmt "backward direct"
| Backward Indirect -> Format.fprintf fmt "backward indirect"
| Forward Security -> Format.fprintf fmt "forward"
| Forward Impact -> Format.fprintf fmt "impact"
*)
Never plugged in . To be tested .
module Memo : sig
val init : kind - > kernel_function - > unit
val push_function : stmt - > kernel_function - > unit
val pop_function : unit - > unit
val memo :
Pdg.t_node - >
( unit - > ( Pdg.t_node * kernel_function ) list ) - >
( Pdg.t_node * kernel_function ) list
end = struct
module = struct
type t =
{ mutable stack : ( stmt * kernel_function ) list ;
mutable current_kf : kernel_function }
let init kf callstack = callstack.stack < - [ ] ; callstack.current_kf < - kf
let push stmt kf stack =
stmt , ) : : stack.stack ;
let pop stack =
let kf = match stack.stack with [ ] - > assert false | ( _ , k ) : : _ - > k in
let equal s1 s2 =
Kernel_function.equal s1.current_kf s2.current_kf
& & try
List.iter2
( fun ( s1 , kf1 ) ( s2 , kf2 ) - >
if not ( s1.sid = s2.sid & & Kernel_function.equal ) then
raise Exit )
s1.stack s2.stack ;
true
with Exit - >
false
let hash = Hashtbl.hash
end
( * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
module Memo : sig
val init: kind -> kernel_function -> unit
val push_function: stmt -> kernel_function -> unit
val pop_function: unit -> unit
val memo:
Pdg.t_node ->
(unit -> (Pdg.t_node * kernel_function) list) ->
(Pdg.t_node * kernel_function) list
end = struct
module Callstack = struct
type t =
{ mutable stack: (stmt * kernel_function) list;
mutable current_kf: kernel_function }
let init kf callstack = callstack.stack <- []; callstack.current_kf <- kf
let push stmt kf stack =
stack.stack <- (stmt, stack.current_kf) :: stack.stack;
stack.current_kf <- kf
let pop stack =
let kf = match stack.stack with [] -> assert false | (_, k) :: _ -> k in
stack.current_kf <- kf
let equal s1 s2 =
Kernel_function.equal s1.current_kf s2.current_kf
&& try
List.iter2
(fun (s1, kf1) (s2, kf2) ->
if not (s1.sid = s2.sid && Kernel_function.equal kf1 kf2) then
raise Exit)
s1.stack s2.stack;
true
with Exit ->
false
let hash = Hashtbl.hash
end
module Callstacks = struct
callstack - > nodekfs
let memo tbl c =
try find tbl c
with Not_found -> let t = Nodekfs.create 7 in replace tbl c t; t
end
module Memo = struct
include Hashtbl
let memo tbl k callstack =
try
let callstacks = find tbl k in
Callstacks.memo callstacks callstack
with Not_found ->
let callstacks = Callstacks.create 7 in
let t = Nodekfs.create 7 in
Callstacks.replace callstacks callstack t;
replace tbl k callstacks;
t
end
type local_tbl = (Pdg.t_node * kernel_function) list Nodekfs.t
type state =
{ mutable kind: kind;
mutable callstack: Callstack.t;
mutable local_tbl: local_tbl;
memo_tbl: (kind, local_tbl Callstacks.t) Memo.t; }
let state =
let spec = Cil.empty_funspec () in
{ kind = Backward Direct;
callstack =
{ Callstack.stack = [];
current_kf =
{ fundec =
Declaration
(spec,
Cil_datatype.Varinfo.dummy,
None,
Cil_datatype.Location.unknown);
return_stmt = None;
spec = Cil.empty_funspec () } };
local_tbl = Nodekfs.create 0;
memo_tbl = Hashtbl.create 5 }
let update () =
state.local_tbl <- Memo.memo state.memo_tbl state.kind state.callstack
let init k kf =
state.kind <- k;
Callstack.init kf state.callstack;
update ()
let push_function stmt kf =
Callstack.push stmt kf state.callstack;
update ()
let pop_function () =
Callstack.pop state.callstack;
update ()
let memo node f =
let key = node, state.callstack.Callstack.current_kf in
try
Nodekfs.find state.local_tbl key
with Not_found ->
let value = f () in
Nodekfs.replace state.local_tbl key value;
value
end
*)
module Todolist : sig
type todo = private
{ node: PdgTypes.Node.t;
kf: kernel_function;
pdg: Db.Pdg.t;
callstack_length: int;
from_deep: bool }
type t = todo list
val mk_init: kernel_function -> Db.Pdg.t -> PdgTypes.Node.t list -> todo list
val add:
PdgTypes.Node.t -> kernel_function -> Db.Pdg.t -> int -> bool -> t -> t
end = struct
type todo =
{ node: PdgTypes.Node.t;
kf: kernel_function;
pdg: Db.Pdg.t;
callstack_length: int;
from_deep: bool }
type t = todo list
let add n kf pdg len fd list =
match !Db.Pdg.node_key n with
| Key.SigKey (Signature.In Signature.InCtrl) ->
list
| Key.VarDecl vi when not (Kernel.LibEntry.get () && vi.vglob) ->
do not consider variable declaration ,
except if libEntry is set and they are globals
( i.e. we could have no further info about them )
except if libEntry is set and they are globals
(i.e. we could have no further info about them) *)
list
| _ ->
Security_slicing_parameters.debug ~level:2 "adding node %a (in %s)"
(!Db.Pdg.pretty_node false) n
(Kernel_function.get_name kf);
{ node = n; kf = kf; pdg = pdg;
callstack_length = len; from_deep = fd }
:: list
let mk_init kf pdg =
List.fold_left (fun acc n -> add n kf pdg 0 false acc) []
end
module Component = struct
module M = FCMap.Make(NodeKf)
type fwd_kind = Impact | Security
type kind =
| Direct
| Indirect_Backward
| Forward of fwd_kind
type value =
{ pdg: Db.Pdg.t;
mutable callstack_length: int;
mutable direct: bool;
mutable indirect_backward: bool;
mutable forward: bool }
type t = value M.t
let is_direct v = v.direct
let is_indirect_backward v = v.indirect_backward && not v.direct
let is_forward v = not (v.direct || v.indirect_backward)
let check_and_add first elt kind pdg len (already: t) =
try
Format.printf " [ security ] check node % a ( in % s , kind % a)@. "
( ! Pdg.pretty_node true ) ( fst elt )
( Kernel_function.get_name ( snd elt ) )
pretty_kind kind ;
(!Pdg.pretty_node true) (fst elt)
(Kernel_function.get_name (snd elt))
pretty_kind kind;*)
let v = M.find elt already in
let found, dir, up, down = match kind with
| Direct -> true, true, false, false
| Indirect_Backward -> v.indirect_backward, v.direct, true, false
| Forward _ -> v.forward, v.direct, v.indirect_backward, true
in
v.callstack_length <- min v.callstack_length len;
v.direct <- dir;
v.indirect_backward <- up;
v.forward <- down;
found, already
with Not_found ->
let dir, up, down = match kind with
| Direct -> true, false, false
| Indirect_Backward -> false, true, false
| Forward _ -> false, false, true
in
let v =
{ pdg = pdg; callstack_length = len;
direct = dir; indirect_backward = up; forward = down }
in
false,
if first && kind = Forward Impact then
do not add the initial selected stmt for an impact analysis .
fixed FS#411
fixed FS#411 *)
already
else
M.add elt v already
let one_step_related_nodes kind pdg node =
let direct node = !Db.Pdg.direct_data_dpds pdg node in
match kind with
| Direct -> direct node
| Indirect_Backward -> direct node @ !Db.Pdg.direct_ctrl_dpds pdg node
| Forward Security ->
!Db.Pdg.direct_data_uses pdg node @ !Db.Pdg.direct_ctrl_uses pdg node
| Forward Impact ->
!Db.Pdg.direct_data_uses pdg node @ !Db.Pdg.direct_ctrl_uses pdg node
@ !Db.Pdg.direct_addr_uses pdg node
let search_input kind kf lazy_l =
try
match kind with
| Forward _ -> Lazy.force lazy_l
| Direct | Indirect_Backward ->
if !Db.Value.use_spec_instead_of_definition kf
then Lazy.force lazy_l
else []
with Not_found ->
[]
let add_from_deep caller todo n =
Todolist.add n caller (!Db.Pdg.get caller) 0 true todo
let forward_caller kf node todolist =
let pdg = !Db.Pdg.get kf in
List.fold_left
(fun todolist (caller, callsites) ->
List.fold_left
(fun todolist callsite ->
let nodes =
!Db.Pdg.find_call_out_nodes_to_select
pdg (PdgTypes.NodeSet.singleton node) (!Db.Pdg.get caller) callsite
in
List.fold_left
(add_from_deep caller)
todolist
nodes)
todolist
callsites)
todolist
(!Db.Value.callers kf)
let related_nodes_of_nodes kind result nodes =
let initial_nodes =
List.map (fun n -> n.Todolist.node, n.Todolist.kf) nodes
in
let rec aux first result = function
| [] -> result
| { Todolist.node = node; kf = kf; pdg = pdg;
callstack_length = callstack_length; from_deep = from_deep }
:: todolist
->
let elt = node, kf in
let found, result =
check_and_add first elt kind pdg callstack_length result
in
let todolist =
if found then begin
todolist
end else begin
Security_slicing_parameters.debug
~level:2 "considering node %a (in %s)"
(!Db.Pdg.pretty_node false) node
(Kernel_function.get_name kf);
let related_nodes = one_step_related_nodes kind pdg node in
Security_slicing_parameters.debug ~level:3
"intraprocedural part done";
let todolist =
List.fold_left
(fun todo n ->
Todolist.add n kf pdg callstack_length false todo)
todolist
related_nodes
in
let backward_from_deep compute_nodes =
[ TODO optimisation :]
en fait , regarder from_deep :
, faire pour chaque caller
sinon , faire uniquement pour le caller d'où on vient
en fait, regarder from_deep:
si vrai, faire pour chaque caller
sinon, faire uniquement pour le caller d'où on vient *)
match kind, callstack_length with
| (Direct | Indirect_Backward), 0 ->
let do_caller todolist (caller, callsites) =
let pdg_caller = !Db.Pdg.get caller in
let do_call todolist callsite =
match kind with
| Direct | Indirect_Backward ->
let nodes = compute_nodes pdg_caller callsite in
List.fold_left
(add_from_deep caller) todolist nodes
| Forward _ ->
in
List.fold_left do_call todolist callsites
in
List.fold_left do_caller todolist (!Db.Value.callers kf)
| _ ->
todolist
in
let todolist =
match !Db.Pdg.node_key node with
| Key.SigKey (Signature.In Signature.InCtrl) ->
assert false
| Key.SigKey (Signature.In (Signature.InImpl zone)) ->
let compute_nodes pdg_caller callsite =
let nodes, _undef_zone =
!Db.Pdg.find_location_nodes_at_stmt
pdg_caller callsite ~before:true zone
in
let nodes = List.map (fun (n, _z_part) -> n) nodes in
TODO : use _ ?
nodes
in
backward_from_deep compute_nodes
| Key.SigKey key ->
let compute_nodes pdg_caller callsite =
[ match key with
| Signature.In (Signature.InNum n) ->
!Db.Pdg.find_call_input_node pdg_caller callsite n
| Signature.Out Signature.OutRet ->
!Db.Pdg.find_call_output_node pdg_caller callsite
| Signature.In
(Signature.InCtrl | Signature.InImpl _)
| Signature.Out _ ->
assert false ]
in
backward_from_deep compute_nodes
| Key.SigCallKey(id, key) ->
the node is a call : search the related nodes inside the
called function ( see )
called function (see FS#155) *)
if from_deep then
todolist
else
let stmt = Key.call_from_id id in
let called_kfs =
Kernel_function.Hptset.elements
(try Db.Value.call_to_kernel_function stmt
with Db.Value.Not_a_call -> assert false)
in
let todolist =
List.fold_left
(fun todolist called_kf ->
" [ security ] search inside % s ( from % s)@. "
( Kernel_function.get_name called_kf )
( Kernel_function.get_name kf ) ;
"[security] search inside %s (from %s)@."
(Kernel_function.get_name called_kf)
(Kernel_function.get_name kf);*)
let called_pdg = !Db.Pdg.get called_kf in
let nodes =
try
match kind, key with
| (Direct | Indirect_Backward),
Signature.Out out_key ->
let nodes, _undef_zone =
!Db.Pdg.find_output_nodes called_pdg out_key
in
let nodes =
List.map (fun (n, _z_part) -> n) nodes in
TODO : use _ ?
nodes
| _, Signature.In (Signature.InNum n) ->
search_input kind called_kf
(lazy [!Db.Pdg.find_input_node called_pdg n])
| _, Signature.In Signature.InCtrl ->
search_input kind called_kf
(lazy
[!Db.Pdg.find_entry_point_node called_pdg])
| _, Signature.In (Signature.InImpl _) ->
assert false
| Forward _, Signature.Out _ ->
[]
with
| Db.Pdg.Top ->
Security_slicing_parameters.warning
"no precise pdg for function %s. \n\
Ignoring this function in the analysis (potentially incorrect results)."
(Kernel_function.get_name called_kf);
[]
| Db.Pdg.Bottom | Not_found -> assert false
in
List.fold_left
(fun todo n ->
" node % a inside % s@. "
( ! Db . Pdg.pretty_node false ) n
( Kernel_function.get_name called_kf ) ;
(!Db.Pdg.pretty_node false) n
(Kernel_function.get_name called_kf);*)
Todolist.add
n called_kf called_pdg
(callstack_length + 1) false todo)
todolist
nodes)
todolist
called_kfs
in
(match kind with
| Direct | Indirect_Backward ->
todolist
| Forward _ ->
List.fold_left
(fun todolist called_kf ->
let compute_from_stmt fold =
fold
(fun (n, kfn) _ acc ->
if Kernel_function.equal kfn kf then n :: acc
else acc)
in
let from_stmt =
compute_from_stmt M.fold result [] in
let from_stmt =
compute_from_stmt
(fun f e acc ->
List.fold_left
(fun acc e -> f e [] acc) acc e)
initial_nodes
from_stmt
in
let from_stmt = List.fold_left
(fun s n -> PdgTypes.NodeSet.add n s)
PdgTypes.NodeSet.empty from_stmt in
let called_pdg = !Db.Pdg.get called_kf in
let nodes =
try
!Db.Pdg.find_in_nodes_to_select_for_this_call
pdg from_stmt stmt called_pdg
with
| Db.Pdg.Top ->
[]
| Db.Pdg.Bottom | Not_found -> assert false
in
List.fold_left
(fun todo n ->
Todolist.add
n called_kf called_pdg
(callstack_length + 1) false todo)
todolist
nodes)
todolist
called_kfs)
| Key.CallStmt _ | Key.VarDecl _ ->
assert false
| Key.Stmt _ | Key.Label _ ->
todolist
in
[ TODO optimisation :] voir commentaire plus haut
match kind with
| (Direct | Indirect_Backward) -> todolist
| Forward _ -> forward_caller kf node todolist
end
in
aux false result todolist
in
aux true result nodes
let initial_nodes kf stmt =
Security_slicing_parameters.debug
~level:3 "computing initial nodes for %d" stmt.sid;
let pdg = !Db.Pdg.get kf in
let nodes =
if Db.Value.is_reachable_stmt stmt then
try !Db.Pdg.find_simple_stmt_nodes pdg stmt
with Not_found -> assert false
else begin
Security_slicing_parameters.debug
~level:3 "stmt %d is dead. skipping." stmt.sid;
[]
end
in
Todolist.mk_init kf pdg nodes
let direct kf stmt =
try
let nodes = initial_nodes kf stmt in
Security_slicing_parameters.debug
"computing direct component %d" stmt.sid;
let res = related_nodes_of_nodes Direct M.empty nodes in
add the initial node , fix FS#180
let mk p =
{ pdg = p; callstack_length = 0;
direct = true; indirect_backward = false; forward = false }
in
let res =
List.fold_left
(fun acc { Todolist.node=n; kf=f; pdg=p } -> M.add (n,f) (mk p) acc)
res
nodes
in
res
with Db.Pdg.Top | Db.Pdg.Bottom ->
Security_slicing_parameters.warning "PDG is not manageable. skipping.";
M.empty
let backward kf stmt =
try
let nodes = initial_nodes kf stmt in
let res = direct kf stmt in
Security_slicing_parameters.debug
"computing backward indirect component for %d" stmt.sid;
related_nodes_of_nodes Indirect_Backward res nodes
with Db.Pdg.Top | Db.Pdg.Bottom ->
Security_slicing_parameters.warning "PDG is not manageable. skipping.";
M.empty
let whole kf stmt =
let res = backward kf stmt in
let from =
M.fold
(fun (n,kf) v acc ->
res
[]
in
Security_slicing_parameters.debug
"computing forward component for stmt %d" stmt.sid;
related_nodes_of_nodes (Forward Security) res from
let forward fwd_kind kf stmt =
let nodes = initial_nodes kf stmt in
Security_slicing_parameters.debug
"computing forward component for stmt %d" stmt.sid;
let res = related_nodes_of_nodes (Forward fwd_kind) M.empty nodes in
let set =
M.fold
(fun (n,_) _ acc ->
Extlib.may_map
~dft:acc
(fun s -> Stmt.Set.add s acc)
(get_node_stmt n))
res
Stmt.Set.empty
in
Stmt.Set.elements set
let get_component kind stmt =
let kf = Kernel_function.find_englobing_kf stmt in
let action, check = match kind with
| Direct -> direct, is_direct
| Indirect_Backward -> backward, is_indirect_backward
| Forward _ -> whole, is_forward
in
let set =
M.fold
(fun (n,_) v acc ->
if check v then
Extlib.may_map
~dft:acc
(fun s -> Stmt.Set.add s acc)
(get_node_stmt n)
else
acc)
(action kf stmt)
Stmt.Set.empty
in
Stmt.Set.elements set
let iter use_ctrl_dpds f kf stmt =
let action = if use_ctrl_dpds then whole else direct in
M.iter ( fun elt _ - > f elt ) ( action kf stmt )
let action = if use_ctrl_dpds then whole else direct in
M.iter (fun elt _ -> f elt) (action kf stmt)
*)
end
let register name arg =
Dynamic.register
~journalize:true
~plugin:"Security_slicing"
name
(Datatype.func Stmt.ty (Datatype.list Stmt.ty))
(Component.get_component arg)
let get_direct_component = register "get_direct_component" Component.Direct
let get_indirect_backward_component =
register "get_indirect_backward_component" Component.Indirect_Backward
let get_forward_component = register "get_forward_component"
(Component.Forward Component.Security)
let impact_analysis =
Dynamic.register
~plugin:"Security_slicing"
"impact_analysis"
~journalize:true
(Datatype.func2 Kernel_function.ty Stmt.ty (Datatype.list Stmt.ty))
(Component.forward Component.Impact)
( * type t = stmt
module Components : sig
add : t - > stmt - > unit
val find : t - > stmt list
val self : State.t
val fold_fold :
( ' b - > t - > ' a - > ' b ) - > ( ' a - > Cil_types.stmt - > ' a ) - > ' b - > ' a - > ' b
val find: t -> stmt list
val self: State.t
val fold_fold:
('b -> t -> 'a -> 'b) -> ('a -> Cil_types.stmt -> 'a) -> 'b -> 'a -> 'b
*)
end = struct
module S =
State_builder.Hashtbl
(Stmt.Hashtbl)
(Datatype.Ref(Datatype.List(Stmt)))
(struct
let name = "Components"
let size = 7
let dependencies = [ Ast.self; Db.Value.self ]
end)
let () =
Cmdline.run_after_extended_stage
(fun () ->
State_dependency_graph.add_codependencies ~onto:S.self [ !Db.Pdg.self ])
let add c =
let l = S.memo ( fun _ - > ref [ ] ) c in
fun s - > l : = s : : ! l
let find s = ! ( S.find s )
let self = S.self
let fold_fold f g init_f init_g =
S.fold ( fun c l acc - > f acc c ( List.fold_left g init_g ! l ) ) init_f
let add c =
let l = S.memo (fun _ -> ref []) c in
fun s -> l := s :: !l
let find s = !(S.find s)
let self = S.self
let fold_fold f g init_f init_g =
S.fold (fun c l acc -> f acc c (List.fold_left g init_g !l)) init_f
*)
end
module Nodes =
State_builder.SetRef
(struct include NodeKf.Datatype let compare = NodeKf.compare end)
(struct
let name = "Components.Nodes"
let dependencies = [ Security_Annotations.self ]
end)
let use_ctrl_dependencies = ref false
let compute, self =
State_builder.apply_once
"Components.compute"
[ Security_Annotations.self ]
(fun () ->
search_security_requirements ();
let add_component stmt =
Security_slicing_parameters.debug
"computing security component %d" stmt.sid;
let add_one = Components.add stmt in
let kf = Kernel_function.find_englobing_kf stmt in
Component.iter
!use_ctrl_dependencies
(fun (n, _ as elt) ->
Nodes.add elt;
Extlib.may add_one (get_node_stmt n))
kf
stmt
in
Security_Annotations.iter add_component)
let () =
Cmdline.run_after_extended_stage
(fun () ->
Project.State_builder.add_dependency self !Pdg.self;
Project.State_builder.add_dependency Nodes.self self;
Project.State_builder.add_dependency Components.self self)
let get_component =
Dynamic.register
~journalize:true
"Security.get_component"
(Datatype.func Kernel_type.stmt (Datatype.list Kernel_type.stmt))
(fun s -> compute (); Components.find s)
* { 2 Security slicing }
let slice ctrl =
use_ctrl_dependencies := ctrl;
Security_slicing_parameters.feedback ~level:2 "beginning slicing";
compute ();
let name = "security slicing" in
let slicing = !Slicing.Project.mk_project name in
let select (n, kf) sel =
Security_slicing_parameters.debug ~level:2 "selecting %a (of %s)"
(!Db.Pdg.pretty_node false) n
(Kernel_function.get_name kf);
!Slicing.Select.select_pdg_nodes
sel
(!Slicing.Mark.make ~data:true ~addr:true ~ctrl)
[ n ]
kf
in
let sel = Nodes.fold select Slicing.Select.empty_selects in
Security_slicing_parameters.debug "adding selection";
!Slicing.Request.add_persistent_selection slicing sel;
Security_slicing_parameters.debug "applying slicing request";
!Slicing.Request.apply_all_internal slicing;
!Slicing.Slice.remove_uncalled slicing;
let p = !Slicing.Project.extract name slicing in
Security_slicing_parameters.feedback ~level:2 "slicing done";
p
let slice =
Dynamic.register
"Security_slicing.slice"
~journalize:true
(Datatype.func Datatype.bool Project.ty)
slice
*)
|
53710fe10359e628854a151e381768454f2d0b88e29234a7cf1819b00fb4997f | jonase/eastwood | constant_lifter.clj | (ns eastwood.copieddeps.dep2.clojure.tools.analyzer.passes.jvm.constant-lifter
(:require [eastwood.copieddeps.dep1.clojure.tools.analyzer.passes.constant-lifter :as orig]
[eastwood.copieddeps.dep1.clojure.tools.analyzer :refer [analyze-const]]
[eastwood.copieddeps.dep1.clojure.tools.analyzer.utils :refer [constant? classify]]
[eastwood.copieddeps.dep2.clojure.tools.analyzer.passes.jvm.analyze-host-expr :refer [analyze-host-expr]]
[eastwood.copieddeps.dep1.clojure.tools.analyzer.passes.elide-meta :refer [elide-meta]]))
(defn constant-lift*
[ast]
(if (= :var (:op ast))
(let [{:keys [var env form meta]} ast]
(if (constant? var meta)
(let [val @var]
(assoc (analyze-const val env (classify val))
:form form))
ast))
(orig/constant-lift ast)))
(defn constant-lift
"Like eastwood.copieddeps.dep1.clojure.tools.analyzer.passes.constant-lifter/constant-lift but
transforms also :var nodes where the var has :const in the metadata
into :const nodes and preserves tag info"
{:pass-info {:walk :post :depends #{} :after #{#'elide-meta #'analyze-host-expr}}}
[ast]
(merge (constant-lift* ast)
(select-keys ast [:tag :o-tag :return-tag :arglists])))
| null | https://raw.githubusercontent.com/jonase/eastwood/c5b7d9f8ad8f8b38dc7138d853cc65f6987d6058/copied-deps/eastwood/copieddeps/dep2/clojure/tools/analyzer/passes/jvm/constant_lifter.clj | clojure | (ns eastwood.copieddeps.dep2.clojure.tools.analyzer.passes.jvm.constant-lifter
(:require [eastwood.copieddeps.dep1.clojure.tools.analyzer.passes.constant-lifter :as orig]
[eastwood.copieddeps.dep1.clojure.tools.analyzer :refer [analyze-const]]
[eastwood.copieddeps.dep1.clojure.tools.analyzer.utils :refer [constant? classify]]
[eastwood.copieddeps.dep2.clojure.tools.analyzer.passes.jvm.analyze-host-expr :refer [analyze-host-expr]]
[eastwood.copieddeps.dep1.clojure.tools.analyzer.passes.elide-meta :refer [elide-meta]]))
(defn constant-lift*
[ast]
(if (= :var (:op ast))
(let [{:keys [var env form meta]} ast]
(if (constant? var meta)
(let [val @var]
(assoc (analyze-const val env (classify val))
:form form))
ast))
(orig/constant-lift ast)))
(defn constant-lift
"Like eastwood.copieddeps.dep1.clojure.tools.analyzer.passes.constant-lifter/constant-lift but
transforms also :var nodes where the var has :const in the metadata
into :const nodes and preserves tag info"
{:pass-info {:walk :post :depends #{} :after #{#'elide-meta #'analyze-host-expr}}}
[ast]
(merge (constant-lift* ast)
(select-keys ast [:tag :o-tag :return-tag :arglists])))
| |
2efcc36a65ad0bddc8d3d8efaaf05ad3b5916381d71109222b8b3fcba39ccc20 | marick/Midje | checkers.clj | (ns midje.checkers
"Checkers are for checking results of checkables, or checking
that appropriate arguments are passed to prerequisites"
(:require [such.vars :as var]
[such.immigration :as immigrate])
(:require midje.checking.checkers.defining
midje.checking.checkers.chatty
midje.checking.checkers.simple
midje.checking.checkers.combining
midje.checking.checkers.collection))
(when-not (resolve '&)
(let [docstring "This var is defined so that Midje prerequisites can use & for optional args without having to quote it."]
(intern *ns* (vary-meta '& assoc :doc docstring) docstring)))
Immigrating specific vars to reduce the chance that a slipup in one of those
;; files results in polluting the checker namespace.
(immigrate/import-vars [midje.checking.checkers.defining
defchecker checker as-checker]
[midje.checking.checkers.chatty
chatty-checker]
[midje.checking.checkers.simple
truthy falsey TRUTHY FALSEY anything irrelevant exactly throws roughly]
[midje.checking.checkers.combining
every-checker some-checker]
[midje.checking.checkers.collection
has has-suffix has-prefix just contains n-of
one-of two-of three-of four-of five-of six-of
seven-of eight-of nine-of ten-of])
| null | https://raw.githubusercontent.com/marick/Midje/2b9bcb117442d3bd2d16446b47540888d683c717/src/midje/checkers.clj | clojure | files results in polluting the checker namespace. | (ns midje.checkers
"Checkers are for checking results of checkables, or checking
that appropriate arguments are passed to prerequisites"
(:require [such.vars :as var]
[such.immigration :as immigrate])
(:require midje.checking.checkers.defining
midje.checking.checkers.chatty
midje.checking.checkers.simple
midje.checking.checkers.combining
midje.checking.checkers.collection))
(when-not (resolve '&)
(let [docstring "This var is defined so that Midje prerequisites can use & for optional args without having to quote it."]
(intern *ns* (vary-meta '& assoc :doc docstring) docstring)))
Immigrating specific vars to reduce the chance that a slipup in one of those
(immigrate/import-vars [midje.checking.checkers.defining
defchecker checker as-checker]
[midje.checking.checkers.chatty
chatty-checker]
[midje.checking.checkers.simple
truthy falsey TRUTHY FALSEY anything irrelevant exactly throws roughly]
[midje.checking.checkers.combining
every-checker some-checker]
[midje.checking.checkers.collection
has has-suffix has-prefix just contains n-of
one-of two-of three-of four-of five-of six-of
seven-of eight-of nine-of ten-of])
|
802207868f3857ee3c3ab2216aabafe90ae428a5b94a2d164f2e99e4ab34a6bf | RefactoringTools/HaRe | examples.hs |
--------------------------------------------------------------------
--- Haskell Weirdness.
--------------------------------------------------------------------
data RedBlack a = Red a | Black a
data OneTwo a b = One a | Two a b
instance Show a => Show (RedBlack a) where
show = \ x -> case x of
Red v -> show v
Black v -> show v
omega :: Int -> (Int,Int)
omega = \x -> if True then (omega x) else (omega x)
h0 = (\ (Red x) -> 1) (Black 99)
h1 = (\ (Red (Black x)) -> 1) (Red undefined)
h2 = (\ (Red (Two x (Black y))) -> 1) (Red (Two 1 (Black 9)))
h3 = (\ (Red (Two x (Black y))) -> 1) (Red (Two undefined (Black undefined)))
h4 :: Int
h4 = (\ (Red (Two x (Black y))) -> x) (Red (Two undefined (Black undefined)))
ex1 = case undefined of 1 -> 99 ---> undefined
- > 99
ex3 = case undefined of (x,y) -> 99 ---> undefined
ex4 = case undefined of (Red x) -> 99 ---> undefined
-----------------------------------------------------------
-- some examples
-----------------------------------------------------------
redpat = \ x -> Pcondata "red" [(Pvar x)]
greenpat = \ x -> Pcondata "green" [(Pvar x)]
blackpat = \ x -> Pcondata "black" [(Pvar x)]
blackexp = \t -> ConApp "black" [t]
redexp = \t -> ConApp "red" [t]
greenexp = \t -> ConApp "green" [t]
pairpat = Ppair (Pvar "x") (Pvar "y")
black = \ x -> ConApp "black" [x]
red = \ x -> ConApp "red" [x]
green = \ x -> ConApp "green" [x]
--------------------------------------------------
dpat = \ x -> Pcondata "D" [(Pvar x)]
d1 = App (Abs (dpat "i") (Const 42)) Boom
--- run d1 ==> non-termination
d2 = ConApp "D" [Boom]
--- run d2 ==> "(D..." + non-termination
npat = \ x -> Pnewdata "N" (Pvar x)
n1 = App (Abs (npat "i") (Const 42)) Boom
- run n1 = = > 42 ( i.e. , ' Abs ( " i " ) e ' behaves like ' Abs " i " e '
n2 = NewApp "N" Boom
--- run n2 ==> non-termination
--------------------------------------------------
splat phi = (deM phi (\msg -> error "hey - you're applying the empty env!"))
run le = (deM (eval le) (\msg -> error "hey - you're applying the empty env!"))
--- Important to note that abstraction is neither lazy nor strict
---
e1 = App (Abs (redpat "x") (Const 1)) (blackexp (Const 19)) ---> error
e2 = App (Abs (redpat "x") (Var "x")) (blackexp (Const 19)) ---> error
- > 19
- > 4
e5 = App (Abs (redpat "x") (Var "x")) Boom ---> non-term
e6 = App (Abs (redpat "x") (Var "x")) (ConApp "red" [Boom]) ---> non-term
- > 1
- > 1
- > 99
- > 99
- > 99
l4 = Let [(redpat "x", black Undefined)] (Var "x") ---> red != black
- > 99
l6 = Let [(redpat "x", black Undefined),
(redpat "y", green (Const 99))] (Var "x") ---> red != black
HaskellCoreLite > let ( Red x ) = Black 19 in 87
87
HaskellCoreLite > let ( Red x ) = Black 19 in x
Program error : { v1405 ( RedBlack_Black ( Num_fromInt instNum_v35 19 ) ) }
HaskellCoreLite> let (Red x) = Black 19 in 87
87
HaskellCoreLite> let (Red x) = Black 19 in x
Program error: {v1405 (RedBlack_Black (Num_fromInt instNum_v35 19))}
-}
{-
BTW, this works with the old def'n of let
(i.e., dynamic binding with no explicit fixpoints).
-}
evenDef = Abs (Pvar "x") (Cond (Bin IntEq (Var "x") (Const 0))
Tconst
(App (Var "odd") (Bin Plus (Var "x") (Const $ -1))))
oddDef = Abs (Pvar "x") (Cond (Bin IntEq (Var "x") (Const 0))
Fconst
(App (Var "even") (Bin Plus (Var "x") (Const $ -1))))
oddeven = Let [(Pvar "even",evenDef),(Pvar "odd",oddDef)] (App (Var "even") (Const 3))
---this one demonstrates irrefutable patterns
---compare with:
- e1 = App ( Abs ( redpat " x " ) ( Const 1 ) ) ( blackexp ( Const 19 ) ) --- > error
irref0 = App (Abs (Ptilde (redpat "x")) (Const 1)) (blackexp (Const 19))
irref1 = App (Abs (Ptilde (redpat "x")) (Var "x")) (blackexp (Const 19))
v = (z + z) where z = 1
- > 1
c2 = Case Undefined $ [Normal (Pconst 99) (Const 1) []] ---> Undefined
c3 = Case Undefined $ [Normal (redpat "x") (Const 1) []] ---> Undefined
c4 = Case (black Undefined) [Normal (redpat "x") (Const 1) []]
---> match failure
c5 = Case (red Undefined) [Normal (redpat "x") (Const 1) []]
- > 1
c6 :
data RBG a = Red a | Black a | Green a
foo = let = Red ( Green 1 )
in
case of
( Red x ) - > ( case x of ( Black z ) - > 99 )
( Red ( Green y ) ) - > 87
data RBG a = Red a | Black a | Green a
foo = let val = Red (Green 1)
in
case val of
(Red x) -> (case x of (Black z) -> 99)
(Red (Green y)) -> 87
-}
c6body = (Case (Var "val")
[Normal (redpat "x")
(Case (Var "x") [Normal (blackpat "z") (Const 99) []])
[],
Normal (Pcondata "red" [greenpat "y"]) (Const 87) []])
c6 = Let [(Pvar "val", redexp (greenexp (Const 1)))] c6body
--- Simple example of a guarded case statement:
data Match = Guarded P [ ( E , E ) ] [ D ]
| Normal P E [ D ]
c7body = Guarded ( )
[ ( ( Const 1 ) ( Const 1 ) , ( Const 99 ) ) ]
{ - where
data Match = Guarded P [(E,E)] [D]
| Normal P E [D]
c7body = Guarded (Pconst 1)
[(Bin IntEq (Const 1) (Const 1), (Const 99))]
{- where -} [(Pvar "z",(Const 1))]
c7 = Case (Const 1) [c7body]
-}
c7 = let c7body = Guarded (Pvar "x")
[(Bin IntEq (Var "x") (Var "z"), (Const 99))]
{- where -} [(Pvar "z",(Const 1))]
in Case (Const 1) [c7body]
c8 = let c8body = Guarded (Pvar "x")
[(Bin IntEq (Var "x") (Var "z"), (Const 99))]
{- where -} [(Pvar "z",(Const 2))]
in Case (Const 1) [c8body]
c9 =
let
guardedbody = Guarded (Pvar "x")
[(Bin IntEq (Var "x") (Var "z"), (Const 99))]
[(Pvar "z",(Const 1))]
normalbody = Normal (Pvar "y") (Const 101) []
in
Case (Const 1) [guardedbody,normalbody]
c10 =
let
guardedbody = Guarded (Pvar "x")
[(Bin IntEq (Var "x") (Var "z"), (Const 99))]
[(Pvar "z",(Const 2))]
normalbody = Normal (Pvar "y") (Const 101) []
in
Case (Const 1) [guardedbody,normalbody]
projy = App (Abs (Ptuple [Pvar "x", Pvar "y", Pvar "z"])
$ Var "y")
(TupleExp [Boom, Const 2, Boom])
| null | https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/old/tools/interp/examples.hs | haskell | ------------------------------------------------------------------
- Haskell Weirdness.
------------------------------------------------------------------
-> undefined
-> undefined
-> undefined
---------------------------------------------------------
some examples
---------------------------------------------------------
------------------------------------------------
- run d1 ==> non-termination
- run d2 ==> "(D..." + non-termination
- run n2 ==> non-termination
------------------------------------------------
- Important to note that abstraction is neither lazy nor strict
-
-> error
-> error
-> non-term
-> non-term
-> red != black
-> red != black
BTW, this works with the old def'n of let
(i.e., dynamic binding with no explicit fixpoints).
-this one demonstrates irrefutable patterns
-compare with:
- > error
-> Undefined
-> Undefined
-> match failure
- Simple example of a guarded case statement:
where
where
where |
data RedBlack a = Red a | Black a
data OneTwo a b = One a | Two a b
instance Show a => Show (RedBlack a) where
show = \ x -> case x of
Red v -> show v
Black v -> show v
omega :: Int -> (Int,Int)
omega = \x -> if True then (omega x) else (omega x)
h0 = (\ (Red x) -> 1) (Black 99)
h1 = (\ (Red (Black x)) -> 1) (Red undefined)
h2 = (\ (Red (Two x (Black y))) -> 1) (Red (Two 1 (Black 9)))
h3 = (\ (Red (Two x (Black y))) -> 1) (Red (Two undefined (Black undefined)))
h4 :: Int
h4 = (\ (Red (Two x (Black y))) -> x) (Red (Two undefined (Black undefined)))
- > 99
redpat = \ x -> Pcondata "red" [(Pvar x)]
greenpat = \ x -> Pcondata "green" [(Pvar x)]
blackpat = \ x -> Pcondata "black" [(Pvar x)]
blackexp = \t -> ConApp "black" [t]
redexp = \t -> ConApp "red" [t]
greenexp = \t -> ConApp "green" [t]
pairpat = Ppair (Pvar "x") (Pvar "y")
black = \ x -> ConApp "black" [x]
red = \ x -> ConApp "red" [x]
green = \ x -> ConApp "green" [x]
dpat = \ x -> Pcondata "D" [(Pvar x)]
d1 = App (Abs (dpat "i") (Const 42)) Boom
d2 = ConApp "D" [Boom]
npat = \ x -> Pnewdata "N" (Pvar x)
n1 = App (Abs (npat "i") (Const 42)) Boom
- run n1 = = > 42 ( i.e. , ' Abs ( " i " ) e ' behaves like ' Abs " i " e '
n2 = NewApp "N" Boom
splat phi = (deM phi (\msg -> error "hey - you're applying the empty env!"))
run le = (deM (eval le) (\msg -> error "hey - you're applying the empty env!"))
- > 19
- > 4
- > 1
- > 1
- > 99
- > 99
- > 99
- > 99
l6 = Let [(redpat "x", black Undefined),
HaskellCoreLite > let ( Red x ) = Black 19 in 87
87
HaskellCoreLite > let ( Red x ) = Black 19 in x
Program error : { v1405 ( RedBlack_Black ( Num_fromInt instNum_v35 19 ) ) }
HaskellCoreLite> let (Red x) = Black 19 in 87
87
HaskellCoreLite> let (Red x) = Black 19 in x
Program error: {v1405 (RedBlack_Black (Num_fromInt instNum_v35 19))}
-}
evenDef = Abs (Pvar "x") (Cond (Bin IntEq (Var "x") (Const 0))
Tconst
(App (Var "odd") (Bin Plus (Var "x") (Const $ -1))))
oddDef = Abs (Pvar "x") (Cond (Bin IntEq (Var "x") (Const 0))
Fconst
(App (Var "even") (Bin Plus (Var "x") (Const $ -1))))
oddeven = Let [(Pvar "even",evenDef),(Pvar "odd",oddDef)] (App (Var "even") (Const 3))
irref0 = App (Abs (Ptilde (redpat "x")) (Const 1)) (blackexp (Const 19))
irref1 = App (Abs (Ptilde (redpat "x")) (Var "x")) (blackexp (Const 19))
v = (z + z) where z = 1
- > 1
c4 = Case (black Undefined) [Normal (redpat "x") (Const 1) []]
c5 = Case (red Undefined) [Normal (redpat "x") (Const 1) []]
- > 1
c6 :
data RBG a = Red a | Black a | Green a
foo = let = Red ( Green 1 )
in
case of
( Red x ) - > ( case x of ( Black z ) - > 99 )
( Red ( Green y ) ) - > 87
data RBG a = Red a | Black a | Green a
foo = let val = Red (Green 1)
in
case val of
(Red x) -> (case x of (Black z) -> 99)
(Red (Green y)) -> 87
-}
c6body = (Case (Var "val")
[Normal (redpat "x")
(Case (Var "x") [Normal (blackpat "z") (Const 99) []])
[],
Normal (Pcondata "red" [greenpat "y"]) (Const 87) []])
c6 = Let [(Pvar "val", redexp (greenexp (Const 1)))] c6body
data Match = Guarded P [ ( E , E ) ] [ D ]
| Normal P E [ D ]
c7body = Guarded ( )
[ ( ( Const 1 ) ( Const 1 ) , ( Const 99 ) ) ]
{ - where
data Match = Guarded P [(E,E)] [D]
| Normal P E [D]
c7body = Guarded (Pconst 1)
[(Bin IntEq (Const 1) (Const 1), (Const 99))]
c7 = Case (Const 1) [c7body]
-}
c7 = let c7body = Guarded (Pvar "x")
[(Bin IntEq (Var "x") (Var "z"), (Const 99))]
in Case (Const 1) [c7body]
c8 = let c8body = Guarded (Pvar "x")
[(Bin IntEq (Var "x") (Var "z"), (Const 99))]
in Case (Const 1) [c8body]
c9 =
let
guardedbody = Guarded (Pvar "x")
[(Bin IntEq (Var "x") (Var "z"), (Const 99))]
[(Pvar "z",(Const 1))]
normalbody = Normal (Pvar "y") (Const 101) []
in
Case (Const 1) [guardedbody,normalbody]
c10 =
let
guardedbody = Guarded (Pvar "x")
[(Bin IntEq (Var "x") (Var "z"), (Const 99))]
[(Pvar "z",(Const 2))]
normalbody = Normal (Pvar "y") (Const 101) []
in
Case (Const 1) [guardedbody,normalbody]
projy = App (Abs (Ptuple [Pvar "x", Pvar "y", Pvar "z"])
$ Var "y")
(TupleExp [Boom, Const 2, Boom])
|
4227f95cd240ab16101bb47dddf6a415d44d888aac791c798e82831b1c751126 | haroldcarr/learn-haskell-coq-ml-etc | Run.hs | # LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Run
( run
)
where
import qualified Control.Monad.Component as CMC
import qualified Data.Aeson as JSON
import qualified Data.Aeson.Types as JSON
import qualified Data.FileEmbed as DFE
import RIO
import qualified RIO.Text as T
import qualified System.Etc as Etc
import qualified Text.Show.Pretty as SP
------------------------------------------------------------------------------
import Import
specBytes :: ByteString
specBytes = $(DFE.embedFile "./config/spec.yaml")
run :: IO ()
run = CMC.runComponentM "component-program" buildApplication $ \app ->
runRIO app $
logInfo "We're inside the application!"
buildApplication :: CMC.ComponentM App
buildApplication = do
(config, _fileWarnings) <- buildConfig
logFunc <- buildLogger config
liftIO $ runRIO logFunc $ logInfo $ "Config: " <> display (T.pack (SP.ppShow config))
return (App logFunc)
buildConfig :: CMC.ComponentM (Etc.Config, Vector SomeException)
buildConfig = CMC.buildComponent_ "buildConfig" $ do
configSpec <- parseConfigSpec
resolveConfigSpec configSpec
parseConfigSpec :: MonadThrow m => m (Etc.ConfigSpec ())
parseConfigSpec =
case T.decodeUtf8' specBytes of
Left err -> throwM err
Right result -> Etc.parseConfigSpec result
resolveConfigSpec :: Etc.ConfigSpec () -> IO (Etc.Config, Vector SomeException)
resolveConfigSpec configSpec = do
let defaultConfig = Etc.resolveDefault configSpec
(fileConfig, fileWarnings) <- Etc.resolveFiles configSpec
envConfig <- Etc.resolveEnv configSpec
cliConfig <- Etc.resolvePlainCli configSpec
return ( defaultConfig <> fileConfig <> envConfig <> cliConfig
, fileWarnings )
--------------------------------------------------------------------------------
-- Logging
parseLogHandle :: JSON.Value -> JSON.Parser Handle
parseLogHandle = JSON.withText "IOHandle" $ \handleText ->
if handleText == "stdout" then return stdout
else if handleText == "stderr" then return stderr
else JSON.typeMismatch "IOHandle" (JSON.String handleText)
buildLogOptions :: Etc.Config -> IO LogOptions
buildLogOptions config = do
handle0 <- Etc.getConfigValueWith parseLogHandle ["logging", "handle"] config
logOptionsHandle handle0 True
buildLogger :: Etc.Config -> CMC.ComponentM LogFunc
buildLogger config = do
logOptions <- liftIO $ buildLogOptions config
(logFunc, _) <- CMC.buildComponent "logger" (newLogFunc logOptions) snd
return logFunc
| null | https://raw.githubusercontent.com/haroldcarr/learn-haskell-coq-ml-etc/b4e83ec7c7af730de688b7376497b9f49dc24a0e/haskell/course/2018-06-roman-gonzales-rock-solid-haskell-services-lambdaconf/hc/src/Run.hs | haskell | # LANGUAGE OverloadedStrings #
----------------------------------------------------------------------------
------------------------------------------------------------------------------
Logging | # LANGUAGE NoImplicitPrelude #
# LANGUAGE TemplateHaskell #
module Run
( run
)
where
import qualified Control.Monad.Component as CMC
import qualified Data.Aeson as JSON
import qualified Data.Aeson.Types as JSON
import qualified Data.FileEmbed as DFE
import RIO
import qualified RIO.Text as T
import qualified System.Etc as Etc
import qualified Text.Show.Pretty as SP
import Import
specBytes :: ByteString
specBytes = $(DFE.embedFile "./config/spec.yaml")
run :: IO ()
run = CMC.runComponentM "component-program" buildApplication $ \app ->
runRIO app $
logInfo "We're inside the application!"
buildApplication :: CMC.ComponentM App
buildApplication = do
(config, _fileWarnings) <- buildConfig
logFunc <- buildLogger config
liftIO $ runRIO logFunc $ logInfo $ "Config: " <> display (T.pack (SP.ppShow config))
return (App logFunc)
buildConfig :: CMC.ComponentM (Etc.Config, Vector SomeException)
buildConfig = CMC.buildComponent_ "buildConfig" $ do
configSpec <- parseConfigSpec
resolveConfigSpec configSpec
parseConfigSpec :: MonadThrow m => m (Etc.ConfigSpec ())
parseConfigSpec =
case T.decodeUtf8' specBytes of
Left err -> throwM err
Right result -> Etc.parseConfigSpec result
resolveConfigSpec :: Etc.ConfigSpec () -> IO (Etc.Config, Vector SomeException)
resolveConfigSpec configSpec = do
let defaultConfig = Etc.resolveDefault configSpec
(fileConfig, fileWarnings) <- Etc.resolveFiles configSpec
envConfig <- Etc.resolveEnv configSpec
cliConfig <- Etc.resolvePlainCli configSpec
return ( defaultConfig <> fileConfig <> envConfig <> cliConfig
, fileWarnings )
parseLogHandle :: JSON.Value -> JSON.Parser Handle
parseLogHandle = JSON.withText "IOHandle" $ \handleText ->
if handleText == "stdout" then return stdout
else if handleText == "stderr" then return stderr
else JSON.typeMismatch "IOHandle" (JSON.String handleText)
buildLogOptions :: Etc.Config -> IO LogOptions
buildLogOptions config = do
handle0 <- Etc.getConfigValueWith parseLogHandle ["logging", "handle"] config
logOptionsHandle handle0 True
buildLogger :: Etc.Config -> CMC.ComponentM LogFunc
buildLogger config = do
logOptions <- liftIO $ buildLogOptions config
(logFunc, _) <- CMC.buildComponent "logger" (newLogFunc logOptions) snd
return logFunc
|
26b151e7ecae1eefbb72fcd479c52520436115a6902d9e67b3081f32487be9af | bvaugon/ocapic | bytes.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
* Byte sequence operations .
A byte sequence is a mutable data structure that contains a
fixed - length sequence of bytes . Each byte can be indexed in
constant time for reading or writing .
Given a byte sequence [ s ] of length [ l ] , we can access each of the
[ l ] bytes of [ s ] via its index in the sequence . Indexes start at
[ 0 ] , and we will call an index valid in [ s ] if it falls within the
range [ [ 0 ... l-1 ] ] ( inclusive ) . A position is the point between two
bytes or at the beginning or end of the sequence . We call a
position valid in [ s ] if it falls within the range [ [ 0 ... l ] ]
( inclusive ) . Note that the byte at index [ n ] is between positions
[ n ] and [ n+1 ] .
Two parameters [ start ] and [ len ] are said to designate a valid
range of [ s ] if [ len > = 0 ] and [ start ] and [ start+len ] are valid
positions in [ s ] .
Byte sequences can be modified in place , for instance via the [ set ]
and [ blit ] functions described below . See also strings ( module
{ ! String } ) , which are almost the same data structure , but can not be
modified in place .
Bytes are represented by the OCaml type [ char ] .
@since 4.02.0
A byte sequence is a mutable data structure that contains a
fixed-length sequence of bytes. Each byte can be indexed in
constant time for reading or writing.
Given a byte sequence [s] of length [l], we can access each of the
[l] bytes of [s] via its index in the sequence. Indexes start at
[0], and we will call an index valid in [s] if it falls within the
range [[0...l-1]] (inclusive). A position is the point between two
bytes or at the beginning or end of the sequence. We call a
position valid in [s] if it falls within the range [[0...l]]
(inclusive). Note that the byte at index [n] is between positions
[n] and [n+1].
Two parameters [start] and [len] are said to designate a valid
range of [s] if [len >= 0] and [start] and [start+len] are valid
positions in [s].
Byte sequences can be modified in place, for instance via the [set]
and [blit] functions described below. See also strings (module
{!String}), which are almost the same data structure, but cannot be
modified in place.
Bytes are represented by the OCaml type [char].
@since 4.02.0
*)
external length : bytes -> int = "%string_length"
(** Return the length (number of bytes) of the argument. *)
external get : bytes -> int -> char = "%string_safe_get"
(** [get s n] returns the byte at index [n] in argument [s].
Raise [Invalid_argument] if [n] not a valid index in [s]. *)
external set : bytes -> int -> char -> unit = "%string_safe_set"
(** [set s n c] modifies [s] in place, replacing the byte at index [n]
with [c].
Raise [Invalid_argument] if [n] is not a valid index in [s]. *)
external create : int -> bytes = "caml_create_string"
* [ create n ] returns a new byte sequence of length [ n ] . The
sequence is uninitialized and contains arbitrary bytes .
Raise [ Invalid_argument ] if [ n < 0 ] or [ n > ] { ! } .
sequence is uninitialized and contains arbitrary bytes.
Raise [Invalid_argument] if [n < 0] or [n > ]{!Sys.max_string_length}. *)
val make : int -> char -> bytes
* [ make n c ] returns a new byte sequence of length [ n ] , filled with
the byte [ c ] .
Raise [ Invalid_argument ] if [ n < 0 ] or [ n > ] { ! } .
the byte [c].
Raise [Invalid_argument] if [n < 0] or [n > ]{!Sys.max_string_length}. *)
val init : int -> (int -> char) -> bytes
* [ Bytes.init n f ] returns a fresh byte sequence of length [ n ] , with
character [ i ] initialized to the result of [ f i ] ( in increasing
index order ) .
Raise [ Invalid_argument ] if [ n < 0 ] or [ n > ] { ! } .
character [i] initialized to the result of [f i] (in increasing
index order).
Raise [Invalid_argument] if [n < 0] or [n > ]{!Sys.max_string_length}. *)
val empty : bytes
* A byte sequence of size 0 .
val copy : bytes -> bytes
(** Return a new byte sequence that contains the same bytes as the
argument. *)
val of_string : string -> bytes
(** Return a new byte sequence that contains the same bytes as the
given string. *)
val to_string : bytes -> string
(** Return a new string that contains the same bytes as the given byte
sequence. *)
val sub : bytes -> int -> int -> bytes
(** [sub s start len] returns a new byte sequence of length [len],
containing the subsequence of [s] that starts at position [start]
and has length [len].
Raise [Invalid_argument] if [start] and [len] do not designate a
valid range of [s]. *)
val sub_string : bytes -> int -> int -> string
(** Same as [sub] but return a string instead of a byte sequence. *)
val extend : bytes -> int -> int -> bytes
* [ extend s left right ] returns a new byte sequence that contains
the bytes of [ s ] , with [ left ] uninitialized bytes prepended and
[ right ] uninitialized bytes appended to it . If [ left ] or [ right ]
is negative , then bytes are removed ( instead of appended ) from
the corresponding side of [ s ] .
Raise [ Invalid_argument ] if the result length is negative or
longer than { ! } bytes .
the bytes of [s], with [left] uninitialized bytes prepended and
[right] uninitialized bytes appended to it. If [left] or [right]
is negative, then bytes are removed (instead of appended) from
the corresponding side of [s].
Raise [Invalid_argument] if the result length is negative or
longer than {!Sys.max_string_length} bytes. *)
val fill : bytes -> int -> int -> char -> unit
(** [fill s start len c] modifies [s] in place, replacing [len]
characters with [c], starting at [start].
Raise [Invalid_argument] if [start] and [len] do not designate a
valid range of [s]. *)
val blit : bytes -> int -> bytes -> int -> int -> unit
(** [blit src srcoff dst dstoff len] copies [len] bytes from sequence
[src], starting at index [srcoff], to sequence [dst], starting at
index [dstoff]. It works correctly even if [src] and [dst] are the
same byte sequence, and the source and destination intervals
overlap.
Raise [Invalid_argument] if [srcoff] and [len] do not
designate a valid range of [src], or if [dstoff] and [len]
do not designate a valid range of [dst]. *)
val blit_string : string -> int -> bytes -> int -> int -> unit
(** [blit src srcoff dst dstoff len] copies [len] bytes from string
[src], starting at index [srcoff], to byte sequence [dst],
starting at index [dstoff].
Raise [Invalid_argument] if [srcoff] and [len] do not
designate a valid range of [src], or if [dstoff] and [len]
do not designate a valid range of [dst]. *)
val concat : bytes -> bytes list -> bytes
* [ concat sep sl ] concatenates the list of byte sequences [ sl ] ,
inserting the separator byte sequence [ sep ] between each , and
returns the result as a new byte sequence .
Raise [ Invalid_argument ] if the result is longer than
{ ! } bytes .
inserting the separator byte sequence [sep] between each, and
returns the result as a new byte sequence.
Raise [Invalid_argument] if the result is longer than
{!Sys.max_string_length} bytes. *)
val cat : bytes -> bytes -> bytes
* [ cat s1 s2 ] concatenates [ s1 ] and [ s2 ] and returns the result
as new byte sequence .
Raise [ Invalid_argument ] if the result is longer than
{ ! } bytes .
as new byte sequence.
Raise [Invalid_argument] if the result is longer than
{!Sys.max_string_length} bytes. *)
val iter : (char -> unit) -> bytes -> unit
* [ iter f s ] applies function [ f ] in turn to all the bytes of [ s ] .
It is equivalent to [ f ( get s 0 ) ; f ( get s 1 ) ; ... ; f ( get s
( length s - 1 ) ) ; ( ) ] .
It is equivalent to [f (get s 0); f (get s 1); ...; f (get s
(length s - 1)); ()]. *)
val iteri : (int -> char -> unit) -> bytes -> unit
* Same as { ! Bytes.iter } , but the function is applied to the index of
the byte as first argument and the byte itself as second
argument .
the byte as first argument and the byte itself as second
argument. *)
val map : (char -> char) -> bytes -> bytes
(** [map f s] applies function [f] in turn to all the bytes of [s]
(in increasing index order) and stores the resulting bytes in
a new sequence that is returned as the result. *)
val mapi : (int -> char -> char) -> bytes -> bytes
(** [mapi f s] calls [f] with each character of [s] and its
index (in increasing index order) and stores the resulting bytes
in a new sequence that is returned as the result. *)
val trim : bytes -> bytes
* Return a copy of the argument , without leading and trailing
whitespace . The bytes regarded as whitespace are the ASCII
characters [ ' ' ] , [ ' \012 ' ] , [ ' \n ' ] , [ ' \r ' ] , and [ ' \t ' ] .
whitespace. The bytes regarded as whitespace are the ASCII
characters [' '], ['\012'], ['\n'], ['\r'], and ['\t']. *)
val escaped : bytes -> bytes
* Return a copy of the argument , with special characters represented
by escape sequences , following the lexical conventions of OCaml .
All characters outside the ASCII printable range ( 32 .. 126 ) are
escaped , as well as backslash and double - quote .
Raise [ Invalid_argument ] if the result is longer than
{ ! } bytes .
by escape sequences, following the lexical conventions of OCaml.
All characters outside the ASCII printable range (32..126) are
escaped, as well as backslash and double-quote.
Raise [Invalid_argument] if the result is longer than
{!Sys.max_string_length} bytes. *)
val index : bytes -> char -> int
* [ index s c ] returns the index of the first occurrence of byte [ c ]
in [ s ] .
Raise [ Not_found ] if [ c ] does not occur in [ s ] .
in [s].
Raise [Not_found] if [c] does not occur in [s]. *)
val rindex : bytes -> char -> int
(** [rindex s c] returns the index of the last occurrence of byte [c]
in [s].
Raise [Not_found] if [c] does not occur in [s]. *)
val index_from : bytes -> int -> char -> int
* [ index_from s i c ] returns the index of the first occurrence of
byte [ c ] in [ s ] after position [ i ] . [ Bytes.index s c ] is
equivalent to [ Bytes.index_from s 0 c ] .
Raise [ Invalid_argument ] if [ i ] is not a valid position in [ s ] .
Raise [ Not_found ] if [ c ] does not occur in [ s ] after position [ i ] .
byte [c] in [s] after position [i]. [Bytes.index s c] is
equivalent to [Bytes.index_from s 0 c].
Raise [Invalid_argument] if [i] is not a valid position in [s].
Raise [Not_found] if [c] does not occur in [s] after position [i]. *)
val rindex_from : bytes -> int -> char -> int
* [ rindex_from s i c ] returns the index of the last occurrence of
byte [ c ] in [ s ] before position [ i+1 ] . [ rindex s c ] is equivalent
to [ rindex_from s ( Bytes.length s - 1 ) c ] .
Raise [ Invalid_argument ] if [ i+1 ] is not a valid position in [ s ] .
Raise [ Not_found ] if [ c ] does not occur in [ s ] before position [ i+1 ] .
byte [c] in [s] before position [i+1]. [rindex s c] is equivalent
to [rindex_from s (Bytes.length s - 1) c].
Raise [Invalid_argument] if [i+1] is not a valid position in [s].
Raise [Not_found] if [c] does not occur in [s] before position [i+1]. *)
val contains : bytes -> char -> bool
(** [contains s c] tests if byte [c] appears in [s]. *)
val contains_from : bytes -> int -> char -> bool
(** [contains_from s start c] tests if byte [c] appears in [s] after
position [start]. [contains s c] is equivalent to [contains_from
s 0 c].
Raise [Invalid_argument] if [start] is not a valid position in [s]. *)
val rcontains_from : bytes -> int -> char -> bool
* [ rcontains_from s stop c ] tests if byte [ c ] appears in [ s ] before
position [ stop+1 ] .
Raise [ Invalid_argument ] if [ stop < 0 ] or [ stop+1 ] is not a valid
position in [ s ] .
position [stop+1].
Raise [Invalid_argument] if [stop < 0] or [stop+1] is not a valid
position in [s]. *)
val uppercase : bytes -> bytes
[@@ocaml.deprecated "Use Bytes.uppercase_ascii instead."]
(** Return a copy of the argument, with all lowercase letters
translated to uppercase, including accented letters of the ISO
Latin-1 (8859-1) character set.
@deprecated Functions operating on Latin-1 character set are deprecated. *)
val lowercase : bytes -> bytes
[@@ocaml.deprecated "Use Bytes.lowercase_ascii instead."]
(** Return a copy of the argument, with all uppercase letters
translated to lowercase, including accented letters of the ISO
Latin-1 (8859-1) character set.
@deprecated Functions operating on Latin-1 character set are deprecated. *)
val capitalize : bytes -> bytes
[@@ocaml.deprecated "Use Bytes.capitalize_ascii instead."]
(** Return a copy of the argument, with the first character set to uppercase,
    using the ISO Latin-1 (8859-1) character set.
    @deprecated Functions operating on Latin-1 character set are deprecated. *)
val uncapitalize : bytes -> bytes
[@@ocaml.deprecated "Use Bytes.uncapitalize_ascii instead."]
(** Return a copy of the argument, with the first character set to lowercase,
    using the ISO Latin-1 (8859-1) character set.
    @deprecated Functions operating on Latin-1 character set are deprecated. *)
val uppercase_ascii : bytes -> bytes
(** Return a copy of the argument, with all lowercase letters
    translated to uppercase, using the US-ASCII character set.
    @since 4.03.0 *)
val lowercase_ascii : bytes -> bytes
(** Return a copy of the argument, with all uppercase letters
    translated to lowercase, using the US-ASCII character set.
    @since 4.03.0 *)
val capitalize_ascii : bytes -> bytes
(** Return a copy of the argument, with the first character set to uppercase,
    using the US-ASCII character set.
    @since 4.03.0 *)
val uncapitalize_ascii : bytes -> bytes
(** Return a copy of the argument, with the first character set to lowercase,
    using the US-ASCII character set.
    @since 4.03.0 *)
type t = bytes
(** An alias for the type of byte sequences. *)
val compare: t -> t -> int
(** The comparison function for byte sequences, with the same
specification as {!Pervasives.compare}. Along with the type [t],
this function [compare] allows the module [Bytes] to be passed as
argument to the functors {!Set.Make} and {!Map.Make}. *)
val equal: t -> t -> bool
(** The equality function for byte sequences.
    @since 4.03.0 *)
(** {4 Unsafe conversions (for advanced users)}

    This section describes unsafe, low-level conversion functions
    between [bytes] and [string]. They do not copy the internal data;
    used improperly, they can break the immutability invariant on
    strings provided by the [-safe-string] option. They are available for
    expert library authors, but for most purposes you should use the
    always-correct {!Bytes.to_string} and {!Bytes.of_string} instead.
*)
val unsafe_to_string : bytes -> string
(** Unsafely convert a byte sequence into a string.

    To reason about the use of [unsafe_to_string], it is convenient to
    consider an "ownership" discipline. A piece of code that
    manipulates some data "owns" it; there are several disjoint ownership
    modes, including:
    - Unique ownership: the data may be accessed and mutated
    - Shared ownership: the data has several owners, that may only
      access it, not mutate it.

    Unique ownership is linear: passing the data to another piece of
    code means giving up ownership (we cannot write the
    data again). A unique owner may decide to make the data shared
    (giving up mutation rights on it), but shared data may not become
    uniquely-owned again.

    [unsafe_to_string s] can only be used when the caller owns the byte
    sequence [s] -- either uniquely or as shared immutable data. The
    caller gives up ownership of [s], and gains ownership of the
    returned string.

    There are two valid use-cases that respect this ownership
    discipline:

    1. Creating a string by initializing and mutating a byte sequence
    that is never changed after initialization is performed.

    {[
      let string_init len f : string =
        let s = Bytes.create len in
        for i = 0 to len - 1 do Bytes.set s i (f i) done;
        Bytes.unsafe_to_string s
    ]}

    This function is safe because the byte sequence [s] will never be
    accessed or mutated after [unsafe_to_string] is called. The
    [string_init] code gives up ownership of [s], and returns the
    ownership of the resulting string to its caller.

    Note that it would be unsafe if [s] was passed as an additional
    parameter to the function [f] as it could escape this way and be
    mutated in the future -- [string_init] would give up ownership of
    [s] to pass it to [f], and could not call [unsafe_to_string]
    safely.

    We have provided the {!String.init}, {!String.map} and
    {!String.mapi} functions to cover most cases of building
    new strings. You should prefer those over [to_string] or
    [unsafe_to_string] whenever applicable.

    2. Temporarily giving ownership of a byte sequence to a function
    that expects a uniquely owned string and returns ownership back, so
    that we can mutate the sequence again after the call ended.

    {[
      let bytes_length (s : bytes) =
        String.length (Bytes.unsafe_to_string s)
    ]}

    In this use-case, we do not promise that [s] will never be mutated
    after the call to [bytes_length s]. The {!String.length} function
    temporarily borrows unique ownership of the byte sequence
    (and sees it as a [string]), but returns this ownership back to
    the caller, which may assume that [s] is still a valid byte
    sequence after the call. Note that this is only correct because we
    know that {!String.length} does not capture its argument -- it could
    escape by a side-channel such as a memoization combinator.

    The caller may not mutate [s] while the string is borrowed (it has
    temporarily given up ownership). This affects concurrent programs,
    but also higher-order functions: if [String.length] returned
    a closure to be called later, [s] should not be mutated until this
    closure is fully applied and returns ownership.
*)
val unsafe_of_string : string -> bytes
(** Unsafely convert a shared string to a byte sequence that should
    not be mutated.

    The same ownership discipline that makes [unsafe_to_string]
    correct applies to [unsafe_of_string]: you may use it if you were
    the owner of the [string] value, and you will own the return
    [bytes] in the same mode.

    In practice, unique ownership of string values is extremely
    difficult to reason about correctly. You should always assume
    strings are shared, never uniquely owned.

    For example, string literals are implicitly shared by the
    compiler, so you never uniquely own them.

    {[
      let incorrect = Bytes.unsafe_of_string "hello"
      let s = Bytes.of_string "hello"
    ]}

    The first declaration is incorrect, because the string literal
    ["hello"] could be shared by the compiler with other parts of the
    program, and mutating [incorrect] is a bug. You must always use
    the second version, which performs a copy and is thus correct.

    Assuming unique ownership of strings that are not string
    literals, but are (partly) built from string literals, is also
    incorrect. For example, mutating [unsafe_of_string ("foo" ^ s)]
    could mutate the shared string ["foo"] -- assuming a rope-like
    representation of strings. More generally, functions operating on
    strings will assume shared ownership, they do not preserve unique
    ownership. It is thus incorrect to assume unique ownership of the
    result of [unsafe_of_string].

    The only case we have reasonable confidence is safe is if the
    produced [bytes] is shared -- used as an immutable byte
    sequence. This is possibly useful for incremental migration of
    low-level programs that manipulate immutable sequences of bytes
    (for example {!Marshal.from_bytes}) and previously used the
    [string] type for this purpose.
*)
(**/**)
(* The following is for system use only. Do not call directly. *)
external unsafe_get : bytes -> int -> char = "%string_unsafe_get"
external unsafe_set : bytes -> int -> char -> unit = "%string_unsafe_set"
external unsafe_blit :
bytes -> int -> bytes -> int -> int -> unit
= "caml_blit_string" [@@noalloc]
external unsafe_fill :
bytes -> int -> int -> char -> unit = "caml_fill_string" [@@noalloc]
| null | https://raw.githubusercontent.com/bvaugon/ocapic/a14cd9ec3f5022aeb5fe2264d595d7e8f1ddf58a/lib/bytes.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Return the length (number of bytes) of the argument.
* [get s n] returns the byte at index [n] in argument [s].
Raise [Invalid_argument] if [n] not a valid index in [s].
* [set s n c] modifies [s] in place, replacing the byte at index [n]
with [c].
Raise [Invalid_argument] if [n] is not a valid index in [s].
* Return a new byte sequence that contains the same bytes as the
argument.
* Return a new byte sequence that contains the same bytes as the
given string.
* Return a new string that contains the same bytes as the given byte
sequence.
* [sub s start len] returns a new byte sequence of length [len],
containing the subsequence of [s] that starts at position [start]
and has length [len].
Raise [Invalid_argument] if [start] and [len] do not designate a
valid range of [s].
* Same as [sub] but return a string instead of a byte sequence.
* [fill s start len c] modifies [s] in place, replacing [len]
characters with [c], starting at [start].
Raise [Invalid_argument] if [start] and [len] do not designate a
valid range of [s].
* [blit src srcoff dst dstoff len] copies [len] bytes from sequence
[src], starting at index [srcoff], to sequence [dst], starting at
index [dstoff]. It works correctly even if [src] and [dst] are the
same byte sequence, and the source and destination intervals
overlap.
Raise [Invalid_argument] if [srcoff] and [len] do not
designate a valid range of [src], or if [dstoff] and [len]
do not designate a valid range of [dst].
* [blit src srcoff dst dstoff len] copies [len] bytes from string
[src], starting at index [srcoff], to byte sequence [dst],
starting at index [dstoff].
Raise [Invalid_argument] if [srcoff] and [len] do not
designate a valid range of [src], or if [dstoff] and [len]
do not designate a valid range of [dst].
* [map f s] applies function [f] in turn to all the bytes of [s]
(in increasing index order) and stores the resulting bytes in
a new sequence that is returned as the result.
* [mapi f s] calls [f] with each character of [s] and its
index (in increasing index order) and stores the resulting bytes
in a new sequence that is returned as the result.
* [rindex s c] returns the index of the last occurrence of byte [c]
in [s].
Raise [Not_found] if [c] does not occur in [s].
* [contains s c] tests if byte [c] appears in [s].
* [contains_from s start c] tests if byte [c] appears in [s] after
position [start]. [contains s c] is equivalent to [contains_from
s 0 c].
Raise [Invalid_argument] if [start] is not a valid position in [s].
* Return a copy of the argument, with all lowercase letters
translated to uppercase, including accented letters of the ISO
Latin-1 (8859-1) character set.
@deprecated Functions operating on Latin-1 character set are deprecated.
* Return a copy of the argument, with all uppercase letters
translated to lowercase, including accented letters of the ISO
Latin-1 (8859-1) character set.
@deprecated Functions operating on Latin-1 character set are deprecated.
* An alias for the type of byte sequences.
* The comparison function for byte sequences, with the same
specification as {!Pervasives.compare}. Along with the type [t],
this function [compare] allows the module [Bytes] to be passed as
argument to the functors {!Set.Make} and {!Map.Make}.
*/*
The following is for system use only. Do not call directly. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* Byte sequence operations .
A byte sequence is a mutable data structure that contains a
fixed - length sequence of bytes . Each byte can be indexed in
constant time for reading or writing .
Given a byte sequence [ s ] of length [ l ] , we can access each of the
[ l ] bytes of [ s ] via its index in the sequence . Indexes start at
[ 0 ] , and we will call an index valid in [ s ] if it falls within the
range [ [ 0 ... l-1 ] ] ( inclusive ) . A position is the point between two
bytes or at the beginning or end of the sequence . We call a
position valid in [ s ] if it falls within the range [ [ 0 ... l ] ]
( inclusive ) . Note that the byte at index [ n ] is between positions
[ n ] and [ n+1 ] .
Two parameters [ start ] and [ len ] are said to designate a valid
range of [ s ] if [ len > = 0 ] and [ start ] and [ start+len ] are valid
positions in [ s ] .
Byte sequences can be modified in place , for instance via the [ set ]
and [ blit ] functions described below . See also strings ( module
{ ! String } ) , which are almost the same data structure , but can not be
modified in place .
Bytes are represented by the OCaml type [ char ] .
@since 4.02.0
A byte sequence is a mutable data structure that contains a
fixed-length sequence of bytes. Each byte can be indexed in
constant time for reading or writing.
Given a byte sequence [s] of length [l], we can access each of the
[l] bytes of [s] via its index in the sequence. Indexes start at
[0], and we will call an index valid in [s] if it falls within the
range [[0...l-1]] (inclusive). A position is the point between two
bytes or at the beginning or end of the sequence. We call a
position valid in [s] if it falls within the range [[0...l]]
(inclusive). Note that the byte at index [n] is between positions
[n] and [n+1].
Two parameters [start] and [len] are said to designate a valid
range of [s] if [len >= 0] and [start] and [start+len] are valid
positions in [s].
Byte sequences can be modified in place, for instance via the [set]
and [blit] functions described below. See also strings (module
{!String}), which are almost the same data structure, but cannot be
modified in place.
Bytes are represented by the OCaml type [char].
@since 4.02.0
*)
external length : bytes -> int = "%string_length"
external get : bytes -> int -> char = "%string_safe_get"
external set : bytes -> int -> char -> unit = "%string_safe_set"
external create : int -> bytes = "caml_create_string"
* [ create n ] returns a new byte sequence of length [ n ] . The
sequence is uninitialized and contains arbitrary bytes .
Raise [ Invalid_argument ] if [ n < 0 ] or [ n > ] { ! } .
sequence is uninitialized and contains arbitrary bytes.
Raise [Invalid_argument] if [n < 0] or [n > ]{!Sys.max_string_length}. *)
val make : int -> char -> bytes
* [ make n c ] returns a new byte sequence of length [ n ] , filled with
the byte [ c ] .
Raise [ Invalid_argument ] if [ n < 0 ] or [ n > ] { ! } .
the byte [c].
Raise [Invalid_argument] if [n < 0] or [n > ]{!Sys.max_string_length}. *)
val init : int -> (int -> char) -> bytes
* [ Bytes.init n f ] returns a fresh byte sequence of length [ n ] , with
character [ i ] initialized to the result of [ f i ] ( in increasing
index order ) .
Raise [ Invalid_argument ] if [ n < 0 ] or [ n > ] { ! } .
character [i] initialized to the result of [f i] (in increasing
index order).
Raise [Invalid_argument] if [n < 0] or [n > ]{!Sys.max_string_length}. *)
val empty : bytes
* A byte sequence of size 0 .
val copy : bytes -> bytes
val of_string : string -> bytes
val to_string : bytes -> string
val sub : bytes -> int -> int -> bytes
val sub_string : bytes -> int -> int -> string
val extend : bytes -> int -> int -> bytes
* [ extend s left right ] returns a new byte sequence that contains
the bytes of [ s ] , with [ left ] uninitialized bytes prepended and
[ right ] uninitialized bytes appended to it . If [ left ] or [ right ]
is negative , then bytes are removed ( instead of appended ) from
the corresponding side of [ s ] .
Raise [ Invalid_argument ] if the result length is negative or
longer than { ! } bytes .
the bytes of [s], with [left] uninitialized bytes prepended and
[right] uninitialized bytes appended to it. If [left] or [right]
is negative, then bytes are removed (instead of appended) from
the corresponding side of [s].
Raise [Invalid_argument] if the result length is negative or
longer than {!Sys.max_string_length} bytes. *)
val fill : bytes -> int -> int -> char -> unit
val blit : bytes -> int -> bytes -> int -> int -> unit
val blit_string : string -> int -> bytes -> int -> int -> unit
val concat : bytes -> bytes list -> bytes
* [ concat sep sl ] concatenates the list of byte sequences [ sl ] ,
inserting the separator byte sequence [ sep ] between each , and
returns the result as a new byte sequence .
Raise [ Invalid_argument ] if the result is longer than
{ ! } bytes .
inserting the separator byte sequence [sep] between each, and
returns the result as a new byte sequence.
Raise [Invalid_argument] if the result is longer than
{!Sys.max_string_length} bytes. *)
val cat : bytes -> bytes -> bytes
* [ cat s1 s2 ] concatenates [ s1 ] and [ s2 ] and returns the result
as new byte sequence .
Raise [ Invalid_argument ] if the result is longer than
{ ! } bytes .
as new byte sequence.
Raise [Invalid_argument] if the result is longer than
{!Sys.max_string_length} bytes. *)
val iter : (char -> unit) -> bytes -> unit
* [ iter f s ] applies function [ f ] in turn to all the bytes of [ s ] .
It is equivalent to [ f ( get s 0 ) ; f ( get s 1 ) ; ... ; f ( get s
( length s - 1 ) ) ; ( ) ] .
It is equivalent to [f (get s 0); f (get s 1); ...; f (get s
(length s - 1)); ()]. *)
val iteri : (int -> char -> unit) -> bytes -> unit
* Same as { ! Bytes.iter } , but the function is applied to the index of
the byte as first argument and the byte itself as second
argument .
the byte as first argument and the byte itself as second
argument. *)
val map : (char -> char) -> bytes -> bytes
val mapi : (int -> char -> char) -> bytes -> bytes
val trim : bytes -> bytes
* Return a copy of the argument , without leading and trailing
whitespace . The bytes regarded as whitespace are the ASCII
characters [ ' ' ] , [ ' \012 ' ] , [ ' \n ' ] , [ ' \r ' ] , and [ ' \t ' ] .
whitespace. The bytes regarded as whitespace are the ASCII
characters [' '], ['\012'], ['\n'], ['\r'], and ['\t']. *)
val escaped : bytes -> bytes
* Return a copy of the argument , with special characters represented
by escape sequences , following the lexical conventions of OCaml .
All characters outside the ASCII printable range ( 32 .. 126 ) are
escaped , as well as backslash and double - quote .
Raise [ Invalid_argument ] if the result is longer than
{ ! } bytes .
by escape sequences, following the lexical conventions of OCaml.
All characters outside the ASCII printable range (32..126) are
escaped, as well as backslash and double-quote.
Raise [Invalid_argument] if the result is longer than
{!Sys.max_string_length} bytes. *)
val index : bytes -> char -> int
* [ index s c ] returns the index of the first occurrence of byte [ c ]
in [ s ] .
Raise [ Not_found ] if [ c ] does not occur in [ s ] .
in [s].
Raise [Not_found] if [c] does not occur in [s]. *)
val rindex : bytes -> char -> int
val index_from : bytes -> int -> char -> int
* [ index_from s i c ] returns the index of the first occurrence of
byte [ c ] in [ s ] after position [ i ] . [ Bytes.index s c ] is
equivalent to [ Bytes.index_from s 0 c ] .
Raise [ Invalid_argument ] if [ i ] is not a valid position in [ s ] .
Raise [ Not_found ] if [ c ] does not occur in [ s ] after position [ i ] .
byte [c] in [s] after position [i]. [Bytes.index s c] is
equivalent to [Bytes.index_from s 0 c].
Raise [Invalid_argument] if [i] is not a valid position in [s].
Raise [Not_found] if [c] does not occur in [s] after position [i]. *)
val rindex_from : bytes -> int -> char -> int
* [ rindex_from s i c ] returns the index of the last occurrence of
byte [ c ] in [ s ] before position [ i+1 ] . [ rindex s c ] is equivalent
to [ rindex_from s ( Bytes.length s - 1 ) c ] .
Raise [ Invalid_argument ] if [ i+1 ] is not a valid position in [ s ] .
Raise [ Not_found ] if [ c ] does not occur in [ s ] before position [ i+1 ] .
byte [c] in [s] before position [i+1]. [rindex s c] is equivalent
to [rindex_from s (Bytes.length s - 1) c].
Raise [Invalid_argument] if [i+1] is not a valid position in [s].
Raise [Not_found] if [c] does not occur in [s] before position [i+1]. *)
val contains : bytes -> char -> bool
val contains_from : bytes -> int -> char -> bool
val rcontains_from : bytes -> int -> char -> bool
* [ rcontains_from s stop c ] tests if byte [ c ] appears in [ s ] before
position [ stop+1 ] .
Raise [ Invalid_argument ] if [ stop < 0 ] or [ stop+1 ] is not a valid
position in [ s ] .
position [stop+1].
Raise [Invalid_argument] if [stop < 0] or [stop+1] is not a valid
position in [s]. *)
val uppercase : bytes -> bytes
[@@ocaml.deprecated "Use Bytes.uppercase_ascii instead."]
val lowercase : bytes -> bytes
[@@ocaml.deprecated "Use Bytes.lowercase_ascii instead."]
val capitalize : bytes -> bytes
[@@ocaml.deprecated "Use Bytes.capitalize_ascii instead."]
* Return a copy of the argument , with the first character set to uppercase ,
using the ISO Latin-1 ( 8859 - 1 ) character set ..
@deprecated Functions operating on Latin-1 character set are deprecated .
using the ISO Latin-1 (8859-1) character set..
@deprecated Functions operating on Latin-1 character set are deprecated. *)
val uncapitalize : bytes -> bytes
[@@ocaml.deprecated "Use Bytes.uncapitalize_ascii instead."]
* Return a copy of the argument , with the first character set to lowercase ,
using the ISO Latin-1 ( 8859 - 1 ) character set ..
@deprecated Functions operating on Latin-1 character set are deprecated .
using the ISO Latin-1 (8859-1) character set..
@deprecated Functions operating on Latin-1 character set are deprecated. *)
val uppercase_ascii : bytes -> bytes
* Return a copy of the argument , with all lowercase letters
translated to uppercase , using the US - ASCII character set .
@since 4.03.0
translated to uppercase, using the US-ASCII character set.
@since 4.03.0 *)
val lowercase_ascii : bytes -> bytes
* Return a copy of the argument , with all uppercase letters
translated to lowercase , using the US - ASCII character set .
@since 4.03.0
translated to lowercase, using the US-ASCII character set.
@since 4.03.0 *)
val capitalize_ascii : bytes -> bytes
* Return a copy of the argument , with the first character set to uppercase ,
using the US - ASCII character set .
@since 4.03.0
using the US-ASCII character set.
@since 4.03.0 *)
val uncapitalize_ascii : bytes -> bytes
* Return a copy of the argument , with the first character set to lowercase ,
using the US - ASCII character set .
@since 4.03.0
using the US-ASCII character set.
@since 4.03.0 *)
type t = bytes
val compare: t -> t -> int
val equal: t -> t -> bool
* The equality function for byte sequences .
@since 4.03.0
@since 4.03.0 *)
* { 4 Unsafe conversions ( for advanced users ) }
This section describes unsafe , low - level conversion functions
between [ bytes ] and [ string ] . They do not copy the internal data ;
used improperly , they can break the immutability invariant on
strings provided by the [ -safe - string ] option . They are available for
expert library authors , but for most purposes you should use the
always - correct { ! Bytes.to_string } and { ! Bytes.of_string } instead .
This section describes unsafe, low-level conversion functions
between [bytes] and [string]. They do not copy the internal data;
used improperly, they can break the immutability invariant on
strings provided by the [-safe-string] option. They are available for
expert library authors, but for most purposes you should use the
always-correct {!Bytes.to_string} and {!Bytes.of_string} instead.
*)
val unsafe_to_string : bytes -> string
* Unsafely convert a byte sequence into a string .
To reason about the use of [ unsafe_to_string ] , it is convenient to
consider an " ownership " discipline . A piece of code that
manipulates some data " owns " it ; there are several disjoint ownership
modes , including :
- Unique ownership : the data may be accessed and mutated
- Shared ownership : the data has several owners , that may only
access it , not mutate it .
Unique ownership is linear : passing the data to another piece of
code means giving up ownership ( we can not write the
data again ) . A unique owner may decide to make the data shared
( giving up mutation rights on it ) , but shared data may not become
uniquely - owned again .
[ unsafe_to_string s ] can only be used when the caller owns the byte
sequence [ s ] -- either uniquely or as shared immutable data . The
caller gives up ownership of [ s ] , and gains ownership of the
returned string .
There are two valid use - cases that respect this ownership
discipline :
1 . Creating a string by initializing and mutating a byte sequence
that is never changed after initialization is performed .
{ [
let : string =
let s = Bytes.create len in
for i = 0 to len - 1 do Bytes.set s i ( f i ) done ;
Bytes.unsafe_to_string s
] }
This function is safe because the byte sequence [ s ] will never be
accessed or mutated after [ unsafe_to_string ] is called . The
[ string_init ] code gives up ownership of [ s ] , and returns the
ownership of the resulting string to its caller .
Note that it would be unsafe if [ s ] was passed as an additional
parameter to the function [ f ] as it could escape this way and be
mutated in the future -- [ string_init ] would give up ownership of
[ s ] to pass it to [ f ] , and could not call [ unsafe_to_string ]
safely .
We have provided the { ! String.init } , { ! String.map } and
{ ! String.mapi } functions to cover most cases of building
new strings . You should prefer those over [ to_string ] or
[ unsafe_to_string ] whenever applicable .
2 . Temporarily giving ownership of a byte sequence to a function
that expects a uniquely owned string and returns ownership back , so
that we can mutate the sequence again after the call ended .
{ [
let bytes_length ( s : bytes ) =
String.length ( Bytes.unsafe_to_string s )
] }
In this use - case , we do not promise that [ s ] will never be mutated
after the call to [ bytes_length s ] . The { ! } function
temporarily borrows unique ownership of the byte sequence
( and sees it as a [ string ] ) , but returns this ownership back to
the caller , which may assume that [ s ] is still a valid byte
sequence after the call . Note that this is only correct because we
know that { ! } does not capture its argument -- it could
escape by a side - channel such as a memoization combinator .
The caller may not mutate [ s ] while the string is borrowed ( it has
temporarily given up ownership ) . This affects concurrent programs ,
but also higher - order functions : if [ String.length ] returned
a closure to be called later , [ s ] should not be mutated until this
closure is fully applied and returns ownership .
To reason about the use of [unsafe_to_string], it is convenient to
consider an "ownership" discipline. A piece of code that
manipulates some data "owns" it; there are several disjoint ownership
modes, including:
- Unique ownership: the data may be accessed and mutated
- Shared ownership: the data has several owners, that may only
access it, not mutate it.
Unique ownership is linear: passing the data to another piece of
code means giving up ownership (we cannot write the
data again). A unique owner may decide to make the data shared
(giving up mutation rights on it), but shared data may not become
uniquely-owned again.
[unsafe_to_string s] can only be used when the caller owns the byte
sequence [s] -- either uniquely or as shared immutable data. The
caller gives up ownership of [s], and gains ownership of the
returned string.
There are two valid use-cases that respect this ownership
discipline:
1. Creating a string by initializing and mutating a byte sequence
that is never changed after initialization is performed.
{[
let string_init len f : string =
let s = Bytes.create len in
for i = 0 to len - 1 do Bytes.set s i (f i) done;
Bytes.unsafe_to_string s
]}
This function is safe because the byte sequence [s] will never be
accessed or mutated after [unsafe_to_string] is called. The
[string_init] code gives up ownership of [s], and returns the
ownership of the resulting string to its caller.
Note that it would be unsafe if [s] was passed as an additional
parameter to the function [f] as it could escape this way and be
mutated in the future -- [string_init] would give up ownership of
[s] to pass it to [f], and could not call [unsafe_to_string]
safely.
We have provided the {!String.init}, {!String.map} and
{!String.mapi} functions to cover most cases of building
new strings. You should prefer those over [to_string] or
[unsafe_to_string] whenever applicable.
2. Temporarily giving ownership of a byte sequence to a function
that expects a uniquely owned string and returns ownership back, so
that we can mutate the sequence again after the call ended.
{[
let bytes_length (s : bytes) =
String.length (Bytes.unsafe_to_string s)
]}
In this use-case, we do not promise that [s] will never be mutated
after the call to [bytes_length s]. The {!String.length} function
temporarily borrows unique ownership of the byte sequence
(and sees it as a [string]), but returns this ownership back to
the caller, which may assume that [s] is still a valid byte
sequence after the call. Note that this is only correct because we
know that {!String.length} does not capture its argument -- it could
escape by a side-channel such as a memoization combinator.
The caller may not mutate [s] while the string is borrowed (it has
temporarily given up ownership). This affects concurrent programs,
but also higher-order functions: if [String.length] returned
a closure to be called later, [s] should not be mutated until this
closure is fully applied and returns ownership.
*)
val unsafe_of_string : string -> bytes
* Unsafely convert a shared string to a byte sequence that should
not be mutated .
The same ownership discipline that makes [ unsafe_to_string ]
correct applies to [ unsafe_of_string ] : you may use it if you were
the owner of the [ string ] value , and you will own the return
[ bytes ] in the same mode .
In practice , unique ownership of string values is extremely
difficult to reason about correctly . You should always assume
strings are shared , never uniquely owned .
For example , string literals are implicitly shared by the
compiler , so you never uniquely own them .
{ [
let incorrect = Bytes.unsafe_of_string " hello "
let s = Bytes.of_string " hello "
] }
The first declaration is incorrect , because the string literal
[ " hello " ] could be shared by the compiler with other parts of the
program , and mutating [ incorrect ] is a bug . You must always use
the second version , which performs a copy and is thus correct .
Assuming unique ownership of strings that are not string
literals , but are ( partly ) built from string literals , is also
incorrect . For example , mutating [ unsafe_of_string ( " foo " ^ s ) ]
could mutate the shared string [ " foo " ] -- assuming a rope - like
representation of strings . More generally , functions operating on
strings will assume shared ownership , they do not preserve unique
ownership . It is thus incorrect to assume unique ownership of the
result of [ unsafe_of_string ] .
The only case we have reasonable confidence is safe is if the
produced [ bytes ] is shared -- used as an immutable byte
sequence . This is possibly useful for incremental migration of
low - level programs that manipulate immutable sequences of bytes
( for example { ! Marshal.from_bytes } ) and previously used the
[ string ] type for this purpose .
not be mutated.
The same ownership discipline that makes [unsafe_to_string]
correct applies to [unsafe_of_string]: you may use it if you were
the owner of the [string] value, and you will own the return
[bytes] in the same mode.
In practice, unique ownership of string values is extremely
difficult to reason about correctly. You should always assume
strings are shared, never uniquely owned.
For example, string literals are implicitly shared by the
compiler, so you never uniquely own them.
{[
let incorrect = Bytes.unsafe_of_string "hello"
let s = Bytes.of_string "hello"
]}
The first declaration is incorrect, because the string literal
["hello"] could be shared by the compiler with other parts of the
program, and mutating [incorrect] is a bug. You must always use
the second version, which performs a copy and is thus correct.
Assuming unique ownership of strings that are not string
literals, but are (partly) built from string literals, is also
incorrect. For example, mutating [unsafe_of_string ("foo" ^ s)]
could mutate the shared string ["foo"] -- assuming a rope-like
representation of strings. More generally, functions operating on
strings will assume shared ownership, they do not preserve unique
ownership. It is thus incorrect to assume unique ownership of the
result of [unsafe_of_string].
The only case we have reasonable confidence is safe is if the
produced [bytes] is shared -- used as an immutable byte
sequence. This is possibly useful for incremental migration of
low-level programs that manipulate immutable sequences of bytes
(for example {!Marshal.from_bytes}) and previously used the
[string] type for this purpose.
*)
external unsafe_get : bytes -> int -> char = "%string_unsafe_get"
external unsafe_set : bytes -> int -> char -> unit = "%string_unsafe_set"
external unsafe_blit :
bytes -> int -> bytes -> int -> int -> unit
= "caml_blit_string" [@@noalloc]
external unsafe_fill :
bytes -> int -> int -> char -> unit = "caml_fill_string" [@@noalloc]
|
09c241f6ab3e6546c9024cd1fd30254182f7b39f0c4acf351ece0d1518db1d40 | ocaml/oasis | OASISValues.ml | (******************************************************************************)
OASIS : architecture for building OCaml libraries and applications
(* *)
Copyright ( C ) 2011 - 2016 ,
Copyright ( C ) 2008 - 2011 , OCamlCore SARL
(* *)
(* This library is free software; you can redistribute it and/or modify it *)
(* under the terms of the GNU Lesser General Public License as published by *)
the Free Software Foundation ; either version 2.1 of the License , or ( at
(* your option) any later version, with the OCaml static compilation *)
(* exception. *)
(* *)
(* This library is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *)
(* or FITNESS FOR A PARTICULAR PURPOSE. See the file COPYING for more *)
(* details. *)
(* *)
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
(******************************************************************************)
open OASISGettext
open OASISUtils
exception Not_printable
exception Not_combinable
type 'a t =
{
parse: ctxt:OASISContext.t -> string -> 'a;
update: 'a -> 'a -> 'a;
print: 'a -> string;
}
let update_fail _ _ =
raise Not_combinable
let blackbox =
{
parse =
(fun ~ctxt:_ s ->
failwithf
(f_ "Blackbox type cannot be set to the value '%s'")
s);
update = update_fail;
print = (fun _ -> raise Not_printable);
}
module StdLexer =
struct
let url = OASISValues_lexer.url
let copyright = OASISValues_lexer.copyright
let modul = OASISValues_lexer.modul
end
let lexer ?(fail=(fun ~ctxt:_ _ _ -> ())) lxr nm =
{
parse =
(fun ~ctxt str ->
try
let lexbuf = Lexing.from_string str in
let str_matched = lxr lexbuf in
if str_matched = str then
str
else
failwithf
(f_ "Only substring '%s' of '%s' is a %s")
str_matched
str
(nm ())
with e ->
fail ~ctxt str e;
(* Catch all if the previous ignore error. *)
failwithf
(f_ "String '%s' is not a %s: %s.")
str (nm ()) (Printexc.to_string e));
update = update_fail;
print = (fun s -> s);
}
let url =
lexer
StdLexer.url
(fun () -> s_ "URL")
let copyright =
let base_value =
lexer
StdLexer.copyright
(fun () -> s_ "copyright")
in
{base_value with
parse =
(fun ~ctxt str ->
try
base_value.parse ~ctxt str
with _ ->
failwithf
(f_ "Copyright must follow the convention \
'(C) 2008-2009 J.R. Hacker', here it is '%s'")
str)}
let string =
{
parse = (fun ~ctxt:_ s -> s);
update = (fun s1 s2 -> s1^" "^s2);
print = (fun s -> s);
}
let string_not_empty =
{
parse =
(fun ~ctxt:_ str ->
if str <> "" then
str
else
failwith (s_ "Expecting not empty string"));
update = (fun s1 s2 ->s1^" "^s2);
print = (fun s -> s);
}
let file =
{string_not_empty with update = update_fail}
let file_glob =
{string_not_empty with update = update_fail}
let file_pattern =
{
parse =
(fun ~ctxt:_ str ->
match OASISString.nsplit str '%' with
| [pfx; sfx] -> (pfx, sfx)
| _ -> failwith (s_ "Expecting a file pattern, containing one %."));
update = update_fail;
print = (fun (pfx, sfx) -> pfx ^ "%" ^ sfx);
}
let directory =
{string_not_empty with update = update_fail}
let expandable value =
(* TODO: check expandable value and return a list rather
* than a single value. Use split_expandable defined above.
*)
value
let dot_separated value =
{
parse =
(fun ~ctxt s ->
List.map
(value.parse ~ctxt)
(OASISString.nsplit s '.'));
update =
List.append;
print =
(fun lst ->
String.concat "."
(List.map
value.print
lst));
}
let comma_separated value =
{
parse =
(fun ~ctxt s ->
List.map
(fun s -> value.parse ~ctxt s)
(OASISString.split_comma s));
update =
List.append;
print =
(fun lst ->
String.concat ", "
(List.map
value.print
lst));
}
let newline_separated value =
{
parse =
(fun ~ctxt s ->
List.map
(fun s -> value.parse ~ctxt s)
(OASISString.split_newline s));
update =
List.append;
print =
(fun lst ->
String.concat "\n"
(List.map
value.print
lst));
}
let space_separated =
{
parse =
(fun ~ctxt:_ s ->
List.filter
(fun s -> s <> "")
(OASISString.nsplit s ' '));
update =
List.append;
print =
(fun lst ->
String.concat " " lst);
}
let with_optional_parentheses main_value optional_value =
{
parse =
(fun ~ctxt str ->
match OASISString.split_optional_parentheses str with
| e1, Some e2 ->
main_value.parse ~ctxt e1,
Some (optional_value.parse ~ctxt e2)
| e1, None ->
main_value.parse ~ctxt e1,
None);
update = update_fail;
print =
(function
| v, None ->
main_value.print v
| v, Some opt ->
Printf.sprintf "%s (%s)"
(main_value.print v)
(optional_value.print opt));
}
let opt value =
{
parse = (fun ~ctxt str -> Some (value.parse ~ctxt str));
update = update_fail;
print =
(function
| Some v -> value.print v
| None -> raise Not_printable);
}
let modules =
let base_value =
lexer
StdLexer.modul
~fail:(fun ~ctxt:_ str _ ->
if OASISString.capitalize_ascii str <> str then
failwithf
(f_ "Module name '%s', must be capitalized ('%s').")
str (OASISString.capitalize_ascii str))
(fun () -> s_ "module")
in
comma_separated
{
parse =
(fun ~ctxt s ->
let path =
OASISUnixPath.dirname s
in
let modul =
OASISUnixPath.basename s
in
if String.contains path ' ' then
failwithf
(f_ "Module path '%s' must not contain a ' '")
s;
OASISUnixPath.concat
path
(base_value.parse ~ctxt modul));
update = update_fail;
print = (fun s -> s);
}
let files =
comma_separated file
let categories =
comma_separated url
let choices nm lst =
{
parse =
(fun ~ctxt:_ str ->
try
List.assoc
(OASISString.lowercase_ascii str)
(List.map
(fun (k, v) ->
OASISString.lowercase_ascii k, v)
lst)
with Not_found ->
failwithf
(f_ "Unknown %s %S (possible: %s)")
(nm ()) str
(String.concat ", " (List.map fst lst)));
update = update_fail;
print =
(fun v ->
try
List.assoc
v
(List.map
(fun (s, v) -> v, s)
lst)
with Not_found ->
failwithf
(f_ "Unexpected abstract choice value for %s")
(nm ()));
}
let boolean =
choices
(fun () -> s_ "boolean")
["true", true; "false", false]
let findlib_name =
{
parse =
(fun ~ctxt:_ s ->
if s = "" then
failwith (s_ "Empty string is not a valid findlib package")
else if String.contains s '"' || String.contains s '.' then
failwith (s_ "Findlib package name cannot contain '.' or '\"'")
else
s);
update = update_fail;
print = (fun s -> s);
}
let findlib_full =
{
parse =
(fun ~ctxt s ->
let cpnts = OASISString.nsplit s '.' in
if cpnts = [] then
failwith (s_ "Empty string is not a valid findlib package");
List.iter (fun cpnt ->
let _s: string = findlib_name.parse ~ctxt cpnt in
())
cpnts;
s);
update = update_fail;
print = (fun s -> s);
}
let internal_library =
(* TODO: check that the library really exists *)
{string with update = update_fail}
let command_line =
let split_expandable str =
(* Add a single char to accumulator *)
let rec addchr c =
function
| Some b, _ as acc ->
Buffer.add_char b c;
acc
| None, l ->
let b =
Buffer.create 13
in
addchr c (Some b, l)
in
(* Add a separator that will end the previous
* token or do nothing if already separated
*)
let addsep =
function
| Some b, l ->
None, (Buffer.contents b) :: l
| None, l ->
None, l
in
(* Split the list of char into a list of token
* taking care of matching $( ... ) and ${ ... }
*)
let rec lookup_closing oc cc acc =
function
| c :: tl ->
let acc =
addchr c acc
in
if c = oc then
begin
let acc, tl =
lookup_closing oc cc acc tl
in
lookup_closing oc cc acc tl
end
else if c = cc then
begin
acc, tl
end
else
begin
lookup_closing oc cc acc tl
end
| [] ->
failwithf
(f_ "'%s' contains unbalanced curly braces")
str
in
let rec lookup_dollar acc =
function
| '$' :: ('(' as c) :: tl
| '$' :: ('{' as c) :: tl ->
begin
let acc, tl =
lookup_closing
c (if c = '(' then ')' else '}')
(addchr c (addchr '$' acc))
tl
in
lookup_dollar acc tl
end
| ' ' :: tl ->
lookup_dollar (addsep acc) tl
| c :: tl ->
lookup_dollar (addchr c acc) tl
| [] ->
begin
let l =
match acc with
| Some b, l -> Buffer.contents b :: l
| None, l -> l
in
List.rev l
end
in
(* Transform string into list
*)
let lst =
let rl = ref []
in
String.iter (fun c -> rl := c :: !rl) str;
List.rev !rl
in
lookup_dollar (None, []) lst
in
{
parse =
(fun ~ctxt:_ s ->
match split_expandable s with
| cmd :: args ->
cmd, args
| [] ->
failwithf (f_ "Command line '%s' is invalid") s);
update =
(fun (cmd, args1) (arg2, args3) ->
(cmd, args1 @ (arg2 :: args3)));
print =
(fun (cmd, args) ->
space_separated.print (cmd :: args))
}
let command_line_options =
{ parse = (fun ~ctxt:_ s -> POSIXShell.split s);
update = List.append;
print = (fun lst -> String.concat " " (List.map POSIXShell.escape lst));
}
| null | https://raw.githubusercontent.com/ocaml/oasis/3d1a9421db92a0882ebc58c5df219b18c1e5681d/src/oasis/OASISValues.ml | ocaml | ****************************************************************************
This library is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
your option) any later version, with the OCaml static compilation
exception.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the file COPYING for more
details.
****************************************************************************
Catch all if the previous ignore error.
TODO: check expandable value and return a list rather
* than a single value. Use split_expandable defined above.
TODO: check that the library really exists
Add a single char to accumulator
Add a separator that will end the previous
* token or do nothing if already separated
Split the list of char into a list of token
* taking care of matching $( ... ) and ${ ... }
Transform string into list
| OASIS : architecture for building OCaml libraries and applications
Copyright ( C ) 2011 - 2016 ,
Copyright ( C ) 2008 - 2011 , OCamlCore SARL
the Free Software Foundation ; either version 2.1 of the License , or ( at
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
open OASISGettext
open OASISUtils
exception Not_printable
exception Not_combinable
type 'a t =
{
parse: ctxt:OASISContext.t -> string -> 'a;
update: 'a -> 'a -> 'a;
print: 'a -> string;
}
let update_fail _ _ =
raise Not_combinable
let blackbox =
{
parse =
(fun ~ctxt:_ s ->
failwithf
(f_ "Blackbox type cannot be set to the value '%s'")
s);
update = update_fail;
print = (fun _ -> raise Not_printable);
}
module StdLexer =
struct
let url = OASISValues_lexer.url
let copyright = OASISValues_lexer.copyright
let modul = OASISValues_lexer.modul
end
let lexer ?(fail=(fun ~ctxt:_ _ _ -> ())) lxr nm =
{
parse =
(fun ~ctxt str ->
try
let lexbuf = Lexing.from_string str in
let str_matched = lxr lexbuf in
if str_matched = str then
str
else
failwithf
(f_ "Only substring '%s' of '%s' is a %s")
str_matched
str
(nm ())
with e ->
fail ~ctxt str e;
failwithf
(f_ "String '%s' is not a %s: %s.")
str (nm ()) (Printexc.to_string e));
update = update_fail;
print = (fun s -> s);
}
let url =
lexer
StdLexer.url
(fun () -> s_ "URL")
let copyright =
let base_value =
lexer
StdLexer.copyright
(fun () -> s_ "copyright")
in
{base_value with
parse =
(fun ~ctxt str ->
try
base_value.parse ~ctxt str
with _ ->
failwithf
(f_ "Copyright must follow the convention \
'(C) 2008-2009 J.R. Hacker', here it is '%s'")
str)}
let string =
{
parse = (fun ~ctxt:_ s -> s);
update = (fun s1 s2 -> s1^" "^s2);
print = (fun s -> s);
}
let string_not_empty =
{
parse =
(fun ~ctxt:_ str ->
if str <> "" then
str
else
failwith (s_ "Expecting not empty string"));
update = (fun s1 s2 ->s1^" "^s2);
print = (fun s -> s);
}
let file =
{string_not_empty with update = update_fail}
let file_glob =
{string_not_empty with update = update_fail}
let file_pattern =
{
parse =
(fun ~ctxt:_ str ->
match OASISString.nsplit str '%' with
| [pfx; sfx] -> (pfx, sfx)
| _ -> failwith (s_ "Expecting a file pattern, containing one %."));
update = update_fail;
print = (fun (pfx, sfx) -> pfx ^ "%" ^ sfx);
}
let directory =
{string_not_empty with update = update_fail}
let expandable value =
value
let dot_separated value =
{
parse =
(fun ~ctxt s ->
List.map
(value.parse ~ctxt)
(OASISString.nsplit s '.'));
update =
List.append;
print =
(fun lst ->
String.concat "."
(List.map
value.print
lst));
}
let comma_separated value =
{
parse =
(fun ~ctxt s ->
List.map
(fun s -> value.parse ~ctxt s)
(OASISString.split_comma s));
update =
List.append;
print =
(fun lst ->
String.concat ", "
(List.map
value.print
lst));
}
let newline_separated value =
{
parse =
(fun ~ctxt s ->
List.map
(fun s -> value.parse ~ctxt s)
(OASISString.split_newline s));
update =
List.append;
print =
(fun lst ->
String.concat "\n"
(List.map
value.print
lst));
}
let space_separated =
{
parse =
(fun ~ctxt:_ s ->
List.filter
(fun s -> s <> "")
(OASISString.nsplit s ' '));
update =
List.append;
print =
(fun lst ->
String.concat " " lst);
}
let with_optional_parentheses main_value optional_value =
{
parse =
(fun ~ctxt str ->
match OASISString.split_optional_parentheses str with
| e1, Some e2 ->
main_value.parse ~ctxt e1,
Some (optional_value.parse ~ctxt e2)
| e1, None ->
main_value.parse ~ctxt e1,
None);
update = update_fail;
print =
(function
| v, None ->
main_value.print v
| v, Some opt ->
Printf.sprintf "%s (%s)"
(main_value.print v)
(optional_value.print opt));
}
let opt value =
{
parse = (fun ~ctxt str -> Some (value.parse ~ctxt str));
update = update_fail;
print =
(function
| Some v -> value.print v
| None -> raise Not_printable);
}
let modules =
let base_value =
lexer
StdLexer.modul
~fail:(fun ~ctxt:_ str _ ->
if OASISString.capitalize_ascii str <> str then
failwithf
(f_ "Module name '%s', must be capitalized ('%s').")
str (OASISString.capitalize_ascii str))
(fun () -> s_ "module")
in
comma_separated
{
parse =
(fun ~ctxt s ->
let path =
OASISUnixPath.dirname s
in
let modul =
OASISUnixPath.basename s
in
if String.contains path ' ' then
failwithf
(f_ "Module path '%s' must not contain a ' '")
s;
OASISUnixPath.concat
path
(base_value.parse ~ctxt modul));
update = update_fail;
print = (fun s -> s);
}
let files =
comma_separated file
let categories =
comma_separated url
let choices nm lst =
{
parse =
(fun ~ctxt:_ str ->
try
List.assoc
(OASISString.lowercase_ascii str)
(List.map
(fun (k, v) ->
OASISString.lowercase_ascii k, v)
lst)
with Not_found ->
failwithf
(f_ "Unknown %s %S (possible: %s)")
(nm ()) str
(String.concat ", " (List.map fst lst)));
update = update_fail;
print =
(fun v ->
try
List.assoc
v
(List.map
(fun (s, v) -> v, s)
lst)
with Not_found ->
failwithf
(f_ "Unexpected abstract choice value for %s")
(nm ()));
}
let boolean =
choices
(fun () -> s_ "boolean")
["true", true; "false", false]
let findlib_name =
{
parse =
(fun ~ctxt:_ s ->
if s = "" then
failwith (s_ "Empty string is not a valid findlib package")
else if String.contains s '"' || String.contains s '.' then
failwith (s_ "Findlib package name cannot contain '.' or '\"'")
else
s);
update = update_fail;
print = (fun s -> s);
}
let findlib_full =
{
parse =
(fun ~ctxt s ->
let cpnts = OASISString.nsplit s '.' in
if cpnts = [] then
failwith (s_ "Empty string is not a valid findlib package");
List.iter (fun cpnt ->
let _s: string = findlib_name.parse ~ctxt cpnt in
())
cpnts;
s);
update = update_fail;
print = (fun s -> s);
}
let internal_library =
{string with update = update_fail}
let command_line =
let split_expandable str =
let rec addchr c =
function
| Some b, _ as acc ->
Buffer.add_char b c;
acc
| None, l ->
let b =
Buffer.create 13
in
addchr c (Some b, l)
in
let addsep =
function
| Some b, l ->
None, (Buffer.contents b) :: l
| None, l ->
None, l
in
let rec lookup_closing oc cc acc =
function
| c :: tl ->
let acc =
addchr c acc
in
if c = oc then
begin
let acc, tl =
lookup_closing oc cc acc tl
in
lookup_closing oc cc acc tl
end
else if c = cc then
begin
acc, tl
end
else
begin
lookup_closing oc cc acc tl
end
| [] ->
failwithf
(f_ "'%s' contains unbalanced curly braces")
str
in
let rec lookup_dollar acc =
function
| '$' :: ('(' as c) :: tl
| '$' :: ('{' as c) :: tl ->
begin
let acc, tl =
lookup_closing
c (if c = '(' then ')' else '}')
(addchr c (addchr '$' acc))
tl
in
lookup_dollar acc tl
end
| ' ' :: tl ->
lookup_dollar (addsep acc) tl
| c :: tl ->
lookup_dollar (addchr c acc) tl
| [] ->
begin
let l =
match acc with
| Some b, l -> Buffer.contents b :: l
| None, l -> l
in
List.rev l
end
in
let lst =
let rl = ref []
in
String.iter (fun c -> rl := c :: !rl) str;
List.rev !rl
in
lookup_dollar (None, []) lst
in
{
parse =
(fun ~ctxt:_ s ->
match split_expandable s with
| cmd :: args ->
cmd, args
| [] ->
failwithf (f_ "Command line '%s' is invalid") s);
update =
(fun (cmd, args1) (arg2, args3) ->
(cmd, args1 @ (arg2 :: args3)));
print =
(fun (cmd, args) ->
space_separated.print (cmd :: args))
}
let command_line_options =
{ parse = (fun ~ctxt:_ s -> POSIXShell.split s);
update = List.append;
print = (fun lst -> String.concat " " (List.map POSIXShell.escape lst));
}
|
0cbd0d2cfc55c503bccc1c637b4ee46d5b8455defe86f93a8a039d71adf784b6 | hspec/hspec | ParserSpec.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE ConstraintKinds #-}
module Test.Hspec.Core.Formatters.Pretty.ParserSpec (spec, Person(..)) where
import Prelude ()
import Helper
import Test.Hspec.Core.Formatters.Pretty.Parser
data Person = Person {
personName :: String
, personAge :: Int
} deriving (Eq, Show)
infix 1 `shouldParseAs`
shouldParseAs :: HasCallStack => String -> Value -> Expectation
shouldParseAs input expected = parseValue input `shouldBe` Just expected
unit :: Value
unit = Tuple []
parentheses :: Value -> Value
parentheses value = Tuple [value]
spec :: Spec
spec = do
describe "parseValue" $ do
it "parses unit" $ do
show () `shouldParseAs` unit
it "parses characters" $ do
show 'c' `shouldParseAs` Char 'c'
it "parses strings" $ do
show "foo" `shouldParseAs` String "foo"
it "accepts rationals" $ do
show (0.5 :: Rational) `shouldParseAs` Rational (Number "1") (Number "2")
it "accepts negative rationals" $ do
show (-0.5 :: Rational) `shouldParseAs` Rational (parentheses $ Number "-1") (Number "2")
it "accepts integers" $ do
"23" `shouldParseAs` Number "23"
it "accepts negative integers" $ do
"-23" `shouldParseAs` Number "-23"
it "accepts floats" $ do
show (23.0 :: Float) `shouldParseAs` Number "23.0"
it "accepts negative floats" $ do
show (-23.0 :: Float) `shouldParseAs` Number "-23.0"
it "parses lists" $ do
show ["foo", "bar", "baz"] `shouldParseAs` List [String "foo", String "bar", String "baz"]
it "parses tuples" $ do
show ("foo", "bar", "baz") `shouldParseAs` Tuple [String "foo", String "bar", String "baz"]
it "parses Nothing" $ do
show (Nothing :: Maybe Int) `shouldParseAs` Constructor "Nothing" []
it "parses Just" $ do
show (Just "foo") `shouldParseAs` Constructor "Just" [String "foo"]
it "parses nested Just" $ do
show (Just $ Just "foo") `shouldParseAs` Constructor "Just" [parentheses (Constructor "Just" [String "foo"])]
it "parses records" $ do
let person = Person "Joe" 23
show person `shouldParseAs` Record "Person" [
("personName", String "Joe")
, ("personAge", Number "23")
]
| null | https://raw.githubusercontent.com/hspec/hspec/58f95102acd41780c3bff2320ea49652e3247447/hspec-core/test/Test/Hspec/Core/Formatters/Pretty/ParserSpec.hs | haskell | # LANGUAGE ConstraintKinds # | # LANGUAGE FlexibleContexts #
module Test.Hspec.Core.Formatters.Pretty.ParserSpec (spec, Person(..)) where
import Prelude ()
import Helper
import Test.Hspec.Core.Formatters.Pretty.Parser
data Person = Person {
personName :: String
, personAge :: Int
} deriving (Eq, Show)
infix 1 `shouldParseAs`
shouldParseAs :: HasCallStack => String -> Value -> Expectation
shouldParseAs input expected = parseValue input `shouldBe` Just expected
unit :: Value
unit = Tuple []
parentheses :: Value -> Value
parentheses value = Tuple [value]
spec :: Spec
spec = do
describe "parseValue" $ do
it "parses unit" $ do
show () `shouldParseAs` unit
it "parses characters" $ do
show 'c' `shouldParseAs` Char 'c'
it "parses strings" $ do
show "foo" `shouldParseAs` String "foo"
it "accepts rationals" $ do
show (0.5 :: Rational) `shouldParseAs` Rational (Number "1") (Number "2")
it "accepts negative rationals" $ do
show (-0.5 :: Rational) `shouldParseAs` Rational (parentheses $ Number "-1") (Number "2")
it "accepts integers" $ do
"23" `shouldParseAs` Number "23"
it "accepts negative integers" $ do
"-23" `shouldParseAs` Number "-23"
it "accepts floats" $ do
show (23.0 :: Float) `shouldParseAs` Number "23.0"
it "accepts negative floats" $ do
show (-23.0 :: Float) `shouldParseAs` Number "-23.0"
it "parses lists" $ do
show ["foo", "bar", "baz"] `shouldParseAs` List [String "foo", String "bar", String "baz"]
it "parses tuples" $ do
show ("foo", "bar", "baz") `shouldParseAs` Tuple [String "foo", String "bar", String "baz"]
it "parses Nothing" $ do
show (Nothing :: Maybe Int) `shouldParseAs` Constructor "Nothing" []
it "parses Just" $ do
show (Just "foo") `shouldParseAs` Constructor "Just" [String "foo"]
it "parses nested Just" $ do
show (Just $ Just "foo") `shouldParseAs` Constructor "Just" [parentheses (Constructor "Just" [String "foo"])]
it "parses records" $ do
let person = Person "Joe" 23
show person `shouldParseAs` Record "Person" [
("personName", String "Joe")
, ("personAge", Number "23")
]
|
be6643b6884aab1ea374378121f1e680af9bfbabaa7470494b418d0c5ee26cf8 | metosin/eines | client.cljs | (ns eines.client
(:require [cognitect.transit :as t]
[eines.impl :as i]))
;;
;; Defaults:
;;
(def default-url (-> js/window.location.protocol
{"http:" "ws:", "https:" "wss:"}
(str "//" js/window.location.host "/ws")))
(def default-options {:on-message identity
:on-connect identity
:on-close identity
:on-error identity
:url default-url
:format :transit+json})
;;
;; Send message to server:
;;
(defn send!
([message]
(send! message nil nil))
([message response-fn]
(send! message response-fn 5000))
([message response-fn timeout]
(let [{:keys [socket pack]} @i/state]
(if socket
(let [message (assoc message :type :eines.type/request)
message (if response-fn
(assoc-in message [:headers :eines/rsvp-request-id] (i/rsvp-request-id response-fn timeout))
message)]
(.send socket (pack message)))
(js/console.error "eines.client/send!: socket is closed")))))
;;
Init WebSocket :
;;
(defn init! [opts]
(let [opts (merge default-options opts)
pack (i/create-packer (-> opts :transit :writer))
unpack (i/create-unpacker (-> opts :transit :reader))]
(swap! i/state i/reset-state (merge opts {:pack pack, :unpack unpack})))
(i/connect!))
;;
;; Helpers:
;;
(defn timeout? [message]
(-> message :type (= :eines.type/timeout)))
(defn success? [message]
(-> message :type #{:eines.type/response :eines.type/request} boolean))
| null | https://raw.githubusercontent.com/metosin/eines/e293d0a3b29eb18fb20bdf0c234cd898e7b87ac9/modules/eines-client/src/eines/client.cljs | clojure |
Defaults:
Send message to server:
Helpers:
| (ns eines.client
(:require [cognitect.transit :as t]
[eines.impl :as i]))
(def default-url (-> js/window.location.protocol
{"http:" "ws:", "https:" "wss:"}
(str "//" js/window.location.host "/ws")))
(def default-options {:on-message identity
:on-connect identity
:on-close identity
:on-error identity
:url default-url
:format :transit+json})
(defn send!
([message]
(send! message nil nil))
([message response-fn]
(send! message response-fn 5000))
([message response-fn timeout]
(let [{:keys [socket pack]} @i/state]
(if socket
(let [message (assoc message :type :eines.type/request)
message (if response-fn
(assoc-in message [:headers :eines/rsvp-request-id] (i/rsvp-request-id response-fn timeout))
message)]
(.send socket (pack message)))
(js/console.error "eines.client/send!: socket is closed")))))
Init WebSocket :
(defn init! [opts]
(let [opts (merge default-options opts)
pack (i/create-packer (-> opts :transit :writer))
unpack (i/create-unpacker (-> opts :transit :reader))]
(swap! i/state i/reset-state (merge opts {:pack pack, :unpack unpack})))
(i/connect!))
(defn timeout? [message]
(-> message :type (= :eines.type/timeout)))
(defn success? [message]
(-> message :type #{:eines.type/response :eines.type/request} boolean))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.